diff --git a/frameworks/C/h2o/h2o.dockerfile b/frameworks/C/h2o/h2o.dockerfile index 976c938337a..109c5e4375a 100644 --- a/frameworks/C/h2o/h2o.dockerfile +++ b/frameworks/C/h2o/h2o.dockerfile @@ -6,19 +6,28 @@ FROM "ubuntu:${UBUNTU_VERSION}" AS compile ARG DEBIAN_FRONTEND=noninteractive RUN apt-get -yqq update && \ + apt-get -yqq install \ + ca-certificates \ + curl \ + lsb-release && \ + install -dm755 /usr/share/postgresql-common/pgdg && \ + curl --fail -LSso /usr/share/postgresql-common/pgdg/apt.postgresql.org.asc \ + "https://www.postgresql.org/media/keys/ACCC4CF8.asc" && \ + sh -c 'echo "deb [signed-by=/usr/share/postgresql-common/pgdg/apt.postgresql.org.asc] \ + https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > \ + /etc/apt/sources.list.d/pgdg.list' && \ + apt-get -yqq update && \ apt-get -yqq install \ autoconf \ bison \ cmake \ - curl \ flex \ g++ \ libbpfcc-dev \ libbrotli-dev \ libcap-dev \ - libicu-dev \ libnuma-dev \ - libreadline-dev \ + libpq-dev \ libssl-dev \ libtool \ libuv1-dev \ @@ -57,18 +66,6 @@ RUN curl -LSs "https://github.com/x86-64/mustache-c/archive/${MUSTACHE_C_REVISIO CFLAGS="-flto -march=native -mtune=native -O3" ./autogen.sh && \ make -j "$(nproc)" install -ARG POSTGRESQL_VERSION=a37bb7c13995b834095d9d064cad1023a6f99b10 - -WORKDIR /tmp/postgresql-build -RUN curl -LSs "https://github.com/postgres/postgres/archive/${POSTGRESQL_VERSION}.tar.gz" | \ - tar --strip-components=1 -xz && \ - CFLAGS="-flto -march=native -mtune=native -O3" ./configure \ - --includedir=/usr/local/include/postgresql \ - --prefix=/usr/local \ - --with-ssl=openssl && \ - make -j "$(nproc)" -C src/include install && \ - make -j "$(nproc)" -C src/interfaces/libpq install - ARG H2O_APP_PREFIX WORKDIR /tmp/build COPY CMakeLists.txt ../ @@ -85,15 +82,28 @@ RUN cmake \ FROM "ubuntu:${UBUNTU_VERSION}" +ARG POSTGRESQL_VERSION=17 + ARG DEBIAN_FRONTEND=noninteractive RUN apt-get -yqq update && \ + apt-get -yqq install \ + ca-certificates \ + curl \ + lsb-release && \ + install -dm755 /usr/share/postgresql-common/pgdg && \ + curl --fail -LSso /usr/share/postgresql-common/pgdg/apt.postgresql.org.asc \ + "https://www.postgresql.org/media/keys/ACCC4CF8.asc" && \ + sh -c 'echo "deb [signed-by=/usr/share/postgresql-common/pgdg/apt.postgresql.org.asc] \ + https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > \ + /etc/apt/sources.list.d/pgdg.list' && \ + apt-get -yqq update && \ apt-get -yqq install \ libnuma1 \ - libyajl2 + libyajl2 \ + "postgresql-client-${POSTGRESQL_VERSION}" ARG H2O_APP_PREFIX COPY --from=compile "${H2O_APP_PREFIX}" "${H2O_APP_PREFIX}/" COPY --from=compile /usr/local/lib/libmustache_c.so "${H2O_APP_PREFIX}/lib/" -COPY --from=compile /usr/local/lib/libpq.so.5.17 "${H2O_APP_PREFIX}/lib/libpq.so.5" ENV LD_LIBRARY_PATH="${H2O_APP_PREFIX}/lib" EXPOSE 8080 ARG BENCHMARK_ENV diff --git a/frameworks/CSharp/appmpower/src/appMpower.Orm/Data/DbCommand.cs b/frameworks/CSharp/appmpower/src/appMpower.Orm/Data/DbCommand.cs index a580aa5e110..0dfe4581906 100644 --- a/frameworks/CSharp/appmpower/src/appMpower.Orm/Data/DbCommand.cs +++ b/frameworks/CSharp/appmpower/src/appMpower.Orm/Data/DbCommand.cs @@ -20,12 +20,6 @@ public DbCommand(string commandText, DbConnection dbConnection) _dbConnection = dbConnection; } - public DbCommand(string commandText, DbConnection dbConnection, bool keyed) - { - _odbcCommand = dbConnection.GetCommand(commandText, CommandType.Text, keyed); - _dbConnection = dbConnection; - } - public DbCommand(string commandText, CommandType 
commandType, DbConnection dbConnection) { _odbcCommand = dbConnection.GetCommand(commandText, commandType); @@ -175,7 +169,7 @@ public IDataReader ExecuteReader() public async Task<int> ExecuteNonQueryAsync() { - return await (_odbcCommand as System.Data.Common.DbCommand).ExecuteNonQueryAsync(); + return await _odbcCommand.ExecuteNonQueryAsync(); } public IDataReader ExecuteReader(CommandBehavior behavior) @@ -183,6 +177,11 @@ public IDataReader ExecuteReader(CommandBehavior behavior) return _odbcCommand.ExecuteReader(behavior); } + public async Task<System.Data.Common.DbDataReader> ExecuteReaderAsync(CommandBehavior behavior) + { + return await _odbcCommand.ExecuteReaderAsync(behavior); + } + #nullable enable public object? ExecuteScalar() { @@ -197,8 +196,7 @@ public void Prepare() public void Dispose() { - if (_dbConnection._keyed) _dbConnection._keyedOdbcCommands.TryAdd(_odbcCommand.CommandText, _odbcCommand); - else _dbConnection._odbcCommands.Push(_odbcCommand); + _dbConnection.Release(_odbcCommand); } } } \ No newline at end of file diff --git a/frameworks/CSharp/appmpower/src/appMpower.Orm/Data/DbConnection.cs b/frameworks/CSharp/appmpower/src/appMpower.Orm/Data/DbConnection.cs index ba3b6cf85b6..1a9c5ae0355 100644 --- a/frameworks/CSharp/appmpower/src/appMpower.Orm/Data/DbConnection.cs +++ b/frameworks/CSharp/appmpower/src/appMpower.Orm/Data/DbConnection.cs @@ -1,26 +1,27 @@ using System.Collections.Concurrent; using System.Data; -using System.Data.Odbc; +using System.Data.Odbc; namespace appMpower.Orm.Data { public class DbConnection : IDbConnection { private string _connectionString; - internal bool _keyed = false; - internal int _number; - internal OdbcConnection _odbcConnection; - internal ConcurrentStack<OdbcCommand> _odbcCommands = new(); - internal Dictionary<string, OdbcCommand> _keyedOdbcCommands; + private bool _keyed = false; + private int _number; + private OdbcConnection _odbcConnection; + private ConcurrentStack<OdbcCommand> _odbcCommands = new(); + private Dictionary<string, OdbcCommand> _keyedOdbcCommands; public DbConnection() { - _connectionString = DbProviderFactory.ConnectionString; } - public DbConnection(string connectionString) + public DbConnection(string connectionString, bool keyed = false) { - _connectionString = connectionString; + _keyed = keyed; + _connectionString = connectionString; + GetConnection(); } public IDbConnection Connection @@ -43,7 +44,22 @@ public string ConnectionString } set { - _odbcConnection.ConnectionString = value; + _connectionString = value; + GetConnection(); + } + } + + private void GetConnection() + { + if (_keyed) + { + (_number, _odbcConnection, _keyedOdbcCommands) = + DbConnectionsKeyed.GetConnectionBase(_connectionString).GetAwaiter().GetResult(); + } + else + { + (_number, _odbcConnection, _odbcCommands) = + DbConnections.GetConnectionBase(_connectionString).GetAwaiter().GetResult(); } } @@ -99,23 +115,33 @@ public IDbCommand CreateCommand() public void Open() { - if (_odbcConnection is null) + if (_odbcConnection.State == ConnectionState.Closed) { - DbConnections.GetConnection(_connectionString, this); + _odbcConnection.Open(); } + } + public async Task OpenAsync() + { if (_odbcConnection.State == ConnectionState.Closed) { - _odbcConnection.Open(); + await _odbcConnection.OpenAsync(); } } public void Dispose() { - DbConnections.Release(this); + if (_keyed) + { + DbConnectionsKeyed.Release((Number: _number, OdbcConnection: _odbcConnection, KeyedOdbcCommands: _keyedOdbcCommands)); + } + else + { + DbConnections.Release((Number: _number, OdbcConnection: _odbcConnection, OdbcCommands: _odbcCommands)); + } } - internal
OdbcCommand GetCommand(string commandText, CommandType commandType, bool keyed = false) + internal OdbcCommand GetCommand(string commandText, CommandType commandType) { OdbcCommand odbcCommand; @@ -129,25 +155,20 @@ internal OdbcCommand GetCommand(string commandText, CommandType commandType, boo return odbcCommand; } - else if (_keyed && _keyedOdbcCommands.TryGetValue(commandText, out odbcCommand)) - { - return odbcCommand; - } - else - { - if (!_keyed && keyed) - { - _keyedOdbcCommands = new(); - _keyed = keyed; - } - - odbcCommand = _odbcConnection.CreateCommand(); - odbcCommand.CommandText = commandText; - odbcCommand.CommandType = commandType; - odbcCommand.Prepare(); + else if (_keyed && _keyedOdbcCommands.TryGetValue(commandText, out odbcCommand)) return odbcCommand; - return odbcCommand; - } + odbcCommand = _odbcConnection.CreateCommand(); + odbcCommand.CommandText = commandText; + odbcCommand.CommandType = commandType; + odbcCommand.Prepare(); + + return odbcCommand; + } + + internal void Release(OdbcCommand odbcCommand) + { + if (_keyed) _keyedOdbcCommands.TryAdd(odbcCommand.CommandText, odbcCommand); + else _odbcCommands.Push(odbcCommand); } } } \ No newline at end of file diff --git a/frameworks/CSharp/appmpower/src/appMpower.Orm/Data/DbConnections.cs b/frameworks/CSharp/appmpower/src/appMpower.Orm/Data/DbConnections.cs index 514e2945d38..b278e54fa57 100644 --- a/frameworks/CSharp/appmpower/src/appMpower.Orm/Data/DbConnections.cs +++ b/frameworks/CSharp/appmpower/src/appMpower.Orm/Data/DbConnections.cs @@ -1,61 +1,61 @@ using System.Collections.Concurrent; +using System.Data.Odbc; namespace appMpower.Orm.Data { - public static class DbConnections + internal static class DbConnections { + private static bool _maxConnectionsCreated = false; private static short _createdConnections = 0; - private static ConcurrentStack<DbConnection> _connectionsStack = new(); + private static short _maxConnections = 500; - public static DbConnection GetConnection(string connectionString) + private static ConcurrentStack<(int Number, OdbcConnection OdbcConnection, ConcurrentStack<OdbcCommand> OdbcCommands)> _connectionsStack = new(); + private static ConcurrentQueue<TaskCompletionSource<(int Number, OdbcConnection OdbcConnection, ConcurrentStack<OdbcCommand> OdbcCommands)>> _waitingQueue = new(); + + internal static async Task<(int Number, OdbcConnection OdbcConnection, ConcurrentStack<OdbcCommand> OdbcCommands)> GetConnectionBase(string connectionString) { - DbConnection popDbConnection; + (int Number, OdbcConnection OdbcConnection, ConcurrentStack<OdbcCommand> OdbcCommands) dbConnectionBase; - if (!_connectionsStack.TryPop(out popDbConnection)) + if (!_connectionsStack.TryPop(out dbConnectionBase)) { - popDbConnection = new DbConnection(); - popDbConnection._odbcConnection = new System.Data.Odbc.OdbcConnection(connectionString); + if (_maxConnectionsCreated) + { + dbConnectionBase = await GetDbConnectionBaseAsync(); + } + else + { + _createdConnections++; + dbConnectionBase = (Number: _maxConnections, OdbcConnection: new OdbcConnection(connectionString), OdbcCommands: new ConcurrentStack<OdbcCommand>()); - _createdConnections++; - popDbConnection._number = _createdConnections; + if (_createdConnections == _maxConnections) _maxConnectionsCreated = true; - if (_createdConnections % 25 == 0) - { - Console.WriteLine("Pooled connections created: " + _createdConnections.ToString()); + //Console.WriteLine("opened connection number: " + dbConnectionBase._number); } } - return popDbConnection; + return dbConnectionBase; } - - public static void GetConnection(string connectionString, DbConnection dbConnection) + internal static void Release((int Number,
OdbcConnection OdbcConnection, ConcurrentStack<OdbcCommand> OdbcCommands) dbConnectionBase) { - DbConnection popDbConnection = null; + TaskCompletionSource<(int Number, OdbcConnection OdbcConnection, ConcurrentStack<OdbcCommand> OdbcCommands)> taskCompletionSource; - if (_connectionsStack.TryPop(out popDbConnection)) + if (_waitingQueue.TryDequeue(out taskCompletionSource)) { - dbConnection._odbcConnection = popDbConnection._odbcConnection; - dbConnection._odbcCommands = popDbConnection._odbcCommands; - dbConnection._number = popDbConnection._number; + taskCompletionSource.SetResult(dbConnectionBase); } else { - dbConnection._odbcConnection = new System.Data.Odbc.OdbcConnection(connectionString); - - _createdConnections++; - dbConnection._number = _createdConnections; - - if (_createdConnections % 25 == 0) - { - Console.WriteLine("Pooled connections created: " + _createdConnections.ToString()); - } - } + _connectionsStack.Push(dbConnectionBase); + } } - public static void Release(DbConnection dbConnection) + private static Task<(int Number, OdbcConnection OdbcConnection, ConcurrentStack<OdbcCommand> OdbcCommands)> GetDbConnectionBaseAsync() { - _connectionsStack.Push(dbConnection); + var taskCompletionSource = new TaskCompletionSource<(int Number, OdbcConnection OdbcConnection, ConcurrentStack<OdbcCommand> OdbcCommands)>(TaskCreationOptions.RunContinuationsAsynchronously); + + _waitingQueue.Enqueue(taskCompletionSource); + return taskCompletionSource.Task; } } } \ No newline at end of file diff --git a/frameworks/CSharp/appmpower/src/appMpower.Orm/Data/DbConnectionsKeyed.cs b/frameworks/CSharp/appmpower/src/appMpower.Orm/Data/DbConnectionsKeyed.cs new file mode 100644 index 00000000000..25cd4dcbab7 --- /dev/null +++ b/frameworks/CSharp/appmpower/src/appMpower.Orm/Data/DbConnectionsKeyed.cs @@ -0,0 +1,61 @@ +using System.Collections.Concurrent; +using System.Data.Odbc; + +namespace appMpower.Orm.Data +{ + internal static class DbConnectionsKeyed + { + private static bool _maxConnectionsCreated = false; + private static short _createdConnections = 0; + private static short _maxConnections = 500; + + private static ConcurrentStack<(int Number, OdbcConnection OdbcConnection, Dictionary<string, OdbcCommand>)> _connectionsStack = new(); + private static ConcurrentQueue<TaskCompletionSource<(int Number, OdbcConnection OdbcConnection, Dictionary<string, OdbcCommand>)>> _waitingQueue = new(); + + internal static async Task<(int Number, OdbcConnection OdbcConnection, Dictionary<string, OdbcCommand> KeyedOdbcCommands)> GetConnectionBase(string connectionString) + { + (int Number, OdbcConnection OdbcConnection, Dictionary<string, OdbcCommand> KeyedOdbcCommands) dbConnectionBase; + + if (!_connectionsStack.TryPop(out dbConnectionBase)) + { + if (_maxConnectionsCreated) + { + dbConnectionBase = await GetDbConnectionBaseAsync(); + } + else + { + _createdConnections++; + dbConnectionBase = (Number: _maxConnections, OdbcConnection: new OdbcConnection(connectionString), KeyedOdbcCommands: new Dictionary<string, OdbcCommand>()); + + if (_createdConnections == _maxConnections) _maxConnectionsCreated = true; + + //Console.WriteLine("opened connection number: " + dbConnectionBase._number); + } + } + + return dbConnectionBase; + } + + internal static void Release((int Number, OdbcConnection OdbcConnection, Dictionary<string, OdbcCommand> KeyedOdbcCommands) dbConnectionBase) + { + TaskCompletionSource<(int Number, OdbcConnection OdbcConnection, Dictionary<string, OdbcCommand>)> taskCompletionSource; + + if (_waitingQueue.TryDequeue(out taskCompletionSource)) + { + taskCompletionSource.SetResult(dbConnectionBase); + } + else + { + _connectionsStack.Push(dbConnectionBase); + } + } + + private static Task<(int Number, OdbcConnection OdbcConnection, Dictionary<string, OdbcCommand>)>
GetDbConnectionBaseAsync() + { + var taskCompletionSource = new TaskCompletionSource<(int Number, OdbcConnection OdbcConnection, Dictionary<string, OdbcCommand>)>(TaskCreationOptions.RunContinuationsAsynchronously); + + _waitingQueue.Enqueue(taskCompletionSource); + return taskCompletionSource.Task; + } + } +} \ No newline at end of file diff --git a/frameworks/CSharp/appmpower/src/appMpower.Orm/DotnetMethods.cs b/frameworks/CSharp/appmpower/src/appMpower.Orm/DotnetMethods.cs index f6b8b4cad1c..5ea4ce43fa2 100644 --- a/frameworks/CSharp/appmpower/src/appMpower.Orm/DotnetMethods.cs +++ b/frameworks/CSharp/appmpower/src/appMpower.Orm/DotnetMethods.cs @@ -21,7 +21,7 @@ public static class DotnetMethods public static byte[] Db() { - var world = RawDb.LoadSingleQueryRow(); + var world = RawDb.LoadSingleQueryRow().GetAwaiter().GetResult(); var memoryStream = new MemoryStream(); using var utf8JsonWriter = new Utf8JsonWriter(memoryStream, _jsonWriterOptions); @@ -33,7 +33,7 @@ public static byte[] Db() public static byte[] Query(int queries) { - World[] worlds = RawDb.ReadMultipleRows(queries); + World[] worlds = RawDb.ReadMultipleRows(queries).GetAwaiter().GetResult(); var memoryStream = new MemoryStream(); using var utf8JsonWriter = new Utf8JsonWriter(memoryStream, _jsonWriterOptions); @@ -45,7 +45,7 @@ public static byte[] Query(int queries) public static byte[] Updates(int count) { - World[] worlds = RawDb.LoadMultipleUpdatesRows(count); + World[] worlds = RawDb.LoadMultipleUpdatesRows(count).GetAwaiter().GetResult(); var memoryStream = new MemoryStream(); using var utf8JsonWriter = new Utf8JsonWriter(memoryStream, _jsonWriterOptions); @@ -57,7 +57,7 @@ public static byte[] Updates(int count) public static byte[] Fortunes() { - List<Fortune> fortunes = RawDb.LoadFortunesRows(); + List<Fortune> fortunes = RawDb.LoadFortunesRows().GetAwaiter().GetResult(); string fortunesView = FortunesView.Render(fortunes); byte[] byteArray = Encoding.UTF8.GetBytes(fortunesView); diff --git a/frameworks/CSharp/appmpower/src/appMpower.Orm/NativeMethods.cs b/frameworks/CSharp/appmpower/src/appMpower.Orm/NativeMethods.cs index 0f3ee5b59c2..64337e6269e 100644 --- a/frameworks/CSharp/appmpower/src/appMpower.Orm/NativeMethods.cs +++ b/frameworks/CSharp/appmpower/src/appMpower.Orm/NativeMethods.cs @@ -42,7 +42,7 @@ public static void FreeHandlePointer(IntPtr handlePointer) [UnmanagedCallersOnly(EntryPoint = "Db")] public static unsafe IntPtr Db(int* length, IntPtr* handlePointer) { - var world = RawDb.LoadSingleQueryRow(); + var world = RawDb.LoadSingleQueryRow().GetAwaiter().GetResult(); var memoryStream = new MemoryStream(); using var utf8JsonWriter = new
Utf8JsonWriter(memoryStream, _jsonWriterOptions); @@ -69,7 +69,7 @@ public static unsafe IntPtr Db(int* length, IntPtr* handlePointer) [UnmanagedCallersOnly(EntryPoint = "Fortunes")] public static unsafe IntPtr Fortunes(int* length, IntPtr* handlePointer) { - List<Fortune> fortunes = RawDb.LoadFortunesRows(); + List<Fortune> fortunes = RawDb.LoadFortunesRows().GetAwaiter().GetResult(); string fortunesView = FortunesView.Render(fortunes); byte[] byteArray = Encoding.UTF8.GetBytes(fortunesView); @@ -85,7 +85,7 @@ public static unsafe IntPtr Fortunes(int* length, IntPtr* handlePointer) [UnmanagedCallersOnly(EntryPoint = "Query")] public static unsafe IntPtr Query(int queries, int* length, IntPtr* handlePointer) { - World[] worlds = RawDb.ReadMultipleRows(queries); + World[] worlds = RawDb.ReadMultipleRows(queries).GetAwaiter().GetResult(); var memoryStream = new MemoryStream(); using var utf8JsonWriter = new Utf8JsonWriter(memoryStream, _jsonWriterOptions); @@ -105,7 +105,7 @@ public static unsafe IntPtr Query(int queries, int* length, IntPtr* handlePointe [UnmanagedCallersOnly(EntryPoint = "Updates")] public static unsafe IntPtr Updates(int count, int* length, IntPtr* handlePointer) { - World[] worlds = RawDb.LoadMultipleUpdatesRows(count); + World[] worlds = RawDb.LoadMultipleUpdatesRows(count).GetAwaiter().GetResult(); var memoryStream = new MemoryStream(); using var utf8JsonWriter = new Utf8JsonWriter(memoryStream, _jsonWriterOptions); @@ -125,7 +125,7 @@ public static unsafe IntPtr Updates(int count, int* length, IntPtr* handlePointe [UnmanagedCallersOnly(EntryPoint = "DbById")] public static unsafe IntPtr DbById(int id, int* length, IntPtr* handlePointer) { - var world = RawDb.LoadSingleQueryRowById(id); + var world = RawDb.LoadSingleQueryRowById(id).GetAwaiter().GetResult(); var memoryStream = new MemoryStream(); using var utf8JsonWriter = new Utf8JsonWriter(memoryStream, _jsonWriterOptions); diff --git a/frameworks/CSharp/appmpower/src/appMpower.Orm/RawDb.cs b/frameworks/CSharp/appmpower/src/appMpower.Orm/RawDb.cs index b748d78a251..fec986f8755 100644 --- a/frameworks/CSharp/appmpower/src/appMpower.Orm/RawDb.cs +++ b/frameworks/CSharp/appmpower/src/appMpower.Orm/RawDb.cs @@ -13,42 +13,42 @@ public static class RawDb private static string[] _queriesMultipleRows = new string[MaxBatch + 1]; - public static World LoadSingleQueryRow() + public static async Task<World> LoadSingleQueryRow() { - using var pooledConnection = DbConnections.GetConnection(DbProviderFactory.ConnectionString); - pooledConnection.Open(); + using var pooledConnection = new DbConnection(DbProviderFactory.ConnectionString); + await pooledConnection.OpenAsync(); var (dbCommand, _) = CreateReadCommand(pooledConnection); using (dbCommand) { - World world = ReadSingleRow(dbCommand); + World world = await ReadSingleRow(dbCommand); return world; } } - public static World LoadSingleQueryRowById(int id) + public static async Task<World> LoadSingleQueryRowById(int id) { - using var pooledConnection = DbConnections.GetConnection(DbProviderFactory.ConnectionString); - pooledConnection.Open(); + using var pooledConnection = new DbConnection(DbProviderFactory.ConnectionString); + await pooledConnection.OpenAsync(); var (dbCommand, _) = CreateReadCommandById(pooledConnection, id); using (dbCommand) { - World world = ReadSingleRow(dbCommand); + World world = await ReadSingleRow(dbCommand); return world; } } - public static World[] LoadMultipleQueriesRows(int count) + public static async Task<World[]> LoadMultipleQueriesRows(int count) { var worlds = new World[count]; - using var pooledConnection = DbConnections.GetConnection(DbProviderFactory.ConnectionString); - pooledConnection.Open(); + using var pooledConnection = new DbConnection(DbProviderFactory.ConnectionString); + await pooledConnection.OpenAsync(); var (dbCommand, dbDataParameter) = CreateReadCommand(pooledConnection); @@ -56,7 +56,7 @@ public static World[] LoadMultipleQueriesRows(int count) { for (int i = 0; i < count; i++) { - worlds[i] = ReadSingleRow(dbCommand); + worlds[i] = await ReadSingleRow(dbCommand); dbDataParameter.Value = _random.Next(1, 10001); } } @@ -64,18 +64,18 @@ public static World[] LoadMultipleQueriesRows(int count) return worlds; } - public static List<Fortune> LoadFortunesRows() + public static async Task<List<Fortune>> LoadFortunesRows() { var fortunes = new List<Fortune>(); - using var pooledConnection = DbConnections.GetConnection(DbProviderFactory.ConnectionString); - pooledConnection.Open(); + using
var pooledConnection = new DbConnection(DbProviderFactory.ConnectionString); + await pooledConnection.OpenAsync(); var dbCommand = new DbCommand("SELECT * FROM fortune", pooledConnection); using (dbCommand) { - IDataReader dataReader = dbCommand.ExecuteReader(CommandBehavior.SingleResult & CommandBehavior.SequentialAccess); + IDataReader dataReader = await dbCommand.ExecuteReaderAsync(CommandBehavior.SingleResult & CommandBehavior.SequentialAccess); while (dataReader.Read()) { @@ -97,25 +97,25 @@ public static List<Fortune> LoadFortunesRows() return fortunes; } - public static World[] LoadMultipleUpdatesRows(int count) + public static async Task<World[]> LoadMultipleUpdatesRows(int count) { var worlds = new World[count]; - using var pooledConnection = DbConnections.GetConnection(DbProviderFactory.ConnectionString); - pooledConnection.Open(); + using var pooledConnection = new DbConnection(DbProviderFactory.ConnectionString, true); + await pooledConnection.OpenAsync(); - var (queryCommand, dbDataParameter) = CreateReadCommand(pooledConnection, true); + var (queryCommand, dbDataParameter) = CreateReadCommand(pooledConnection); using (queryCommand) { for (int i = 0; i < count; i++) { - worlds[i] = ReadSingleRow(queryCommand); + worlds[i] = await ReadSingleRow(queryCommand); dbDataParameter.Value = _random.Next(1, 10001); } } - using var updateCommand = new DbCommand(BatchUpdateString.Query(count), pooledConnection, true); + using var updateCommand = new DbCommand(BatchUpdateString.Query(count), pooledConnection); var ids = BatchUpdateString.Ids; var randoms = BatchUpdateString.Randoms; @@ -152,13 +152,6 @@ internal static (DbCommand dbCommand, IDbDataParameter dbDataParameter) CreateRe return (dbCommand, dbCommand.CreateParameter("Id", DbType.Int32, _random.Next(1, 10001))); } - internal static (DbCommand dbCommand, IDbDataParameter dbDataParameter) CreateReadCommand(DbConnection pooledConnection, bool keyed) - { - DbCommand dbCommand = new DbCommand("SELECT * FROM world WHERE id=?", pooledConnection, keyed); - - return (dbCommand, dbCommand.CreateParameter("Id", DbType.Int32, _random.Next(1, 10001))); - } - internal static (DbCommand dbCommand, IDbDataParameter dbDataParameter) CreateReadCommandById(DbConnection pooledConnection, int id) { DbCommand dbCommand = new DbCommand("SELECT * FROM world WHERE id=?", pooledConnection); @@ -166,9 +159,9 @@ internal static (DbCommand dbCommand, IDbDataParameter dbDataParameter) CreateRe return (dbCommand, dbCommand.CreateParameter("Id", DbType.Int32, id)); } - internal static World ReadSingleRow(DbCommand dbCommand) + internal static async Task<World> ReadSingleRow(DbCommand dbCommand) { - var dataReader = dbCommand.ExecuteReader(CommandBehavior.SingleRow & CommandBehavior.SequentialAccess); + var dataReader = await dbCommand.ExecuteReaderAsync(CommandBehavior.SingleRow & CommandBehavior.SequentialAccess); dataReader.Read(); @@ -183,7 +176,7 @@ internal static World ReadSingleRow(DbCommand dbCommand) return world; } - public static World[] ReadMultipleRows(int count) + public static async Task<World[]> ReadMultipleRows(int count) { int j = 0; var ids = BatchUpdateString.Ids; @@ -206,8 +199,8 @@ public static World[] ReadMultipleRows(int count) queryString = _queriesMultipleRows[count] = StringBuilderCache.GetStringAndRelease(stringBuilder); } - using var pooledConnection = DbConnections.GetConnection(DbProviderFactory.ConnectionString); - pooledConnection.Open(); + using var pooledConnection = new DbConnection(DbProviderFactory.ConnectionString); + await pooledConnection.OpenAsync();
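// Annotation (not part of the diff): the DbConnections/DbConnectionsKeyed classes above replace the
// old DbConnection object pool with a tuple-based pool plus a waiting queue. A minimal, self-contained
// sketch of that pattern follows, with hypothetical names and an Interlocked counter instead of the
// PR's plain short counter; it is an illustration of the idea, not code from the PR.
using System.Collections.Concurrent;
using System.Data.Odbc;
using System.Threading;
using System.Threading.Tasks;

internal static class ConnectionPoolSketch
{
    private const int MaxConnections = 500;
    private static int _created;
    private static readonly ConcurrentStack<OdbcConnection> _idle = new();
    private static readonly ConcurrentQueue<TaskCompletionSource<OdbcConnection>> _waiters = new();

    internal static Task<OdbcConnection> RentAsync(string connectionString)
    {
        // Fast path: reuse an idle connection.
        if (_idle.TryPop(out var connection)) return Task.FromResult(connection);

        // Below the cap: create a new connection (the caller opens it).
        if (Interlocked.Increment(ref _created) <= MaxConnections)
            return Task.FromResult(new OdbcConnection(connectionString));
        Interlocked.Decrement(ref _created);

        // At the cap: park the caller until Release hands a connection back.
        var waiter = new TaskCompletionSource<OdbcConnection>(TaskCreationOptions.RunContinuationsAsynchronously);
        _waiters.Enqueue(waiter);
        return waiter.Task;
    }

    internal static void Release(OdbcConnection connection)
    {
        // Prefer waking a parked caller; otherwise return the connection to the idle stack.
        if (_waiters.TryDequeue(out var waiter)) waiter.SetResult(connection);
        else _idle.Push(connection);
    }
}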
using var dbCommand = new DbCommand(queryString, pooledConnection); @@ -216,7 +209,7 @@ public static World[] ReadMultipleRows(int count) dbCommand.CreateParameter(ids[i], DbType.Int32, _random.Next(1, 10001)); } - var dataReader = dbCommand.ExecuteReader(CommandBehavior.Default & CommandBehavior.SequentialAccess); + var dataReader = await dbCommand.ExecuteReaderAsync(CommandBehavior.Default & CommandBehavior.SequentialAccess); do { @@ -229,7 +222,7 @@ public static World[] ReadMultipleRows(int count) }; j++; - } while (dataReader.NextResult()); + } while (await dataReader.NextResultAsync()); dataReader.Close(); diff --git a/frameworks/CSharp/beetlex/PlatformBenchmarks/PlatformBenchmarks.csproj b/frameworks/CSharp/beetlex/PlatformBenchmarks/PlatformBenchmarks.csproj index ba5b9d4202d..7518d6b0de0 100644 --- a/frameworks/CSharp/beetlex/PlatformBenchmarks/PlatformBenchmarks.csproj +++ b/frameworks/CSharp/beetlex/PlatformBenchmarks/PlatformBenchmarks.csproj @@ -9,10 +9,10 @@ - + - + diff --git a/frameworks/Elixir/phoenix/config/prod.exs b/frameworks/Elixir/phoenix/config/prod.exs index 9ad458a09be..c920950ee04 100755 --- a/frameworks/Elixir/phoenix/config/prod.exs +++ b/frameworks/Elixir/phoenix/config/prod.exs @@ -21,7 +21,8 @@ config :hello, Hello.Repo, password: "benchmarkdbpass", database: "hello_world", hostname: "tfb-database", - pool_size: 50, + pool_count: 56, + pool_size: 15, queue_target: 5000, log: false @@ -33,17 +34,3 @@ config :logger, ], level: :error, backends: [] - -# ## SSL Support -# -# To get SSL working, you will need to add the `https` key -# to the previous section: -# -# config:hello, Hello.Endpoint, -# ... -# https: [port: 443, -# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"), -# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")] -# -# Where those two env variables point to a file on -# disk for the key and cert. diff --git a/frameworks/Elixir/phoenix/lib/hello/world_cache.ex b/frameworks/Elixir/phoenix/lib/hello/world_cache.ex index b4943e14a35..e6cde7ab58e 100644 --- a/frameworks/Elixir/phoenix/lib/hello/world_cache.ex +++ b/frameworks/Elixir/phoenix/lib/hello/world_cache.ex @@ -23,9 +23,9 @@ defmodule Hello.WorldCache do world = Repo.get(World, id) :ok = __MODULE__.put(id, world) world + world -> world end end - end diff --git a/frameworks/Elixir/phoenix/lib/hello_web.ex b/frameworks/Elixir/phoenix/lib/hello_web.ex index 1193aab23a5..1114ed22e51 100644 --- a/frameworks/Elixir/phoenix/lib/hello_web.ex +++ b/frameworks/Elixir/phoenix/lib/hello_web.ex @@ -95,8 +95,8 @@ defmodule HelloWeb do end @doc """ - When used, dispatch to the appropriate controller/view/etc. - """ + When used, dispatch to the appropriate controller/view/etc. 
+ """ defmacro __using__(which) when is_atom(which) do apply(__MODULE__, which, []) end diff --git a/frameworks/Elixir/phoenix/lib/hello_web/controllers/page_controller.ex b/frameworks/Elixir/phoenix/lib/hello_web/controllers/page_controller.ex index 34c1fc120fd..59f2f5c2f35 100644 --- a/frameworks/Elixir/phoenix/lib/hello_web/controllers/page_controller.ex +++ b/frameworks/Elixir/phoenix/lib/hello_web/controllers/page_controller.ex @@ -1,5 +1,4 @@ defmodule HelloWeb.PageController do - use HelloWeb, :controller alias Hello.Models.Fortune @@ -9,6 +8,8 @@ defmodule HelloWeb.PageController do @random_max 10_000 + plug :accepts, ~w(html json) when action == :fortunes + def index(conn, _params) do json(conn, %{"TE Benchmarks\n" => "Started"}) end @@ -25,13 +26,12 @@ defmodule HelloWeb.PageController do end def queries(conn, params) do - :rand.seed(:exsp) - worlds = - Stream.repeatedly(&random_id/0) - |> Stream.uniq() - |> Stream.map(&Repo.get(World, &1)) - |> Enum.take(size(params["queries"])) + Repo.checkout(fn -> + params["queries"] + |> random_ids_sample() + |> Enum.map(&Repo.get(World, &1)) + end) json(conn, worlds) end @@ -44,49 +44,53 @@ defmodule HelloWeb.PageController do fortunes = [additional_fortune | Repo.all(Fortune)] - |> Enum.sort_by(& &1.message) + |> Enum.sort(fn a, b -> a.message < b.message end) render(conn, :fortunes, fortunes: fortunes) end def updates(conn, params) do - :rand.seed(:exsp) - - worlds = - Stream.repeatedly(&random_id/0) - |> Stream.uniq() - |> Stream.map(&Repo.get(World, &1)) - |> Stream.map(fn world -> %{id: world.id, randomnumber: :rand.uniform(@random_max)} end) - |> Enum.take(size(params["queries"])) - # If this is not sorted it sometimes generates - # FAIL for http://tfb-server:8080/updates/20 - # Only 20470 executed queries in the database out of roughly 20480 expected. - |> Enum.sort_by(& &1.id) + world_updates = + Repo.checkout(fn -> + params["queries"] + |> random_ids_sample() + |> Enum.sort() + # + # If this is not sorted it will intermittently generate: + # + # FAIL for http://tfb-server:8080/updates/20 + # Only 20470 executed queries in the database out of roughly 20480 expected. 
+ # + |> Enum.map(fn id -> + world = Repo.get(World, id) + %{id: world.id, randomnumber: :rand.uniform(@random_max)} + end) + end) Repo.insert_all( World, - worlds, + world_updates, on_conflict: {:replace_all_except, [:id]}, conflict_target: [:id], returning: false ) - json(conn, worlds) + json(conn, world_updates) end - def plaintext(conn, _params) do - text(conn, "Hello, World!") + def plaintext(conn, _params) do + conn + |> put_resp_header("content-type", "text/plain") + |> send_resp(200, "Hello, World!") end def cached(conn, params) do - :rand.seed(:exsp) WorldCache.seed() worlds = - Stream.repeatedly(&random_id/0) - |> Stream.uniq() - |> Stream.map(&WorldCache.fetch(&1)) - |> Enum.take(size(params["count"])) + params["count"] + |> random_ids_sample() + |> Enum.map(&WorldCache.fetch(&1)) json(conn, worlds) end @@ -95,11 +99,17 @@ defmodule HelloWeb.PageController do :rand.uniform(@random_max) end - defp size(nil), do: 1 - defp size(""), do: 1 + defp random_ids_sample(count) do + # Use the fastest rand algorithm + :rand.seed(:exsp) + + Stream.repeatedly(&random_id/0) + |> Stream.uniq() + |> Enum.take(size(count)) + end - defp size(queries) when is_bitstring(queries) do - case Integer.parse(queries) do + defp size(param_count) when is_bitstring(param_count) do + case Integer.parse(param_count) do {count, _} -> max(1, min(500, count)) _ -> 1 end diff --git a/frameworks/Elixir/phoenix/lib/hello_web/endpoint.ex b/frameworks/Elixir/phoenix/lib/hello_web/endpoint.ex index f709d00a2e8..a764bbf653e 100644 --- a/frameworks/Elixir/phoenix/lib/hello_web/endpoint.ex +++ b/frameworks/Elixir/phoenix/lib/hello_web/endpoint.ex @@ -20,4 +20,3 @@ defmodule HelloWeb.Endpoint do plug HelloWeb.HeadersPlug plug HelloWeb.Router end - diff --git a/frameworks/Elixir/phoenix/lib/hello_web/router.ex b/frameworks/Elixir/phoenix/lib/hello_web/router.ex index 1634f3403fd..7e3caed3b8f 100755 --- a/frameworks/Elixir/phoenix/lib/hello_web/router.ex +++ b/frameworks/Elixir/phoenix/lib/hello_web/router.ex @@ -1,13 +1,7 @@ defmodule HelloWeb.Router do use HelloWeb, :router - pipeline :browser do - plug :accepts, ~w(html json) - end - scope "/", HelloWeb do - pipe_through [:browser] - get "/json", PageController, :_json get "/db", PageController, :db get "/queries", PageController, :queries diff --git a/frameworks/Elixir/phoenix/mix.lock b/frameworks/Elixir/phoenix/mix.lock index 4c59758f12a..0c47cab2458 100644 --- a/frameworks/Elixir/phoenix/mix.lock +++ b/frameworks/Elixir/phoenix/mix.lock @@ -6,8 +6,8 @@ "cowlib": {:hex, :cowlib, "2.13.0", "db8f7505d8332d98ef50a3ef34b34c1afddec7506e4ee4dd4a3a266285d282ca", [:make, :rebar3], [], "hexpm", "e1e1284dc3fc030a64b1ad0d8382ae7e99da46c3246b815318a4b848873800a4"}, "db_connection": {:hex, :db_connection, "2.7.0", "b99faa9291bb09892c7da373bb82cba59aefa9b36300f6145c5f201c7adf48ec", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "dcf08f31b2701f857dfc787fbad78223d61a32204f217f15e881dd93e4bdd3ff"}, "decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"}, - "ecto": {:hex, :ecto, "3.11.2", "e1d26be989db350a633667c5cda9c3d115ae779b66da567c68c80cfb26a8c9ee", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", 
"3c38bca2c6f8d8023f2145326cc8a80100c3ffe4dcbd9842ff867f7fc6156c65"}, - "ecto_sql": {:hex, :ecto_sql, "3.11.3", "4eb7348ff8101fbc4e6bbc5a4404a24fecbe73a3372d16569526b0cf34ebc195", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.11.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "e5f36e3d736b99c7fee3e631333b8394ade4bafe9d96d35669fca2d81c2be928"}, + "ecto": {:hex, :ecto, "3.12.4", "267c94d9f2969e6acc4dd5e3e3af5b05cdae89a4d549925f3008b2b7eb0b93c3", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "ef04e4101688a67d061e1b10d7bc1fbf00d1d13c17eef08b71d070ff9188f747"}, + "ecto_sql": {:hex, :ecto_sql, "3.12.1", "c0d0d60e85d9ff4631f12bafa454bc392ce8b9ec83531a412c12a0d415a3a4d0", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.12", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.7", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.19 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "aff5b958a899762c5f09028c847569f7dfb9cc9d63bdb8133bff8a5546de6bf5"}, "expo": {:hex, :expo, "1.0.0", "647639267e088717232f4d4451526e7a9de31a3402af7fcbda09b27e9a10395a", [:mix], [], "hexpm", "18d2093d344d97678e8a331ca0391e85d29816f9664a25653fd7e6166827827c"}, "gettext": {:hex, :gettext, "0.25.0", "98a95a862a94e2d55d24520dd79256a15c87ea75b49673a2e2f206e6ebc42e5d", [:mix], [{:expo, "~> 0.5.1 or ~> 1.0", [hex: :expo, repo: "hexpm", optional: false]}], "hexpm", "38e5d754e66af37980a94fb93bb20dcde1d2361f664b0a19f01e87296634051f"}, "hpax": {:hex, :hpax, "1.0.0", "28dcf54509fe2152a3d040e4e3df5b265dcb6cb532029ecbacf4ce52caea3fd2", [:mix], [], "hexpm", "7f1314731d711e2ca5fdc7fd361296593fc2542570b3105595bb0bc6d0fad601"}, diff --git a/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/cached-world.ex b/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/cached-world.ex index c80b71053e7..60b441557c5 100644 --- a/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/cached-world.ex +++ b/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/cached-world.ex @@ -11,10 +11,10 @@ defmodule FrameworkBenchmarks.Handlers.CachedWorld do :rand.uniform(10_000) end) - {:ok, json} = + json = ids |> Enum.map(&FrameworkBenchmarks.CachedWorld.get/1) - |> Jason.encode() + |> Jason.encode_to_iodata!() conn |> Plug.Conn.put_resp_content_type("application/json") diff --git a/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/db.ex b/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/db.ex index b1abba5dd8b..101b7e5f8fd 100644 --- a/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/db.ex +++ b/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/db.ex @@ -5,11 +5,9 @@ defmodule FrameworkBenchmarks.Handlers.DB do def handle(conn) do id = :rand.uniform(10_000) - {:ok, 
json} = + json = FrameworkBenchmarks.Repo.get(FrameworkBenchmarks.Models.World, id) - |> Map.from_struct() - |> Map.drop([:__meta__]) - |> Jason.encode() + |> Jason.encode_to_iodata!() conn |> Plug.Conn.put_resp_content_type("application/json") diff --git a/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/json.ex b/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/json.ex index c013110a6de..3a7ada06cdb 100644 --- a/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/json.ex +++ b/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/json.ex @@ -3,7 +3,7 @@ defmodule FrameworkBenchmarks.Handlers.JSON do This is the handle for the /json route """ def handle(conn) do - {:ok, json} = Jason.encode(%{message: "Hello, World!"}) + json = Jason.encode_to_iodata!(%{message: "Hello, World!"}) conn |> Plug.Conn.put_resp_content_type("application/json") diff --git a/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/query.ex b/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/query.ex index b4e83026a82..6ad2af9061a 100644 --- a/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/query.ex +++ b/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/query.ex @@ -5,7 +5,7 @@ defmodule FrameworkBenchmarks.Handlers.Query do def handle(conn) do number_of_queries = FrameworkBenchmarks.Handlers.Helpers.parse_queries(conn, "queries") - records = + json = 1..number_of_queries |> Enum.map(fn _ -> :rand.uniform(10_000) @@ -15,16 +15,8 @@ defmodule FrameworkBenchmarks.Handlers.Query do FrameworkBenchmarks.Repo.get(FrameworkBenchmarks.Models.World, &1) end) ) - |> Enum.map(&Task.await(&1)) - - {:ok, json} = - records - |> Enum.map(fn record -> - record - |> Map.from_struct() - |> Map.drop([:__meta__]) - end) - |> Jason.encode() + |> Enum.map(&Task.await(&1, :infinity)) + |> Jason.encode_to_iodata!() conn |> Plug.Conn.put_resp_content_type("application/json") diff --git a/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/update.ex b/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/update.ex index d06744adfde..0b5ad54c4b8 100644 --- a/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/update.ex +++ b/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/update.ex @@ -22,7 +22,7 @@ defmodule FrameworkBenchmarks.Handlers.Update do :rand.uniform(10_000) end) - records = + json = ids |> Enum.map( &Task.async(fn -> @@ -38,16 +38,8 @@ defmodule FrameworkBenchmarks.Handlers.Update do |> FrameworkBenchmarks.Repo.update!() end) ) - |> Enum.map(&Task.await(&1)) - - {:ok, json} = - records - |> Enum.map(fn record -> - record - |> Map.from_struct() - |> Map.drop([:__meta__]) - end) - |> Jason.encode() + |> Enum.map(&Task.await(&1, :infinity)) + |> Jason.encode_to_iodata!() conn |> Plug.Conn.put_resp_content_type("application/json") diff --git a/frameworks/Elixir/plug/lib/framework_benchmarks/models/world.ex b/frameworks/Elixir/plug/lib/framework_benchmarks/models/world.ex index 549d8415a9e..d3db33611c1 100644 --- a/frameworks/Elixir/plug/lib/framework_benchmarks/models/world.ex +++ b/frameworks/Elixir/plug/lib/framework_benchmarks/models/world.ex @@ -1,6 +1,7 @@ defmodule FrameworkBenchmarks.Models.World do use Ecto.Schema + @derive {Jason.Encoder, only: [:id, :randomnumber]} schema "world" do field(:randomnumber, :integer) end diff --git a/frameworks/Go/chi/chi-gojay-prefork.dockerfile b/frameworks/Go/chi/chi-gojay-prefork.dockerfile index 2403792062f..5c9eec667f7 100644 --- a/frameworks/Go/chi/chi-gojay-prefork.dockerfile +++ 
b/frameworks/Go/chi/chi-gojay-prefork.dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/golang:1.19 +FROM docker.io/golang:1.23.1 ADD ./src/chi-gojay /chi WORKDIR /chi diff --git a/frameworks/Go/chi/chi-gojay.dockerfile b/frameworks/Go/chi/chi-gojay.dockerfile index 4f4e2ccf72d..c9a6de386d5 100644 --- a/frameworks/Go/chi/chi-gojay.dockerfile +++ b/frameworks/Go/chi/chi-gojay.dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/golang:1.19 +FROM docker.io/golang:1.23.1 ADD ./src/chi-gojay /chi WORKDIR /chi diff --git a/frameworks/Go/chi/chi-prefork.dockerfile b/frameworks/Go/chi/chi-prefork.dockerfile index a516efa3726..fb363d7b06a 100644 --- a/frameworks/Go/chi/chi-prefork.dockerfile +++ b/frameworks/Go/chi/chi-prefork.dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/golang:1.19 +FROM docker.io/golang:1.23.1 ADD ./src/chi /chi WORKDIR /chi diff --git a/frameworks/Go/chi/chi-scratch.dockerfile b/frameworks/Go/chi/chi-scratch.dockerfile index 63f9b7ac031..2361e658ca7 100644 --- a/frameworks/Go/chi/chi-scratch.dockerfile +++ b/frameworks/Go/chi/chi-scratch.dockerfile @@ -1,13 +1,12 @@ # build layer -FROM docker.io/golang:1.19-alpine as builder +FROM docker.io/golang:1.23.1-alpine as builder ADD ./src/chi /chi WORKDIR /chi RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 GOAMD64=v3 \ - go build -ldflags="-w -s" -o server - -RUN apk --no-cache add --update ca-certificates + go build -ldflags="-w -s" -o server && \ + apk --no-cache add --update ca-certificates # release layer FROM scratch diff --git a/frameworks/Go/chi/chi-sjson-prefork.dockerfile b/frameworks/Go/chi/chi-sjson-prefork.dockerfile index 1e6e7029c01..97071fe5e22 100644 --- a/frameworks/Go/chi/chi-sjson-prefork.dockerfile +++ b/frameworks/Go/chi/chi-sjson-prefork.dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/golang:1.19 +FROM docker.io/golang:1.23.1 ADD ./src/chi-sjson /chi WORKDIR /chi diff --git a/frameworks/Go/chi/chi-sjson.dockerfile b/frameworks/Go/chi/chi-sjson.dockerfile index 6af99dc4011..99a2204147f 100644 --- a/frameworks/Go/chi/chi-sjson.dockerfile +++ b/frameworks/Go/chi/chi-sjson.dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/golang:1.19 +FROM docker.io/golang:1.23.1 ADD ./src/chi-sjson /chi WORKDIR /chi diff --git a/frameworks/Go/chi/chi.dockerfile b/frameworks/Go/chi/chi.dockerfile index 205c2bdf7d5..6facc00990f 100644 --- a/frameworks/Go/chi/chi.dockerfile +++ b/frameworks/Go/chi/chi.dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/golang:1.19 +FROM docker.io/golang:1.23.1 ADD ./src/chi /chi WORKDIR /chi diff --git a/frameworks/Go/chi/src/chi-gojay/go.mod b/frameworks/Go/chi/src/chi-gojay/go.mod index 8521c5e7b7f..f8c266996ec 100644 --- a/frameworks/Go/chi/src/chi-gojay/go.mod +++ b/frameworks/Go/chi/src/chi-gojay/go.mod @@ -1,8 +1,10 @@ module chi/server -go 1.19 +go 1.23.1 require ( - github.com/go-chi/chi/v5 v5.0.7 - github.com/go-sql-driver/mysql v1.6.0 + github.com/go-chi/chi/v5 v5.1.0 + github.com/go-sql-driver/mysql v1.8.1 ) + +require filippo.io/edwards25519 v1.1.0 // indirect diff --git a/frameworks/Go/chi/src/chi-gojay/go.sum b/frameworks/Go/chi/src/chi-gojay/go.sum index 68a8dd38f73..701d0184cbb 100644 --- a/frameworks/Go/chi/src/chi-gojay/go.sum +++ b/frameworks/Go/chi/src/chi-gojay/go.sum @@ -1,4 +1,6 @@ -github.com/go-chi/chi/v5 v5.0.7 h1:rDTPXLDHGATaeHvVlLcR4Qe0zftYethFucbjVQ1PxU8= -github.com/go-chi/chi/v5 v5.0.7/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= -github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= -github.com/go-sql-driver/mysql v1.6.0/go.mod 
h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= +filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= +github.com/go-chi/chi/v5 v5.1.0 h1:acVI1TYaD+hhedDJ3r54HyA6sExp3HfXq7QWEEY/xMw= +github.com/go-chi/chi/v5 v5.1.0/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= +github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= +github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= diff --git a/frameworks/Go/chi/src/chi-sjson/go.mod b/frameworks/Go/chi/src/chi-sjson/go.mod index 8521c5e7b7f..f8c266996ec 100644 --- a/frameworks/Go/chi/src/chi-sjson/go.mod +++ b/frameworks/Go/chi/src/chi-sjson/go.mod @@ -1,8 +1,10 @@ module chi/server -go 1.19 +go 1.23.1 require ( - github.com/go-chi/chi/v5 v5.0.7 - github.com/go-sql-driver/mysql v1.6.0 + github.com/go-chi/chi/v5 v5.1.0 + github.com/go-sql-driver/mysql v1.8.1 ) + +require filippo.io/edwards25519 v1.1.0 // indirect diff --git a/frameworks/Go/chi/src/chi-sjson/go.sum b/frameworks/Go/chi/src/chi-sjson/go.sum index 68a8dd38f73..701d0184cbb 100644 --- a/frameworks/Go/chi/src/chi-sjson/go.sum +++ b/frameworks/Go/chi/src/chi-sjson/go.sum @@ -1,4 +1,6 @@ -github.com/go-chi/chi/v5 v5.0.7 h1:rDTPXLDHGATaeHvVlLcR4Qe0zftYethFucbjVQ1PxU8= -github.com/go-chi/chi/v5 v5.0.7/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= -github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= -github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= +filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= +github.com/go-chi/chi/v5 v5.1.0 h1:acVI1TYaD+hhedDJ3r54HyA6sExp3HfXq7QWEEY/xMw= +github.com/go-chi/chi/v5 v5.1.0/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= +github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= +github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= diff --git a/frameworks/Go/chi/src/chi/go.mod b/frameworks/Go/chi/src/chi/go.mod index 829aae200bd..906b029b388 100644 --- a/frameworks/Go/chi/src/chi/go.mod +++ b/frameworks/Go/chi/src/chi/go.mod @@ -1,11 +1,14 @@ module chi/server -go 1.19 +go 1.23.1 require ( - github.com/go-chi/chi/v5 v5.0.7 - github.com/go-sql-driver/mysql v1.6.0 + github.com/go-chi/chi/v5 v5.1.0 + github.com/go-sql-driver/mysql v1.8.1 github.com/mailru/easyjson v0.7.7 ) -require github.com/josharian/intern v1.0.0 // indirect +require ( + filippo.io/edwards25519 v1.1.0 // indirect + github.com/josharian/intern v1.0.0 // indirect +) diff --git a/frameworks/Go/chi/src/chi/go.sum b/frameworks/Go/chi/src/chi/go.sum index f18160ebd73..b39d37edbe5 100644 --- a/frameworks/Go/chi/src/chi/go.sum +++ b/frameworks/Go/chi/src/chi/go.sum @@ -1,7 +1,9 @@ -github.com/go-chi/chi/v5 v5.0.7 h1:rDTPXLDHGATaeHvVlLcR4Qe0zftYethFucbjVQ1PxU8= -github.com/go-chi/chi/v5 v5.0.7/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= -github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= -github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= +filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= +github.com/go-chi/chi/v5 v5.1.0 
h1:acVI1TYaD+hhedDJ3r54HyA6sExp3HfXq7QWEEY/xMw= +github.com/go-chi/chi/v5 v5.1.0/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= +github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= +github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= diff --git a/frameworks/Go/fiber/fiber-prefork.dockerfile b/frameworks/Go/fiber/fiber-prefork.dockerfile index c741baca1b8..43995ca7de7 100644 --- a/frameworks/Go/fiber/fiber-prefork.dockerfile +++ b/frameworks/Go/fiber/fiber-prefork.dockerfile @@ -1,14 +1,18 @@ -FROM docker.io/golang:1.23 +FROM golang:1.23-alpine as builder WORKDIR /fiber COPY ./src /fiber -RUN go mod download +RUN go mod download && \ + go generate -x ./templates && \ + GOAMD64=v3 go build -ldflags="-s -w" -o app . -RUN go generate -x ./templates +FROM alpine:latest -RUN GOAMD64=v3 go build -ldflags="-s -w" -o app . +WORKDIR /fiber + +COPY --from=builder /fiber/app . EXPOSE 8080 diff --git a/frameworks/Go/fiber/fiber.dockerfile b/frameworks/Go/fiber/fiber.dockerfile index 23c8ab72ea7..38f97b03d56 100644 --- a/frameworks/Go/fiber/fiber.dockerfile +++ b/frameworks/Go/fiber/fiber.dockerfile @@ -1,14 +1,18 @@ -FROM docker.io/golang:1.23 +FROM golang:1.23-alpine as builder WORKDIR /fiber COPY ./src /fiber -RUN go mod download +RUN go mod download && \ + go generate -x ./templates && \ + GOAMD64=v3 go build -ldflags="-s -w" -o app . -RUN go generate -x ./templates +FROM alpine:latest -RUN GOAMD64=v3 go build -ldflags="-s -w" -o app . +WORKDIR /fiber + +COPY --from=builder /fiber/app . 
EXPOSE 8080 diff --git a/frameworks/Go/fiber/src/go.mod b/frameworks/Go/fiber/src/go.mod index 252db9355e6..36e65622cc4 100644 --- a/frameworks/Go/fiber/src/go.mod +++ b/frameworks/Go/fiber/src/go.mod @@ -5,7 +5,7 @@ go 1.23 require ( github.com/goccy/go-json v0.10.3 github.com/gofiber/fiber/v2 v2.52.5 - github.com/jackc/pgx/v5 v5.6.0 + github.com/jackc/pgx/v5 v5.7.1 github.com/valyala/quicktemplate v1.8.0 ) @@ -14,7 +14,7 @@ require ( github.com/google/uuid v1.6.0 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect - github.com/jackc/puddle/v2 v2.2.1 // indirect + github.com/jackc/puddle/v2 v2.2.2 // indirect github.com/klauspost/compress v1.17.9 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.20 // indirect diff --git a/frameworks/Go/fiber/src/go.sum b/frameworks/Go/fiber/src/go.sum index 1cada1ee655..62741e5425b 100644 --- a/frameworks/Go/fiber/src/go.sum +++ b/frameworks/Go/fiber/src/go.sum @@ -13,10 +13,10 @@ github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsI github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo= github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= -github.com/jackc/pgx/v5 v5.6.0 h1:SWJzexBzPL5jb0GEsrPMLIsi/3jOo7RHlzTjcAeDrPY= -github.com/jackc/pgx/v5 v5.6.0/go.mod h1:DNZ/vlrUnhWCoFGxHAG8U2ljioxukquj7utPDgtQdTw= -github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk= -github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/jackc/pgx/v5 v5.7.1 h1:x7SYsPBYDkHDksogeSmZZ5xzThcTgRz++I5E+ePFUcs= +github.com/jackc/pgx/v5 v5.7.1/go.mod h1:e7O26IywZZ+naJtWWos6i6fvWK+29etgITqrqHLfoZA= +github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= +github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA= github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= diff --git a/frameworks/Go/gnet/benchmark_config.json b/frameworks/Go/gnet/benchmark_config.json index 3999bd74f97..1ff8439be6d 100644 --- a/frameworks/Go/gnet/benchmark_config.json +++ b/frameworks/Go/gnet/benchmark_config.json @@ -4,8 +4,8 @@ "default": { "plaintext_url": "/plaintext", "port": 8080, - "approach": "stripped", - "classification": "Micro", + "approach": "Realistic", + "classification": "Platform", "database": "None", "framework": "gnet", "language": "Go", diff --git a/frameworks/Go/gnet/gnet.dockerfile b/frameworks/Go/gnet/gnet.dockerfile index 2b060d96acd..bf9715306d8 100644 --- a/frameworks/Go/gnet/gnet.dockerfile +++ b/frameworks/Go/gnet/gnet.dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/golang:1.19 +FROM docker.io/golang:latest WORKDIR /gnet diff --git a/frameworks/Go/gnet/src/go.mod b/frameworks/Go/gnet/src/go.mod index bc59a7c9cfd..77b8811ce55 100644 --- a/frameworks/Go/gnet/src/go.mod +++ b/frameworks/Go/gnet/src/go.mod @@ -1,17 +1,19 @@ module gnet -go 1.19 +go 1.23.1 require ( - github.com/panjf2000/gnet/v2 v2.1.2 - go.uber.org/multierr v1.8.0 // indirect - golang.org/x/sys 
v0.0.0-20220915200043-7b5979e65e41 // indirect + github.com/evanphx/wildcat v0.0.0-20141114174135-e7012f664567 + github.com/panjf2000/gnet/v2 v2.5.7 ) require ( - github.com/josharian/intern v1.0.0 // indirect - github.com/mailru/easyjson v0.7.7 // indirect - go.uber.org/atomic v1.10.0 // indirect - go.uber.org/zap v1.23.0 // indirect - gopkg.in/natefinch/lumberjack.v2 v2.0.0 // indirect + github.com/valyala/bytebufferpool v1.0.0 // indirect + github.com/vektra/errors v0.0.0-20140903201135-c64d83aba85a // indirect + go.uber.org/atomic v1.7.0 // indirect + go.uber.org/multierr v1.6.0 // indirect + go.uber.org/zap v1.21.0 // indirect + golang.org/x/sync v0.7.0 // indirect + golang.org/x/sys v0.21.0 // indirect + gopkg.in/natefinch/lumberjack.v2 v2.2.1 // indirect ) diff --git a/frameworks/Go/gnet/src/go.sum b/frameworks/Go/gnet/src/go.sum index f9f9c868493..e39645c8777 100644 --- a/frameworks/Go/gnet/src/go.sum +++ b/frameworks/Go/gnet/src/go.sum @@ -1,46 +1,39 @@ -github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= -github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/evanphx/wildcat v0.0.0-20141114174135-e7012f664567 h1:7+oQw6YjB/kk9x27AEC7DMXudqERHD583hZpno18lRw= +github.com/evanphx/wildcat v0.0.0-20141114174135-e7012f664567/go.mod h1:XNGflD53X+hfdCAt1NGeBUgiUpe9QmweW/zI1gV26Zw= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= -github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= -github.com/panjf2000/ants/v2 v2.4.8 h1:JgTbolX6K6RreZ4+bfctI0Ifs+3mrE5BIHudQxUDQ9k= -github.com/panjf2000/ants/v2 v2.4.8/go.mod h1:f6F0NZVFsGCp5A7QW/Zj/m92atWwOkY0OIhFxRNFr4A= -github.com/panjf2000/gnet/v2 v2.1.2 h1:WJ/PkbfV6G0wcGOng2pyCwv8oadKiqtP8p+38smN7ao= -github.com/panjf2000/gnet/v2 v2.1.2/go.mod h1:unWr2B4jF0DQPJH3GsXBGQiDcAamM6+Pf5FiK705kc4= +github.com/panjf2000/ants/v2 v2.10.0 h1:zhRg1pQUtkyRiOFo2Sbqwjp0GfBNo9cUY2/Grpx1p+8= +github.com/panjf2000/ants/v2 v2.10.0/go.mod h1:7ZxyxsqE4vvW0M7LSD8aI3cKwgFhBHbxnlN8mDqHa1I= +github.com/panjf2000/gnet/v2 v2.5.7 h1:EGGIfLYEVAp2l5WSYT2XddSjpQ642PjwphbWhcJ0WBY= +github.com/panjf2000/gnet/v2 v2.5.7/go.mod h1:ppopMJ8VrDbJu8kDsqFQTgNmpMS8Le5CmPxISf+Sauk= github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= 
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/vektra/errors v0.0.0-20140903201135-c64d83aba85a h1:lUVfiMMY/te9icPKBqOKkBIMZNxSpM90dxokDeCcfBg= +github.com/vektra/errors v0.0.0-20140903201135-c64d83aba85a/go.mod h1:KUxJS71XlMs+ztT+RzsLRoWUQRUpECo/+Rb0EBk8/Wc= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/atomic v1.10.0 h1:9qC72Qh0+3MqyJbAn8YU5xVq1frD8bn3JtD2oXtafVQ= -go.uber.org/atomic v1.10.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= go.uber.org/goleak v1.1.11 h1:wy28qYRKZgnJTxGxvye5/wgWr1EKjmUDGYox5mGlRlI= go.uber.org/goleak v1.1.11/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= +go.uber.org/multierr v1.6.0 h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= -go.uber.org/multierr v1.7.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak= -go.uber.org/multierr v1.8.0 h1:dg6GjLku4EH+249NNmoIciG9N/jURbDG+pFlTkhzIC8= -go.uber.org/multierr v1.8.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak= +go.uber.org/zap v1.21.0 h1:WefMeulhovoZ2sYXz7st6K0sLj7bBhpiFaud4r4zST8= go.uber.org/zap v1.21.0/go.mod h1:wjWOCqI0f2ZZrJF/UufIOkiC8ii6tm1iqIsLo76RfJw= -go.uber.org/zap v1.23.0 h1:OjGQ5KQDEUawVHxNwQgPpiypGHOxo2mNZsOqTak4fFY= -go.uber.org/zap v1.23.0/go.mod h1:D+nX8jyLsMHMYrln8A0rJjFt/T/9/bGgIhAqxv5URuY= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= @@ -51,14 +44,15 @@ golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220224120231-95c6836cb0e7/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220915200043-7b5979e65e41 h1:ohgcoMbSofXygzo6AD2I1kz3BFmW1QArPYTtwEM3UXc= -golang.org/x/sys v0.0.0-20220915200043-7b5979e65e41/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws= +golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= @@ -71,12 +65,11 @@ golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/natefinch/lumberjack.v2 v2.0.0 h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8= -gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc= +gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc= gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/frameworks/Go/gnet/src/main.go b/frameworks/Go/gnet/src/main.go index 88f1fba167c..125e12ac10b 100644 --- a/frameworks/Go/gnet/src/main.go +++ b/frameworks/Go/gnet/src/main.go @@ -7,9 +7,11 @@ import ( "fmt" "log" "runtime" + "strconv" "sync/atomic" "time" + "github.com/evanphx/wildcat" "github.com/panjf2000/gnet/v2" ) @@ -22,30 +24,68 @@ type httpServer struct { } type httpCodec struct { - delimiter []byte - buf []byte + parser *wildcat.HTTPParser + contentLength int + buf []byte } -func (hc *httpCodec) appendResponse() { - hc.buf = append(hc.buf, "HTTP/1.1 200 OK\r\nServer: gnet\r\nContent-Type: text/plain\r\nDate: "...) - //hc.buf = time.Now().AppendFormat(hc.buf, "Mon, 02 Jan 2006 15:04:05 GMT") - hc.buf = append(hc.buf, NowTimeFormat()...) - hc.buf = append(hc.buf, "\r\nContent-Length: 13\r\n\r\nHello, World!"...) -} - -var errCRLFNotFound = errors.New("CRLF not found") +var CRLF = []byte("\r\n\r\n") func (hc *httpCodec) parse(data []byte) (int, error) { - if idx := bytes.Index(data, hc.delimiter); idx != -1 { + // Perform a legit HTTP request parsing. 
+ bodyOffset, err := hc.parser.Parse(data) + if err != nil { + return 0, err + } + + // First check if the Content-Length header is present. + contentLength := hc.getContentLength() + if contentLength > -1 { + return bodyOffset + contentLength, nil + } + + // If the Content-Length header is not found, + // we need to find the end of the body section. + if idx := bytes.Index(data, CRLF); idx != -1 { return idx + 4, nil } - return -1, errCRLFNotFound + + return 0, errors.New("invalid http request") +} + +var contentLengthKey = []byte("Content-Length") + +func (hc *httpCodec) getContentLength() int { + if hc.contentLength != -1 { + return hc.contentLength + } + + val := hc.parser.FindHeader(contentLengthKey) + if val != nil { + i, err := strconv.ParseInt(string(val), 10, 0) + if err == nil { + hc.contentLength = int(i) + } + } + + return hc.contentLength +} + +func (hc *httpCodec) resetParser() { + hc.contentLength = -1 } func (hc *httpCodec) reset() { + hc.resetParser() hc.buf = hc.buf[:0] } +func (hc *httpCodec) appendResponse() { + hc.buf = append(hc.buf, "HTTP/1.1 200 OK\r\nServer: gnet\r\nContent-Type: text/plain\r\nDate: "...) + hc.buf = append(hc.buf, NowTimeFormat()...) + hc.buf = append(hc.buf, "\r\nContent-Length: 13\r\n\r\nHello, World!"...) +} + func (hs *httpServer) OnBoot(eng gnet.Engine) gnet.Action { hs.eng = eng log.Printf("echo server with multi-core=%t is listening on %s\n", hs.multicore, hs.addr) @@ -53,18 +93,20 @@ func (hs *httpServer) OnBoot(eng gnet.Engine) gnet.Action { } func (hs *httpServer) OnOpen(c gnet.Conn) ([]byte, gnet.Action) { - c.SetContext(&httpCodec{delimiter: []byte("\r\n\r\n")}) + c.SetContext(&httpCodec{parser: wildcat.NewHTTPParser()}) return nil, gnet.None } func (hs *httpServer) OnTraffic(c gnet.Conn) gnet.Action { - buf, _ := c.Next(-1) hc := c.Context().(*httpCodec) + buf, _ := c.Next(-1) + pipeline: nextOffset, err := hc.parse(buf) if err != nil { goto response } + hc.resetParser() hc.appendResponse() buf = buf[nextOffset:] if len(buf) > 0 { diff --git a/frameworks/Java/helidon/nima/pom.xml b/frameworks/Java/helidon/nima/pom.xml index c884659a0a0..c859b758cfc 100644 --- a/frameworks/Java/helidon/nima/pom.xml +++ b/frameworks/Java/helidon/nima/pom.xml @@ -21,7 +21,7 @@ io.helidon.applications helidon-se - 4.0.3 + 4.1.2 diff --git a/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/JsonSerializer.java b/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/JsonSerializer.java new file mode 100644 index 00000000000..322a7cf030c --- /dev/null +++ b/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/JsonSerializer.java @@ -0,0 +1,92 @@ +package io.helidon.benchmark.nima; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Map; +import java.util.List; + +import com.jsoniter.output.JsonStream; +import com.jsoniter.output.JsonStreamPool; +import com.jsoniter.spi.JsonException; + +public class JsonSerializer { + + private JsonSerializer() { + } + + /** + * Serialize an instance into a JSON object and return it as a byte array. 
+ * + * @param obj the instance + * @return the byte array + */ + public static byte[] serialize(Object obj) { + JsonStream stream = JsonStreamPool.borrowJsonStream(); + try { + stream.reset(null); + stream.writeVal(obj.getClass(), obj); + return Arrays.copyOfRange(stream.buffer().data(), 0, stream.buffer().tail()); + } catch (IOException e) { + throw new JsonException(e); + } finally { + JsonStreamPool.returnJsonStream(stream); + } + } + + /** + * Serialize a map of strings into a JSON object and return it as a byte array. + * + * @param map the map + * @return the byte array + */ + public static byte[] serialize(Map map) { + JsonStream stream = JsonStreamPool.borrowJsonStream(); + try { + stream.reset(null); + stream.writeObjectStart(); + map.forEach((k, v) -> { + try { + stream.writeObjectField(k); + stream.writeVal(v); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + stream.writeObjectEnd(); + return Arrays.copyOfRange(stream.buffer().data(), 0, stream.buffer().tail()); + } catch (IOException e) { + throw new JsonException(e); + } finally { + JsonStreamPool.returnJsonStream(stream); + } + } + + /** + * Serialize a list of objects into a JSON array and return it as a byte array. + * + * @param objs the list of objects + * @return the byte array + */ + public static byte[] serialize(List objs) { + JsonStream stream = JsonStreamPool.borrowJsonStream(); + try { + stream.reset(null); + stream.writeArrayStart(); + int i = 0; + int n = objs.size(); + for (Object obj : objs) { + stream.writeVal(obj.getClass(), obj); + if (i++ < n - 1) { + stream.writeMore(); + } + + } + stream.writeArrayEnd(); + return Arrays.copyOfRange(stream.buffer().data(), 0, stream.buffer().tail()); + } catch (IOException e) { + throw new JsonException(e); + } finally { + JsonStreamPool.returnJsonStream(stream); + } + } +} diff --git a/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/Main.java b/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/Main.java index 92896867246..df669d8a7a7 100644 --- a/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/Main.java +++ b/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/Main.java @@ -16,14 +16,9 @@ package io.helidon.benchmark.nima; -import java.io.IOException; import java.nio.charset.StandardCharsets; -import java.util.Arrays; import java.util.logging.Logger; -import com.jsoniter.output.JsonStream; -import com.jsoniter.output.JsonStreamPool; -import com.jsoniter.spi.JsonException; import io.helidon.benchmark.nima.models.DbRepository; import io.helidon.benchmark.nima.models.HikariJdbcRepository; import io.helidon.benchmark.nima.models.PgClientRepository; @@ -41,6 +36,8 @@ import io.helidon.webserver.http.ServerRequest; import io.helidon.webserver.http.ServerResponse; +import static io.helidon.benchmark.nima.JsonSerializer.serialize; + /** * Main class of the benchmark. 
* Opens server on localhost:8080 and exposes {@code /plaintext} and {@code /json} endpoints adhering to the @@ -90,29 +87,14 @@ static void routing(HttpRules rules) { rules.get("/plaintext", new PlaintextHandler()) .get("/json", new JsonHandler()) - .get("/10k", new JsonKHandler(10)) .get("/fortunes", new FortuneHandler(repository)) .register("/", new DbService(repository)); } - private static byte[] serializeMsg(Message obj) { - JsonStream stream = JsonStreamPool.borrowJsonStream(); - try { - stream.reset(null); - stream.writeVal(Message.class, obj); - return Arrays.copyOfRange(stream.buffer().data(), 0, stream.buffer().tail()); - } catch (IOException e) { - throw new JsonException(e); - } finally { - JsonStreamPool.returnJsonStream(stream); - } - } - static class PlaintextHandler implements Handler { static final Header CONTENT_TYPE = HeaderValues.createCached(HeaderNames.CONTENT_TYPE, - "text/plain; charset=UTF-8"); + "text/plain; charset=UTF-8"); static final Header CONTENT_LENGTH = HeaderValues.createCached(HeaderNames.CONTENT_LENGTH, "13"); - private static final byte[] RESPONSE_BYTES = "Hello, World!".getBytes(StandardCharsets.UTF_8); @Override @@ -126,44 +108,16 @@ public void handle(ServerRequest req, ServerResponse res) { static class JsonHandler implements Handler { private static final String MESSAGE = "Hello, World!"; - private static final int JSON_LENGTH = serializeMsg(new Message(MESSAGE)).length; + private static final int JSON_LENGTH = serialize(new Message(MESSAGE)).length; static final Header CONTENT_LENGTH = HeaderValues.createCached(HeaderNames.CONTENT_LENGTH, - String.valueOf(JSON_LENGTH)); + String.valueOf(JSON_LENGTH)); @Override public void handle(ServerRequest req, ServerResponse res) { res.header(CONTENT_LENGTH); res.header(HeaderValues.CONTENT_TYPE_JSON); res.header(Main.SERVER); - res.send(serializeMsg(newMsg())); - } - - private static Message newMsg() { - return new Message("Hello, World!"); - } - } - - static class JsonKHandler implements Handler { - private final Header contentLength; - private final String message; - - JsonKHandler(int kilobytes) { - this.message = "a".repeat(1024 * kilobytes); - int length = serializeMsg(new Message(message)).length; - this.contentLength = HeaderValues.createCached(HeaderNames.CONTENT_LENGTH, - String.valueOf(length)); - } - - @Override - public void handle(ServerRequest req, ServerResponse res) { - res.header(contentLength); - res.header(HeaderValues.CONTENT_TYPE_JSON); - res.header(Main.SERVER); - res.send(serializeMsg(newMsg())); - } - - private Message newMsg() { - return new Message(message); + res.send(serialize(new Message(MESSAGE))); } } diff --git a/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/models/DbRepository.java b/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/models/DbRepository.java index 204c9ad5ad1..d1f75b558c3 100644 --- a/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/models/DbRepository.java +++ b/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/models/DbRepository.java @@ -6,35 +6,16 @@ import java.util.concurrent.ThreadLocalRandom; import jakarta.json.Json; -import jakarta.json.JsonArray; -import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonBuilderFactory; -import jakarta.json.JsonObject; public interface DbRepository { JsonBuilderFactory JSON = Json.createBuilderFactory(Collections.emptyMap()); - default World getWorld() { - return getWorld(randomWorldNumber()); - } - World getWorld(int id); - 
default JsonObject getWorldAsJson(int id) { - return getWorld().toJson(); - } - List getWorlds(int count); - default JsonArray getWorldsAsJson(int count) { - JsonArrayBuilder result = JSON.createArrayBuilder(); - for (World world : getWorlds(count)) { - result.add(world.toJson()); - } - return result.build(); - } - World updateWorld(World world); List updateWorlds(int count); diff --git a/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/models/HikariJdbcRepository.java b/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/models/HikariJdbcRepository.java index 686559b2fd9..fd9760939df 100644 --- a/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/models/HikariJdbcRepository.java +++ b/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/models/HikariJdbcRepository.java @@ -7,6 +7,8 @@ import java.sql.SQLException; import java.util.ArrayList; import java.util.List; +import java.util.concurrent.Executors; +import java.util.concurrent.ThreadFactory; import java.util.logging.Logger; import com.zaxxer.hikari.HikariConfig; @@ -22,20 +24,31 @@ public class HikariJdbcRepository implements DbRepository { private final HikariConfig hikariConfig; public HikariJdbcRepository(Config config) { + // hikari connection configuration String url = "jdbc:postgresql://" + config.get("host").asString().orElse("tfb-database") + ":" + config.get("port").asString().orElse("5432") + "/" + config.get("db").asString().orElse("hello_world"); - hikariConfig = new HikariConfig(); hikariConfig.setJdbcUrl(url); hikariConfig.setUsername(config.get("username").asString().orElse("benchmarkdbuser")); hikariConfig.setPassword(config.get("password").asString().orElse("benchmarkdbpass")); - hikariConfig.addDataSourceProperty("cachePrepStmts", "true"); + // hikari additional configuration int poolSize = config.get("sql-pool-size").asInt().orElse(64); - hikariConfig.addDataSourceProperty("maximumPoolSize", poolSize); - LOGGER.info("Db pool size is set to " + poolSize); + hikariConfig.setMaximumPoolSize(poolSize); + LOGGER.info("Hikari pool size is set to " + poolSize); + ThreadFactory vtThreadFactory = Thread.ofVirtual().factory(); + hikariConfig.setThreadFactory(vtThreadFactory); + hikariConfig.setScheduledExecutor(Executors.newScheduledThreadPool(poolSize, vtThreadFactory)); + LOGGER.info("Set thread factory to VTs"); + + // data source properties + hikariConfig.addDataSourceProperty("cachePrepStmts","true"); + hikariConfig.addDataSourceProperty("prepStmtCacheSize","250"); + hikariConfig.addDataSourceProperty("prepStmtCacheSqlLimit","2048"); + hikariConfig.addDataSourceProperty("ssl", "false"); + hikariConfig.addDataSourceProperty("tcpKeepAlive", "true"); } private Connection getConnection() throws SQLException { diff --git a/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/models/PgClientRepository.java b/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/models/PgClientRepository.java index 7775a177537..e5166b10fbc 100644 --- a/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/models/PgClientRepository.java +++ b/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/models/PgClientRepository.java @@ -8,27 +8,25 @@ import java.util.concurrent.TimeoutException; import java.util.logging.Logger; -import io.helidon.common.reactive.Multi; -import io.helidon.common.reactive.Single; import io.helidon.config.Config; + import io.vertx.core.Vertx; import io.vertx.core.VertxOptions; +import 
io.vertx.core.Future; import io.vertx.pgclient.PgConnectOptions; import io.vertx.pgclient.PgPool; import io.vertx.sqlclient.PoolOptions; +import io.vertx.sqlclient.PreparedQuery; import io.vertx.sqlclient.Row; +import io.vertx.sqlclient.RowSet; import io.vertx.sqlclient.SqlClient; import io.vertx.sqlclient.Tuple; -import jakarta.json.JsonArray; -import jakarta.json.JsonArrayBuilder; -import jakarta.json.JsonObject; import static io.helidon.benchmark.nima.models.DbRepository.randomWorldNumber; public class PgClientRepository implements DbRepository { private static final Logger LOGGER = Logger.getLogger(PgClientRepository.class.getName()); - private final SqlClient queryPool; private final SqlClient updatePool; @@ -36,9 +34,13 @@ public class PgClientRepository implements DbRepository { private final long updateTimeout; private final int maxRetries; + private final PreparedQuery> getFortuneQuery; + private final PreparedQuery> getWorldQuery; + private final PreparedQuery> updateWorldQuery; + public PgClientRepository(Config config) { Vertx vertx = Vertx.vertx(new VertxOptions() - .setPreferNativeTransport(true)); + .setPreferNativeTransport(true)); PgConnectOptions connectOptions = new PgConnectOptions() .setPort(config.get("port").asInt().orElse(5432)) .setCachePreparedStatements(config.get("cache-prepared-statements").asBoolean().orElse(true)) @@ -59,31 +61,20 @@ public PgClientRepository(Config config) { queryPool = PgPool.client(vertx, connectOptions, clientOptions); updatePool = PgPool.client(vertx, connectOptions, clientOptions); - } - @Override - public JsonObject getWorldAsJson(int id) { - return getWorld(id, queryPool).map(World::toJson).await(); + getWorldQuery = queryPool.preparedQuery("SELECT id, randomnumber FROM world WHERE id = $1"); + updateWorldQuery = queryPool.preparedQuery("UPDATE world SET randomnumber = $1 WHERE id = $2"); + getFortuneQuery = queryPool.preparedQuery("SELECT id, message FROM fortune"); } @Override public World getWorld(int id) { try { - return getWorld(id, queryPool).toCompletableFuture().get(); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - @Override - public JsonArray getWorldsAsJson(int count) { - try { - return Multi.range(0, count) - .flatMap(i -> getWorld(randomWorldNumber(), queryPool)) - .map(World::toJson) - .reduce(JSON::createArrayBuilder, JsonArrayBuilder::add) - .map(JsonArrayBuilder::build) - .await(); + return getWorldQuery.execute(Tuple.of(id)) + .map(rows -> { + Row r = rows.iterator().next(); + return new World(r.getInteger(0), r.getInteger(1)); + }).toCompletionStage().toCompletableFuture().get(); } catch (Exception e) { throw new RuntimeException(e); } @@ -92,17 +83,15 @@ public JsonArray getWorldsAsJson(int count) { @Override public List getWorlds(int count) { try { - List result = new ArrayList<>(count); + List> futures = new ArrayList<>(); for (int i = 0; i < count; i++) { - World world = queryPool.preparedQuery("SELECT id, randomnumber FROM world WHERE id = $1") - .execute(Tuple.of(randomWorldNumber())) - .map(rows -> { - Row r = rows.iterator().next(); - return new World(r.getInteger(0), r.getInteger(1)); - }).toCompletionStage().toCompletableFuture().get(); - result.add(world); + futures.add(getWorldQuery.execute(Tuple.of(randomWorldNumber())) + .map(rows -> { + Row r = rows.iterator().next(); + return new World(r.getInteger(0), r.getInteger(1)); + })); } - return result; + return Future.all(futures).toCompletionStage().toCompletableFuture().get().list(); } catch (Exception e) { throw new 
RuntimeException(e); } @@ -110,10 +99,14 @@ public List getWorlds(int count) { @Override public World updateWorld(World world) { - return Single.create(queryPool.preparedQuery("UPDATE world SET randomnumber = $1 WHERE id = $2") - .execute(Tuple.of(world.id, world.id)) - .toCompletionStage() - .thenApply(rows -> world)).await(); + try { + return updateWorldQuery.execute(Tuple.of(world.id, world.id)) + .toCompletionStage() + .thenApply(rows -> world) + .toCompletableFuture().get(); + } catch (Exception e) { + throw new RuntimeException(e); + } } @Override @@ -165,25 +158,18 @@ private List updateWorldsRetry(List worlds, int from, int retries) @Override public List getFortunes() { - return Single.create(queryPool.preparedQuery("SELECT id, message FROM fortune") - .execute() - .map(rows -> { - List fortunes = new ArrayList<>(rows.size() + 1); - for (Row r : rows) { - fortunes.add(new Fortune(r.getInteger(0), r.getString(1))); - } - return fortunes; - }).toCompletionStage()).await(); - } - - private static Single getWorld(int id, SqlClient pool) { - return Single.create(pool.preparedQuery("SELECT id, randomnumber FROM world WHERE id = $1") - .execute(Tuple.of(id)) - .map(rows -> { - Row r = rows.iterator().next(); - return new World(r.getInteger(0), r.getInteger(1)); - }).toCompletionStage()); - + try { + return getFortuneQuery.execute() + .map(rows -> { + List fortunes = new ArrayList<>(rows.size() + 1); + for (Row r : rows) { + fortunes.add(new Fortune(r.getInteger(0), r.getString(1))); + } + return fortunes; + }).toCompletionStage().toCompletableFuture().get(); + } catch (Exception e) { + throw new RuntimeException(e); + } } private CompletableFuture> updateWorlds(List worlds, int from, SqlClient pool) { @@ -193,8 +179,7 @@ private CompletableFuture> updateWorlds(List worlds, int from World w = worlds.get(i); tuples.add(Tuple.of(w.randomNumber, w.id)); } - return pool.preparedQuery("UPDATE world SET randomnumber = $1 WHERE id = $2") - .executeBatch(tuples) + return updateWorldQuery.executeBatch(tuples) .toCompletionStage() .thenApply(rows -> worlds) .toCompletableFuture(); diff --git a/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/models/World.java b/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/models/World.java index ee8eb9194cd..39deafea11b 100644 --- a/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/models/World.java +++ b/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/models/World.java @@ -9,9 +9,9 @@ public final class World { - private static final String ID_KEY = "id"; - private static final String ID_RANDOM_NUMBER = "randomNumber"; - private static final JsonBuilderFactory JSON = Json.createBuilderFactory(Collections.emptyMap()); + static final String ID_KEY = "id"; + static final String ID_RANDOM_NUMBER = "randomNumber"; + static final JsonBuilderFactory JSON = Json.createBuilderFactory(Collections.emptyMap()); public int id; public int randomNumber; diff --git a/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/services/DbService.java b/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/services/DbService.java index 46c244d96f5..e3bd1fe39fc 100644 --- a/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/services/DbService.java +++ b/frameworks/Java/helidon/nima/src/main/java/io/helidon/benchmark/nima/services/DbService.java @@ -1,27 +1,23 @@ package io.helidon.benchmark.nima.services; -import java.util.Collections; import java.util.List; 
import io.helidon.benchmark.nima.models.DbRepository; import io.helidon.benchmark.nima.models.World; import io.helidon.common.parameters.Parameters; +import io.helidon.http.HeaderValues; import io.helidon.webserver.http.HttpRules; import io.helidon.webserver.http.HttpService; import io.helidon.webserver.http.ServerRequest; import io.helidon.webserver.http.ServerResponse; - -import jakarta.json.Json; -import jakarta.json.JsonArrayBuilder; -import jakarta.json.JsonBuilderFactory; -import jakarta.json.JsonObject; +import io.helidon.common.mapper.OptionalValue; import static io.helidon.benchmark.nima.Main.SERVER; import static io.helidon.benchmark.nima.models.DbRepository.randomWorldNumber; +import static io.helidon.benchmark.nima.JsonSerializer.serialize; public class DbService implements HttpService { - private static final JsonBuilderFactory JSON = Json.createBuilderFactory(Collections.emptyMap()); private final DbRepository repository; @@ -38,36 +34,33 @@ public void routing(HttpRules httpRules) { private void db(ServerRequest req, ServerResponse res) { res.header(SERVER); - res.send(repository.getWorldAsJson(randomWorldNumber())); + res.header(HeaderValues.CONTENT_TYPE_JSON); + res.send(serialize(repository.getWorld(randomWorldNumber()))); } private void queries(ServerRequest req, ServerResponse res) { res.header(SERVER); + res.header(HeaderValues.CONTENT_TYPE_JSON); int count = parseQueryCount(req.query()); - res.send(repository.getWorldsAsJson(count)); + res.send(serialize(repository.getWorlds(count))); } private void updates(ServerRequest req, ServerResponse res) { res.header(SERVER); + res.header(HeaderValues.CONTENT_TYPE_JSON); int count = parseQueryCount(req.query()); List worlds = repository.updateWorlds(count); - JsonArrayBuilder arrayBuilder = JSON.createArrayBuilder(); - for (World world : worlds) { - JsonObject json = world.toJson(); - arrayBuilder.add(json); - } - res.send(arrayBuilder.build()); + res.send(serialize(worlds)); } private int parseQueryCount(Parameters parameters) { - List values = parameters.all("queries"); - if (values.isEmpty()) { + OptionalValue value = parameters.first("queries"); + if (value.isEmpty()) { return 1; } - String first = values.get(0); int parsedValue; try { - parsedValue = Integer.parseInt(first, 10); + parsedValue = Integer.parseInt(value.get(), 10); } catch (NumberFormatException e) { return 1; } diff --git a/frameworks/Java/helidon/nima/src/main/resources/application.yaml b/frameworks/Java/helidon/nima/src/main/resources/application.yaml index ff4aa100b67..41f4d64ec12 100644 --- a/frameworks/Java/helidon/nima/src/main/resources/application.yaml +++ b/frameworks/Java/helidon/nima/src/main/resources/application.yaml @@ -19,6 +19,7 @@ server: port: 8080 backlog: 8192 write-queue-length: 8192 + smart-async-writes: true connection-options: read-timeout: PT0S connect-timeout: PT0S diff --git a/frameworks/Java/jetty/pom.xml b/frameworks/Java/jetty/pom.xml index db5ba4a6b7b..6156c5603f8 100644 --- a/frameworks/Java/jetty/pom.xml +++ b/frameworks/Java/jetty/pom.xml @@ -11,7 +11,7 @@ UTF-8 11 11 - 10.0.14 + 10.0.24 hello.handler.HelloWebServer diff --git a/frameworks/Java/jooby/jooby-jetty.dockerfile b/frameworks/Java/jooby/jooby-jetty.dockerfile index bd3786886ca..c5d636ada2d 100644 --- a/frameworks/Java/jooby/jooby-jetty.dockerfile +++ b/frameworks/Java/jooby/jooby-jetty.dockerfile @@ -1,4 +1,4 @@ -FROM maven:3.9.6-eclipse-temurin-21-jammy +FROM maven:3.9.9-eclipse-temurin-22-alpine WORKDIR /jooby COPY pom.xml pom.xml COPY src src diff --git 
a/frameworks/Java/jooby/jooby-mvc.dockerfile b/frameworks/Java/jooby/jooby-mvc.dockerfile index feaeb23bc8e..b2a4db4712b 100644 --- a/frameworks/Java/jooby/jooby-mvc.dockerfile +++ b/frameworks/Java/jooby/jooby-mvc.dockerfile @@ -1,4 +1,4 @@ -FROM maven:3.9.6-eclipse-temurin-21-jammy +FROM maven:3.9.9-eclipse-temurin-22-alpine WORKDIR /jooby COPY pom.xml pom.xml COPY src src diff --git a/frameworks/Java/jooby/jooby-netty.dockerfile b/frameworks/Java/jooby/jooby-netty.dockerfile index dd6afe017c7..1c3efc2585e 100644 --- a/frameworks/Java/jooby/jooby-netty.dockerfile +++ b/frameworks/Java/jooby/jooby-netty.dockerfile @@ -1,4 +1,4 @@ -FROM maven:3.9.6-eclipse-temurin-21-jammy +FROM maven:3.9.9-eclipse-temurin-22-alpine WORKDIR /jooby COPY pom.xml pom.xml COPY src src diff --git a/frameworks/Java/jooby/jooby-pgclient.dockerfile b/frameworks/Java/jooby/jooby-pgclient.dockerfile index e3c58df435c..044cdd5c5de 100644 --- a/frameworks/Java/jooby/jooby-pgclient.dockerfile +++ b/frameworks/Java/jooby/jooby-pgclient.dockerfile @@ -1,4 +1,4 @@ -FROM maven:3.9.6-eclipse-temurin-21-jammy +FROM maven:3.9.9-eclipse-temurin-22-alpine WORKDIR /jooby COPY pom.xml pom.xml COPY src src diff --git a/frameworks/Java/jooby/jooby.dockerfile b/frameworks/Java/jooby/jooby.dockerfile index 98d1b029d36..a5d4570a60d 100644 --- a/frameworks/Java/jooby/jooby.dockerfile +++ b/frameworks/Java/jooby/jooby.dockerfile @@ -1,4 +1,4 @@ -FROM maven:3.9.6-eclipse-temurin-21-jammy +FROM maven:3.9.9-eclipse-temurin-22-alpine WORKDIR /jooby COPY pom.xml pom.xml COPY src src diff --git a/frameworks/Java/jooby/pom.xml b/frameworks/Java/jooby/pom.xml index 99170fb8cfe..3b6edfc79ec 100644 --- a/frameworks/Java/jooby/pom.xml +++ b/frameworks/Java/jooby/pom.xml @@ -11,13 +11,13 @@ jooby - 3.2.9 - 4.1.112.Final + 3.4.0 + 4.1.113.Final 2.0.2 42.7.4 UTF-8 - 21 - 21 + 22 + 22 com.techempower.App diff --git a/frameworks/Java/redkale/BenchmarkService.java b/frameworks/Java/redkale/BenchmarkService.java deleted file mode 100644 index 39e9956a75d..00000000000 --- a/frameworks/Java/redkale/BenchmarkService.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * To change this license header, choose License Headers in Project Properties. - * To change this template file, choose Tools | Templates - * and open the template in the editor. 
- */ -package org.redkalex.benchmark; - -import java.util.*; -import java.util.concurrent.*; -import java.util.stream.Stream; -import org.redkale.annotation.*; -import org.redkale.net.http.*; -import org.redkale.service.AbstractService; -import org.redkale.source.DataSource; -import org.redkale.util.AnyValue; - -/** - * 测试redkale-jdbc, 需要覆盖到原BenchmarkService - * - * @author zhangjx - */ -@RestService(name = " ", repair = false) -public class BenchmarkService extends AbstractService { - - private static final byte[] helloBytes = "Hello, world!".getBytes(); - - @Resource - private DataSource source; - - public void init(AnyValue conf) { - source.finds(CachedWorld.class, 1); - } - - @NonBlocking - @RestMapping(auth = false) - public byte[] plaintext() { - return helloBytes; - } - - @NonBlocking - @RestMapping(auth = false) - public Message json() { - return new Message("Hello, World!"); - } - - @RestMapping(auth = false) - public World db() { - return source.find(World.class, ThreadLocalRandom.current().nextInt(10000) + 1); - } - - @RestMapping(auth = false) - public List queries(int q) { - return source.findsList(World.class, random(q)); - } - - @RestMapping(auth = false) - public List updates(int q) { - int size = Math.min(500, Math.max(1, q)); - int[] newNumbers = ThreadLocalRandom.current().ints(size, 1, 10001).toArray(); - List words = source.findsList(World.class, random(q)); - source.update(World.updateNewNumbers(words, newNumbers)); - return words; - } - - @RestMapping(auth = false) - public HttpScope fortunes() { - List fortunes = source.queryList(Fortune.class); - fortunes.add(new Fortune(0, "Additional fortune added at request time.")); - Collections.sort(fortunes); - return HttpScope.refer("").referObj(fortunes); - } - - @NonBlocking - @RestMapping(name = "cached-worlds", auth = false) - public CachedWorld[] cachedWorlds(int q) { - return source.finds(CachedWorld.class, random(q)); - } - - private Stream random(int q) { - int size = Math.min(500, Math.max(1, q)); - return ThreadLocalRandom.current().ints(size, 1, 10001).boxed(); - } -} diff --git a/frameworks/Java/redkale/benchmark_config.json b/frameworks/Java/redkale/benchmark_config.json index fa84965daa3..52f8550ce9f 100644 --- a/frameworks/Java/redkale/benchmark_config.json +++ b/frameworks/Java/redkale/benchmark_config.json @@ -26,30 +26,6 @@ "notes": "", "versus": "Redkale" }, - "graalvm": { - "plaintext_url": "/plaintext", - "json_url": "/json", - "db_url": "/db", - "query_url": "/queries?q=", - "fortune_url": "/fortunes", - "update_url": "/updates?q=", - "cached_query_url": "/cached-worlds?q=", - "port": 8080, - "approach": "Realistic", - "classification": "Fullstack", - "database": "Postgres", - "framework": "Redkale", - "language": "Java", - "flavor": "None", - "orm": "Raw", - "platform": "Redkale", - "webserver": "Redkale", - "os": "Linux", - "database_os": "Linux", - "display_name": "redkale-graalvm", - "notes": "", - "versus": "Redkale" - }, "native": { "plaintext_url": "/plaintext", "json_url": "/json", @@ -95,27 +71,6 @@ "notes": "", "versus": "Redkale" }, - "block": { - "db_url": "/db", - "query_url": "/queries?q=", - "fortune_url": "/fortunes", - "update_url": "/updates?q=", - "port": 8080, - "approach": "Realistic", - "classification": "Fullstack", - "database": "Postgres", - "framework": "Redkale", - "language": "Java", - "flavor": "None", - "orm": "Raw", - "platform": "Redkale", - "webserver": "Redkale", - "os": "Linux", - "database_os": "Linux", - "display_name": "redkale-block", - "notes": "", - "versus": 
"Redkale" - }, "pgclient": { "db_url": "/db", "query_url": "/queries?q=", diff --git a/frameworks/Java/redkale/conf/application.xml b/frameworks/Java/redkale/conf/application.xml index aa535805113..642b3da5928 100644 --- a/frameworks/Java/redkale/conf/application.xml +++ b/frameworks/Java/redkale/conf/application.xml @@ -8,7 +8,7 @@ - + diff --git a/frameworks/Java/redkale/config.toml b/frameworks/Java/redkale/config.toml index c2fcbb80d77..149aaad007e 100644 --- a/frameworks/Java/redkale/config.toml +++ b/frameworks/Java/redkale/config.toml @@ -19,24 +19,6 @@ platform = "Redkale" webserver = "Redkale" versus = "Redkale" -[graalvm] -urls.plaintext = "/plaintext" -urls.json = "/json" -urls.db = "/db" -urls.fortune = "/fortunes" -urls.query = "/queries?q=" -urls.update = "/updates?q=" -urls.cached_query = "/cached-worlds?q=" -approach = "Realistic" -classification = "Fullstack" -database = "Postgres" -database_os = "Linux" -os = "Linux" -orm = "Raw" -platform = "Redkale" -webserver = "Redkale" -versus = "Redkale" - [native] urls.plaintext = "/plaintext" urls.json = "/json" @@ -70,21 +52,6 @@ platform = "Redkale" webserver = "Redkale" versus = "Redkale" -[block] -urls.db = "/db" -urls.fortune = "/fortunes" -urls.query = "/queries?q=" -urls.update = "/updates?q=" -approach = "Realistic" -classification = "Fullstack" -database = "Postgres" -database_os = "Linux" -os = "Linux" -orm = "Raw" -platform = "Redkale" -webserver = "Redkale" -versus = "Redkale" - [pgclient] urls.db = "/db" urls.fortune = "/fortunes" diff --git a/frameworks/Java/redkale/pom-jdbc.xml b/frameworks/Java/redkale/pom-jdbc.xml index e617e4e4059..86b6c6d5fd6 100644 --- a/frameworks/Java/redkale/pom-jdbc.xml +++ b/frameworks/Java/redkale/pom-jdbc.xml @@ -8,7 +8,7 @@ org.redkale.boot.Application 2.8.0-SNAPSHOT - 1.3.0-SNAPSHOT + 1.2.0-SNAPSHOT 42.7.2 UTF-8 21 diff --git a/frameworks/Java/redkale/pom.xml b/frameworks/Java/redkale/pom.xml index 8844a8b7aff..3c7970fb4e3 100644 --- a/frameworks/Java/redkale/pom.xml +++ b/frameworks/Java/redkale/pom.xml @@ -79,6 +79,8 @@ --no-fallback + -J-XX:+UseNUMA + -J-XX:+UseParallelGC diff --git a/frameworks/Java/redkale/redkale-block.dockerfile b/frameworks/Java/redkale/redkale-block.dockerfile deleted file mode 100644 index b163c60e53f..00000000000 --- a/frameworks/Java/redkale/redkale-block.dockerfile +++ /dev/null @@ -1,16 +0,0 @@ -FROM maven:3.9.6-amazoncorretto-21-debian as maven -WORKDIR /redkale -COPY src src -COPY conf conf -COPY pom.xml pom.xml -COPY BenchmarkService.java src/main/java/org/redkalex/benchmark/BenchmarkService.java -RUN mvn package -q - -FROM openjdk:23-jdk-slim -WORKDIR /redkale -COPY conf conf -COPY --from=maven /redkale/target/redkale-benchmark-1.0.0.jar redkale-benchmark.jar - -EXPOSE 8080 - -CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-DAPP_HOME=./", "-jar", "redkale-benchmark.jar"] \ No newline at end of file diff --git a/frameworks/Java/redkale/redkale-graalvm.dockerfile b/frameworks/Java/redkale/redkale-graalvm.dockerfile deleted file mode 100644 index 0e3a9a4a597..00000000000 --- a/frameworks/Java/redkale/redkale-graalvm.dockerfile +++ /dev/null @@ -1,16 +0,0 @@ -FROM maven:3.9.6-amazoncorretto-21-debian as maven -WORKDIR /redkale -COPY src src -COPY conf conf -COPY pom.xml pom.xml -RUN mvn package -q - - -FROM ghcr.io/graalvm/jdk-community:22.0.2 -WORKDIR /redkale -COPY conf conf -COPY --from=maven /redkale/target/redkale-benchmark-1.0.0.jar redkale-benchmark.jar - -EXPOSE 8080 - -CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", 
"-DAPP_HOME=./", "-jar", "redkale-benchmark.jar"] diff --git a/frameworks/Java/redkale/redkale-jdbc.dockerfile b/frameworks/Java/redkale/redkale-jdbc.dockerfile index b39997adf5f..ac3a85bec8b 100644 --- a/frameworks/Java/redkale/redkale-jdbc.dockerfile +++ b/frameworks/Java/redkale/redkale-jdbc.dockerfile @@ -3,7 +3,6 @@ WORKDIR /redkale COPY src src COPY conf conf COPY pom-jdbc.xml pom.xml -COPY BenchmarkService.java src/main/java/org/redkalex/benchmark/BenchmarkService.java RUN mvn package -q FROM openjdk:23-jdk-slim @@ -13,4 +12,4 @@ COPY --from=maven /redkale/target/redkale-benchmark-1.0.0.jar redkale-benchmark. EXPOSE 8080 -CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-DAPP_HOME=./", "-jar", "redkale-benchmark.jar"] \ No newline at end of file +CMD ["java", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-DAPP_HOME=./", "-jar", "redkale-benchmark.jar"] \ No newline at end of file diff --git a/frameworks/Java/redkale/redkale-pgclient.dockerfile b/frameworks/Java/redkale/redkale-pgclient.dockerfile index 6da37e715c5..af47b952c72 100644 --- a/frameworks/Java/redkale/redkale-pgclient.dockerfile +++ b/frameworks/Java/redkale/redkale-pgclient.dockerfile @@ -12,4 +12,4 @@ COPY --from=maven /redkale/target/redkale-benchmark-1.0.0.jar redkale-benchmark. EXPOSE 8080 -CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-DAPP_HOME=./", "-jar", "redkale-benchmark.jar"] \ No newline at end of file +CMD ["java", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-DAPP_HOME=./", "-jar", "redkale-benchmark.jar"] \ No newline at end of file diff --git a/frameworks/Java/redkale/redkale.dockerfile b/frameworks/Java/redkale/redkale.dockerfile index 4bb09a8dd56..79ec585c205 100644 --- a/frameworks/Java/redkale/redkale.dockerfile +++ b/frameworks/Java/redkale/redkale.dockerfile @@ -12,4 +12,4 @@ COPY --from=maven /redkale/target/redkale-benchmark-1.0.0.jar redkale-benchmark. 
EXPOSE 8080 -CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-DAPP_HOME=./", "-jar", "redkale-benchmark.jar"] +CMD ["java", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-DAPP_HOME=./", "-jar", "redkale-benchmark.jar"] diff --git a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/BenchmarkService.java b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/BenchmarkService.java index 48af592f928..e2ebc7b8aad 100644 --- a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/BenchmarkService.java +++ b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/BenchmarkService.java @@ -7,13 +7,11 @@ import java.util.*; import java.util.concurrent.*; -import java.util.stream.IntStream; -import java.util.stream.Stream; +import java.util.stream.*; import org.redkale.annotation.*; import org.redkale.net.http.*; import org.redkale.service.AbstractService; import org.redkale.source.DataSource; -import org.redkale.util.AnyValue; /** * @@ -28,10 +26,6 @@ public class BenchmarkService extends AbstractService { @Resource private DataSource source; - public void init(AnyValue conf) { - source.finds(CachedWorld.class, 1); - } - @RestMapping(auth = false) public byte[] plaintext() { return helloBytes; diff --git a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Message.java b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Message.java index 00c529e9eee..a5e7aed358d 100644 --- a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Message.java +++ b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Message.java @@ -6,7 +6,7 @@ package org.redkalex.benchmark; import org.redkale.annotation.Serial; -import org.redkale.convert.ConvertSmallString; +import org.redkale.convert.ConvertStandardString; import org.redkale.convert.json.JsonConvert; /** @@ -16,7 +16,7 @@ @Serial public final class Message { - @ConvertSmallString + @ConvertStandardString private String message; public Message() {} diff --git a/frameworks/Java/solon/README.md b/frameworks/Java/solon/README.md index f2370ab2305..49d4aef9fe8 100644 --- a/frameworks/Java/solon/README.md +++ b/frameworks/Java/solon/README.md @@ -10,7 +10,7 @@ This is the solon portion of a [benchmarking test suite](../) comparing a variet ## Versions * [Java OpenJDK 21](http://openjdk.java.net/) -* [solon 2.9.1](https://github.com/noear/solon) +* [solon 3.0.2](https://github.com/noear/solon) ## Test URLs diff --git a/frameworks/Java/solon/pom.xml b/frameworks/Java/solon/pom.xml index 035ad60951a..0c9d8d570ce 100644 --- a/frameworks/Java/solon/pom.xml +++ b/frameworks/Java/solon/pom.xml @@ -5,7 +5,7 @@ org.noear solon-parent - 2.9.2 + 3.0.2 hello @@ -20,14 +20,12 @@ org.noear - solon.boot.smarthttp - ${solon.version} + solon-boot-smarthttp org.noear - solon.serialization.fastjson2 - ${solon.version} + solon-serialization-snack3 diff --git a/frameworks/Java/spring-webflux/pom.xml b/frameworks/Java/spring-webflux/pom.xml index c15b211c92d..7e1b6865d2d 100644 --- a/frameworks/Java/spring-webflux/pom.xml +++ b/frameworks/Java/spring-webflux/pom.xml @@ -13,13 +13,14 @@ org.springframework.boot spring-boot-starter-parent - 3.3.3 + 3.3.4 - 21 - 21 - UTF-8 + 21 + 1.3.6 + 1.0.2.RELEASE + 1.0.7.RELEASE @@ -36,8 +37,16 @@ r2dbc-postgresql - org.springframework.boot - spring-boot-starter-mustache + io.jstach + jstachio + ${jstachio.version} + + + io.jstach + jstachio-apt + ${jstachio.version} + provided + true org.springframework.boot @@ -55,6 +64,15 @@ org.apache.maven.plugins maven-compiler-plugin + + + 
+ io.jstach + jstachio-apt + ${jstachio.version} + + + diff --git a/frameworks/Java/spring-webflux/spring-webflux-mongo.dockerfile b/frameworks/Java/spring-webflux/spring-webflux-mongo.dockerfile index 41eedefa4c6..d565d1556c3 100644 --- a/frameworks/Java/spring-webflux/spring-webflux-mongo.dockerfile +++ b/frameworks/Java/spring-webflux/spring-webflux-mongo.dockerfile @@ -13,4 +13,4 @@ RUN java -Djarmode=tools -jar app.jar extract EXPOSE 8080 -CMD ["java", "-Dlogging.level.root=OFF", "-Dreactor.netty.http.server.lastFlushWhenNoRead=true", "-jar", "app/app.jar", "--spring.profiles.active=mongo"] \ No newline at end of file +CMD ["java", "-Dlogging.level.root=OFF", "-Dio.netty.leakDetection.level=disabled", "-Dreactor.netty.http.server.lastFlushWhenNoRead=true", "-jar", "app/app.jar", "--spring.profiles.active=mongo"] \ No newline at end of file diff --git a/frameworks/Java/spring-webflux/spring-webflux.dockerfile b/frameworks/Java/spring-webflux/spring-webflux.dockerfile index 6ff2ed4e537..e1cee08ff31 100644 --- a/frameworks/Java/spring-webflux/spring-webflux.dockerfile +++ b/frameworks/Java/spring-webflux/spring-webflux.dockerfile @@ -12,4 +12,4 @@ RUN java -Djarmode=tools -jar app.jar extract EXPOSE 8080 -CMD ["java", "-Dlogging.level.root=OFF", "-Dreactor.netty.http.server.lastFlushWhenNoRead=true", "-jar", "app/app.jar", "--spring.profiles.active=r2dbc"] +CMD ["java", "-Dlogging.level.root=OFF", "-Dio.netty.leakDetection.level=disabled", "-Dreactor.netty.http.server.lastFlushWhenNoRead=true", "-jar", "app/app.jar", "--spring.profiles.active=r2dbc"] diff --git a/frameworks/Java/spring-webflux/src/main/java/benchmark/App.java b/frameworks/Java/spring-webflux/src/main/java/benchmark/App.java index c574863a9a9..3ba49056611 100644 --- a/frameworks/Java/spring-webflux/src/main/java/benchmark/App.java +++ b/frameworks/Java/spring-webflux/src/main/java/benchmark/App.java @@ -11,7 +11,6 @@ import org.springframework.web.reactive.function.server.RouterFunction; import org.springframework.web.reactive.function.server.RouterFunctions; import org.springframework.web.reactive.function.server.ServerResponse; -import org.springframework.web.reactive.result.view.ViewResolver; import org.springframework.web.server.WebHandler; import org.springframework.web.server.adapter.WebHttpHandlerBuilder; @@ -20,8 +19,8 @@ public class App { @Bean - public HttpHandler httpHandler(RouterFunction route, ServerFilter serverFilter, ViewResolver viewResolver) { - WebHandler webHandler = RouterFunctions.toWebHandler(route, HandlerStrategies.builder().viewResolver(viewResolver).build()); + public HttpHandler httpHandler(RouterFunction route, ServerFilter serverFilter) { + WebHandler webHandler = RouterFunctions.toWebHandler(route, HandlerStrategies.builder().build()); return WebHttpHandlerBuilder.webHandler(webHandler).filter(serverFilter).build(); } diff --git a/frameworks/Java/spring-webflux/src/main/java/benchmark/Utils.java b/frameworks/Java/spring-webflux/src/main/java/benchmark/Utils.java new file mode 100644 index 00000000000..4631be27f11 --- /dev/null +++ b/frameworks/Java/spring-webflux/src/main/java/benchmark/Utils.java @@ -0,0 +1,19 @@ +package benchmark; + +import java.util.concurrent.ThreadLocalRandom; +import java.util.stream.IntStream; + +abstract public class Utils { + + private static final int MIN_WORLD_NUMBER = 1; + private static final int MAX_WORLD_NUMBER_PLUS_ONE = 10_001; + + public static int randomWorldNumber() { + return ThreadLocalRandom.current().nextInt(MIN_WORLD_NUMBER, 
MAX_WORLD_NUMBER_PLUS_ONE); + } + + public static IntStream randomWorldNumbers() { + return ThreadLocalRandom.current().ints(MIN_WORLD_NUMBER, MAX_WORLD_NUMBER_PLUS_ONE).distinct(); + } + +} diff --git a/frameworks/Java/spring-webflux/src/main/java/benchmark/model/Fortune.java b/frameworks/Java/spring-webflux/src/main/java/benchmark/model/Fortune.java index 12ae17e0448..d66f56215fc 100644 --- a/frameworks/Java/spring-webflux/src/main/java/benchmark/model/Fortune.java +++ b/frameworks/Java/spring-webflux/src/main/java/benchmark/model/Fortune.java @@ -4,9 +4,11 @@ import org.springframework.data.mongodb.core.mapping.Document; @Document -public final class Fortune { +public final class Fortune implements Comparable { + @Id public int id; + public String message; public Fortune(int id, String message) { @@ -14,11 +16,8 @@ public Fortune(int id, String message) { this.message = message; } - public int getId() { - return id; - } - - public String getMessage() { - return message; + @Override + public int compareTo(final Fortune other) { + return message.compareTo(other.message); } } diff --git a/frameworks/Java/spring-webflux/src/main/java/benchmark/model/Message.java b/frameworks/Java/spring-webflux/src/main/java/benchmark/model/Message.java deleted file mode 100644 index 8a94c8d3ed1..00000000000 --- a/frameworks/Java/spring-webflux/src/main/java/benchmark/model/Message.java +++ /dev/null @@ -1,15 +0,0 @@ -package benchmark.model; - -public class Message { - - private final String message; - - public Message(String message) { - this.message = message; - } - - public String getMessage() { - return message; - } - -} \ No newline at end of file diff --git a/frameworks/Java/spring-webflux/src/main/java/benchmark/model/World.java b/frameworks/Java/spring-webflux/src/main/java/benchmark/model/World.java index 612c7fef03a..ab096a1e313 100644 --- a/frameworks/Java/spring-webflux/src/main/java/benchmark/model/World.java +++ b/frameworks/Java/spring-webflux/src/main/java/benchmark/model/World.java @@ -9,6 +9,7 @@ public final class World { @Id public int id; + @Field("randomNumber") public int randomnumber; diff --git a/frameworks/Java/spring-webflux/src/main/java/benchmark/repository/DbRepository.java b/frameworks/Java/spring-webflux/src/main/java/benchmark/repository/DbRepository.java index 20e753e317e..54b6d0d9d02 100644 --- a/frameworks/Java/spring-webflux/src/main/java/benchmark/repository/DbRepository.java +++ b/frameworks/Java/spring-webflux/src/main/java/benchmark/repository/DbRepository.java @@ -6,6 +6,7 @@ import reactor.core.publisher.Mono; public interface DbRepository { + Mono getWorld(int id); Mono findAndUpdateWorld(int id, int randomNumber); diff --git a/frameworks/Java/spring-webflux/src/main/java/benchmark/repository/JdbcDbRepository.java b/frameworks/Java/spring-webflux/src/main/java/benchmark/repository/JdbcDbRepository.java deleted file mode 100644 index 5e159f816ed..00000000000 --- a/frameworks/Java/spring-webflux/src/main/java/benchmark/repository/JdbcDbRepository.java +++ /dev/null @@ -1,63 +0,0 @@ -package benchmark.repository; - -import benchmark.model.Fortune; -import benchmark.model.World; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.context.annotation.Profile; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.stereotype.Component; -import reactor.core.publisher.Flux; -import reactor.core.publisher.Mono; -import reactor.core.scheduler.Scheduler; - -@Component -@Profile("jdbc") -public class JdbcDbRepository 
implements DbRepository { - private final Logger log = LoggerFactory.getLogger(getClass()); - private final JdbcTemplate jdbcTemplate; - private final Scheduler scheduler; - - public JdbcDbRepository(JdbcTemplate jdbcTemplate, Scheduler scheduler) { - this.jdbcTemplate = jdbcTemplate; - this.scheduler = scheduler; - } - - @Override - public Mono getWorld(int id) { - log.debug("getWorld({})", id); - return Mono.fromCallable(() -> { - return jdbcTemplate.queryForObject( - "SELECT * FROM world WHERE id = ?", - (rs, rn) -> new World(rs.getInt("id"), rs.getInt("randomnumber")), - id); - }).subscribeOn(scheduler); - } - - private Mono updateWorld(World world) { - return Mono.fromCallable(() -> { - jdbcTemplate.update( - "UPDATE world SET randomnumber = ? WHERE id = ?", - world.randomnumber, - world.id); - return world; - }).subscribeOn(scheduler); - } - - @Override - public Mono findAndUpdateWorld(int id, int randomNumber) { - return getWorld(id).flatMap(world -> { - world.randomnumber = randomNumber; - return updateWorld(world); - }); - } - - @Override - public Flux fortunes() { - return Mono.fromCallable(() -> { - return jdbcTemplate.query( - "SELECT * FROM fortune", - (rs, rn) -> new Fortune(rs.getInt("id"), rs.getString("message"))); - }).subscribeOn(scheduler).flatMapIterable(fortunes -> fortunes); - } -} \ No newline at end of file diff --git a/frameworks/Java/spring-webflux/src/main/java/benchmark/repository/R2dbcDbRepository.java b/frameworks/Java/spring-webflux/src/main/java/benchmark/repository/R2dbcDbRepository.java index c7cfb89dea4..c2524f07862 100644 --- a/frameworks/Java/spring-webflux/src/main/java/benchmark/repository/R2dbcDbRepository.java +++ b/frameworks/Java/spring-webflux/src/main/java/benchmark/repository/R2dbcDbRepository.java @@ -1,16 +1,18 @@ package benchmark.repository; -import benchmark.model.Fortune; -import benchmark.model.World; import org.springframework.context.annotation.Profile; import org.springframework.r2dbc.core.DatabaseClient; import org.springframework.stereotype.Component; + +import benchmark.model.Fortune; +import benchmark.model.World; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; @Component @Profile("r2dbc") public class R2dbcDbRepository implements DbRepository { + private final DatabaseClient databaseClient; public R2dbcDbRepository(DatabaseClient databaseClient) { @@ -24,10 +26,9 @@ public Mono getWorld(int id) { .bind("$1", id) .mapProperties(World.class) .first(); - } - public Mono updateWorld(World world) { + private Mono updateWorld(World world) { return databaseClient .sql("UPDATE world SET randomnumber=$2 WHERE id = $1") .bind("$1", world.id) @@ -37,6 +38,8 @@ public Mono updateWorld(World world) { .map(count -> world); } + + @Override public Mono findAndUpdateWorld(int id, int randomNumber) { return getWorld(id).flatMap(world -> { world.randomnumber = randomNumber; @@ -51,4 +54,5 @@ public Flux fortunes() { .mapProperties(Fortune.class) .all(); } + } \ No newline at end of file diff --git a/frameworks/Java/spring-webflux/src/main/java/benchmark/web/DbHandler.java b/frameworks/Java/spring-webflux/src/main/java/benchmark/web/DbHandler.java index bafddaf83c6..e85d9c5be15 100644 --- a/frameworks/Java/spring-webflux/src/main/java/benchmark/web/DbHandler.java +++ b/frameworks/Java/spring-webflux/src/main/java/benchmark/web/DbHandler.java @@ -1,26 +1,27 @@ package benchmark.web; -import java.util.Collections; import java.util.List; -import java.util.concurrent.ThreadLocalRandom; +import benchmark.Utils; import 
benchmark.model.Fortune; import benchmark.model.World; import benchmark.repository.DbRepository; +import io.jstach.jstachio.JStachio; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import org.springframework.core.ParameterizedTypeReference; +import org.springframework.http.HttpHeaders; import org.springframework.http.MediaType; import org.springframework.stereotype.Component; import org.springframework.web.reactive.function.server.ServerRequest; import org.springframework.web.reactive.function.server.ServerResponse; -import static java.util.Comparator.comparing; - @Component public class DbHandler { + private static final String CONTENT_TYPE_VALUE = "text/html; charset=utf-8"; + private final DbRepository dbRepository; public DbHandler(DbRepository dbRepository) { @@ -28,24 +29,24 @@ public DbHandler(DbRepository dbRepository) { } public Mono db(ServerRequest request) { - int id = randomWorldNumber(); + int id = Utils.randomWorldNumber(); Mono world = dbRepository.getWorld(id) .switchIfEmpty(Mono.error(new Exception("No World found with Id: " + id))); return ServerResponse.ok() - .contentType(MediaType.APPLICATION_JSON) + .header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE) .body(world, World.class); } public Mono queries(ServerRequest request) { int queries = parseQueryCount(request.queryParams().getFirst("queries")); - Mono> worlds = Flux.range(0, queries) - .flatMap(i -> dbRepository.getWorld(randomWorldNumber())) + Mono> worlds = Flux.fromStream(Utils.randomWorldNumbers().limit(queries).boxed()) + .flatMap(dbRepository::getWorld) .collectList(); return ServerResponse.ok() - .contentType(MediaType.APPLICATION_JSON) + .header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE) .body(worlds, new ParameterizedTypeReference>() { }); } @@ -66,28 +67,24 @@ private static int parseQueryCount(String maybeTextValue) { public Mono updates(ServerRequest request) { int queries = parseQueryCount(request.queryParams().getFirst("queries")); - Mono> worlds = Flux.range(0, queries) - .flatMap(i -> dbRepository.findAndUpdateWorld(randomWorldNumber(), randomWorldNumber())) + Mono> worlds = Flux.fromStream(Utils.randomWorldNumbers().limit(queries).boxed()) + .flatMap(i -> dbRepository.findAndUpdateWorld(i, Utils.randomWorldNumber())) .collectList(); return ServerResponse.ok() - .contentType(MediaType.APPLICATION_JSON) + .header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE) .body(worlds, new ParameterizedTypeReference>() { }); } public Mono fortunes(ServerRequest request) { - Mono> result = dbRepository.fortunes().collectList().flatMap(fortunes -> { - fortunes.add(new Fortune(0, "Additional fortune added at request time.")); - fortunes.sort(comparing(fortune -> fortune.message)); - return Mono.just(fortunes); - }); - - return ServerResponse.ok() - .render("fortunes", Collections.singletonMap("fortunes", result)); + return dbRepository.fortunes() + .concatWith(Mono.just(new Fortune(0, "Additional fortune added at request time."))) + .collectSortedList() + .flatMap(fortunes -> + ServerResponse.ok() + .header(HttpHeaders.CONTENT_TYPE, CONTENT_TYPE_VALUE) + .bodyValue(JStachio.render(new Fortunes(fortunes)))); } - private static int randomWorldNumber() { - return 1 + ThreadLocalRandom.current().nextInt(10000); - } } \ No newline at end of file diff --git a/frameworks/Java/spring-webflux/src/main/java/benchmark/web/Fortunes.java b/frameworks/Java/spring-webflux/src/main/java/benchmark/web/Fortunes.java new file mode 100644 index 
00000000000..d8fc3dd7e2d --- /dev/null +++ b/frameworks/Java/spring-webflux/src/main/java/benchmark/web/Fortunes.java @@ -0,0 +1,10 @@ +package benchmark.web; + +import java.util.List; + +import benchmark.model.Fortune; +import io.jstach.jstache.JStache; + +@JStache(path = "fortunes.mustache") +public record Fortunes(List fortunes) { +} diff --git a/frameworks/Java/spring-webflux/src/main/resources/application.yml b/frameworks/Java/spring-webflux/src/main/resources/application.yml index af7743831e3..45ca20c7d80 100755 --- a/frameworks/Java/spring-webflux/src/main/resources/application.yml +++ b/frameworks/Java/spring-webflux/src/main/resources/application.yml @@ -15,7 +15,9 @@ spring: r2dbc: username: ${database.username} password: ${database.password} - url: r2dbc:postgresql://${database.host}:${database.port}/${database.name} + url: r2dbc:postgresql://${database.host}:${database.port}/${database.name}?loggerLevel=OFF&disableColumnSanitiser=true&assumeMinServerVersion=16&sslmode=disable + pool: + max-size: 256 --- spring: diff --git a/frameworks/Java/spring-webflux/src/main/resources/templates/fortunes.mustache b/frameworks/Java/spring-webflux/src/main/resources/fortunes.mustache similarity index 100% rename from frameworks/Java/spring-webflux/src/main/resources/templates/fortunes.mustache rename to frameworks/Java/spring-webflux/src/main/resources/fortunes.mustache diff --git a/frameworks/Java/spring/README.md b/frameworks/Java/spring/README.md index 8e25a585d44..742649fff7e 100644 --- a/frameworks/Java/spring/README.md +++ b/frameworks/Java/spring/README.md @@ -2,9 +2,7 @@ This is the Spring MVC portion of a [benchmarking test suite](../) comparing a variety of web development platforms. -An embedded undertow is used for the web server, with nearly everything configured with default settings. -The only thing changed is Hikari can use up to (2 * cores count) connections (the default is 10). -See [About-Pool-Sizing](https://github.com/brettwooldridge/HikariCP/wiki/About-Pool-Sizing) +An embedded undertow is used for the web server. There are two implementations : * For postgresql access, JdbcTemplate is used. See [JdbcDbRepository](src/main/java/hello/JdbcDbRepository.java). 
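Aside on the JdbcTemplate path mentioned in the spring README hunk above: the blocking JdbcDbRepository it points to is not touched by this patch, so the sketch below is illustrative only, with the class name, the local World record, and the constructor shape assumed rather than taken from the repo. It mirrors the query shape of the reactive JdbcDbRepository that this patch removes from spring-webflux.

import org.springframework.jdbc.core.JdbcTemplate;

// Minimal sketch, assuming a two-column "world" table; this local World record stands in
// for the benchmark's own model class and is not part of the patch.
public class JdbcWorldLookupSketch {

    public record World(int id, int randomNumber) {
    }

    private final JdbcTemplate jdbcTemplate;

    public JdbcWorldLookupSketch(JdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }

    // Fetch one World row by primary key, mapping the two columns by name.
    public World getWorld(int id) {
        return jdbcTemplate.queryForObject(
                "SELECT * FROM world WHERE id = ?",
                (rs, rowNum) -> new World(rs.getInt("id"), rs.getInt("randomnumber")),
                id);
    }
}

The same RowMapper lambda appears in the deleted reactive repository earlier in this diff; only the Mono/Scheduler wrapping differs.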
diff --git a/frameworks/Java/spring/benchmark_config.json b/frameworks/Java/spring/benchmark_config.json index 615c4a478ca..72362984811 100644 --- a/frameworks/Java/spring/benchmark_config.json +++ b/frameworks/Java/spring/benchmark_config.json @@ -24,27 +24,6 @@ "notes": "", "versus": "" }, - "jpa": { - "db_url": "/db", - "query_url": "/queries?queries=", - "fortune_url": "/fortunes", - "update_url": "/updates?queries=", - "port": 8080, - "approach": "Realistic", - "classification": "Fullstack", - "database": "Postgres", - "framework": "spring", - "language": "Java", - "flavor": "None", - "orm": "Full", - "platform": "Servlet", - "webserver": "Undertow", - "os": "Linux", - "database_os": "Linux", - "display_name": "spring-jpa", - "notes": "", - "versus": "spring" - }, "mongo": { "db_url": "/db", "query_url": "/queries?queries=", diff --git a/frameworks/Java/spring/pom.xml b/frameworks/Java/spring/pom.xml index 4a8d36e9749..6c9341fed54 100644 --- a/frameworks/Java/spring/pom.xml +++ b/frameworks/Java/spring/pom.xml @@ -11,11 +11,12 @@ org.springframework.boot spring-boot-starter-parent - 3.3.3 + 3.3.4 21 + 1.3.6 @@ -35,15 +36,23 @@ org.springframework.boot - spring-boot-starter-data-jpa + spring-boot-starter-jdbc org.springframework.boot spring-boot-starter-data-mongodb - org.springframework.boot - spring-boot-starter-mustache + io.jstach + jstachio + ${jstachio.version} + + + io.jstach + jstachio-apt + ${jstachio.version} + provided + true @@ -61,6 +70,15 @@ org.apache.maven.plugins maven-compiler-plugin + + + + io.jstach + jstachio-apt + ${jstachio.version} + + + diff --git a/frameworks/Java/spring/spring-jpa.dockerfile b/frameworks/Java/spring/spring-jpa.dockerfile deleted file mode 100644 index 0598e9c1f28..00000000000 --- a/frameworks/Java/spring/spring-jpa.dockerfile +++ /dev/null @@ -1,15 +0,0 @@ -FROM maven:3.9.5-eclipse-temurin-21 as maven -WORKDIR /spring -COPY src src -COPY pom.xml pom.xml -RUN mvn package -q - -FROM bellsoft/liberica-openjre-debian:21 -WORKDIR /spring -COPY --from=maven /spring/target/hello-spring-1.0-SNAPSHOT.jar app.jar -# See https://docs.spring.io/spring-boot/reference/packaging/efficient.html -RUN java -Djarmode=tools -jar app.jar extract - -EXPOSE 8080 - -CMD ["java", "-XX:+DisableExplicitGC", "-XX:+UseStringDeduplication", "-Dlogging.level.root=OFF", "-jar", "app/app.jar", "--spring.profiles.active=jpa"] \ No newline at end of file diff --git a/frameworks/Java/spring/src/main/java/hello/App.java b/frameworks/Java/spring/src/main/java/hello/App.java index 0484b46517e..da87b679f87 100644 --- a/frameworks/Java/spring/src/main/java/hello/App.java +++ b/frameworks/Java/spring/src/main/java/hello/App.java @@ -1,17 +1,10 @@ package hello; -import javax.sql.DataSource; - import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; -import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties; import org.springframework.boot.context.event.ApplicationReadyEvent; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Profile; import org.springframework.context.event.EventListener; -import com.zaxxer.hikari.HikariDataSource; - @SpringBootApplication public class App { @@ -20,18 +13,8 @@ public static void main(String[] args) { } @EventListener(ApplicationReadyEvent.class) - public void runAfterStartup() { - System.out.println("Application is ready"); - } - - @Bean - @Profile({ "jdbc", "jpa" }) - DataSource datasource(DataSourceProperties 
dataSourceProperties) { - HikariDataSource dataSource = dataSourceProperties.initializeDataSourceBuilder().type(HikariDataSource.class) - .build(); - dataSource.setMaximumPoolSize(Runtime.getRuntime().availableProcessors() * 2); - - return dataSource; + public void runAfterStartup() { + System.out.println("Application is ready"); } } diff --git a/frameworks/Java/spring/src/main/java/hello/JpaConfig.java b/frameworks/Java/spring/src/main/java/hello/JpaConfig.java deleted file mode 100644 index c5b8576acab..00000000000 --- a/frameworks/Java/spring/src/main/java/hello/JpaConfig.java +++ /dev/null @@ -1,12 +0,0 @@ -package hello; - -import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Profile; -import org.springframework.data.jpa.repository.config.EnableJpaRepositories; - -@Profile("jpa") -@Configuration -@EnableJpaRepositories(basePackages = "hello.jpa") -public class JpaConfig { - -} diff --git a/frameworks/Java/spring/src/main/java/hello/UpdateWorldService.java b/frameworks/Java/spring/src/main/java/hello/UpdateWorldService.java deleted file mode 100644 index 11c6568c076..00000000000 --- a/frameworks/Java/spring/src/main/java/hello/UpdateWorldService.java +++ /dev/null @@ -1,9 +0,0 @@ -package hello; - -import hello.model.World; - -public interface UpdateWorldService { - - World updateWorld(int worldId); - -} diff --git a/frameworks/Java/spring/src/main/java/hello/UpdateWorldServiceImpl.java b/frameworks/Java/spring/src/main/java/hello/UpdateWorldServiceImpl.java deleted file mode 100644 index 2bd4304a9ee..00000000000 --- a/frameworks/Java/spring/src/main/java/hello/UpdateWorldServiceImpl.java +++ /dev/null @@ -1,43 +0,0 @@ -package hello; - -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import hello.web.DbHandler; -import hello.web.WebmvcRouter; -import hello.model.World; -import hello.repository.DbRepository; - -@Service -public class UpdateWorldServiceImpl implements UpdateWorldService { - - private DbRepository dbRepository; - - public UpdateWorldServiceImpl(DbRepository dbRepository) { - this.dbRepository = dbRepository; - } - - @Override - @Transactional - public World updateWorld(int worldId) { - var world = dbRepository.getWorld(worldId); - // Ensure that the new random number is not equal to the old one. - // That would cause the JPA-based implementation to avoid sending the - // UPDATE query to the database, which would violate the test - // requirements. 
- - // Locally the records doesn't exist, maybe in the yours is ok but we need to - // make this check - if (world == null) { - return null; - } - - int newRandomNumber; - do { - newRandomNumber = DbHandler.randomWorldNumber(); - } while (newRandomNumber == world.randomnumber); - - return dbRepository.updateWorld(world, newRandomNumber); - } - -} diff --git a/frameworks/Java/spring/src/main/java/hello/Utils.java b/frameworks/Java/spring/src/main/java/hello/Utils.java new file mode 100644 index 00000000000..fbb1216624f --- /dev/null +++ b/frameworks/Java/spring/src/main/java/hello/Utils.java @@ -0,0 +1,19 @@ +package hello; + +import java.util.concurrent.ThreadLocalRandom; +import java.util.stream.IntStream; + +abstract public class Utils { + + private static final int MIN_WORLD_NUMBER = 1; + private static final int MAX_WORLD_NUMBER_PLUS_ONE = 10_001; + + public static int randomWorldNumber() { + return ThreadLocalRandom.current().nextInt(MIN_WORLD_NUMBER, MAX_WORLD_NUMBER_PLUS_ONE); + } + + public static IntStream randomWorldNumbers() { + return ThreadLocalRandom.current().ints(MIN_WORLD_NUMBER, MAX_WORLD_NUMBER_PLUS_ONE).distinct(); + } + +} diff --git a/frameworks/Java/spring/src/main/java/hello/jpa/FortuneRepository.java b/frameworks/Java/spring/src/main/java/hello/jpa/FortuneRepository.java deleted file mode 100644 index 30dea98cc27..00000000000 --- a/frameworks/Java/spring/src/main/java/hello/jpa/FortuneRepository.java +++ /dev/null @@ -1,12 +0,0 @@ -package hello.jpa; - -import org.springframework.context.annotation.Profile; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.stereotype.Repository; - -import hello.model.Fortune; - -@Repository -@Profile("jpa") -public interface FortuneRepository extends JpaRepository { -} diff --git a/frameworks/Java/spring/src/main/java/hello/jpa/JpaDbRepository.java b/frameworks/Java/spring/src/main/java/hello/jpa/JpaDbRepository.java deleted file mode 100644 index 2b58841a035..00000000000 --- a/frameworks/Java/spring/src/main/java/hello/jpa/JpaDbRepository.java +++ /dev/null @@ -1,38 +0,0 @@ -package hello.jpa; - -import java.util.List; - -import org.springframework.context.annotation.Profile; -import org.springframework.stereotype.Service; - -import hello.model.Fortune; -import hello.model.World; -import hello.repository.DbRepository; - -@Service -@Profile("jpa") -public class JpaDbRepository implements DbRepository { - private final WorldRepository worldRepository; - private final FortuneRepository fortuneRepository; - - public JpaDbRepository(WorldRepository worldRepository, FortuneRepository fortuneRepository) { - this.worldRepository = worldRepository; - this.fortuneRepository = fortuneRepository; - } - - @Override - public World getWorld(int id) { - return worldRepository.findById(id).orElse(null); - } - - @Override - public World updateWorld(World world, int randomNumber) { - world.randomnumber = randomNumber; - return worldRepository.save(world); - } - - @Override - public List fortunes() { - return fortuneRepository.findAll(); - } -} diff --git a/frameworks/Java/spring/src/main/java/hello/jpa/WorldRepository.java b/frameworks/Java/spring/src/main/java/hello/jpa/WorldRepository.java deleted file mode 100644 index 70361aa40d6..00000000000 --- a/frameworks/Java/spring/src/main/java/hello/jpa/WorldRepository.java +++ /dev/null @@ -1,12 +0,0 @@ -package hello.jpa; - -import org.springframework.context.annotation.Profile; -import org.springframework.data.jpa.repository.JpaRepository; -import 
org.springframework.stereotype.Repository; - -import hello.model.World; - -@Repository -@Profile("jpa") -public interface WorldRepository extends JpaRepository { -} diff --git a/frameworks/Java/spring/src/main/java/hello/model/Fortune.java b/frameworks/Java/spring/src/main/java/hello/model/Fortune.java index e4ff559610a..a628d3c755f 100644 --- a/frameworks/Java/spring/src/main/java/hello/model/Fortune.java +++ b/frameworks/Java/spring/src/main/java/hello/model/Fortune.java @@ -1,33 +1,25 @@ package hello.model; -import jakarta.persistence.Entity; - import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.Field; @Document -@Entity -public final class Fortune { +public final class Fortune implements Comparable{ + @Id - @jakarta.persistence.Id - public int id; - @Field("message") - public String message; + public final int id; - protected Fortune() { - } + @Field("message") + public final String message; public Fortune(int id, String message) { this.id = id; this.message = message; } - public int getId() { - return id; - } - - public String getMessage() { - return message; + @Override + public int compareTo(final Fortune other) { + return message.compareTo(other.message); } -} \ No newline at end of file +} diff --git a/frameworks/Java/spring/src/main/java/hello/model/Message.java b/frameworks/Java/spring/src/main/java/hello/model/Message.java deleted file mode 100644 index 4c675c8a162..00000000000 --- a/frameworks/Java/spring/src/main/java/hello/model/Message.java +++ /dev/null @@ -1,15 +0,0 @@ -package hello.model; - -public class Message { - - private final String message; - - public Message(String message) { - this.message = message; - } - - public String getMessage() { - return message; - } - -} \ No newline at end of file diff --git a/frameworks/Java/spring/src/main/java/hello/model/World.java b/frameworks/Java/spring/src/main/java/hello/model/World.java index 2855df8a5d8..762e9e622ce 100644 --- a/frameworks/Java/spring/src/main/java/hello/model/World.java +++ b/frameworks/Java/spring/src/main/java/hello/model/World.java @@ -1,26 +1,22 @@ package hello.model; -import jakarta.persistence.Entity; - import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.Field; @Document -@Entity public final class World { @Id - @jakarta.persistence.Id public int id; + @Field("randomNumber") - public int randomnumber; + public int randomNumber; - protected World() { - } - public World(int id, int randomnumber) { + public World(int id, int randomNumber) { this.id = id; - this.randomnumber = randomnumber; + this.randomNumber = randomNumber; } + } \ No newline at end of file diff --git a/frameworks/Java/spring/src/main/java/hello/repository/DbRepository.java b/frameworks/Java/spring/src/main/java/hello/repository/DbRepository.java index 5cfa8c7d5c3..d7733754c2c 100644 --- a/frameworks/Java/spring/src/main/java/hello/repository/DbRepository.java +++ b/frameworks/Java/spring/src/main/java/hello/repository/DbRepository.java @@ -6,9 +6,10 @@ import hello.model.World; public interface DbRepository { + World getWorld(int id); - World updateWorld(World world, int randomNumber); + void updateWorlds(List worlds); List fortunes(); } diff --git a/frameworks/Java/spring/src/main/java/hello/repository/JdbcDbRepository.java b/frameworks/Java/spring/src/main/java/hello/repository/JdbcDbRepository.java index 
f1dcdae0352..bc706e232c8 100644 --- a/frameworks/Java/spring/src/main/java/hello/repository/JdbcDbRepository.java +++ b/frameworks/Java/spring/src/main/java/hello/repository/JdbcDbRepository.java @@ -1,10 +1,15 @@ package hello.repository; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; import java.util.List; import org.springframework.context.annotation.Profile; import org.springframework.dao.EmptyResultDataAccessException; import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.ParameterizedPreparedStatementSetter; import org.springframework.stereotype.Repository; import hello.model.Fortune; @@ -22,27 +27,34 @@ public JdbcDbRepository(JdbcTemplate jdbcTemplate) { @Override public World getWorld(int id) { try { - return jdbcTemplate.queryForObject("SELECT * FROM world WHERE id = ?", - (rs, rn) -> new World(rs.getInt("id"), rs.getInt("randomnumber")), id); + return jdbcTemplate.queryForObject("SELECT id, randomnumber FROM world WHERE id = ?", + (rs, rn) -> new World(rs.getInt(1), rs.getInt(2)), id); } catch (EmptyResultDataAccessException e) { return null; } } - private World updateWorld(World world) { - jdbcTemplate.update("UPDATE world SET randomnumber = ? WHERE id = ?", world.randomnumber, world.id); - return world; - } - @Override - public World updateWorld(World world, int randomNumber) { - world.randomnumber = randomNumber; - return updateWorld(world); + public void updateWorlds(List worlds) { + jdbcTemplate.batchUpdate("UPDATE world SET randomnumber = ? WHERE id = ?", worlds, worlds.size(), new ParameterizedPreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps, World world) throws SQLException { + ps.setInt(1, world.randomNumber); + ps.setInt(2, world.id); + } + }); } @Override public List fortunes() { - return jdbcTemplate.query("SELECT * FROM fortune", - (rs, rn) -> new Fortune(rs.getInt("id"), rs.getString("message"))); + return jdbcTemplate.query(con -> con.prepareStatement("SELECT id, message FROM fortune", + ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY), rs -> { + List results = new ArrayList<>(); + while (rs.next()) { + results.add(new Fortune(rs.getInt(1), rs.getString(2))); + } + return results; + }); } + } diff --git a/frameworks/Java/spring/src/main/java/hello/repository/MongoDbRepository.java b/frameworks/Java/spring/src/main/java/hello/repository/MongoDbRepository.java index 66c81e64f1d..9b6b67c4c95 100644 --- a/frameworks/Java/spring/src/main/java/hello/repository/MongoDbRepository.java +++ b/frameworks/Java/spring/src/main/java/hello/repository/MongoDbRepository.java @@ -1,11 +1,18 @@ package hello.repository; +import java.util.ArrayList; import java.util.List; import org.springframework.context.annotation.Profile; +import org.springframework.data.mongodb.core.BulkOperations; import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; import org.springframework.stereotype.Repository; +import com.mongodb.bulk.BulkWriteResult; +import hello.Utils; import hello.model.Fortune; import hello.model.World; @@ -24,9 +31,14 @@ public World getWorld(int id) { } @Override - public World updateWorld(World world, int randomNumber) { - world.randomnumber = randomNumber; - return mongoTemplate.save(world); + public void updateWorlds(List worlds) { + 
BulkOperations bulkOps = mongoTemplate.bulkOps(BulkOperations.BulkMode.UNORDERED, World.class); + for (World world : worlds) { + Query query = new Query().addCriteria(new Criteria("_id").is(world.id)); + Update update = new Update().set("randomNumber", world.randomNumber); + bulkOps.updateOne(query, update); + } + bulkOps.execute(); } @Override diff --git a/frameworks/Java/spring/src/main/java/hello/web/DbHandler.java b/frameworks/Java/spring/src/main/java/hello/web/DbHandler.java index 1611cf21170..983eb79f6b9 100644 --- a/frameworks/Java/spring/src/main/java/hello/web/DbHandler.java +++ b/frameworks/Java/spring/src/main/java/hello/web/DbHandler.java @@ -1,83 +1,73 @@ package hello.web; -import java.util.concurrent.ThreadLocalRandom; -import java.util.stream.IntStream; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; -import hello.UpdateWorldService; +import hello.Utils; import hello.model.Fortune; import hello.model.World; import hello.repository.DbRepository; +import io.jstach.jstachio.JStachio; import org.springframework.http.HttpHeaders; import org.springframework.http.MediaType; import org.springframework.stereotype.Component; -import org.springframework.web.servlet.function.RenderingResponse; import org.springframework.web.servlet.function.ServerRequest; import org.springframework.web.servlet.function.ServerResponse; -import static java.util.Comparator.comparing; - @Component public class DbHandler { - private DbRepository dbRepository; - private UpdateWorldService updateWorldService; + private final DbRepository dbRepository; - public DbHandler(DbRepository dbRepository, UpdateWorldService updateWorldService) { + public DbHandler(DbRepository dbRepository) { this.dbRepository = dbRepository; - this.updateWorldService = updateWorldService; } ServerResponse db(ServerRequest request) { return ServerResponse.ok() - .contentType(MediaType.APPLICATION_JSON) - .body(dbRepository.getWorld(randomWorldNumber())); + .header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE) + .body(dbRepository.getWorld(Utils.randomWorldNumber())); } ServerResponse queries(ServerRequest request) { - String queries = request.params().getFirst("queries"); - World[] worlds = randomWorldNumbers() - .mapToObj(dbRepository::getWorld).limit(parseQueryCount(queries)) + int queries = parseQueryCount(request.params().getFirst("queries")); + World[] worlds = Utils.randomWorldNumbers() + .mapToObj(dbRepository::getWorld).limit(queries) .toArray(World[]::new); return ServerResponse.ok() - .contentType(MediaType.APPLICATION_JSON) + .header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE) .body(worlds); } ServerResponse updates(ServerRequest request) { - String queries = request.params().getFirst("queries"); - World[] worlds = randomWorldNumbers() - .mapToObj(id -> updateWorldService.updateWorld(id)) - .limit(parseQueryCount(queries)).toArray(World[]::new); + int queries = parseQueryCount(request.params().getFirst("queries")); + List worlds = Utils.randomWorldNumbers() + .mapToObj(id -> { + World world = dbRepository.getWorld(id); + int randomNumber; + do { + randomNumber = Utils.randomWorldNumber(); + } while (randomNumber == world.randomNumber); + world.randomNumber = randomNumber; + return world; + }).limit(queries) + .sorted(Comparator.comparingInt(w -> w.id)) + .toList(); + dbRepository.updateWorlds(worlds); return ServerResponse.ok() - .contentType(MediaType.APPLICATION_JSON) + .header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE) .body(worlds); } 
ServerResponse fortunes(ServerRequest request) { - var fortunes = dbRepository.fortunes(); + List fortunes = dbRepository.fortunes(); fortunes.add(new Fortune(0, "Additional fortune added at request time.")); - fortunes.sort(comparing(fortune -> fortune.message)); - return RenderingResponse - .create("fortunes") - .modelAttribute("fortunes", fortunes) + Collections.sort(fortunes); + return ServerResponse.ok() .header(HttpHeaders.CONTENT_TYPE, MediaType.TEXT_HTML_VALUE) - .build(); - } - - private static final int MIN_WORLD_NUMBER = 1; - private static final int MAX_WORLD_NUMBER_PLUS_ONE = 10_001; - - public static int randomWorldNumber() { - return ThreadLocalRandom.current().nextInt(MIN_WORLD_NUMBER, MAX_WORLD_NUMBER_PLUS_ONE); - } - - private static IntStream randomWorldNumbers() { - return ThreadLocalRandom.current().ints(MIN_WORLD_NUMBER, MAX_WORLD_NUMBER_PLUS_ONE) - // distinct() allows us to avoid using Hibernate's first-level cache in - // the JPA-based implementation. Using a cache like that would bypass - // querying the database, which would violate the test requirements. - .distinct(); + .body(JStachio.render(new Fortunes(fortunes))); } private static int parseQueryCount(String textValue) { diff --git a/frameworks/Java/spring/src/main/java/hello/web/Fortunes.java b/frameworks/Java/spring/src/main/java/hello/web/Fortunes.java new file mode 100644 index 00000000000..cbd6daf2396 --- /dev/null +++ b/frameworks/Java/spring/src/main/java/hello/web/Fortunes.java @@ -0,0 +1,10 @@ +package hello.web; + +import java.util.List; + +import hello.model.Fortune; +import io.jstach.jstache.JStache; + +@JStache(path = "fortunes.mustache") +public record Fortunes(List fortunes) { +} diff --git a/frameworks/Java/spring/src/main/resources/application.yml b/frameworks/Java/spring/src/main/resources/application.yml index 4f6592dc53b..efde83cda61 100644 --- a/frameworks/Java/spring/src/main/resources/application.yml +++ b/frameworks/Java/spring/src/main/resources/application.yml @@ -1,21 +1,21 @@ +server: + server-header: Spring + servlet: + encoding: + force: true --- spring: config: activate: on-profile: jdbc autoconfigure: - exclude: org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration,org.springframework.boot.autoconfigure.data.jpa.JpaRepositoriesAutoConfiguration,org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration,org.springframework.boot.autoconfigure.data.mongo.MongoDataAutoConfiguration,org.springframework.boot.autoconfigure.data.mongo.MongoRepositoriesAutoConfiguration,org.springframework.boot.autoconfigure.data.web.SpringDataWebAutoConfiguration - ---- -spring: - config: - activate: - on-profile: jdbc,jpa + exclude: org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration,org.springframework.boot.autoconfigure.data.mongo.MongoDataAutoConfiguration,org.springframework.boot.autoconfigure.data.mongo.MongoRepositoriesAutoConfiguration,org.springframework.boot.autoconfigure.data.web.SpringDataWebAutoConfiguration datasource: - url: jdbc:postgresql://${database.host}:${database.port}/${database.name} + url: jdbc:postgresql://${database.host}:${database.port}/${database.name}?loggerLevel=OFF&disableColumnSanitiser=true&assumeMinServerVersion=16&sslmode=disable username: ${database.username} password: ${database.password} - + hikari: + maximum-pool-size: 256 database: name: hello_world host: tfb-database @@ -23,23 +23,13 @@ database: username: benchmarkdbuser password: benchmarkdbpass ---- -spring: - config: - activate: - on-profile: jpa - 
autoconfigure: - exclude: org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration,org.springframework.boot.autoconfigure.data.mongo.MongoRepositoriesAutoConfiguration,org.springframework.boot.autoconfigure.data.mongo.MongoDataAutoConfiguration,org.springframework.boot.autoconfigure.data.web.SpringDataWebAutoConfiguration - jpa: - database-platform: org.hibernate.dialect.PostgreSQLDialect - --- spring: config: activate: on-profile: mongo autoconfigure: - exclude: org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration,org.springframework.boot.autoconfigure.data.jpa.JpaRepositoriesAutoConfiguration,org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration,org.springframework.boot.autoconfigure.data.web.SpringDataWebAutoConfiguration + exclude: org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration,org.springframework.boot.autoconfigure.data.web.SpringDataWebAutoConfiguration spring.data.mongodb: host: tfb-database @@ -50,7 +40,3 @@ spring.data.mongodb: spring: profiles: active: jdbc - -server.server-header: Spring -server.servlet.encoding.force: true -spring.jpa.open-in-view: false diff --git a/frameworks/Java/spring/src/main/resources/templates/fortunes.mustache b/frameworks/Java/spring/src/main/resources/fortunes.mustache similarity index 100% rename from frameworks/Java/spring/src/main/resources/templates/fortunes.mustache rename to frameworks/Java/spring/src/main/resources/fortunes.mustache diff --git a/frameworks/Java/undertow-jersey/pom.xml b/frameworks/Java/undertow-jersey/pom.xml index bf4603735ca..af553eea552 100644 --- a/frameworks/Java/undertow-jersey/pom.xml +++ b/frameworks/Java/undertow-jersey/pom.xml @@ -174,7 +174,7 @@ io.undertow undertow-core - 2.3.14.Final + 2.3.17.Final diff --git a/frameworks/Java/undertow/pom.xml b/frameworks/Java/undertow/pom.xml index da2a6dd086b..133d1ce186c 100644 --- a/frameworks/Java/undertow/pom.xml +++ b/frameworks/Java/undertow/pom.xml @@ -20,7 +20,7 @@ 3.2.2 0.9.10 42.7.2 - 2.3.14.Final + 2.3.17.Final diff --git a/frameworks/Java/wildfly-ee/pom.xml b/frameworks/Java/wildfly-ee/pom.xml index c2e6472f6e3..bd77be766bb 100644 --- a/frameworks/Java/wildfly-ee/pom.xml +++ b/frameworks/Java/wildfly-ee/pom.xml @@ -13,27 +13,69 @@ 17 3.9.0 3.3.2 - 8.0 - 26.0.1.Final - 7.0.0.Final - 2.0.6.Final + 10.0.0 + 5.0.1.Final + 8.0.1.Final - + + + + jakarta.platform + jakarta.jakartaee-bom + ${version.jakarta.ee} + import + pom + + + + - javax - javaee-api - ${version.javaee.api} + jakarta.annotation + jakarta.annotation-api + provided + + + jakarta.enterprise + jakarta.enterprise.cdi-api provided - - org.glassfish.jaxb - jaxb-runtime - 2.4.0-b180830.0438 + jakarta.enterprise.concurrent + jakarta.enterprise.concurrent-api + provided + + + jakarta.inject + jakarta.inject-api + provided + + + jakarta.json + jakarta.json-api + provided + + + jakarta.persistence + jakarta.persistence-api + provided + + + jakarta.transaction + jakarta.transaction-api + provided + + + jakarta.validation + jakarta.validation-api + provided + + + jakarta.ws.rs + jakarta.ws.rs-api + provided - @@ -50,63 +92,50 @@ maven-compiler-plugin ${version.compiler.plugin} - ${java.version} - ${java.version} ${java.version} + + org.wildfly.plugins + wildfly-maven-plugin + ${version.wildfly-maven-plugin} + + ROOT.war + + + + org.wildfly + wildfly-ee-galleon-pack + + + org.wildfly + wildfly-datasources-galleon-pack + ${version.wildfly.galleon.datasources.feature.pack} + + + + + + org.wildfly.channels + wildfly-ee + + + + + jaxrs-server + 
jpa + jsf + mysql-driver + + + + + + + + + + - - - - bootable-jar - - false - - - - - org.wildfly.plugins - wildfly-jar-maven-plugin - ${version.wildfly.maven.jar.plugin} - - - - wildfly@maven(org.jboss.universe:community-universe)#${version.wildfly.bootable} - - - org.wildfly - wildfly-datasources-galleon-pack - ${version.wildfly.galleon.datasources.feature.pack} - - - - jaxrs-server - jsf - mysql-driver - - - deployment-scanner - - - - - - - - - - - - - package - - - - - - - - diff --git a/frameworks/Java/wildfly-ee/scripts/bootable-jar.cli b/frameworks/Java/wildfly-ee/scripts/wildfly-setup.cli similarity index 100% rename from frameworks/Java/wildfly-ee/scripts/bootable-jar.cli rename to frameworks/Java/wildfly-ee/scripts/wildfly-setup.cli diff --git a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/jpa/PersistenceResources.java b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/jpa/PersistenceResources.java similarity index 56% rename from frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/jpa/PersistenceResources.java rename to frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/jpa/PersistenceResources.java index fcdc4aafa0d..aa580b31f68 100644 --- a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/jpa/PersistenceResources.java +++ b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/jpa/PersistenceResources.java @@ -1,9 +1,9 @@ package com.techempower.ee7.jpa; -import javax.enterprise.context.Dependent; -import javax.enterprise.inject.Produces; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; +import jakarta.enterprise.context.Dependent; +import jakarta.enterprise.inject.Produces; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; public class PersistenceResources { diff --git a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/model/Fortune.java b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/model/Fortune.java similarity index 76% rename from frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/model/Fortune.java rename to frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/model/Fortune.java index dce49416f75..7648c05b539 100644 --- a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/model/Fortune.java +++ b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/model/Fortune.java @@ -2,13 +2,13 @@ import java.io.Serializable; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.NamedQuery; -import javax.validation.constraints.NotNull; -import javax.validation.constraints.Size; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.NamedQuery; +import jakarta.validation.constraints.NotNull; +import jakarta.validation.constraints.Size; @NamedQuery(name = "allFortunes", query = "SELECT f FROM Fortune f") @Entity diff --git a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/model/World.java b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/model/World.java similarity index 74% rename from frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/model/World.java rename to frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/model/World.java index 44fd914aee7..1ee3ae0e97a 100644 --- 
a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/model/World.java +++ b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/model/World.java @@ -2,11 +2,11 @@ import java.io.Serializable; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.validation.constraints.NotNull; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.validation.constraints.NotNull; @Entity public class World implements Serializable { diff --git a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/rest/CatchAllExceptionMapper.java b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/rest/CatchAllExceptionMapper.java similarity index 62% rename from frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/rest/CatchAllExceptionMapper.java rename to frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/rest/CatchAllExceptionMapper.java index 0beb6e85c3d..bc287fda07a 100644 --- a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/rest/CatchAllExceptionMapper.java +++ b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/rest/CatchAllExceptionMapper.java @@ -1,9 +1,9 @@ package com.techempower.ee7.rest; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.Response.Status; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.Response.Status; +import jakarta.ws.rs.ext.ExceptionMapper; +import jakarta.ws.rs.ext.Provider; @Provider public class CatchAllExceptionMapper implements ExceptionMapper { diff --git a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/rest/MyApplication.java b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/rest/MyApplication.java similarity index 59% rename from frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/rest/MyApplication.java rename to frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/rest/MyApplication.java index 5c82e49f3b9..c1f912ec17d 100644 --- a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/rest/MyApplication.java +++ b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/rest/MyApplication.java @@ -1,7 +1,7 @@ package com.techempower.ee7.rest; -import javax.ws.rs.ApplicationPath; -import javax.ws.rs.core.Application; +import jakarta.ws.rs.ApplicationPath; +import jakarta.ws.rs.core.Application; @ApplicationPath("rest") public class MyApplication extends Application { diff --git a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/Fortunes.java b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/Fortunes.java similarity index 79% rename from frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/Fortunes.java rename to frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/Fortunes.java index 6bbff0fead8..8bd06ae545c 100644 --- a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/Fortunes.java +++ b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/Fortunes.java @@ -3,11 +3,11 @@ import java.util.Collections; import java.util.List; -import javax.annotation.PostConstruct; -import javax.enterprise.context.RequestScoped; -import javax.inject.Inject; -import javax.inject.Named; -import javax.persistence.EntityManager; +import 
jakarta.annotation.PostConstruct; +import jakarta.enterprise.context.RequestScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; import com.techempower.ee7.model.Fortune; diff --git a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/JsonSerialization.java b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/JsonSerialization.java similarity index 81% rename from frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/JsonSerialization.java rename to frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/JsonSerialization.java index ad5d947b042..019afc50fc7 100644 --- a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/JsonSerialization.java +++ b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/JsonSerialization.java @@ -1,9 +1,9 @@ package com.techempower.ee7.tests; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; @Path("/json") public class JsonSerialization { diff --git a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/MultipleQueries.java b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/MultipleQueries.java similarity index 78% rename from frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/MultipleQueries.java rename to frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/MultipleQueries.java index 936ddfb4b04..fbd764af7ee 100644 --- a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/MultipleQueries.java +++ b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/MultipleQueries.java @@ -3,13 +3,13 @@ import java.util.ArrayList; import java.util.List; -import javax.inject.Inject; -import javax.persistence.EntityManager; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.MediaType; +import jakarta.inject.Inject; +import jakarta.persistence.EntityManager; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.MediaType; import com.techempower.ee7.model.World; import com.techempower.ee7.util.Helpers; diff --git a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/PlainText.java b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/PlainText.java similarity index 78% rename from frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/PlainText.java rename to frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/PlainText.java index f40da2ab7da..7ce9f37f3b9 100644 --- a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/PlainText.java +++ b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/PlainText.java @@ -1,9 +1,9 @@ package com.techempower.ee7.tests; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; @Path("/plaintext") public class PlainText { diff --git a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/SingleQuery.java 
b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/SingleQuery.java similarity index 64% rename from frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/SingleQuery.java rename to frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/SingleQuery.java index 30279ae4295..517ad710f45 100644 --- a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/SingleQuery.java +++ b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/SingleQuery.java @@ -1,11 +1,11 @@ package com.techempower.ee7.tests; -import javax.inject.Inject; -import javax.persistence.EntityManager; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; +import jakarta.inject.Inject; +import jakarta.persistence.EntityManager; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; import com.techempower.ee7.model.World; import com.techempower.ee7.util.Helpers; diff --git a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/TestActions.java b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/TestActions.java similarity index 73% rename from frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/TestActions.java rename to frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/TestActions.java index 9302373516c..d3847c9b661 100644 --- a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/TestActions.java +++ b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/TestActions.java @@ -1,9 +1,9 @@ package com.techempower.ee7.tests; -import javax.enterprise.context.RequestScoped; -import javax.inject.Inject; -import javax.persistence.EntityManager; -import javax.transaction.Transactional; +import jakarta.enterprise.context.RequestScoped; +import jakarta.inject.Inject; +import jakarta.persistence.EntityManager; +import jakarta.transaction.Transactional; import com.techempower.ee7.model.World; import com.techempower.ee7.util.Helpers; diff --git a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/Updates.java b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/Updates.java similarity index 81% rename from frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/Updates.java rename to frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/Updates.java index b041f24c939..5c3879bc363 100644 --- a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/tests/Updates.java +++ b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/tests/Updates.java @@ -3,12 +3,12 @@ import java.util.ArrayList; import java.util.List; -import javax.inject.Inject; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.MediaType; +import jakarta.inject.Inject; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.MediaType; import com.techempower.ee7.model.World; import com.techempower.ee7.util.Helpers; diff --git a/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/util/Helpers.java b/frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/util/Helpers.java similarity index 100% rename from frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee7/util/Helpers.java rename to 
frameworks/Java/wildfly-ee/src/main/java/com/techempower/ee/util/Helpers.java diff --git a/frameworks/Java/wildfly-ee/src/main/resources/META-INF/persistence.xml b/frameworks/Java/wildfly-ee/src/main/resources/META-INF/persistence.xml index 2ac545e5dde..ff728916fe5 100644 --- a/frameworks/Java/wildfly-ee/src/main/resources/META-INF/persistence.xml +++ b/frameworks/Java/wildfly-ee/src/main/resources/META-INF/persistence.xml @@ -6,7 +6,6 @@ NONE - diff --git a/frameworks/Java/wildfly-ee/src/main/webapp/WEB-INF/web.xml b/frameworks/Java/wildfly-ee/src/main/webapp/WEB-INF/web.xml index 96378e310c9..126c5588694 100644 --- a/frameworks/Java/wildfly-ee/src/main/webapp/WEB-INF/web.xml +++ b/frameworks/Java/wildfly-ee/src/main/webapp/WEB-INF/web.xml @@ -8,7 +8,7 @@ Faces Servlet - javax.faces.webapp.FacesServlet + jakarta.faces.webapp.FacesServlet 1 @@ -18,12 +18,12 @@ - javax.faces.STATE_SAVING_METHOD + jakarta.faces.STATE_SAVING_METHOD client - javax.faces.PROJECT_STAGE + jakarta.faces.PROJECT_STAGE Production diff --git a/frameworks/Java/wildfly-ee/wildfly-ee.dockerfile b/frameworks/Java/wildfly-ee/wildfly-ee.dockerfile index ec4a1c6e063..b04e261e64d 100644 --- a/frameworks/Java/wildfly-ee/wildfly-ee.dockerfile +++ b/frameworks/Java/wildfly-ee/wildfly-ee.dockerfile @@ -1,9 +1,8 @@ FROM maven:3-openjdk-17 WORKDIR /wildfly EXPOSE 8080 -ENV MAVEN_OPTS="--add-exports=java.xml/com.sun.org.apache.xerces.internal.parsers=ALL-UNNAMED --add-exports=java.xml/com.sun.org.apache.xerces.internal.util=ALL-UNNAMED" COPY src src COPY scripts scripts COPY pom.xml pom.xml -RUN mvn clean package -P bootable-jar -CMD java -Djava.net.preferIPv4Stack=true -XX:SoftMaxHeapSize=18g -Xmx24g -XX:+UseZGC -jar target/wildfly-ee-bootable.jar +RUN mvn clean package wildfly:package +CMD JAVA_OPTS="-Djava.net.preferIPv4Stack=true -XX:SoftMaxHeapSize=18g -Xmx24g -XX:+UseZGC" ./target/server/bin/standalone.sh diff --git a/frameworks/OCaml/dream/README.md b/frameworks/OCaml/dream/README.md new file mode 100755 index 00000000000..86e484bc978 --- /dev/null +++ b/frameworks/OCaml/dream/README.md @@ -0,0 +1,35 @@ +# Dream + +## Overview + +Most of the code is in the `dream_test/bin/main.ml` file. + +## Implemented tests + +| Test Name | Endpoint | |------------|-------------------------------| | Plain text | http://0.0.0.0:8080/plaintext | | Json | http://0.0.0.0:8080/json | + +## Headers + +A simple middleware adds the headers required by the TechEmpower benchmarks. +For performance, the Date header is refreshed only once per second, as allowed by the rules. + +## Dependencies + +Dependencies for this project are managed in `dream_test/dune-project` and `dream_test/bin/dune`. +After adding a dependency there, run the following: + +``` +cd dream_test +dune build dream_test.opam +opam install --yes --deps-only . +``` + +This regenerates the `dream_test.opam` file and installs the new dependencies locally.
+ +## Running tests + +$ tfb --mode verify --test dream + diff --git a/frameworks/OCaml/dream/benchmark_config.json b/frameworks/OCaml/dream/benchmark_config.json new file mode 100755 index 00000000000..d5d83b838f4 --- /dev/null +++ b/frameworks/OCaml/dream/benchmark_config.json @@ -0,0 +1,26 @@ +{ + "framework": "dream", + "tests": [ + { + "default": { + "json_url": "/json", + "plaintext_url": "/plaintext", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "postgres", + "framework": "Dream", + "language": "OCaml", + "flavor": "None", + "orm": "Micro", + "platform": "http/af", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "Dream", + "notes": "", + "versus": "httpaf" + } + } + ] +} diff --git a/frameworks/OCaml/dream/dream.dockerfile b/frameworks/OCaml/dream/dream.dockerfile new file mode 100644 index 00000000000..8a806026a4e --- /dev/null +++ b/frameworks/OCaml/dream/dream.dockerfile @@ -0,0 +1,12 @@ +FROM ocaml/opam:debian-ocaml-5.1 +COPY ./dream_test/ /app +WORKDIR /app +USER root +RUN apt install -y libgmp-dev libev-dev pkg-config libssl-dev +RUN opam install --yes --deps-only . +RUN opam install dune +RUN eval $(opam env) +RUN opam exec -- dune build --profile release +CMD [ "./_build/default/bin/main.exe" ] +EXPOSE 8080 + diff --git a/frameworks/OCaml/dream/dream_test/bin/dune b/frameworks/OCaml/dream/dream_test/bin/dune new file mode 100755 index 00000000000..171e6829182 --- /dev/null +++ b/frameworks/OCaml/dream/dream_test/bin/dune @@ -0,0 +1,5 @@ +(executable + (public_name dream_test) + (name main) + (preprocess (pps lwt_ppx ppx_yojson_conv)) + (libraries dream_test dream ppx_yojson_conv calendar)) diff --git a/frameworks/OCaml/dream/dream_test/bin/main.ml b/frameworks/OCaml/dream/dream_test/bin/main.ml new file mode 100755 index 00000000000..4c5ac18c30c --- /dev/null +++ b/frameworks/OCaml/dream/dream_test/bin/main.ml @@ -0,0 +1,53 @@ +open Ppx_yojson_conv_lib.Yojson_conv.Primitives + +type message_object = { + message : string; +} [@@deriving yojson] + +let time_cache = Atomic.make false;; +let time_ref = ref("None");; + +let time () = + if not @@ Atomic.get time_cache + then begin + time_ref := CalendarLib.Printer.Calendar.sprint "%a, %d %b %Y %H:%M:%S UTC" @@ CalendarLib.Calendar.now (); + Atomic.set time_cache true; + (!time_ref) + end + else + (!time_ref);; + +let tech_empower_headers (inner_handler: Dream.handler) = + (fun (req) -> + let%lwt res = inner_handler req in + Dream.set_header res "Server" "dream"; + Dream.set_header res "Date" @@ time (); + Lwt.return res + );; + +let rec timer () = + Unix.sleepf 0.9; + Atomic.set time_cache false; + timer();; + +let () = + let time_invalidator = Domain.spawn(fun () -> timer ()) in + Dream.run ~interface: "0.0.0.0" + @@ tech_empower_headers + @@ Dream.router [ + Dream.get "/" (fun _ -> + Dream.html "Hello, world!" + ); + Dream.get "/plaintext" (fun _ -> + Dream.response ~headers: [("Content-Type", "text/plain")] + "Hello, world!" + |> Lwt.return + ); + Dream.get "/json" (fun _ -> + { message = "Hello, world!" 
} + |> yojson_of_message_object + |> Yojson.Safe.to_string + |> Dream.json + ); + ]; + Domain.join time_invalidator;; diff --git a/frameworks/OCaml/dream/dream_test/dream_test.opam b/frameworks/OCaml/dream/dream_test/dream_test.opam new file mode 100644 index 00000000000..b9db80a34bd --- /dev/null +++ b/frameworks/OCaml/dream/dream_test/dream_test.opam @@ -0,0 +1,35 @@ +# This file is generated by dune, edit dune-project instead +opam-version: "2.0" +synopsis: "A short synopsis" +description: "A longer description" +maintainer: ["Maintainer Name"] +authors: ["Author Name"] +license: "LICENSE" +tags: ["topics" "to describe" "your" "project"] +homepage: "https://github.com/username/reponame" +doc: "https://url/to/documentation" +bug-reports: "https://github.com/username/reponame/issues" +depends: [ + "ocaml" + "dune" {>= "3.14"} + "dream" + "lwt" + "ppx_yojson_conv" + "calendar" + "odoc" {with-doc} +] +build: [ + ["dune" "subst"] {dev} + [ + "dune" + "build" + "-p" + name + "-j" + jobs + "@install" + "@runtest" {with-test} + "@doc" {with-doc} + ] +] +dev-repo: "git+https://github.com/username/reponame.git" diff --git a/frameworks/OCaml/dream/dream_test/dune-project b/frameworks/OCaml/dream/dream_test/dune-project new file mode 100755 index 00000000000..aaeaf8c0df8 --- /dev/null +++ b/frameworks/OCaml/dream/dream_test/dune-project @@ -0,0 +1,26 @@ +(lang dune 3.14) + +(name dream_test) + +(generate_opam_files true) + +(source + (github username/reponame)) + +(authors "Author Name") + +(maintainers "Maintainer Name") + +(license LICENSE) + +(documentation https://url/to/documentation) + +(package + (name dream_test) + (synopsis "A short synopsis") + (description "A longer description") + (depends ocaml dune dream lwt ppx_yojson_conv calendar) + (tags + (topics "to describe" your project))) + +; See the complete stanza docs at https://dune.readthedocs.io/en/stable/dune-files.html#dune-project diff --git a/frameworks/OCaml/dream/dream_test/lib/dune b/frameworks/OCaml/dream/dream_test/lib/dune new file mode 100755 index 00000000000..b8d991f91da --- /dev/null +++ b/frameworks/OCaml/dream/dream_test/lib/dune @@ -0,0 +1,2 @@ +(library + (name dream_test)) diff --git a/frameworks/OCaml/dream/dream_test/test/dune b/frameworks/OCaml/dream/dream_test/test/dune new file mode 100755 index 00000000000..434f4a8ee36 --- /dev/null +++ b/frameworks/OCaml/dream/dream_test/test/dune @@ -0,0 +1,2 @@ +(test + (name test_dream_test)) diff --git a/frameworks/OCaml/dream/dream_test/test/test_dream_test.ml b/frameworks/OCaml/dream/dream_test/test/test_dream_test.ml new file mode 100755 index 00000000000..e69de29bb2d diff --git a/frameworks/PHP/laravel/benchmark_config.json b/frameworks/PHP/laravel/benchmark_config.json index f8421378e51..053534b12fa 100644 --- a/frameworks/PHP/laravel/benchmark_config.json +++ b/frameworks/PHP/laravel/benchmark_config.json @@ -139,7 +139,7 @@ "notes": "", "versus": "php" }, - "pripple": { + "ripple": { "json_url": "/json", "db_url": "/db", "query_url": "/queries/", @@ -154,11 +154,11 @@ "language": "PHP", "flavor": "PHP8.3", "orm": "Full", - "platform": "PRipple", + "platform": "Ripple", "webserver": "PServer", "os": "Linux", "database_os": "Linux", - "display_name": "laravel-pripple", + "display_name": "laravel-ripple", "notes": "", "versus": "php" } diff --git a/frameworks/PHP/laravel/laravel-pripple.dockerfile b/frameworks/PHP/laravel/laravel-ripple.dockerfile similarity index 80% rename from frameworks/PHP/laravel/laravel-pripple.dockerfile rename to 
frameworks/PHP/laravel/laravel-ripple.dockerfile index 869661a0432..4f1bb8bc3a9 100644 --- a/frameworks/PHP/laravel/laravel-pripple.dockerfile +++ b/frameworks/PHP/laravel/laravel-ripple.dockerfile @@ -33,16 +33,17 @@ RUN mkdir -p bootstrap/cache \ storage/framework/views \ storage/framework/cache -# Configure RUN echo "PRP_HTTP_LISTEN=http://0.0.0.0:8080" >> .env -RUN echo "PRP_HTTP_COUNT=64" >> .env +RUN echo "PRP_HTTP_WORKERS=64" >> .env +RUN echo "PRP_HTTP_RELOAD=0" >> .env +RUN echo "PRP_HTTP_SANDBOX=1" >> .env +# Configure RUN composer install --quiet +RUN composer require cloudtay/ripple-driver --quiet +RUN php artisan vendor:publish --tag=ripple-config RUN php artisan optimize -RUN composer require cclilshy/p-ripple-drive --quiet -RUN composer update --quiet - +# Start EXPOSE 8080 - -ENTRYPOINT ["php","artisan","p:server","start"] +ENTRYPOINT ["php","artisan","ripple:server","start"] diff --git a/frameworks/PHP/swoole/10-opcache.ini b/frameworks/PHP/swoole/10-opcache.ini index 3e65df6b191..bc77a43761e 100644 --- a/frameworks/PHP/swoole/10-opcache.ini +++ b/frameworks/PHP/swoole/10-opcache.ini @@ -6,4 +6,5 @@ opcache.save_comments=0 opcache.enable_file_override=1 opcache.huge_code_pages=1 opcache.jit_buffer_size=128M -opcache.jit=1225 +mysqlnd.collect_statistics = Off +opcache.jit=tracing diff --git a/frameworks/PHP/swoole/database.php b/frameworks/PHP/swoole/database.php index 97d3af6887a..a0c15234921 100644 --- a/frameworks/PHP/swoole/database.php +++ b/frameworks/PHP/swoole/database.php @@ -15,7 +15,7 @@ class Operation public static function db(PDOStatement|PDOStatementProxy $db): string { $db->execute([mt_rand(1, 10000)]); - return json_encode($db->fetch(PDO::FETCH_ASSOC), JSON_NUMERIC_CHECK); + return json_encode($db->fetch(PDO::FETCH_ASSOC)); } public static function fortunes(PDOStatement|PDOStatementProxy $fortune): string @@ -42,7 +42,7 @@ public static function query(PDOStatement|PDOStatementProxy $query, int $queries $results[] = $query->fetch(PDO::FETCH_ASSOC); } - return json_encode($results, JSON_NUMERIC_CHECK); + return json_encode($results); } public static function updates(PDOStatement|PDOStatementProxy $random, PDOStatement|PDOStatementProxy $update, int $queries, string $driver): string @@ -65,7 +65,7 @@ public static function updates(PDOStatement|PDOStatementProxy $random, PDOStatem if ($driver == 'pgsql') { $update->execute([...$values, ...$keys]); } - return json_encode($results, JSON_NUMERIC_CHECK); + return json_encode($results); } } @@ -130,9 +130,6 @@ class Connections { private static PDOPool $pool; private static string $driver; - private static array $dbs = []; - private static array $fortunes = []; - private static array $updates = []; public static function init(string $driver): void { @@ -195,28 +192,14 @@ private static function put(PDO|PDOProxy $db): void private static function getStatement(PDO|PDOProxy $pdo, string $type, int $queries = 0): PDOStatement|PDOStatementProxy { - $hash = spl_object_id($pdo); - if ('select' == $type) { - if (!isset(self::$dbs[$hash])) { - self::$dbs[$hash] = $pdo->prepare(Operation::WORLD_SELECT_SQL); - } - - return self::$dbs[$hash]; + return $pdo->prepare(Operation::WORLD_SELECT_SQL); } elseif ('fortunes' == $type) { - if (!isset(self::$fortunes[$hash])) { - self::$fortunes[$hash] = $pdo->prepare(Operation::FORTUNE_SQL); - } - - return self::$fortunes[$hash]; + return $pdo->prepare(Operation::FORTUNE_SQL); } else { - if (!isset(self::$updates[$hash][$queries])) { - self::$updates[$hash][$queries] = self::$driver == 
'pgsql' - ? $pdo->prepare('UPDATE World SET randomNumber = CASE id'.\str_repeat(' WHEN ?::INTEGER THEN ?::INTEGER ', $queries).'END WHERE id IN ('.\str_repeat('?::INTEGER,', $queries - 1).'?::INTEGER)') - : $pdo->prepare(Operation::WORLD_UPDATE_SQL); - } - - return self::$updates[$hash][$queries]; + return self::$driver == 'pgsql' + ? $pdo->prepare('UPDATE World SET randomNumber = CASE id'.\str_repeat(' WHEN ?::INTEGER THEN ?::INTEGER ', $queries).'END WHERE id IN ('.\str_repeat('?::INTEGER,', $queries - 1).'?::INTEGER)') + : $pdo->prepare(Operation::WORLD_UPDATE_SQL); } } } diff --git a/frameworks/PHP/swoole/swoole-async-mysql.dockerfile b/frameworks/PHP/swoole/swoole-async-mysql.dockerfile index b9ce354edb2..786f80c88cd 100644 --- a/frameworks/PHP/swoole/swoole-async-mysql.dockerfile +++ b/frameworks/PHP/swoole/swoole-async-mysql.dockerfile @@ -1,6 +1,6 @@ FROM ubuntu:24.04 -ENV SWOOLE_VERSION 5.1.4 +ENV SWOOLE_VERSION 5.1.5 ENV ENABLE_COROUTINE 1 ENV CPU_MULTIPLES 1 ENV DATABASE_DRIVER mysql @@ -16,16 +16,14 @@ RUN apt update -yqq > /dev/null \ && cd /tmp/swoole-src-${SWOOLE_VERSION} \ && phpize > /dev/null \ && ./configure > /dev/null \ - && make -j8 > /dev/null \ + && make -j "$(nproc)" > /dev/null \ && make install > /dev/null \ && echo "extension=swoole.so" > /etc/php/8.3/cli/conf.d/50-swoole.ini \ - && echo "memory_limit=1024M" >> /etc/php/8.3/cli/php.ini \ - && php -m + && echo "memory_limit=1024M" >> /etc/php/8.3/cli/php.ini WORKDIR /swoole ADD ./swoole-server.php /swoole -ADD 10-opcache.ini /swoole ADD ./database.php /swoole COPY 10-opcache.ini /etc/php/8.3/cli/conf.d/10-opcache.ini diff --git a/frameworks/PHP/swoole/swoole-async-postgres.dockerfile b/frameworks/PHP/swoole/swoole-async-postgres.dockerfile index 3bed20ce226..b658f569f0e 100644 --- a/frameworks/PHP/swoole/swoole-async-postgres.dockerfile +++ b/frameworks/PHP/swoole/swoole-async-postgres.dockerfile @@ -1,6 +1,6 @@ FROM ubuntu:24.04 -ENV SWOOLE_VERSION 5.1.4 +ENV SWOOLE_VERSION 5.1.5 ENV ENABLE_COROUTINE 1 ENV CPU_MULTIPLES 1 ENV DATABASE_DRIVER pgsql @@ -16,16 +16,14 @@ RUN apt update -yqq > /dev/null \ && cd /tmp/swoole-src-${SWOOLE_VERSION} \ && phpize > /dev/null \ && ./configure --enable-swoole-pgsql > /dev/null \ - && make -j8 > /dev/null \ + && make -j "$(nproc)" > /dev/null \ && make install > /dev/null \ && echo "extension=swoole.so" > /etc/php/8.3/cli/conf.d/50-swoole.ini \ - && echo "memory_limit=1024M" >> /etc/php/8.3/cli/php.ini \ - && php -m + && echo "memory_limit=1024M" >> /etc/php/8.3/cli/php.ini WORKDIR /swoole ADD ./swoole-server.php /swoole -ADD 10-opcache.ini /swoole ADD ./database.php /swoole COPY 10-opcache.ini /etc/php/8.3/cli/conf.d/10-opcache.ini diff --git a/frameworks/PHP/swoole/swoole-server.php b/frameworks/PHP/swoole/swoole-server.php index 13050d75ebb..38a4152476f 100644 --- a/frameworks/PHP/swoole/swoole-server.php +++ b/frameworks/PHP/swoole/swoole-server.php @@ -12,7 +12,8 @@ 'worker_num' => swoole_cpu_num() * ((int) getenv('CPU_MULTIPLES')), 'log_file' => '/dev/null', 'enable_coroutine' => $enableCoroutine, - 'enable_reuse_port' => true + 'enable_reuse_port' => true, + 'http_compression' => false ]; if ($enableCoroutine) { diff --git a/frameworks/PHP/swoole/swoole-sync-mysql.dockerfile b/frameworks/PHP/swoole/swoole-sync-mysql.dockerfile index e8de37fd49b..86304d151fc 100644 --- a/frameworks/PHP/swoole/swoole-sync-mysql.dockerfile +++ b/frameworks/PHP/swoole/swoole-sync-mysql.dockerfile @@ -1,6 +1,6 @@ FROM ubuntu:24.04 -ENV SWOOLE_VERSION 5.1.4 +ENV SWOOLE_VERSION 5.1.5 ENV 
ENABLE_COROUTINE 0 ENV CPU_MULTIPLES 1 ENV DATABASE_DRIVER mysql @@ -16,16 +16,14 @@ RUN apt update -yqq > /dev/null \ && cd /tmp/swoole-src-${SWOOLE_VERSION} \ && phpize > /dev/null \ && ./configure > /dev/null \ - && make -j8 > /dev/null \ + && make -j "$(nproc)" > /dev/null \ && make install > /dev/null \ && echo "extension=swoole.so" > /etc/php/8.3/cli/conf.d/50-swoole.ini \ - && echo "memory_limit=1024M" >> /etc/php/8.3/cli/php.ini \ - && php -m + && echo "memory_limit=1024M" >> /etc/php/8.3/cli/php.ini WORKDIR /swoole ADD ./swoole-server.php /swoole -ADD 10-opcache.ini /swoole ADD ./database.php /swoole COPY 10-opcache.ini /etc/php/8.3/cli/conf.d/10-opcache.ini diff --git a/frameworks/PHP/swoole/swoole-sync-postgres.dockerfile b/frameworks/PHP/swoole/swoole-sync-postgres.dockerfile index 670ea96ad4b..8054b66912b 100644 --- a/frameworks/PHP/swoole/swoole-sync-postgres.dockerfile +++ b/frameworks/PHP/swoole/swoole-sync-postgres.dockerfile @@ -1,6 +1,6 @@ FROM ubuntu:24.04 -ENV SWOOLE_VERSION 5.1.4 +ENV SWOOLE_VERSION 5.1.5 ENV ENABLE_COROUTINE 0 ENV CPU_MULTIPLES 4 ENV DATABASE_DRIVER pgsql @@ -16,16 +16,14 @@ RUN apt update -yqq > /dev/null \ && cd /tmp/swoole-src-${SWOOLE_VERSION} \ && phpize > /dev/null \ && ./configure > /dev/null \ - && make -j8 > /dev/null \ + && make -j "$(nproc)" > /dev/null \ && make install > /dev/null \ && echo "extension=swoole.so" > /etc/php/8.3/cli/conf.d/50-swoole.ini \ - && echo "memory_limit=1024M" >> /etc/php/8.3/cli/php.ini \ - && php -m + && echo "memory_limit=1024M" >> /etc/php/8.3/cli/php.ini WORKDIR /swoole ADD ./swoole-server.php /swoole -ADD 10-opcache.ini /swoole ADD ./database.php /swoole COPY 10-opcache.ini /etc/php/8.3/cli/conf.d/10-opcache.ini diff --git a/frameworks/PHP/webman/benchmark_config.json b/frameworks/PHP/webman/benchmark_config.json index 864deacb0bf..7e185ce1b24 100644 --- a/frameworks/PHP/webman/benchmark_config.json +++ b/frameworks/PHP/webman/benchmark_config.json @@ -1,9 +1,29 @@ { "framework": "webman", + "maintainers": ["walkor"], "tests": [{ "default": { + "dockerfile": "webman.dockerfile", "json_url": "/json", "plaintext_url": "/plaintext", + "port": 8080, + "approach": "Realistic", + "classification": "Micro", + "database": "Postgres", + "framework": "webman", + "language": "PHP", + "flavor": "PHP7", + "orm": "Raw", + "platform": "workerman", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "webman", + "notes": "", + "versus": "workerman" + }, + "pgsql": { + "dockerfile": "webman-pgsql.dockerfile", "db_url": "/db", "query_url": "/queries/", "update_url": "/updates/", @@ -20,7 +40,7 @@ "webserver": "None", "os": "Linux", "database_os": "Linux", - "display_name": "webman", + "display_name": "webman-pgsql", "notes": "", "versus": "workerman" } diff --git a/frameworks/PHP/webman/config.toml b/frameworks/PHP/webman/config.toml index b4cfc7e64d6..3aa7a920672 100644 --- a/frameworks/PHP/webman/config.toml +++ b/frameworks/PHP/webman/config.toml @@ -17,3 +17,21 @@ orm = "Raw" platform = "workerman" webserver = "None" versus = "workerman" + + +[pgsql] +urls.plaintext = "/plaintext" +urls.json = "/json" +urls.db = "/db" +urls.query = "/queries/" +urls.update = "/updates/" +urls.fortune = "/fortunes" +approach = "Realistic" +classification = "Micro" +database = "Postgres" +database_os = "Linux" +os = "Linux" +orm = "Raw" +platform = "workerman" +webserver = "None" +versus = "workerman" \ No newline at end of file diff --git a/frameworks/PHP/webman/config/server.php 
b/frameworks/PHP/webman/config/server.php index 39f256488ef..a9e2686c8dc 100644 --- a/frameworks/PHP/webman/config/server.php +++ b/frameworks/PHP/webman/config/server.php @@ -17,9 +17,10 @@ 'transport' => 'tcp', 'context' => [], 'name' => 'webman', - 'count' => cpu_count() * 4, + 'count' => cpu_count() * ( getenv('TEST_TYPE') === 'default' ? 1 : 4 ), 'user' => '', 'group' => '', + 'reuse_port' => true, 'pid_file' => runtime_path() . '/webman.pid', 'status_file' => runtime_path() . '/webman.status', 'stdout_file' => runtime_path() . '/logs/stdout.log', diff --git a/frameworks/PHP/webman/php.ini b/frameworks/PHP/webman/php.ini index f0c616f9fb2..f4817cc9e3a 100644 --- a/frameworks/PHP/webman/php.ini +++ b/frameworks/PHP/webman/php.ini @@ -1,13 +1,11 @@ +zend_extension=opcache.so opcache.enable=1 opcache.enable_cli=1 opcache.validate_timestamps=0 opcache.save_comments=0 opcache.enable_file_override=1 opcache.huge_code_pages=1 - mysqlnd.collect_statistics = Off - memory_limit = 512M - opcache.jit_buffer_size=128M -opcache.jit=tracing +opcache.jit=tracing \ No newline at end of file diff --git a/frameworks/PHP/webman/webman-pgsql.dockerfile b/frameworks/PHP/webman/webman-pgsql.dockerfile new file mode 100644 index 00000000000..491396785ea --- /dev/null +++ b/frameworks/PHP/webman/webman-pgsql.dockerfile @@ -0,0 +1,26 @@ +FROM ubuntu:24.04 + +ENV TEST_TYPE pgsql + +ARG DEBIAN_FRONTEND=noninteractive + +RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null +RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php > /dev/null && \ + apt-get update -yqq > /dev/null && apt-get upgrade -yqq > /dev/null + +RUN apt-get install -yqq php8.3-cli php8.3-pgsql php8.3-xml > /dev/null + +COPY --from=composer:latest /usr/bin/composer /usr/local/bin/composer + +RUN apt-get update -yqq && apt-get install -y php-pear php8.3-dev libevent-dev git > /dev/null +RUN pecl install event-3.1.4 > /dev/null && echo "extension=event.so" > /etc/php/8.3/cli/conf.d/30-event.ini + +ADD ./ /webman +WORKDIR /webman + +RUN composer install --optimize-autoloader --classmap-authoritative --no-dev --quiet +COPY php.ini /etc/php/8.3/cli/conf.d/10-opcache.ini + +EXPOSE 8080 + +CMD php /webman/start.php start diff --git a/frameworks/PHP/webman/webman.dockerfile b/frameworks/PHP/webman/webman.dockerfile index 5b64cd4aa16..20ef9d61064 100644 --- a/frameworks/PHP/webman/webman.dockerfile +++ b/frameworks/PHP/webman/webman.dockerfile @@ -1,4 +1,6 @@ -FROM ubuntu:22.04 +FROM ubuntu:24.04 + +ENV TEST_TYPE default ARG DEBIAN_FRONTEND=noninteractive @@ -11,14 +13,13 @@ RUN apt-get install -yqq php8.3-cli php8.3-pgsql php8.3-xml > /dev/null COPY --from=composer:latest /usr/bin/composer /usr/local/bin/composer RUN apt-get update -yqq && apt-get install -y php-pear php8.3-dev libevent-dev git > /dev/null -RUN pecl install event-3.1.3 > /dev/null && echo "extension=event.so" > /etc/php/8.3/cli/conf.d/event.ini - -COPY php.ini /etc/php/8.3/cli/php.ini +RUN pecl install event-3.1.4 > /dev/null && echo "extension=event.so" > /etc/php/8.3/cli/conf.d/30-event.ini ADD ./ /webman WORKDIR /webman RUN composer install --optimize-autoloader --classmap-authoritative --no-dev --quiet +COPY php.ini /etc/php/8.3/cli/conf.d/10-opcache.ini EXPOSE 8080 diff --git a/frameworks/PHP/workerman/benchmark_config.json b/frameworks/PHP/workerman/benchmark_config.json index 68ce09ced79..bcb286db6d8 100644 --- a/frameworks/PHP/workerman/benchmark_config.json +++ b/frameworks/PHP/workerman/benchmark_config.json @@ -1,13 +1,11 @@ { "framework": 
"workerman", + "maintainers": ["walkor"], "tests": [{ "default": { + "dockerfile": "workerman-jit.dockerfile", "json_url": "/json", "plaintext_url": "/plaintext", - "db_url": "/db", - "query_url": "/query?q=", - "update_url": "/update?q=", - "fortune_url": "/fortunes", "port": 8080, "approach": "Realistic", "classification": "Platform", @@ -20,11 +18,12 @@ "webserver": "None", "os": "Linux", "database_os": "Linux", - "display_name": "workerman", + "display_name": "workerman [jit]", "notes": "", "versus": "php" }, "pgsql": { + "dockerfile": "workerman-pgsql-jit.dockerfile", "db_url": "/db", "query_url": "/query?q=", "update_url": "/update?q=", @@ -41,12 +40,15 @@ "webserver": "None", "os": "Linux", "database_os": "Linux", - "display_name": "workerman-postgres", + "display_name": "workerman [jit, pgsql]", "notes": "", "versus": "php" }, - "async": { + "mysql": { + "dockerfile": "workerman-mysql-jit.dockerfile", "db_url": "/db", + "query_url": "/query?q=", + "update_url": "/update?q=", "fortune_url": "/fortunes", "port": 8080, "approach": "Realistic", @@ -54,22 +56,38 @@ "database": "MySQL", "framework": "workerman", "language": "PHP", - "flavor": "PHP7", + "flavor": "PHP8", "orm": "Raw", "platform": "workerman", "webserver": "None", "os": "Linux", "database_os": "Linux", - "display_name": "workerman-async-db", + "display_name": "workerman [jit, mysql]", "notes": "", - "versus": "php", - "tags": [ - "broken" - ] + "versus": "php" }, - "php8-jit": { + "without-jit": { + "dockerfile": "workerman.dockerfile", "json_url": "/json", "plaintext_url": "/plaintext", + "port": 8080, + "approach": "Realistic", + "classification": "Platform", + "database": "MySQL", + "framework": "workerman", + "language": "PHP", + "flavor": "PHP8", + "orm": "Raw", + "platform": "workerman", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "workerman", + "notes": "", + "versus": "php" + }, + "pgsql-without-jit": { + "dockerfile": "workerman-pgsql.dockerfile", "db_url": "/db", "query_url": "/query?q=", "update_url": "/update?q=", @@ -86,8 +104,8 @@ "webserver": "None", "os": "Linux", "database_os": "Linux", - "display_name": "workerman-php8-jit", - "notes": "php8 jit", + "display_name": "workerman [pgsql]", + "notes": "", "versus": "php" } }] diff --git a/frameworks/PHP/workerman/config.toml b/frameworks/PHP/workerman/config.toml index 2f3bbffd22c..b2f50bda1a0 100644 --- a/frameworks/PHP/workerman/config.toml +++ b/frameworks/PHP/workerman/config.toml @@ -33,7 +33,7 @@ platform = "workerman" webserver = "None" versus = "php" -[php8-jit] +[mysql] urls.plaintext = "/plaintext" urls.json = "/json" urls.db = "/db" @@ -50,8 +50,12 @@ platform = "workerman" webserver = "None" versus = "php" -[async] +[without-jit] +urls.plaintext = "/plaintext" +urls.json = "/json" urls.db = "/db" +urls.query = "/query?q=" +urls.update = "/update?q=" urls.fortune = "/fortunes" approach = "Realistic" classification = "Platform" @@ -62,3 +66,20 @@ orm = "Raw" platform = "workerman" webserver = "None" versus = "php" + +[pgsql-without-jit] +urls.plaintext = "/plaintext" +urls.json = "/json" +urls.db = "/db" +urls.query = "/query?q=" +urls.update = "/update?q=" +urls.fortune = "/fortunes" +approach = "Realistic" +classification = "Platform" +database = "Postgres" +database_os = "Linux" +os = "Linux" +orm = "Raw" +platform = "workerman" +webserver = "None" +versus = "php" \ No newline at end of file diff --git a/frameworks/PHP/workerman/php-jit.ini b/frameworks/PHP/workerman/php-jit.ini index 
f0c616f9fb2..f4817cc9e3a 100644 --- a/frameworks/PHP/workerman/php-jit.ini +++ b/frameworks/PHP/workerman/php-jit.ini @@ -1,13 +1,11 @@ +zend_extension=opcache.so opcache.enable=1 opcache.enable_cli=1 opcache.validate_timestamps=0 opcache.save_comments=0 opcache.enable_file_override=1 opcache.huge_code_pages=1 - mysqlnd.collect_statistics = Off - memory_limit = 512M - opcache.jit_buffer_size=128M -opcache.jit=tracing +opcache.jit=tracing \ No newline at end of file diff --git a/frameworks/PHP/workerman/php.ini b/frameworks/PHP/workerman/php.ini index e12bbd2fb0c..f6be852042e 100644 --- a/frameworks/PHP/workerman/php.ini +++ b/frameworks/PHP/workerman/php.ini @@ -7,4 +7,4 @@ opcache.huge_code_pages=1 mysqlnd.collect_statistics = Off -memory_limit = 512M +memory_limit = 512M \ No newline at end of file diff --git a/frameworks/PHP/workerman/server-async.php b/frameworks/PHP/workerman/server-async.php deleted file mode 100644 index ce468df5cd6..00000000000 --- a/frameworks/PHP/workerman/server-async.php +++ /dev/null @@ -1,80 +0,0 @@ -count = (int) shell_exec('nproc') * 2; -$http_worker->onWorkerStart = static function() { - global $mysql; - - $loop = Worker::getEventLoop(); - - $mysql = new React\MySQL\Connection($loop, [ - 'host' => 'tfb-database', - 'dbname' => 'hello_world', - 'user' => 'benchmarkdbuser', - 'passwd' => 'benchmarkdbpass' - ]); - - $mysql->on('error', function($e){ - echo $e; - }); - - $mysql->connect(function ($e) {}); -}; - -$http_worker->onMessage = static function ($connection, $request) { - - global $mysql; - - switch ($request->path()) { - case '/db': - $mysql->query('SELECT id,randomNumber FROM World WHERE id='.mt_rand(1, 10000), - static function ($command) use ($connection) { - $connection->send(new Response(200, ['Content-Type' => 'application/json', 'Date' => gmdate('D, d M Y H:i:s').' GMT'], json_encode($command->resultRows, JSON_NUMERIC_CHECK))); - } - ); - return; - - case '/fortunes': - // By default use 'Content-Type: text/html; charset=utf-8'; - $mysql->query('SELECT id,message FROM Fortune', - static function ($command) use ($connection) { - $arr = $command->resultRows; - foreach ($arr as $row) { - $fortune[$row['id']] = htmlspecialchars($row['message'], ENT_QUOTES, 'UTF-8'); - } - $fortune[0] = 'Additional fortune added at request time.'; - asort($fortune); - - $html = 'Fortunes'; - foreach ($fortune as $id => $message) { - $html .= ""; - } - - $connection->send(new Response(200, ['Date' => gmdate('D, d M Y H:i:s').' GMT'], $html.'
</table></body></html>
')); - - } - ); - return; - - //case '/update': - // Http::header('Content-Type: application/json'); - // return $connection->send(update()); - - //case '/info': - // Http::header('Content-Type: text/plain'); - // ob_start(); - // phpinfo(); - // return $connection->send(ob_get_clean()); - - default: - $connection->send(new Response(200, [], 'Error 404')); - - } -}; - -Worker::runAll(); diff --git a/frameworks/PHP/workerman/server.php b/frameworks/PHP/workerman/server.php index 5a38cf1e0fc..f856790efa9 100644 --- a/frameworks/PHP/workerman/server.php +++ b/frameworks/PHP/workerman/server.php @@ -1,29 +1,38 @@ count = (int) shell_exec('nproc') * 4; -$http_worker->onWorkerStart = static function () { +$http_worker->reusePort = true; +$http_worker->count = $process_count; +$http_worker->onWorkerStart = static function () use ($test_type) { Header::$date = gmdate('D, d M Y H:i:s').' GMT'; Timer::add(1, function() { Header::$date = gmdate('D, d M Y H:i:s').' GMT'; }); - init(); + if ($test_type === 'pgsql') { + DbRaw::init(); + } else { + init(); + } }; $http_worker->onMessage = static function ($connection, $request) { - $connection->send(router($request)); - }; Worker::runAll(); - class Header { public static $date = null; } diff --git a/frameworks/PHP/workerman/workerman-async.dockerfile b/frameworks/PHP/workerman/workerman-jit.dockerfile similarity index 65% rename from frameworks/PHP/workerman/workerman-async.dockerfile rename to frameworks/PHP/workerman/workerman-jit.dockerfile index 3730d43e58e..c90794ab9be 100644 --- a/frameworks/PHP/workerman/workerman-async.dockerfile +++ b/frameworks/PHP/workerman/workerman-jit.dockerfile @@ -1,26 +1,26 @@ FROM ubuntu:24.04 +ENV TEST_TYPE default + ARG DEBIAN_FRONTEND=noninteractive RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php > /dev/null && \ apt-get update -yqq > /dev/null && apt-get upgrade -yqq > /dev/null -RUN apt-get install -yqq php8.3-cli php8.3-mysql > /dev/null +RUN apt-get install -yqq php8.3-cli php8.3-mysql php8.3-xml > /dev/null COPY --from=composer/composer:latest-bin --link /composer /usr/local/bin/composer RUN apt-get install -y php-pear php8.3-dev libevent-dev git > /dev/null && \ - pecl install event-3.1.3 > /dev/null && echo "extension=event.so" > /etc/php/8.3/cli/conf.d/event.ini - -COPY --link php.ini /etc/php/8.3/cli/php.ini + pecl install event-3.1.4 > /dev/null && echo "extension=event.so" > /etc/php/8.3/cli/conf.d/30-event.ini WORKDIR /workerman COPY --link . . 
-RUN composer require react/mysql "^0.6" --quiet RUN composer install --optimize-autoloader --classmap-authoritative --no-dev --quiet +COPY php-jit.ini /etc/php/8.3/cli/conf.d/10-opcache.ini EXPOSE 8080 -CMD php /workerman/server-async.php start +CMD php /workerman/server.php start diff --git a/frameworks/PHP/workerman/workerman-mysql-jit.dockerfile b/frameworks/PHP/workerman/workerman-mysql-jit.dockerfile new file mode 100644 index 00000000000..7d8fd9e238d --- /dev/null +++ b/frameworks/PHP/workerman/workerman-mysql-jit.dockerfile @@ -0,0 +1,26 @@ +FROM ubuntu:24.04 + +ENV TEST_TYPE mysql + +ARG DEBIAN_FRONTEND=noninteractive + +RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null +RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php > /dev/null && \ + apt-get update -yqq > /dev/null && apt-get upgrade -yqq > /dev/null + +RUN apt-get install -yqq php8.3-cli php8.3-mysql php8.3-xml > /dev/null + +COPY --from=composer/composer:latest-bin --link /composer /usr/local/bin/composer + +RUN apt-get install -y php-pear php8.3-dev libevent-dev git > /dev/null && \ + pecl install event-3.1.4 > /dev/null && echo "extension=event.so" > /etc/php/8.3/cli/conf.d/30-event.ini + +WORKDIR /workerman +COPY --link . . + +RUN composer install --optimize-autoloader --classmap-authoritative --no-dev --quiet +COPY php-jit.ini /etc/php/8.3/cli/conf.d/10-opcache.ini + +EXPOSE 8080 + +CMD php /workerman/server.php start diff --git a/frameworks/PHP/workerman/workerman-php8-jit.dockerfile b/frameworks/PHP/workerman/workerman-pgsql-jit.dockerfile similarity index 66% rename from frameworks/PHP/workerman/workerman-php8-jit.dockerfile rename to frameworks/PHP/workerman/workerman-pgsql-jit.dockerfile index 528a5c312f6..6c4af626cc0 100644 --- a/frameworks/PHP/workerman/workerman-php8-jit.dockerfile +++ b/frameworks/PHP/workerman/workerman-pgsql-jit.dockerfile @@ -1,5 +1,7 @@ FROM ubuntu:24.04 +ENV TEST_TYPE pgsql + ARG DEBIAN_FRONTEND=noninteractive RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null @@ -11,18 +13,13 @@ RUN apt-get install -yqq php8.3-cli php8.3-pgsql php8.3-xml > /dev/null COPY --from=composer/composer:latest-bin --link /composer /usr/local/bin/composer RUN apt-get install -y php-pear php8.3-dev libevent-dev git > /dev/null && \ - pecl install event-3.1.3 > /dev/null && echo "extension=event.so" > /etc/php/8.3/cli/conf.d/event.ini - -COPY --link php-jit.ini /etc/php/8.3/cli/php.ini + pecl install event-3.1.4 > /dev/null && echo "extension=event.so" > /etc/php/8.3/cli/conf.d/30-event.ini WORKDIR /workerman COPY --link . . 
-RUN sed -i "s|'/app.php|'/app-pg.php|g" server.php -RUN sed -i "s|init()|DbRaw::init()|g" server.php -RUN sed -i "s|opcache.jit=off|opcache.jit=function|g" /etc/php/8.3/cli/conf.d/10-opcache.ini - RUN composer install --optimize-autoloader --classmap-authoritative --no-dev --quiet +COPY php-jit.ini /etc/php/8.3/cli/conf.d/10-opcache.ini EXPOSE 8080 diff --git a/frameworks/PHP/workerman/workerman-pgsql.dockerfile b/frameworks/PHP/workerman/workerman-pgsql.dockerfile index 826a6a39383..9b1552d354e 100644 --- a/frameworks/PHP/workerman/workerman-pgsql.dockerfile +++ b/frameworks/PHP/workerman/workerman-pgsql.dockerfile @@ -1,5 +1,7 @@ FROM ubuntu:24.04 +ENV TEST_TYPE pgsql + ARG DEBIAN_FRONTEND=noninteractive RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null @@ -11,17 +13,13 @@ RUN apt-get install -yqq php8.3-cli php8.3-pgsql php8.3-xml > /dev/null COPY --from=composer/composer:latest-bin --link /composer /usr/local/bin/composer RUN apt-get install -y php-pear php8.3-dev libevent-dev git > /dev/null && \ - pecl install event-3.1.3 > /dev/null && echo "extension=event.so" > /etc/php/8.3/cli/conf.d/event.ini - -COPY --link php.ini /etc/php/8.3/cli/php.ini + pecl install event-3.1.4 > /dev/null && echo "extension=event.so" > /etc/php/8.3/cli/conf.d/30-event.ini WORKDIR /workerman COPY --link . . -RUN sed -i "s|'/app.php|'/app-pg.php|g" server.php -RUN sed -i "s|init()|DbRaw::init()|g" server.php - RUN composer install --optimize-autoloader --classmap-authoritative --no-dev --quiet +COPY php.ini /etc/php/8.3/cli/php.ini EXPOSE 8080 diff --git a/frameworks/PHP/workerman/workerman.dockerfile b/frameworks/PHP/workerman/workerman.dockerfile index 5d09ee6362a..a4d9602019e 100644 --- a/frameworks/PHP/workerman/workerman.dockerfile +++ b/frameworks/PHP/workerman/workerman.dockerfile @@ -1,5 +1,7 @@ FROM ubuntu:24.04 +ENV TEST_TYPE default + ARG DEBIAN_FRONTEND=noninteractive RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null @@ -11,14 +13,13 @@ RUN apt-get install -yqq php8.3-cli php8.3-mysql php8.3-xml > /dev/null COPY --from=composer/composer:latest-bin --link /composer /usr/local/bin/composer RUN apt-get install -y php-pear php8.3-dev libevent-dev git > /dev/null && \ - pecl install event-3.1.3 > /dev/null && echo "extension=event.so" > /etc/php/8.3/cli/conf.d/event.ini - -COPY --link php-jit.ini /etc/php/8.3/cli/php.ini + pecl install event-3.1.4 > /dev/null && echo "extension=event.so" > /etc/php/8.3/cli/conf.d/30-event.ini WORKDIR /workerman COPY --link . . RUN composer install --optimize-autoloader --classmap-authoritative --no-dev --quiet +COPY php.ini /etc/php/8.3/cli/php.ini EXPOSE 8080 diff --git a/frameworks/Pascal/mormot/setup_and_build.sh b/frameworks/Pascal/mormot/setup_and_build.sh index 8f2d533127f..62bd68746f8 100755 --- a/frameworks/Pascal/mormot/setup_and_build.sh +++ b/frameworks/Pascal/mormot/setup_and_build.sh @@ -27,7 +27,7 @@ rm -rf ./libs mkdir -p ./libs/mORMot/static # echo "Getting the latest pre-release URL..." # USED_TAG=$(wget -qO- https://api.github.com/repos/synopse/mORMot2/releases/latest | jq -r '.tag_name') -USED_TAG="2.2.stable" +USED_TAG="2.3.stable" echo "Used release tag $USED_TAG" URL="https://github.com/synopse/mORMot2/releases/download/$USED_TAG/mormot2static.tgz" @@ -35,8 +35,8 @@ echo "Download statics from $URL ..." 
wget -qO- "$URL" | tar -xz -C ./libs/mORMot/static # uncomment for fixed commit URL -URL=https://github.com/synopse/mORMot2/tarball/6dc09ceca456931384857b383ed61b63f11f3be7 -#URL="https://api.github.com/repos/synopse/mORMot2/tarball/$USED_TAG" +#URL=https://github.com/synopse/mORMot2/tarball/6dc09ceca456931384857b383ed61b63f11f3be7 +URL="https://api.github.com/repos/synopse/mORMot2/tarball/$USED_TAG" echo "Download and unpacking mORMot sources from $URL ..." wget -qO- "$URL" | tar -xz -C ./libs/mORMot --strip-components=1 @@ -80,7 +80,7 @@ fpc -MDelphi -Sci -Ci -O3 -g -gl -gw2 -Xg -k'-rpath=$ORIGIN' -k-L$BIN \ -Fu"$MSRC/core" -Fu"$MSRC/db" -Fu"$MSRC/rest" -Fu"$MSRC/crypt" \ -Fu"$MSRC/app" -Fu"$MSRC/net" -Fu"$MSRC/lib" -Fu"$MSRC/orm" -Fu"$MSRC/soa" \ -FU"$BIN/fpc-$ARCH_TG/.dcu" -FE"$BIN/fpc-$ARCH_TG" -o"$BIN/fpc-$ARCH_TG/$dest_fn" \ - -dFPC_X64MM -dFPCMM_SERVER \ + -dFPC_LIBCMM -dFPC_LIBCMM_NOMSIZE \ -B -Se1 "./src/raw.pas" | grep "[Warning|Error|Fatal]:" script_successful \ No newline at end of file diff --git a/frameworks/Python/emmett/app.py b/frameworks/Python/emmett/app.py index 50a942ce7ad..7d012fe70b3 100644 --- a/frameworks/Python/emmett/app.py +++ b/frameworks/Python/emmett/app.py @@ -32,7 +32,7 @@ def _serialize(self, row): app.config.db.user = 'benchmarkdbuser' app.config.db.password = 'benchmarkdbpass' app.config.db.database = 'hello_world' -app.config.db.pool_size = 10 +app.config.db.pool_size = 16 db = Database(app) db.define_models(World, Fortune) diff --git a/frameworks/Python/emmett/requirements.txt b/frameworks/Python/emmett/requirements.txt index 627a2187091..1621f272390 100644 --- a/frameworks/Python/emmett/requirements.txt +++ b/frameworks/Python/emmett/requirements.txt @@ -1,2 +1,2 @@ -emmett[orjson]>=2.5.12,<2.6.0 -psycopg2-binary==2.9.5 +emmett[orjson]>=2.6.0,<2.7.0 +psycopg2-binary==2.9.9 diff --git a/frameworks/Python/emmett/run.py b/frameworks/Python/emmett/run.py index 02a21e458f9..aad30c549c6 100644 --- a/frameworks/Python/emmett/run.py +++ b/frameworks/Python/emmett/run.py @@ -1,10 +1,10 @@ import multiprocessing -from emmett.server import run +from emmett_core.server import run if __name__ == "__main__": - workers = round(multiprocessing.cpu_count() / 2) + workers = multiprocessing.cpu_count() run( "rsgi", diff --git a/frameworks/Python/emmett55/README.md b/frameworks/Python/emmett55/README.md new file mode 100644 index 00000000000..0e36619cfc8 --- /dev/null +++ b/frameworks/Python/emmett55/README.md @@ -0,0 +1,26 @@ +# Emmett55 Benchmark Test + +This is the Emmett55 portion of a [benchmarking tests suite](../../) comparing a variety of web development platforms. + +The information below is specific to Emmett55. For further guidance, review the [documentation](https://github.com/TechEmpower/FrameworkBenchmarks/wiki). + +Also note that there is additional information provided in the [Python README](../). + +## Description + +[Emmett55](https://github.com/emmett-framework/emmett55) is a Python asyncIO micro web framework. 
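+
+Routes are declared with decorators, as in Emmett. A minimal sketch of the JSON handler, with the imports and route taken verbatim from `app.py` in this patch:
+
+```python
+from emmett55 import App
+from emmett55.tools import service
+
+app = App(__name__)
+
+
+@app.route()
+@service.json
+async def json():
+    # The returned dict is serialized to JSON by the service.json decorator.
+    return {'message': 'Hello, World!'}
+```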
+ +## Test Paths & Source + +* [JSON Serialization](app.py): "/json" +* [Single Database Query](app.py): "/db" +* [Multiple Database Queries](app.py): "queries?queries=#" +* [Fortunes](app.py): "/fortunes" +* [Database Updates](app.py): "updates?queries=#" +* [Plaintext](app.py): "/plaintext" + +*Replace # with an actual number.* + +### Resources + +* [Github repository](https://github.com/emmett-framework/emmett55) diff --git a/frameworks/Python/emmett55/app.py b/frameworks/Python/emmett55/app.py new file mode 100644 index 00000000000..e417e8f75fa --- /dev/null +++ b/frameworks/Python/emmett55/app.py @@ -0,0 +1,129 @@ +import os +from operator import itemgetter +from random import randint, sample + +import asyncpg +from emmett55 import App, Pipe, current, request, response +from emmett55.extensions import Extension, Signals, listen_signal +from emmett55.tools import service +from renoir import Renoir + + +class AsyncPG(Extension): + __slots__ = ["pool"] + + def on_load(self): + self.pool = None + self.pipe = AsyncPGPipe(self) + + async def build_pool(self): + self.pool = await asyncpg.create_pool( + user=os.getenv('PGUSER', 'benchmarkdbuser'), + password=os.getenv('PGPASS', 'benchmarkdbpass'), + database='hello_world', + host='tfb-database', + port=5432, + min_size=16, + max_size=16, + max_queries=64_000_000_000, + max_inactive_connection_lifetime=0 + ) + + @listen_signal(Signals.after_loop) + def _init_pool(self, loop): + loop.run_until_complete(self.build_pool()) + + +class AsyncPGPipe(Pipe): + __slots__ = ["ext"] + + def __init__(self, ext): + self.ext = ext + + async def open(self): + conn = current._db_conn = self.ext.pool.acquire() + current.db = await conn.__aenter__() + + async def close(self): + await current._db_conn.__aexit__() + + +app = App(__name__) +app.config.handle_static = False +templates = Renoir() + +db_ext = app.use_extension(AsyncPG) + +SQL_SELECT = 'SELECT "randomnumber", "id" FROM "world" WHERE id = $1' +SQL_UPDATE = 'UPDATE "world" SET "randomnumber"=$1 WHERE id=$2' +ROW_ADD = [0, 'Additional fortune added at request time.'] +sort_key = itemgetter(1) + + +@app.route() +@service.json +async def json(): + return {'message': 'Hello, World!'} + + +@app.route("/db", pipeline=[db_ext.pipe]) +@service.json +async def get_random_world(): + row_id = randint(1, 10000) + number = await current.db.fetchval(SQL_SELECT, row_id) + return {'id': row_id, 'randomNumber': number} + + +def get_qparam(): + try: + rv = int(request.query_params.queries or 1) + except ValueError: + return 1 + if rv < 1: + return 1 + if rv > 500: + return 500 + return rv + + +@app.route("/queries", pipeline=[db_ext.pipe]) +@service.json +async def get_random_worlds(): + num_queries = get_qparam() + row_ids = sample(range(1, 10000), num_queries) + worlds = [] + statement = await current.db.prepare(SQL_SELECT) + for row_id in row_ids: + number = await statement.fetchval(row_id) + worlds.append({'id': row_id, 'randomNumber': number}) + return worlds + + +@app.route(pipeline=[db_ext.pipe], output='str') +async def fortunes(): + response.content_type = "text/html; charset=utf-8" + fortunes = await current.db.fetch('SELECT * FROM Fortune') + fortunes.append(ROW_ADD) + fortunes.sort(key=sort_key) + return templates.render("templates/fortunes.html", {"fortunes": fortunes}) + + +@app.route(pipeline=[db_ext.pipe]) +@service.json +async def updates(): + num_queries = get_qparam() + updates = list(zip( + sample(range(1, 10000), num_queries), + sorted(sample(range(1, 10000), num_queries)) + )) + worlds = [{'id': 
row_id, 'randomNumber': number} for row_id, number in updates] + statement = await current.db.prepare(SQL_SELECT) + for row_id, _ in updates: + await statement.fetchval(row_id) + await current.db.executemany(SQL_UPDATE, updates) + return worlds + + +@app.route(output='bytes') +async def plaintext(): + return b'Hello, World!' diff --git a/frameworks/Python/emmett55/benchmark_config.json b/frameworks/Python/emmett55/benchmark_config.json new file mode 100644 index 00000000000..e842529efcb --- /dev/null +++ b/frameworks/Python/emmett55/benchmark_config.json @@ -0,0 +1,27 @@ +{ + "framework": "emmett55", + "tests": [{ + "default": { + "json_url": "/json", + "db_url": "/db", + "query_url": "/queries?queries=", + "fortune_url": "/fortunes", + "update_url": "/updates?queries=", + "plaintext_url": "/plaintext", + "port": 8080, + "approach": "Realistic", + "classification": "Micro", + "database": "Postgres", + "framework": "Emmett55", + "language": "Python", + "orm": "Raw", + "platform": "RSGI", + "webserver": "granian", + "os": "Linux", + "database_os": "Linux", + "display_name": "Emmett55", + "notes": "CPython 3.7", + "versus": "uvicorn" + } + }] +} diff --git a/frameworks/Python/emmett55/config.toml b/frameworks/Python/emmett55/config.toml new file mode 100644 index 00000000000..11586c4e530 --- /dev/null +++ b/frameworks/Python/emmett55/config.toml @@ -0,0 +1,19 @@ +[framework] +name = "emmett55" + +[main] +urls.plaintext = "/plaintext" +urls.json = "/json" +urls.db = "/db" +urls.query = "/queries?queries=" +urls.update = "/updates?queries=" +urls.fortune = "/fortunes" +approach = "Realistic" +classification = "Micro" +database = "Postgres" +database_os = "Linux" +os = "Linux" +orm = "Raw" +platform = "RSGI" +webserver = "granian" +versus = "uvicorn" diff --git a/frameworks/Python/emmett55/emmett55.dockerfile b/frameworks/Python/emmett55/emmett55.dockerfile new file mode 100644 index 00000000000..49438fd442d --- /dev/null +++ b/frameworks/Python/emmett55/emmett55.dockerfile @@ -0,0 +1,11 @@ +FROM python:3.11-slim + +ADD ./ /emmett55 + +WORKDIR /emmett55 + +RUN pip install --no-cache-dir -r /emmett55/requirements.txt + +EXPOSE 8080 + +CMD python run.py diff --git a/frameworks/Python/emmett55/requirements.txt b/frameworks/Python/emmett55/requirements.txt new file mode 100644 index 00000000000..ecf2313a108 --- /dev/null +++ b/frameworks/Python/emmett55/requirements.txt @@ -0,0 +1,3 @@ +asyncpg==0.29.0 +emmett55[orjson]>=1.0.0,<1.1.0 +renoir==1.8.0 diff --git a/frameworks/Python/emmett55/run.py b/frameworks/Python/emmett55/run.py new file mode 100644 index 00000000000..aad30c549c6 --- /dev/null +++ b/frameworks/Python/emmett55/run.py @@ -0,0 +1,20 @@ +import multiprocessing + +from emmett_core.server import run + + +if __name__ == "__main__": + workers = multiprocessing.cpu_count() + + run( + "rsgi", + ("app", "app"), + host="0.0.0.0", + port=8080, + workers=workers, + backlog=16384, + threading_mode="runtime", + http="1", + enable_websockets=False, + log_level="warn" + ) diff --git a/frameworks/Python/emmett55/templates/fortunes.html b/frameworks/Python/emmett55/templates/fortunes.html new file mode 100644 index 00000000000..c64ff16ec6f --- /dev/null +++ b/frameworks/Python/emmett55/templates/fortunes.html @@ -0,0 +1,20 @@ + + + + Fortunes + + + + + + + + {{ for fortune in fortunes: }} + + + + + {{ pass }} +
+<tr><th>id</th><th>message</th></tr>
+<tr><td>{{ =fortune[0] }}</td><td>{{ =fortune[1] }}</td></tr>
+ + diff --git a/frameworks/Python/robyn/app-const.py b/frameworks/Python/robyn/app-const.py index 4b806389fa5..ceec8488829 100755 --- a/frameworks/Python/robyn/app-const.py +++ b/frameworks/Python/robyn/app-const.py @@ -8,8 +8,8 @@ class SpecialConfig(Config): def __init__(self): super().__init__() - self.workers = (os.cpu_count() * 2) + 1 - self.processes = os.cpu_count() + self.workers = 2 + self.processes = ( os.cpu_count() * 2 ) + 1 self.log_level = "WARN" diff --git a/frameworks/Python/robyn/app.py b/frameworks/Python/robyn/app.py index e8c975e530f..bbbb037868a 100755 --- a/frameworks/Python/robyn/app.py +++ b/frameworks/Python/robyn/app.py @@ -8,8 +8,8 @@ class SpecialConfig(Config): def __init__(self): super().__init__() - self.workers = (os.cpu_count() * 2) + 1 - self.processes = os.cpu_count() + self.workers = 2 + self.processes = ( os.cpu_count() * 2 ) + 1 self.log_level = "WARN" diff --git a/frameworks/Python/robyn/requirements-const.txt b/frameworks/Python/robyn/requirements-const.txt index fbb88344955..484d4b7704f 100644 --- a/frameworks/Python/robyn/requirements-const.txt +++ b/frameworks/Python/robyn/requirements-const.txt @@ -1,2 +1,2 @@ uvloop==0.19.0 -robyn==0.60.2 +robyn==0.62.0 diff --git a/frameworks/Python/robyn/requirements.txt b/frameworks/Python/robyn/requirements.txt index fbb88344955..484d4b7704f 100644 --- a/frameworks/Python/robyn/requirements.txt +++ b/frameworks/Python/robyn/requirements.txt @@ -1,2 +1,2 @@ uvloop==0.19.0 -robyn==0.60.2 +robyn==0.62.0 diff --git a/frameworks/Python/robyn/robyn-const.dockerfile b/frameworks/Python/robyn/robyn-const.dockerfile index 90b201eccff..9429ff37376 100644 --- a/frameworks/Python/robyn/robyn-const.dockerfile +++ b/frameworks/Python/robyn/robyn-const.dockerfile @@ -8,4 +8,4 @@ RUN pip3 install -r /robyn/requirements-const.txt EXPOSE 8080 -CMD ["robyn", "app-const.py", "--fast"] +CMD ["python", "app-const.py", "--log-level", "warn"] diff --git a/frameworks/Python/robyn/robyn.dockerfile b/frameworks/Python/robyn/robyn.dockerfile index 8de6fea2c19..bc42b5be462 100644 --- a/frameworks/Python/robyn/robyn.dockerfile +++ b/frameworks/Python/robyn/robyn.dockerfile @@ -8,4 +8,4 @@ RUN pip3 install -r /robyn/requirements.txt EXPOSE 8080 -CMD ["robyn", "app.py", "--fast"] +CMD ["python", "app.py", "--log-level", "warn"] diff --git a/frameworks/Python/sanic/app.py b/frameworks/Python/sanic/app.py index 47b374ce9a3..4070503205d 100644 --- a/frameworks/Python/sanic/app.py +++ b/frameworks/Python/sanic/app.py @@ -11,6 +11,8 @@ import sanic from sanic import response +from orjson import dumps + logger = getLogger(__name__) @@ -41,23 +43,26 @@ def get_num_queries(queries): return query_count -connection_pool = None sort_fortunes_key = itemgetter(1) template = load_fortunes_template() -app = sanic.Sanic(name=__name__) +app = sanic.Sanic(name=__name__, dumps=dumps) @app.listener('before_server_start') async def setup_database(app, loop): - global connection_pool - connection_pool = await asyncpg.create_pool( - user=os.getenv('PGUSER', 'benchmarkdbuser'), - password=os.getenv('PGPASS', 'benchmarkdbpass'), - database='hello_world', - host='tfb-database', - port=5432 - ) + app.ctx.pool = await asyncpg.create_pool( + user=os.getenv('PGUSER', 'benchmarkdbuser'), + password=os.getenv('PGPASS', 'benchmarkdbpass'), + database='hello_world', + host='tfb-database', + port=5432 + ) + + +@app.listener('after_server_stop') +async def close_database(app, loop): + app.ctx.pool.close() @app.get('/json') @@ -69,7 +74,7 @@ def json_view(request): async def 
single_database_query_view(request): row_id = randint(1, 10000) - async with connection_pool.acquire() as connection: + async with request.app.ctx.pool.acquire() as connection: number = await connection.fetchval(READ_ROW_SQL, row_id) return response.json( @@ -84,7 +89,7 @@ async def multiple_database_queries_view(request): row_ids = sample(range(1, 10000), num_queries) worlds = [] - async with connection_pool.acquire() as connection: + async with request.app.ctx.pool.acquire() as connection: statement = await connection.prepare(READ_ROW_SQL) for row_id in row_ids: number = await statement.fetchval(row_id) @@ -100,7 +105,7 @@ async def multiple_database_queries_view(request): @app.get('/fortunes') async def fortunes_view(request): - async with connection_pool.acquire() as connection: + async with request.app.ctx.pool.acquire() as connection: fortunes = await connection.fetch('SELECT * FROM Fortune') fortunes.append(ADDITIONAL_ROW) @@ -112,22 +117,21 @@ async def fortunes_view(request): @app.get('/updates') async def database_updates_view(request): - worlds = [] - updates = set() queries = request.args.get('queries', 1) + num_queries = get_num_queries(queries) + # To avoid deadlock + ids = sorted(sample(range(1, 10000 + 1), num_queries)) + numbers = sorted(sample(range(1, 10000), num_queries)) + updates = list(zip(ids, numbers)) - async with connection_pool.acquire() as connection: - statement = await connection.prepare(READ_ROW_SQL_TO_UPDATE) - - for row_id in sample(range(1, 10000), get_num_queries(queries)): - record = await statement.fetchrow(row_id) - world = dict( - id=record['id'], randomNumber=record['randomnumber'] - ) - world['randomNumber'] = randint(1, 10000) - worlds.append(world) - updates.add((world['id'], world['randomNumber'])) + worlds = [ + {"id": row_id, "randomNumber": number} for row_id, number in updates + ] + async with request.app.ctx.pool.acquire() as connection: + statement = await connection.prepare(READ_ROW_SQL) + for row_id, _ in updates: + await statement.fetchval(row_id) await connection.executemany(WRITE_ROW_SQL, updates) return response.json(worlds, headers=get_headers()) diff --git a/frameworks/Python/sanic/benchmark_config.json b/frameworks/Python/sanic/benchmark_config.json index b1bdd529d60..b3c6799b42b 100644 --- a/frameworks/Python/sanic/benchmark_config.json +++ b/frameworks/Python/sanic/benchmark_config.json @@ -23,8 +23,7 @@ "database_os": "Linux", "display_name": "Sanic", "notes": "", - "versus": "None", - "tags": ["broken"] + "versus": "None" } } ] diff --git a/frameworks/Python/sanic/requirements.txt b/frameworks/Python/sanic/requirements.txt index 5b4738e167d..ff8b4afd8a9 100644 --- a/frameworks/Python/sanic/requirements.txt +++ b/frameworks/Python/sanic/requirements.txt @@ -1,4 +1,5 @@ -asyncpg==0.25.0 +asyncpg==0.29.0 Jinja2==3.1.4 -sanic==22.6.1 -uvloop==0.16.0 +sanic==24.6.0 +uvloop==0.20.0 +orjson==3.10.7 \ No newline at end of file diff --git a/frameworks/Python/sanic/sanic.dockerfile b/frameworks/Python/sanic/sanic.dockerfile index 9619237ed21..d12ebcb391a 100644 --- a/frameworks/Python/sanic/sanic.dockerfile +++ b/frameworks/Python/sanic/sanic.dockerfile @@ -1,8 +1,8 @@ -FROM python:3.8 +FROM python:3.12 ADD ./requirements.txt /sanic/requirements.txt -RUN pip3 install cython==0.29.13 && \ +RUN pip3 install cython==3.0.11 && \ pip3 install -r /sanic/requirements.txt ADD ./ /sanic diff --git a/frameworks/Python/starlette/requirements.txt b/frameworks/Python/starlette/requirements.txt index ef5a1748ca4..47b27a87e9a 100644 --- 
a/frameworks/Python/starlette/requirements.txt +++ b/frameworks/Python/starlette/requirements.txt @@ -6,6 +6,6 @@ Jinja2==3.1.4 MarkupSafe==2.1.1 python-dotenv==0.20.0 PyYAML==6.0 -starlette==0.36.2 +starlette==0.40.0 uvicorn==0.20.0 uvloop==0.17.0 diff --git a/frameworks/Ruby/rack/Gemfile b/frameworks/Ruby/rack/Gemfile index af0fc5b1d04..ab7f29130b7 100644 --- a/frameworks/Ruby/rack/Gemfile +++ b/frameworks/Ruby/rack/Gemfile @@ -4,16 +4,25 @@ source 'https://rubygems.org' gem 'rack', '~> 3.0' gem 'connection_pool', '~> 2.4' -gem 'falcon', '~> 0.47', platforms: %i[ruby mswin] gem 'jdbc-postgres', '~> 42.2', platforms: :jruby, require: 'jdbc/postgres' gem 'json', '~> 2.6', platforms: :jruby gem 'oj', '~> 3.14', platforms: %i[ruby mswin] gem 'pg', '~> 1.5', platforms: %i[ruby mswin] -gem 'puma', '~> 6.4' gem 'sequel' gem 'sequel_pg', platforms: %i[ruby mswin] gem 'tzinfo-data', '1.2023.3' -gem 'unicorn', '~> 6.1', platforms: %i[ruby mswin], require: false + +group :falcon do + gem 'falcon', '~> 0.47', platforms: %i[ruby mswin] +end + +group :puma do + gem 'puma', '~> 6.4' +end + +group :unicorn do + gem 'unicorn', '~> 6.1', platforms: %i[ruby mswin] +end group :development do gem 'rack-test' diff --git a/frameworks/Ruby/rack/Gemfile.lock b/frameworks/Ruby/rack/Gemfile.lock index cf785ab1792..ee579e13085 100644 --- a/frameworks/Ruby/rack/Gemfile.lock +++ b/frameworks/Ruby/rack/Gemfile.lock @@ -86,8 +86,7 @@ GEM rainbow (3.1.1) raindrops (0.20.1) regexp_parser (2.9.2) - rexml (3.3.6) - strscan + rexml (3.3.9) rubocop (1.64.1) json (~> 2.3) language_server-protocol (>= 3.17.0) @@ -110,7 +109,6 @@ GEM sequel_pg (1.17.1) pg (>= 0.18.0, != 1.2.0) sequel (>= 4.38.0) - strscan (3.1.0) traces (0.11.1) tzinfo (2.0.6) concurrent-ruby (~> 1.0) diff --git a/frameworks/Ruby/rack/hello_world.rb b/frameworks/Ruby/rack/hello_world.rb index 2fcacec605b..050b6eac801 100644 --- a/frameworks/Ruby/rack/hello_world.rb +++ b/frameworks/Ruby/rack/hello_world.rb @@ -30,13 +30,13 @@ class HelloWorld SERVER_STRING = if defined?(PhusionPassenger) 'Passenger' elsif defined?(Puma) - Puma::Const::PUMA_SERVER_STRING + 'Puma' elsif defined?(Unicorn) 'Unicorn' elsif defined?(Falcon) 'Falcon' else - ' Ruby Rack' + 'Ruby Rack' end TEMPLATE_PREFIX = ' diff --git a/frameworks/Ruby/rack/rack-falcon.dockerfile b/frameworks/Ruby/rack/rack-falcon.dockerfile index f6ba1a106a2..f030cd54177 100644 --- a/frameworks/Ruby/rack/rack-falcon.dockerfile +++ b/frameworks/Ruby/rack/rack-falcon.dockerfile @@ -9,10 +9,10 @@ ENV LD_PRELOAD=libjemalloc.so.2 WORKDIR /rack -COPY Gemfile ./ +COPY Gemfile ./ ENV BUNDLE_FORCE_RUBY_PLATFORM=true -RUN bundle config set without 'development test' +RUN bundle config set without 'development test puma unicorn' RUN bundle install --jobs=8 COPY . . diff --git a/frameworks/Ruby/rack/rack-jruby.dockerfile b/frameworks/Ruby/rack/rack-jruby.dockerfile index 7bf4b329af1..ab06ae132ed 100644 --- a/frameworks/Ruby/rack/rack-jruby.dockerfile +++ b/frameworks/Ruby/rack/rack-jruby.dockerfile @@ -6,7 +6,7 @@ WORKDIR /rack COPY Gemfile ./ -RUN bundle config set without 'development test' +RUN bundle config set without 'development test falcon unicorn' RUN bundle install --jobs=8 COPY . . 
diff --git a/frameworks/Ruby/rack/rack-unicorn.dockerfile b/frameworks/Ruby/rack/rack-unicorn.dockerfile index 8609febfd7b..74b3e82041c 100644 --- a/frameworks/Ruby/rack/rack-unicorn.dockerfile +++ b/frameworks/Ruby/rack/rack-unicorn.dockerfile @@ -12,13 +12,11 @@ WORKDIR /rack COPY Gemfile ./ ENV BUNDLE_FORCE_RUBY_PLATFORM=true -RUN bundle config set without 'development test' +RUN bundle config set without 'development test falcon puma' RUN bundle install --jobs=8 COPY . . EXPOSE 8080 -#CMD nginx -c /rack/config/nginx.conf && bundle exec unicorn -E production -c config/unicorn.rb - CMD bundle exec unicorn -c config/unicorn.rb -o 0.0.0.0 -p 8080 -E production diff --git a/frameworks/Ruby/rack/rack.dockerfile b/frameworks/Ruby/rack/rack.dockerfile index b9b39c43122..615775cf9ea 100644 --- a/frameworks/Ruby/rack/rack.dockerfile +++ b/frameworks/Ruby/rack/rack.dockerfile @@ -10,10 +10,10 @@ ENV LD_PRELOAD=libjemalloc.so.2 WORKDIR /rack -COPY Gemfile ./ +COPY Gemfile ./ ENV BUNDLE_FORCE_RUBY_PLATFORM=true -RUN bundle config set without 'development test' +RUN bundle config set without 'development test falcon unicorn' RUN bundle install --jobs=8 COPY . . @@ -21,4 +21,3 @@ COPY . . EXPOSE 8080 CMD bundle exec puma -C config/puma.rb -b tcp://0.0.0.0:8080 -e production - diff --git a/frameworks/Ruby/rage/Gemfile b/frameworks/Ruby/rage/Gemfile index 96d6e7042fc..89c3affcb58 100644 --- a/frameworks/Ruby/rage/Gemfile +++ b/frameworks/Ruby/rage/Gemfile @@ -1,6 +1,6 @@ source "https://rubygems.org" -gem "rage-rb", "~> 1.3" +gem "rage-rb", "~> 1.10" gem "pg", "~> 1.0" gem "activerecord", "~> 7.2.0", require: "active_record" diff --git a/frameworks/Ruby/rage/app/models/fortune.rb b/frameworks/Ruby/rage/app/models/fortune.rb index 6b7ad122f80..0080d6363c4 100644 --- a/frameworks/Ruby/rage/app/models/fortune.rb +++ b/frameworks/Ruby/rage/app/models/fortune.rb @@ -1,7 +1,3 @@ class Fortune < ApplicationRecord self.table_name = "Fortune" - - def as_json(*) - attributes - end end diff --git a/frameworks/Ruby/rage/app/models/world.rb b/frameworks/Ruby/rage/app/models/world.rb index 951aab55b64..836783137c6 100644 --- a/frameworks/Ruby/rage/app/models/world.rb +++ b/frameworks/Ruby/rage/app/models/world.rb @@ -1,9 +1,5 @@ class World < ApplicationRecord self.table_name = "World" - def as_json(*) - attributes - end - alias_attribute(:randomNumber, :randomnumber) end diff --git a/frameworks/Ruby/rage/config.ru b/frameworks/Ruby/rage/config.ru index 52de8a40479..049a1ad509d 100644 --- a/frameworks/Ruby/rage/config.ru +++ b/frameworks/Ruby/rage/config.ru @@ -1,4 +1,3 @@ require_relative "config/application" run Rage.application -Rage.load_middlewares(self) diff --git a/frameworks/Ruby/rage/config/initializers/activerecord.rb b/frameworks/Ruby/rage/config/initializers/activerecord.rb index c0e3eb08d44..34a3b019a66 100644 --- a/frameworks/Ruby/rage/config/initializers/activerecord.rb +++ b/frameworks/Ruby/rage/config/initializers/activerecord.rb @@ -2,16 +2,7 @@ require "etc" -connection = { - adapter: "postgresql", - host: "tfb-database", - username: "benchmarkdbuser", - password: "benchmarkdbpass", - database: "hello_world", - reaping_frequency: 0, - pool: (2 * Math.log(256 / Etc.nprocessors)).floor -} +pool_size = (2 * Math.log(256 / Etc.nprocessors)).floor +puts "ActiveRecord pool size: #{pool_size}" -puts "ActiveRecord connection options: #{connection.inspect}" - -ActiveRecord::Base.establish_connection(connection) 
+ENV["DATABASE_URL"]="postgres://benchmarkdbuser:benchmarkdbpass@tfb-database/hello_world?pool=#{pool_size}&reaping_frequency=0" diff --git a/frameworks/Ruby/rage/rage.dockerfile b/frameworks/Ruby/rage/rage.dockerfile index bbacb344f29..6c65b51fba4 100644 --- a/frameworks/Ruby/rage/rage.dockerfile +++ b/frameworks/Ruby/rage/rage.dockerfile @@ -8,7 +8,6 @@ RUN bundle install --jobs=8 COPY . /rage ENV RUBY_YJIT_ENABLE=1 -ENV RAGE_PATCH_AR_POOL=1 ENV BUNDLE_FORCE_RUBY_PLATFORM=true CMD bundle exec rage s -b 0.0.0.0 -p 8080 -e production diff --git a/frameworks/Ruby/rails/Gemfile b/frameworks/Ruby/rails/Gemfile index 8039ed09fd9..b188913327b 100644 --- a/frameworks/Ruby/rails/Gemfile +++ b/frameworks/Ruby/rails/Gemfile @@ -1,9 +1,31 @@ source 'https://rubygems.org' gem 'oj', '~> 3.16' -gem 'pg', '~> 1.5', group: :postgresql -gem 'puma', '~> 6.4' gem 'rails', '~> 7.2.0' gem 'redis', '~> 5.0' -gem 'trilogy', '~> 2.8.1', group: :mysql gem 'tzinfo-data' + +group :mysql do + gem 'trilogy', '~> 2.8.1' +end + +group :postgresql do + gem 'pg', '~> 1.5' +end + +group :falcon do + gem 'falcon', '~> 0.47', require: false +end + +group :puma do + gem 'puma', '~> 6.4', require: false +end + +group :unicorn do + gem 'unicorn', '~> 6.1', require: false +end + +group :agoo do + gem 'agoo', require: false + gem 'rackup' +end diff --git a/frameworks/Ruby/rails/Gemfile.lock b/frameworks/Ruby/rails/Gemfile.lock index dfa97091616..9773df3e123 100644 --- a/frameworks/Ruby/rails/Gemfile.lock +++ b/frameworks/Ruby/rails/Gemfile.lock @@ -1,29 +1,29 @@ GEM remote: https://rubygems.org/ specs: - actioncable (7.2.0) - actionpack (= 7.2.0) - activesupport (= 7.2.0) + actioncable (7.2.1.1) + actionpack (= 7.2.1.1) + activesupport (= 7.2.1.1) nio4r (~> 2.0) websocket-driver (>= 0.6.1) zeitwerk (~> 2.6) - actionmailbox (7.2.0) - actionpack (= 7.2.0) - activejob (= 7.2.0) - activerecord (= 7.2.0) - activestorage (= 7.2.0) - activesupport (= 7.2.0) + actionmailbox (7.2.1.1) + actionpack (= 7.2.1.1) + activejob (= 7.2.1.1) + activerecord (= 7.2.1.1) + activestorage (= 7.2.1.1) + activesupport (= 7.2.1.1) mail (>= 2.8.0) - actionmailer (7.2.0) - actionpack (= 7.2.0) - actionview (= 7.2.0) - activejob (= 7.2.0) - activesupport (= 7.2.0) + actionmailer (7.2.1.1) + actionpack (= 7.2.1.1) + actionview (= 7.2.1.1) + activejob (= 7.2.1.1) + activesupport (= 7.2.1.1) mail (>= 2.8.0) rails-dom-testing (~> 2.2) - actionpack (7.2.0) - actionview (= 7.2.0) - activesupport (= 7.2.0) + actionpack (7.2.1.1) + actionview (= 7.2.1.1) + activesupport (= 7.2.1.1) nokogiri (>= 1.8.5) racc rack (>= 2.2.4, < 3.2) @@ -32,35 +32,35 @@ GEM rails-dom-testing (~> 2.2) rails-html-sanitizer (~> 1.6) useragent (~> 0.16) - actiontext (7.2.0) - actionpack (= 7.2.0) - activerecord (= 7.2.0) - activestorage (= 7.2.0) - activesupport (= 7.2.0) + actiontext (7.2.1.1) + actionpack (= 7.2.1.1) + activerecord (= 7.2.1.1) + activestorage (= 7.2.1.1) + activesupport (= 7.2.1.1) globalid (>= 0.6.0) nokogiri (>= 1.8.5) - actionview (7.2.0) - activesupport (= 7.2.0) + actionview (7.2.1.1) + activesupport (= 7.2.1.1) builder (~> 3.1) erubi (~> 1.11) rails-dom-testing (~> 2.2) rails-html-sanitizer (~> 1.6) - activejob (7.2.0) - activesupport (= 7.2.0) + activejob (7.2.1.1) + activesupport (= 7.2.1.1) globalid (>= 0.3.6) - activemodel (7.2.0) - activesupport (= 7.2.0) - activerecord (7.2.0) - activemodel (= 7.2.0) - activesupport (= 7.2.0) + activemodel (7.2.1.1) + activesupport (= 7.2.1.1) + activerecord (7.2.1.1) + activemodel (= 7.2.1.1) + activesupport (= 7.2.1.1) 
timeout (>= 0.4.0) - activestorage (7.2.0) - actionpack (= 7.2.0) - activejob (= 7.2.0) - activerecord (= 7.2.0) - activesupport (= 7.2.0) + activestorage (7.2.1.1) + actionpack (= 7.2.1.1) + activejob (= 7.2.1.1) + activerecord (= 7.2.1.1) + activesupport (= 7.2.1.1) marcel (~> 1.0) - activesupport (7.2.0) + activesupport (7.2.1.1) base64 bigdecimal concurrent-ruby (~> 1.0, >= 1.3.1) @@ -71,24 +71,76 @@ GEM minitest (>= 5.1) securerandom (>= 0.3) tzinfo (~> 2.0, >= 2.0.5) + agoo (2.15.13) + async (2.17.0) + console (~> 1.26) + fiber-annotation + io-event (~> 1.6, >= 1.6.5) + async-container (0.18.3) + async (~> 2.10) + async-http (0.82.1) + async (>= 2.10.2) + async-pool (~> 0.9) + io-endpoint (~> 0.14) + io-stream (~> 0.6) + metrics (~> 0.12) + protocol-http (~> 0.37) + protocol-http1 (>= 0.28.1) + protocol-http2 (~> 0.19) + traces (~> 0.10) + async-http-cache (0.4.4) + async-http (~> 0.56) + async-pool (0.10.1) + async (>= 1.25) + traces + async-service (0.12.0) + async + async-container (~> 0.16) base64 (0.2.0) bigdecimal (3.1.8) builder (3.3.0) concurrent-ruby (1.3.4) connection_pool (2.4.1) + console (1.27.0) + fiber-annotation + fiber-local (~> 1.1) + json crass (1.0.6) date (3.3.4) drb (2.2.1) erubi (1.13.0) + falcon (0.48.3) + async + async-container (~> 0.18) + async-http (~> 0.75) + async-http-cache (~> 0.4) + async-service (~> 0.10) + bundler + localhost (~> 1.1) + openssl (~> 3.0) + process-metrics (~> 0.2) + protocol-http (~> 0.31) + protocol-rack (~> 0.7) + samovar (~> 2.3) + fiber-annotation (0.2.0) + fiber-local (1.1.0) + fiber-storage + fiber-storage (1.0.0) globalid (1.2.1) activesupport (>= 6.1) - i18n (1.14.5) + i18n (1.14.6) concurrent-ruby (~> 1.0) io-console (0.7.2) - irb (1.14.0) + io-endpoint (0.14.0) + io-event (1.7.2) + io-stream (0.6.0) + irb (1.14.1) rdoc (>= 4.0.0) reline (>= 0.4.2) - logger (1.6.0) + json (2.7.2) + kgio (2.11.4) + localhost (1.3.1) + logger (1.6.1) loofah (2.22.0) crass (~> 1.0.2) nokogiri (>= 1.12.0) @@ -97,11 +149,13 @@ GEM net-imap net-pop net-smtp + mapping (1.1.1) marcel (1.0.4) + metrics (0.12.0) mini_mime (1.1.5) mini_portile2 (2.8.7) - minitest (5.24.1) - net-imap (0.4.14) + minitest (5.25.1) + net-imap (0.4.17) date net-protocol net-pop (0.1.2) @@ -118,17 +172,32 @@ GEM racc (~> 1.4) nokogiri (1.16.7-x86_64-linux) racc (~> 1.4) - oj (3.16.5) + oj (3.16.6) bigdecimal (>= 3.0) ostruct (>= 0.2) + openssl (3.2.0) ostruct (0.6.0) - pg (1.5.7) + pg (1.5.8) + process-metrics (0.3.0) + console (~> 1.8) + json (~> 2) + samovar (~> 2.1) + protocol-hpack (1.5.1) + protocol-http (0.40.0) + protocol-http1 (0.28.1) + protocol-http (~> 0.22) + protocol-http2 (0.19.3) + protocol-hpack (~> 1.4) + protocol-http (~> 0.18) + protocol-rack (0.10.1) + protocol-http (~> 0.37) + rack (>= 1.0) psych (5.1.2) stringio - puma (6.4.2) + puma (6.4.3) nio4r (~> 2.0) racc (1.8.1) - rack (3.1.7) + rack (3.1.8) rack-session (2.0.0) rack (>= 3.0.0) rack-test (2.1.0) @@ -136,20 +205,20 @@ GEM rackup (2.1.0) rack (>= 3) webrick (~> 1.8) - rails (7.2.0) - actioncable (= 7.2.0) - actionmailbox (= 7.2.0) - actionmailer (= 7.2.0) - actionpack (= 7.2.0) - actiontext (= 7.2.0) - actionview (= 7.2.0) - activejob (= 7.2.0) - activemodel (= 7.2.0) - activerecord (= 7.2.0) - activestorage (= 7.2.0) - activesupport (= 7.2.0) + rails (7.2.1.1) + actioncable (= 7.2.1.1) + actionmailbox (= 7.2.1.1) + actionmailer (= 7.2.1.1) + actionpack (= 7.2.1.1) + actiontext (= 7.2.1.1) + actionview (= 7.2.1.1) + activejob (= 7.2.1.1) + activemodel (= 7.2.1.1) + activerecord (= 7.2.1.1) + 
activestorage (= 7.2.1.1) + activesupport (= 7.2.1.1) bundler (>= 1.15.0) - railties (= 7.2.0) + railties (= 7.2.1.1) rails-dom-testing (2.2.0) activesupport (>= 5.0.0) minitest @@ -157,38 +226,46 @@ GEM rails-html-sanitizer (1.6.0) loofah (~> 2.21) nokogiri (~> 1.14) - railties (7.2.0) - actionpack (= 7.2.0) - activesupport (= 7.2.0) + railties (7.2.1.1) + actionpack (= 7.2.1.1) + activesupport (= 7.2.1.1) irb (~> 1.13) rackup (>= 1.0.0) rake (>= 12.2) thor (~> 1.0, >= 1.2.2) zeitwerk (~> 2.6) + raindrops (0.20.1) rake (13.2.1) rdoc (6.7.0) psych (>= 4.0.0) - redis (5.2.0) + redis (5.3.0) redis-client (>= 0.22.0) redis-client (0.22.2) connection_pool - reline (0.5.9) + reline (0.5.10) io-console (~> 0.5) + samovar (2.3.0) + console (~> 1.0) + mapping (~> 1.0) securerandom (0.3.1) stringio (3.1.1) - thor (1.3.1) + thor (1.3.2) timeout (0.4.1) + traces (0.13.1) trilogy (2.8.1) tzinfo (2.0.6) concurrent-ruby (~> 1.0) - tzinfo-data (1.2024.1) + tzinfo-data (1.2024.2) tzinfo (>= 1.0.0) + unicorn (6.1.0) + kgio (~> 2.6) + raindrops (~> 0.7) useragent (0.16.10) - webrick (1.8.1) + webrick (1.8.2) websocket-driver (0.7.6) websocket-extensions (>= 0.1.0) websocket-extensions (0.1.5) - zeitwerk (2.6.17) + zeitwerk (2.7.0) PLATFORMS arm64-darwin-20 @@ -196,13 +273,17 @@ PLATFORMS x86_64-linux DEPENDENCIES + agoo + falcon (~> 0.47) oj (~> 3.16) pg (~> 1.5) puma (~> 6.4) + rackup rails (~> 7.2.0) redis (~> 5.0) trilogy (~> 2.8.1) tzinfo-data + unicorn (~> 6.1) BUNDLED WITH 2.3.3 diff --git a/frameworks/Ruby/rails/benchmark_config.json b/frameworks/Ruby/rails/benchmark_config.json index 7e309e25450..f663c793a3d 100644 --- a/frameworks/Ruby/rails/benchmark_config.json +++ b/frameworks/Ruby/rails/benchmark_config.json @@ -44,6 +44,52 @@ "display_name": "rails-mysql", "notes": "", "versus": "rack-puma-mri" + }, + "falcon": { + "db_url": "/db", + "json_url": "/json", + "query_url": "/queries?queries=", + "fortune_url": "/fortunes", + "update_url": "/updates?queries=", + "plaintext_url": "/plaintext", + "cached_query_url": "/cached?queries=", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "Postgres", + "framework": "rails", + "language": "Ruby", + "orm": "Full", + "platform": "Rack", + "webserver": "Falcon", + "os": "Linux", + "database_os": "Linux", + "display_name": "rails-falcon", + "notes": "", + "versus": "rack-falcon-mri" + }, + "agoo": { + "db_url": "/db", + "json_url": "/json", + "query_url": "/queries?queries=", + "fortune_url": "/fortunes", + "update_url": "/updates?queries=", + "plaintext_url": "/plaintext", + "cached_query_url": "/cached?queries=", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "Postgres", + "framework": "rails", + "language": "Ruby", + "orm": "Full", + "platform": "Rack", + "webserver": "Agoo", + "os": "Linux", + "database_os": "Linux", + "display_name": "rails-agoo", + "notes": "", + "versus": "" } }] } diff --git a/frameworks/Ruby/rails/config/agoo.rb b/frameworks/Ruby/rails/config/agoo.rb new file mode 100644 index 00000000000..e69de29bb2d diff --git a/frameworks/Ruby/rails/config/application.rb b/frameworks/Ruby/rails/config/application.rb index 75a12720345..291c4f983ae 100644 --- a/frameworks/Ruby/rails/config/application.rb +++ b/frameworks/Ruby/rails/config/application.rb @@ -51,5 +51,7 @@ class Application < Rails::Application config.middleware.delete Rack::Sendfile config.middleware.delete Rack::TempfileReaper config.middleware.delete Rails::Rack::Logger + + 
config.active_support.isolation_level = :fiber if defined?(Falcon) end end diff --git a/frameworks/Ruby/rails/config/environments/production.rb b/frameworks/Ruby/rails/config/environments/production.rb index 45b097c9f04..c6594e5d17d 100644 --- a/frameworks/Ruby/rails/config/environments/production.rb +++ b/frameworks/Ruby/rails/config/environments/production.rb @@ -47,7 +47,7 @@ # "info" includes generic and useful information about system operation, but avoids logging too much # information to avoid inadvertent exposure of personally identifiable information (PII). If you # want to log everything, set the level to "debug". - config.log_level = ENV.fetch("RAILS_LOG_LEVEL", "info") + config.log_level = :fatal # Use a different cache store in production. config.cache_store = :redis_cache_store, { diff --git a/frameworks/Ruby/rails/config/falcon_preload.rb b/frameworks/Ruby/rails/config/falcon_preload.rb new file mode 100644 index 00000000000..647c7b948d9 --- /dev/null +++ b/frameworks/Ruby/rails/config/falcon_preload.rb @@ -0,0 +1,3 @@ +# required by Falcon: +# https://github.com/socketry/falcon/blob/19fe8ece7cc49aa03222afe2c940682aeb69fe37/guides/rails-integration/readme.md?plain=1#L38 +require_relative "../config/environment" diff --git a/frameworks/Ruby/rails/falcon.rb b/frameworks/Ruby/rails/falcon.rb new file mode 100644 index 00000000000..147d6b3b66f --- /dev/null +++ b/frameworks/Ruby/rails/falcon.rb @@ -0,0 +1,12 @@ +#!/usr/bin/env -S falcon host +# frozen_string_literal: true + +load :rack + +hostname = File.basename(__dir__) +port = ENV["PORT"] || 8080 + +rack hostname do + append preload "config/falcon_preload.rb" + endpoint Async::HTTP::Endpoint.parse("http://0.0.0.0:#{port}") +end diff --git a/frameworks/Ruby/rails/rails-agoo.dockerfile b/frameworks/Ruby/rails/rails-agoo.dockerfile new file mode 100644 index 00000000000..7160fe32f84 --- /dev/null +++ b/frameworks/Ruby/rails/rails-agoo.dockerfile @@ -0,0 +1,27 @@ +FROM ruby:3.4-rc + +RUN apt-get update -yqq && apt-get install -yqq --no-install-recommends redis-server + +EXPOSE 8080 +WORKDIR /rails + +# ENV RUBY_YJIT_ENABLE=1 YJIT is enabled in config/initializers/enable_yjit.rb + +# Use Jemalloc +RUN apt-get update && \ + apt-get install -y --no-install-recommends libjemalloc2 +ENV LD_PRELOAD=libjemalloc.so.2 + +COPY ./Gemfile* /rails/ + +ENV BUNDLE_FORCE_RUBY_PLATFORM=true +ENV BUNDLE_WITHOUT=trilogy +RUN bundle install --jobs=8 + +COPY . /rails/ + +ENV RAILS_ENV=production_postgresql +ENV PORT=8080 +ENV REDIS_URL=redis://localhost:6379/0 +CMD service redis-server start +CMD RACK_ENV=production bundle exec rackup -r agoo -s agoo -p 8080 -q -O workers=$(ruby config/auto_tune.rb | grep -Eo '[0-9]+' | head -n 1) diff --git a/frameworks/Ruby/rails/rails-falcon.dockerfile b/frameworks/Ruby/rails/rails-falcon.dockerfile new file mode 100644 index 00000000000..3244b73aa02 --- /dev/null +++ b/frameworks/Ruby/rails/rails-falcon.dockerfile @@ -0,0 +1,26 @@ +FROM ruby:3.4-rc + +RUN apt-get update -yqq && apt-get install -yqq --no-install-recommends redis-server + +EXPOSE 8080 +WORKDIR /rails + +# ENV RUBY_YJIT_ENABLE=1 YJIT is enabled in config/initializers/enable_yjit.rb + +# Use Jemalloc +RUN apt-get update && \ + apt-get install -y --no-install-recommends libjemalloc2 +ENV LD_PRELOAD=libjemalloc.so.2 + +COPY ./Gemfile* /rails/ + +ENV BUNDLE_FORCE_RUBY_PLATFORM=true +ENV BUNDLE_WITHOUT=mysql +RUN bundle install --jobs=8 + +COPY . 
/rails/ + +ENV RAILS_ENV=production_postgresql +ENV PORT=8080 +ENV REDIS_URL=redis://localhost:6379/0 +CMD bundle exec falcon host diff --git a/frameworks/Ruby/rails/rails.dockerfile b/frameworks/Ruby/rails/rails.dockerfile index 392920be9c4..8eb53c4dbd2 100644 --- a/frameworks/Ruby/rails/rails.dockerfile +++ b/frameworks/Ruby/rails/rails.dockerfile @@ -15,7 +15,7 @@ ENV LD_PRELOAD=libjemalloc.so.2 COPY ./Gemfile* /rails/ ENV BUNDLE_FORCE_RUBY_PLATFORM=true -ENV BUNDLE_WITHOUT=trilogy +ENV BUNDLE_WITHOUT=mysql RUN bundle install --jobs=8 COPY . /rails/ diff --git a/frameworks/Ruby/rails/run-with-redis.sh b/frameworks/Ruby/rails/run-with-redis.sh index 9ce9b243b74..036224f0df5 100755 --- a/frameworks/Ruby/rails/run-with-redis.sh +++ b/frameworks/Ruby/rails/run-with-redis.sh @@ -1,3 +1,3 @@ #!/bin/bash service redis-server start -rails server +bundle exec falcon host diff --git a/frameworks/Ruby/roda-sequel/benchmark_config.json b/frameworks/Ruby/roda-sequel/benchmark_config.json index 27db92a9b59..2ca8400a156 100644 --- a/frameworks/Ruby/roda-sequel/benchmark_config.json +++ b/frameworks/Ruby/roda-sequel/benchmark_config.json @@ -44,28 +44,6 @@ "versus": "rack-sequel-postgres-puma-mri", "notes": "" }, - "passenger-mri": { - "json_url": "/json", - "db_url": "/db", - "query_url": "/queries?queries=", - "fortune_url": "/fortunes", - "update_url": "/updates?queries=", - "plaintext_url": "/plaintext", - "port": 8080, - "approach": "Realistic", - "classification": "Micro", - "database": "MySQL", - "framework": "roda-sequel", - "language": "Ruby", - "orm": "Full", - "platform": "Rack", - "webserver": "Passenger", - "os": "Linux", - "database_os": "Linux", - "display_name": "roda-sequel-passenger-mri", - "versus": "rack-sequel-passenger-mri", - "notes": "" - }, "postgres-passenger-mri": { "db_url": "/db", "query_url": "/queries?queries=", @@ -86,28 +64,6 @@ "versus": "rack-sequel-postgres-passenger-mri", "notes": "" }, - "unicorn-mri": { - "json_url": "/json", - "db_url": "/db", - "query_url": "/queries?queries=", - "fortune_url": "/fortunes", - "update_url": "/updates?queries=", - "plaintext_url": "/plaintext", - "port": 8080, - "approach": "Realistic", - "classification": "Micro", - "database": "MySQL", - "framework": "roda-sequel", - "language": "Ruby", - "orm": "Full", - "platform": "Rack", - "webserver": "Unicorn", - "os": "Linux", - "database_os": "Linux", - "display_name": "roda-sequel-unicorn-mri", - "versus": "rack-sequel-unicorn-mri", - "notes": "" - }, "postgres-unicorn-mri": { "db_url": "/db", "query_url": "/queries?queries=", diff --git a/frameworks/Ruby/roda-sequel/roda-sequel-passenger-mri.dockerfile b/frameworks/Ruby/roda-sequel/roda-sequel-passenger-mri.dockerfile deleted file mode 100644 index e25f21e20d9..00000000000 --- a/frameworks/Ruby/roda-sequel/roda-sequel-passenger-mri.dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -FROM ruby:3.3 - -ADD ./ /roda-sequel -WORKDIR /roda-sequel - -ENV RUBY_YJIT_ENABLE=1 - -# Use Jemalloc -RUN apt-get update && \ - apt-get install -y --no-install-recommends libjemalloc2 -ENV LD_PRELOAD=libjemalloc.so.2 - -ENV BUNDLE_FORCE_RUBY_PLATFORM=true -RUN bundle install --jobs=8 - -# TODO: https://github.com/phusion/passenger/issues/1916 -ENV _PASSENGER_FORCE_HTTP_SESSION=true -ENV DBTYPE=mysql - -RUN ruby -r /roda-sequel/config/auto_tune -e 'puts auto_tune.first' > instances - -EXPOSE 8080 - -CMD bundle exec passenger start --log-level 1 \ - --engine builtin --disable-turbocaching --disable-security-update-check \ - --spawn-method direct --max-pool-size 
$(cat instances) --min-instances $(cat instances) --max-request-queue-size 1024 \ - --address 0.0.0.0 --port 8080 --environment production diff --git a/frameworks/Ruby/roda-sequel/roda-sequel-unicorn-mri.dockerfile b/frameworks/Ruby/roda-sequel/roda-sequel-unicorn-mri.dockerfile deleted file mode 100644 index d0cd81077bb..00000000000 --- a/frameworks/Ruby/roda-sequel/roda-sequel-unicorn-mri.dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -FROM ruby:3.4-rc - -ADD ./ /roda-sequel -WORKDIR /roda-sequel - -ENV RUBY_YJIT_ENABLE=1 - -# Use Jemalloc -RUN apt-get update && \ - apt-get install -y --no-install-recommends libjemalloc2 -ENV LD_PRELOAD=libjemalloc.so.2 - -ENV BUNDLE_FORCE_RUBY_PLATFORM=true -RUN bundle install --jobs=8 - -ENV DBTYPE=mysql - -EXPOSE 8080 - -CMD bundle exec unicorn -c config/mri_unicorn.rb -o 0.0.0.0 -p 8080 -E production diff --git a/frameworks/Ruby/sinatra-sequel/Gemfile b/frameworks/Ruby/sinatra-sequel/Gemfile index f4ac3eb1fe2..4aace641277 100644 --- a/frameworks/Ruby/sinatra-sequel/Gemfile +++ b/frameworks/Ruby/sinatra-sequel/Gemfile @@ -4,7 +4,7 @@ gem 'oj' gem 'passenger', '~> 6.0', :platforms=>[:ruby, :mswin], :require=>false gem 'puma', '~> 6.4', :require=>false gem 'sequel', '~> 5.0' -gem 'sinatra', '~> 3.0', :require=>'sinatra/base' +gem 'sinatra', '~> 4.0', :require=>'sinatra/base' gem 'unicorn', '~> 6.1', :platforms=>[:ruby, :mswin], :require=>false group :mysql do diff --git a/frameworks/Ruby/sinatra-sequel/README.md b/frameworks/Ruby/sinatra-sequel/README.md index 147e20cb2ed..9f382526d82 100644 --- a/frameworks/Ruby/sinatra-sequel/README.md +++ b/frameworks/Ruby/sinatra-sequel/README.md @@ -15,9 +15,9 @@ The tests will be run with: * [Ruby 3.3](http://www.ruby-lang.org) * [JRuby 9.4](http://jruby.org) * [Puma 6](http://puma.io) -* [Passenger 5](https://www.phusionpassenger.com) +* [Passenger 6](https://www.phusionpassenger.com) * [Unicorn 5](https://bogomips.org/unicorn/) -* [Sinatra 3](http://www.sinatrarb.com) +* [Sinatra 4](http://www.sinatrarb.com) * [Sequel 5](http://sequel.jeremyevans.net) * [Slim 3](http://slim-lang.com) * [MySQL 5.5](https://www.mysql.com) diff --git a/frameworks/Ruby/sinatra-sequel/benchmark_config.json b/frameworks/Ruby/sinatra-sequel/benchmark_config.json index c16da1e0e26..9a267813727 100644 --- a/frameworks/Ruby/sinatra-sequel/benchmark_config.json +++ b/frameworks/Ruby/sinatra-sequel/benchmark_config.json @@ -42,26 +42,6 @@ "versus": "rack-sequel-postgres-puma-mri", "notes": "" }, - "passenger-mri": { - "db_url": "/db", - "query_url": "/queries?queries=", - "fortune_url": "/fortunes", - "update_url": "/updates?queries=", - "port": 8080, - "approach": "Realistic", - "classification": "Micro", - "database": "MySQL", - "framework": "sinatra", - "language": "Ruby", - "orm": "Full", - "platform": "Rack", - "webserver": "Passenger", - "os": "Linux", - "database_os": "Linux", - "display_name": "sinatra-sequel-passenger-mri", - "versus": "rack-sequel-passenger-mri", - "notes": "" - }, "postgres-passenger-mri": { "db_url": "/db", "query_url": "/queries?queries=", @@ -82,26 +62,6 @@ "versus": "rack-sequel-postgres-passenger-mri", "notes": "" }, - "unicorn-mri": { - "db_url": "/db", - "query_url": "/queries?queries=", - "fortune_url": "/fortunes", - "update_url": "/updates?queries=", - "port": 8080, - "approach": "Realistic", - "classification": "Micro", - "database": "MySQL", - "framework": "sinatra", - "language": "Ruby", - "orm": "Full", - "platform": "Rack", - "webserver": "Unicorn", - "os": "Linux", - "database_os": "Linux", - 
"display_name": "sinatra-sequel-unicorn-mri", - "versus": "rack-sequel-unicorn-mri", - "notes": "" - }, "postgres-unicorn-mri": { "db_url": "/db", "query_url": "/queries?queries=", diff --git a/frameworks/Ruby/sinatra-sequel/sinatra-sequel-passenger-mri.dockerfile b/frameworks/Ruby/sinatra-sequel/sinatra-sequel-passenger-mri.dockerfile deleted file mode 100644 index 39bd11758da..00000000000 --- a/frameworks/Ruby/sinatra-sequel/sinatra-sequel-passenger-mri.dockerfile +++ /dev/null @@ -1,26 +0,0 @@ -FROM ruby:3.3 - -ENV RUBY_YJIT_ENABLE=1 - -# Use Jemalloc -RUN apt-get update && \ - apt-get install -y --no-install-recommends libjemalloc2 -ENV LD_PRELOAD=libjemalloc.so.2 - -ADD ./ /sinatra-sequel -WORKDIR /sinatra-sequel - -RUN bundle install --jobs=4 --gemfile=/sinatra-sequel/Gemfile - -# TODO: https://github.com/phusion/passenger/issues/1916 -ENV _PASSENGER_FORCE_HTTP_SESSION=true -ENV DBTYPE=mysql - -RUN ruby -r /sinatra-sequel/config/auto_tune -e 'puts auto_tune.first' > instances - -EXPOSE 8080 - -CMD bundle exec passenger start --log-level 1 \ - --engine builtin --disable-turbocaching --disable-security-update-check \ - --spawn-method direct --max-pool-size $(cat instances) --min-instances $(cat instances) --max-request-queue-size 1024 \ - --address 0.0.0.0 --port 8080 --environment production diff --git a/frameworks/Ruby/sinatra-sequel/sinatra-sequel-unicorn-mri.dockerfile b/frameworks/Ruby/sinatra-sequel/sinatra-sequel-unicorn-mri.dockerfile deleted file mode 100644 index c3e2bc7de13..00000000000 --- a/frameworks/Ruby/sinatra-sequel/sinatra-sequel-unicorn-mri.dockerfile +++ /dev/null @@ -1,19 +0,0 @@ -FROM ruby:3.4-rc - -ENV RUBY_YJIT_ENABLE=1 - -# Use Jemalloc -RUN apt-get update && \ - apt-get install -y --no-install-recommends libjemalloc2 -ENV LD_PRELOAD=libjemalloc.so.2 - -ADD ./ /sinatra-sequel -WORKDIR /sinatra-sequel - -RUN bundle install --jobs=4 --gemfile=/sinatra-sequel/Gemfile - -ENV DBTYPE=mysql - -EXPOSE 8080 - -CMD bundle exec unicorn -c config/mri_unicorn.rb -o 0.0.0.0 -p 8080 -E production diff --git a/frameworks/Ruby/sinatra/Gemfile b/frameworks/Ruby/sinatra/Gemfile index ff34570a2cb..95fe5743580 100644 --- a/frameworks/Ruby/sinatra/Gemfile +++ b/frameworks/Ruby/sinatra/Gemfile @@ -1,10 +1,10 @@ source 'https://rubygems.org' -gem 'activerecord', '~> 7.1', require: 'active_record' +gem 'activerecord', '~> 7.2', require: 'active_record' gem 'oj' gem 'passenger', '~> 6.0', platforms: [:ruby, :mswin], require: false gem 'puma', '~> 6.4', require: false -gem 'sinatra', '~> 3.0', require: 'sinatra/base' +gem 'sinatra', '~> 4.0', require: 'sinatra/base' gem 'unicorn', '~> 6.1', platforms: [:ruby, :mswin], require: false group :mysql do diff --git a/frameworks/Ruby/sinatra/README.md b/frameworks/Ruby/sinatra/README.md index ff118f6ff21..e0f14b9a171 100644 --- a/frameworks/Ruby/sinatra/README.md +++ b/frameworks/Ruby/sinatra/README.md @@ -16,7 +16,7 @@ The tests will be run with: * [Puma 6](http://puma.io) * [Passenger 6](https://www.phusionpassenger.com) * [Unicorn 6](https://bogomips.org/unicorn/) -* [Sinatra 3](http://www.sinatrarb.com) +* [Sinatra 4](http://www.sinatrarb.com) * [ActiveRecord 7](https://github.com/rails/rails/tree/master/activerecord) * [MySQL 5.5](https://www.mysql.com) * [Postgres 9.3](https://www.postgresql.org) diff --git a/frameworks/Ruby/sinatra/benchmark_config.json b/frameworks/Ruby/sinatra/benchmark_config.json index 166a2e9e2bf..2c0be64ed47 100644 --- a/frameworks/Ruby/sinatra/benchmark_config.json +++ 
b/frameworks/Ruby/sinatra/benchmark_config.json @@ -44,28 +44,6 @@ "versus": "rack-postgres-puma-mri", "notes": "" }, - "passenger-mri": { - "json_url": "/json", - "db_url": "/db", - "query_url": "/queries?queries=", - "fortune_url": "/fortunes", - "update_url": "/updates?queries=", - "plaintext_url": "/plaintext", - "port": 8080, - "approach": "Realistic", - "classification": "Micro", - "database": "MySQL", - "framework": "sinatra", - "language": "Ruby", - "orm": "Full", - "platform": "Rack", - "webserver": "Passenger", - "os": "Linux", - "database_os": "Linux", - "display_name": "sinatra-passenger-mri", - "versus": "rack-passenger-mri", - "notes": "" - }, "postgres-passenger-mri": { "db_url": "/db", "query_url": "/queries?queries=", @@ -86,28 +64,6 @@ "versus": "rack-postgres-passenger-mri", "notes": "" }, - "unicorn-mri": { - "json_url": "/json", - "db_url": "/db", - "query_url": "/queries?queries=", - "fortune_url": "/fortunes", - "update_url": "/updates?queries=", - "plaintext_url": "/plaintext", - "port": 8080, - "approach": "Realistic", - "classification": "Micro", - "database": "MySQL", - "framework": "sinatra", - "language": "Ruby", - "orm": "Full", - "platform": "Rack", - "webserver": "Unicorn", - "os": "Linux", - "database_os": "Linux", - "display_name": "sinatra-unicorn-mri", - "versus": "rack-unicorn-mri", - "notes": "" - }, "postgres-unicorn-mri": { "db_url": "/db", "query_url": "/queries?queries=", diff --git a/frameworks/Ruby/sinatra/hello_world.rb b/frameworks/Ruby/sinatra/hello_world.rb index 328fa402921..dad43a41c20 100644 --- a/frameworks/Ruby/sinatra/hello_world.rb +++ b/frameworks/Ruby/sinatra/hello_world.rb @@ -39,10 +39,6 @@ def rand1 response['Server'] = SERVER_STRING end if SERVER_STRING - after do - ActiveRecord::Base.connection_handler.clear_active_connections! - end - # Test type 1: JSON serialization get '/json' do json message: 'Hello, World!' 
@@ -51,7 +47,7 @@ def rand1 # Test type 2: Single database query get '/db' do world = - ActiveRecord::Base.connection_pool.with_connection do + ActiveRecord::Base.with_connection do World.find(rand1).attributes end @@ -61,7 +57,7 @@ def rand1 # Test type 3: Multiple database queries get '/queries' do worlds = - ActiveRecord::Base.connection_pool.with_connection do + ActiveRecord::Base.with_connection do ALL_IDS.sample(bounded_queries).map do |id| World.find(id).attributes end @@ -72,7 +68,7 @@ def rand1 # Test type 4: Fortunes get '/fortunes' do - @fortunes = ActiveRecord::Base.connection_pool.with_connection do + @fortunes = ActiveRecord::Base.with_connection do Fortune.all end.to_a @fortunes << Fortune.new( @@ -86,17 +82,18 @@ def rand1 # Test type 5: Database updates get '/updates' do - worlds = - ALL_IDS.sample(bounded_queries).map do |id| - world = ActiveRecord::Base.connection_pool.with_connection do - World.find(id) - end + worlds = nil + ActiveRecord::Base.with_connection do + worlds = ALL_IDS.sample(bounded_queries).map do |id| + world = World.find(id) new_value = rand1 new_value = rand1 until new_value != world.randomNumber { id: id, randomNumber: new_value } end - ActiveRecord::Base.connection_pool.with_connection do - World.upsert_all(worlds.sort_by!{_1[:id]}) + end + worlds.sort_by!{_1[:id]} + ActiveRecord::Base.with_connection do + World.upsert_all(worlds) end json worlds end diff --git a/frameworks/Ruby/sinatra/sinatra-passenger-mri.dockerfile b/frameworks/Ruby/sinatra/sinatra-passenger-mri.dockerfile deleted file mode 100644 index eb07d7b5ba7..00000000000 --- a/frameworks/Ruby/sinatra/sinatra-passenger-mri.dockerfile +++ /dev/null @@ -1,26 +0,0 @@ -FROM ruby:3.3 - -ENV RUBY_YJIT_ENABLE=1 - -# Use Jemalloc -RUN apt-get update && \ - apt-get install -y --no-install-recommends libjemalloc2 -ENV LD_PRELOAD=libjemalloc.so.2 - -ADD ./ /sinatra -WORKDIR /sinatra - -RUN bundle install --jobs=4 --gemfile=/sinatra/Gemfile - -# TODO: https://github.com/phusion/passenger/issues/1916 -ENV _PASSENGER_FORCE_HTTP_SESSION=true -ENV DBTYPE=mysql - -RUN ruby -r /sinatra/config/auto_tune -e 'puts auto_tune.first' > instances - -EXPOSE 8080 - -CMD bundle exec passenger start --log-level 1 \ - --engine builtin --disable-turbocaching --disable-security-update-check \ - --spawn-method direct --max-pool-size $(cat instances) --min-instances $(cat instances) --max-request-queue-size 1024 \ - --address 0.0.0.0 --port 8080 --environment production diff --git a/frameworks/Ruby/sinatra/sinatra-unicorn-mri.dockerfile b/frameworks/Ruby/sinatra/sinatra-unicorn-mri.dockerfile deleted file mode 100644 index f0dede838aa..00000000000 --- a/frameworks/Ruby/sinatra/sinatra-unicorn-mri.dockerfile +++ /dev/null @@ -1,19 +0,0 @@ -FROM ruby:3.4-rc - -ENV RUBY_YJIT_ENABLE=1 - -# Use Jemalloc -RUN apt-get update && \ - apt-get install -y --no-install-recommends libjemalloc2 -ENV LD_PRELOAD=libjemalloc.so.2 - -ADD ./ /sinatra -WORKDIR /sinatra - -RUN bundle install --jobs=4 --gemfile=/sinatra/Gemfile - -ENV DBTYPE=mysql - -EXPOSE 8080 - -CMD bundle exec unicorn -c config/mri_unicorn.rb -o 0.0.0.0 -p 8080 -E production diff --git a/frameworks/Rust/axum/Cargo.lock b/frameworks/Rust/axum/Cargo.lock index 005a10fb7ca..8f756182439 100644 --- a/frameworks/Rust/axum/Cargo.lock +++ b/frameworks/Rust/axum/Cargo.lock @@ -121,9 +121,9 @@ checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = "axum" -version = "0.7.5" +version = "0.7.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf" +checksum = "8f43644eed690f5374f1af436ecd6aea01cd201f6fbdf0178adaf6907afb2cec" dependencies = [ "async-trait", "axum-core", @@ -147,16 +147,16 @@ dependencies = [ "serde_urlencoded", "sync_wrapper 1.0.1", "tokio", - "tower 0.4.13", + "tower 0.5.1", "tower-layer", "tower-service", ] [[package]] name = "axum-core" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a15c63fd72d41492dc4f497196f5da1fb04fb7529e631d73630d1b491e47a2e3" +checksum = "5e6b8ba012a258d63c9adfa28b9ddcf66149da6f986c5b5452e629d5ee64bf00" dependencies = [ "async-trait", "bytes", @@ -167,7 +167,7 @@ dependencies = [ "mime", "pin-project-lite", "rustversion", - "sync_wrapper 0.1.2", + "sync_wrapper 1.0.1", "tower-layer", "tower-service", ] @@ -201,7 +201,7 @@ dependencies = [ "tokio-pg-mapper", "tokio-pg-mapper-derive", "tokio-postgres", - "tower 0.5.0", + "tower 0.5.1", "tower-http", "yarte", ] @@ -1056,6 +1056,8 @@ dependencies = [ "hyper", "pin-project-lite", "tokio", + "tower 0.4.13", + "tower-service", ] [[package]] @@ -2779,14 +2781,15 @@ dependencies = [ [[package]] name = "tower" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36b837f86b25d7c0d7988f00a54e74739be6477f2aac6201b8f429a7569991b7" +checksum = "2873938d487c3cfb9aed7546dc9f2711d867c9f90c46b889989a2cb84eba6b4f" dependencies = [ "futures-core", "futures-util", "pin-project-lite", "sync_wrapper 0.1.2", + "tokio", "tower-layer", "tower-service", ] diff --git a/frameworks/Rust/axum/Cargo.toml b/frameworks/Rust/axum/Cargo.toml index 49b594972d1..a47514726c7 100644 --- a/frameworks/Rust/axum/Cargo.toml +++ b/frameworks/Rust/axum/Cargo.toml @@ -39,7 +39,7 @@ simd-json = [ ] [dependencies] -axum = { version = "0.7.5", default-features = false, features = [ +axum = { version = "0.7.6", default-features = false, features = [ "json", "query", "http1", diff --git a/frameworks/Rust/axum/src/main.rs b/frameworks/Rust/axum/src/main.rs index b29da958d20..70858e27ddc 100644 --- a/frameworks/Rust/axum/src/main.rs +++ b/frameworks/Rust/axum/src/main.rs @@ -11,11 +11,13 @@ use axum::Json; use common::simd_json::Json; /// Return a plaintext static string. -pub async fn plaintext() -> impl IntoResponse { - (StatusCode::OK, "Hello, World!") +#[inline(always)] +pub async fn plaintext() -> &'static str { + "Hello, World!" } /// Return a JSON message. 
+#[inline(always)] pub async fn json() -> impl IntoResponse { let message = Message { message: "Hello, World!", diff --git a/frameworks/Rust/axum/src/main_mongo.rs b/frameworks/Rust/axum/src/main_mongo.rs index 8f7ac7961c3..6d301189b9a 100644 --- a/frameworks/Rust/axum/src/main_mongo.rs +++ b/frameworks/Rust/axum/src/main_mongo.rs @@ -1,5 +1,6 @@ mod common; mod mongo; +mod server; //mod mongo_raw; use std::time::Duration; @@ -24,8 +25,6 @@ use mongodb::{ use rand::{rngs::SmallRng, thread_rng, Rng, SeedableRng}; use yarte::Template; -mod server; - use common::{ get_env, utils::{parse_params, Params, Utf8Html}, @@ -117,25 +116,10 @@ async fn fortunes(DatabaseConnection(db): DatabaseConnection) -> impl IntoRespon fn main() { dotenv().ok(); - - let rt = tokio::runtime::Builder::new_current_thread() - .enable_all() - .build() - .unwrap(); - - for _ in 1..num_cpus::get() { - std::thread::spawn(move || { - let rt = tokio::runtime::Builder::new_current_thread() - .enable_all() - .build() - .unwrap(); - rt.block_on(serve()); - }); - } - rt.block_on(serve()); + server::start_tokio(serve_app) } -async fn serve() { +async fn serve_app() { let database_url: String = get_env("MONGODB_URL"); let max_pool_size: u32 = get_env("MONGODB_MAX_POOL_SIZE"); let min_pool_size: u32 = get_env("MONGODB_MIN_POOL_SIZE"); diff --git a/frameworks/Rust/axum/src/main_mongo_raw.rs b/frameworks/Rust/axum/src/main_mongo_raw.rs index fdf09c179a3..0d2735ad3bb 100644 --- a/frameworks/Rust/axum/src/main_mongo_raw.rs +++ b/frameworks/Rust/axum/src/main_mongo_raw.rs @@ -86,25 +86,10 @@ async fn updates( fn main() { dotenv().ok(); - - let rt = tokio::runtime::Builder::new_current_thread() - .enable_all() - .build() - .unwrap(); - - for _ in 1..num_cpus::get() { - std::thread::spawn(move || { - let rt = tokio::runtime::Builder::new_current_thread() - .enable_all() - .build() - .unwrap(); - rt.block_on(serve()); - }); - } - rt.block_on(serve()); + server::start_tokio(serve_app) } -async fn serve() { +async fn serve_app() { let database_url: String = get_env("MONGODB_URL"); let max_pool_size: u32 = get_env("MONGODB_MAX_POOL_SIZE"); let min_pool_size: u32 = get_env("MONGODB_MIN_POOL_SIZE"); diff --git a/frameworks/Rust/axum/src/main_pg.rs b/frameworks/Rust/axum/src/main_pg.rs index 20acb2a1769..be691f9720c 100644 --- a/frameworks/Rust/axum/src/main_pg.rs +++ b/frameworks/Rust/axum/src/main_pg.rs @@ -78,10 +78,12 @@ async fn updates( (StatusCode::OK, Json(worlds)) } -#[tokio::main] -async fn main() { +fn main() { dotenv().ok(); + server::start_tokio(serve_app) +} +async fn serve_app() { let database_url: String = get_env("POSTGRES_URL"); // Create shared database connection diff --git a/frameworks/Rust/axum/src/main_sqlx.rs b/frameworks/Rust/axum/src/main_sqlx.rs index d10b1ea99ef..163372b83d8 100644 --- a/frameworks/Rust/axum/src/main_sqlx.rs +++ b/frameworks/Rust/axum/src/main_sqlx.rs @@ -1,8 +1,6 @@ mod common; mod sqlx; -use std::sync::Arc; - use ::sqlx::PgPool; use axum::{ extract::{Query, State}, @@ -98,7 +96,7 @@ async fn cache( ) -> impl IntoResponse { let count = parse_params(params); let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap(); - let mut worlds: Vec>> = Vec::with_capacity(count); + let mut worlds: Vec> = Vec::with_capacity(count); for id in random_ids(&mut rng, count) { worlds.push(cache.get(&id).await); @@ -115,14 +113,15 @@ async fn preload_cache(AppState { db, cache }: &AppState) { .expect("error loading worlds"); for world in worlds { - cache.insert(world.id, Arc::new(world)).await; + 
cache.insert(world.id, world).await; } } +/// Application state #[derive(Clone)] struct AppState { db: PgPool, - cache: Cache>, + cache: Cache, } #[tokio::main] @@ -135,7 +134,10 @@ async fn main() { let state = AppState { db: create_pool(database_url, max_pool_size, min_pool_size).await, - cache: Cache::new(10000), + cache: Cache::builder() + .initial_capacity(10000) + .max_capacity(10000) + .build() }; // Prime the cache with CachedWorld objects diff --git a/frameworks/Rust/axum/src/server.rs b/frameworks/Rust/axum/src/server.rs index 5e03a6aca88..6d861277c10 100644 --- a/frameworks/Rust/axum/src/server.rs +++ b/frameworks/Rust/axum/src/server.rs @@ -1,4 +1,5 @@ use std::{ + future::Future, io, net::{Ipv4Addr, SocketAddr, TcpListener}, }; @@ -104,3 +105,26 @@ pub async fn serve_hyper(app: Router<()>, port: Option) { }); } } + +/// Start a single-threaded tokio runtime on multiple threads. +#[allow(dead_code)] +pub fn start_tokio(f: fn() -> Fut) +where + Fut: Future + 'static, +{ + let rt = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + + for _ in 1..num_cpus::get() { + std::thread::spawn(move || { + let rt = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + rt.block_on(f()); + }); + } + rt.block_on(f()); +} diff --git a/frameworks/Rust/viz/Cargo.toml b/frameworks/Rust/viz/Cargo.toml index f28595febb5..b2e90cb4836 100644 --- a/frameworks/Rust/viz/Cargo.toml +++ b/frameworks/Rust/viz/Cargo.toml @@ -24,23 +24,28 @@ path = "src/main_diesel.rs" required-features = ["diesel", "diesel-async", "sailfish"] [dependencies] -viz = "0.8" -hyper = "1.0" +viz = "0.9" +hyper = "1.4" hyper-util = "0.1" atoi = "2.0" serde = { version = "1.0", features = ["derive"] } -nanorand = "0.7" rand = { version = "0.8", features = ["small_rng"] } thiserror = "1.0" futures-util = "0.3" +[target.'cfg(not(unix))'.dependencies] +nanorand = { version = "0.7" } + +[target.'cfg(unix)'.dependencies] +nanorand = { version = "0.7", features = ["getrandom"] } + tokio = { version = "1", features = ["full"] } tokio-postgres = { version = "0.7", optional = true } -sqlx = { version = "0.7", features = [ +sqlx = { version = "0.8", features = [ "postgres", "macros", "runtime-tokio", - "tls-native-tls" + "tls-native-tls", ], optional = true } diesel = { version = "2.2", default-features = false, features = [ "i-implement-a-third-party-backend-and-opt-into-breaking-changes", @@ -53,7 +58,7 @@ diesel-async = { git = "https://github.com/weiznich/diesel_async.git", rev = "74 yarte = { version = "0.15", features = ["bytes-buf", "json"], optional = true } markup = { version = "0.15", optional = true } v_htmlescape = { version = "0.15", optional = true } -sailfish = { version = "0.8", optional = true } +sailfish = { version = "0.9", optional = true } [profile.release] lto = true diff --git a/frameworks/Rust/viz/src/db_sqlx.rs b/frameworks/Rust/viz/src/db_sqlx.rs index af8322e9d1d..40c39290265 100644 --- a/frameworks/Rust/viz/src/db_sqlx.rs +++ b/frameworks/Rust/viz/src/db_sqlx.rs @@ -60,7 +60,7 @@ pub async fn get_world( id: i32, ) -> Result { let mut args = PgArguments::default(); - args.add(id); + let _ = args.add(id); let world = sqlx::query_as_with("SELECT id, randomnumber FROM World WHERE id = $1", args) @@ -86,8 +86,8 @@ pub async fn update_worlds( for w in &worlds { let mut args = PgArguments::default(); - args.add(w.randomnumber); - args.add(w.id); + let _ = args.add(w.randomnumber); + let _ = args.add(w.id); sqlx::query_with("UPDATE World SET 
randomNumber = $1 WHERE id = $2", args) .execute(&mut *conn) diff --git a/frameworks/Rust/viz/templates/fortune.stpl index eb1abe6a4fa..874b48cc6f5 100644 --- a/frameworks/Rust/viz/templates/fortune.stpl +++ b/frameworks/Rust/viz/templates/fortune.stpl @@ -4,7 +4,7 @@
 <tr><th>id</th><th>message</th></tr>
-<% for item in items { %>
+<% for item in self.items { %>
 <tr><td><%= item.id %></td><td><%= &*item.message %></td></tr>
 <% } %>
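
Note on the hunk above: the only change is the loop header, which now reads the data as `self.items` under sailfish 0.9. For orientation, a minimal sketch of the Rust side such a template is typically paired with, using sailfish's `TemplateOnce` derive; the struct and field types below are assumptions for illustration (not code from the viz crate), and the derive expects the template file shown above to exist at templates/fortune.stpl at build time.

use sailfish::TemplateOnce;

// Hypothetical row type; the real viz types may differ.
struct Fortune {
    id: i32,
    message: String,
}

// The `items` field is what the template body reads as `self.items`.
#[derive(TemplateOnce)]
#[template(path = "fortune.stpl")]
struct FortunesTemplate {
    items: Vec<Fortune>,
}

// Rendering consumes the struct and returns the expanded HTML.
fn render_fortunes(items: Vec<Fortune>) -> Result<String, sailfish::RenderError> {
    FortunesTemplate { items }.render_once()
}
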
- \ No newline at end of file + diff --git a/frameworks/Rust/xitca-web/.cargo/config.toml b/frameworks/Rust/xitca-web/.cargo/config.toml index 7d3c09f8a1a..df736010a76 100644 --- a/frameworks/Rust/xitca-web/.cargo/config.toml +++ b/frameworks/Rust/xitca-web/.cargo/config.toml @@ -1,5 +1,5 @@ [build] -rustflags = ["-C", "target-cpu=native"] +rustflags = ["-C", "target-cpu=native", "--cfg", "tokio_unstable"] incremental = false [target.wasm32-wasip1-threads] diff --git a/frameworks/Rust/xitca-web/Cargo.lock b/frameworks/Rust/xitca-web/Cargo.lock index 14daebadc05..070a2e67550 100644 --- a/frameworks/Rust/xitca-web/Cargo.lock +++ b/frameworks/Rust/xitca-web/Cargo.lock @@ -1,12 +1,12 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "addr2line" -version = "0.24.1" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5fb1d8e4442bd405fdfd1dacb42792696b0cf9cb15882e5d097b742a676d375" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] @@ -19,9 +19,9 @@ checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "async-trait" -version = "0.1.82" +version = "0.1.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a27b8a3a6e1a44fa4c8baf1f653e4172e81486d4941f2237e20dc2d0cf4ddff1" +checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", @@ -39,59 +39,9 @@ dependencies = [ [[package]] name = "autocfg" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" - -[[package]] -name = "axum" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf" -dependencies = [ - "async-trait", - "axum-core", - "bytes", - "futures-util", - "http", - "http-body", - "http-body-util", - "itoa", - "matchit", - "memchr", - "mime", - "percent-encoding", - "pin-project-lite", - "rustversion", - "serde", - "serde_json", - "serde_path_to_error", - "serde_urlencoded", - "sync_wrapper 1.0.1", - "tower", - "tower-layer", - "tower-service", -] - -[[package]] -name = "axum-core" -version = "0.4.3" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a15c63fd72d41492dc4f497196f5da1fb04fb7529e631d73630d1b491e47a2e3" -dependencies = [ - "async-trait", - "bytes", - "futures-util", - "http", - "http-body", - "http-body-util", - "mime", - "pin-project-lite", - "rustversion", - "sync_wrapper 0.1.2", - "tower-layer", - "tower-service", -] +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "backtrace" @@ -114,6 +64,18 @@ version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" +[[package]] +name = "bb8" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b10cf871f3ff2ce56432fddc2615ac7acc3aa22ca321f8fea800846fbb32f188" +dependencies = [ + "async-trait", + "futures-util", + "parking_lot", + "tokio", +] + [[package]] name = "bitflags" version = "1.3.2" @@ -135,6 +97,12 @@ dependencies = [ "generic-array", ] +[[package]] +name = "bumpalo" +version = "3.16.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + [[package]] name = "byteorder" version = "1.5.0" @@ -143,15 +111,15 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.7.1" +version = "1.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8318a53db07bb3f8dca91a600466bdb3f2eaadeedfdbcf02e1accbad9271ba50" +checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" [[package]] name = "cc" -version = "1.1.18" +version = "1.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b62ac837cdb5cb22e10a256099b4fc502b1dfe560cb282963a974d7abd80e476" +checksum = "b16803a61b81d9eabb7eae2588776c4c1e584b738ede45fdbb4c972cec1e9945" dependencies = [ "shlex", ] @@ -230,6 +198,20 @@ dependencies = [ "r2d2", ] +[[package]] +name = "diesel-async" +version = "0.5.0" +source = "git+https://github.com/weiznich/diesel_async?rev=5b8262b#5b8262b86d8ed0e13adbbc4aee39500b9931ef8d" +dependencies = [ + "async-trait", + "bb8", + "diesel", + "futures-util", + "scoped-futures", + "tokio", + "tokio-postgres", +] + [[package]] name = "diesel_derives" version = "2.2.3" @@ -289,18 +271,6 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" -[[package]] -name = "filetime" -version = "0.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" -dependencies = [ - "cfg-if", - "libc", - "libredox", - "windows-sys 0.59.0", -] - [[package]] name = "fnv" version = "1.0.7" @@ -316,25 +286,54 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", + "futures-sink", +] + [[package]] name = "futures-core" -version = "0.3.30" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-macro" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" [[package]] name = "futures-task" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] name = "futures-util" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures-core", + "futures-macro", + "futures-sink", 
"futures-task", "pin-project-lite", "pin-utils", @@ -364,9 +363,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.31.0" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "heck" @@ -389,15 +388,6 @@ dependencies = [ "digest", ] -[[package]] -name = "home" -version = "0.5.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" -dependencies = [ - "windows-sys 0.52.0", -] - [[package]] name = "http" version = "1.1.0" @@ -409,34 +399,11 @@ dependencies = [ "itoa", ] -[[package]] -name = "http-body" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" -dependencies = [ - "bytes", - "http", -] - -[[package]] -name = "http-body-util" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" -dependencies = [ - "bytes", - "futures-util", - "http", - "http-body", - "pin-project-lite", -] - [[package]] name = "httparse" -version = "1.9.4" +version = "1.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" +checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" [[package]] name = "httpdate" @@ -472,11 +439,20 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9028f49264629065d057f340a86acb84867925865f73bbf8d47b4d149a7e88b8" +[[package]] +name = "js-sys" +version = "0.3.72" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" +dependencies = [ + "wasm-bindgen", +] + [[package]] name = "libc" -version = "0.2.158" +version = "0.2.159" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" +checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5" [[package]] name = "libmimalloc-sys" @@ -488,17 +464,6 @@ dependencies = [ "libc", ] -[[package]] -name = "libredox" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" -dependencies = [ - "bitflags 2.6.0", - "libc", - "redox_syscall", -] - [[package]] name = "lock_api" version = "0.4.12" @@ -515,12 +480,6 @@ version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" -[[package]] -name = "matchit" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" - [[package]] name = "md-5" version = "0.10.6" @@ -546,12 +505,6 @@ dependencies = [ "libmimalloc-sys", ] -[[package]] -name = "mime" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" - [[package]] name = "miniz_oxide" version = "0.8.0" @@ -569,7 +522,7 @@ dependencies = [ "hermit-abi", "libc", 
"wasi", - "windows-sys 0.52.0", + "windows-sys", ] [[package]] @@ -583,18 +536,18 @@ dependencies = [ [[package]] name = "object" -version = "0.36.4" +version = "0.36.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "084f1a5821ac4c651660a94a7153d27ac9d8a53736203f58b31945ded098070a" +checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.19.0" +version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" [[package]] name = "parking_lot" @@ -643,26 +596,6 @@ dependencies = [ "siphasher", ] -[[package]] -name = "pin-project" -version = "1.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" -dependencies = [ - "pin-project-internal", -] - -[[package]] -name = "pin-project-internal" -version = "1.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "pin-project-lite" version = "0.2.14" @@ -695,9 +628,9 @@ dependencies = [ [[package]] name = "postgres-types" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02048d9e032fb3cc3413bbf7b83a15d84a5d419778e2628751896d856498eee9" +checksum = "f66ea23a2d0e5734297357705193335e0a957696f34bed2f2faefacb2fec336f" dependencies = [ "bytes", "fallible-iterator", @@ -715,18 +648,18 @@ dependencies = [ [[package]] name = "pq-sys" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a92c30dd81695321846d4dfe348da67b1752ebb61cd1549d203a7b57e323c435" +checksum = "f6cc05d7ea95200187117196eee9edd0644424911821aeb28a18ce60ea0b8793" dependencies = [ "vcpkg", ] [[package]] name = "proc-macro2" -version = "1.0.86" +version = "1.0.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +checksum = "b3e4daa0dcf6feba26f985457cdf104d4b4256fc5a09547140f3631bb076b19a" dependencies = [ "unicode-ident", ] @@ -783,9 +716,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.4" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0884ad60e090bf1345b93da0a5de8923c93884cd03f40dfcfddd3b4bee661853" +checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" dependencies = [ "bitflags 2.6.0", ] @@ -796,12 +729,6 @@ version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" -[[package]] -name = "rustversion" -version = "1.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" - [[package]] name = "ryu" version = "1.0.18" @@ -816,41 +743,26 @@ checksum = "d4d5cd6d4f24f3ab107e949ab424738cf55b03deddce3b184c46985d7b1394ef" dependencies = [ "itoap", "ryu", - "sailfish-macros", "version_check", ] [[package]] -name = "sailfish-compiler" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7254ec7b3651f7f723a9073153f5dcddc1f2bf1bf8d1b23ac71c236ef6360d2b" -dependencies = [ - "filetime", - "home", - "memchr", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "sailfish-macros" -version = "0.9.0" +name = "scheduled-thread-pool" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00812289fe1891c191cc2d9db461352fc410619e07ec2bb748faaa06412619d0" +checksum = "3cbc66816425a074528352f5789333ecff06ca41b36b0b0efdfbb29edc391a19" dependencies = [ - "proc-macro2", - "sailfish-compiler", + "parking_lot", ] [[package]] -name = "scheduled-thread-pool" -version = "0.2.7" +name = "scoped-futures" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cbc66816425a074528352f5789333ecff06ca41b36b0b0efdfbb29edc391a19" +checksum = "b1473e24c637950c9bd38763220bea91ec3e095a89f672bbd7a10d03e77ba467" dependencies = [ - "parking_lot", + "cfg-if", + "pin-utils", ] [[package]] @@ -891,16 +803,6 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_path_to_error" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" -dependencies = [ - "itoa", - "serde", -] - [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -977,7 +879,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys", ] [[package]] @@ -1005,27 +907,15 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" -version = "2.0.77" +version = "2.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed" +checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] -[[package]] -name = "sync_wrapper" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" - -[[package]] -name = "sync_wrapper" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" - [[package]] name = "tinyvec" version = "1.8.0" @@ -1044,17 +934,43 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" version = "1.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" +source = "git+https://github.com/tokio-rs/tokio.git?rev=512e9de#512e9decfb683d22f4a145459142542caa0894c9" dependencies = [ "backtrace", + "bytes", "libc", "mio", "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2 0.5.7", - "windows-sys 0.52.0", + "windows-sys", +] + +[[package]] +name = "tokio-postgres" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b5d3742945bc7d7f210693b0c58ae542c6fd47b17adbbda0885f3dcb34a6bdb" +dependencies = [ + "async-trait", + "byteorder", + "bytes", + "fallible-iterator", + "futures-channel", + "futures-util", + "log", + "parking_lot", + "percent-encoding", + "phf", + "pin-project-lite", + "postgres-protocol", + "postgres-types", + "rand", + "socket2 0.5.7", + "tokio", + 
"tokio-util", + "whoami", ] [[package]] @@ -1073,55 +989,24 @@ dependencies = [ ] [[package]] -name = "tower" -version = "0.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" -dependencies = [ - "futures-core", - "futures-util", - "pin-project", - "pin-project-lite", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tower-http" -version = "0.5.2" +name = "tokio-util" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5" +checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" dependencies = [ - "bitflags 2.6.0", "bytes", - "http", - "http-body", - "http-body-util", + "futures-core", + "futures-sink", "pin-project-lite", - "tower-layer", - "tower-service", + "tokio", ] -[[package]] -name = "tower-layer" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" - -[[package]] -name = "tower-service" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" - [[package]] name = "tracing" version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ - "log", "pin-project-lite", "tracing-core", ] @@ -1131,9 +1016,6 @@ name = "tracing-core" version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" -dependencies = [ - "once_cell", -] [[package]] name = "typenum" @@ -1143,9 +1025,9 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "unicode-bidi" -version = "0.3.15" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" +checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" [[package]] name = "unicode-ident" @@ -1155,18 +1037,18 @@ checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" [[package]] name = "unicode-normalization" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" dependencies = [ "tinyvec", ] [[package]] name = "unicode-properties" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52ea75f83c0137a9b98608359a5f1af8144876eb67bcb1ce837368e906a9f524" +checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" [[package]] name = "vcpkg" @@ -1186,6 +1068,88 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + +[[package]] +name = "wasm-bindgen" +version = "0.2.95" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" +dependencies = [ + "cfg-if", + "once_cell", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" + +[[package]] +name = "web-sys" +version = "0.3.72" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6488b90108c040df0fe62fa815cbdee25124641df01814dd7282749234c6112" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "whoami" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "372d5b87f58ec45c384ba03563b03544dc5fadc3983e434b286913f5b4a9bb6d" +dependencies = [ + "redox_syscall", + "wasite", + "web-sys", +] + [[package]] name = "winapi" version = "0.3.9" @@ -1217,15 +1181,6 @@ dependencies = [ "windows-targets", ] -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets", -] - [[package]] name = "windows-targets" version = "0.52.6" @@ -1292,9 +1247,8 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "xitca-codegen" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "336b646a30e6d44093beaae1bbe5dda664e8466d387663fc9d61c55fb2d78424" +version = "0.4.0" +source = "git+http://github.com/HFQR/xitca-web?rev=1de8d9c#1de8d9c079e73f7fd9ba953741302d87e50d831a" dependencies = [ "quote", "syn", @@ -1302,9 +1256,8 @@ dependencies = [ [[package]] name = "xitca-http" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47b5b036e32261c69d4f0e81bcb28c2e058ed569518959336fd75fc086208d3f" +version = "0.7.0" +source = "git+http://github.com/HFQR/xitca-web?rev=1de8d9c#1de8d9c079e73f7fd9ba953741302d87e50d831a" dependencies = [ "futures-core", "http", @@ -1336,12 +1289,13 @@ dependencies = [ [[package]] name = "xitca-postgres" -version = "0.1.0" -source = "git+https://github.com/HFQR/xitca-web.git?rev=0cda225#0cda2254f98b40f21bc3170dd8983f16444f0bd0" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46046cb7a3d4fcfb5c858bab0651c73bb45c0c5f9f0f53dd4eb991f2d6e5d6f4" dependencies = [ "fallible-iterator", + "futures-core", 
"percent-encoding", - "phf", "postgres-protocol", "postgres-types", "tokio", @@ -1350,6 +1304,35 @@ dependencies = [ "xitca-unsafe-collection", ] +[[package]] +name = "xitca-postgres" +version = "0.3.0" +source = "git+http://github.com/HFQR/xitca-web?rev=1de8d9c#1de8d9c079e73f7fd9ba953741302d87e50d831a" +dependencies = [ + "fallible-iterator", + "futures-core", + "percent-encoding", + "postgres-protocol", + "postgres-types", + "tokio", + "tracing", + "xitca-io", + "xitca-unsafe-collection", +] + +[[package]] +name = "xitca-postgres-diesel" +version = "0.1.0" +source = "git+https://github.com/fakeshadow/xitca-postgres-diesel?rev=ae93ee9#ae93ee95277e281fb87b351c42bfc2fc5a56703a" +dependencies = [ + "diesel", + "diesel-async", + "futures-core", + "scoped-futures", + "tokio", + "xitca-postgres 0.2.1", +] + [[package]] name = "xitca-router" version = "0.3.0" @@ -1361,9 +1344,8 @@ dependencies = [ [[package]] name = "xitca-server" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a40a05f18780f1de843c5077583e4650b08d0518fcf9cf7948e28bc92e489736" +version = "0.5.0" +source = "git+http://github.com/HFQR/xitca-web?rev=1de8d9c#1de8d9c079e73f7fd9ba953741302d87e50d831a" dependencies = [ "socket2 0.5.7", "tokio", @@ -1376,9 +1358,8 @@ dependencies = [ [[package]] name = "xitca-service" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74b093ca75b264924773d53e445de08a937100bf1cbe4f62d4dc2c0d04a3ba4b" +version = "0.3.0" +source = "git+http://github.com/HFQR/xitca-web?rev=1de8d9c#1de8d9c079e73f7fd9ba953741302d87e50d831a" [[package]] name = "xitca-unsafe-collection" @@ -1394,42 +1375,39 @@ name = "xitca-web" version = "0.1.0" dependencies = [ "atoi", - "axum", "diesel", + "diesel-async", "futures-core", - "http-body", + "futures-util", + "httparse", "mimalloc", "rand", "sailfish", "serde", "serde_json", "tokio", - "tower", - "tower-http", + "tokio-uring", "xitca-http", "xitca-io", - "xitca-postgres", + "xitca-postgres 0.3.0", + "xitca-postgres-diesel", "xitca-server", "xitca-service", "xitca-unsafe-collection", - "xitca-web 0.6.2", + "xitca-web 0.7.0", ] [[package]] name = "xitca-web" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd4f8f16791ea2a8845f617f1e87887f917835e0603d01f03a51e638b9613d0c" +version = "0.7.0" +source = "git+http://github.com/HFQR/xitca-web?rev=1de8d9c#1de8d9c079e73f7fd9ba953741302d87e50d831a" dependencies = [ "futures-core", - "http-body", "pin-project-lite", "serde", "serde_json", "serde_urlencoded", "tokio", - "tower-layer", - "tower-service", "xitca-codegen", "xitca-http", "xitca-server", diff --git a/frameworks/Rust/xitca-web/Cargo.toml b/frameworks/Rust/xitca-web/Cargo.toml index 6c16d2d7a54..c547c16de48 100644 --- a/frameworks/Rust/xitca-web/Cargo.toml +++ b/frameworks/Rust/xitca-web/Cargo.toml @@ -9,9 +9,9 @@ path = "./src/main.rs" required-features = ["io-uring", "pg", "router", "template"] [[bin]] -name = "xitca-web-iou" -path = "./src/main_iou.rs" -required-features = ["io-uring", "perf", "pg", "template"] +name = "xitca-web-unrealistic" +path = "./src/main_unrealistic.rs" +required-features = ["perf", "pg", "template"] [[bin]] name = "xitca-web-wasm" @@ -19,9 +19,9 @@ path = "./src/main_wasm.rs" required-features = ["web"] [[bin]] -name = "xitca-web-axum" -path = "./src/main_axum.rs" -required-features = ["axum", "io-uring", "perf", "pg-sync", "template"] +name = "xitca-web-orm" +path = "./src/main_orm.rs" +required-features = 
["pg-orm-async", "template", "web-codegen"] [[bin]] name = "xitca-web-sync" @@ -29,55 +29,56 @@ path = "./src/main_sync.rs" required-features = ["pg-orm", "template", "web-codegen"] [features] -# pg optional +# pg client optional pg = ["dep:xitca-postgres"] -# pg send/sync optional -pg-sync = ["dep:xitca-postgres"] -# pg orm optional -pg-orm = ["dep:diesel"] +# diesel orm optional +pg-orm = ["diesel/r2d2"] +# diesel async orm optional +pg-orm-async = ["dep:diesel", "dep:diesel-async", "dep:xitca-postgres-diesel", "dep:futures-util"] # http router optional router = ["xitca-http/router"] # web optional web = ["dep:xitca-web"] -# web codegen optional +# web with macros optional web-codegen = ["xitca-web/codegen", "xitca-web/urlencoded"] # template optional template = ["dep:sailfish"] # io-uring optional -io-uring = ["xitca-http/io-uring", "xitca-server/io-uring"] -# axum optional -axum = ["dep:axum", "dep:http-body", "dep:tower", "dep:tower-http", "xitca-web/tower-http-compat" ] +io-uring = ["dep:tokio-uring", "xitca-http/io-uring", "xitca-server/io-uring"] # unrealistic performance optimization perf = ["dep:mimalloc", "tokio/parking_lot"] [dependencies] -xitca-http = "0.6" -xitca-io = "0.4" -xitca-server = "0.4" -xitca-service = "0.2" +xitca-http = "0.7" +xitca-io = "0.4.1" +xitca-server = "0.5" +xitca-service = "0.3" xitca-unsafe-collection = "0.2" atoi = "2" +httparse = "1" serde = { version = "1" } serde_json = { version = "1" } # web optional -xitca-web = { version = "0.6", features = ["json"], optional = true } +xitca-web = { version = "0.7", features = ["json"], optional = true } # raw-pg optional -xitca-postgres = { version = "0.1", optional = true } +xitca-postgres = { version = "0.3", optional = true } # orm optional -diesel = { version = "2", features = ["postgres", "r2d2"], optional = true } +diesel = { version = "2", features = ["postgres"], optional = true } + +# orm async optional +diesel-async = { version = "0.5", features = ["bb8", "postgres"], optional = true } +xitca-postgres-diesel = { version = "0.1", optional = true } +futures-util = { version = "0.3", default-features = false, optional = true } # template optional -sailfish = { version = "0.9", default-features = false, features = ["derive", "perf-inline"], optional = true } +sailfish = { version = "0.9", default-features = false, features = ["perf-inline"], optional = true } -# axum optional -axum = { version = "0.7", optional = true, default-features = false, features = ["json", "query"] } -http-body = { version = "1", optional = true } -tower = { version = "0.4", optional = true } -tower-http = { version = "0.5", features = ["set-header"], optional = true } +# io-uring optional +tokio-uring = { version = "0.5", optional = true } # perf optional mimalloc = { version = "0.1", default-features = false, optional = true } @@ -95,5 +96,15 @@ codegen-units = 1 panic = "abort" [patch.crates-io] -xitca-postgres = { git = "https://github.com/HFQR/xitca-web.git", rev = "0cda225" } +xitca-postgres-diesel = { git = "https://github.com/fakeshadow/xitca-postgres-diesel", rev = "ae93ee9" } + +diesel-async = { git = "https://github.com/weiznich/diesel_async", rev = "5b8262b" } mio = { git = "https://github.com/fakeshadow/mio", rev = "9bae6012b7ecfc6083350785f71a5e8265358178" } +tokio = { git = "https://github.com/tokio-rs/tokio.git", rev = "512e9de" } + +xitca-codegen = { git = "http://github.com/HFQR/xitca-web", rev = "1de8d9c" } +xitca-http = { git = "http://github.com/HFQR/xitca-web", rev = "1de8d9c" } +xitca-postgres = { git = 
"http://github.com/HFQR/xitca-web", rev = "1de8d9c" } +xitca-server = { git = "http://github.com/HFQR/xitca-web", rev = "1de8d9c" } +xitca-service = { git = "http://github.com/HFQR/xitca-web", rev = "1de8d9c" } +xitca-web = { git = "http://github.com/HFQR/xitca-web", rev = "1de8d9c" } diff --git a/frameworks/Rust/xitca-web/benchmark_config.json b/frameworks/Rust/xitca-web/benchmark_config.json index f4c152eb729..db81e362742 100755 --- a/frameworks/Rust/xitca-web/benchmark_config.json +++ b/frameworks/Rust/xitca-web/benchmark_config.json @@ -24,7 +24,7 @@ "notes": "", "versus": "" }, - "iou": { + "unrealistic": { "json_url": "/json", "plaintext_url": "/plaintext", "db_url": "/db", @@ -35,7 +35,7 @@ "approach": "Stripped", "classification": "Platform", "database": "Postgres", - "framework": "xitca-web [unrealistic]", + "framework": "xitca-web", "language": "Rust", "orm": "Raw", "platform": "None", @@ -53,7 +53,7 @@ "approach": "Realistic", "classification": "Micro", "database": "none", - "framework": "xitca-web [wasm]", + "framework": "xitca-web", "language": "rust", "orm": "raw", "platform": "none", @@ -64,7 +64,7 @@ "notes": "", "versus": "" }, - "axum": { + "orm": { "json_url": "/json", "plaintext_url": "/plaintext", "db_url": "/db", @@ -73,16 +73,16 @@ "update_url": "/updates?q=", "port": 8080, "approach": "realistic", - "classification": "micro", + "classification": "fullstack", "database": "postgres", - "framework": "axum [xitca]", + "framework": "xitca-web", "language": "rust", - "orm": "raw", + "orm": "full", "platform": "none", "webserver": "xitca-server", "os": "linux", "database_os": "linux", - "display_name": "axum [xitca]", + "display_name": "xitca-web [orm]", "notes": "", "versus": "" }, @@ -97,7 +97,7 @@ "approach": "realistic", "classification": "micro", "database": "postgres", - "framework": "xitca-web [sync]", + "framework": "xitca-web", "language": "rust", "orm": "full", "platform": "none", diff --git a/frameworks/Rust/xitca-web/src/db.rs b/frameworks/Rust/xitca-web/src/db.rs index 818804341b1..abb9ef10870 100644 --- a/frameworks/Rust/xitca-web/src/db.rs +++ b/frameworks/Rust/xitca-web/src/db.rs @@ -1,102 +1,61 @@ -// clippy is dumb and have no idea what should be lazy or not -#![allow(clippy::unnecessary_lazy_evaluations)] +#[path = "./db_util.rs"] +mod db_util; -use xitca_io::bytes::BytesMut; -use xitca_postgres::{pipeline::Pipeline, pool::Pool, AsyncLendingIterator, Type}; +use core::cell::RefCell; + +use xitca_postgres::{iter::AsyncLendingIterator, pipeline::Pipeline, pool::Pool, statement::Statement, Execute}; use super::{ ser::{Fortune, Fortunes, World}, - util::{bulk_update_gen, HandleResult, Rand, DB_URL}, + util::{HandleResult, DB_URL}, }; +use db_util::{not_found, sort_update_params, update_query_from_num, Shared, FORTUNE_STMT, WORLD_STMT}; + pub struct Client { pool: Pool, - #[cfg(not(feature = "pg-sync"))] - shared: std::cell::RefCell, - #[cfg(feature = "pg-sync")] - shared: std::sync::Mutex, + shared: RefCell, updates: Box<[Box]>, } -type Shared = (Rand, BytesMut); - -const FORTUNE_SQL: &str = "SELECT * FROM fortune"; - -const FORTUNE_SQL_TYPES: &[Type] = &[]; - -const WORLD_SQL: &str = "SELECT * FROM world WHERE id=$1"; - -const WORLD_SQL_TYPES: &[Type] = &[Type::INT4]; - -fn update_query(num: usize) -> Box { - bulk_update_gen(|query| { - use std::fmt::Write; - (1..=num).fold((1, query), |(idx, query), _| { - write!(query, "(${}::int,${}::int),", idx, idx + 1).unwrap(); - (idx + 2, query) - }); - }) - .into_boxed_str() -} - pub async fn create() -> 
HandleResult { - let pool = Pool::builder(DB_URL).capacity(1).build()?; - - let shared = (Rand::default(), BytesMut::new()); - - let updates = core::iter::once(Box::from("")) - .chain((1..=500).map(update_query)) - .collect(); - Ok(Client { - pool, - #[cfg(not(feature = "pg-sync"))] - shared: std::cell::RefCell::new(shared), - #[cfg(feature = "pg-sync")] - shared: std::sync::Mutex::new(shared), - updates, + pool: Pool::builder(DB_URL).capacity(1).build()?, + shared: Default::default(), + updates: core::iter::once(Box::from("")) + .chain((1..=500).map(update_query_from_num)) + .collect(), }) } impl Client { - #[cfg(not(feature = "pg-sync"))] - fn shared(&self) -> std::cell::RefMut<'_, Shared> { - self.shared.borrow_mut() - } - - #[cfg(feature = "pg-sync")] - fn shared(&self) -> std::sync::MutexGuard<'_, Shared> { - self.shared.lock().unwrap() - } - pub async fn get_world(&self) -> HandleResult { let mut conn = self.pool.get().await?; - let stmt = conn.prepare(WORLD_SQL, WORLD_SQL_TYPES).await?; - let id = self.shared().0.gen_id(); - let mut res = conn.consume().query_raw(&stmt, [id])?; - let row = res.try_next().await?.ok_or_else(|| "World does not exist")?; - Ok(World::new(row.get_raw(0), row.get_raw(1))) + let stmt = WORLD_STMT.execute(&mut conn).await?; + let id = self.shared.borrow_mut().0.gen_id(); + let mut res = stmt.bind([id]).query(&conn.consume()).await?; + let row = res.try_next().await?.ok_or_else(not_found)?; + Ok(World::new(row.get(0), row.get(1))) } pub async fn get_worlds(&self, num: u16) -> HandleResult> { let len = num as usize; let mut conn = self.pool.get().await?; - let stmt = conn.prepare(WORLD_SQL, WORLD_SQL_TYPES).await?; + let stmt = WORLD_STMT.execute(&mut conn).await?; let mut res = { - let (ref mut rng, ref mut buf) = *self.shared(); + let (ref mut rng, ref mut buf) = *self.shared.borrow_mut(); let mut pipe = Pipeline::with_capacity_from_buf(len, buf); - (0..num).try_for_each(|_| pipe.query_raw(&stmt, [rng.gen_id()]))?; - conn.consume().pipeline(pipe)? + (0..num).try_for_each(|_| stmt.bind([rng.gen_id()]).query(&mut pipe))?; + pipe.query(&conn.consume())? }; let mut worlds = Vec::with_capacity(len); while let Some(mut item) = res.try_next().await? { - while let Some(row) = item.try_next().await? { - worlds.push(World::new(row.get_raw(0), row.get_raw(1))) - } + let row = item.try_next().await?.ok_or_else(not_found)?; + worlds.push(World::new(row.get(0), row.get(1))); } Ok(worlds) @@ -105,25 +64,24 @@ impl Client { pub async fn update(&self, num: u16) -> HandleResult> { let len = num as usize; - let update = self.updates.get(len).ok_or_else(|| "num out of bound")?; - + let update = self.updates.get(len).ok_or("request num is out of range")?; let mut conn = self.pool.get().await?; - let world_stmt = conn.prepare(WORLD_SQL, WORLD_SQL_TYPES).await?; - let update_stmt = conn.prepare(update, &[]).await?; + let world_stmt = WORLD_STMT.execute(&mut conn).await?; + let update_stmt = Statement::named(update, &[]).execute(&mut conn).await?; let mut params = Vec::with_capacity(len); let mut res = { - let (ref mut rng, ref mut buf) = *self.shared(); + let (ref mut rng, ref mut buf) = *self.shared.borrow_mut(); let mut pipe = Pipeline::with_capacity_from_buf(len + 1, buf); (0..num).try_for_each(|_| { let w_id = rng.gen_id(); let r_id = rng.gen_id(); params.push([w_id, r_id]); - pipe.query_raw(&world_stmt, [w_id]) + world_stmt.bind([w_id]).query(&mut pipe) })?; - pipe.query_raw(&update_stmt, sort_update_params(¶ms))?; - conn.consume().pipeline(pipe)? 
+ update_stmt.bind(sort_update_params(¶ms)).query(&mut pipe)?; + pipe.query(&conn.consume())? }; let mut worlds = Vec::with_capacity(len); @@ -133,7 +91,7 @@ impl Client { while let Some(mut item) = res.try_next().await? { while let Some(row) = item.try_next().await? { let r_id = r_ids.next().unwrap()[1]; - worlds.push(World::new(row.get_raw(0), r_id)) + worlds.push(World::new(row.get(0), r_id)) } } @@ -145,11 +103,11 @@ impl Client { items.push(Fortune::new(0, "Additional fortune added at request time.")); let mut conn = self.pool.get().await?; - let stmt = conn.prepare(FORTUNE_SQL, FORTUNE_SQL_TYPES).await?; - let mut res = conn.consume().query_raw::<[i32; 0]>(&stmt, [])?; + let stmt = FORTUNE_STMT.execute(&mut conn).await?; + let mut res = stmt.query(&conn.consume()).await?; while let Some(row) = res.try_next().await? { - items.push(Fortune::new(row.get_raw(0), row.get_raw::(1))); + items.push(Fortune::new(row.get(0), row.get::(1))); } items.sort_by(|it, next| it.message.cmp(&next.message)); @@ -157,33 +115,3 @@ impl Client { Ok(Fortunes::new(items)) } } - -fn sort_update_params(params: &[[i32; 2]]) -> impl ExactSizeIterator { - let mut params = params.to_owned(); - params.sort_by(|a, b| a[0].cmp(&b[0])); - - struct ParamIter(I); - - impl Iterator for ParamIter - where - I: Iterator, - { - type Item = I::Item; - - #[inline] - fn next(&mut self) -> Option { - self.0.next() - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - self.0.size_hint() - } - } - - // impl depends on compiler optimization to flat Vec<[T]> to Vec when inferring - // it's size hint. possible to cause runtime panic. - impl ExactSizeIterator for ParamIter where I: Iterator {} - - ParamIter(params.into_iter().flatten()) -} diff --git a/frameworks/Rust/xitca-web/src/db_diesel.rs b/frameworks/Rust/xitca-web/src/db_diesel.rs index cf19768ac9f..1675a7c2cd1 100644 --- a/frameworks/Rust/xitca-web/src/db_diesel.rs +++ b/frameworks/Rust/xitca-web/src/db_diesel.rs @@ -1,3 +1,6 @@ +#[path = "./db_util.rs"] +mod db_util; + use std::{ io, sync::{Arc, Mutex}, @@ -7,9 +10,11 @@ use diesel::{prelude::*, r2d2}; use crate::{ ser::{Fortune, Fortunes, World}, - util::{bulk_update_gen, Error, HandleResult, Rand, DB_URL}, + util::{HandleResult, Rand, DB_URL}, }; +use db_util::{not_found, update_query_from_ids}; + pub type Pool = Arc<_Pool>; pub struct _Pool { @@ -34,12 +39,6 @@ pub fn create() -> io::Result> { }) } -#[cold] -#[inline(never)] -fn not_found() -> Error { - "world not found".into() -} - impl _Pool { pub fn get_world(&self) -> HandleResult { use crate::schema::world::dsl::*; @@ -53,16 +52,12 @@ impl _Pool { use crate::schema::world::dsl::*; let mut conn = self.pool.get()?; - (0..num) - .map(|_| { - let w_id = self.rng.lock().unwrap().gen_id(); - world - .filter(id.eq(w_id)) - .load::(&mut conn)? - .pop() - .ok_or_else(not_found) - }) - .collect() + core::iter::repeat_with(|| { + let w_id = self.rng.lock().unwrap().gen_id(); + world.filter(id.eq(w_id)).load(&mut conn)?.pop().ok_or_else(not_found) + }) + .take(num as _) + .collect() } pub fn update(&self, num: u16) -> HandleResult> { @@ -75,30 +70,20 @@ impl _Pool { rngs.sort_by(|(a, _), (b, _)| a.cmp(b)); - let mut worlds = { - let mut conn = self.pool.get()?; + let update_sql = update_query_from_ids(&rngs); - let worlds = rngs - .iter() - .map(|(w_id, num)| { - world - .filter(id.eq(w_id)) - .load::(&mut conn)? 
- .pop() - .map(|mut w| { - w.randomnumber = *num; - w - }) - .ok_or_else(not_found) - }) - .collect::>>()?; - - diesel::sql_query(update_query(&rngs)).execute(&mut conn)?; - - worlds - }; + let mut conn = self.pool.get()?; + + let worlds = rngs + .into_iter() + .map(|(w_id, num)| { + let mut w: World = world.filter(id.eq(w_id)).load(&mut conn)?.pop().ok_or_else(not_found)?; + w.randomnumber = num; + Ok(w) + }) + .collect::>>()?; - worlds.sort_by_key(|w| w.id); + diesel::sql_query(update_sql).execute(&mut conn)?; Ok(worlds) } @@ -108,7 +93,7 @@ impl _Pool { let mut items = { let mut conn = self.pool.get()?; - fortune.load::(&mut conn)? + fortune.load(&mut conn)? }; items.push(Fortune::new(0, "Additional fortune added at request time.")); @@ -117,14 +102,3 @@ impl _Pool { Ok(Fortunes::new(items)) } } - -// diesel does not support high level bulk update api. use raw sql to bypass the limitation. -// relate discussion: https://github.com/diesel-rs/diesel/discussions/2879 -fn update_query(ids: &[(i32, i32)]) -> String { - bulk_update_gen(|query| { - use std::fmt::Write; - ids.iter().for_each(|(w_id, num)| { - write!(query, "({}::int,{}::int),", w_id, num).unwrap(); - }); - }) -} diff --git a/frameworks/Rust/xitca-web/src/db_diesel_async.rs b/frameworks/Rust/xitca-web/src/db_diesel_async.rs new file mode 100644 index 00000000000..e3ee7e895fd --- /dev/null +++ b/frameworks/Rust/xitca-web/src/db_diesel_async.rs @@ -0,0 +1,122 @@ +#[path = "./db_util.rs"] +mod db_util; + +use std::{io, sync::Mutex}; + +use diesel::prelude::*; +use diesel_async::{ + pooled_connection::{bb8, AsyncDieselConnectionManager}, + RunQueryDsl, +}; +use futures_util::{ + future::join, + stream::{FuturesUnordered, TryStreamExt}, +}; +use xitca_postgres_diesel::AsyncPgConnection; + +use crate::{ + ser::{Fortune, Fortunes, World}, + util::{HandleResult, Rand, DB_URL}, +}; + +use db_util::{not_found, update_query_from_ids}; + +pub struct Pool { + pool: bb8::Pool, + rng: Mutex, +} + +pub async fn create() -> io::Result { + bb8::Pool::builder() + .max_size(1) + .min_idle(Some(1)) + .test_on_check_out(false) + .build(AsyncDieselConnectionManager::new(DB_URL)) + .await + .map_err(io::Error::other) + .map(|pool| Pool { + pool, + rng: Mutex::new(Rand::default()), + }) +} + +impl Pool { + pub async fn get_world(&self) -> HandleResult { + use crate::schema::world::dsl::*; + { + let w_id = self.rng.lock().unwrap().gen_id(); + let mut conn = self.pool.get().await?; + world.filter(id.eq(w_id)).load(&mut conn) + } + .await? 
+ .pop() + .ok_or_else(not_found) + } + + pub async fn get_worlds(&self, num: u16) -> HandleResult> { + use crate::schema::world::dsl::*; + { + let mut conn = self.pool.get().await?; + let mut rng = self.rng.lock().unwrap(); + core::iter::repeat_with(|| { + let w_id = rng.gen_id(); + let fut = world.filter(id.eq(w_id)).load(&mut conn); + async { fut.await?.pop().ok_or_else(not_found) } + }) + .take(num as _) + .collect::>() + } + .try_collect() + .await + } + + pub async fn update(&self, num: u16) -> HandleResult> { + use crate::schema::world::dsl::*; + + let (select_res, update_res) = { + let mut conn = self.pool.get().await?; + let mut rng = self.rng.lock().unwrap(); + + let (select, mut rngs) = core::iter::repeat_with(|| { + let w_id = rng.gen_id(); + let num = rng.gen_id(); + + let fut = world.filter(id.eq(w_id)).load::(&mut conn); + let select = async move { + let mut w = fut.await?.pop().ok_or_else(not_found)?; + w.randomnumber = num; + HandleResult::Ok(w) + }; + + (select, (w_id, num)) + }) + .take(num as _) + .collect::<(FuturesUnordered<_>, Vec<_>)>(); + + rngs.sort_by(|(a, _), (b, _)| a.cmp(b)); + + let update = diesel::sql_query(update_query_from_ids(&rngs)).execute(&mut conn); + + join(select.try_collect::>(), update) + } + .await; + + update_res?; + select_res + } + + pub async fn tell_fortune(&self) -> HandleResult { + use crate::schema::fortune::dsl::*; + + let mut items = { + let mut conn = self.pool.get().await?; + fortune.load(&mut conn) + } + .await?; + + items.push(Fortune::new(0, "Additional fortune added at request time.")); + items.sort_by(|it, next| it.message.cmp(&next.message)); + + Ok(Fortunes::new(items)) + } +} diff --git a/frameworks/Rust/xitca-web/src/db_unrealistic.rs b/frameworks/Rust/xitca-web/src/db_unrealistic.rs new file mode 100644 index 00000000000..99e4249e39b --- /dev/null +++ b/frameworks/Rust/xitca-web/src/db_unrealistic.rs @@ -0,0 +1,131 @@ +//! this module is unrealistic. related issue: +//! 
https://github.com/TechEmpower/FrameworkBenchmarks/issues/8790 + +#[path = "./db_util.rs"] +mod db_util; + +use std::cell::RefCell; + +use xitca_postgres::{iter::AsyncLendingIterator, pipeline::Pipeline, statement::Statement, Execute}; + +use super::{ + ser::{Fortune, Fortunes, World}, + util::{HandleResult, DB_URL}, +}; + +use db_util::{not_found, sort_update_params, update_query_from_num, Shared, FORTUNE_STMT, WORLD_STMT}; + +pub struct Client { + cli: xitca_postgres::Client, + shared: RefCell, + fortune: Statement, + world: Statement, + updates: Box<[Statement]>, +} + +pub async fn create() -> HandleResult { + let (cli, mut drv) = xitca_postgres::Postgres::new(DB_URL).connect().await?; + + tokio::task::spawn(tokio::task::unconstrained(async move { + while drv.try_next().await?.is_some() {} + HandleResult::Ok(()) + })); + + let world = WORLD_STMT.execute(&cli).await?.leak(); + let fortune = FORTUNE_STMT.execute(&cli).await?.leak(); + + let mut updates = vec![Statement::default()]; + + for update in (1..=500).map(update_query_from_num).into_iter() { + let stmt = Statement::named(&update, &[]).execute(&cli).await?.leak(); + updates.push(stmt); + } + + Ok(Client { + cli, + shared: Default::default(), + world, + fortune, + updates: updates.into_boxed_slice(), + }) +} + +impl Client { + pub async fn get_world(&self) -> HandleResult { + let id = self.shared.borrow_mut().0.gen_id(); + let mut res = self.world.bind([id]).query(&self.cli).await?; + let row = res.try_next().await?.ok_or_else(not_found)?; + Ok(World::new(row.get(0), row.get(1))) + } + + pub async fn get_worlds(&self, num: u16) -> HandleResult> { + let len = num as usize; + + let mut res = Vec::with_capacity(len); + + { + let (ref mut rng, ..) = *self.shared.borrow_mut(); + for _ in 0..len { + let stream = self.world.bind([rng.gen_id()]).query(&self.cli).await?; + res.push(stream); + } + }; + + let mut worlds = Vec::with_capacity(len); + + for mut stream in res { + let row = stream.try_next().await?.ok_or_else(not_found)?; + worlds.push(World::new(row.get(0), row.get(1))); + } + + Ok(worlds) + } + + pub async fn update(&self, num: u16) -> HandleResult> { + let len = num as usize; + + let mut params = Vec::with_capacity(len); + + let mut res = { + let (ref mut rng, ref mut buf) = *self.shared.borrow_mut(); + // unrealistic as all queries are sent with only one sync point. + let mut pipe = Pipeline::unsync_with_capacity_from_buf(len + 1, buf); + (0..num).try_for_each(|_| { + let w_id = rng.gen_id(); + let r_id = rng.gen_id(); + params.push([w_id, r_id]); + self.world.bind([w_id]).query(&mut pipe) + })?; + self.updates[len].bind(sort_update_params(¶ms)).query(&mut pipe)?; + pipe.query(&self.cli)? + }; + + let mut worlds = Vec::with_capacity(len); + + let mut r_ids = params.into_iter(); + + while let Some(mut item) = res.try_next().await? { + while let Some(row) = item.try_next().await? { + let r_id = r_ids.next().unwrap()[1]; + worlds.push(World::new(row.get(0), r_id)) + } + } + + Ok(worlds) + } + + pub async fn tell_fortune(&self) -> HandleResult { + let mut items = Vec::with_capacity(32); + items.push(Fortune::new(0, "Additional fortune added at request time.")); + + let mut res = self.fortune.query(&self.cli).await?; + + while let Some(row) = res.try_next().await? 
{ + items.push(Fortune::new(row.get(0), row.get::(1))); + } + + items.sort_by(|it, next| it.message.cmp(&next.message)); + + Ok(Fortunes::new(items)) + } +} diff --git a/frameworks/Rust/xitca-web/src/db_util.rs b/frameworks/Rust/xitca-web/src/db_util.rs new file mode 100644 index 00000000000..42c2c455bbe --- /dev/null +++ b/frameworks/Rust/xitca-web/src/db_util.rs @@ -0,0 +1,96 @@ +use crate::util::Error; + +#[cfg(any(feature = "pg-orm", feature = "pg-orm-async"))] +// diesel does not support high level bulk update api. use raw sql to bypass the limitation. +// relate discussion: https://github.com/diesel-rs/diesel/discussions/2879 +pub fn update_query_from_ids(ids: &[(i32, i32)]) -> String { + update_query(|query| { + use core::fmt::Write; + ids.iter().for_each(|(w_id, num)| { + write!(query, "({}::int,{}::int),", w_id, num).unwrap(); + }); + }) +} + +fn update_query(func: impl FnOnce(&mut String)) -> String { + const PREFIX: &str = "UPDATE world SET randomNumber = w.r FROM (VALUES "; + const SUFFIX: &str = ") AS w (i,r) WHERE world.id = w.i"; + + let mut query = String::from(PREFIX); + + func(&mut query); + + if query.ends_with(',') { + query.pop(); + } + + query.push_str(SUFFIX); + + query +} + +#[cold] +#[inline(never)] +pub fn not_found() -> Error { + "request World does not exist".into() +} + +#[cfg(feature = "pg")] +pub use pg::*; + +#[cfg(feature = "pg")] +pub mod pg { + use xitca_io::bytes::BytesMut; + use xitca_postgres::{ + statement::{Statement, StatementNamed}, + types::Type, + }; + + use crate::util::Rand; + + pub type Shared = (Rand, BytesMut); + + pub const FORTUNE_STMT: StatementNamed = Statement::named("SELECT * FROM fortune", &[]); + pub const WORLD_STMT: StatementNamed = Statement::named("SELECT * FROM world WHERE id=$1", &[Type::INT4]); + + pub fn update_query_from_num(num: usize) -> Box { + super::update_query(|query| { + use core::fmt::Write; + (1..=num).fold(1, |idx, _| { + write!(query, "(${}::int,${}::int),", idx, idx + 1).unwrap(); + idx + 2 + }); + }) + .into_boxed_str() + } + + pub fn sort_update_params(params: &[[i32; 2]]) -> impl ExactSizeIterator { + let mut params = params.to_owned(); + params.sort_by(|a, b| a[0].cmp(&b[0])); + + struct ParamIter(I); + + impl Iterator for ParamIter + where + I: Iterator, + { + type Item = I::Item; + + #[inline] + fn next(&mut self) -> Option { + self.0.next() + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + self.0.size_hint() + } + } + + // impl depends on compiler optimization to flat Vec<[T]> to Vec when inferring + // it's size hint. possible to cause runtime panic. 
+ impl ExactSizeIterator for ParamIter where I: Iterator {} + + ParamIter(params.into_iter().flatten()) + } +} diff --git a/frameworks/Rust/xitca-web/src/main.rs b/frameworks/Rust/xitca-web/src/main.rs index 710ff577586..b7389fbb821 100755 --- a/frameworks/Rust/xitca-web/src/main.rs +++ b/frameworks/Rust/xitca-web/src/main.rs @@ -5,18 +5,22 @@ mod util; use xitca_http::{ h1::RequestBody, http::{header::SERVER, StatusCode}, - util::service::{ - route::get, - router::{Router, RouterError}, + util::{ + middleware::context::{Context, ContextBuilder}, + service::{ + route::get, + router::{Router, RouterError}, + }, }, HttpServiceBuilder, }; use xitca_service::{fn_service, Service, ServiceExt}; +use db::Client; use ser::{error_response, IntoResponse, Message, Request, Response}; -use util::{context_mw, HandleResult, QueryParse, SERVER_HEADER_VALUE}; +use util::{HandleResult, QueryParse, State, SERVER_HEADER_VALUE}; -type Ctx<'a> = util::Ctx<'a, Request>; +type Ctx<'a> = Context<'a, Request, State>; fn main() -> std::io::Result<()> { let service = Router::new() @@ -27,7 +31,7 @@ fn main() -> std::io::Result<()> { .insert("/queries", get(fn_service(queries))) .insert("/updates", get(fn_service(updates))) .enclosed_fn(middleware) - .enclosed(context_mw()) + .enclosed(ContextBuilder::new(|| async { db::create().await.map(State::new) })) .enclosed(HttpServiceBuilder::h1().io_uring()); xitca_server::Builder::new() .bind("xitca-web", "0.0.0.0:8080", service)? diff --git a/frameworks/Rust/xitca-web/src/main_axum.rs b/frameworks/Rust/xitca-web/src/main_axum.rs deleted file mode 100644 index 02fdfba8a20..00000000000 --- a/frameworks/Rust/xitca-web/src/main_axum.rs +++ /dev/null @@ -1,150 +0,0 @@ -//! show case of axum running on proper thread per core server with io-uring enabled. - -#[global_allocator] -static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; - -mod db; -mod ser; -mod util; - -use std::sync::Arc; - -use axum::{ - extract::{Json, Query, State}, - http::{ - header::{HeaderValue, SERVER}, - StatusCode, - }, - response::{Html, IntoResponse, Response}, - routing::{get, Router}, -}; -use tower_http::set_header::SetResponseHeaderLayer; - -use crate::{db::Client, ser::Num, tower_compat::TowerHttp}; - -fn main() -> std::io::Result<()> { - let service = TowerHttp::service(|| async { - let cli = db::create().await?; - let service = Router::new() - .route("/plaintext", get(plain_text)) - .route("/json", get(json)) - .route("/db", get(db)) - .route("/fortunes", get(fortunes)) - .route("/queries", get(queries)) - .route("/updates", get(updates)) - .with_state(Arc::new(cli)) - .layer(SetResponseHeaderLayer::if_not_present( - SERVER, - HeaderValue::from_static("A"), - )); - Ok(service) - }); - xitca_server::Builder::new() - .bind("xitca-axum", "0.0.0.0:8080", service)? - .build() - .wait() -} - -async fn plain_text() -> &'static str { - "Hello, World!" -} - -async fn json() -> impl IntoResponse { - Json(ser::Message::new()) -} - -async fn db(State(cli): State>) -> impl IntoResponse { - cli.get_world().await.map(Json).map_err(Error) -} - -async fn fortunes(State(cli): State>) -> impl IntoResponse { - use sailfish::TemplateOnce; - cli.tell_fortune() - .await - .map_err(Error)? 
- .render_once() - .map(Html) - .map_err(|e| Error(Box::new(e))) -} - -async fn queries(State(cli): State>, Query(Num(num)): Query) -> impl IntoResponse { - cli.get_worlds(num).await.map(Json).map_err(Error) -} - -async fn updates(State(cli): State>, Query(Num(num)): Query) -> impl IntoResponse { - cli.update(num).await.map(Json).map_err(Error) -} - -struct Error(util::Error); - -impl IntoResponse for Error { - fn into_response(self) -> Response { - let mut res = self.0.to_string().into_response(); - *res.status_mut() = StatusCode::INTERNAL_SERVER_ERROR; - res - } -} - -// compat module between xitca-http and axum. -mod tower_compat { - use core::{cell::RefCell, fmt, future::Future, marker::PhantomData}; - - use std::net::SocketAddr; - - use http_body::Body; - use xitca_http::{ - bytes::Bytes, - h1::RequestBody, - http::{Request, RequestExt, Response}, - HttpServiceBuilder, - }; - use xitca_io::net::io_uring::TcpStream; - use xitca_service::{fn_build, middleware::UncheckedReady, ready::ReadyService, Service, ServiceExt}; - use xitca_web::service::tower_http_compat::{CompatReqBody, CompatResBody}; - - pub struct TowerHttp { - service: RefCell, - _p: PhantomData, - } - - impl TowerHttp { - pub fn service( - func: F, - ) -> impl Service, Error = impl fmt::Debug> - where - F: Fn() -> Fut + Send + Sync + Clone, - Fut: Future>, - S: tower::Service, ()>>, Response = Response>, - S::Error: fmt::Debug, - B: Body + Send + 'static, - { - fn_build(move |_| { - let func = func.clone(); - async move { - func().await.map(|service| TowerHttp { - service: RefCell::new(service), - _p: PhantomData, - }) - } - }) - .enclosed(UncheckedReady) - .enclosed(HttpServiceBuilder::h1().io_uring()) - } - } - - impl Service>> for TowerHttp - where - S: tower::Service, ()>>, Response = Response>, - { - type Response = Response>; - type Error = S::Error; - - async fn call(&self, req: Request>) -> Result { - let (parts, ext) = req.into_parts(); - let req = Request::from_parts(parts, CompatReqBody::new(ext, ())); - let fut = self.service.borrow_mut().call(req); - let (parts, body) = fut.await?.into_parts(); - Ok(Response::from_parts(parts, CompatResBody::new(body))) - } - } -} diff --git a/frameworks/Rust/xitca-web/src/main_iou.rs b/frameworks/Rust/xitca-web/src/main_iou.rs deleted file mode 100644 index f66eb8778e6..00000000000 --- a/frameworks/Rust/xitca-web/src/main_iou.rs +++ /dev/null @@ -1,63 +0,0 @@ -// used as reference of if/how moving from epoll to io-uring(or mixture of the two) make sense for -// network io. - -#[global_allocator] -static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; - -mod db; -mod ser; -mod util; - -use std::{convert::Infallible, io}; - -use xitca_http::{ - body::ResponseBody, - http::{self, header::SERVER, StatusCode}, - HttpServiceBuilder, -}; -use xitca_service::{fn_service, ServiceExt}; - -use self::{ - ser::{error_response, IntoResponse, Message, Request}, - util::{context_mw, Ctx, QueryParse, SERVER_HEADER_VALUE}, -}; - -fn main() -> io::Result<()> { - let service = fn_service(handler) - .enclosed(context_mw()) - .enclosed(HttpServiceBuilder::h1().io_uring()); - xitca_server::Builder::new() - .bind("xitca-iou", "0.0.0.0:8080", service)? 
- .build() - .wait() -} - -async fn handler(ctx: Ctx<'_, Request>) -> Result, Infallible> { - let (req, state) = ctx.into_parts(); - let mut res = match req.uri().path() { - "/plaintext" => req.text_response().unwrap(), - "/json" => req.json_response(state, &Message::new()).unwrap(), - "/db" => { - let world = state.client.get_world().await.unwrap(); - req.json_response(state, &world).unwrap() - } - "/queries" => { - let num = req.uri().query().parse_query(); - let worlds = state.client.get_worlds(num).await.unwrap(); - req.json_response(state, &worlds).unwrap() - } - "/updates" => { - let num = req.uri().query().parse_query(); - let worlds = state.client.update(num).await.unwrap(); - req.json_response(state, &worlds).unwrap() - } - "/fortunes" => { - use sailfish::TemplateOnce; - let fortunes = state.client.tell_fortune().await.unwrap().render_once().unwrap(); - req.html_response(fortunes).unwrap() - } - _ => error_response(StatusCode::NOT_FOUND), - }; - res.headers_mut().insert(SERVER, SERVER_HEADER_VALUE); - Ok(res.map(Into::into)) -} diff --git a/frameworks/Rust/xitca-web/src/main_orm.rs b/frameworks/Rust/xitca-web/src/main_orm.rs new file mode 100644 index 00000000000..86e8037ac2e --- /dev/null +++ b/frameworks/Rust/xitca-web/src/main_orm.rs @@ -0,0 +1,67 @@ +mod db_diesel_async; +mod schema; +mod ser; +mod util; + +use serde::Serialize; +use xitca_web::{ + codegen::route, + handler::{html::Html, json::Json, query::Query, state::StateRef, text::Text}, + http::{header::SERVER, WebResponse}, + route::get, + App, +}; + +use db_diesel_async::Pool; +use ser::Num; +use util::{HandleResult, SERVER_HEADER_VALUE}; + +fn main() -> std::io::Result<()> { + App::new() + .with_async_state(db_diesel_async::create) + .at("/plaintext", get(Text("Hello, World!"))) + .at("/json", get(Json(ser::Message::new()))) + .at_typed(db) + .at_typed(fortunes) + .at_typed(queries) + .at_typed(updates) + .map(header) + .serve() + .disable_vectored_write() + .bind("0.0.0.0:8080")? + .run() + .wait() +} + +fn header(mut res: WebResponse) -> WebResponse { + res.headers_mut().insert(SERVER, SERVER_HEADER_VALUE); + res +} + +#[route("/db", method = get)] +async fn db(StateRef(pool): StateRef<'_, Pool>) -> HandleResult> { + pool.get_world().await.map(Json) +} + +#[route("/fortunes", method = get)] +async fn fortunes(StateRef(pool): StateRef<'_, Pool>) -> HandleResult> { + use sailfish::TemplateOnce; + let html = pool.tell_fortune().await?.render_once()?; + Ok(Html(html)) +} + +#[route("/queries", method = get)] +async fn queries( + Query(Num(num)): Query, + StateRef(pool): StateRef<'_, Pool>, +) -> HandleResult> { + pool.get_worlds(num).await.map(Json) +} + +#[route("/updates", method = get)] +async fn updates( + Query(Num(num)): Query, + StateRef(pool): StateRef<'_, Pool>, +) -> HandleResult> { + pool.update(num).await.map(Json) +} diff --git a/frameworks/Rust/xitca-web/src/main_unrealistic.rs b/frameworks/Rust/xitca-web/src/main_unrealistic.rs new file mode 100644 index 00000000000..e291799408c --- /dev/null +++ b/frameworks/Rust/xitca-web/src/main_unrealistic.rs @@ -0,0 +1,147 @@ +// unrealistic bench showcase popular tricks for boosting bench score artificially + +// custom global memory allocator don't affect real world performance in noticeable amount. +// in real world they should be used for reason like security, debug/profiling capability etc. 
+#[global_allocator] +static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; + +#[path = "db_unrealistic.rs"] +mod db; +mod ser; +mod util; + +use std::{convert::Infallible, io}; + +use xitca_http::{ + bytes::BufMutWriter, + h1::dispatcher_unreal::{Dispatcher, Request, Response}, + http::StatusCode, +}; +use xitca_io::net::TcpStream; +use xitca_service::Service; + +use self::{ + ser::Message, + util::{QueryParse, State}, +}; + +fn main() -> io::Result<()> { + let addr = "0.0.0.0:8080".parse().unwrap(); + + let cores = std::thread::available_parallelism().map(|num| num.get()).unwrap_or(56); + + let handle = core::iter::repeat_with(|| { + std::thread::spawn(move || { + tokio::runtime::Builder::new_current_thread() + .enable_all() + .build_local(&Default::default()) + .unwrap() + .block_on(async { + let socket = tokio::net::TcpSocket::new_v4()?; + socket.set_reuseaddr(true)?; + // unrealistic due to following reason: + // 1. this only works good on unix system. + // 2. no resource distribution adjustment between sockets on different threads. causing uneven workload + // where some threads are idle while others busy. resulting in overall increased latency + socket.set_reuseport(true)?; + socket.bind(addr)?; + let listener = socket.listen(1024)?; + + let client = db::create().await.unwrap(); + + // unrealistic http dispatcher. no spec check. no security feature. + let service = Dispatcher::new(handler, State::new(client)); + + loop { + match listener.accept().await { + Ok((stream, _)) => { + let stream = stream.into_std()?; + let stream = TcpStream::from_std(stream)?; + let service = service.clone(); + tokio::task::spawn_local(async move { + let _ = service.call(stream).await; + }); + } + Err(e) => return Err(e), + }; + } + }) + }) + }) + .take(cores) + .collect::>(); + + // unrealistic due to no signal handling, not shutdown handling. when killing this process all resources that + // need clean async shutdown will be leaked. + for handle in handle { + handle.join().unwrap()?; + } + + Ok(()) +} + +async fn handler<'h>(req: Request<'h>, res: Response<'h>, state: &State) -> Response<'h, 3> { + // unrealistic due to no http method check + match req.path { + // unrealistic due to no dynamic path matching + "/plaintext" => { + // unrealistic due to no body streaming and no post processing. violating middleware feature of xitca-web + res.status(StatusCode::OK) + .header("content-type", "text/plain") + .header("server", "X") + // unrealistic content length header. + .header("content-length", "13") + .body_writer(|buf| Ok::<_, Infallible>(buf.extend_from_slice(b"Hello, World!"))) + .unwrap() + } + "/json" => res + .status(StatusCode::OK) + .header("content-type", "application/json") + .header("server", "X") + // unrealistic content length header. + .header("content-length", "27") + .body_writer(|buf| serde_json::to_writer(BufMutWriter(buf), &Message::new())) + .unwrap(), + // all database related categories are unrealistic. please reference db_unrealistic module for detail. + "/fortunes" => { + use sailfish::TemplateOnce; + let fortunes = state.client.tell_fortune().await.unwrap().render_once().unwrap(); + res.status(StatusCode::OK) + .header("content-type", "text/html; charset=utf-8") + .header("server", "X") + .body(fortunes.as_bytes()) + } + "/db" => { + // unrealistic due to no error handling. any db/serialization error will cause process crash. + // the same goes for all following unwraps on database related functions. 
+ let world = state.client.get_world().await.unwrap(); + json_response(res, state, &world) + } + p if p.starts_with("/q") => { + let num = p["/queries?q=".len()..].parse_query(); + let worlds = state.client.get_worlds(num).await.unwrap(); + json_response(res, state, &worlds) + } + p if p.starts_with("/u") => { + let num = p["/updates?q=".len()..].parse_query(); + let worlds = state.client.update(num).await.unwrap(); + json_response(res, state, &worlds) + } + _ => res.status(StatusCode::NOT_FOUND).header("server", "X").body(&[]), + } +} + +fn json_response<'r, DB, T>(res: Response<'r>, state: &State, val: &T) -> Response<'r, 3> +where + T: serde::Serialize, +{ + let buf = &mut *state.write_buf.borrow_mut(); + serde_json::to_writer(BufMutWriter(buf), val).unwrap(); + let res = res + .status(StatusCode::OK) + .header("content-type", "application/json") + .header("server", "X") + .body(buf.as_ref()); + buf.clear(); + res +} diff --git a/frameworks/Rust/xitca-web/src/ser.rs b/frameworks/Rust/xitca-web/src/ser.rs index 53dcff13cf0..edf9183a8c0 100644 --- a/frameworks/Rust/xitca-web/src/ser.rs +++ b/frameworks/Rust/xitca-web/src/ser.rs @@ -8,7 +8,7 @@ use xitca_http::{ bytes::{BufMutWriter, Bytes}, http::{ self, - const_header_value::{JSON, TEXT, TEXT_HTML_UTF8}, + const_header_value::{JSON, TEXT_HTML_UTF8, TEXT_UTF8}, header::CONTENT_TYPE, IntoResponse as _, RequestExt, StatusCode, }, @@ -33,7 +33,7 @@ impl Message { pub struct Num(pub u16); -#[cfg_attr(feature = "pg-orm", derive(diesel::Queryable))] +#[cfg_attr(any(feature = "pg-orm", feature = "pg-orm-async"), derive(diesel::Queryable))] pub struct World { pub id: i32, pub randomnumber: i32, @@ -46,7 +46,7 @@ impl World { } } -#[cfg_attr(feature = "pg-orm", derive(diesel::Queryable))] +#[cfg_attr(any(feature = "pg-orm", feature = "pg-orm-async"), derive(diesel::Queryable))] pub struct Fortune { pub id: i32, pub message: Cow<'static, str>, @@ -62,16 +62,41 @@ impl Fortune { } } -// TODO: use another template engine with faster compile time.(preferably with no proc macro) -#[cfg_attr( - feature = "template", - derive(sailfish::TemplateOnce), - template(path = "fortune.stpl", rm_whitespace = true) -)] pub struct Fortunes { items: Vec, } +// this is roughly the code generated by sailfish::TemplateOnce macro. +// using the macro does not have any perf cost and this piece of code is expanded manually to speed up compile time of +// bench to reduce resource usage of bench runner +#[cfg(feature = "template")] +impl sailfish::TemplateOnce for Fortunes { + fn render_once(self) -> sailfish::RenderResult { + use sailfish::runtime::{Buffer, Render}; + + const PREFIX: &str = "\n\nFortunes\n\n\n\n"; + const SUFFIX: &str = "\n
<tr><th>id</th><th>message</th></tr>
\n\n"; + + let mut buf = Buffer::with_capacity(1236); + + buf.push_str(PREFIX); + for item in self.items { + buf.push_str(""); + Render::render_escaped(&item.id, &mut buf)?; + buf.push_str(""); + Render::render_escaped(&item.message, &mut buf)?; + buf.push_str(""); + } + buf.push_str(SUFFIX); + + Ok(buf.into_string()) + } + + fn render_once_to(self, _: &mut sailfish::runtime::Buffer) -> Result<(), sailfish::runtime::RenderError> { + unimplemented!("") + } +} + impl Fortunes { #[inline] pub const fn new(items: Vec) -> Self { @@ -188,7 +213,7 @@ impl IntoResponse for Request { fn text_response(self) -> Result { let mut res = self.into_response(const { Bytes::from_static(HELLO_BYTES) }); - res.headers_mut().insert(CONTENT_TYPE, TEXT); + res.headers_mut().insert(CONTENT_TYPE, TEXT_UTF8); Ok(res) } diff --git a/frameworks/Rust/xitca-web/src/util.rs b/frameworks/Rust/xitca-web/src/util.rs index 62888fb6c26..3998a2ef1ec 100755 --- a/frameworks/Rust/xitca-web/src/util.rs +++ b/frameworks/Rust/xitca-web/src/util.rs @@ -10,34 +10,16 @@ pub trait QueryParse { impl QueryParse for Option<&str> { fn parse_query(self) -> u16 { - self.and_then(|this| { - use atoi::FromRadix10; - this.find('q') - .map(|pos| u16::from_radix_10(this.split_at(pos + 2).1.as_ref()).0) - }) - .unwrap_or(1) - .clamp(1, 500) + self.and_then(|q| q.find('q').map(|pos| q.split_at(pos + 2).1.parse_query())) + .unwrap_or(1) } } -pub fn bulk_update_gen(func: F) -> String -where - F: FnOnce(&mut String), -{ - const PREFIX: &str = "UPDATE world SET randomNumber = w.r FROM (VALUES "; - const SUFFIX: &str = ") AS w (i,r) WHERE world.id = w.i"; - - let mut query = String::from(PREFIX); - - func(&mut query); - - if query.ends_with(',') { - query.pop(); +impl QueryParse for &str { + fn parse_query(self) -> u16 { + use atoi::FromRadix10; + u16::from_radix_10(self.as_bytes()).0.clamp(1, 500) } - - query.push_str(SUFFIX); - - query } #[allow(clippy::declare_interior_mutable_const)] @@ -54,8 +36,17 @@ pub struct State { pub write_buf: RefCell, } +impl State { + pub fn new(client: DB) -> Self { + Self { + client, + write_buf: Default::default(), + } + } +} + #[cfg(not(target_arch = "wasm32"))] -mod non_wasm { +pub mod non_wasm { use rand::{rngs::SmallRng, Rng, SeedableRng}; pub struct Rand(SmallRng); @@ -72,37 +63,6 @@ mod non_wasm { self.0.gen_range(1..=10000) } } - - #[cfg(feature = "pg")] - mod pg_state { - use core::{cell::RefCell, future::Future, pin::Pin}; - - use xitca_http::{ - bytes::BytesMut, - util::middleware::context::{Context, ContextBuilder}, - }; - - use crate::{ - db::{self, Client}, - util::{HandleResult, State}, - }; - - pub type Ctx<'a, Req> = Context<'a, Req, State>; - - pub fn context_mw() -> ContextBuilder Pin>>>>> { - ContextBuilder::new(|| { - Box::pin(async { - db::create().await.map(|client| State { - client, - write_buf: RefCell::new(BytesMut::new()), - }) - }) as _ - }) - } - } - - #[cfg(feature = "pg")] - pub use pg_state::*; } #[cfg(not(target_arch = "wasm32"))] diff --git a/frameworks/Rust/xitca-web/xitca-web-axum.dockerfile b/frameworks/Rust/xitca-web/xitca-web-axum.dockerfile deleted file mode 100644 index 3c6271834ea..00000000000 --- a/frameworks/Rust/xitca-web/xitca-web-axum.dockerfile +++ /dev/null @@ -1,10 +0,0 @@ -FROM rust:1.81 - -ADD ./ /xitca-web -WORKDIR /xitca-web - -RUN cargo build --release --bin xitca-web-axum --features axum,io-uring,perf,pg-sync,template - -EXPOSE 8080 - -CMD ./target/release/xitca-web-axum diff --git a/frameworks/Rust/xitca-web/xitca-web-iou.dockerfile 
b/frameworks/Rust/xitca-web/xitca-web-iou.dockerfile deleted file mode 100644 index 461d5740603..00000000000 --- a/frameworks/Rust/xitca-web/xitca-web-iou.dockerfile +++ /dev/null @@ -1,10 +0,0 @@ -FROM rust:1.81 - -ADD ./ /xitca-web -WORKDIR /xitca-web - -RUN cargo build --release --bin xitca-web-iou --features io-uring,perf,pg,template - -EXPOSE 8080 - -CMD ./target/release/xitca-web-iou diff --git a/frameworks/Rust/xitca-web/xitca-web-orm.dockerfile b/frameworks/Rust/xitca-web/xitca-web-orm.dockerfile new file mode 100644 index 00000000000..06e40825b00 --- /dev/null +++ b/frameworks/Rust/xitca-web/xitca-web-orm.dockerfile @@ -0,0 +1,10 @@ +FROM rust:1.81 + +ADD ./ /xitca-web +WORKDIR /xitca-web + +RUN cargo build --release --bin xitca-web-orm --features pg-orm-async,template,web-codegen + +EXPOSE 8080 + +CMD ./target/release/xitca-web-orm diff --git a/frameworks/Rust/xitca-web/xitca-web-unrealistic.dockerfile b/frameworks/Rust/xitca-web/xitca-web-unrealistic.dockerfile new file mode 100644 index 00000000000..f202947acc0 --- /dev/null +++ b/frameworks/Rust/xitca-web/xitca-web-unrealistic.dockerfile @@ -0,0 +1,10 @@ +FROM rust:1.81 + +ADD ./ /xitca-web +WORKDIR /xitca-web + +RUN cargo build --release --bin xitca-web-unrealistic --features perf,pg,template + +EXPOSE 8080 + +CMD ./target/release/xitca-web-unrealistic diff --git a/frameworks/Scala/otavia/otavia-overshoot.dockerfile b/frameworks/Scala/otavia/otavia-overshoot.dockerfile index 10df5b40e71..78a03840454 100644 --- a/frameworks/Scala/otavia/otavia-overshoot.dockerfile +++ b/frameworks/Scala/otavia/otavia-overshoot.dockerfile @@ -9,6 +9,8 @@ EXPOSE 8080 CMD java -server \ -Dcc.otavia.actor.worker.size=64 \ + -Dcc.otavia.buffer.page.size=8 \ + -Dio.netty5.noKeySetOptimization=true \ -jar \ out/benchmark/assembly.dest/out.jar \ jdbc:postgresql://tfb-database:5432/hello_world \ diff --git a/frameworks/Scala/otavia/otavia.dockerfile b/frameworks/Scala/otavia/otavia.dockerfile index 81e16063b03..f1bbd33234c 100644 --- a/frameworks/Scala/otavia/otavia.dockerfile +++ b/frameworks/Scala/otavia/otavia.dockerfile @@ -9,6 +9,8 @@ EXPOSE 8080 CMD java -server \ -Dcc.otavia.actor.worker.size=56 \ + -Dcc.otavia.buffer.page.size=8 \ + -Dio.netty5.noKeySetOptimization=true \ -jar \ out/benchmark/assembly.dest/out.jar \ jdbc:postgresql://tfb-database:5432/hello_world \ diff --git a/frameworks/Scala/zio-http/build.sbt b/frameworks/Scala/zio-http/build.sbt index 987ee8cbbd2..a985d9b5dc9 100644 --- a/frameworks/Scala/zio-http/build.sbt +++ b/frameworks/Scala/zio-http/build.sbt @@ -1,13 +1,14 @@ name := "zio-http" version := "1.0.0" -scalaVersion := "2.13.6" +scalaVersion := "2.13.14" lazy val root = (project in file(".")) .settings( - libraryDependencies ++= - Seq( - "com.github.plokhotnyuk.jsoniter-scala" %% "jsoniter-scala-core" % "2.9.1", - "com.github.plokhotnyuk.jsoniter-scala" %% "jsoniter-scala-macros" % "2.9.1" % "compile-internal", - "io.d11" % "zhttp" % "1.0.0-RC5", - ), + libraryDependencies += "dev.zio" %% "zio-http" % "3.0.0-RC10", testFrameworks += new TestFramework("zio.test.sbt.ZTestFramework"), + assembly / assemblyMergeStrategy := { + case x if x.contains("io.netty.versions.properties") => MergeStrategy.discard + case x => + val oldStrategy = (assembly / assemblyMergeStrategy).value + oldStrategy(x) + } ) diff --git a/frameworks/Scala/zio-http/project/build.properties b/frameworks/Scala/zio-http/project/build.properties index 215ddd2b39d..ee06c398644 100644 --- a/frameworks/Scala/zio-http/project/build.properties +++ 
b/frameworks/Scala/zio-http/project/build.properties @@ -1 +1 @@ -sbt.version = 1.5.5 \ No newline at end of file +sbt.version = 1.10.0 \ No newline at end of file diff --git a/frameworks/Scala/zio-http/project/plugins.sbt b/frameworks/Scala/zio-http/project/plugins.sbt index 585d1930dc6..ec25e7aa776 100644 --- a/frameworks/Scala/zio-http/project/plugins.sbt +++ b/frameworks/Scala/zio-http/project/plugins.sbt @@ -1 +1 @@ -addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "1.0.0") +addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "2.1.0") diff --git a/frameworks/Scala/zio-http/src/main/scala/Main.scala b/frameworks/Scala/zio-http/src/main/scala/Main.scala index b72da4e089c..8af102fd7f6 100644 --- a/frameworks/Scala/zio-http/src/main/scala/Main.scala +++ b/frameworks/Scala/zio-http/src/main/scala/Main.scala @@ -1,42 +1,43 @@ -import zhttp.http._ -import zhttp.service.Server -import zio.{App, ExitCode, URIO} -import com.github.plokhotnyuk.jsoniter_scala.macros._ -import com.github.plokhotnyuk.jsoniter_scala.core._ -import zhttp.http.Response - -import java.time.format.DateTimeFormatter -import java.time.{Instant, ZoneOffset} - -case class Message(message: String) - -object Main extends App { - val message: String = "Hello, World!" - implicit val codec: JsonValueCodec[Message] = JsonCodecMaker.make - - val app: Http[Any, HttpError, Request, Response] = Http.collect[Request] { - case Method.GET -> Root / "plaintext" => - Response.http( - content = HttpContent.Complete(message), - headers = Header.contentTypeTextPlain :: headers(), - ) - case Method.GET -> Root / "json" => - Response.http( - content = HttpContent.Complete(writeToString(Message(message))), - headers = Header.contentTypeJson :: headers(), - ) - } - - override def run(args: List[String]): URIO[zio.ZEnv, ExitCode] = Server.start(8080, app).exitCode - - val formatter: DateTimeFormatter = DateTimeFormatter.RFC_1123_DATE_TIME.withZone(ZoneOffset.UTC) - val constantHeaders: List[Header] = Header("server", "zio-http") :: Nil - @volatile var lastHeaders: (Long, List[Header]) = (0, Nil) - - def headers(): List[Header] = { - val t = System.currentTimeMillis() - if (t - lastHeaders._1 >= 1000) - lastHeaders = (t, Header("date", formatter.format(Instant.ofEpochMilli(t))) :: constantHeaders) - lastHeaders._2 - } -} +import zio._ +import zio.http._ +import zio.http.netty.NettyConfig +import zio.http.netty.NettyConfig.LeakDetectionLevel +import java.lang.{Runtime => JRuntime} + +object Main extends ZIOAppDefault { + + private val plainTextMessage: String = "hello, world!" 
+ private val jsonMessage: String = """{"message": "hello, world!"}""" + + private val STATIC_SERVER_NAME = "zio-http" + private val NUM_PROCESSORS = JRuntime.getRuntime.availableProcessors() + + val app: Routes[Any, Response] = Routes( + Method.GET / "/plaintext" -> + Handler.fromResponse( + Response + .text(plainTextMessage) + .addHeader(Header.Server(STATIC_SERVER_NAME)), + ), + Method.GET / "/json" -> + Handler.fromResponse( + Response + .json(jsonMessage) + .addHeader(Header.Server(STATIC_SERVER_NAME)), + ), + ) + + private val config = Server.Config.default + .port(8080) + .enableRequestStreaming + + private val nettyConfig = NettyConfig.default + .leakDetection(LeakDetectionLevel.DISABLED) + .maxThreads(NUM_PROCESSORS) + + private val configLayer = ZLayer.succeed(config) + private val nettyConfigLayer = ZLayer.succeed(nettyConfig) + + val run: UIO[ExitCode] = + Server.serve(app).provide(configLayer, nettyConfigLayer, Server.customized).exitCode +} \ No newline at end of file diff --git a/frameworks/TypeScript/elysia/bun.lockb b/frameworks/TypeScript/elysia/bun.lockb index 0020cf63f06..5d4cdb0b97a 100755 Binary files a/frameworks/TypeScript/elysia/bun.lockb and b/frameworks/TypeScript/elysia/bun.lockb differ diff --git a/frameworks/TypeScript/elysia/package.json b/frameworks/TypeScript/elysia/package.json index c23716506c8..fa95e553bc3 100644 --- a/frameworks/TypeScript/elysia/package.json +++ b/frameworks/TypeScript/elysia/package.json @@ -12,7 +12,7 @@ "compile": "bun build --compile --minify --target bun --outfile server src/index.ts" }, "dependencies": { - "elysia": "^1.1.12", + "elysia": "^1.1.16", "postgres": "^3.4.4" } } diff --git a/frameworks/Zig/httpz/.gitignore b/frameworks/Zig/httpz/.gitignore new file mode 100644 index 00000000000..170dc0f1403 --- /dev/null +++ b/frameworks/Zig/httpz/.gitignore @@ -0,0 +1,2 @@ +zig-cache/**/*', +zig-out: 'zig-out/**/*', diff --git a/frameworks/Zig/httpz/README.md b/frameworks/Zig/httpz/README.md new file mode 100644 index 00000000000..e83169efe17 --- /dev/null +++ b/frameworks/Zig/httpz/README.md @@ -0,0 +1,25 @@ + +# [Httpz](https://github.com/karlseguin/http.zig) - An HTTP/1.1 server for Zig + +## Description + +Native Zig framework and zig http replacement + +## Test URLs + +### Test 1: JSON Encoding + + http://localhost:3000/json + +### Test 2: Plaintext + + http://localhost:3000/plaintext + +### Test 2: Single Row Query + + http://localhost:3000/db + +### Test 4: Fortunes (Template rendering) + + http://localhost:3000/fortunes + diff --git a/frameworks/Zig/httpz/benchmark_config.json b/frameworks/Zig/httpz/benchmark_config.json new file mode 100644 index 00000000000..e36c9c17a1c --- /dev/null +++ b/frameworks/Zig/httpz/benchmark_config.json @@ -0,0 +1,26 @@ +{ + "framework": "httpz", + "tests": [{ + "default": { + "json_url": "/json", + "plaintext_url": "/plaintext", + "db_url": "/db", + "fortune_url": "/fortunes", + "port": 3000, + "approach": "Realistic", + "classification": "Fullstack", + "database": "Postgres", + "framework": "httpz", + "language": "Zig", + "flavor": "None", + "orm": "raw", + "platform": "None", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "Httpz (Zig)", + "notes": "", + "versus": "" + } + }] +} diff --git a/frameworks/Zig/httpz/build.zig b/frameworks/Zig/httpz/build.zig new file mode 100644 index 00000000000..5978de7c6aa --- /dev/null +++ b/frameworks/Zig/httpz/build.zig @@ -0,0 +1,78 @@ +const std = @import("std"); +const ModuleMap = 
std.StringArrayHashMap(*std.Build.Module); +var gpa = std.heap.GeneralPurposeAllocator(.{}){}; +const allocator = gpa.allocator(); + +// Although this function looks imperative, note that its job is to +// declaratively construct a build graph that will be executed by an external +// runner. +pub fn build(b: *std.Build) !void { + // Standard target options allows the person running `zig build` to choose + // what target to build for. Here we do not override the defaults, which + // means any target is allowed, and the default is native. Other options + // for restricting supported target set are available. + const target = b.standardTargetOptions(.{}); + + // Standard optimization options allow the person running `zig build` to select + // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall. Here we do nots + // set a preferred release mode, allowing the user to decide how to optimize. + const optimize = b.standardOptimizeOption(.{}); + + const dep_opts = .{ .target = target, .optimize = optimize }; + + const exe = b.addExecutable(.{ + .name = "httpz", + // In this case the main source file is merely a path, however, in more + // complicated build scripts, this could be a generated file. + .root_source_file = b.path("src/main.zig"), + .target = target, + .optimize = optimize, + }); + + var modules = ModuleMap.init(allocator); + defer modules.deinit(); + + const httpz_module = b.dependency("httpz", dep_opts).module("httpz"); + const pg_module = b.dependency("pg", dep_opts).module("pg"); + const datetimez_module = b.dependency("datetimez", dep_opts).module("zig-datetime"); + const mustache_module = b.dependency("mustache", dep_opts).module("mustache"); + + try modules.put("httpz", httpz_module); + try modules.put("pg", pg_module); + try modules.put("datetimez", datetimez_module); + try modules.put("mustache", mustache_module); + + // // Expose this as a module that others can import + exe.root_module.addImport("httpz", httpz_module); + exe.root_module.addImport("pg", pg_module); + exe.root_module.addImport("datetimez", datetimez_module); + exe.root_module.addImport("mustache", mustache_module); + + // This declares intent for the executable to be installed into the + // standard location when the user invokes the "install" step (the default + // step when running `zig build`). + b.installArtifact(exe); + + // This *creates* a Run step in the build graph, to be executed when another + // step is evaluated that depends on it. The next line below will establish + // such a dependency. + const run_cmd = b.addRunArtifact(exe); + + // By making the run step depend on the install step, it will be run from the + // installation directory rather than directly from within the cache directory. + // This is not necessary, however, if the application depends on other installed + // files, this ensures they will be present and in the expected location. + run_cmd.step.dependOn(b.getInstallStep()); + + // This allows the user to pass arguments to the application in the build + // command itself, like this: `zig build run -- arg1 arg2 etc` + if (b.args) |args| { + run_cmd.addArgs(args); + } + + // This creates a build step. It will be visible in the `zig build --help` menu, + // and can be selected like this: `zig build run` + // This will evaluate the `run` step rather than the default, which is "install". 
+ const run_step = b.step("run", "Run the app"); + run_step.dependOn(&run_cmd.step); +} diff --git a/frameworks/Zig/httpz/build.zig.zon b/frameworks/Zig/httpz/build.zig.zon new file mode 100644 index 00000000000..58b494c2fe3 --- /dev/null +++ b/frameworks/Zig/httpz/build.zig.zon @@ -0,0 +1,19 @@ +.{ .name = "Zap testing", .version = "0.1.1", .paths = .{ + "build.zig", + "build.zig.zon", + "src", +}, .dependencies = .{ + .pg = .{ .url = "https://github.com/karlseguin/pg.zig/archive/239a4468163a49d8c0d03285632eabe96003e9e2.tar.gz", .hash = "1220a1d7e51e2fa45e547c76a9e099c09d06e14b0b9bfc6baa89367f56f1ded399a0" }, + .httpz = .{ + .url = "git+https://github.com/karlseguin/http.zig?ref=zig-0.13#7d2ddae87af9b110783085c0ea6b03985faa4584", + .hash = "12208c1f2c5f730c4c03aabeb0632ade7e21914af03e6510311b449458198d0835d6", + }, + .datetimez = .{ + .url = "git+https://github.com/frmdstryr/zig-datetime#70aebf28fb3e137cd84123a9349d157a74708721", + .hash = "122077215ce36e125a490e59ec1748ffd4f6ba00d4d14f7308978e5360711d72d77f", + }, + .mustache = .{ + .url = "git+https://github.com/batiati/mustache-zig#ae5ecc1522da983dc39bb0d8b27f5d1b1d7956e3", + .hash = "1220ac9e3316ce71ad9cd66c7f215462bf5c187828b50bb3d386549bf6af004e3bb0", + }, +} } diff --git a/frameworks/Zig/httpz/httpz.dockerfile b/frameworks/Zig/httpz/httpz.dockerfile new file mode 100644 index 00000000000..5257b77ea18 --- /dev/null +++ b/frameworks/Zig/httpz/httpz.dockerfile @@ -0,0 +1,23 @@ +FROM fedora:40 + +WORKDIR /httpz + +ENV PG_USER=benchmarkdbuser +ENV PG_PASS=benchmarkdbpass +ENV PG_DB=hello_world +ENV PG_HOST=tfb-database +ENV PG_PORT=5432 + +COPY src src +COPY build.zig.zon build.zig.zon +COPY build.zig build.zig +COPY run.sh run.sh + +RUN dnf install -y zig +RUN zig version +RUN zig build -Doptimize=ReleaseFast +RUN cp /httpz/zig-out/bin/httpz /usr/local/bin + +EXPOSE 3000 + +CMD ["sh", "run.sh"] \ No newline at end of file diff --git a/frameworks/Zig/httpz/run.sh b/frameworks/Zig/httpz/run.sh new file mode 100644 index 00000000000..582c2ad0228 --- /dev/null +++ b/frameworks/Zig/httpz/run.sh @@ -0,0 +1,3 @@ +echo "Waiting for Httpz framework to start..." + +httpz \ No newline at end of file diff --git a/frameworks/Zig/httpz/src/endpoints.zig b/frameworks/Zig/httpz/src/endpoints.zig new file mode 100644 index 00000000000..0ee22b274de --- /dev/null +++ b/frameworks/Zig/httpz/src/endpoints.zig @@ -0,0 +1,192 @@ +const std = @import("std"); +const httpz = @import("httpz"); +const pg = @import("pg"); +const datetimez = @import("datetimez"); +const mustache = @import("mustache"); + +const Allocator = std.mem.Allocator; +const Thread = std.Thread; +const Mutex = Thread.Mutex; +const template = "Fortunes{{#fortunes}}{{/fortunes}}
<tr><th>id</th><th>message</th></tr>
<tr><td>{{id}}</td><td>{{message}}</td></tr>
"; + +pub const Global = struct { + pool: *pg.Pool, + prng: *std.rand.DefaultPrng, + allocator: Allocator, + mutex: std.Thread.Mutex = .{}, +}; + +const Message = struct { + message: []const u8, +}; + +const World = struct { + id: i32, + randomNumber: i32, +}; + +const Fortune = struct { + id: i32, + message: []const u8, +}; + +pub fn plaintext(global: *Global, _: *httpz.Request, res: *httpz.Response) !void { + try setHeaders(global.allocator, res); + + res.content_type = .TEXT; + res.body = "Hello, World!"; +} + +pub fn json(global: *Global, _: *httpz.Request, res: *httpz.Response) !void { + try setHeaders(global.allocator, res); + + const message = Message{ .message = "Hello, World!" }; + + try res.json(message, .{}); +} + +pub fn db(global: *Global, _: *httpz.Request, res: *httpz.Response) !void { + try setHeaders(global.allocator, res); + + global.mutex.lock(); + const random_number = 1 + (global.prng.random().uintAtMost(u32, 9999)); + global.mutex.unlock(); + + const world = getWorld(global.pool, random_number) catch |err| { + std.debug.print("Error querying database: {}\n", .{err}); + return; + }; + + try res.json(world, .{}); +} + +pub fn fortune(global: *Global, _: *httpz.Request, res: *httpz.Response) !void { + try setHeaders(global.allocator, res); + + const fortunes_html = try getFortunesHtml(global.allocator, global.pool); + + res.header("content-type", "text/html; charset=utf-8"); + res.body = fortunes_html; +} + +fn getWorld(pool: *pg.Pool, random_number: u32) !World{ + var conn = try pool.acquire(); + defer conn.release(); + + const row_result = try conn.row("SELECT id, randomNumber FROM World WHERE id = $1", .{random_number}); + + var row = row_result.?; + defer row.deinit() catch {}; + + return World{ .id = row.get(i32, 0), .randomNumber = row.get(i32, 1) }; +} + +fn setHeaders(allocator: Allocator, res: *httpz.Response) !void { + res.header("Server", "Httpz"); + + const now = datetimez.datetime.Date.now(); + const time = datetimez.datetime.Time.now(); + + // Wed, 17 Apr 2013 12:00:00 GMT + // Return date in ISO format YYYY-MM-DD + const TB_DATE_FMT = "{s:0>3}, {d:0>2} {s:0>3} {d:0>4} {d:0>2}:{d:0>2}:{d:0>2} GMT"; + const now_str = try std.fmt.allocPrint(allocator, TB_DATE_FMT, .{ now.weekdayName()[0..3], now.day, now.monthName()[0..3], now.year, time.hour, time.minute, time.second }); + + //defer allocator.free(now_str); + + res.header("Date", now_str); +} + +fn getFortunesHtml(allocator: Allocator, pool: *pg.Pool) ![]const u8 { + const fortunes = try getFortunes(allocator, pool); + + const raw = try mustache.allocRenderText(allocator, template,.{ .fortunes = fortunes }); + + // std.debug.print("mustache output {s}\n", .{raw}); + + const html = try deescapeHtml(allocator, raw); + + // std.debug.print("html output {s}\n", .{html}); + + return html; +} + +fn getFortunes(allocator: Allocator, pool: *pg.Pool) ![]const Fortune { + var conn = try pool.acquire(); + defer conn.release(); + + var rows = try conn.query("SELECT id, message FROM Fortune", .{}); + defer rows.deinit(); + + var fortunes = std.ArrayList(Fortune).init(allocator); + defer fortunes.deinit(); + + while (try rows.next()) |row| { + const current_fortune = Fortune{ .id = row.get(i32, 0), .message = row.get([]const u8, 1) }; + try fortunes.append(current_fortune); + } + + const zero_fortune = Fortune{ .id = 0, .message = "Additional fortune added at request time." 
}; + try fortunes.append(zero_fortune); + + const fortunes_slice = try fortunes.toOwnedSlice(); + std.mem.sort(Fortune, fortunes_slice, {}, cmpFortuneByMessage); + + return fortunes_slice; +} + +fn cmpFortuneByMessage(_: void, a: Fortune, b: Fortune) bool { + return std.mem.order(u8, a.message, b.message).compare(std.math.CompareOperator.lt); +} + +fn deescapeHtml(allocator: Allocator, input: []const u8) ![]const u8 { + var output = std.ArrayList(u8).init(allocator); + defer output.deinit(); + + var i: usize = 0; + while (i < input.len) { + if (std.mem.startsWith(u8, input[i..], "&#32;")) { + try output.append(' '); + i += 5; + } else if (std.mem.startsWith(u8, input[i..], "&#34;")) { + try output.append('"'); + i += 5; + } else if (std.mem.startsWith(u8, input[i..], "&#38;")) { + try output.append('&'); + i += 5; + } else if (std.mem.startsWith(u8, input[i..], "&#39;")) { + try output.append('\''); + i += 5; + } else if (std.mem.startsWith(u8, input[i..], "&#40;")) { + try output.append('('); + i += 5; + } else if (std.mem.startsWith(u8, input[i..], "&#41;")) { + try output.append(')'); + i += 5; + } else if (std.mem.startsWith(u8, input[i..], "&#43;")) { + try output.append('+'); + i += 5; + } else if (std.mem.startsWith(u8, input[i..], "&#44;")) { + try output.append(','); + i += 5; + } else if (std.mem.startsWith(u8, input[i..], "&#46;")) { + try output.append('.'); + i += 5; + } else if (std.mem.startsWith(u8, input[i..], "&#47;")) { + try output.append('/'); + i += 5; + } else if (std.mem.startsWith(u8, input[i..], "&#58;")) { + try output.append(':'); + i += 5; + } else if (std.mem.startsWith(u8, input[i..], "&#59;")) { + try output.append(';'); + i += 5; + } else { + try output.append(input[i]); + i += 1; + } + } + + return output.toOwnedSlice(); +} + 
diff --git a/frameworks/Zig/httpz/src/main.zig b/frameworks/Zig/httpz/src/main.zig new file mode 100644 index 00000000000..ae2c1a70ac4 --- /dev/null +++ b/frameworks/Zig/httpz/src/main.zig @@ -0,0 +1,71 @@ +const std = @import("std"); +const builtin = @import("builtin"); +const httpz = @import("httpz"); +const pg = @import("pg"); +const datetimez = @import("datetimez"); +const pool = @import("pool.zig"); + +const endpoints = @import("endpoints.zig"); + +const RndGen = std.rand.DefaultPrng; +const Allocator = std.mem.Allocator; +const Pool = pg.Pool; + +var server: httpz.ServerCtx(*endpoints.Global, *endpoints.Global) = undefined; + +pub fn main() !void { + var gpa = std.heap.GeneralPurposeAllocator(.{ + .thread_safe = true, + }){}; + + const allocator = gpa.allocator(); + + var pg_pool = try pool.initPool(allocator); + defer pg_pool.deinit(); + + var prng = std.rand.DefaultPrng.init(@as(u64, @bitCast(std.time.milliTimestamp()))); + + var global = endpoints.Global{ .pool = pg_pool, .prng = &prng, .allocator = allocator }; + + server = try httpz.ServerApp(*endpoints.Global).init(allocator, .{ + .port = 3000, .address = "0.0.0.0", }, &global); + defer server.deinit(); + + // now that our server is up, we register our intent to handle SIGINT + try std.posix.sigaction(std.posix.SIG.INT, &.{ + .handler = .{.handler = shutdown}, + .mask = std.posix.empty_sigset, + .flags = 0, + }, null); + + var router = server.router(); + router.get("/json", endpoints.json); + router.get("/plaintext", endpoints.plaintext); + router.get("/db", endpoints.db); + router.get("/fortunes", endpoints.fortune); + + std.debug.print("Httpz listening at 0.0.0.0:{d}\n", .{3000}); + + try server.listen(); +} + +fn shutdown(_: c_int) callconv(.C) void { + // this will unblock the server.listen() + server.stop(); +} + 
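// Editor's note -- a hedged, standalone sketch (not part of this diff): the
// deescapeHtml chain above undoes mustache-zig's numeric escaping by matching
// five-byte "&#NN;" sequences one branch at a time. The same mapping can be
// expressed as a table, which keeps the cursor arithmetic (advance by 5 on a
// match, by 1 otherwise) in one place. The entity values mirror the ones
// handled above; everything else here is illustrative only.
const std = @import("std");

const Entity = struct { text: []const u8, replacement: u8 };

const entities = [_]Entity{
    .{ .text = "&#32;", .replacement = ' ' },  .{ .text = "&#34;", .replacement = '"' },
    .{ .text = "&#38;", .replacement = '&' },  .{ .text = "&#39;", .replacement = '\'' },
    .{ .text = "&#40;", .replacement = '(' },  .{ .text = "&#41;", .replacement = ')' },
    .{ .text = "&#43;", .replacement = '+' },  .{ .text = "&#44;", .replacement = ',' },
    .{ .text = "&#46;", .replacement = '.' },  .{ .text = "&#47;", .replacement = '/' },
    .{ .text = "&#58;", .replacement = ':' },  .{ .text = "&#59;", .replacement = ';' },
};

fn deescape(allocator: std.mem.Allocator, input: []const u8) ![]const u8 {
    var output = std.ArrayList(u8).init(allocator);
    defer output.deinit();

    var i: usize = 0;
    outer: while (i < input.len) {
        for (entities) |entity| {
            if (std.mem.startsWith(u8, input[i..], entity.text)) {
                try output.append(entity.replacement);
                i += entity.text.len; // every entity in the table is 5 bytes long
                continue :outer;
            }
        }
        // not an escaped sequence: copy the byte through unchanged
        try output.append(input[i]);
        i += 1;
    }

    return output.toOwnedSlice();
}

test "numeric entities collapse back to plain characters" {
    const escaped = "fortune: No such file or directory&#46; Is everything ok&#63;"[0..50] ++ "&#46;";
    _ = escaped; // keep the focus on a minimal, unambiguous case below
    const plain = try deescape(std.testing.allocator, "aren&#39;t&#32;broken&#46;");
    defer std.testing.allocator.free(plain);
    try std.testing.expectEqualStrings("aren't broken.", plain);
}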
+fn notFound(_: *httpz.Request, res: *httpz.Response) !void { + res.status = 404; + + // you can set the body directly to a []u8, but note that the memory + // must be valid beyond your handler. Use the res.arena if you need to allocate + // memory for the body. + res.body = "Not Found"; +} + +// note that the error handler return `void` and not `!void` +fn errorHandler(req: *httpz.Request, res: *httpz.Response, err: anyerror) void { + res.status = 500; + res.body = "Internal Server Error"; + std.log.warn("httpz: unhandled exception for request: {s}\nErr: {}", .{req.url.raw, err}); +} \ No newline at end of file diff --git a/frameworks/Zig/httpz/src/pool.zig b/frameworks/Zig/httpz/src/pool.zig new file mode 100644 index 00000000000..c41cb329540 --- /dev/null +++ b/frameworks/Zig/httpz/src/pool.zig @@ -0,0 +1,87 @@ +const std = @import("std"); +const regex = @import("regex"); +const pg = @import("pg"); + +const Allocator = std.mem.Allocator; +const Pool = pg.Pool; +const ArrayList = std.ArrayList; + +pub fn initPool(allocator: Allocator) !*pg.Pool { + const info = try parsePostgresConnStr(allocator); + //std.debug.print("Connection: {s}:{s}@{s}:{d}/{s}\n", .{ info.username, info.password, info.hostname, info.port, info.database }); + + const pg_pool = try Pool.init(allocator, .{ + .size = 28, + .connect = .{ + .port = info.port, + .host = info.hostname, + }, + .auth = .{ + .username = info.username, + .database = info.database, + .password = info.password, + }, + .timeout = 10_000, + }); + + return pg_pool; +} + +pub const ConnectionInfo = struct { + username: []const u8, + password: []const u8, + hostname: []const u8, + port: u16, + database: []const u8, +}; + +fn addressAsString(address: std.net.Address) ![]const u8 { + const bytes = @as(*const [4]u8, @ptrCast(&address.in.sa.addr)); + + var buffer: [256]u8 = undefined; + var source = std.io.StreamSource{ .buffer = std.io.fixedBufferStream(&buffer) }; + var writer = source.writer(); + + //try writer.writeAll("Hello, World!"); + + try writer.print("{}.{}.{}.{}", .{ + bytes[0], + bytes[1], + bytes[2], + bytes[3], + }); + + const output = source.buffer.getWritten(); + + return output; +} + +fn parsePostgresConnStr(allocator: Allocator) !ConnectionInfo { + const pg_port = try getEnvVar(allocator, "PG_PORT", "5432"); + // std.debug.print("tfb port {s}\n", .{pg_port}); + var port = try std.fmt.parseInt(u16, pg_port, 0); + + if (port == 0) { + port = 5432; + } + + return ConnectionInfo{ + .username = try getEnvVar(allocator, "PG_USER", "benchmarkdbuser"), + .password = try getEnvVar(allocator, "PG_PASS", "benchmarkdbpass"), + .hostname = try getEnvVar(allocator, "PG_HOST", "localhost"), + .port = port, + .database = try getEnvVar(allocator, "PG_DB", "hello_world"), + }; +} + +fn getEnvVar(allocator: Allocator, name: []const u8, default: []const u8) ![]const u8 { + const env_var = std.process.getEnvVarOwned(allocator, name) catch |err| switch (err) { + error.EnvironmentVariableNotFound => return default, + error.OutOfMemory => return err, + error.InvalidWtf8 => return err, + }; + + if (env_var.len == 0) return default; + + return env_var; +} diff --git a/frameworks/Zig/zap/build-nginx-conf.sh b/frameworks/Zig/zap/build-nginx-conf.sh new file mode 100644 index 00000000000..ecb55c80bfa --- /dev/null +++ b/frameworks/Zig/zap/build-nginx-conf.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +CPU_COUNT=$(nproc) +P=3000 +END=$(($P+$CPU_COUNT)) +CONF="" + +while [ $P -lt $END ]; do + CONF+="\t\tserver 127.0.0.1:$P;\n" + let P=P+1 +done + +sed -i "s|# replace|$CONF|g" 
nginx.conf diff --git a/frameworks/Zig/zap/build.zig b/frameworks/Zig/zap/build.zig index 762284c2acb..2da55a07981 100644 --- a/frameworks/Zig/zap/build.zig +++ b/frameworks/Zig/zap/build.zig @@ -40,16 +40,13 @@ pub fn build(b: *std.Build) !void { const zap_module = b.dependency("zap", dep_opts).module("zap"); const pg_module = b.dependency("pg", dep_opts).module("pg"); - const dig_module = b.dependency("dig", dep_opts).module("dns"); try modules.put("zap", zap_module); try modules.put("pg", pg_module); - try modules.put("dig", dig_module); // // Expose this as a module that others can import exe.root_module.addImport("zap", zap_module); exe.root_module.addImport("pg", pg_module); - exe.root_module.addImport("dig", dig_module); exe.linkLibrary(zap.artifact("facil.io")); @@ -80,20 +77,4 @@ pub fn build(b: *std.Build) !void { // This will evaluate the `run` step rather than the default, which is "install". const run_step = b.step("run", "Run the app"); run_step.dependOn(&run_cmd.step); - - // Creates a step for unit testing. This only builds the test executable - // but does not run it. - const unit_tests = b.addTest(.{ - .root_source_file = b.path("src/main.zig"), - .target = target, - .optimize = optimize, - }); - - const run_unit_tests = b.addRunArtifact(unit_tests); - - // Similar to creating the run step earlier, this exposes a `test` step to - // the `zig build --help` menu, providing a way for the user to request - // running the unit tests. - const test_step = b.step("test", "Run unit tests"); - test_step.dependOn(&run_unit_tests.step); } diff --git a/frameworks/Zig/zap/build.zig.zon b/frameworks/Zig/zap/build.zig.zon index 871e73b7f76..3f492b7d954 100644 --- a/frameworks/Zig/zap/build.zig.zon +++ b/frameworks/Zig/zap/build.zig.zon @@ -9,5 +9,4 @@ }, .pg = .{ .url = "https://github.com/karlseguin/pg.zig/archive/239a4468163a49d8c0d03285632eabe96003e9e2.tar.gz", .hash = "1220a1d7e51e2fa45e547c76a9e099c09d06e14b0b9bfc6baa89367f56f1ded399a0" }, - .dig = .{ .url = "https://github.com/lun-4/zigdig/archive/a54c85c26aa83c64ee81e3ee1818890be5cbed0b.tar.gz", .hash = "1220f078ab62d1328339504f9122dc4d241be30ada451628d78b8a3bf5bb9be1dcba" }, } } diff --git a/frameworks/Zig/zap/nginx.conf b/frameworks/Zig/zap/nginx.conf new file mode 100644 index 00000000000..f394b2ad206 --- /dev/null +++ b/frameworks/Zig/zap/nginx.conf @@ -0,0 +1,33 @@ +error_log stderr; +worker_processes auto; + +events { + worker_connections 65535; + multi_accept off; +} + +http { + default_type application/octet-stream; + client_body_temp_path /tmp; + access_log off; + + sendfile on; + tcp_nopush on; + keepalive_requests 100000; + keepalive_timeout 65; + + upstream workers { + # replace + } + + server { + listen 8080; + server_name tfb-server; + + location / { + proxy_http_version 1.1; + proxy_set_header Connection ""; + proxy_pass http://workers; + } + } +} diff --git a/frameworks/Zig/zap/run.sh b/frameworks/Zig/zap/run.sh deleted file mode 100644 index b4698a15de1..00000000000 --- a/frameworks/Zig/zap/run.sh +++ /dev/null @@ -1,3 +0,0 @@ -echo "Waiting for ZAP framework to start..." 
- -zap \ No newline at end of file diff --git a/frameworks/Zig/zap/src/endpoints.zig b/frameworks/Zig/zap/src/endpoints.zig index 3492e686591..44a3afc43e2 100644 --- a/frameworks/Zig/zap/src/endpoints.zig +++ b/frameworks/Zig/zap/src/endpoints.zig @@ -168,20 +168,28 @@ pub const DbEndpoint = struct { } } - // std.debug.print("Attempting to return random: {}\n", .{random_number}); - if (random_number == 0) { return; } - var conn = pool.acquire() catch return; - defer conn.release(); - - const row_result = conn.row("SELECT id, randomNumber FROM World WHERE id = $1", .{random_number}) catch |err| { + const json_to_send = getJson(pool, random_number) catch |err| { std.debug.print("Error querying database: {}\n", .{err}); return; }; + + req.sendBody(json_to_send) catch return; + + return; + } + + fn getJson(pool: *pg.Pool, random_number: u32) ![]const u8{ + var conn = try pool.acquire(); + defer conn.release(); + + const row_result = try conn.row("SELECT id, randomNumber FROM World WHERE id = $1", .{random_number}); + var row = row_result.?; + defer row.deinit() catch {}; const world = World{ .id = row.get(i32, 0), .randomNumber = row.get(i32, 1) }; @@ -193,9 +201,7 @@ pub const DbEndpoint = struct { json_to_send = "null"; } - req.sendBody(json_to_send) catch return; - - return; + return json_to_send; } }; diff --git a/frameworks/Zig/zap/src/main.zig b/frameworks/Zig/zap/src/main.zig index 0c66a7639bb..e2792726f04 100644 --- a/frameworks/Zig/zap/src/main.zig +++ b/frameworks/Zig/zap/src/main.zig @@ -1,8 +1,8 @@ const std = @import("std"); +const builtin = @import("builtin"); const zap = @import("zap"); const pg = @import("pg"); const regex = @import("regex"); -const dns = @import("dns"); const pool = @import("pool.zig"); const endpoints = @import("endpoints.zig"); @@ -23,6 +23,24 @@ pub fn main() !void { const allocator = tsa.allocator(); + var zap_port: []u8 = undefined; + var arg_string = try std.fmt.allocPrint(allocator, "{s}", .{"0"}); + defer allocator.free(arg_string); + + var args = try std.process.argsWithAllocator(allocator); + defer args.deinit(); + while (args.next()) |arg| { + arg_string = try std.fmt.allocPrint(allocator, "{s}", .{arg}); + + zap_port = arg_string; // use arg + } + + var port = try std.fmt.parseInt(u16, zap_port, 0); + + if (port == 0) { + port = 3000; + } + var pg_pool = try pool.initPool(allocator); defer pg_pool.deinit(); @@ -68,7 +86,7 @@ pub fn main() !void { var listener = try zap.Middleware.Listener(middleware.Context).init( .{ .on_request = null, // must be null - .port = 3000, + .port = port, .log = false, .max_clients = 100000, }, @@ -78,13 +96,15 @@ pub fn main() !void { ); try listener.listen(); - const cpuCount = @as(i16, @intCast(std.Thread.getCpuCount() catch 1)); + //const cpuCount = @as(i16, @intCast(std.Thread.getCpuCount() catch 1)); + //const workers = if (builtin.mode == .Debug) 1 else cpuCount; + const threads = 128; - std.debug.print("Listening on 0.0.0.0:3000 on {d} threads\n", .{cpuCount}); + std.debug.print("Listening at 0.0.0.0:{d} on {d} threads\n", .{port, threads}); // start worker threads zap.start(.{ - .threads = 16 * cpuCount, + .threads = threads, .workers = 1, }); } diff --git a/frameworks/Zig/zap/src/pool.zig b/frameworks/Zig/zap/src/pool.zig index 84df32104b0..6615ae217ce 100644 --- a/frameworks/Zig/zap/src/pool.zig +++ b/frameworks/Zig/zap/src/pool.zig @@ -10,7 +10,7 @@ const Regex = regex.Regex; pub fn initPool(allocator: Allocator) !*pg.Pool { const info = try parsePostgresConnStr(allocator); - std.debug.print("Connection: 
{s}:{s}@{s}:{d}/{s}\n", .{ info.username, info.password, info.hostname, info.port, info.database }); + //std.debug.print("Connection: {s}:{s}@{s}:{d}/{s}\n", .{ info.username, info.password, info.hostname, info.port, info.database }); const pg_pool = try Pool.init(allocator, .{ .size = 28, @@ -60,7 +60,7 @@ fn addressAsString(address: std.net.Address) ![]const u8 { fn parsePostgresConnStr(allocator: Allocator) !ConnectionInfo { const pg_port = try getEnvVar(allocator, "PG_PORT", "5432"); - std.debug.print("tfb port {s}\n", .{pg_port}); + // std.debug.print("tfb port {s}\n", .{pg_port}); var port = try std.fmt.parseInt(u16, pg_port, 0); if (port == 0) {
diff --git a/frameworks/Zig/zap/start-servers.sh b/frameworks/Zig/zap/start-servers.sh new file mode 100644 index 00000000000..b5cf175de41 --- /dev/null +++ b/frameworks/Zig/zap/start-servers.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +CPU_COUNT=$(nproc) +P=3000 +END=$(($P+$CPU_COUNT)) + +while [ $P -lt $END ]; do + zap $P & + let P=P+1 +done
diff --git a/frameworks/Zig/zap/zap.dockerfile b/frameworks/Zig/zap/zap.dockerfile index 9aa8a4109ae..71123f4f3e7 100644 --- a/frameworks/Zig/zap/zap.dockerfile +++ b/frameworks/Zig/zap/zap.dockerfile @@ -9,38 +9,22 @@ ENV PG_HOST=tfb-database ENV PG_PORT=5432 COPY src src -COPY run.sh run.sh - COPY build.zig.zon build.zig.zon COPY build.zig build.zig +COPY start-servers.sh start-servers.sh +COPY build-nginx-conf.sh build-nginx-conf.sh +COPY nginx.conf nginx.conf -RUN dnf install -y zig -RUN zig version -# RUN zig build -Doptimize=ReleaseFast -RUN zig build -RUN cp /zap/zig-out/bin/zap /usr/local/bin - -EXPOSE 3000 - -CMD ["sh", "run.sh"] - -# FROM alpine:3.19 +RUN chmod +x start-servers.sh +RUN chmod +x build-nginx-conf.sh -# WORKDIR /zap +RUN ./build-nginx-conf.sh -# ENV PG_USER=benchmarkdbuser -# ENV PG_PASS=benchmarkdbpass -# ENV PG_DB=hello_world -# ENV PG_HOST=tfb-database -# ENV PG_PORT=5432 - -# RUN apk update -# RUN apk add libc6-compat - -# COPY run.sh run.sh - -# COPY --from=build /zap/zig-out/bin/zap /usr/local/bin +RUN dnf install -y zig nginx +RUN zig version +RUN zig build -Doptimize=ReleaseFast +RUN cp /zap/zig-out/bin/zap /usr/local/bin -# EXPOSE 3000 +EXPOSE 8080 -# CMD ["sh", "run.sh"] \ No newline at end of file +CMD ./start-servers.sh && nginx -c /zap/nginx.conf -g "daemon off;" \ No newline at end of file
diff --git a/frameworks/Zig/zinc/.gitignore b/frameworks/Zig/zinc/.gitignore new file mode 100644 index 00000000000..170dc0f1403 --- /dev/null +++ b/frameworks/Zig/zinc/.gitignore @@ -0,0 +1,2 @@ +zig-cache/ +zig-out/
diff --git a/frameworks/Zig/zinc/README.md b/frameworks/Zig/zinc/README.md new file mode 100755 index 00000000000..acf0f93bbcb --- /dev/null +++ b/frameworks/Zig/zinc/README.md @@ -0,0 +1,32 @@ +# [Zinc](https://zinc.zon.dev) web framework + +## Description + +Zinc is a web framework written in pure Zig with a focus on high performance, usability, security, and extensibility.
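// Editor's note -- a hedged sketch (not part of this diff), relating to the
// argv handling added to frameworks/Zig/zap/src/main.zig earlier in this diff:
// start-servers.sh launches each worker as "zap $P", and main() turns that
// argument into a listen port, falling back to 3000. The helper below isolates
// that parsing so it can be tested without spawning a process; the name
// parsePort is illustrative only.
const std = @import("std");

fn parsePort(arg: ?[]const u8) u16 {
    const text = arg orelse return 3000;
    return std.fmt.parseInt(u16, text, 10) catch 3000;
}

// Intended usage inside main(), after skipping argv[0]:
//   var args = try std.process.argsWithAllocator(allocator);
//   defer args.deinit();
//   _ = args.skip();
//   const port = parsePort(args.next());

test "port argument parsing falls back to 3000" {
    try std.testing.expectEqual(@as(u16, 3000), parsePort(null));
    try std.testing.expectEqual(@as(u16, 3000), parsePort("not-a-number"));
    try std.testing.expectEqual(@as(u16, 8081), parsePort("8081"));
}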
+ +* [Documentation](https://zinc.zon.dev/) + +### Some features are: +- **Fast** +- **Custom allocator** +- **Multithreading** +- **Middleware** +- **Routes grouping** +- **Rendering built-in** +- **Extensible** +- **Suite of unit tests** +- **Usability** + +## Important Libraries +The tests were run with: +* [Software](https://zinc.zon.dev/) +* [Example](https://github.com/zon-dev/zinc-examples) + +## Test URLs +### JSON + +http://localhost:8080/json + +### PLAINTEXT + +http://localhost:8080/plaintext diff --git a/frameworks/Zig/zinc/benchmark_config.json b/frameworks/Zig/zinc/benchmark_config.json new file mode 100755 index 00000000000..2e2e874ba2d --- /dev/null +++ b/frameworks/Zig/zinc/benchmark_config.json @@ -0,0 +1,26 @@ +{ + "framework": "zinc", + "tests": [ + { + "default": { + "json_url": "/json", + "plaintext_url": "/plaintext", + "port": 3000, + "approach": "Realistic", + "classification": "Fullstack", + "database": "None", + "framework": "Zinc", + "language": "Zig", + "flavor": "None", + "orm": "None", + "platform": "None", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "Zinc", + "notes": "", + "versus": "None" + } + } + ] +} diff --git a/frameworks/Zig/zinc/build.zig b/frameworks/Zig/zinc/build.zig new file mode 100644 index 00000000000..e5be63940e5 --- /dev/null +++ b/frameworks/Zig/zinc/build.zig @@ -0,0 +1,78 @@ +const std = @import("std"); + +// Although this function looks imperative, note that its job is to +// declaratively construct a build graph that will be executed by an external +// runner. +pub fn build(b: *std.Build) void { + // Standard target options allows the person running `zig build` to choose + // what target to build for. Here we do not override the defaults, which + // means any target is allowed, and the default is native. Other options + // for restricting supported target set are available. + const target = b.standardTargetOptions(.{}); + + // Standard optimization options allow the person running `zig build` to select + // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall. Here we do not + // set a preferred release mode, allowing the user to decide how to optimize. + const optimize = b.standardOptimizeOption(.{}); + + const exe = b.addExecutable(.{ + .name = "zinc", + .root_source_file = b.path("src/main.zig"), + .target = target, + .optimize = optimize, + }); + + const zinc = b.dependency("zinc", .{ + .target = target, + .optimize = optimize, + }); + exe.root_module.addImport("zinc", zinc.module("zinc")); + + const datetime = b.dependency("zig-datetime", .{ + .target = target, + .optimize = optimize, + }); + exe.root_module.addImport("datetime", datetime.module("zig-datetime")); + + // This declares intent for the executable to be installed into the + // standard location when the user invokes the "install" step (the default + // step when running `zig build`). + b.installArtifact(exe); + + // This *creates* a Run step in the build graph, to be executed when another + // step is evaluated that depends on it. The next line below will establish + // such a dependency. + const run_cmd = b.addRunArtifact(exe); + + // By making the run step depend on the install step, it will be run from the + // installation directory rather than directly from within the cache directory. + // This is not necessary, however, if the application depends on other installed + // files, this ensures they will be present and in the expected location. 
+ run_cmd.step.dependOn(b.getInstallStep()); + + // This allows the user to pass arguments to the application in the build + // command itself, like this: `zig build run -- arg1 arg2 etc` + if (b.args) |args| { + run_cmd.addArgs(args); + } + + // This creates a build step. It will be visible in the `zig build --help` menu, + // and can be selected like this: `zig build run` + // This will evaluate the `run` step rather than the default, which is "install". + const run_step = b.step("run", "Run the app"); + run_step.dependOn(&run_cmd.step); + + const exe_unit_tests = b.addTest(.{ + .root_source_file = b.path("src/main.zig"), + .target = target, + .optimize = optimize, + }); + + const run_exe_unit_tests = b.addRunArtifact(exe_unit_tests); + + // Similar to creating the run step earlier, this exposes a `test` step to + // the `zig build --help` menu, providing a way for the user to request + // running the unit tests. + const test_step = b.step("test", "Run unit tests"); + test_step.dependOn(&run_exe_unit_tests.step); +} diff --git a/frameworks/Zig/zinc/build.zig.zon b/frameworks/Zig/zinc/build.zig.zon new file mode 100644 index 00000000000..9a4c8f302cf --- /dev/null +++ b/frameworks/Zig/zinc/build.zig.zon @@ -0,0 +1,27 @@ +.{ + .name = "zinc", + .version = "0.1.0", + .dependencies = .{ + .zinc = .{ + .url = "https://github.com/zon-dev/zinc/archive/refs/tags/0.1.0-beta.5.tar.gz", + .hash = "12201444aa36b4a83f262f319e7c17ccdcff9fbde2efbeb5fc94f1a07eda0d99428e", + }, + .@"zig-datetime" = .{ + .url = "git+https://github.com/frmdstryr/zig-datetime#70aebf28fb3e137cd84123a9349d157a74708721", + .hash = "122077215ce36e125a490e59ec1748ffd4f6ba00d4d14f7308978e5360711d72d77f", + }, + .pg = .{ + .url = "git+https://github.com/karlseguin/pg.zig#21db2306aff657802f9cb10a1e7f8fe9c33e7990", + .hash = "1220df8995ceea78a4a37a505fc779ded75725d0606c33fded26103953524dde1619", + }, + .mustache = .{ + .url = "git+https://github.com/batiati/mustache-zig#ac358646ab9e6123285b90c947ecd40f7966d531", + .hash = "1220cd6e1b49bdd0a568682957dab9a6864554755908f7de990ec7c050f58cf41da2", + }, + }, + .paths = .{ + "build.zig", + "build.zig.zon", + "src", + }, +} diff --git a/frameworks/Zig/zinc/run.sh b/frameworks/Zig/zinc/run.sh new file mode 100644 index 00000000000..639c542fc3e --- /dev/null +++ b/frameworks/Zig/zinc/run.sh @@ -0,0 +1,3 @@ +echo "Waiting for Zinc framework to start..." + +zinc \ No newline at end of file diff --git a/frameworks/Zig/zinc/src/main.zig b/frameworks/Zig/zinc/src/main.zig new file mode 100644 index 00000000000..f06104dc4f1 --- /dev/null +++ b/frameworks/Zig/zinc/src/main.zig @@ -0,0 +1,43 @@ +const std = @import("std"); +const zinc = @import("zinc"); +const Datetime = @import("datetime").datetime.Datetime; + +pub fn main() !void { + var gpa = std.heap.GeneralPurposeAllocator(.{ .thread_safe = true }){}; + defer _ = gpa.deinit(); + const allocator = gpa.allocator(); + + var z = try zinc.init(.{ + .port = 3000, + .allocator = allocator, + .num_threads = 16 * @as(u8, @intCast(std.Thread.getCpuCount() catch 1)), + }); + defer z.deinit(); + + var router = z.getRouter(); + try router.use(&.{setupHeader}); + try router.get("/json", json); + try router.get("/plaintext", plaintext); + + try z.run(); +} + +fn plaintext(ctx: *zinc.Context) anyerror!void { + try ctx.setHeader("Content-Type", "text/plain; charset=utf-8"); + try ctx.setBody("Hello, world!"); +} + +fn json(ctx: *zinc.Context) anyerror!void { + try ctx.json(.{ .message = "Hello, World!" 
}, .{}); +} + +fn setupHeader(ctx: *zinc.Context) anyerror!void { + try ctx.setHeader("Server", "Zinc"); + + const now = Datetime.now(); + const now_str = try now.formatHttp(ctx.allocator); + // defer ctx.allocator.free(now_str); + + // The time is now: Fri, 20 Dec 2019 22:03:02 UTC + try ctx.setHeader("date", now_str); +} diff --git a/frameworks/Zig/zinc/zinc.dockerfile b/frameworks/Zig/zinc/zinc.dockerfile new file mode 100644 index 00000000000..11b64881d2c --- /dev/null +++ b/frameworks/Zig/zinc/zinc.dockerfile @@ -0,0 +1,21 @@ +FROM fedora:40 AS build + +WORKDIR /zinc + +COPY src src +COPY run.sh run.sh + +COPY build.zig.zon build.zig.zon +COPY build.zig build.zig + +RUN dnf install -y zig +RUN zig version +RUN zig build -Doptimize=ReleaseFast +RUN cp /zinc/zig-out/bin/zinc /usr/local/bin + +EXPOSE 3000 +ARG BENCHMARK_ENV +ARG TFB_TEST_DATABASE +ARG TFB_TEST_NAME + +CMD ["sh", "run.sh"] diff --git a/toolset/benchmark/benchmarker.py b/toolset/benchmark/benchmarker.py index 84e6be29912..8b5ce49d65b 100644 --- a/toolset/benchmark/benchmarker.py +++ b/toolset/benchmark/benchmarker.py @@ -1,3 +1,6 @@ +import threading + +from docker.models.containers import Container from toolset.utils.output_helper import log, FNULL from toolset.utils.docker_helper import DockerHelper from toolset.utils.time_logger import TimeLogger @@ -263,12 +266,6 @@ def benchmark_type(test_type): log("BENCHMARKING %s ... " % test_type.upper(), file=benchmark_log) test = framework_test.runTests[test_type] - raw_file = self.results.get_raw_file(framework_test.name, - test_type) - if not os.path.exists(raw_file): - # Open to create the empty file - with open(raw_file, 'w'): - pass if not test.failed: # Begin resource usage metrics collection @@ -281,8 +278,8 @@ def benchmark_type(test_type): framework_test.port, test.get_url())) - self.docker_helper.benchmark(script, script_variables, - raw_file) + benchmark_container = self.docker_helper.benchmark(script, script_variables) + self.__log_container_output(benchmark_container, framework_test, test_type) # End resource usage metrics collection self.__end_logging() @@ -323,3 +320,31 @@ def __end_logging(self): self.subprocess_handle.terminate() self.subprocess_handle.communicate() + def __log_container_output(self, container: Container, framework_test, test_type) -> None: + def save_docker_logs(stream): + raw_file_path = self.results.get_raw_file(framework_test.name, test_type) + with open(raw_file_path, 'w') as file: + for line in stream: + log(line.decode(), file=file) + + def save_docker_stats(stream): + docker_file_path = self.results.get_docker_stats_file(framework_test.name, test_type) + with open(docker_file_path, 'w') as file: + file.write('[\n') + is_first_line = True + for line in stream: + if is_first_line: + is_first_line = False + else: + file.write(',') + file.write(line.decode()) + file.write(']') + + threads = [ + threading.Thread(target=lambda: save_docker_logs(container.logs(stream=True))), + threading.Thread(target=lambda: save_docker_stats(container.stats(stream=True))) + ] + + [thread.start() for thread in threads] + [thread.join() for thread in threads] + diff --git a/toolset/databases/postgres/postgres.dockerfile b/toolset/databases/postgres/postgres.dockerfile index 757d3ba79e8..aa2bcebf01d 100644 --- a/toolset/databases/postgres/postgres.dockerfile +++ b/toolset/databases/postgres/postgres.dockerfile @@ -1,4 +1,4 @@ -FROM postgres:16-bookworm +FROM postgres:17-bookworm ENV PGDATA=/ssd/postgresql \ POSTGRES_DB=hello_world \ diff --git 
a/toolset/utils/docker_helper.py b/toolset/utils/docker_helper.py index 1f3ea692ee1..e48a910e99d 100644 --- a/toolset/utils/docker_helper.py +++ b/toolset/utils/docker_helper.py @@ -420,16 +420,11 @@ def server_container_exists(self, container_id_or_name): except: return False - def benchmark(self, script, variables, raw_file): + def benchmark(self, script, variables): ''' Runs the given remote_script on the wrk container on the client machine. ''' - def watch_container(container): - with open(raw_file, 'w') as benchmark_file: - for line in container.logs(stream=True): - log(line.decode(), file=benchmark_file) - if self.benchmarker.config.network_mode is None: sysctl = {'net.core.somaxconn': 65535} else: @@ -438,8 +433,7 @@ def watch_container(container): ulimit = [{'name': 'nofile', 'hard': 65535, 'soft': 65535}] - watch_container( - self.client.containers.run( + return self.client.containers.run( "techempower/tfb.wrk", "/bin/bash /%s" % script, environment=variables, @@ -450,4 +444,4 @@ def watch_container(container): ulimits=ulimit, sysctls=sysctl, remove=True, - log_config={'type': None})) + log_config={'type': None})
diff --git a/toolset/utils/results.py b/toolset/utils/results.py index 7b745a90bc0..0df26636404 100644 --- a/toolset/utils/results.py +++ b/toolset/utils/results.py @@ -212,29 +212,34 @@ def load(self): except (ValueError, IOError): pass + def __make_dir_for_file(self, test_name: str, test_type: str, file_name: str): + path = os.path.join(self.directory, test_name, test_type, file_name) + try: + os.makedirs(os.path.dirname(path), exist_ok=True) + except OSError: + pass + return path + + def get_docker_stats_file(self, test_name, test_type): + ''' + Returns the Docker stats file name for this test_name and test_type + Example: fw_root/results/timestamp/test_type/test_name/docker_stats.json + ''' + return self.__make_dir_for_file(test_name, test_type, "docker_stats.json") + def get_raw_file(self, test_name, test_type): ''' Returns the output file for this test_name and test_type Example: fw_root/results/timestamp/test_type/test_name/raw.txt ''' - path = os.path.join(self.directory, test_name, test_type, "raw.txt") - try: - os.makedirs(os.path.dirname(path)) - except OSError: - pass - return path + return self.__make_dir_for_file(test_name, test_type, "raw.txt") def get_stats_file(self, test_name, test_type): ''' Returns the stats file name for this test_name and Example: fw_root/results/timestamp/test_type/test_name/stats.txt ''' - path = os.path.join(self.directory, test_name, test_type, "stats.txt") - try: - os.makedirs(os.path.dirname(path)) - except OSError: - pass - return path + return self.__make_dir_for_file(test_name, test_type, "stats.txt") def report_verify_results(self, framework_test, test_type, result): '''