From 975036a224210626cc5cc92b4205f5c238cc4ada Mon Sep 17 00:00:00 2001
From: Markus Hennerbichler
Date: Sun, 7 Jan 2024 17:20:22 +0000
Subject: [PATCH] Fixes for Clang 15 warnings

I was trying to compile Triton server with default Clang 15 and got a few
warnings that are promoted to errors, among them:

* Deprecation errors from OpenTelemetry, which shouldn't be an issue here.
  They show up because the headers aren't included as "SYSTEM" headers.
* An unused parameter in TritonParser::SetGlobalTraceArgs():
  `explicit_disable_trace` is passed as a plain bool that is updated but
  never read. I believe this was intended to be a `bool&`, @oandreeva-nv?
  (error: parameter 'explicit_disable_trace' set but not used
  [-Werror,-Wunused-but-set-parameter]) The first sketch after this list
  illustrates the by-reference fix.
* An unused `[this]` lambda capture (-Wunused-lambda-capture).
* Assignment with `std::move` of a temporary, resulting in:
  `error: moving a temporary object prevents copy elision
  [-Werror,-Wpessimizing-move]`
  I am not entirely sure why Clang treats this as pessimizing, but the
  `std::move` is redundant here in any case, since the call already yields a
  temporary; see the second sketch after this list.
* A few conditionally uninitialised variables in the test backends, e.g.:
  variable 'ba_memory_type' is used uninitialized whenever 'if' condition
  is false [-Werror,-Wsometimes-uninitialized]
  The third sketch after this list shows the pattern.
* An unused private field in `HTTPAPIServer`: `bool end_{false};`
  (error: private field 'end_' is not used [-Werror,-Wunused-private-field])
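First, a minimal standalone sketch of the -Wunused-but-set-parameter fix. This
is not Triton code: `SetTraceSetting()` and its arguments are hypothetical
stand-ins for `TritonParser::SetGlobalTraceArgs()`, assuming the intent is
that the flag should be reported back to the caller.

```cpp
#include <iostream>
#include <string>

// With a by-value `bool explicit_disable_trace`, the assignment below only
// touches a local copy that is never read again, so Clang 15 reports
// -Wunused-but-set-parameter. Taking the flag by reference makes the write
// observable by the caller.
void
SetTraceSetting(const std::string& value, bool& explicit_disable_trace)
{
  if (value == "OFF") {
    explicit_disable_trace = true;  // now propagated back to the caller
  }
}

int
main()
{
  bool explicit_disable_trace = false;
  SetTraceSetting("OFF", explicit_disable_trace);
  std::cout << std::boolalpha << explicit_disable_trace << "\n";  // prints "true"
  return 0;
}
```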
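Second, a minimal sketch of the -Wpessimizing-move case. Again not Triton
code: `Trace` and `SampleTrace()` here are made-up stand-ins for a function
that returns a smart pointer by value, which is the situation the quoted
error describes.

```cpp
#include <memory>
#include <utility>

struct Trace {};

// Hypothetical stand-in for a SampleTrace()-style factory: it returns by
// value, so the call expression is already a temporary (prvalue).
std::shared_ptr<Trace>
SampleTrace()
{
  return std::make_shared<Trace>();
}

int
main()
{
  // Initialization: wrapping the call in std::move would defeat copy
  // elision, which is exactly what -Wpessimizing-move complains about:
  //   std::shared_ptr<Trace> trace = std::move(SampleTrace());
  std::shared_ptr<Trace> trace = SampleTrace();  // constructed in place

  // Assignment: the temporary is move-assigned either way, so the
  // std::move adds nothing there either:
  //   trace = std::move(SampleTrace());
  trace = SampleTrace();
  return 0;
}
```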
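Third, a minimal sketch of the -Wsometimes-uninitialized pattern fixed in the
test backends. The surrounding code is made up; only the shape of the problem
(write on one branch, unconditional read later) mirrors the tests.

```cpp
#include <cstddef>
#include <cstdio>

int
main(int argc, char**)
{
  // Without the initializer, `byte_size` is only written when the `if`
  // below is taken, and the printf afterwards reads an indeterminate value
  // on the other path -- what Clang 15 reports as "used uninitialized
  // whenever 'if' condition is false" (-Wsometimes-uninitialized).
  // Initializing at declaration, as done for ba_memory_type, ba_byte_size
  // and the buffer pointers in the tests, resolves it.
  size_t byte_size = 0;

  if (argc > 1) {
    byte_size = 512;
  }

  std::printf("byte_size=%zu\n", byte_size);
  return 0;
}
```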
---
 src/CMakeLists.txt                            |  6 +++---
 src/command_line_parser.cc                    |  2 +-
 src/command_line_parser.h                     |  2 +-
 src/grpc/CMakeLists.txt                       |  2 +-
 src/grpc/grpc_server.cc                       |  2 +-
 src/grpc/infer_handler.cc                     |  2 +-
 src/grpc/stream_infer_handler.cc              |  2 +-
 src/http_server.cc                            |  2 +-
 src/http_server.h                             |  1 -
 src/test/dyna_sequence/src/dyna_sequence.cc   |  2 +-
 src/test/implicit_state/src/implicit_state.cc | 12 ++++++------
 11 files changed, 17 insertions(+), 18 deletions(-)

diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index fe9be17cbb..6658244253 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -244,7 +244,7 @@ if(${TRITON_ENABLE_TRACING})
   if (NOT WIN32)
     target_include_directories(
       main
-      PRIVATE
+      SYSTEM PRIVATE
         ${OPENTELEMETRY_CPP_INCLUDE_DIRS}
     )
   endif()
@@ -397,7 +397,7 @@ if(${TRITON_ENABLE_HTTP}
   if (NOT WIN32 AND ${TRITON_ENABLE_TRACING})
     target_include_directories(
       http-endpoint-library
-      PRIVATE ${OPENTELEMETRY_CPP_INCLUDE_DIRS}
+      SYSTEM PRIVATE ${OPENTELEMETRY_CPP_INCLUDE_DIRS}
     )
   endif()
 
@@ -512,7 +512,7 @@ if(${TRITON_ENABLE_TRACING})
 
   target_include_directories(
     tracing-library
-    PRIVATE ${OPENTELEMETRY_CPP_INCLUDE_DIRS}
+    SYSTEM PRIVATE ${OPENTELEMETRY_CPP_INCLUDE_DIRS}
   )
 
   target_link_libraries(
diff --git a/src/command_line_parser.cc b/src/command_line_parser.cc
index 208dc3cf22..0e54c040f5 100644
--- a/src/command_line_parser.cc
+++ b/src/command_line_parser.cc
@@ -2134,7 +2134,7 @@ void
 TritonParser::SetGlobalTraceArgs(
     TritonServerParameters& lparams, bool trace_level_present,
     bool trace_rate_present, bool trace_count_present,
-    bool explicit_disable_trace)
+    bool& explicit_disable_trace)
 {
   for (const auto& global_setting : lparams.trace_config_map_[""]) {
     try {
diff --git a/src/command_line_parser.h b/src/command_line_parser.h
index 707b0703dd..c3151d6174 100644
--- a/src/command_line_parser.h
+++ b/src/command_line_parser.h
@@ -298,7 +298,7 @@ class TritonParser {
   void SetGlobalTraceArgs(
       TritonServerParameters& lparams, bool trace_level_present,
       bool trace_rate_present, bool trace_count_present,
-      bool explicit_disable_trace);
+      bool& explicit_disable_trace);
   void SetTritonTraceArgs(
       TritonServerParameters& lparams, bool trace_filepath_present,
       bool trace_log_frequency_present);
diff --git a/src/grpc/CMakeLists.txt b/src/grpc/CMakeLists.txt
index 1308b6e1b2..d9b95f6add 100644
--- a/src/grpc/CMakeLists.txt
+++ b/src/grpc/CMakeLists.txt
@@ -85,7 +85,7 @@ target_include_directories(
 
 if (NOT WIN32 AND ${TRITON_ENABLE_TRACING})
   target_include_directories(
     grpc-endpoint-library
-    PRIVATE ${OPENTELEMETRY_CPP_INCLUDE_DIRS}
+    SYSTEM PRIVATE ${OPENTELEMETRY_CPP_INCLUDE_DIRS}
   )
 endif()
diff --git a/src/grpc/grpc_server.cc b/src/grpc/grpc_server.cc
index ebe53c82e0..9951ed523d 100644
--- a/src/grpc/grpc_server.cc
+++ b/src/grpc/grpc_server.cc
@@ -1501,7 +1501,7 @@ CommonHandler::RegisterLogging()
         ctx, request, responder, this->cq_, this->cq_, tag);
   };
 
-  auto OnExecuteLogging = [this](
+  auto OnExecuteLogging = [](
                               inference::LogSettingsRequest& request,
                               inference::LogSettingsResponse* response,
                               ::grpc::Status* status) {
diff --git a/src/grpc/infer_handler.cc b/src/grpc/infer_handler.cc
index 30d93fa4f9..3aa1177d6e 100644
--- a/src/grpc/infer_handler.cc
+++ b/src/grpc/infer_handler.cc
@@ -917,7 +917,7 @@ ModelInferHandler::Execute(InferHandler::State* state)
   TRITONSERVER_InferenceTrace* triton_trace = nullptr;
 #ifdef TRITON_ENABLE_TRACING
   state->trace_ =
-      std::move(trace_manager_->SampleTrace(request.model_name()));
+      trace_manager_->SampleTrace(request.model_name());
   if (state->trace_ != nullptr) {
     triton_trace = state->trace_->trace_;
   }
diff --git a/src/grpc/stream_infer_handler.cc b/src/grpc/stream_infer_handler.cc
index 9c162ad644..50f3bcfd22 100644
--- a/src/grpc/stream_infer_handler.cc
+++ b/src/grpc/stream_infer_handler.cc
@@ -310,7 +310,7 @@ ModelStreamInferHandler::Process(InferHandler::State* state, bool rpc_ok)
   TRITONSERVER_InferenceTrace* triton_trace = nullptr;
 #ifdef TRITON_ENABLE_TRACING
   state->trace_ =
-      std::move(trace_manager_->SampleTrace(request.model_name()));
+      trace_manager_->SampleTrace(request.model_name());
   if (state->trace_ != nullptr) {
     triton_trace = state->trace_->trace_;
   }
diff --git a/src/http_server.cc b/src/http_server.cc
index 647c6d83de..b493cf6285 100644
--- a/src/http_server.cc
+++ b/src/http_server.cc
@@ -3056,7 +3056,7 @@ HTTPAPIServer::StartTrace(
 {
 #ifdef TRITON_ENABLE_TRACING
   std::shared_ptr trace;
-  trace = std::move(trace_manager_->SampleTrace(model_name));
+  trace = trace_manager_->SampleTrace(model_name);
 
   if (trace != nullptr) {
     *triton_trace = trace->trace_;
diff --git a/src/http_server.h b/src/http_server.h
index 10a9ed6388..4a14294d10 100644
--- a/src/http_server.h
+++ b/src/http_server.h
@@ -367,7 +367,6 @@ class HTTPAPIServer : public HTTPServer {
     // ensure mutual exclusive access.
     std::mutex res_mtx_;
     std::queue pending_http_responses_;
-    bool end_{false};
   };
 
 // Simple structure that carries the userp payload needed for
diff --git a/src/test/dyna_sequence/src/dyna_sequence.cc b/src/test/dyna_sequence/src/dyna_sequence.cc
index 91f83db7c9..dec40c58c9 100644
--- a/src/test/dyna_sequence/src/dyna_sequence.cc
+++ b/src/test/dyna_sequence/src/dyna_sequence.cc
@@ -681,7 +681,7 @@ TRITONBACKEND_ModelInstanceExecute(
           responses, r,
           TRITONBACKEND_RequestCorrelationId(request, &correlation_id));
     } else if (model_state->CorrelationIdType() == "TYPE_STRING") {
-      const char* correlation_id_str;
+      const char* correlation_id_str = "";
       GUARDED_RESPOND_IF_ERROR(
           responses, r,
           TRITONBACKEND_RequestCorrelationIdString(
diff --git a/src/test/implicit_state/src/implicit_state.cc b/src/test/implicit_state/src/implicit_state.cc
index 7def94934c..7fa2d20e79 100644
--- a/src/test/implicit_state/src/implicit_state.cc
+++ b/src/test/implicit_state/src/implicit_state.cc
@@ -939,9 +939,9 @@ TRITONBACKEND_ModelInstanceExecute(
             response_state, &buffer_attributes));
 
     // Testing for the StateBuffer attributes
-    TRITONSERVER_MemoryType ba_memory_type;
-    int64_t ba_memory_type_id;
-    size_t ba_byte_size;
+    TRITONSERVER_MemoryType ba_memory_type = TRITONSERVER_MEMORY_CPU;
+    int64_t ba_memory_type_id = 0;
+    size_t ba_byte_size = 0;
 
     GUARDED_RESPOND_IF_ERROR(
         responses, r, request,
@@ -1046,7 +1046,7 @@ TRITONBACKEND_ModelInstanceExecute(
     }
     TRITONSERVER_MemoryType actual_memory_type = TRITONSERVER_MEMORY_CPU;
     int64_t actual_memory_type_id = 0;
-    char* buffer;
+    char* buffer = nullptr;
 
     // Request an output buffer in GPU. This is only for testing purposes
     // to make sure that GPU output buffers can be requested.
@@ -1084,7 +1084,7 @@ TRITONBACKEND_ModelInstanceExecute(
     }
     TRITONSERVER_MemoryType actual_memory_type = TRITONSERVER_MEMORY_GPU;
     int64_t actual_memory_type_id = 0;
-    char* buffer;
+    char* buffer = nullptr;
 
     // Request an output buffer in GPU. This is only for testing purposes
     // to make sure that GPU output buffers can be requested.
@@ -1111,7 +1111,7 @@ TRITONBACKEND_ModelInstanceExecute(
     actual_memory_type = TRITONSERVER_MEMORY_CPU;
     actual_memory_type_id = 0;
 
-    char* output_buffer;
+    char* output_buffer = nullptr;
     GUARDED_RESPOND_IF_ERROR(
         responses, r, request,
         TRITONBACKEND_OutputBuffer(