Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fixes for Clang 15 warnings #310

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion src/backend_model_instance.h
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,6 @@ class TritonModelInstance {
std::deque<TritonModelInstance*> model_instances_;

std::thread backend_thread_;
std::atomic<bool> backend_thread_exit_;
};

struct WarmupData {
Expand Down
1 change: 1 addition & 0 deletions src/filesystem/implementations/common.h
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,7 @@ class FileSystem {
virtual Status MakeTemporaryDirectory(
std::string dir_path, std::string* temp_dir) = 0;
virtual Status DeletePath(const std::string& path) = 0;
virtual ~FileSystem() = default;
};

// Helper function to take care of lack of trailing slashes
Expand Down
8 changes: 4 additions & 4 deletions src/infer_request.cc
Original file line number Diff line number Diff line change
Expand Up @@ -1644,8 +1644,8 @@ operator<<(std::ostream& out, const InferenceRequest::State& state)

bool
operator==(
const InferenceRequest::SequenceId lhs,
const InferenceRequest::SequenceId rhs)
const InferenceRequest::SequenceId& lhs,
const InferenceRequest::SequenceId& rhs)
{
if (lhs.Type() == rhs.Type()) {
switch (lhs.Type()) {
Expand All @@ -1663,8 +1663,8 @@ operator==(

bool
operator!=(
const InferenceRequest::SequenceId lhs,
const InferenceRequest::SequenceId rhs)
const InferenceRequest::SequenceId& lhs,
const InferenceRequest::SequenceId& rhs)
{
return !(lhs == rhs);
}
Expand Down
15 changes: 5 additions & 10 deletions src/infer_request.h
Original file line number Diff line number Diff line change
Expand Up @@ -257,6 +257,7 @@ class InferenceRequest {
SequenceId();
SequenceId(const std::string& sequence_label);
SequenceId(uint64_t sequence_index);
SequenceId(const SequenceId& other) = default;
SequenceId& operator=(const SequenceId& rhs) = default;
SequenceId& operator=(const std::string& rhs);
SequenceId& operator=(const uint64_t rhs);
Expand All @@ -275,8 +276,8 @@ class InferenceRequest {
private:
friend std::ostream& operator<<(
std::ostream& out, const InferenceRequest::SequenceId& correlation_id);
friend bool operator==(const SequenceId lhs, const SequenceId rhs);
friend bool operator!=(const SequenceId lhs, const SequenceId rhs);
friend bool operator==(const SequenceId& lhs, const SequenceId& rhs);
friend bool operator!=(const SequenceId& lhs, const SequenceId& rhs);

std::string sequence_label_;
uint64_t sequence_index_;
Expand Down Expand Up @@ -766,7 +767,6 @@ class InferenceRequest {
// The model version as requested and based on version policy the
// specific version that is actually used for inference.
int64_t requested_model_version_;
int64_t actual_model_version_;

std::string id_;

Expand Down Expand Up @@ -815,11 +815,6 @@ class InferenceRequest {
uint64_t cache_lookup_start_ns_;
uint64_t cache_lookup_end_ns_;

// Cache insertion start/end timestamps. Cache manages its own stats even
// when statistics are not being collected.
uint64_t cache_insertion_start_ns_;
uint64_t cache_insertion_end_ns_;

// Dedicated timestamp for batcher internal which can diverge from
// queue start timestamp to provide accurate queue time without affecting
// batcher functionalities.
Expand Down Expand Up @@ -864,8 +859,8 @@ std::ostream& operator<<(
std::ostream& operator<<(
std::ostream& out, const InferenceRequest::SequenceId& sequence_id);
bool operator==(
const InferenceRequest::SequenceId lhs,
const InferenceRequest::SequenceId rhs);
const InferenceRequest::SequenceId& lhs,
const InferenceRequest::SequenceId& rhs);
}} // namespace triton::core

namespace std {
Expand Down
1 change: 1 addition & 0 deletions src/memory.h
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ class Memory {
// Return the total byte size of the data buffer
size_t TotalByteSize() const { return total_byte_size_; }

virtual ~Memory() = default;
protected:
Memory() : total_byte_size_(0), buffer_count_(0) {}
size_t total_byte_size_;
Expand Down
2 changes: 1 addition & 1 deletion src/model_repository_manager/model_repository_manager.cc
Original file line number Diff line number Diff line change
Expand Up @@ -373,7 +373,7 @@ ModelRepositoryManager::ModelRepositoryManager(
return FindModelIdentifier(n, i);
};
} else {
find_identifier_fn_ = [this](const std::string& n, ModelIdentifier* i) {
find_identifier_fn_ = [](const std::string& n, ModelIdentifier* i) {
return Status::Success;
};
}
Expand Down
2 changes: 1 addition & 1 deletion src/sequence_batch_scheduler/sequence_batch_scheduler.cc
Original file line number Diff line number Diff line change
Expand Up @@ -1447,7 +1447,7 @@ DirectSequenceBatch::DirectSequenceBatch(
const int nice = 0;
NewPayload();
scheduler_thread_.reset(
new std::thread([this, nice]() { BatcherThread(nice); }));
new std::thread([this]() { BatcherThread(nice); }));

*is_initialized = true;
}
Expand Down
1 change: 1 addition & 0 deletions src/sequence_batch_scheduler/sequence_batch_scheduler.h
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,7 @@ class SequenceBatchScheduler : public Scheduler {
struct BatcherSequenceSlot {
BatcherSequenceSlot() = default;
BatcherSequenceSlot(const BatcherSequenceSlot&) = default;
BatcherSequenceSlot& operator=(const BatcherSequenceSlot&) = default;
BatcherSequenceSlot(TritonModelInstance* i, uint32_t s)
: model_instance_(i), seq_slot_(s)
{
Expand Down
1 change: 1 addition & 0 deletions src/sequence_batch_scheduler/sequence_utils.h
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@ class Sequencer {
// Sequencer will not reschedule requests
return Status::Success;
}
virtual ~Sequencer() = default;
};

class IterativeSequencer : public Sequencer {
Expand Down
2 changes: 1 addition & 1 deletion src/tritonserver.cc
Original file line number Diff line number Diff line change
Expand Up @@ -420,7 +420,7 @@ TritonServerOptions::AddRateLimiterResource(
}
auto ritr = ditr->second.find(name);
if (ritr == ditr->second.end()) {
ditr->second.emplace(name, count).first;
ditr->second.emplace(name, count);
} else {
// If already present then store the minimum of the two.
if (ritr->second > count) {
Expand Down