diff --git a/logd/SerializedLogBuffer.cpp b/logd/SerializedLogBuffer.cpp
index acd093b25..fa90878a8 100644
--- a/logd/SerializedLogBuffer.cpp
+++ b/logd/SerializedLogBuffer.cpp
@@ -113,8 +113,8 @@ void SerializedLogBuffer::MaybePrune(log_id_t log_id) {
     if (total_size > max_size_[log_id]) {
         Prune(log_id, total_size - max_size_[log_id], 0);
         after_size = GetSizeUsed(log_id);
-        LOG(INFO) << "Pruned Logs from log_id: " << log_id << ", previous size: " << total_size
-                  << " after size: " << after_size;
+        LOG(VERBOSE) << "Pruned Logs from log_id: " << log_id << ", previous size: " << total_size
+                     << " after size: " << after_size;
     }
 
     stats_->set_overhead(log_id, after_size);
diff --git a/logd/SerializedLogChunk.cpp b/logd/SerializedLogChunk.cpp
index e4d89451d..1ffe7a8f7 100644
--- a/logd/SerializedLogChunk.cpp
+++ b/logd/SerializedLogChunk.cpp
@@ -27,8 +27,9 @@ SerializedLogChunk::~SerializedLogChunk() {
 void SerializedLogChunk::Compress() {
     CHECK_EQ(compressed_log_.size(), 0U);
     CompressionEngine::GetInstance().Compress(contents_, write_offset_, compressed_log_);
-    LOG(INFO) << "Compressed Log, buffer max size: " << contents_.size()
-              << " size used: " << write_offset_ << " compressed size: " << compressed_log_.size();
+    LOG(VERBOSE) << "Compressed Log, buffer max size: " << contents_.size()
+                 << " size used: " << write_offset_
+                 << " compressed size: " << compressed_log_.size();
 }
 
 // TODO: Develop a better reference counting strategy to guard against the case where the writer is
@@ -111,4 +112,4 @@ SerializedLogEntry* SerializedLogChunk::Log(uint64_t sequence, log_time realtime
     write_offset_ += entry->total_len();
    highest_sequence_number_ = sequence;
     return entry;
-}
\ No newline at end of file
+}
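
Note: both messages move from INFO to VERBOSE severity. With libbase logging (android-base/logging.h), VERBOSE sits below the default minimum severity of INFO, so these per-prune and per-compression messages are filtered out at runtime unless the threshold is lowered. Below is a minimal sketch of that filtering behavior, using a hypothetical standalone program (not part of logd) linked against libbase:

    #include <android-base/logging.h>

    int main(int, char* argv[]) {
        android::base::InitLogging(argv);

        // The default minimum severity is INFO, so this message is dropped.
        LOG(VERBOSE) << "suppressed by default";

        // Lowering the threshold (e.g. while debugging log pruning locally)
        // makes VERBOSE messages visible again.
        android::base::SetMinimumLogSeverity(android::base::VERBOSE);
        LOG(VERBOSE) << "now emitted";
        return 0;
    }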