Merge "logd: drop logs about pruning and compression to VERBOSE" am: 8ba33eb34b

Original change: https://android-review.googlesource.com/c/platform/system/core/+/1440264

Change-Id: I5cdac4ac3d3ab4171c890a544c93a0b8437ad667
This commit is contained in:
Tom Cherry 2020-10-01 18:19:08 +00:00 committed by Automerger Merge Worker
commit b48cfffa61
2 changed files with 6 additions and 5 deletions

View file

@@ -113,8 +113,8 @@ void SerializedLogBuffer::MaybePrune(log_id_t log_id) {
     if (total_size > max_size_[log_id]) {
         Prune(log_id, total_size - max_size_[log_id], 0);
         after_size = GetSizeUsed(log_id);
-        LOG(INFO) << "Pruned Logs from log_id: " << log_id << ", previous size: " << total_size
-                  << " after size: " << after_size;
+        LOG(VERBOSE) << "Pruned Logs from log_id: " << log_id << ", previous size: " << total_size
+                     << " after size: " << after_size;
     }
     stats_->set_overhead(log_id, after_size);

View file

@@ -27,8 +27,9 @@ SerializedLogChunk::~SerializedLogChunk() {
 void SerializedLogChunk::Compress() {
     CHECK_EQ(compressed_log_.size(), 0U);
     CompressionEngine::GetInstance().Compress(contents_, write_offset_, compressed_log_);
-    LOG(INFO) << "Compressed Log, buffer max size: " << contents_.size()
-              << " size used: " << write_offset_ << " compressed size: " << compressed_log_.size();
+    LOG(VERBOSE) << "Compressed Log, buffer max size: " << contents_.size()
+                 << " size used: " << write_offset_
+                 << " compressed size: " << compressed_log_.size();
 }
 // TODO: Develop a better reference counting strategy to guard against the case where the writer is
@@ -111,4 +112,4 @@ SerializedLogEntry* SerializedLogChunk::Log(uint64_t sequence, log_time realtime
     write_offset_ += entry->total_len();
     highest_sequence_number_ = sequence;
     return entry;
-}
\ No newline at end of file
+}