
cgtop: Display cpu time in microseconds with --raw

This makes the CPU time easily parseable, which was the goal
of --raw in the first place.

This only takes effect if --raw is combined with --cpu=time.
Author: Arian van Putten, 2020-10-14 13:47:17 +02:00, committed by Lennart Poettering
Parent: 9d7b11fdc8
Commit: 429495163c
3 changed files with 19 additions and 4 deletions
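
For illustration only, a minimal standalone sketch of what the change does. The names usec_t, arg_raw and maybe_format_timespan mirror the diff below; format_timespan_stub is a made-up placeholder for systemd's real format_timespan(), which renders strings like "2min 5s":

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t usec_t;

    /* Hypothetical stand-in for the --raw switch; in cgtop this is the
     * global arg_raw set by the command-line parser. */
    static int arg_raw = 1;

    /* Placeholder hinting at what systemd's format_timespan() produces. */
    static const char *format_timespan_stub(char *buf, size_t l, usec_t t) {
            snprintf(buf, l, "%" PRIu64 "min %" PRIu64 "s",
                     t / (60 * 1000000ULL), (t / 1000000ULL) % 60);
            return buf;
    }

    /* Same shape as the maybe_format_timespan() helper added below: in raw
     * mode the microsecond count is printed verbatim so scripts can parse it,
     * otherwise the human-readable formatter is used. */
    static const char *maybe_format_timespan(char *buf, size_t l, usec_t t) {
            if (arg_raw) {
                    snprintf(buf, l, "%" PRIu64, t);
                    return buf;
            }
            return format_timespan_stub(buf, l, t);
    }

    int main(void) {
            char buf[64];
            usec_t cpu = 125000000;   /* 2min 5s of CPU time, in microseconds */

            arg_raw = 1;
            printf("--raw:    %s\n", maybe_format_timespan(buf, sizeof buf, cpu));

            arg_raw = 0;
            printf("default:  %s\n", maybe_format_timespan(buf, sizeof buf, cpu));
            return 0;
    }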


@@ -130,7 +130,7 @@
         <term><option>-r</option></term>
         <term><option>--raw</option></term>
-        <listitem><para>Format byte counts (as in memory usage and I/O metrics)
+        <listitem><para>Format byte counts (as in memory usage and I/O metrics) and CPU time
         with raw numeric values rather than human-readable
         numbers.</para></listitem>
       </varlistentry>


@@ -281,6 +281,12 @@ static inline size_t GREEDY_ALLOC_ROUND_UP(size_t l) {
                 MAX(_c, z); \
         })
 
+#define MAX4(x, y, z, a) \
+        ({ \
+                const typeof(x) _d = MAX3(x, y, z); \
+                MAX(_d, a); \
+        })
+
 #undef MIN
 #define MIN(a, b) __MIN(UNIQ, (a), UNIQ, (b))
 #define __MIN(aq, a, bq, b) \
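
As a side note, a simplified sketch of how the new MAX4 composes with MAX3. These are plain ternary macros, without systemd's UNIQ-based temporaries that guard against double evaluation, and the array sizes are placeholder values:

    #include <stdio.h>

    /* Simplified stand-ins for the systemd macros; the real ones use
     * statement expressions and unique temporaries. */
    #define MAX(a, b)        ((a) > (b) ? (a) : (b))
    #define MAX3(x, y, z)    MAX(MAX(x, y), z)
    #define MAX4(x, y, z, a) MAX(MAX3(x, y, z), a)

    int main(void) {
            /* Size a buffer for the largest of several formatting needs, as
             * display() does with FORMAT_BYTES_MAX, FORMAT_TIMESPAN_MAX and
             * DECIMAL_STR_MAX(usec_t); 21/16/64/21 are placeholder sizes. */
            char buffer[MAX4(21, 16, 64, 21)];

            printf("buffer is %zu bytes\n", sizeof buffer);
            return 0;
    }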


@@ -91,6 +91,15 @@ static Group *group_free(Group *g) {
         return mfree(g);
 }
 
+static const char *maybe_format_timespan(char *buf, size_t l, usec_t t, usec_t accuracy) {
+        if (arg_raw) {
+                snprintf(buf, l, USEC_FMT, t);
+                return buf;
+        }
+
+        return format_timespan(buf, l, t, accuracy);
+}
+
 static const char *maybe_format_bytes(char *buf, size_t l, bool is_valid, uint64_t t) {
         if (!is_valid)
                 return "-";
@@ -586,7 +595,7 @@ static void display(Hashmap *a) {
         Group **array;
         signed path_columns;
         unsigned rows, n = 0, j, maxtcpu = 0, maxtpath = 3; /* 3 for ellipsize() to work properly */
-        char buffer[MAX3(21U, FORMAT_BYTES_MAX, FORMAT_TIMESPAN_MAX)];
+        char buffer[MAX4(21U, FORMAT_BYTES_MAX, FORMAT_TIMESPAN_MAX, DECIMAL_STR_MAX(usec_t))];
 
         assert(a);
@@ -605,7 +614,7 @@ static void display(Hashmap *a) {
         for (j = 0; j < n; j++) {
                 unsigned cputlen, pathtlen;
 
-                format_timespan(buffer, sizeof(buffer), (usec_t) (array[j]->cpu_usage / NSEC_PER_USEC), 0);
+                maybe_format_timespan(buffer, sizeof(buffer), (usec_t) (array[j]->cpu_usage / NSEC_PER_USEC), 0);
                 cputlen = strlen(buffer);
                 maxtcpu = MAX(maxtcpu, cputlen);
@@ -674,7 +683,7 @@ static void display(Hashmap *a) {
                         else
                                 fputs(" -", stdout);
                 } else
-                        printf(" %*s", maxtcpu, format_timespan(buffer, sizeof(buffer), (usec_t) (g->cpu_usage / NSEC_PER_USEC), 0));
+                        printf(" %*s", maxtcpu, maybe_format_timespan(buffer, sizeof(buffer), (usec_t) (g->cpu_usage / NSEC_PER_USEC), 0));
 
                 printf(" %8s", maybe_format_bytes(buffer, sizeof(buffer), g->memory_valid, g->memory));
                 printf(" %8s", maybe_format_bytes(buffer, sizeof(buffer), g->io_valid, g->io_input_bps));