path: root/kernel/trace/trace_branch.c
author		Steven Rostedt (VMware) <rostedt@goodmis.org>	2017-01-19 14:57:41 +0100
committer	Steven Rostedt (VMware) <rostedt@goodmis.org>	2017-01-19 14:57:41 +0100
commit		068f530b3f274d313395663bf8d674798d4858c6 (patch)
tree		ab243386f95697f056b6473ca820c0e616a0e6f5 /kernel/trace/trace_branch.c
parent		tracing: Show number of constants profiled in likely profiler (diff)
download	linux-068f530b3f274d313395663bf8d674798d4858c6.tar.xz
		linux-068f530b3f274d313395663bf8d674798d4858c6.zip
tracing: Add the constant count for branch tracer
The unlikely/likely branch profiler now gets called even if the if statement is a constant (always goes in one direction without a compare). Add a value to denote this in the likely/unlikely tracer as well.

Signed-off-by: Steven Rostedt (VMware) <rostedt@goodmis.org>
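For context, the probe's parameter type changes from struct ftrace_branch_data to struct ftrace_likely_data, a wrapper that embeds the old data and adds the constant counter; that is why f->file becomes f->data.file below and why f->constant is available to record. A rough sketch of the two structures, inferred from the field accesses in this diff (the authoritative definitions live in include/linux/compiler.h in this series):

	/* Sketch only: layout inferred from the f->data.func, f->data.file,
	 * f->data.line and f->constant accesses used below; see
	 * include/linux/compiler.h for the real definitions. */
	struct ftrace_branch_data {
		const char	*func;
		const char	*file;
		unsigned	line;
		unsigned long	correct;
		unsigned long	incorrect;
	};

	struct ftrace_likely_data {
		struct ftrace_branch_data	data;
		unsigned long			constant; /* hits where the condition was a compile-time constant */
	};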
Diffstat (limited to 'kernel/trace/trace_branch.c')
-rw-r--r--	kernel/trace/trace_branch.c	17
1 file changed, 9 insertions(+), 8 deletions(-)
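The probe fills a branch trace entry, and the diff below adds a write to entry->constant next to the existing fields. A rough sketch of that entry's shape, assumed from the accesses in this file (the real definition is the branch entry in kernel/trace/trace_entries.h):

	/* Sketch only: field names taken from entry->line, entry->func,
	 * entry->file, entry->correct and the new entry->constant below. */
	struct trace_branch {
		struct trace_entry	ent;
		unsigned int		line;
		char			func[TRACE_FUNC_SIZE + 1];
		char			file[TRACE_FILE_SIZE + 1];
		char			correct;
		char			constant;	/* new: condition was a compile-time constant */
	};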
diff --git a/kernel/trace/trace_branch.c b/kernel/trace/trace_branch.c
index fd483d74f8e1..4d8fdf3184dc 100644
--- a/kernel/trace/trace_branch.c
+++ b/kernel/trace/trace_branch.c
@@ -27,7 +27,7 @@ static DEFINE_MUTEX(branch_tracing_mutex);
static struct trace_array *branch_tracer;
static void
-probe_likely_condition(struct ftrace_branch_data *f, int val, int expect)
+probe_likely_condition(struct ftrace_likely_data *f, int val, int expect)
{
struct trace_event_call *call = &event_branch;
struct trace_array *tr = branch_tracer;
@@ -68,16 +68,17 @@ probe_likely_condition(struct ftrace_branch_data *f, int val, int expect)
entry = ring_buffer_event_data(event);
/* Strip off the path, only save the file */
- p = f->file + strlen(f->file);
- while (p >= f->file && *p != '/')
+ p = f->data.file + strlen(f->data.file);
+ while (p >= f->data.file && *p != '/')
p--;
p++;
- strncpy(entry->func, f->func, TRACE_FUNC_SIZE);
+ strncpy(entry->func, f->data.func, TRACE_FUNC_SIZE);
strncpy(entry->file, p, TRACE_FILE_SIZE);
entry->func[TRACE_FUNC_SIZE] = 0;
entry->file[TRACE_FILE_SIZE] = 0;
- entry->line = f->line;
+ entry->constant = f->constant;
+ entry->line = f->data.line;
entry->correct = val == expect;
if (!call_filter_check_discard(call, entry, buffer, event))
@@ -89,7 +90,7 @@ probe_likely_condition(struct ftrace_branch_data *f, int val, int expect)
}
static inline
-void trace_likely_condition(struct ftrace_branch_data *f, int val, int expect)
+void trace_likely_condition(struct ftrace_likely_data *f, int val, int expect)
{
if (!branch_tracing_enabled)
return;
@@ -195,7 +196,7 @@ core_initcall(init_branch_tracer);
#else
static inline
-void trace_likely_condition(struct ftrace_branch_data *f, int val, int expect)
+void trace_likely_condition(struct ftrace_likely_data *f, int val, int expect)
{
}
#endif /* CONFIG_BRANCH_TRACER */
@@ -214,7 +215,7 @@ void ftrace_likely_update(struct ftrace_likely_data *f, int val,
* conditions that the recursive nightmare that exists is too
* much to try to get working. At least for now.
*/
- trace_likely_condition(&f->data, val, expect);
+ trace_likely_condition(f, val, expect);
/* FIXME: Make this atomic! */
if (val == expect)
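For reference, after this change ftrace_likely_update() passes the whole wrapper down so the tracer can record the constant count alongside correct/incorrect. A rough sketch of the resulting caller, assuming the constant accounting introduced by the parent commit (not a verbatim copy of the file):

	void ftrace_likely_update(struct ftrace_likely_data *f, int val,
				  int expect, int is_constant)
	{
		/* A constant branch always goes the same way; count it separately. */
		if (is_constant) {
			f->constant++;
			val = expect;
		}

		/* Hand the full wrapper to the tracer so it can record f->constant. */
		trace_likely_condition(f, val, expect);

		/* FIXME: Make this atomic! */
		if (val == expect)
			f->data.correct++;
		else
			f->data.incorrect++;
	}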