Lines Matching +full:sample +full:- +full:time

1 // SPDX-License-Identifier: GPL-2.0
4 * Copyright (c) 2017-2018, Arm Ltd.
28 #include "thread-stack.h"
31 #include "util/synthetic-events.h"
33 #include "arm-spe.h"
34 #include "arm-spe-decoder/arm-spe-decoder.h"
35 #include "arm-spe-decoder/arm-spe-pkt-decoder.h"
95 u64 time; member
136 len -= pkt_len; in arm_spe_dump()
150 struct auxtrace_buffer *buffer = speq->buffer; in arm_spe_get_trace()
151 struct auxtrace_buffer *old_buffer = speq->old_buffer; in arm_spe_get_trace()
154 queue = &speq->spe->queues.queue_array[speq->queue_nr]; in arm_spe_get_trace()
161 b->len = 0; in arm_spe_get_trace()
165 speq->buffer = buffer; in arm_spe_get_trace()
168 if (!buffer->data) { in arm_spe_get_trace()
170 int fd = perf_data__fd(speq->spe->session->data); in arm_spe_get_trace()
172 buffer->data = auxtrace_buffer__get_data(buffer, fd); in arm_spe_get_trace()
173 if (!buffer->data) in arm_spe_get_trace()
174 return -ENOMEM; in arm_spe_get_trace()
177 b->len = buffer->size; in arm_spe_get_trace()
178 b->buf = buffer->data; in arm_spe_get_trace()
180 if (b->len) { in arm_spe_get_trace()
183 speq->old_buffer = buffer; in arm_spe_get_trace()
202 speq->event_buf = malloc(PERF_SAMPLE_MAX_SIZE); in arm_spe__alloc_queue()
203 if (!speq->event_buf) in arm_spe__alloc_queue()
206 speq->spe = spe; in arm_spe__alloc_queue()
207 speq->queue_nr = queue_nr; in arm_spe__alloc_queue()
208 speq->pid = -1; in arm_spe__alloc_queue()
209 speq->tid = -1; in arm_spe__alloc_queue()
210 speq->cpu = -1; in arm_spe__alloc_queue()
211 speq->period_instructions = 0; in arm_spe__alloc_queue()
218 speq->decoder = arm_spe_decoder_new(&params); in arm_spe__alloc_queue()
219 if (!speq->decoder) in arm_spe__alloc_queue()
225 zfree(&speq->event_buf); in arm_spe__alloc_queue()
233 return ip >= spe->kernel_start ? in arm_spe_cpumode()
241 struct arm_spe_queue *speq = queue->priv; in arm_spe_set_pid_tid_cpu()
244 tid = machine__get_current_tid(spe->machine, speq->cpu); in arm_spe_set_pid_tid_cpu()
245 if (tid != -1) { in arm_spe_set_pid_tid_cpu()
246 speq->tid = tid; in arm_spe_set_pid_tid_cpu()
247 thread__zput(speq->thread); in arm_spe_set_pid_tid_cpu()
249 speq->tid = queue->tid; in arm_spe_set_pid_tid_cpu()
251 if ((!speq->thread) && (speq->tid != -1)) { in arm_spe_set_pid_tid_cpu()
252 speq->thread = machine__find_thread(spe->machine, -1, in arm_spe_set_pid_tid_cpu()
253 speq->tid); in arm_spe_set_pid_tid_cpu()
256 if (speq->thread) { in arm_spe_set_pid_tid_cpu()
257 speq->pid = thread__pid(speq->thread); in arm_spe_set_pid_tid_cpu()
258 if (queue->cpu == -1) in arm_spe_set_pid_tid_cpu()
259 speq->cpu = thread__cpu(speq->thread); in arm_spe_set_pid_tid_cpu()
265 struct arm_spe *spe = speq->spe; in arm_spe_set_tid()
266 int err = machine__set_current_tid(spe->machine, speq->cpu, -1, tid); in arm_spe_set_tid()
271 arm_spe_set_pid_tid_cpu(spe, &spe->queues.queue_array[speq->queue_nr]); in arm_spe_set_tid()
280 if ((record->op & ARM_SPE_OP_LDST) && (record->op & ARM_SPE_OP_SVE_LDST)) in arm_spe__synth_simd_flags()
283 if ((record->op & ARM_SPE_OP_OTHER) && (record->op & ARM_SPE_OP_SVE_OTHER)) in arm_spe__synth_simd_flags()
286 if (record->type & ARM_SPE_SVE_PARTIAL_PRED) in arm_spe__synth_simd_flags()
289 if (record->type & ARM_SPE_SVE_EMPTY_PRED) in arm_spe__synth_simd_flags()
298 struct perf_sample *sample) in arm_spe_prep_sample() argument
300 struct arm_spe_record *record = &speq->decoder->record; in arm_spe_prep_sample()
302 if (!spe->timeless_decoding) in arm_spe_prep_sample()
303 sample->time = tsc_to_perf_time(record->timestamp, &spe->tc); in arm_spe_prep_sample()
305 sample->ip = record->from_ip; in arm_spe_prep_sample()
306 sample->cpumode = arm_spe_cpumode(spe, sample->ip); in arm_spe_prep_sample()
307 sample->pid = speq->pid; in arm_spe_prep_sample()
308 sample->tid = speq->tid; in arm_spe_prep_sample()
309 sample->period = 1; in arm_spe_prep_sample()
310 sample->cpu = speq->cpu; in arm_spe_prep_sample()
311 sample->simd_flags = arm_spe__synth_simd_flags(record); in arm_spe_prep_sample()
313 event->sample.header.type = PERF_RECORD_SAMPLE; in arm_spe_prep_sample()
314 event->sample.header.misc = sample->cpumode; in arm_spe_prep_sample()
315 event->sample.header.size = sizeof(struct perf_event_header); in arm_spe_prep_sample()
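The matched lines above show how each decoded SPE record is primed as a synthetic PERF_RECORD_SAMPLE: the timestamp is converted from the counter when timed decoding is in use, and the cpumode is derived from the sampled IP against kernel_start (see the arm_spe_cpumode() line earlier in the listing). A minimal userspace sketch of that cpumode/header step, using only the uapi header; the kernel_start value and spe_cpumode() name are illustrative assumptions, not taken from the source:

	#include <linux/perf_event.h>
	#include <stdio.h>

	/* Illustrative stand-in for machine__kernel_start(); not from the listing. */
	static const unsigned long long kernel_start = 0xffff000000000000ULL;

	/* Mirrors the split in arm_spe_cpumode(): kernel vs. user by IP. */
	static unsigned short spe_cpumode(unsigned long long ip)
	{
		return ip >= kernel_start ? PERF_RECORD_MISC_KERNEL
					  : PERF_RECORD_MISC_USER;
	}

	int main(void)
	{
		struct perf_event_header hdr = {
			.type = PERF_RECORD_SAMPLE,
			.misc = spe_cpumode(0x400123),
			.size = sizeof(hdr),	/* grown later to cover the payload */
		};

		printf("type=%u misc=%#x size=%u\n", hdr.type, hdr.misc, hdr.size);
		return 0;
	}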
318 static int arm_spe__inject_event(union perf_event *event, struct perf_sample *sample, u64 type) in arm_spe__inject_event() argument
320 event->header.size = perf_event__sample_event_size(sample, type, 0); in arm_spe__inject_event()
321 return perf_event__synthesize_sample(event, type, 0, sample); in arm_spe__inject_event()
328 struct perf_sample *sample) in arm_spe_deliver_synth_event() argument
332 if (spe->synth_opts.inject) { in arm_spe_deliver_synth_event()
333 ret = arm_spe__inject_event(event, sample, spe->sample_type); in arm_spe_deliver_synth_event()
338 ret = perf_session__deliver_synth_event(spe->session, event, sample); in arm_spe_deliver_synth_event()
348 struct arm_spe *spe = speq->spe; in arm_spe__synth_mem_sample()
349 struct arm_spe_record *record = &speq->decoder->record; in arm_spe__synth_mem_sample()
350 union perf_event *event = speq->event_buf; in arm_spe__synth_mem_sample()
351 struct perf_sample sample = { .ip = 0, }; in arm_spe__synth_mem_sample() local
353 arm_spe_prep_sample(spe, speq, event, &sample); in arm_spe__synth_mem_sample()
355 sample.id = spe_events_id; in arm_spe__synth_mem_sample()
356 sample.stream_id = spe_events_id; in arm_spe__synth_mem_sample()
357 sample.addr = record->virt_addr; in arm_spe__synth_mem_sample()
358 sample.phys_addr = record->phys_addr; in arm_spe__synth_mem_sample()
359 sample.data_src = data_src; in arm_spe__synth_mem_sample()
360 sample.weight = record->latency; in arm_spe__synth_mem_sample()
362 return arm_spe_deliver_synth_event(spe, speq, event, &sample); in arm_spe__synth_mem_sample()
368 struct arm_spe *spe = speq->spe; in arm_spe__synth_branch_sample()
369 struct arm_spe_record *record = &speq->decoder->record; in arm_spe__synth_branch_sample()
370 union perf_event *event = speq->event_buf; in arm_spe__synth_branch_sample()
371 struct perf_sample sample = { .ip = 0, }; in arm_spe__synth_branch_sample() local
373 arm_spe_prep_sample(spe, speq, event, &sample); in arm_spe__synth_branch_sample()
375 sample.id = spe_events_id; in arm_spe__synth_branch_sample()
376 sample.stream_id = spe_events_id; in arm_spe__synth_branch_sample()
377 sample.addr = record->to_ip; in arm_spe__synth_branch_sample()
378 sample.weight = record->latency; in arm_spe__synth_branch_sample()
380 return arm_spe_deliver_synth_event(spe, speq, event, &sample); in arm_spe__synth_branch_sample()
386 struct arm_spe *spe = speq->spe; in arm_spe__synth_instruction_sample()
387 struct arm_spe_record *record = &speq->decoder->record; in arm_spe__synth_instruction_sample()
388 union perf_event *event = speq->event_buf; in arm_spe__synth_instruction_sample()
389 struct perf_sample sample = { .ip = 0, }; in arm_spe__synth_instruction_sample() local
394 speq->period_instructions++; in arm_spe__synth_instruction_sample()
395 if (speq->period_instructions < spe->instructions_sample_period) in arm_spe__synth_instruction_sample()
397 speq->period_instructions = 0; in arm_spe__synth_instruction_sample()
399 arm_spe_prep_sample(spe, speq, event, &sample); in arm_spe__synth_instruction_sample()
401 sample.id = spe_events_id; in arm_spe__synth_instruction_sample()
402 sample.stream_id = spe_events_id; in arm_spe__synth_instruction_sample()
403 sample.addr = record->virt_addr; in arm_spe__synth_instruction_sample()
404 sample.phys_addr = record->phys_addr; in arm_spe__synth_instruction_sample()
405 sample.data_src = data_src; in arm_spe__synth_instruction_sample()
406 sample.period = spe->instructions_sample_period; in arm_spe__synth_instruction_sample()
407 sample.weight = record->latency; in arm_spe__synth_instruction_sample()
409 return arm_spe_deliver_synth_event(spe, speq, event, &sample); in arm_spe__synth_instruction_sample()
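arm_spe__synth_instruction_sample() above down-samples: only every instructions_sample_period-th decoded record produces an instruction sample, and the emitted sample carries the full period. A self-contained sketch of that gating (names are illustrative, not from the source):

	#include <stdbool.h>
	#include <stdio.h>

	struct period_gate {
		unsigned long long count;
		unsigned long long period;
	};

	/* Mirrors the period_instructions check: emit one sample per 'period' records. */
	static bool period_gate_hit(struct period_gate *g)
	{
		if (++g->count < g->period)
			return false;	/* swallow this record */
		g->count = 0;
		return true;		/* one sample covering 'period' records */
	}

	int main(void)
	{
		struct period_gate g = { .count = 0, .period = 4 };

		for (int i = 1; i <= 10; i++)
			if (period_gate_hit(&g))
				printf("record %d -> instruction sample (period=%llu)\n",
				       i, g.period);
		return 0;
	}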
425 * so for the time being we assume three exist. If a production system in arm_spe__synth_data_source_neoverse()
434 if (record->op & ARM_SPE_OP_ST) { in arm_spe__synth_data_source_neoverse()
435 data_src->mem_lvl = PERF_MEM_LVL_NA; in arm_spe__synth_data_source_neoverse()
436 data_src->mem_lvl_num = PERF_MEM_LVLNUM_NA; in arm_spe__synth_data_source_neoverse()
437 data_src->mem_snoop = PERF_MEM_SNOOP_NA; in arm_spe__synth_data_source_neoverse()
441 switch (record->source) { in arm_spe__synth_data_source_neoverse()
443 data_src->mem_lvl = PERF_MEM_LVL_L1 | PERF_MEM_LVL_HIT; in arm_spe__synth_data_source_neoverse()
444 data_src->mem_lvl_num = PERF_MEM_LVLNUM_L1; in arm_spe__synth_data_source_neoverse()
445 data_src->mem_snoop = PERF_MEM_SNOOP_NONE; in arm_spe__synth_data_source_neoverse()
448 data_src->mem_lvl = PERF_MEM_LVL_L2 | PERF_MEM_LVL_HIT; in arm_spe__synth_data_source_neoverse()
449 data_src->mem_lvl_num = PERF_MEM_LVLNUM_L2; in arm_spe__synth_data_source_neoverse()
450 data_src->mem_snoop = PERF_MEM_SNOOP_NONE; in arm_spe__synth_data_source_neoverse()
453 data_src->mem_lvl = PERF_MEM_LVL_L2 | PERF_MEM_LVL_HIT; in arm_spe__synth_data_source_neoverse()
454 data_src->mem_lvl_num = PERF_MEM_LVLNUM_L2; in arm_spe__synth_data_source_neoverse()
455 data_src->mem_snoopx = PERF_MEM_SNOOPX_PEER; in arm_spe__synth_data_source_neoverse()
458 * We don't know if this is L1, L2 but we do know it was a cache-2-cache in arm_spe__synth_data_source_neoverse()
463 data_src->mem_lvl = PERF_MEM_LVL_L3 | PERF_MEM_LVL_HIT; in arm_spe__synth_data_source_neoverse()
464 data_src->mem_lvl_num = PERF_MEM_LVLNUM_L3; in arm_spe__synth_data_source_neoverse()
465 data_src->mem_snoopx = PERF_MEM_SNOOPX_PEER; in arm_spe__synth_data_source_neoverse()
471 data_src->mem_lvl = PERF_MEM_LVL_L3 | PERF_MEM_LVL_HIT; in arm_spe__synth_data_source_neoverse()
472 data_src->mem_lvl_num = PERF_MEM_LVLNUM_L3; in arm_spe__synth_data_source_neoverse()
473 data_src->mem_snoop = PERF_MEM_SNOOP_HIT; in arm_spe__synth_data_source_neoverse()
480 data_src->mem_lvl = PERF_MEM_LVL_REM_CCE1; in arm_spe__synth_data_source_neoverse()
481 data_src->mem_lvl_num = PERF_MEM_LVLNUM_ANY_CACHE; in arm_spe__synth_data_source_neoverse()
482 data_src->mem_remote = PERF_MEM_REMOTE_REMOTE; in arm_spe__synth_data_source_neoverse()
483 data_src->mem_snoopx = PERF_MEM_SNOOPX_PEER; in arm_spe__synth_data_source_neoverse()
486 data_src->mem_lvl = PERF_MEM_LVL_LOC_RAM | PERF_MEM_LVL_HIT; in arm_spe__synth_data_source_neoverse()
487 data_src->mem_lvl_num = PERF_MEM_LVLNUM_RAM; in arm_spe__synth_data_source_neoverse()
488 data_src->mem_snoop = PERF_MEM_SNOOP_NONE; in arm_spe__synth_data_source_neoverse()
498 if (record->type & (ARM_SPE_LLC_ACCESS | ARM_SPE_LLC_MISS)) { in arm_spe__synth_data_source_generic()
499 data_src->mem_lvl = PERF_MEM_LVL_L3; in arm_spe__synth_data_source_generic()
501 if (record->type & ARM_SPE_LLC_MISS) in arm_spe__synth_data_source_generic()
502 data_src->mem_lvl |= PERF_MEM_LVL_MISS; in arm_spe__synth_data_source_generic()
504 data_src->mem_lvl |= PERF_MEM_LVL_HIT; in arm_spe__synth_data_source_generic()
505 } else if (record->type & (ARM_SPE_L1D_ACCESS | ARM_SPE_L1D_MISS)) { in arm_spe__synth_data_source_generic()
506 data_src->mem_lvl = PERF_MEM_LVL_L1; in arm_spe__synth_data_source_generic()
508 if (record->type & ARM_SPE_L1D_MISS) in arm_spe__synth_data_source_generic()
509 data_src->mem_lvl |= PERF_MEM_LVL_MISS; in arm_spe__synth_data_source_generic()
511 data_src->mem_lvl |= PERF_MEM_LVL_HIT; in arm_spe__synth_data_source_generic()
514 if (record->type & ARM_SPE_REMOTE_ACCESS) in arm_spe__synth_data_source_generic()
515 data_src->mem_lvl |= PERF_MEM_LVL_REM_CCE1; in arm_spe__synth_data_source_generic()
523 if (record->op & ARM_SPE_OP_LD) in arm_spe__synth_data_source()
525 else if (record->op & ARM_SPE_OP_ST) in arm_spe__synth_data_source()
535 if (record->type & (ARM_SPE_TLB_ACCESS | ARM_SPE_TLB_MISS)) { in arm_spe__synth_data_source()
538 if (record->type & ARM_SPE_TLB_MISS) in arm_spe__synth_data_source()
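The three data-source helpers above encode an SPE record into union perf_mem_data_src: the Neoverse path fills mem_lvl/mem_lvl_num/mem_snoop(x) per packet source, while the generic path falls back to the L1/LLC hit-miss bits. A minimal userspace sketch of the encoding for a load that hits in L1; the mem_op assignment is not among the matched lines and is assumed here:

	#include <linux/perf_event.h>
	#include <stdio.h>

	int main(void)
	{
		union perf_mem_data_src ds = { .val = 0 };

		ds.mem_op = PERF_MEM_OP_LOAD;	/* assumed; set elsewhere in the source */
		ds.mem_lvl = PERF_MEM_LVL_L1 | PERF_MEM_LVL_HIT;
		ds.mem_lvl_num = PERF_MEM_LVLNUM_L1;
		ds.mem_snoop = PERF_MEM_SNOOP_NONE;

		/* This packed value is what lands in sample.data_src above. */
		printf("data_src = %#llx\n", (unsigned long long)ds.val);
		return 0;
	}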
549 const struct arm_spe_record *record = &speq->decoder->record; in arm_spe_sample()
550 struct arm_spe *spe = speq->spe; in arm_spe_sample()
554 data_src = arm_spe__synth_data_source(record, spe->midr); in arm_spe_sample()
556 if (spe->sample_flc) { in arm_spe_sample()
557 if (record->type & ARM_SPE_L1D_MISS) { in arm_spe_sample()
558 err = arm_spe__synth_mem_sample(speq, spe->l1d_miss_id, in arm_spe_sample()
564 if (record->type & ARM_SPE_L1D_ACCESS) { in arm_spe_sample()
565 err = arm_spe__synth_mem_sample(speq, spe->l1d_access_id, in arm_spe_sample()
572 if (spe->sample_llc) { in arm_spe_sample()
573 if (record->type & ARM_SPE_LLC_MISS) { in arm_spe_sample()
574 err = arm_spe__synth_mem_sample(speq, spe->llc_miss_id, in arm_spe_sample()
580 if (record->type & ARM_SPE_LLC_ACCESS) { in arm_spe_sample()
581 err = arm_spe__synth_mem_sample(speq, spe->llc_access_id, in arm_spe_sample()
588 if (spe->sample_tlb) { in arm_spe_sample()
589 if (record->type & ARM_SPE_TLB_MISS) { in arm_spe_sample()
590 err = arm_spe__synth_mem_sample(speq, spe->tlb_miss_id, in arm_spe_sample()
596 if (record->type & ARM_SPE_TLB_ACCESS) { in arm_spe_sample()
597 err = arm_spe__synth_mem_sample(speq, spe->tlb_access_id, in arm_spe_sample()
604 if (spe->sample_branch && (record->type & ARM_SPE_BRANCH_MISS)) { in arm_spe_sample()
605 err = arm_spe__synth_branch_sample(speq, spe->branch_miss_id); in arm_spe_sample()
610 if (spe->sample_remote_access && in arm_spe_sample()
611 (record->type & ARM_SPE_REMOTE_ACCESS)) { in arm_spe_sample()
612 err = arm_spe__synth_mem_sample(speq, spe->remote_access_id, in arm_spe_sample()
620 * skip to synthesize memory sample for this case. in arm_spe_sample()
622 if (spe->sample_memory && data_src) { in arm_spe_sample()
623 err = arm_spe__synth_mem_sample(speq, spe->memory_id, data_src); in arm_spe_sample()
628 if (spe->sample_instructions) { in arm_spe_sample()
629 err = arm_spe__synth_instruction_sample(speq, spe->instructions_id, data_src); in arm_spe_sample()
639 struct arm_spe *spe = speq->spe; in arm_spe_run_decoder()
643 if (!spe->kernel_start) in arm_spe_run_decoder()
644 spe->kernel_start = machine__kernel_start(spe->machine); in arm_spe_run_decoder()
649 * based the record to synthesize sample; but here the flow is in arm_spe_run_decoder()
656 * is left to generate sample until run to here, so it's correct in arm_spe_run_decoder()
657 * to synthesize sample for the left record. in arm_spe_run_decoder()
662 * synthesize sample until run to here at the next time; so this in arm_spe_run_decoder()
664 * perf events with correct time ordering. in arm_spe_run_decoder()
670 record = &speq->decoder->record; in arm_spe_run_decoder()
671 if (!spe->timeless_decoding && record->context_id != (u64)-1) { in arm_spe_run_decoder()
672 ret = arm_spe_set_tid(speq, record->context_id); in arm_spe_run_decoder()
676 spe->use_ctx_pkt_for_pid = true; in arm_spe_run_decoder()
683 ret = arm_spe_decode(speq->decoder); in arm_spe_run_decoder()
696 record = &speq->decoder->record; in arm_spe_run_decoder()
699 if (record->timestamp > speq->timestamp) in arm_spe_run_decoder()
700 speq->timestamp = record->timestamp; in arm_spe_run_decoder()
707 if (!spe->timeless_decoding && speq->timestamp >= *timestamp) { in arm_spe_run_decoder()
708 *timestamp = speq->timestamp; in arm_spe_run_decoder()
720 struct arm_spe_queue *speq = queue->priv; in arm_spe__setup_queue()
723 if (list_empty(&queue->head) || speq) in arm_spe__setup_queue()
729 return -ENOMEM; in arm_spe__setup_queue()
731 queue->priv = speq; in arm_spe__setup_queue()
733 if (queue->cpu != -1) in arm_spe__setup_queue()
734 speq->cpu = queue->cpu; in arm_spe__setup_queue()
736 if (!speq->on_heap) { in arm_spe__setup_queue()
739 if (spe->timeless_decoding) in arm_spe__setup_queue()
743 ret = arm_spe_decode(speq->decoder); in arm_spe__setup_queue()
751 record = &speq->decoder->record; in arm_spe__setup_queue()
753 speq->timestamp = record->timestamp; in arm_spe__setup_queue()
754 ret = auxtrace_heap__add(&spe->heap, queue_nr, speq->timestamp); in arm_spe__setup_queue()
757 speq->on_heap = true; in arm_spe__setup_queue()
768 for (i = 0; i < spe->queues.nr_queues; i++) { in arm_spe__setup_queues()
769 ret = arm_spe__setup_queue(spe, &spe->queues.queue_array[i], i); in arm_spe__setup_queues()
779 if (spe->queues.new_data) { in arm_spe__update_queues()
780 spe->queues.new_data = false; in arm_spe__update_queues()
790 struct evlist *evlist = spe->session->evlist; in arm_spe__is_timeless_decoding()
795 * with the time bit set. in arm_spe__is_timeless_decoding()
798 if ((evsel->core.attr.sample_type & PERF_SAMPLE_TIME)) in arm_spe__is_timeless_decoding()
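arm_spe__is_timeless_decoding() above declares decoding timeless only if no recorded event carries PERF_SAMPLE_TIME. A small sketch of the same check over a plain attr array, standing in for the evlist walk:

	#include <linux/perf_event.h>
	#include <stdbool.h>
	#include <stdio.h>

	static bool is_timeless(const struct perf_event_attr *attrs, int nr)
	{
		for (int i = 0; i < nr; i++)
			if (attrs[i].sample_type & PERF_SAMPLE_TIME)
				return false;
		return true;
	}

	int main(void)
	{
		struct perf_event_attr attrs[2] = {
			{ .sample_type = PERF_SAMPLE_IP },
			{ .sample_type = PERF_SAMPLE_IP | PERF_SAMPLE_TIME },
		};

		printf("timeless: %d\n", is_timeless(attrs, 2));	/* 0 */
		return 0;
	}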
815 if (!spe->heap.heap_cnt) in arm_spe_process_queues()
818 if (spe->heap.heap_array[0].ordinal >= timestamp) in arm_spe_process_queues()
821 queue_nr = spe->heap.heap_array[0].queue_nr; in arm_spe_process_queues()
822 queue = &spe->queues.queue_array[queue_nr]; in arm_spe_process_queues()
823 speq = queue->priv; in arm_spe_process_queues()
825 auxtrace_heap__pop(&spe->heap); in arm_spe_process_queues()
827 if (spe->heap.heap_cnt) { in arm_spe_process_queues()
828 ts = spe->heap.heap_array[0].ordinal + 1; in arm_spe_process_queues()
836 * A previous context-switch event has set pid/tid in the machine's context, so in arm_spe_process_queues()
839 if (!spe->use_ctx_pkt_for_pid) in arm_spe_process_queues()
844 auxtrace_heap__add(&spe->heap, queue_nr, ts); in arm_spe_process_queues()
849 ret = auxtrace_heap__add(&spe->heap, queue_nr, ts); in arm_spe_process_queues()
853 speq->on_heap = false; in arm_spe_process_queues()
863 struct auxtrace_queues *queues = &spe->queues; in arm_spe_process_timeless_queues()
867 for (i = 0; i < queues->nr_queues; i++) { in arm_spe_process_timeless_queues()
868 struct auxtrace_queue *queue = &spe->queues.queue_array[i]; in arm_spe_process_timeless_queues()
869 struct arm_spe_queue *speq = queue->priv; in arm_spe_process_timeless_queues()
871 if (speq && (tid == -1 || speq->tid == tid)) { in arm_spe_process_timeless_queues()
872 speq->time = time_; in arm_spe_process_timeless_queues()
881 struct perf_sample *sample) in arm_spe_context_switch() argument
886 if (!(event->header.misc & PERF_RECORD_MISC_SWITCH_OUT)) in arm_spe_context_switch()
889 pid = event->context_switch.next_prev_pid; in arm_spe_context_switch()
890 tid = event->context_switch.next_prev_tid; in arm_spe_context_switch()
891 cpu = sample->cpu; in arm_spe_context_switch()
893 if (tid == -1) in arm_spe_context_switch()
896 return machine__set_current_tid(spe->machine, cpu, pid, tid); in arm_spe_context_switch()
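arm_spe_context_switch() above consumes only switch-out records, whose next_prev_pid/next_prev_tid describe the task being switched in on that CPU; the result feeds machine__set_current_tid(). A sketch under that reading, with an illustrative stand-in for the tools' switch-event layout:

	#include <linux/perf_event.h>
	#include <stdbool.h>
	#include <stdio.h>

	/* Stand-in for the perf tools' switch record layout; illustrative only. */
	struct switch_cpu_wide {
		struct perf_event_header header;
		unsigned int next_prev_pid;
		unsigned int next_prev_tid;
	};

	static bool spe_next_task(const struct switch_cpu_wide *ev, int *pid, int *tid)
	{
		if (!(ev->header.misc & PERF_RECORD_MISC_SWITCH_OUT))
			return false;	/* switch-in: the fields name the outgoing task */

		*pid = (int)ev->next_prev_pid;
		*tid = (int)ev->next_prev_tid;
		return true;
	}

	int main(void)
	{
		struct switch_cpu_wide ev = {
			.header = { .type = PERF_RECORD_SWITCH_CPU_WIDE,
				    .misc = PERF_RECORD_MISC_SWITCH_OUT },
			.next_prev_pid = 1234,
			.next_prev_tid = 1234,
		};
		int pid, tid;

		if (spe_next_task(&ev, &pid, &tid))
			printf("cpu now runs pid=%d tid=%d\n", pid, tid);
		return 0;
	}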
901 struct perf_sample *sample, in arm_spe_process_event() argument
906 struct arm_spe *spe = container_of(session->auxtrace, in arm_spe_process_event()
912 if (!tool->ordered_events) { in arm_spe_process_event()
914 return -EINVAL; in arm_spe_process_event()
917 if (sample->time && (sample->time != (u64) -1)) in arm_spe_process_event()
918 timestamp = perf_time_to_tsc(sample->time, &spe->tc); in arm_spe_process_event()
922 if (timestamp || spe->timeless_decoding) { in arm_spe_process_event()
928 if (spe->timeless_decoding) { in arm_spe_process_event()
929 if (event->header.type == PERF_RECORD_EXIT) { in arm_spe_process_event()
931 event->fork.tid, in arm_spe_process_event()
932 sample->time); in arm_spe_process_event()
939 if (!spe->use_ctx_pkt_for_pid && in arm_spe_process_event()
940 (event->header.type == PERF_RECORD_SWITCH_CPU_WIDE || in arm_spe_process_event()
941 event->header.type == PERF_RECORD_SWITCH)) in arm_spe_process_event()
942 err = arm_spe_context_switch(spe, event, sample); in arm_spe_process_event()
952 struct arm_spe *spe = container_of(session->auxtrace, struct arm_spe, in arm_spe_process_auxtrace_event()
955 if (!spe->data_queued) { in arm_spe_process_auxtrace_event()
958 int fd = perf_data__fd(session->data); in arm_spe_process_auxtrace_event()
961 if (perf_data__is_pipe(session->data)) { in arm_spe_process_auxtrace_event()
965 if (data_offset == -1) in arm_spe_process_auxtrace_event()
966 return -errno; in arm_spe_process_auxtrace_event()
969 err = auxtrace_queues__add_event(&spe->queues, session, event, in arm_spe_process_auxtrace_event()
977 arm_spe_dump_event(spe, buffer->data, in arm_spe_process_auxtrace_event()
978 buffer->size); in arm_spe_process_auxtrace_event()
990 struct arm_spe *spe = container_of(session->auxtrace, struct arm_spe, in arm_spe_flush()
997 if (!tool->ordered_events) in arm_spe_flush()
998 return -EINVAL; in arm_spe_flush()
1004 if (spe->timeless_decoding) in arm_spe_flush()
1005 return arm_spe_process_timeless_queues(spe, -1, in arm_spe_flush()
1006 MAX_TIMESTAMP - 1); in arm_spe_flush()
1012 if (!spe->use_ctx_pkt_for_pid) in arm_spe_flush()
1025 thread__zput(speq->thread); in arm_spe_free_queue()
1026 arm_spe_decoder_free(speq->decoder); in arm_spe_free_queue()
1027 zfree(&speq->event_buf); in arm_spe_free_queue()
1033 struct arm_spe *spe = container_of(session->auxtrace, struct arm_spe, in arm_spe_free_events()
1035 struct auxtrace_queues *queues = &spe->queues; in arm_spe_free_events()
1038 for (i = 0; i < queues->nr_queues; i++) { in arm_spe_free_events()
1039 arm_spe_free_queue(queues->queue_array[i].priv); in arm_spe_free_events()
1040 queues->queue_array[i].priv = NULL; in arm_spe_free_events()
1047 struct arm_spe *spe = container_of(session->auxtrace, struct arm_spe, in arm_spe_free()
1050 auxtrace_heap__free(&spe->heap); in arm_spe_free()
1052 session->auxtrace = NULL; in arm_spe_free()
1059 struct arm_spe *spe = container_of(session->auxtrace, struct arm_spe, auxtrace); in arm_spe_evsel_is_auxtrace()
1061 return evsel->core.attr.type == spe->pmu_type; in arm_spe_evsel_is_auxtrace()
1082 if (evsel->core.id && evsel->core.id[0] == id) { in arm_spe_set_event_name()
1083 if (evsel->name) in arm_spe_set_event_name()
1084 zfree(&evsel->name); in arm_spe_set_event_name()
1085 evsel->name = strdup(name); in arm_spe_set_event_name()
1094 struct evlist *evlist = session->evlist; in arm_spe_synth_events()
1102 if (evsel->core.attr.type == spe->pmu_type) { in arm_spe_synth_events()
1116 attr.sample_type = evsel->core.attr.sample_type & in arm_spe_synth_events()
1121 if (spe->timeless_decoding) in arm_spe_synth_events()
1126 spe->sample_type = attr.sample_type; in arm_spe_synth_events()
1128 attr.exclude_user = evsel->core.attr.exclude_user; in arm_spe_synth_events()
1129 attr.exclude_kernel = evsel->core.attr.exclude_kernel; in arm_spe_synth_events()
1130 attr.exclude_hv = evsel->core.attr.exclude_hv; in arm_spe_synth_events()
1131 attr.exclude_host = evsel->core.attr.exclude_host; in arm_spe_synth_events()
1132 attr.exclude_guest = evsel->core.attr.exclude_guest; in arm_spe_synth_events()
1133 attr.sample_id_all = evsel->core.attr.sample_id_all; in arm_spe_synth_events()
1134 attr.read_format = evsel->core.attr.read_format; in arm_spe_synth_events()
1137 id = evsel->core.id[0] + 1000000000; in arm_spe_synth_events()
1142 if (spe->synth_opts.flc) { in arm_spe_synth_events()
1143 spe->sample_flc = true; in arm_spe_synth_events()
1149 spe->l1d_miss_id = id; in arm_spe_synth_events()
1150 arm_spe_set_event_name(evlist, id, "l1d-miss"); in arm_spe_synth_events()
1157 spe->l1d_access_id = id; in arm_spe_synth_events()
1158 arm_spe_set_event_name(evlist, id, "l1d-access"); in arm_spe_synth_events()
1162 if (spe->synth_opts.llc) { in arm_spe_synth_events()
1163 spe->sample_llc = true; in arm_spe_synth_events()
1169 spe->llc_miss_id = id; in arm_spe_synth_events()
1170 arm_spe_set_event_name(evlist, id, "llc-miss"); in arm_spe_synth_events()
1177 spe->llc_access_id = id; in arm_spe_synth_events()
1178 arm_spe_set_event_name(evlist, id, "llc-access"); in arm_spe_synth_events()
1182 if (spe->synth_opts.tlb) { in arm_spe_synth_events()
1183 spe->sample_tlb = true; in arm_spe_synth_events()
1189 spe->tlb_miss_id = id; in arm_spe_synth_events()
1190 arm_spe_set_event_name(evlist, id, "tlb-miss"); in arm_spe_synth_events()
1197 spe->tlb_access_id = id; in arm_spe_synth_events()
1198 arm_spe_set_event_name(evlist, id, "tlb-access"); in arm_spe_synth_events()
1202 if (spe->synth_opts.branches) { in arm_spe_synth_events()
1203 spe->sample_branch = true; in arm_spe_synth_events()
1209 spe->branch_miss_id = id; in arm_spe_synth_events()
1210 arm_spe_set_event_name(evlist, id, "branch-miss"); in arm_spe_synth_events()
1214 if (spe->synth_opts.remote_access) { in arm_spe_synth_events()
1215 spe->sample_remote_access = true; in arm_spe_synth_events()
1221 spe->remote_access_id = id; in arm_spe_synth_events()
1222 arm_spe_set_event_name(evlist, id, "remote-access"); in arm_spe_synth_events()
1226 if (spe->synth_opts.mem) { in arm_spe_synth_events()
1227 spe->sample_memory = true; in arm_spe_synth_events()
1232 spe->memory_id = id; in arm_spe_synth_events()
1237 if (spe->synth_opts.instructions) { in arm_spe_synth_events()
1238 if (spe->synth_opts.period_type != PERF_ITRACE_PERIOD_INSTRUCTIONS) { in arm_spe_synth_events()
1239 pr_warning("Only instruction-based sampling period is currently supported by Arm SPE.\n"); in arm_spe_synth_events()
1242 if (spe->synth_opts.period > 1) in arm_spe_synth_events()
1243 pr_warning("Arm SPE has a hardware-based sample period.\n" in arm_spe_synth_events()
1244 "Additional instruction events will be discarded by --itrace\n"); in arm_spe_synth_events()
1246 spe->sample_instructions = true; in arm_spe_synth_events()
1248 attr.sample_period = spe->synth_opts.period; in arm_spe_synth_events()
1249 spe->instructions_sample_period = attr.sample_period; in arm_spe_synth_events()
1253 spe->instructions_id = id; in arm_spe_synth_events()
1264 struct perf_record_auxtrace_info *auxtrace_info = &event->auxtrace_info; in arm_spe_process_auxtrace_info()
1266 struct perf_record_time_conv *tc = &session->time_conv; in arm_spe_process_auxtrace_info()
1267 const char *cpuid = perf_env__cpuid(session->evlist->env); in arm_spe_process_auxtrace_info()
1272 if (auxtrace_info->header.size < sizeof(struct perf_record_auxtrace_info) + in arm_spe_process_auxtrace_info()
1274 return -EINVAL; in arm_spe_process_auxtrace_info()
1278 return -ENOMEM; in arm_spe_process_auxtrace_info()
1280 err = auxtrace_queues__init(&spe->queues); in arm_spe_process_auxtrace_info()
1284 spe->session = session; in arm_spe_process_auxtrace_info()
1285 spe->machine = &session->machines.host; /* No kvm support */ in arm_spe_process_auxtrace_info()
1286 spe->auxtrace_type = auxtrace_info->type; in arm_spe_process_auxtrace_info()
1287 spe->pmu_type = auxtrace_info->priv[ARM_SPE_PMU_TYPE]; in arm_spe_process_auxtrace_info()
1288 spe->midr = midr; in arm_spe_process_auxtrace_info()
1290 spe->timeless_decoding = arm_spe__is_timeless_decoding(spe); in arm_spe_process_auxtrace_info()
1296 * in "spe->tc", which is used for later conversion between clock in arm_spe_process_auxtrace_info()
1302 spe->tc.time_shift = tc->time_shift; in arm_spe_process_auxtrace_info()
1303 spe->tc.time_mult = tc->time_mult; in arm_spe_process_auxtrace_info()
1304 spe->tc.time_zero = tc->time_zero; in arm_spe_process_auxtrace_info()
1307 spe->tc.time_cycles = tc->time_cycles; in arm_spe_process_auxtrace_info()
1308 spe->tc.time_mask = tc->time_mask; in arm_spe_process_auxtrace_info()
1309 spe->tc.cap_user_time_zero = tc->cap_user_time_zero; in arm_spe_process_auxtrace_info()
1310 spe->tc.cap_user_time_short = tc->cap_user_time_short; in arm_spe_process_auxtrace_info()
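The block above copies the session's time_conv parameters into spe->tc, which tsc_to_perf_time()/perf_time_to_tsc() then use on record and sample timestamps. A self-contained sketch of the usual mult/shift/zero conversion these fields drive (field names follow the listing; the exact perf implementation may differ in detail):

	#include <stdint.h>
	#include <stdio.h>

	struct tsc_conv {
		uint32_t time_shift;
		uint32_t time_mult;
		uint64_t time_zero;
		uint64_t time_cycles;
		uint64_t time_mask;
		int cap_user_time_short;
	};

	static uint64_t tsc_to_perf_time(uint64_t cyc, const struct tsc_conv *tc)
	{
		uint64_t quot, rem;

		if (tc->cap_user_time_short)
			cyc = tc->time_cycles + ((cyc - tc->time_cycles) & tc->time_mask);

		quot = cyc >> tc->time_shift;
		rem  = cyc & (((uint64_t)1 << tc->time_shift) - 1);
		return tc->time_zero + quot * tc->time_mult +
		       ((rem * tc->time_mult) >> tc->time_shift);
	}

	int main(void)
	{
		struct tsc_conv tc = {
			.time_shift = 21, .time_mult = 1u << 21, .time_zero = 0,
			.time_cycles = 0, .time_mask = ~0ULL, .cap_user_time_short = 0,
		};

		/* With mult == 1 << shift the conversion is the identity. */
		printf("%llu\n", (unsigned long long)tsc_to_perf_time(123456789, &tc));
		return 0;
	}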
1313 spe->auxtrace.process_event = arm_spe_process_event; in arm_spe_process_auxtrace_info()
1314 spe->auxtrace.process_auxtrace_event = arm_spe_process_auxtrace_event; in arm_spe_process_auxtrace_info()
1315 spe->auxtrace.flush_events = arm_spe_flush; in arm_spe_process_auxtrace_info()
1316 spe->auxtrace.free_events = arm_spe_free_events; in arm_spe_process_auxtrace_info()
1317 spe->auxtrace.free = arm_spe_free; in arm_spe_process_auxtrace_info()
1318 spe->auxtrace.evsel_is_auxtrace = arm_spe_evsel_is_auxtrace; in arm_spe_process_auxtrace_info()
1319 session->auxtrace = &spe->auxtrace; in arm_spe_process_auxtrace_info()
1321 arm_spe_print_info(&auxtrace_info->priv[0]); in arm_spe_process_auxtrace_info()
1326 if (session->itrace_synth_opts && session->itrace_synth_opts->set) in arm_spe_process_auxtrace_info()
1327 spe->synth_opts = *session->itrace_synth_opts; in arm_spe_process_auxtrace_info()
1329 itrace_synth_opts__set_default(&spe->synth_opts, false); in arm_spe_process_auxtrace_info()
1335 err = auxtrace_queues__process_index(&spe->queues, session); in arm_spe_process_auxtrace_info()
1339 if (spe->queues.populated) in arm_spe_process_auxtrace_info()
1340 spe->data_queued = true; in arm_spe_process_auxtrace_info()
1345 auxtrace_queues__free(&spe->queues); in arm_spe_process_auxtrace_info()
1346 session->auxtrace = NULL; in arm_spe_process_auxtrace_info()