Lines Matching +full:cortex +full:- +full:a8

1 // SPDX-License-Identifier: GPL-2.0
3 * ARMv7 Cortex-A8 and Cortex-A9 Performance Events handling code.
11 * Cortex-A8 has up to 4 configurable performance counters and a single cycle counter.
13 * Cortex-A9 has up to 31 configurable performance counters and a single cycle counter.
53 * - all (taken) branch instructions,
54 * - instructions that explicitly write the PC,
55 * - exception generating instructions.
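For reference, a minimal user-space sketch (not part of this driver) of counting that event: the generic branch-instructions event requested below is what this file maps onto the PC-write counter described above. Error handling is kept to a minimum.

#include <linux/perf_event.h>
#include <sys/ioctl.h>
#include <sys/syscall.h>
#include <unistd.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

int main(void)
{
	struct perf_event_attr attr;
	uint64_t count = 0;
	int fd;

	memset(&attr, 0, sizeof(attr));
	attr.size = sizeof(attr);
	attr.type = PERF_TYPE_HARDWARE;
	attr.config = PERF_COUNT_HW_BRANCH_INSTRUCTIONS;
	attr.disabled = 1;
	attr.exclude_kernel = 1;

	fd = syscall(__NR_perf_event_open, &attr, 0, -1, -1, 0);
	if (fd < 0)
		return 1;

	ioctl(fd, PERF_EVENT_IOC_ENABLE, 0);
	/* ... workload under test ... */
	ioctl(fd, PERF_EVENT_IOC_DISABLE, 0);
	if (read(fd, &count, sizeof(count)) == sizeof(count))
		printf("branches: %llu\n", (unsigned long long)count);
	close(fd);
	return 0;
}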
80 /* ARMv7 Cortex-A8 specific event types */
86 /* ARMv7 Cortex-A9 specific event types */
91 /* ARMv7 Cortex-A5 specific event types */
95 /* ARMv7 Cortex-A15 specific event types */
111 /* ARMv7 Cortex-A12 specific event types */
150 * Cortex-A8 HW events mapping
203 * Cortex-A9 HW events mapping
247 * Cortex-A5 HW events mapping
293 * Cortex-A15 HW events mapping
342 * Cortex-A7 HW events mapping
391 * Cortex-A12 HW events mapping
533 PMU_FORMAT_ATTR(event, "config:0-7");
659 * Per-CPU PMNC: config reg
666 #define ARMV7_PMNC_DP (1 << 5) /* Disable CCNT if non-invasive debug */
693 #define ARMV7_SDER_SUNIDEN BIT(1) /* Permit non-invasive debug */
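A sketch of how the PMNC control register is typically accessed, assuming the standard ARMv7 CP15 encoding (PMNC is PMCR at c9, c12, 0, accessible at PL1). The bit names are illustrative; only DP appears in the matched lines above.

#include <stdint.h>

#define PMNC_E	(1u << 0)	/* enable all counters */
#define PMNC_P	(1u << 1)	/* reset event counters */
#define PMNC_C	(1u << 2)	/* reset cycle counter */
#define PMNC_DP	(1u << 5)	/* disable CCNT when non-invasive debug is prohibited */

static inline uint32_t pmnc_read(void)
{
	uint32_t val;

	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r" (val));
	return val;
}

static inline void pmnc_write(uint32_t val)
{
	asm volatile("isb" : : : "memory");
	asm volatile("mcr p15, 0, %0, c9, c12, 0" : : "r" (val));
}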
716 return test_bit(idx, cpu_pmu->cntr_mask); in armv7_pmnc_counter_valid()
732 struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu); in armv7pmu_read_counter()
733 struct hw_perf_event *hwc = &event->hw; in armv7pmu_read_counter()
734 int idx = hwc->idx; in armv7pmu_read_counter()
752 struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu); in armv7pmu_write_counter()
753 struct hw_perf_event *hwc = &event->hw; in armv7pmu_write_counter()
754 int idx = hwc->idx; in armv7pmu_write_counter()
838 for_each_set_bit(cnt, cpu_pmu->cntr_mask, ARMV7_IDX_COUNTER_MAX) { in armv7_pmnc_dump_regs()
850 struct hw_perf_event *hwc = &event->hw; in armv7pmu_enable_event()
851 struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu); in armv7pmu_enable_event()
852 int idx = hwc->idx; in armv7pmu_enable_event()
875 if (cpu_pmu->set_event_filter || idx != ARMV7_IDX_CYCLE_COUNTER) in armv7pmu_enable_event()
876 armv7_pmnc_write_evtsel(idx, hwc->config_base); in armv7pmu_enable_event()
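The event selector is always written for the programmable counters, but for the cycle counter only when mode filtering is in use. A sketch of what such a write typically looks like, assuming the usual PMSELR (c9, c12, 5) and PMXEVTYPER (c9, c13, 1) encodings:

#include <stdint.h>

/* Select counter 'counter' via PMSELR, then program its event type. */
static inline void write_evtsel(uint32_t counter, uint32_t evtype)
{
	asm volatile("mcr p15, 0, %0, c9, c12, 5" : : "r" (counter));
	asm volatile("isb" : : : "memory");
	asm volatile("mcr p15, 0, %0, c9, c13, 1" : : "r" (evtype));
}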
891 struct hw_perf_event *hwc = &event->hw; in armv7pmu_disable_event()
892 struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu); in armv7pmu_disable_event()
893 int idx = hwc->idx; in armv7pmu_disable_event()
920 struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events); in armv7pmu_handle_irq()
940 for_each_set_bit(idx, cpu_pmu->cntr_mask, ARMPMU_MAX_HWEVENTS) { in armv7pmu_handle_irq()
941 struct perf_event *event = cpuc->events[idx]; in armv7pmu_handle_irq()
955 hwc = &event->hw; in armv7pmu_handle_irq()
957 perf_sample_data_init(&data, 0, hwc->last_period); in armv7pmu_handle_irq()
962 cpu_pmu->disable(event); in armv7pmu_handle_irq()
993 struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu); in armv7pmu_get_event_idx()
994 struct hw_perf_event *hwc = &event->hw; in armv7pmu_get_event_idx()
995 unsigned long evtype = hwc->config_base & ARMV7_EVTYPE_EVENT; in armv7pmu_get_event_idx()
999 if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask)) in armv7pmu_get_event_idx()
1000 return -EAGAIN; in armv7pmu_get_event_idx()
1009 for_each_set_bit(idx, cpu_pmu->cntr_mask, ARMV7_IDX_COUNTER_MAX) { in armv7pmu_get_event_idx()
1010 if (!test_and_set_bit(idx, cpuc->used_mask)) in armv7pmu_get_event_idx()
1015 return -EAGAIN; in armv7pmu_get_event_idx()
1021 clear_bit(event->hw.idx, cpuc->used_mask); in armv7pmu_clear_event_idx()
1032 if (attr->exclude_idle) { in armv7pmu_set_event_filter()
1034 return -EOPNOTSUPP; in armv7pmu_set_event_filter()
1036 if (attr->exclude_user) in armv7pmu_set_event_filter()
1038 if (attr->exclude_kernel) in armv7pmu_set_event_filter()
1040 if (!attr->exclude_hv) in armv7pmu_set_event_filter()
1047 event->config_base = config_base; in armv7pmu_set_event_filter()
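The filter built here is stored in config_base and later merged into the event-type register by the enable path. A self-contained sketch of the same logic, assuming the usual PMXEVTYPER filter bit positions (P at bit 31, U at bit 30, NSH at bit 27); treat the exact positions as an assumption:

#include <stdbool.h>
#include <stdint.h>

#define EXCLUDE_PL1	(1u << 31)	/* don't count in privileged modes */
#define EXCLUDE_USER	(1u << 30)	/* don't count in user mode */
#define INCLUDE_HYP	(1u << 27)	/* also count in Hyp mode */

static uint32_t build_filter(bool exclude_user, bool exclude_kernel, bool exclude_hv)
{
	uint32_t config_base = 0;

	if (exclude_user)
		config_base |= EXCLUDE_USER;
	if (exclude_kernel)
		config_base |= EXCLUDE_PL1;
	if (!exclude_hv)
		config_base |= INCLUDE_HYP;
	return config_base;
}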
1057 if (cpu_pmu->secure_access) { in armv7pmu_reset()
1064 for_each_set_bit(idx, cpu_pmu->cntr_mask, ARMPMU_MAX_HWEVENTS) { in armv7pmu_reset()
1129 cpu_pmu->handle_irq = armv7pmu_handle_irq; in armv7pmu_init()
1130 cpu_pmu->enable = armv7pmu_enable_event; in armv7pmu_init()
1131 cpu_pmu->disable = armv7pmu_disable_event; in armv7pmu_init()
1132 cpu_pmu->read_counter = armv7pmu_read_counter; in armv7pmu_init()
1133 cpu_pmu->write_counter = armv7pmu_write_counter; in armv7pmu_init()
1134 cpu_pmu->get_event_idx = armv7pmu_get_event_idx; in armv7pmu_init()
1135 cpu_pmu->clear_event_idx = armv7pmu_clear_event_idx; in armv7pmu_init()
1136 cpu_pmu->start = armv7pmu_start; in armv7pmu_init()
1137 cpu_pmu->stop = armv7pmu_stop; in armv7pmu_init()
1138 cpu_pmu->reset = armv7pmu_reset; in armv7pmu_init()
1148 bitmap_set(cpu_pmu->cntr_mask, 0, nb_cnt); in armv7_read_num_pmnc_events()
1151 set_bit(ARMV7_IDX_CYCLE_COUNTER, cpu_pmu->cntr_mask); in armv7_read_num_pmnc_events()
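The counter mask above is sized from the hardware itself: the PMCR.N field (bits [15:11]) gives the number of programmable counters, and the dedicated cycle counter is added on top. A sketch of reading it:

#include <stdint.h>

static int num_programmable_counters(void)
{
	uint32_t pmcr;

	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r" (pmcr));
	return (pmcr >> 11) & 0x1f;	/* PMCR.N */
}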
1156 return smp_call_function_any(&arm_pmu->supported_cpus, in armv7_probe_num_events()
1164 cpu_pmu->name = "armv7_cortex_a8"; in armv7_a8_pmu_init()
1165 cpu_pmu->map_event = armv7_a8_map_event; in armv7_a8_pmu_init()
1166 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] = in armv7_a8_pmu_init()
1168 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] = in armv7_a8_pmu_init()
1176 cpu_pmu->name = "armv7_cortex_a9"; in armv7_a9_pmu_init()
1177 cpu_pmu->map_event = armv7_a9_map_event; in armv7_a9_pmu_init()
1178 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] = in armv7_a9_pmu_init()
1180 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] = in armv7_a9_pmu_init()
1188 cpu_pmu->name = "armv7_cortex_a5"; in armv7_a5_pmu_init()
1189 cpu_pmu->map_event = armv7_a5_map_event; in armv7_a5_pmu_init()
1190 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] = in armv7_a5_pmu_init()
1192 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] = in armv7_a5_pmu_init()
1200 cpu_pmu->name = "armv7_cortex_a15"; in armv7_a15_pmu_init()
1201 cpu_pmu->map_event = armv7_a15_map_event; in armv7_a15_pmu_init()
1202 cpu_pmu->set_event_filter = armv7pmu_set_event_filter; in armv7_a15_pmu_init()
1203 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] = in armv7_a15_pmu_init()
1205 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] = in armv7_a15_pmu_init()
1213 cpu_pmu->name = "armv7_cortex_a7"; in armv7_a7_pmu_init()
1214 cpu_pmu->map_event = armv7_a7_map_event; in armv7_a7_pmu_init()
1215 cpu_pmu->set_event_filter = armv7pmu_set_event_filter; in armv7_a7_pmu_init()
1216 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] = in armv7_a7_pmu_init()
1218 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] = in armv7_a7_pmu_init()
1226 cpu_pmu->name = "armv7_cortex_a12"; in armv7_a12_pmu_init()
1227 cpu_pmu->map_event = armv7_a12_map_event; in armv7_a12_pmu_init()
1228 cpu_pmu->set_event_filter = armv7pmu_set_event_filter; in armv7_a12_pmu_init()
1229 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] = in armv7_a12_pmu_init()
1231 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] = in armv7_a12_pmu_init()
1239 cpu_pmu->name = "armv7_cortex_a17"; in armv7_a17_pmu_init()
1240 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] = in armv7_a17_pmu_init()
1242 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] = in armv7_a17_pmu_init()
1251-1259 * [Krait PMRESRn register layout diagram (box borders only)]
1264 * hwc->config_base = 0xNRCCG
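A sketch of decoding that 0xNRCCG encoding: G in bits [3:0], CC in bits [11:4], R in bits [15:12], and N as a flag nibble above bit 16 distinguishing CPU (PMRESRn) from Venum (VPMRESR) events. The positions mirror the driver's EVENT_* helpers; treat them as illustrative.

#include <stdbool.h>
#include <stdint.h>

#define KRAIT_EVENT	(1u << 16)	/* N = 1: CPU PMRESRn event */
#define VENUM_EVENT	(2u << 16)	/* N = 2: Venum VFP VPMRESR event */

static inline unsigned int event_group(uint32_t cfg)  { return cfg & 0xf; }          /* G  */
static inline unsigned int event_code(uint32_t cfg)   { return (cfg >> 4) & 0xff; }  /* CC */
static inline unsigned int event_region(uint32_t cfg) { return (cfg >> 12) & 0xf; }  /* R  */
static inline bool event_is_venum(uint32_t cfg)       { return cfg & VENUM_EVENT; }
static inline bool event_is_cpu(uint32_t cfg)         { return cfg & KRAIT_EVENT; }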
1396 /* Mix in mode-exclusion bits */ in krait_evt_setup()
1456 struct hw_perf_event *hwc = &event->hw; in krait_pmu_disable_event()
1457 int idx = hwc->idx; in krait_pmu_disable_event()
1467 if (hwc->config_base & KRAIT_EVENT_MASK) in krait_pmu_disable_event()
1468 krait_clearpmu(hwc->config_base); in krait_pmu_disable_event()
1476 struct hw_perf_event *hwc = &event->hw; in krait_pmu_enable_event()
1477 int idx = hwc->idx; in krait_pmu_enable_event()
1492 if (hwc->config_base & KRAIT_EVENT_MASK) in krait_pmu_enable_event()
1493 krait_evt_setup(idx, hwc->config_base); in krait_pmu_enable_event()
1495 armv7_pmnc_write_evtsel(idx, hwc->config_base); in krait_pmu_enable_event()
1522 for_each_set_bit(idx, cpu_pmu->cntr_mask, ARMV7_IDX_COUNTER_MAX) { in krait_pmu_reset()
1533 struct hw_perf_event *hwc = &event->hw; in krait_event_to_bit()
1534 struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu); in krait_event_to_bit()
1536 if (hwc->config_base & VENUM_EVENT) in krait_event_to_bit()
1540 bit -= krait_get_pmresrn_event(0); in krait_event_to_bit()
1546 bit += bitmap_weight(cpu_pmu->cntr_mask, ARMV7_IDX_COUNTER_MAX); in krait_event_to_bit()
1559 int bit = -1; in krait_pmu_get_event_idx()
1560 struct hw_perf_event *hwc = &event->hw; in krait_pmu_get_event_idx()
1561 unsigned int region = EVENT_REGION(hwc->config_base); in krait_pmu_get_event_idx()
1562 unsigned int code = EVENT_CODE(hwc->config_base); in krait_pmu_get_event_idx()
1563 unsigned int group = EVENT_GROUP(hwc->config_base); in krait_pmu_get_event_idx()
1564 bool venum_event = EVENT_VENUM(hwc->config_base); in krait_pmu_get_event_idx()
1565 bool krait_event = EVENT_CPU(hwc->config_base); in krait_pmu_get_event_idx()
1570 return -EINVAL; in krait_pmu_get_event_idx()
1572 return -EINVAL; in krait_pmu_get_event_idx()
1575 if (test_and_set_bit(bit, cpuc->used_mask)) in krait_pmu_get_event_idx()
1576 return -EAGAIN; in krait_pmu_get_event_idx()
1581 clear_bit(bit, cpuc->used_mask); in krait_pmu_get_event_idx()
1590 struct hw_perf_event *hwc = &event->hw; in krait_pmu_clear_event_idx()
1591 unsigned int region = EVENT_REGION(hwc->config_base); in krait_pmu_clear_event_idx()
1592 unsigned int group = EVENT_GROUP(hwc->config_base); in krait_pmu_clear_event_idx()
1593 bool venum_event = EVENT_VENUM(hwc->config_base); in krait_pmu_clear_event_idx()
1594 bool krait_event = EVENT_CPU(hwc->config_base); in krait_pmu_clear_event_idx()
1599 clear_bit(bit, cpuc->used_mask); in krait_pmu_clear_event_idx()
1606 cpu_pmu->name = "armv7_krait"; in krait_pmu_init()
1608 if (of_property_read_bool(cpu_pmu->plat_device->dev.of_node, in krait_pmu_init()
1609 "qcom,no-pc-write")) in krait_pmu_init()
1610 cpu_pmu->map_event = krait_map_event_no_branch; in krait_pmu_init()
1612 cpu_pmu->map_event = krait_map_event; in krait_pmu_init()
1613 cpu_pmu->set_event_filter = armv7pmu_set_event_filter; in krait_pmu_init()
1614 cpu_pmu->reset = krait_pmu_reset; in krait_pmu_init()
1615 cpu_pmu->enable = krait_pmu_enable_event; in krait_pmu_init()
1616 cpu_pmu->disable = krait_pmu_disable_event; in krait_pmu_init()
1617 cpu_pmu->get_event_idx = krait_pmu_get_event_idx; in krait_pmu_init()
1618 cpu_pmu->clear_event_idx = krait_pmu_clear_event_idx; in krait_pmu_init()
1626-1636 * [Scorpion PMRESRn register layout diagram (box borders only)]
1642 * hwc->config_base = 0xNRCCG
1731 /* Mix in mode-exclusion bits */ in scorpion_evt_setup()
1777 struct hw_perf_event *hwc = &event->hw; in scorpion_pmu_disable_event()
1778 int idx = hwc->idx; in scorpion_pmu_disable_event()
1788 if (hwc->config_base & KRAIT_EVENT_MASK) in scorpion_pmu_disable_event()
1789 scorpion_clearpmu(hwc->config_base); in scorpion_pmu_disable_event()
1797 struct hw_perf_event *hwc = &event->hw; in scorpion_pmu_enable_event()
1798 int idx = hwc->idx; in scorpion_pmu_enable_event()
1813 if (hwc->config_base & KRAIT_EVENT_MASK) in scorpion_pmu_enable_event()
1814 scorpion_evt_setup(idx, hwc->config_base); in scorpion_pmu_enable_event()
1816 armv7_pmnc_write_evtsel(idx, hwc->config_base); in scorpion_pmu_enable_event()
1844 for_each_set_bit(idx, cpu_pmu->cntr_mask, ARMV7_IDX_COUNTER_MAX) { in scorpion_pmu_reset()
1854 struct hw_perf_event *hwc = &event->hw; in scorpion_event_to_bit()
1855 struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu); in scorpion_event_to_bit()
1857 if (hwc->config_base & VENUM_EVENT) in scorpion_event_to_bit()
1861 bit -= scorpion_get_pmresrn_event(0); in scorpion_event_to_bit()
1867 bit += bitmap_weight(cpu_pmu->cntr_mask, ARMV7_IDX_COUNTER_MAX); in scorpion_event_to_bit()
1880 int bit = -1; in scorpion_pmu_get_event_idx()
1881 struct hw_perf_event *hwc = &event->hw; in scorpion_pmu_get_event_idx()
1882 unsigned int region = EVENT_REGION(hwc->config_base); in scorpion_pmu_get_event_idx()
1883 unsigned int group = EVENT_GROUP(hwc->config_base); in scorpion_pmu_get_event_idx()
1884 bool venum_event = EVENT_VENUM(hwc->config_base); in scorpion_pmu_get_event_idx()
1885 bool scorpion_event = EVENT_CPU(hwc->config_base); in scorpion_pmu_get_event_idx()
1890 return -EINVAL; in scorpion_pmu_get_event_idx()
1893 if (test_and_set_bit(bit, cpuc->used_mask)) in scorpion_pmu_get_event_idx()
1894 return -EAGAIN; in scorpion_pmu_get_event_idx()
1899 clear_bit(bit, cpuc->used_mask); in scorpion_pmu_get_event_idx()
1908 struct hw_perf_event *hwc = &event->hw; in scorpion_pmu_clear_event_idx()
1909 unsigned int region = EVENT_REGION(hwc->config_base); in scorpion_pmu_clear_event_idx()
1910 unsigned int group = EVENT_GROUP(hwc->config_base); in scorpion_pmu_clear_event_idx()
1911 bool venum_event = EVENT_VENUM(hwc->config_base); in scorpion_pmu_clear_event_idx()
1912 bool scorpion_event = EVENT_CPU(hwc->config_base); in scorpion_pmu_clear_event_idx()
1917 clear_bit(bit, cpuc->used_mask); in scorpion_pmu_clear_event_idx()
1924 cpu_pmu->name = "armv7_scorpion"; in scorpion_pmu_init()
1925 cpu_pmu->map_event = scorpion_map_event; in scorpion_pmu_init()
1926 cpu_pmu->reset = scorpion_pmu_reset; in scorpion_pmu_init()
1927 cpu_pmu->enable = scorpion_pmu_enable_event; in scorpion_pmu_init()
1928 cpu_pmu->disable = scorpion_pmu_disable_event; in scorpion_pmu_init()
1929 cpu_pmu->get_event_idx = scorpion_pmu_get_event_idx; in scorpion_pmu_init()
1930 cpu_pmu->clear_event_idx = scorpion_pmu_clear_event_idx; in scorpion_pmu_init()
1937 cpu_pmu->name = "armv7_scorpion_mp"; in scorpion_mp_pmu_init()
1938 cpu_pmu->map_event = scorpion_map_event; in scorpion_mp_pmu_init()
1939 cpu_pmu->reset = scorpion_pmu_reset; in scorpion_mp_pmu_init()
1940 cpu_pmu->enable = scorpion_pmu_enable_event; in scorpion_mp_pmu_init()
1941 cpu_pmu->disable = scorpion_pmu_disable_event; in scorpion_mp_pmu_init()
1942 cpu_pmu->get_event_idx = scorpion_pmu_get_event_idx; in scorpion_mp_pmu_init()
1943 cpu_pmu->clear_event_idx = scorpion_pmu_clear_event_idx; in scorpion_mp_pmu_init()
1948 {.compatible = "arm,cortex-a17-pmu", .data = armv7_a17_pmu_init},
1949 {.compatible = "arm,cortex-a15-pmu", .data = armv7_a15_pmu_init},
1950 {.compatible = "arm,cortex-a12-pmu", .data = armv7_a12_pmu_init},
1951 {.compatible = "arm,cortex-a9-pmu", .data = armv7_a9_pmu_init},
1952 {.compatible = "arm,cortex-a8-pmu", .data = armv7_a8_pmu_init},
1953 {.compatible = "arm,cortex-a7-pmu", .data = armv7_a7_pmu_init},
1954 {.compatible = "arm,cortex-a5-pmu", .data = armv7_a5_pmu_init},
1955 {.compatible = "qcom,krait-pmu", .data = krait_pmu_init},
1956 {.compatible = "qcom,scorpion-pmu", .data = scorpion_pmu_init},
1957 {.compatible = "qcom,scorpion-mp-pmu", .data = scorpion_mp_pmu_init},
1968 .name = "armv7-pmu",
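The .name line above belongs to the driver's platform_driver definition. A plausible sketch of the surrounding wiring, assuming the common ARM PMU probe helper; the probe function name is an assumption, while the match table is the one listed above:

#include <linux/of.h>
#include <linux/perf/arm_pmu.h>
#include <linux/platform_device.h>

static int armv7_pmu_device_probe(struct platform_device *pdev)
{
	/* Dispatch to the per-core init function matched from the OF table */
	return arm_pmu_device_probe(pdev, armv7_pmu_of_device_ids, NULL);
}

static struct platform_driver armv7_pmu_driver = {
	.driver		= {
		.name		= "armv7-pmu",
		.of_match_table	= armv7_pmu_of_device_ids,
	},
	.probe		= armv7_pmu_device_probe,
};
builtin_platform_driver(armv7_pmu_driver);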