Lines Matching +full:scan +full:-interval (net/bluetooth/hci_sync.c, BlueZ HCI command-sync core)
1 // SPDX-License-Identifier: GPL-2.0
3 * BlueZ - Bluetooth protocol stack for Linux
28 if (hdev->req_status != HCI_REQ_PEND) in hci_cmd_sync_complete()
31 hdev->req_result = result; in hci_cmd_sync_complete()
32 hdev->req_status = HCI_REQ_DONE; in hci_cmd_sync_complete()
35 kfree_skb(hdev->req_skb); in hci_cmd_sync_complete()
36 hdev->req_skb = NULL; in hci_cmd_sync_complete()
45 hdev->req_rsp = skb_get(skb); in hci_cmd_sync_complete()
48 wake_up_interruptible(&hdev->req_wait_q); in hci_cmd_sync_complete()
63 hdr->opcode = cpu_to_le16(opcode); in hci_cmd_sync_alloc()
64 hdr->plen = plen; in hci_cmd_sync_alloc()
69 bt_dev_dbg(hdev, "skb len %d", skb->len); in hci_cmd_sync_alloc()
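The skb allocated here carries a plain HCI command packet: struct hci_command_hdr (a 16-bit opcode plus an 8-bit parameter length) followed by plen parameter bytes, which is what the skb len debug print reflects. A worked example, assuming the standard HCI encoding (the opcode below is only an illustration):

	/* LE Set Scan Enable: OGF 0x08, OCF 0x000c -> opcode (0x08 << 10) | 0x0c = 0x200c
	 * plen = 2 (enable, filter_duplicates)
	 * skb layout: [opcode lo][opcode hi][plen][param0][param1]
	 * => skb->len = sizeof(struct hci_command_hdr) + plen = 3 + 2 = 5
	 */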
88 struct hci_dev *hdev = req->hdev; in hci_cmd_sync_add()
96 if (req->err) in hci_cmd_sync_add()
103 req->err = -ENOMEM; in hci_cmd_sync_add()
107 if (skb_queue_empty(&req->cmd_q)) in hci_cmd_sync_add()
108 bt_cb(skb)->hci.req_flags |= HCI_REQ_START; in hci_cmd_sync_add()
112 skb_queue_tail(&req->cmd_q, skb); in hci_cmd_sync_add()
117 struct hci_dev *hdev = req->hdev; in hci_req_sync_run()
121 bt_dev_dbg(hdev, "length %u", skb_queue_len(&req->cmd_q)); in hci_req_sync_run()
126 if (req->err) { in hci_req_sync_run()
127 skb_queue_purge(&req->cmd_q); in hci_req_sync_run()
128 return req->err; in hci_req_sync_run()
132 if (skb_queue_empty(&req->cmd_q)) in hci_req_sync_run()
133 return -ENODATA; in hci_req_sync_run()
135 skb = skb_peek_tail(&req->cmd_q); in hci_req_sync_run()
136 bt_cb(skb)->hci.req_complete_skb = hci_cmd_sync_complete; in hci_req_sync_run()
137 bt_cb(skb)->hci.req_flags |= HCI_REQ_SKB; in hci_req_sync_run()
139 spin_lock_irqsave(&hdev->cmd_q.lock, flags); in hci_req_sync_run()
140 skb_queue_splice_tail(&req->cmd_q, &hdev->cmd_q); in hci_req_sync_run()
141 spin_unlock_irqrestore(&hdev->cmd_q.lock, flags); in hci_req_sync_run()
143 queue_work(hdev->workqueue, &hdev->cmd_work); in hci_req_sync_run()
150 skb_queue_head_init(&req->cmd_q); in hci_request_init()
151 req->hdev = hdev; in hci_request_init()
152 req->err = 0; in hci_request_init()
155 /* This function requires the caller holds hdev->req_lock. */
170 hdev->req_status = HCI_REQ_PEND; in __hci_cmd_sync_sk()
176 err = wait_event_interruptible_timeout(hdev->req_wait_q, in __hci_cmd_sync_sk()
177 hdev->req_status != HCI_REQ_PEND, in __hci_cmd_sync_sk()
180 if (err == -ERESTARTSYS) in __hci_cmd_sync_sk()
181 return ERR_PTR(-EINTR); in __hci_cmd_sync_sk()
183 switch (hdev->req_status) { in __hci_cmd_sync_sk()
185 err = -bt_to_errno(hdev->req_result); in __hci_cmd_sync_sk()
189 err = -hdev->req_result; in __hci_cmd_sync_sk()
193 err = -ETIMEDOUT; in __hci_cmd_sync_sk()
197 hdev->req_status = 0; in __hci_cmd_sync_sk()
198 hdev->req_result = 0; in __hci_cmd_sync_sk()
199 skb = hdev->req_rsp; in __hci_cmd_sync_sk()
200 hdev->req_rsp = NULL; in __hci_cmd_sync_sk()
213 return ERR_PTR(-ENODATA); in __hci_cmd_sync_sk()
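Pieced together, the fragments above implement a classic wait-for-completion handshake: the caller queues the command, parks on hdev->req_wait_q, and hci_cmd_sync_complete() later fills in req_result/req_status/req_rsp and wakes it. A condensed sketch of the caller side, not the verbatim source; the elided lines and exact helper signatures are assumptions:

	hci_request_init(&req, hdev);
	hci_cmd_sync_add(&req, opcode, plen, param, event, sk);

	hdev->req_status = HCI_REQ_PEND;

	err = hci_req_sync_run(&req);		/* splice into hdev->cmd_q, kick cmd_work */
	if (err < 0)
		return ERR_PTR(err);

	err = wait_event_interruptible_timeout(hdev->req_wait_q,
					       hdev->req_status != HCI_REQ_PEND,
					       timeout);
	if (err == -ERESTARTSYS)
		return ERR_PTR(-EINTR);

	switch (hdev->req_status) {
	case HCI_REQ_DONE:			/* controller answered */
		err = -bt_to_errno(hdev->req_result);	/* HCI status -> errno */
		break;
	case HCI_REQ_CANCELED:			/* a cancel path ran */
		err = -hdev->req_result;	/* already an errno value */
		break;
	default:
		err = -ETIMEDOUT;		/* no completion before the timeout */
		break;
	}

	hdev->req_status = 0;
	hdev->req_result = 0;
	skb = hdev->req_rsp;			/* event skb saved by hci_cmd_sync_complete() */
	hdev->req_rsp = NULL;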
219 /* This function requires the caller holds hdev->req_lock. */
233 if (!test_bit(HCI_UP, &hdev->flags)) in hci_cmd_sync()
234 return ERR_PTR(-ENETDOWN); in hci_cmd_sync()
246 /* This function requires the caller holds hdev->req_lock. */
255 /* This function requires the caller holds hdev->req_lock. */
265 /* If the command returns a status event, skb will be set to -ENODATA */ in __hci_cmd_sync_status_sk()
266 if (skb == ERR_PTR(-ENODATA)) in __hci_cmd_sync_status_sk()
276 status = skb->data[0]; in __hci_cmd_sync_status_sk()
315 mutex_lock(&hdev->cmd_sync_work_lock); in hci_cmd_sync_work()
316 entry = list_first_entry_or_null(&hdev->cmd_sync_work_list, in hci_cmd_sync_work()
320 list_del(&entry->list); in hci_cmd_sync_work()
321 mutex_unlock(&hdev->cmd_sync_work_lock); in hci_cmd_sync_work()
328 if (entry->func) { in hci_cmd_sync_work()
332 err = entry->func(hdev, entry->data); in hci_cmd_sync_work()
333 if (entry->destroy) in hci_cmd_sync_work()
334 entry->destroy(hdev, entry->data, err); in hci_cmd_sync_work()
346 cancel_delayed_work_sync(&hdev->cmd_timer); in hci_cmd_sync_cancel_work()
347 cancel_delayed_work_sync(&hdev->ncmd_timer); in hci_cmd_sync_cancel_work()
348 atomic_set(&hdev->cmd_cnt, 1); in hci_cmd_sync_cancel_work()
350 wake_up_interruptible(&hdev->req_wait_q); in hci_cmd_sync_cancel_work()
378 bt_dev_err(hdev, "failed to disable LE scan: %d", status); in le_scan_disable()
382 /* If we were running LE only scan, change discovery state. If in le_scan_disable()
390 if (hdev->discovery.type == DISCOV_TYPE_LE) in le_scan_disable()
393 if (hdev->discovery.type != DISCOV_TYPE_INTERLEAVED) in le_scan_disable()
396 if (test_bit(HCI_QUIRK_SIMULTANEOUS_DISCOVERY, &hdev->quirks)) { in le_scan_disable()
397 if (!test_bit(HCI_INQUIRY, &hdev->flags) && in le_scan_disable()
398 hdev->discovery.state != DISCOVERY_RESOLVING) in le_scan_disable()
427 list_empty(&hdev->adv_instances)) in reenable_adv_sync()
430 if (hdev->cur_adv_instance) { in reenable_adv_sync()
432 hdev->cur_adv_instance, in reenable_adv_sync()
466 if (hdev->adv_instance_timeout) { in cancel_adv_timeout()
467 hdev->adv_instance_timeout = 0; in cancel_adv_timeout()
468 cancel_delayed_work(&hdev->adv_instance_expire); in cancel_adv_timeout()
473 * - force == true: The instance will be removed even when its remaining
475 * - force == false: the instance will be deactivated but kept stored unless
479 * - force == true: All instances will be removed regardless of their timeout
481 * - force == false: Only instances that have a timeout will be removed.
491 if (!instance || hdev->cur_adv_instance == instance) in hci_clear_adv_instance_sync()
498 if (instance && hdev->cur_adv_instance == instance) in hci_clear_adv_instance_sync()
502 list_for_each_entry_safe(adv_instance, n, &hdev->adv_instances, in hci_clear_adv_instance_sync()
504 if (!(force || adv_instance->timeout)) in hci_clear_adv_instance_sync()
507 rem_inst = adv_instance->instance; in hci_clear_adv_instance_sync()
515 if (force || (adv_instance && adv_instance->timeout && in hci_clear_adv_instance_sync()
516 !adv_instance->remaining_time)) { in hci_clear_adv_instance_sync()
519 next_instance->instance == instance) in hci_clear_adv_instance_sync()
533 next_instance->instance, in hci_clear_adv_instance_sync()
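A hedged illustration of the instance/force semantics documented above; the parameter order of hci_clear_adv_instance_sync() is inferred from the fragment, not quoted from the source:

	/* instance != 0x00: act on a single instance */
	hci_clear_adv_instance_sync(hdev, sk, 0x01, true);   /* remove even with time left */
	hci_clear_adv_instance_sync(hdev, sk, 0x01, false);  /* deactivate but keep it stored */

	/* instance == 0x00: act on every instance */
	hci_clear_adv_instance_sync(hdev, sk, 0x00, true);   /* remove all, timeout or not */
	hci_clear_adv_instance_sync(hdev, sk, 0x00, false);  /* remove only instances with a timeout */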
547 if (list_empty(&hdev->adv_instances)) in adv_timeout_expire_sync()
563 hdev->adv_instance_timeout = 0; in adv_timeout_expire()
565 if (hdev->cur_adv_instance == 0x00) in adv_timeout_expire()
572 *inst_ptr = hdev->cur_adv_instance; in adv_timeout_expire()
581 return hdev->interleave_scan_state != INTERLEAVE_SCAN_NONE; in is_interleave_scanning()
592 if (hdev->interleave_scan_state == INTERLEAVE_SCAN_ALLOWLIST) { in interleave_scan_work()
593 timeout = msecs_to_jiffies(hdev->advmon_allowlist_duration); in interleave_scan_work()
594 } else if (hdev->interleave_scan_state == INTERLEAVE_SCAN_NO_FILTER) { in interleave_scan_work()
595 timeout = msecs_to_jiffies(hdev->advmon_no_filter_duration); in interleave_scan_work()
605 switch (hdev->interleave_scan_state) { in interleave_scan_work()
608 hdev->interleave_scan_state = INTERLEAVE_SCAN_NO_FILTER; in interleave_scan_work()
612 hdev->interleave_scan_state = INTERLEAVE_SCAN_ALLOWLIST; in interleave_scan_work()
622 queue_delayed_work(hdev->req_workqueue, in interleave_scan_work()
623 &hdev->interleave_scan, timeout); in interleave_scan_work()
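In other words, interleave_scan_work() is a two-state toggle: scan with the current filter setting for that state's duration, flip the state, and re-arm itself. A behavioural sketch condensed from the fragments above (locking and error handling omitted):

	unsigned long timeout;

	if (hdev->interleave_scan_state == INTERLEAVE_SCAN_ALLOWLIST)
		timeout = msecs_to_jiffies(hdev->advmon_allowlist_duration);
	else
		timeout = msecs_to_jiffies(hdev->advmon_no_filter_duration);

	hci_passive_scan_sync(hdev);		/* re-program the scan for the current state */

	hdev->interleave_scan_state =		/* toggle for the next window */
		(hdev->interleave_scan_state == INTERLEAVE_SCAN_ALLOWLIST) ?
		INTERLEAVE_SCAN_NO_FILTER : INTERLEAVE_SCAN_ALLOWLIST;

	queue_delayed_work(hdev->req_workqueue, &hdev->interleave_scan, timeout);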
628 INIT_WORK(&hdev->cmd_sync_work, hci_cmd_sync_work); in hci_cmd_sync_init()
629 INIT_LIST_HEAD(&hdev->cmd_sync_work_list); in hci_cmd_sync_init()
630 mutex_init(&hdev->cmd_sync_work_lock); in hci_cmd_sync_init()
631 mutex_init(&hdev->unregister_lock); in hci_cmd_sync_init()
633 INIT_WORK(&hdev->cmd_sync_cancel_work, hci_cmd_sync_cancel_work); in hci_cmd_sync_init()
634 INIT_WORK(&hdev->reenable_adv_work, reenable_adv); in hci_cmd_sync_init()
635 INIT_DELAYED_WORK(&hdev->le_scan_disable, le_scan_disable); in hci_cmd_sync_init()
636 INIT_DELAYED_WORK(&hdev->adv_instance_expire, adv_timeout_expire); in hci_cmd_sync_init()
637 INIT_DELAYED_WORK(&hdev->interleave_scan, interleave_scan_work); in hci_cmd_sync_init()
644 if (entry->destroy) in _hci_cmd_sync_cancel_entry()
645 entry->destroy(hdev, entry->data, err); in _hci_cmd_sync_cancel_entry()
647 list_del(&entry->list); in _hci_cmd_sync_cancel_entry()
655 cancel_work_sync(&hdev->cmd_sync_work); in hci_cmd_sync_clear()
656 cancel_work_sync(&hdev->reenable_adv_work); in hci_cmd_sync_clear()
658 mutex_lock(&hdev->cmd_sync_work_lock); in hci_cmd_sync_clear()
659 list_for_each_entry_safe(entry, tmp, &hdev->cmd_sync_work_list, list) in hci_cmd_sync_clear()
660 _hci_cmd_sync_cancel_entry(hdev, entry, -ECANCELED); in hci_cmd_sync_clear()
661 mutex_unlock(&hdev->cmd_sync_work_lock); in hci_cmd_sync_clear()
668 if (hdev->req_status == HCI_REQ_PEND) { in hci_cmd_sync_cancel()
669 hdev->req_result = err; in hci_cmd_sync_cancel()
670 hdev->req_status = HCI_REQ_CANCELED; in hci_cmd_sync_cancel()
672 queue_work(hdev->workqueue, &hdev->cmd_sync_cancel_work); in hci_cmd_sync_cancel()
679 * - Set result and mark status to HCI_REQ_CANCELED
680 * - Wake up the command sync thread
686 if (hdev->req_status == HCI_REQ_PEND) { in hci_cmd_sync_cancel_sync()
690 hdev->req_result = err < 0 ? -err : err; in hci_cmd_sync_cancel_sync()
691 hdev->req_status = HCI_REQ_CANCELED; in hci_cmd_sync_cancel_sync()
693 wake_up_interruptible(&hdev->req_wait_q); in hci_cmd_sync_cancel_sync()
700 * - hdev must _not_ be unregistered
708 mutex_lock(&hdev->unregister_lock); in hci_cmd_sync_submit()
710 err = -ENODEV; in hci_cmd_sync_submit()
716 err = -ENOMEM; in hci_cmd_sync_submit()
719 entry->func = func; in hci_cmd_sync_submit()
720 entry->data = data; in hci_cmd_sync_submit()
721 entry->destroy = destroy; in hci_cmd_sync_submit()
723 mutex_lock(&hdev->cmd_sync_work_lock); in hci_cmd_sync_submit()
724 list_add_tail(&entry->list, &hdev->cmd_sync_work_list); in hci_cmd_sync_submit()
725 mutex_unlock(&hdev->cmd_sync_work_lock); in hci_cmd_sync_submit()
727 queue_work(hdev->req_workqueue, &hdev->cmd_sync_work); in hci_cmd_sync_submit()
730 mutex_unlock(&hdev->unregister_lock); in hci_cmd_sync_submit()
737 * - hdev must be running
745 if (!test_bit(HCI_RUNNING, &hdev->flags)) in hci_cmd_sync_queue()
746 return -ENETDOWN; in hci_cmd_sync_queue()
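hci_cmd_sync_queue() is the deferred entry point these checks guard: the callback runs later from hci_cmd_sync_work() on hdev->req_workqueue, and the optional destroy callback receives its result. A minimal usage sketch; the callback names and the ctx pointer are made up for illustration:

	static int example_work_sync(struct hci_dev *hdev, void *data)
	{
		/* Runs from hci_cmd_sync_work(); blocking __hci_cmd_sync*()
		 * helpers may be called from this context.
		 */
		return 0;
	}

	static void example_work_done(struct hci_dev *hdev, void *data, int err)
	{
		kfree(data);		/* err is example_work_sync()'s return value */
	}

	/* ... */
		err = hci_cmd_sync_queue(hdev, example_work_sync, ctx,
					 example_work_done);
		if (err < 0)		/* e.g. -ENETDOWN when !HCI_RUNNING */
			kfree(ctx);	/* destroy is not called if queueing fails */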
758 list_for_each_entry_safe(entry, tmp, &hdev->cmd_sync_work_list, list) { in _hci_cmd_sync_lookup_entry()
759 if (func && entry->func != func) in _hci_cmd_sync_lookup_entry()
762 if (data && entry->data != data) in _hci_cmd_sync_lookup_entry()
765 if (destroy && entry->destroy != destroy) in _hci_cmd_sync_lookup_entry()
776 * - Look up whether an entry already exists and only if it doesn't create a new entry
791 * - hdev must be running
792 * - if on cmd_sync_work then run immediately otherwise queue
800 if (!test_bit(HCI_RUNNING, &hdev->flags)) in hci_cmd_sync_run()
801 return -ENETDOWN; in hci_cmd_sync_run()
804 if (current_work() == &hdev->cmd_sync_work) in hci_cmd_sync_run()
813 * - Look up whether an entry already exists and only if it doesn't create a new entry
815 * - if on cmd_sync_work then run immediately otherwise queue
829 * - Return first entry that matches by function callback or data or
838 mutex_lock(&hdev->cmd_sync_work_lock); in hci_cmd_sync_lookup_entry()
840 mutex_unlock(&hdev->cmd_sync_work_lock); in hci_cmd_sync_lookup_entry()
850 mutex_lock(&hdev->cmd_sync_work_lock); in hci_cmd_sync_cancel_entry()
851 _hci_cmd_sync_cancel_entry(hdev, entry, -ECANCELED); in hci_cmd_sync_cancel_entry()
852 mutex_unlock(&hdev->cmd_sync_work_lock); in hci_cmd_sync_cancel_entry()
858 * - Look up and cancel the first entry that matches.
878 * - Look up and cancel any entry that matches by function callback or data or
887 mutex_lock(&hdev->cmd_sync_work_lock); in hci_cmd_sync_dequeue()
890 _hci_cmd_sync_cancel_entry(hdev, entry, -ECANCELED); in hci_cmd_sync_dequeue()
893 mutex_unlock(&hdev->cmd_sync_work_lock); in hci_cmd_sync_dequeue()
921 if (memcmp(cp.data, hdev->eir, sizeof(cp.data)) == 0) in hci_update_eir_sync()
924 memcpy(hdev->eir, cp.data, sizeof(cp.data)); in hci_update_eir_sync()
935 list_for_each_entry(uuid, &hdev->uuids, list) in get_service_classes()
936 val |= uuid->svc_hint; in get_service_classes()
956 cod[0] = hdev->minor_class; in hci_update_class_sync()
957 cod[1] = hdev->major_class; in hci_update_class_sync()
963 if (memcmp(cod, hdev->dev_class, 3) == 0) in hci_update_class_sync()
977 if (hdev->conn_hash.le_num_peripheral > 0) { in is_advertising_allowed()
981 if (!connectable && !(hdev->le_states[2] & 0x10)) in is_advertising_allowed()
987 if (connectable && (!(hdev->le_states[4] & 0x40) || in is_advertising_allowed()
988 !(hdev->le_states[2] & 0x20))) in is_advertising_allowed()
993 if (hci_conn_num(hdev, LE_LINK) != hdev->conn_hash.le_num_peripheral) { in is_advertising_allowed()
995 if (!connectable && !(hdev->le_states[2] & 0x02)) in is_advertising_allowed()
1001 if (connectable && (!(hdev->le_states[4] & 0x08) || in is_advertising_allowed()
1002 !(hdev->le_states[2] & 0x08))) in is_advertising_allowed()
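The le_states masks above index the controller's 8-byte "LE supported states" bitfield: bit N is tested as le_states[N / 8] & BIT(N % 8). Worked out for the checks in this function:

	/* le_states[2] & 0x10  ->  bit 2*8 + 4 = 20
	 * le_states[2] & 0x20  ->  bit 2*8 + 5 = 21
	 * le_states[2] & 0x02  ->  bit 2*8 + 1 = 17
	 * le_states[2] & 0x08  ->  bit 2*8 + 3 = 19
	 * le_states[4] & 0x40  ->  bit 4*8 + 6 = 38
	 * le_states[4] & 0x08  ->  bit 4*8 + 3 = 35
	 */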
1077 err = smp_generate_rpa(hdev, hdev->irk, &hdev->rpa); in hci_update_random_address_sync()
1083 err = hci_set_random_addr_sync(hdev, &hdev->rpa); in hci_update_random_address_sync()
1091 * use a non-resolvable private address. This is useful for active in hci_update_random_address_sync()
1092 * scanning and non-connectable advertising. in hci_update_random_address_sync()
1098 /* The non-resolvable private address is generated in hci_update_random_address_sync()
1105 /* The non-resolvable private address shall not be in hci_update_random_address_sync()
1108 if (bacmp(&hdev->bdaddr, &nrpa)) in hci_update_random_address_sync()
1122 * In case BR/EDR has been disabled on a dual-mode controller in hci_update_random_address_sync()
1127 !bacmp(&hdev->bdaddr, BDADDR_ANY) || in hci_update_random_address_sync()
1129 bacmp(&hdev->static_addr, BDADDR_ANY))) { in hci_update_random_address_sync()
1131 if (bacmp(&hdev->static_addr, &hdev->random_addr)) in hci_update_random_address_sync()
1133 &hdev->static_addr); in hci_update_random_address_sync()
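For the non-resolvable private address branch referenced in the comments above, the usual construction is: draw six random bytes, clear the two most significant bits of the top byte so the address is typed as non-resolvable, and retry if it happens to equal the public address. A sketch under those assumptions, not the verbatim source:

	bdaddr_t nrpa;

	while (true) {
		/* Candidate non-resolvable private address (NRPA) */
		get_random_bytes(&nrpa, 6);
		nrpa.b[5] &= 0x3f;	/* clear the two most significant bits */

		/* The NRPA must not match the controller's public address */
		if (bacmp(&hdev->bdaddr, &nrpa))
			break;
	}

	return hci_set_random_addr_sync(hdev, &nrpa);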
1157 return -EINVAL; in hci_disable_ext_adv_instance_sync()
1160 if (!adv->enabled) in hci_disable_ext_adv_instance_sync()
1167 set = (void *)cp->data; in hci_disable_ext_adv_instance_sync()
1170 cp->num_of_sets = !!instance; in hci_disable_ext_adv_instance_sync()
1171 cp->enable = 0x00; in hci_disable_ext_adv_instance_sync()
1173 set->handle = adv ? adv->handle : instance; in hci_disable_ext_adv_instance_sync()
1175 size = sizeof(*cp) + sizeof(*set) * cp->num_of_sets; in hci_disable_ext_adv_instance_sync()
1189 * hdev->random_addr to track its address so whenever it needs in hci_set_adv_set_random_addr_sync()
1191 * hdev->random_addr is shared with scan state machine. in hci_set_adv_set_random_addr_sync()
1221 return -EINVAL; in hci_setup_ext_adv_instance_sync()
1230 if (adv && !adv->pending) { in hci_setup_ext_adv_instance_sync()
1245 return -EPERM; in hci_setup_ext_adv_instance_sync()
1247 /* Set require_privacy to true only when non-connectable in hci_setup_ext_adv_instance_sync()
1249 * non-resolvable private address. in hci_setup_ext_adv_instance_sync()
1260 hci_cpu_to_le24(adv->min_interval, cp.min_interval); in hci_setup_ext_adv_instance_sync()
1261 hci_cpu_to_le24(adv->max_interval, cp.max_interval); in hci_setup_ext_adv_instance_sync()
1262 cp.tx_power = adv->tx_power; in hci_setup_ext_adv_instance_sync()
1264 hci_cpu_to_le24(hdev->le_adv_min_interval, cp.min_interval); in hci_setup_ext_adv_instance_sync()
1265 hci_cpu_to_le24(hdev->le_adv_max_interval, cp.max_interval); in hci_setup_ext_adv_instance_sync()
1301 cp.channel_map = hdev->le_adv_channel_map; in hci_setup_ext_adv_instance_sync()
1302 cp.handle = adv ? adv->handle : instance; in hci_setup_ext_adv_instance_sync()
1326 if (!bacmp(&random_addr, &adv->random_addr)) in hci_setup_ext_adv_instance_sync()
1329 if (!bacmp(&random_addr, &hdev->random_addr)) in hci_setup_ext_adv_instance_sync()
1350 if (!adv || !adv->scan_rsp_changed) in hci_set_ext_scan_rsp_data_sync()
1354 len = eir_create_scan_rsp(hdev, instance, pdu->data); in hci_set_ext_scan_rsp_data_sync()
1356 pdu->handle = adv ? adv->handle : instance; in hci_set_ext_scan_rsp_data_sync()
1357 pdu->length = len; in hci_set_ext_scan_rsp_data_sync()
1358 pdu->operation = LE_SET_ADV_DATA_OP_COMPLETE; in hci_set_ext_scan_rsp_data_sync()
1359 pdu->frag_pref = LE_SET_ADV_DATA_NO_FRAG; in hci_set_ext_scan_rsp_data_sync()
1368 adv->scan_rsp_changed = false; in hci_set_ext_scan_rsp_data_sync()
1370 memcpy(hdev->scan_rsp_data, pdu->data, len); in hci_set_ext_scan_rsp_data_sync()
1371 hdev->scan_rsp_data_len = len; in hci_set_ext_scan_rsp_data_sync()
1386 if (hdev->scan_rsp_data_len == len && in __hci_set_scan_rsp_data_sync()
1387 !memcmp(cp.data, hdev->scan_rsp_data, len)) in __hci_set_scan_rsp_data_sync()
1390 memcpy(hdev->scan_rsp_data, cp.data, sizeof(cp.data)); in __hci_set_scan_rsp_data_sync()
1391 hdev->scan_rsp_data_len = len; in __hci_set_scan_rsp_data_sync()
1420 return -EINVAL; in hci_enable_ext_advertising_sync()
1422 if (adv->enabled) in hci_enable_ext_advertising_sync()
1429 set = (void *)cp->data; in hci_enable_ext_advertising_sync()
1433 cp->enable = 0x01; in hci_enable_ext_advertising_sync()
1434 cp->num_of_sets = 0x01; in hci_enable_ext_advertising_sync()
1438 set->handle = adv ? adv->handle : instance; in hci_enable_ext_advertising_sync()
1443 if (adv && adv->timeout) { in hci_enable_ext_advertising_sync()
1444 u16 duration = adv->timeout * MSEC_PER_SEC; in hci_enable_ext_advertising_sync()
1447 set->duration = cpu_to_le16(duration / 10); in hci_enable_ext_advertising_sync()
1452 sizeof(*set) * cp->num_of_sets, in hci_enable_ext_advertising_sync()
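The duration handling above is a unit conversion: adv->timeout is kept in seconds, while the duration field of LE Set Extended Advertising Enable is expressed in 10 ms steps (standard HCI encoding, assumed here). So a 5-second instance timeout becomes duration = 5 * MSEC_PER_SEC = 5000 ms, and set->duration = 5000 / 10 = 500.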
1478 if (!adv || !adv->periodic || !adv->enabled) in hci_disable_per_advertising_sync()
1521 if (!adv || !adv->periodic) in hci_set_per_adv_data_sync()
1525 len = eir_create_per_adv_data(hdev, instance, pdu->data); in hci_set_per_adv_data_sync()
1527 pdu->length = len; in hci_set_per_adv_data_sync()
1528 pdu->handle = adv ? adv->handle : instance; in hci_set_per_adv_data_sync()
1529 pdu->operation = LE_SET_ADV_DATA_OP_COMPLETE; in hci_set_per_adv_data_sync()
1543 if (adv && adv->periodic && adv->enabled) in hci_enable_per_advertising_sync()
1573 if (!eir_get_service_data(adv->per_adv_data, adv->per_adv_data_len, in hci_adv_bcast_annoucement()
1581 if (eir_get_service_data(adv->adv_data, adv->adv_data_len, 0x1852, in hci_adv_bcast_annoucement()
1588 hci_set_adv_instance_data(hdev, adv->instance, sizeof(ad), ad, 0, NULL); in hci_adv_bcast_annoucement()
1590 return hci_update_adv_data_sync(hdev, adv->instance); in hci_adv_bcast_annoucement()
1613 adv->pending = false; in hci_start_per_adv_sync()
1679 hdev->cur_adv_instance); in hci_enable_advertising_sync()
1681 flags = hci_adv_instance_flags(hdev, hdev->cur_adv_instance); in hci_enable_advertising_sync()
1682 adv_instance = hci_find_adv_instance(hdev, hdev->cur_adv_instance); in hci_enable_advertising_sync()
1691 return -EINVAL; in hci_enable_advertising_sync()
1704 /* Set require_privacy to true only when non-connectable in hci_enable_advertising_sync()
1706 * non-resolvable private address. in hci_enable_advertising_sync()
1717 adv_min_interval = adv_instance->min_interval; in hci_enable_advertising_sync()
1718 adv_max_interval = adv_instance->max_interval; in hci_enable_advertising_sync()
1720 adv_min_interval = hdev->le_adv_min_interval; in hci_enable_advertising_sync()
1721 adv_max_interval = hdev->le_adv_max_interval; in hci_enable_advertising_sync()
1727 if (hci_adv_instance_is_scannable(hdev, hdev->cur_adv_instance)) in hci_enable_advertising_sync()
1742 cp.channel_map = hdev->le_adv_channel_map; in hci_enable_advertising_sync()
1761 list_empty(&hdev->adv_instances)) in hci_enable_advertising()
1781 return -EINVAL; in hci_remove_ext_adv_instance_sync()
1794 instance = adv->instance; in remove_ext_adv_sync()
1806 return -EINVAL; in hci_remove_ext_adv_instance()
1834 if (!adv || !adv->adv_data_changed) in hci_set_ext_adv_data_sync()
1838 len = eir_create_adv_data(hdev, instance, pdu->data); in hci_set_ext_adv_data_sync()
1840 pdu->length = len; in hci_set_ext_adv_data_sync()
1841 pdu->handle = adv ? adv->handle : instance; in hci_set_ext_adv_data_sync()
1842 pdu->operation = LE_SET_ADV_DATA_OP_COMPLETE; in hci_set_ext_adv_data_sync()
1843 pdu->frag_pref = LE_SET_ADV_DATA_NO_FRAG; in hci_set_ext_adv_data_sync()
1853 adv->adv_data_changed = false; in hci_set_ext_adv_data_sync()
1855 memcpy(hdev->adv_data, pdu->data, len); in hci_set_ext_adv_data_sync()
1856 hdev->adv_data_len = len; in hci_set_ext_adv_data_sync()
1872 if (hdev->adv_data_len == len && in hci_set_adv_data_sync()
1873 memcmp(cp.data, hdev->adv_data, len) == 0) in hci_set_adv_data_sync()
1876 memcpy(hdev->adv_data, cp.data, sizeof(cp.data)); in hci_set_adv_data_sync()
1877 hdev->adv_data_len = len; in hci_set_adv_data_sync()
1903 return -EPERM; in hci_schedule_adv_instance_sync()
1905 if (hdev->adv_instance_timeout) in hci_schedule_adv_instance_sync()
1906 return -EBUSY; in hci_schedule_adv_instance_sync()
1910 return -ENOENT; in hci_schedule_adv_instance_sync()
1920 if (adv->timeout == 0 || adv->duration <= adv->remaining_time) in hci_schedule_adv_instance_sync()
1921 timeout = adv->duration; in hci_schedule_adv_instance_sync()
1923 timeout = adv->remaining_time; in hci_schedule_adv_instance_sync()
1928 if (adv->timeout) in hci_schedule_adv_instance_sync()
1929 adv->remaining_time = adv->remaining_time - timeout; in hci_schedule_adv_instance_sync()
1933 hdev->adv_instance_timeout = timeout; in hci_schedule_adv_instance_sync()
1934 queue_delayed_work(hdev->req_workqueue, in hci_schedule_adv_instance_sync()
1935 &hdev->adv_instance_expire, in hci_schedule_adv_instance_sync()
1939 /* If we're just re-scheduling the same instance again then do not in hci_schedule_adv_instance_sync()
1943 if (!force && hdev->cur_adv_instance == instance && in hci_schedule_adv_instance_sync()
1947 hdev->cur_adv_instance = instance; in hci_schedule_adv_instance_sync()
1984 /* Cleanup non-ext instances */ in hci_clear_adv_sync()
1985 list_for_each_entry_safe(adv, n, &hdev->adv_instances, list) { in hci_clear_adv_sync()
1986 u8 instance = adv->instance; in hci_clear_adv_sync()
1989 if (!(force || adv->timeout)) in hci_clear_adv_sync()
2028 * - force == true: The instance will be removed even when its remaining
2030 * - force == false: the instance will be deactivated but kept stored unless
2034 * - force == true: All instances will be removed regardless of their timeout
2036 * - force == false: Only instances that have a timeout will be removed.
2045 if (!instance || hdev->cur_adv_instance == instance) in hci_remove_advertising_sync()
2052 if (hdev->cur_adv_instance == instance) in hci_remove_advertising_sync()
2062 if (force || (adv && adv->timeout && !adv->remaining_time)) { in hci_remove_advertising_sync()
2064 if (next && next->instance == instance) in hci_remove_advertising_sync()
2077 hci_schedule_adv_instance_sync(hdev, next->instance, false); in hci_remove_advertising_sync()
2183 if (hdev->scanning_paused) { in hci_scan_disable_sync()
2204 hdev->interleave_scan_state = INTERLEAVE_SCAN_NO_FILTER; in hci_start_interleave_scan()
2205 queue_delayed_work(hdev->req_workqueue, in hci_start_interleave_scan()
2206 &hdev->interleave_scan, 0); in hci_start_interleave_scan()
2211 bt_dev_dbg(hdev, "cancelling interleave scan"); in cancel_interleave_scan()
2213 cancel_delayed_work_sync(&hdev->interleave_scan); in cancel_interleave_scan()
2215 hdev->interleave_scan_state = INTERLEAVE_SCAN_NONE; in cancel_interleave_scan()
2223 /* Do interleaved scan only if all of the following are true: in hci_update_interleaved_scan_sync()
2224 * - There is at least one ADV monitor in hci_update_interleaved_scan_sync()
2225 * - At least one pending LE connection or one device to be scanned for in hci_update_interleaved_scan_sync()
2226 * - Monitor offloading is not supported in hci_update_interleaved_scan_sync()
2227 * If so, we should alternate between allowlist scan and one without in hci_update_interleaved_scan_sync()
2231 !(list_empty(&hdev->pend_le_conns) && in hci_update_interleaved_scan_sync()
2232 list_empty(&hdev->pend_le_reports)) && in hci_update_interleaved_scan_sync()
2239 bt_dev_dbg(hdev, "starting interleave scan"); in hci_update_interleaved_scan_sync()
2260 entry = hci_bdaddr_list_lookup_with_irk(&hdev->le_resolv_list, bdaddr, in hci_le_del_resolve_list_sync()
2279 if (!hci_bdaddr_list_lookup(&hdev->le_accept_list, bdaddr, bdaddr_type)) in hci_le_del_accept_list_sync()
2311 * Setting params to NULL programs local hdev->irk
2332 memcpy(cp.peer_irk, hdev->irk, 16); in hci_le_add_resolve_list_sync()
2336 irk = hci_find_irk_by_addr(hdev, ¶ms->addr, params->addr_type); in hci_le_add_resolve_list_sync()
2341 entry = hci_bdaddr_list_lookup_with_irk(&hdev->le_resolv_list, in hci_le_add_resolve_list_sync()
2342 ¶ms->addr, in hci_le_add_resolve_list_sync()
2343 params->addr_type); in hci_le_add_resolve_list_sync()
2347 cp.bdaddr_type = params->addr_type; in hci_le_add_resolve_list_sync()
2348 bacpy(&cp.bdaddr, ¶ms->addr); in hci_le_add_resolve_list_sync()
2349 memcpy(cp.peer_irk, irk->val, 16); in hci_le_add_resolve_list_sync()
2352 params->privacy_mode = HCI_NETWORK_PRIVACY; in hci_le_add_resolve_list_sync()
2355 p = hci_pend_le_action_lookup(&hdev->pend_le_conns, in hci_le_add_resolve_list_sync()
2356 ¶ms->addr, params->addr_type); in hci_le_add_resolve_list_sync()
2358 p = hci_pend_le_action_lookup(&hdev->pend_le_reports, in hci_le_add_resolve_list_sync()
2359 ¶ms->addr, params->addr_type); in hci_le_add_resolve_list_sync()
2361 WRITE_ONCE(p->privacy_mode, HCI_NETWORK_PRIVACY); in hci_le_add_resolve_list_sync()
2366 memcpy(cp.local_irk, hdev->irk, 16); in hci_le_add_resolve_list_sync()
2382 if (params->privacy_mode == HCI_DEVICE_PRIVACY) in hci_le_set_privacy_mode_sync()
2389 if (!(params->flags & HCI_CONN_FLAG_DEVICE_PRIVACY)) in hci_le_set_privacy_mode_sync()
2392 irk = hci_find_irk_by_addr(hdev, ¶ms->addr, params->addr_type); in hci_le_set_privacy_mode_sync()
2397 cp.bdaddr_type = irk->addr_type; in hci_le_set_privacy_mode_sync()
2398 bacpy(&cp.bdaddr, &irk->bdaddr); in hci_le_set_privacy_mode_sync()
2401 /* Note: params->privacy_mode is not updated since it is a copy */ in hci_le_set_privacy_mode_sync()
2419 if (hdev->suspended && in hci_le_add_accept_list_sync()
2420 !(params->flags & HCI_CONN_FLAG_REMOTE_WAKEUP)) { in hci_le_add_accept_list_sync()
2421 hci_le_del_accept_list_sync(hdev, ¶ms->addr, in hci_le_add_accept_list_sync()
2422 params->addr_type); in hci_le_add_accept_list_sync()
2427 if (*num_entries >= hdev->le_accept_list_size) in hci_le_add_accept_list_sync()
2428 return -ENOSPC; in hci_le_add_accept_list_sync()
2432 hci_find_irk_by_addr(hdev, ¶ms->addr, params->addr_type)) in hci_le_add_accept_list_sync()
2433 return -EINVAL; in hci_le_add_accept_list_sync()
2453 if (hci_bdaddr_list_lookup(&hdev->le_accept_list, ¶ms->addr, in hci_le_add_accept_list_sync()
2454 params->addr_type)) in hci_le_add_accept_list_sync()
2458 cp.bdaddr_type = params->addr_type; in hci_le_add_accept_list_sync()
2459 bacpy(&cp.bdaddr, ¶ms->addr); in hci_le_add_accept_list_sync()
2483 if (hdev->advertising_paused) in hci_pause_advertising_sync()
2498 hdev->discov_timeout = 0; in hci_pause_advertising_sync()
2514 hdev->advertising_paused = true; in hci_pause_advertising_sync()
2515 hdev->advertising_old_state = old_state; in hci_pause_advertising_sync()
2527 if (!hdev->advertising_paused) in hci_resume_advertising_sync()
2531 hdev->advertising_paused = false; in hci_resume_advertising_sync()
2532 if (hdev->advertising_old_state) { in hci_resume_advertising_sync()
2534 hdev->advertising_old_state = 0; in hci_resume_advertising_sync()
2540 /* Call for each tracked instance to be re-enabled */ in hci_resume_advertising_sync()
2541 list_for_each_entry_safe(adv, tmp, &hdev->adv_instances, list) { in hci_resume_advertising_sync()
2543 adv->instance); in hci_resume_advertising_sync()
2548 hci_remove_ext_adv_instance_sync(hdev, adv->instance, in hci_resume_advertising_sync()
2556 hdev->cur_adv_instance, in hci_resume_advertising_sync()
2560 hdev->advertising_paused = false; in hci_resume_advertising_sync()
2580 bt_dev_err(hdev, "Command not allowed when scan/LE connect"); in hci_pause_addr_resolution()
2581 return -EPERM; in hci_pause_addr_resolution()
2636 /* Racing adds are handled in next scan update */ in conn_params_copy()
2640 /* No hdev->lock, but: addr, addr_type are immutable. in conn_params_copy()
2646 bacpy(&p[i].addr, ¶ms->addr); in conn_params_copy()
2647 p[i].addr_type = params->addr_type; in conn_params_copy()
2648 p[i].flags = READ_ONCE(params->flags); in conn_params_copy()
2649 p[i].privacy_mode = READ_ONCE(params->privacy_mode); in conn_params_copy()
2662 if (!(hdev->commands[26] & 0x80)) in hci_le_clear_accept_list_sync()
2673 * use_ll_privacy((Disable Advertising) -> Disable Resolving List) ->
2674 * Remove Devices From Accept List ->
2675 * (has IRK && use_ll_privacy(Remove Devices From Resolving List))->
2676 * Add Devices to Accept List ->
2677 * (has IRK && use_ll_privacy(Remove Devices From Resolving List)) ->
2678 * use_ll_privacy(Enable Resolving List -> (Enable Advertising)) ->
2727 bacpy(&pa.addr, &sent->addr); in hci_update_accept_list_sync()
2728 pa.addr_type = sent->addr_type; in hci_update_accept_list_sync()
2748 list_for_each_entry_safe(b, t, &hdev->le_accept_list, list) { in hci_update_accept_list_sync()
2749 if (hci_conn_hash_lookup_le(hdev, &b->bdaddr, b->bdaddr_type)) in hci_update_accept_list_sync()
2753 pend_conn = hci_pend_le_action_lookup(&hdev->pend_le_conns, in hci_update_accept_list_sync()
2754 &b->bdaddr, in hci_update_accept_list_sync()
2755 b->bdaddr_type); in hci_update_accept_list_sync()
2756 pend_report = hci_pend_le_action_lookup(&hdev->pend_le_reports, in hci_update_accept_list_sync()
2757 &b->bdaddr, in hci_update_accept_list_sync()
2758 b->bdaddr_type); in hci_update_accept_list_sync()
2764 hci_le_del_accept_list_sync(hdev, &b->bdaddr, in hci_update_accept_list_sync()
2765 b->bdaddr_type); in hci_update_accept_list_sync()
2786 params = conn_params_copy(&hdev->pend_le_conns, &n); in hci_update_accept_list_sync()
2788 err = -ENOMEM; in hci_update_accept_list_sync()
2808 params = conn_params_copy(&hdev->pend_le_reports, &n); in hci_update_accept_list_sync()
2810 err = -ENOMEM; in hci_update_accept_list_sync()
2826 * - We are not currently suspending in hci_update_accept_list_sync()
2827 * - There are 1 or more ADV monitors registered and it's not offloaded in hci_update_accept_list_sync()
2828 * - Interleaved scanning is not currently using the allowlist in hci_update_accept_list_sync()
2830 if (!idr_is_empty(&hdev->adv_monitors_idr) && !hdev->suspended && in hci_update_accept_list_sync()
2832 hdev->interleave_scan_state != INTERLEAVE_SCAN_ALLOWLIST) in hci_update_accept_list_sync()
2833 err = -EINVAL; in hci_update_accept_list_sync()
2852 u8 type, u16 interval, u16 window) in hci_le_scan_phy_params() argument
2854 cp->type = type; in hci_le_scan_phy_params()
2855 cp->interval = cpu_to_le16(interval); in hci_le_scan_phy_params()
2856 cp->window = cpu_to_le16(window); in hci_le_scan_phy_params()
2860 u16 interval, u16 window, in hci_le_set_ext_scan_param_sync() argument
2869 phy = (void *)cp->data; in hci_le_set_ext_scan_param_sync()
2873 cp->own_addr_type = own_addr_type; in hci_le_set_ext_scan_param_sync()
2874 cp->filter_policy = filter_policy; in hci_le_set_ext_scan_param_sync()
2887 &sent->bdaddr); in hci_le_set_ext_scan_param_sync()
2889 struct bt_iso_qos *qos = &conn->iso_qos; in hci_le_set_ext_scan_param_sync()
2891 if (qos->bcast.in.phy & BT_ISO_PHY_1M || in hci_le_set_ext_scan_param_sync()
2892 qos->bcast.in.phy & BT_ISO_PHY_2M) { in hci_le_set_ext_scan_param_sync()
2893 cp->scanning_phys |= LE_SCAN_PHY_1M; in hci_le_set_ext_scan_param_sync()
2895 interval, in hci_le_set_ext_scan_param_sync()
2901 if (qos->bcast.in.phy & BT_ISO_PHY_CODED) { in hci_le_set_ext_scan_param_sync()
2902 cp->scanning_phys |= LE_SCAN_PHY_CODED; in hci_le_set_ext_scan_param_sync()
2904 interval * 3, in hci_le_set_ext_scan_param_sync()
2917 cp->scanning_phys |= LE_SCAN_PHY_1M; in hci_le_set_ext_scan_param_sync()
2918 hci_le_scan_phy_params(phy, type, interval, window); in hci_le_set_ext_scan_param_sync()
2924 cp->scanning_phys |= LE_SCAN_PHY_CODED; in hci_le_set_ext_scan_param_sync()
2925 hci_le_scan_phy_params(phy, type, interval * 3, window * 3); in hci_le_set_ext_scan_param_sync()
2932 return -EINVAL; in hci_le_set_ext_scan_param_sync()
2940 u16 interval, u16 window, in hci_le_set_scan_param_sync() argument
2946 return hci_le_set_ext_scan_param_sync(hdev, type, interval, in hci_le_set_scan_param_sync()
2952 cp.interval = cpu_to_le16(interval); in hci_le_set_scan_param_sync()
2961 static int hci_start_scan_sync(struct hci_dev *hdev, u8 type, u16 interval, in hci_start_scan_sync() argument
2967 if (hdev->scanning_paused) { in hci_start_scan_sync()
2972 err = hci_le_set_scan_param_sync(hdev, type, interval, window, in hci_start_scan_sync()
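The interval and window values handed to these helpers are in the HCI unit of 0.625 ms. For example, interval 0x0060 (96) with window 0x0030 (48) means scanning for 48 * 0.625 = 30 ms out of every 96 * 0.625 = 60 ms. The Coded PHY branch above passes interval * 3 and window * 3, keeping the same duty cycle on the slower PHY; that rationale is an interpretation, only the tripling itself is visible in the fragment.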
2984 u16 window, interval; in hci_passive_scan_sync() local
2988 if (hdev->scanning_paused) { in hci_passive_scan_sync()
3000 * during passive scanning. Not using a non-resolvable address in hci_passive_scan_sync()
3009 if (hdev->enable_advmon_interleave_scan && in hci_passive_scan_sync()
3013 bt_dev_dbg(hdev, "interleave state %d", hdev->interleave_scan_state); in hci_passive_scan_sync()
3025 if (hdev->suspended && !filter_policy) { in hci_passive_scan_sync()
3026 /* Check if accept list is empty then there is no need to scan in hci_passive_scan_sync()
3029 if (list_empty(&hdev->le_accept_list)) in hci_passive_scan_sync()
3033 * devices could not be programmed which in non-suspended case in hci_passive_scan_sync()
3052 (hdev->le_features[0] & HCI_LE_EXT_SCAN_POLICY)) in hci_passive_scan_sync()
3055 if (hdev->suspended) { in hci_passive_scan_sync()
3056 window = hdev->le_scan_window_suspend; in hci_passive_scan_sync()
3057 interval = hdev->le_scan_int_suspend; in hci_passive_scan_sync()
3059 window = hdev->le_scan_window_connect; in hci_passive_scan_sync()
3060 interval = hdev->le_scan_int_connect; in hci_passive_scan_sync()
3062 window = hdev->le_scan_window_adv_monitor; in hci_passive_scan_sync()
3063 interval = hdev->le_scan_int_adv_monitor; in hci_passive_scan_sync()
3079 window = hdev->le_scan_window; in hci_passive_scan_sync()
3080 interval = hdev->le_scan_interval; in hci_passive_scan_sync()
3089 bt_dev_dbg(hdev, "LE passive scan with acceptlist = %d", filter_policy); in hci_passive_scan_sync()
3091 return hci_start_scan_sync(hdev, LE_SCAN_PASSIVE, interval, window, in hci_passive_scan_sync()
3095 /* This function controls the passive scanning based on hdev->pend_le_conns
3099 * If there are devices to scan:
3101 * Disable Scanning -> Update Accept List ->
3102 * use_ll_privacy((Disable Advertising) -> Disable Resolving List ->
3103 * Update Resolving List -> Enable Resolving List -> (Enable Advertising)) ->
3114 if (!test_bit(HCI_UP, &hdev->flags) || in hci_update_passive_scan_sync()
3115 test_bit(HCI_INIT, &hdev->flags) || in hci_update_passive_scan_sync()
3127 if (hdev->discovery.state != DISCOVERY_STOPPED) in hci_update_passive_scan_sync()
3143 list_empty(&hdev->pend_le_conns) && in hci_update_passive_scan_sync()
3144 list_empty(&hdev->pend_le_reports) && in hci_update_passive_scan_sync()
3160 * keep the background scan running. in hci_update_passive_scan_sync()
3164 * since some controllers are not able to scan and connect at in hci_update_passive_scan_sync()
3199 if (!test_bit(HCI_UP, &hdev->flags) || in hci_update_passive_scan()
3200 test_bit(HCI_INIT, &hdev->flags) || in hci_update_passive_scan()
3223 hdev->features[1][0] |= LMP_HOST_SC; in hci_write_sc_support_sync()
3226 hdev->features[1][0] &= ~LMP_HOST_SC; in hci_write_sc_support_sync()
3302 list_empty(&hdev->adv_instances)) { in hci_powered_update_adv_sync()
3318 list_for_each_entry_safe(adv, tmp, &hdev->adv_instances, list) in hci_powered_update_adv_sync()
3319 hci_schedule_adv_instance_sync(hdev, adv->instance, true); in hci_powered_update_adv_sync()
3329 if (link_sec == test_bit(HCI_AUTH, &hdev->flags)) in hci_write_auth_enable_sync()
3346 if (hdev->hci_ver < BLUETOOTH_VER_1_2) in hci_write_fast_connectable_sync()
3354 /* 160 msec page scan interval */ in hci_write_fast_connectable_sync()
3355 cp.interval = cpu_to_le16(0x0100); in hci_write_fast_connectable_sync()
3357 type = hdev->def_page_scan_type; in hci_write_fast_connectable_sync()
3358 cp.interval = cpu_to_le16(hdev->def_page_scan_int); in hci_write_fast_connectable_sync()
3361 cp.window = cpu_to_le16(hdev->def_page_scan_window); in hci_write_fast_connectable_sync()
3363 if (__cpu_to_le16(hdev->page_scan_interval) != cp.interval || in hci_write_fast_connectable_sync()
3364 __cpu_to_le16(hdev->page_scan_window) != cp.window) { in hci_write_fast_connectable_sync()
3372 if (hdev->page_scan_type != type) in hci_write_fast_connectable_sync()
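Applying the same 0.625 ms unit explains the fast-connectable numbers: cp.interval = 0x0100 is 256 * 0.625 ms = 160 ms, matching the "160 msec page scan interval" comment, while the non-fast path falls back to hdev->def_page_scan_type/int/window.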
3385 list_for_each_entry(b, &hdev->accept_list, list) { in disconnected_accept_list_entries()
3388 conn = hci_conn_hash_lookup_ba(hdev, ACL_LINK, &b->bdaddr); in disconnected_accept_list_entries()
3392 if (conn->state != BT_CONNECTED && conn->state != BT_CONFIG) in disconnected_accept_list_entries()
3408 u8 scan; in hci_update_scan_sync() local
3419 if (hdev->scanning_paused) in hci_update_scan_sync()
3424 scan = SCAN_PAGE; in hci_update_scan_sync()
3426 scan = SCAN_DISABLED; in hci_update_scan_sync()
3429 scan |= SCAN_INQUIRY; in hci_update_scan_sync()
3431 if (test_bit(HCI_PSCAN, &hdev->flags) == !!(scan & SCAN_PAGE) && in hci_update_scan_sync()
3432 test_bit(HCI_ISCAN, &hdev->flags) == !!(scan & SCAN_INQUIRY)) in hci_update_scan_sync()
3435 return hci_write_scan_enable_sync(hdev, scan); in hci_update_scan_sync()
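The scan value written by hci_write_scan_enable_sync() is the classic Write Scan Enable bitmask: 0x00 disabled, 0x01 inquiry scan only, 0x02 page scan only, 0x03 both (SCAN_PAGE | SCAN_INQUIRY). The HCI_PSCAN/HCI_ISCAN comparison above skips the command when the controller is already in the requested state.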
3444 memcpy(cp.name, hdev->dev_name, sizeof(cp.name)); in hci_update_name_sync()
3456 * HCI_LE_ENABLED(use_ll_privacy(Add local IRK to Resolving List) ->
3459 * lmp_bredr_capable(Set Fast Connectable -> Set Scan Type -> Set Class ->
3460 * Set Name -> Set EIR)
3506 * In case BR/EDR has been disabled on a dual-mode controller in hci_powered_update_sync()
3511 (!bacmp(&hdev->bdaddr, BDADDR_ANY) && in hci_powered_update_sync()
3513 if (bacmp(&hdev->static_addr, BDADDR_ANY)) in hci_powered_update_sync()
3515 &hdev->static_addr); in hci_powered_update_sync()
3522 * hci_dev_get_bd_addr_from_property - Get the Bluetooth Device Address
3527 * Search the firmware node for 'local-bd-address'.
3529 * All-zero BD addresses are rejected, because those could be properties
3531 * example, the DTS could define 'local-bd-address', with zero BD addresses.
3535 struct fwnode_handle *fwnode = dev_fwnode(hdev->dev.parent); in hci_dev_get_bd_addr_from_property()
3539 ret = fwnode_property_read_u8_array(fwnode, "local-bd-address", in hci_dev_get_bd_addr_from_property()
3544 if (test_bit(HCI_QUIRK_BDADDR_PROPERTY_BROKEN, &hdev->quirks)) in hci_dev_get_bd_addr_from_property()
3545 baswap(&hdev->public_addr, &ba); in hci_dev_get_bd_addr_from_property()
3547 bacpy(&hdev->public_addr, &ba); in hci_dev_get_bd_addr_from_property()
3602 set_bit(HCI_RESET, &hdev->flags); in hci_reset_sync()
3619 if (!test_bit(HCI_QUIRK_RESET_ON_CLOSE, &hdev->quirks)) { in hci_init0_sync()
3632 if (test_bit(HCI_QUIRK_RAW_DEVICE, &hdev->quirks)) in hci_unconf_init_sync()
3674 if (hdev->hci_ver > BLUETOOTH_VER_1_1 && in hci_read_local_cmds_sync()
3675 !test_bit(HCI_QUIRK_BROKEN_LOCAL_COMMANDS, &hdev->quirks)) in hci_read_local_cmds_sync()
3689 if (!test_bit(HCI_QUIRK_RESET_ON_CLOSE, &hdev->quirks)) { in hci_init1_sync()
3749 if (test_bit(HCI_QUIRK_BROKEN_FILTER_CLEAR_ALL, &hdev->quirks)) in hci_set_event_filter_sync()
3776 if (test_bit(HCI_QUIRK_BROKEN_FILTER_CLEAR_ALL, &hdev->quirks)) in hci_clear_event_filter_sync()
3826 hdev->max_page = 0x01; in hci_write_ssp_mode_1_sync()
3839 memset(hdev->eir, 0, sizeof(hdev->eir)); in hci_write_eir_sync()
3851 !test_bit(HCI_QUIRK_FIXUP_INQUIRY_MODE, &hdev->quirks)) in hci_write_inquiry_mode_sync()
3915 if (iso_capable(hdev) && hdev->commands[41] & 0x20) in hci_le_read_buffer_size_sync()
3971 /* LE-only controllers have LE implicitly enabled */ in hci_init2_sync()
3990 if (hdev->hci_ver < BLUETOOTH_VER_1_2) in hci_set_event_mask_sync()
4000 if (hdev->suspended) { in hci_set_event_mask_sync()
4005 /* Use a different default for LE-only devices */ in hci_set_event_mask_sync()
4015 if (hdev->commands[0] & 0x20) { in hci_set_event_mask_sync()
4020 if (!hdev->suspended) in hci_set_event_mask_sync()
4029 if (hdev->commands[2] & 0x80) in hci_set_event_mask_sync()
4034 if (hdev->le_features[0] & HCI_LE_ENCRYPTION) { in hci_set_event_mask_sync()
4041 test_bit(HCI_QUIRK_FIXUP_INQUIRY_MODE, &hdev->quirks)) in hci_set_event_mask_sync()
4082 events[7] |= 0x20; /* LE Meta-Event */ in hci_set_event_mask_sync()
4092 if (!(hdev->commands[6] & 0x20) || in hci_read_stored_link_key_sync()
4093 test_bit(HCI_QUIRK_BROKEN_STORED_LINK_KEY, &hdev->quirks)) in hci_read_stored_link_key_sync()
4109 if (!(hdev->commands[5] & 0x10)) in hci_setup_link_policy_sync()
4131 if (!(hdev->commands[8] & 0x01)) in hci_read_page_scan_activity_sync()
4140 if (!(hdev->commands[18] & 0x04) || in hci_read_def_err_data_reporting_sync()
4141 !(hdev->features[0][6] & LMP_ERR_DATA_REPORTING) || in hci_read_def_err_data_reporting_sync()
4142 test_bit(HCI_QUIRK_BROKEN_ERR_DATA_REPORTING, &hdev->quirks)) in hci_read_def_err_data_reporting_sync()
4152 * support the Read Page Scan Type command. Check support for in hci_read_page_scan_type_sync()
4155 if (!(hdev->commands[13] & 0x01)) in hci_read_page_scan_type_sync()
4171 for (page = 2; page < HCI_MAX_PAGES && page <= hdev->max_page; in hci_read_local_ext_features_all_sync()
4209 if (hdev->le_features[0] & HCI_LE_ENCRYPTION) in hci_le_set_event_mask_sync()
4215 if (hdev->le_features[0] & HCI_LE_CONN_PARAM_REQ_PROC) in hci_le_set_event_mask_sync()
4222 if (hdev->le_features[0] & HCI_LE_DATA_LEN_EXT) in hci_le_set_event_mask_sync()
4234 if (hdev->le_features[0] & HCI_LE_EXT_SCAN_POLICY) in hci_le_set_event_mask_sync()
4240 if (hdev->le_features[1] & HCI_LE_CHAN_SEL_ALG2) in hci_le_set_event_mask_sync()
4243 /* If the controller supports the LE Set Scan Enable command, in hci_le_set_event_mask_sync()
4246 if (hdev->commands[26] & 0x08) in hci_le_set_event_mask_sync()
4252 if (hdev->commands[26] & 0x10) in hci_le_set_event_mask_sync()
4258 if (hdev->commands[27] & 0x04) in hci_le_set_event_mask_sync()
4264 if (hdev->commands[27] & 0x20) in hci_le_set_event_mask_sync()
4268 /* If the controller supports the LE Read Local P-256 in hci_le_set_event_mask_sync()
4271 if (hdev->commands[34] & 0x02) in hci_le_set_event_mask_sync()
4272 /* LE Read Local P-256 Public Key Complete */ in hci_le_set_event_mask_sync()
4278 if (hdev->commands[34] & 0x04) in hci_le_set_event_mask_sync()
4284 if (hdev->commands[35] & (0x20 | 0x40)) in hci_le_set_event_mask_sync()
4287 /* If the controller supports LE Set Extended Scan Parameters in hci_le_set_event_mask_sync()
4288 * and LE Set Extended Scan Enable commands, enable the in hci_le_set_event_mask_sync()
4323 if ((hdev->commands[25] & 0x40) && !ext_adv_capable(hdev)) { in hci_le_read_adv_tx_power_sync()
4341 if (!(hdev->commands[38] & 0x80) || in hci_le_read_tx_power_sync()
4342 test_bit(HCI_QUIRK_BROKEN_READ_TRANSMIT_POWER, &hdev->quirks)) in hci_le_read_tx_power_sync()
4352 if (!(hdev->commands[26] & 0x40)) in hci_le_read_accept_list_size_sync()
4362 if (!(hdev->commands[34] & 0x40)) in hci_le_read_resolv_list_size_sync()
4372 if (!(hdev->commands[34] & 0x20)) in hci_le_clear_resolv_list_sync()
4382 __le16 timeout = cpu_to_le16(hdev->rpa_timeout); in hci_le_set_rpa_timeout_sync()
4384 if (!(hdev->commands[35] & 0x04) || in hci_le_set_rpa_timeout_sync()
4385 test_bit(HCI_QUIRK_BROKEN_SET_RPA_TIMEOUT, &hdev->quirks)) in hci_le_set_rpa_timeout_sync()
4396 if (!(hdev->le_features[0] & HCI_LE_DATA_LEN_EXT)) in hci_le_read_max_data_len_sync()
4406 if (!(hdev->le_features[0] & HCI_LE_DATA_LEN_EXT)) in hci_le_read_def_data_len_sync()
4429 /* LE-only devices do not support explicit enablement */ in hci_set_le_support_sync()
4529 if (!(hdev->commands[6] & 0x80) || in hci_delete_stored_link_key_sync()
4530 test_bit(HCI_QUIRK_BROKEN_STORED_LINK_KEY, &hdev->quirks)) in hci_delete_stored_link_key_sync()
4547 if (!(hdev->commands[22] & 0x04)) in hci_set_event_mask_page_2_sync()
4573 if (lmp_ping_capable(hdev) || hdev->le_features[0] & HCI_LE_PING) { in hci_set_event_mask_page_2_sync()
4594 if (hdev->commands[45] & 0x04) in hci_read_local_codecs_sync()
4596 else if (hdev->commands[29] & 0x20) in hci_read_local_codecs_sync()
4605 if (!(hdev->commands[41] & 0x08)) in hci_read_local_pairing_opts_sync()
4654 if (!(hdev->commands[18] & 0x08) || in hci_set_err_data_report_sync()
4655 !(hdev->features[0][6] & LMP_ERR_DATA_REPORTING) || in hci_set_err_data_report_sync()
4656 test_bit(HCI_QUIRK_BROKEN_ERR_DATA_REPORTING, &hdev->quirks)) in hci_set_err_data_report_sync()
4659 if (enabled == hdev->err_data_reporting) in hci_set_err_data_report_sync()
4695 if (!(hdev->le_features[0] & HCI_LE_DATA_LEN_EXT)) in hci_le_set_write_def_data_len_sync()
4699 cp.tx_len = cpu_to_le16(hdev->le_max_tx_len); in hci_le_set_write_def_data_len_sync()
4700 cp.tx_time = cpu_to_le16(hdev->le_max_tx_time); in hci_le_set_write_def_data_len_sync()
4713 if (!(hdev->commands[35] & 0x20)) { in hci_le_set_default_phy_sync()
4717 hdev->le_tx_def_phys = HCI_LE_SET_PHY_1M; in hci_le_set_default_phy_sync()
4718 hdev->le_rx_def_phys = HCI_LE_SET_PHY_1M; in hci_le_set_default_phy_sync()
4852 * Calls hdev->setup
4862 !test_bit(HCI_QUIRK_NON_PERSISTENT_SETUP, &hdev->quirks)) in hci_dev_setup_sync()
4869 if (hdev->setup) in hci_dev_setup_sync()
4870 ret = hdev->setup(hdev); in hci_dev_setup_sync()
4873 if (test_bit(hci_broken_table[i].quirk, &hdev->quirks)) in hci_dev_setup_sync()
4881 invalid_bdaddr = test_bit(HCI_QUIRK_INVALID_BDADDR, &hdev->quirks) || in hci_dev_setup_sync()
4882 test_bit(HCI_QUIRK_USE_BDADDR_PROPERTY, &hdev->quirks); in hci_dev_setup_sync()
4884 if (test_bit(HCI_QUIRK_USE_BDADDR_PROPERTY, &hdev->quirks) && in hci_dev_setup_sync()
4885 !bacmp(&hdev->public_addr, BDADDR_ANY)) in hci_dev_setup_sync()
4888 if (invalid_bdaddr && bacmp(&hdev->public_addr, BDADDR_ANY) && in hci_dev_setup_sync()
4889 hdev->set_bdaddr) { in hci_dev_setup_sync()
4890 ret = hdev->set_bdaddr(hdev, &hdev->public_addr); in hci_dev_setup_sync()
4906 if (test_bit(HCI_QUIRK_EXTERNAL_CONFIG, &hdev->quirks) || in hci_dev_setup_sync()
4935 atomic_set(&hdev->cmd_cnt, 1); in hci_dev_init_sync()
4936 set_bit(HCI_INIT, &hdev->flags); in hci_dev_init_sync()
4946 if (bacmp(&hdev->public_addr, BDADDR_ANY) && in hci_dev_init_sync()
4947 hdev->set_bdaddr) in hci_dev_init_sync()
4948 ret = hdev->set_bdaddr(hdev, &hdev->public_addr); in hci_dev_init_sync()
4950 ret = -EADDRNOTAVAIL; in hci_dev_init_sync()
4957 if (!ret && hdev->post_init) in hci_dev_init_sync()
4958 ret = hdev->post_init(hdev); in hci_dev_init_sync()
4966 if (test_bit(HCI_QUIRK_NON_PERSISTENT_DIAG, &hdev->quirks) && in hci_dev_init_sync()
4968 hci_dev_test_flag(hdev, HCI_VENDOR_DIAG) && hdev->set_diag) in hci_dev_init_sync()
4969 ret = hdev->set_diag(hdev, true); in hci_dev_init_sync()
4976 clear_bit(HCI_INIT, &hdev->flags); in hci_dev_init_sync()
4988 ret = -ENODEV; in hci_dev_open_sync()
4998 ret = -ERFKILL; in hci_dev_open_sync()
5012 !bacmp(&hdev->bdaddr, BDADDR_ANY) && in hci_dev_open_sync()
5013 !bacmp(&hdev->static_addr, BDADDR_ANY)) { in hci_dev_open_sync()
5014 ret = -EADDRNOTAVAIL; in hci_dev_open_sync()
5019 if (test_bit(HCI_UP, &hdev->flags)) { in hci_dev_open_sync()
5020 ret = -EALREADY; in hci_dev_open_sync()
5024 if (hdev->open(hdev)) { in hci_dev_open_sync()
5025 ret = -EIO; in hci_dev_open_sync()
5031 set_bit(HCI_RUNNING, &hdev->flags); in hci_dev_open_sync()
5039 set_bit(HCI_UP, &hdev->flags); in hci_dev_open_sync()
5052 flush_work(&hdev->tx_work); in hci_dev_open_sync()
5058 flush_work(&hdev->rx_work); in hci_dev_open_sync()
5059 flush_work(&hdev->cmd_work); in hci_dev_open_sync()
5061 skb_queue_purge(&hdev->cmd_q); in hci_dev_open_sync()
5062 skb_queue_purge(&hdev->rx_q); in hci_dev_open_sync()
5064 if (hdev->flush) in hci_dev_open_sync()
5065 hdev->flush(hdev); in hci_dev_open_sync()
5067 if (hdev->sent_cmd) { in hci_dev_open_sync()
5068 cancel_delayed_work_sync(&hdev->cmd_timer); in hci_dev_open_sync()
5069 kfree_skb(hdev->sent_cmd); in hci_dev_open_sync()
5070 hdev->sent_cmd = NULL; in hci_dev_open_sync()
5073 if (hdev->req_skb) { in hci_dev_open_sync()
5074 kfree_skb(hdev->req_skb); in hci_dev_open_sync()
5075 hdev->req_skb = NULL; in hci_dev_open_sync()
5078 clear_bit(HCI_RUNNING, &hdev->flags); in hci_dev_open_sync()
5081 hdev->close(hdev); in hci_dev_open_sync()
5082 hdev->flags &= BIT(HCI_RAW); in hci_dev_open_sync()
5089 /* This function requires the caller holds hdev->lock */
5094 list_for_each_entry(p, &hdev->le_conn_params, list) { in hci_pend_le_actions_clear()
5096 if (p->conn) { in hci_pend_le_actions_clear()
5097 hci_conn_drop(p->conn); in hci_pend_le_actions_clear()
5098 hci_conn_put(p->conn); in hci_pend_le_actions_clear()
5099 p->conn = NULL; in hci_pend_le_actions_clear()
5119 test_bit(HCI_UP, &hdev->flags)) { in hci_dev_shutdown()
5121 if (hdev->shutdown) in hci_dev_shutdown()
5122 err = hdev->shutdown(hdev); in hci_dev_shutdown()
5139 disable_delayed_work(&hdev->power_off); in hci_dev_close_sync()
5140 disable_delayed_work(&hdev->ncmd_timer); in hci_dev_close_sync()
5141 disable_delayed_work(&hdev->le_scan_disable); in hci_dev_close_sync()
5143 cancel_delayed_work(&hdev->power_off); in hci_dev_close_sync()
5144 cancel_delayed_work(&hdev->ncmd_timer); in hci_dev_close_sync()
5145 cancel_delayed_work(&hdev->le_scan_disable); in hci_dev_close_sync()
5152 if (hdev->adv_instance_timeout) { in hci_dev_close_sync()
5153 cancel_delayed_work_sync(&hdev->adv_instance_expire); in hci_dev_close_sync()
5154 hdev->adv_instance_timeout = 0; in hci_dev_close_sync()
5159 if (!test_and_clear_bit(HCI_UP, &hdev->flags)) { in hci_dev_close_sync()
5160 cancel_delayed_work_sync(&hdev->cmd_timer); in hci_dev_close_sync()
5167 flush_work(&hdev->tx_work); in hci_dev_close_sync()
5168 flush_work(&hdev->rx_work); in hci_dev_close_sync()
5170 if (hdev->discov_timeout > 0) { in hci_dev_close_sync()
5171 hdev->discov_timeout = 0; in hci_dev_close_sync()
5177 cancel_delayed_work(&hdev->service_cache); in hci_dev_close_sync()
5182 cancel_delayed_work_sync(&hdev->rpa_expired); in hci_dev_close_sync()
5184 list_for_each_entry(adv_instance, &hdev->adv_instances, list) in hci_dev_close_sync()
5185 cancel_delayed_work_sync(&adv_instance->rpa_expired_cb); in hci_dev_close_sync()
5191 drain_workqueue(hdev->workqueue); in hci_dev_close_sync()
5206 /* Prevent data races on hdev->smp_data or hdev->smp_bredr_data */ in hci_dev_close_sync()
5217 if (hdev->flush) in hci_dev_close_sync()
5218 hdev->flush(hdev); in hci_dev_close_sync()
5221 skb_queue_purge(&hdev->cmd_q); in hci_dev_close_sync()
5222 atomic_set(&hdev->cmd_cnt, 1); in hci_dev_close_sync()
5223 if (test_bit(HCI_QUIRK_RESET_ON_CLOSE, &hdev->quirks) && in hci_dev_close_sync()
5225 set_bit(HCI_INIT, &hdev->flags); in hci_dev_close_sync()
5227 clear_bit(HCI_INIT, &hdev->flags); in hci_dev_close_sync()
5231 flush_work(&hdev->cmd_work); in hci_dev_close_sync()
5234 skb_queue_purge(&hdev->rx_q); in hci_dev_close_sync()
5235 skb_queue_purge(&hdev->cmd_q); in hci_dev_close_sync()
5236 skb_queue_purge(&hdev->raw_q); in hci_dev_close_sync()
5239 if (hdev->sent_cmd) { in hci_dev_close_sync()
5240 cancel_delayed_work_sync(&hdev->cmd_timer); in hci_dev_close_sync()
5241 kfree_skb(hdev->sent_cmd); in hci_dev_close_sync()
5242 hdev->sent_cmd = NULL; in hci_dev_close_sync()
5246 if (hdev->req_skb) { in hci_dev_close_sync()
5247 kfree_skb(hdev->req_skb); in hci_dev_close_sync()
5248 hdev->req_skb = NULL; in hci_dev_close_sync()
5251 clear_bit(HCI_RUNNING, &hdev->flags); in hci_dev_close_sync()
5255 hdev->close(hdev); in hci_dev_close_sync()
5258 hdev->flags &= BIT(HCI_RAW); in hci_dev_close_sync()
5261 memset(hdev->eir, 0, sizeof(hdev->eir)); in hci_dev_close_sync()
5262 memset(hdev->dev_class, 0, sizeof(hdev->dev_class)); in hci_dev_close_sync()
5263 bacpy(&hdev->random_addr, BDADDR_ANY); in hci_dev_close_sync()
5264 hci_codec_list_clear(&hdev->local_codecs); in hci_dev_close_sync()
5280 if (test_bit(HCI_UP, &hdev->flags) && in hci_power_on_sync()
5283 cancel_delayed_work(&hdev->power_off); in hci_power_on_sync()
5297 (!bacmp(&hdev->bdaddr, BDADDR_ANY) && in hci_power_on_sync()
5298 !bacmp(&hdev->static_addr, BDADDR_ANY))) { in hci_power_on_sync()
5302 queue_delayed_work(hdev->req_workqueue, &hdev->power_off, in hci_power_on_sync()
5311 set_bit(HCI_RAW, &hdev->flags); in hci_power_on_sync()
5326 clear_bit(HCI_RAW, &hdev->flags); in hci_power_on_sync()
5351 struct discovery_state *d = &hdev->discovery; in hci_stop_discovery_sync()
5355 bt_dev_dbg(hdev, "state %u", hdev->discovery.state); in hci_stop_discovery_sync()
5357 if (d->state == DISCOVERY_FINDING || d->state == DISCOVERY_STOPPING) { in hci_stop_discovery_sync()
5358 if (test_bit(HCI_INQUIRY, &hdev->flags)) { in hci_stop_discovery_sync()
5366 cancel_delayed_work(&hdev->le_scan_disable); in hci_stop_discovery_sync()
5383 /* No further actions needed for LE-only discovery */ in hci_stop_discovery_sync()
5384 if (d->type == DISCOV_TYPE_LE) in hci_stop_discovery_sync()
5387 if (d->state == DISCOVERY_RESOLVING || d->state == DISCOVERY_STOPPING) { in hci_stop_discovery_sync()
5396 hci_remote_name_cancel_sync(hdev, &e->data.bdaddr); in hci_stop_discovery_sync()
5407 if (test_bit(HCI_CONN_BIG_CREATED, &conn->flags)) { in hci_disconnect_sync()
5419 cp.handle = cpu_to_le16(conn->handle); in hci_disconnect_sync()
5443 if (test_bit(HCI_CONN_SCANNING, &conn->flags)) in hci_le_connect_cancel_sync()
5446 if (conn->role == HCI_ROLE_SLAVE || in hci_le_connect_cancel_sync()
5447 test_and_set_bit(HCI_CONN_CANCEL, &conn->flags)) in hci_le_connect_cancel_sync()
5457 if (conn->type == LE_LINK) in hci_connect_cancel_sync()
5460 if (conn->type == ISO_LINK) { in hci_connect_cancel_sync()
5469 if (test_bit(HCI_CONN_CREATE_CIS, &conn->flags)) in hci_connect_cancel_sync()
5473 if (bacmp(&conn->dst, BDADDR_ANY)) in hci_connect_cancel_sync()
5482 if (hdev->hci_ver < BLUETOOTH_VER_1_2) in hci_connect_cancel_sync()
5492 6, &conn->dst, in hci_connect_cancel_sync()
5497 6, &conn->dst, HCI_CMD_TIMEOUT); in hci_connect_cancel_sync()
5506 bacpy(&cp.bdaddr, &conn->dst); in hci_reject_sco_sync()
5510 * allowed error values (0x0D-0x0F). in hci_reject_sco_sync()
5525 cp.handle = cpu_to_le16(conn->handle); in hci_le_reject_cis_sync()
5537 if (conn->type == ISO_LINK) in hci_reject_conn_sync()
5540 if (conn->type == SCO_LINK || conn->type == ESCO_LINK) in hci_reject_conn_sync()
5544 bacpy(&cp.bdaddr, &conn->dst); in hci_reject_conn_sync()
5554 u16 handle = conn->handle; in hci_abort_conn_sync()
5558 switch (conn->state) { in hci_abort_conn_sync()
5592 conn->state = BT_CLOSED; in hci_abort_conn_sync()
5606 struct list_head *head = &hdev->conn_hash.list; in hci_disconnect_all_sync()
5640 if (!test_bit(HCI_UP, &hdev->flags)) in hci_power_off_sync()
5645 if (test_bit(HCI_ISCAN, &hdev->flags) || in hci_power_off_sync()
5646 test_bit(HCI_PSCAN, &hdev->flags)) { in hci_power_off_sync()
5691 cp.num_iac = min_t(u8, hdev->num_iac, 2); in hci_write_iac_sync()
5778 * by-product of disabling connectable, we need to update the in hci_update_connectable_sync()
5782 err = hci_update_adv_data_sync(hdev, hdev->cur_adv_instance); in hci_update_connectable_sync()
5786 !list_empty(&hdev->adv_instances)) { in hci_update_connectable_sync()
5789 hdev->cur_adv_instance); in hci_update_connectable_sync()
5808 if (test_bit(HCI_INQUIRY, &hdev->flags)) in hci_inquiry_sync()
5817 if (hdev->discovery.limited) in hci_inquiry_sync()
5829 static int hci_active_scan_sync(struct hci_dev *hdev, uint16_t interval) in hci_active_scan_sync() argument
5852 /* Pause address resolution for active scan and stop advertising if in hci_active_scan_sync()
5860 * address (when privacy feature has been enabled) or non-resolvable in hci_active_scan_sync()
5869 (test_bit(HCI_QUIRK_STRICT_DUPLICATE_FILTER, &hdev->quirks) && in hci_active_scan_sync()
5870 hdev->discovery.result_filtering)) { in hci_active_scan_sync()
5884 err = hci_start_scan_sync(hdev, LE_SCAN_ACTIVE, interval, in hci_active_scan_sync()
5885 hdev->le_scan_window_discovery, in hci_active_scan_sync()
5906 err = hci_active_scan_sync(hdev, hdev->le_scan_int_discovery * 2); in hci_start_interleaved_discovery_sync()
5918 bt_dev_dbg(hdev, "type %u", hdev->discovery.type); in hci_start_discovery_sync()
5920 switch (hdev->discovery.type) { in hci_start_discovery_sync()
5933 &hdev->quirks)) { in hci_start_discovery_sync()
5935 /* During simultaneous discovery, we double LE scan in hci_start_discovery_sync()
5936 * interval. We must leave some time for the controller in hci_start_discovery_sync()
5943 timeout = msecs_to_jiffies(hdev->discov_interleaved_timeout); in hci_start_discovery_sync()
5944 err = hci_active_scan_sync(hdev, hdev->le_scan_int_discovery); in hci_start_discovery_sync()
5948 err = hci_active_scan_sync(hdev, hdev->le_scan_int_discovery); in hci_start_discovery_sync()
5951 return -EINVAL; in hci_start_discovery_sync()
5959 queue_delayed_work(hdev->req_workqueue, &hdev->le_scan_disable, in hci_start_discovery_sync()
5978 int old_state = hdev->discovery.state; in hci_pause_discovery_sync()
5983 hdev->discovery_paused) in hci_pause_discovery_sync()
5991 hdev->discovery_paused = true; in hci_pause_discovery_sync()
6000 u8 scan = SCAN_DISABLED; in hci_update_event_filter_sync() local
6001 bool scanning = test_bit(HCI_PSCAN, &hdev->flags); in hci_update_event_filter_sync()
6010 if (test_bit(HCI_QUIRK_BROKEN_FILTER_CLEAR_ALL, &hdev->quirks)) in hci_update_event_filter_sync()
6016 list_for_each_entry(b, &hdev->accept_list, list) { in hci_update_event_filter_sync()
6017 if (!(b->flags & HCI_CONN_FLAG_REMOTE_WAKEUP)) in hci_update_event_filter_sync()
6020 bt_dev_dbg(hdev, "Adding event filters for %pMR", &b->bdaddr); in hci_update_event_filter_sync()
6024 &b->bdaddr, in hci_update_event_filter_sync()
6028 &b->bdaddr); in hci_update_event_filter_sync()
6030 scan = SCAN_PAGE; in hci_update_event_filter_sync()
6033 if (scan && !scanning) in hci_update_event_filter_sync()
6034 hci_write_scan_enable_sync(hdev, scan); in hci_update_event_filter_sync()
6035 else if (!scan && scanning) in hci_update_event_filter_sync()
6036 hci_write_scan_enable_sync(hdev, scan); in hci_update_event_filter_sync()
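/* The two branches above collapse to "toggle page scan only when the
 * desired state differs from the current one"; an equivalent condensed
 * form (sketch) would be:
 */
if (!!scan != scanning)
	hci_write_scan_enable_sync(hdev, scan);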
6041 /* This function disables scanning (BR and LE) and marks it as paused */
6044 if (hdev->scanning_paused) in hci_pause_scan_sync()
6047 /* Disable page scan if enabled */ in hci_pause_scan_sync()
6048 if (test_bit(HCI_PSCAN, &hdev->flags)) in hci_pause_scan_sync()
6053 hdev->scanning_paused = true; in hci_pause_scan_sync()
6076 if (hdev->suspended) in hci_suspend_sync()
6080 hdev->suspended = true; in hci_suspend_sync()
6091 /* Prevent disconnects from causing scanning to be re-enabled */ in hci_suspend_sync()
6099 hdev->suspend_state = BT_RUNNING; in hci_suspend_sync()
6113 if (!hdev->wakeup || !hdev->wakeup(hdev)) { in hci_suspend_sync()
6114 hdev->suspend_state = BT_SUSPEND_DISCONNECT; in hci_suspend_sync()
6119 hdev->scanning_paused = false; in hci_suspend_sync()
6124 /* Update LE passive scan if enabled */ in hci_suspend_sync()
6127 /* Pause scan changes again. */ in hci_suspend_sync()
6128 hdev->scanning_paused = true; in hci_suspend_sync()
6130 hdev->suspend_state = BT_SUSPEND_CONFIGURE_WAKE; in hci_suspend_sync()
6141 if (!hdev->discovery_paused) in hci_resume_discovery_sync()
6144 hdev->discovery_paused = false; in hci_resume_discovery_sync()
6167 /* This function resumes scanning and resets the paused flag */
6170 if (!hdev->scanning_paused) in hci_resume_scan_sync()
6173 hdev->scanning_paused = false; in hci_resume_scan_sync()
6194 if (!hdev->suspended) in hci_resume_sync()
6197 hdev->suspended = false; in hci_resume_sync()
6202 /* Clear any event filters and restore scan state */ in hci_resume_sync()
6222 struct hci_dev *hdev = conn->hdev; in conn_use_rpa()
6251 cp.channel_map = hdev->le_adv_channel_map; in hci_le_ext_directed_advertising_sync()
6257 cp.peer_addr_type = conn->dst_type; in hci_le_ext_directed_advertising_sync()
6258 bacpy(&cp.peer_addr, &conn->dst); in hci_le_ext_directed_advertising_sync()
6280 bacmp(&random_addr, &hdev->random_addr)) { in hci_le_ext_directed_advertising_sync()
6327 cp.direct_addr_type = conn->dst_type; in hci_le_directed_advertising_sync()
6328 bacpy(&cp.direct_addr, &conn->dst); in hci_le_directed_advertising_sync()
6329 cp.channel_map = hdev->le_adv_channel_map; in hci_le_directed_advertising_sync()
6345 struct hci_dev *hdev = conn->hdev; in set_ext_conn_params()
6349 p->scan_interval = cpu_to_le16(hdev->le_scan_int_connect); in set_ext_conn_params()
6350 p->scan_window = cpu_to_le16(hdev->le_scan_window_connect); in set_ext_conn_params()
6351 p->conn_interval_min = cpu_to_le16(conn->le_conn_min_interval); in set_ext_conn_params()
6352 p->conn_interval_max = cpu_to_le16(conn->le_conn_max_interval); in set_ext_conn_params()
6353 p->conn_latency = cpu_to_le16(conn->le_conn_latency); in set_ext_conn_params()
6354 p->supervision_timeout = cpu_to_le16(conn->le_supv_timeout); in set_ext_conn_params()
6355 p->min_ce_len = cpu_to_le16(0x0000); in set_ext_conn_params()
6356 p->max_ce_len = cpu_to_le16(0x0000); in set_ext_conn_params()
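/* Units sketch for the values filled in above: scan interval/window are
 * in 0.625 ms steps, connection interval min/max in 1.25 ms steps, the
 * supervision timeout in 10 ms steps, and latency is a bare event count.
 * Illustrative converters (not part of the file):
 */
static inline u32 le_conn_interval_to_usec(u16 value)
{
	return (u32)value * 1250;	/* 1.25 ms units */
}

static inline u32 le_supv_timeout_to_msec(u16 value)
{
	return (u32)value * 10;		/* 10 ms units */
}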
6368 p = (void *)cp->data; in hci_le_ext_create_conn_sync()
6372 bacpy(&cp->peer_addr, &conn->dst); in hci_le_ext_create_conn_sync()
6373 cp->peer_addr_type = conn->dst_type; in hci_le_ext_create_conn_sync()
6374 cp->own_addr_type = own_addr_type; in hci_le_ext_create_conn_sync()
6378 if (scan_1m(hdev) && (conn->le_adv_phy == HCI_ADV_PHY_1M || in hci_le_ext_create_conn_sync()
6379 conn->le_adv_sec_phy == HCI_ADV_PHY_1M)) { in hci_le_ext_create_conn_sync()
6380 cp->phys |= LE_SCAN_PHY_1M; in hci_le_ext_create_conn_sync()
6387 if (scan_2m(hdev) && (conn->le_adv_phy == HCI_ADV_PHY_2M || in hci_le_ext_create_conn_sync()
6388 conn->le_adv_sec_phy == HCI_ADV_PHY_2M)) { in hci_le_ext_create_conn_sync()
6389 cp->phys |= LE_SCAN_PHY_2M; in hci_le_ext_create_conn_sync()
6396 if (scan_coded(hdev) && (conn->le_adv_phy == HCI_ADV_PHY_CODED || in hci_le_ext_create_conn_sync()
6397 conn->le_adv_sec_phy == HCI_ADV_PHY_CODED)) { in hci_le_ext_create_conn_sync()
6398 cp->phys |= LE_SCAN_PHY_CODED; in hci_le_ext_create_conn_sync()
6407 conn->conn_timeout, NULL); in hci_le_ext_create_conn_sync()
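/* Sketch of the resulting command length: cp->phys gains one bit per
 * selected PHY and cp->data gains one hci_cp_le_ext_conn_param block per
 * bit, so the plen handed to __hci_cmd_sync_status_sk() would grow
 * accordingly (an assumption about how the omitted lines compute it):
 */
static inline u16 ext_create_conn_plen(u8 num_phys)
{
	return sizeof(struct hci_cp_le_ext_create_conn) +
	       num_phys * sizeof(struct hci_cp_le_ext_conn_param);
}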
6419 return -ECANCELED; in hci_le_create_conn_sync()
6423 clear_bit(HCI_CONN_SCANNING, &conn->flags); in hci_le_create_conn_sync()
6424 conn->state = BT_CONNECT; in hci_le_create_conn_sync()
6427 if (conn->role == HCI_ROLE_SLAVE) { in hci_le_create_conn_sync()
6432 hdev->le_scan_type == LE_SCAN_ACTIVE && in hci_le_create_conn_sync()
6435 return -EBUSY; in hci_le_create_conn_sync()
6449 params = hci_conn_params_lookup(hdev, &conn->dst, conn->dst_type); in hci_le_create_conn_sync()
6451 conn->le_conn_min_interval = params->conn_min_interval; in hci_le_create_conn_sync()
6452 conn->le_conn_max_interval = params->conn_max_interval; in hci_le_create_conn_sync()
6453 conn->le_conn_latency = params->conn_latency; in hci_le_create_conn_sync()
6454 conn->le_supv_timeout = params->supervision_timeout; in hci_le_create_conn_sync()
6456 conn->le_conn_min_interval = hdev->le_conn_min_interval; in hci_le_create_conn_sync()
6457 conn->le_conn_max_interval = hdev->le_conn_max_interval; in hci_le_create_conn_sync()
6458 conn->le_conn_latency = hdev->le_conn_latency; in hci_le_create_conn_sync()
6459 conn->le_supv_timeout = hdev->le_supv_timeout; in hci_le_create_conn_sync()
6463 * not able to scan and connect at the same time. Also set the in hci_le_create_conn_sync()
6465 * handler for scan disabling knows to set the correct discovery in hci_le_create_conn_sync()
6474 * that we never connect with a non-resolvable address. in hci_le_create_conn_sync()
6488 cp.scan_interval = cpu_to_le16(hdev->le_scan_int_connect); in hci_le_create_conn_sync()
6489 cp.scan_window = cpu_to_le16(hdev->le_scan_window_connect); in hci_le_create_conn_sync()
6491 bacpy(&cp.peer_addr, &conn->dst); in hci_le_create_conn_sync()
6492 cp.peer_addr_type = conn->dst_type; in hci_le_create_conn_sync()
6494 cp.conn_interval_min = cpu_to_le16(conn->le_conn_min_interval); in hci_le_create_conn_sync()
6495 cp.conn_interval_max = cpu_to_le16(conn->le_conn_max_interval); in hci_le_create_conn_sync()
6496 cp.conn_latency = cpu_to_le16(conn->le_conn_latency); in hci_le_create_conn_sync()
6497 cp.supervision_timeout = cpu_to_le16(conn->le_supv_timeout); in hci_le_create_conn_sync()
6512 conn->conn_timeout, NULL); in hci_le_create_conn_sync()
6515 if (err == -ETIMEDOUT) in hci_le_create_conn_sync()
6518 /* Re-enable advertising after the connection attempt is finished. */ in hci_le_create_conn_sync()
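/* Sketch of the -ETIMEDOUT handling checked a few lines above: when the
 * wait for the connection-complete event times out, the pending attempt
 * is cancelled; LE Create Connection Cancel carries no parameters. The
 * in-tree path goes through hci_le_connect_cancel_sync(); this is a
 * simplification.
 */
if (err == -ETIMEDOUT)
	__hci_cmd_sync_status(hdev, HCI_OP_LE_CREATE_CONN_CANCEL, 0, NULL,
			      HCI_CMD_TIMEOUT);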
6558 list_for_each_entry_rcu(conn, &hdev->conn_hash.list, list) { in hci_le_create_cis_sync()
6559 if (test_bit(HCI_CONN_CREATE_CIS, &conn->flags)) in hci_le_create_cis_sync()
6564 list_for_each_entry_rcu(conn, &hdev->conn_hash.list, list) { in hci_le_create_cis_sync()
6570 cig = conn->iso_qos.ucast.cig; in hci_le_create_cis_sync()
6572 list_for_each_entry_rcu(link, &hdev->conn_hash.list, list) { in hci_le_create_cis_sync()
6574 link->iso_qos.ucast.cig == cig && in hci_le_create_cis_sync()
6575 link->state != BT_CONNECTED) { in hci_le_create_cis_sync()
6588 list_for_each_entry_rcu(conn, &hdev->conn_hash.list, list) { in hci_le_create_cis_sync()
6589 struct hci_cis *cis = &cmd->cis[aux_num_cis]; in hci_le_create_cis_sync()
6592 conn->iso_qos.ucast.cig != cig) in hci_le_create_cis_sync()
6595 set_bit(HCI_CONN_CREATE_CIS, &conn->flags); in hci_le_create_cis_sync()
6596 cis->acl_handle = cpu_to_le16(conn->parent->handle); in hci_le_create_cis_sync()
6597 cis->cis_handle = cpu_to_le16(conn->handle); in hci_le_create_cis_sync()
6600 if (aux_num_cis >= cmd->num_cis) in hci_le_create_cis_sync()
6603 cmd->num_cis = aux_num_cis; in hci_le_create_cis_sync()
6615 struct_size(cmd, cis, cmd->num_cis), in hci_le_create_cis_sync()
6617 conn->conn_timeout, NULL); in hci_le_create_cis_sync()
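/* struct_size() above sizes the variable-length command: the header plus
 * one struct hci_cis per entry actually filled in. A hand-rolled
 * equivalent (sketch, without the overflow checking struct_size() adds):
 */
size_t plen = sizeof(struct hci_cp_le_create_cis) +
	      aux_num_cis * sizeof(struct hci_cis);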
6681 err = smp_generate_rpa(hdev, hdev->irk, &hdev->rpa); in hci_get_random_address()
6687 bacpy(rand_addr, &hdev->rpa); in hci_get_random_address()
6693 * use a non-resolvable private address. This is useful for in hci_get_random_address()
6694 * non-connectable advertising. in hci_get_random_address()
6700 /* The non-resolvable private address is generated in hci_get_random_address()
6707 /* The non-resolvable private address shall not be in hci_get_random_address()
6710 if (bacmp(&hdev->bdaddr, &nrpa)) in hci_get_random_address()
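/* Sketch of the NRPA constraints noted above: generate 6 random bytes,
 * clear the two most significant bits of the address, and reject the
 * result if it happens to equal the public address (retry handling and
 * helper name are assumptions).
 */
static int gen_nrpa_sketch(struct hci_dev *hdev, bdaddr_t *nrpa)
{
	get_random_bytes(nrpa, sizeof(*nrpa));
	nrpa->b[5] &= 0x3f;	/* non-resolvable: top two bits cleared */

	if (!bacmp(&hdev->bdaddr, nrpa))
		return -EAGAIN;

	return 0;
}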
6747 return -ECANCELED; in hci_acl_create_conn_sync()
6757 if (test_bit(HCI_INQUIRY, &hdev->flags)) { in hci_acl_create_conn_sync()
6764 conn->state = BT_CONNECT; in hci_acl_create_conn_sync()
6765 conn->out = true; in hci_acl_create_conn_sync()
6766 conn->role = HCI_ROLE_MASTER; in hci_acl_create_conn_sync()
6768 conn->attempt++; in hci_acl_create_conn_sync()
6770 conn->link_policy = hdev->link_policy; in hci_acl_create_conn_sync()
6773 bacpy(&cp.bdaddr, &conn->dst); in hci_acl_create_conn_sync()
6776 ie = hci_inquiry_cache_lookup(hdev, &conn->dst); in hci_acl_create_conn_sync()
6779 cp.pscan_rep_mode = ie->data.pscan_rep_mode; in hci_acl_create_conn_sync()
6780 cp.pscan_mode = ie->data.pscan_mode; in hci_acl_create_conn_sync()
6781 cp.clock_offset = ie->data.clock_offset | in hci_acl_create_conn_sync()
6785 memcpy(conn->dev_class, ie->data.dev_class, 3); in hci_acl_create_conn_sync()
6788 cp.pkt_type = cpu_to_le16(conn->pkt_type); in hci_acl_create_conn_sync()
6789 if (lmp_rswitch_capable(hdev) && !(hdev->link_mode & HCI_LM_MASTER)) in hci_acl_create_conn_sync()
6797 conn->conn_timeout, NULL); in hci_acl_create_conn_sync()
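/* Sketch of the role-switch decision taken right above: the peer is
 * allowed to become central only if the controller can role-switch and
 * the local link mode does not force the central role.
 */
cp.role_switch = (lmp_rswitch_capable(hdev) &&
		  !(hdev->link_mode & HCI_LM_MASTER)) ? 0x01 : 0x00;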
6812 if (err == -ECANCELED) in create_le_conn_complete()
6830 flush_delayed_work(&conn->le_conn_timeout); in create_le_conn_complete()
6845 if (conn->state != BT_OPEN) in hci_cancel_connect_sync()
6846 return -EINVAL; in hci_cancel_connect_sync()
6848 switch (conn->type) { in hci_cancel_connect_sync()
6858 return -ENOENT; in hci_cancel_connect_sync()
6867 cp.handle = cpu_to_le16(conn->handle); in hci_le_conn_update_sync()
6868 cp.conn_interval_min = cpu_to_le16(params->conn_min_interval); in hci_le_conn_update_sync()
6869 cp.conn_interval_max = cpu_to_le16(params->conn_max_interval); in hci_le_conn_update_sync()
6870 cp.conn_latency = cpu_to_le16(params->conn_latency); in hci_le_conn_update_sync()
6871 cp.supervision_timeout = cpu_to_le16(params->supervision_timeout); in hci_le_conn_update_sync()
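/* Sketch of how the update built above would typically be issued: the
 * remaining fields (min/max CE length) are zeroed and the standard
 * LE Connection Update command is sent synchronously. The exact tail of
 * the in-tree helper is assumed here.
 */
cp.min_ce_len = cpu_to_le16(0x0000);
cp.max_ce_len = cpu_to_le16(0x0000);

return __hci_cmd_sync_status(hdev, HCI_OP_LE_CONN_UPDATE, sizeof(cp), &cp,
			     HCI_CMD_TIMEOUT);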