Lines Matching full:bc

(Identifier search hits for bc; judging by the nj_*, tiger_* and NJ_DMA_* names these appear to come from the mISDN NETJET driver, netjet.c. Each entry shows the source line number, the matched line, and the enclosing declaration or function.)

78 	struct tiger_ch		bc[2];  member
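
The hit at line 78 is the two-element array of per-B-channel state embedded in the card structure. Collecting the field names that the remaining hits dereference (bch, hsend/hrecv, hsbuf/hrbuf, idx, free, lastrx, txstate, rxstate) gives roughly the sketch below; the names come from this listing, but the struct name, the types and the ordering are assumptions, not the driver's declaration.

/* Hypothetical reconstruction from the matches in this listing only, not the
 * driver's actual layout.  struct bchannel and struct isdnhdlc_vars are the
 * kernel's mISDN / isdnhdlc types. */
struct tiger_ch_sketch {
	struct bchannel		bch;	/* mISDN B-channel core (bc->bch.*) */
	struct isdnhdlc_vars	hsend;	/* HDLC encoder state, see isdnhdlc_encode() */
	struct isdnhdlc_vars	hrecv;	/* HDLC decoder state, see isdnhdlc_decode() */
	u8			*hsbuf;	/* HDLC TX bounce buffer (NJ_DMA_TXSIZE) */
	u8			*hrbuf;	/* HDLC RX bounce buffer (NJ_DMA_RXSIZE) */
	int			idx;	/* current write index into the send DMA ring */
	int			free;	/* transmit credit left in the send ring */
	int			lastrx;	/* last RX ring index seen, -1 after reset */
	u32			txstate;	/* TX_INIT / TX_IDLE / TX_RUN / TX_UNDERRUN */
	u32			rxstate;	/* RX_OVERRUN and friends */
};
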
96 card->bc[0].bch.debug = debug; in _set_debug()
97 card->bc[1].bch.debug = debug; in _set_debug()
176 fill_mem(struct tiger_ch *bc, u32 idx, u32 cnt, u32 fill) in fill_mem() argument
178 struct tiger_hw *card = bc->bch.hw; in fill_mem()
182 bc->bch.nr, fill, cnt, idx, card->send.idx); in fill_mem()
183 if (bc->bch.nr & 2) { in fill_mem()
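
fill_mem() (lines 176-183) together with the lane masks further down (lines 508 and 567) suggests that the two B-channels share every 32-bit word of the DMA ring, one byte lane each: the bc->bch.nr & 2 test moves the fill value into the upper lane, and the mask clears only this channel's own byte before the new value is ORed in. A minimal userspace sketch of that read-modify-write, with illustrative names:

/* Userspace sketch (not driver code): write a fill byte into one channel's
 * lane of a shared 32-bit DMA word ring, leaving the other channel's byte
 * untouched, wrapping at the ring end. */
#include <stdint.h>
#include <stdio.h>

static void fill_lane(uint32_t *ring, unsigned size, unsigned idx,
		      unsigned cnt, uint8_t fill, int upper_lane)
{
	uint32_t val  = upper_lane ? (uint32_t)fill << 8 : fill;
	uint32_t keep = upper_lane ? ~0xff00u : ~0xffu;	/* clear our lane only */

	while (cnt--) {
		ring[idx] = (ring[idx] & keep) | val;
		if (++idx >= size)
			idx = 0;
	}
}

int main(void)
{
	uint32_t ring[4] = { 0xaabb, 0xaabb, 0xaabb, 0xaabb };

	fill_lane(ring, 4, 2, 4, 0xff, 0);	/* idle-fill the lower lane */
	for (int i = 0; i < 4; i++)
		printf("%08x\n", ring[i]);
	return 0;
}

Compiled as-is, the demo prints 0000aaff four times: only the lower byte lane of each ring word was rewritten.
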
199 mode_tiger(struct tiger_ch *bc, u32 protocol) in mode_tiger() argument
201 struct tiger_hw *card = bc->bch.hw; in mode_tiger()
204 bc->bch.nr, bc->bch.state, protocol); in mode_tiger()
207 if (bc->bch.state == ISDN_P_NONE) in mode_tiger()
209 fill_mem(bc, 0, card->send.size, 0xff); in mode_tiger()
210 bc->bch.state = protocol; in mode_tiger()
212 if ((card->bc[0].bch.state == ISDN_P_NONE) && in mode_tiger()
213 (card->bc[1].bch.state == ISDN_P_NONE)) { in mode_tiger()
218 test_and_clear_bit(FLG_HDLC, &bc->bch.Flags); in mode_tiger()
219 test_and_clear_bit(FLG_TRANSPARENT, &bc->bch.Flags); in mode_tiger()
220 bc->txstate = 0; in mode_tiger()
221 bc->rxstate = 0; in mode_tiger()
222 bc->lastrx = -1; in mode_tiger()
225 test_and_set_bit(FLG_TRANSPARENT, &bc->bch.Flags); in mode_tiger()
226 bc->bch.state = protocol; in mode_tiger()
227 bc->idx = 0; in mode_tiger()
228 bc->free = card->send.size / 2; in mode_tiger()
229 bc->rxstate = 0; in mode_tiger()
230 bc->txstate = TX_INIT | TX_IDLE; in mode_tiger()
231 bc->lastrx = -1; in mode_tiger()
239 test_and_set_bit(FLG_HDLC, &bc->bch.Flags); in mode_tiger()
240 bc->bch.state = protocol; in mode_tiger()
241 bc->idx = 0; in mode_tiger()
242 bc->free = card->send.size / 2; in mode_tiger()
243 bc->rxstate = 0; in mode_tiger()
244 bc->txstate = TX_INIT | TX_IDLE; in mode_tiger()
245 isdnhdlc_rcv_init(&bc->hrecv, 0); in mode_tiger()
246 isdnhdlc_out_init(&bc->hsend, 0); in mode_tiger()
247 bc->lastrx = -1; in mode_tiger()
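
Both active branches of mode_tiger() reset the same transmit state, the transparent case at lines 225-231 and the HDLC case at lines 239-247, which additionally re-initialises the encoder and decoder with isdnhdlc_out_init()/isdnhdlc_rcv_init(). A sketch of that shared reset; the constants and the struct are stubs, not the driver's definitions:

/* Sketch only: the per-channel reset common to the transparent and HDLC
 * branches of mode_tiger().  Flag values and struct are illustrative stubs. */
#include <stdint.h>

#define TX_INIT	0x01	/* assumed values, not the driver's */
#define TX_IDLE	0x02

struct ch_state {
	int idx, free, lastrx;
	uint32_t rxstate, txstate;
};

static void channel_tx_reset(struct ch_state *ch, unsigned send_ring_size)
{
	ch->idx = 0;
	ch->free = send_ring_size / 2;	/* half a ring of credit to start with */
	ch->rxstate = 0;
	ch->txstate = TX_INIT | TX_IDLE;
	ch->lastrx = -1;
}
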
312 card->bc[i].hsbuf = kmalloc(NJ_DMA_TXSIZE, GFP_ATOMIC); in inittiger()
313 if (!card->bc[i].hsbuf) { in inittiger()
317 card->bc[i].hrbuf = kmalloc(NJ_DMA_RXSIZE, GFP_ATOMIC); in inittiger()
318 if (!card->bc[i].hrbuf) { in inittiger()
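
inittiger() allocates a pair of bounce buffers per channel for the HDLC codec, hsbuf for encoder output and hrbuf for decoder input, with GFP_ATOMIC and an individual NULL check each; the matching kfree() calls appear in nj_release() at lines 965-966. A userspace analogue of the pattern, names and sizes illustrative (free(NULL), like kfree(NULL), is a no-op, so a single release path also covers a partial failure):

/* Userspace sketch of the per-channel buffer pair. */
#include <stdlib.h>

#define CH_TXSIZE 2048	/* illustrative sizes */
#define CH_RXSIZE 2048

struct ch_bufs { unsigned char *hsbuf, *hrbuf; };

static int alloc_ch_bufs(struct ch_bufs *b)
{
	b->hsbuf = malloc(CH_TXSIZE);
	b->hrbuf = malloc(CH_RXSIZE);
	return (b->hsbuf && b->hrbuf) ? 0 : -1;
}

static void release_ch_bufs(struct ch_bufs *b)
{
	free(b->hsbuf);
	free(b->hrbuf);
}
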
364 read_dma(struct tiger_ch *bc, u32 idx, int cnt) in read_dma() argument
366 struct tiger_hw *card = bc->bch.hw; in read_dma()
371 if (bc->lastrx == idx) { in read_dma()
372 bc->rxstate |= RX_OVERRUN; in read_dma()
374 bc->bch.nr, idx); in read_dma()
376 bc->lastrx = idx; in read_dma()
377 if (test_bit(FLG_RX_OFF, &bc->bch.Flags)) { in read_dma()
378 bc->bch.dropcnt += cnt; in read_dma()
381 stat = bchannel_get_rxbuf(&bc->bch, cnt); in read_dma()
385 card->name, bc->bch.nr, cnt); in read_dma()
388 if (test_bit(FLG_TRANSPARENT, &bc->bch.Flags)) in read_dma()
389 p = skb_put(bc->bch.rx_skb, cnt); in read_dma()
391 p = bc->hrbuf; in read_dma()
395 if (bc->bch.nr & 2) in read_dma()
402 if (test_bit(FLG_TRANSPARENT, &bc->bch.Flags)) { in read_dma()
403 recv_Bchannel(&bc->bch, 0, false); in read_dma()
407 pn = bc->hrbuf; in read_dma()
409 stat = isdnhdlc_decode(&bc->hrecv, pn, cnt, &i, in read_dma()
410 bc->bch.rx_skb->data, bc->bch.maxlen); in read_dma()
412 p = skb_put(bc->bch.rx_skb, stat); in read_dma()
415 "B%1d-recv %s %d ", bc->bch.nr, in read_dma()
421 recv_Bchannel(&bc->bch, 0, false); in read_dma()
422 stat = bchannel_get_rxbuf(&bc->bch, bc->bch.maxlen); in read_dma()
425 card->name, bc->bch.nr, cnt); in read_dma()
430 card->name, bc->bch.nr); in read_dma()
433 card->name, bc->bch.nr); in read_dma()
436 card->name, bc->bch.nr, bc->bch.maxlen); in read_dma()
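
read_dma() is the receive-side mirror of the lane layout: it takes this channel's byte out of every 32-bit ring word (shifted by 8 when bc->bch.nr & 2), writes it either straight into the rx_skb (transparent) or into hrbuf for isdnhdlc_decode() (HDLC), and flags RX_OVERRUN when the ring index has not advanced since the previous call (lastrx). A userspace sketch of the extraction step, names illustrative:

/* Userspace sketch of read_dma()'s lane extraction: copy one channel's byte
 * lane out of the shared 32-bit receive ring, wrapping at the ring end. */
#include <stdint.h>

static void extract_lane(const uint32_t *ring, unsigned size, unsigned idx,
			 unsigned cnt, uint8_t *dst, int upper_lane)
{
	unsigned shift = upper_lane ? 8 : 0;

	while (cnt--) {
		*dst++ = (ring[idx] >> shift) & 0xff;
		if (++idx >= size)
			idx = 0;
	}
}

Extraction needs no mask: shifting and truncating to a byte already discards the neighbouring lane.
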
458 if (test_bit(FLG_ACTIVE, &card->bc[0].bch.Flags)) in recv_tiger()
459 read_dma(&card->bc[0], idx, cnt); in recv_tiger()
460 if (test_bit(FLG_ACTIVE, &card->bc[1].bch.Flags)) in recv_tiger()
461 read_dma(&card->bc[1], idx, cnt); in recv_tiger()
466 resync(struct tiger_ch *bc, struct tiger_hw *card) in resync() argument
470 if (bc->free > card->send.size / 2) in resync()
471 bc->free = card->send.size / 2; in resync()
477 bc->idx = (card->recv.size / 2) - 1; in resync()
479 bc->idx = card->recv.size - 1; in resync()
480 bc->txstate = TX_RUN; in resync()
482 __func__, bc->bch.nr, bc->free, bc->idx, card->send.idx); in resync()
488 fill_hdlc_flag(struct tiger_ch *bc) in fill_hdlc_flag() argument
490 struct tiger_hw *card = bc->bch.hw; in fill_hdlc_flag()
495 if (bc->free == 0) in fill_hdlc_flag()
498 __func__, bc->bch.nr, bc->free, bc->txstate, in fill_hdlc_flag()
499 bc->idx, card->send.idx); in fill_hdlc_flag()
500 if (bc->txstate & (TX_IDLE | TX_INIT | TX_UNDERRUN)) in fill_hdlc_flag()
501 resync(bc, card); in fill_hdlc_flag()
502 count = isdnhdlc_encode(&bc->hsend, NULL, 0, &i, in fill_hdlc_flag()
503 bc->hsbuf, bc->free); in fill_hdlc_flag()
505 bc->bch.nr, count); in fill_hdlc_flag()
506 bc->free -= count; in fill_hdlc_flag()
507 p = bc->hsbuf; in fill_hdlc_flag()
508 m = (bc->bch.nr & 1) ? 0xffffff00 : 0xffff00ff; in fill_hdlc_flag()
510 if (bc->idx >= card->send.size) in fill_hdlc_flag()
511 bc->idx = 0; in fill_hdlc_flag()
512 v = card->send.start[bc->idx]; in fill_hdlc_flag()
514 v |= (bc->bch.nr & 1) ? (u32)(p[i]) : ((u32)(p[i])) << 8; in fill_hdlc_flag()
515 card->send.start[bc->idx++] = v; in fill_hdlc_flag()
519 bc->bch.nr, card->name, count); in fill_hdlc_flag()
525 fill_dma(struct tiger_ch *bc) in fill_dma() argument
527 struct tiger_hw *card = bc->bch.hw; in fill_dma()
532 if (bc->free == 0) in fill_dma()
534 if (!bc->bch.tx_skb) { in fill_dma()
535 if (!test_bit(FLG_TX_EMPTY, &bc->bch.Flags)) in fill_dma()
539 p = bc->bch.fill; in fill_dma()
541 count = bc->bch.tx_skb->len - bc->bch.tx_idx; in fill_dma()
545 card->name, __func__, bc->bch.nr, count, bc->free, in fill_dma()
546 bc->bch.tx_idx, bc->bch.tx_skb->len, bc->txstate, in fill_dma()
547 bc->idx, card->send.idx); in fill_dma()
548 p = bc->bch.tx_skb->data + bc->bch.tx_idx; in fill_dma()
550 if (bc->txstate & (TX_IDLE | TX_INIT | TX_UNDERRUN)) in fill_dma()
551 resync(bc, card); in fill_dma()
552 if (test_bit(FLG_HDLC, &bc->bch.Flags) && !fillempty) { in fill_dma()
553 count = isdnhdlc_encode(&bc->hsend, p, count, &i, in fill_dma()
554 bc->hsbuf, bc->free); in fill_dma()
556 bc->bch.nr, i, count); in fill_dma()
557 bc->bch.tx_idx += i; in fill_dma()
558 bc->free -= count; in fill_dma()
559 p = bc->hsbuf; in fill_dma()
561 if (count > bc->free) in fill_dma()
562 count = bc->free; in fill_dma()
564 bc->bch.tx_idx += count; in fill_dma()
565 bc->free -= count; in fill_dma()
567 m = (bc->bch.nr & 1) ? 0xffffff00 : 0xffff00ff; in fill_dma()
570 if (!(bc->bch.nr & 1)) in fill_dma()
573 if (bc->idx >= card->send.size) in fill_dma()
574 bc->idx = 0; in fill_dma()
575 v = card->send.start[bc->idx]; in fill_dma()
578 card->send.start[bc->idx++] = v; in fill_dma()
582 if (bc->idx >= card->send.size) in fill_dma()
583 bc->idx = 0; in fill_dma()
584 v = card->send.start[bc->idx]; in fill_dma()
587 v |= (bc->bch.nr & 1) ? n : n << 8; in fill_dma()
588 card->send.start[bc->idx++] = v; in fill_dma()
593 bc->bch.nr, card->name, count); in fill_dma()
596 if (bc->free) in fill_dma()
597 bc_next_frame(bc); in fill_dma()
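
fill_dma() and fill_hdlc_flag() (lines 488-519) end in the same merge loop: pick the payload source (tx_skb data, the configured fill byte when FLG_TX_EMPTY is set, or HDLC-encoded output in hsbuf), then for each byte clear this channel's lane of the current send-ring word using the mask from lines 508/567, OR the byte in at the matching position, and advance bc->idx with wraparound while bc->free shrinks. A standalone sketch of that loop; the helper name and the returned index are my convention, not the driver's:

/* Sketch of the transmit merge loop: OR cnt payload bytes into one channel's
 * byte lane of the shared 32-bit send ring, preserving the other lane and
 * wrapping at the ring end.  Returns the updated ring index. */
#include <stdint.h>

static unsigned merge_lane(uint32_t *ring, unsigned size, unsigned idx,
			   const uint8_t *src, unsigned cnt, int upper_lane)
{
	uint32_t keep = upper_lane ? 0xffff00ffu : 0xffffff00u;

	for (unsigned i = 0; i < cnt; i++) {
		if (idx >= size)
			idx = 0;
		uint32_t v = ring[idx] & keep;	/* drop our old byte */
		v |= upper_lane ? (uint32_t)src[i] << 8 : src[i];
		ring[idx++] = v;
	}
	return idx;
}
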
602 bc_next_frame(struct tiger_ch *bc) in bc_next_frame() argument
606 if (bc->bch.tx_skb && bc->bch.tx_idx < bc->bch.tx_skb->len) { in bc_next_frame()
607 fill_dma(bc); in bc_next_frame()
609 dev_kfree_skb(bc->bch.tx_skb); in bc_next_frame()
610 if (get_next_bframe(&bc->bch)) { in bc_next_frame()
611 fill_dma(bc); in bc_next_frame()
612 test_and_clear_bit(FLG_TX_EMPTY, &bc->bch.Flags); in bc_next_frame()
613 } else if (test_bit(FLG_TX_EMPTY, &bc->bch.Flags)) { in bc_next_frame()
614 fill_dma(bc); in bc_next_frame()
615 } else if (test_bit(FLG_FILLEMPTY, &bc->bch.Flags)) { in bc_next_frame()
616 test_and_set_bit(FLG_TX_EMPTY, &bc->bch.Flags); in bc_next_frame()
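
bc_next_frame() decides what feeds that merge loop next: finish the current tx_skb if it still has data, otherwise free it and pull the next queued frame with get_next_bframe(), otherwise keep transmitting fill bytes while FLG_TX_EMPTY is set (entering that mode when FLG_FILLEMPTY is configured), and only report idle when none of those apply. A sketch of the same decision with stub types and a hypothetical dequeue callback:

/* Sketch (stub types, hypothetical names) of the bc_next_frame() decision. */
#include <stdbool.h>

struct tx_sketch {
	const unsigned char *cur;	/* current frame, NULL if none */
	unsigned len, pos;		/* frame length / bytes already sent */
	bool tx_empty;			/* FLG_TX_EMPTY analogue */
	bool fillempty;			/* FLG_FILLEMPTY analogue */
};

/* returns true while there is real or fill data to merge into the ring */
static bool next_frame(struct tx_sketch *t,
		       const unsigned char *(*dequeue)(unsigned *len))
{
	if (t->cur && t->pos < t->len)
		return true;			/* current frame not done yet */
	t->cur = dequeue(&t->len);		/* fetch the next queued frame */
	t->pos = 0;
	if (t->cur) {
		t->tx_empty = false;
		return true;
	}
	if (t->tx_empty)
		return true;			/* already in fill-byte mode */
	if (t->fillempty) {
		t->tx_empty = true;		/* enter fill-byte mode */
		return true;
	}
	return false;				/* nothing to send, go idle */
}
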
626 send_tiger_bc(struct tiger_hw *card, struct tiger_ch *bc) in send_tiger_bc() argument
630 bc->free += card->send.size / 2; in send_tiger_bc()
631 if (bc->free >= card->send.size) { in send_tiger_bc()
632 if (!(bc->txstate & (TX_UNDERRUN | TX_INIT))) { in send_tiger_bc()
634 bc->bch.nr, bc->txstate); in send_tiger_bc()
635 bc->txstate |= TX_UNDERRUN; in send_tiger_bc()
637 bc->free = card->send.size; in send_tiger_bc()
639 ret = bc_next_frame(bc); in send_tiger_bc()
641 if (test_bit(FLG_HDLC, &bc->bch.Flags)) { in send_tiger_bc()
642 fill_hdlc_flag(bc); in send_tiger_bc()
646 bc->bch.nr, bc->free, bc->idx, card->send.idx); in send_tiger_bc()
647 if (!(bc->txstate & (TX_IDLE | TX_INIT))) { in send_tiger_bc()
648 fill_mem(bc, bc->idx, bc->free, 0xff); in send_tiger_bc()
649 if (bc->free == card->send.size) in send_tiger_bc()
650 bc->txstate |= TX_IDLE; in send_tiger_bc()
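
send_tiger_bc() runs on each DMA half-buffer interrupt: the channel gets another half ring of transmit credit, and if that credit ever reaches a full ring the transmitter ran dry, so TX_UNDERRUN is flagged and the credit capped before the next frame (or HDLC flags / 0xff idle fill) is pushed. A sketch of just the credit bookkeeping, flag values assumed:

/* Sketch of the per-interrupt credit update: each half-buffer interrupt
 * frees another half ring for this channel; a full ring of credit means the
 * transmitter underran.  Stub names and flag values. */
#include <stdint.h>
#include <stdbool.h>

#define TX_INIT     0x01	/* assumed values */
#define TX_UNDERRUN 0x04

static bool replenish_credit(unsigned *free, uint32_t *txstate,
			     unsigned ring_size)
{
	*free += ring_size / 2;
	if (*free >= ring_size) {
		if (!(*txstate & (TX_UNDERRUN | TX_INIT)))
			*txstate |= TX_UNDERRUN;	/* we missed a half */
		*free = ring_size;
		return true;		/* underrun (or still initialising) */
	}
	return false;
}
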
670 if (test_bit(FLG_ACTIVE, &card->bc[i].bch.Flags)) in send_tiger()
671 send_tiger_bc(card, &card->bc[i]); in send_tiger()
742 struct tiger_ch *bc = container_of(bch, struct tiger_ch, bch); in nj_l2l1B() local
752 fill_dma(bc); in nj_l2l1B()
760 ret = mode_tiger(bc, ch->protocol); in nj_l2l1B()
771 mode_tiger(bc, ISDN_P_NONE); in nj_l2l1B()
784 channel_bctrl(struct tiger_ch *bc, struct mISDN_ctrl_req *cq) in channel_bctrl() argument
786 return mISDN_ctrl_bchannel(&bc->bch, cq); in channel_bctrl()
793 struct tiger_ch *bc = container_of(bch, struct tiger_ch, bch); in nj_bctrl() local
805 mode_tiger(bc, ISDN_P_NONE); in nj_bctrl()
813 ret = channel_bctrl(bc, arg); in nj_bctrl()
858 bch = &card->bc[rq->adr.channel - 1].bch; in open_bchannel()
934 mode_tiger(&card->bc[0], ISDN_P_NONE); in nj_init_card()
935 mode_tiger(&card->bc[1], ISDN_P_NONE); in nj_init_card()
951 mode_tiger(&card->bc[0], ISDN_P_NONE); in nj_release()
952 mode_tiger(&card->bc[1], ISDN_P_NONE); in nj_release()
964 mISDN_freebchannel(&card->bc[i].bch); in nj_release()
965 kfree(card->bc[i].hsbuf); in nj_release()
966 kfree(card->bc[i].hrbuf); in nj_release()
1018 card->bc[i].bch.nr = i + 1; in setup_instance()
1020 mISDN_initbchannel(&card->bc[i].bch, MAX_DATA_MEM, in setup_instance()
1022 card->bc[i].bch.hw = card; in setup_instance()
1023 card->bc[i].bch.ch.send = nj_l2l1B; in setup_instance()
1024 card->bc[i].bch.ch.ctrl = nj_bctrl; in setup_instance()
1025 card->bc[i].bch.ch.nr = i + 1; in setup_instance()
1026 list_add(&card->bc[i].bch.ch.list, in setup_instance()
1028 card->bc[i].bch.hw = card; in setup_instance()