2 * Copyright (c) 2003, 2004, 2005
3 * John Wehle <john@feith.com>. All rights reserved.
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
13 * 3. All advertising materials mentioning features or use of this software
14 * must display the following acknowledgement:
15 * This product includes software developed by John Wehle.
16 * 4. The name of the author may not be used to endorse or promote products
17 * derived from this software without specific prior written permission.
19 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
20 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 * DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
23 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
25 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
26 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
27 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
28 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
29 * POSSIBILITY OF SUCH DAMAGE.
33 * Conexant MPEG-2 Codec driver. Supports the CX23415 / CX23416
34 * chips that are on the Hauppauge PVR-250 and PVR-350 video
35 * capture cards. Currently only the encoder is supported.
37 * This driver was written using the invaluable information
38 * compiled by The IvyTV Project (ivtv.sourceforge.net).
41 #include <sys/param.h>
42 #include <sys/systm.h>
45 #include <sys/kernel.h>
47 #include <sys/module.h>
49 #include <sys/event.h>
51 #include <sys/signalvar.h>
52 #include <sys/thread2.h>
53 #include <sys/vnode.h>
54 #include <sys/select.h>
55 #include <sys/resource.h>
59 #include <machine/clock.h>
61 #include <dev/video/meteor/ioctl_meteor.h>
62 #include <dev/video/bktr/ioctl_bt848.h>
64 #include <bus/pci/pcireg.h>
65 #include <bus/pci/pcivar.h>
67 #include <dev/video/cxm/cxm.h>
69 #include <bus/iicbus/iiconf.h>
/*
 * PCI vendor/device id -> human-readable description table,
 * walked by cxm_probe() to match supported boards.
 * NOTE(review): this extract is missing interior lines; the usual
 * { 0, 0, NULL } terminating sentinel (required by the
 * t->name != NULL loop in cxm_probe) is not visible — confirm
 * against the full source.
 */
72 * Various supported device vendors/types and their names.
74 static struct cxm_dev cxm_devs[] = {
75 { PCI_VENDOR_ICOMPRESSION, PCI_PRODUCT_ICOMPRESSION_ITVC15,
76 "Conexant iTVC15 MPEG Coder" },
77 { PCI_VENDOR_ICOMPRESSION, PCI_PRODUCT_ICOMPRESSION_ITVC16,
78 "Conexant iTVC16 MPEG Coder" },
/*
 * Forward declarations for the newbus device interface
 * (probe/attach/detach/shutdown), the interrupt handler, and the
 * bus interface methods wired into cxm_methods[] below.
 */
83 static int cxm_probe(device_t dev);
84 static int cxm_attach(device_t dev);
85 static int cxm_detach(device_t dev);
86 static int cxm_shutdown(device_t dev);
87 static void cxm_intr(void *arg);
89 static void cxm_child_detached(device_t dev, device_t child);
90 static int cxm_read_ivar(device_t bus, device_t dev,
91 int index, uintptr_t* val);
92 static int cxm_write_ivar(device_t bus, device_t dev,
93 int index, uintptr_t val);
/*
 * Newbus method table: device_* entry points plus bus_* methods so
 * this driver can act as a bus for children (e.g. the iic companion,
 * per the MODULE_DEPEND on cxm_iic below).
 * NOTE(review): this extract is missing interior lines — the
 * DEVMETHOD_END / { 0, 0 } terminator of cxm_methods[] and the
 * name/methods fields of cxm_driver are not visible.
 */
96 static device_method_t cxm_methods[] = {
97 /* Device interface */
98 DEVMETHOD(device_probe, cxm_probe),
99 DEVMETHOD(device_attach, cxm_attach),
100 DEVMETHOD(device_detach, cxm_detach),
101 DEVMETHOD(device_shutdown, cxm_shutdown),
104 DEVMETHOD(bus_child_detached, cxm_child_detached),
105 DEVMETHOD(bus_print_child, bus_generic_print_child),
106 DEVMETHOD(bus_driver_added, bus_generic_driver_added),
107 DEVMETHOD(bus_read_ivar, cxm_read_ivar),
108 DEVMETHOD(bus_write_ivar, cxm_write_ivar),
113 static driver_t cxm_driver = {
116 sizeof(struct cxm_softc),
119 static devclass_t cxm_devclass;
/*
 * Character-device entry points (/dev/cxm*): open/close/read/ioctl/
 * poll/kqueue, plus the knote filter helpers used by cxm_kqfilter.
 * The dev_ops header advertises D_KQFILTER so kevent(2) works on the
 * encoder stream.  DRIVER_MODULE registers the driver on the pci bus;
 * MODULE_DEPEND requires the cxm_iic companion module.
 * NOTE(review): interior lines (e.g. .d_open/.d_read/.d_poll entries
 * and the closing brace of cxm_ops) are missing from this extract.
 */
121 static d_open_t cxm_open;
122 static d_close_t cxm_close;
123 static d_read_t cxm_read;
124 static d_ioctl_t cxm_ioctl;
125 static d_poll_t cxm_poll;
126 static d_kqfilter_t cxm_kqfilter;
128 static void cxm_filter_detach(struct knote *);
129 static int cxm_filter(struct knote *, long);
/* Historical static major number for the cxm character device. */
131 #define CDEV_MAJOR 93
133 static struct dev_ops cxm_ops = {
134 { "cxm", CDEV_MAJOR, D_KQFILTER },
136 .d_close = cxm_close,
138 .d_ioctl = cxm_ioctl,
140 .d_kqfilter = cxm_kqfilter
143 MODULE_DEPEND(cxm, cxm_iic, 1, 1, 1);
144 DRIVER_MODULE(cxm, pci, cxm_driver, cxm_devclass, 0, 0);
/*
 * Audio format table: maps a sample rate to the firmware audio
 * property byte.  cxm_configure_encoder() searches this table by
 * the profile's audio_sample_rate.
 */
147 static struct cxm_codec_audio_format codec_audio_formats[] = {
148 { 44100, 0xb8 }, /* 44.1 Khz, MPEG-1 Layer II, 224 kb/s */
149 { 48000, 0xe9 } /* 48 Khz, MPEG-1 Layer II, 384 kb/s */
/*
 * Encoding profile tables: VCD / SVCD / DVD (half-D1 and full-D1),
 * each in NTSC and PAL variants.  A profile bundles the firmware
 * stream type, geometry, bitrate triple { mode, average, peak },
 * GOP/DNR settings and audio sample rate; cxm_configure_encoder()
 * pushes these values to the firmware and may switch profiles by
 * fps/stream_type/width via the codec_profiles[] list at the end.
 * NOTE(review): many initializer lines (width/height/fps/GOP/DNR
 * fields, closing braces, and the VCD/SVCD entries of
 * codec_profiles[]) are missing from this extract — the visible
 * text is reproduced verbatim.
 */
156 static struct cxm_codec_profile vcd_ntsc_profile = {
157 "MPEG-1 VideoCD NTSC video and MPEG audio",
158 CXM_FW_STREAM_TYPE_VCD,
167 * Spatial filter = Manual, Temporal filter = Manual
168 * Median filter = Horizontal / Vertical
169 * Spatial filter value = 1, Temporal filter value = 4
175 static struct cxm_codec_profile vcd_pal_profile = {
176 "MPEG-1 VideoCD PAL video and MPEG audio",
177 CXM_FW_STREAM_TYPE_VCD,
186 * Spatial filter = Manual, Temporal filter = Manual
187 * Median filter = Horizontal / Vertical
188 * Spatial filter value = 1, Temporal filter value = 4
194 static struct cxm_codec_profile svcd_ntsc_profile = {
195 "MPEG-2 SuperVCD NTSC video and MPEG audio",
196 CXM_FW_STREAM_TYPE_SVCD,
202 /* 2.5 Mb/s peak limit to keep bbdmux followed by mplex -f 4 happy */
203 { 0, 1150000, 2500000 },
206 * Spatial filter = Manual, Temporal filter = Manual
207 * Median filter = Horizontal / Vertical
208 * Spatial filter value = 1, Temporal filter value = 4
214 static struct cxm_codec_profile svcd_pal_profile = {
215 "MPEG-2 SuperVCD PAL video and MPEG audio",
216 CXM_FW_STREAM_TYPE_SVCD,
222 /* 2.5 Mb/s peak limit to keep bbdmux followed by mplex -f 4 happy */
223 { 0, 1150000, 2500000 },
226 * Spatial filter = Manual, Temporal filter = Manual
227 * Median filter = Horizontal / Vertical
228 * Spatial filter value = 1, Temporal filter value = 4
234 static struct cxm_codec_profile dvd_half_d1_ntsc_profile = {
235 "MPEG-2 DVD NTSC video and MPEG audio",
236 CXM_FW_STREAM_TYPE_DVD,
242 { 0, 4000000, 4520000 }, /* 4 hours on 8.54 GB media */
245 * Spatial filter = Manual, Temporal filter = Manual
246 * Median filter = Horizontal / Vertical
247 * Spatial filter value = 1, Temporal filter value = 4
253 static struct cxm_codec_profile dvd_half_d1_pal_profile = {
254 "MPEG-2 DVD PAL video and MPEG audio",
255 CXM_FW_STREAM_TYPE_DVD,
261 { 0, 4000000, 4520000 }, /* 4 hours on 8.54 GB media */
264 * Spatial filter = Manual, Temporal filter = Manual
265 * Median filter = Horizontal / Vertical
266 * Spatial filter value = 1, Temporal filter value = 4
272 static struct cxm_codec_profile dvd_full_d1_ntsc_profile = {
273 "MPEG-2 DVD NTSC video and MPEG audio",
274 CXM_FW_STREAM_TYPE_DVD,
280 /* 9.52 Mb/s peak limit to keep bbdmux followed by mplex -f 8 happy */
281 { 0, 9000000, 9520000 }, /* 1 hour on 4.7 GB media */
284 * Spatial filter = Manual, Temporal filter = Manual
285 * Median filter = Horizontal / Vertical
286 * Spatial filter value = 1, Temporal filter value = 4
292 static struct cxm_codec_profile dvd_full_d1_pal_profile = {
293 "MPEG-2 DVD PAL video and MPEG audio",
294 CXM_FW_STREAM_TYPE_DVD,
300 /* 9.52 Mb/s peak limit to keep bbdmux followed by mplex -f 8 happy */
301 { 0, 9000000, 9520000 }, /* 1 hour on 4.7 GB media */
304 * Spatial filter = Manual, Temporal filter = Manual
305 * Median filter = Horizontal / Vertical
306 * Spatial filter value = 1, Temporal filter value = 4
/* Ordered list of profiles searched by cxm_configure_encoder(). */
313 static const struct cxm_codec_profile
314 *codec_profiles[] = {
319 &dvd_half_d1_ntsc_profile,
320 &dvd_half_d1_pal_profile,
321 &dvd_full_d1_ntsc_profile,
322 &dvd_full_d1_pal_profile
/*
 * Queue a firmware command without waiting for completion.
 *
 * Selects the encoder or decoder command-mailbox region (per
 * mbx_name), scans the CXM_MBX_FW_CMD_MAILBOXES slots for one that
 * is free (or reusable: DRV_DONE|FW_DONE set and not a DMA
 * scheduling command, whose result is read via the DMA status
 * register instead), claims it by writing IN_USE, fills in command,
 * timeout and parameters (unused parameter slots are zeroed), and
 * finally sets IN_USE|DRV_DONE to hand the slot to the firmware.
 *
 * Returns the byte offset of the mailbox used; error paths
 * (too many parameters, no free mailbox) return early — the exact
 * sentinel is on lines missing from this extract (callers compare
 * against -1, see cxm_encoder_dma_request).
 *
 * NOTE(review): interior lines (switch header, break/return
 * statements, some declarations) are missing from this extract;
 * the visible text is reproduced verbatim.
 */
327 cxm_queue_firmware_command(struct cxm_softc *sc,
328 enum cxm_mailbox_name mbx_name, uint32_t cmd,
329 uint32_t *parameters, unsigned int nparameters)
332 unsigned int mailbox;
333 uint32_t completed_command;
336 if (nparameters > CXM_MBX_MAX_PARAMETERS) {
337 device_printf(sc->dev, "too many parameters for mailbox\n");
344 case cxm_dec_mailbox:
345 mailbox = sc->dec_mbx
346 + CXM_MBX_FW_CMD_MAILBOX *sizeof(struct cxm_mailbox);
349 case cxm_enc_mailbox:
350 mailbox = sc->enc_mbx
351 + CXM_MBX_FW_CMD_MAILBOX *sizeof(struct cxm_mailbox);
359 for (i = 0; i < CXM_MBX_FW_CMD_MAILBOXES; i++) {
360 flags = CSR_READ_4(sc,
362 + offsetof(struct cxm_mailbox, flags));
363 if (!(flags & CXM_MBX_FLAG_IN_USE))
367 * Mail boxes containing certain completed commands
368 * for which the results are never needed can be reused.
371 if ((flags & (CXM_MBX_FLAG_DRV_DONE | CXM_MBX_FLAG_FW_DONE))
372 == (CXM_MBX_FLAG_DRV_DONE | CXM_MBX_FLAG_FW_DONE)) {
376 + offsetof(struct cxm_mailbox, command));
379 * DMA results are always checked by reading the
380 * DMA status register ... never by checking
381 * the mailbox after the command has completed.
384 if (completed_command == CXM_FW_CMD_SCHED_DMA_TO_HOST)
388 mailbox += sizeof(struct cxm_mailbox);
391 if (i >= CXM_MBX_FW_CMD_MAILBOXES) {
396 CSR_WRITE_4(sc, mailbox + offsetof(struct cxm_mailbox, flags),
397 CXM_MBX_FLAG_IN_USE);
400 * PCI writes may be buffered so force the
401 * write to complete by reading the last
405 CSR_READ_4(sc, mailbox + offsetof(struct cxm_mailbox, flags));
409 CSR_WRITE_4(sc, mailbox + offsetof(struct cxm_mailbox, command), cmd);
410 CSR_WRITE_4(sc, mailbox + offsetof(struct cxm_mailbox, timeout),
413 for (i = 0; i < nparameters; i++)
416 + offsetof(struct cxm_mailbox, parameters)
417 + i * sizeof(uint32_t),
420 for (; i < CXM_MBX_MAX_PARAMETERS; i++)
423 + offsetof(struct cxm_mailbox, parameters)
424 + i * sizeof(uint32_t), 0);
426 CSR_WRITE_4(sc, mailbox + offsetof(struct cxm_mailbox, flags),
427 CXM_MBX_FLAG_IN_USE | CXM_MBX_FLAG_DRV_DONE);
/*
 * Queue a firmware command and sleep until it completes.
 *
 * Queues via cxm_queue_firmware_command(), then polls the mailbox
 * flags up to 100 times for FW_DONE, tsleep()ing hz/100 between
 * polls (so this must not be called from interrupt context — see
 * cxm_firmware_command_nosleep for the busy-wait variant).  On
 * completion the result word is read back, the mailbox parameters
 * are copied out into the caller's array (command results), and the
 * mailbox is released by zeroing its flags.
 *
 * Returns 0 when the firmware reports result == 0, -1 otherwise.
 * NOTE(review): interior lines are missing here (switch on mbx_name,
 * declarations of bmp/wmesg/flags/result, timeout return) — text
 * below is reproduced verbatim.
 */
434 cxm_firmware_command(struct cxm_softc *sc,
435 enum cxm_mailbox_name mbx_name, uint32_t cmd,
436 uint32_t *parameters, unsigned int nparameters)
441 unsigned int mailbox;
449 case cxm_dec_mailbox:
454 case cxm_enc_mailbox:
463 mailbox = cxm_queue_firmware_command(sc, mbx_name, cmd,
464 parameters, nparameters);
466 device_printf(sc->dev, "no free mailboxes\n");
470 /* Give the firmware a chance to start processing the request */
471 tsleep(bmp, 0, wmesg, hz / 100);
473 for (i = 0; i < 100; i++) {
474 flags = CSR_READ_4(sc,
476 + offsetof(struct cxm_mailbox, flags));
477 if ((flags & CXM_MBX_FLAG_FW_DONE))
481 tsleep(bmp, 0, wmesg, hz / 100);
485 device_printf(sc->dev, "timeout\n");
489 result = CSR_READ_4(sc,
491 + offsetof(struct cxm_mailbox, result));
493 for (i = 0; i < nparameters; i++)
497 + offsetof(struct cxm_mailbox, parameters)
498 + i * sizeof(uint32_t));
500 CSR_WRITE_4(sc, mailbox + offsetof(struct cxm_mailbox, flags), 0);
502 return result == 0 ? 0 : -1;
/*
 * Non-sleeping variant of cxm_firmware_command(): same queue /
 * poll-for-FW_DONE / read-result / copy-out-parameters / release
 * sequence, but retries queueing up to 100 times and (presumably)
 * busy-waits with DELAY between polls instead of tsleep — the delay
 * calls are on lines missing from this extract.  Safe to call where
 * sleeping is not allowed (e.g. shutdown paths such as
 * cxm_stop_firmware, and early attach via cxm_init_hardware).
 *
 * Returns 0 when the firmware reports result == 0, -1 otherwise.
 */
507 cxm_firmware_command_nosleep(struct cxm_softc *sc,
508 enum cxm_mailbox_name mbx_name, uint32_t cmd,
509 uint32_t *parameters, unsigned int nparameters)
512 unsigned int mailbox;
516 for (i = 0; i < 100; i++) {
517 mailbox = cxm_queue_firmware_command(sc, mbx_name, cmd,
518 parameters, nparameters);
527 device_printf(sc->dev, "no free mailboxes\n");
531 /* Give the firmware a chance to start processing the request */
534 for (i = 0; i < 100; i++) {
535 flags = CSR_READ_4(sc,
537 + offsetof(struct cxm_mailbox, flags));
538 if ((flags & CXM_MBX_FLAG_FW_DONE))
546 device_printf(sc->dev, "timeout\n");
550 result = CSR_READ_4(sc,
552 + offsetof(struct cxm_mailbox, result));
554 for (i = 0; i < nparameters; i++)
558 + offsetof(struct cxm_mailbox, parameters)
559 + i * sizeof(uint32_t));
561 CSR_WRITE_4(sc, mailbox + offsetof(struct cxm_mailbox, flags), 0);
563 return result == 0 ? 0 : -1;
/*
 * Halt the encoder firmware, and on iTVC15 (CX23415) parts also the
 * decoder firmware, using the non-sleeping command path.  Called
 * from cxm_stop_hardware() when an encoder mailbox exists.
 * NOTE(review): trailing lines (arguments, error returns, the delay
 * the full source performs afterwards) are missing from this extract.
 */
568 cxm_stop_firmware(struct cxm_softc *sc)
571 if (cxm_firmware_command_nosleep(sc, cxm_enc_mailbox,
572 CXM_FW_CMD_ENC_HALT_FW, NULL, 0) < 0)
575 if (sc->type == cxm_iTVC15_type
576 && cxm_firmware_command_nosleep(sc, cxm_dec_mailbox,
577 CXM_FW_CMD_DEC_HALT_FW,
/*
 * Write the interrupt mask register, then read it back to flush
 * any buffered PCI posted write so the new mask takes effect
 * immediately.
 */
589 cxm_set_irq_mask(struct cxm_softc *sc, uint32_t mask)
593 CSR_WRITE_4(sc, CXM_REG_IRQ_MASK, mask);
596 * PCI writes may be buffered so force the
597 * write to complete by reading the last
601 CSR_READ_4(sc, CXM_REG_IRQ_MASK);
/*
 * Write the interrupt status register (acknowledging the given
 * bits), then read it back to flush the posted PCI write.
 */
610 cxm_set_irq_status(struct cxm_softc *sc, uint32_t status)
613 CSR_WRITE_4(sc, CXM_REG_IRQ_STATUS, status);
616 * PCI writes may be buffered so force the
617 * write to complete by reading the last
621 CSR_READ_4(sc, CXM_REG_IRQ_STATUS);
/*
 * Quiesce the chip: mute the SAA7115 video decoder and MSP audio
 * processor, halt the firmware (only if an encoder mailbox was
 * found, i.e. enc_mbx != -1), mask all interrupts, stop the VDM and
 * AO units, ping the APU, stop the VPU (register value differs for
 * iTVC15 vs iTVC16), reset the hardware blocks and stop the SPU.
 * Called from cxm_init_hardware() and (presumably) detach/shutdown.
 * NOTE(review): error-return lines and some delays are missing from
 * this extract.
 */
626 cxm_stop_hardware(struct cxm_softc *sc)
629 if (cxm_saa7115_mute(sc) < 0)
631 if (cxm_msp_mute(sc) < 0)
635 /* Halt the firmware */
636 if (sc->enc_mbx != -1) {
637 if (cxm_stop_firmware(sc) < 0)
641 /* Mask all interrupts */
642 cxm_set_irq_mask(sc, 0xffffffff);
645 CSR_WRITE_4(sc, CXM_REG_VDM, CXM_CMD_VDM_STOP);
648 CSR_WRITE_4(sc, CXM_REG_AO, CXM_CMD_AO_STOP);
651 CSR_WRITE_4(sc, CXM_REG_APU, CXM_CMD_APU_PING);
654 CSR_WRITE_4(sc, CXM_REG_VPU, sc->type == cxm_iTVC15_type
656 : CXM_CMD_VPU_STOP16);
658 /* Reset Hw Blocks */
659 CSR_WRITE_4(sc, CXM_REG_HW_BLOCKS, CXM_CMD_HW_BLOCKS_RST);
662 CSR_WRITE_4(sc, CXM_REG_SPU, CXM_CMD_SPU_STOP);
/*
 * Copy the compiled-in firmware images into chip memory, one 32-bit
 * word at a time: encoder firmware always, decoder firmware only on
 * iTVC15 (CX23415) parts.  Images whose first bytes are "NOFW" are
 * placeholders meaning no firmware was compiled in, which is a
 * fatal (error-return) condition.
 * NOTE(review): return statements are on lines missing from this
 * extract.
 */
672 cxm_download_firmware(struct cxm_softc *sc)
677 /* Check if firmware is compiled in */
678 if (strncmp((const char *)cxm_enc_fw, "NOFW", 4) == 0) {
679 device_printf(sc->dev, "encoder firmware not compiled in\n");
681 } else if (strncmp((const char *)cxm_dec_fw, "NOFW", 4) == 0) {
682 device_printf(sc->dev, "decoder firmware not compiled in\n");
686 /* Download the encoder firmware */
687 fw = (const uint32_t *)cxm_enc_fw;
688 for (i = 0; i < CXM_FW_SIZE; i += sizeof(*fw))
689 CSR_WRITE_4(sc, CXM_MEM_ENC + i, *fw++);
691 /* Download the decoder firmware */
692 if (sc->type == cxm_iTVC15_type) {
693 fw = (const uint32_t *)cxm_dec_fw;
694 for (i = 0; i < CXM_FW_SIZE; i += sizeof(*fw))
695 CSR_WRITE_4(sc, CXM_MEM_DEC + i, *fw++);
/*
 * Bring the chip up from cold: stop it, program encoder/decoder
 * SDRAM pre-charge and refresh, download firmware, start the SPU
 * then the VPU (clearing the enable mask bits appropriate to the
 * chip type), locate the firmware mailbox regions by scanning chip
 * memory in 0x100 steps for the 0x12345678/0x34567812/0x56781234/
 * 0x78123456 signature (the mailboxes start 16 bytes past it), and
 * finally query and print the encoder (and on iTVC15 the decoder)
 * firmware version.
 * NOTE(review): error returns, the 1-second waits referenced by the
 * comments, and some declarations are on lines missing from this
 * extract.
 */
703 cxm_init_hardware(struct cxm_softc *sc)
706 unsigned int mailbox;
709 if (cxm_stop_hardware(sc) < 0)
712 /* Initialize encoder SDRAM pre-charge */
713 CSR_WRITE_4(sc, CXM_REG_ENC_SDRAM_PRECHARGE,
714 CXM_CMD_SDRAM_PRECHARGE_INIT);
716 /* Initialize encoder SDRAM refresh to 1us */
717 CSR_WRITE_4(sc, CXM_REG_ENC_SDRAM_REFRESH,
718 CXM_CMD_SDRAM_REFRESH_INIT);
720 /* Initialize decoder SDRAM pre-charge */
721 CSR_WRITE_4(sc, CXM_REG_DEC_SDRAM_PRECHARGE,
722 CXM_CMD_SDRAM_PRECHARGE_INIT);
724 /* Initialize decoder SDRAM refresh to 1us */
725 CSR_WRITE_4(sc, CXM_REG_DEC_SDRAM_REFRESH,
726 CXM_CMD_SDRAM_REFRESH_INIT);
731 if (cxm_download_firmware(sc) < 0)
735 CSR_WRITE_4(sc, CXM_REG_SPU,
736 CSR_READ_4(sc, CXM_REG_SPU) & CXM_MASK_SPU_ENABLE);
738 /* Wait for 1 second */
742 CSR_WRITE_4(sc, CXM_REG_VPU,
743 CSR_READ_4(sc, CXM_REG_VPU)
744 & (sc->type == cxm_iTVC15_type
745 ? CXM_MASK_VPU_ENABLE15
746 : CXM_MASK_VPU_ENABLE16));
748 /* Wait for 1 second */
751 /* Locate encoder mailbox */
752 mailbox = CXM_MEM_ENC;
753 for (i = 0; i < CXM_MEM_ENC_SIZE; i += 0x100)
754 if (CSR_READ_4(sc, mailbox + i) == 0x12345678
755 && CSR_READ_4(sc, mailbox + i + 4) == 0x34567812
756 && CSR_READ_4(sc, mailbox + i + 8) == 0x56781234
757 && CSR_READ_4(sc, mailbox + i + 12) == 0x78123456)
760 if (i >= CXM_MEM_ENC_SIZE)
763 sc->enc_mbx = mailbox + i + 16;
765 /* Locate decoder mailbox */
766 if (sc->type == cxm_iTVC15_type) {
767 mailbox = CXM_MEM_DEC;
768 for (i = 0; i < CXM_MEM_DEC_SIZE; i += 0x100)
769 if (CSR_READ_4(sc, mailbox + i) == 0x12345678
770 && CSR_READ_4(sc, mailbox + i + 4) == 0x34567812
771 && CSR_READ_4(sc, mailbox + i + 8) == 0x56781234
772 && CSR_READ_4(sc, mailbox + i + 12) == 0x78123456)
775 if (i >= CXM_MEM_DEC_SIZE)
778 sc->dec_mbx = mailbox + i + 16;
781 /* Get encoder firmware version */
783 if (cxm_firmware_command_nosleep(sc, cxm_enc_mailbox,
784 CXM_FW_CMD_ENC_GET_FW_VER,
788 device_printf(sc->dev, "encoder firmware version %#x\n",
789 (unsigned int)parameter);
791 /* Get decoder firmware version */
792 if (sc->type == cxm_iTVC15_type) {
794 if (cxm_firmware_command_nosleep(sc, cxm_dec_mailbox,
795 CXM_FW_CMD_DEC_GET_FW_VER,
799 device_printf(sc->dev, "decoder firmware version %#x\n",
800 (unsigned int)parameter);
/*
 * Push the full encoder configuration to the firmware.
 *
 * Detects the input frame rate via the SAA7115; if it differs from
 * the current profile's fps, searches codec_profiles[] for a profile
 * with matching fps, stream type and width and switches to it.
 * Then configures the SAA7115 (geometry, fps, audio sample rate) and
 * issues one CXM_FW_CMD_ASSIGN_* firmware command per setting: DMA
 * block length, program index, stream type, output port, frame
 * rate/size, aspect ratio, bitrates (peak is sent in units of 400
 * bits/s), GOP closure/properties, 3:2 pulldown, DNR filter
 * mode/properties, audio format (looked up in codec_audio_formats[]
 * by sample rate), coring levels, spatial filter type, frame drop
 * rate, placeholder stream data, VBI properties/lines, and vsync
 * line counts per field.  Any failed command aborts configuration.
 * NOTE(review): error-return lines, several parameter-count
 * arguments, and some declarations are missing from this extract;
 * visible text reproduced verbatim.
 */
808 cxm_configure_encoder(struct cxm_softc *sc)
812 uint32_t parameters[12];
813 const struct cxm_codec_profile *cpp;
/* FM source: channel set influences configuration (cases below). */
815 if (sc->source == cxm_fm_source)
816 switch (cxm_tuner_selected_channel_set(sc)) {
818 case CHNLSET_CABLEIRC:
819 case CHNLSET_JPNBCST:
820 case CHNLSET_JPNCABLE:
829 fps = cxm_saa7115_detected_fps(sc);
834 if (sc->profile->fps != fps) {
837 * Pick a profile with the correct fps using the
838 * chosen stream type and width to decide between
839 * the VCD, SVCD, or DVD profiles.
842 for (i = 0; i < NUM_ELEMENTS(codec_profiles); i++)
843 if (codec_profiles[i]->fps == fps
844 && codec_profiles[i]->stream_type
845 == sc->profile->stream_type
846 && codec_profiles[i]->width == sc->profile->width)
849 if (i >= NUM_ELEMENTS(codec_profiles))
852 sc->profile = codec_profiles[i];
857 if (cxm_saa7115_configure(sc,
858 cpp->width, cpp->source_height, fps,
859 cpp->audio_sample_rate) < 0)
862 /* assign dma block len */
863 parameters[0] = 1; /* Transfer block size = 1 */
864 parameters[1] = 1; /* Units = 1 (frames) */
865 if (cxm_firmware_command(sc, cxm_enc_mailbox,
866 CXM_FW_CMD_ASSIGN_DMA_BLOCKLEN,
871 /* assign program index info */
872 parameters[0] = 0; /* Picture mask = 0 (don't generate index) */
873 parameters[1] = 0; /* Num_req = 0 */
874 if (cxm_firmware_command(sc, cxm_enc_mailbox,
875 CXM_FW_CMD_ASSIGN_PGM_INDEX_INFO,
879 /* assign stream type */
880 parameters[0] = cpp->stream_type;
881 if (cxm_firmware_command(sc, cxm_enc_mailbox,
882 CXM_FW_CMD_ASSIGN_STREAM_TYPE,
886 /* assign output port */
887 parameters[0] = 0; /* 0 (Memory) */
888 if (cxm_firmware_command(sc, cxm_enc_mailbox,
889 CXM_FW_CMD_ASSIGN_OUTPUT_PORT,
893 /* assign framerate */
894 parameters[0] = cpp->fps == 30 ? 0 : 1;
895 if (cxm_firmware_command(sc, cxm_enc_mailbox,
896 CXM_FW_CMD_ASSIGN_FRAME_RATE,
900 /* assign frame size */
901 parameters[0] = cpp->height;
902 parameters[1] = cpp->width;
903 if (cxm_firmware_command(sc, cxm_enc_mailbox,
904 CXM_FW_CMD_ASSIGN_FRAME_SIZE,
908 /* assign aspect ratio */
909 parameters[0] = cpp->aspect;
910 if (cxm_firmware_command(sc, cxm_enc_mailbox,
911 CXM_FW_CMD_ASSIGN_ASPECT_RATIO,
915 /* assign bitrates */
916 parameters[0] = cpp->bitrate.mode;
917 parameters[1] = cpp->bitrate.average;
918 parameters[2] = cpp->bitrate.peak / 400;
919 if (cxm_firmware_command(sc, cxm_enc_mailbox,
920 CXM_FW_CMD_ASSIGN_BITRATES,
924 /* assign gop closure */
925 parameters[0] = cpp->gop.closure;
926 if (cxm_firmware_command(sc, cxm_enc_mailbox,
927 CXM_FW_CMD_ASSIGN_GOP_CLOSURE,
931 /* assign gop properties */
932 parameters[0] = cpp->gop.frames;
933 parameters[1] = cpp->gop.bframes;
934 if (cxm_firmware_command(sc, cxm_enc_mailbox,
935 CXM_FW_CMD_ASSIGN_GOP_PROPERTIES,
939 /* assign 3 2 pulldown */
940 parameters[0] = cpp->pulldown;
941 if (cxm_firmware_command(sc, cxm_enc_mailbox,
942 CXM_FW_CMD_ASSIGN_3_2_PULLDOWN,
946 /* assign dnr filter mode */
947 parameters[0] = cpp->dnr.mode;
948 parameters[1] = cpp->dnr.type;
949 if (cxm_firmware_command(sc, cxm_enc_mailbox,
950 CXM_FW_CMD_ASSIGN_DNR_FILTER_MODE,
954 /* assign dnr filter props */
955 parameters[0] = cpp->dnr.spatial;
956 parameters[1] = cpp->dnr.temporal;
957 if (cxm_firmware_command(sc, cxm_enc_mailbox,
958 CXM_FW_CMD_ASSIGN_DNR_FILTER_PROPERTIES,
963 * assign audio properties
966 for (i = 0; i < NUM_ELEMENTS(codec_audio_formats); i++)
967 if (codec_audio_formats[i].sample_rate
968 == cpp->audio_sample_rate)
971 if (i >= NUM_ELEMENTS(codec_audio_formats))
974 parameters[0] = codec_audio_formats[i].format;
975 if (cxm_firmware_command(sc, cxm_enc_mailbox,
976 CXM_FW_CMD_ASSIGN_AUDIO_PROPERTIES,
980 /* assign coring levels */
981 parameters[0] = 0; /* luma_h */
982 parameters[1] = 255; /* luma_l */
983 parameters[2] = 0; /* chroma_h */
984 parameters[3] = 255; /* chroma_l */
985 if (cxm_firmware_command(sc, cxm_enc_mailbox,
986 CXM_FW_CMD_ASSIGN_CORING_LEVELS,
990 /* assign spatial filter type */
991 parameters[0] = 3; /* Luminance filter = 3 (2D H/V Separable) */
992 parameters[1] = 1; /* Chrominance filter = 1 (1D Horizontal) */
993 if (cxm_firmware_command(sc, cxm_enc_mailbox,
994 CXM_FW_CMD_ASSIGN_SPATIAL_FILTER_TYPE,
998 /* assign frame drop rate */
1000 if (cxm_firmware_command(sc, cxm_enc_mailbox,
1001 CXM_FW_CMD_ASSIGN_FRAME_DROP_RATE,
1002 parameters, 1) != 0)
1005 /* assign placeholder */
1006 parameters[0] = 0; /* type = 0 (Extension / UserData) */
1007 parameters[1] = 0; /* period */
1008 parameters[2] = 0; /* size_t */
1009 parameters[3] = 0; /* arg0 */
1010 parameters[4] = 0; /* arg1 */
1011 parameters[5] = 0; /* arg2 */
1012 parameters[6] = 0; /* arg3 */
1013 parameters[7] = 0; /* arg4 */
1014 parameters[8] = 0; /* arg5 */
1015 parameters[9] = 0; /* arg6 */
1016 parameters[10] = 0; /* arg7 */
1017 parameters[11] = 0; /* arg8 */
1018 if (cxm_firmware_command(sc, cxm_enc_mailbox,
1019 CXM_FW_CMD_ASSIGN_PLACEHOLDER,
1020 parameters, 12) != 0)
1023 /* assign VBI properties */
1024 parameters[0] = 0xbd04; /* mode = 0 (sliced), stream and user data */
1025 parameters[1] = 0; /* frames per interrupt (only valid in raw mode) */
1026 parameters[2] = 0; /* total raw VBI frames (only valid in raw mode) */
1027 parameters[3] = 0x25256262; /* ITU 656 start codes (saa7115 table 24)*/
1028 parameters[4] = 0x38387f7f; /* ITU 656 stop codes (saa7115 table 24) */
1029 parameters[5] = cpp->vbi.nlines; /* lines per frame */
1030 parameters[6] = 1440; /* bytes per line = 720 pixels */
1031 if (cxm_firmware_command(sc, cxm_enc_mailbox,
1032 CXM_FW_CMD_ASSIGN_VBI_PROPERTIES,
1033 parameters, 7) != 0)
1036 /* assign VBI lines */
1037 parameters[0] = 0xffffffff; /* all lines */
1038 parameters[1] = 0; /* disable VBI features */
1042 if (cxm_firmware_command(sc, cxm_enc_mailbox,
1043 CXM_FW_CMD_ASSIGN_VBI_LINE,
1044 parameters, 5) != 0)
1047 /* assign number of lines in fields 1 and 2 */
1048 parameters[0] = cpp->source_height / 2 + cpp->vbi.nlines;
1049 parameters[1] = cpp->source_height / 2 + cpp->vbi.nlines;
1050 if (cxm_firmware_command(sc, cxm_enc_mailbox,
1051 CXM_FW_CMD_ASSIGN_NUM_VSYNC_LINES,
1052 parameters, 2) != 0)
/*
 * Start a capture: configure the encoder, mute the video input when
 * capturing from the FM source, clear stale (masked) encoder
 * interrupts, enable the "refresh encoder input" firmware event
 * notification, re-initialize the video input while the SAA7115 and
 * MSP are muted, unmute, wait 100ms to settle, compute the capture
 * stream type (MPEG vs raw) and subtype (PCM audio and/or YUV,
 * depending on mpeg mode and source), issue BEGIN_CAPTURE, reset the
 * encoder buffer-pool bookkeeping, and unmask encoder interrupts.
 * NOTE(review): error-return lines and the declarations of
 * type/subtype are missing from this extract.
 */
1060 cxm_start_encoder(struct cxm_softc *sc)
1062 uint32_t parameters[4];
1070 if (cxm_configure_encoder(sc) < 0)
1073 /* Mute the video input if necessary. */
1074 parameters[0] = sc->source == cxm_fm_source ? 1 : 0;
1075 if (cxm_firmware_command(sc, cxm_enc_mailbox,
1076 CXM_FW_CMD_MUTE_VIDEO_INPUT,
1077 parameters, 1) != 0)
1080 /* Clear pending encoder interrupts (which are currently masked) */
1081 cxm_set_irq_status(sc, CXM_IRQ_ENC);
1083 /* Enable event notification */
1084 parameters[0] = 0; /* Event = 0 (refresh encoder input) */
1085 parameters[1] = 1; /* Notification = 1 (enable) */
1086 parameters[2] = 0x10000000; /* Interrupt bit */
1087 parameters[3] = -1; /* Mailbox = -1 (no mailbox) */
1088 if (cxm_firmware_command(sc, cxm_enc_mailbox,
1089 CXM_FW_CMD_ENC_EVENT_NOTIFICATION,
1090 parameters, 4) != 0)
1093 if (cxm_saa7115_mute(sc) < 0)
1095 if (cxm_msp_mute(sc) < 0)
1098 if (cxm_firmware_command(sc, cxm_enc_mailbox,
1099 CXM_FW_CMD_INITIALIZE_VIDEO_INPUT,
1103 if (cxm_saa7115_unmute(sc) < 0)
1105 if (cxm_msp_unmute(sc) < 0)
1108 /* Wait for 100ms */
1109 tsleep(&sc->encoding, 0, "cxmce", hz / 10);
1111 type = sc->mpeg ? CXM_FW_CAPTURE_STREAM_TYPE_MPEG
1112 : CXM_FW_CAPTURE_STREAM_TYPE_RAW;
1113 subtype = ((sc->mpeg || sc->source == cxm_fm_source)
1114 ? CXM_FW_CAPTURE_STREAM_PCM_AUDIO : 0)
1115 | ((sc->mpeg || sc->source != cxm_fm_source)
1116 ? CXM_FW_CAPTURE_STREAM_YUV : 0);
1118 /* Start the encoder */
1119 parameters[0] = type;
1120 parameters[1] = subtype;
1121 if (cxm_firmware_command(sc, cxm_enc_mailbox,
1122 CXM_FW_CMD_BEGIN_CAPTURE, parameters, 2) != 0)
1125 sc->enc_pool.offset = 0;
1126 sc->enc_pool.read = 0;
1127 sc->enc_pool.write = 0;
1129 sc->encoding_eos = 0;
1133 /* Enable interrupts */
1134 cxm_set_irq_mask(sc, sc->irq_mask & ~CXM_IRQ_ENC);
/*
 * Stop a capture: issue END_CAPTURE with the same type/subtype used
 * to start it (When = 0 requests a stop at end of GOP for MPEG,
 * immediate otherwise), wait up to 1 second for the end-of-stream
 * flag set by the interrupt handler (warn if an MPEG capture never
 * delivered EOS), disable the firmware event notification, and mask
 * encoder interrupts again.
 * NOTE(review): error-return lines and type/subtype declarations are
 * missing from this extract.
 */
1141 cxm_stop_encoder(struct cxm_softc *sc)
1143 uint32_t parameters[4];
1150 type = sc->mpeg ? CXM_FW_CAPTURE_STREAM_TYPE_MPEG
1151 : CXM_FW_CAPTURE_STREAM_TYPE_RAW;
1152 subtype = ((sc->mpeg || sc->source == cxm_fm_source)
1153 ? CXM_FW_CAPTURE_STREAM_PCM_AUDIO : 0)
1154 | ((sc->mpeg || sc->source != cxm_fm_source)
1155 ? CXM_FW_CAPTURE_STREAM_YUV : 0);
1157 /* Stop the encoder */
1158 parameters[0] = sc->mpeg ? 0 : 1; /* When = 0 (end of GOP) */
1159 parameters[1] = type;
1160 parameters[2] = subtype;
1161 if (cxm_firmware_command(sc, cxm_enc_mailbox,
1162 CXM_FW_CMD_END_CAPTURE, parameters, 3) != 0)
1165 /* Wait for up to 1 second */
1167 if (!sc->encoding_eos)
1168 tsleep(&sc->encoding_eos, 0, "cxmeos", hz)
1171 if (sc->mpeg && !sc->encoding_eos)
1172 device_printf(sc->dev, "missing encoder EOS\n");
1174 /* Disable event notification */
1175 parameters[0] = 0; /* Event = 0 (refresh encoder input) */
1176 parameters[1] = 0; /* Notification = 0 (disable) */
1177 parameters[2] = 0x10000000; /* Interrupt bit */
1178 parameters[3] = -1; /* Mailbox = -1 (no mailbox) */
1179 if (cxm_firmware_command(sc, cxm_enc_mailbox,
1180 CXM_FW_CMD_ENC_EVENT_NOTIFICATION,
1181 parameters, 4) != 0)
1184 /* Disable interrupts */
1185 cxm_set_irq_mask(sc, sc->irq_mask | CXM_IRQ_ENC);
/*
 * Pause the encoder via CXM_FW_CMD_PAUSE_ENCODER (pause/unpause is
 * selected by the single parameter; its assignment is on a line
 * missing from this extract).
 * NOTE(review): "¶meter" below is a mis-encoded "&parameter"
 * ("&para" swallowed as an HTML entity) — restore it when repairing
 * the full source.
 */
1194 cxm_pause_encoder(struct cxm_softc *sc)
1198 /* Pause the encoder */
1200 if (cxm_firmware_command(sc, cxm_enc_mailbox,
1201 CXM_FW_CMD_PAUSE_ENCODER, ¶meter, 1) != 0)
/*
 * Resume a paused encoder — same CXM_FW_CMD_PAUSE_ENCODER command as
 * cxm_pause_encoder, with the parameter value (set on a missing
 * line) selecting "unpause".
 * NOTE(review): "¶meter" below is a mis-encoded "&parameter" —
 * restore it when repairing the full source.
 */
1209 cxm_unpause_encoder(struct cxm_softc *sc)
1213 /* Unpause the encoder */
1215 if (cxm_firmware_command(sc, cxm_enc_mailbox,
1216 CXM_FW_CMD_PAUSE_ENCODER, ¶meter, 1) != 0)
/*
 * Convert a DMA-completed buffer to host layout, then mark it
 * cxm_host_byte_order.  Two device formats are handled:
 *  - MPEG: byte-swap every 32-bit word in place (bswap32).
 *  - YUV12: unpack 16x16 macroblocks into planar lines using the
 *    buffer immediately before `current` in the ring as scratch —
 *    that slot is guaranteed free because one free buffer always
 *    marks the ring's end.
 * The compile-time #error guards the hand-unrolled 4-word copy,
 * which assumes CXM_MACROBLOCK_WIDTH == 16 (4 uint32_t per row).
 * NOTE(review): the copy right-hand sides, loop closers and the use
 * of `offset` are on lines missing from this extract.
 */
1224 cxm_encoder_fixup_byte_order(struct cxm_softc *sc,
1225 unsigned int current, size_t offset)
1227 unsigned int strips;
1231 unsigned int macroblocks_per_line;
1232 unsigned int scratch;
1233 unsigned int words_per_line;
1238 switch (sc->enc_pool.bufs[current].byte_order) {
1239 case cxm_device_mpeg_byte_order:
1242 * Convert each 32 bit word to the proper byte ordering.
1246 ptr = (uint32_t *)sc->enc_pool.bufs[current].vaddr;
1247 nbytes != sc->enc_pool.bufs[current].size;
1248 nbytes += sizeof(*ptr), ptr++)
1249 *ptr = bswap32(*ptr);
1252 case cxm_device_yuv12_byte_order:
1255 * Convert each macro block to planar using
1256 * a scratch buffer (the buffer prior to the
1257 * current buffer is always free since it marks
1258 * the end of the ring buffer).
1261 scratch = (current + (CXM_SG_BUFFERS - 1)) % CXM_SG_BUFFERS;
1268 src = (uint32_t *)sc->enc_pool.bufs[current].vaddr;
1269 words_per_line = sc->profile->width / sizeof(*ptr);
1270 macroblocks_per_line
1271 = sc->profile->width / CXM_MACROBLOCK_WIDTH;
1272 strips = sc->enc_pool.bufs[current].size
1273 / (macroblocks_per_line * CXM_MACROBLOCK_SIZE);
1275 for (i = 0; i < strips; i++) {
1276 ptr = (uint32_t *)sc->enc_pool.bufs[scratch].vaddr
1277 + i * macroblocks_per_line * CXM_MACROBLOCK_SIZE
1279 for (j = 0; j < macroblocks_per_line; j++) {
1280 for (k = 0; k < CXM_MACROBLOCK_HEIGHT; k++) {
1281 #if CXM_MACROBLOCK_WIDTH != 16
1282 # error CXM_MACROBLOCK_WIDTH != 16
1284 *(ptr + k * words_per_line)
1286 *(ptr + k * words_per_line + 1)
1288 *(ptr + k * words_per_line + 2)
1290 *(ptr + k * words_per_line + 3)
1293 ptr += CXM_MACROBLOCK_WIDTH / sizeof(*ptr);
1297 sc->enc_pool.bufs[scratch].size
1298 = sc->enc_pool.bufs[current].size;
1307 sc->enc_pool.bufs[current].byte_order = cxm_host_byte_order;
/*
 * Tell the firmware to discard its pending DMA-to-host request by
 * queueing a SCHED_DMA_TO_HOST command (parameter setup is on lines
 * missing from this extract), then flag encoding_dma = -1 so
 * cxm_encoder_dma_done() knows the completion carries no buffers.
 */
1314 cxm_encoder_dma_discard(struct cxm_softc *sc)
1316 uint32_t parameters[3];
1318 /* Discard the DMA request */
1322 if (cxm_queue_firmware_command(sc, cxm_enc_mailbox,
1323 CXM_FW_CMD_SCHED_DMA_TO_HOST,
1324 parameters, 3) == -1) {
1325 device_printf(sc->dev,
1326 "failed to discard encoder dma request\n");
1330 sc->encoding_dma = -1;
/*
 * Handle a DMA-complete interrupt: consume sc->encoding_dma (the
 * buffer count recorded by cxm_encoder_dma_request, or -1 for a
 * discarded request, which is silently dropped), check the low four
 * DMA status bits for success, advance the ring-buffer write index
 * by the completed buffer count, and notify consumers: ksignal the
 * registered process (if any), wakeup readers blocked on
 * enc_pool.read, and selwakeup pollers.
 * NOTE(review): early returns and the modulo on the write-index
 * update are on lines missing from this extract.
 */
1335 cxm_encoder_dma_done(struct cxm_softc *sc)
1337 int buffers_pending;
1340 if (!sc->encoding_dma) {
1341 device_printf(sc->dev,
1342 "encoder dma not already in progress\n");
1346 buffers_pending = sc->encoding_dma;
1347 sc->encoding_dma = 0;
1349 if (buffers_pending < 0)
1352 status = CSR_READ_4(sc, CXM_REG_DMA_STATUS) & 0x0000000f;
1355 & (CXM_DMA_ERROR_LIST | CXM_DMA_ERROR_WRITE | CXM_DMA_SUCCESS))
1356 != CXM_DMA_SUCCESS) {
1357 device_printf(sc->dev, "encoder dma status %#x\n",
1358 (unsigned int)status);
1362 /* Update the books */
1364 sc->enc_pool.write = (sc->enc_pool.write + buffers_pending)
1368 /* signal anyone requesting notification */
1370 ksignal (sc->enc_proc, sc->enc_signal);
1372 /* wakeup anyone waiting for data */
1373 wakeup(&sc->enc_pool.read);
1375 /* wakeup anyone polling for data */
1376 selwakeup(&sc->enc_sel);
/*
 * Service a firmware "data ready" request (runs in the interrupt
 * handler): read the request parameters from the firmware DMA
 * mailbox, decode the stream type (0 = MPEG; 1 = YUV with separate
 * Y and UV extents; 2 = PCM audio; anything else is discarded),
 * pick the matching device byte order and maximum scatter/gather
 * segment size (for YUV, rounded so macroblock strips never span
 * buffers — hence the compile-time multiple-of-256 check), then
 * carve the requests into ring-buffer slots: each transfer is
 * rounded up to a multiple of 256 bytes, at least one slot must
 * remain free to mark the ring's end, and overflow discards the
 * request.  The last SG entry is tagged with bit 31, the list is
 * handed to the firmware via SCHED_DMA_TO_HOST, and the pending
 * buffer count is recorded for cxm_encoder_dma_done().  Recording
 * after queueing is race-free because this already runs in the
 * interrupt handler.
 * NOTE(review): the switch header on `type`, several case/break
 * lines, the 0xffffff00 rounding mask, and the declarations of
 * requests[]/buffers_free/type are missing from this extract.
 */
1381 cxm_encoder_dma_request(struct cxm_softc *sc)
1383 enum cxm_byte_order byte_order;
1385 int buffers_pending;
1386 unsigned int current;
1388 unsigned int mailbox;
1389 unsigned int macroblocks_per_line;
1390 unsigned int nrequests;
1391 unsigned int strips;
1392 uint32_t parameters[CXM_MBX_MAX_PARAMETERS];
1394 size_t max_sg_segment;
1400 if (sc->encoding_dma) {
1401 device_printf(sc->dev, "encoder dma already in progress\n");
1402 cxm_encoder_dma_discard(sc);
1406 mailbox = sc->enc_mbx
1407 + CXM_MBX_FW_DMA_MAILBOX * sizeof(struct cxm_mailbox);
1409 for (i = 0; i < CXM_MBX_MAX_PARAMETERS; i++)
1413 + offsetof(struct cxm_mailbox, parameters)
1414 + i * sizeof(uint32_t)
1417 byte_order = cxm_device_mpeg_byte_order;
1418 max_sg_segment = CXM_SG_SEGMENT;
1420 type = parameters[0];
1424 requests[nrequests].offset = parameters[1];
1425 requests[nrequests++].size = parameters[2];
1429 byte_order = cxm_device_yuv12_byte_order;
1432 * Simplify macroblock unpacking by ensuring
1433 * that strips don't span buffers.
1436 #if CXM_MACROBLOCK_SIZE % 256
1437 # error CXM_MACROBLOCK_SIZE not a multiple of 256
1440 macroblocks_per_line = sc->profile->width
1441 / CXM_MACROBLOCK_WIDTH;
1442 strips = CXM_SG_SEGMENT
1443 / (macroblocks_per_line * CXM_MACROBLOCK_SIZE);
1444 max_sg_segment = strips
1445 * macroblocks_per_line * CXM_MACROBLOCK_SIZE;
1447 requests[nrequests].offset = parameters[1]; /* Y */
1448 requests[nrequests++].size = parameters[2];
1449 requests[nrequests].offset = parameters[3]; /* UV */
1450 requests[nrequests++].size = parameters[4];
1453 case 2: /* PCM (audio) */
1456 device_printf(sc->dev, "encoder dma type %#x unsupported\n",
1457 (unsigned int)type);
1458 cxm_encoder_dma_discard(sc);
1463 * Determine the number of buffers free at this * instant *
1464 * taking into consideration that the ring buffer wraps.
1467 buffers_free = sc->enc_pool.read - sc->enc_pool.write;
1468 if (buffers_free <= 0)
1469 buffers_free += CXM_SG_BUFFERS;
1473 * Build the scatter / gather list taking in
1474 * consideration that the ring buffer wraps,
1475 * at least one free buffer must always be
1476 * present to mark the end of the ring buffer,
1477 * and each transfer must be a multiple of 256.
1480 buffers_pending = 0;
1481 current = sc->enc_pool.write;
1483 for (i = 0; i < nrequests; i++) {
1484 if (!requests[i].size) {
1485 device_printf(sc->dev, "encoder dma size is zero\n");
1486 cxm_encoder_dma_discard(sc);
1490 while (requests[i].size) {
1491 sc->enc_pool.bufs[current].size
1492 = requests[i].size > max_sg_segment
1493 ? max_sg_segment : requests[i].size;
1494 sc->enc_pool.bufs[current].byte_order = byte_order;
1496 sc->enc_sg.vaddr[buffers_pending].src
1497 = requests[i].offset;
1498 sc->enc_sg.vaddr[buffers_pending].dst
1499 = sc->enc_pool.bufs[current].baddr;
1500 sc->enc_sg.vaddr[buffers_pending].size
1501 = (sc->enc_pool.bufs[current].size + 0x000000ff)
1504 requests[i].offset += sc->enc_pool.bufs[current].size;
1505 requests[i].size -= sc->enc_pool.bufs[current].size;
1507 current = (current + 1) % CXM_SG_BUFFERS;
1509 if (buffers_pending >= buffers_free) {
1510 device_printf(sc->dev,
1511 "encoder dma not enough buffer space free\n");
1512 cxm_encoder_dma_discard(sc);
1518 /* Mark the last transfer in the list */
1519 sc->enc_sg.vaddr[buffers_pending - 1].size |= 0x80000000;
1521 /* Schedule the DMA */
1522 parameters[0] = sc->enc_sg.baddr;
1523 parameters[1] = buffers_pending * sizeof(sc->enc_sg.vaddr[0]);
1524 parameters[2] = type;
1525 if (cxm_queue_firmware_command(sc, cxm_enc_mailbox,
1526 CXM_FW_CMD_SCHED_DMA_TO_HOST,
1527 parameters, 3) == -1) {
1528 device_printf(sc->dev,
1529 "failed to schedule encoder dma request\n");
1534 * Record the number of pending buffers for the
1535 * benefit of cxm_encoder_dma_done. Doing this
1536 * after queuing the command doesn't introduce
1537 * a race condition since we're already in the
1538 * interrupt handler.
1541 sc->encoding_dma = buffers_pending;
/*
 * Wait for the active signal sources (tuner, video decoder, audio
 * decoder) to lock after a source/frequency change.
 * NOTE(review): this listing is truncated (embedded line numbers skip),
 * so local declarations, returns and closing braces are not visible here.
 */
1546 cxm_encoder_wait_for_lock(struct cxm_softc *sc)
1555 * Wait for the tuner to lock.
/* Tuner lock only matters when the tuner or FM radio is the source. */
1557 if (sc->source == cxm_fm_source || sc->source == cxm_tuner_source) {
1558 result = cxm_tuner_wait_for_lock(sc);
1564 * Wait for the video decoder to lock.
/* FM radio is audio-only, so the SAA7115 video decoder is skipped. */
1566 if (sc->source != cxm_fm_source) {
1567 result = cxm_saa7115_wait_for_lock(sc);
1570 else if (result == 0)
1575 * Wait for the audio decoder to lock.
1577 if (sc->source == cxm_tuner_source) {
/* Remember mute state; autodetect below may disturb it. */
1578 muted = cxm_msp_is_muted(sc);
1580 result = cxm_msp_autodetect_standard(sc);
1583 else if (result == 0)
/* Restore the unmuted state if the device was not muted beforehand. */
1586 if (muted == 0 && cxm_msp_unmute(sc) < 0)
/*
 * bus_dmamap_load() callback: store the bus address of the first DMA
 * segment into the bus_addr_t pointed to by arg.
 */
1595 cxm_mapmem(void *arg, bus_dma_segment_t *segs, int nseg, int error)
1597 bus_addr_t *busaddrp;
1600 * Only the first bus space address is needed
1601 * since it's known that the memory is physically
1602 * contiguous due to bus_dmamem_alloc.
1605 busaddrp = (bus_addr_t *)arg;
1606 *busaddrp = segs->ds_addr;
1611 * the boot time probe routine.
/*
 * Walk the driver's PCI id table and claim the device on a
 * vendor/device match.  NOTE(review): table declaration and the
 * return statements fall in elided lines of this listing.
 */
1614 cxm_probe(device_t dev)
1620 while(t->name != NULL) {
1621 if ((pci_get_vendor(dev) == t->vid) &&
1622 (pci_get_device(dev) == t->did)) {
1623 device_set_desc(dev, t->name);
1634 * the attach routine.
/*
 * Attach: enable PCI access, map registers, attach the cxm_iic child,
 * initialize tuner/video/audio/IR parts, allocate interrupt and DMA
 * resources (parent tag, per-buffer encoder pool, scatter/gather list),
 * bring up the hardware and create /dev/cxmN.  The tail (from the
 * "if (sc->enc_sg.baddr)" test down) is the error-unwind ladder that
 * releases whatever was acquired before the failure.
 * NOTE(review): listing is truncated — gotos/labels, error returns and
 * several declarations are in elided lines.
 */
1637 cxm_attach(device_t dev)
1644 struct cxm_softc *sc;
1646 /* Get the device data */
1647 sc = device_get_softc(dev);
1648 unit = device_get_unit(dev);
/* Default to iTVC15; override below for known iTVC16 device ids. */
1651 sc->type = cxm_iTVC15_type;
1653 switch(pci_get_device(dev)) {
1654 case PCI_PRODUCT_ICOMPRESSION_ITVC16:
1655 sc->type = cxm_iTVC16_type;
1663 * Enable bus mastering and memory mapped I/O.
1665 pci_enable_busmaster(dev);
1666 pci_enable_io(dev, SYS_RES_MEMORY);
/* Verify memory-space decoding actually stuck in the command register. */
1667 command = pci_read_config(dev, PCIR_COMMAND, 4);
1669 if (!(command & PCIM_CMD_MEMEN)) {
1670 device_printf(dev, "failed to enable memory mappings\n");
1676 * Map control/status registers.
1679 sc->mem_res = bus_alloc_resource(dev, SYS_RES_MEMORY, &rid,
1680 0, ~0, 1, RF_ACTIVE);
1683 device_printf(dev, "could not map memory\n");
1688 sc->btag = rman_get_bustag(sc->mem_res);
1689 sc->bhandle = rman_get_bushandle(sc->mem_res);
1692 * Attach the I2C bus.
1694 sc->cxm_iic = device_add_child(dev, "cxm_iic", unit);
1697 device_printf(dev, "could not add cxm_iic\n");
1702 error = device_probe_and_attach(sc->cxm_iic);
1705 device_printf(dev, "could not attach cxm_iic\n");
1710 * Initialize the tuner.
1712 if (cxm_tuner_init(sc) < 0) {
1713 device_printf(dev, "could not initialize tuner\n");
1719 * Initialize the SAA7115.
1721 if (cxm_saa7115_init(sc) < 0) {
1722 device_printf(dev, "could not initialize video decoder\n");
1728 * Initialize the MSP3400.
1730 if (cxm_msp_init(sc) < 0) {
1731 device_printf(dev, "could not initialize audio decoder\n");
1737 * Initialize the IR Remote.
1739 if (cxm_ir_init(sc) < 0) {
1740 device_printf(dev, "could not initialize IR remote\n");
1749 * Disable the Conexant device.
1751 * This is done * after * attaching the I2C bus so
1752 * cxm_stop_hardware can mute the video and audio
1755 cxm_stop_hardware(sc);
1758 * Allocate our interrupt.
1761 sc->irq_res = bus_alloc_resource(dev, SYS_RES_IRQ, &rid,
1762 0, ~0, 1, RF_SHAREABLE | RF_ACTIVE);
1764 if (sc->irq_res == NULL) {
1765 device_printf(dev, "could not map interrupt\n");
1770 error = bus_setup_intr(dev, sc->irq_res, 0,
1771 cxm_intr, sc, &sc->ih_cookie, NULL);
1773 device_printf(dev, "could not setup irq\n");
1779 * Allocate a DMA tag for the parent bus.
1781 error = bus_dma_tag_create(NULL, 1, 0,
1782 BUS_SPACE_MAXADDR_32BIT,
1783 BUS_SPACE_MAXADDR, NULL, NULL,
1784 BUS_SPACE_MAXSIZE_32BIT, 1,
1785 BUS_SPACE_MAXSIZE_32BIT, 0,
1788 device_printf(dev, "could not create parent bus DMA tag\n");
1793 * Allocate a DMA tag for the encoder buffers.
/* 256-byte alignment — encoder DMA transfers are multiples of 256
 * (see the scatter/gather construction in cxm_encoder_dma_request). */
1795 error = bus_dma_tag_create(sc->parent_dmat, 256, 0,
1796 BUS_SPACE_MAXADDR_32BIT,
1797 BUS_SPACE_MAXADDR, NULL, NULL,
1799 BUS_SPACE_MAXSIZE_32BIT, 0,
1800 &sc->enc_pool.dmat);
1803 "could not create encoder buffer DMA tag\n");
1807 for (i = 0; i < CXM_SG_BUFFERS; i++) {
1810 * Allocate the encoder buffer.
1812 error = bus_dmamem_alloc(sc->enc_pool.dmat,
1813 (void **)&sc->enc_pool.bufs[i].vaddr,
1815 &sc->enc_pool.bufs[i].dmamap);
1818 "could not allocate encoder buffer\n");
1823 * Map the encoder buffer.
1825 error = bus_dmamap_load(sc->enc_pool.dmat,
1826 sc->enc_pool.bufs[i].dmamap,
1827 sc->enc_pool.bufs[i].vaddr,
1830 &sc->enc_pool.bufs[i].baddr, 0);
1832 device_printf(dev, "could not map encoder buffer\n");
1838 * Allocate a DMA tag for the scatter / gather list.
1840 error = bus_dma_tag_create(sc->parent_dmat, 1, 0,
1841 BUS_SPACE_MAXADDR_32BIT,
1842 BUS_SPACE_MAXADDR, NULL, NULL,
1844 * sizeof(struct cxm_sg_entry), 1,
1845 BUS_SPACE_MAXSIZE_32BIT, 0,
1849 "could not create scatter / gather DMA tag\n");
1854 * Allocate the scatter / gather list.
1856 error = bus_dmamem_alloc(sc->enc_sg.dmat, (void **)&sc->enc_sg.vaddr,
1857 BUS_DMA_NOWAIT, &sc->enc_sg.dmamap);
1860 "could not allocate scatter / gather list\n");
1865 * Map the scatter / gather list.
1867 error = bus_dmamap_load(sc->enc_sg.dmat, sc->enc_sg.dmamap,
1869 CXM_SG_BUFFERS * sizeof(struct cxm_sg_entry),
1870 cxm_mapmem, &sc->enc_sg.baddr, 0);
1872 device_printf(dev, "could not map scatter / gather list\n");
1877 * Initialize the hardware.
1879 if (cxm_init_hardware(sc) < 0) {
1880 device_printf(dev, "could not initialize hardware\n");
/* Defaults: full-D1 NTSC DVD profile, tuner input. */
1885 sc->profile = &dvd_full_d1_ntsc_profile;
1887 sc->source = cxm_tuner_source;
1890 /* make the device entries */
1891 sc->cxm_dev_t = make_dev(&cxm_ops, unit,
1892 0, 0, 0444, "cxm%d", unit);
/*
 * Error-unwind: release resources in reverse order of acquisition.
 * Each step is guarded so the ladder is safe from any failure point.
 */
1897 if (sc->enc_sg.baddr)
1898 bus_dmamap_unload(sc->enc_sg.dmat, sc->enc_sg.dmamap);
1899 if (sc->enc_sg.vaddr)
1900 bus_dmamem_free(sc->enc_sg.dmat, sc->enc_sg.vaddr,
1902 if (sc->enc_sg.dmat)
1903 bus_dma_tag_destroy(sc->enc_sg.dmat);
1905 for (i = 0; i < CXM_SG_BUFFERS; i++) {
1906 if (sc->enc_pool.bufs[i].baddr)
1907 bus_dmamap_unload(sc->enc_pool.dmat,
1908 sc->enc_pool.bufs[i].dmamap);
1909 if (sc->enc_pool.bufs[i].vaddr)
1910 bus_dmamem_free(sc->enc_pool.dmat,
1911 sc->enc_pool.bufs[i].vaddr,
1912 sc->enc_pool.bufs[i].dmamap);
1915 if (sc->enc_pool.dmat)
1916 bus_dma_tag_destroy(sc->enc_pool.dmat);
1918 if (sc->parent_dmat)
1919 bus_dma_tag_destroy(sc->parent_dmat);
1922 * Detach the I2C bus.
1924 * This is done * after * deallocating the scatter / gather
1925 * list and buffers so the kernel has a better chance of
1926 * gracefully handling a memory shortage.
1928 * Detach the children before recursively deleting
1929 * in case a child has a pointer to a grandchild
1930 * which is used by the child's detach routine.
1932 bus_generic_detach(dev);
1934 device_delete_child(dev, sc->cxm_iic);
1937 bus_teardown_intr(dev, sc->irq_res, sc->ih_cookie);
1939 bus_release_resource(dev, SYS_RES_IRQ, 0, sc->irq_res);
1941 bus_release_resource(dev, SYS_RES_MEMORY, CXM_RID, sc->mem_res);
1947 * the detach routine.
/*
 * Detach: quiesce the hardware, remove the character device, free all
 * DMA resources, detach/delete the cxm_iic child and release the IRQ
 * and register mappings — essentially cxm_attach in reverse.
 * NOTE(review): truncated listing; return value / some declarations
 * (e.g. child, i) are in elided lines.
 */
1950 cxm_detach(device_t dev)
1953 struct cxm_softc *sc;
1956 /* Get the device data */
1957 sc = device_get_softc(dev);
1959 /* Disable the Conexant device. */
1960 cxm_stop_hardware(sc);
1962 /* Unregister the /dev/cxmN device. */
1963 dev_ops_remove_minor(&cxm_ops, /*0, */device_get_unit(dev));
1966 * Deallocate scatter / gather list and buffers.
1968 bus_dmamap_unload(sc->enc_sg.dmat, sc->enc_sg.dmamap);
1969 bus_dmamem_free(sc->enc_sg.dmat, sc->enc_sg.vaddr, sc->enc_sg.dmamap);
1971 bus_dma_tag_destroy(sc->enc_sg.dmat);
1973 for (i = 0; i < CXM_SG_BUFFERS; i++) {
1974 bus_dmamap_unload(sc->enc_pool.dmat,
1975 sc->enc_pool.bufs[i].dmamap);
1976 bus_dmamem_free(sc->enc_pool.dmat, sc->enc_pool.bufs[i].vaddr,
1977 sc->enc_pool.bufs[i].dmamap);
1980 bus_dma_tag_destroy(sc->enc_pool.dmat);
1982 bus_dma_tag_destroy(sc->parent_dmat);
1985 * Detach the I2C bus.
1987 * This is done * after * deallocating the scatter / gather
1988 * list and buffers so the kernel has a better chance of
1989 * gracefully handling a memory shortage.
1991 * Detach the children before recursively deleting
1992 * in case a child has a pointer to a grandchild
1993 * which is used by the child's detach routine.
1995 * Remember the child before detaching so we can
1996 * delete it (bus_generic_detach indirectly zeroes
1999 child = sc->cxm_iic;
2000 bus_generic_detach(dev);
2002 device_delete_child(dev, child);
2004 /* Deallocate resources. */
2005 bus_teardown_intr(dev, sc->irq_res, sc->ih_cookie);
2006 bus_release_resource(dev, SYS_RES_IRQ, 0, sc->irq_res);
2007 bus_release_resource(dev, SYS_RES_MEMORY, CXM_RID, sc->mem_res);
2013 * the shutdown routine.
/* System shutdown hook: just quiesce the Conexant hardware. */
2016 cxm_shutdown(device_t dev)
2018 struct cxm_softc *sc = device_get_softc(dev);
2020 /* Disable the Conexant device. */
2021 cxm_stop_hardware(sc);
2027 * the interrupt routine.
/*
 * Interrupt handler: read the IRQ status register, mask off interrupts
 * this driver does not service, dispatch encoder DMA-done / DMA-request
 * / end-of-stream events, then acknowledge the handled bits.
 * NOTE(review): the function signature and early-exit on no pending
 * status are in elided lines of this truncated listing.
 */
2033 struct cxm_softc *sc;
2035 /* Get the device data */
2036 sc = (struct cxm_softc *)arg;
2038 status = CSR_READ_4(sc, CXM_REG_IRQ_STATUS);
/* Ignore interrupt causes that are masked off for this driver. */
2040 status &= ~sc->irq_mask;
2045 /* Process DMA done before handling a new DMA request or EOS */
2046 if (status & CXM_IRQ_ENC_DMA_DONE)
2047 cxm_encoder_dma_done(sc);
2049 if (status & CXM_IRQ_ENC_DMA_REQUEST)
2050 cxm_encoder_dma_request(sc);
2052 if (status & CXM_IRQ_ENC_EOS) {
/* Flag end-of-stream and wake any thread sleeping on it. */
2053 sc->encoding_eos = 1;
2054 wakeup(&sc->encoding_eos);
2057 cxm_set_irq_status(sc, status);
2062 * the child detached routine.
/*
 * Bus callback invoked when a child detaches: clear the cached
 * cxm_iic handle if it was that child (body of the if is elided).
 */
2065 cxm_child_detached(device_t dev, device_t child)
2067 struct cxm_softc *sc;
2069 /* Get the device data */
2070 sc = device_get_softc(dev);
2072 if (child == sc->cxm_iic)
/*
 * Instance-variable read accessor for children (cxm_iic): hand out
 * pointers to the bus handle, bus tag, or iicbus device field.
 * NOTE(review): the switch header, break/return statements and the
 * default (error) case are in elided lines of this truncated listing.
 */
2078 cxm_read_ivar(device_t dev, device_t child, int index, uintptr_t* val)
2080 struct cxm_softc *sc;
2082 /* Get the device data */
2083 sc = device_get_softc(dev);
2086 case CXM_IVAR_BHANDLE:
2087 *(bus_space_handle_t **)val = &sc->bhandle;
2091 *(bus_space_tag_t **)val = &sc->btag;
2094 case CXM_IVAR_IICBUS:
2095 *(device_t **)val = &sc->iicbus;
/*
 * Instance-variable write accessor: only CXM_IVAR_IICBUS is writable;
 * a NULL val clears the cached iicbus handle.
 * NOTE(review): switch header, breaks and the read-only/default cases
 * fall in elided lines of this truncated listing.
 */
2107 cxm_write_ivar(device_t dev, device_t child, int index, uintptr_t val)
2109 struct cxm_softc *sc;
2111 /* Get the device data */
2112 sc = device_get_softc(dev);
2115 case CXM_IVAR_BHANDLE:
2121 case CXM_IVAR_IICBUS:
2124 sc->iicbus = val ? *(device_t *)val : NULL;
2135 /*---------------------------------------------------------
2137 ** Conexant iTVC15 / iTVC16 character device driver routines
2139 **---------------------------------------------------------
2142 #define UNIT(x) ((x) & 0x0f)
2143 #define FUNCTION(x) (x >> 4)
/*
 * open(2) entry for /dev/cxmN: look up the softc by unit, reject a
 * dead device, and mark the device busy to pin the driver.
 * NOTE(review): truncated listing — single-open/flag checks and return
 * statements are in elided lines.
 */
2149 cxm_open(struct dev_open_args *ap)
2151 cdev_t dev = ap->a_head.a_dev;
2153 struct cxm_softc *sc;
2155 unit = UNIT(minor(dev));
2157 /* Get the device data */
2158 sc = (struct cxm_softc*)devclass_get_softc(cxm_devclass, unit);
2160 /* the device is no longer valid/functioning */
2170 /* Record that the device is now busy */
2171 device_busy(devclass_get_device(cxm_devclass, unit));
/*
 * close(2) entry: stop the encoder, reset the encoder ring-buffer
 * bookkeeping (offset/read/write), clear the signal-delivery process,
 * and drop the busy reference taken in cxm_open.
 */
2181 cxm_close(struct dev_close_args *ap)
2183 cdev_t dev = ap->a_head.a_dev;
2185 struct cxm_softc *sc;
2187 unit = UNIT(minor(dev));
2189 /* Get the device data */
2190 sc = (struct cxm_softc*)devclass_get_softc(cxm_devclass, unit);
2192 /* the device is no longer valid/functioning */
2196 if (cxm_stop_encoder(sc) < 0)
/* Empty the ring buffer: read == write means "no data pending". */
2199 sc->enc_pool.offset = 0;
2200 sc->enc_pool.read = 0;
2201 sc->enc_pool.write = 0;
2203 sc->enc_proc = NULL;
2206 device_unbusy(devclass_get_device(cxm_devclass, unit));
/*
 * read(2) entry: start the encoder on first read, sleep until the DMA
 * ring buffer has data (unless non-blocking), then uiomove() complete
 * or partial buffers to userland and advance the ring-buffer read
 * index.  NOTE(review): truncated listing — several declarations,
 * error returns and loop-exit breaks are in elided lines.
 */
2218 cxm_read(struct dev_read_args *ap)
2220 cdev_t dev = ap->a_head.a_dev;
2221 int buffers_available;
2225 unsigned int current;
2229 struct cxm_softc *sc;
2231 unit = UNIT(minor(dev));
2233 /* Get the device data */
2234 sc = (struct cxm_softc*)devclass_get_softc(cxm_devclass, unit);
2236 /* the device is no longer valid/functioning */
2240 /* Only trigger the encoder if the ring buffer is empty */
2241 if (!sc->encoding && sc->enc_pool.read == sc->enc_pool.write) {
2242 if (cxm_start_encoder(sc) < 0)
2244 if (ap->a_ioflag & IO_NDELAY)
2248 buffers_available = 0;
/* Block until the interrupt handler queues at least one buffer. */
2251 while (sc->enc_pool.read == sc->enc_pool.write) {
2252 error = tsleep(&sc->enc_pool.read, PCATCH, "cxmrd", 0);
2260 * Determine the number of buffers available at this * instant *
2261 * taking in consideration that the ring buffer wraps.
2263 buffers_available = sc->enc_pool.write - sc->enc_pool.read;
2264 if (buffers_available < 0)
2265 buffers_available += CXM_SG_BUFFERS;
/* offset is the resume point inside a partially-read buffer. */
2268 offset = sc->enc_pool.offset;
2270 for (buffers_read = 0, i = sc->enc_pool.read;
2271 buffers_read != buffers_available && ap->a_uio->uio_resid;
2272 buffers_read++, i = (i + 1) % CXM_SG_BUFFERS) {
/* Fix up hardware byte order lazily, only for data actually read. */
2274 current = cxm_encoder_fixup_byte_order (sc, i, offset);
2276 nbytes = sc->enc_pool.bufs[current].size - offset;
2278 /* Don't transfer more than requested */
2279 if (nbytes > ap->a_uio->uio_resid)
2280 nbytes = ap->a_uio->uio_resid;
2282 error = uiomove(sc->enc_pool.bufs[current].vaddr + offset,
2289 /* Handle a partial read of a buffer */
2290 if (!ap->a_uio->uio_resid && offset != sc->enc_pool.bufs[i].size)
2296 sc->enc_pool.offset = offset;
2298 /* Update the books */
2300 sc->enc_pool.read = (sc->enc_pool.read + buffers_read)
/*
 * ioctl(2) entry: implements the bktr/meteor-compatible control
 * interface — audio source/mute, brightness, chroma saturation,
 * contrast, hue, capture start/stop, capture area (profile select),
 * video format queries, video input select, signal delivery, FM radio
 * frequency, and TV tuner (channel set, channel, AFC, frequency,
 * status) plus the IR remote key fetch.
 * NOTE(review): this listing is heavily truncated — case labels,
 * breaks, error returns and several declarations are in elided lines;
 * comments below describe only the visible logic.
 */
2312 cxm_ioctl(struct dev_ioctl_args *ap)
2314 cdev_t dev = ap->a_head.a_dev;
2316 int chroma_saturation;
2326 struct cxm_softc *sc;
2327 enum cxm_source source;
2328 struct bktr_capture_area *cap;
2329 struct bktr_remote *remote;
2331 unit = UNIT(minor(dev));
2333 /* Get the device data */
2334 sc = (struct cxm_softc*)devclass_get_softc(cxm_devclass, unit);
2336 /* the device is no longer valid/functioning */
2340 switch (ap->a_cmd) {
/* Audio source query: map driver source to bktr AUDIO_* constants. */
2342 switch (cxm_msp_selected_source(sc)) {
2343 case cxm_tuner_source:
2344 *(int *) ap->a_data = AUDIO_TUNER;
2347 case cxm_line_in_source_composite:
2348 case cxm_line_in_source_svideo:
2349 *(int *) ap->a_data = AUDIO_EXTERN;
2353 *(int *) ap->a_data = AUDIO_INTERN;
2360 if (cxm_msp_is_muted(sc) == 1)
2361 *(int *) ap->a_data |= AUDIO_MUTE;
/* Audio source select / mute control. */
2365 source = cxm_unknown_source;
2367 switch (*(int *) ap->a_data) {
2369 source = cxm_tuner_source;
2373 source = cxm_line_in_source_composite;
2377 source = cxm_fm_source;
2381 if (cxm_msp_mute(sc) < 0)
2386 if (cxm_msp_unmute(sc) < 0)
2397 * Switching between audio + video and audio only
2398 * subtypes isn't supported while encoding.
2401 if (source != sc->source
2402 && (source == cxm_fm_source
2403 || sc->source == cxm_fm_source))
2407 if (cxm_pause_encoder(sc) < 0)
2410 if (cxm_msp_select_source(sc, source) < 0)
2413 if (source == cxm_fm_source)
2414 sc->source = source;
2416 result = cxm_encoder_wait_for_lock(sc);
2419 else if (result == 0)
2422 if (cxm_unpause_encoder(sc) < 0)
/* Brightness get/set, bktr scale (see conversion comments below). */
2427 brightness = cxm_saa7115_get_brightness(sc);
2433 * Brooktree brightness:
2434 * 0x80 = -50.0%, 0x00 = +0.0%, 0x7f = +49.6%
2436 *(int *)ap->a_data = (int)(unsigned char)brightness - 128;
2442 * Brooktree brightness:
2443 * 0x80 = -50.0%, 0x00 = +0.0%, 0x7f = +49.6%
2445 brightness = *(int *)ap->a_data + 128;
2447 if (cxm_saa7115_set_brightness(sc, brightness) < 0)
2452 brightness = cxm_saa7115_get_brightness(sc);
2457 *(unsigned char *)ap->a_data = (unsigned char)brightness;
2461 brightness = *(unsigned char *)ap->a_data;
2463 if (cxm_saa7115_set_brightness(sc, brightness) < 0)
/* Chroma saturation get/set with Brooktree scale conversion. */
2468 chroma_saturation = cxm_saa7115_get_chroma_saturation(sc);
2470 if (chroma_saturation < 0)
2474 * Brooktree chroma saturation:
2475 * 0x000 = 0%, 0x0fe = 100%, 0x1ff = 201.18%
2477 *(int *)ap->a_data = ((signed char)chroma_saturation > 0)
2478 ? (chroma_saturation * 4 - 2) : 0;
2484 * Brooktree chroma saturation:
2485 * 0x000 = 0%, 0x0fe = 100%, 0x1ff = 201.18%
2487 chroma_saturation = (*(int *)ap->a_data & 0x1ff) < 510
2488 ? ((*(int *)ap->a_data & 0x1ff) + 2) / 4 : 127;
2490 if (cxm_saa7115_set_chroma_saturation(sc, chroma_saturation)
2497 chroma_saturation = cxm_saa7115_get_chroma_saturation(sc);
2499 if (chroma_saturation < 0)
2502 *(unsigned char *)ap->a_data = (unsigned char)chroma_saturation;
2506 chroma_saturation = *(unsigned char *)ap->a_data;
2508 if (cxm_saa7115_set_chroma_saturation(sc, chroma_saturation)
/* Contrast get/set (raw SAA7115 value). */
2514 contrast = cxm_saa7115_get_contrast(sc);
2519 *(unsigned char *)ap->a_data = (unsigned char)contrast;
2523 contrast = *(unsigned char *)ap->a_data;
2525 if (cxm_saa7115_set_contrast(sc, contrast) < 0)
/* Hue get/set, both int and signed char flavors. */
2530 hue = cxm_saa7115_get_hue(sc);
2535 *(int *)ap->a_data = (signed char)hue;
2539 hue = *(int *)ap->a_data;
2541 if (cxm_saa7115_set_hue(sc, hue) < 0)
2546 hue = cxm_saa7115_get_hue(sc);
2551 *(signed char *)ap->a_data = (signed char)hue;
2555 hue = *(signed char *)ap->a_data;
2557 if (cxm_saa7115_set_hue(sc, hue) < 0)
/* Capture control: continuous start / stop. */
2562 switch (*(int *) ap->a_data) {
2563 case METEOR_CAP_CONTINOUS:
2564 if (cxm_start_encoder(sc) < 0)
2568 case METEOR_CAP_STOP_CONT:
2569 if (cxm_stop_encoder(sc) < 0)
2578 case BT848_GCAPAREA:
2579 cap = (struct bktr_capture_area *)ap->a_data;
2580 memset (cap, 0, sizeof (*cap));
2583 cap->x_size = sc->profile->width;
2584 cap->y_size = sc->profile->height;
2587 case BT848_SCAPAREA:
/* Capture size must be offset-free and macroblock aligned. */
2591 cap = (struct bktr_capture_area *)ap->a_data;
2592 if (cap->x_offset || cap->y_offset
2593 || (cap->x_size % CXM_MACROBLOCK_WIDTH)
2594 || (cap->y_size % CXM_MACROBLOCK_HEIGHT))
2598 * Setting the width and height has the side effect of
2599 * chosing between the VCD, SVCD, and DVD profiles.
2602 for (i = 0; i < NUM_ELEMENTS(codec_profiles); i++)
2603 if (codec_profiles[i]->width == cap->x_size
2604 && codec_profiles[i]->height == cap->y_size)
2607 if (i >= NUM_ELEMENTS(codec_profiles))
2610 sc->profile = codec_profiles[i];
/* Report detected video standard in BT848 IFORM terms. */
2614 switch (cxm_saa7115_detected_format(sc)) {
2615 case cxm_ntsc_60hz_source_format:
2616 *(unsigned long *)ap->a_data = BT848_IFORM_F_NTSCM;
2619 case cxm_pal_50hz_source_format:
2620 *(unsigned long *)ap->a_data = BT848_IFORM_F_PALBDGHI;
2623 case cxm_secam_50hz_source_format:
2624 *(unsigned long *)ap->a_data = BT848_IFORM_F_SECAM;
2627 case cxm_pal_60hz_source_format:
2628 *(unsigned long *)ap->a_data = BT848_IFORM_F_PALM;
2631 case cxm_bw_50hz_source_format:
2632 case cxm_bw_60hz_source_format:
2633 case cxm_ntsc_50hz_source_format:
2634 *(unsigned long *)ap->a_data = BT848_IFORM_F_AUTO;
/* Report detected video standard in METEOR_FMT terms. */
2643 switch (cxm_saa7115_detected_format(sc)) {
2644 case cxm_ntsc_60hz_source_format:
2645 *(unsigned long *)ap->a_data = METEOR_FMT_NTSC;
2648 case cxm_pal_50hz_source_format:
2649 *(unsigned long *)ap->a_data = METEOR_FMT_PAL;
2652 case cxm_secam_50hz_source_format:
2653 *(unsigned long *)ap->a_data = METEOR_FMT_SECAM;
2656 case cxm_bw_50hz_source_format:
2657 case cxm_bw_60hz_source_format:
2658 case cxm_ntsc_50hz_source_format:
2659 case cxm_pal_60hz_source_format:
2660 *(unsigned long *)ap->a_data = METEOR_FMT_AUTOMODE;
2669 fps = cxm_saa7115_detected_fps(sc);
2674 *(unsigned short *)ap->a_data = fps;
/* Report current video input as METEOR_INPUT_DEV*. */
2678 switch (sc->source) {
2679 case cxm_tuner_source:
2680 *(unsigned long *)ap->a_data = METEOR_INPUT_DEV1;
2683 case cxm_line_in_source_composite:
2684 *(unsigned long *)ap->a_data = METEOR_INPUT_DEV2;
2687 case cxm_line_in_source_svideo:
2688 *(unsigned long *)ap->a_data = METEOR_INPUT_DEV_SVIDEO;
/* Select video input; mirrors the audio-source switch above. */
2697 source = cxm_unknown_source;
2699 switch (*(unsigned long *)ap->a_data & 0xf000) {
2700 case METEOR_INPUT_DEV1:
2701 source = cxm_tuner_source;
2704 case METEOR_INPUT_DEV2:
2705 source = cxm_line_in_source_composite;
2708 case METEOR_INPUT_DEV_SVIDEO:
2709 source = cxm_line_in_source_svideo;
2719 * Switching between audio + video and audio only
2720 * subtypes isn't supported while encoding.
2723 if (source != sc->source
2724 && (source == cxm_fm_source
2725 || sc->source == cxm_fm_source))
2729 if (cxm_pause_encoder(sc) < 0)
2732 if (cxm_saa7115_select_source(sc, source) < 0)
2734 if (cxm_msp_select_source(sc, source) < 0)
2736 sc->source = source;
2738 result = cxm_encoder_wait_for_lock(sc);
2741 else if (result == 0)
2744 if (cxm_unpause_encoder(sc) < 0)
/* Async-signal delivery configuration (METEOR signal interface). */
2749 *(unsigned int *)ap->a_data = sc->enc_signal;
2753 sig = *(unsigned int *)ap->a_data;
2755 if (!_SIG_VALID(sig))
2759 * Historically, applications used METEOR_SIG_MODE_MASK
2760 * to reset signal delivery.
2762 if (sig == METEOR_SIG_MODE_MASK)
2766 sc->enc_proc = sig ? curproc : NULL;
2767 sc->enc_signal = sig;
/* FM radio frequency get/set; tuner_freq is kept in kHz. */
2772 /* Convert from kHz to MHz * 100 */
2773 freq = sc->tuner_freq / 10;
2775 *(unsigned int *)ap->a_data = freq;
2779 if (sc->source == cxm_fm_source)
2780 if (cxm_pause_encoder(sc) < 0)
2783 /* Convert from MHz * 100 to kHz */
2784 freq = *(unsigned int *)ap->a_data * 10;
2786 if (cxm_tuner_select_frequency(sc, cxm_tuner_fm_freq_type,
2791 * Explicitly wait for the tuner lock so we
2792 * can indicate if there's a station present.
2794 if (cxm_tuner_wait_for_lock(sc) < 0)
2797 result = cxm_encoder_wait_for_lock(sc);
2800 else if (result == 0)
2803 if (sc->source == cxm_fm_source)
2804 if (cxm_unpause_encoder(sc) < 0)
2808 case TVTUNER_GETAFC:
2809 *(int *)ap->a_data = sc->tuner_afc;
2812 case TVTUNER_SETAFC:
2813 sc->tuner_afc = (*(int *)ap->a_data != 0);
2816 case TVTUNER_GETTYPE:
2817 *(unsigned int *)ap->a_data = cxm_tuner_selected_channel_set(sc);
2820 case TVTUNER_SETTYPE:
2821 if (cxm_tuner_select_channel_set(sc, *(unsigned int *)ap->a_data) < 0)
2825 case TVTUNER_SETCHNL:
2826 if (sc->source == cxm_tuner_source)
2827 if (cxm_pause_encoder(sc) < 0)
2830 if (cxm_tuner_select_channel(sc, *(unsigned int *)ap->a_data) < 0)
2834 if (cxm_tuner_apply_afc(sc) < 0)
2838 * Explicitly wait for the tuner lock so we
2839 * can indicate if there's a station present.
2841 if (cxm_tuner_wait_for_lock(sc) < 0)
2844 result = cxm_encoder_wait_for_lock(sc);
2847 else if (result == 0)
2850 if (sc->source == cxm_tuner_source)
2851 if (cxm_unpause_encoder(sc) < 0)
2855 case TVTUNER_GETFREQ:
2856 /* Convert from kHz to MHz * 16 */
2857 freq = (sc->tuner_freq * 16) / 1000;
2859 *(unsigned int *)ap->a_data = freq;
2862 case TVTUNER_SETFREQ:
2863 if (sc->source == cxm_tuner_source)
2864 if (cxm_pause_encoder(sc) < 0)
2867 /* Convert from MHz * 16 to kHz */
2868 freq = (*(unsigned int *)ap->a_data * 1000) / 16;
2870 if (cxm_tuner_select_frequency(sc, cxm_tuner_tv_freq_type,
2875 * Explicitly wait for the tuner lock so we
2876 * can indicate if there's a station present.
2878 if (cxm_tuner_wait_for_lock(sc) < 0)
2881 result = cxm_encoder_wait_for_lock(sc);
2884 else if (result == 0)
2887 if (sc->source == cxm_tuner_source)
2888 if (cxm_unpause_encoder(sc) < 0)
2893 case TVTUNER_GETSTATUS:
2894 status = cxm_tuner_status(sc);
2897 *(unsigned long *)ap->a_data = status & 0xff;
/* Fetch the most recent IR remote key press. */
2901 remote = (struct bktr_remote *)ap->a_data;
2902 if (cxm_ir_key(sc, (char *)remote, sizeof(*remote)) < 0)
/*
 * poll(2) entry: readable when the encoder ring buffer is non-empty;
 * otherwise record the selector for wakeup.  NOTE(review): declarations
 * of unit/revents, the dead-device return and the final return are in
 * elided lines of this truncated listing.
 */
2915 cxm_poll(struct dev_poll_args *ap)
2917 cdev_t dev = ap->a_head.a_dev;
2920 struct cxm_softc *sc;
2922 unit = UNIT(minor(dev));
2924 /* Get the device data */
2925 sc = (struct cxm_softc*)devclass_get_softc(cxm_devclass, unit);
2927 /* the device is no longer valid/functioning */
2934 if (ap->a_events & (POLLIN | POLLRDNORM)) {
/* Ring empty (read == write): register for select/poll wakeup. */
2935 if (sc->enc_pool.read == sc->enc_pool.write)
2936 selrecord(curthread, &sc->enc_sel);
2938 revents = ap->a_events & (POLLIN | POLLRDNORM);
/* kqueue filter ops: EVFILT_READ on /dev/cxmN (attach done in
 * cxm_kqfilter; detach/event handlers below). */
2945 static struct filterops cxm_filterops =
2946 { 1, NULL, cxm_filter_detach, cxm_filter };
/*
 * kqueue attach: accept EVFILT_READ, stash the softc in kn_hook, and
 * link the knote onto the encoder selinfo's note list.  Other filter
 * types are rejected with EOPNOTSUPP.  NOTE(review): case labels,
 * break/return statements and locking (if any) are in elided lines.
 */
2949 cxm_kqfilter(struct dev_kqfilter_args *ap)
2951 cdev_t dev = ap->a_head.a_dev;
2952 struct knote *kn = ap->a_kn;
2953 struct cxm_softc *sc;
2954 struct klist *klist;
2959 switch (kn->kn_filter) {
2961 unit = UNIT(minor(dev));
2962 /* Get the device data */
2963 sc = (struct cxm_softc *)devclass_get_softc(cxm_devclass, unit);
2964 kn->kn_fop = &cxm_filterops;
2965 kn->kn_hook = (caddr_t)sc;
2968 ap->a_result = EOPNOTSUPP;
2973 klist = &sc->enc_sel.si_note;
2974 SLIST_INSERT_HEAD(klist, kn, kn_selnext);
/*
 * kqueue detach: unlink the knote from the encoder selinfo's note
 * list (inverse of the insert in cxm_kqfilter).
 */
2981 cxm_filter_detach(struct knote *kn)
2983 struct cxm_softc *sc = (struct cxm_softc *)kn->kn_hook;
2984 struct klist *klist;
2987 klist = &sc->enc_sel.si_note;
2988 SLIST_REMOVE(klist, kn, knote, kn_selnext);
2993 cxm_filter(struct knote *kn, long hint)
2995 struct cxm_softc *sc = (struct cxm_softc *)kn->kn_hook;
2999 /* the device is no longer valid/functioning */
3000 kn->kn_flags |= EV_EOF;
3005 if (sc->enc_pool.read != sc->enc_pool.write)