/*
 * Bus independent FreeBSD shim for the aic7xxx based Adaptec SCSI controllers
 *
 * Copyright (c) 1994-2001 Justin T. Gibbs.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions, and the following disclaimer,
 *    without modification.
 * 2. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * Alternatively, this software may be distributed under the terms of the
 * GNU Public License ("GPL").
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $Id: //depot/aic7xxx/freebsd/dev/aic7xxx/aic7xxx_osm.c#20 $
 *
 * $FreeBSD: src/sys/dev/aic7xxx/aic7xxx_osm.c,v 1.45 2006/09/05 20:28:28 mjacob Exp $
 * $DragonFly: src/sys/dev/disk/aic7xxx/aic7xxx_osm.c,v 1.20 2007/07/11 23:46:58 dillon Exp $
 */

#include "aic7xxx_osm.h"
#include "aic7xxx_inline.h"

#include <sys/kthread.h>

#ifndef AHC_TMODE_ENABLE
#define AHC_TMODE_ENABLE 0
#endif

#include "aic_osm_lib.c"

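/*
 * Stash our SCB pointer in the CCB's first SIM-private pointer field so
 * that the SCB backing a request can be found later, e.g. when an abort
 * is requested for the CCB.
 */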
#define ccb_scb_ptr spriv_ptr0

devclass_t ahc_devclass;

#if 0
static void     ahc_dump_targcmd(struct target_cmd *cmd);
#endif
static int      ahc_modevent(module_t mod, int type, void *data);
static void     ahc_action(struct cam_sim *sim, union ccb *ccb);
static void     ahc_get_tran_settings(struct ahc_softc *ahc,
                                      int our_id, char channel,
                                      struct ccb_trans_settings *cts);
static void     ahc_async(void *callback_arg, uint32_t code,
                          struct cam_path *path, void *arg);
static void     ahc_execute_scb(void *arg, bus_dma_segment_t *dm_segs,
                                int nsegments, int error);
static void     ahc_poll(struct cam_sim *sim);
static void     ahc_setup_data(struct ahc_softc *ahc, struct cam_sim *sim,
                               struct ccb_scsiio *csio, struct scb *scb);
static void     ahc_abort_ccb(struct ahc_softc *ahc, struct cam_sim *sim,
                              union ccb *ccb);
static int      ahc_create_path(struct ahc_softc *ahc,
                                char channel, u_int target, u_int lun,
                                struct cam_path **path);


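/*
 * Build a CAM path to the given target and lun on whichever SIM backs
 * the requested channel ('A' or 'B').
 */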
static int
ahc_create_path(struct ahc_softc *ahc, char channel, u_int target,
                u_int lun, struct cam_path **path)
{
        path_id_t path_id;

        if (channel == 'B')
                path_id = cam_sim_path(ahc->platform_data->sim_b);
        else
                path_id = cam_sim_path(ahc->platform_data->sim);

        return (xpt_create_path(path, /*periph*/NULL,
                                path_id, target, lun));
}

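/*
 * Allocate the controller's IRQ resource and hook up the interrupt
 * handler.  Edge triggered interrupts cannot be shared.
 */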
int
ahc_map_int(struct ahc_softc *ahc)
{
        int error;
        int zero;
        int shareable;

        zero = 0;
        shareable = (ahc->flags & AHC_EDGE_INTERRUPT) ? 0: RF_SHAREABLE;
        ahc->platform_data->irq =
            bus_alloc_resource_any(ahc->dev_softc, SYS_RES_IRQ, &zero,
                                   RF_ACTIVE | shareable);
        if (ahc->platform_data->irq == NULL) {
                device_printf(ahc->dev_softc,
                              "bus_alloc_resource() failed to allocate IRQ\n");
                return (ENOMEM);
        }
        ahc->platform_data->irq_res_type = SYS_RES_IRQ;

        /* Hook up our interrupt handler */
        error = bus_setup_intr(ahc->dev_softc, ahc->platform_data->irq,
                               0, ahc_platform_intr, ahc,
                               &ahc->platform_data->ih, NULL);

        if (error != 0)
                device_printf(ahc->dev_softc, "bus_setup_intr() failed: %d\n",
                              error);
        return (error);
}

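/*
 * Map the I/O port register window used by aic7770 based controllers.
 */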
int
aic7770_map_registers(struct ahc_softc *ahc, u_int unused_ioport_arg)
{
        struct  resource *regs;
        int     rid;

        rid = 0;
        regs = bus_alloc_resource_any(ahc->dev_softc, SYS_RES_IOPORT, &rid,
                                      RF_ACTIVE);
        if (regs == NULL) {
                device_printf(ahc->dev_softc, "Unable to map I/O space?!\n");
                return ENOMEM;
        }
        ahc->platform_data->regs_res_type = SYS_RES_IOPORT;
        ahc->platform_data->regs_res_id = rid;
        ahc->platform_data->regs = regs;
        ahc->tag = rman_get_bustag(regs);
        ahc->bsh = rman_get_bushandle(regs);
        return (0);
}

/*
 * Attach all the sub-devices we can find
 */
int
ahc_attach(struct ahc_softc *ahc)
{
        char   ahc_info[256];
        struct ccb_setasync csa;
        int bus_id;
        int bus_id2;
        struct cam_sim *sim;
        struct cam_sim *sim2;
        struct cam_path *path;
        struct cam_path *path2;
        int count;

        count = 0;
        sim = NULL;
        sim2 = NULL;
        path = NULL;
        path2 = NULL;

        /*
         * Create a thread to perform all recovery.
         */
        if (ahc_spawn_recovery_thread(ahc) != 0)
                goto fail;

        ahc_controller_info(ahc, ahc_info);
        kprintf("%s\n", ahc_info);
        ahc_lock();

        /*
         * Attach secondary channel first if the user has
         * declared it the primary channel.
         */
        if ((ahc->features & AHC_TWIN) != 0
         && (ahc->flags & AHC_PRIMARY_CHANNEL) != 0) {
                bus_id = 1;
                bus_id2 = 0;
        } else {
                bus_id = 0;
                bus_id2 = 1;
        }

        /*
         * Construct our first channel SIM entry
         */
        sim = cam_sim_alloc(ahc_action, ahc_poll, "ahc", ahc,
                            device_get_unit(ahc->dev_softc),
                            1, AHC_MAX_QUEUE, NULL);
        if (sim == NULL)
                goto fail;

        if (xpt_bus_register(sim, bus_id) != CAM_SUCCESS) {
                cam_sim_free(sim);
                sim = NULL;
                goto fail;
        }

        if (xpt_create_path(&path, /*periph*/NULL,
                            cam_sim_path(sim), CAM_TARGET_WILDCARD,
                            CAM_LUN_WILDCARD) != CAM_REQ_CMP) {
                xpt_bus_deregister(cam_sim_path(sim));
                cam_sim_free(sim);
                sim = NULL;
                goto fail;
        }

        xpt_setup_ccb(&csa.ccb_h, path, /*priority*/5);
        csa.ccb_h.func_code = XPT_SASYNC_CB;
        csa.event_enable = AC_LOST_DEVICE;
        csa.callback = ahc_async;
        csa.callback_arg = sim;
        xpt_action((union ccb *)&csa);
        count++;

        if (ahc->features & AHC_TWIN) {
                sim2 = cam_sim_alloc(ahc_action, ahc_poll, "ahc",
                                    ahc, device_get_unit(ahc->dev_softc), 1,
                                    AHC_MAX_QUEUE, NULL);

                if (sim2 == NULL) {
                        kprintf("ahc_attach: Unable to attach second "
                               "bus due to resource shortage\n");
                        goto fail;
                }

                if (xpt_bus_register(sim2, bus_id2) != CAM_SUCCESS) {
                        kprintf("ahc_attach: Unable to attach second "
                               "bus due to resource shortage\n");
                        /*
                         * We do not want to destroy the device queue
                         * because the first bus is using it.
                         */
                        cam_sim_free(sim2);
                        goto fail;
                }

                if (xpt_create_path(&path2, /*periph*/NULL,
                                    cam_sim_path(sim2),
                                    CAM_TARGET_WILDCARD,
                                    CAM_LUN_WILDCARD) != CAM_REQ_CMP) {
                        xpt_bus_deregister(cam_sim_path(sim2));
                        cam_sim_free(sim2);
                        sim2 = NULL;
                        goto fail;
                }
                xpt_setup_ccb(&csa.ccb_h, path2, /*priority*/5);
                csa.ccb_h.func_code = XPT_SASYNC_CB;
                csa.event_enable = AC_LOST_DEVICE;
                csa.callback = ahc_async;
                csa.callback_arg = sim2;
                xpt_action((union ccb *)&csa);
                count++;
        }

fail:
        if ((ahc->features & AHC_TWIN) != 0
         && (ahc->flags & AHC_PRIMARY_CHANNEL) != 0) {
                ahc->platform_data->sim_b = sim;
                ahc->platform_data->path_b = path;
                ahc->platform_data->sim = sim2;
                ahc->platform_data->path = path2;
        } else {
                ahc->platform_data->sim = sim;
                ahc->platform_data->path = path;
                ahc->platform_data->sim_b = sim2;
                ahc->platform_data->path_b = path2;
        }

        if (count != 0) {
                /* We have to wait until after any system dumps... */
                ahc->platform_data->eh =
                    EVENTHANDLER_REGISTER(shutdown_post_sync, ahc_shutdown,
                                          ahc, SHUTDOWN_PRI_DRIVER);
                ahc_intr_enable(ahc, TRUE);
        }

        ahc_unlock();
        return (count);
}

/*
 * Catch an interrupt from the adapter
 */
void
ahc_platform_intr(void *arg)
{
        struct  ahc_softc *ahc;

        ahc = (struct ahc_softc *)arg;
        ahc_intr(ahc);
}

/*
 * We have an scb which has been processed by the
 * adapter; now we look to see how the operation
 * went.
 */
void
ahc_done(struct ahc_softc *ahc, struct scb *scb)
{
        union ccb *ccb;

        CAM_DEBUG(scb->io_ctx->ccb_h.path, CAM_DEBUG_TRACE,
                  ("ahc_done - scb %d\n", scb->hscb->tag));

        ccb = scb->io_ctx;
        LIST_REMOVE(scb, pending_links);
        if ((scb->flags & SCB_TIMEDOUT) != 0)
                LIST_REMOVE(scb, timedout_links);
        if ((scb->flags & SCB_UNTAGGEDQ) != 0) {
                struct scb_tailq *untagged_q;
                int target_offset;

                target_offset = SCB_GET_TARGET_OFFSET(ahc, scb);
                untagged_q = &ahc->untagged_queues[target_offset];
                TAILQ_REMOVE(untagged_q, scb, links.tqe);
                scb->flags &= ~SCB_UNTAGGEDQ;
                ahc_run_untagged_queue(ahc, untagged_q);
        }

        callout_stop(&ccb->ccb_h.timeout_ch);

        if ((ccb->ccb_h.flags & CAM_DIR_MASK) != CAM_DIR_NONE) {
                bus_dmasync_op_t op;

                if ((ccb->ccb_h.flags & CAM_DIR_MASK) == CAM_DIR_IN)
                        op = BUS_DMASYNC_POSTREAD;
                else
                        op = BUS_DMASYNC_POSTWRITE;
                bus_dmamap_sync(ahc->buffer_dmat, scb->dmamap, op);
                bus_dmamap_unload(ahc->buffer_dmat, scb->dmamap);
        }

        if (ccb->ccb_h.func_code == XPT_CONT_TARGET_IO) {
                struct cam_path *ccb_path;

                /*
                 * If we have finally disconnected, clean up our
                 * pending device state.
                 * XXX - There may be error states that cause us
                 *       to remain connected.
                 */
                ccb_path = ccb->ccb_h.path;
                if (ahc->pending_device != NULL
                 && xpt_path_comp(ahc->pending_device->path, ccb_path) == 0) {

                        if ((ccb->ccb_h.flags & CAM_SEND_STATUS) != 0) {
                                ahc->pending_device = NULL;
                        } else {
                                if (bootverbose) {
                                        xpt_print_path(ccb->ccb_h.path);
                                        kprintf("Still connected\n");
                                }
                                aic_freeze_ccb(ccb);
                        }
                }

                if (aic_get_transaction_status(scb) == CAM_REQ_INPROG)
                        ccb->ccb_h.status |= CAM_REQ_CMP;
                ccb->ccb_h.status &= ~CAM_SIM_QUEUED;
                ahc_free_scb(ahc, scb);
                xpt_done(ccb);
                return;
        }

        /*
         * If the recovery SCB completes, we are no longer
         * in timeout recovery.
         */
        if ((scb->flags & SCB_RECOVERY_SCB) != 0) {
                struct  scb *list_scb;

                ahc->scb_data->recovery_scbs--;

                if (aic_get_transaction_status(scb) == CAM_BDR_SENT
                 || aic_get_transaction_status(scb) == CAM_REQ_ABORTED)
                        aic_set_transaction_status(scb, CAM_CMD_TIMEOUT);

                if (ahc->scb_data->recovery_scbs == 0) {
                        /*
                         * All recovery actions have completed successfully,
                         * so reinstate the timeouts for all other pending
                         * commands.
                         */
                         LIST_FOREACH(list_scb, &ahc->pending_scbs,
                                      pending_links) {

                                aic_scb_timer_reset(list_scb,
                                                    aic_get_timeout(scb));
                        }

                        ahc_print_path(ahc, scb);
                        kprintf("no longer in timeout, status = %x\n",
                               ccb->ccb_h.status);
                }
        }

        /* Don't clobber any existing error state */
        if (aic_get_transaction_status(scb) == CAM_REQ_INPROG) {
                ccb->ccb_h.status |= CAM_REQ_CMP;
        } else if ((scb->flags & SCB_SENSE) != 0) {
                /*
                 * We performed autosense retrieval.
                 *
                 * Zero any sense not transferred by the
                 * device.  The SCSI spec mandates that any
                 * untransferred data should be assumed to be
                 * zero.  Complete the 'bounce' of sense information
                 * through buffers accessible via bus-space by
                 * copying it into the client's csio.
                 */
                memset(&ccb->csio.sense_data, 0, sizeof(ccb->csio.sense_data));
                memcpy(&ccb->csio.sense_data,
                       ahc_get_sense_buf(ahc, scb),
                       (aic_le32toh(scb->sg_list->len) & AHC_SG_LEN_MASK)
                       - ccb->csio.sense_resid);
                scb->io_ctx->ccb_h.status |= CAM_AUTOSNS_VALID;
        }
        ccb->ccb_h.status &= ~CAM_SIM_QUEUED;
        ahc_free_scb(ahc, scb);
        xpt_done(ccb);
}

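/*
 * CAM action entry point.  Dispatch the incoming CCB based on its
 * function code: I/O requests and device resets are mapped onto SCBs,
 * while target mode, transfer settings, geometry, bus reset, and path
 * inquiry requests are handled in line.
 */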
static void
ahc_action(struct cam_sim *sim, union ccb *ccb)
{
        struct  ahc_softc *ahc;
        struct  ahc_tmode_lstate *lstate;
        u_int   target_id;
        u_int   our_id;

        CAM_DEBUG(ccb->ccb_h.path, CAM_DEBUG_TRACE, ("ahc_action\n"));

        ahc = (struct ahc_softc *)cam_sim_softc(sim);

        target_id = ccb->ccb_h.target_id;
        our_id = SIM_SCSI_ID(ahc, sim);

        switch (ccb->ccb_h.func_code) {
        /* Common cases first */
        case XPT_ACCEPT_TARGET_IO:      /* Accept Host Target Mode CDB */
        case XPT_CONT_TARGET_IO:/* Continue Host Target I/O Connection*/
        {
                struct     ahc_tmode_tstate *tstate;
                cam_status status;

                status = ahc_find_tmode_devs(ahc, sim, ccb, &tstate,
                                             &lstate, TRUE);

                if (status != CAM_REQ_CMP) {
                        if (ccb->ccb_h.func_code == XPT_CONT_TARGET_IO) {
                                /* Response from the black hole device */
                                tstate = NULL;
                                lstate = ahc->black_hole;
                        } else {
                                ccb->ccb_h.status = status;
                                xpt_done(ccb);
                                break;
                        }
                }
                if (ccb->ccb_h.func_code == XPT_ACCEPT_TARGET_IO) {

                        ahc_lock();
                        SLIST_INSERT_HEAD(&lstate->accept_tios, &ccb->ccb_h,
                                          sim_links.sle);
                        ccb->ccb_h.status = CAM_REQ_INPROG;
                        if ((ahc->flags & AHC_TQINFIFO_BLOCKED) != 0)
                                ahc_run_tqinfifo(ahc, /*paused*/FALSE);
                        ahc_unlock();
                        break;
                }

                /*
                 * The target_id represents the target we attempt to
                 * select.  In target mode, this is the initiator of
                 * the original command.
                 */
                our_id = target_id;
                target_id = ccb->csio.init_id;
                /* FALLTHROUGH */
        }
        case XPT_SCSI_IO:       /* Execute the requested I/O operation */
        case XPT_RESET_DEV:     /* Bus Device Reset the specified SCSI device */
        {
                struct  scb *scb;
                struct  hardware_scb *hscb;

                if ((ahc->flags & AHC_INITIATORROLE) == 0
                 && (ccb->ccb_h.func_code == XPT_SCSI_IO
                  || ccb->ccb_h.func_code == XPT_RESET_DEV)) {
                        ccb->ccb_h.status = CAM_PROVIDE_FAIL;
                        xpt_done(ccb);
                        return;
                }

                /*
                 * get an scb to use.
                 */
                ahc_lock();
                if ((scb = ahc_get_scb(ahc)) == NULL) {

                        xpt_freeze_simq(sim, /*count*/1);
                        ahc->flags |= AHC_RESOURCE_SHORTAGE;
                        ahc_unlock();
                        ccb->ccb_h.status = CAM_REQUEUE_REQ;
                        xpt_done(ccb);
                        return;
                }
                ahc_unlock();

                hscb = scb->hscb;

                CAM_DEBUG(ccb->ccb_h.path, CAM_DEBUG_SUBTRACE,
                          ("start scb(%p)\n", scb));
                scb->io_ctx = ccb;
                /*
                 * So we can find the SCB when an abort is requested
                 */
                ccb->ccb_h.ccb_scb_ptr = scb;

                /*
                 * Put all the arguments for the xfer in the scb
                 */
                hscb->control = 0;
                hscb->scsiid = BUILD_SCSIID(ahc, sim, target_id, our_id);
                hscb->lun = ccb->ccb_h.target_lun;
                if (ccb->ccb_h.func_code == XPT_RESET_DEV) {
                        hscb->cdb_len = 0;
                        scb->flags |= SCB_DEVICE_RESET;
                        hscb->control |= MK_MESSAGE;
                        ahc_execute_scb(scb, NULL, 0, 0);
                } else {
                        if (ccb->ccb_h.func_code == XPT_CONT_TARGET_IO) {
                                struct target_data *tdata;

                                tdata = &hscb->shared_data.tdata;
                                if (ahc->pending_device == lstate)
                                        scb->flags |= SCB_TARGET_IMMEDIATE;
                                hscb->control |= TARGET_SCB;
                                scb->flags |= SCB_TARGET_SCB;
                                tdata->target_phases = 0;
                                if ((ccb->ccb_h.flags & CAM_SEND_STATUS) != 0) {
                                        tdata->target_phases |= SPHASE_PENDING;
                                        tdata->scsi_status =
                                            ccb->csio.scsi_status;
                                }
                                if (ccb->ccb_h.flags & CAM_DIS_DISCONNECT)
                                        tdata->target_phases |= NO_DISCONNECT;

                                tdata->initiator_tag = ccb->csio.tag_id;
                        }
                        if (ccb->ccb_h.flags & CAM_TAG_ACTION_VALID)
                                hscb->control |= ccb->csio.tag_action;

                        ahc_setup_data(ahc, sim, &ccb->csio, scb);
                }
                break;
        }
        case XPT_NOTIFY_ACK:
        case XPT_IMMED_NOTIFY:
        {
                struct     ahc_tmode_tstate *tstate;
                struct     ahc_tmode_lstate *lstate;
                cam_status status;

                status = ahc_find_tmode_devs(ahc, sim, ccb, &tstate,
                                             &lstate, TRUE);

                if (status != CAM_REQ_CMP) {
                        ccb->ccb_h.status = status;
                        xpt_done(ccb);
                        break;
                }
                SLIST_INSERT_HEAD(&lstate->immed_notifies, &ccb->ccb_h,
                                  sim_links.sle);
                ccb->ccb_h.status = CAM_REQ_INPROG;
                ahc_send_lstate_events(ahc, lstate);
                break;
        }
        case XPT_EN_LUN:                /* Enable LUN as a target */
                ahc_handle_en_lun(ahc, sim, ccb);
                xpt_done(ccb);
                break;
        case XPT_ABORT:                 /* Abort the specified CCB */
        {
                ahc_abort_ccb(ahc, sim, ccb);
                break;
        }
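        /*
         * Two variants of the transfer settings code follow: the
         * AHC_NEW_TRAN_SETTINGS version works on the protocol/transport
         * specific ccb_trans_settings structures, while the older
         * version uses the flat ccb_trans_settings fields.  Both
         * validate the requested width, offset, and period before
         * programming the controller.
         */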
        case XPT_SET_TRAN_SETTINGS:
        {
#ifdef AHC_NEW_TRAN_SETTINGS
                struct  ahc_devinfo devinfo;
                struct  ccb_trans_settings *cts;
                struct  ccb_trans_settings_scsi *scsi;
                struct  ccb_trans_settings_spi *spi;
                struct  ahc_initiator_tinfo *tinfo;
                struct  ahc_tmode_tstate *tstate;
                uint16_t *discenable;
                uint16_t *tagenable;
                u_int   update_type;

                cts = &ccb->cts;
                scsi = &cts->proto_specific.scsi;
                spi = &cts->xport_specific.spi;
                ahc_compile_devinfo(&devinfo, SIM_SCSI_ID(ahc, sim),
                                    cts->ccb_h.target_id,
                                    cts->ccb_h.target_lun,
                                    SIM_CHANNEL(ahc, sim),
                                    ROLE_UNKNOWN);
                tinfo = ahc_fetch_transinfo(ahc, devinfo.channel,
                                            devinfo.our_scsiid,
                                            devinfo.target, &tstate);
                update_type = 0;
                if (cts->type == CTS_TYPE_CURRENT_SETTINGS) {
                        update_type |= AHC_TRANS_GOAL;
                        discenable = &tstate->discenable;
                        tagenable = &tstate->tagenable;
                        tinfo->curr.protocol_version =
                            cts->protocol_version;
                        tinfo->curr.transport_version =
                            cts->transport_version;
                        tinfo->goal.protocol_version =
                            cts->protocol_version;
                        tinfo->goal.transport_version =
                            cts->transport_version;
                } else if (cts->type == CTS_TYPE_USER_SETTINGS) {
                        update_type |= AHC_TRANS_USER;
                        discenable = &ahc->user_discenable;
                        tagenable = &ahc->user_tagenable;
                        tinfo->user.protocol_version =
                            cts->protocol_version;
                        tinfo->user.transport_version =
                            cts->transport_version;
                } else {
                        ccb->ccb_h.status = CAM_REQ_INVALID;
                        xpt_done(ccb);
                        break;
                }

                ahc_lock();

                if ((spi->valid & CTS_SPI_VALID_DISC) != 0) {
                        if ((spi->flags & CTS_SPI_FLAGS_DISC_ENB) != 0)
                                *discenable |= devinfo.target_mask;
                        else
                                *discenable &= ~devinfo.target_mask;
                }

                if ((scsi->valid & CTS_SCSI_VALID_TQ) != 0) {
                        if ((scsi->flags & CTS_SCSI_FLAGS_TAG_ENB) != 0)
                                *tagenable |= devinfo.target_mask;
                        else
                                *tagenable &= ~devinfo.target_mask;
                }

                if ((spi->valid & CTS_SPI_VALID_BUS_WIDTH) != 0) {
                        ahc_validate_width(ahc, /*tinfo limit*/NULL,
                                           &spi->bus_width, ROLE_UNKNOWN);
                        ahc_set_width(ahc, &devinfo, spi->bus_width,
                                      update_type, /*paused*/FALSE);
                }

                if ((spi->valid & CTS_SPI_VALID_PPR_OPTIONS) == 0) {
                        if (update_type == AHC_TRANS_USER)
                                spi->ppr_options = tinfo->user.ppr_options;
                        else
                                spi->ppr_options = tinfo->goal.ppr_options;
                }

                if ((spi->valid & CTS_SPI_VALID_SYNC_OFFSET) == 0) {
                        if (update_type == AHC_TRANS_USER)
                                spi->sync_offset = tinfo->user.offset;
                        else
                                spi->sync_offset = tinfo->goal.offset;
                }

                if ((spi->valid & CTS_SPI_VALID_SYNC_RATE) == 0) {
                        if (update_type == AHC_TRANS_USER)
                                spi->sync_period = tinfo->user.period;
                        else
                                spi->sync_period = tinfo->goal.period;
                }

                if (((spi->valid & CTS_SPI_VALID_SYNC_RATE) != 0)
                 || ((spi->valid & CTS_SPI_VALID_SYNC_OFFSET) != 0)) {
                        struct ahc_syncrate *syncrate;
                        u_int maxsync;

                        if ((ahc->features & AHC_ULTRA2) != 0)
                                maxsync = AHC_SYNCRATE_DT;
                        else if ((ahc->features & AHC_ULTRA) != 0)
                                maxsync = AHC_SYNCRATE_ULTRA;
                        else
                                maxsync = AHC_SYNCRATE_FAST;

                        if (spi->bus_width != MSG_EXT_WDTR_BUS_16_BIT)
                                spi->ppr_options &= ~MSG_EXT_PPR_DT_REQ;

                        syncrate = ahc_find_syncrate(ahc, &spi->sync_period,
                                                     &spi->ppr_options,
                                                     maxsync);
                        ahc_validate_offset(ahc, /*tinfo limit*/NULL,
                                            syncrate, &spi->sync_offset,
                                            spi->bus_width, ROLE_UNKNOWN);

                        /* We use a period of 0 to represent async */
                        if (spi->sync_offset == 0) {
                                spi->sync_period = 0;
                                spi->ppr_options = 0;
                        }

                        ahc_set_syncrate(ahc, &devinfo, syncrate,
                                         spi->sync_period, spi->sync_offset,
                                         spi->ppr_options, update_type,
                                         /*paused*/FALSE);
                }
                ahc_unlock();
                ccb->ccb_h.status = CAM_REQ_CMP;
                xpt_done(ccb);
#else
                struct    ahc_devinfo devinfo;
                struct    ccb_trans_settings *cts;
                struct    ahc_initiator_tinfo *tinfo;
                struct    ahc_tmode_tstate *tstate;
                uint16_t *discenable;
                uint16_t *tagenable;
                u_int     update_type;

                cts = &ccb->cts;
                ahc_compile_devinfo(&devinfo, SIM_SCSI_ID(ahc, sim),
                                    cts->ccb_h.target_id,
                                    cts->ccb_h.target_lun,
                                    SIM_CHANNEL(ahc, sim),
                                    ROLE_UNKNOWN);
                tinfo = ahc_fetch_transinfo(ahc, devinfo.channel,
                                            devinfo.our_scsiid,
                                            devinfo.target, &tstate);
                update_type = 0;
                if ((cts->flags & CCB_TRANS_CURRENT_SETTINGS) != 0) {
                        update_type |= AHC_TRANS_GOAL;
                        discenable = &tstate->discenable;
                        tagenable = &tstate->tagenable;
                } else if ((cts->flags & CCB_TRANS_USER_SETTINGS) != 0) {
                        update_type |= AHC_TRANS_USER;
                        discenable = &ahc->user_discenable;
                        tagenable = &ahc->user_tagenable;
                } else {
                        ccb->ccb_h.status = CAM_REQ_INVALID;
                        xpt_done(ccb);
                        break;
                }

                ahc_lock();

                if ((cts->valid & CCB_TRANS_DISC_VALID) != 0) {
                        if ((cts->flags & CCB_TRANS_DISC_ENB) != 0)
                                *discenable |= devinfo.target_mask;
                        else
                                *discenable &= ~devinfo.target_mask;
                }

                if ((cts->valid & CCB_TRANS_TQ_VALID) != 0) {
                        if ((cts->flags & CCB_TRANS_TAG_ENB) != 0)
                                *tagenable |= devinfo.target_mask;
                        else
                                *tagenable &= ~devinfo.target_mask;
                }

                if ((cts->valid & CCB_TRANS_BUS_WIDTH_VALID) != 0) {
                        ahc_validate_width(ahc, /*tinfo limit*/NULL,
                                           &cts->bus_width, ROLE_UNKNOWN);
                        ahc_set_width(ahc, &devinfo, cts->bus_width,
                                      update_type, /*paused*/FALSE);
                }

                if ((cts->valid & CCB_TRANS_SYNC_OFFSET_VALID) == 0) {
                        if (update_type == AHC_TRANS_USER)
                                cts->sync_offset = tinfo->user.offset;
                        else
                                cts->sync_offset = tinfo->goal.offset;
                }

                if ((cts->valid & CCB_TRANS_SYNC_RATE_VALID) == 0) {
                        if (update_type == AHC_TRANS_USER)
                                cts->sync_period = tinfo->user.period;
                        else
                                cts->sync_period = tinfo->goal.period;
                }

                if (((cts->valid & CCB_TRANS_SYNC_RATE_VALID) != 0)
                 || ((cts->valid & CCB_TRANS_SYNC_OFFSET_VALID) != 0)) {
                        struct ahc_syncrate *syncrate;
                        u_int ppr_options;
                        u_int maxsync;

                        if ((ahc->features & AHC_ULTRA2) != 0)
                                maxsync = AHC_SYNCRATE_DT;
                        else if ((ahc->features & AHC_ULTRA) != 0)
                                maxsync = AHC_SYNCRATE_ULTRA;
                        else
                                maxsync = AHC_SYNCRATE_FAST;

                        ppr_options = 0;
                        if (cts->sync_period <= 9
                         && cts->bus_width == MSG_EXT_WDTR_BUS_16_BIT)
                                ppr_options = MSG_EXT_PPR_DT_REQ;

                        syncrate = ahc_find_syncrate(ahc, &cts->sync_period,
                                                     &ppr_options,
                                                     maxsync);
                        ahc_validate_offset(ahc, /*tinfo limit*/NULL,
                                            syncrate, &cts->sync_offset,
                                            MSG_EXT_WDTR_BUS_8_BIT,
                                            ROLE_UNKNOWN);

                        /* We use a period of 0 to represent async */
                        if (cts->sync_offset == 0) {
                                cts->sync_period = 0;
                                ppr_options = 0;
                        }

                        if (ppr_options == MSG_EXT_PPR_DT_REQ
                         && tinfo->user.transport_version >= 3) {
                                tinfo->goal.transport_version =
                                    tinfo->user.transport_version;
                                tinfo->curr.transport_version =
                                    tinfo->user.transport_version;
                        }

                        ahc_set_syncrate(ahc, &devinfo, syncrate,
                                         cts->sync_period, cts->sync_offset,
                                         ppr_options, update_type,
                                         /*paused*/FALSE);
                }
                ahc_unlock();
                ccb->ccb_h.status = CAM_REQ_CMP;
                xpt_done(ccb);
#endif
                break;
        }
        case XPT_GET_TRAN_SETTINGS:
        /* Get default/user set transfer settings for the target */
        {

                ahc_lock();
                ahc_get_tran_settings(ahc, SIM_SCSI_ID(ahc, sim),
                                      SIM_CHANNEL(ahc, sim), &ccb->cts);
                ahc_unlock();
                xpt_done(ccb);
                break;
        }
        case XPT_CALC_GEOMETRY:
        {
                int extended;

                extended = SIM_IS_SCSIBUS_B(ahc, sim)
                         ? ahc->flags & AHC_EXTENDED_TRANS_B
                         : ahc->flags & AHC_EXTENDED_TRANS_A;
                cam_calc_geometry(&ccb->ccg, extended);
                xpt_done(ccb);
                break;
        }
        case XPT_RESET_BUS:             /* Reset the specified SCSI bus */
        {
                int  found;

                ahc_lock();
                found = ahc_reset_channel(ahc, SIM_CHANNEL(ahc, sim),
                                          /*initiate reset*/TRUE);
                ahc_unlock();
                if (bootverbose) {
                        xpt_print_path(SIM_PATH(ahc, sim));
                        kprintf("SCSI bus reset delivered. "
                               "%d SCBs aborted.\n", found);
                }
                ccb->ccb_h.status = CAM_REQ_CMP;
                xpt_done(ccb);
                break;
        }
        case XPT_TERM_IO:               /* Terminate the I/O process */
                /* XXX Implement */
                ccb->ccb_h.status = CAM_REQ_INVALID;
                xpt_done(ccb);
                break;
        case XPT_PATH_INQ:              /* Path routing inquiry */
        {
                struct ccb_pathinq *cpi = &ccb->cpi;

                cpi->version_num = 1; /* XXX??? */
                cpi->hba_inquiry = PI_SDTR_ABLE|PI_TAG_ABLE;
                if ((ahc->features & AHC_WIDE) != 0)
                        cpi->hba_inquiry |= PI_WIDE_16;
                if ((ahc->features & AHC_TARGETMODE) != 0) {
                        cpi->target_sprt = PIT_PROCESSOR
                                         | PIT_DISCONNECT
                                         | PIT_TERM_IO;
                } else {
                        cpi->target_sprt = 0;
                }
                cpi->hba_misc = 0;
                cpi->hba_eng_cnt = 0;
                cpi->max_target = (ahc->features & AHC_WIDE) ? 15 : 7;
                cpi->max_lun = AHC_NUM_LUNS - 1;
                if (SIM_IS_SCSIBUS_B(ahc, sim)) {
                        cpi->initiator_id = ahc->our_id_b;
                        if ((ahc->flags & AHC_RESET_BUS_B) == 0)
                                cpi->hba_misc |= PIM_NOBUSRESET;
                } else {
                        cpi->initiator_id = ahc->our_id;
                        if ((ahc->flags & AHC_RESET_BUS_A) == 0)
                                cpi->hba_misc |= PIM_NOBUSRESET;
                }
                cpi->bus_id = cam_sim_bus(sim);
                cpi->base_transfer_speed = 3300;
                strncpy(cpi->sim_vid, "FreeBSD", SIM_IDLEN);
                strncpy(cpi->hba_vid, "Adaptec", HBA_IDLEN);
                strncpy(cpi->dev_name, cam_sim_name(sim), DEV_IDLEN);
                cpi->unit_number = cam_sim_unit(sim);
#ifdef AHC_NEW_TRAN_SETTINGS
                cpi->protocol = PROTO_SCSI;
                cpi->protocol_version = SCSI_REV_2;
                cpi->transport = XPORT_SPI;
                cpi->transport_version = 2;
                cpi->xport_specific.spi.ppr_options = SID_SPI_CLOCK_ST;
                if ((ahc->features & AHC_DT) != 0) {
                        cpi->transport_version = 3;
                        cpi->xport_specific.spi.ppr_options =
                            SID_SPI_CLOCK_DT_ST;
                }
#endif
                cpi->ccb_h.status = CAM_REQ_CMP;
                xpt_done(ccb);
                break;
        }
        default:
                ccb->ccb_h.status = CAM_PROVIDE_FAIL;
                xpt_done(ccb);
                break;
        }
}

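/*
 * Report the current or user (default) transfer settings for the
 * addressed target back to CAM, including disconnection and tag
 * queuing enables along with period, offset, and bus width.
 */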
static void
ahc_get_tran_settings(struct ahc_softc *ahc, int our_id, char channel,
                      struct ccb_trans_settings *cts)
{
#ifdef AHC_NEW_TRAN_SETTINGS
        struct  ahc_devinfo devinfo;
        struct  ccb_trans_settings_scsi *scsi;
        struct  ccb_trans_settings_spi *spi;
        struct  ahc_initiator_tinfo *targ_info;
        struct  ahc_tmode_tstate *tstate;
        struct  ahc_transinfo *tinfo;

        scsi = &cts->proto_specific.scsi;
        spi = &cts->xport_specific.spi;
        ahc_compile_devinfo(&devinfo, our_id,
                            cts->ccb_h.target_id,
                            cts->ccb_h.target_lun,
                            channel, ROLE_UNKNOWN);
        targ_info = ahc_fetch_transinfo(ahc, devinfo.channel,
                                        devinfo.our_scsiid,
                                        devinfo.target, &tstate);

        if (cts->type == CTS_TYPE_CURRENT_SETTINGS)
                tinfo = &targ_info->curr;
        else
                tinfo = &targ_info->user;

        scsi->flags &= ~CTS_SCSI_FLAGS_TAG_ENB;
        spi->flags &= ~CTS_SPI_FLAGS_DISC_ENB;
        if (cts->type == CTS_TYPE_USER_SETTINGS) {
                if ((ahc->user_discenable & devinfo.target_mask) != 0)
                        spi->flags |= CTS_SPI_FLAGS_DISC_ENB;

                if ((ahc->user_tagenable & devinfo.target_mask) != 0)
                        scsi->flags |= CTS_SCSI_FLAGS_TAG_ENB;
        } else {
                if ((tstate->discenable & devinfo.target_mask) != 0)
                        spi->flags |= CTS_SPI_FLAGS_DISC_ENB;

                if ((tstate->tagenable & devinfo.target_mask) != 0)
                        scsi->flags |= CTS_SCSI_FLAGS_TAG_ENB;
        }
        cts->protocol_version = tinfo->protocol_version;
        cts->transport_version = tinfo->transport_version;

        spi->sync_period = tinfo->period;
        spi->sync_offset = tinfo->offset;
        spi->bus_width = tinfo->width;
        spi->ppr_options = tinfo->ppr_options;

        cts->protocol = PROTO_SCSI;
        cts->transport = XPORT_SPI;
        spi->valid = CTS_SPI_VALID_SYNC_RATE
                   | CTS_SPI_VALID_SYNC_OFFSET
                   | CTS_SPI_VALID_BUS_WIDTH
                   | CTS_SPI_VALID_PPR_OPTIONS;

        if (cts->ccb_h.target_lun != CAM_LUN_WILDCARD) {
                scsi->valid = CTS_SCSI_VALID_TQ;
                spi->valid |= CTS_SPI_VALID_DISC;
        } else {
                scsi->valid = 0;
        }

        cts->ccb_h.status = CAM_REQ_CMP;
#else
        struct  ahc_devinfo devinfo;
        struct  ahc_initiator_tinfo *targ_info;
        struct  ahc_tmode_tstate *tstate;
        struct  ahc_transinfo *tinfo;

        ahc_compile_devinfo(&devinfo, our_id,
                            cts->ccb_h.target_id,
                            cts->ccb_h.target_lun,
                            channel, ROLE_UNKNOWN);
        targ_info = ahc_fetch_transinfo(ahc, devinfo.channel,
                                        devinfo.our_scsiid,
                                        devinfo.target, &tstate);

        if ((cts->flags & CCB_TRANS_CURRENT_SETTINGS) != 0)
                tinfo = &targ_info->curr;
        else
                tinfo = &targ_info->user;

        cts->flags &= ~(CCB_TRANS_DISC_ENB|CCB_TRANS_TAG_ENB);
        if ((cts->flags & CCB_TRANS_CURRENT_SETTINGS) == 0) {
                if ((ahc->user_discenable & devinfo.target_mask) != 0)
                        cts->flags |= CCB_TRANS_DISC_ENB;

                if ((ahc->user_tagenable & devinfo.target_mask) != 0)
                        cts->flags |= CCB_TRANS_TAG_ENB;
        } else {
                if ((tstate->discenable & devinfo.target_mask) != 0)
                        cts->flags |= CCB_TRANS_DISC_ENB;

                if ((tstate->tagenable & devinfo.target_mask) != 0)
                        cts->flags |= CCB_TRANS_TAG_ENB;
        }
        cts->sync_period = tinfo->period;
        cts->sync_offset = tinfo->offset;
        cts->bus_width = tinfo->width;

        cts->valid = CCB_TRANS_SYNC_RATE_VALID
                   | CCB_TRANS_SYNC_OFFSET_VALID
                   | CCB_TRANS_BUS_WIDTH_VALID;

        if (cts->ccb_h.target_lun != CAM_LUN_WILDCARD)
                cts->valid |= CCB_TRANS_DISC_VALID|CCB_TRANS_TQ_VALID;

        cts->ccb_h.status = CAM_REQ_CMP;
#endif
}

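/*
 * Asynchronous event callback registered with CAM.  When a device is
 * lost, revert its negotiation to async, narrow transfers so that the
 * next device appearing at that ID starts from a clean slate.
 */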
static void
ahc_async(void *callback_arg, uint32_t code, struct cam_path *path, void *arg)
{
        struct ahc_softc *ahc;
        struct cam_sim *sim;

        sim = (struct cam_sim *)callback_arg;
        ahc = (struct ahc_softc *)cam_sim_softc(sim);
        switch (code) {
        case AC_LOST_DEVICE:
        {
                struct  ahc_devinfo devinfo;

                ahc_compile_devinfo(&devinfo, SIM_SCSI_ID(ahc, sim),
                                    xpt_path_target_id(path),
                                    xpt_path_lun_id(path),
                                    SIM_CHANNEL(ahc, sim),
                                    ROLE_UNKNOWN);

                /*
                 * Revert to async/narrow transfers
                 * for the next device.
                 */
                ahc_lock();
                ahc_set_width(ahc, &devinfo, MSG_EXT_WDTR_BUS_8_BIT,
                              AHC_TRANS_GOAL|AHC_TRANS_CUR, /*paused*/FALSE);
                ahc_set_syncrate(ahc, &devinfo, /*syncrate*/NULL,
                                 /*period*/0, /*offset*/0, /*ppr_options*/0,
                                 AHC_TRANS_GOAL|AHC_TRANS_CUR,
                                 /*paused*/FALSE);
                ahc_unlock();
                break;
        }
        default:
                break;
        }
}

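/*
 * bus_dmamap_load() callback.  Translate the DMA segments into the
 * controller's S/G list format, finish filling in the hardware SCB,
 * and queue the SCB to the controller (or deliver it immediately for
 * target-immediate transactions).
 */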
static void
ahc_execute_scb(void *arg, bus_dma_segment_t *dm_segs, int nsegments,
                int error)
{
        struct  scb *scb;
        union   ccb *ccb;
        struct  ahc_softc *ahc;
        struct  ahc_initiator_tinfo *tinfo;
        struct  ahc_tmode_tstate *tstate;
        u_int   mask;

        scb = (struct scb *)arg;
        ccb = scb->io_ctx;
        ahc = scb->ahc_softc;

        if (error != 0) {
                if (error == EFBIG)
                        aic_set_transaction_status(scb, CAM_REQ_TOO_BIG);
                else
                        aic_set_transaction_status(scb, CAM_REQ_CMP_ERR);
                if (nsegments != 0)
                        bus_dmamap_unload(ahc->buffer_dmat, scb->dmamap);
                ahc_lock();
                ahc_free_scb(ahc, scb);
                ahc_unlock();
                xpt_done(ccb);
                return;
        }
        if (nsegments != 0) {
                struct    ahc_dma_seg *sg;
                bus_dma_segment_t *end_seg;
                bus_dmasync_op_t op;

                end_seg = dm_segs + nsegments;

                /* Copy the segments into our SG list */
                sg = scb->sg_list;
                while (dm_segs < end_seg) {
                        uint32_t len;

                        sg->addr = aic_htole32(dm_segs->ds_addr);
                        len = dm_segs->ds_len
                            | ((dm_segs->ds_addr >> 8) & 0x7F000000);
                        sg->len = aic_htole32(len);
                        sg++;
                        dm_segs++;
                }

                /*
                 * Note where to find the SG entries in bus space.
                 * We also set the full residual flag which the
                 * sequencer will clear as soon as a data transfer
                 * occurs.
                 */
                scb->hscb->sgptr = aic_htole32(scb->sg_list_phys|SG_FULL_RESID);

                if ((ccb->ccb_h.flags & CAM_DIR_MASK) == CAM_DIR_IN)
                        op = BUS_DMASYNC_PREREAD;
                else
                        op = BUS_DMASYNC_PREWRITE;

                bus_dmamap_sync(ahc->buffer_dmat, scb->dmamap, op);

                if (ccb->ccb_h.func_code == XPT_CONT_TARGET_IO) {
                        struct target_data *tdata;

                        tdata = &scb->hscb->shared_data.tdata;
                        tdata->target_phases |= DPHASE_PENDING;
                        /*
                         * CAM data direction is relative to the initiator.
                         */
                        if ((ccb->ccb_h.flags & CAM_DIR_MASK) == CAM_DIR_OUT)
                                tdata->data_phase = P_DATAOUT;
                        else
                                tdata->data_phase = P_DATAIN;

                        /*
                         * If the transfer is of an odd length and in the
                         * "in" direction (scsi->HostBus), then it may
                         * trigger a bug in the 'WideODD' feature of
                         * non-Ultra2 chips.  Force the total data-length
                         * to be even by adding an extra, 1 byte, SG
                         * element.  We do this even if we are not currently
                         * negotiated wide as negotiation could occur before
                         * this command is executed.
                         */
                        if ((ahc->bugs & AHC_TMODE_WIDEODD_BUG) != 0
                         && (ccb->csio.dxfer_len & 0x1) != 0
                         && (ccb->ccb_h.flags & CAM_DIR_MASK) == CAM_DIR_OUT) {

                                nsegments++;
                                if (nsegments > AHC_NSEG) {

                                        aic_set_transaction_status(scb,
                                            CAM_REQ_TOO_BIG);
                                        bus_dmamap_unload(ahc->buffer_dmat,
                                                          scb->dmamap);
                                        ahc_lock();
                                        ahc_free_scb(ahc, scb);
                                        ahc_unlock();
                                        xpt_done(ccb);
                                        return;
                                }
                                sg->addr = aic_htole32(ahc->dma_bug_buf);
                                sg->len = aic_htole32(1);
                                sg++;
                        }
                }
                sg--;
                sg->len |= aic_htole32(AHC_DMA_LAST_SEG);

                /* Copy the first SG into the "current" data pointer area */
                scb->hscb->dataptr = scb->sg_list->addr;
                scb->hscb->datacnt = scb->sg_list->len;
        } else {
                scb->hscb->sgptr = aic_htole32(SG_LIST_NULL);
                scb->hscb->dataptr = 0;
                scb->hscb->datacnt = 0;
        }

        scb->sg_count = nsegments;

        ahc_lock();

        /*
         * This is our last chance to check whether this SCB
         * needs to be aborted.
         */
1223         if (aic_get_transaction_status(scb) != CAM_REQ_INPROG) {
1224                 if (nsegments != 0)
1225                         bus_dmamap_unload(ahc->buffer_dmat, scb->dmamap);
1226                 ahc_free_scb(ahc, scb);
1227                 ahc_unlock();
1228                 xpt_done(ccb);
1229                 return;
1230         }
1231
1232         tinfo = ahc_fetch_transinfo(ahc, SCSIID_CHANNEL(ahc, scb->hscb->scsiid),
1233                                     SCSIID_OUR_ID(scb->hscb->scsiid),
1234                                     SCSIID_TARGET(ahc, scb->hscb->scsiid),
1235                                     &tstate);
1236
1237         mask = SCB_GET_TARGET_MASK(ahc, scb);
1238         scb->hscb->scsirate = tinfo->scsirate;
1239         scb->hscb->scsioffset = tinfo->curr.offset;
1240         if ((tstate->ultraenb & mask) != 0)
1241                 scb->hscb->control |= ULTRAENB;
1242
1243         if ((tstate->discenable & mask) != 0
1244          && (ccb->ccb_h.flags & CAM_DIS_DISCONNECT) == 0)
1245                 scb->hscb->control |= DISCENB;
1246
1247         if ((ccb->ccb_h.flags & CAM_NEGOTIATE) != 0
1248          && (tinfo->goal.width != 0
1249           || tinfo->goal.offset != 0
1250           || tinfo->goal.ppr_options != 0)) {
1251                 scb->flags |= SCB_NEGOTIATE;
1252                 scb->hscb->control |= MK_MESSAGE;
1253         } else if ((tstate->auto_negotiate & mask) != 0) {
1254                 scb->flags |= SCB_AUTO_NEGOTIATE;
1255                 scb->hscb->control |= MK_MESSAGE;
1256         }
1257
1258         LIST_INSERT_HEAD(&ahc->pending_scbs, scb, pending_links);
1259
1260         ccb->ccb_h.status |= CAM_SIM_QUEUED;
1261
1262         /*
1263          * We only allow one untagged transaction
1264          * per target in the initiator role unless
1265          * we are storing a full busy target *lun*
1266          * table in SCB space.
1267          */
1268         if ((scb->hscb->control & (TARGET_SCB|TAG_ENB)) == 0
1269          && (ahc->flags & AHC_SCB_BTT) == 0) {
1270                 struct scb_tailq *untagged_q;
1271                 int target_offset;
1272
1273                 target_offset = SCB_GET_TARGET_OFFSET(ahc, scb);
1274                 untagged_q = &(ahc->untagged_queues[target_offset]);
1275                 TAILQ_INSERT_TAIL(untagged_q, scb, links.tqe);
1276                 scb->flags |= SCB_UNTAGGEDQ;
1277                 if (TAILQ_FIRST(untagged_q) != scb) {
1278                         ahc_unlock();
1279                         return;
1280                 }
1281         }
1282         scb->flags |= SCB_ACTIVE;
1283
1284         /*
1285          * Timers are disabled while recovery is in progress.
1286          */
1287         aic_scb_timer_start(scb);
1288
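	/*
	 * Hand target-role "immediate" SCBs straight to the sequencer via
	 * the TARG_IMMEDIATE_SCB register while it is paused; everything
	 * else goes through the normal ahc_queue_scb() path.
	 */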
1289         if ((scb->flags & SCB_TARGET_IMMEDIATE) != 0) {
1290                 /* Define a mapping from our tag to the SCB. */
1291                 ahc->scb_data->scbindex[scb->hscb->tag] = scb;
1292                 ahc_pause(ahc);
1293                 if ((ahc->flags & AHC_PAGESCBS) == 0)
1294                         ahc_outb(ahc, SCBPTR, scb->hscb->tag);
1295                 ahc_outb(ahc, TARG_IMMEDIATE_SCB, scb->hscb->tag);
1296                 ahc_unpause(ahc);
1297         } else {
1298                 ahc_queue_scb(ahc, scb);
1299         }
1300
1301         ahc_unlock();
1302 }
1303
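/*
 * Polled completion entry point registered with CAM; it simply runs the
 * interrupt handler by hand for situations where interrupts are not
 * being delivered (e.g. dumping or early boot).
 */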
1304 static void
1305 ahc_poll(struct cam_sim *sim)
1306 {
1307         struct ahc_softc *ahc;
1308
1309         ahc = (struct ahc_softc *)cam_sim_softc(sim);
1310         ahc_intr(ahc);
1311 }
1312
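/*
 * Copy the CDB for an XPT_SCSI_IO request into the hardware SCB and
 * start DMA mapping of any data phase.  Every path below ends up in
 * ahc_execute_scb(), either directly or from the bus_dmamap_load()
 * callback.
 */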
1313 static void
1314 ahc_setup_data(struct ahc_softc *ahc, struct cam_sim *sim,
1315                struct ccb_scsiio *csio, struct scb *scb)
1316 {
1317         struct hardware_scb *hscb;
1318         struct ccb_hdr *ccb_h;
1319         
1320         hscb = scb->hscb;
1321         ccb_h = &csio->ccb_h;
1322         
1323         csio->resid = 0;
1324         csio->sense_resid = 0;
1325         if (ccb_h->func_code == XPT_SCSI_IO) {
1326                 hscb->cdb_len = csio->cdb_len;
1327                 if ((ccb_h->flags & CAM_CDB_POINTER) != 0) {
1328
1329                         if (hscb->cdb_len > sizeof(hscb->cdb32)
1330                          || (ccb_h->flags & CAM_CDB_PHYS) != 0) {
1331                                 aic_set_transaction_status(scb,
1332                                                            CAM_REQ_INVALID);
1333                                 ahc_lock();
1334                                 ahc_free_scb(ahc, scb);
1335                                 ahc_unlock();
1336                                 xpt_done((union ccb *)csio);
1337                                 return;
1338                         }
1339                         if (hscb->cdb_len > 12) {
1340                                 memcpy(hscb->cdb32, 
1341                                        csio->cdb_io.cdb_ptr,
1342                                        hscb->cdb_len);
1343                                 scb->flags |= SCB_CDB32_PTR;
1344                         } else {
1345                                 memcpy(hscb->shared_data.cdb, 
1346                                        csio->cdb_io.cdb_ptr,
1347                                        hscb->cdb_len);
1348                         }
1349                 } else {
1350                         if (hscb->cdb_len > 12) {
1351                                 memcpy(hscb->cdb32, csio->cdb_io.cdb_bytes,
1352                                        hscb->cdb_len);
1353                                 scb->flags |= SCB_CDB32_PTR;
1354                         } else {
1355                                 memcpy(hscb->shared_data.cdb,
1356                                        csio->cdb_io.cdb_bytes,
1357                                        hscb->cdb_len);
1358                         }
1359                 }
1360         }
1361                 
1362         /* Only use S/G if there is a transfer */
1363         if ((ccb_h->flags & CAM_DIR_MASK) != CAM_DIR_NONE) {
1364                 if ((ccb_h->flags & CAM_SCATTER_VALID) == 0) {
1365                         /* We've been given a pointer to a single buffer */
1366                         if ((ccb_h->flags & CAM_DATA_PHYS) == 0) {
1367                                 int error;
1368
1369                                 crit_enter();
1370                                 error = bus_dmamap_load(ahc->buffer_dmat,
1371                                                         scb->dmamap,
1372                                                         csio->data_ptr,
1373                                                         csio->dxfer_len,
1374                                                         ahc_execute_scb,
1375                                                         scb, /*flags*/0);
1376                                 if (error == EINPROGRESS) {
1377                                         /*
1378                                          * So as to maintain ordering,
1379                                          * freeze the controller queue
1380                                          * until our mapping is
1381                                          * returned.
1382                                          */
1383                                         xpt_freeze_simq(sim,
1384                                                         /*count*/1);
1385                                         scb->io_ctx->ccb_h.status |=
1386                                             CAM_RELEASE_SIMQ;
1387                                 }
1388                                 crit_exit();
1389                         } else {
1390                                 struct bus_dma_segment seg;
1391
1392                                 /* Pointer to physical buffer */
1393                                 if (csio->dxfer_len > AHC_MAXTRANSFER_SIZE)
1394                                         panic("ahc_setup_data - Transfer size "
1395                                               "larger than the device maximum");
1396
1397                                 seg.ds_addr =
1398                                     (bus_addr_t)(vm_offset_t)csio->data_ptr;
1399                                 seg.ds_len = csio->dxfer_len;
1400                                 ahc_execute_scb(scb, &seg, 1, 0);
1401                         }
1402                 } else {
1403                         struct bus_dma_segment *segs;
1404
1405                         if ((ccb_h->flags & CAM_DATA_PHYS) != 0)
1406                                 panic("ahc_setup_data - Physical segment "
1407                                       "pointers unsupported");
1408
1409                         if ((ccb_h->flags & CAM_SG_LIST_PHYS) == 0)
1410                                 panic("ahc_setup_data - Virtual segment "
1411                                       "addresses unsupported");
1412
1413                         /* Just use the segments provided */
1414                         segs = (struct bus_dma_segment *)csio->data_ptr;
1415                         ahc_execute_scb(scb, segs, csio->sglist_cnt, 0);
1416                 }
1417         } else {
1418                 ahc_execute_scb(scb, NULL, 0, 0);
1419         }
1420 }
1421
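/*
 * Service an XPT_ABORT request.  Target-mode CCBs still parked on our
 * accept/immediate-notify lists can simply be removed and completed
 * with CAM_REQ_ABORTED; aborting an in-flight XPT_SCSI_IO is not yet
 * implemented (see the XXX below).
 */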
1422 static void
1423 ahc_abort_ccb(struct ahc_softc *ahc, struct cam_sim *sim, union ccb *ccb)
1424 {
1425         union ccb *abort_ccb;
1426
1427         abort_ccb = ccb->cab.abort_ccb;
1428         switch (abort_ccb->ccb_h.func_code) {
1429         case XPT_ACCEPT_TARGET_IO:
1430         case XPT_IMMED_NOTIFY:
1431         case XPT_CONT_TARGET_IO:
1432         {
1433                 struct ahc_tmode_tstate *tstate;
1434                 struct ahc_tmode_lstate *lstate;
1435                 struct ccb_hdr_slist *list;
1436                 cam_status status;
1437
1438                 status = ahc_find_tmode_devs(ahc, sim, abort_ccb, &tstate,
1439                                              &lstate, TRUE);
1440
1441                 if (status != CAM_REQ_CMP) {
1442                         ccb->ccb_h.status = status;
1443                         break;
1444                 }
1445
1446                 if (abort_ccb->ccb_h.func_code == XPT_ACCEPT_TARGET_IO)
1447                         list = &lstate->accept_tios;
1448                 else if (abort_ccb->ccb_h.func_code == XPT_IMMED_NOTIFY)
1449                         list = &lstate->immed_notifies;
1450                 else
1451                         list = NULL;
1452
1453                 if (list != NULL) {
1454                         struct ccb_hdr *curelm;
1455                         int found;
1456
1457                         curelm = SLIST_FIRST(list);
1458                         found = 0;
1459                         if (curelm == &abort_ccb->ccb_h) {
1460                                 found = 1;
1461                                 SLIST_REMOVE_HEAD(list, sim_links.sle);
1462                         } else {
1463                                 while (curelm != NULL) {
1464                                         struct ccb_hdr *nextelm;
1465
1466                                         nextelm =
1467                                             SLIST_NEXT(curelm, sim_links.sle);
1468
1469                                         if (nextelm == &abort_ccb->ccb_h) {
1470                                                 found = 1;
1471                                                 SLIST_NEXT(curelm,
1472                                                            sim_links.sle) =
1473                                                     SLIST_NEXT(nextelm,
1474                                                                sim_links.sle);
1475                                                 break;
1476                                         }
1477                                         curelm = nextelm;
1478                                 }
1479                         }
1480
1481                         if (found) {
1482                                 abort_ccb->ccb_h.status = CAM_REQ_ABORTED;
1483                                 xpt_done(abort_ccb);
1484                                 ccb->ccb_h.status = CAM_REQ_CMP;
1485                         } else {
1486                                 xpt_print_path(abort_ccb->ccb_h.path);
1487                                 kprintf("Not found\n");
1488                                 ccb->ccb_h.status = CAM_PATH_INVALID;
1489                         }
1490                         break;
1491                 }
1492                 /* FALLTHROUGH */
1493         }
1494         case XPT_SCSI_IO:
1495                 /* XXX Fully implement the hard ones */
1496                 ccb->ccb_h.status = CAM_UA_ABORT;
1497                 break;
1498         default:
1499                 ccb->ccb_h.status = CAM_REQ_INVALID;
1500                 break;
1501         }
1502         xpt_done(ccb);
1503 }
1504
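/*
 * Forward an async event from the core driver to CAM via xpt_async()
 * on a temporarily constructed path.  For AC_TRANSFER_NEG the current
 * transfer settings (with the tag-queueing validity adjusted) are
 * passed along as the event argument.
 */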
1505 void
1506 ahc_send_async(struct ahc_softc *ahc, char channel, u_int target,
1507                 u_int lun, ac_code code, void *opt_arg)
1508 {
1509         struct  ccb_trans_settings cts;
1510         struct cam_path *path;
1511         void *arg;
1512         int error;
1513
1514         arg = NULL;
1515         error = ahc_create_path(ahc, channel, target, lun, &path);
1516
1517         if (error != CAM_REQ_CMP)
1518                 return;
1519
1520         switch (code) {
1521         case AC_TRANSFER_NEG:
1522         {
1523 #ifdef AHC_NEW_TRAN_SETTINGS
1524                 struct  ccb_trans_settings_scsi *scsi;
1525         
1526                 cts.type = CTS_TYPE_CURRENT_SETTINGS;
1527                 scsi = &cts.proto_specific.scsi;
1528 #else
1529                 cts.flags = CCB_TRANS_CURRENT_SETTINGS;
1530 #endif
1531                 cts.ccb_h.path = path;
1532                 cts.ccb_h.target_id = target;
1533                 cts.ccb_h.target_lun = lun;
1534                 ahc_get_tran_settings(ahc, channel == 'A' ? ahc->our_id
1535                                                           : ahc->our_id_b,
1536                                       channel, &cts);
1537                 arg = &cts;
1538 #ifdef AHC_NEW_TRAN_SETTINGS
1539                 scsi->valid &= ~CTS_SCSI_VALID_TQ;
1540                 scsi->flags &= ~CTS_SCSI_FLAGS_TAG_ENB;
1541 #else
1542                 cts.valid &= ~CCB_TRANS_TQ_VALID;
1543                 cts.flags &= ~CCB_TRANS_TAG_ENB;
1544 #endif
1545                 if (opt_arg == NULL)
1546                         break;
1547                 if (*((ahc_queue_alg *)opt_arg) == AHC_QUEUE_TAGGED)
1548 #ifdef AHC_NEW_TRAN_SETTINGS
1549                         scsi->flags |= CTS_SCSI_FLAGS_TAG_ENB;
1550                 scsi->valid |= CTS_SCSI_VALID_TQ;
1551 #else
1552                         cts.flags |= CCB_TRANS_TAG_ENB;
1553                 cts.valid |= CCB_TRANS_TQ_VALID;
1554 #endif
1555                 break;
1556         }
1557         case AC_SENT_BDR:
1558         case AC_BUS_RESET:
1559                 break;
1560         default:
1561                 panic("ahc_send_async: Unexpected async event");
1562         }
1563         xpt_async(code, path, arg);
1564         xpt_free_path(path);
1565 }
1566
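/*
 * No per-platform state needs updating when tagged queuing is enabled
 * or disabled for a device, so this hook is intentionally empty.
 */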
1567 void
1568 ahc_platform_set_tags(struct ahc_softc *ahc,
1569                       struct ahc_devinfo *devinfo, int enable)
1570 {
1571 }
1572
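/*
 * Allocate the zeroed platform_data area.  An M_INTWAIT allocation is
 * not expected to fail here, so no NULL check or error return is made.
 */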
1573 int
1574 ahc_platform_alloc(struct ahc_softc *ahc, void *platform_arg)
1575 {
1576         ahc->platform_data = kmalloc(sizeof(struct ahc_platform_data), M_DEVBUF,
1577                                     M_INTWAIT | M_ZERO);
1578         return (0);
1579 }
1580
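/*
 * Release everything recorded in platform_data: register and interrupt
 * resources, both CAM SIMs and their paths, and the shutdown event
 * handler, then free the structure itself.
 */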
1581 void
1582 ahc_platform_free(struct ahc_softc *ahc)
1583 {
1584         struct ahc_platform_data *pdata;
1585
1586         pdata = ahc->platform_data;
1587         if (pdata != NULL) {
1588                 if (pdata->regs != NULL)
1589                         bus_release_resource(ahc->dev_softc,
1590                                              pdata->regs_res_type,
1591                                              pdata->regs_res_id,
1592                                              pdata->regs);
1593
1594                 if (pdata->irq != NULL)
1595                         bus_release_resource(ahc->dev_softc,
1596                                              pdata->irq_res_type,
1597                                              0, pdata->irq);
1598
1599                 if (pdata->sim_b != NULL) {
1600                         xpt_async(AC_LOST_DEVICE, pdata->path_b, NULL);
1601                         xpt_free_path(pdata->path_b);
1602                         xpt_bus_deregister(cam_sim_path(pdata->sim_b));
1603                         cam_sim_free(pdata->sim_b);
1604                 }
1605                 if (pdata->sim != NULL) {
1606                         xpt_async(AC_LOST_DEVICE, pdata->path, NULL);
1607                         xpt_free_path(pdata->path);
1608                         xpt_bus_deregister(cam_sim_path(pdata->sim));
1609                         cam_sim_free(pdata->sim);
1610                 }
1611                 if (pdata->eh != NULL)
1612                         EVENTHANDLER_DEREGISTER(shutdown_post_sync, pdata->eh);
1613                 kfree(ahc->platform_data, M_DEVBUF);
1614         }
1615 }
1616
1617 int
1618 ahc_softc_comp(struct ahc_softc *lahc, struct ahc_softc *rahc)
1619 {
1620         /* We don't sort softcs under FreeBSD, so always report equal */
1621         return (0);
1622 }
1623
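/*
 * Common detach path used by the bus front-ends: disable and tear down
 * the interrupt, drop the controller from the global list, and free
 * the softc.
 */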
1624 int
1625 ahc_detach(device_t dev)
1626 {
1627         struct ahc_softc *ahc;
1628
1629         device_printf(dev, "detaching device\n");
1630         ahc = device_get_softc(dev);
1631         ahc = ahc_find_softc(ahc);
1632         if (ahc == NULL) {
1633                 device_printf(dev, "aic7xxx already detached\n");
1634                 return (ENOENT);
1635         }
1636         TAILQ_REMOVE(&ahc_tailq, ahc, links);
1637         ahc_lock();
1638         ahc_intr_enable(ahc, FALSE);
1639         bus_teardown_intr(dev, ahc->platform_data->irq, ahc->platform_data->ih);
1640         ahc_unlock();
1641         ahc_free(ahc);
1642         return (0);
1643 }
1644
1645 #if 0
1646 static void
1647 ahc_dump_targcmd(struct target_cmd *cmd)
1648 {
1649         uint8_t *byte;
1650         uint8_t *last_byte;
1651         int i;
1652
1653         byte = &cmd->initiator_channel;
1654         /* Debugging info for received commands */
1655         last_byte = &cmd[1].initiator_channel;
1656
1657         i = 0;
1658         while (byte < last_byte) {
1659                 if (i == 0)
1660                         kprintf("\t");
1661                 kprintf("%#x", *byte++);
1662                 i++;
1663                 if (i == 8) {
1664                         kprintf("\n");
1665                         i = 0;
1666                 } else {
1667                         kprintf(", ");
1668                 }
1669         }
1670 }
1671 #endif
1672
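/*
 * Module event handler.  Load and unload events are currently accepted
 * unconditionally; see the XXX regarding busy controllers on unload.
 */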
1673 static int
1674 ahc_modevent(module_t mod, int type, void *data)
1675 {
1676         /* XXX Deal with busy status on unload. */
1677         return (0);
1678 }
1679   
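/*
 * Register the "ahc" module with the kernel and record its dependency
 * on the CAM transport layer.
 */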
1680 static moduledata_t ahc_mod = {
1681         "ahc",
1682         ahc_modevent,
1683         NULL
1684 };
1685
1686 DECLARE_MODULE(ahc, ahc_mod, SI_SUB_DRIVERS, SI_ORDER_MIDDLE);
1687 MODULE_DEPEND(ahc, cam, 1, 1, 1);
1688 MODULE_VERSION(ahc, 1);