Lines matching references to tf (ATA taskfile)

336  *	@tf: ATA taskfile register set
343 void ata_sff_tf_load(struct ata_port *ap, const struct ata_taskfile *tf)
346 unsigned int is_addr = tf->flags & ATA_TFLAG_ISADDR;
348 if (tf->ctl != ap->last_ctl) {
350 iowrite8(tf->ctl, ioaddr->ctl_addr);
351 ap->last_ctl = tf->ctl;
355 if (is_addr && (tf->flags & ATA_TFLAG_LBA48)) {
357 iowrite8(tf->hob_feature, ioaddr->feature_addr);
358 iowrite8(tf->hob_nsect, ioaddr->nsect_addr);
359 iowrite8(tf->hob_lbal, ioaddr->lbal_addr);
360 iowrite8(tf->hob_lbam, ioaddr->lbam_addr);
361 iowrite8(tf->hob_lbah, ioaddr->lbah_addr);
365 iowrite8(tf->feature, ioaddr->feature_addr);
366 iowrite8(tf->nsect, ioaddr->nsect_addr);
367 iowrite8(tf->lbal, ioaddr->lbal_addr);
368 iowrite8(tf->lbam, ioaddr->lbam_addr);
369 iowrite8(tf->lbah, ioaddr->lbah_addr);
372 if (tf->flags & ATA_TFLAG_DEVICE)
373 iowrite8(tf->device, ioaddr->device_addr);
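
The fragments at lines 336-373 above match the body of ata_sff_tf_load() in drivers/ata/libata-sff.c: write a taskfile out to SFF-style port registers. The sketch below uses hypothetical names; the is_addr guard on the low-order writes and the ata_wait_idle() calls are not visible in the listing and are assumptions about the surrounding control flow.

#include <linux/io.h>
#include <linux/libata.h>

/* Sketch of the load sequence: control register first (only when it changes),
 * then the HOB bytes for LBA48, then the low-order bytes, then the device
 * register.  Guards and idle waits marked "assumed" are reconstructions. */
static void sketch_sff_tf_load(struct ata_port *ap, const struct ata_taskfile *tf)
{
	struct ata_ioports *ioaddr = &ap->ioaddr;
	unsigned int is_addr = tf->flags & ATA_TFLAG_ISADDR;

	/* Only touch the device-control register when the cached value changes. */
	if (tf->ctl != ap->last_ctl) {
		iowrite8(tf->ctl, ioaddr->ctl_addr);
		ap->last_ctl = tf->ctl;
		ata_wait_idle(ap);		/* assumed: let the device settle */
	}

	/* LBA48 commands load the high-order byte set (HOB) first. */
	if (is_addr && (tf->flags & ATA_TFLAG_LBA48)) {
		iowrite8(tf->hob_feature, ioaddr->feature_addr);
		iowrite8(tf->hob_nsect, ioaddr->nsect_addr);
		iowrite8(tf->hob_lbal, ioaddr->lbal_addr);
		iowrite8(tf->hob_lbam, ioaddr->lbam_addr);
		iowrite8(tf->hob_lbah, ioaddr->lbah_addr);
	}

	/* Then the low-order bytes (guard assumed). */
	if (is_addr) {
		iowrite8(tf->feature, ioaddr->feature_addr);
		iowrite8(tf->nsect, ioaddr->nsect_addr);
		iowrite8(tf->lbal, ioaddr->lbal_addr);
		iowrite8(tf->lbam, ioaddr->lbam_addr);
		iowrite8(tf->lbah, ioaddr->lbah_addr);
	}

	if (tf->flags & ATA_TFLAG_DEVICE)
		iowrite8(tf->device, ioaddr->device_addr);

	ata_wait_idle(ap);			/* assumed */
}

Writing the HOB bytes before the current bytes matters: on SFF hardware each register address holds a two-deep FIFO, so the second write to the same address becomes the "current" value the command actually uses.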
382 * @tf: ATA taskfile register set for storing input
385 * into @tf. Assumes the device has a fully SFF compliant task file
392 void ata_sff_tf_read(struct ata_port *ap, struct ata_taskfile *tf)
396 tf->status = ata_sff_check_status(ap);
397 tf->error = ioread8(ioaddr->error_addr);
398 tf->nsect = ioread8(ioaddr->nsect_addr);
399 tf->lbal = ioread8(ioaddr->lbal_addr);
400 tf->lbam = ioread8(ioaddr->lbam_addr);
401 tf->lbah = ioread8(ioaddr->lbah_addr);
402 tf->device = ioread8(ioaddr->device_addr);
404 if (tf->flags & ATA_TFLAG_LBA48) {
406 iowrite8(tf->ctl | ATA_HOB, ioaddr->ctl_addr);
407 tf->hob_feature = ioread8(ioaddr->error_addr);
408 tf->hob_nsect = ioread8(ioaddr->nsect_addr);
409 tf->hob_lbal = ioread8(ioaddr->lbal_addr);
410 tf->hob_lbam = ioread8(ioaddr->lbam_addr);
411 tf->hob_lbah = ioread8(ioaddr->lbah_addr);
412 iowrite8(tf->ctl, ioaddr->ctl_addr);
413 ap->last_ctl = tf->ctl;
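
Lines 382-413 are the inverse helper, ata_sff_tf_read(): the shadow registers are read back into @tf, and for LBA48 the high-order bytes are fetched by setting ATA_HOB in the device-control register and reading the same addresses a second time. A hedged reconstruction (the guard on ioaddr->ctl_addr is an assumption):

#include <linux/io.h>
#include <linux/libata.h>

/* Sketch of the read-back sequence from lines 396-413. */
static void sketch_sff_tf_read(struct ata_port *ap, struct ata_taskfile *tf)
{
	struct ata_ioports *ioaddr = &ap->ioaddr;

	tf->status = ata_sff_check_status(ap);	/* reading status also clears INTRQ */
	tf->error  = ioread8(ioaddr->error_addr);
	tf->nsect  = ioread8(ioaddr->nsect_addr);
	tf->lbal   = ioread8(ioaddr->lbal_addr);
	tf->lbam   = ioread8(ioaddr->lbam_addr);
	tf->lbah   = ioread8(ioaddr->lbah_addr);
	tf->device = ioread8(ioaddr->device_addr);

	if (tf->flags & ATA_TFLAG_LBA48) {
		if (ioaddr->ctl_addr) {		/* assumed guard */
			/* Select the high-order byte set, read it, restore ctl. */
			iowrite8(tf->ctl | ATA_HOB, ioaddr->ctl_addr);
			tf->hob_feature = ioread8(ioaddr->error_addr);
			tf->hob_nsect   = ioread8(ioaddr->nsect_addr);
			tf->hob_lbal    = ioread8(ioaddr->lbal_addr);
			tf->hob_lbam    = ioread8(ioaddr->lbam_addr);
			tf->hob_lbah    = ioread8(ioaddr->lbah_addr);
			iowrite8(tf->ctl, ioaddr->ctl_addr);
			ap->last_ctl = tf->ctl;
		}
	}
}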
423 * @tf: ATA taskfile register set
431 void ata_sff_exec_command(struct ata_port *ap, const struct ata_taskfile *tf)
433 iowrite8(tf->command, ap->ioaddr.command_addr);
441 * @tf: ATA taskfile register set
452 const struct ata_taskfile *tf,
455 trace_ata_tf_load(ap, tf);
456 ap->ops->sff_tf_load(ap, tf);
457 trace_ata_exec_command(ap, tf, tag);
458 ap->ops->sff_exec_command(ap, tf);
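
Lines 423-458 show how a command is actually started: ata_sff_exec_command() is a single iowrite8() of tf->command to the command register, and the file-local ata_tf_to_host() helper chains the two port ops, emitting the ata_tf_load and ata_exec_command tracepoints in between. A minimal sketch with hypothetical names; the post-command flush of the real helper is noted in a comment only:

#include <linux/io.h>
#include <linux/libata.h>

/* Sketch of the issue sequence from lines 431-458. */
static void sketch_exec_command(struct ata_port *ap, const struct ata_taskfile *tf)
{
	iowrite8(tf->command, ap->ioaddr.command_addr);
	/* the real ata_sff_exec_command() follows this with a short flush/pause */
}

static void sketch_tf_to_host(struct ata_port *ap, const struct ata_taskfile *tf)
{
	/* real code: trace_ata_tf_load(ap, tf) */
	ap->ops->sff_tf_load(ap, tf);		/* program the shadow registers */
	/* real code: trace_ata_exec_command(ap, tf, tag) */
	ap->ops->sff_exec_command(ap, tf);	/* write the command register, starting execution */
}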
581 bool do_write = (qc->tf.flags & ATA_TFLAG_WRITE);
661 if (is_multi_taskfile(&qc->tf)) {
698 switch (qc->tf.protocol) {
709 trace_ata_bmdma_start(ap, &qc->tf, qc->tag);
731 int rw = (qc->tf.flags & ATA_TFLAG_WRITE) ? WRITE : READ;
804 int i_write, do_write = (qc->tf.flags & ATA_TFLAG_WRITE) ? 1 : 0;
855 if (qc->tf.flags & ATA_TFLAG_POLLING)
859 if (qc->tf.protocol == ATA_PROT_PIO &&
860 (qc->tf.flags & ATA_TFLAG_WRITE))
863 if (ata_is_atapi(qc->tf.protocol) &&
927 * in_wq is not equivalent to (qc->tf.flags & ATA_TFLAG_POLLING).
942 poll_next = (qc->tf.flags & ATA_TFLAG_POLLING);
983 if (qc->tf.protocol == ATA_PROT_PIO) {
1005 if (qc->tf.protocol == ATAPI_PROT_PIO) {
1082 if (!(qc->tf.flags & ATA_TFLAG_WRITE)) {
1115 (!(qc->tf.flags & ATA_TFLAG_WRITE))) {
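
The scattered fragments between lines 581 and 1115 belong to the PIO/HSM data path. Their common thread is that both the transfer direction and the per-DRQ block count are derived from the queued command's taskfile, not from upper layers. The two helpers below are hypothetical (not part of libata) and only restate those tests; the min() clamp mirrors what the READ/WRITE MULTIPLE handling does, and is otherwise an assumption.

#include <linux/libata.h>

/* Hypothetical helpers restating the taskfile-driven tests seen above. */
static unsigned int sketch_pio_block_count(const struct ata_queued_cmd *qc)
{
	/* READ/WRITE MULTIPLE moves up to dev->multi_count sectors per DRQ. */
	if (is_multi_taskfile(&qc->tf))
		return min((qc->nbytes - qc->curbytes) / qc->sect_size,
			   qc->dev->multi_count);
	return 1;
}

static bool sketch_pio_is_write(const struct ata_queued_cmd *qc)
{
	return qc->tf.flags & ATA_TFLAG_WRITE;
}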
1290 qc->tf.flags |= ATA_TFLAG_POLLING;
1296 switch (qc->tf.protocol) {
1298 if (qc->tf.flags & ATA_TFLAG_POLLING)
1301 ata_tf_to_host(ap, &qc->tf, qc->tag);
1304 if (qc->tf.flags & ATA_TFLAG_POLLING)
1310 if (qc->tf.flags & ATA_TFLAG_POLLING)
1313 ata_tf_to_host(ap, &qc->tf, qc->tag);
1315 if (qc->tf.flags & ATA_TFLAG_WRITE) {
1327 if (qc->tf.flags & ATA_TFLAG_POLLING)
1340 if (qc->tf.flags & ATA_TFLAG_POLLING)
1343 ata_tf_to_host(ap, &qc->tf, qc->tag);
1349 (qc->tf.flags & ATA_TFLAG_POLLING))
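
Lines 1290-1349 are the protocol dispatch in ata_sff_qc_issue(). A condensed, hedged sketch of the pattern follows; the polling-task scheduling and several protocol cases of the real function are simplified away, and all names are hypothetical.

#include <linux/libata.h>

/* Hypothetical stand-in for the file-local ata_tf_to_host() shown at
 * lines 452-458 above. */
static void sketch_issue_tf(struct ata_port *ap, const struct ata_taskfile *tf)
{
	ap->ops->sff_tf_load(ap, tf);
	ap->ops->sff_exec_command(ap, tf);
}

/* Sketch of the dispatch: the taskfile protocol and ATA_TFLAG_POLLING decide
 * how the command is started and which HSM state the port enters. */
static unsigned int sketch_sff_qc_issue(struct ata_queued_cmd *qc)
{
	struct ata_port *ap = qc->ap;

	switch (qc->tf.protocol) {
	case ATA_PROT_NODATA:
		sketch_issue_tf(ap, &qc->tf);
		ap->hsm_task_state = HSM_ST_LAST;	/* just wait for completion */
		break;

	case ATA_PROT_PIO:
		sketch_issue_tf(ap, &qc->tf);
		/* PIO writes must push the first data block themselves;
		 * reads wait for the device to raise DRQ. */
		if (qc->tf.flags & ATA_TFLAG_WRITE)
			ap->hsm_task_state = HSM_ST_FIRST;
		else
			ap->hsm_task_state = HSM_ST;
		break;

	case ATAPI_PROT_PIO:
	case ATAPI_PROT_NODATA:
		sketch_issue_tf(ap, &qc->tf);
		ap->hsm_task_state = HSM_ST_FIRST;	/* the CDB still has to be sent */
		break;

	default:
		return AC_ERR_SYSTEM;
	}

	return 0;
}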
1410 * need to check ata_is_atapi(qc->tf.protocol) again.
1480 if (!(qc->tf.flags & ATA_TFLAG_POLLING))
1573 if (!qc || qc->tf.flags & ATA_TFLAG_POLLING)
1749 struct ata_taskfile tf;
1755 memset(&tf, 0, sizeof(tf));
1757 ap->ops->sff_tf_read(ap, &tf);
1758 err = tf.error;
1774 class = ata_port_classify(ap, &tf);
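
Lines 1749-1774 come from the software-reset path: after the reset, a zeroed taskfile is filled through ->sff_tf_read() and handed to ata_port_classify() to derive the device class from the signature, with tf.error carrying the diagnostic result. Condensed (hypothetical name, error handling omitted):

#include <linux/string.h>
#include <linux/libata.h>

/* Sketch: read the post-reset signature and classify the attached device. */
static unsigned int sketch_read_signature(struct ata_port *ap, u8 *err)
{
	struct ata_taskfile tf;

	memset(&tf, 0, sizeof(tf));
	ap->ops->sff_tf_read(ap, &tf);
	*err = tf.error;			/* diagnostic code from the device */

	return ata_port_classify(ap, &tf);	/* ATA_DEV_ATA, ATA_DEV_ATAPI, ... */
}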
2653 if (!ata_is_dma(qc->tf.protocol))
2660 switch (qc->tf.protocol) {
2662 WARN_ON_ONCE(qc->tf.flags & ATA_TFLAG_POLLING);
2664 trace_ata_tf_load(ap, &qc->tf);
2665 ap->ops->sff_tf_load(ap, &qc->tf); /* load tf registers */
2666 trace_ata_bmdma_setup(ap, &qc->tf, qc->tag);
2668 trace_ata_bmdma_start(ap, &qc->tf, qc->tag);
2674 WARN_ON_ONCE(qc->tf.flags & ATA_TFLAG_POLLING);
2676 trace_ata_tf_load(ap, &qc->tf);
2677 ap->ops->sff_tf_load(ap, &qc->tf); /* load tf registers */
2678 trace_ata_bmdma_setup(ap, &qc->tf, qc->tag);
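
Lines 2653-2678 are the DMA branch of ata_bmdma_qc_issue(): DMA completion is interrupt driven (hence the WARN on ATA_TFLAG_POLLING), the taskfile is loaded through ->sff_tf_load(), and the bus-master engine is programmed; for ATAPI DMA the start is deferred until the CDB has been sent. The sketch below simplifies the ATAPI CDB handling and uses hypothetical names.

#include <linux/libata.h>

/* Sketch of the ata_bmdma_qc_issue() DMA branch.  Non-DMA protocols fall
 * back to ata_sff_qc_issue() (line 2653 above). */
static unsigned int sketch_bmdma_qc_issue(struct ata_queued_cmd *qc)
{
	struct ata_port *ap = qc->ap;

	if (!ata_is_dma(qc->tf.protocol))
		return ata_sff_qc_issue(qc);	/* PIO/NODATA handled by the SFF path */

	/* DMA completion is interrupt driven; polling makes no sense here. */
	WARN_ON_ONCE(qc->tf.flags & ATA_TFLAG_POLLING);

	ap->ops->sff_tf_load(ap, &qc->tf);	/* load tf registers */
	ap->ops->bmdma_setup(qc);		/* program the PRD table and direction */

	if (qc->tf.protocol == ATA_PROT_DMA) {
		ap->ops->bmdma_start(qc);	/* start the host DMA engine now */
		ap->hsm_task_state = HSM_ST_LAST;
	} else {
		/* ATAPI_PROT_DMA: the CDB goes out first; bmdma_start() is
		 * called from the HSM once the device requests the packet. */
		ap->hsm_task_state = HSM_ST_FIRST;
	}

	return 0;
}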
2716 if (ap->hsm_task_state == HSM_ST_LAST && ata_is_dma(qc->tf.protocol)) {
2726 trace_ata_bmdma_stop(ap, &qc->tf, qc->tag);
2739 if (unlikely(qc->err_mask) && ata_is_dma(qc->tf.protocol))
2791 if (qc && ata_is_dma(qc->tf.protocol)) {
2807 trace_ata_bmdma_stop(ap, &qc->tf, qc->tag);
2839 if (ata_is_dma(qc->tf.protocol)) {
2841 trace_ata_bmdma_stop(ap, &qc->tf, qc->tag);
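
Lines 2716-2841 are the corresponding teardown paths: both the interrupt handler and the error handler check ata_is_dma(qc->tf.protocol) before touching the bus-master registers, read the BMDMA status, and stop the engine before the taskfile status is consumed. A condensed sketch of that pattern; the error accounting is simplified and the name is hypothetical.

#include <linux/libata.h>

/* Sketch: completion-side handling for a DMA taskfile, mirroring the
 * ata_is_dma() guards at lines 2716, 2739, 2791 and 2839 above. */
static void sketch_bmdma_complete(struct ata_port *ap, struct ata_queued_cmd *qc)
{
	u8 host_stat;

	if (!ata_is_dma(qc->tf.protocol))
		return;				/* nothing to stop for PIO/NODATA */

	host_stat = ap->ops->bmdma_status(ap);

	/* A bus-master error is a host-side problem, not a device error. */
	if (host_stat & ATA_DMA_ERR)
		qc->err_mask |= AC_ERR_HOST_BUS;

	ap->ops->bmdma_stop(qc);		/* halt DMA before reading the taskfile status */
}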
2880 unsigned int rw = (qc->tf.flags & ATA_TFLAG_WRITE);
2895 ap->ops->sff_exec_command(ap, &qc->tf);