%%% -*-BibTeX-*-
%%% ====================================================================
%%% BibTeX-file{
%%%     author          = "Nelson H. F. Beebe",
%%%     version         = "1.49",
%%%     date            = "08 October 2024",
%%%     time            = "06:04:59 MDT",
%%%     filename        = "taccess.bib",
%%%     address         = "University of Utah
%%%                        Department of Mathematics, 110 LCB
%%%                        155 S 1400 E RM 233
%%%                        Salt Lake City, UT 84112-0090
%%%                        USA",
%%%     telephone       = "+1 801 581 5254",
%%%     FAX             = "+1 801 581 4148",
%%%     URL             = "https://www.math.utah.edu/~beebe",
%%%     checksum        = "62380 12099 63679 604805",
%%%     email           = "beebe at math.utah.edu, beebe at acm.org,
%%%                        beebe at computer.org (Internet)",
%%%     codetable       = "ISO/ASCII",
%%%     keywords        = "ACM Transactions on Accessible Computing;
%%%                        bibliography; TACCESS",
%%%     license         = "public domain",
%%%     supported       = "yes",
%%%     docstring       = "This is a COMPLETE BibTeX bibliography for
%%%                        ACM Transactions on Accessible Computing
%%%                        (CODEN ????, ISSN 1936-7228 (print),
%%%                        1936-7236 (electronic)), covering all journal
%%%                        issues from 2008 -- date.
%%%
%%%                        At version 1.49, the COMPLETE journal
%%%                        coverage looked like this:
%%%
%%%                             2008 (  12)    2014 (   5)    2020 (  25)
%%%                             2009 (  15)    2015 (  29)    2021 (  24)
%%%                             2010 (  13)    2016 (  18)    2022 (  38)
%%%                             2011 (  10)    2017 (  25)    2023 (  24)
%%%                             2012 (   8)    2018 (  16)    2024 (  17)
%%%                             2013 (   7)    2019 (  14)
%%%
%%%                             Article:        300
%%%
%%%                             Total entries:  300
%%%
%%%                        The journal Web page can be found at:
%%%
%%%                            http://www.is.umbc.edu/taccess/
%%%
%%%                        The journal table of contents page is at:
%%%
%%%                            http://www.acm.org/taccess/
%%%                            http://portal.acm.org/browse_dl.cfm?idx=J1156
%%%                            https://dl.acm.org/loi/taccess
%%%
%%%                        Qualified subscribers can retrieve the full
%%%                        text of recent articles in PDF form.
%%%
%%%                        The initial draft was extracted from the ACM
%%%                        Web pages.
%%%
%%%                        ACM copyrights explicitly permit abstracting
%%%                        with credit, so article abstracts, keywords,
%%%                        and subject classifications have been
%%%                        included in this bibliography wherever
%%%                        available.  Article reviews have been
%%%                        omitted, until their copyright status has
%%%                        been clarified.
%%%
%%%                        bibsource keys in the bibliography entries
%%%                        below indicate the entry originally came
%%%                        from the computer science bibliography
%%%                        archive, even though it has likely since
%%%                        been corrected and updated.
%%%
%%%                        URL keys in the bibliography point to
%%%                        World Wide Web locations of additional
%%%                        information about the entry.
%%%
%%%                        BibTeX citation tags are uniformly chosen
%%%                        as name:year:abbrev, where name is the
%%%                        family name of the first author or editor,
%%%                        year is a 4-digit number, and abbrev is a
%%%                        3-letter condensation of important title
%%%                        words. Citation tags were automatically
%%%                        generated by software developed for the
%%%                        BibNet Project.
%%%
%%%                        In this bibliography, entries are sorted in
%%%                        publication order, using ``bibsort -byvolume.''
%%%
%%%                        The checksum field above contains a CRC-16
%%%                        checksum as the first value, followed by the
%%%                        equivalent of the standard UNIX wc (word
%%%                        count) utility output of lines, words, and
%%%                        characters.  This is produced by Robert
%%%                        Solovay's checksum utility."
%%%     }
%%% ====================================================================
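%%% A minimal sketch, in Python, of how the wc-style portion of the
%%% checksum field above (lines, words, and characters) could be
%%% recomputed.  The leading CRC-16 value comes from Robert Solovay's
%%% checksum utility, whose parameters are not reproduced here, so it
%%% is not verified by this sketch.  The code is kept in this file's
%%% comment convention; strip the leading "%%% " to obtain runnable
%%% Python.  The command-line filename argument is an assumption.
%%%
%%%     import sys
%%%
%%%     def wc_counts(path):
%%%         # Mirror "wc FILE": count newlines, whitespace-separated
%%%         # words, and bytes (the file is plain ISO/ASCII text).
%%%         with open(path, "rb") as f:
%%%             data = f.read()
%%%         return data.count(b"\n"), len(data.split()), len(data)
%%%
%%%     if __name__ == "__main__":
%%%         lines, words, chars = wc_counts(sys.argv[1])
%%%         print(lines, words, chars)
%%% ====================================================================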
@Preamble{"\input bibnames.sty" #
    "\def \TM {${}^{\sc TM}$}"
}

%%% ====================================================================
%%% Acknowledgement abbreviations:
@String{ack-nhfb = "Nelson H. F. Beebe,
                    University of Utah,
                    Department of Mathematics, 110 LCB,
                    155 S 1400 E RM 233,
                    Salt Lake City, UT 84112-0090, USA,
                    Tel: +1 801 581 5254,
                    FAX: +1 801 581 4148,
                    e-mail: \path|beebe@math.utah.edu|,
                            \path|beebe@acm.org|,
                            \path|beebe@computer.org| (Internet),
                    URL: \path|https://www.math.utah.edu/~beebe/|"}

%%% ====================================================================
%%% Journal abbreviations:
@String{j-TACCESS              = "ACM Transactions on Accessible Computing"}

%%% ====================================================================
%%% Bibliography entries:
@Article{Sears:2008:I,
  author =       "Andrew Sears and Vicki Hanson",
  title =        "Introduction",
  journal =      j-TACCESS,
  volume =       "1",
  number =       "1",
  pages =        "1:1--1:??",
  month =        may,
  year =         "2008",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1361203.1361204",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Oct 21 18:48:15 MDT 2008",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "1",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Trewin:2008:GE,
  author =       "Shari Trewin",
  title =        "Guest Editorial",
  journal =      j-TACCESS,
  volume =       "1",
  number =       "1",
  pages =        "2:1--2:??",
  month =        may,
  year =         "2008",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1361203.1361205",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Oct 21 18:48:15 MDT 2008",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "2",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Huenerfauth:2008:EAS,
  author =       "Matt Huenerfauth and Liming Zhao and Erdan Gu and Jan
                 Allbeck",
  title =        "Evaluation of {American Sign Language} Generation by
                 Native {ASL} Signers",
  journal =      j-TACCESS,
  volume =       "1",
  number =       "1",
  pages =        "3:1--3:??",
  month =        may,
  year =         "2008",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1361203.1361206",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Oct 21 18:48:15 MDT 2008",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "There are many important factors in the design of
                 evaluation studies for systems that generate animations
                 of American Sign Language (ASL) sentences, and
                 techniques for evaluating natural language generation
                 of written texts are not easily adapted to ASL. When
                 conducting user-based evaluations, several cultural and
                 linguistic characteristics of members of the American
                 Deaf community must be taken into account so as to
                 ensure the accuracy of evaluations involving these
                 users. This article describes an implementation and
                 user-based evaluation (by native ASL signers) of a
                 prototype ASL natural language generation system that
                 produces sentences containing classifier predicates,
                 which are frequent and complex spatial phenomena that
                 previous ASL generators have not produced. Native
                 signers preferred the system's output to Signed English
                 animations -- scoring it higher in grammaticality,
                 understandability, and naturalness of movement. They
                 were also more successful at a comprehension task after
                 viewing the system's classifier predicate animations.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "3",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "accessibility technology for the deaf; American Sign
                 Language; animation; evaluation; natural language
                 generation",
}

@Article{Wobbrock:2008:GCM,
  author =       "Jacob O. Wobbrock and Krzysztof Z. Gajos",
  title =        "Goal Crossing with Mice and Trackballs for People with
                 Motor Impairments: Performance, Submovements, and
                 Design Directions",
  journal =      j-TACCESS,
  volume =       "1",
  number =       "1",
  pages =        "4:1--4:??",
  month =        may,
  year =         "2008",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1361203.1361207",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Oct 21 18:48:15 MDT 2008",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Prior research shows that people with motor
                 impairments face considerable challenges when using
                 conventional mice and trackballs. One challenge is
                 positioning the mouse cursor within confined target
                 areas; another is executing a precise click without
                 slipping. These problems can make mouse pointing in
                 graphical user interfaces very difficult for some
                 people. This article explores goal crossing as an
                 alternative strategy for more accessible target
                 acquisition. In goal crossing, targets are boundaries
                 that are simply crossed by the mouse cursor. Thus, goal
                 crossing avoids the two aforementioned problems. To
                 date, however, researchers have not examined the
                 feasibility of goal crossing for people with motor
                 difficulties. We therefore present a study comparing
                 area pointing and goal crossing. Our performance
                 results indicate that although Fitts' throughput for
                 able-bodied users is higher for area pointing than for
                 goal crossing (4.72 vs. 3.61 bits/s), the opposite is
                 true for users with motor impairments (2.34 vs. 2.88
                 bits/s). However, error rates are higher for goal
                 crossing than for area pointing under a strict
                 definition of crossing errors (6.23\% vs. 1.94\%). We
                 also present path analyses and an examination of
                 submovement velocity, acceleration, and jerk (the
                 change in acceleration over time). These results show
                 marked differences between crossing and pointing and
                 almost categorically favor crossing. An important
                 finding is that crossing reduces jerk for both
                 participant groups, indicating more fluid, stable
                 motion. To help realize the potential of goal crossing
                 for computer access, we offer design concepts for
                 crossing widgets that address the occlusion problem,
                 which occurs when one crossing goal obscures another in
                 persistent mouse-cursor interfaces. This work provides
                 the motivation and initial steps for further
                 exploration of goal crossing on the desktop, and may
                 help researchers and designers to radically reshape
                 user interfaces to provide accessible goal crossing,
                 thereby lowering barriers to access.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "4",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "area pointing; Fitts' law; goal crossing; motor
                 impairments; mouse pointing; movement microstructure;
                 path analysis; Steering law; submovements; Target
                 acquisition; throughput",
}

@Article{Allen:2008:FEM,
  author =       "Meghan Allen and Joanna McGrenere and Barbara Purves",
  title =        "The Field Evaluation of a Mobile Digital Image
                 Communication Application Designed for People with
                 Aphasia",
  journal =      j-TACCESS,
  volume =       "1",
  number =       "1",
  pages =        "5:1--5:??",
  month =        may,
  year =         "2008",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1361203.1361208",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Oct 21 18:48:15 MDT 2008",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "PhotoTalk is an application for a mobile device that
                 allows people with aphasia to capture and manage
                 digital photographs to support face-to-face
                 communication. Unlike any other augmentative and
                 alternative communication device for people with
                 aphasia, PhotoTalk focuses solely on image capture and
                 organization and is designed to be used independently.
                 Our project used a streamlined process with three
                 phases: (1) a rapid participatory design and
                 development phase with two speech-language pathologists
                 acting as representative users, (2) an informal
                 usability study with five aphasic participants, which
                 caught usability problems and provided preliminary
                 feedback on the usefulness of PhotoTalk, and (3) a
                 one-month field evaluation with two aphasic
                 participants followed by a one-month secondary field
                 evaluation with one aphasic participant, which showed
                 that they all used it regularly and relatively
                 independently, although not always for its intended
                 communicative purpose. Our field evaluations
                 demonstrated PhotoTalk's promise in terms of its
                 usability and usefulness in {\em everyday
                 communication}.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "5",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "AAC devices; aphasia; cognitive disability;
                 evaluation; field study; mobile technology;
                 participatory design",
}

@Article{Wandmacher:2008:SAC,
  author =       "Tonio Wandmacher and Jean-Yves Antoine and Franck
                 Poirier and Jean-Paul D{\'e}parte",
  title =        "{Sibylle}, An Assistive Communication System Adapting
                 to the Context and Its User",
  journal =      j-TACCESS,
  volume =       "1",
  number =       "1",
  pages =        "6:1--6:??",
  month =        may,
  year =         "2008",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1361203.1361209",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Oct 21 18:48:15 MDT 2008",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "In this article, we describe the latest version of
                 Sibylle, an AAC system that permits persons who have
                 severe physical disabilities to enter text with any
                 computer application, as well as to compose messages to
                 be read out through speech synthesis. The system
                 consists of a virtual keyboard comprising a set of
                 keypads that allow for the entering of characters or
                 full words by a single-switch selection process. It
                 also includes a sophisticated word prediction component
                 which dynamically calculates the most appropriate words
                 for a given context. This component is auto-adaptive,
                 that is, it learns with every text the user enters. It
                 thus adapts its predictions to the user's language and
                 the current topic of communication as well. So far, the
                 system works for French, German and English. Earlier
                 versions of Sibylle have been used since 2001 in a
                 rehabilitation center (Kerpape, France).",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "6",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "Augmentative and alternative communication; keystroke
                 saving rate; latent semantic analysis; user adaptation;
                 virtual keyboard; word prediction",
}

@Article{Glinert:2008:CPD,
  author =       "Ephraim P. Glinert and Bryant W. York",
  title =        "Computers and People with Disabilities",
  journal =      j-TACCESS,
  volume =       "1",
  number =       "2",
  pages =        "7:1--7:??",
  month =        oct,
  year =         "2008",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1408760.1408761",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Oct 21 18:48:22 MDT 2008",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "{\em Editors' comments:\/} ``Computers and People with
                 Disabilities'' is a reprint of an article originally
                 published in {\em Communications of the ACM\/} in 1992.
                 In this article, Glinert and York issued a
                 ``call-to-arms'' for research and development on
                 technologies for people with disabilities.
                 Specifically, they highlighted that human-computer
                 interfaces at the time generally failed to take into
                 account the needs of disabled users. Their challenge
                 was to change computing culture to address this need.
                 Their article remains timely today in its consideration
                 of government, industry, and private foundations
                 working with researchers to achieve accessible
                 technology. With the recent launch of {\em Transactions
                 on Accessible Computing}, this seems an appropriate
                 time to consider progress in the field since then, as
                 well as current research trends.\par

                 The reprinting of this article is followed by four
                 commentaries by leaders in accessibility research. Each
                 was cited in the 1992 article and each now gives their
                 view on how the field has progressed since that time.
                 In their commentaries, some themes emerge and new
                 technologies are discussed. In short, their
                 commentaries point to both a great deal of progress and
                 a lack of progress. All four of the commentators note
                 areas where computing continues to present barriers
                 rather than assist users with
                 disabilities.\par

                 Alistair Edwards sets the stage with a look back at
                 interfaces and input technologies popular in 1992, with
                 attention paid to access problems related to graphical
                 user interfaces (GUIs) that have consumed much research
                 energy since 1992. Alan Newell highlights disability
                 concerns that were not given large research
                 consideration in 1992, but which have now become
                 significant due, in large part, to changes in global
                 demographics. Specifically, he brings visibility to
                 research on older adults and cognitively disabled
                 users.\par

                 A number of advances in technology and methodology
                 since 1992 are discussed by the commentators. The
                 ubiquity of computing and its critical adoption in the
                 world today are highlighted. The commentators reflect,
                 for example, on technologies produced by research for
                 disabled users that have now impacted mainstream
                 offerings, on standards for accessibility that have
                 emerged worldwide and their impact, and on assistive
                 technologies that have been developed. Critically, the
                 proliferation of the World Wide Web was not foreseen in
                 1992 and its use by people with disabilities is
                 discussed. Gregg Vanderheiden considers the opportunity
                 afforded by the Web to provide widespread availability
                 of accessible software.\par

                 Glinert and York discussed the need for design for
                 disability. While research relevant to users with
                 disabilities is gaining momentum, the commentators
                 indicate that users with disabilities still struggle
                 with much of today's IT. The commentators note current
                 trends toward designs that take into account disabled
                 users. Notably, Richard Ladner ends his commentary by
                 mentioning the issue of empowerment. Users with
                 disabilities have moved beyond simply needing the
                 protections of regulation that were emerging in 1992,
                 to being active participants in designing solutions to
                 allow full participation in the current social,
                 political, and economic environments.\par

                 Together, these articles provide a great deal of food
                 for thought on technology advances and new
                 considerations of accessible technology. Has the change
                 in computing culture envisioned by Glinert and York
                 taken hold?\par

                 Vicki L. Hanson and Andrew Sears

                 Co-Editors in Chief",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "7",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "Accessibility; HCI; inclusive design; interfaces;
                 ubiquitous computing; universal access; Web",
}

@Article{Edwards:2008:KTC,
  author =       "Alistair D. N. Edwards",
  title =        "Keeping Up with Technology: Commentary on {``Computers
                 and People with Disabilities''}",
  journal =      j-TACCESS,
  volume =       "1",
  number =       "2",
  pages =        "8:1--8:??",
  month =        oct,
  year =         "2008",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1408760.1408762",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Oct 21 18:48:22 MDT 2008",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "This is a personal response to rereading the Glinert
                 and York [1992] article ``Computers and People with
                 Disabilities.'' Comparing the world of assistive
                 technology as it was in 1992 and as it now appears is
                 instructive in terms of the things which have changed
                 --- and those which have not. The technology has
                 certainly developed. This applies both to the
                 mainstream and to the assistive technology which aims
                 to make the mainstream accessible. So, in 1992, the GUI
                 was a threat to visually impaired computer users; now
                 there are powerful screen readers available. Yet what
                 does not appear to have changed much is the fact that
                 assistive technologies continue to lag behind the
                 mainstream, constantly having to ``catch up.'' Also,
                 while there has been some increase in awareness of the
                 need for accessibility, there is still scope for that
                 awareness to be translated into action.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "8",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "Interfaces",
}

@Article{Newell:2008:ACP,
  author =       "Alan F. Newell",
  title =        "Accessible Computing --- Past Trends and Future
                 Suggestions: Commentary on {``Computers and People with
                 Disabilities''}",
  journal =      j-TACCESS,
  volume =       "1",
  number =       "2",
  pages =        "9:1--9:??",
  month =        oct,
  year =         "2008",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1408760.1408763",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Oct 21 18:48:22 MDT 2008",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "This article gives a personal perspective on Glinert
                 and York's 1992 paper, focusing on whether and how the
                 situation has changed over the past 15 years, and makes
                 recommendations for the future of the field of
                 accessible computing with a particular focus on the
                 needs of older people and people with cognitive
                 dysfunction.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "9",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "Assistive technology; cognitive dysfunction; inclusive
                 design; older and disabled people; theater in design",
}

@Article{Vanderheiden:2008:UAC,
  author =       "Gregg C. Vanderheiden",
  title =        "Ubiquitous Accessibility, Common Technology Core, and
                 Micro-Assistive Technology: Commentary on {``Computers
                 and People with Disabilities''}",
  journal =      j-TACCESS,
  volume =       "1",
  number =       "2",
  pages =        "10:1--10:??",
  month =        oct,
  year =         "2008",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1408760.1408764",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Oct 21 18:48:22 MDT 2008",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Much has changed since 1992 when the original CACM
                 article by Ephraim Glinert and Bryant York was
                 published. In the early 1990's, accessibility was
                 mostly an add-on, with only Apple computers having
                 built-in access. Computers were playing an increasingly
                 important role in education and employment, but had not
                 yet integrated themselves into all aspects of life as
                 completely as they have today. The World
                 Wide Web as we know it had not yet been born. Today
                 there are accessibility features built directly into
                 every major operating system, and one OS even includes
                 a built-in screen reader. Assistive technologies are
                 more numerous and capable. And awareness of the
                 importance of access is much higher. However, some
                 things have not changed. Assistive technologies lag
                 behind mainstream technologies in both compatibility
                 and functionality. Effective assistive technologies are
                 often beyond the financial reach of those who need
                 them. Effective assistive technologies are not
                 available in many countries and many languages, even
                 though technology is reaching into education,
                 employment, and daily living of more countries and more
                 people in each country every year. In moving forward we
                 need to build on what we have achieved and explore new
                 concepts, such as a {\em common technical core,
                 ubiquitous accessibility, micro-assistive technology},
                 and {\em free public accessibility}. Cooperative and
                 collaborative approaches also need to be explored if we
                 are to have any hope of catching up and keeping up with
                 the ever-accelerating mainstream information and
                 communication technologies.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "10",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "Interfaces; micro-AT; ubiquitous accessibility",
}

@Article{Ladner:2008:AEC,
  author =       "Richard E. Ladner",
  title =        "Access and Empowerment: Commentary on {``Computers and
                 People with Disabilities''}",
  journal =      j-TACCESS,
  volume =       "1",
  number =       "2",
  pages =        "11:1--11:??",
  month =        oct,
  year =         "2008",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1408760.1408765",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Oct 21 18:48:22 MDT 2008",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "A number of positive changes have taken place since
                 Glinert and York's 1992 call-to-arms. Progress reviewed
                 in this article includes evolving considerations of
                 universal design in the marketplace, ubiquitous
                 computing with accessibility features, increasing
                 computing research and conference venues that address
                 needs of users with disabilities, and attention to the
                 importance of user empowerment in development.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "11",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "ubiquitous computing; Universal design; user-centered
                 design; Web",
}

@Article{Crossan:2008:MTP,
  author =       "Andrew Crossan and Stephen Brewster",
  title =        "Multimodal Trajectory Playback for Teaching Shape
                 Information and Trajectories to Visually Impaired
                 Computer Users",
  journal =      j-TACCESS,
  volume =       "1",
  number =       "2",
  pages =        "12:1--12:??",
  month =        oct,
  year =         "2008",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1408760.1408766",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Oct 21 18:48:22 MDT 2008",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "There are difficulties in presenting nontextual or
                 dynamic information to blind or visually impaired users
                 through computers. This article examines the potential
                 of haptic and auditory trajectory playback as a method
                 of teaching shapes and gestures to visually impaired
                 people. Two studies are described which test the
                 success of teaching simple shapes. The first study
                 examines haptic trajectory playback alone, played
                 through a force-feedback device, and compares
                 performance of visually impaired users with sighted
                 users. It demonstrates that the task is significantly
                 harder for visually impaired users. The second study
                 builds on these results, combining force-feedback with
                 audio to teach visually impaired users to recreate
                 shapes. The results suggest that users performed
                 significantly better when presented with multimodal
                 haptic and audio playback of the shape, rather than
                 haptic only. Finally, an initial test of these ideas in
                 an application context is described, with sighted
                 participants describing drawings to visually impaired
                 participants through touch and sound. This study
                 demonstrates in what situations trajectory playback can
                 play a useful role in a collaborative setting.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "12",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "Accessibility; evaluation; multimodal; trajectory
                 playback",
}

@Article{McCoy:2009:ISI,
  author =       "Kathleen F. McCoy and Annalu Waller",
  title =        "Introduction to the Special Issue on {AAC}",
  journal =      j-TACCESS,
  volume =       "1",
  number =       "3",
  pages =        "13:1--13:??",
  month =        feb,
  year =         "2009",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1497302.1497303",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Jun 1 18:10:33 MDT 2009",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "This article presents an introduction to the special
                 issue on Augmentative and Alternative Communication
                 (AAC).",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "13",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "Alternative and augmentative communication;
                 human-computer interaction",
}

@Article{Ghedira:2009:CEC,
  author =       "Souhir Ghedira and Pierre Pino and Guy Bourhis",
  title =        "Conception and Experimentation of a Communication
                 Device with Adaptive Scanning",
  journal =      j-TACCESS,
  volume =       "1",
  number =       "3",
  pages =        "14:1--14:??",
  month =        feb,
  year =         "2009",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1497302.1497304",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Jun 1 18:10:33 MDT 2009",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "For some people with motor disabilities and speech
                 disorders, the only way to communicate and to have some
                 control over their environment is through the use of a
                 controlled scanning system operated by a single switch.
                 The main problem with these systems is that the
                 communication process tends to be exceedingly slow,
                 since the system must scan through the available
                 choices one at a time until the desired message is
                 reached. One way of raising the speed of message
                 selection is to optimize the elementary scanning delay
                 in real time so that it allows the user to make
                 selections as quickly as possible without making too
                 many errors. With this objective in mind, this article
                 presents a method for optimizing the scanning delay,
                 which is based on an analysis of the data recorded in
                 ``log files'' while applying the EDiTH system [Digital
                 Teleaction Environment for People with Disabilities].
                 This analysis makes it possible to develop a
                 human-machine interaction model specific to the study,
                 and then to establish an adaptive algorithm for the
                 calculation of the scanning delay. The results obtained
                 with imposed scenarios and then in ecological
                 situations provide confirmation that our algorithms
                 are effective in dynamically adapting the scan speed. The
                 main advantage offered by the procedure proposed is
                 that it works on timing information alone and thus does
                 not require any knowledge of the scanning device
                 itself. This allows it to work with any scanning
                 device.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "14",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "adaptive scanning rate; Alternative communication;
                 Model Human Processor; modeling; scanning system",
}

@Article{Fried-Oken:2009:EVO,
  author =       "Melanie Fried-Oken and Charity Rowland and Glory Baker
                 and Mayling Dixon and Carolyn Mills and Darlene Schultz
                 and Barry Oken",
  title =        "The Effect of Voice Output on {AAC}-Supported
                 Conversations of Persons with {Alzheimer}'s Disease",
  journal =      j-TACCESS,
  volume =       "1",
  number =       "3",
  pages =        "15:1--15:??",
  month =        feb,
  year =         "2009",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1497302.1497305",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Jun 1 18:10:33 MDT 2009",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "The purpose of this study was to determine whether the
                 presence or absence of digitized 1--2-word voice output
                 on a direct selection, customized Augmentative and
                 Alternative Communication (AAC) device would affect the
                 impoverished conversations of persons with dementia.
                 Thirty adults with moderate Alzheimer's disease
                 participated in two personally relevant conversations
                 with an AAC device. For twelve of the participants the
                 AAC device included voice output. The AAC device was
                 the Flexiboard$^{TM}$ containing sixteen messages
                 needed to discuss a favorite autobiographical topic
                 chosen by the participant and his/her family
                 caregivers. Ten-minute conversations were videotaped in
                 participants' residences and analyzed for four
                 conversational measures related to the participants'
                 communicative behavior. Results show that AAC devices
                 with digitized voice output depress conversational
                 performance and distract participants with moderate
                 Alzheimer's disease as compared to similar devices
                 without voice output. There were significantly more
                 1-word utterances and fewer total utterances when AAC
                 devices included voice output, and the rate of topic
                 elaborations/initiations was significantly lower when
                 voice output was present. Discussion about the novelty
                 of voice output for this population of elders and the
                 need to train elders to use this technology is
                 provided.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "15",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "Alzheimer's disease; Augmentative and Alternative
                 Communication (AAC); Dementia; digitized speech
                 synthesis; language",
}

@Article{Waller:2009:ESP,
  author =       "Annalu Waller and Rolf Black and David A. O'Mara and
                 Helen Pain and Graeme Ritchie and Ruli Manurung",
  title =        "Evaluating the {STANDUP} Pun Generating Software with
                 Children with Cerebral Palsy",
  journal =      j-TACCESS,
  volume =       "1",
  number =       "3",
  pages =        "16:1--16:??",
  month =        feb,
  year =         "2009",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1497302.1497306",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Jun 1 18:10:33 MDT 2009",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "The interactive STANDUP software was developed to
                 provide children who use augmentative and alternative
                 communication (AAC) with a ``language playground.'' The
                 software provides appropriate functionality for users
                 with physical, speech, and language impairments to
                 generate and tell novel punning riddles at different
                 levels of complexity. STANDUP was evaluated with nine
                 children with cerebral palsy during an eight-week
                 study. Results show that the participants were able to
                 generate and tell novel jokes with minimal or no
                 support. The use of STANDUP impacted favorably on
                 general AAC use. The study results also suggested that
                 STANDUP could potentially have a positive effect on
                 social and pragmatic skills. Further research to
                 investigate the impact of STANDUP on communication
                 skills is proposed. Suggestions for future software
                 development include providing users with opportunities
                 to complete jokes and to integrate online dictionaries
                 when new vocabulary is encountered.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "16",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "Alternative and augmentative communication;
                 computational humor; speech generation devices",
}

@Article{Trnka:2009:UIW,
  author =       "Keith Trnka and John McCaw and Debra Yarrington and
                 Kathleen F. McCoy and Christopher Pennington",
  title =        "User Interaction with Word Prediction: The Effects of
                 Prediction Quality",
  journal =      j-TACCESS,
  volume =       "1",
  number =       "3",
  pages =        "17:1--17:??",
  month =        feb,
  year =         "2009",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1497302.1497307",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Jun 1 18:10:33 MDT 2009",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Word prediction systems can reduce the number of
                 keystrokes required to form a message in a letter-based
                 AAC system. It has been questioned, however, whether
                 such savings translate into an enhanced communication
                 rate due to the additional overhead (e.g., shifting of
                 focus and repeated scanning of a prediction list)
                 required in using such a system. Our hypothesis is that
                 word prediction has high potential for enhancing AAC
                 communication rate, but the amount is dependent in a
                 complex way on the accuracy of the predictions. Due to
                 significant user interface variations in AAC systems
                 and the potential bias of prior word prediction
                 experience on existing devices, this hypothesis is
                 difficult to verify. We present a study of two
                 different word prediction methods compared against
                 letter-by-letter entry at simulated AAC communication
                 rates. We find that word prediction systems can in fact
                 speed communication rate (an advanced system gave a
                 58.6\% improvement), and that a more accurate word
                 prediction system can raise the communication rate
                 higher than is explained by the additional accuracy of
                 the system alone due to better utilization (93.6\%
                 utilization for advanced versus 78.2\% for basic).",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "17",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "communication rate; user study; Word prediction",
}

@Article{Czaja:2009:ISI,
  author =       "Sara J. Czaja and Peter Gregor and Vicki L. Hanson",
  title =        "Introduction to the Special Issue on Aging and
                 Information Technology",
  journal =      j-TACCESS,
  volume =       "2",
  number =       "1",
  pages =        "1:1--1:??",
  month =        may,
  year =         "2009",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1525840.1525841",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Jun 1 18:10:43 MDT 2009",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "This article provides an introduction to the Special
                 Issue on Aging.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "1",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "Aging; cognitive aging; instruction; menu design;
                 older adults; pen interfaces; quality of life
                 technology; spoken dialog systems; user privacy
                 preferences; video modeling; voice interfaces",
}

@Article{Wolters:2009:BOD,
  author =       "Maria Wolters and Kallirroi Georgila and Johanna D.
                 Moore and Sarah E. MacPherson",
  title =        "Being Old Doesn't Mean Acting Old: How Older Users
                 Interact with Spoken Dialog Systems",
  journal =      j-TACCESS,
  volume =       "2",
  number =       "1",
  pages =        "2:1--2:??",
  month =        may,
  year =         "2009",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1525840.1525842",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Jun 1 18:10:43 MDT 2009",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Most studies on adapting voice interfaces to older
                 users work top-down by comparing the interaction
                 behavior of older and younger users. In contrast, we
                 present a bottom-up approach. A statistical cluster
                 analysis of 447 appointment scheduling dialogs between
                 50 older and younger users and 9 simulated spoken
                 dialog systems revealed two main user groups, a
                 ``social'' group and a ``factual'' group. ``Factual''
                 users adapted quickly to the systems and interacted
                 efficiently with them. ``Social'' users, on the other
                 hand, were more likely to treat the system like a
                 human, and did not adapt their interaction style. While
                 almost all ``social'' users were older, over a third of
                 all older users belonged in the ``factual'' group.
                 Cognitive abilities and gender did not predict group
                 membership. We conclude that spoken dialog systems
                 should adapt to users based on observed behavior, not
                 on age.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "2",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "Aging; clustering; cognitive aging; spoken dialog
                 systems; voice interfaces",
}

@Article{Moffatt:2009:EMI,
  author =       "Karyn Moffatt and Joanna McGrenere",
  title =        "Exploring Methods to Improve Pen-Based Menu Selection
                 for Younger and Older Adults",
  journal =      j-TACCESS,
  volume =       "2",
  number =       "1",
  pages =        "3:1--3:??",
  month =        may,
  year =         "2009",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1525840.1525843",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Jun 1 18:10:43 MDT 2009",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Tablet PCs are gaining popularity, but many
                 individuals still struggle with pen-based interaction.
                 In a previous baseline study, we examined the types of
                 difficulties younger and older adults encounter when
                 using pen-based input. The research reported in this
                 article seeks to address one of these errors, namely,
                 missing just below. This error occurs in a menu
                 selection task when a user's selection pattern is
                 downwardly shifted, such that the top edge of the menu
                 item below the target is selected relatively often,
                 while the corresponding top edge of the target itself
                 is seldom selected. We developed two approaches for
                 addressing missing just below errors: reassigning
                 selections along the top edge and deactivating them. In
                 a laboratory evaluation, only the deactivated edge
                 approach showed promise overall. Further analysis of
                 our data revealed that individual differences played a
                 large role in our results and identified a new source
                 of selection difficulty. Specifically, we observed two
                 error-prone groups of users: the low hitters, who, like
                 participants in the baseline study, made missing just
                 below errors, and the high hitters, who, in contrast,
                 had difficulty with errors on the item above. All but
                 one of the older participants fell into one of these
                 error-prone groups, reinforcing that older users do
                 need better support for selecting menu items with a
                 pen. Preliminary analysis of the performance data
                 suggests both of our approaches were beneficial for the
                 low hitters, but that additional techniques are needed
                 to meet the needs of the high hitters and to address
                 the challenge of supporting both groups in a single
                 interface.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "3",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "aging; interaction techniques; menu design; older
                 users; Pen-based target acquisition",
}

@Article{Struve:2009:VMT,
  author =       "Doreen Struve and Hartmut Wandke",
  title =        "Video Modeling for Training Older Adults to Use New
                 Technologies",
  journal =      j-TACCESS,
  volume =       "2",
  number =       "1",
  pages =        "4:1--4:??",
  month =        may,
  year =         "2009",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1525840.1525844",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Jun 1 18:10:43 MDT 2009",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "The increasing permeation of technology in our society
                 leads to the challenge that everybody needs to interact
                 with technology systems. Older adults often meet
                 difficulties while trying to interact with complex,
                 demanding systems in their daily life. One approach to
                 enable older adults to use new technologies in a safe
                 and efficient way is the provision of training
                 programs. In this article we report about a promising
                 training strategy using video modeling in conjunction
                 with other instructional methods to enhance learning.
                 Cognitive as well as socio-motivational aspects are
                 addressed. We assessed whether guided error training in
                 video modeling would improve learning outcomes for a
                 Ticket Vending Machine (TVM). To investigate whether the
                 training method might be beneficial for younger adults
                 as well, we compared guided error training with
                 error-free training in 40 younger and 40 older adult
                 learners. Younger and older participants
                 made fewer mistakes in guided error training, but no
                 differences occurred in task completion times.
                 Moreover, self-efficacy increased with training for
                 both age groups, but no significant differences were
                 found for the training condition. Analysis of knowledge
                 gains showed a significant benefit of guided error
                 training in structural knowledge. Overall, the results
                 showed that guided error training may enhance learning
                 for younger and older adults who are learning to use
                 technology.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "4",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "guided error training; Instruction; older adults;
                 self-efficacy; technology use; video modeling",
}

@Article{Beach:2009:R,
  author =       "Scott Beach and {ACM TACCESS Staff} and Richard Schulz
                 and Julie Downs and Judith Matthews and Bruce Barron
                 and Katherine Seelman",
  title =        "2008 Reviewers",
  journal =      j-TACCESS,
  volume =       "2",
  number =       "1",
  pages =        "6:1--6:??",
  month =        may,
  year =         "2009",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1525840.1540652",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Jun 1 18:10:43 MDT 2009",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Technology aimed at enhancing function and enabling
                 independent living among older and disabled adults is a
                 growing field of research. Privacy concerns are a
                 potential barrier to adoption of such technology. Using
                 data from a national Web survey (n=1,518), we focus on
                 perceived acceptability of sharing information about
                 toileting, taking medications, moving about the home,
                 cognitive ability, driving behavior, and vital signs
                 with five targets: family, healthcare providers,
                 insurance companies, researchers, and government. We
                 also examine acceptability of recording the behaviors
                 using three methods: video with sound, video without
                 sound, and sensors. Results show that sharing or
                 recording information about toileting behavior; sharing
                 information with the government and insurance
                 companies; and recording the information using video
                 were least acceptable. Respondents who reported current
                 disability were significantly more accepting of sharing
                 and recording of information than nondisabled adults,
                 controlling for demographic variables, general
                 technology attitudes, and assistive device use. Results
                 for age were less consistent, although older
                 respondents tended to be more accepting than younger
                 respondents. The study provides empirical evidence from
                 a large national sample of the implicit trade-offs
                 between privacy and the potential for improved health
                 among older and disabled adults in quality of life
                 technology applications.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "6",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "quality of life technology; User privacy preferences",
}

@Article{Barreto:2009:GE,
  author =       "Armando Barreto and Torsten Felzer",
  title =        "Guest Editorial",
  journal =      j-TACCESS,
  volume =       "2",
  number =       "2",
  pages =        "7:1--7:??",
  month =        jun,
  year =         "2009",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1530064.1530065",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Mar 16 10:04:01 MDT 2010",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "7",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Hailpern:2009:AHC,
  author =       "Joshua Hailpern and Karrie Karahalios and James Halle
                 and Laura Dethorne and Mary-Kelsey Coletto",
  title =        "{A3}: {HCI} Coding Guideline for Research Using Video
                 Annotation to Assess Behavior of Nonverbal Subjects
                 with Computer-Based Intervention",
  journal =      j-TACCESS,
  volume =       "2",
  number =       "2",
  pages =        "8:1--8:??",
  month =        jun,
  year =         "2009",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1530064.1530066",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Mar 16 10:04:01 MDT 2010",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "8",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Huenerfauth:2009:LMM,
  author =       "Matt Huenerfauth",
  title =        "A Linguistically Motivated Model for Speed and Pausing
                 in Animations of {American Sign Language}",
  journal =      j-TACCESS,
  volume =       "2",
  number =       "2",
  pages =        "9:1--9:??",
  month =        jun,
  year =         "2009",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1530064.1530067",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Mar 16 10:04:01 MDT 2010",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "9",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Price:2009:DEP,
  author =       "Kathleen J. Price and Andrew Sears",
  title =        "The Development and Evaluation of Performance-Based
                 Functional Assessment: a Methodology for the
                 Measurement of Physical Capabilities",
  journal =      j-TACCESS,
  volume =       "2",
  number =       "2",
  pages =        "10:1--10:??",
  month =        jun,
  year =         "2009",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1530064.1530068",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Mar 16 10:04:01 MDT 2010",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "10",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Trewin:2009:EVM,
  author =       "Shari Trewin and Mark Laff and Vicki Hanson and Anna
                 Cavender",
  title =        "Exploring Visual and Motor Accessibility in Navigating
                 a Virtual World",
  journal =      j-TACCESS,
  volume =       "2",
  number =       "2",
  pages =        "11:1--11:??",
  month =        jun,
  year =         "2009",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1530064.1530069",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Mar 16 10:04:01 MDT 2010",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "11",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Walker:2010:UDA,
  author =       "B. N. Walker and L. M. Mauney",
  title =        "Universal Design of Auditory Graphs: a Comparison of
                 Sonification Mappings for Visually Impaired and Sighted
                 Listeners",
  journal =      j-TACCESS,
  volume =       "2",
  number =       "3",
  pages =        "12:1--12:??",
  month =        mar,
  year =         "2010",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1714458.1714459",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Mar 16 10:04:03 MDT 2010",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "12",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Feng:2010:CUC,
  author =       "Jinjuan Feng and Jonathan Lazar and Libby Kumin and
                 Ant Ozok",
  title =        "Computer Usage by Children with {Down Syndrome}:
                 Challenges and Future Research",
  journal =      j-TACCESS,
  volume =       "2",
  number =       "3",
  pages =        "13:1--13:??",
  month =        mar,
  year =         "2010",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1714458.1714460",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Mar 16 10:04:03 MDT 2010",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "13",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Pino:2010:IOS,
  author =       "Alexandros Pino and Georgios Kouroupetroglou",
  title =        "{ITHACA}: An Open Source Framework for Building
                 Component-Based Augmentative and Alternative
                 Communication Applications",
  journal =      j-TACCESS,
  volume =       "2",
  number =       "4",
  pages =        "14:1--14:??",
  month =        jun,
  year =         "2010",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1786774.1786775",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Aug 14 16:52:06 MDT 2010",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "As an answer to the disabled community's odyssey to
                 gain access to adaptable, modular, multilingual, cheap
                 and sustainable Augmentative and Alternative
                 Communication (AAC) products, we propose the use of the
                 ITHACA framework. It is a software environment for
                 building component-based AAC applications, grounded in
                 the Design for All principles and a hybrid---{\em
                 community\/} and {\em commercial\/}---Open Source
                 development model. ITHACA addresses developers and
                 vendors, as well as the people who use AAC. We
                 introduce a new viewpoint on the AAC product
                 design-develop-distribute lifecycle, and a novel way to
                 search-select-modify-maintain the AAC aid. ITHACA
                 provides programmers with a set of tools and reusable
                 Open Source code for building AAC software components.
                 It also enables AAC product vendors to put together
                 sophisticated applications from independently
                 premanufactured, free or commercial software parts
                 available on the Web. Furthermore, it provides people who use
                 AAC with a variety of compatible AAC software products
                 which incorporate multimodal, user-tailored interfaces
                 that can fulfill their changing needs. The ITHACA
                 architecture and the proposed fusion of past and
                 current approaches, trends and technologies are
                 explained. ITHACA has been successfully applied by
                 implementing a family of AAC products, based on
                 interchangeable components. Several ready-to-use
                 ITHACA-based components, including on-screen keyboards,
                 Text-to-Speech, symbol selection sets, e-chatting,
                 emailing, and scanning-based input, as well as four
                 complete communication aids addressing different user
                 cases, have been developed. This demonstration showed
                 good acceptance of the ITHACA applications and
                 substantial improvement of the end users' communication
                 skills. Developers' experience of working on ITHACA's
                 Open Source projects was also evaluated positively.
                 More importantly, the potential contribution to the
                 AAC community of combining a component-based framework
                 with an Open Source development model emerged.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "14",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "Augmentative and alternative communication; component;
                 design for all; framework; open source",
}

@Article{Sauer:2010:TUU,
  author =       "Graig Sauer and Jonathan Lazar and Harry Hochheiser
                 and Jinjuan Feng",
  title =        "Towards a Universally Usable Human Interaction Proof:
                 Evaluation of Task Completion Strategies",
  journal =      j-TACCESS,
  volume =       "2",
  number =       "4",
  pages =        "15:1--15:??",
  month =        jun,
  year =         "2010",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1786774.1786776",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Aug 14 16:52:06 MDT 2010",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "The need for security features to stop spam and bots
                 has prompted research aimed at developing human
                 interaction proofs (HIPs) that are both secure and easy
                 to use. The primarily visual techniques used in these
                 HIP tools present difficulties for users with visual
                 impairments. This article reports on the development of
                 Human-Interaction Proof, Universally Usable (HIPUU), a
                 new approach to human-interaction proofs based on
                 identification of a series of sound/image pairs.
                 Simultaneous presentation of a single, unified task in
                 two alternative modalities provides multiple paths to
                 successful task completion. We present two alternative
                 task completion strategies, based on differing input
                 strategies (menu-based vs. free text entry). Empirical
                 results from studies involving both blind and sighted
                 users validate both the usability and accessibility of
                 these differing strategies, with blind users achieving
                 successful task completion rates above 90\%. The
                 strengths of the alternate task completion strategies
                 are discussed, along with possible approaches for
                 improving the robustness of HIPUU.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "15",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "blind users; CAPTCHA; HIP; security; universal
                 usability",
}
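
%%% A minimal Python sketch of the idea in the Sauer et al. abstract above:
%%% one sound/image pair underlies both task-completion strategies, and a
%%% response can be checked either as a menu selection or as free-text
%%% entry.  The labels, normalization rule, and function names are
%%% assumptions made for illustration, not the published HIPUU system.
%%%
%%%     import random
%%%
%%%     # challenge id -> media for the two modalities (assumed sample data)
%%%     PAIRS = {
%%%         "dog":   {"image": "dog.png",   "audio": "dog.wav"},
%%%         "piano": {"image": "piano.png", "audio": "piano.wav"},
%%%         "train": {"image": "train.png", "audio": "train.wav"},
%%%     }
%%%
%%%     def new_challenge():
%%%         """Pick one pair; image and sound present the same label."""
%%%         label = random.choice(list(PAIRS))
%%%         options = random.sample(list(PAIRS), k=len(PAIRS))  # menu choices
%%%         return label, PAIRS[label], options
%%%
%%%     def check_menu(label, chosen_option):
%%%         """Menu-based strategy: exact match against the chosen option."""
%%%         return chosen_option == label
%%%
%%%     def check_free_text(label, typed_answer):
%%%         """Free-text strategy: tolerant match after simple normalization."""
%%%         return typed_answer.strip().lower() == label
%%%
%%%     if __name__ == "__main__":
%%%         label, media, options = new_challenge()
%%%         print("present", media["audio"], "and", media["image"], options)
%%%         print(check_menu(label, label), check_free_text(label, label.upper()))
%%%
%%% Accepting either checker for the same challenge mirrors the abstract's
%%% point that one unified task offers multiple paths to completion.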

@Article{Randolph:2010:AFN,
  author =       "Adriane B. Randolph and Melody M. Moore Jackson",
  title =        "Assessing Fit of Nontraditional Assistive
                 Technologies",
  journal =      j-TACCESS,
  volume =       "2",
  number =       "4",
  pages =        "16:1--16:??",
  month =        jun,
  year =         "2010",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1786774.1786777",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Aug 14 16:52:06 MDT 2010",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "There are a variety of brain-based interface methods
                 that depend on measuring small changes in brain
                 signals or properties. These methods have typically
                 been used for nontraditional assistive technology
                 applications. Nontraditional assistive technology is
                 generally targeted for users with severe motor
                 disabilities which may last long-term due to illness or
                 injury or short-term due to situational disabilities.
                 Control of a nontraditional assistive technology can
                 vary widely across users depending upon many factors
                 ranging from health to experience. Unfortunately, there
                 is no systematic method for assessing usability of
                 nontraditional assistive technologies to achieve the
                 best control. The current methods to accommodate users
                 through trial-and-error result in the loss of valuable
                 time and resources as users sometimes have diminishing
                 abilities or suffer from terminal illnesses. This work
                 describes a methodology for objectively measuring an
                 individual's ability to control a specific
                 nontraditional assistive technology, thus expediting
                 the technology-fit process.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "16",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "assistive technology; brain-based interfaces;
                 brain-computer interface; direct-brain interface;
                 functional near-infrared; galvanic skin response;
                 individual characteristics; user profiles",
}

@Article{Leung:2010:MLI,
  author =       "Rock Leung and Leah Findlater and Joanna McGrenere and
                 Peter Graf and Justine Yang",
  title =        "Multi-Layered Interfaces to Improve Older Adults'
                 Initial Learnability of Mobile Applications",
  journal =      j-TACCESS,
  volume =       "3",
  number =       "1",
  pages =        "1:1--1:??",
  month =        sep,
  year =         "2010",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1838562.1838563",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Oct 8 18:21:23 MDT 2010",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Mobile computing devices can offer older adults (ages
                 65+) support in their daily lives, but older adults
                 often find such devices difficult to learn and use. One
                 potential design approach to improve the learnability
                 of mobile devices is a Multi-Layered (ML) interface,
                 where novice users start with a reduced-functionality
                 interface layer that only allows them to perform basic
                 tasks, before progressing to a more complex interface
                 layer when they are comfortable. We studied the effects
                 of an ML interface on older adults' performance in
                 learning tasks on a mobile device. We conducted a
                 controlled experiment with 16 older (ages 65--81) and
                 16 younger (ages 21--36) participants, who performed
                 tasks on either a 2-layer or a nonlayered (control)
                 address book application, implemented on a commercial
                 smart phone. We found that the ML interface's
                 Reduced-Functionality layer, compared to the control's
                 Full-Functionality layer, better helped users to master
                 a set of basic tasks and to retain that ability 30
                 minutes later. When users transitioned from the
                 Reduced-Functionality to the Full-Functionality
                 interface layer, their performance on the previously
                 learned tasks was negatively affected, but no negative
                 impact was found on learning new, advanced tasks.
                 Overall, the ML interface provided greater benefit for
                 older participants than for younger participants in
                 terms of task completion time during initial learning,
                 perceived complexity, and preference. We discuss how
                 the ML interface approach is suitable for improving the
                 learnability of mobile applications, particularly for
                 older adults.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "1",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "Age-related differences; learnability; menu design;
                 mobile devices; multi-layered interfaces; older adults;
                 user study",
}
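
%%% The Leung et al. abstract above describes the Multi-Layered (ML)
%%% interface approach: novices start in a reduced-functionality layer and
%%% later switch to the full-functionality layer.  A minimal Python sketch
%%% follows; the feature names and the two-layer split are assumptions for
%%% illustration, not the address-book application used in the study.
%%%
%%%     # Reduced layer exposes only basic tasks; the full layer adds more.
%%%     BASIC = ["Add contact", "Call contact", "Delete contact"]
%%%     ADVANCED = ["Assign ringtone", "Export contacts", "Link to email"]
%%%
%%%     class LayeredMenu:
%%%         def __init__(self):
%%%             self.layer = "reduced"      # novice users start here
%%%
%%%         def items(self):
%%%             """Menu entries visible in the current interface layer."""
%%%             return BASIC if self.layer == "reduced" else BASIC + ADVANCED
%%%
%%%         def switch_to_full(self):
%%%             """User-initiated transition once basic tasks are mastered."""
%%%             self.layer = "full"
%%%
%%%     menu = LayeredMenu()
%%%     print(menu.items())                 # reduced-functionality layer
%%%     menu.switch_to_full()
%%%     print(menu.items())                 # full-functionality layer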

@Article{Huenerfauth:2010:AAM,
  author =       "Matt Huenerfauth and Pengfei Lu",
  title =        "Accurate and Accessible Motion-Capture Glove
                 Calibration for Sign Language Data Collection",
  journal =      j-TACCESS,
  volume =       "3",
  number =       "1",
  pages =        "2:1--2:??",
  month =        sep,
  year =         "2010",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1838562.1838564",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Oct 8 18:21:23 MDT 2010",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Motion-capture recordings of sign language are used in
                 research on automatic recognition of sign language or
                 generation of sign language animations, which have
                 accessibility applications for deaf users with low
                 levels of written-language literacy. Motion-capture
                 gloves are used to record the wearer's handshape.
                 Unfortunately, they require a time-consuming and
                 inexact calibration process each time they are worn.
                 This article describes the design and evaluation of a
                 new calibration protocol for motion-capture gloves,
                 which is designed to make the process more efficient
                 and to be accessible for participants who are deaf and
                 use American Sign Language (ASL). The protocol was
                 evaluated experimentally; deaf ASL signers wore the
                 gloves, were calibrated (using the new protocol and
                 using a calibration routine provided by the glove
                 manufacturer), and were asked to perform sequences of
                 ASL handshapes. Five native ASL signers rated the
                 correctness and understandability of the collected
                 handshape data. In an additional evaluation, ASL
                 signers were asked to perform ASL stories while wearing
                 the gloves and a motion-capture bodysuit (in some cases
                 our new calibration protocol was used, in other cases,
                 the standard protocol). Later, twelve native ASL
                 signers watched animations produced from this
                 motion-capture data and answered comprehension
                 questions about the stories. In both evaluation
                 studies, the new protocol received significantly higher
                 scores than the standard calibration. The protocol has
                 been made freely available online, and it includes
                 directions for the researcher, images and videos of how
                 participants move their hands during the process, and
                 directions for participants (as ASL videos and English
                 text).",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "2",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
  keywords =     "accessibility technology for the deaf; American Sign
                 Language; animation; calibration; CyberGlove;
                 Motion-capture glove",
}

@Article{Zhu:2010:IGB,
  author =       "Shaojian Zhu and Jinjuan Feng and Andrew Sears",
  title =        "Investigating Grid-Based Navigation: The Impact of
                 Physical Disability",
  journal =      j-TACCESS,
  volume =       "3",
  number =       "1",
  pages =        "3:1--3:??",
  month =        sep,
  year =         "2010",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1838562.1838565",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Oct 8 18:21:23 MDT 2010",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Hands-free, speech-based technology can be a useful
                 alternative for individuals who find traditional input
                 devices, such as the keyboard and mouse, difficult to use.
                 Various speech-based navigation techniques have been
                 examined, and several are available in commercial
                 software applications. Among these alternatives,
                 grid-based navigation has demonstrated both potential
                 and limitations. In this article, we discuss an
                 empirical study that assessed the efficacy of two
                 enhancements to grid-based navigation: magnification
                 and fine-tuning. The magnification capability enlarges
                 the selected region when it becomes sufficiently small,
                 making it easier to see the target and cursor. The
                 fine-tuning capability allows users to move the cursor
                 short distances to position the cursor over the target.
                 The study involved one group of participants with
                 physical disabilities, an age-matched group of
                 participants without disabilities, and a third group
                 that included young adults without disabilities. The
                 results confirm that both magnification and fine-tuning
                 significantly improved the participants' performance
                 when selecting targets, especially small targets.
                 Providing either, or both, of the proposed enhancements
                 substantially reduced the gaps in performance due to
                 disability and age. The results will inform the design
                 of speech-based target selection mechanisms, allowing
                 users to select targets faster while making fewer
                 errors.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "3",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}
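
%%% The Zhu, Feng, and Sears abstract above describes speech-driven
%%% grid-based navigation with two enhancements: magnification when the
%%% selected region becomes small, and fine-tuning of the cursor by short
%%% fixed steps.  A minimal Python sketch of that recursive subdivision
%%% follows; the 3 x 3 grid, the magnification threshold, and the nudge
%%% size are assumptions for illustration, not the evaluated system.
%%%
%%%     MAGNIFY_BELOW = 100   # px: magnify once the region is this small
%%%     NUDGE = 4             # px: fine-tuning step size
%%%
%%%     def select_cell(region, cell, rows=3, cols=3):
%%%         """Shrink the active region to one numbered cell of the grid."""
%%%         x, y, w, h = region
%%%         r, c = divmod(cell - 1, cols)
%%%         return (x + c * w / cols, y + r * h / rows, w / cols, h / rows)
%%%
%%%     def needs_magnification(region):
%%%         """Magnify when targets inside the region become hard to see."""
%%%         _, _, w, h = region
%%%         return max(w, h) < MAGNIFY_BELOW
%%%
%%%     def fine_tune(point, direction):
%%%         """Move the cursor a short, fixed distance ("move left")."""
%%%         dx, dy = {"left": (-NUDGE, 0), "right": (NUDGE, 0),
%%%                   "up": (0, -NUDGE), "down": (0, NUDGE)}[direction]
%%%         return (point[0] + dx, point[1] + dy)
%%%
%%%     region = (0, 0, 1920, 1080)          # whole screen
%%%     for spoken in (5, 1, 9):             # successive spoken selections
%%%         region = select_cell(region, spoken)
%%%         if needs_magnification(region):
%%%             print("magnify", region)
%%%     cursor = (region[0] + region[2] / 2, region[1] + region[3] / 2)
%%%     print(fine_tune(cursor, "left"))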

@Article{McCoy:2010:GEA,
  author =       "Kathleen F. McCoy",
  title =        "Guest Editorial: {ASSETS 2009}",
  journal =      j-TACCESS,
  volume =       "3",
  number =       "2",
  pages =        "4:1--4:??",
  month =        nov,
  year =         "2010",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1857920.1857921",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Mar 28 11:38:52 MDT 2011",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "4",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Sato:2010:EAC,
  author =       "Daisuke Sato and Hironobu Takagi and Masatomo
                 Kobayashi and Shinya Kawanaka and Chieko Asakawa",
  title =        "Exploratory Analysis of Collaborative {Web}
                 Accessibility Improvement",
  journal =      j-TACCESS,
  volume =       "3",
  number =       "2",
  pages =        "5:1--5:??",
  month =        nov,
  year =         "2010",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1857920.1857922",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Mar 28 11:38:52 MDT 2011",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "5",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Amemiya:2010:OKH,
  author =       "Tomohiro Amemiya and Hisashi Sugiyama",
  title =        "Orienting Kinesthetically: a Haptic Handheld Wayfinder
                 for People with Visual Impairments",
  journal =      j-TACCESS,
  volume =       "3",
  number =       "2",
  pages =        "6:1--6:??",
  month =        nov,
  year =         "2010",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1857920.1857923",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Mar 28 11:38:52 MDT 2011",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "6",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Sanchez:2010:UMV,
  author =       "Jaime S{\'a}nchez and Mauricio Saenz and Jose Miguel
                 Garrido",
  title =        "Usability of a Multimodal Video Game to Improve
                 Navigation Skills for Blind Children",
  journal =      j-TACCESS,
  volume =       "3",
  number =       "2",
  pages =        "7:1--7:??",
  month =        nov,
  year =         "2010",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1857920.1857924",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Mar 28 11:38:52 MDT 2011",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "7",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Doush:2010:MPT,
  author =       "Iyad Abu Doush and Enrico Pontelli and Tran Cao Son
                 and Dominic Simon and Ou Ma",
  title =        "Multimodal Presentation of Two-Dimensional Charts: An
                 Investigation Using {Open Office XML} and {Microsoft
                 Excel}",
  journal =      j-TACCESS,
  volume =       "3",
  number =       "2",
  pages =        "8:1--8:??",
  month =        nov,
  year =         "2010",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1857920.1857925",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Mar 28 11:38:52 MDT 2011",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "8",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Wobbrock:2011:ABD,
  author =       "Jacob O. Wobbrock and Shaun K. Kane and Krzysztof Z.
                 Gajos and Susumu Harada and Jon Froehlich",
  title =        "Ability-Based Design: Concept, Principles and
                 Examples",
  journal =      j-TACCESS,
  volume =       "3",
  number =       "3",
  pages =        "9:1--9:??",
  month =        apr,
  year =         "2011",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1952383.1952384",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Apr 21 09:12:15 MDT 2011",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "9",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Jeon:2011:SSI,
  author =       "Myounghoon Jeon and Bruce N. Walker",
  title =        "{Spindex} (Speech Index) Improves Auditory Menu
                 Acceptance and Navigation Performance",
  journal =      j-TACCESS,
  volume =       "3",
  number =       "3",
  pages =        "10:1--10:??",
  month =        apr,
  year =         "2011",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1952383.1952385",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Apr 21 09:12:15 MDT 2011",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "10",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Sharit:2011:HPS,
  author =       "Joseph Sharit and Mario A. Hernandez and Sankaran N.
                 Nair and Thomas Kuhn and Sara J. Czaja",
  title =        "Health Problem Solving by Older Persons Using a
                 Complex Government {Web} Site: Analysis and
                 Implications for {Web} Design",
  journal =      j-TACCESS,
  volume =       "3",
  number =       "3",
  pages =        "11:1--11:??",
  month =        apr,
  year =         "2011",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1952383.1952386",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Apr 21 09:12:15 MDT 2011",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "11",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Kuber:2011:EHH,
  author =       "Ravi Kuber and Wai Yu and M. Sile O'Modhrain",
  title =        "Evaluation of Haptic {HTML} Mappings Derived from a
                 Novel Methodology",
  journal =      j-TACCESS,
  volume =       "3",
  number =       "4",
  pages =        "12:1--12:??",
  month =        apr,
  year =         "2011",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1952388.1952389",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Apr 21 09:12:16 MDT 2011",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "12",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Lunn:2011:IBS,
  author =       "Darren Lunn and Simon Harper and Sean Bechhofer",
  title =        "Identifying Behavioral Strategies of Visually Impaired
                 Users to Improve Access to {Web} Content",
  journal =      j-TACCESS,
  volume =       "3",
  number =       "4",
  pages =        "13:1--13:??",
  month =        apr,
  year =         "2011",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/1952388.1952390",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Apr 21 09:12:16 MDT 2011",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "13",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Hanson:2011:IAS,
  author =       "Vicki L. Hanson and Andrew Sears",
  title =        "Introduction {ASSETS'10} Special Issue",
  journal =      j-TACCESS,
  volume =       "4",
  number =       "1",
  pages =        "1:1--1:??",
  month =        nov,
  year =         "2011",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2039339.2039340",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Mar 16 15:16:33 MDT 2012",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "This article provides an introduction to the
                 ASSETS'10 Special Issue.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "1",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Piper:2011:WSA,
  author =       "Anne Marie Piper and Nadir Weibel and James D.
                 Hollan",
  title =        "{Write-N-Speak}: Authoring Multimodal Digital-Paper
                 Materials for Speech-Language Therapy",
  journal =      j-TACCESS,
  volume =       "4",
  number =       "1",
  pages =        "2:1--2:??",
  month =        nov,
  year =         "2011",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2039339.2039341",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Mar 16 15:16:33 MDT 2012",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Aphasia is characterized by a reduced ability to
                 understand and/or generate speech and language.
                 Speech-language therapy helps individuals with aphasia
                 regain language and cope with changes in their
                 communication abilities. The therapy process is largely
                 paper-based, making multimodal digital pen technology a
                 promising tool for supporting therapy activities. We
                 report on ten months of field research where we examine
                 the practice of speech-language therapy, implement
                 Write-N-Speak, a digital-paper toolkit for end-user
                 creation of custom therapy materials, and deploy this
                 system for 12 weeks with one therapist-client dyad in a
                 clinical setting. The therapist used Write-N-Speak to
                 create a range of materials including custom
                 interactive worksheets, photographs programmed with the
                 client's voice, and interactive stickers on household
                 items to aid object recognition and naming. We conclude
                 with a discussion of multimodal digital pen technology
                 for this and other therapy activities.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "2",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Alankus:2011:STT,
  author =       "Gazihan Alankus and Rachel Proffitt and Caitlin
                 Kelleher and Jack Engsberg",
  title =        "Stroke Therapy through Motion-Based Games: a Case
                 Study",
  journal =      j-TACCESS,
  volume =       "4",
  number =       "1",
  pages =        "3:1--3:??",
  month =        nov,
  year =         "2011",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2039339.2039342",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Mar 16 15:16:33 MDT 2012",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "In the United States alone, more than five million
                 people are living with long-term motor impairments
                 caused by a stroke. Recently, video games with
                 affordable motion-based input devices have been
                 proposed as a part of therapy to help people recover
                 lost range of motion and motor control. While
                 researchers have demonstrated the potential utility of
                 therapeutic games through controlled studies,
                 relatively little work has explored their long-term
                 home-based use. We conducted a six-week home study with
                 a 62-year-old woman who was seventeen years
                 post-stroke. She played therapeutic games for
                 approximately one hour a day, five days a week. Over
                 the six weeks, she recovered significant motor
                 abilities, which is unexpected given the time since her
                 stroke. We explore detecting such improvements early,
                 using game logs for daily measurements of motor ability
                 to complement the standard measurements that are taken
                 less often. Through observations and interviews, we
                 present lessons learned about the barriers and
                 opportunities that arise from long-term home-based use
                 of therapeutic games.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "3",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Lu:2011:DDS,
  author =       "Pengfei Lu and Matt Huenerfauth",
  title =        "Data-Driven Synthesis of Spatially Inflected Verbs for
                 {American Sign Language} Animation",
  journal =      j-TACCESS,
  volume =       "4",
  number =       "1",
  pages =        "4:1--4:??",
  month =        nov,
  year =         "2011",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2039339.2039343",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Mar 16 15:16:33 MDT 2012",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "We are studying techniques for producing realistic and
                 understandable animations of American Sign Language
                 (ASL); such animations have accessibility benefits for
                 signers with lower levels of written language literacy.
                 This article describes and evaluates a novel method for
                 modeling and synthesizing ASL animations based on
                 samples of ASL signs collected from native signers. We
                 apply this technique to ASL inflecting verbs, common
                 signs in which the location and orientation of the
                 hands are influenced by the arrangement of locations in
                 3D space that represent entities under discussion. We
                 train mathematical models of hand movement on animation
                 data of signs produced by a native signer. In
                 evaluation studies with native ASL signers, the verb
                 animations synthesized from our model had similar
                 subjective-rating and comprehension-question scores to
                 animations produced by a human animator; they also
                 achieved higher scores than baseline animations.
                 Further, we examine a split modeling technique for
                 accommodating certain verb signs with complex movement
                 patterns, and we conduct an analysis of how robust our
                 modeling techniques are to reductions in the size of
                 their training data. The modeling techniques in this
                 article are applicable to other types of ASL signs and
                 to other sign languages used internationally. Our
                 models' parameterization of sign animations can
                 increase the repertoire of generation systems and can
                 partially automate the work of humans using sign
                 language scripting systems.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "4",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}
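
%%% The Lu and Huenerfauth abstract above describes training mathematical
%%% models of hand movement so that verb animations can be synthesized for
%%% new arrangements of entities in 3D space.  The toy Python/NumPy sketch
%%% below fits a simple linear map from (subject, object) locations to a
%%% hand target position; the linear form and the synthetic data are
%%% assumptions for illustration, not the article's actual models.
%%%
%%%     import numpy as np
%%%
%%%     rng = np.random.default_rng(0)
%%%
%%%     # Synthetic "recordings": input is subject xyz + object xyz, output
%%%     # is the hand's final position (a noisy blend of the two points).
%%%     X = rng.uniform(-1, 1, size=(50, 6))
%%%     true_W = np.hstack([0.3 * np.eye(3), 0.7 * np.eye(3)])
%%%     Y = X @ true_W.T + 0.01 * rng.standard_normal((50, 3))
%%%
%%%     # Least-squares fit of the 3 x 6 mapping (the "trained" model).
%%%     W = np.linalg.lstsq(X, Y, rcond=None)[0].T
%%%
%%%     def synthesize_hand_target(subject_xyz, object_xyz):
%%%         """Predict a hand target for a new spatial arrangement."""
%%%         return W @ np.concatenate([subject_xyz, object_xyz])
%%%
%%%     print(synthesize_hand_target(np.array([0.5, 0.0, 0.2]),
%%%                                  np.array([-0.4, 0.3, 0.1])))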

@Article{Staff:2011:R,
  author =       "{ACM Transactions on Accessible Computing Staff}",
  title =        "Reviewers",
  journal =      j-TACCESS,
  volume =       "4",
  number =       "1",
  pages =        "5:1--5:??",
  month =        nov,
  year =         "2011",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2039339.2039344",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Mar 16 15:16:33 MDT 2012",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "5",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Hanson:2012:IA,
  author =       "Vicki L. Hanson and Andrew Sears",
  title =        "Introduction to article 7",
  journal =      j-TACCESS,
  volume =       "4",
  number =       "2",
  pages =        "6:1--6:??",
  month =        mar,
  year =         "2012",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2141943.2141944",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Nov 6 18:42:03 MST 2012",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "6",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Sears:2012:RUA,
  author =       "Andrew Sears and Vicki L. Hanson",
  title =        "Representing users in accessibility research",
  journal =      j-TACCESS,
  volume =       "4",
  number =       "2",
  pages =        "7:1--7:??",
  month =        mar,
  year =         "2012",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2141943.2141945",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Nov 6 18:42:03 MST 2012",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "The need to study representative users is widely
                 accepted within the human-computer interaction (HCI)
                 community. While exceptions exist, and alternative
                 populations are sometimes studied, virtually any
                 introduction to the process of designing user
                 interfaces will discuss the importance of understanding
                 the intended users as well as the significant impact
                 individual differences can have on how effectively
                 individuals can use various technologies. HCI
                 researchers are expected to provide relevant
                 demographics regarding study participants as well as
                 information about experience using similar
                 technologies. Yet in the field of accessibility, we
                 continue to see studies that do not appropriately
                 include representative users. Highlighting ways to
                 remedy this multifaceted problem, we argue that
                 expectations regarding how accessibility research is
                 conducted and reported must be raised if this field is
                 to have the desired impact with regard to inclusive
                 design, the information technologies studied, and the
                 lives of the individuals studied.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "7",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Brajnik:2012:ACE,
  author =       "Giorgio Brajnik and Yeliz Yesilada and Simon Harper",
  title =        "Is accessibility conformance an elusive property? {A}
                 study of validity and reliability of {WCAG} 2.0",
  journal =      j-TACCESS,
  volume =       "4",
  number =       "2",
  pages =        "8:1--8:??",
  month =        mar,
  year =         "2012",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2141943.2141946",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Nov 6 18:42:03 MST 2012",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "The Web Content Accessibility Guidelines (WCAG) 2.0
                 separate testing into both ``Machine'' and ``Human''
                 audits, and further classify ``Human Testability'' into
                 ``Reliably Human Testable'' and ``Not Reliably
                 Testable''; it is human testability that is the focus
                 of this paper. We wanted to investigate the likelihood
                 that ``at least 80\% of knowledgeable human evaluators
                 would agree on the conclusion'' of an accessibility
                 audit, and therefore understand the percentage of
                 success criteria that could be described as reliably
                 human testable, and those that could not. In this case,
                 we recruited twenty-five experienced evaluators to
                 audit four pages for WCAG 2.0 conformance. These pages
                 were chosen to differ in layout, complexity, and
                 accessibility support, thereby creating a small but
                 variable sample. We found that an 80\% agreement
                 between experienced evaluators almost never occurred
                 and that the average agreement was at the 70--75\%
                 mark, while the error rate was around 29\%. Further,
                 trained but novice evaluators performing the same
                 audits exhibited the same agreement as our more
                 experienced ones, but a reduction in validity of
                 6--13\%; the validity that an untrained user would
                 attain can only be a conjecture. Expertise appears to
                 improve (by 19\%) the ability to avoid false positives.
                 Finally, pooling the results of two independent
                 experienced evaluators would be the best option,
                 capturing at most 76\% of the true problems and
                 producing only 24\% of false positives. Any other
                 independent combination of audits would achieve worse
                 results. This means that an 80\% target for agreement,
                 when audits are conducted without communication between
                 evaluators, is not attainable, even with experienced
                 evaluators, when working on pages similar to the ones
                 used in this experiment; that the error rate even for
                 experienced evaluators is relatively high; and,
                 further, that untrained accessibility auditors, be they
                 developers or quality testers from other domains, would
                 do much worse than this.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "8",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}
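
%%% The Brajnik, Yesilada, and Harper abstract above centers on whether at
%%% least 80% of evaluators agree on each conformance conclusion.  The
%%% small Python sketch below computes that pairwise-agreement figure per
%%% success criterion from a matrix of pass/fail verdicts; the verdict data
%%% are synthetic and illustrative only, not the study's results.
%%%
%%%     from itertools import combinations
%%%
%%%     # rows = evaluators, columns = success criteria; True means "passes"
%%%     verdicts = [
%%%         [True,  False, True,  True],
%%%         [True,  False, False, True],
%%%         [True,  True,  False, True],
%%%         [True,  False, False, False],
%%%         [True,  False, False, True],
%%%     ]
%%%
%%%     def pairwise_agreement(column):
%%%         """Share of evaluator pairs giving the same verdict."""
%%%         pairs = list(combinations(column, 2))
%%%         return sum(a == b for a, b in pairs) / len(pairs)
%%%
%%%     for j in range(len(verdicts[0])):
%%%         column = [row[j] for row in verdicts]
%%%         agreement = pairwise_agreement(column)
%%%         status = "meets" if agreement >= 0.8 else "falls short of"
%%%         print(f"criterion {j + 1}: {agreement:.0%} agreement "
%%%               f"({status} the 80% target)")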

@Article{Obrien:2012:UAT,
  author =       "Marita A. O'Brien and Wendy A. Rogers and Arthur D.
                 Fisk",
  title =        "Understanding age and technology experience
                 differences in use of prior knowledge for everyday
                 technology interactions",
  journal =      j-TACCESS,
  volume =       "4",
  number =       "2",
  pages =        "9:1--9:??",
  month =        mar,
  year =         "2012",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2141943.2141947",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Nov 6 18:42:03 MST 2012",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Technology designers must understand relevant prior
                 knowledge in a target user population to facilitate
                 adoption and effective use. To assess prior knowledge
                 used in naturalistic settings, we systematically
                 collected information about technologies used over
                 10-day periods from older adults with high and low
                 technology experience and younger adults. Technology
                 repertoires for younger adults and high technology
                 older adults were similar; differences reflected
                 typically different needs for kitchen and health care
                 technologies between the age groups. Technology
                 repertoires for low-technology older adults showed
                 substantial technology usage in many categories. Lower
                 usage compared to high-tech older adults for each
                 category was limited primarily to PC and Internet
                 technologies. Experience differences suggest
                 preferences among low-technology older adults for basic
                 technology usage and for working with people rather
                 than technologies. Participants in all groups were
                 generally successful using their everyday technologies
                 to achieve their goals. Prior knowledge was the most
                 common attribution for success, but external
                 information was also commonly referenced. Relevant
                 prior knowledge included technical, functional,
                  strategy, and self knowledge. High-tech older adults
                 did not report more problems than younger adults, but
                 they did attribute more problems to insufficient prior
                 knowledge. Younger adults attributed more problems to
                 interference from prior knowledge. Low-tech older
                 adults reported fewer problems, typically attributing
                 them to insufficient prior knowledge or product/system
                 faults. We discuss implications for further research
                 and design improvements to increase everyday technology
                 success and adoption for high-tech and low-tech older
                 adults.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "9",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{McGrenere:2012:ISI,
  author =       "Joanna McGrenere",
  title =        "Introduction to Special Issue on Mobile Technologies
                 for Older Users",
  journal =      j-TACCESS,
  volume =       "4",
  number =       "3",
  pages =        "10:1--10:??",
  month =        dec,
  year =         "2012",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2399193.2399194",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Mar 13 07:24:02 MDT 2014",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "10",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Leung:2012:HOA,
  author =       "Rock Leung and Charlotte Tang and Shathel Haddad and
                  Joanna McGrenere and Peter Graf and Vilia Ingriany",
  title =        "How Older Adults Learn to Use Mobile Devices: Survey
                 and Field Investigations",
  journal =      j-TACCESS,
  volume =       "4",
  number =       "3",
  pages =        "11:1--11:??",
  month =        dec,
  year =         "2012",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2399193.2399195",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Mar 13 07:24:02 MDT 2014",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Mobile computing devices, such as smart phones, offer
                 benefits that may be especially valuable to older
                 adults (age 65+). Yet, older adults have been shown to
                 have difficulty learning to use these devices. In the
                 research presented in this article, we sought to better
                 understand how older adults learn to use mobile
                 devices, their preferences and barriers, in order to
                 find new ways to support them in their learning
                 process. We conducted two complementary studies: a
                 survey study with 131 respondents from three age groups
                 (20--49, 50--64, 65+) and an in-depth field study with
                 6 older adults aged 50+. The results showed, among
                 other things, that the preference for trial-and-error
                 decreases with age, and while over half of older
                 respondents and participants preferred using the
                 instruction manual, many reported difficulties using
                 it. We discuss implications for design and illustrate
                 these implications with an example help system, Help
                 Kiosk, designed to support older adults' learning to
                 use mobile devices.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "11",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Yesilada:2012:SIA,
  author =       "Yeliz Yesilada",
  title =        "Special Issue {ASSETS 2011}",
  journal =      j-TACCESS,
  volume =       "4",
  number =       "3",
  pages =        "12:1--12:??",
  month =        dec,
  year =         "2012",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2399193.2399196",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Mar 13 07:24:02 MDT 2014",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "12",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Flatla:2012:SSM,
  author =       "David R. Flatla and Carl Gutwin",
  title =        "Situation-Specific Models of Color Differentiation",
  journal =      j-TACCESS,
  volume =       "4",
  number =       "3",
  pages =        "13:1--13:??",
  month =        dec,
  year =         "2012",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2399193.2399197",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Mar 13 07:24:02 MDT 2014",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Color is commonly used to represent categories and
                 values in computer applications, but users with
                 Color-Vision Deficiencies (CVD) often have difficulty
                 differentiating these colors. Recoloring tools have
                 been developed to address the problem, but current
                 recolorers are limited in that they work from a model
                 of only one type of congenital CVD (i.e.,
                 dichromatism). This model does not adequately describe
                 many other forms of CVD (e.g., more common congenital
                 deficiencies such as anomalous trichromacy, acquired
                 deficiencies such as cataracts or age-related yellowing
                 of the lens, or temporary deficiencies such as wearing
                 tinted glasses or working in bright sunlight), and so
                 standard recolorers work poorly in many situations. In
                 this article we describe an alternate approach that can
                 address these limitations. The new approach, called
                 Situation-Specific Modeling (SSM), constructs a model
                 of a specific user's color differentiation abilities in
                 a specific situation, and uses that model as the basis
                 for recoloring digital presentations. As a result, SSM
                 can inherently handle all types of CVD, whether
                 congenital, acquired, or environmental. In this article
                 we describe and evaluate several models that are based
                 on the SSM approach. Our first model of individual
                 color differentiation (called ICD-1) works in RGB color
                 space, and a user study showed it to be accurate and
                 robust (both for users with and without congenital
                 CVD). However, three aspects of ICD-1 were identified
                 as needing improvement: the calibration step needed to
                 build the situation-specific model, and the prediction
                 steps used in recoloring were too slow for real-world
                 use; and the results of the model's predictions were
                 too coarse for some uses. We therefore developed three
                 further techniques: ICD-2 reduces the time needed to
                 calibrate the model; ICD-3 reduces the time needed to
                 make predictions with the model; and ICD-4 provides
                 additional information about the degree of
                 differentiability in a prediction. Our final result is
                 a model of the user's color perception that handles any
                 type of CVD, can be calibrated in two minutes, and can
                  find replacement colors in near-real time ($\approx 1$
                 second for a 64-color image). The ICD models provide a
                 tool that can greatly improve the perceptibility of
                 digital color for many different types of CVD users,
                 and also demonstrates situation-specific modeling as a
                 new approach that can broaden the applicability of
                 assistive technology.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "13",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Moll:2013:HTG,
  author =       "Jonas Moll and Eva-Lotta Salln{\"a}s Pysander",
  title =        "A Haptic Tool for Group Work on Geometrical Concepts
                 Engaging Blind and Sighted Pupils",
  journal =      j-TACCESS,
  volume =       "4",
  number =       "4",
  pages =        "14:1--14:??",
  month =        jul,
  year =         "2013",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2493171.2493172",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Mar 13 07:24:04 MDT 2014",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "In the study presented here, two haptic and visual
                 applications for learning geometrical concepts in group
                 work in primary school have been designed and
                 evaluated. The aim was to support collaborative
                 learning among sighted and visually impaired pupils.
                 The first application is a static flattened 3D
                 environment that supports learning to distinguish
                 between angles by means of a 3D haptic device providing
                 touch feedback. The second application is a dynamic 3D
                 environment that supports learning of spatial geometry.
                 The scene is a room with a box containing geometrical
                 objects, which pupils can pick up and move around. The
                 applications were evaluated in four schools with groups
                 of two sighted and one visually impaired pupil. The
                 results showed the support for the visually impaired
                 pupil and for the collaboration to be satisfying. A
                 shared understanding of the workspace could be
                 achieved, as long as the virtual environment did not
                 contain movable objects. Verbal communication was
                 crucial for the work process but haptic guiding to some
                 extent substituted communication about direction. When
                 it comes to joint action between visually impaired and
                 sighted pupils a number of interesting problems were
                 identified when the dynamic and static virtual
                 environments were compared. These problems require
                 further investigation. The study extends prior work in
                 the areas of assistive technology and multimodal
                 communication by evaluating functions for joint haptic
                 manipulation in the unique setting of group work in
                 primary school.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "14",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Ma:2013:IUB,
  author =       "Yao Ma and Jinjuan Feng and Libby Kumin and Jonathan
                 Lazar",
  title =        "Investigating User Behavior for Authentication
                 Methods: a Comparison between Individuals with {Down
                 Syndrome} and Neurotypical Users",
  journal =      j-TACCESS,
  volume =       "4",
  number =       "4",
  pages =        "15:1--15:??",
  month =        jul,
  year =         "2013",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2493171.2493173",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Mar 13 07:24:04 MDT 2014",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "A wide variety of authentication mechanisms have been
                 designed to ensure information security. Individuals
                 with cognitive disabilities depend on computers and the
                 Internet for a variety of tasks and, therefore, use
                 authentication applications on an everyday basis.
                 However, although there have been numerous studies
                 investigating password usage by neurotypical users,
                 there have been no research studies conducted to
                 examine the use of authentication methods by
                 individuals with cognitive disabilities. In this
                 article, we systematically investigate how individuals
                 with cognitive disabilities, specifically Down syndrome
                 (DS), interact with various user authentication
                 mechanisms. This research provides the first benchmark
                 data on the performance of individuals with DS when
                 using multiple authentication methods. It confirms that
                 individuals with DS are capable of using the
                 traditional alphanumeric passwords with reasonable
                 efficiency. The passwords created by individuals with
                 DS are of similar strength to those created by
                 neurotypical people. Graphic passwords are not as
                 effective as traditional alphanumeric and mnemonic
                 passwords regarding efficiency, and are less preferred
                 by the participants. Based on the findings of the
                 study, we propose design guidelines that aim to assist
                 both practitioners and researchers in designing and
                 developing effective authentication applications that
                 fit the specific needs of individuals with DS.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "15",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Hwang:2013:ETE,
  author =       "Faustina Hwang and Nic Hollinworth and Nitin
                 Williams",
  title =        "Effects of Target Expansion on Selection Performance
                 in Older Computer Users",
  journal =      j-TACCESS,
  volume =       "5",
  number =       "1",
  pages =        "1:1--1:??",
  month =        sep,
  year =         "2013",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2514848",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Mar 13 07:24:05 MDT 2014",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Point and click interactions using a mouse are an
                 integral part of computer use for current desktop
                 systems. Compared with younger users though, older
                 adults experience greater difficulties performing
                 cursor positioning tasks, and this can present
                 limitations to using a computer easily and effectively.
                 Target expansion is a technique for improving pointing
                 performance where the target grows dynamically as the
                 cursor approaches. This has the advantage that targets
                 conserve screen real estate in their unexpanded state,
                 yet can still provide the benefits of a larger area to
                 click on. This article presents two studies of target
                 expansion with older and younger participants,
                 involving multidirectional point-select tasks with a
                 computer mouse. Study 1 compares static versus
                 expanding targets, and Study 2 compares static targets
                 with three alternative techniques for expansion.
                 Results show that expansion can improve times by up to
                 14\%, and reduce error rates by up to 50\%.
                 Additionally, expanding targets are beneficial even
                 when the expansion happens late in the movement, that
                 is, after the cursor has reached the expanded target
                 area or even after it has reached the original target
                 area. The participants' subjective feedback on the
                  target expansion is generally favorable, and this
                 lends further support for the technique.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "1",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Vickers:2013:PLT,
  author =       "Stephen Vickers and Howell Istance and Aulikki
                 Hyrskykari",
  title =        "Performing Locomotion Tasks in Immersive Computer
                 Games with an Adapted Eye-Tracking Interface",
  journal =      j-TACCESS,
  volume =       "5",
  number =       "1",
  pages =        "2:1--2:??",
  month =        sep,
  year =         "2013",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2514856",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Mar 13 07:24:05 MDT 2014",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Young people with severe physical disabilities may
                 benefit greatly from participating in immersive
                 computer games. In-game tasks can be fun, engaging,
                 educational, and socially interactive. But for those
                 who are unable to use traditional methods of computer
                 input such as a mouse and keyboard, there is a barrier
                 to interaction that they must first overcome. Eye-gaze
                 interaction is one method of input that can potentially
                 achieve the levels of interaction required for these
                  games. How we use eye-gaze, or the gaze interaction
                  technique, depends upon the task being performed, the
                 individual performing it, and the equipment available.
                 To fully realize the impact of participation in these
                 environments, techniques need to be adapted to the
                 person's abilities. We describe an approach to
                 designing and adapting a gaze interaction technique to
                 support locomotion, a task central to immersive game
                 playing. This is evaluated by a group of young people
                 with cerebral palsy and muscular dystrophy. The results
                 show that by adapting the interaction technique,
                 participants are able to significantly improve their
                 in-game character control.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "2",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Sears:2013:E,
  author =       "Andrew Sears and Vicki Hanson",
  title =        "Editorial",
  journal =      j-TACCESS,
  volume =       "5",
  number =       "2",
  pages =        "3:1--3:??",
  month =        oct,
  year =         "2013",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2522990.2522991",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Mar 13 07:24:07 MDT 2014",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "3",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Kacorri:2013:EDH,
  author =       "Hernisa Kacorri and Pengfei Lu and Matt Huenerfauth",
  title =        "Effect of Displaying Human Videos During an Evaluation
                 Study of {American Sign Language} Animation",
  journal =      j-TACCESS,
  volume =       "5",
  number =       "2",
  pages =        "4:1--4:??",
  month =        oct,
  year =         "2013",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2517038",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Mar 13 07:24:07 MDT 2014",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Many researchers internationally are studying how to
                 synthesize computer animations of sign language; such
                 animations have accessibility benefits for people who
                 are deaf and have lower literacy in written languages.
                 The field has not yet formed a consensus as to how to
                 best conduct evaluations of the quality of sign
                 language animations, and this article explores an
                 important methodological issue for researchers
                 conducting experimental studies with participants who
                 are deaf. Traditionally, when evaluating an animation,
                 some lower and upper baselines are shown for comparison
                 during the study. For the upper baseline, some
                 researchers use carefully produced animations, and
                 others use videos of human signers. Specifically, this
                 article investigates, in studies where signers view
                 animations of sign language and are asked subjective
                 and comprehension questions, whether participants
                 differ in their subjective and comprehension responses
                 when actual videos of human signers are shown during
                 the study. Through three sets of experiments, we
                 characterize how the Likert-scale subjective judgments
                 of participants about sign language animations are
                 negatively affected when they are also shown videos of
                 human signers for comparison --- especially when
                 displayed side-by-side. We also identify a small
                 positive effect on the comprehension of sign language
                 animations when studies also contain videos of human
                 signers. Our results enable direct comparison of
                 previously published evaluations of sign language
                 animations that used different types of upper baselines
                 --- video or animation. Our results also provide
                 methodological guidance for researchers who are
                 designing evaluation studies of sign language animation
                 or designing experimental stimuli or questions for
                 participants who are deaf.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "4",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Hurst:2013:DUP,
  author =       "Amy Hurst and Scott E. Hudson and Jennifer Mankoff and
                 Shari Trewin",
  title =        "Distinguishing Users By Pointing Performance in
                 Laboratory and Real-World Tasks",
  journal =      j-TACCESS,
  volume =       "5",
  number =       "2",
  pages =        "5:1--5:??",
  month =        oct,
  year =         "2013",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2517039",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Mar 13 07:24:07 MDT 2014",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Accurate pointing is an obstacle to computer access
                 for individuals who experience motor impairments. One
                 of the main barriers to assisting individuals with
                 pointing problems is a lack of frequent and low-cost
                 assessment of pointing ability. We are working to build
                 technology to automatically assess pointing problems
                  during everyday (or real-world) computer use. To this
                 end, we have gathered and studied real-world pointing
                 use from individuals with motor impairments and older
                 adults. We have used this data to develop novel
                 techniques to analyze pointing performance. In this
                 article, we present learned statistical models that
                 distinguish between pointing actions from diverse
                 populations using real-world pointing samples. We
                 describe how our models could be used to support
                 individuals with different abilities sharing a
                 computer, or one individual who experiences temporary
                 pointing problems. Our investigation contributes to a
                 better understanding of real-world pointing. We hope
                 that these techniques will be used to develop systems
                 that can automatically adapt to users' current needs in
                 real-world computing environments.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "5",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Editors:2014:GNE,
  author =       "Editors",
  title =        "Greetings from the New {Editors-in-Chief}",
  journal =      j-TACCESS,
  volume =       "5",
  number =       "3",
  pages =        "6:1--6:??",
  month =        jan,
  year =         "2014",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2557667",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Mar 13 07:24:08 MDT 2014",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "6",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Kushalnagar:2014:AEC,
  author =       "Raja S. Kushalnagar and Walter S. Lasecki and Jeffrey
                 P. Bigham",
  title =        "Accessibility Evaluation of Classroom Captions",
  journal =      j-TACCESS,
  volume =       "5",
  number =       "3",
  pages =        "7:1--7:??",
  month =        jan,
  year =         "2014",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2543578",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Mar 13 07:24:08 MDT 2014",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Real-time captioning enables deaf and hard of hearing
                 (DHH) people to follow classroom lectures and other
                 aural speech by converting it into visual text with
                 less than a five second delay. Keeping the delay short
                 allows end-users to follow and participate in
                 conversations. This article focuses on the fundamental
                 problem that makes real-time captioning difficult:
                 sequential keyboard typing is much slower than
                 speaking. We first surveyed the audio characteristics
                 of 240 one-hour-long captioned lectures on YouTube,
                 such as speed and duration of speaking bursts. We then
                 analyzed how these characteristics impact caption
                 generation and readability, considering specifically
                 our human-powered collaborative captioning approach. We
                 note that most of these characteristics are also
                 present in more general domains. For our caption
                 comparison evaluation, we transcribed a classroom
                 lecture in real-time using all three captioning
                 approaches. We recruited 48 participants (24 DHH) to
                 watch these classroom transcripts in an eye-tracking
                 laboratory. We presented these captions in a
                 randomized, balanced order. We show that both hearing
                 and DHH participants preferred and followed
                 collaborative captions better than those generated by
                 automatic speech recognition (ASR) or professionals due
                 to the more consistent flow of the resulting captions.
                 These results show the potential to reliably capture
                 speech even during sudden bursts of speed, as well as
                  to generate ``enhanced'' captions, unlike other
                 human-powered captioning approaches.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "7",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Webster:2014:TSC,
  author =       "Gemma Webster and Vicki L. Hanson",
  title =        "Technology for Supporting Care Staff in Residential
                 Homes",
  journal =      j-TACCESS,
  volume =       "5",
  number =       "3",
  pages =        "8:1--8:??",
  month =        jan,
  year =         "2014",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2543577",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Mar 13 07:24:08 MDT 2014",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Care staff, those who attend to the day-to-day needs
                 of people in residential facilities, represent an
                 important segment of the health-care provision of those
                 entrusted to their care. The potential use of
                 technology by care staff has not been a focus of
                 researcher attention. The work reported here provides
                 initial steps in addressing that gap, considering both
                 the design requirements for this population and
                 presentation of early work on a software system for use
                 by care staff. We describe the development of a
                 software tool for use by care staff, called Portrait,
                 and report two studies related to factors affecting
                 technology use by this population. The results of this
                 research are promising, with Portrait being very
                 positively received by care managers and care staff.
                 Use of this software in a care home for over a month
                 indicated continued use, with care staff returning to
                 the system throughout the test period. The
                 contributions of this research are the identification
                 of factors important in working with a care staff
                 population, the introduction and evaluation of a novel
                 software tool for care staff in residential homes, and
                 the highlighting of potential benefits of technology in
                 assisting care staff.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "8",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Shipman:2014:ISL,
  author =       "Frank M. Shipman and Ricardo Gutierrez-Osuna and Caio
                 D. D. Monteiro",
  title =        "Identifying Sign Language Videos in Video Sharing
                 Sites",
  journal =      j-TACCESS,
  volume =       "5",
  number =       "4",
  pages =        "9:1--9:??",
  month =        mar,
  year =         "2014",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2579698",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Apr 1 05:55:51 MDT 2014",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Video sharing sites enable members of the sign
                 language community to record and share their knowledge,
                 opinions, and worries on a wide range of topics. As a
                 result, these sites have formative digital libraries of
                 sign language content hidden within their large overall
                 collections. This article explores the problem of
                 locating these sign language (SL) videos and presents
                 techniques for identifying SL videos in such
                 collections. To determine the effectiveness of existing
                 text-based search for locating these SL videos, a
                 series of queries were issued to YouTube to locate SL
                 videos on the top 10 news stories of 2011 according to
                 Yahoo!. Overall precision for the first page of results
                 (up to 20 results) was 42\%. An approach for
                 automatically detecting SL video is then presented.
                 Five video features considered likely to be of value
                 were developed using standard background modeling and
                 face detection. The article compares the results of an
                 SVM classifier when given all permutations of these
                 five features. The results show that a measure of the
                 symmetry of motion relative to the face position
                 provided the best performance of any single feature.
                 When tested against a challenging test collection that
                 included many likely false positives, an SVM provided
                 with all five features achieved 82\% precision and 90\%
                 recall. In contrast, the text-based search (queries
                 with the topic terms and ``ASL'' or ``sign language'')
                 returned a significant portion of non-SL
                 content---nearly half of all videos found. By our
                 estimates, the application of video-based filtering
                 techniques such as the one proposed here would increase
                 precision from 42\% for text-based queries up to
                 75\%.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "9",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Peters:2014:ATA,
  author =       "Christian Peters and Thomas Hermann and Sven Wachsmuth
                 and Jesse Hoey",
  title =        "Automatic Task Assistance for People with Cognitive
                 Disabilities in Brushing Teeth --- A User Study with
                 the {TEBRA} System",
  journal =      j-TACCESS,
  volume =       "5",
  number =       "4",
  pages =        "10:1--10:??",
  month =        mar,
  year =         "2014",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2579700",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Apr 1 05:55:51 MDT 2014",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "People with cognitive disabilities such as dementia
                 and intellectual disabilities tend to have problems in
                 coordinating steps in the execution of Activities of
                 Daily Living (ADLs) due to limited capabilities in
                 cognitive functioning. To successfully perform ADLs,
                 these people are reliant on the assistance of human
                 caregivers. This leads to a decrease of independence
                 for care recipients and imposes a high burden on
                 caregivers. Assistive Technology for Cognition (ATC)
                 aims to compensate for decreased cognitive functions.
                 ATC systems provide automatic assistance in task
                 execution by delivering appropriate prompts which
                 enable the user to perform ADLs without any assistance
                 of a human caregiver. This leads to an increase of the
                 user's independence and to a relief of caregiver's
                 burden. In this article, we describe the design,
                 development and evaluation of a novel ATC system. The
                 TEBRA (TEeth BRushing Assistance) system supports
                 people with moderate cognitive disabilities in the
                 execution of brushing teeth. A main requirement for the
                 acceptance of ATC systems is context awareness:
                 explicit feedback from the user is not necessary to
                 provide appropriate assistance. Furthermore, an ATC
                 system needs to handle spatial and temporal variance in
                 the execution of behaviors such as different movement
                 characteristics and different velocities. The TEBRA
                 system handles spatial variance in a behavior
                 recognition component based on a Bayesian network
                 classifier. A dynamic timing model deals with temporal
                 variance by adapting to different velocities of users
                 during a trial. We evaluate a fully functioning
                 prototype of the TEBRA system in a study with people
                 with cognitive disabilities. The main aim of the study
                 is to analyze the technical performance of the system
                 and the user's behavior in the interaction with the
                 system with regard to the main hypothesis: is the TEBRA
                 system able to increase the user's independence in the
                 execution of brushing teeth?",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "10",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Braier:2015:HSR,
  author =       "Jonas Braier and Katharina Lattenkamp and Benjamin
                 R{\"a}thel and Sandra Schering and Michael Wojatzki and
                 Benjamin Weyers",
  title =        "Haptic {$3$D} Surface Representation of Table-Based
                 Data for People With Visual Impairments",
  journal =      j-TACCESS,
  volume =       "6",
  number =       "1",
  pages =        "1:1--1:??",
  month =        mar,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2700433",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Mar 5 07:49:53 MST 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "The UN Convention on the Rights of Persons with
                 Disabilities Article 24 states that ``States Parties
                 shall ensure inclusive education at all levels of
                  education and lifelong learning.'' This article
                 focuses on the inclusion of people with visual
                 impairments in learning processes including complex
                 table-based data. Gaining insight into and
                 understanding of complex data is a highly demanding
                 task for people with visual impairments. Especially in
                 the case of table-based data, the classic approaches of
                 braille-based output devices and printing concepts are
                 limited. Haptic perception requires sequential
                 information processing rather than the parallel
                  processing used by the visual system, which makes it
                  harder to gain a fast overview of and deeper insight
                  into the data. Nevertheless,
                 neuroscientific research has identified great
                 dependencies between haptic perception and the
                 cognitive processing of visual sensing. Based on these
                 findings, we developed a haptic 3D surface
                 representation of classic diagrams and charts, such as
                 bar graphs and pie charts. In a qualitative evaluation
                 study, we identified certain advantages of our
                 relief-type 3D chart approach. Finally, we present an
                 education model for German schools that includes a 3D
                 printing approach to help integrate students with
                 visual impairments.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "1",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Tartaro:2015:APS,
  author =       "Andrea Tartaro and Justine Cassell and Corina Ratz and
                 Jennifer Lira and Valeria Nanclares-Nogu{\'e}s",
  title =        "Accessing Peer Social Interaction: Using Authorable
                 Virtual Peer Technology as a Component of a Group
                 Social Skills Intervention Program",
  journal =      j-TACCESS,
  volume =       "6",
  number =       "1",
  pages =        "2:1--2:??",
  month =        mar,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2700434",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Mar 5 07:49:53 MST 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Autism spectrum and related communication and social
                 disorders can severely affect some children's ability
                 to engage in peer social interaction. In this article,
                 we describe and evaluate an Authorable Virtual Peer
                 (AVP), technology designed to help children access peer
                 interactions by supporting them in developing critical
                 social skills. Children interact with the AVP in three
                 ways: (1) engaging in face-to-face interaction with a
                 life-sized, computer-animated child; (2) creating new
                 social behaviors for the AVP; and (3) controlling the
                 AVP using a graphical user interface to select
                 appropriate responses while the AVP interacts with
                 another person. Our evaluation suggests that when an
                 AVP is used as an activity during a social group
                 intervention, a common intervention approach used with
                 children with social and communication difficulties,
                  children's use of specific social behaviors
                 critical to successful social interaction increases
                 during role-play of common social situations with
                 another child.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "2",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Pedrosa:2015:FDC,
  author =       "Diogo Pedrosa and Maria {Da Gra{\c{c}}a Pimentel} and
                 Amy Wright and Khai N. Truong",
  title =        "{Filteryedping}: Design Challenges and User
                 Performance of Dwell-Free Eye Typing",
  journal =      j-TACCESS,
  volume =       "6",
  number =       "1",
  pages =        "3:1--3:??",
  month =        mar,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2724728",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Mar 5 07:49:53 MST 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "The ability to use the movements of the eyes to write
                 is extremely important for individuals with a severe
                 motor disability. With eye typing, a virtual keyboard
                 is shown on the screen and the user enters text by
                 gazing at the intended keys one at a time. With
                 dwell-based eye typing, a key is selected by
                 continuously gazing at it for a specific amount of
                 time. However, this approach has two possible
                 drawbacks: unwanted selections and slow typing rates.
                 In this study, we propose a dwell-free eye typing
                 technique that filters out unintentionally selected
                 letters from the sequence of letters looked at by the
                 user. It ranks possible words based on their length and
                 frequency of use and suggests them to the user. We
                 evaluated Filteryedping with a series of experiments.
                 First, we recruited participants without disabilities
                 to compare it with another potential dwell-free
                 technique and with a dwell-based eye typing interface.
                 The results indicate it is a fast technique that allows
                  an average of 15.95 words per minute after 100 min of
                 typing. Then, we improved the technique through
                 iterative design and evaluation with individuals who
                 have severe motor disabilities. This phase helped to
                 identify and create parameters that allow the technique
                 to be adapted to different users.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "3",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Felzer:2015:ESN,
  author =       "Torsten Felzer and Stephan Rinderknecht",
  title =        "Experiences of Someone with a Neuromuscular Disease in
                 Operating a {PC} (and Ways to Successfully Overcome
                 Challenges)",
  journal =      j-TACCESS,
  volume =       "6",
  number =       "2",
  pages =        "4:1--4:??",
  month =        mar,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2700436",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Mar 23 16:45:41 MDT 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "This article describes the experiences of the first
                 author, who was diagnosed with the neuromuscular
                 disease Friedreich's Ataxia more than 25 years ago,
                 with the innovative approach to human-computer
                 interaction characterized by the software tool
                 OnScreenDualScribe. Originally developed by (and for!)
                 the first author, the tool replaces the standard input
                 devices-that is, keyboard and mouse-with a small
                 numeric keypad, making optimal use of his abilities.
                 This work attempts to illustrate some of the
                 difficulties the first author usually has to face when
                 operating a computer, due to considerable motor
                 problems. The article will discuss what he tried in the
                 past, and why OnScreenDualScribe, offering various
                  assistive techniques (including word prediction, an
                  ambiguous keyboard, and stepwise pointing operations) is
                 indeed a viable alternative. In a pilot study that was
                 repeated multiple times with slight variations over a
                 period of 3 years, the first author's entry rate with
                 OnScreenDualScribe (including early versions of the
                  tool) increased from 1.38 wpm to 6.16 wpm, while his
                  achievable typing rate went from 12 wpm to 3.5 wpm in the
                 course of 24 years. However, the ultimate goal is to
                 help not just one single person, but to make the
                  system (which not only accelerates entry, but also
                  clearly reduces the required effort) available to anyone
                 with similar conditions.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "4",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Ladner:2015:IASa,
  author =       "Richard Ladner and Jonathan Lazar",
  title =        "Introduction to the {ASSETS'13} Special Issue",
  journal =      j-TACCESS,
  volume =       "6",
  number =       "2",
  pages =        "4e:1--4e:??",
  month =        mar,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2737200",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Mar 23 16:45:41 MDT 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "4e",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Hara:2015:IPT,
  author =       "Kotaro Hara and Shiri Azenkot and Megan Campbell and
                 Cynthia L. Bennett and Vicki Le and Sean Pannella and
                 Robert Moore and Kelly Minckler and Rochelle H. Ng and
                 Jon E. Froehlich",
  title =        "Improving Public Transit Accessibility for Blind
                 Riders by Crowdsourcing Bus Stop Landmark Locations
                 with {Google Street View}: an Extended Analysis",
  journal =      j-TACCESS,
  volume =       "6",
  number =       "2",
  pages =        "5:1--5:??",
  month =        mar,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2717513",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Mar 23 16:45:41 MDT 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Low-vision and blind bus riders often rely on known
                 physical landmarks to help locate and verify bus stop
                 locations (e.g., by searching for an expected shelter,
                 bench, or newspaper bin). However, there are currently
                 few, if any, methods to determine this information a
                 priori via computational tools or services. In this
                 article, we introduce and evaluate a new scalable
                 method for collecting bus stop location and landmark
                 descriptions by combining online crowdsourcing and
                 Google Street View (GSV). We conduct and report on
                 three studies: (i) a formative interview study of 18
                 people with visual impairments to inform the design of
                 our crowdsourcing tool, (ii) a comparative study
                 examining differences between physical bus stop audit
                 data and audits conducted virtually with GSV, and (iii)
                 an online study of 153 crowd workers on Amazon
                 Mechanical Turk to examine the feasibility of
                 crowdsourcing bus stop audits using our custom tool
                 with GSV. Our findings reemphasize the importance of
                 landmarks in nonvisual navigation, demonstrate that GSV
                 is a viable bus stop audit dataset, and show that
                 minimally trained crowd workers can find and identify
                 bus stop landmarks with 82.5\% accuracy across 150 bus
                 stop locations (87.3\% with simple quality control).",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "5",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Gerling:2015:DWB,
  author =       "Kathrin M. Gerling and Regan L. Mandryk and Matthew
                 Miller and Michael R. Kalyn and Max Birk and Jan D.
                 Smeddinck",
  title =        "Designing Wheelchair-Based Movement Games",
  journal =      j-TACCESS,
  volume =       "6",
  number =       "2",
  pages =        "6:1--6:??",
  month =        mar,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2724729",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Mar 23 16:45:41 MDT 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "People using wheelchairs have access to fewer sports
                 and other physically stimulating leisure activities
                 than nondisabled persons, and often lead sedentary
                 lifestyles that negatively influence their health.
                 While motion-based video games have demonstrated great
                 potential of encouraging physical activity among
                 nondisabled players, the accessibility of motion-based
                 games is limited for persons with mobility
                 disabilities, thus also limiting access to the
                 potential health benefits of playing these games. In
                 our work, we address this issue through the design of
                 wheelchair-accessible motion-based game controls. We
                 present KINECT$^{\rm Wheels}$, a toolkit designed to
                 integrate wheelchair movements into motion-based games.
                 Building on the toolkit, we developed Cupcake Heaven, a
                 wheelchair-based video game designed for older adults
                 using wheelchairs, and we created Wheelchair
                 Revolution, a motion-based dance game that is
                 accessible to both persons using wheelchairs and
                 nondisabled players. Evaluation results show that
                 KINECT$^{\rm Wheels}$ can be applied to make
                 motion-based games wheelchair-accessible, and that
                 wheelchair-based games engage broad audiences in
                 physically stimulating play. Through the application of
                 the wheelchair as an enabling technology in games, our
                 work has the potential of encouraging players of all
                 ages to develop a positive relationship with their
                 wheelchair.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "6",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Christensen:2015:PSL,
  author =       "Heidi Christensen and Frank Rudzicz and Fran{\c{c}}ois
                 Portet and Jan Alexandersson",
  title =        "Perspectives on Speech and Language Interaction for
                 Daily Assistive Technology: Introduction to {Part 1} of
                 the Special Issue",
  journal =      j-TACCESS,
  volume =       "6",
  number =       "3",
  pages =        "7:1--7:??",
  month =        jun,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2756765",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Jun 10 11:04:09 MDT 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "7",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Pellegrini:2015:AAS,
  author =       "Thomas Pellegrini and Lionel Fontan and Julie Mauclair
                 and J{\'e}r{\^o}me Farinas and Charlotte Alazard-Guiu
                 and Marina Robert and Peggy Gatignol",
  title =        "Automatic Assessment of Speech Capability Loss in
                 Disordered Speech",
  journal =      j-TACCESS,
  volume =       "6",
  number =       "3",
  pages =        "8:1--8:??",
  month =        jun,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2739051",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Jun 10 11:04:09 MDT 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "In this article, we report on the use of an automatic
                 technique to assess pronunciation in the context of
                 several types of speech disorders. Even if such tools
                 already exist, they are more widely used in a different
                 context, namely, Computer-Assisted Language Learning,
                 in which the objective is to assess nonnative
                 pronunciation by detecting learners' mispronunciations
                 at segmental and/or suprasegmental levels. In our work,
                 we sought to determine if the Goodness of Pronunciation
                 (GOP) algorithm, which aims to detect phone-level
                 mispronunciations by means of automatic speech
                 recognition, could also detect segmental deviances in
                 disordered speech. Our main experiment is an analysis
                 of speech from people with unilateral facial palsy.
                 This pathology may impact the realization of certain
                 phonemes such as bilabial plosives and sibilants.
                 Speech read by 32 speakers at four different clinical
                 severity grades was automatically aligned and GOP
                 scores were computed for each phone realization. The
                 highest scores, which indicate large dissimilarities
                 with standard phone realizations, were obtained for the
                 most severely impaired speakers. The corresponding
                 speech subset was manually transcribed at phone level;
                 8.3\% of the phones differed from standard
                 pronunciations extracted from our lexicon. The GOP
                 technique allowed the detection of 70.2\% of
                 mispronunciations with an equal rate of about 30\% of
                 false rejections and false acceptances. Finally, to
                 broaden the scope of the study, we explored the
                 correlation between GOP values and speech
                 comprehensibility scores on a second corpus, composed
                 of sentences recorded by six people with speech
                 impairments due to cancer surgery or neurological
                 disorders. Strong correlations were achieved between
                 GOP scores and subjective comprehensibility scores
                 (about 0.7 absolute). Results from both experiments
                 tend to validate the use of GOP to measure speech
                 capability loss, a dimension that could be used as a
                 complement to physiological measures in pathologies
                 causing speech disorders.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "8",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Laaridh:2015:ADP,
  author =       "Imed Laaridh and Corinne Fredouille and Christine
                 Meunier",
  title =        "Automatic Detection of Phone-Based Anomalies in
                 Dysarthric Speech",
  journal =      j-TACCESS,
  volume =       "6",
  number =       "3",
  pages =        "9:1--9:??",
  month =        jun,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2739050",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Jun 10 11:04:09 MDT 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Perceptual evaluation is still the most common method
                 in clinical practice for diagnosing and following the
                 progression of the condition of people with speech
                 disorders. Although a number of studies have addressed
                 the acoustic analysis of speech productions exhibiting
                 impairments, additional descriptive analysis is
                 required to manage interperson variability, considering
                 speakers with the same condition or across different
                 conditions. In this context, this article investigates
                 automatic speech processing approaches dedicated to the
                 detection and localization of abnormal acoustic
                 phenomena in speech signal produced by people with
                 speech disorders. This automatic process aims at
                 enhancing the manual investigation of human experts
                 while at the same time reducing the extent of their
                 intervention by calling their attention to specific
                 parts of the speech considered as atypical from an
                 acoustical point of view. Two different approaches are
                 proposed in this article. The first approach models
                 only the normal speech, whereas the second models both
                 normal and dysarthric speech. Both approaches are
                 evaluated following two strategies: one consists of a
                 strict phone comparison between a human annotation of
                 abnormal phones and the automatic output, while the
                 other uses a ``one-phone delay'' for the comparison.
                 The experimental evaluation of both approaches for the
                 task of detecting acoustic anomalies was conducted on
                 two different corpora composed of French dysarthric
                 speakers and control speakers. These approaches obtain
                 very encouraging results and their potential for
                 clinical uses with different types of dysarthria and
                 neurological diseases is quite promising.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "9",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Martinez:2015:IAS,
  author =       "David Mart{\'\i}nez and Eduardo Lleida and Phil Green
                 and Heidi Christensen and Alfonso Ortega and Antonio
                 Miguel",
  title =        "Intelligibility Assessment and Speech Recognizer Word
                 Accuracy Rate Prediction for Dysarthric Speakers in a
                 Factor Analysis Subspace",
  journal =      j-TACCESS,
  volume =       "6",
  number =       "3",
  pages =        "10:1--10:??",
  month =        jun,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2746405",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Jun 10 11:04:09 MDT 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Automated intelligibility assessments can support
                 speech and language therapists in determining the type
                 of dysarthria presented by their clients. Such
                 assessments can also help predict how well a person
                 with dysarthria might cope with a voice interface to
                 assistive technology. Our approach to intelligibility
                 assessment is based on iVectors, a set of measures that
                 capture many aspects of a person's speech, including
                 intelligibility. The major advantage of iVectors is
                 that they compress all acoustic information contained
                 in an utterance into a reduced number of measures, and
                 they are very suitable to be used with simple
                 predictors. We show that intelligibility assessments
                 work best if there is a pre-existing set of words
                 annotated for intelligibility from the speaker to be
                 evaluated, which can be used for training our system.
                 We discuss the implications of our findings for
                 practice.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "10",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Rudzicz:2015:PSL,
  author =       "Frank Rudzicz and Heidi Christensen and Fran{\c{c}}ois
                 Portet and Jan Alexandersson",
  title =        "Perspectives on Speech and Language Interaction for
                 Daily Assistive Technology: Introduction to {Part 2}
                 --- Speaking and Reading Aids",
  journal =      j-TACCESS,
  volume =       "6",
  number =       "4",
  pages =        "11:1--11:??",
  month =        jun,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2767690",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Oct 1 15:28:12 MDT 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "11",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Mcloughlin:2015:RPS,
  author =       "Ian V. Mcloughlin and Hamid Reza Sharifzadeh and Su
                 Lim Tan and Jingjie Li and Yan Song",
  title =        "Reconstruction of Phonated Speech from Whispers Using
                 Formant-Derived Plausible Pitch Modulation",
  journal =      j-TACCESS,
  volume =       "6",
  number =       "4",
  pages =        "12:1--12:??",
  month =        jun,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2737724",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Oct 1 15:28:12 MDT 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Whispering is a natural, unphonated, secondary aspect
                 of speech communications for most people. However, it
                 is the primary mechanism of communications for some
                 speakers who have impaired voice production mechanisms,
                 such as partial laryngectomees, as well as for those
                 prescribed voice rest, which often follows surgery or
                 damage to the larynx. Unlike most people, who choose
                 when to whisper and when not to, these speakers may
                 have little choice but to rely on whispers for much of
                 their daily vocal interaction. Even though most
                 speakers will whisper at times, and some speakers can
                 only whisper, the majority of today's computational
                 speech technology systems assume or require phonated
                 speech. This article considers conversion of whispers
                 into natural-sounding phonated speech as a noninvasive
                 prosthetic aid for people with voice impairments who
                 can only whisper. As a by-product, the technique is
                 also useful for unimpaired speakers who choose to
                 whisper. Speech reconstruction systems can be
                 classified into those requiring training and those that
                 do not. Among the latter, a recent parametric
                 reconstruction framework is explored and then enhanced
                 through a refined estimation of plausible pitch from
                 weighted formant differences. The improved
                 reconstruction framework, with proposed formant-derived
                 artificial pitch modulation, is validated through
                 subjective and objective comparison tests alongside
                 state-of-the-art alternatives.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "12",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Aihara:2015:IPV,
  author =       "Ryo Aihara and Tetsuya Takiguchi and Yasuo Ariki",
  title =        "Individuality-Preserving Voice Conversion for
                 Articulation Disorders Using Phoneme-Categorized
                 Exemplars",
  journal =      j-TACCESS,
  volume =       "6",
  number =       "4",
  pages =        "13:1--13:??",
  month =        jun,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2738048",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Oct 1 15:28:12 MDT 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "We present a voice conversion (VC) method for a person
                 with an articulation disorder resulting from athetoid
                 cerebral palsy. The movements of such speakers are
                 limited by their athetoid symptoms and their consonants
                 are often unstable or unclear, which makes it difficult
                 for them to communicate. Exemplar-based spectral
                 conversion using Nonnegative Matrix Factorization (NMF)
                 is applied to a voice from a speaker with an
                 articulation disorder. In our conventional work, we
                 used a combined dictionary that was constructed from
                 the source speaker's vowels and the consonants from a
                 target speaker without articulation disorders in order
                 to preserve the speaker's individuality. However, this
                 conventional exemplar-based approach needs to use all
                 the training exemplars (frames), and it may cause
                 mismatching of phonemes between input signals and
                 selected exemplars. In order to reduce the mismatching
                 of phoneme alignment, we propose a phoneme-categorized
                 subdictionary and a dictionary selection method using
                 NMF. The effectiveness of this method was confirmed by
                 comparing its effectiveness with that of a conventional
                 Gaussian Mixture Model (GMM)-based and a conventional
                 exemplar-based method.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "13",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Saggion:2015:MIS,
  author =       "Horacio Saggion and Sanja Stajner and Stefan Bott and
                 Simon Mille and Luz Rello and Biljana Drndarevic",
  title =        "Making It Simplext: Implementation and Evaluation of a
                 Text Simplification System for {Spanish}",
  journal =      j-TACCESS,
  volume =       "6",
  number =       "4",
  pages =        "14:1--14:??",
  month =        jun,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2738046",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Oct 1 15:28:12 MDT 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "The way in which a text is written can be a barrier
                 for many people. Automatic text simplification is a
                 natural language processing technology that, when
                 mature, could be used to produce texts that are adapted
                 to the specific needs of particular users. Most
                 research in the area of automatic text simplification
                 has dealt with the English language. In this article,
                 we present results from the Simplext project, which is
                 dedicated to automatic text simplification for Spanish.
                 We present a modular system with dedicated procedures
                 for syntactic and lexical simplification that are
                 grounded on the analysis of a corpus manually
                 simplified for people with special needs. We carried
                 out an automatic evaluation of the system's output,
                 taking into account the interaction between three
                 different modules dedicated to different simplification
                 aspects. One evaluation is based on readability metrics
                 for Spanish and shows that the system is able to reduce
                 the lexical and syntactic complexity of the texts. We
                 also show, by means of a human evaluation, that
                 sentence meaning is preserved in most cases. Our
                 results, even if our work represents the first
                 automatic text simplification system for Spanish that
                 addresses different linguistic aspects, are comparable
                 to the state of the art in English Automatic Text
                 Simplification.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "14",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Hu:2015:IIS,
  author =       "Ruimin Hu and Jinjuan Heidi Feng",
  title =        "Investigating Information Search by People with
                 Cognitive Disabilities",
  journal =      j-TACCESS,
  volume =       "7",
  number =       "1",
  pages =        "1:1--1:??",
  month =        jun,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2729981",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Jun 3 06:26:16 MDT 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "The ability to gather information online has become
                 increasingly important in the past decades. Previous
                 research suggests that people with cognitive
                 disabilities experience challenges when finding
                 information on websites. Although a number of studies
                 examined the impact of various design guidelines on
                 information search by people with cognitive
                 disabilities, our knowledge in this topic remains
                 limited. To date, no study has been conducted to
                 examine how people with cognitive disabilities navigate
                 in different content structures. We completed an
                 empirical study to investigate the impact of different
                 search methods and content structures on the search
                 behavior of people with cognitive disabilities. 23
                 participants with various cognitive disabilities
                 completed 15 information search tasks under three
                 conditions: browsing a website with a deep structure ($
                 4 \times 4 \times 4 \times 4$), browsing a website with
                 a broad structure ($ 16 \times 16$), and searching
                 through a search engine. The results suggest that the
                 participants overwhelmingly preferred the search engine
                 method to the two browsing conditions. The broad
                 structure resulted in significantly higher failure
                 rates than the search engine condition and the deep
                 structure condition. The causes of failed search tasks
                 were analyzed in detail. Participants frequently
                 visited incorrect categories in both the deep structure
                 and the broad structure conditions. However, it was
                 more difficult to recover from incorrect categories on
                 the lower-level pages in the broad structure than in
                 the deep structure. Under the search engine condition,
                 failed tasks were mainly caused by difficulty in
                 selecting the correct link from the returned list,
                 misspellings, and difficulty in generating appropriate
                 search keywords.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "1",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Garcia:2015:MPL,
  author =       "Lu{\'\i}s Filipe Garcia and Lu{\'\i}s {Caldas De
                 Oliveira} and David {Martins De Matos}",
  title =        "Measuring the Performance of a Location-Aware Text
                 Prediction System",
  journal =      j-TACCESS,
  volume =       "7",
  number =       "1",
  pages =        "2:1--2:??",
  month =        jun,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2739998",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Jun 3 06:26:16 MDT 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "In recent years, some works have discussed the
                 conception of location-aware Augmentative and
                 Alternative Communication (AAC) systems with very
                 positive feedback from participants. However, in most
                 cases, complementary quantitative evaluations have not
                 been carried out to confirm those results. To
                 contribute to clarifying the validity of these
                 approaches, our study quantitatively evaluated the
                 effect of using language models with location knowledge
                 on the efficiency of a word and sentence prediction
                 system. Using corpora collected for three different
                 locations (classroom, school cafeteria, home),
                 location-specific language models were trained with
                 sentences from each location and compared with a
                 traditional all-purpose language model, trained on all
                 corpora. User tests showed a modest mean improvement of
                 2.4\% and 1.3\% for Words Per Minute (WPM) and
                 Keystroke Saving Rate (KSR), respectively, but the
                 differences were not statistically significant. Since
                 our text prediction system relies on the concept of
                 sentence reuse, we ran a set of simulations with
                 language models having different sentence knowledge
                 levels (0\%, 25\%, 50\%, 75\%, 100\%). We also
                 introduced in the comparison a second location-aware
                 strategy that combines the location-specific approach
                 with the all-purpose approach (mixed approach). The
                 mixed language models performed better under low
                 sentence-reuse conditions (0\%, 25\%, 50\%) with 1.0\%,
                 1.3\%, and 1.2\% KSR improvements, respectively. The
                 location-specific language models performed better
                 under high sentence-reuse conditions (75\%, 100\%) with
                 1.7\% and 1.5\% KSR improvements, respectively.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "2",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Boyd:2015:ECI,
  author =       "Louanne E. Boyd and Kathryn E. Ringland and Oliver L.
                 Haimson and Helen Fernandez and Maria Bistarkey and
                 Gillian R. Hayes",
  title =        "Evaluating a Collaborative {iPad} Game's Impact on
                 Social Relationships for Children with Autism Spectrum
                 Disorder",
  journal =      j-TACCESS,
  volume =       "7",
  number =       "1",
  pages =        "3:1--3:??",
  month =        jun,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2751564",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Jun 3 06:26:16 MDT 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "This article describes how collaborative assistive
                 technologies, housed on off-the-shelf, low-cost
                 platforms such as the iPad, can be used to facilitate
                 social relationships in children with autism spectrum
                 disorder (ASD). Through an empirical study of the use
                 of a collaborative iPad game, Zody, we explore how
                 assistive technologies can be used to support social
                 relationships, even without intervention from adults.
                 We discuss how specific design choices can encourage
                 three levels of social relationship: membership,
                 partnership, and friendship. This work contributes to
                 research on both assistive technologies and
                 collaborative gaming through a framework that describes
                 how specific in-game elements can foster social skill
                 development for children with ASD.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "3",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Portet:2015:PSL,
  author =       "Fran{\c{c}} Ois Portet and Heidi Christensen and Frank
                 Rudzicz and Jan Alexandersson",
  title =        "Perspectives on Speech and Language Interaction for
                 Daily Assistive Technology: Overall Introduction to the
                 Special {IssuePart 3}",
  journal =      j-TACCESS,
  volume =       "7",
  number =       "2",
  pages =        "4:1--4:??",
  month =        jul,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2791576",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Aug 7 10:36:10 MDT 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "4",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Vacher:2015:ECA,
  author =       "Michel Vacher and Sybille Caffiau and Fran{\c{c}}ois
                 Portet and Brigitte Meillon and Camille Roux and Elena
                 Elias and Benjamin Lecouteux and Pedro Chahuara",
  title =        "Evaluation of a Context-Aware Voice Interface for
                 Ambient Assisted Living: Qualitative User Study vs.
                 Quantitative System Evaluation",
  journal =      j-TACCESS,
  volume =       "7",
  number =       "2",
  pages =        "5:1--5:??",
  month =        jul,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2738047",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Aug 7 10:36:10 MDT 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "This article presents an experiment with seniors and
                 people with visual impairment in a voice-controlled
                 smart home using the Sweet-Home system. The experiment
                 shows some weaknesses in automatic speech recognition
                 that must be addressed, as well as the need for better
                 adaptation to the user and the environment. Users were
                 disturbed by the rigid structure of the grammar and
                 were eager to adapt it to their own preferences.
                 Surprisingly, while no humanoid aspect was introduced
                 in the system, the senior participants were inclined to
                 embody the system. Despite these areas for improvement,
                 the system was favorably assessed as diminishing most
                 participants' fears related to the loss of
                 autonomy.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "5",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Rudzicz:2015:SIP,
  author =       "Frank Rudzicz and Rosalie Wang and Momotaz Begum and
                 Alex Mihailidis",
  title =        "Speech Interaction with Personal Assistive Robots
                 Supporting Aging at Home for Individuals with
                 {Alzheimer}'s Disease",
  journal =      j-TACCESS,
  volume =       "7",
  number =       "2",
  pages =        "6:1--6:??",
  month =        jul,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2744206",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Aug 7 10:36:10 MDT 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Increases in the prevalence of dementia and
                 Alzheimer's disease (AD) are a growing challenge in
                 many nations where healthcare infrastructures are
                 ill-prepared for the upcoming demand for personal
                 caregiving. To help individuals with AD live at home
                 for longer, we are developing a mobile robot, called
                 ED, intended to assist with activities of daily living
                 through visual monitoring and verbal prompts in cases
                 of difficulty. In a series of experiments, we study
                 speech-based interactions between ED and each of 10
                 older adults with AD as the latter complete daily tasks
                 in a simulated home environment. Traditional automatic
                 speech recognition is evaluated in this environment,
                 along with rates of verbal behaviors that indicate
                 confusion or trouble with the conversation. Analysis
                 reveals that speech recognition remains a challenge in
                 this setup, especially during household tasks with
                 individuals with AD. Across the verbal behaviors that
                 indicate confusion, older adults with AD are very
                 likely to simply ignore the robot, which accounts for
                 over 40\% of all such behaviors when interacting with
                 the robot. This work provides a baseline assessment of
                 the types of technical and communicative challenges
                 that will need to be overcome for robots to be used
                 effectively in the home for speech-based assistance
                 with daily living.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "6",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Alexanderson:2015:TFA,
  author =       "Simon Alexanderson and Jonas Beskow",
  title =        "Towards Fully Automated Motion Capture of Signs ---
                 Development and Evaluation of a Key Word Signing
                 Avatar",
  journal =      j-TACCESS,
  volume =       "7",
  number =       "2",
  pages =        "7:1--7:??",
  month =        jul,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2764918",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Aug 7 10:36:10 MDT 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Motion capture of signs provides unique challenges in
                 the field of multimodal data collection. The dense
                 packaging of visual information requires high fidelity
                 and high bandwidth of the captured data. Even though
                 marker-based optical motion capture provides many
                 desirable features such as high accuracy, global
                 fitting, and the ability to record body and face
                 simultaneously, it is not widely used to record finger
                 motion, especially not for articulated and syntactic
                 motion such as signs. Instead, most signing avatar
                 projects use costly instrumented gloves, which require
                 long calibration procedures. In this article, we
                 evaluate the data quality obtained from optical motion
                 capture of isolated signs from Swedish sign language
                 with a large number of low-cost cameras. We also
                 present a novel dual-sensor approach to combine the
                 data with low-cost, five-sensor instrumented gloves to
                 provide a recording method with low manual
                 postprocessing. Finally, we evaluate the collected data
                 and the dual-sensor approach as transferred to a highly
                 stylized avatar. The application of the avatar is a
                 game-based environment for training Key Word Signing
                 (KWS) as augmented and alternative communication (AAC),
                 intended for children with communication
                 disabilities.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "7",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Ladner:2015:IASb,
  author =       "Richard Ladner and Jonathan Lazar",
  title =        "Introduction to the {ASSETS'13} Special Issue, {Part
                 2}",
  journal =      j-TACCESS,
  volume =       "7",
  number =       "3",
  pages =        "8:1--8:??",
  month =        nov,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2825095",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Nov 16 06:05:47 MST 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "8",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Oh:2015:ABF,
  author =       "Uran Oh and Stacy Branham and Leah Findlater and Shaun
                 K. Kane",
  title =        "Audio-Based Feedback Techniques for Teaching
                 Touchscreen Gestures",
  journal =      j-TACCESS,
  volume =       "7",
  number =       "3",
  pages =        "9:1--9:??",
  month =        nov,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2764917",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Nov 16 06:05:47 MST 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "While sighted users may learn to perform touchscreen
                 gestures through observation (e.g., of other users or
                 video tutorials), such mechanisms are inaccessible for
                 users with visual impairments. As a result, learning to
                 perform gestures without visual feedback can be
                 challenging. We propose and evaluate two techniques to
                 teach touchscreen gestures to users with visual
                 impairments: (1) gesture sonification to generate sound
                 based on finger touches, creating an audio
                 representation of a gesture; and (2) corrective verbal
                 feedback that combined automatic analysis of the user's
                 drawn gesture with speech feedback. To refine and
                 evaluate the techniques, we conducted three controlled
                 laboratory studies. The first study, with 12 sighted
                 participants, compared parameters for sonifying
                 gestures in an eyes-free scenario. We identified
                 pitch+stereo panning as the best combination. In the
                 second study, ten blind and low-vision participants
                 completed gesture replication tasks for single-stroke,
                 multistroke, and multitouch gestures using the gesture
                 sonification feedback. We found that multistroke
                 gestures were more difficult to understand in
                 sonification, but that playing each finger sound
                 serially may improve understanding. In the third study,
                 six blind and low-vision participants completed gesture
                 replication tasks with both the sonification and
                 corrective verbal feedback techniques. Subjective data
                 and preliminary performance findings indicated that the
                 techniques offer complementary advantages: although
                 verbal feedback was preferred overall primarily due to
                 the precision of its instructions, almost all
                 participants appreciated the sonification for certain
                 situations (e.g., to convey speed). This article
                 extends our previous publication on gesture
                 sonification by extending these techniques to
                 multistroke and multitouch gestures. These findings
                 provide a foundation for nonvisual training systems for
                 touchscreen gestures.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "9",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Parnandi:2015:DRT,
  author =       "Avinash Parnandi and Virendra Karappa and Tian Lan and
                 Mostafa Shahin and Jacqueline McKechnie and Kirrie
                 Ballard and Beena Ahmed and Ricardo Gutierrez-Osuna",
  title =        "Development of a Remote Therapy Tool for Childhood
                 Apraxia of Speech",
  journal =      j-TACCESS,
  volume =       "7",
  number =       "3",
  pages =        "10:1--10:??",
  month =        nov,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2776895",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Nov 16 06:05:47 MST 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "We present a multitier system for the remote
                 administration of speech therapy to children with
                 apraxia of speech. The system uses a client-server
                 architecture model and facilitates task-oriented remote
                 therapeutic training in both in-home and clinical
                 settings. The system allows a speech language
                 pathologist (SLP) to remotely assign speech production
                 exercises to each child through a web interface and the
                 child to practice these exercises in the form of a game
                 on a mobile device. The mobile app records the child's
                 utterances and streams them to a back-end server for
                 automated scoring by a speech-analysis engine. The SLP
                 can then review the individual recordings and the
                 automated scores through a web interface, provide
                 feedback to the child, and adapt the training program
                 as needed. We have validated the system through a pilot
                 study with children diagnosed with apraxia of speech,
                 their parents, and SLPs. Here, we describe the overall
                 client-server architecture, middleware tools used to
                 build the system, speech-analysis tools for automatic
                 scoring of utterances, and present results from a
                 clinical study. Our results support the feasibility of
                 the system as a complement to traditional face-to-face
                 therapy through the use of mobile tools and automated
                 speech analysis algorithms.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "10",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Tran:2015:EIB,
  author =       "Jessica J. Tran and Eve A. Riskin and Richard E.
                 Ladner and Jacob O. Wobbrock",
  title =        "Evaluating Intelligibility and Battery Drain of Mobile
                 Sign Language Video Transmitted at Low Frame Rates and
                 Bit Rates",
  journal =      j-TACCESS,
  volume =       "7",
  number =       "3",
  pages =        "11:1--11:??",
  month =        nov,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2797142",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Nov 16 06:05:47 MST 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Mobile sign language video conversations can become
                 unintelligible if high video transmission rates cause
                 network congestion and delayed video. In an effort to
                 understand the perceived lower limits of intelligible
                 sign language video intended for mobile communication,
                 we evaluated sign language video transmitted at four
                 low frame rates (1, 5, 10, and 15 frames per second
                 [fps]) and four low fixed bit rates (15, 30, 60, and
                 120 kilobits per second [kbps]) at a constant spatial
                 resolution of 320 $ \times $ 240 pixels. We discovered
                 an ``intelligibility ceiling effect,'' in which
                 increasing the frame rate above 10fps did not improve
                 perceived intelligibility, and increasing the bit rate
                 above 60kbps produced diminishing returns. Given the
                 study parameters, our findings suggest that relaxing
                 the recommended frame rate and bit rate to 10fps at
                 60kbps will provide intelligible video conversations
                 while reducing total bandwidth consumption to 25\% of
                 the ITU-T standard (at least 25fps and 100kbps). As
                 part of this work, we developed the Human Signal
                 Intelligibility Model, a new conceptual model useful
                 for informing evaluations of video intelligibility and
                 our methodology for creating linguistically accessible
                 web surveys for deaf people. We also conducted a
                 battery-savings experiment quantifying battery drain
                 when sign language video is transmitted at the lower
                 frame rates and bit rates. Results confirmed that
                 increasing the transmission rates monotonically
                 decreased the battery life.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "11",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Morash:2015:GNW,
  author =       "Valerie S. Morash and Yue-Ting Siu and Joshua A. Miele
                 and Lucia Hasty and Steven Landau",
  title =        "Guiding Novice {Web} Workers in Making Image
                 Descriptions Using Templates",
  journal =      j-TACCESS,
  volume =       "7",
  number =       "4",
  pages =        "12:1--12:??",
  month =        nov,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2764916",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Nov 25 15:32:35 MST 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "This article compares two methods of employing novice
                 Web workers to author descriptions of science,
                 technology, engineering, and mathematics images to make
                 them accessible to individuals with visual and
                 print-reading disabilities. The goal is to identify
                 methods of creating image descriptions that are
                 inexpensive, effective, and follow established
                 accessibility guidelines. The first method explicitly
                 presented the guidelines to the worker, then the worker
                 constructed the image description in an empty text box
                 and table. The second method queried the worker for
                 image information and then used responses to construct
                 a template-based description according to established
                 guidelines. The descriptions generated through queried
                 image description (QID) were more likely to include
                 information on the image category, title, caption, and
                 units. They were also more similar to one another,
                 based on Jaccard distances of q-grams, indicating that
                 their word usage and structure were more standardized.
                 Last, the workers preferred describing images using QID
                 and found the task easier. Therefore, explicit
                 instruction on image-description guidelines is not
                 sufficient to produce quality image descriptions when
                 using novice Web workers. Instead, it is better to
                 provide information about images, then generate
                 descriptions from responses using templates.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "12",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Mahmud:2015:IDF,
  author =       "Abdullah {Al Mahmud} and Jean-Bernard Martens",
  title =        "Iterative Design and Field Trial of an
                 Aphasia-Friendly Email Tool",
  journal =      j-TACCESS,
  volume =       "7",
  number =       "4",
  pages =        "13:1--13:??",
  month =        nov,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2790305",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Nov 25 15:32:35 MST 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "In this article, we describe the iterative design and
                 field trial of Amail, an email client specifically
                 designed for people with aphasia who have problems
                 expressing themselves verbally. We conducted a 3-month
                 study with eight persons with aphasia to better
                 understand how people with aphasia could integrate
                 Amail in their daily life. Subjective data
                 (questionnaires, interviews, and diaries) and objective
                 data (usage logs) were collected to gain understanding
                 of the usage patterns. All persons with aphasia in our
                 study were able to use Amail independently, and four
                 participants continued using Amail after the study
                 period. The usage patterns, especially the frequency
                 and length of the composed email messages, indicated
                 that, over time, persons with aphasia were able to
                 improve their email communication. Email partners also
                 had the impression that their correspondents with
                 aphasia were improving gradually. Last but not least,
                 the use of Amail positively influenced the number and
                 quality of social contacts for the persons with
                 aphasia. We also report some of the challenges
                 encountered while conducting the field trial.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "13",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Oh:2015:PCH,
  author =       "Uran Oh and Leah Findlater",
  title =        "A Performance Comparison of On-Hand versus On-Phone
                 Nonvisual Input by Blind and Sighted Users",
  journal =      j-TACCESS,
  volume =       "7",
  number =       "4",
  pages =        "14:1--14:??",
  month =        nov,
  year =         "2015",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2820616",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Nov 25 15:32:35 MST 2015",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "On-body interaction, in which the user employs one's
                 own body as an input surface, has the potential to
                 provide efficient mobile computing access for blind
                 users. It offers increased tactile and proprioceptive
                 feedback compared to a phone and, because it is always
                 available, it should allow for quick audio output
                 control without having to retrieve the phone from a
                 pocket or bag. Despite this potential, there has been
                 little investigation of on-body input for users with
                 visual impairments. To assess blind users' performance
                 with on-body input versus touchscreen input, we
                 conducted a controlled lab study with 12 sighted and 11
                 blind participants. Study tasks included basic pointing
                 and drawing more complex shape gestures. Our findings
                 confirm past work with sighted users showing that the
                 hand results in faster pointing than the phone. Most
                 important, we also show that: (1) the performance gain
                 of the hand applies to blind users as well, (2) the
                 accuracy of where the pointing finger first lands is
                 higher with the hand than the phone, (3) on-hand
                 pointing performance is affected by the location of
                 targets, and (4) shape gestures drawn on the hand
                 result in higher gesture recognition rates than those
                 on the phone. Our findings highlight the potential of
                 on-body input to support accessible nonvisual mobile
                 computing.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "14",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Richards:2016:IASa,
  author =       "John Richards",
  title =        "Introduction to the {ASSETS '14} Special Issue",
  journal =      j-TACCESS,
  volume =       "8",
  number =       "1",
  pages =        "1:1--1:??",
  month =        jan,
  year =         "2016",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2853995",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Feb 6 08:01:37 MST 2016",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "1",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Guerreiro:2016:SDC,
  author =       "Jo{\~a}o Guerreiro and Daniel Gon{\c{c}}alves",
  title =        "Scanning for Digital Content: How Blind and Sighted
                 People Perceive Concurrent Speech",
  journal =      j-TACCESS,
  volume =       "8",
  number =       "1",
  pages =        "2:1--2:??",
  month =        jan,
  year =         "2016",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2822910",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Feb 6 08:01:37 MST 2016",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "The widespread availability of digital media has
                 changed the way that people consume information and has
                 impacted the consumption of auditory information.
                 Despite this recent popularity among sighted people,
                 the use of auditory feedback to access digital
                 information is not new for visually impaired users.
                 However, its sequential nature undermines both blind
                 and sighted people's ability to efficiently find
                 relevant information in the midst of several
                 potentially useful items. We propose taking advantage
                 of the Cocktail Party Effect, which states that people
                 are able to focus on a single speech source among
                 several conversations, but still identify relevant
                 content in the background. Therefore, in contrast to
                 one sequential speech channel, we hypothesize that
                 people can leverage concurrent speech channels to
                 quickly get the gist of digital information. In this
                 article, we present an experiment with 46 (23 blind, 23
                 sighted) participants, which aims to understand
                 people's ability to search for relevant content
                 listening to two, three, or four concurrent speech
                 channels. Our results suggest that both blind and
                 sighted people are able to process concurrent speech in
                 scanning scenarios. In particular, two concurrent
                 sources may be used both to identify and to understand
                 the content of the relevant sentence. Moreover, three
                 sources may be used by most people, depending on the
                 task's intelligibility demands and user
                 characteristics. Contrasting with related work, the use
                 of different voices did not affect the perception of
                 concurrent speech but was highly preferred by
                 participants. To complement the analysis, we propose a
                 set of scenarios that may benefit from the use of
                 concurrent speech sources, for both blind and sighted
                 people, toward a Design for All paradigm.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "2",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Baker:2016:TGV,
  author =       "Catherine M. Baker and Lauren R. Milne and Ryan
                 Drapeau and Jeffrey Scofield and Cynthia L. Bennett and
                 Richard E. Ladner",
  title =        "Tactile Graphics with a Voice",
  journal =      j-TACCESS,
  volume =       "8",
  number =       "1",
  pages =        "3:1--3:??",
  month =        jan,
  year =         "2016",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2854005",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Feb 6 08:01:37 MST 2016",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "We discuss the development of Tactile Graphics with a
                 Voice (TGV), a system used to access label information
                 in tactile graphics using QR codes. Blind students
                 often rely on tactile graphics to access textbook
                 images. Many textbook images have a large number of
                 text labels that need to be made accessible. In order
                 to do so, we propose TGV, which uses QR codes to
                 replace the text, as an alternative to Braille. The
                 codes are read with a smartphone application. We
                 evaluated the system with a longitudinal study where 10
                 blind and low-vision participants completed tasks using
                 three different modes on the smartphone application:
                 (1) no guidance, (2) verbal guidance, and (3)
                 finger-pointing guidance. Our results show that TGV is
                 an effective way to access text in tactile graphics,
                 especially for those blind users who are not fluent in
                 Braille. We also found that preferences varied greatly
                 across the modes, indicating that future work should
                 support multiple modes. We expand upon the algorithms
                 we used to implement the finger pointing, as well as
                 the algorithms that automatically place QR codes on
                 documents. We also
                 discuss work we have started on creating a Google Glass
                 version of the application.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "3",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}
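
The TGV system above replaces printed text labels with QR codes that a
smartphone application reads aloud. As a rough sketch of that encoding
step only (not the authors' implementation), the following Python
fragment uses the third-party qrcode package; the label text and output
filename are invented:

    # Illustrative only: encode one tactile-graphic label as a QR code.
    # Requires the third-party "qrcode" package (pip install qrcode[pil]).
    import qrcode

    label_text = "Figure 3.2: mitochondrion"   # hypothetical label text
    img = qrcode.make(label_text)              # returns a PIL image
    img.save("label_3_2.png")                  # printed beside the label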

@Article{Dee:2016:PRU,
  author =       "Marianne Dee and Vicki L. Hanson",
  title =        "A Pool of Representative Users for Accessibility
                 Research: Seeing through the Eyes of the Users",
  journal =      j-TACCESS,
  volume =       "8",
  number =       "1",
  pages =        "4:1--4:??",
  month =        jan,
  year =         "2016",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2845088",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Feb 6 08:01:37 MST 2016",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "A critical element of accessibility research is the
                 exploration and evaluation of ideas with representative
                 users. However, it is often difficult to recruit such a
                 range of users, particularly in a timely manner, and it
                 is not easy for new researchers to understand how to
                 recruit relevant populations or to feel confident in
                 communicating with older or ``vulnerable'' users. We
                 report on the establishment of a large user pool
                 created to facilitate accessibility research through
                 recruiting sizeable numbers of older adults potentially
                 interested in taking part in research studies about
                 technology. We suggest points to guide new researchers
                 and invite other experts to build on these. We also
                 sketch some of the lessons learned from creating and
                 maintaining this pool of individuals, including
                 thoughts on issues for others wishing to set up similar
                 pools.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "4",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Shinohara:2016:SCS,
  author =       "Kristen Shinohara and Jacob O. Wobbrock",
  title =        "Self-Conscious or Self-Confident? {A} Diary Study
                 Conceptualizing the Social Accessibility of Assistive
                 Technology",
  journal =      j-TACCESS,
  volume =       "8",
  number =       "2",
  pages =        "5:1--5:??",
  month =        jan,
  year =         "2016",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2827857",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon May 2 16:14:40 MDT 2016",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "With the recent influx of smartphones, tablets, and
                 wearables such as watches and glasses, personal
                 interactive device use is increasingly visible and
                 commonplace in public and social spaces. Assistive
                 Technologies (ATs) used by people with disabilities are
                 observable to others and, as a result, can affect how
                 AT users are perceived. This raises the possibility
                 that what we call ``social accessibility'' may be as
                 important as ``functional accessibility'' when
                 considering ATs. But, to date, ATs have almost
                 exclusively been regarded as functional aids. For
                 example, ATs are defined by the Technical Assistance to
                 the States Act as technologies that are ``used to
                 increase, maintain or improve functional capabilities
                 of individuals with disabilities.'' To investigate
                 perceptions and self-perceptions of AT users, we
                 conducted a diary study of two groups of participants:
                 people with disabilities and people without
                 disabilities. Our goal was to explore the types of
                 interactions and perceptions that arise around AT use
                 in social and public spaces. During our 4-week study,
                 participants with sensory disabilities wrote about
                 feeling either self-conscious or self-confident when
                 using an assistive device in a social or public
                 situation. Meanwhile, participants without disabilities
                 were prompted to record their reactions and feelings
                 whenever they saw ATs used in social or public
                 situations. We found that AT form and function do
                 influence social interactions by impacting
                 self-efficacy and self-confidence. When the design of
                 form or function is poor, or when inequality in
                 technological accessibility exists, social inclusion is
                 negatively affected, as are perceptions of ability. We
                 contribute a definition for the ``social
                 accessibility'' of ATs and subsequently offer Design
                 for Social Accessibility (DSA) as a holistic design
                 stance focused on balancing an AT user's sociotechnical
                 identity with functional requirements.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "5",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Brewer:2016:ETP,
  author =       "Robin Brewer and Raymundo Cornejo Garcia and Tedmond
                 Schwaba and Darren Gergle and Anne Marie Piper",
  title =        "Exploring Traditional Phones as an E-Mail Interface
                 for Older Adults",
  journal =      j-TACCESS,
  volume =       "8",
  number =       "2",
  pages =        "6:1--6:??",
  month =        jan,
  year =         "2016",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2839303",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon May 2 16:14:40 MDT 2016",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "This article explores the design and development of
                 voice interfaces via traditional telephones as a way of
                 supporting asynchronous online communication for older
                 adults. E-mail is the primary form of online
                 communication for older adults. However, e-mail
                 communication currently requires access to and the
                 ability to use an Internet-connected computer or
                 device, which may be problematic due to barriers of
                 physical access, expense, insufficient computer skill,
                 or other accessibility issues. To address this, the
                 present work leverages the pervasive hardware of
                 traditional phones and familiar nonvisual models of
                 phone-based interaction to create a new e-mail
                 interface for older adults. We examine the potential of
                 e-mail interaction via traditional phones through
                 long-term field observations, prototype testing, and a
                 four-week field-based user study. Our findings indicate
                 that a simple voice e-mail interface accessed through
                 traditional phones is usable for older adults and is a
                 useful way for offline older adults to interact with an
                 e-mail account. The ease of use and convenience of a
                 phone-based interface are important given the ``work''
                 of keeping in touch over e-mail, and this approach has
                 the potential to open up new avenues of online access
                 for older adults who are still offline or who have
                 late-life disabilities that make using traditional
                 graphical e-mail systems difficult. This project
                 contributes new insights regarding the ways in which
                 voice interfaces can support asynchronous online
                 communication for older adults and provides design
                 guidance for the development of subsequent voice
                 interfaces.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "6",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Li:2016:STM,
  author =       "Kehuang Li and Zhengyu Zhou and Chin-Hui Lee",
  title =        "Sign Transition Modeling and a Scalable Solution to
                 Continuous Sign Language Recognition for Real-World
                 Applications",
  journal =      j-TACCESS,
  volume =       "8",
  number =       "2",
  pages =        "7:1--7:??",
  month =        jan,
  year =         "2016",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2850421",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon May 2 16:14:40 MDT 2016",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "We propose a new approach to modeling transition
                 information between signs in continuous Sign Language
                 Recognition (SLR) and address some scalability issues
                 in designing SLR systems. In contrast to Automatic
                 Speech Recognition (ASR) in which the transition
                 between speech sounds is often brief and mainly
                 addressed by the coarticulation effect, the sign
                 transition in continuous SLR is far from being clear
                 and usually not easily and exactly characterized.
                 Leveraging hidden Markov modeling techniques from
                 ASR, we propose a modeling framework for continuous
                 SLR with the following major advantages: (i)
                 the system is easy to scale up to large-vocabulary SLR;
                 (ii) modeling of signs as well as the transitions
                 between signs is robust even for noisy data collected
                 in real-world SLR; and (iii) extensions to training,
                 decoding, and adaptation are directly applicable even
                 with new deep learning algorithms. A pair of low-cost
                 digital gloves, affordable for the deaf and hard of
                 hearing community, is used to collect training and
                 testing data for real-world SLR
                 interaction applications. Evaluated on 1,024 testing
                 sentences from five signers, a word accuracy rate of
                 87.4\% is achieved using a vocabulary of 510 words. The
                 recognizer runs in real time, requiring an average of
                 0.69 s per sentence. The encouraging results indicate
                 that it is feasible to develop real-world SLR
                 applications based on the proposed SLR framework.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "7",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}
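
The framework above adapts ASR-style hidden Markov modeling to signs
and the transitions between them. Purely as a sketch of the underlying
technique, simplified to the isolated-sign case rather than the
authors' continuous, transition-aware system, per-sign Gaussian HMMs
over glove feature sequences could be trained and compared as follows
(the hmmlearn package, the state count, and the data shapes are
assumptions):

    # Simplified isolated-sign HMM sketch (not the paper's framework).
    # Assumes numpy and the third-party "hmmlearn" package.
    import numpy as np
    from hmmlearn import hmm

    def train_sign_model(sequences, n_states=5):
        """Fit one Gaussian HMM per sign from a list of
        (frames x features) arrays of glove measurements."""
        X = np.vstack(sequences)
        lengths = [len(s) for s in sequences]
        model = hmm.GaussianHMM(n_components=n_states,
                                covariance_type="diag", n_iter=50)
        model.fit(X, lengths)
        return model

    def recognize(sequence, models):
        """Return the sign label whose HMM scores the sequence highest."""
        return max(models, key=lambda sign: models[sign].score(sequence))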

@Article{Richards:2016:IASb,
  author =       "John Richards",
  title =        "Introduction to the {ASSETS'14} Special Issue, {Part
                 2}",
  journal =      j-TACCESS,
  volume =       "8",
  number =       "3",
  pages =        "8:1--8:??",
  month =        may,
  year =         "2016",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2891030",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon May 2 16:14:41 MDT 2016",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "8",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Fage:2016:TBA,
  author =       "Charles Fage and L{\'e}onard Pommereau and Charles
                 Consel and Emilie Balland and H{\'e}l{\`e}ne
                 Sauz{\'e}on",
  title =        "Tablet-Based Activity Schedule in Mainstream
                 Environment for Children with Autism and Children with
                 {ID}",
  journal =      j-TACCESS,
  volume =       "8",
  number =       "3",
  pages =        "9:1--9:??",
  month =        may,
  year =         "2016",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2854156",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon May 2 16:14:41 MDT 2016",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Including children with autism spectrum disorders
                 (ASD) in mainstream environments creates a need for new
                 interventions whose efficacy must be assessed in situ.
                 This article presents a tablet-based application for
                 activity schedules that has been designed following a
                 participatory design approach involving mainstream
                 teachers, special education teachers, and school aides.
                 This application addresses two domains of activities:
                 classroom routines and verbal communications. We
                 assessed the efficiency of our application with two
                 overlapping user studies in mainstream inclusion,
                 sharing a group of children with ASD. The first
                 experiment involved 10 children with ASD, where five
                 children were equipped with our tablet-based
                 application and five were not equipped. We show that
                 (1) the use of the application is rapidly
                 self-initiated (after 2 months for almost all the
                 participants) and (2) the tablet-supported routines are
                 better performed after 3 months of intervention. The
                 second experiment involved 10 children equipped with
                 our application; it shared the data collected for the
                 five children with ASD and compared them with data
                 collected for five children with intellectual
                 disability (ID). We show that (1) children with ID are
                 not autonomous in the use of the application at the end
                 of the intervention, (2) both groups exhibited the same
                 benefits on classroom routines, and (3) children with
                 ID improve their performance on verbal communication
                 routines significantly less. These results are
                 discussed in relation to our design principles.
                 Importantly, the inclusion of a group with another
                 neurodevelopmental condition provided insights about
                 the applicability of these principles beyond the target
                 population of children with ASD.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "9",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Crabb:2016:AAT,
  author =       "Michael Crabb and Vicki L. Hanson",
  title =        "An Analysis of Age, Technology Usage, and Cognitive
                 Characteristics Within Information Retrieval Tasks",
  journal =      j-TACCESS,
  volume =       "8",
  number =       "3",
  pages =        "10:1--10:??",
  month =        may,
  year =         "2016",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2856046",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon May 2 16:14:41 MDT 2016",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "This work presents two studies that aim to discover
                 whether age can be used as a suitable metric for
                 distinguishing performance between individuals or if
                 other factors can provide greater insight. Information
                 retrieval tasks are used to test the performance of
                 these factors. First, a study is introduced that
                 examines the effect that fluid intelligence and
                 Internet usage has on individuals. Second, a larger
                 study is reported on that examines a collection of
                 Internet and cognitive factors in order to determine to
                 what extent each of these metrics can account for
                 disorientation in users. This work adds to growing
                 evidence showing that age is not a suitable metric to
                 distinguish between individuals within the field of
                 human-computer interaction. It shows that factors such
                 as previous Internet experience and fluid-based
                 cognitive abilities can be used to gain better insight
                 into users' reported browsing experience during
                 information retrieval tasks.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "10",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Buehler:2016:IIP,
  author =       "Erin Buehler and Niara Comrie and Megan Hofmann and
                 Samantha McDonald and Amy Hurst",
  title =        "Investigating the Implications of {$3$D} Printing in
                 Special Education",
  journal =      j-TACCESS,
  volume =       "8",
  number =       "3",
  pages =        "11:1--11:??",
  month =        may,
  year =         "2016",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2870640",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon May 2 16:14:41 MDT 2016",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Consumer-grade digital fabrication such as 3D printing
                 is on the rise, and we believe it can be leveraged to
                 great benefit in special education. Although 3D
                 printing is infiltrating mainstream education, little
                 research has explored 3D printing in the context of
                 students with special support needs. We describe our
                 studies on this topic and the resulting contributions.
                 We initially conducted a formative study exploring the
                 use of 3D printing at three locations serving
                 populations with varying ability, including individuals
                 with cognitive, motor, and visual impairments. We found
                 that 3D design and printing perform three functions in
                 special education: (1) STEM engagement, (2) creation of
                 educational aids for accessible curriculum content, and
                 (3) making custom adaptive devices. As part of our
                 formative work, we also discussed a case study in the
                 codesign of an assistive hand grip created with
                 occupational therapists at one of our investigation
                 sites. This work inspired further studies on the
                 creation of adaptive devices using 3D printers. We
                 identified the needs and constraints of these
                 therapists and found implications for a specialized 3D
                 modeling tool to support their use of 3D printers. We
                 developed GripFab, 3D modeling software based on
                 feedback from therapists, and used it to explore the
                 feasibility of in-house 3D object designs in support of
                 accessibility. Our contributions include case studies
                 at three special education sites and discussion of
                 obstacles to efficient 3D printing in this context. We
                 have extended these contributions with a more in-depth
                 look at the stakeholders and findings from GripFab
                 studies. We have expanded our discussion to include
                 suggestions for researchers in this space, in addition
                 to refined suggestions from our earlier work for
                 technologists creating 3D modeling and printing tools,
                 therapists seeking to leverage 3D printers, and
                 educators and administrators looking to implement these
                 design tools in special education environments.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "11",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Carrington:2016:GRF,
  author =       "Patrick Carrington and Jian-Ming Chang and Kevin Chang
                 and Catherine Hornback and Amy Hurst and Shaun K.
                 Kane",
  title =        "The {Gest-Rest} Family: Exploring Input Possibilities
                 for Wheelchair Armrests",
  journal =      j-TACCESS,
  volume =       "8",
  number =       "3",
  pages =        "12:1--12:??",
  month =        may,
  year =         "2016",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2873062",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon May 2 16:14:41 MDT 2016",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Interacting with touch screen-based computing devices
                 can be difficult for individuals with mobility
                 impairments that affect their hands, arms, neck, or
                 head. These problems may be especially difficult for
                 power wheelchair users, as the frame of their
                 wheelchair may obstruct the users' range of motion and
                 reduce their ability to reach objects in the
                 environment. The concept of ``chairable'' input devices
                 refers to input devices that are designed to fit with
                 the form of an individual's wheelchair, much like
                 wearable technology fits with an individual's clothing.
                 In this paper, we introduce a new family of chairable
                 input devices: Gest-Rests. A Gest-Rest is a
                 gesture-based input device that fits over a standard
                 power
                 wheelchair armrest. A Gest-Rest enables users to
                 perform traditional touch screen gestures, such as
                 press and flick, as well as pressure-based gestures
                 such as squeezing and punching. We have developed the
                 Gest-Rest Family, a suite of armrest-based input
                 devices designed for power wheelchair users. In this
                 paper, we present formative evaluations of these
                 prototypes with wheelchair users and therapists and
                 present their opinions of this new integrated input
                 technology. Our results show the benefits of
                 armrest-based input devices as well as a comparison of
                 physical switches, f",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "12",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Putnam:2016:BPT,
  author =       "Cynthia Putnam and Maria Dahman and Emma Rose and
                 Jinghui Cheng and Glenn Bradford",
  title =        "Best Practices for Teaching Accessibility in
                 University Classrooms: Cultivating Awareness,
                 Understanding, and Appreciation for Diverse Users",
  journal =      j-TACCESS,
  volume =       "8",
  number =       "4",
  pages =        "13:1--13:??",
  month =        may,
  year =         "2016",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2831424",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat May 21 08:23:11 MDT 2016",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "As Information and Communication Technologies (ICTs)
                 become more diffuse, developers and designers need to
                 consider a growing diversity of users including people
                 with disabilities and aging populations. As a result,
                 computing education needs to respond by providing
                 students opportunities to learn about accessibility and
                 designing for inclusion. This article presents results
                 of a qualitative research study of practices in
                 teaching accessibility in university-level programs in
                 the US. The study included interviews with 18
                 professors from some of the top universities in the US
                 and a content analysis of syllabi and other teaching
                 materials. Using the pedagogical theory of authentic
                 learning and elements from the 21st Century Skills
                 framework, we found that instructors emphasized the
                 need for students to develop awareness and
                 understanding of a diversity of ICT users through
                 multiple experiences, including research projects that
                 directly involve users with disabilities, guest
                 speakers, field trips, disability simulations, and the
                 use of videos/movies.
                 Additionally, instructors used multiple resources
                 (e.g., research papers, online resources), in part, to
                 offset the challenge that there is a perceived lack of
                 a comprehensive textbook. Instructors also emphasized
                 the importance of their individual initiative; that is,
                 the inclusion of accessibility topics or courses was often
                 linked to a faculty member's research and/or personal
                 commitment. This article contributes to a gap in the
                 literature by disseminating and sharing different
                 approaches to teaching accessibility across multiple
                 instructors, courses, and campuses.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "13",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Wang:2016:ISL,
  author =       "Hanjie Wang and Xiujuan Chai and Xiaopeng Hong and
                 Guoying Zhao and Xilin Chen",
  title =        "Isolated Sign Language Recognition with {Grassmann}
                 Covariance Matrices",
  journal =      j-TACCESS,
  volume =       "8",
  number =       "4",
  pages =        "14:1--14:??",
  month =        may,
  year =         "2016",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2897735",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat May 21 08:23:11 MDT 2016",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "In this article, to utilize long-term dynamics over an
                 isolated sign sequence, we propose a covariance
                 matrix--based representation to naturally fuse
                 information from multimodal sources. To tackle the
                 drawback induced by the commonly used Riemannian
                 metric, the proximity of covariance matrices is
                 measured on the Grassmann manifold. However, the
                 inherent Grassmann metric cannot be directly applied to
                 the covariance matrix. We solve this problem by
                 evaluating and selecting the most significant singular
                 vectors of covariance matrices of sign sequences. The
                 resulting compact representation is called the
                 Grassmann covariance matrix. Finally, the Grassmann
                 metric is used as a kernel for the support vector
                 machine, which enables learning of the signs in a
                 discriminative manner. To validate the proposed method,
                 we collect three challenging sign language datasets, on
                 which comprehensive evaluations show that the proposed
                 method outperforms the state-of-the-art methods both in
                 accuracy and computational cost.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "14",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}
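
To make the representation above concrete: a covariance matrix is
computed over a multimodal feature sequence, its most significant
singular vectors form a compact subspace, and subspaces are compared on
the Grassmann manifold. The Python sketch below is only a loose reading
of that idea; the projection-metric kernel and the dimensions are
assumptions, not the authors' exact formulation:

    # Sketch: covariance -> leading singular vectors -> Grassmann kernel.
    import numpy as np

    def grassmann_basis(frames, k=10):
        """frames: (T x d) multimodal feature sequence; return a d x k
        orthonormal basis of the k most significant singular vectors of
        its covariance matrix."""
        C = np.cov(frames, rowvar=False)   # d x d covariance
        U, s, _ = np.linalg.svd(C)
        return U[:, :k]

    def projection_kernel(U1, U2):
        """Projection-metric similarity ||U1^T U2||_F^2 between two
        subspaces; usable as an SVM kernel value."""
        M = U1.T.dot(U2)
        return float(np.sum(M * M))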

@Article{Rello:2016:EFT,
  author =       "Luz Rello and Ricardo Baeza-Yates",
  title =        "The Effect of Font Type on Screen Readability by
                 People with Dyslexia",
  journal =      j-TACCESS,
  volume =       "8",
  number =       "4",
  pages =        "15:1--15:??",
  month =        may,
  year =         "2016",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2897736",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat May 21 08:23:11 MDT 2016",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/font.bib;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Around 10\% of the people have dyslexia, a
                 neurological disability that impairs a person's ability
                 to read and write. There is evidence that the
                 presentation of the text has a significant effect on a
                 text's accessibility for people with dyslexia. However,
                 to the best of our knowledge, there are no experiments
                 that objectively measure the impact of the typeface
                 (font) on screen reading performance. In this article,
                 we present the first experiment that uses eye-tracking
                 to measure the effect of typeface on reading speed.
                 Using a mixed between-within subject design, 97
                 subjects (48 with dyslexia) read 12 texts with 12
                 different fonts. Font types have an impact on
                 readability for people with and without dyslexia. For
                 the tested fonts, sans serif, monospaced, and roman
                 font styles significantly improved the reading
                 performance over serif, proportional, and italic fonts.
                 On the basis of our results, we recommend a set of more
                 accessible fonts for people with and without
                 dyslexia.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "15",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Stearns:2016:EHA,
  author =       "Lee Stearns and Ruofei Du and Uran Oh and Catherine
                 Jou and Leah Findlater and David A. Ross and Jon E.
                 Froehlich",
  title =        "Evaluating Haptic and Auditory Directional Guidance to
                 Assist Blind People in Reading Printed Text Using
                 Finger-Mounted Cameras",
  journal =      j-TACCESS,
  volume =       "9",
  number =       "1",
  pages =        "1:1--1:??",
  month =        nov,
  year =         "2016",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2914793",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Jan 17 16:54:35 MST 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "The recent miniaturization of cameras has enabled
                 finger-based reading approaches that provide blind and
                 visually impaired readers with access to printed
                 materials. Compared to handheld text scanners such as
                 mobile phone applications, mounting a tiny camera on
                 the user's own finger has the potential to mitigate
                 camera framing issues, enable a blind reader to better
                 understand the spatial layout of a document, and
                 provide better control over reading pace. A
                 finger-based approach, however, also introduces the
                 need to guide the reader in physically navigating a
                 document, such as tracing along lines of text. While
                 previous work has proposed audio and haptic directional
                 finger guidance for this purpose, user studies of
                 finger-based reading have not provided an in-depth
                 performance analysis of the finger-based reading
                 process. To further investigate the effectiveness of
                 finger-based sensing and feedback for reading printed
                 text, we conducted a controlled laboratory experiment
                 with 19 blind participants, comparing audio and haptic
                 directional finger guidance within an iPad-based
                 testbed. As a small follow-up, we asked four of those
                 participants to return and provide feedback on a
                 preliminary wearable prototype called HandSight.
                 Findings from the controlled experiment show similar
                 performance between haptic and audio directional
                 guidance, although audio may offer an accuracy
                 advantage for tracing lines of text. Subjective
                 feedback also highlights trade-offs between the two
                 types of guidance, such as the interference of audio
                 guidance with speech output and the potential for
                 desensitization to haptic guidance. While several
                 participants appreciated the direct access to layout
                 information provided by finger-based exploration,
                 important concerns also arose about ease of use and the
                 amount of concentration required. We close with a
                 discussion on the effectiveness of finger-based reading
                 for blind users and potential design improvements to
                 the HandSight prototype.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "1",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Sengpiel:2016:TDH,
  author =       "Michael Sengpiel",
  title =        "Teach or Design? {How} Older Adults' Use of Ticket
                 Vending Machines Could Be More Effective",
  journal =      j-TACCESS,
  volume =       "9",
  number =       "1",
  pages =        "2:1--2:??",
  month =        nov,
  year =         "2016",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2935619",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Jan 17 16:54:35 MST 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "The dominance of computer technology in work and
                 leisure poses challenges for older people. Their lack
                 of computer experience and computer literacy impedes
                 their ability to explore and use new interactive
                 systems. This is particularly challenging for the
                 design of public access systems, such as ticket vending
                 machines (TVM). This article describes a conflict
                 relevant for many designers considering age-related
                 differences in technology use: should the user be
                 taught to use the existing design or should the design
                 be changed to accommodate older users? An experiment
                 was conducted to directly compare these alternative
                 approaches with each other and with a simulation of an
                 existing TVM. It compares three TVM designs regarding
                 the usability criteria of effectiveness, efficiency,
                 and satisfaction, controlling for age as well as
                 cognitive and motivational characteristics. 62 older
                 (M = 68 years)
                 and 62 younger (M = 25 years) participants were split
                 into three groups: The control group solved 11 tasks
                 using a simulation of the TVM, the video group watched
                 a brief instructional video before solving the same
                 tasks with the same TVM, and the wizard group used a
                 redesigned wizard interface instead. Results indicate
                 that young and old participants' performance improved
                 after watching the video, but older participants
                 improved more, reaching the effectiveness of the young
                 control group. In the wizard condition, age differences
                 in effectiveness and satisfaction were eliminated;
                 however, speed differences remained in all conditions.
                 The results suggest that the simple integration of
                 minimal video instruction or a task-oriented wizard
                 design can make public access systems truly universally
                 usable, and that the wizard TVM was a true
                 ``walk-up-and-use system.''",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "2",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Tomlinson:2016:EAG,
  author =       "Brianna J. Tomlinson and Jared Batterman and Yee Chieh
                 Chew and Ashley Henry and Bruce N. Walker",
  title =        "Exploring Auditory Graphing Software in the Classroom:
                 The Effect of Auditory Graphs on the Classroom
                 Environment",
  journal =      j-TACCESS,
  volume =       "9",
  number =       "1",
  pages =        "3:1--3:??",
  month =        nov,
  year =         "2016",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2994606",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Jan 17 16:54:35 MST 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Students who are visually impaired make up a
                 population with unique needs for learning. Some tools
                 have been developed to support these needs in the
                 classroom. One such tool, the Graph and Number line
                 Input and Exploration software (GNIE), was developed by
                 the Georgia Institute of Technology Sonification Lab.
                 GNIE was deployed for use in a middle school math
                 classroom at the Georgia Academy for the Blind (GAB)
                 for 2 years starting in fall 2012. We interviewed the
                 middle school math teacher throughout the deployment to
                 learn about the challenges faced when teaching: lesson
                 planning, execution, and review. We also observed how
                 these changed when using GNIE compared to traditional
                 teaching materials. During these 2 years, we conducted
                 interviews and focus groups with students to learn
                 about their attitudes toward tactile graphs compared to
                 auditory graphs. With these in mind, we present lessons
                 learned from the use of GNIE in a real-world classroom
                 and implications for design of software to aid
                 graphical learning for students with vision
                 impairments.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "3",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Morash:2017:SPA,
  author =       "Valerie S. Morash and Yue-Ting Siu",
  title =        "Social Predictors of Assistive Technology Proficiency
                 Among Teachers of Students with Visual Impairments",
  journal =      j-TACCESS,
  volume =       "9",
  number =       "2",
  pages =        "4:1--4:??",
  month =        jan,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/2999569",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Jan 17 16:54:35 MST 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Assistive technology (AT) is critical for K-12
                 students who have visual impairments to engage with
                 their education and is predictive of positive
                 postsecondary outcomes and future employment. Teachers
                 of students with visual impairments (TVIs) act as the
                 primary gatekeepers of AT for these students.
                 Unfortunately, only about 40\% of TVIs integrate AT
                 into their practice. Efforts to predict TVIs' AT
                 proficiency based on their preservice training have
                 been unsuccessful. The current study proposes and
                 confirms that TVIs' AT proficiency is related to their
                 identification with a social community of practice
                 (CoP) that values AT. Results from $n = 505$ North
                 American TVIs produced a Spearman correlation of $\rho
                 = 0.49$ between estimated AT proficiency and CoP
                 identification. The relationship was strongest among
                 TVIs with lower AT proficiency and CoP identification.
                 Results have implications for industry, researchers,
                 teacher preparation programs, personnel who administer
                 and train assistive technologies, and policymakers
                 concerned with ensuring that AT is available to
                 students who have visual impairments. Mere availability
                 of AT is insufficient to ensure its successful
                 introduction to K-12 students with visual impairments,
                 which relies on TVIs' AT proficiency for meaningful
                 implementation. Developers and advocates of AT for K-12
                 students with visual impairments must consider the
                 social context in which AT proficiency develops and
                 provide appropriate social supports.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "4",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}
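
The headline statistic above is a Spearman rank correlation between
AT-proficiency and CoP-identification scores. For readers who want the
mechanics, a minimal computation in Python looks like the fragment
below; the scores are toy values, not the study's data:

    # Minimal Spearman rank-correlation example (toy data only).
    from scipy.stats import spearmanr

    at_proficiency = [3, 5, 2, 4, 4, 1, 5, 2]      # hypothetical scores
    cop_identification = [2, 5, 1, 4, 3, 2, 4, 3]  # hypothetical scores

    rho, p_value = spearmanr(at_proficiency, cop_identification)
    print(f"Spearman rho = {rho:.2f}, p = {p_value:.3f}")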

@Article{Tigwell:2017:ACP,
  author =       "Garreth W. Tigwell and David R. Flatla and Neil D.
                 Archibald",
  title =        "{ACE}: a Colour Palette Design Tool for Balancing
                 Aesthetics and Accessibility",
  journal =      j-TACCESS,
  volume =       "9",
  number =       "2",
  pages =        "5:1--5:??",
  month =        jan,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3014588",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Jan 17 16:54:35 MST 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Colour can convey a mood or elicit a particular
                 emotion and, in terms of web design, colour can
                 influence attitudes, perceptions, and behaviours.
                 However, many websites demonstrate inaccessible colour
                 choices. Numerous online colour palette design tools
                 only focus on assisting designers with either the
                 aesthetics or accessibility of colours. With a
                 user-centered design approach, we developed the
                 Accessible Colour Evaluator (ACE, daprlab.com/ace)
                 which enhances web developers' and designers' ability
                 to balance aesthetic and accessibility constraints. We
                 distributed an online questionnaire to 28 web
                 developers and designers to understand their attitudes
                 and utilisation of accessibility guidelines, as well as
                 to gather initial design requirements for ACE. With
                 this information, we created three low-fidelity paper
                 prototypes that were used to create two high-fidelity
                 prototypes. The high-fidelity prototypes were discussed
                 with 4 web developers and designers during a design
                 workshop, and their feedback was used to develop the
                 final version of ACE. A comparative evaluation of ACE
                 and three existing alternative tools was conducted with
                 10 new web developers and designers. All participants
                 were able to complete a colour palette design task when
                 using ACE and identified ACE as their most preferred
                 tool. The mean scores for the six TLX measures show ACE
                 as providing the best performance and causing the
                 lowest frustration. Finally, we conducted a small focus
                 group with 3 web developers and designers to gather
                 qualitative feedback about ACE. Participants identified
                 a number of ACE's strengths and made suggestions for
                 future extensions and improvements.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "5",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}
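
ACE helps designers check colour pairs against accessibility
guidelines. As general background rather than ACE's own implementation,
the WCAG 2.x contrast ratio that such tools typically evaluate can be
computed as follows; the example colours are arbitrary:

    # WCAG 2.x contrast ratio between two sRGB colours (general
    # background; not code from the ACE tool).
    def relative_luminance(rgb):
        def channel(c):
            c = c / 255.0
            return c / 12.92 if c <= 0.03928 else ((c + 0.055) / 1.055) ** 2.4
        r, g, b = (channel(c) for c in rgb)
        return 0.2126 * r + 0.7152 * g + 0.0722 * b

    def contrast_ratio(fg, bg):
        lighter, darker = sorted(
            (relative_luminance(fg), relative_luminance(bg)), reverse=True)
        return (lighter + 0.05) / (darker + 0.05)

    ratio = contrast_ratio((68, 68, 68), (255, 255, 255))  # grey on white
    print(f"contrast {ratio:.2f}:1, passes AA body text: {ratio >= 4.5}")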

@Article{Grussenmeyer:2017:ATT,
  author =       "William Grussenmeyer and Eelke Folmer",
  title =        "Accessible Touchscreen Technology for People with
                 Visual Impairments: a Survey",
  journal =      j-TACCESS,
  volume =       "9",
  number =       "2",
  pages =        "6:1--6:??",
  month =        jan,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3022701",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Jan 17 16:54:35 MST 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Touchscreens have become a de facto standard of input
                 for mobile devices as they most optimally use the
                 limited input and output space that is imposed by their
                 form factor. In recent years, people who are blind and
                 visually impaired have been increasing their usage of
                 smartphones and touchscreens. Although basic access is
                 available, there are still many accessibility issues
                 left to deal with in order to bring full inclusion to
                 this population. Many of the accessibility problems are
                 complex; in the past decade, various solutions have
                 been explored. This article provides a review of the
                 current state of the art of touchscreen accessibility
                 for people with visual impairments and identifies new
                 directions for research.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "6",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Zhang:2017:MPH,
  author =       "Ting Zhang and Bradley S. Duerstock and Juan P.
                 Wachs",
  title =        "Multimodal Perception of Histological Images for
                 Persons Who Are Blind or Visually Impaired",
  journal =      j-TACCESS,
  volume =       "9",
  number =       "3",
  pages =        "7:1--7:??",
  month =        feb,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3026794",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Apr 13 17:49:12 MDT 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Lack of suitable substitute assistive technology is a
                  roadblock that keeps students and scientists who are blind
                  or visually impaired (BVI) from advancing in careers in
                 science, technology, engineering, and mathematics
                 (STEM) fields. It is challenging for persons who are
                 BVI to interpret real-time visual scientific data which
                 is commonly generated during lab experimentation, such
                 as performing light microscopy, spectrometry, and
                 observing chemical reactions. To address this problem,
                 a real-time multimodal image perception system was
                 developed to allow standard laboratory blood smear
                 images to be perceived by BVI individuals by employing
                 a combination of auditory, haptic, and vibrotactile
                 feedback. These sensory feedback modalities were used
                 to convey visual information through alternative
                 perceptual channels, thus creating a palette of
                 multimodal, sensory information. Two sets of image
                 features of interest (primary and peripheral features)
                 were applied to characterize images. A Bayesian network
                 was applied to construct causal relations between these
                 two groups of features. In order to match primary
                  features with sensory modalities, two methods were
                 conceived. Experimental results confirmed that this
                 real-time approach produced higher accuracy in
                 recognizing and analyzing objects within images
                 compared to conventional tactile images.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "7",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Betancourt:2017:SCS,
  author =       "Mariana Aparicio Betancourt and Laura S. Dethorne and
                 Karrie Karahalios and Jennifer G. Kim",
  title =        "Skin Conductance as an In Situ Marker for Emotional
                 Arousal in Children with Neurodevelopmental
                 Communication Impairments: Methodological
                 Considerations and Clinical Implications",
  journal =      j-TACCESS,
  volume =       "9",
  number =       "3",
  pages =        "8:1--8:??",
  month =        feb,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3035536",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Apr 13 17:49:12 MDT 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Even though electrodermal activity has been widely
                 used in the study of psychological states and processes
                 for over 130 years, the use of such technology in situ,
                 within the context of daily activities, remains a major
                 challenge. Recent technological advancements have led
                 to the development of wearable biosensors that
                 noninvasively measure electrical conductance across the
                 skin. These biosensors represent a new approach for
                 skin conductance assessment, as a proxy for emotional
                 arousal, in children with neurodevelopmental
                 communication impairments who are often described as
                  having difficulties with emotional regulation and with
                  expressing thoughts and feelings, and as presenting a higher
                 prevalence of challenging behaviors. Here we provide an
                 overview of skin conductance and explore the benefits
                 of recent technological advancements for applied
                 research and clinical practice. We draw on user
                 experience from two experimental interventions
                 involving eight children with neurodevelopmental
                 impairments. In both cases investigators monitored
                 phasic and tonic EDA measures in situ using wearable
                 biosensors. We share the behavioral and technical
                 challenges experienced across these two experimental
                 contexts, and propose associated considerations for
                 future use. Specifically, sensor functioning,
                 synchronization, and data preprocessing/analysis
                 difficulties, as well as behavioral findings related to
                 developmental differences, sensor tolerance over time,
                 and sensor placement are discussed.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "8",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Palani:2017:PDL,
  author =       "Hari Prasath Palani and Nicholas A. Giudice",
  title =        "Principles for Designing Large-Format Refreshable
                 Haptic Graphics Using Touchscreen Devices: an
                 Evaluation of Nonvisual Panning Methods",
  journal =      j-TACCESS,
  volume =       "9",
  number =       "3",
  pages =        "9:1--9:??",
  month =        feb,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3035537",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Apr 13 17:49:12 MDT 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Touchscreen devices, such as smartphones and tablets,
                 represent a modern solution for providing graphical
                 access to people with blindness and visual impairment
                 (BVI). However, a significant problem with these
                 solutions is their limited screen real estate, which
                 necessitates panning or zooming operations for
                 accessing large-format graphical materials such as
                 maps. Non-visual interfaces cannot directly employ
                 traditional panning or zooming techniques due to
                 various perceptual and cognitive limitations (e.g.,
                 constraints of the haptic field of view and
                 disorientation due to loss of one's reference point
                 after performing these operations). This article
                 describes the development of four novel non-visual
                 panning methods designed from the onset with
                 consideration of these perceptual and cognitive
                 constraints. Two studies evaluated the usability of
                 these panning methods in comparison with a non-panning
                 control condition. Results demonstrated that the
                 exploration, learning, and subsequent spatial behaviors
                 were similar between panning and non-panning
                 conditions, with one panning mode, based on a
                 two-finger drag technique, revealing the overall best
                 performance. Findings provide compelling evidence that
                 incorporating panning operations on touchscreen devices
                 --- the fastest growing computational platform among
                 the BVI demographic --- is a viable, low-cost, and
                 immediate solution for providing BVI people with access
                 to a broad range of large-format digital graphical
                 information.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "9",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Bigham:2017:IASa,
  author =       "Jeffrey Bigham",
  title =        "Introduction to the {ASSETS'15} Special Issue, {Part
                 1}",
  journal =      j-TACCESS,
  volume =       "9",
  number =       "4",
  pages =        "10:1--10:??",
  month =        apr,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3051484",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Apr 13 17:49:12 MDT 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "10",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Ahmetovic:2017:MYC,
  author =       "Dragan Ahmetovic and Roberto Manduchi and James M.
                 Coughlan and Sergio Mascetti",
  title =        "Mind Your Crossings: Mining {GIS} Imagery for
                 Crosswalk Localization",
  journal =      j-TACCESS,
  volume =       "9",
  number =       "4",
  pages =        "11:1--11:??",
  month =        apr,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3046790",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Apr 13 17:49:12 MDT 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "For blind travelers, finding crosswalks and remaining
                 within their borders while traversing them is a crucial
                 part of any trip involving street crossings. While
                  standard Orientation {\&} Mobility (O{\&}M) techniques allow
                 blind travelers to safely negotiate street crossings,
                 additional information about crosswalks and other
                 important features at intersections would be helpful in
                 many situations, resulting in greater safety and/or
                 comfort during independent travel. For instance, in
                 planning a trip a blind pedestrian may wish to be
                 informed of the presence of all marked crossings near a
                 desired route. We have conducted a survey of several
                  O{\&}M experts from the United States and Italy to
                 determine the role that crosswalks play in travel by
                 blind pedestrians. The results show stark differences
                 between survey respondents from the U.S. compared with
                 Italy: the former group emphasized the importance of
                 following standard O{\&}M techniques at all legal
                 crossings (marked or unmarked), while the latter group
                 strongly recommended crossing at marked crossings
                 whenever possible. These contrasting opinions reflect
                 differences in the traffic regulations of the two
                 countries and highlight the diversity of needs that
                 travelers in different regions may have. To address the
                 challenges faced by blind pedestrians in negotiating
                 street crossings, we devised a computer vision--based
                 technique that mines existing spatial image databases
                 for discovery of zebra crosswalks in urban settings.
                 Our algorithm first searches for zebra crosswalks in
                 satellite images; all candidates thus found are
                 validated against spatially registered Google Street
                 View images. This cascaded approach enables fast and
                 reliable discovery and localization of zebra crosswalks
                 in large image datasets. While fully automatic, our
                 algorithm can be improved by a final crowdsourcing
                 validation. To this end, we developed a Pedestrian
                 Crossing Human Validation web service, which supports
                 crowdsourcing, to rule out false positives and identify
                 false negatives.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "11",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Rector:2017:DRW,
  author =       "Kyle Rector and Roger Vilardaga and Leo Lansky and
                 Kellie Lu and Cynthia L. Bennett and Richard E. Ladner
                 and Julie A. Kientz",
  title =        "Design and Real-World Evaluation of Eyes-Free Yoga: an
                 Exergame for Blind and Low-Vision Exercise",
  journal =      j-TACCESS,
  volume =       "9",
  number =       "4",
  pages =        "12:1--12:??",
  month =        apr,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3022729",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Apr 13 17:49:12 MDT 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "People who are blind or low vision may have a harder
                 time participating in exercise due to inaccessibility
                 or lack of encouragement. To address this, we developed
                  Eyes-Free Yoga, which uses the Microsoft Kinect to act as
                  a yoga instructor and provide personalized auditory
                  feedback based on skeletal tracking. We conducted two
                 different studies on two different versions of
                 Eyes-Free Yoga: (1) a controlled study with 16 people
                 who are blind or low vision to evaluate the feasibility
                 of a proof-of-concept and (2) an 8-week in-home
                 deployment study with 4 people who are blind or low
                 vision, with a fully functioning exergame containing
                 four full workouts and motivational techniques. We
                 found that participants preferred the personalized
                 feedback for yoga postures during the laboratory study.
                 Therefore, the personalized feedback was used as a
                 means to build the core components of the system used
                 in the deployment study and was included in both study
                 conditions. From the deployment study, we found that
                  the participants practiced yoga consistently throughout
                 the 8-week period (Average hours = 17; Average days of
                 practice = 24), almost reaching the American Heart
                 Association recommended exercise guidelines. On
                 average, motivational techniques increased
                  participants' user experience and their exercise
                  frequency and time. The findings of this work have
                 implications for eyes-free exergame design, including
                 engaging domain experts, piloting with inexperienced
                 users, using musical metaphors, and designing for
                 in-home use cases.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "12",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Mascetti:2017:ENS,
  author =       "Sergio Mascetti and Andrea Gerino and Cristian
                 Bernareggi and Lorenzo Picinali",
  title =        "On the Evaluation of Novel Sonification Techniques for
                 Non-Visual Shape Exploration",
  journal =      j-TACCESS,
  volume =       "9",
  number =       "4",
  pages =        "13:1--13:??",
  month =        apr,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3046789",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Apr 13 17:49:12 MDT 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "There are several situations in which a person with
                 visual impairment or blindness needs to extract
                 information from an image. For example, graphical
                 representations are often used in education, in
                 particular, in STEM (science, technology, engineering,
                 and mathematics) subjects. In this contribution, we
                 propose a set of six sonification techniques to support
                 individuals with visual impairment or blindness in
                 recognizing shapes on touchscreen devices. These
                 techniques are compared among themselves and with two
                 other sonification techniques already proposed in the
                 literature. Using Invisible Puzzle, a mobile
                 application which allows one to conduct non-supervised
                 evaluation sessions, we conducted tests with 49
                 subjects with visual impairment and blindness, and 178
                 sighted subjects. All subjects involved in the process
                 successfully completed the evaluation session, showing
                  a high level of engagement and therefore demonstrating
                 the effectiveness of the evaluation procedure. Results
                 give interesting insights into the differences among
                 the sonification techniques and, most importantly, show
                  that after a short training period, subjects are able to
                 successfully identify several different shapes.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "13",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Bigham:2017:IASb,
  author =       "Jeffrey Bigham",
  title =        "Introduction to the {ASSETS'15} Special Issue, {Part
                 2}",
  journal =      j-TACCESS,
  volume =       "10",
  number =       "1",
  pages =        "1:1--1:??",
  month =        apr,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3051486",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Apr 13 17:49:12 MDT 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "1",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Huenerfauth:2017:ELF,
  author =       "Matt Huenerfauth and Elaine Gale and Brian Penly and
                 Sree Pillutla and Mackenzie Willard and Dhananjai
                 Hariharan",
  title =        "Evaluation of Language Feedback Methods for Student
                 Videos of {American Sign Language}",
  journal =      j-TACCESS,
  volume =       "10",
  number =       "1",
  pages =        "2:1--2:??",
  month =        apr,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3046788",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Apr 13 17:49:12 MDT 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "This research investigates how to best present
                 video-based feedback information to students learning
                 American Sign Language (ASL); these results are
                  relevant not only to the design of a software tool for
                  providing automatic feedback to students but also to
                  how ASL instructors could convey
                 feedback on students' submitted work. It is known that
                 deaf children benefit from early exposure to language,
                 and higher levels of written language literacy have
                 been measured in deaf adults who were raised in homes
                 using ASL. In addition, prior work has established that
                 new parents of deaf children benefit from technologies
                 to support learning ASL. As part of a long-term project
                 to design a tool to automatically analyze a video of a
                  student's signing and provide immediate feedback about
                 fluent and non-fluent aspects of their movements, we
                 conducted a study to compare multiple methods of
                 conveying feedback to ASL students, using videos of
                 their signing. Through two user studies, with a
                 Wizard-of-Oz design, we compared multiple types of
                 feedback in regard to users' subjective judgments of
                 system quality and the degree students' signing
                 improved (as judged by an ASL instructor who analyzed
                 recordings of students' signing before and after they
                 viewed each type of feedback). The initial study
                  revealed that displaying to students videos of their
                 signing, augmented with feedback messages about their
                 errors or correct ASL usage, yielded higher subjective
                 scores and greater signing improvement. Students gave
                 higher subjective scores to a version in which
                 time-synchronized pop-up messages appeared overlaid on
                 the student's video to indicate errors or correct ASL
                 usage. In a subsequent study, we found that providing
                 images of correct ASL face and hand movements when
                 providing feedback yielded even higher subjective
                 evaluation scores from ASL students using the system.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "2",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Kacorri:2017:RAD,
  author =       "Hernisa Kacorri and Matt Huenerfauth and Sarah Ebling
                 and Kasmira Patel and Kellie Menzies and Mackenzie
                 Willard",
  title =        "Regression Analysis of Demographic and
                 Technology-Experience Factors Influencing Acceptance of
                 Sign Language Animation",
  journal =      j-TACCESS,
  volume =       "10",
  number =       "1",
  pages =        "3:1--3:??",
  month =        apr,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3046787",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Apr 13 17:49:12 MDT 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Software for automating the creation of linguistically
                 accurate and natural-looking animations of American
                 Sign Language (ASL) could increase information
                 accessibility for many people who are deaf. As compared
                 to recording and updating videos of human ASL signers,
                 technology for automatically producing animation from
                 an easy-to-update script would make maintaining ASL
                 content on websites more efficient. Most sign language
                 animation researchers evaluate their systems by
                 collecting subjective judgments and
                 comprehension-question responses from deaf
                 participants. Through a survey (N = 62) and
                 multiple-regression analysis, we identified
                 relationships between (a) demographic and
                 technology-experience characteristics of participants
                 and (b) the subjective and objective scores collected
                 from them during the evaluation of sign language
                 animation systems. These relationships were
                 experimentally verified in a subsequent user study with
                 57 participants, which demonstrated that specific
                 subpopulations have higher comprehension or subjective
                 scores when viewing sign language animations in an
                 evaluation study. This finding indicates that
                 researchers should collect and report a set of specific
                 characteristics about participants in any publications
                 describing evaluation studies of their technology, a
                  practice that is not yet standard among
                 researchers working in this field. In addition to
                 investigating this relationship between participant
                 characteristics and study results, we have also
                 released our survey questions in ASL and English that
                 can be used to measure these participant
                 characteristics, to encourage reporting of such data in
                 future studies. Such reporting would enable researchers
                 in the field to better interpret and compare results
                 between studies with different participant pools.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "3",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Nicolau:2017:ILE,
  author =       "Hugo Nicolau and Kyle Montague and Tiago Guerreiro and
                 Andr{\'e} Rodrigues and Vicki L. Hanson",
  title =        "Investigating Laboratory and Everyday Typing
                 Performance of Blind Users",
  journal =      j-TACCESS,
  volume =       "10",
  number =       "1",
  pages =        "4:1--4:??",
  month =        apr,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3046785",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Apr 13 17:49:12 MDT 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Over the last decade there have been numerous studies
                 on touchscreen typing by blind people. However, there
                 are no reports about blind users' everyday typing
                 performance and how it relates to laboratory settings.
                 We conducted a longitudinal study involving five
                 participants to investigate how blind users truly type
                 on their smartphones. For 12 weeks, we collected field
                 data, coupled with eight weekly laboratory sessions.
                 This article provides a thorough analysis of everyday
                 typing data and its relationship with controlled
                 laboratory assessments. We improve state-of-the-art
                 techniques to obtain intent from field data, and
                 provide insights on real-world performance. Our
                  findings show that users improve over time, albeit at a
                  slow rate. Substitutions are the most common
                 type of error and have a significant impact on entry
                 rates in both field and laboratory settings. Results
                 show that participants are 1.3--2 times faster when
                 typing during everyday tasks. On the other hand, they
                  are less accurate. We finish by deriving some
                 implications that should inform the design of a future
                 virtual keyboard for nonvisual input. Moreover,
                 findings should be of interest to keyboard designers
                 and researchers looking to conduct field studies to
                 understand everyday input performance.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "4",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Bozgeyikli:2017:VRI,
  author =       "Lal Bozgeyikli and Evren Bozgeyikli and Andrew Raij
                 and Redwan Alqasemi and Srinivas Katkoori and Rajiv
                 Dubey",
  title =        "Vocational Rehabilitation of Individuals with Autism
                 Spectrum Disorder with Virtual Reality",
  journal =      j-TACCESS,
  volume =       "10",
  number =       "2",
  pages =        "5:1--5:??",
  month =        apr,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3046786",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Apr 13 17:49:13 MDT 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "In this article, a virtual reality system for
                 vocational rehabilitation of individuals with
                 disabilities (VR4VR) is presented. VR4VR uses immersive
                 virtual environments to assess and train individuals
                 with cognitive and physical disabilities. This article
                 focuses on the system modules that were designed and
                 developed for the Autism Spectrum Disorder (ASD)
                 population. The system offers training on six
                  vocational skills that were identified as transferable
                 to and useful in many common jobs. These six
                 transferable skills are cleaning, loading the back of a
                 truck, money management, shelving, environmental
                 awareness, and social skills. This article presents the
                 VR4VR system, the design considerations for the ASD
                 population, and the findings with a cohort of nine
                 neurotypical individuals (control group) and nine
                 high-functioning individuals with ASD (experiment
                 group) who used the system. Good design practices
                 gathered throughout the study are also shared for
                 future virtual reality applications targeting
                 individuals with ASD. Research questions focused on the
                 effectiveness of the virtual reality system on
                 vocational training of high-functioning individuals
                 with ASD and the effect of distracters on task
                 performance of high-functioning individuals with ASD.
                 Follow-up survey results indicated that for individuals
                 with ASD, there was improvement in all of the trained
                 skills. No negative effects of the distracters were
                 observed on the score of individuals with ASD. The
                 proposed VR4VR system was found by professional job
                 trainers to provide effective vocational training for
                  individuals with ASD. The system also shows promise as
                  an alternative, practical training tool for individuals
                  with ASD.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "5",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Zhang:2017:PAW,
  author =       "Dongsong Zhang and Lina Zhou and Judith O. Uchidiuno
                 and Isil Y. Kilic",
  title =        "Personalized Assistive {Web} for Improving Mobile
                 {Web} Browsing and Accessibility for Visually Impaired
                 Users",
  journal =      j-TACCESS,
  volume =       "10",
  number =       "2",
  pages =        "6:1--6:??",
  month =        apr,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3053733",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Apr 13 17:49:13 MDT 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Mobile web browsing has become a daily routine for
                 many people, including those with visual impairments.
                 However, usability and accessibility challenges of
                 mobile handheld devices may compromise the benefits of
                 mobile web access, particularly for users with visual
                 impairments. To improve mobile web accessibility, we
                 propose a Personalized Assistive Web (PAW) that aims to
                 improve skimming in mobile web browsing for users with
                 visual impairments through hierarchical outline view
                  and personalization adaptations. We
                 empirically evaluated PAW via a controlled lab
                 experiment with 21 blind participants and 34 sighted
                 participants. The empirical results provide strong
                 evidence for the positive impacts of the hierarchical
                 outline view adaptation on user performance of
                 information search (i.e., search time) and perceptions
                 (i.e., perceived ease of use and perceived usefulness)
                 across the two groups of participants and demonstrate
                 that the positive effects of adaptation personalization
                 vary with participants. The findings not only
                 demonstrate the effectiveness of the hierarchical
                 outline view adaptation for blind and sighted
                 participants but also reveal some important
                 similarities and interesting differences in the effect
                 of personalized adaptation between the two groups of
                 participants. This research provides design and
                 technical insights that are instrumental for improving
                 mobile web accessibility.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "6",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Prescher:2017:CTP,
  author =       "Denise Prescher and Jens Bornschein and Gerhard
                 Weber",
  title =        "Consistency of a Tactile Pattern Set",
  journal =      j-TACCESS,
  volume =       "10",
  number =       "2",
  pages =        "7:1--7:??",
  month =        apr,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3053723",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Apr 13 17:49:13 MDT 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Consistency over multiple images is a central
                 requirement in most guidelines for creating tactile
                 graphics. By contrast, tactile consistency over
                 multiple production media for tactile graphics is not
                 very common. In this article, we describe a
                  user-centered approach to developing a tactile fill
                 pattern set to be used for tactile graphics on
                 microcapsule paper, tactile matrix embossers, and
                 dynamic tactile pin-matrix devices. We show the results
                 of our iterative user evaluations with visually
                  impaired and blindfolded sighted participants.
                 Finally, we present a Scalable Vector Graphics pattern
                 set that comprises nine intuitively recognizable and
                  distinctive patterns that keep their meaning and
                  recognizability across the different production media.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "7",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Sosa-Garcia:2017:HVR,
  author =       "Joan Sosa-Garc{\'\i}a and Francesca Odone",
  title =        "``{Hands} On'' Visual Recognition for Visually
                 Impaired Users",
  journal =      j-TACCESS,
  volume =       "10",
  number =       "3",
  pages =        "8:1--8:??",
  month =        aug,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3060056",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Aug 12 09:01:31 MDT 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Blind or visually impaired (BVI) individuals are
                 capable of identifying an object in their hands by
                  combining visual cues (if available) with
                 manipulation. It is harder for them to associate the
                 object with a specific brand, a model, or a type.
                 Starting from this observation, we propose a
                 collaborative system designed to deliver visual
                  feedback automatically and to help the user fill
                 this semantic gap. Our visual recognition module is
                 implemented by means of an image retrieval procedure
                 that provides real-time feedback, performs the
                 computation locally on the device, and is scalable to
                 new categories and instances. We carry out a thorough
                 experimental analysis of the visual recognition module,
                 which includes a comparative analysis with the state of
                 the art. We also present two different system
                 implementations that we test with the help of BVI users
                 to evaluate the technical soundness, the usability, and
                 the effectiveness of the proposed concept.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "8",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Baldwin:2017:TDM,
  author =       "Mark S. Baldwin and Gillian R. Hayes and Oliver L.
                 Haimson and Jennifer Mankoff and Scott E. Hudson",
  title =        "The Tangible Desktop: a Multimodal Approach to
                 Nonvisual Computing",
  journal =      j-TACCESS,
  volume =       "10",
  number =       "3",
  pages =        "9:1--9:??",
  month =        aug,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3075222",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Aug 12 09:01:31 MDT 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Audio-only interfaces, facilitated through
                 text-to-speech screen reading software, have been the
                 primary mode of computer interaction for blind and
                 low-vision computer users for more than four decades.
                 During this time, the advances that have made visual
                 interfaces faster and easier to use, from direct
                 manipulation to skeuomorphic design, have not been
                 paralleled in nonvisual computing environments. The
                 screen reader-dependent community is left with no
                 alternatives to engage with our rapidly advancing
                 technological infrastructure. In this article, we
                 describe our efforts to understand the problems that
                 exist with audio-only interfaces. Based on observing
                 screen reader use for 4 months at a computer training
                 school for blind and low-vision adults, we identify
                 three problem areas within audio-only interfaces:
                 ephemerality, linear interaction, and unidirectional
                 communication. We then evaluated a multimodal approach
                 to computer interaction called the Tangible Desktop
                 that addresses these problems by moving semantic
                 information from the auditory to the tactile channel.
                 Our evaluation demonstrated that among novice screen
                 reader users, Tangible Desktop improved task completion
                 times by an average of 6 minutes when compared to
                 traditional audio-only computer systems.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "9",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Rozado:2017:FHC,
  author =       "David Rozado and Jason Niu and Martin Lochner",
  title =        "Fast Human-Computer Interaction by Combining Gaze
                 Pointing and Face Gestures",
  journal =      j-TACCESS,
  volume =       "10",
  number =       "3",
  pages =        "10:1--10:??",
  month =        aug,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3075301",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Aug 12 09:01:31 MDT 2017",
  bibsource =    "http://portal.acm.org/;
                 https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "In this work, we show how our open source
                 accessibility software, the FaceSwitch, can help
                 motor-impaired subjects to efficiently interact with a
                 computer hands-free. The FaceSwitch enhances gaze
                  interaction with video-based face-gesture interaction.
                 The emerging multimodal system allows for interaction
                 with a user interface by means of gaze pointing for
                 target selection and facial gestures for
                 target-specific action commands. The FaceSwitch maps
                 facial gestures to specific mouse or keyboard events
                 such as: left mouse click, right mouse click, or page
                 scroll down. Hence, facial gestures serve the purpose
                 of mechanical switches. With this multimodal
                 interaction paradigm, the user gazes at the object in
                  the user interface with which they want to interact and
                 then triggers a target-specific action by performing a
                 face gesture. Through a rigorous user study, we have
                  obtained quantitative evidence suggesting that our
                  proposed interaction paradigm outperforms traditional
                  accessibility options, such as gaze-only interaction or
                  gaze combined with a single mechanical switch, while
                  coming close to traditional mouse-based interaction in
                  terms of speed and accuracy. We
                 make the FaceSwitch software freely available to the
                 community so the output of our research can help the
                 target audience.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "10",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Cutter:2017:IAM,
  author =       "Michael Cutter and Roberto Manduchi",
  title =        "Improving the Accessibility of Mobile {OCR} Apps Via
                 Interactive Modalities",
  journal =      j-TACCESS,
  volume =       "10",
  number =       "4",
  pages =        "11:1--11:??",
  month =        oct,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3075300",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Jan 22 09:48:24 MST 2018",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "We describe two experiments with a system designed to
                 facilitate the use of mobile optical character
                 recognition (OCR) by blind people. This system,
                 implemented as an iOS app, enables two interaction
                 modalities (autoshot and guidance). In the first study,
                 augmented reality fiducials were used to track a
                 smartphone's camera, whereas in the second study, the
                 text area extent was detected using a dedicated text
                 spotting and text line detection algorithm. Although
                 the guidance modality was expected to be superior in
                 terms of faster text access, this was shown to be true
                 only when some conditions (involving the user interface
                 and text detection modules) are met. Both studies also
                 showed that our participants, after experimenting with
                 the autoshot or guidance modality, appeared to have
                 improved their skill at taking OCR-readable pictures
                 even without use of such interaction modalities.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "11",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Lopez:2017:DDO,
  author =       "Sebasti{\'a}n Aced L{\'o}pez and Fulvio Corno and
                 Luigi {De Russis}",
  title =        "Design and Development of One-Switch Video Games for
                 Children with Severe Motor Disabilities",
  journal =      j-TACCESS,
  volume =       "10",
  number =       "4",
  pages =        "12:1--12:??",
  month =        oct,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3085957",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Jan 22 09:48:24 MST 2018",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Video games are not just played for fun; they have
                 become a handy instrument for the cognitive, emotional,
                 and social development of children. However, several
                 barriers prevent many children with disabilities from
                 playing action-oriented video games, alone or with
                 their peers. In particular, children with severe motor
                 disabilities, who rely on one-switch interaction for
                  accessing electronic devices, find fast-paced games,
                  which require rapid decision-making and timely
                 responses, completely unplayable. This article
                 contributes to lowering such barriers by presenting
                 GNomon (Gaming NOMON), a software framework based on
                 the NOMON mode of interaction that allows the creation
                 of action-oriented single-switch video games. The
                 article reports the results of two studies that
                 evaluate the playability and rehabilitation suitability
                 of GNomon-based video games. The playability of
                 GNomon-based games is evaluated by assessing their
                 learnability, effectiveness, errors, satisfaction,
                 memorability, and enjoyability with a group of eight
                 children with severe motor disabilities. The
                 suitability for pediatric rehabilitation is determined
                 by means of a focus group with a team of speech
                 therapists, physiotherapists, and psychologists from a
                 Local Health Agency in Turin, Italy. The results of the
                 playability study are positive: All children had fun
                 playing GNomon-based video games, and seven of eight
                 were able to interact and play autonomously. The
                 results of the rehabilitation-suitability study also
                  indicate that GNomon-based games can be used in
                 training hand-eye coordination and maintenance of
                 selective attention over time. The article finally
                  offers critical hindsight and reflections and presents
                  possible future game concepts.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "12",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Wade:2017:PMD,
  author =       "Joshua Wade and Arpan Sarkar and Amy Swanson and Amy
                 Weitlauf and Zachary Warren and Nilanjan Sarkar",
  title =        "Process Measures of Dyadic Collaborative Interaction
                 for Social Skills Intervention in Individuals with
                 Autism Spectrum Disorders",
  journal =      j-TACCESS,
  volume =       "10",
  number =       "4",
  pages =        "13:1--13:??",
  month =        oct,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3107925",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Jan 22 09:48:24 MST 2018",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Technology-based social skills interventions have
                 shown promise for people with Autism Spectrum Disorder
                 (ASD), a neurodevelopmental disorder characterized by
                 impairments in social interaction and communication.
                 Primary advantages of a technology-based approach to
                 intervention relate to consistency of service delivery
                 as well as an ability to quantitatively measure process
                 and outcomes. Despite these strengths, however, many
                 current computer-supported systems rely on survey data
                 or data collected post-interaction. In response, we
                 have developed and pilot-tested DOSE (Dyad-Operated
                 Social Encouragement), a novel game and data
                 acquisition platform for collaborative skills
                 intervention that leverages the ability of software to
                 collect time-series, speech audio, and event
                 information for the purposes of finer-grained analyses
                 of dyadic interactions. A pilot study involving 12
                  participant dyads, comprised of children with ASD and
                  typically developing (TD) peers (6 ASD-TD dyads and 6
                  TD-TD dyads), was conducted and several metrics were
                 computed during interactions. Preliminary results
                  suggest that the DOSE system is engaging to users and
                 capable of collecting a wide range of quantitative
                 process measures, and that post-training measures show
                 preliminary evidence of increased communication and
                 activity coordination. Furthermore, DOSE has been made
                 open-source, allowing other investigators to use and
                 extend DOSE for a variety of applications.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "13",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Faucett:2017:VDA,
  author =       "Heather A. Faucett and Kate E. Ringland and Amanda L.
                 L. Cullen and Gillian R. Hayes",
  title =        "{(In)Visibility} in Disability and Assistive
                 Technology",
  journal =      j-TACCESS,
  volume =       "10",
  number =       "4",
  pages =        "14:1--14:??",
  month =        oct,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3132040",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Jan 22 09:48:24 MST 2018",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "In this article, we present a meta-analysis of
                 research examining visibility of disability. In
                 interrogating the issue of visibility and invisibility
                 in the design of assistive technologies, we open a
                 discussion about how perceptions surrounding disability
                 can be probed through an examination of visibility and
                 how these tensions do, and perhaps should, influence
                 assistive technology design and research.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "14",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Galliers:2017:EEP,
  author =       "Julia Galliers and Stephanie Wilson and Jane Marshall
                 and Richard Talbot and Niamh Devane and Tracey Booth
                 and Celia Woolf and Helen Greenwood",
  title =        "Experiencing {EVA Park}, a Multi-User Virtual World
                 for People with Aphasia",
  journal =      j-TACCESS,
  volume =       "10",
  number =       "4",
  pages =        "15:1--15:??",
  month =        oct,
  year =         "2017",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3134227",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Mon Jan 22 09:48:24 MST 2018",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Virtual worlds are used in wide-ranging ways by many
                 people with long-term health conditions, but their use
                 by people with aphasia (PWA) has been limited. In
                 contrast, this article reports the use of EVA Park, a
                 multi-user virtual world designed for PWA to practice
                 conversations, focusing on people's emotional, social,
                 and conversational experiences. An analysis of
                 observation and interview data collected from 20 people
                 with aphasia who participated in a 5-week therapy
                 intervention revealed key themes related to user
                 experience. The themes offer a rich insight into
                 aspects of the virtual world experience for PWA that go
                 beyond therapeutic outcomes. They are as follows:
                 affect (positive and negative); types of conversation,
                 miscommunication, and misunderstanding; immersion in
                  the virtual world; social presence; and initiative and
                 flow. Overall, the study showed that participants
                 experienced positive emotional and social outcomes. We
                 argue that this was achieved as a consequence of EVA
                 Park being not only accessible but also a varied and
                 entertaining environment within which PWA experienced
                 both the realistic and the quirky while engaging with
                 others and having fun.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "15",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Huenerfauth:2018:IAS,
  author =       "Matt Huenerfauth and Kathleen F. McCoy",
  title =        "Introduction to the {ASSETS'16} Special Issue",
  journal =      j-TACCESS,
  volume =       "11",
  number =       "1",
  pages =        "1:1--1:??",
  month =        apr,
  year =         "2018",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3183374",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:09 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "1",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Reichinger:2018:PYM,
  author =       "Andreas Reichinger and Helena Garcia Carrizosa and
                 Joanna Wood and Svenja Schr{\"o}der and Christian
                 L{\"o}w and Laura Rosalia Luidolt and Maria
                 Schimkowitsch and Anton Fuhrmann and Stefan Maierhofer
                 and Werner Purgathofer",
  title =        "Pictures in Your Mind: Using Interactive
                 Gesture-Controlled Reliefs to Explore Art",
  journal =      j-TACCESS,
  volume =       "11",
  number =       "1",
  pages =        "2:1--2:??",
  month =        apr,
  year =         "2018",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3155286",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:09 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Tactile reliefs offer many benefits over the more
                 classic raised line drawings or tactile diagrams, as
                 depth, 3D shape, and surface textures are directly
                 perceivable. Although often created for blind and
                 visually impaired (BVI) people, a wider range of people
                 may benefit from such multimodal material. However,
                 some reliefs are still difficult to understand without
                 proper guidance or accompanying verbal descriptions,
                 hindering autonomous exploration. In this work, we
                 present a gesture-controlled interactive audio guide
                 (IAG) based on recent low-cost depth cameras that can
                 be operated directly with the hands on relief surfaces
                 during tactile exploration. The interactively
                 explorable, location-dependent verbal and captioned
                 descriptions promise rapid tactile accessibility to
                 2.5D spatial information in a home or education
                 setting, to online resources, or as a kiosk
                 installation at public places. We present a working
                 prototype, discuss design decisions, and present the
                 results of two evaluation studies: the first with 13
                 BVI test users and the second follow-up study with 14
                 test users across a wide range of people with
                 differences and difficulties associated with
                 perception, memory, cognition, and communication. The
                 participant-led research method of this latter study
                 prompted new, significant and innovative
                 developments.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "2",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Sharma:2018:DGB,
  author =       "Sumita Sharma and Blessin Varkey and Krishnaveni
                 Achary and Jaakko Hakulinen and Markku Turunen and Tomi
                 Heimonen and Saurabh Srivastava and Nitendra Rajput",
  title =        "Designing Gesture-Based Applications for Individuals
                 with Developmental Disabilities: Guidelines from User
                 Studies in {India}",
  journal =      j-TACCESS,
  volume =       "11",
  number =       "1",
  pages =        "3:1--3:??",
  month =        apr,
  year =         "2018",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3161710",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:09 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Gesture interaction provides a multitude of benefits
                 to individuals with developmental disabilities, from
                 enhancing social, motor and cognitive skills to
                 providing a safe and controlled environment for
                 simulating real-world scenarios. As gesture-based
                 applications gain ground in the special education
                 domain, we study their potential in the Indian context.
                 Together with Tamana, an NGO in New Delhi, we have been
                 conducting a series of exploratory user studies since
                 October 2013. This includes the design and evaluation
                 of three gesture-based applications to impart social
                 and life skills to individuals with developmental
                 disabilities. The Kirana application employs socially
                 appropriate gestures to teach the life skill of buying
                 day-to-day items from a local Indian grocery. Balloons
                 promotes joint attention skills through collaborative
                 interaction. HOPE improves motor coordination and
                 social and cognitive skills, with increasing levels of
                 difficulty. Based on studies with these applications,
                 this article presents guidelines for designing
                 gesture-based applications for individuals with
                 developmental disabilities. The guidelines focus on (a)
                 designing applications that cater to a larger group of
                 individuals to encourage collaboration and inclusion,
                 for instance, providing easy and controllable
                 transitions between different task levels, and
                 balancing interaction and content complexity; (b)
                 addressing the challenges in conducting research in
                 this domain, with respect to ethical and procedural
                 decisions; and (c) designing for technology acceptance
                 within the Indian context, for example, by following a
                 collaborative and stakeholder inclusive approach, and
                 addressing apprehensions towards technology adoption.
                 These guidelines aim to benefit other practitioners
                 working in this domain and especially in the
                 educational technology context of India.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "3",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Flores:2018:WAD,
  author =       "Germ{\'a}n H. Flores and Roberto Manduchi",
  title =        "{WeAllWalk}: an Annotated Dataset of Inertial Sensor
                 Time Series from Blind Walkers",
  journal =      j-TACCESS,
  volume =       "11",
  number =       "1",
  pages =        "4:1--4:??",
  month =        apr,
  year =         "2018",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3161711",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:09 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "We introduce WeAllWalk, a dataset of inertial sensor
                 time series collected from blind and sighted walkers
                 using a long cane or a guide dog. Ten blind volunteers
                 (seven using a long cane, one using a guide dog, and
                 two alternating use of a long cane and of a guide dog)
                 as well as five sighted volunteers contributed to the
                 data collection. The participants walked through fairly
                 long and complex indoor routes that included obstacles
                 to be avoided and doors to be opened. Inertial data
                 were recorded by two iPhone 6s carried by our
                 participants in their pockets and carefully annotated.
                 Ground-truth heel strike times were measured by two
                 small inertial sensor units clipped to the
                 participants' shoes. We also present an in-depth
                 comparative analysis of various step counting and turn
                 detection algorithms as tested on WeAllWalk. This
                 analysis reveals interesting differences between the
                 achievable accuracy of step and turn detection across
                 different communities of sighted and blind walkers.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "4",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Kacorri:2018:IAO,
  author =       "Hernisa Kacorri and Sergio Mascetti and Andrea Gerino
                 and Dragan Ahmetovic and Valeria Alampi and Hironobu
                 Takagi and Chieko Asakawa",
  title =        "Insights on Assistive Orientation and Mobility of
                 People with Visual Impairment Based on Large-Scale
                 Longitudinal Data",
  journal =      j-TACCESS,
  volume =       "11",
  number =       "1",
  pages =        "5:1--5:??",
  month =        apr,
  year =         "2018",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3178853",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:09 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Assistive applications for orientation and mobility
                 promote independence for people with visual impairment
                 (PVI). While typical design and evaluation of such
                 applications involves small-sample iterative studies,
                 we analyze large-scale longitudinal data from a
                 geographically diverse population. Our publicly
                  released dataset from iMove, a mobile app supporting
                  orientation of PVI, contains millions of interactions
                  by thousands of users over a year. Our analysis (i)
                  examines common functionalities, settings, assistive
                  features, and movement modalities in the iMove dataset
                  and (ii) discovers user communities based on
                  interaction patterns. We find that the most popular
                  interaction mode is passive, where users receive more
                  notifications, often verbose, while in motion and
                  perform fewer actions. The use of built-in assistive
                  features such as enlarged text indicates a high
                  presence of users with residual sight. Users fall into
                  three distinct groups: (C1) users interested in
                  surrounding points of interest, (C2) users interacting
                  in short bursts to inquire about their current
                  location, and (C3) users with long active sessions
                  while in motion. iMove was designed with C3 in mind,
                  and one strength of our
                 contribution is providing meaningful semantics for
                 unanticipated groups, C1 and C2. Our analysis reveals
                 insights that can be generalized to other assistive
                 orientation and mobility applications.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "5",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Shinohara:2018:TSA,
  author =       "Kristen Shinohara and Cynthia L. Bennett and Wanda
                 Pratt and Jacob O. Wobbrock",
  title =        "Tenets for Social Accessibility: Towards Humanizing
                 Disabled People in Design",
  journal =      j-TACCESS,
  volume =       "11",
  number =       "1",
  pages =        "6:1--6:??",
  month =        apr,
  year =         "2018",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3178855",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:09 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Despite years of addressing disability in technology
                 design and advocating user-centered design practices,
                 popular mainstream technologies remain largely
                 inaccessible for people with disabilities. We conducted
                 a design course study investigating how student
                 designers regard disability and explored how designing
                 for multiple disabled and nondisabled users encouraged
                 students to think about accessibility in the design
                 process. Across two university course offerings one
                 year apart, we examined how students focused on a
                 design project while learning user-centered design
                 concepts and techniques, working with people with and
                 without disabilities throughout the project. In
                 addition, we compared how students incorporated
                 disability-focused design approaches within a classroom
                 setting. We found that designing for multiple
                 stakeholders with and without disabilities expanded
                 student understanding of accessible design by
                 demonstrating that people with the same disability
                 could have diverse needs and by aligning such needs
                 with those of nondisabled users. We also found that
                 using approaches targeted toward designing for people
                 with disabilities complemented interactions with users,
                 particularly with regard to managing varying abilities
                 across users, or incorporating social aspects. Our
                  findings contribute to an understanding of how we might
                  effect change in design practice by working with
                 multiple stakeholders with and without disabilities
                 whenever possible. We refined Design for Social
                 Accessibility by incorporating these findings into
                 three tenets emphasizing: (1) design for disability
                 ought to incorporate users with and without
                 disabilities, (2) design should address functional and
                 social factors simultaneously, and (3) design should
                 include tools to spur consideration of social factors
                 in accessible design.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "6",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Huenerfauth:2018:II,
  author =       "Matt Huenerfauth and Kathleen F. McCoy",
  title =        "Introduction to this Issue",
  journal =      j-TACCESS,
  volume =       "11",
  number =       "2",
  pages =        "7:1--7:??",
  month =        jun,
  year =         "2018",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3199475",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:09 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "7",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Gotzelmann:2018:VAA,
  author =       "T. G{\"o}tzelmann",
  title =        "Visually Augmented Audio-Tactile Graphics for Visually
                 Impaired People",
  journal =      j-TACCESS,
  volume =       "11",
  number =       "2",
  pages =        "8:1--8:??",
  month =        jun,
  year =         "2018",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3186894",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:09 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Tactile graphics play an essential role in knowledge
                 transfer for blind people. The tactile exploration of
                 these graphics is often challenging because of the
                 cognitive load caused by physiological constraints and
                  their complexity. Coupling physical tactile graphics
                  with electronic devices makes it possible to support
                  tactile exploration with auditory feedback. Often,
                  these systems have strict constraints regarding their
                  mobility or the process of coupling both components.
                  Additionally, visually impaired people cannot
                  appropriately benefit from their residual vision. This
                  article presents a concept for 3D printed tactile
                  graphics that allows audio-tactile graphics to be used
                  with ordinary smartphones or tablet computers. By
                  using capacitive markers, the coupling of the tactile
                  graphics with the mobile device is simplified. Tactile
                  graphics integrating these markers can be printed in a
                  single pass by off-the-shelf 3D printers without any
                  post-processing and allow the use of multiple
                  elevation levels for graphical elements. Based on this
                  generic concept of visually augmented audio-tactile
                  graphics, we present a case study for maps. A
                  prototypical implementation was tested in a
                 user study with visually impaired people. All the
                 participants were able to interact with the 3D printed
                 tactile maps using a standard tablet computer. To study
                 the effect of visual augmentation of graphical
                 elements, we conducted another comprehensive user
                 study. We tested multiple types of graphics and
                 obtained evidence that visual augmentation may offer
                 clear advantages for the exploration of tactile
                  graphics. Even participants with minor residual vision
                  solved the tasks more quickly and accurately with
                  visual augmentation.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "8",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Fan:2018:GCS,
  author =       "Mingming Fan and Khai N. Truong",
  title =        "Guidelines for Creating Senior-Friendly Product
                 Instructions",
  journal =      j-TACCESS,
  volume =       "11",
  number =       "2",
  pages =        "9:1--9:??",
  month =        jun,
  year =         "2018",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3209882",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:09 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Although older adults feel generally positive about
                 technologies, many face difficulties when using them
                 and need support during the process. One common form of
                 support is the product instructions that come with
                 devices. Unfortunately, when using them, older adults
                 often feel confused, overwhelmed, or frustrated. In
                 this work, we sought to address the issues that affect
                 older adults' ability to successfully complete tasks
                 using product instructions. By observing how older
                 adults used the product instructions of various devices
                 and how they made modifications to simplify the use of
                 the instructions, we identified 11 guidelines for
                 creating senior-friendly product instructions. We
                  validated the usability and effectiveness of the
                  guidelines by comparing how older adults used
                  instruction manuals modified to adhere to these
                  guidelines against both the originals and manuals
                  modified by interaction design researchers.
                 Results show that, overall, participants had the
                 highest task success rate and lowest task completion
                 time when using guideline-modified user instructions.
                 Participants also perceived these instructions to be
                 the most helpful, the easiest to follow, the most
                 complete, and the most concise among the three. We also
                 compared the guidelines derived from this research to
                 existing documentation guidelines and discussed
                 potential challenges of applying them.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "9",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Antonelli:2018:DMW,
  author =       "Humberto Lidio Antonelli and Rodrigo Augusto Igawa and
                 Renata {Pontin De Mattos Fortes} and Eduardo Henrique
                 Rizo and Willian Massami Watanabe",
  title =        "Drop-Down Menu Widget Identification Using {HTML}
                 Structure Changes Classification",
  journal =      j-TACCESS,
  volume =       "11",
  number =       "2",
  pages =        "10:1--10:??",
  month =        jun,
  year =         "2018",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3178854",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:09 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Widgets have been deployed in rich internet
                 applications for more than 10 years. However, many of
                  the widgets currently available on the web do not
                  implement the accessibility design solutions
                  standardized in the ARIA (Accessible Rich Internet
                  Applications) specification and hence are not
                  accessible to disabled users. This article sets out an
                  approach for automatically identifying widgets on the
                  basis of machine-learning algorithms and the
                  classification of mutation records, an HTML5 mechanism
                  that logs all changes that occur in the structure of a
                  web application. Automatic widget identification is an
                  essential component for the development of automatic
                  ARIA evaluation and adaptation strategies. Thus, the
                 aim of this article is to take steps toward easing the
                 software-engineering process of ARIA widgets. The
                 proposed approach focuses on the identification of
                  drop-down menu widgets. An experiment with real-world
                  web applications showed that this approach can identify
                  these widgets and, based on an F-measure analysis
                  conducted during the experiment, outperforms previous
                  state-of-the-art techniques.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "10",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Zhang:2018:DEC,
  author =       "Lian Zhang and Qiang Fu and Amy Swanson and Amy
                 Weitlauf and Zachary Warren and Nilanjan Sarkar",
  title =        "Design and Evaluation of a Collaborative Virtual
                 Environment {(CoMove)} for Autism Spectrum Disorder
                 Intervention",
  journal =      j-TACCESS,
  volume =       "11",
  number =       "2",
  pages =        "11:1--11:??",
  month =        jun,
  year =         "2018",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3209687",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:09 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  abstract =     "Autism Spectrum Disorder (ASD) is a neurodevelopmental
                 disorder characterized in part by core deficits in
                 social interaction and communication. A collaborative
                 virtual environment (CVE), which is a computer-based,
                 distributed, virtual space for multiple users to
                 interact with one another and/or with virtual items,
                 has the potential to support flexible, safe, and
                  peer-based social interactions. In this article, we
                  present the design of a CVE system, called CoMove,
                 with the ultimate goals of measuring and potentially
                 enhancing collaborative interactions and verbal
                 communication of children with ASD when they play
                 collaborative puzzle games with their typically
                 developing (TD) peers in remote locations. CoMove has
                 two distinguishing characteristics: (i) the ability to
                 promote important collaborative behaviors (including
                 information sharing, sequential interactions, and
                 simultaneous interactions) and to provide real-time
                 feedback based on users' game performance; as well as
                 (ii) an objective way to measure and index important
                 aspects of collaboration and verbal-communication
                  skills during system interaction. A feasibility study
                  with 14 pairs (7 ASD/TD pairs and 7 TD/TD pairs) was
                  conducted to initially test CoMove.
                 The results of the study validated the system
                 feasibility and suggested its potential to index
                 important aspects of collaboration and verbal
                 communication.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "11",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Zhao:2018:DHG,
  author =       "Huan Zhao and Zhaobo Zheng and Amy Swanson and Amy
                 Weitlauf and Zachary Warren and Nilanjan Sarkar",
  title =        "Design of a Haptic-Gripper Virtual Reality System
                 ({Hg}) for Analyzing Fine Motor Behaviors in Children
                 with Autism",
  journal =      j-TACCESS,
  volume =       "11",
  number =       "4",
  pages =        "19:1--19:??",
  month =        nov,
  year =         "2018",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3231938",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:10 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/ft_gateway.cfm?id=3231938",
  abstract =     "Fine motor skills, including grasping, manipulating,
                 and reaching for objects, are a documented weakness for
                 many children with Autism Spectrum Disorders (ASD).
                 However, relatively less research has attempted to
                 address these motor deficits, especially by taking
                 advantage of advanced technology. To explore potential
                 mechanisms for expanding accessibility to fine motor
                 intervention for people with ASD, we present the design
                 and implementation of a feasibility study of a novel
                  Haptic-Gripper Virtual Reality System (Hg). Hg provides
                  opportunities to analyze and practice fine motor skills
                  in an adaptive and low-cost virtual environment with
                  real-time auditory,
                 visual, and haptic feedback. The Haptic Gripper in Hg
                 can detect a user's grip force and hand location and
                 provide haptic feedback to guide hand movement and grip
                 control while completing several simple and engaging
                 virtual fine motor tasks. We conducted a feasibility
                 study with six children with ASD and six typically
                 developing (TD) children and found that participants
                 were interested in using the Haptic Gripper and could
                 quickly get used to the system. Although the results
                  are preliminary and limited, we observed medium to
                  strong correlations between the proposed fine motor
                  skill metrics and the scores achieved on a standardized
                  fine motor skill test, as well as improvements in
                  participants' accuracy and steadiness of movement and
                  force control. This study provides important guidance
                 for future investigations of the Hg's potential for
                 assessing and improving fine motor manipulation
                 skills.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "19",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Elor:2018:PSC,
  author =       "Aviv Elor and Mircea Teodorescu and Sri Kurniawan",
  title =        "{Project Star Catcher}: a Novel Immersive Virtual
                 Reality Experience for Upper Limb Rehabilitation",
  journal =      j-TACCESS,
  volume =       "11",
  number =       "4",
  pages =        "20:1--20:??",
  month =        nov,
  year =         "2018",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3265755",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:10 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/ft_gateway.cfm?id=3265755",
  abstract =     "Modern immersive virtual reality experiences have the
                  unique potential to motivate patients undergoing
                  physical therapy to perform intensive, repetitive
                  task-based treatment and can be utilized to collect
                 real-time user data to track adherence and compliance
                 rates. This article reports the design and evaluation
                 of an immersive virtual reality game using the HTC Vive
                 for upper limb rehabilitation, titled ``Project Star
                 Catcher'' (PSC), aimed at users with hemiparesis. The
                 game mechanics were adapted from modified Constraint
                 Induced Therapy (mCIT), an established therapy method
                 where users are asked to use the weaker arm by
                  physically binding the stronger arm. Our adaptation
                  changes the binding from physical to psychological by
                  providing various types of immersive stimulation to
                 influence the use of the weaker arm. PSC was evaluated
                 by users with combined developmental and physical
                 impairments as well as stroke survivors. The results
                 suggest that we were successful in providing a
                 motivating experience for performing mCIT as well as a
                 cost-effective solution for real-time data capture
                 during therapy. We conclude the article with a set of
                 considerations for immersive virtual reality therapy
                 game design.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "20",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Chakraborty:2018:TDL,
  author =       "Tusher Chakraborty and Taslim Arefin Khan and A. B. M.
                 Alim {Al Islam}",
  title =        "Towards Devising a Low-cost and Easy-to-use Arithmetic
                 Learning Framework for Economically Less-privileged
                 Visually Impaired Children",
  journal =      j-TACCESS,
  volume =       "11",
  number =       "4",
  pages =        "21:1--21:??",
  month =        nov,
  year =         "2018",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3265756",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:10 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/ft_gateway.cfm?id=3265756",
  abstract =     "Basic arithmetic operations are essential skills
                 needed in our life, and this is no different for the
                 visually impaired. However, working arithmetic out on
                 paper is always a challenge for visually impaired
                 people. This situation is exacerbated by low-resource
                 settings due to a paucity of low-cost and easy-to-use
                 solutions. As a remedy to this situation, we propose a
                 low-cost and easy-to-use arithmetic learning framework
                 and draw a contrast between the conventional means of
                 solving arithmetic problems and our proposed framework.
                  Our proposal grew out of comprehensive studies, both
                  qualitative and quantitative, of the challenges
                 faced by visually impaired children from two low-income
                  countries. These studies were conducted in three phases
                  (exploratory, descriptive, and explanatory) involving
                  six visually impaired children and sixteen visually
                  impaired adults. User evaluation of our framework,
                  conducted in the guise of a tutorial session,
                 confirms its acceptability and adaptability, along with
                 its effectiveness in evoking interest in arithmetic. We
                  believe that our study and proposed framework will help
                  break down barriers to addressing similar challenges in
                  other developing regions beyond these two countries.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "21",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{LalL:2018:EVR,
  author =       "Lal ``Lila'' Bozgeyikli and Evren Bozgeyikli and
                 Srinivas Katkoori and Andrew Raij and Redwan Alqasemi",
  title =        "Effects of Virtual Reality Properties on User
                 Experience of Individuals with Autism",
  journal =      j-TACCESS,
  volume =       "11",
  number =       "4",
  pages =        "22:1--22:??",
  month =        nov,
  year =         "2018",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3267340",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:10 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/ft_gateway.cfm?id=3267340",
  abstract =     "In recent years, virtual reality (VR) has been become
                 a popular training tool for individuals with Autism
                 Spectrum Disorder (ASD). Although VR was proven to be a
                 promising tool for individuals with ASD, effects of VR
                 properties or attributes of user interfaces designed
                 for VR on user experience is still an unexplored area.
                 In this study, we explore effects of five attributes of
                 user interfaces designed for VR on user experience of
                 high-functioning individuals with Autism Spectrum
                 Disorder (HFASD): instruction methods, visual fidelity,
                  view zoom, clutter, and motion. Our motivation is to
                  contribute positively to the design of future VR
                 training applications for individuals with ASD so that
                 more benefits can be gained. Three VR experiences were
                 designed and implemented, and a user study was
                 performed with 15 high-functioning individuals with ASD
                 and 15 neurotypical individuals as the control group.
                 Results indicated that using animated instructions and
                 avoiding verbal instructions, using low visual fidelity
                 and normal view zoom, and using no clutter and no
                 motion in VR warehouse training applications targeting
                 individuals with HFASD are good design practices.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "22",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Wade:2018:EEI,
  author =       "Joshua Wade and Heathman S. Nichols and Megan Ichinose
                 and Dayi Bian and Esube Bekele and Matthew Snodgress
                 and Ashwaq Zaini Amat and Eric Granholm and Sohee Park
                 and Nilanjan Sarkar",
  title =        "Extraction of Emotional Information via Visual
                 Scanning Patterns: a Feasibility Study of Participants
                 with Schizophrenia and Neurotypical Individuals",
  journal =      j-TACCESS,
  volume =       "11",
  number =       "4",
  pages =        "23:1--23:??",
  month =        nov,
  year =         "2018",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3282434",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:10 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/ft_gateway.cfm?id=3282434",
  abstract =     "Emotion recognition impairment is a core feature of
                 schizophrenia (SZ), present throughout all stages of
                 this condition, and leads to poor social outcome.
                 However, the underlying mechanisms that give rise to
                 such deficits have not been elucidated and hence, it
                 has been difficult to develop precisely targeted
                 interventions. Evidence supports the use of methods
                 designed to modify patterns of visual attention in
                 individuals with SZ in order to effect meaningful
                 improvements in social cognition. To date, however,
                 attention-shaping systems have not fully utilized
                 available technology (e.g., eye tracking) to achieve
                 this goal. The current work consisted of the design and
                 feasibility testing of a novel gaze-sensitive social
                 skills intervention system called MASI-VR. Adults from
                  an outpatient clinic with confirmed SZ diagnosis (n =
                  10) and a comparison sample of neurotypical
                  participants (n = 10) were evaluated on measures of
                 emotion recognition and visual attention at baseline
                 assessment, and a pilot test of the intervention system
                 was evaluated on the SZ sample following five training
                  sessions over three weeks. Consistent with the extant
                  literature, participants in the SZ group demonstrated
                 lower recognition of faces showing medium intensity
                 fear, spent more time deliberating about presented
                 emotions, and had fewer fixations in comparison to
                 neurotypical peers. Furthermore, participants in the SZ
                 group showed significant improvement in the recognition
                 of fearful faces post-training. Preliminary evidence
                 supports the feasibility of a gaze-sensitive paradigm
                 for use in assessment and training of emotion
                 recognition and social attention in individuals with
                 SZ, thus warranting further evaluation of the novel
                 intervention.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "23",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Sultana:2019:EAS,
  author =       "Afroza Sultana and Karyn Moffatt",
  title =        "Effects of Aging on Small Target Selection with Touch
                 Input",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "1",
  pages =        "1:1--1:??",
  month =        feb,
  year =         "2019",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3300178",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:10 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/ft_gateway.cfm?id=3300178",
  abstract =     "Age-related declines in physical and cognitive
                 function can result in target selection difficulties
                 that hinder device operation. Previous studies have
                 detailed the different types of target selection errors
                 encountered, as well as how they vary with age and with
                 input device for mouse and pen interaction. We extend
                 this work to describe the types of age-related
                 selection errors encountered with small touchscreen
                 devices. Consistent with prior results, we found that
                 older adults had longer target selection times,
                 generated higher error rates, and encountered a broader
                 range of selection difficulties (e.g., miss errors and
                 slip errors) relative to a younger comparison group.
                 However, in contrast to the patterns previously found
                 with pen interaction, we found that miss error (i.e.,
                 both landing and lifting outside the target bounds) was
                 a more common source of errors for older adults than
                 slip error (i.e., landing on the target but slipping
                 outside the target bounds before lifting). Moreover,
                 aging influenced both miss and slip errors in our study
                 of touch interaction, whereas for pen interaction, age
                 has been found to influence only slip errors. These
                 differences highlight the need to consider pen and
                 touch interaction separately despite both being forms
                 of direct input. Based on our findings, we discuss
                 possible approaches for improving the accessibility of
                 touch interaction for older adults.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "1",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Bian:2019:DPB,
  author =       "Dayi Bian and Joshua Wade and Amy Swanson and Amy
                 Weitlauf and Zachary Warren and Nilanjan Sarkar",
  title =        "Design of a Physiology-based Adaptive Virtual Reality
                 Driving Platform for Individuals with {ASD}",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "1",
  pages =        "2:1--2:??",
  month =        feb,
  year =         "2019",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3301498",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:10 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/ft_gateway.cfm?id=3301498",
  abstract =     "Driving is essential for many people in developed
                 countries to achieve independence. Individuals with
                 Autism Spectrum Disorder (ASD), in addition to having
                 social skill deficits, may experience difficulty in
                 learning to drive due to deficits in
                 attention-shifting, performing sequential tasks,
                 integrating visual-motor responses, and coordinating
                  motor responses. A lack of confidence and feelings of
                  anxiety further exacerbate these concerns. While there
                  is a
                 growing body of research regarding assessment of
                 driving behavior or comparisons of driving behaviors
                  between individuals with and without ASD, there is a
                  lack of driving simulators catered toward the needs of
                  individuals with ASD. We present the
                 development of a novel closed-loop adaptive Virtual
                 Reality (VR) driving simulator for individuals with ASD
                 that can infer one's engagement based on his/her
                 physiological responses and adapts driving task
                 difficulty based on engagement level in real-time. We
                  believe that this simulator will provide individuals
                  with ASD opportunities to learn driving skills in a
                  safe and individualized environment and help them live
                  independently. We also conducted a
                 small user study with teenagers with ASD to demonstrate
                 the feasibility and tolerability of such a driving
                 simulator. Preliminary results showed that the
                 participants found the engagement-sensitive system more
                 engaging and more enjoyable than a purely
                 performance-sensitive system. These findings could
                 support future work into driving simulator
                 technologies, which could provide opportunities to
                 practice driving skills in cost-effective, supportive,
                 and safe environments.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "2",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Yan:2019:CSA,
  author =       "Shunguo Yan and P. G. Ramachandran",
  title =        "The Current Status of Accessibility in Mobile Apps",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "1",
  pages =        "3:1--3:??",
  month =        feb,
  year =         "2019",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3300176",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:10 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/ft_gateway.cfm?id=3300176",
  abstract =     "This study evaluated the status of accessibility in
                 mobile apps by investigating the graphical user
                 interface (GUI) structures and conformance to
                 accessibility guidelines of 479 Android apps in 23
                 business categories from Google Play. An automated
                 tool, IBM Mobile Accessibility Checker (MAC), was used
                 to identify the accessibility issues, which were
                 categorized as a violation (V), potential violation
                  (PV), or warning (W). The results showed that 94.8\%,
                  97.5\%, and 66.4\% of the apps studied contained issues
                  related to V, PV, and W, respectively. Five widget
                 categories (TextView, ImageView, View, Button, and
                 ImageButton) were used to create 92\% of the total
                 number of the GUI elements and caused 89\%, 78\%, and
                 86\% of V, PV, and W, respectively. These accessibility
                 issues were mainly caused by lack of element focus,
                 missing element description, low text color contrast,
                  lack of sufficient spacing between elements, and text
                  fonts and elements smaller than the minimum sizes.
                 Together, these accessibility issues accounted for
                 97.0\%, 77.8\%, and 94.5\% of V, PV, and W,
                 respectively. This study proposed coverage measures to
                 estimate the percentage of accessibility issues
                 identified by an automated tool. The result showed that
                 MAC, on average, identified about 67\% of accessibility
                 issues in mobile apps. Two new accessibility
                 conformance measures were proposed in this study:
                 inaccessible element rate (IAER) and accessibility
                 issue rate (AIR). IAER estimates the percentage of GUI
                 elements that are inaccessible. AIR calculates the
                 percentage of the actual number of accessibility issues
                 relative to the maximum number of accessibility issues.
                 Average IAER and AIR scores were 27.3\%, 19.9\%, 6.3\%
                 and 20.7\%, 15.0\%, 5.4\% for V, PV, and W,
                 respectively, for the studied apps. The IAER score
                  showed that approximately 30\% of the GUI elements had
                 accessibility issues, and the AIR score indicated that
                 15\% of the accessibility issues remained and need to
                 be fixed to make the apps accessible.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "3",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Brock:2019:ISI,
  author =       "Anke Brock",
  title =        "Introduction to the Special Issue on {ASSETS'17}",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "2",
  pages =        "4:1--4:??",
  month =        jul,
  year =         "2019",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3325866",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:11 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/ft_gateway.cfm?id=3325866",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "4",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Monteiro:2019:TED,
  author =       "Caio D. D. Monteiro and Frank M. Shipman and
                 Satyakiran Duggina and Ricardo Gutierrez-Osuna",
  title =        "Tradeoffs in the Efficient Detection of Sign Language
                 Content in Video Sharing Sites",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "2",
  pages =        "5:1--5:??",
  month =        jul,
  year =         "2019",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3325863",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:11 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/ft_gateway.cfm?id=3325863",
  abstract =     "Video sharing sites have become keepers of de-facto
                 digital libraries of sign language content, being used
                 to store videos including the experiences, knowledge,
                 and opinions of many in the deaf or hard of hearing
                 community. Due to limitations of term-based search over
                 metadata, these videos can be difficult to find,
                 reducing their value to the community. Another result
                 is that community members frequently engage in a
                 push-style delivery of content (e.g., emailing or
                 posting links to videos for others in the sign language
                 community) rather than having access be based on the
                 information needs of community members. In prior work,
                 we have shown the potential to detect sign language
                 content using features derived from the video content
                  rather than relying on metadata. Our prior technique
                  was developed with a focus on accuracy of results and
                  is quite computationally expensive, making it
                  unrealistic to apply to a corpus the size of
                 YouTube or other large video sharing sites. Here, we
                 describe and examine the performance of optimizations
                 that reduce the cost of face detection and the length
                 of video segments processed. We show that optimizations
                 can reduce the computation time required by 96\%, while
                 losing only 1\% in F1 score. Further, a keyframe-based
                 approach is examined that removes the need to process
                 continuous video. This approach achieves comparable
                 recall but lower precision than the above techniques.
                 Merging the advantages of the optimizations, we also
                 present a staged classifier, where the keyframe
                 approach is used to reduce the number of non-sign
                 language videos fully processed. An analysis of the
                 staged classifier shows a further reduction in average
                 computation time per video while achieving similar
                 quality of results.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "5",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Berke:2019:DPE,
  author =       "Larwan Berke and Matt Huenerfauth and Kasmira Patel",
  title =        "Design and Psychometric Evaluation of {American Sign
                 Language} Translations of Usability Questionnaires",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "2",
  pages =        "6:1--6:??",
  month =        jul,
  year =         "2019",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3314205",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:11 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/ft_gateway.cfm?id=3314205",
  abstract =     "To promote greater inclusion of people who are Deaf
                 and Hard of Hearing (DHH) in studies conducted by
                 Human-Computer Interaction (HCI) researchers or
                 professionals, we have undertaken a project to formally
                 translate several standardized usability questionnaires
                 from English to American Sign Language (ASL). Many deaf
                 adults in the U.S. have lower levels of English reading
                 literacy, but there are currently no standardized
                 usability questionnaires available in ASL for these
                 users. A critical concern in conducting such a
                 translation is to ensure that the meaning of the
                 original question items has been preserved during
                 translation, as well as other key psychometric
                 properties of the instrument, including internal
                 reliability, criterion validity, and construct
                 validity. After identifying best-practices for such a
                 translation and evaluation project, a bilingual team of
                 domain experts (including native ASL signers who are
                 members of the Deaf community) translated the System
                 Usability Scale (SUS) and Net Promoter Score (NPS)
                 instruments into ASL and then conducted
                 back-translation evaluations to assess the faithfulness
                 of the translation. The new ASL instruments were
                 employed in usability tests with DHH participants, to
                 assemble a dataset of response scores, in support of
                 the psychometric validation. We are disseminating these
                 translated instruments, as well as collected response
                 values from DHH participants, to encourage greater
                 participation in HCI studies among DHH users.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "6",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Kafle:2019:PUI,
  author =       "Sushant Kafle and Matt Huenerfauth",
  title =        "Predicting the Understandability of Imperfect
                 {English} Captions for People Who Are Deaf or Hard of
                 Hearing",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "2",
  pages =        "7:1--7:??",
  month =        jul,
  year =         "2019",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3325862",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:11 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/ft_gateway.cfm?id=3325862",
  abstract =     "Automatic Speech Recognition (ASR) technology has seen
                 major advancements in its accuracy and speed in recent
                 years, making it a possible mechanism for supporting
                 communication between people who are Deaf or
                 Hard-of-Hearing (DHH) and their hearing peers. However,
                 state-of-the-art ASR technology is still imperfect in
                 many realistic settings. Researchers who evaluate ASR
                 performance often focus on improving the Word Error
                 Rate (WER) metric, but it has been found to have little
                 correlation with human-subject performance for many
                 applications. This article describes and evaluates
                 several new captioning-focused evaluation metrics for
                 predicting the impact of ASR errors on the
                 understandability of automatically generated captions
                 for people who are DHH. Through experimental studies
                 with DHH users, we have found that our new metric
                 (based on word-importance and semantic-difference
                 scoring) is more closely correlated with DHH users'
                 judgements of caption quality, as compared to
                 pre-existing metrics for ASR evaluation.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "7",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Vacher:2019:MEC,
  author =       "Michel Vacher and Fr{\'e}d{\'e}ric Aman and Solange
                 Rossato and Fran{\c{c}}ois Portet and Benjamin
                 Lecouteux",
  title =        "Making Emergency Calls More Accessible to Older Adults
                 Through a Hands-free Speech Interface in the House",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "2",
  pages =        "8:1--8:??",
  month =        jul,
  year =         "2019",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3310132",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:11 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/ft_gateway.cfm?id=3310132",
  abstract =     "Wearable personal emergency response (PER) systems
                 are the mainstream solution for allowing frail and
                 isolated individuals to call for help in an emergency.
                 However, these devices are not well adapted to all
                 users and are often not worn all the time, meaning they
                 are not available when needed. This article presents a
                 Voice User Interface system for emergency-call
                 recognition. The interface is designed to permit
                 hands-free interaction using natural language.
                 Crucially, this allows a call for help to be registered
                 without necessitating physical proximity to the system.
                 The system is based on an ASR engine and is tested on a
                 corpus collected to simulate realistic situations. The
                 corpus contains French speech from 4 older adults and
                 13 younger people wearing an old-age simulator to
                 hamper their mobility, vision, and hearing. On-line
                 evaluation of the preliminary system showed an
                 emergency-call error rate of 27\%. Subsequent off-line
                 experimentation improved the results (call error rate
                 24\%), demonstrating that emergency-call recognition in
                 the home is achievable. Another contribution of this
                 work is the corpus, which is made available for
                 research with the hope that it will facilitate related
                 research and quicker development of robust methods for
                 automatic emergency-call recognition in the home.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "8",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Sharit:2019:EEU,
  author =       "Joseph Sharit and Jerad H. Moxley and Walter R. Boot
                 and Neil Charness and Wendy A. Rogers and Sara J.
                 Czaja",
  title =        "Effects of Extended Use of an Age-friendly Computer
                 System on Assessments of Computer Proficiency,
                 Attitudes, and Usability by Older Non--Computer Users",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "2",
  pages =        "9:1--9:??",
  month =        jul,
  year =         "2019",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3325290",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:11 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/ft_gateway.cfm?id=3325290",
  abstract =     "This study examined the impact of use of a computer
                 software application designed specifically for older
                 people, known as PRISM (a Personal Reminder Information
                 and Social Management system), which was installed on a
                 computer that was placed in the homes of adults aged 65
                 to 98 years, who were at risk for social isolation and
                 had minimal or no computer skills and no computers in
                 their homes. Participants received face-to-face
                 training on the system in their homes over several days
                 and a variety of measures were collected at baseline
                 and at 12 months. A growth mixture model applied to
                 participants' usage of the system over the course of 12
                 months revealed two distinct subpopulations of
                 users (less-frequent users and more-frequent users) who
                 after one year of exposure to the system differed in
                 computer proficiency, attitudes toward computers, and
                 ratings of system usability. These two groups did not
                 differ on computer proficiency and computer attitude
                 measures at baseline. The more-frequent user group,
                 however, had significantly higher fluid cognitive
                 abilities. Additional analytical models were used to
                 further examine the relationships among the study
                 measures. The implications of the findings are
                 discussed in terms of the importance of usability for
                 promoting initial engagement with a system and that
                 increased engagement with the system can instill
                 beliefs in these older adults that they can
                 successfully transition to other computer-based
                 technologies and applications. The results also
                 underscore the importance of the user-centered design
                 approach and designing highly usable systems for older
                 adults with low technology proficiency.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "9",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Huenerfauth:2019:EMO,
  author =       "Matt Huenerfauth and Kathleen F. McCoy",
  title =        "Editorial: a Message from the Outgoing
                 {Editors-in-Chief}",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "3",
  pages =        "10:1--10:??",
  month =        sep,
  year =         "2019",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3345019",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:11 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/ft_gateway.cfm?id=3345019",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "10",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Williams:2019:FSA,
  author =       "Kristin Williams and Taylor Clarke and Steve Gardiner
                 and John Zimmerman and Anthony Tomasic",
  title =        "Find and Seek: Assessing the Impact of Table
                 Navigation on Information Look-up with a Screen
                 Reader",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "3",
  pages =        "11:1--11:??",
  month =        sep,
  year =         "2019",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3342282",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:11 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/ft_gateway.cfm?id=3342282",
  abstract =     "Web designers use visual cues such as layout and
                 typography to make pages easier to navigate and
                 understand. Yet, screen readers generally ignore these
                 features and present page information in a linear audio
                 stream. We investigate whether transcoding the visual
                 semantics of grid-based layouts to tables supports
                 better navigation. In a controlled experiment,
                 participants navigated re-written pages significantly
                 faster when doing data synthesis tasks and more
                 accurately when looking up information meeting multiple
                 criteria. Participants rated their table navigation
                 experience better in terms of effort, memorization,
                 ease of navigation, understanding of page information,
                 and confidence in submitted answers. Participants
                 attributed these gains to the table structure's support
                 for (1) predictable audio presentation, (2) adopting an
                 appropriate search strategy, and (3) making sense of
                 page content. Contrary to the established belief that
                 tables are inaccessible, our results show that tables
                 can facilitate navigation when users need to synthesize
                 across page content.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "11",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Putnam:2019:ITD,
  author =       "Cynthia Putnam and Christina Hanschke and Jennifer
                 Todd and Jonathan Gemmell and Mia Kollia",
  title =        "Interactive Technologies Designed for Children with
                 Autism: Reports of Use and Desires from Parents,
                 Teachers, and Therapists",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "3",
  pages =        "12:1--12:??",
  month =        sep,
  year =         "2019",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3342285",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:11 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/ft_gateway.cfm?id=3342285",
  abstract =     "Autism spectrum disorder (ASD) affects many people;
                 the Centers for Disease Control and Prevention estimates
                 that 1 in 59 children are currently identified with ASD
                 in the United States. Although it is difficult to
                 generalize about people with ASD due to their
                 heterogeneity, many share an affinity for technologies;
                 as such, numerous academic endeavors and commercial
                 products have focused on the creation of interactive
                 technologies for ASD. In this article, we present
                 findings from 19 interviews and 230 surveys with
                 parents, teachers, and therapists who had children with
                 ASD in their care and had considered or used
                 interactive technologies with those children. We aimed
                 to understand how interactive technologies were used,
                 perceived, desired, and discovered. Findings of use and
                 perception included the following: participants had
                 tried a wide range of commercially available
                 technologies but had very few reported products in
                 common, products were limited to commercial
                 mobile-based apps, and apps were typically perceived
                 positively. In regard to desires, participants hoped
                 for future technologies on diverse platforms (e.g.,
                 robots, virtual reality) with more consideration given
                 to personalization, customization, and incorporation of
                 audio and video. Findings about discovery included the
                 following: participants chose technologies in an
                 information-poor environment, and although there are
                 many academic projects aimed at participants' desires,
                 no participants reported any experience working with
                 researchers. Implications of this study include the
                 need for a recommendation and information sharing
                 system to help people choose and discover appropriate
                 and effective interactive technologies that are a good
                 fit for their child. This work also pointed to a need
                 for such a system to include findings from lab
                 (experimental and usability) studies of commercially
                 available interactive technologies to provide measures
                 of efficacy and usability. Our envisioned system could
                 also potentially help academic researchers with
                 outreach to wider audiences.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "12",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Huenerfauth:2019:ISI,
  author =       "Kathleen F. McCoy and Matt Huenerfauth",
  title =        "Introduction to the Special Issue on {ASSETS'17} (Part
                 2)",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "3",
  pages =        "13:1--13:??",
  month =        sep,
  year =         "2019",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3345021",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:11 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/ft_gateway.cfm?id=3345021",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "13",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Sato:2019:NWL,
  author =       "Daisuke Sato and Uran Oh and Jo{\~a}o Guerreiro and
                 Dragan Ahmetovic and Kakuya Naito and Hironobu Takagi
                 and Kris M. Kitani and Chieko Asakawa",
  title =        "{NavCog3} in the Wild: Large-scale Blind Indoor
                 Navigation Assistant with Semantic Features",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "3",
  pages =        "14:1--14:??",
  month =        sep,
  year =         "2019",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3340319",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Oct 19 17:13:11 MDT 2019",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/ft_gateway.cfm?id=3340319",
  abstract =     "NavCog3 is a smartphone turn-by-turn navigation
                 assistant system that we developed specifically to
                 enable independent navigation for people with visual
                 impairments. Using off-the-shelf Bluetooth beacons
                 installed in the surrounding environment and a
                 commodity smartphone carried by the user, NavCog3
                 achieves unparalleled localization accuracy in
                 real-world large-scale scenarios. By leveraging its
                 accurate localization capabilities, NavCog3 guides the
                 user through the environment and signals the presence
                 of semantic features and points of interest in the
                 vicinity (e.g., doorways, shops). To assess the
                 capability of NavCog3 to promote independent mobility
                 of individuals with visual impairments, we deployed and
                 evaluated the system in two challenging real-world
                 scenarios. The first scenario demonstrated the
                 scalability of the system, which was permanently
                 installed in a five-story shopping mall spanning three
                 buildings and a public underground area. During the
                 study, 10 participants traversed three fixed routes,
                 and 43 participants traversed free-choice routes across
                 the environment. The second scenario validated the
                 system's usability in the wild in a hotel complex
                 temporarily equipped with NavCog3 during a conference
                 for individuals with visual impairments. In the hotel,
                 almost 14.2h of system usage data were collected from
                 37 unique users who performed 280 travels across the
                 environment, for a total of 30,200m traversed.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "14",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?idx=J1156",
}

@Article{Guerreiro:2020:GNE,
  author =       "Tiago Guerreiro and Stephanie Ludi",
  title =        "Greetings from the New {Editors-in-Chief}",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "4",
  pages =        "1--1",
  month =        jan,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3372922",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Jan 23 07:49:32 MST 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/abs/10.1145/3372922",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Zhao:2020:DEC,
  author =       "Yuhang Zhao and Sarit Szpiro and Lei Shi and Shiri
                 Azenkot",
  title =        "Designing and Evaluating a Customizable Head-mounted
                 Vision Enhancement System for People with Low Vision",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "4",
  pages =        "1--46",
  month =        jan,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3361866",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Jan 23 07:49:32 MST 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/abs/10.1145/3361866",
  abstract =     "Recent advances in head-mounted displays (HMDs)
                 present an opportunity to design vision enhancement
                 systems for people with low vision, whose vision cannot
                 be corrected with glasses or contact lenses. We aim to
                 understand whether and how HMDs can aid \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "15",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Snider:2020:AIN,
  author =       "Sharon Snider and Willie L. {Scott II} and Shari
                 Trewin",
  title =        "Accessibility Information Needs in the Enterprise",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "4",
  pages =        "1--23",
  month =        jan,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3368620",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Jan 23 07:49:32 MST 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/abs/10.1145/3368620",
  abstract =     "We describe the questions asked about accessibility,
                 both through information searches and direct queries,
                 within a large multinational corporation over a period
                 of two years, finding an emphasis on topics covering
                 enterprise requirements for testing, \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "16",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Moffatt:2020:ISI,
  author =       "Karyn Moffatt",
  title =        "Introduction to the Special Issue on {ASSETS'18}",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "4",
  pages =        "1--1",
  month =        jan,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3372925",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Jan 23 07:49:32 MST 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/abs/10.1145/3372925",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Shinohara:2020:DSA,
  author =       "Kristen Shinohara and Nayeri Jacobo and Wanda Pratt
                 and Jacob O. Wobbrock",
  title =        "Design for Social Accessibility Method Cards: Engaging
                 Users and Reflecting on Social Scenarios for Accessible
                 Design",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "4",
  pages =        "1--33",
  month =        jan,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3369903",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Jan 23 07:49:32 MST 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/abs/10.1145/3369903",
  abstract =     "This article is an extended version of our 2018 ASSETS
                 paper entitled, {``Incorporating Social Factors in
                 Accessible Design.''} In our ASSETS paper, we
                 demonstrated the viability of the Design for Social
                 Accessibility perspective through a series of
                 user-\ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "17",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Abdolrahmani:2020:BLS,
  author =       "Ali Abdolrahmani and Kevin M. Storer and Antony Rishin
                 Mukkath Roy and Ravi Kuber and Stacy M. Branham",
  title =        "Blind Leading the Sighted: Drawing Design Insights
                 from Blind Users towards More Productivity-oriented
                 Voice Interfaces",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "4",
  pages =        "1--35",
  month =        jan,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3368426",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Jan 23 07:49:32 MST 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/abs/10.1145/3368426",
  abstract =     "Voice-activated personal assistants (VAPAs) are
                 becoming smaller, cheaper, and more accurate, such that
                 they are now prevalent in homes (e.g., Amazon Echo,
                 Sonos One) and on mobile devices (e.g., Google
                 Assistant, Apple Siri) around the world. VAPAs
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "18",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Ahmetovic:2020:DLC,
  author =       "Dragan Ahmetovic and Sergio Mascetti and Cristian
                 Bernareggi and Jo{\~a}o Guerreiro and Uran Oh and
                 Chieko Asakawa",
  title =        "Deep Learning Compensation of Rotation Errors During
                 Navigation Assistance for People with Visual
                 Impairments or Blindness",
  journal =      j-TACCESS,
  volume =       "12",
  number =       "4",
  pages =        "1--19",
  month =        jan,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3349264",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Thu Jan 23 07:49:32 MST 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/abs/10.1145/3349264",
  abstract =     "Navigation assistive technologies are designed to
                 support people with visual impairments during mobility.
                 In particular, turn-by-turn navigation is commonly used
                 to provide walk and turn instructions, without
                 requiring any prior knowledge about the \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "19",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Gerling:2020:CRT,
  author =       "Kathrin Gerling and Mo Ray and Vero {Vanden Abeele}
                 and Adam B. Evans",
  title =        "Critical Reflections on Technology to Support Physical
                 Activity among Older Adults: an Exploration of Leading
                 {HCI} Venues",
  journal =      j-TACCESS,
  volume =       "13",
  number =       "1",
  pages =        "1:1--1:23",
  month =        apr,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3374660",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Apr 24 19:00:00 MDT 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/abs/10.1145/3374660",
  abstract =     "Contemporary policy on ageing overwhelmingly focuses
                 on active ageing and achieving a sustainable increase
                 in disability-free years, leading to an agenda that
                 promotes interventions that often focus on deficits of
                 older persons with little consideration \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "1",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Hong:2020:RSI,
  author =       "Jonggi Hong and Christine Vaing and Hernisa Kacorri
                 and Leah Findlater",
  title =        "Reviewing Speech Input with Audio: Differences between
                 Blind and Sighted Users",
  journal =      j-TACCESS,
  volume =       "13",
  number =       "1",
  pages =        "2:1--2:28",
  month =        apr,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3382039",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Apr 24 19:00:00 MDT 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/abs/10.1145/3382039",
  abstract =     "Speech input is a primary method of interaction for
                 blind mobile device users, yet the process of dictating
                 and reviewing recognized text through audio only (i.e.,
                 without access to visual feedback) has received little
                 attention. A recent study found \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "2",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Brinkley:2020:ENP,
  author =       "Julian Brinkley and Earl W. Huff and Briana Posadas
                 and Julia Woodward and Shaundra B. Daily and Juan E.
                 Gilbert",
  title =        "Exploring the Needs, Preferences, and Concerns of
                 Persons with Visual Impairments Regarding Autonomous
                 Vehicles",
  journal =      j-TACCESS,
  volume =       "13",
  number =       "1",
  pages =        "3:1--3:34",
  month =        apr,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3372280",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Apr 24 19:00:00 MDT 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/abs/10.1145/3372280",
  abstract =     "Fully autonomous or ``self-driving'' vehicles are an
                 emerging technology that may hold tremendous mobility
                 potential for blind or visually impaired persons who
                 are currently unable to drive a conventional motor
                 vehicle. Despite the considerable potential \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "3",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Ross:2020:EIL,
  author =       "Anne Spencer Ross and Xiaoyi Zhang and James Fogarty
                 and Jacob O. Wobbrock",
  title =        "An Epidemiology-inspired Large-scale Analysis of
                 {Android} App Accessibility",
  journal =      j-TACCESS,
  volume =       "13",
  number =       "1",
  pages =        "4:1--4:36",
  month =        apr,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3348797",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Apr 24 19:00:00 MDT 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/abs/10.1145/3348797",
  abstract =     "Accessibility barriers in mobile applications (apps)
                 can make it challenging for people who have impairments
                 or use assistive technology to use those apps. Ross et
                 al.'s epidemiology-inspired framework emphasizes that a
                 wide variety of factors may \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "4",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Guerreiro:2020:ISI,
  author =       "Jo{\~a}o Guerreiro and Anke M. Brock and Hernisa
                 Kacorri",
  title =        "Introduction to the Special Issue on Technology to
                 Support Independent Orientation and Mobility of People
                 with Visual Impairments",
  journal =      j-TACCESS,
  volume =       "13",
  number =       "2",
  pages =        "5e:1--5e:2",
  month =        jun,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3398652",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Jul 8 18:28:17 MDT 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/abs/10.1145/3398652",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "5e",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{vanErp:2020:TWM,
  author =       "Jan B. F. van Erp and Katja I. Paul and Tina Mioch",
  title =        "Tactile Working Memory Capacity of Users Who Are Blind
                 in an Electronic Travel Aid Application with a
                 Vibration Belt",
  journal =      j-TACCESS,
  volume =       "13",
  number =       "2",
  pages =        "5:1--5:14",
  month =        jun,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3372273",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Jul 8 18:28:17 MDT 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/abs/10.1145/3372273",
  abstract =     "Electronic travel aids (ETAs) can increase the safety
                 and comfort of pedestrians who have a visual impairment
                 by displaying obstacles through a vibrotactile
                 navigation belt. Building a complete picture of
                 relevant obstacles and finding a safe route \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "5",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Hersh:2020:MMU,
  author =       "Marion Hersh",
  title =        "Mental Maps and the Use of Sensory Information by
                 Blind and Partially Sighted People",
  journal =      j-TACCESS,
  volume =       "13",
  number =       "2",
  pages =        "6:1--6:32",
  month =        jun,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3375279",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Jul 8 18:28:17 MDT 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/abs/10.1145/3375279",
  abstract =     "This article aims to fill an important gap in the
                 literature by reporting on blind and partially sighted
                 people's use of spatial representations (mental maps)
                 from their perspective and when travelling on real
                 routes. The results presented here were \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "6",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Thevin:2020:XRV,
  author =       "Lauren Thevin and Carine Briant and Anke M. Brock",
  title =        "{X-Road}: Virtual Reality Glasses for Orientation and
                 Mobility Training of People with Visual Impairments",
  journal =      j-TACCESS,
  volume =       "13",
  number =       "2",
  pages =        "7:1--7:47",
  month =        jun,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3377879",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Jul 8 18:28:17 MDT 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/abs/10.1145/3377879",
  abstract =     "Orientation and Mobility (O\&M) classes teach people
                 with visual impairments how to navigate the world; for
                 instance, how to cross a road. Yet, this training can
                 be difficult and dangerous due to conditions such as
                 traffic and weather. Virtual Reality  \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "7",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{May:2020:SSD,
  author =       "Keenan R. May and Brianna J. Tomlinson and Xiaomeng Ma
                 and Phillip Roberts and Bruce N. Walker",
  title =        "Spotlights and Soundscapes: On the Design of Mixed
                 Reality Auditory Environments for Persons with Visual
                 Impairment",
  journal =      j-TACCESS,
  volume =       "13",
  number =       "2",
  pages =        "8:1--8:47",
  month =        jun,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3378576",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Jul 8 18:28:17 MDT 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/abs/10.1145/3378576",
  abstract =     "For persons with visual impairment, forming cognitive
                 maps of unfamiliar interior spaces can be challenging.
                 Various technical developments have converged to make
                 it feasible, without specialized equipment, to
                 represent a variety of useful landmark \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "8",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Leporini:2020:DGI,
  author =       "Barbara Leporini and Valentina Rossetti and Francesco
                 Furfari and Susanna Pelagatti and Andrea Quarta",
  title =        "Design Guidelines for an Interactive {$3$D} Model as a
                 Supporting Tool for Exploring a Cultural Site by
                 Visually Impaired and Sighted People",
  journal =      j-TACCESS,
  volume =       "13",
  number =       "3",
  pages =        "9:1--9:39",
  month =        aug,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3399679",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Aug 28 12:06:18 MDT 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3399679",
  abstract =     "Being able to explore and familiarise themselves with
                 the structure and details of a cultural site before
                 actually visiting it is fundamental for orienting
                 visually impaired people during the visit; otherwise,
                 it is particularly difficult to gain a \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "9",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Gorlewicz:2020:DGR,
  author =       "Jenna L. Gorlewicz and Jennifer L. Tennison and P.
                 Merlin Uesbeck and Margaret E. Richard and Hari P.
                 Palani and Andreas Stefik and Derrick W. Smith and
                 Nicholas A. Giudice",
  title =        "Design Guidelines and Recommendations for Multimodal,
                 Touchscreen-based Graphics",
  journal =      j-TACCESS,
  volume =       "13",
  number =       "3",
  pages =        "10:1--10:30",
  month =        aug,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3403933",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Aug 28 12:06:18 MDT 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3403933",
  abstract =     "With content rapidly moving to the electronic space,
                 access to graphics for individuals with visual
                 impairments is a growing concern. Recent research has
                 demonstrated the potential for representing basic
                 graphical content on touchscreens using \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "10",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Giudice:2020:UIN,
  author =       "Nicholas A. Giudice and Benjamin A. Guenther and Toni
                 M. Kaplan and Shane M. Anderson and Robert J. Knuesel
                 and Joseph F. Cioffi",
  title =        "Use of an Indoor Navigation System by Sighted and
                 Blind Travelers: Performance Similarities across Visual
                 Status and Age",
  journal =      j-TACCESS,
  volume =       "13",
  number =       "3",
  pages =        "11:1--11:27",
  month =        aug,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3407191",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Aug 28 12:06:18 MDT 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3407191",
  abstract =     "This article first reviews the pros and cons of
                 current accessible indoor navigation systems and then
                 describes a study using commercial smart devices to
                 navigate routes through a complex building. Our
                 interest was in comparing performance when using
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "11",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Samanta:2020:VTE,
  author =       "Debasis Samanta and Tuhin Chakraborty",
  title =        "{VectorEntry}: Text Entry Mechanism Using Handheld
                 Touch-Enabled Mobile Devices for People with Visual
                 Impairments",
  journal =      j-TACCESS,
  volume =       "13",
  number =       "3",
  pages =        "12:1--12:29",
  month =        aug,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3406537",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Aug 28 12:06:18 MDT 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3406537",
  abstract =     "Mobile phones are now touch-enabled, which allows the
                 use of on-screen keyboards for text entry. Text entry
                 tasks are among the most frequently occurring tasks
                 performed by mobile phone users. However, people with
                 visual impairments find it difficult to \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "12",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Sharma:2020:STA,
  author =       "Sumita Sharma and Tero Avellan and Juhani Linna and
                 Krishnaveni Achary and Markku Turunen and Jaakko
                 Hakulinen and Blessin Varkey",
  title =        "Socio-Technical Aspirations for Children with Special
                 Needs: a Study in Two Locations --- {India} and
                 {Finland}",
  journal =      j-TACCESS,
  volume =       "13",
  number =       "3",
  pages =        "13:1--13:27",
  month =        aug,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3396076",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Aug 28 12:06:18 MDT 2020",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3396076",
  abstract =     "Society's expectations and assistance for children
                 with special needs are rooted in its cultural, societal,
                 and political backdrop. Previous work on the role of
                 culture on assistive or adaptive technology design for
                 children with special needs identified \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "13",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Ascari:2020:CVB,
  author =       "R{\'u}bia E. O. Schultz Ascari and Roberto Pereira and
                 Luciano Silva",
  title =        "Computer Vision-based Methodology to Improve
                 Interaction for People with Motor and Speech
                 Impairment",
  journal =      j-TACCESS,
  volume =       "13",
  number =       "4",
  pages =        "14:1--14:33",
  month =        oct,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3408300",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sun Mar 28 08:13:23 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3408300",
  abstract =     "Augmentative and Alternative Communication (AAC) aims
                 to complement or replace spoken language to compensate
                 for expression difficulties faced by people with speech
                 impairments. Computing systems have been developed to
                 support AAC; however, partially \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "14",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Madugalla:2020:CAO,
  author =       "Anuradha Madugalla and Kim Marriott and Simone Marinai
                 and Samuele Capobianco and Cagatay Goncu",
  title =        "Creating Accessible Online Floor Plans for Visually
                 Impaired Readers",
  journal =      j-TACCESS,
  volume =       "13",
  number =       "4",
  pages =        "15:1--15:37",
  month =        oct,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3410446",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sun Mar 28 08:13:23 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3410446",
  abstract =     "We present a generic model for providing blind and
                 severely vision-impaired readers with access to online
                 information graphics. The model supports fully and
                 semi-automatic transcription and allows the reader a
                 choice of presentation mediums. We evaluate \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "15",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Kim:2020:CTM,
  author =       "Sung-Hee Kim and Kailun Zhang and Joanna McGrenere and
                 Kellogg S. Booth and Claudia Jacova",
  title =        "A Comparison of Touchscreen and Mouse for Real-World
                 and Abstract Tasks with Older Adults",
  journal =      j-TACCESS,
  volume =       "13",
  number =       "4",
  pages =        "16:1--16:26",
  month =        oct,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3418057",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sun Mar 28 08:13:23 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3418057",
  abstract =     "Computer technology is increasingly being used to
                 facilitate the timely identification of cognitive
                 impairment in older adults. Our Cognitive Testing on
                 Computer (C-TOC) project aims to develop a
                 self-administered online test for older adults to take
                 at \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "16",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Lock:2020:EAS,
  author =       "Jacobus C. Lock and Iain D. Gilchrist and Grzegorz
                 Cielniak and Nicola Bellotto",
  title =        "Experimental Analysis of a Spatialised Audio Interface
                 for People with Visual Impairments",
  journal =      j-TACCESS,
  volume =       "13",
  number =       "4",
  pages =        "17:1--17:21",
  month =        oct,
  year =         "2020",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3412325",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sun Mar 28 08:13:23 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3412325",
  abstract =     "Sound perception is a fundamental skill for many
                 people with severe sight impairments. The research
                 presented in this article is part of an ongoing project
                 with the aim to create a mobile guidance aid to help
                 people with vision impairments find objects \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "17",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Guerreiro:2021:EEC,
  author =       "Tiago Guerreiro and Stephanie Ludi",
  title =        "Editorial from the {Editors-in-Chief}",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "1",
  pages =        "1:1--1:2",
  month =        apr,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3456772",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Apr 24 07:48:21 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3456772",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "1",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Mcgowan:2021:EUR,
  author =       "John J. Mcgowan and Iain Mcgregor and Gregory
                 Leplatre",
  title =        "Evaluation of the Use of Real-time {$3$D} Graphics to
                 Augment Therapeutic Music Sessions for Young People on
                 the Autism Spectrum",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "1",
  pages =        "2:1--2:41",
  month =        apr,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3445032",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Apr 24 07:48:21 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3445032",
  abstract =     "The present research evaluates the effectiveness of
                 CymaSense, a real-time 3D visualisation application
                 developed by the authors, as a means of improving the
                 communicative behaviours of autistic participants
                 through the addition of a visual modality \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "2",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Hair:2021:LET,
  author =       "Adam Hair and Kirrie J. Ballard and Constantina
                 Markoulli and Penelope Monroe and Jacqueline Mckechnie
                 and Beena Ahmed and Ricardo Gutierrez-Osuna",
  title =        "A Longitudinal Evaluation of Tablet-Based Child Speech
                 Therapy with Apraxia World",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "1",
  pages =        "3:1--3:26",
  month =        apr,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3433607",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Apr 24 07:48:21 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3433607",
  abstract =     "Digital games can make speech therapy exercises more
                 enjoyable for children and increase their motivation
                 during therapy. However, many such games developed to
                 date have not been designed for long-term use. To
                 address this issue, we developed Apraxia \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "3",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Andrade:2021:EMP,
  author =       "Ronny Andrade and Jenny Waycott and Steven Baker and
                 Frank Vetere",
  title =        "Echolocation as a Means for People with Visual
                 Impairment {(PVI)} to Acquire Spatial Knowledge of
                 Virtual Space",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "1",
  pages =        "4:1--4:25",
  month =        apr,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3448273",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Apr 24 07:48:21 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3448273",
  abstract =     "In virtual environments, spatial information is
                 communicated visually. This prevents people with visual
                 impairment (PVI) from accessing such spaces. In this
                 article, we investigate whether echolocation could be
                 used as a tool to convey spatial \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "4",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Borgos-Rodriguez:2021:MDI,
  author =       "Katya Borgos-Rodriguez and Maitraye Das and Anne Marie
                 Piper",
  title =        "{Melodie}: a Design Inquiry into Accessible Crafting
                 through Audio-enhanced Weaving",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "1",
  pages =        "5:1--5:30",
  month =        apr,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3444699",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Apr 24 07:48:21 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3444699",
  abstract =     "Despite the promise of the maker movement as
                 empowering individuals and democratizing design, people
                 with disabilities still face many barriers to
                 participation. Recent work has highlighted the
                 inaccessible nature of making and introduced more
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "5",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Bayor:2021:TCB,
  author =       "Andrew A. Bayor and Margot Brereton and Laurianne
                 Sitbon and Bernd Ploderer and Filip Bircanin and Benoit
                 Favre and Stewart Koplick",
  title =        "Toward a Competency-based Approach to Co-designing
                 Technologies with People with Intellectual Disability",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "2",
  pages =        "6:1--6:33",
  month =        jul,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3450355",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 10 13:55:32 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3450355",
  abstract =     "Ability-based design is a useful framework that
                 centralizes the abilities (all that users can do) of
                 people with disabilities in approaching the design of
                 assistive technologies. However, although this
                 framework aspires to support designing with people
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "6",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Bragg:2021:FLS,
  author =       "Danielle Bragg and Naomi Caselli and Julie A.
                 Hochgesang and Matt Huenerfauth and Leah Katz-Hernandez
                 and Oscar Koller and Raja Kushalnagar and Christian
                 Vogler and Richard E. Ladner",
  title =        "The {FATE} Landscape of Sign Language {AI} Datasets:
                 an Interdisciplinary Perspective",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "2",
  pages =        "7:1--7:45",
  month =        jul,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3436996",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 10 13:55:32 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3436996",
  abstract =     "Sign language datasets are essential to developing
                 many sign language technologies. In particular,
                 datasets are required for training artificial
                 intelligence (AI) and machine learning (ML) systems.
                 Though the idea of using AI/ML for sign languages is
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "7",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Kearney-Volpe:2021:AWD,
  author =       "Claire Kearney-Volpe and Amy Hurst",
  title =        "Accessible {Web} Development: Opportunities to Improve
                 the Education and Practice of web Development with a
                 Screen Reader",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "2",
  pages =        "8:1--8:32",
  month =        jul,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3458024",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 10 13:55:32 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3458024",
  abstract =     "There are a growing number of jobs related to web
                 development, yet there is little formal literature
                 about the accessibility of web development with a
                 screen reader. This article describes research to
                 explore (1) web development accessibility issues and
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "8",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Zhao:2021:CHC,
  author =       "Huan Zhao and Ashwaq Zaini Amat and Miroslava Migovich
                 and Amy Swanson and Amy S. Weitlauf and Zachary Warren
                 and Nilanjan Sarkar",
  title =        "{C-Hg}: a Collaborative Haptic-Gripper Fine Motor
                 Skill Training System for Children with Autism Spectrum
                 Disorder",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "2",
  pages =        "9:1--9:28",
  month =        jul,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3459608",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 10 13:55:32 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3459608",
  abstract =     "Computer-assisted systems can provide efficient and
                 engaging ASD intervention environments for children
                 with Autism Spectrum Disorder (ASD). However, most
                 existing computer-assisted systems target only one
                 skill deficit (e.g., social conversation skills).
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "9",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Arachchi:2021:EIS,
  author =       "Theja K. Arachchi and Laurianne Sitbon and Jinglan
                 Zhang and Ruwan Gamage and Priyantha Hewagamage",
  title =        "Enhancing {Internet} Search Abilities for People with
                 Intellectual Disabilities in {Sri Lanka}",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "2",
  pages =        "10:1--10:36",
  month =        jul,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3460202",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 10 13:55:32 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3460202",
  abstract =     "This article presents how young adults with
                 intellectual disability (ID) from Sri Lanka, who had
                 not previously used the Internet, interacted with
                 Google search while enhancing their web search
                 abilities throughout three web search workshops.
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "10",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Wood:2021:IBP,
  author =       "Rachel Wood and Emma Dixon and Salma Elsayed-Ali and
                 Ekta Shokeen and Amanda Lazar and Jonathan Lazar",
  title =        "Investigating Best Practices for Remote Summative
                 Usability Testing with People with Mild to Moderate
                 Dementia",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "3",
  pages =        "11:1--11:26",
  month =        sep,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3460942",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 31 06:12:07 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3460942",
  abstract =     "People with dementia may miss out on the benefits of
                 using technology, because they often find it difficult
                 to use. Usability testing is one method to identify
                 barriers and areas for improvement in technology.
                 Unfortunately, usability testing is often \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "11",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Bragg:2021:ELI,
  author =       "Danielle Bragg and Katharina Reinecke and Richard E.
                 Ladner",
  title =        "Expanding a Large Inclusive Study of Human Listening
                 Rates",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "3",
  pages =        "12:1--12:26",
  month =        sep,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3461700",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 31 06:12:07 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3461700",
  abstract =     "As conversational agents and digital assistants become
                 increasingly pervasive, understanding their synthetic
                 speech becomes increasingly important. Simultaneously,
                 speech synthesis is becoming more sophisticated and
                 manipulable, providing the \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "12",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Khan:2021:TDM,
  author =       "Al Majed Khan and Dr Mark D. Dunlop and Dr Marilyn
                 Lennon and Dr Mateusz Dubiel",
  title =        "Towards Designing Mobile Apps for Independent Travel:
                 Exploring Current Barriers and Opportunities for
                 Supporting Young Adults with {Down}'s Syndrome",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "3",
  pages =        "13:1--13:40",
  month =        sep,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3460943",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 31 06:12:07 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3460943",
  abstract =     "Performing daily independent activities can be a
                 challenge for people with Down's Syndrome (DS). This
                 article investigates how to better support these
                 activities with smart devices based on three cycles of
                 a collaborative participatory action research
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "13",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{VandenAbeele:2021:IVR,
  author =       "Vero {Vanden Abeele} and Brenda Schraepen and Hanne
                 Huygelier and Celine Gillebert and Kathrin Gerling and
                 Raymond {Van Ee}",
  title =        "Immersive Virtual Reality for Older Adults:
                 Empirically Grounded Design Guidelines",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "3",
  pages =        "14:1--14:30",
  month =        sep,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3470743",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 31 06:12:07 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3470743",
  abstract =     "Despite the proliferation of research on immersive
                 virtual reality (IVR) technologies for older adults,
                 comprehensive guidelines on designing immersive and
                 engaging VR for older adults remain sparse. Therefore,
                 we first compounded 67 guidelines based on \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "14",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Fink:2021:FAV,
  author =       "Paul D. S. Fink and Jessica A. Holz and Nicholas A.
                 Giudice",
  title =        "Fully Autonomous Vehicles for People with Visual
                 Impairment: Policy, Accessibility, and Future
                 Directions",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "3",
  pages =        "15:1--15:17",
  month =        sep,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3471934",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 31 06:12:07 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3471934",
  abstract =     "A significant number of individuals in the United
                 States report a disability that limits their ability to
                 travel, including many people who are blind or visually
                 impaired (BVI). The implications of restricted
                 transportation result in negative impacts \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "15",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Vargemidis:2021:IGS,
  author =       "Dimitri Vargemidis and Kathrin Gerling and Vero
                 {Vanden Abeele} and Luc Geurts and Katta Spiel",
  title =        "Irrelevant Gadgets or a Source of Worry: Exploring
                 Wearable Activity Trackers with Older Adults",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "3",
  pages =        "16:1--16:28",
  month =        sep,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3473463",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 31 06:12:07 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3473463",
  abstract =     "Wearable activity trackers are routinely applied in
                 physical activity (PA) interventions in late life, but
                 there is little research that focuses on older adults'
                 perspectives on the technology. We conducted a
                 qualitative study with 24 older persons to \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "16",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Binns:2021:HCE,
  author =       "Reuben Binns and Reuben Kirkham",
  title =        "How Could Equality and Data Protection Law Shape {AI}
                 Fairness for People with Disabilities?",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "3",
  pages =        "17:1--17:32",
  month =        sep,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3473673",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 31 06:12:07 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3473673",
  abstract =     "This article examines the concept of `AI fairness' for
                 people with disabilities from the perspective of data
                 protection and equality law. This examination
                 demonstrates that there is a need for a distinctive
                 approach to AI fairness that is fundamentally
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "17",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Shinohara:2021:ISI,
  author =       "Kristen Shinohara and Foad Hamidi",
  title =        "Introduction to the Special Issue on {ASSETS'19}",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "4",
  pages =        "18e:1--18e:2",
  month =        dec,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3486212",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Nov 3 09:35:32 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3486212",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "18e",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Hassan:2021:ESR,
  author =       "Saad Hassan and Oliver Alonzo and Abraham Glasser and
                 Matt Huenerfauth",
  title =        "Effect of Sign-recognition Performance on the
                 Usability of Sign-language Dictionary Search",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "4",
  pages =        "18:1--18:33",
  month =        dec,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3470650",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Nov 3 09:35:32 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3470650",
  abstract =     "Advances in sign-language recognition technology have
                 enabled researchers to investigate various methods that
                 can assist users in searching for an unfamiliar sign in
                 ASL using sign-recognition technology. Users can
                 generate a query by submitting a video \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "18",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Presti:2021:IDS,
  author =       "Giorgio Presti and Dragan Ahmetovic and Mattia Ducci
                 and Cristian Bernareggi and Luca A. Ludovico and
                 Adriano Barat{\`e} and Federico Avanzini and Sergio
                 Mascetti",
  title =        "Iterative Design of Sonification Techniques to Support
                 People with Visual Impairments in Obstacle Avoidance",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "4",
  pages =        "19:1--19:27",
  month =        dec,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3470649",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Nov 3 09:35:32 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3470649",
  abstract =     "Obstacle avoidance is a major challenge during
                 independent mobility for blind or visually impaired
                 (BVI) people. Typically, BVI people can only perceive
                 obstacles at a short distance (about 1 m, in case they
                 are using the white cane), and some obstacles
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "19",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Kafle:2021:DHH,
  author =       "Sushant Kafle and Becca Dingman and Matt Huenerfauth",
  title =        "Deaf and Hard-of-hearing Users Evaluating Designs for
                 Highlighting Key Words in Educational Lecture Videos",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "4",
  pages =        "20:1--20:24",
  month =        dec,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3470651",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Nov 3 09:35:32 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3470651",
  abstract =     "There are style guidelines for authors who highlight
                 important words in static text, e.g., bolded words in
                 student textbooks, yet little research has investigated
                 highlighting in dynamic texts, e.g., captions during
                 educational videos for Deaf or Hard of \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "20",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Kozma-Spytek:2021:FAA,
  author =       "Linda Kozma-Spytek and Christian Vogler",
  title =        "Factors Affecting the Accessibility of Voice Telephony
                 for People with Hearing Loss: Audio Encoding, Network
                 Impairments, Video and Environmental Noise",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "4",
  pages =        "21:1--21:35",
  month =        dec,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3479160",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Nov 3 09:35:32 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3479160",
  abstract =     "This paper describes four studies with a total of 114
                 individuals with hearing loss and 12 hearing controls
                 that investigate the impact of audio quality parameters
                 on voice telecommunications. These studies were first
                 informed by a survey of 439 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "21",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Ananthanarayana:2021:DLM,
  author =       "Tejaswini Ananthanarayana and Priyanshu Srivastava and
                 Akash Chintha and Akhil Santha and Brian Landy and
                 Joseph Panaro and Andre Webster and Nikunj Kotecha and
                 Shagan Sah and Thomastine Sarchet and Raymond Ptucha
                 and Ifeoma Nwogu",
  title =        "Deep Learning Methods for Sign Language Translation",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "4",
  pages =        "22:1--22:30",
  month =        dec,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3477498",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Nov 3 09:35:32 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3477498",
  abstract =     "Many sign languages are bona fide natural languages
                 with grammatical rules and lexicons hence can benefit
                 from machine translation methods. Similarly, since sign
                 language is a visual-spatial language, it can also
                 benefit from computer vision methods for \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "22",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Zheng:2021:CNI,
  author =       "Z. Kevin Zheng and Nandan Sarkar and Amy Swanson and
                 Amy Weitlauf and Zachary Warren and Nilanjan Sarkar",
  title =        "{CheerBrush}: a Novel Interactive Augmented Reality
                 Coaching System for Toothbrushing Skills in Children
                 with Autism Spectrum Disorder",
  journal =      j-TACCESS,
  volume =       "14",
  number =       "4",
  pages =        "23:1--23:20",
  month =        dec,
  year =         "2021",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3481642",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Nov 3 09:35:32 MDT 2021",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3481642",
  abstract =     "Autism Spectrum Disorder (ASD) is a common
                 neurodevelopmental disorder that impacts one in every
                 54 children in the United States. Some children with
                 ASD have learning and fine motor skill challenges that
                 contribute to difficulties completing daily living
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "23",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Jamieson:2022:SPA,
  author =       "Matthew Jamieson and Marilyn Lennon and Breda Cullen
                 and Stephen Brewster and Jonathan Evans",
  title =        "Supporting People with Acquired Brain Injury to Use a
                 Reminding App; Narrow-deep vs. Broad-shallow User
                 Interfaces",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "1",
  pages =        "1:1--1:23",
  month =        mar,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3501275",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Mar 25 07:59:59 MDT 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3501275",
  abstract =     "People with memory impairments following an acquired
                 brain injury stand to benefit from smartphone apps as
                 memory aids. Due, in part, to usability issues they use
                 smartphone-based reminding less than the general
                 population. Evidence suggests this group \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "1",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Adiani:2022:CIR,
  author =       "Deeksha Adiani and Aaron Itzkovitz and Dayi Bian and
                 Harrison Katz and Michael Breen and Spencer Hunt and
                 Amy Swanson and Timothy J. Vogus and Joshua Wade and
                 Nilanjan Sarkar",
  title =        "Career Interview Readiness in Virtual Reality
                 {(CIRVR)}: a Platform for Simulated Interview Training
                 for Autistic Individuals and Their Employers",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "1",
  pages =        "2:1--2:28",
  month =        mar,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3505560",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Mar 25 07:59:59 MDT 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3505560",
  abstract =     "Employment outcomes for autistic individuals are often
                 poorer relative to their neurotypical (NT) peers,
                 resulting in a greater need for other forms of
                 financial and social support. While a great deal of
                 work has focused on developing interventions for
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "2",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Pradhan:2022:DET,
  author =       "Debashish Pradhan and Tripti Rajput and Aravind Jembu
                 Rajkumar and Jonathan Lazar and Rajiv Jain and Vlad I.
                 Morariu and Varun Manjunatha",
  title =        "Development and Evaluation of a Tool for Assisting
                 Content Creators in Making {PDF} Files More
                 Accessible",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "1",
  pages =        "3:1--3:52",
  month =        mar,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3507661",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Mar 25 07:59:59 MDT 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3507661",
  abstract =     "Most PDF documents are inaccessible for people with
                 disabilities, creating barriers in education, science,
                 commerce, e-government, and recreation. Documents in
                 PDF format are considered harder to make accessible
                 than documents in other formats, primarily due to the
                 insufficient tools available to assist content
                 creators. In this article, we present the research and
                 development of Ally, a new tool to assist content
                 creators in remediating their PDF files to improve
                 accessibility. Ally utilizes best practices from other
                 areas of HCI research to create a more efficient and
                 effective interaction for remediating regions, headers,
                 reading order, and tables in a PDF document for
                 improved accessibility. Twenty participants attempted
                 to complete the same PDF accessibility remediation
                 tasks using both Ally and a standard industry tool,
                 Adobe Acrobat Pro. Ally was almost twice as fast and
                 three times as accurate compared to Acrobat Pro, with
                 participants reporting a strong preference for and a
                 much higher level of satisfaction with Ally. The
                 approaches taken in Ally improve the ability to create
                 accessible PDFs efficiently and accurately for the four
                 important aspects studied, but future work will need to
                 incorporate additional functionality, related to
                 remediating alt text, forms, and other aspects of PDF
                 accessibility.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "3",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Cooper:2022:DYH,
  author =       "Triston Cooper and Heather Lai and Jenna Gorlewicz",
  title =        "Do You Hear What {I} Hear: The Balancing Act of
                 Designing an Electronic Hockey Puck for Playing Hockey
                 Non-Visually",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "1",
  pages =        "4:1--4:29",
  month =        mar,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3507660",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Mar 25 07:59:59 MDT 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3507660",
  abstract =     "Blind hockey is a sport that is gaining popularity in
                 the United States after having an international
                 presence for years. In blind hockey, a modified puck is
                 used that emits sounds via ball bearings that rattle
                 inside the puck when it is moving. The \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "4",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Zhao:2022:IHI,
  author =       "Huan Zhao and Ashwaq Zaini Amat and Miroslava Migovich
                 and Amy Swanson and Amy S. Weitlauf and Zachary Warren
                 and Nilanjan Sarkar",
  title =        "{INC-Hg}: an Intelligent Collaborative Haptic-Gripper
                 Virtual Reality System",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "1",
  pages =        "5:1--5:23",
  month =        mar,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3487606",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Mar 25 07:59:59 MDT 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3487606",
  abstract =     "Collaborative Virtual Environments (CVE) have shown
                 potential to be an effective social skill training
                 platform for children with Autism Spectrum Disorders
                 (ASD) to learn and practice collaborative and
                 communication skills through peer interactions.
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "5",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Muehlbradt:2022:WAT,
  author =       "Annika Muehlbradt and Shaun K. Kane",
  title =        "What's in an {ALT} Tag? {Exploring} Caption Content
                 Priorities through Collaborative Captioning",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "1",
  pages =        "6:1--6:32",
  month =        mar,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3507659",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Mar 25 07:59:59 MDT 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3507659",
  abstract =     "Evaluating the quality of accessible image captions
                 with human raters is difficult, as it may be difficult
                 for a visually impaired user to know how comprehensive
                 a caption is, whereas a sighted assistant may not know
                 what information a user will need from a caption. To
                 explore how image captioners and caption consumers
                 assess caption content, we conducted a series of
                 collaborative captioning sessions in which six pairs,
                 consisting of a blind person and their sighted partner,
                 worked together to discuss, create, and evaluate image
                 captions. By making captioning a collaborative task, we
                 were able to observe captioning strategies, to elicit
                 questions and answers about image captions, and to
                 explore blind users' caption preferences. Our findings
                 provide insight about the process of creating good
                 captions and serve as a case study for cross-ability
                 collaboration between blind and sighted people.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "6",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Mountapmbeme:2022:AAB,
  author =       "Aboubakar Mountapmbeme and Obianuju Okafor and
                 Stephanie Ludi",
  title =        "Addressing Accessibility Barriers in Programming for
                 People with Visual Impairments: a Literature Review",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "1",
  pages =        "7:1--7:26",
  month =        mar,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3507469",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Mar 25 07:59:59 MDT 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3507469",
  abstract =     "Accessibility issues with programming languages and
                 programming environments pose a major barrier for
                 students with visual impairments to participate in
                 computing-related courses as well as threaten the
                 productivity of professional programmers with visual
                 impairments. To remedy this, the past two decades have
                 witnessed an increase in accessibility research
                 designed to investigate and address the challenges
                 faced by people with visual impairments while
                 programming or learning how to program. We conducted a
                 literature review of accessibility research in this
                 domain. The aim was to identify, aggregate, and
                 highlight known accessibility barriers to programming
                 faced by professional programmers and students with
                 visual impairments learning how to code as well as to
                 identify all solutions that have been proposed to
                 address these barriers. We selected and analyzed 70
                 papers reporting on accessibility of programming and
                 programming environments for people with visual
                 impairments. Numerous barriers to programming by people
                 with visual impairments have been identified in the
                 literature. Some of these barriers are understudied and
                 present opportunities for future work. A lot of studies
                 have also proposed tools and new accessible programming
                 languages to address the accessibility issues of
                 current programming languages and programming
                 environments.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "7",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Kouroupetroglou:2022:ISI,
  author =       "Georgios Kouroupetroglou and Hugo Nicolau",
  title =        "Introduction to the Special Issue on {ASSETS'20}",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "2",
  pages =        "8e:1--8e:2",
  month =        jun,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3542810",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Jun 4 08:18:27 MDT 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3542810",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "8e",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Tamburro:2022:CSC,
  author =       "Carla Tamburro and Timothy Neate and Abi Roper and
                 Stephanie Wilson",
  title =        "{Comic Spin}: a Comic Creation Tool Enabling
                 Self-expression for People with Aphasia",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "2",
  pages =        "8:1--8:27",
  month =        jun,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3508500",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Jun 4 08:18:27 MDT 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3508500",
  abstract =     "Comics, with their highly visual format, offer a
                 promising opportunity for people who experience
                 challenges with language to express humour and emotion.
                 However, comic creation tools are not designed to be
                 accessible to people with language impairments
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "8",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Ilsar:2022:IIE,
  author =       "Alon Ilsar and Gail Kenning and Sam Trolland and
                 Ciaran Frame",
  title =        "Inclusive Improvisation: Exploring the Line between
                 Listening and Playing Music",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "2",
  pages =        "9:1--9:21",
  month =        jun,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3506856",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Jun 4 08:18:27 MDT 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3506856",
  abstract =     "The field of Accessible Digital Musical Instruments
                 (ADMIs) is growing rapidly, with instrument designers
                 recognising that adaptations to existing Digital
                 Musical Instruments (DMIs) can foster inclusive music
                 making. ADMIs offer opportunities to engage \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "9",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Dixon:2022:UHS,
  author =       "Emma Dixon and Jesse Anderson and Amanda Lazar",
  title =        "Understanding How Sensory Changes Experienced by
                 Individuals with a Range of Age-Related Cognitive
                 Changes Can Effect Technology Use",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "2",
  pages =        "10:1--10:33",
  month =        jun,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3511906",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Jun 4 08:18:27 MDT 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3511906",
  abstract =     "Clinical researchers have identified sensory changes
                 people with age-related cognitive changes, such as
                 dementia and mild cognitive impairment, experience that
                 are different from typical age-related sensory changes.
                 Technology designers and researchers do \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "10",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Akter:2022:SPC,
  author =       "Taslima Akter and Tousif Ahmed and Apu Kapadia and
                 Manohar Swaminathan",
  title =        "Shared Privacy Concerns of the Visually Impaired and
                 Sighted Bystanders with Camera-Based Assistive
                 Technologies",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "2",
  pages =        "11:1--11:33",
  month =        jun,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3506857",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Jun 4 08:18:27 MDT 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3506857",
  abstract =     "Camera-based assistive technologies can provide people
                 with visual impairments (PVIs) visually derived
                 information about people in their vicinity.
                 Furthermore, the advent of smart glasses offers the
                 possibility of not only analyzing visual information in
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "11",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Lee:2022:AAR,
  author =       "Sooyeon Lee and Nelson Daniel Troncoso Aldas and
                 Chonghan Lee and Mary Beth Rosson and John M. Carroll
                 and Vijaykrishnan Narayanan",
  title =        "{AIGuide}: Augmented Reality Hand Guidance in a Visual
                 Prosthetic",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "2",
  pages =        "12:1--12:32",
  month =        jun,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3508501",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Jun 4 08:18:27 MDT 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3508501",
  abstract =     "Locating and grasping objects is a critical task in
                 people's daily lives. For people with visual
                 impairments, this task can be a daily struggle. The
                 support of augmented reality frameworks in smartphones
                 can overcome the limitations of current object
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "12",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Muller:2022:TMI,
  author =       "Karin M{\"u}ller and Christin Engel and Claudia
                 Loitsch and Rainer Stiefelhagen and Gerhard Weber",
  title =        "Traveling More Independently: a Study on the Diverse
                 Needs and Challenges of People with Visual or Mobility
                 Impairments in Unfamiliar Indoor Environments",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "2",
  pages =        "13:1--13:44",
  month =        jun,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3514255",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Jun 4 08:18:27 MDT 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3514255",
  abstract =     "It is much more difficult for people with visual or
                 mobility impairments to prepare for a trip or visit
                 unfamiliar places than it is for people without
                 disabilities. In addition to the usual travel
                 arrangements, one needs to know if the various parts of
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "13",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Lee:2022:CTA,
  author =       "Hae-Na Lee and Vikas Ashok",
  title =        "Customizable Tabular Access to {Web} Data Records for
                 Convenient Low-vision Screen Magnifier Interaction",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "2",
  pages =        "14:1--14:22",
  month =        jun,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3517044",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Jun 4 08:18:27 MDT 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3517044",
  abstract =     "To interact with webpages, people with low vision
                 typically rely on screen magnifier assistive technology
                 that enlarges screen content and also enables them to
                 pan the content to view the different portions of a
                 webpage. This back-and-forth panning \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "14",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Stangl:2022:PCV,
  author =       "Abigale Stangl and Kristina Shiroma and Nathan Davis
                 and Bo Xie and Kenneth R. Fleischmann and Leah
                 Findlater and Danna Gurari",
  title =        "Privacy Concerns for Visual Assistance Technologies",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "2",
  pages =        "15:1--15:43",
  month =        jun,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3517384",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Jun 4 08:18:27 MDT 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3517384",
  abstract =     "People who are blind share their images and videos
                 with companies that provide visual assistance
                 technologies (VATs) to gain access to information about
                 their surroundings. A challenge is that people who are
                 blind cannot independently validate the content
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "15",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Alonzo:2022:RAT,
  author =       "Oliver Alonzo and Lisa Elliot and Becca Dingman and
                 Sooyeon Lee and Akhter {Al Amin} and Matt Huenerfauth",
  title =        "Reading-Assistance Tools Among Deaf and
                 Hard-of-Hearing Computing Professionals in the {U.S.}:
                 Their Reading Experiences, Interests and Perceptions of
                 Social Accessibility",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "2",
  pages =        "16:1--16:31",
  month =        jun,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3520198",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Jun 4 08:18:27 MDT 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3520198",
  abstract =     "Automatic Text Simplification (ATS) software aims at
                 automatically rewrite complex text to make it simpler
                 to read. Prior research has explored the use of ATS as
                 a reading assistance technology, identifying benefits
                 from providing these technologies to \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "16",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Phutane:2022:TMP,
  author =       "Mahika Phutane and Julie Wright and Brenda Veronica
                 Castro and Lei Shi and Simone R. Stern and Holly M.
                 Lawson and Shiri Azenkot",
  title =        "Tactile Materials in Practice: Understanding the
                 Experiences of Teachers of the Visually Impaired",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "3",
  pages =        "17:1--17:??",
  month =        sep,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3508364",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Nov 11 13:34:42 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3508364",
  abstract =     "Teachers of the visually impaired (TVIs) regularly
                 present tactile materials (tactile graphics, 3D models,
                 and real objects) to students with vision impairments.
                 Researchers have been increasingly interested in
                 designing tools to support the use of \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "17",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Andrade:2022:PDA,
  author =       "Ronny Andrade and Steven Baker and Jenny Waycott and
                 Frank Vetere",
  title =        "A Participatory Design Approach to Creating
                 Echolocation-Enabled Virtual Environments",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "3",
  pages =        "18:1--18:??",
  month =        sep,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3516448",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Nov 11 13:34:42 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3516448",
  abstract =     "As virtual environments-in the form of videogames and
                 augmented and virtual reality experiences-become more
                 popular, it is important to ensure that they are
                 accessible to all. Previous research has identified
                 echolocation as a useful interaction approach
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "18",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Barbosa:2022:EWP,
  author =       "Nat{\~a} M. Barbosa and Jordan Hayes and Smirity
                 Kaushik and Yang Wang",
  title =        "{``Every Website Is a Puzzle!''}: Facilitating Access
                 to Common Website Features for People with Visual
                 Impairments",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "3",
  pages =        "19:1--19:??",
  month =        sep,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3519032",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Nov 11 13:34:42 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3519032",
  abstract =     "Navigating unfamiliar websites is challenging for
                 users with visual impairments. Although many websites
                 offer visual cues to facilitate access to
                 pages/features most websites are expected to have
                 (e.g., log in at the top right), such visual shortcuts
                 are \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "19",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Hoogsteen:2022:BCD,
  author =       "Karst M. P. Hoogsteen and Sarit Szpiro and Gabriel
                 Kreiman and Eli Peli",
  title =        "Beyond the Cane: Describing Urban Scenes to Blind
                 People for Mobility Tasks",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "3",
  pages =        "20:1--20:??",
  month =        sep,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3522757",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Nov 11 13:34:42 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3522757",
  abstract =     "Blind people face difficulties with independent
                 mobility, impacting employment prospects, social
                 inclusion, and quality of life. Given the advancements
                 in computer vision, with more efficient and effective
                 automated information extraction from visual \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "20",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Balasuriya:2022:SWP,
  author =       "Saminda Sundeepa Balasuriya and Laurianne Sitbon and
                 Margot Brereton",
  title =        "A Support Worker Perspective on Use of New
                 Technologies by People with Intellectual Disabilities",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "3",
  pages =        "21:1--21:??",
  month =        sep,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3523058",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Nov 11 13:34:42 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3523058",
  abstract =     "People with intellectual disability access innovative
                 technologies in disability community centres in
                 Australia, under the guidance of support workers. This
                 article investigates the perspectives of 15 support
                 workers and 5 managers across four community \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "21",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Biswas:2022:ATS,
  author =       "Pradipta Biswas and Pilar Orero and Kavita
                 Krishnaswamy and Swaminathan Manohar and Peter
                 Robinson",
  title =        "{ACM TACCESS} Special Issue on Adaptive Inclusive
                 {AR\slash VR} Systems",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "3",
  pages =        "22:1--22:??",
  month =        sep,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3561517",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Nov 11 13:34:42 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3561517",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "22",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Yamagami:2022:TOD,
  author =       "Momona Yamagami and Sasa Junuzovic and Mar
                 Gonzalez-Franco and Eyal Ofek and Edward Cutrell and
                 John R. Porter and Andrew D. Wilson and Martez E.
                 Mott",
  title =        "Two-In-One: a Design Space for Mapping Unimanual Input
                 into Bimanual Interactions in {VR} for Users with
                 Limited Movement",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "3",
  pages =        "23:1--23:??",
  month =        sep,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3510463",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Nov 11 13:34:42 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3510463",
  abstract =     "Virtual Reality (VR) applications often require users
                 to perform actions with two hands when performing tasks
                 and interacting with objects in virtual environments.
                 Although bimanual interactions in VR can resemble
                 real-world interactions, thus increasing \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "23",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Williams:2022:MME,
  author =       "Rua M. Williams and Kiana Alikhademi and Imani N. S.
                 Munyaka and Juan E. Gilbert",
  title =        "{MetaCogs}: Mitigating Executive Dysfunction via
                 Agent-based Modeling for Metacognitive Strategy
                 Development",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "3",
  pages =        "24:1--24:??",
  month =        sep,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3514254",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Nov 11 13:34:42 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3514254",
  abstract =     "Executive functions (EF) are a collection of cognitive
                 domains governing task initiation, motor planning,
                 attention, and goal-oriented action. Difficulties with
                 EF have marked impacts on adaptive living skills,
                 learning outcomes, and quality of life for \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "24",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Li:2022:SRA,
  author =       "Yifan Li and Kangsoo Kim and Austin Erickson and Nahal
                 Norouzi and Jonathan Jules and Gerd Bruder and Gregory
                 F. Welch",
  title =        "A Scoping Review of Assistance and Therapy with
                 Head-Mounted Displays for People Who Are Visually
                 Impaired",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "3",
  pages =        "25:1--25:??",
  month =        sep,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3522693",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Nov 11 13:34:42 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3522693",
  abstract =     "Given the inherent visual affordances of Head-Mounted
                 Displays (HMDs) used for Virtual and Augmented Reality
                 (VR/AR), they have been actively used over many years
                 as assistive and therapeutic devices for people who
                 are visually impaired. In this \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "25",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Duval:2022:DSC,
  author =       "Jared Duval and Rutul Thakkar and Delong Du and
                 Kassandra Chin and Sherry Luo and Aviv Elor and Magy
                 Seif El-Nasr and Michael John",
  title =        "Designing Spellcasters from Clinician Perspectives: a
                 Customizable Gesture-Based Immersive Virtual Reality
                 Game for Stroke Rehabilitation",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "3",
  pages =        "26:1--26:??",
  month =        sep,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3530820",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Nov 11 13:34:42 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3530820",
  abstract =     "Developing games is time-consuming and costly. Overly
                 clinical therapy games run the risk of being boring,
                 which defeats the purpose of using games to motivate
                 healing in the first place [10, 23]. In this work,
                 we adapt and repurpose an existing immersive \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "26",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Sharma:2022:CTS,
  author =       "Vinay Krishna Sharma and L. R. D. Murthy and Pradipta
                 Biswas",
  title =        "Comparing Two Safe Distance Maintenance Algorithms for
                 a Gaze-Controlled {HRI} Involving Users with {SSMI}",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "3",
  pages =        "27:1--27:??",
  month =        sep,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3530822",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Nov 11 13:34:42 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3530822",
  abstract =     "People with severe speech and motor impairment often
                 find it difficult to manipulate physical objects due to
                 spasticity and have familiarity with eye pointing based
                 communication. This article presents a novel eye gaze
                 controlled augmented reality human-. \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "27",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Aziz:2022:PYJ,
  author =       "Nida Aziz and Tony Stockman and Rebecca Stewart",
  title =        "Planning Your Journey in Audio: Design and Evaluation
                 of Auditory Route Overviews",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "4",
  pages =        "28:1--28:??",
  month =        dec,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3531529",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Nov 12 07:08:22 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3531529",
  abstract =     "Auditory overviews of routes can provide routing and
                 map information to blind users, enabling them to preview
                 route maps before embarking on a journey. This article
                 investigates the usefulness of a system designed to do
                 this through a Preliminary Survey, \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "28",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Zhang:2022:IOL,
  author =       "Han Zhang and Margaret Morris and Paula Nurius and
                 Kelly Mack and Jennifer Brown and Kevin Kuehn and
                 Yasaman Sefidgar and Xuhai Xu and Eve Riskin and Anind
                 Dey and Jennifer Mankoff",
  title =        "Impact of Online Learning in the Context of {COVID-19}
                 on Undergraduates with Disabilities and Mental Health
                 Concerns",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "4",
  pages =        "29:1--29:??",
  month =        dec,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3538514",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Nov 12 07:08:22 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3538514",
  abstract =     "The COVID-19 pandemic upended college education and
                 the experiences of students due to the rapid and uneven
                 shift to online learning. This study examined the
                 experiences of students with disabilities with online
                 learning, with a consideration of \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "29",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Lottridge:2022:AUT,
  author =       "Danielle Lottridge and Chris Yoon and Darren Burton
                 and Chester Wang and Jofish Kaye",
  title =        "Ally: Understanding Text Messaging to Build a Better
                 Onscreen Keyboard for Blind People",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "4",
  pages =        "30:1--30:??",
  month =        dec,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3533707",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Nov 12 07:08:22 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3533707",
  abstract =     "Millions of people worldwide use smartphones every
                 day, but the standard-issue QWERTY keyboard is poorly
                 optimized for non-sighted input. In this article, we
                 document the variety of methods blind people use to
                 enter text into their smartphones, and focus \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "30",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Savage:2022:GCE,
  author =       "Saiph Savage and Claudia Flores-Saviaga and Rachel
                 Rodney and Liliana Savage and Jon Schull and Jennifer
                 Mankoff",
  title =        "The Global Care Ecosystems of {$3$D} Printed Assistive
                 Devices",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "4",
  pages =        "31:1--31:??",
  month =        dec,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3537676",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Nov 12 07:08:22 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3537676",
  abstract =     "The popularity of 3D printed assistive technology has
                 led to the emergence of new ecosystems of care, where
                 multiple stakeholders (makers, clinicians, and
                 recipients with disabilities) work toward creating new
                 upper limb prosthetic devices. However, \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "31",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Hersh:2022:RDS,
  author =       "Marion Hersh and Alejandro Rafael Garcia Ramirez",
  title =        "Route Descriptions, Spatial Knowledge and Spatial
                 Representations of Blind and Partially Sighted People:
                 Improved Design of Electronic Travel Aids",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "4",
  pages =        "32:1--32:??",
  month =        dec,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3549077",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Nov 12 07:08:22 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3549077",
  abstract =     "The results presented here were obtained from an
                 experimental study of blind people's experiences on two
                 routes with very different characteristics. They are
                 intended to answer three research questions on how
                 blind people identify environmental features \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "32",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Menzies:2022:ARC,
  author =       "Rachel Menzies and Garreth W. Tigwell and Michael
                 Crabb",
  title =        "Author Reflections on Creating Accessible Academic
                 Papers",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "4",
  pages =        "33:1--33:??",
  month =        dec,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3546195",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Nov 12 07:08:22 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3546195",
  abstract =     "Academic papers demonstrate inaccessibility despite
                 accessible writing resources made available by
                 SIGACCESS and others. The move from accessibility
                 guidance to accessibility implementation is challenging
                 for authors. Our work focuses on understanding
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "33",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Nolte:2022:IAB,
  author =       "Amelie Nolte and Jacob Wobbrock and Torben Volkmann
                 and Nicole Jochems",
  title =        "Implementing Ability-Based Design: a Systematic
                 Approach to Conceptual User Modeling",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "4",
  pages =        "34:1--34:??",
  month =        dec,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3551646",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Nov 12 07:08:22 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3551646",
  abstract =     "The notion of Ability-Based Design, put forth by
                 Wobbrock et al. [80, 82] as a solution to the
                 challenge of creating accessible technology, has been
                 discussed in human-computer interaction research now
                 for over a decade. However, despite being cited as
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "34",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Yamagami:2022:IJO,
  author =       "Momona Yamagami and Kelly Mack and Jennifer Mankoff
                 and Katherine M. Steele",
  title =        "{``I'm Just Overwhelmed''}: Investigating Physical
                 Therapy Accessibility and Technology Interventions for
                 People with Disabilities and\slash or Chronic
                 Conditions",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "4",
  pages =        "35:1--35:??",
  month =        dec,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3563396",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Nov 12 07:08:22 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3563396",
  abstract =     "Many individuals with disabilities and/or chronic
                 conditions (da/cc) experience symptoms that may require
                 intermittent or on-going medical care. However,
                 healthcare is an often-overlooked domain for
                 accessibility work, where access needs associated with
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "35",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Prietch:2022:SRU,
  author =       "Soraia Prietch and J. Alfredo S{\'a}nchez and Josefina
                 Guerrero",
  title =        "A Systematic Review of User Studies as a Basis for the
                 Design of Systems for Automatic Sign Language
                 Processing",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "4",
  pages =        "36:1--36:??",
  month =        dec,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3563395",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Nov 12 07:08:22 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3563395",
  abstract =     "Deaf persons, whether or not they are sign language
                 users, make up one of various existing marginalized
                 populations that historically have been socially and
                 politically underrepresented. Unfortunately, this also
                 happens in technology design. Conducting \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "36",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Pruszynska:2022:TET,
  author =       "Magdalena Pruszy{\'n}ska and Marta
                 Milewska-Jedrzejczak and Igor Bednarski and Piotr
                 Szpakowski and Andrzej G{\l}abi{\'n}ski and S{\l}awomir
                 Konrad Tadeja",
  title =        "Towards Effective Telerehabilitation: Assessing
                 Effects of Applying Augmented Reality in Remote
                 Rehabilitation of Patients Suffering from Multiple
                 Sclerosis",
  journal =      j-TACCESS,
  volume =       "15",
  number =       "4",
  pages =        "37:1--37:??",
  month =        dec,
  year =         "2022",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3560822",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Nov 12 07:08:22 MST 2022",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3560822",
  abstract =     "Multiple Sclerosis (MS) is a chronic, incurable
                 disease of the central nervous system that is also one
                 of the most common causes of disability among young
                 adults. Despite available pharmacological treatments,
                 the patients often require ongoing, supervised
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "37",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Lee:2023:AUU,
  author =       "Yun Jung Lee and Hwayeon Joh and Suhyeon Yoo and Uran
                 Oh",
  title =        "{AccessComics2}: Understanding the User Experience of
                 an Accessible Comic Book Reader for Blind People with
                 Textual Sound Effects",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "1",
  pages =        "2:1--2:??",
  month =        mar,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3555720",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Apr 5 09:56:07 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3555720",
  abstract =     "For people with visual impairments, many studies have
                 been conducted to improve the accessibility of various
                 types of images on the web. However, the majority of
                 the work focused on photos or graphs. In this study, we
                 propose AccessComics, an accessible \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "2",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Manca:2023:TAW,
  author =       "Marco Manca and Vanessa Palumbo and Fabio Patern{\`o}
                 and Carmen Santoro",
  title =        "The Transparency of Automatic {Web} Accessibility
                 Evaluation Tools: Design Criteria, State of the Art,
                 and User Perception",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "1",
  pages =        "3:1--3:??",
  month =        mar,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3556979",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Apr 5 09:56:07 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3556979",
  abstract =     "Several Web accessibility evaluation tools have been
                 put forward to reduce the burden of identifying
                 accessibility barriers for users, especially those with
                 disabilities. One common issue in using accessibility
                 evaluation tools in practice is that the \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "3",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Fan:2023:ADV,
  author =       "Danyang Fan and Alexa Fay Siu and Hrishikesh Rao and
                 Gene Sung-Ho Kim and Xavier Vazquez and Lucy Greco and
                 Sile O'Modhrain and Sean Follmer",
  title =        "The Accessibility of Data Visualizations on the {Web}
                 for Screen Reader Users: Practices and Experiences
                 During {COVID-19}",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "1",
  pages =        "4:1--4:??",
  month =        mar,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3557899",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Apr 5 09:56:07 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3557899",
  abstract =     "Data visualization has become an increasingly
                 important means of effective data communication and has
                 played a vital role in broadcasting the progression of
                 COVID-19. Accessible data representations, however,
                 have lagged behind, leaving areas of information out of
                 reach for many blind and visually impaired (BVI) users.
                 In this work, we sought to understand (1) the
                 accessibility of current implementations of
                 visualizations on the web; (2) BVI users' preferences
                 and current experiences when accessing data-driven
                 media; (3) how accessible data representations on the
                 web address these users' access needs and help them
                 navigate, interpret, and gain insights from the data;
                 and (4) the practical challenges that limit BVI users'
                 access and use of data representations. To answer these
                 questions, we conducted a mixed-methods study
                 consisting of an accessibility audit of 87 data
                 visualizations on the web to identify accessibility
                 issues, an online survey of 127 screen reader users to
                 understand lived experiences and preferences, and a
                 remote contextual inquiry with 12 of the survey
                 respondents to observe how they navigate, interpret,
                 and gain insights from accessible data representations.
                 Our observations during this critical period of time
                 provide an understanding of the widespread
                 accessibility issues encountered across online data
                 visualizations, the impact that data accessibility
                 inequities have on the BVI community, the ways screen
                 reader users sought access to data-driven information
                 and made use of online visualizations to form insights,
                 and the pressing need to make larger strides towards
                 improving data literacy, building confidence, and
                 enriching methods of access. Based on our findings, we
                 provide recommendations for researchers and
                 practitioners to broaden data accessibility on the
                 web.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "4",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Ahmetovic:2023:WSA,
  author =       "Dragan Ahmetovic and Cristian Bernareggi and Barbara
                 Leporini and Sergio Mascetti",
  title =        "{WordMelodies}: Supporting the Acquisition of Literacy
                 Skills by Children with Visual Impairment through a
                 Mobile App",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "1",
  pages =        "5:1--5:??",
  month =        mar,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3565029",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Apr 5 09:56:07 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3565029",
  abstract =     "WordMelodies is a mobile app that aims to support
                 inclusive teaching of literacy skills for primary
                 school students. Thus it was designed to be accessible
                 both visually and through screen reader, and it
                 includes over 80 different types of exercises for
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "5",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Iniesto:2023:CSC,
  author =       "Francisco Iniesto and Tim Coughlan and Kate Lister and
                 Peter Devine and Nick Freear and Richard Greenwood and
                 Wayne Holmes and Ian Kenny and Kevin McLeod and Ruth
                 Tudor",
  title =        "Creating `a Simple Conversation': Designing a
                 Conversational User Interface to Improve the Experience
                 of Accessing Support for Study",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "1",
  pages =        "6:1--6:??",
  month =        mar,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3568166",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Apr 5 09:56:07 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3568166",
  abstract =     "Administrative processes are ubiquitous in modern life
                 and have been identified as a particular burden to
                 those with accessibility needs. Students who have
                 accessibility needs often have to understand guidance,
                 fill in complex forms, and communicate with \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "6",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Leporini:2023:VCT,
  author =       "Barbara Leporini and Marina Buzzi and Marion Hersh",
  title =        "Video Conferencing Tools: Comparative Study of the
                 Experiences of Screen Reader Users and the Development
                 of More Inclusive Design Guidelines",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "1",
  pages =        "7:1--7:??",
  month =        mar,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3573012",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Apr 5 09:56:07 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3573012",
  abstract =     "Since the first lockdown in 2020, video conferencing
                 tools have become increasingly important for
                 employment, education, and social interaction, making
                 them essential tools in everyday life. This study
                 investigates the accessibility and usability of the
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "7",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Kearney-Volpe:2023:TPT,
  author =       "Claire Kearney-Volpe and Chancey Fleet and Keita
                 Ohshiro and Veronica Alfaro Arias and Eric Hao Xu and
                 Amy Hurst",
  title =        "Tangible Progress: Tools, Techniques, and Impacts of
                 Teaching {Web} Development to Screen Reader Users",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "1",
  pages =        "8:1--8:??",
  month =        mar,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3585315",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Apr 5 09:56:07 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3585315",
  abstract =     "Despite a growing demand for Web Development and
                 adjacent tech skills, there is a lack of accessible
                 skills training for screen reader users. To address
                 this gap, we developed tools and techniques to support
                 screen reader users in learning web \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "8",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Putnam:2023:ICB,
  author =       "Cynthia Putnam and Emma J. Rose and Craig M.
                 MacDonald",
  title =        "{``It could be better. It could be much worse''}:
                 Understanding Accessibility in User Experience Practice
                 with Implications for Industry and Education",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "1",
  pages =        "9:1--9:??",
  month =        mar,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3575662",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Apr 5 09:56:07 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3575662",
  abstract =     "While accessibility is acknowledged as a crucial
                 component in design, many technologies remain
                 inaccessible for people with disabilities. As part of a
                 study to better understand UX practice to inform
                 pedagogy, we analyzed 58 interview sessions that
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "9",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Pandey:2023:ARH,
  author =       "Vineet Pandey and Nergis C. Khan and Anoopum S. Gupta
                 and Krzysztof Z. Gajos",
  title =        "Accuracy and Reliability of At-Home Quantification of
                 Motor Impairments Using a Computer-Based Pointing Task
                 with Children with Ataxia-Telangiectasia",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "1",
  pages =        "10:1--10:??",
  month =        mar,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3581790",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Apr 5 09:56:07 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3581790",
  abstract =     "Methods for obtaining accurate quantitative
                 assessments of motor impairments are essential in
                 accessibility research, design of adaptive
                 ability-based assistive technologies, as well as in
                 clinical care and medical research. Currently, such
                 assessments \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "10",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Dai:2023:ESS,
  author =       "Jiamin Dai and Karyn Moffatt",
  title =        "Enriching Social Sharing for the Dementia Community:
                 Insights from In-Person and Online Social Programs",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "1",
  pages =        "11:1--11:??",
  month =        mar,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3582558",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Apr 5 09:56:07 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3582558",
  abstract =     "The dementia community faces major challenges in
                 social engagements, which have been further complicated
                 by the prolonged physical distancing measures due to
                 the COVID-19 pandemic. Designing digital tools for
                 in-person social sharing in family and care \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "11",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Kruger:2023:MCB,
  author =       "Rynhardt Kruger and Febe {De Wet} and Thomas Niesler",
  title =        "Mathematical Content Browsing for Print-disabled
                 Readers Based on Virtual-world Exploration and
                 Audio-visual Sensory Substitution",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "2",
  pages =        "12:1--12:??",
  month =        jun,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3584365",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Aug 30 11:07:16 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3584365",
  abstract =     "Documents containing mathematical content remain
                 largely inaccessible to blind and visually impaired
                 readers because they are predominantly published as
                 untagged PDFs, which do not include the semantic data
                 necessary for effective accessibility. Equations in
                 such documents consist of text interlaced with lines
                 and other graphical elements and cannot be interpreted
                 using a screen reader. We present a browsing approach
                 for print-disabled readers specifically aimed at such
                 mathematical content. This approach draws on the
                 navigational mechanisms often used to explore the
                 virtual worlds of text adventure games with
                 audio-visual sensory substitution for graphical
                 content. The relative spatial placement of the elements
                 of an equation is represented as a virtual world so
                 the reader can navigate between elements. Text elements
                 are announced conventionally using synthesised speech,
                 while graphical elements, such as roots and fraction
                 lines, are rendered using a modification of the vOICe
                 algorithm. The virtual world allows the reader to
                 interactively discover the spatial structure of the
                 equation, while the rendition of graphical elements as
                 sound allows the shape and identity of elements that
                 cannot be synthesised as speech to be discovered and
                 recognised. The browsing approach was evaluated by 11
                 blind and 14 sighted participants in a user trial that
                 included identifying twelve equations extracted from
                 PDF documents. Overall, equations were identified
                 completely correctly in 78\% of cases (74\% and 83\%,
                 respectively, for blind and sighted subjects). If
                 partial correctness is considered, then the performance
                 is substantially higher. Feedback from the blind
                 subjects indicated that the technique allows spatial
                 information and graphical detail to be discovered. We
                 conclude that the integration of a spatial model
                 represented as a virtual world in conjunction with
                 audio-visual sensory substitution for non-textual
                 elements can be an effective way for blind and visually
                 impaired readers to read currently inaccessible
                 mathematical content in PDF documents.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "12",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Trinh:2023:EEM,
  author =       "Viet Trinh and Roberto Manduchi and Nicholas A.
                 Giudice",
  title =        "Experimental Evaluation of Multi-scale Tactile Maps
                 Created with {SIM}, a {Web} App for Indoor Map
                 Authoring",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "2",
  pages =        "13:1--13:??",
  month =        jun,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3590775",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Aug 30 11:07:16 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3590775",
  abstract =     "In this article, we introduce Semantic Interior
                 Mapology (SIM), a web app that allows anyone to quickly
                 trace the floor plan of a building, generating a
                 vectorized representation \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "13",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Campos:2023:MGA,
  author =       "Virg{\'{\i}}nia P. Campos and Luiz M. G.
                 Gon{\c{c}}alves and Wesnydy L. Ribeiro and Tiago M. U.
                 Ara{\'u}jo and Tha{\'{\i}}s G. {Do Rego} and Pedro H.
                 V. Figueiredo and Suanny F. S. Vieira and Thiago F. S.
                 Costa and Caio C. Moraes and Alexandre C. S. Cruz and
                 Felipe A. Ara{\'u}jo and Guido L. Souza Filho",
  title =        "Machine Generation of Audio Description for Blind and
                 Visually Impaired People",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "2",
  pages =        "14:1--14:??",
  month =        jun,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3590955",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Aug 30 11:07:16 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3590955",
  abstract =     "Automating the generation of audio descriptions (AD)
                 for blind and visually impaired (BVI) people is a
                 difficult task, since it has several challenges
                 involved, such as: identifying gaps in dialogues;
                 describing the essential elements; summarizing and
                 fitting the descriptions into the dialogue gaps;
                 generating an AD narration track, and synchronizing it
                 with the main soundtrack. In our previous work (Campos
                 et al. [6]), we proposed a solution for automatic AD
                 script generation, named CineAD, which uses the movie's
                 script as a basis for the AD generation. This article
                 proposes extending this solution to complement the
                 information extracted from the script and reduce its
                 dependency based on the classification of visual
                 information from the video. To assess the viability of
                 the proposed solution, we implemented a proof of
                 concept of the solution and evaluated it with 11 blind
                 users. The results showed that the solution could
                 generate a more succinct and objective AD but with a
                 similar level of user understanding compared to our
                 previous work. Thus, the solution can provide relevant
                 information to blind users using less video time for
                 descriptions.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "14",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Guerreiro:2023:ISI,
  author =       "Jo{\~a}o Guerreiro",
  title =        "Introduction to the Special Issue on {ASSETS'21}",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "2",
  pages =        "15:1--15:??",
  month =        jun,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3605947",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Aug 30 11:07:16 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3605947",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "15",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Venkatasubramanian:2023:DPA,
  author =       "Krishna Venkatasubramanian and Tina-Marie Ranalli and
                 Jack Lanoie and Alexander Sinapi and Andrew Laraw Lama
                 and Jeanine Skorinko and Mariah Freark and Nancy
                 Alterio",
  title =        "The Design and Prototyping of an App to Teach Adults
                 with Intellectual and Developmental Disabilities to
                 Empower Them Against Abuse",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "2",
  pages =        "16:1--16:??",
  month =        jun,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3569585",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Aug 30 11:07:16 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3569585",
  abstract =     "In the United States, the abuse of individuals with
                 intellectual and developmental disabilities (I/DD) is
                 at epidemic proportions. However, the reporting of such
                 abuse has been \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "16",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Momotaz:2023:UUL,
  author =       "Farhani Momotaz and Md Ehtesham-Ul-Haque and Syed
                 Masum Billah",
  title =        "Understanding the Usages, Lifecycle, and Opportunities
                 of Screen Readers' Plugins",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "2",
  pages =        "17:1--17:??",
  month =        jun,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3582697",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Aug 30 11:07:16 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3582697",
  abstract =     "Screen reader plugins are small pieces of code that
                 blind users can download and install to enhance the
                 capabilities of their screen readers. This article aims
                 to understand why blind users use these plugins, as
                 well as how these plugins are developed, deployed, and
                 maintained. To this end, we conducted an interview
                 study with 14 blind users to gain individual
                 perspectives and analyzed 2,000 online posts scraped
                 from three plugin-related forums to gain the community
                 perspective. Our study revealed that screen reader
                 users rely on plugins for various reasons, such as to
                 improve the usability of screen readers and application
                 software, to make partially accessible applications
                 accessible, and to receive custom auditory feedback.
                 Furthermore, installing plugins is easy; uninstalling
                 them is unlikely; and finding them online is ad hoc,
                 challenging, and sometimes poses security threats. In
                 addition, developing screen reader plugins is
                 technically demanding; only a handful of people develop
                 plugins. Unfortunately, most plugins do not receive
                 updates once distributed and become obsolete. The lack
                  of financial incentives plays a role in the slow growth
                  of the plugin ecosystem. Further, we outlined the complex,
                 tripartite collaboration among individual blind users,
                 their online communities, and developer communities in
                 creating a plugin. Additionally, we reported several
                 phenomena within and between these communities that are
                 likely to influence a plugin's development. Based on
                 our findings, we recommend creating a community-driven
                 repository for all plugins hosted on a peer-to-peer
                 infrastructure, engaging third-party developers, and
                 raising general awareness about the benefits and
                 dangers of plugins. We believe our findings will
                 inspire HCI researchers to embrace the plugin-based
                 distribution model as an effective way to combat
                 accessibility and usability problems in non-visual
                 interaction and to investigate potential ways to
                 improve the collaboration between blind users and
                 developer communities.",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "17",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Edwards:2023:HAT,
  author =       "Emory J. Edwards and Michael Gilbert and Emily Blank
                 and Stacy M. Branham",
  title =        "How the {Alt} Text Gets Made: What Roles and Processes
                 of {Alt} Text Creation Can Teach Us About Inclusive
                 Imagery",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "2",
  pages =        "18:1--18:??",
  month =        jun,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3587469",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Wed Aug 30 11:07:16 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3587469",
  abstract =     "Many studies within Accessible Computing have
                 investigated image accessibility, from what should be
                 included in alternative text (alt text), to possible
                 automated, human-in-the-loop, or crowdsourced
                 approaches to alt text generation. However, the
                 processes through which practitioners make alt text in
                 situ have rarely been discussed. Through interviews
                 with three artists and three accessibility
                 practitioners working with Google, as well as 25 end
                 users, we identify four processes of alt text creation
                 used by this company --- The User-Evaluation Process,
                 The Lone Writer Process, The Team Write-A-Thon Process,
                 and The Artist-Writer Process --- and unpack their
                 potential strengths and weaknesses as they relate to
                 access and inclusive imagery. We conclude with a
                 discussion of what alt text researchers and industry
                 professionals can learn from considering alt text in
                 situ, including opportunities to support user feedback,
                 cross-contributor consistency, and organizational or
                 technical changes to production processes",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "18",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Ghenai:2023:GDS,
  author =       "Amira Ghenai and Philips Ayeni and Jing Yu and Robin
                 Cohen and Karyn Moffatt",
  title =        "Guidelines for Designing Social Networking Sites for
                 Older Adults: a Systematic Review with Thematic
                 Synthesis",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "3",
  pages =        "19:1--19:??",
  month =        sep,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3615662",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Sep 29 07:45:09 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3615662",
  abstract =     "Social networking site (SNS) inaccessibility remains a
                 barrier for many older adults. Increasingly, research
                 has sought to address these shortcomings with
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "19",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Hamidi:2023:SSI,
  author =       "Foad Hamidi and Tsion Kidane and Patrick Mbullo Owuor
                 and Michaela Hynie and Melanie Baljko",
  title =        "Supporting Social Inclusion with {DIY-ATs}:
                 Perspectives of {Kenyan} Caregivers of Children with
                 Cognitive Disabilities",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "3",
  pages =        "20:1--20:??",
  month =        sep,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3616378",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Sep 29 07:45:09 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3616378",
  abstract =     "Do-It-Yourself assistive technologies (DIY-ATs) that
                 can be designed, fabricated, or customized by
                 non-technical individuals can enable people with
                 disabilities and their community \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "20",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Vargemidis:2023:PPE,
  author =       "Dimitri Vargemidis and Kathrin Gerling and Vero
                 {Vanden Abeele} and Luc Geurts",
  title =        "Performance and Pleasure: Exploring the Perceived
                 Usefulness and Appeal of Physical Activity Data
                 Visualizations with Older Adults",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "3",
  pages =        "21:1--21:??",
  month =        sep,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3615664",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Sep 29 07:45:09 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3615664",
  abstract =     "Wearable activity trackers hold the promise of making
                 older adults aware of their levels of physical activity
                 (PA), encouraging them to remain or become physically
                 active. \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "21",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Lu:2023:WTM,
  author =       "Leon Lu and Karen Anne Cochrane and Jin Kang and
                 Audrey Girouard",
  title =        "{``Why are there so many steps?''}: Improving Access
                 to Blind and Low Vision Music Learning through Personal
                 Adaptations and Future Design Ideas",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "3",
  pages =        "22:1--22:??",
  month =        sep,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3615663",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Sep 29 07:45:09 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3615663",
  abstract =     "Music can be a catalyst for self-development, creative
                 expression, and community building for blind or low
                 vision (BLV) individuals. However, BLV music learners
                 face complex \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "22",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Forster:2023:DMI,
  author =       "Andreas F{\"o}rster and Steffen Lepa",
  title =        "Digital Musical Instruments in Special Educational
                 Needs Schools: Requirements from the Music Teachers'
                 Perspective and the Status Quo in {Germany}",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "3",
  pages =        "23:1--23:??",
  month =        sep,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3616015",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Fri Sep 29 07:45:09 MDT 2023",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3616015",
  abstract =     "Digital musical instruments (DMIs) offer the
                 possibility to create barrier-free access to active
                 music-making and to unique sound aesthetics for a broad
                 group of people, including \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "23",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Philips:2023:HHI,
  author =       "Gavin R. Philips and Morris Huang and Cathy Bodine",
  title =        "Helping or Hindering: Inclusive Design of Automated
                 Task Prompting for Workers with Cognitive
                 Disabilities",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "4",
  pages =        "24:1--24:??",
  month =        dec,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3628447",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Jan 13 15:28:00 MST 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3628447",
  abstract =     "Of the $\approx 8.8$ million working-age adults with
                 cognitive disabilities in the United States, only
                 28.6\% are employed, contributing to a poverty rate
                 (26.1\%) for people with cognitive \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "24",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Kuhn:2023:MAA,
  author =       "Korbinian Kuhn and Verena Kersken and Benedikt Reuter
                 and Niklas Egger and Gottfried Zimmermann",
  title =        "Measuring the Accuracy of Automatic Speech Recognition
                 Solutions",
  journal =      j-TACCESS,
  volume =       "16",
  number =       "4",
  pages =        "25:1--25:??",
  month =        dec,
  year =         "2023",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3636513",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Sat Jan 13 15:28:00 MST 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3636513",
  abstract =     "For d/Deaf and hard of hearing (DHH) people,
                 captioning is an essential accessibility tool.
                 Significant developments in artificial intelligence
                 mean that automatic speech \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "25",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Clarke:2024:IUI,
  author =       "James M. Clarke and Maryam Mehrnezhad and Ehsan
                 Toreini",
  title =        "Invisible, Unreadable, and Inaudible Cookie Notices:
                 an Evaluation of Cookie Notices for Users with Visual
                 Impairments",
  journal =      j-TACCESS,
  volume =       "17",
  number =       "1",
  pages =        "1:1--1:??",
  month =        mar,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3641281",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 20 08:56:05 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3641281",
  abstract =     "This article investigates the accessibility of cookie
                 notices on websites for users with visual impairments
                  (VI) via a set of system studies on top UK websites
                  ($n = 46$) and a user study \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "1",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Migovich:2024:SDA,
  author =       "Miroslava Migovich and Deeksha Adiani and Michael
                 Breen and Amy Swanson and Timothy J. Vogus and Nilanjan
                 Sarkar",
  title =        "Stress Detection of Autistic Adults during Simulated
                 Job Interviews Using a Novel Physiological Dataset and
                 Machine Learning",
  journal =      j-TACCESS,
  volume =       "17",
  number =       "1",
  pages =        "2:1--2:??",
  month =        mar,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3639709",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 20 08:56:05 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3639709",
  abstract =     "The interview process has been identified as one of
                 the major barriers to employment of autistic
                 individuals, which contributes to the staggering rate
                 of under and unemployment of \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "2",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Tang:2024:ESP,
  author =       "Charlotte Tang and Imrul K. Shuva and Matthew Thelen
                 and Linda Zhu and Nathaniel S. Miller",
  title =        "Exploring the Strategies People with {Parkinson}'s
                 Disease Use to Self-track Symptoms and Medications",
  journal =      j-TACCESS,
  volume =       "17",
  number =       "1",
  pages =        "3:1--3:??",
  month =        mar,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3649454",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 20 08:56:05 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3649454",
  abstract =     "Self-tracking has great potential in empowering
                 individuals with a chronic illness in managing their
                 condition. Parkinson's Disease (PD) is a common
                 neurodegenerative disease \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "3",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Wood:2024:HDV,
  author =       "Rachel Wood and Jinjuan Heidi Feng and Jonathan
                 Lazar",
  title =        "Health Data Visualization Literacy Skills of {Young}
                 Adults with Down Syndrome and the Barriers to
                 Inference-making",
  journal =      j-TACCESS,
  volume =       "17",
  number =       "1",
  pages =        "4:1--4:??",
  month =        mar,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3648621",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 20 08:56:05 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3648621",
  abstract =     "As health management becomes more intertwined with
                 data, an individual's ability to read, interpret, and
                 engage with personal health information in data
                 visualizations is \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "4",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Taheri:2024:MET,
  author =       "Atieh Taheri and Carlos Gilberto Gomez-Monroy and
                 Vicente Borja and Misha Sra",
  title =        "{MouseClicker}: Exploring Tactile Feedback and
                 Physical Agency for People with Hand Motor
                 Impairments",
  journal =      j-TACCESS,
  volume =       "17",
  number =       "1",
  pages =        "5:1--5:??",
  month =        mar,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3648685",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 20 08:56:05 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3648685",
  abstract =     "Assistive technology (AT) design is critical in
                 enabling functionality for people with disabilities,
                 blending essential elements of both practical utility
                 and user experience. \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "5",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Lewis:2024:AIA,
  author =       "Brittany Lewis and Priyankan Kirupaharan and
                 Tina-Marie Ranalli and Krishna Venkatasubramanian",
  title =        "{A3C}: an Image-Association-Based Computing Device
                 Authentication Framework for People with Upper
                 Extremity Impairments",
  journal =      j-TACCESS,
  volume =       "17",
  number =       "2",
  pages =        "6:1--6:??",
  month =        jun,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3652522",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 20 08:56:06 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3652522",
  abstract =     "Current computing device authentication often presents
                 accessibility barriers for people with upper extremity
                 impairments (UEI). In this article, we present a
                 framework called \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "6",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Marsh:2024:DWS,
  author =       "Abigail Marsh and Lauren R. Milne",
  title =        "{I} Don't Want to Sound Rude, but It's None of Their
                 Business: Exploring Security and Privacy Concerns
                 around Assistive Technology Use in Educational
                 Settings",
  journal =      j-TACCESS,
  volume =       "17",
  number =       "2",
  pages =        "7:1--7:??",
  month =        jun,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3670690",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 20 08:56:06 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3670690",
  abstract =     "Students with disabilities often use assistive
                 technology to gain equal access to educational content.
                 However, using this technology can lead to security and
                 privacy concerns, \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "7",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Elmimouni:2024:NCC,
  author =       "Houda Elmimouni and Selma {\v{S}}abanovi{\'c} and
                 Jennifer A. Rode",
  title =        "Navigating the Cyborg Classroom: Telepresence Robots,
                 Accessibility Challenges, and Inclusivity in the
                 Classroom",
  journal =      j-TACCESS,
  volume =       "17",
  number =       "2",
  pages =        "8:1--8:??",
  month =        jun,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3672569",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 20 08:56:06 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3672569",
  abstract =     "Telepresence robots, designed to bridge physical
                 distances, have unique capabilities and inherent
                 limitations when deployed in classroom environments.
                 This study examines \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "8",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Rauschenberger:2024:ISI,
  author =       "Maria Rauschenberger and Sukru Eraslan",
  title =        "Introduction to the Special Issue on {W4A'22}",
  journal =      j-TACCESS,
  volume =       "17",
  number =       "2",
  pages =        "9:1--9:??",
  month =        jun,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3676149",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 20 08:56:06 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3676149",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "9",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Coverdale:2024:DAE,
  author =       "Andy Coverdale and Sarah Lewthwaite and Sarah Horton",
  title =        "Digital Accessibility Education in Context: Expert
                 Perspectives on Building Capacity in Academia and the
                 Workplace",
  journal =      j-TACCESS,
  volume =       "17",
  number =       "2",
  pages =        "10:1--10:??",
  month =        jun,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3649508",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 20 08:56:06 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3649508",
  abstract =     "The social model of disability, accessibility
                 legislation, and the digital transformation spurred by
                 COVID-19 expose a lack of accessibility capacity in the
                 workforce, \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "10",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Regimbal:2024:IOS,
  author =       "Juliette Regimbal and Jeffrey R. Blum and Cyan Kuo and
                 Jeremy R. Cooperstock",
  title =        "{IMAGE}: an Open-Source, Extensible Framework for
                 Deploying Accessible Audio and Haptic Renderings of
                 {Web} Graphics",
  journal =      j-TACCESS,
  volume =       "17",
  number =       "2",
  pages =        "11:1--11:??",
  month =        jun,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3665223",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 20 08:56:06 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3665223",
  abstract =     "For accessibility practitioners, creating and
                 deploying novel multimedia interactions for people with
                 disabilities is a nontrivial task. As a result, many
                 projects aiming to support \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "11",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{SassakiResendeSilva:2024:PNA,
  author =       "Jorge {Sassaki Resende Silva} and Paula Christina
                 {Figueira Cardoso} and Raphael {Winckler De Bettio} and
                 Daniela {Cardoso Tavares} and Carlos Alberto Silva and
                 Willian {Massami Watanabe} and Andr{\'e} {Pimenta
                 Freire}",
  title =        "In-Page Navigation Aids for Screen-Reader Users with
                 Automatic Topicalisation and Labelling",
  journal =      j-TACCESS,
  volume =       "17",
  number =       "2",
  pages =        "12:1--12:??",
  month =        jun,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3649223",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Aug 20 08:56:06 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3649223",
  abstract =     "Navigation aids such as headers and internal links
                 provide vital support for screen-reader users on web
                 documents to grasp a document's structure. However,
                 when such \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "12",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Pereira:2024:AUE,
  author =       "Let{\'\i}cia Seixas Pereira and Jo{\~a}o Guerreiro and
                 Andr{\'e} Rodrigues and Tiago Guerreiro and Carlos
                 Duarte",
  title =        "From Automation to User Empowerment: Investigating the
                 Role of a Semi-automatic Tool in Social Media
                 Accessibility",
  journal =      j-TACCESS,
  volume =       "17",
  number =       "3",
  pages =        "13:1--13:??",
  month =        sep,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3647643",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Oct 8 06:03:26 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3647643",
  abstract =     "This article focuses on evaluating SONAAR (Social
                 Networks Accessible Authoring), a tool that combines
                 automation and end-user empowerment to enhance the
                 \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "13",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Hassan:2024:EBA,
  author =       "Saad Hassan and Calu{\~a} de Lacerda Pataca and Akhter
                 {Al Amin} and Laleh Nourian and Diego Navarro and
                 Sooyeon Lee and Alexis Gordon and Matthew Watkins and
                 Garreth W. Tigwell and Matt Huenerfauth",
  title =        "Exploring the Benefits and Applications of Video-Span
                 Selection and Search for Real-Time Support in Sign
                 Language Video Comprehension among {ASL} Learners",
  journal =      j-TACCESS,
  volume =       "17",
  number =       "3",
  pages =        "14:1--14:??",
  month =        sep,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3690647",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Oct 8 06:03:26 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3690647",
  abstract =     "People learning American Sign Language (ASL) and
                 practicing their comprehension skills will often
                 encounter complex ASL videos that may contain
                 unfamiliar signs. Existing \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "14",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Shaheen:2024:EBL,
  author =       "Natalie L. Shaheen",
  title =        "Exploring Blind and Low-Vision {Youth}'s Digital
                 Access Needs in School: Toward Accessible Instructional
                 Technologies",
  journal =      j-TACCESS,
  volume =       "17",
  number =       "3",
  pages =        "15:1--15:??",
  month =        sep,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3688805",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Oct 8 06:03:26 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3688805",
  abstract =     "Blind and low-vision (BLV) youth have been excluded
                 from technology-mediated learning in school for two
                 decades due to the systemic inaccessibility of K-12
                 instructional \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "15",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Eagle:2024:IWS,
  author =       "Tessa Eagle and Leya Breanna Baltaxe-Admony and
                 Kathryn E. Ringland",
  title =        "{``It Was Something I Naturally Found Worked and Heard
                 About Later''}: an Investigation of Body Doubling with
                 Neurodivergent Participants",
  journal =      j-TACCESS,
  volume =       "17",
  number =       "3",
  pages =        "16:1--16:??",
  month =        sep,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3689648",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Oct 8 06:03:26 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3689648",
  abstract =     "Body doubling has emerged as a community-driven
                 phenomenon primarily employed by neurodivergent
                 individuals. In this work, we survey 220 people to
                 investigate how, \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "16",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}

@Article{Bern:2024:IGP,
  author =       "Jordan J. Bern and Gregg C. Vanderheiden",
  title =        "International Guidelines for Photosensitive Epilepsy:
                 Gap Analysis and Recommendations",
  journal =      j-TACCESS,
  volume =       "17",
  number =       "3",
  pages =        "17:1--17:??",
  month =        sep,
  year =         "2024",
  CODEN =        "????",
  DOI =          "https://doi.org/10.1145/3694790",
  ISSN =         "1936-7228 (print), 1936-7236 (electronic)",
  ISSN-L =       "1936-7236",
  bibdate =      "Tue Oct 8 06:03:26 MDT 2024",
  bibsource =    "https://www.math.utah.edu/pub/tex/bib/taccess.bib",
  URL =          "https://dl.acm.org/doi/10.1145/3694790",
  abstract =     "People with photosensitive epilepsy may have seizures
                 caused by flashing lights, patterns, and video
                 sequences. Because of this, there is increasing
                 interest among researchers, \ldots{}",
  acknowledgement = ack-nhfb,
  ajournal =     "ACM Trans. Access. Comput.",
  articleno =    "17",
  fjournal =     "ACM Transactions on Accessible Computing (TACCESS)",
  journal-URL =  "https://dl.acm.org/loi/taccess",
}