// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef UI_CHROMEOS_TOUCH_EXPLORATION_CONTROLLER_H_
#define UI_CHROMEOS_TOUCH_EXPLORATION_CONTROLLER_H_

#include "base/time/tick_clock.h"
#include "base/timer/timer.h"
#include "base/values.h"
#include "ui/chromeos/ui_chromeos_export.h"
#include "ui/events/event.h"
#include "ui/events/event_rewriter.h"
#include "ui/events/gesture_detection/gesture_detector.h"
#include "ui/events/gestures/gesture_provider_aura.h"
#include "ui/gfx/geometry/point.h"

namespace aura {
class Window;
}

namespace ui {

class Event;
class EventHandler;
class GestureEvent;
class GestureProviderAura;
class TouchEvent;

// A delegate to handle commands in response to detected accessibility gesture
// events.
class TouchExplorationControllerDelegate {
 public:
  virtual ~TouchExplorationControllerDelegate() {}

  // Takes an int from 0 to 100 that indicates the percent the volume
  // should be set to.
  virtual void SetOutputLevel(int volume) = 0;

  // Silences spoken feedback.
  virtual void SilenceSpokenFeedback() = 0;

  // This function should be called when the volume adjust earcon should be
  // played.
  virtual void PlayVolumeAdjustEarcon() = 0;

  // This function should be called when the passthrough earcon should be
  // played.
  virtual void PlayPassthroughEarcon() = 0;

  // This function should be called when the exit screen earcon should be
  // played.
  virtual void PlayExitScreenEarcon() = 0;

  // This function should be called when the enter screen earcon should be
  // played.
  virtual void PlayEnterScreenEarcon() = 0;
};
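// A minimal delegate implementation might look like the sketch below. This is
// illustrative only; the class name is hypothetical and the audio forwarding
// mentioned in the comment is an assumption, not part of this interface:
//
//   class StubTouchExplorationDelegate
//       : public TouchExplorationControllerDelegate {
//    public:
//     virtual ~StubTouchExplorationDelegate() {}
//     virtual void SetOutputLevel(int volume) OVERRIDE {
//       // e.g. forward |volume| (0-100) to the platform's audio handler.
//     }
//     virtual void SilenceSpokenFeedback() OVERRIDE {}
//     virtual void PlayVolumeAdjustEarcon() OVERRIDE {}
//     virtual void PlayPassthroughEarcon() OVERRIDE {}
//     virtual void PlayExitScreenEarcon() OVERRIDE {}
//     virtual void PlayEnterScreenEarcon() OVERRIDE {}
//   };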

// TouchExplorationController is used in tandem with "Spoken Feedback" to
// make the touch UI accessible. Gestures performed in the middle of the screen
// are mapped to accessibility key shortcuts, while gestures performed on the
// edge of the screen can change settings.
//
// ** Short version **
//
// At a high level, single-finger events are used for accessibility -
// exploring the screen gets turned into mouse moves (which can then be
// spoken by a running accessibility service), a single tap while the user
// is in touch exploration or a double-tap simulates a click, and gestures
// can be used to send high-level accessibility commands. For example, a swipe
// right corresponds to the keyboard shortcut Shift+Search+Right.
// Swipes with up to four fingers are also mapped to commands. Slide
// gestures performed on the edge of the screen can change settings
// continuously. For example, sliding a finger along the right side of the
// screen changes the volume. When a user double-taps and holds with one
// finger, the finger is passed through as if accessibility were turned off.
// If the user taps the screen with two fingers, the user can silence spoken
// feedback if it is playing.
//
// ** Long version **
//
// Here are the details of the implementation:
//
// When the first touch is pressed, a 300 ms grace period timer starts.
//
// If the user keeps their finger down for more than 300 ms and doesn't
// perform a supported accessibility gesture in that time (e.g. swipe right),
// they enter touch exploration mode, and all movements are translated into
// synthesized mouse move events.
//
// Also, if the user moves their single finger outside a certain slop region
// (without performing a gesture), they enter touch exploration mode earlier
// than 300 ms.
//
// If the user taps and releases their finger, a single mouse move is fired
// 300 ms after the initial touch.
//
// While in touch exploration mode, the user can perform a single tap
// by releasing their finger and tapping again before 300 ms passes.
// This results in a click on the last successful touch exploration
// location. This allows the user to perform a single tap anywhere to
// activate it.
//
// The user can perform swipe gestures in one of the four cardinal directions,
// which will be interpreted and used to control the UI. A gesture is only
// registered if the fingers move outside the slop region, and only if it is
// completed within the grace period. If a single-finger gesture fails to be
// completed within the grace period, the state changes to touch exploration
// mode. If a multi-finger gesture fails to be completed within the grace
// period, the user must lift all fingers before completing any more actions.
//
// If the user double-taps, the second tap is passed through, allowing the
// user to click - however, the double-tap location is changed to the location
// of the last successful touch exploration - that allows the user to explore
// anywhere on the screen, hear its description, then double-tap anywhere
// to activate it.
//
// If the user double-taps and holds, any event from that finger is passed
// through. These events are passed through with an offset such that the first
// touch is offset to be at the last touch exploration location, and every
// following event is offset by the same amount.
//
// If any other fingers are added or removed, they are ignored. Once the
// passthrough finger is released, passthrough stops and the user is reset
// to the no fingers down state.
//
// If the user enters touch exploration mode, they can click without lifting
// their touch exploration finger by tapping anywhere else on the screen with
// a second finger, while the touch exploration finger is still pressed.
//
// Once touch exploration mode has been activated, the controller remains in
// that mode until all fingers have been released.
//
// If the user places a finger on the edge of the screen and moves their finger
// past slop, a slide gesture is performed. The user can then slide one finger
// along an edge of the screen and continuously control a setting. Once the
// user enters this state, the boundaries that define an edge expand so that
// the user can adjust the setting within a slightly wider band along the
// screen. If the user exits this area without lifting their finger, they will
// not be able to perform any actions; however, if they keep their finger down
// and return to the "hot edge," they can still adjust the setting. In order to
// perform other touch accessibility movements, the user must lift their
// finger. If additional fingers are added while in this state, the user
// transitions to passthrough.
//
// Currently, only the right edge is mapped to control the volume. Volume
// control along the edge of the screen is directly proportional to where the
// user's finger is located on the screen. The top right corner of the screen
// automatically sets the volume to 100% and the bottom right corner of the
// screen automatically sets the volume to 0% once the user has moved past
// slop.
//
// If the user taps the screen with two fingers and lifts both fingers before
// the grace period has passed, spoken feedback is silenced.
//
// The user can also enter passthrough by placing a finger on one of the bottom
// corners of the screen until an earcon sounds. After the earcon sounds, the
// user is in passthrough, so all subsequent fingers placed on the screen will
// be passed through. Once the finger in the corner has been released, the
// state will switch to wait for no fingers.
//
// The caller is expected to retain ownership of instances of this class and
// destroy them before |root_window| is destroyed.
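//
// Example usage (a sketch, not a prescribed pattern): the owner typically
// installs the controller as an event rewriter on the root window's event
// source and removes it again before destruction. The |delegate| object is
// assumed to outlive the controller.
//
//   scoped_ptr<TouchExplorationController> controller(
//       new TouchExplorationController(root_window, delegate));
//   root_window->GetHost()->GetEventSource()->AddEventRewriter(
//       controller.get());
//   ...
//   root_window->GetHost()->GetEventSource()->RemoveEventRewriter(
//       controller.get());
//   controller.reset();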
class UI_CHROMEOS_EXPORT TouchExplorationController
    : public ui::EventRewriter,
      public ui::GestureProviderAuraClient {
 public:
  explicit TouchExplorationController(
      aura::Window* root_window,
      ui::TouchExplorationControllerDelegate* delegate);
  virtual ~TouchExplorationController();

 private:
  friend class TouchExplorationControllerTestApi;

  // Overridden from ui::EventRewriter
  virtual ui::EventRewriteStatus RewriteEvent(
      const ui::Event& event,
      scoped_ptr<ui::Event>* rewritten_event) OVERRIDE;
  virtual ui::EventRewriteStatus NextDispatchEvent(
      const ui::Event& last_event, scoped_ptr<ui::Event>* new_event) OVERRIDE;

  // Event handlers based on the current state - see State, below.
  ui::EventRewriteStatus InNoFingersDown(
      const ui::TouchEvent& event, scoped_ptr<ui::Event>* rewritten_event);
  ui::EventRewriteStatus InSingleTapPressed(
      const ui::TouchEvent& event, scoped_ptr<ui::Event>* rewritten_event);
  ui::EventRewriteStatus InSingleTapOrTouchExploreReleased(
      const ui::TouchEvent& event, scoped_ptr<ui::Event>* rewritten_event);
  ui::EventRewriteStatus InDoubleTapPending(
      const ui::TouchEvent& event, scoped_ptr<ui::Event>* rewritten_event);
  ui::EventRewriteStatus InTouchReleasePending(
      const ui::TouchEvent& event, scoped_ptr<ui::Event>* rewritten_event);
  ui::EventRewriteStatus InTouchExploration(
      const ui::TouchEvent& event, scoped_ptr<ui::Event>* rewritten_event);
  ui::EventRewriteStatus InCornerPassthrough(
      const ui::TouchEvent& event, scoped_ptr<ui::Event>* rewritten_event);
  ui::EventRewriteStatus InOneFingerPassthrough(
      const ui::TouchEvent& event, scoped_ptr<ui::Event>* rewritten_event);
  ui::EventRewriteStatus InGestureInProgress(
      const ui::TouchEvent& event, scoped_ptr<ui::Event>* rewritten_event);
  ui::EventRewriteStatus InTouchExploreSecondPress(
      const ui::TouchEvent& event, scoped_ptr<ui::Event>* rewritten_event);
  ui::EventRewriteStatus InWaitForNoFingers(
      const ui::TouchEvent& event, scoped_ptr<ui::Event>* rewritten_event);
  ui::EventRewriteStatus InSlideGesture(
      const ui::TouchEvent& event, scoped_ptr<ui::Event>* rewritten_event);
  ui::EventRewriteStatus InTwoFingerTap(
      const ui::TouchEvent& event, scoped_ptr<ui::Event>* rewritten_event);

  // Returns the current time of the tick clock.
  base::TimeDelta Now();

  // This timer is started every time we get the first press event, and
  // it fires after the double-click timeout elapses (300 ms by default).
  // If the user taps and releases within 300 ms and doesn't press again,
  // we treat that as a single mouse move (touch exploration) event.
  void StartTapTimer();
  void OnTapTimerFired();

  // This timer is started every time we get the first press event and the
  // finger is in the corner of the screen.
  // It fires after the corner passthrough delay elapses. If the
  // user is still in the corner by the time this timer fires, all subsequent
  // fingers added on the screen will be passed through.
  void OnPassthroughTimerFired();

  // Dispatch a new event outside of the event rewriting flow.
  void DispatchEvent(ui::Event* event);

  // Overridden from GestureProviderAuraClient.
  //
  // The gesture provider keeps track of all the touch events after
  // the user moves fast enough to trigger a gesture. After the user
  // completes their gesture, this method will decide what keyboard
  // input their gesture corresponded to.
  virtual void OnGestureEvent(ui::GestureEvent* gesture) OVERRIDE;

  // Process the gesture events that have been created.
  void ProcessGestureEvents();

  void OnSwipeEvent(ui::GestureEvent* swipe_gesture);

  void SideSlideControl(ui::GestureEvent* gesture);

  // Dispatches the keyboard shortcut Shift+Search+<arrow key>
  // outside the event rewriting flow.
  void DispatchShiftSearchKeyEvent(const ui::KeyboardCode third_key);

  // Binds DispatchShiftSearchKeyEvent to a specific third key.
  base::Closure BindShiftSearchKeyEvent(const ui::KeyboardCode third_key);

  // Dispatches a single key with the given flags.
  void DispatchKeyWithFlags(const ui::KeyboardCode key, int flags);

  // Binds DispatchKeyWithFlags to a specific key and flags.
  base::Closure BindKeyEventWithFlags(const ui::KeyboardCode key, int flags);

  scoped_ptr<ui::Event> CreateMouseMoveEvent(const gfx::PointF& location,
                                             int flags);

  void EnterTouchToMouseMode();

  void PlaySoundForTimer();

  // Some constants used in touch_exploration_controller:

  // Within this many DIPs of the screen edge, the release event generated will
  // reset the state to NoFingersDown.
  const float kLeavingScreenEdge = 6;

  // Swipe/scroll gestures within these bounds (in DIPs) will change preset
  // settings.
  const float kMaxDistanceFromEdge = 75;

  // After a slide gesture has been triggered, if the finger is still within
  // these bounds (in DIPs), the preset settings will still change.
  const float kSlopDistanceFromEdge = kMaxDistanceFromEdge + 40;

  // The split tap slop is a bit more generous since keeping two
  // fingers in place is a bit harder.
  const float GetSplitTapTouchSlop();

  enum State {
    // No fingers are down and no events are pending.
    NO_FINGERS_DOWN,

    // A single finger is down, but we're not yet sure if this is going
    // to be touch exploration or something else.
    SINGLE_TAP_PRESSED,

    // The user pressed and released a single finger - a tap - but we have
    // to wait until the end of the grace period to allow the user to tap a
    // second time. If the second tap doesn't occur within the grace period,
    // we dispatch a mouse move at the location of the first tap.
    SINGLE_TAP_RELEASED,

    // The user was in touch explore mode and released the finger.
    // If another touch press occurs within the grace period, a single
    // tap click occurs. This state differs from SINGLE_TAP_RELEASED
    // in that if a second tap doesn't occur within the grace period,
    // there is no mouse move dispatched.
    TOUCH_EXPLORE_RELEASED,

    // The user tapped once, and before the grace period expired, pressed
    // one finger down to begin a double-tap, but has not released it yet.
    // This could become passthrough, so no touch press is dispatched yet.
    DOUBLE_TAP_PENDING,

    // The user was doing touch exploration, started split tap, but lifted the
    // touch exploration finger. Once they remove all fingers, a touch release
    // will go through.
    TOUCH_RELEASE_PENDING,

    // We're in touch exploration mode. Anything other than the first finger
    // is ignored, and movements of the first finger are rewritten as mouse
    // move events. This mode is entered if a single finger is pressed and
    // after the grace period the user hasn't added a second finger or
    // moved the finger outside of the slop region. We'll stay in this
    // mode until all fingers are lifted.
    TOUCH_EXPLORATION,

    // If the user moves their finger faster than the threshold velocity after
    // a single tap, the touch events that follow will be translated into
    // gesture events. If the user successfully completes a gesture within the
    // grace period, the gesture will be interpreted and used to control the
    // UI via discrete actions - currently by synthesizing key events
    // corresponding to each gesture. Otherwise, the collected gestures are
    // discarded and the state changes to TOUCH_EXPLORATION.
    GESTURE_IN_PROGRESS,

    // The user was in touch exploration, but has placed down another finger.
    // If the user releases the second finger, a touch press and release
    // will go through at the last touch explore location. If the user
    // releases the touch explore finger, the touch press and release will
    // still go through once the split tap finger is also lifted. If any
    // fingers are pressed past the first two, the touch press is cancelled
    // and the user enters the wait state until the fingers are removed.
    TOUCH_EXPLORE_SECOND_PRESS,

    // After the user double-taps and holds with a single finger, all events
    // for that finger are passed through, displaced by an offset. Adding
    // extra fingers has no effect. This state is left when the user removes
    // all fingers.
    ONE_FINGER_PASSTHROUGH,

    // If the user has pressed and held down the left corner past the long
    // press duration, then as long as they are holding the corner, all
    // subsequent fingers registered will be in passthrough.
    CORNER_PASSTHROUGH,

    // If the user added another finger in SINGLE_TAP_PRESSED, or if the user
    // has multiple fingers down in any other state between passthrough,
    // touch exploration, and gestures, they must release all fingers before
    // completing any more actions. This state is generally useful for
    // developing new features, because it creates a simple way to handle a
    // dead end in user flow.
    WAIT_FOR_NO_FINGERS,

    // If the user is within the given bounds from an edge of the screen, not
    // including corners, then the resulting movements will be interpreted as
    // slide gestures.
    SLIDE_GESTURE,

    // If the user taps the screen with two fingers and releases both fingers
    // before the grace period has passed, spoken feedback will be silenced.
    TWO_FINGER_TAP,
  };

  enum ScreenLocation {
    // Hot "edges" of the screen are each represented by a respective bit.
    NO_EDGE = 0,
    RIGHT_EDGE = 1 << 0,
    TOP_EDGE = 1 << 1,
    LEFT_EDGE = 1 << 2,
    BOTTOM_EDGE = 1 << 3,
    BOTTOM_LEFT_CORNER = LEFT_EDGE | BOTTOM_EDGE,
    BOTTOM_RIGHT_CORNER = RIGHT_EDGE | BOTTOM_EDGE,
  };

  // Given a point, if it is within the given bounds of an edge, returns the
  // edge. If it is within the given bounds of two edges, returns an int with
  // both bits that represent the respective edges turned on. Otherwise,
  // returns NO_EDGE.
  int FindEdgesWithinBounds(gfx::Point point, float bounds);
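  // For example (illustrative only; |location| and |bounds| are hypothetical
  // local variables), a point near both the bottom and right edges yields
  // BOTTOM_RIGHT_CORNER, and individual edges can be tested with bitwise AND:
  //
  //   int edges = FindEdgesWithinBounds(location, bounds);
  //   if ((edges & BOTTOM_RIGHT_CORNER) == BOTTOM_RIGHT_CORNER) {
  //     // Finger is in the bottom right corner.
  //   } else if (edges & RIGHT_EDGE) {
  //     // Finger is along the right edge; map its position to the volume.
  //   }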

  // Sets the state and modifies any variables related to the state change
  // (e.g. resetting the gesture provider).
  void SetState(State new_state, const char* function_name);

  void VlogState(const char* function_name);

  void VlogEvent(const ui::TouchEvent& event, const char* function_name);

  // Gets the enum name from an integer value.
  const char* EnumStateToString(State state);

  // Maps each single/multi finger swipe to the function that dispatches
  // the corresponding key events.
  void InitializeSwipeGestureMaps();

  aura::Window* root_window_;

  // Handles volume control. Not owned.
  ui::TouchExplorationControllerDelegate* delegate_;

  // A set of touch ids for fingers currently touching the screen.
  std::vector<int> current_touch_ids_;

  // Map of touch ids to their last known location.
  std::map<int, gfx::PointF> touch_locations_;

  // The current state.
  State state_;

  // A copy of the event from the initial touch press.
  scoped_ptr<ui::TouchEvent> initial_press_;

  // Map of touch ids to where their initial press occurred relative to the
  // screen.
  std::map<int, gfx::Point> initial_presses_;

  // In one finger passthrough, the touch is displaced relative to the
  // last touch exploration location.
  gfx::Vector2d passthrough_offset_;

  // Stores the most recent event from a finger that is currently not
  // sending events through, but might in the future (e.g. before a finger
  // enters double-tap-hold passthrough, we need to update its location).
  scoped_ptr<ui::TouchEvent> last_unused_finger_event_;

  // The last synthesized mouse move event. When the user double-taps,
  // we send the passed-through tap to the location of this event.
  scoped_ptr<ui::TouchEvent> last_touch_exploration_;

  // A timer that fires after the double-tap delay.
  base::OneShotTimer<TouchExplorationController> tap_timer_;

  // A timer that fires to enter passthrough.
  base::OneShotTimer<TouchExplorationController> passthrough_timer_;

  // A timer that fires an indicating sound while sliding to change the volume.
  base::RepeatingTimer<TouchExplorationController> sound_timer_;

  // A default gesture detector config, so we can share the same
  // timeout and pixel slop constants.
  ui::GestureDetector::Config gesture_detector_config_;

  // Gesture handler to interpret the touch events.
  scoped_ptr<ui::GestureProviderAura> gesture_provider_;

  // The previous state entered.
  State prev_state_;

  // A copy of the previous event passed.
  scoped_ptr<ui::TouchEvent> prev_event_;

  // Whether VLOGs are turned on or not.
  bool VLOG_on_;

  // During testing, when the controller needs time relative to real time,
  // this clock is set to the simulated clock and used instead.
  base::TickClock* tick_clock_;

  // Maps the number of fingers in a swipe to the resulting functions that
  // dispatch key events.
  std::map<int, base::Closure> left_swipe_gestures_;
  std::map<int, base::Closure> right_swipe_gestures_;
  std::map<int, base::Closure> up_swipe_gestures_;
  std::map<int, base::Closure> down_swipe_gestures_;

  DISALLOW_COPY_AND_ASSIGN(TouchExplorationController);
};

}  // namespace ui

#endif  // UI_CHROMEOS_TOUCH_EXPLORATION_CONTROLLER_H_