/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.bluetooth.a2dpsink;

import android.bluetooth.BluetoothA2dpSink;
import android.bluetooth.BluetoothAvrcpController;
import android.bluetooth.BluetoothDevice;
import android.content.Context;
import android.media.AudioManager;
import android.media.AudioManager.OnAudioFocusChangeListener;
import android.os.Message;
import android.util.Log;

import com.android.bluetooth.avrcp.AvrcpControllerService;
import com.android.bluetooth.R;
import com.android.internal.util.IState;
import com.android.internal.util.State;
import com.android.internal.util.StateMachine;

/**
 * Bluetooth A2DP SINK Streaming StateMachine.
 *
 * This state machine defines how the stack behaves once the A2DP connection is established and
 * both devices are ready for streaming. For simplification we assume that the connection can
 * either stream music immediately (i.e. data packets are coming in or have the potential to come
 * in) or it cannot stream (i.e. Idle and Open states are treated alike). See Fig 4-1 of GAVDP
 * Spec 1.0.
 *
 * Legend:
 * SRC: Source (Remote)
 * (SRC, F) - Remote is not streaming. F stands for false.
 * (SRC, T) - Remote intends to stream. T stands for true.
 * ACT: Action
 * (ACT, F) - Local/Remote user intent is to pause/stop (AVRCP pause/stop).
 * (ACT, T) - Local/Remote user intent is to play (AVRCP play).
 * The way to detect action is twofold:
 * -- We can detect action on the SNK side by directly monitoring the AVRCP controller service.
 * -- On the SRC side, any AVRCP action will be accompanied by an AVRCP update and hence we can
 *    update our action state.
 *
 * A state will be a combination of SRC and ACT state. Hence a state such as:
 * (F, T) means the user has shown intent to play on the local or remote device (second T) but the
 * connection is not in a streaming state yet.
 *
 * -----------------------------------------------------------------------------------------------
 * Start State | End State | Transition(s)
 * -----------------------------------------------------------------------------------------------
 * (F, F)        (F, T)      ACT Play (no streaming in either state)
 * (F, F)        (T, F)      Remote streams (play depends on policy)
 * (T, F)        (F, F)      Remote stops streaming.
 * (T, F)        (T, T)      ACT Play (streaming already existed).
 * (F, T)        (F, F)      ACT Pause.
 * (F, T)        (T, T)      Remote starts streaming (ACT Play already existed).
 * (T, T)        (F, T)      Remote stops streaming.
 * (T, T)        (F, F)      ACT Stop.
 * (T, T)        (T, F)      ACT Pause.
 *
 * -----------------------------------------------------------------------------------------------
 * State | Action(s)
 * -----------------------------------------------------------------------------------------------
 * (F, F) 1. Lose audio focus (if it exists) and notify Fluoride of the audio focus loss.
 *        2. Stop AVRCP from pushing updates to the UI.
 * (T, F) 1. If the policy is opt-in then get focus and stream (get audio focus etc).
 *        2. Else throw away the data (lose audio focus etc).
 * (F, T) In this state the source does not stream although we have play intent.
 *        1. Show a spinner indicating that data will come through.
 * (T, T) 1. Request audio focus and on success update AVRCP to show UI updates.
 *        2. On audio focus, enable streaming in Fluoride.
 */
final class A2dpSinkStreamingStateMachine extends StateMachine {
    private static final boolean DBG = true;
    private static final String TAG = "A2dpSinkStreamingStateMachine";
    private static final int ACT_PLAY_NUM_RETRIES = 5;
    private static final int ACT_PLAY_RETRY_DELAY = 2000; // millis.
    private static final int DEFAULT_DUCK_PERCENT = 25;

    // Streaming states (see the description above).
    private SRC_F_ACT_F mSrcFActF;
    private SRC_F_ACT_T mSrcFActT;
    private SRC_T_ACT_F mSrcTActF;
    private SRC_T_ACT_T mSrcTActT;

    // Transitions.
    public static final int SRC_STR_START = 0;
    public static final int SRC_STR_STOP = 1;
    public static final int SRC_STR_STOP_JITTER_WAIT_OVER = 2;
    public static final int ACT_PLAY = 3;
    public static final int ACT_PLAY_RETRY = 4;
    public static final int ACT_PAUSE = 5;
    public static final int AUDIO_FOCUS_CHANGE = 6;
    public static final int DISCONNECT = 7;

    // Private variables.
    private A2dpSinkStateMachine mA2dpSinkSm;
    private Context mContext;
    private AudioManager mAudioManager;
    // Set default focus to loss since we have never requested it before.
    private int mCurrentAudioFocus = AudioManager.AUDIOFOCUS_LOSS;

    /* Used to indicate focus loss. */
    private static final int STATE_FOCUS_LOST = 0;
    /* Used to inform bluedroid that focus is granted. */
    private static final int STATE_FOCUS_GRANTED = 1;

    /* Wait in millis before the ACT loses focus on SRC jitter when streaming. */
    private static final int SRC_STR_JITTER_WAIT = 5 * 1000; // 5 sec.

    /* Focus changes when we are currently holding focus (i.e. we are in the SRC_T_ACT_T state). */
    private OnAudioFocusChangeListener mAudioFocusListener = new OnAudioFocusChangeListener() {
        @Override
        public void onAudioFocusChange(int focusChange) {
            if (DBG) {
                Log.d(TAG, "onAudioFocusChangeListener focuschange " + focusChange);
            }
            A2dpSinkStreamingStateMachine.this.sendMessage(AUDIO_FOCUS_CHANGE, focusChange);
        }
    };

    private A2dpSinkStreamingStateMachine(A2dpSinkStateMachine a2dpSinkSm, Context context) {
        super("A2dpSinkStreamingStateMachine");
        mA2dpSinkSm = a2dpSinkSm;
        mContext = context;
        mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);

        mSrcFActF = new SRC_F_ACT_F();
        mSrcFActT = new SRC_F_ACT_T();
        mSrcTActF = new SRC_T_ACT_F();
        mSrcTActT = new SRC_T_ACT_T();

        // States are independent of each other. We simply use transitionTo.
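        // All four states are registered as flat siblings (no parent/child hierarchy), so every
        // message is handled entirely by the current state; SRC_F_ACT_F below is the initial one.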
        addState(mSrcFActF);
        addState(mSrcFActT);
        addState(mSrcTActF);
        addState(mSrcTActT);
        setInitialState(mSrcFActF);
    }

    public static A2dpSinkStreamingStateMachine make(
            A2dpSinkStateMachine a2dpSinkSm, Context context) {
        if (DBG) {
            Log.d(TAG, "make");
        }
        A2dpSinkStreamingStateMachine a2dpStrStateMachine =
                new A2dpSinkStreamingStateMachine(a2dpSinkSm, context);
        a2dpStrStateMachine.start();
        return a2dpStrStateMachine;
    }

    /**
     * Utility functions that can be used by all states.
     */
    private boolean requestAudioFocus() {
        return (mAudioManager.requestAudioFocus(
                mAudioFocusListener, AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN)
                        == AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
    }

    private void startAvrcpUpdates() {
        // Since AVRCP gets started after A2DP we may need to request it later in the cycle.
        AvrcpControllerService avrcpService = AvrcpControllerService.getAvrcpControllerService();

        if (DBG) {
            Log.d(TAG, "startAvrcpUpdates");
        }
        if (avrcpService != null && avrcpService.getConnectedDevices().size() == 1) {
            avrcpService.startAvrcpUpdates();
        } else {
            Log.e(TAG, "startAvrcpUpdates failed because of connection.");
        }
    }

    private void stopAvrcpUpdates() {
        // Since AVRCP gets started after A2DP we may need to request it later in the cycle.
        AvrcpControllerService avrcpService = AvrcpControllerService.getAvrcpControllerService();

        if (DBG) {
            Log.d(TAG, "stopAvrcpUpdates");
        }
        if (avrcpService != null && avrcpService.getConnectedDevices().size() == 1) {
            avrcpService.stopAvrcpUpdates();
        } else {
            Log.e(TAG, "stopAvrcpUpdates failed because of connection.");
        }
    }

    private void sendAvrcpPause() {
        // Since AVRCP gets started after A2DP we may need to request it later in the cycle.
        AvrcpControllerService avrcpService = AvrcpControllerService.getAvrcpControllerService();

        if (DBG) {
            Log.d(TAG, "sendAvrcpPause");
        }
        if (avrcpService != null && avrcpService.getConnectedDevices().size() == 1) {
            if (DBG) {
                Log.d(TAG, "Pausing AVRCP.");
            }
            avrcpService.sendPassThroughCmd(
                    avrcpService.getConnectedDevices().get(0),
                    BluetoothAvrcpController.PASS_THRU_CMD_ID_PAUSE,
                    BluetoothAvrcpController.KEY_STATE_PRESSED);
            avrcpService.sendPassThroughCmd(
                    avrcpService.getConnectedDevices().get(0),
                    BluetoothAvrcpController.PASS_THRU_CMD_ID_PAUSE,
                    BluetoothAvrcpController.KEY_STATE_RELEASED);
        } else {
            Log.e(TAG, "Passthrough not sent, connection unavailable.");
        }
    }

    private void sendAvrcpPlay() {
        // Since AVRCP gets started after A2DP we may need to request it later in the cycle.
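        // As in sendAvrcpPause() above, the command is only issued when exactly one AVRCP device
        // is connected, and it is sent as a KEY_STATE_PRESSED/KEY_STATE_RELEASED pass-through pair.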
        AvrcpControllerService avrcpService = AvrcpControllerService.getAvrcpControllerService();

        if (DBG) {
            Log.d(TAG, "sendAvrcpPlay");
        }
        if (avrcpService != null && avrcpService.getConnectedDevices().size() == 1) {
            if (DBG) {
                Log.d(TAG, "Playing AVRCP.");
            }
            avrcpService.sendPassThroughCmd(
                    avrcpService.getConnectedDevices().get(0),
                    BluetoothAvrcpController.PASS_THRU_CMD_ID_PLAY,
                    BluetoothAvrcpController.KEY_STATE_PRESSED);
            avrcpService.sendPassThroughCmd(
                    avrcpService.getConnectedDevices().get(0),
                    BluetoothAvrcpController.PASS_THRU_CMD_ID_PLAY,
                    BluetoothAvrcpController.KEY_STATE_RELEASED);
        } else {
            Log.e(TAG, "Passthrough not sent, connection unavailable.");
        }
    }

    private void startFluorideStreaming() {
        mA2dpSinkSm.informAudioFocusStateNative(STATE_FOCUS_GRANTED);
        mA2dpSinkSm.informAudioTrackGainNative(1.0f);
    }

    private void stopFluorideStreaming() {
        mA2dpSinkSm.informAudioFocusStateNative(STATE_FOCUS_LOST);
    }

    private void setFluorideAudioTrackGain(float gain) {
        mA2dpSinkSm.informAudioTrackGainNative(gain);
    }

    private class SRC_F_ACT_F extends State {
        private static final String STATE_TAG = A2dpSinkStreamingStateMachine.TAG + ".SRC_F_ACT_F";

        @Override
        public void enter() {
            if (DBG) {
                Log.d(STATE_TAG, "Enter: " + getCurrentMessage().what);
            }
        }

        @Override
        public boolean processMessage(Message message) {
            if (DBG) {
                Log.d(STATE_TAG, " process message: " + message.what);
            }
            switch (message.what) {
                case SRC_STR_START:
                    // Opt out of all sounds without AVRCP play. We simply throw the data away.
                    transitionTo(mSrcTActF);
                    break;

                case ACT_PLAY:
                    // Wait in the next state for actual playback. We defer the message so that the
                    // next state (SRC_F_ACT_T) can execute the retry logic.
                    deferMessage(message);
                    transitionTo(mSrcFActT);
                    break;

                case DISCONNECT:
                    mAudioManager.abandonAudioFocus(mAudioFocusListener);
                    mCurrentAudioFocus = AudioManager.AUDIOFOCUS_LOSS;
                    break;

                case AUDIO_FOCUS_CHANGE:
                    // If we are regaining focus after a transient loss this indicates that we
                    // should press play again.
                    int newAudioFocus = message.arg1;
                    if (DBG) {
                        Log.d(STATE_TAG,
                                "prev focus " + mCurrentAudioFocus + " new focus " + newAudioFocus);
                    }
                    if (mCurrentAudioFocus == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT &&
                            newAudioFocus == AudioManager.AUDIOFOCUS_GAIN) {
                        sendAvrcpPlay();
                        // We should transition to SRC_F_ACT_T after this message. We also send
                        // some retries here because after phone calls we may have race conditions.
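                        // ACT_PLAY_RETRY carries the remaining retry count in arg1; SRC_F_ACT_T
                        // decrements it and re-sends AVRCP PLAY every ACT_PLAY_RETRY_DELAY millis
                        // until the count reaches zero.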
                        sendMessageDelayed(
                                ACT_PLAY_RETRY, ACT_PLAY_NUM_RETRIES, ACT_PLAY_RETRY_DELAY);
                    }
                    mCurrentAudioFocus = newAudioFocus;
                    break;

                default:
                    Log.e(TAG, "Don't know how to handle " + message.what);
            }
            return HANDLED;
        }
    }

    private class SRC_F_ACT_T extends State {
        private static final String STATE_TAG = A2dpSinkStreamingStateMachine.TAG + ".SRC_F_ACT_T";
        private boolean mPlay = false;

        @Override
        public void enter() {
            if (DBG) {
                Log.d(STATE_TAG, "Enter: " + getCurrentMessage().what);
            }
        }

        @Override
        public boolean processMessage(Message message) {
            if (DBG) {
                Log.d(STATE_TAG, " process message: " + message.what);
            }
            switch (message.what) {
                case SRC_STR_START:
                    deferMessage(message);
                    transitionTo(mSrcTActT);
                    break;

                case ACT_PAUSE:
                    transitionTo(mSrcFActF);
                    break;

                case ACT_PLAY:
                    // Retry if the remote has not yet started playing music. This is seen on some
                    // devices which, after a phone call, require multiple play commands before
                    // music starts again.
                    break;

                case ACT_PLAY_RETRY:
                    if (message.arg1 > 0) {
                        Log.d(STATE_TAG, "Retry " + message.arg1);
                        sendAvrcpPlay();
                        sendMessageDelayed(ACT_PLAY_RETRY, message.arg1 - 1, ACT_PLAY_RETRY_DELAY);
                    }
                    break;

                case DISCONNECT:
                    deferMessage(message);
                    transitionTo(mSrcFActF);
                    mPlay = false;
                    break;

                case AUDIO_FOCUS_CHANGE:
                    int newAudioFocus = message.arg1;
                    if (DBG) {
                        Log.d(STATE_TAG,
                                "prev focus " + mCurrentAudioFocus + " new focus " + newAudioFocus);
                    }
                    if (newAudioFocus == AudioManager.AUDIOFOCUS_GAIN) {
                        sendAvrcpPlay();
                        mCurrentAudioFocus = newAudioFocus;
                    } else if (newAudioFocus == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT) {
                        sendAvrcpPause();
                        mCurrentAudioFocus = newAudioFocus;
                    } else if (newAudioFocus == AudioManager.AUDIOFOCUS_LOSS) {
                        mAudioManager.abandonAudioFocus(mAudioFocusListener);
                        mCurrentAudioFocus = AudioManager.AUDIOFOCUS_LOSS;
                    }
                    break;

                default:
                    Log.e(TAG, "Don't know how to handle " + message.what);
            }
            return HANDLED;
        }
    }

    private class SRC_T_ACT_F extends State {
        private static final String STATE_TAG = A2dpSinkStreamingStateMachine.TAG + ".SRC_T_ACT_F";

        @Override
        public void enter() {
            if (DBG) {
                Log.d(STATE_TAG, "Enter: " + getCurrentMessage().what);
            }
        }

        @Override
        public boolean processMessage(Message message) {
            if (DBG) {
                Log.d(STATE_TAG, " process message: " + message.what);
            }
            switch (message.what) {
                case SRC_STR_STOP:
                    transitionTo(mSrcFActF);
                    break;

                case ACT_PLAY:
                    deferMessage(message);
                    transitionTo(mSrcTActT);
                    break;

                case DISCONNECT:
                    deferMessage(message);
                    transitionTo(mSrcFActF);
                    break;

                case AUDIO_FOCUS_CHANGE:
                    // If we regain focus from TRANSIENT it means that the remote was playing all
                    // along, although we must have sent a PAUSE (see focus loss in the SRC_T_ACT_T
                    // state). In any case, we should resume music here if that is the case.
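                    // Unlike SRC_T_ACT_T, this state never requests focus itself; it only mirrors
                    // the focus change back to the remote as an AVRCP pause or play.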
                    int newAudioFocus = message.arg1;
                    if (DBG) {
                        Log.d(STATE_TAG,
                                "prev focus " + mCurrentAudioFocus + " new focus " + newAudioFocus);
                    }
                    if (newAudioFocus == AudioManager.AUDIOFOCUS_LOSS ||
                            newAudioFocus == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT) {
                        sendAvrcpPause();
                    } else if (newAudioFocus == AudioManager.AUDIOFOCUS_GAIN) {
                        sendAvrcpPlay();
                    }
                    mCurrentAudioFocus = newAudioFocus;
                    break;

                default:
                    Log.e(TAG, "Don't know how to handle " + message.what);
            }
            return HANDLED;
        }
    }

    private class SRC_T_ACT_T extends State {
        private static final String STATE_TAG = A2dpSinkStreamingStateMachine.TAG + ".SRC_T_ACT_T";
        private boolean mWaitForJitter = false;

        @Override
        public void enter() {
            if (DBG) {
                Log.d(STATE_TAG, "Enter: " + getCurrentMessage().what);
            }
        }

        @Override
        public boolean processMessage(Message message) {
            if (DBG) {
                Log.d(STATE_TAG, " process message: " + message.what);
            }
            switch (message.what) {
                case ACT_PAUSE:
                    // Stop AVRCP updates.
                    stopAvrcpUpdates();
                    stopFluorideStreaming();
                    transitionTo(mSrcTActF);
                    if (mCurrentAudioFocus == AudioManager.AUDIOFOCUS_GAIN) {
                        // If we have focus gain and we still get a pause, the user must have
                        // explicitly pressed PAUSE on the car or the phone. Hence we release focus.
                        mAudioManager.abandonAudioFocus(mAudioFocusListener);
                        mCurrentAudioFocus = AudioManager.AUDIOFOCUS_LOSS;
                    }
                    break;

                case SRC_STR_STOP:
                    stopAvrcpUpdates();
                    stopFluorideStreaming();
                    transitionTo(mSrcFActT);
                    // This could happen for a variety of reasons, including that the remote is
                    // about to (eventually) send us a pause, or the device is going into a call
                    // state, etc. It may also simply be a stutter in the music. Instead of sending
                    // a pause prematurely, we wait for either a pause from the remote or an audio
                    // focus change owing to an ongoing call.
                    break;

                case SRC_STR_START:
                case ACT_PLAY:
                    Log.d(STATE_TAG, "Current Audio Focus " + mCurrentAudioFocus);
                    boolean startStream = true;
                    if (mCurrentAudioFocus == AudioManager.AUDIOFOCUS_LOSS) {
                        if (!requestAudioFocus()) {
                            Log.e(STATE_TAG, "Cannot get focus, hence not starting streaming.");
                            startStream = false;
                        } else {
                            mCurrentAudioFocus = AudioManager.AUDIOFOCUS_GAIN;
                        }
                    }
                    if (startStream) {
                        startAvrcpUpdates();
                        startFluorideStreaming();
                    }
                    // If we did not get focus, it may mean that the device is in a call state and
                    // hence we should wait for an audio focus event.
                    break;

                // On audio focus events we stay in the same state, but they can change how we
                // play back.
                case AUDIO_FOCUS_CHANGE:
                    int newAudioFocus = message.arg1;
                    if (DBG) {
                        Log.d(STATE_TAG,
                                "prev focus " + mCurrentAudioFocus + " new focus " + newAudioFocus);
                    }

                    if (newAudioFocus == AudioManager.AUDIOFOCUS_GAIN) {
                        // We have gained focus so play with 1.0 gain.
                        sendAvrcpPlay();
                        startAvrcpUpdates();
                        startFluorideStreaming();
                        setFluorideAudioTrackGain(1.0f);
                    } else if (newAudioFocus == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK) {
                        // Make the volume duck.
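                        // The duck level comes from config as a percentage of full volume and is
                        // applied as a linear track gain, e.g. the default of 25 maps to a gain
                        // of 0.25f.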
                        int duckPercent =
                                mContext.getResources().getInteger(R.integer.a2dp_sink_duck_percent);
                        if (duckPercent < 0 || duckPercent > 100) {
                            Log.e(STATE_TAG, "Invalid duck percent, using default.");
                            duckPercent = DEFAULT_DUCK_PERCENT;
                        }
                        float duckRatio = (duckPercent / 100.0f);
                        Log.d(STATE_TAG,
                                "Setting reduced gain on transient loss, gain=" + duckRatio);
                        setFluorideAudioTrackGain(duckRatio);
                    } else {
                        // We are either in transient loss or in permanent loss; either way we
                        // should stop streaming.
                        sendAvrcpPause();
                        stopAvrcpUpdates();
                        stopFluorideStreaming();

                        // If it is a permanent focus loss then we should abandon focus here and
                        // wait for the user to explicitly play again.
                        if (newAudioFocus == AudioManager.AUDIOFOCUS_LOSS) {
                            mAudioManager.abandonAudioFocus(mAudioFocusListener);
                            mCurrentAudioFocus = AudioManager.AUDIOFOCUS_LOSS;
                        }
                    }
                    mCurrentAudioFocus = newAudioFocus;
                    break;

                case DISCONNECT:
                    deferMessage(message);
                    transitionTo(mSrcFActF);
                    break;

                default:
                    Log.e(TAG, "Don't know how to handle " + message.what);
            }
            return HANDLED;
        }
    }
}
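
/*
 * Minimal usage sketch. The exact call sites inside A2dpSinkStateMachine are an assumption made
 * for illustration; only the make() factory and the public message constants come from this file.
 *
 *   A2dpSinkStreamingStateMachine mStreaming =
 *           A2dpSinkStreamingStateMachine.make(this, mContext);
 *   // Stack reports that the audio stream started/stopped:
 *   mStreaming.sendMessage(A2dpSinkStreamingStateMachine.SRC_STR_START);
 *   mStreaming.sendMessage(A2dpSinkStreamingStateMachine.SRC_STR_STOP);
 *   // Local or remote AVRCP play/pause intent:
 *   mStreaming.sendMessage(A2dpSinkStreamingStateMachine.ACT_PLAY);
 *   mStreaming.sendMessage(A2dpSinkStreamingStateMachine.ACT_PAUSE);
 *   // A2DP link torn down:
 *   mStreaming.sendMessage(A2dpSinkStreamingStateMachine.DISCONNECT);
 */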