/*
 * Copyright 2012 Sebastian Annies, Hamburg
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.googlecode.mp4parser.authoring.adaptivestreaming;

import com.coremedia.iso.Hex;
import com.coremedia.iso.boxes.SampleDescriptionBox;
import com.coremedia.iso.boxes.SoundMediaHeaderBox;
import com.coremedia.iso.boxes.VideoMediaHeaderBox;
import com.coremedia.iso.boxes.h264.AvcConfigurationBox;
import com.coremedia.iso.boxes.sampleentry.AudioSampleEntry;
import com.coremedia.iso.boxes.sampleentry.VisualSampleEntry;
import com.googlecode.mp4parser.Version;
import com.googlecode.mp4parser.authoring.Movie;
import com.googlecode.mp4parser.authoring.Track;
import com.googlecode.mp4parser.authoring.builder.FragmentIntersectionFinder;
import com.googlecode.mp4parser.boxes.DTSSpecificBox;
import com.googlecode.mp4parser.boxes.EC3SpecificBox;
import com.googlecode.mp4parser.boxes.mp4.ESDescriptorBox;
import com.googlecode.mp4parser.boxes.mp4.objectdescriptors.AudioSpecificConfig;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.*;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.nio.ByteBuffer;
import java.util.LinkedList;
import java.util.List;
import java.util.logging.Logger;

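/**
 * Writes a flat Smooth Streaming client manifest ({@code SmoothStreamingMedia} XML document)
 * for the video and audio tracks of a {@link Movie}: one {@code StreamIndex} per track type,
 * one {@code QualityLevel} per track and one {@code c} (chunk) element per fragment.
 */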
public class FlatManifestWriterImpl extends AbstractManifestWriter {
    private static final Logger LOG = Logger.getLogger(FlatManifestWriterImpl.class.getName());

    protected FlatManifestWriterImpl(FragmentIntersectionFinder intersectionFinder) {
        super(intersectionFinder);
    }

    /**
     * Override this method in subclasses to add your own customizations.
     *
     * @param manifest the original manifest
     * @return your customized version of the manifest
     */
    protected Document customizeManifest(Document manifest) {
        return manifest;
    }
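
    // A minimal sketch of how a subclass might use the hook above (hypothetical subclass name):
    //
    //     public class BrandedManifestWriter extends FlatManifestWriterImpl {
    //         public BrandedManifestWriter(FragmentIntersectionFinder finder) {
    //             super(finder);
    //         }
    //
    //         @Override
    //         protected Document customizeManifest(Document manifest) {
    //             manifest.getDocumentElement().appendChild(
    //                     manifest.createComment("generated for example.com"));
    //             return manifest;
    //         }
    //     }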

    public String getManifest(Movie movie) throws IOException {

        LinkedList<VideoQuality> videoQualities = new LinkedList<VideoQuality>();
        long videoTimescale = -1;

        LinkedList<AudioQuality> audioQualities = new LinkedList<AudioQuality>();
        long audioTimescale = -1;

        for (Track track : movie.getTracks()) {
            if (track.getMediaHeaderBox() instanceof VideoMediaHeaderBox) {
                videoFragmentsDurations = checkFragmentsAlign(videoFragmentsDurations, calculateFragmentDurations(track, movie));
                SampleDescriptionBox stsd = track.getSampleDescriptionBox();
                videoQualities.add(getVideoQuality(track, (VisualSampleEntry) stsd.getSampleEntry()));
                if (videoTimescale == -1) {
                    videoTimescale = track.getTrackMetaData().getTimescale();
                } else {
                    assert videoTimescale == track.getTrackMetaData().getTimescale();
                }
            }
            if (track.getMediaHeaderBox() instanceof SoundMediaHeaderBox) {
                audioFragmentsDurations = checkFragmentsAlign(audioFragmentsDurations, calculateFragmentDurations(track, movie));
                SampleDescriptionBox stsd = track.getSampleDescriptionBox();
                audioQualities.add(getAudioQuality(track, (AudioSampleEntry) stsd.getSampleEntry()));
                if (audioTimescale == -1) {
                    audioTimescale = track.getTrackMetaData().getTimescale();
                } else {
                    assert audioTimescale == track.getTrackMetaData().getTimescale();
                }

            }
        }
        DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder documentBuilder;
        try {
            documentBuilder = documentBuilderFactory.newDocumentBuilder();
        } catch (ParserConfigurationException e) {
            throw new IOException(e);
        }
        Document document = documentBuilder.newDocument();

        Element smoothStreamingMedia = document.createElement("SmoothStreamingMedia");
        document.appendChild(smoothStreamingMedia);
        smoothStreamingMedia.setAttribute("MajorVersion", "2");
        smoothStreamingMedia.setAttribute("MinorVersion", "1");
// silverlight ignores the timescale attr        smoothStreamingMedia.addAttribute(new Attribute("TimeScale", Long.toString(movieTimeScale)));
        smoothStreamingMedia.setAttribute("Duration", "0");

        smoothStreamingMedia.appendChild(document.createComment(Version.VERSION));
        Element videoStreamIndex = document.createElement("StreamIndex");
        videoStreamIndex.setAttribute("Type", "video");
        videoStreamIndex.setAttribute("TimeScale", Long.toString(videoTimescale)); // silverlight ignores the timescale attr
        videoStreamIndex.setAttribute("Chunks", Integer.toString(videoFragmentsDurations.length));
        videoStreamIndex.setAttribute("Url", "video/{bitrate}/{start time}");
        videoStreamIndex.setAttribute("QualityLevels", Integer.toString(videoQualities.size()));
        smoothStreamingMedia.appendChild(videoStreamIndex);

        for (int i = 0; i < videoQualities.size(); i++) {
            VideoQuality vq = videoQualities.get(i);
            Element qualityLevel = document.createElement("QualityLevel");
            qualityLevel.setAttribute("Index", Integer.toString(i));
            qualityLevel.setAttribute("Bitrate", Long.toString(vq.bitrate));
            qualityLevel.setAttribute("FourCC", vq.fourCC);
            qualityLevel.setAttribute("MaxWidth", Long.toString(vq.width));
            qualityLevel.setAttribute("MaxHeight", Long.toString(vq.height));
            qualityLevel.setAttribute("CodecPrivateData", vq.codecPrivateData);
            qualityLevel.setAttribute("NALUnitLengthField", Integer.toString(vq.nalLength));
            videoStreamIndex.appendChild(qualityLevel);
        }

        for (int i = 0; i < videoFragmentsDurations.length; i++) {
            Element c = document.createElement("c");
            c.setAttribute("n", Integer.toString(i));
            c.setAttribute("d", Long.toString(videoFragmentsDurations[i]));
            videoStreamIndex.appendChild(c);
        }

        if (audioFragmentsDurations != null) {
            Element audioStreamIndex = document.createElement("StreamIndex");
            audioStreamIndex.setAttribute("Type", "audio");
            audioStreamIndex.setAttribute("TimeScale", Long.toString(audioTimescale)); // silverlight ignores the timescale attr
            audioStreamIndex.setAttribute("Chunks", Integer.toString(audioFragmentsDurations.length));
            audioStreamIndex.setAttribute("Url", "audio/{bitrate}/{start time}");
            audioStreamIndex.setAttribute("QualityLevels", Integer.toString(audioQualities.size()));
            smoothStreamingMedia.appendChild(audioStreamIndex);

            for (int i = 0; i < audioQualities.size(); i++) {
                AudioQuality aq = audioQualities.get(i);
                Element qualityLevel = document.createElement("QualityLevel");
                qualityLevel.setAttribute("Index", Integer.toString(i));
                qualityLevel.setAttribute("FourCC", aq.fourCC);
                qualityLevel.setAttribute("Bitrate", Long.toString(aq.bitrate));
                qualityLevel.setAttribute("AudioTag", Integer.toString(aq.audioTag));
                qualityLevel.setAttribute("SamplingRate", Long.toString(aq.samplingRate));
                qualityLevel.setAttribute("Channels", Integer.toString(aq.channels));
                qualityLevel.setAttribute("BitsPerSample", Integer.toString(aq.bitPerSample));
                qualityLevel.setAttribute("PacketSize", Integer.toString(aq.packetSize));
                qualityLevel.setAttribute("CodecPrivateData", aq.codecPrivateData);
                audioStreamIndex.appendChild(qualityLevel);
            }
            for (int i = 0; i < audioFragmentsDurations.length; i++) {
                Element c = document.createElement("c");
                c.setAttribute("n", Integer.toString(i));
                c.setAttribute("d", Long.toString(audioFragmentsDurations[i]));
                audioStreamIndex.appendChild(c);
            }
        }

        document.setXmlStandalone(true);
        document = customizeManifest(document); // give subclasses a chance to adjust the manifest before serialization
        Source source = new DOMSource(document);
        StringWriter stringWriter = new StringWriter();
        Result result = new StreamResult(stringWriter);
        TransformerFactory factory = TransformerFactory.newInstance();
        Transformer transformer;
        try {
            transformer = factory.newTransformer();
            transformer.setOutputProperty(OutputKeys.INDENT, "yes");
            transformer.transform(source, result);
        } catch (TransformerConfigurationException e) {
            throw new IOException(e);
        } catch (TransformerException e) {
            throw new IOException(e);
        }
        return stringWriter.getBuffer().toString();
    }
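
    /*
     * For orientation, getManifest() emits a document of roughly this shape
     * (attribute values are illustrative only):
     *
     *   <SmoothStreamingMedia MajorVersion="2" MinorVersion="1" Duration="0">
     *     <StreamIndex Type="video" TimeScale="..." Chunks="..." Url="video/{bitrate}/{start time}" QualityLevels="...">
     *       <QualityLevel Index="0" Bitrate="..." FourCC="AVC1" MaxWidth="..." MaxHeight="..."
     *                     CodecPrivateData="..." NALUnitLengthField="4"/>
     *       <c n="0" d="..."/>
     *     </StreamIndex>
     *     <StreamIndex Type="audio" TimeScale="..." Chunks="..." Url="audio/{bitrate}/{start time}" QualityLevels="...">
     *       <QualityLevel Index="0" FourCC="AACL" Bitrate="..." AudioTag="255" SamplingRate="..."
     *                     Channels="..." BitsPerSample="..." PacketSize="..." CodecPrivateData="..."/>
     *       <c n="0" d="..."/>
     *     </StreamIndex>
     *   </SmoothStreamingMedia>
     */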

    private AudioQuality getAudioQuality(Track track, AudioSampleEntry ase) {
        if (getFormat(ase).equals("mp4a")) {
            return getAacAudioQuality(track, ase);
        } else if (getFormat(ase).equals("ec-3")) {
            return getEc3AudioQuality(track, ase);
        } else if (getFormat(ase).startsWith("dts")) {
            return getDtsAudioQuality(track, ase);
        } else {
            throw new InternalError("I don't know what to do with audio of type " + getFormat(ase));
        }

    }

    private AudioQuality getAacAudioQuality(Track track, AudioSampleEntry ase) {
        AudioQuality l = new AudioQuality();
        final ESDescriptorBox esDescriptorBox = ase.getBoxes(ESDescriptorBox.class).get(0);
        final AudioSpecificConfig audioSpecificConfig = esDescriptorBox.getEsDescriptor().getDecoderConfigDescriptor().getAudioSpecificInfo();
        if (audioSpecificConfig.getSbrPresentFlag() == 1) {
            l.fourCC = "AACH";
        } else if (audioSpecificConfig.getPsPresentFlag() == 1) {
            l.fourCC = "AACP"; // not sure whether this is what MS considers AAC+; AAC+ and HE-AAC should really be the same...
        } else {
            l.fourCC = "AACL";
        }
        l.bitrate = getBitrate(track);
        l.audioTag = 255;
        l.samplingRate = ase.getSampleRate();
        l.channels = ase.getChannelCount();
        l.bitPerSample = ase.getSampleSize();
        l.packetSize = 4;
        l.codecPrivateData = getAudioCodecPrivateData(audioSpecificConfig);
        //Index="0" Bitrate="103000" AudioTag="255" SamplingRate="44100" Channels="2" BitsPerSample="16" packetSize="4" CodecPrivateData=""
        return l;
    }

    private AudioQuality getEc3AudioQuality(Track track, AudioSampleEntry ase) {
        final List<EC3SpecificBox> ec3SpecificBoxes = ase.getBoxes(EC3SpecificBox.class);
        if (ec3SpecificBoxes.isEmpty()) {
            throw new RuntimeException("EC-3 track misses EC3SpecificBox!");
        }
        final EC3SpecificBox ec3SpecificBox = ec3SpecificBoxes.get(0);

        short nfchans = 0; //full bandwidth channels
        short lfechans = 0;
        byte dWChannelMaskFirstByte = 0;
        byte dWChannelMaskSecondByte = 0;
        for (EC3SpecificBox.Entry entry : ec3SpecificBox.getEntries()) {
            /*
            Table 4.3: Audio coding mode
            acmod Audio coding mode Nfchans Channel array ordering
            000 1 + 1 2 Ch1, Ch2
            001 1/0 1 C
            010 2/0 2 L, R
            011 3/0 3 L, C, R
            100 2/1 3 L, R, S
            101 3/1 4 L, C, R, S
            110 2/2 4 L, R, SL, SR
            111 3/2 5 L, C, R, SL, SR

            Table F.2: Chan_loc field bit assignments
            Bit Location
            0 Lc/Rc pair
            1 Lrs/Rrs pair
            2 Cs
            3 Ts
            4 Lsd/Rsd pair
            5 Lw/Rw pair
            6 Lvh/Rvh pair
            7 Cvh
            8 LFE2
            */
            switch (entry.acmod) {
                case 0: //1+1; Ch1, Ch2
                    nfchans += 2;
                    throw new RuntimeException("Smooth Streaming doesn't support DDP 1+1 mode");
                case 1: //1/0; C
                    nfchans += 1;
                    if (entry.num_dep_sub > 0) {
                        DependentSubstreamMask dependentSubstreamMask = new DependentSubstreamMask(dWChannelMaskFirstByte, dWChannelMaskSecondByte, entry).process();
                        dWChannelMaskFirstByte |= dependentSubstreamMask.getdWChannelMaskFirstByte();
                        dWChannelMaskSecondByte |= dependentSubstreamMask.getdWChannelMaskSecondByte();
                    } else {
                        dWChannelMaskFirstByte |= 0x20;
                    }
                    break;
                case 2: //2/0; L, R
                    nfchans += 2;
                    if (entry.num_dep_sub > 0) {
                        DependentSubstreamMask dependentSubstreamMask = new DependentSubstreamMask(dWChannelMaskFirstByte, dWChannelMaskSecondByte, entry).process();
                        dWChannelMaskFirstByte |= dependentSubstreamMask.getdWChannelMaskFirstByte();
                        dWChannelMaskSecondByte |= dependentSubstreamMask.getdWChannelMaskSecondByte();
                    } else {
                        dWChannelMaskFirstByte |= 0xC0;
                    }
                    break;
                case 3: //3/0; L, C, R
                    nfchans += 3;
                    if (entry.num_dep_sub > 0) {
                        DependentSubstreamMask dependentSubstreamMask = new DependentSubstreamMask(dWChannelMaskFirstByte, dWChannelMaskSecondByte, entry).process();
                        dWChannelMaskFirstByte |= dependentSubstreamMask.getdWChannelMaskFirstByte();
                        dWChannelMaskSecondByte |= dependentSubstreamMask.getdWChannelMaskSecondByte();
                    } else {
                        dWChannelMaskFirstByte |= 0xE0;
                    }
                    break;
                case 4: //2/1; L, R, S
                    nfchans += 3;
                    if (entry.num_dep_sub > 0) {
                        DependentSubstreamMask dependentSubstreamMask = new DependentSubstreamMask(dWChannelMaskFirstByte, dWChannelMaskSecondByte, entry).process();
                        dWChannelMaskFirstByte |= dependentSubstreamMask.getdWChannelMaskFirstByte();
                        dWChannelMaskSecondByte |= dependentSubstreamMask.getdWChannelMaskSecondByte();
                    } else {
                        dWChannelMaskFirstByte |= 0xC0;
                        dWChannelMaskSecondByte |= 0x80;
                    }
                    break;
                case 5: //3/1; L, C, R, S
                    nfchans += 4;
                    if (entry.num_dep_sub > 0) {
                        DependentSubstreamMask dependentSubstreamMask = new DependentSubstreamMask(dWChannelMaskFirstByte, dWChannelMaskSecondByte, entry).process();
                        dWChannelMaskFirstByte |= dependentSubstreamMask.getdWChannelMaskFirstByte();
                        dWChannelMaskSecondByte |= dependentSubstreamMask.getdWChannelMaskSecondByte();
                    } else {
                        dWChannelMaskFirstByte |= 0xE0;
                        dWChannelMaskSecondByte |= 0x80;
                    }
                    break;
                case 6: //2/2; L, R, SL, SR
                    nfchans += 4;
                    if (entry.num_dep_sub > 0) {
                        DependentSubstreamMask dependentSubstreamMask = new DependentSubstreamMask(dWChannelMaskFirstByte, dWChannelMaskSecondByte, entry).process();
                        dWChannelMaskFirstByte |= dependentSubstreamMask.getdWChannelMaskFirstByte();
                        dWChannelMaskSecondByte |= dependentSubstreamMask.getdWChannelMaskSecondByte();
                    } else {
                        dWChannelMaskFirstByte |= 0xCC;
                    }
                    break;
                case 7: //3/2; L, C, R, SL, SR
                    nfchans += 5;
                    if (entry.num_dep_sub > 0) {
                        DependentSubstreamMask dependentSubstreamMask = new DependentSubstreamMask(dWChannelMaskFirstByte, dWChannelMaskSecondByte, entry).process();
                        dWChannelMaskFirstByte |= dependentSubstreamMask.getdWChannelMaskFirstByte();
                        dWChannelMaskSecondByte |= dependentSubstreamMask.getdWChannelMaskSecondByte();
                    } else {
                        dWChannelMaskFirstByte |= 0xEC;
                    }
                    break;
            }
            if (entry.lfeon == 1) {
                lfechans++;
                dWChannelMaskFirstByte |= 0x10;
            }
        }

        final ByteBuffer waveformatex = ByteBuffer.allocate(22);
        waveformatex.put(new byte[]{0x00, 0x06}); //1536 wSamplesPerBlock - little endian
        waveformatex.put(dWChannelMaskFirstByte);
        waveformatex.put(dWChannelMaskSecondByte);
        waveformatex.put(new byte[]{0x00, 0x00}); //pad dwChannelMask to 32bit
        waveformatex.put(new byte[]{(byte) 0xAF, (byte) 0x87, (byte) 0xFB, (byte) 0xA7, 0x02, 0x2D, (byte) 0xFB, 0x42, (byte) 0xA4, (byte) 0xD4, 0x05, (byte) 0xCD, (byte) 0x93, (byte) 0x84, 0x3B, (byte) 0xDD}); //SubFormat - Dolby Digital Plus GUID

        final ByteBuffer dec3Content = ByteBuffer.allocate((int) ec3SpecificBox.getContentSize());
        ec3SpecificBox.getContent(dec3Content);

        AudioQuality l = new AudioQuality();
        l.fourCC = "EC-3";
        l.bitrate = getBitrate(track);
        l.audioTag = 65534;
        l.samplingRate = ase.getSampleRate();
        l.channels = nfchans + lfechans;
        l.bitPerSample = 16;
        l.packetSize = track.getSamples().get(0).limit(); //assuming all are same size
        l.codecPrivateData = Hex.encodeHex(waveformatex.array()) + Hex.encodeHex(dec3Content.array()); //append EC3SpecificBox (big endian) at the end of waveformatex
        return l;
    }

    private AudioQuality getDtsAudioQuality(Track track, AudioSampleEntry ase) {
        final List<DTSSpecificBox> dtsSpecificBoxes = ase.getBoxes(DTSSpecificBox.class);
        if (dtsSpecificBoxes.isEmpty()) {
            throw new RuntimeException("DTS track misses DTSSpecificBox!");
        }
        final DTSSpecificBox dtsSpecificBox = dtsSpecificBoxes.get(0);

        final ByteBuffer waveformatex = ByteBuffer.allocate(22);
        final int frameDuration = dtsSpecificBox.getFrameDuration();
        short samplesPerBlock = 0;
        switch (frameDuration) {
            case 0:
                samplesPerBlock = 512;
                break;
            case 1:
                samplesPerBlock = 1024;
                break;
            case 2:
                samplesPerBlock = 2048;
                break;
            case 3:
                samplesPerBlock = 4096;
                break;
        }
        waveformatex.put((byte) (samplesPerBlock & 0xff));
        waveformatex.put((byte) (samplesPerBlock >>> 8));
        final int dwChannelMask = getNumChannelsAndMask(dtsSpecificBox)[1];
        waveformatex.put((byte) (dwChannelMask & 0xff));
        waveformatex.put((byte) (dwChannelMask >>> 8));
        waveformatex.put((byte) (dwChannelMask >>> 16));
        waveformatex.put((byte) (dwChannelMask >>> 24));
        waveformatex.put(new byte[]{(byte) 0xAE, (byte) 0xE4, (byte) 0xBF, (byte) 0x5E, (byte) 0x61, (byte) 0x5E, (byte) 0x41, (byte) 0x87, (byte) 0x92, (byte) 0xFC, (byte) 0xA4, (byte) 0x81, (byte) 0x26, (byte) 0x99, (byte) 0x02, (byte) 0x11}); //DTS-HD GUID

        final ByteBuffer dtsCodecPrivateData = ByteBuffer.allocate(8);
        dtsCodecPrivateData.put((byte) dtsSpecificBox.getStreamConstruction());

        final int channelLayout = dtsSpecificBox.getChannelLayout();
        dtsCodecPrivateData.put((byte) (channelLayout & 0xff));
        dtsCodecPrivateData.put((byte) (channelLayout >>> 8));
        dtsCodecPrivateData.put((byte) (channelLayout >>> 16));
        dtsCodecPrivateData.put((byte) (channelLayout >>> 24));

        byte dtsFlags = (byte) (dtsSpecificBox.getMultiAssetFlag() << 1);
        dtsFlags |= dtsSpecificBox.getLBRDurationMod();
        dtsCodecPrivateData.put(dtsFlags);
        dtsCodecPrivateData.put(new byte[]{0x00, 0x00}); //reserved

        AudioQuality l = new AudioQuality();
        l.fourCC = getFormat(ase);
        l.bitrate = dtsSpecificBox.getAvgBitRate();
        l.audioTag = 65534;
        l.samplingRate = dtsSpecificBox.getDTSSamplingFrequency();
        l.channels = getNumChannelsAndMask(dtsSpecificBox)[0];
        l.bitPerSample = 16;
        l.packetSize = track.getSamples().get(0).limit(); //assuming all are same size
        l.codecPrivateData = Hex.encodeHex(waveformatex.array()) + Hex.encodeHex(dtsCodecPrivateData.array());
        return l;
    }

    /* dwChannelMask
    L SPEAKER_FRONT_LEFT 0x00000001
    R SPEAKER_FRONT_RIGHT 0x00000002
    C SPEAKER_FRONT_CENTER 0x00000004
    LFE1 SPEAKER_LOW_FREQUENCY 0x00000008
    Ls or Lsr* SPEAKER_BACK_LEFT 0x00000010
    Rs or Rsr* SPEAKER_BACK_RIGHT 0x00000020
    Lc SPEAKER_FRONT_LEFT_OF_CENTER 0x00000040
    Rc SPEAKER_FRONT_RIGHT_OF_CENTER 0x00000080
    Cs SPEAKER_BACK_CENTER 0x00000100
    Lss SPEAKER_SIDE_LEFT 0x00000200
    Rss SPEAKER_SIDE_RIGHT 0x00000400
    Oh SPEAKER_TOP_CENTER 0x00000800
    Lh SPEAKER_TOP_FRONT_LEFT 0x00001000
    Ch SPEAKER_TOP_FRONT_CENTER 0x00002000
    Rh SPEAKER_TOP_FRONT_RIGHT 0x00004000
    Lhr SPEAKER_TOP_BACK_LEFT 0x00008000
    Chf SPEAKER_TOP_BACK_CENTER 0x00010000
    Rhr SPEAKER_TOP_BACK_RIGHT 0x00020000
    SPEAKER_RESERVED 0x80000000

    * if Lss, Rss exist, then this position is equivalent to Lsr, Rsr respectively
     */
    private int[] getNumChannelsAndMask(DTSSpecificBox dtsSpecificBox) {
        final int channelLayout = dtsSpecificBox.getChannelLayout();
        int numChannels = 0;
        int dwChannelMask = 0;
        if ((channelLayout & 0x0001) == 0x0001) {
            //0001h Center in front of listener 1
            numChannels += 1;
            dwChannelMask |= 0x00000004; //SPEAKER_FRONT_CENTER
        }
        if ((channelLayout & 0x0002) == 0x0002) {
            //0002h Left/Right in front 2
            numChannels += 2;
            dwChannelMask |= 0x00000001; //SPEAKER_FRONT_LEFT
            dwChannelMask |= 0x00000002; //SPEAKER_FRONT_RIGHT
        }
        if ((channelLayout & 0x0004) == 0x0004) {
            //0004h Left/Right surround on side in rear 2
            numChannels += 2;
            //* if Lss, Rss exist, then this position is equivalent to Lsr, Rsr respectively
            dwChannelMask |= 0x00000010; //SPEAKER_BACK_LEFT
            dwChannelMask |= 0x00000020; //SPEAKER_BACK_RIGHT
        }
        if ((channelLayout & 0x0008) == 0x0008) {
            //0008h Low frequency effects subwoofer 1
            numChannels += 1;
            dwChannelMask |= 0x00000008; //SPEAKER_LOW_FREQUENCY
        }
        if ((channelLayout & 0x0010) == 0x0010) {
            //0010h Center surround in rear 1
            numChannels += 1;
            dwChannelMask |= 0x00000100; //SPEAKER_BACK_CENTER
        }
        if ((channelLayout & 0x0020) == 0x0020) {
            //0020h Left/Right height in front 2
            numChannels += 2;
            dwChannelMask |= 0x00001000; //SPEAKER_TOP_FRONT_LEFT
            dwChannelMask |= 0x00004000; //SPEAKER_TOP_FRONT_RIGHT
        }
        if ((channelLayout & 0x0040) == 0x0040) {
            //0040h Left/Right surround in rear 2
            numChannels += 2;
            dwChannelMask |= 0x00000010; //SPEAKER_BACK_LEFT
            dwChannelMask |= 0x00000020; //SPEAKER_BACK_RIGHT
        }
        if ((channelLayout & 0x0080) == 0x0080) {
            //0080h Center Height in front 1
            numChannels += 1;
            dwChannelMask |= 0x00002000; //SPEAKER_TOP_FRONT_CENTER
        }
        if ((channelLayout & 0x0100) == 0x0100) {
            //0100h Over the listeners head 1
            numChannels += 1;
            dwChannelMask |= 0x00000800; //SPEAKER_TOP_CENTER
        }
        if ((channelLayout & 0x0200) == 0x0200) {
            //0200h Between left/right and center in front 2
            numChannels += 2;
            dwChannelMask |= 0x00000040; //SPEAKER_FRONT_LEFT_OF_CENTER
            dwChannelMask |= 0x00000080; //SPEAKER_FRONT_RIGHT_OF_CENTER
        }
        if ((channelLayout & 0x0400) == 0x0400) {
            //0400h Left/Right on side in front 2
            numChannels += 2;
            dwChannelMask |= 0x00000200; //SPEAKER_SIDE_LEFT
            dwChannelMask |= 0x00000400; //SPEAKER_SIDE_RIGHT
        }
        if ((channelLayout & 0x0800) == 0x0800) {
            //0800h Left/Right surround on side 2
            numChannels += 2;
            //* if Lss, Rss exist, then this position is equivalent to Lsr, Rsr respectively
            dwChannelMask |= 0x00000010; //SPEAKER_BACK_LEFT
            dwChannelMask |= 0x00000020; //SPEAKER_BACK_RIGHT
        }
        if ((channelLayout & 0x1000) == 0x1000) {
            //1000h Second low frequency effects subwoofer 1
            numChannels += 1;
            dwChannelMask |= 0x00000008; //SPEAKER_LOW_FREQUENCY
        }
        if ((channelLayout & 0x2000) == 0x2000) {
            //2000h Left/Right height on side 2
            numChannels += 2;
            dwChannelMask |= 0x00000010; //SPEAKER_BACK_LEFT
            dwChannelMask |= 0x00000020; //SPEAKER_BACK_RIGHT
        }
        if ((channelLayout & 0x4000) == 0x4000) {
            //4000h Center height in rear 1
            numChannels += 1;
            dwChannelMask |= 0x00010000; //SPEAKER_TOP_BACK_CENTER
        }
        if ((channelLayout & 0x8000) == 0x8000) {
            //8000h Left/Right height in rear 2
            numChannels += 2;
            dwChannelMask |= 0x00008000; //SPEAKER_TOP_BACK_LEFT
            dwChannelMask |= 0x00020000; //SPEAKER_TOP_BACK_RIGHT
        }
        if ((channelLayout & 0x10000) == 0x10000) {
            //10000h Center below in front
            numChannels += 1;
        }
        if ((channelLayout & 0x20000) == 0x20000) {
            //20000h Left/Right below in front
            numChannels += 2;
        }
        return new int[]{numChannels, dwChannelMask};
    }

    private String getAudioCodecPrivateData(AudioSpecificConfig audioSpecificConfig) {
        byte[] configByteArray = audioSpecificConfig.getConfigBytes();
        return Hex.encodeHex(configByteArray);
    }

    private VideoQuality getVideoQuality(Track track, VisualSampleEntry vse) {
        VideoQuality l;
        if ("avc1".equals(getFormat(vse))) {
            AvcConfigurationBox avcConfigurationBox = vse.getBoxes(AvcConfigurationBox.class).get(0);
            l = new VideoQuality();
            l.bitrate = getBitrate(track);
            l.codecPrivateData = Hex.encodeHex(getAvcCodecPrivateData(avcConfigurationBox));
            l.fourCC = "AVC1";
            l.width = vse.getWidth();
            l.height = vse.getHeight();
            l.nalLength = avcConfigurationBox.getLengthSizeMinusOne() + 1;
        } else {
            throw new InternalError("I don't know how to handle video of type " + getFormat(vse));
        }
        return l;
    }

    private byte[] getAvcCodecPrivateData(AvcConfigurationBox avcConfigurationBox) {
        List<byte[]> sps = avcConfigurationBox.getSequenceParameterSets();
        List<byte[]> pps = avcConfigurationBox.getPictureParameterSets();
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try {
            baos.write(new byte[]{0, 0, 0, 1});

            for (byte[] sp : sps) {
                baos.write(sp);
            }
            baos.write(new byte[]{0, 0, 0, 1});
            for (byte[] pp : pps) {
                baos.write(pp);
            }
        } catch (IOException ex) {
            throw new RuntimeException("ByteArrayOutputStream does not throw IOException", ex);
        }
        return baos.toByteArray();
    }
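
    // Note: the array built above is start code + all SPS NAL units + start code + all PPS NAL units
    // (00 00 00 01 ... 00 00 00 01 ...); getVideoQuality() hex-encodes it into the video
    // QualityLevel's CodecPrivateData attribute.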

    private class DependentSubstreamMask {
        private byte dWChannelMaskFirstByte;
        private byte dWChannelMaskSecondByte;
        private EC3SpecificBox.Entry entry;

        public DependentSubstreamMask(byte dWChannelMaskFirstByte, byte dWChannelMaskSecondByte, EC3SpecificBox.Entry entry) {
            this.dWChannelMaskFirstByte = dWChannelMaskFirstByte;
            this.dWChannelMaskSecondByte = dWChannelMaskSecondByte;
            this.entry = entry;
        }

        public byte getdWChannelMaskFirstByte() {
            return dWChannelMaskFirstByte;
        }

        public byte getdWChannelMaskSecondByte() {
            return dWChannelMaskSecondByte;
        }

        public DependentSubstreamMask process() {
            switch (entry.chan_loc) {
                case 0:
                    dWChannelMaskFirstByte |= 0x3;
                    break;
                case 1:
                    dWChannelMaskFirstByte |= 0xC;
                    break;
                case 2:
                    dWChannelMaskSecondByte |= 0x80;
                    break;
                case 3:
                    dWChannelMaskSecondByte |= 0x8;
                    break;
                case 6:
                    dWChannelMaskSecondByte |= 0x5;
                    break;
                case 7:
                    dWChannelMaskSecondByte |= 0x2;
                    break;
            }
            return this;
        }
    }
}