/* libs/opengles/Tokenizer.cpp
**
** Copyright 2006, The Android Open Source Project
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
**     http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/

#include <stdio.h>

#include <utils/Log.h>   // ALOGD(), used by dump() below

#include "Tokenizer.h"

// ----------------------------------------------------------------------------

namespace android {

ANDROID_BASIC_TYPES_TRAITS(Tokenizer::run_t)

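// Tokenizer hands out 32-bit tokens and remembers which ones are in use.
// Acquired tokens are stored as a sorted vector of runs (first, length),
// i.e. ranges of consecutive token values, so dense allocations stay compact.
//
// A minimal usage sketch (illustrative only, assuming the Android utils
// types pulled in by Tokenizer.h):
//
//   Tokenizer t;
//   uint32_t a = t.acquire();   // returns 0, runs = { (0,1) }
//   uint32_t b = t.acquire();   // returns 1, runs = { (0,2) }
//   t.reserve(5);               // runs = { (0,2), (5,1) }
//   t.release(a);               // runs = { (1,1), (5,1) }
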
Tokenizer::Tokenizer()
{
}

Tokenizer::Tokenizer(const Tokenizer& other)
    : mRanges(other.mRanges)
{
}

Tokenizer::~Tokenizer()
{
}

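// Acquire the smallest token not currently in use: either token 0 when it is
// free, or the token just past the first run (runs are kept merged, so that
// value is always available).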
uint32_t Tokenizer::acquire()
{
    if (!mRanges.size() || mRanges[0].first) {
        _insertTokenAt(0,0);
        return 0;
    }

    // just extend the first run
    const run_t& run = mRanges[0];
    uint32_t token = run.first + run.length;
    _insertTokenAt(token, 1);
    return token;
}

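// Returns true if the given token is currently acquired, i.e. falls inside
// one of the stored runs.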
bool Tokenizer::isAcquired(uint32_t token) const
{
    return (_indexOrderOf(token) >= 0);
}

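// Mark a specific token as acquired. Fails with BAD_VALUE if the token is
// already taken; otherwise inserts it at its sorted position.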
status_t Tokenizer::reserve(uint32_t token)
{
    size_t o;
    const ssize_t i = _indexOrderOf(token, &o);
    if (i >= 0) {
        return BAD_VALUE; // this token is already taken
    }
    ssize_t err = _insertTokenAt(token, o);
    return (err<0) ? err : status_t(NO_ERROR);
}

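// Release a previously acquired token. The run containing it is shrunk from
// either end, removed when it becomes empty, or split in two when the token
// falls in its middle. Returns NAME_NOT_FOUND if the token wasn't acquired.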
status_t Tokenizer::release(uint32_t token)
{
    const ssize_t i = _indexOrderOf(token);
    if (i >= 0) {
        const run_t& run = mRanges[i];
        if ((token >= run.first) && (token < run.first+run.length)) {
            // token in this range, we need to split
            run_t& run = mRanges.editItemAt(i);
            if ((token == run.first) || (token == run.first+run.length-1)) {
                if (token == run.first) {
                    run.first += 1;
                }
                run.length -= 1;
                if (run.length == 0) {
                    // XXX: should we systematically remove a run that's empty?
                    mRanges.removeItemsAt(i);
                }
            } else {
                // split the run
                run_t new_run;
                new_run.first = token+1;
                new_run.length = run.first+run.length - new_run.first;
                run.length = token - run.first;
                mRanges.insertAt(new_run, i+1);
            }
            return NO_ERROR;
        }
    }
    return NAME_NOT_FOUND;
}

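// Binary search for the run containing 'token'. Returns the run's index, or
// NAME_NOT_FOUND if the token is free. If 'order' is non-null it receives the
// index at which a new run for this token would have to be inserted.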
ssize_t Tokenizer::_indexOrderOf(uint32_t token, size_t* order) const
{
    // binary search
    ssize_t err = NAME_NOT_FOUND;
    ssize_t l = 0;
    ssize_t h = mRanges.size()-1;
    ssize_t mid;
    const run_t* a = mRanges.array();
    while (l <= h) {
        mid = l + (h - l)/2;
        const run_t* const curr = a + mid;
        int c = 0;
        if (token < curr->first)                        c = 1;
        else if (token >= curr->first+curr->length)     c = -1;
        if (c == 0) {
            err = l = mid;
            break;
        } else if (c < 0) {
            l = mid + 1;
        } else {
            h = mid - 1;
        }
    }
    if (order) *order = l;
    return err;
}

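// Insert a single token as a new run at 'index', merging it into the previous
// and/or next run when the token is contiguous with them. On success the
// return value is non-negative (callers only check for errors); a negative
// error from Vector::insertAt() is propagated.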
ssize_t Tokenizer::_insertTokenAt(uint32_t token, size_t index)
{
    const size_t c = mRanges.size();

    if (index >= 1) {
        // do we need to merge with the previous run?
        run_t& p = mRanges.editItemAt(index-1);
        if (p.first+p.length == token) {
            p.length += 1;
            if (index < c) {
                const run_t& n = mRanges[index];
                if (token+1 == n.first) {
                    p.length += n.length;
                    mRanges.removeItemsAt(index);
                }
            }
            return index;
        }
    }

    if (index < c) {
        // do we need to merge with the next run?
        run_t& n = mRanges.editItemAt(index);
        if (token+1 == n.first) {
            n.first -= 1;
            n.length += 1;
            return index;
        }
    }

    return mRanges.insertAt(run_t(token,1), index);
}

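// Log the current set of runs, one "(first, length)" pair per line.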
void Tokenizer::dump() const
{
    const run_t* ranges = mRanges.array();
    const size_t c = mRanges.size();
    ALOGD("Tokenizer (%p, size = %zu)\n", this, c);
    for (size_t i=0 ; i<c ; i++) {
        ALOGD("%zu: (%u, %u)\n", i, ranges[i].first, ranges[i].length);
    }
}

}; // namespace android