# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

from datetime import datetime
import logging

from google.appengine.ext import db

# A file is stored as at most MAX_DATA_ENTRY_PER_FILE segments of at most
# MAX_ENTRY_LEN bytes each, so a single file may hold roughly 10 MB.
MAX_DATA_ENTRY_PER_FILE = 10
MAX_ENTRY_LEN = 1000 * 1000


class DataEntry(db.Model):
    """Datastore entry that stores one segment of file data
       (<1000*1000 bytes).
    """

    data = db.BlobProperty()

    @classmethod
    def get(cls, key):
        return db.get(key)

    def get_data(self, key):
        return db.get(key)

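
# The helper below is not part of the original module; it is a minimal sketch,
# assuming a configured App Engine environment, of storing and fetching one
# segment through DataEntry directly. The helper name is hypothetical; real
# callers go through DataStoreFile below.
def _example_store_segment(segment_bytes):
    entry = DataEntry()
    entry.data = db.Blob(segment_bytes)
    key = entry.put()
    # Fetching by key returns the stored entity, or None if it was deleted.
    return DataEntry.get(key)
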
class DataStoreFile(db.Model):
    """This class stores a file in the datastore.
       If a file is oversize (>1000*1000 bytes), it is split into
       multiple segments and stored in multiple datastore entries.
    """

    name = db.StringProperty()
    data_keys = db.ListProperty(db.Key)
    # Keys to the datastore entries that can be reused for new data.
    # If it is empty, new DataEntry entities are created.
    new_data_keys = db.ListProperty(db.Key)
    date = db.DateTimeProperty(auto_now_add=True)

    # In-memory cache of the file content; this is a plain class attribute,
    # not a datastore property, so it is not persisted.
    data = None

    def delete_data(self, keys=None):
        if not keys:
            keys = self.data_keys

        for key in keys:
            data_entry = DataEntry.get(key)
            if data_entry:
                data_entry.delete()

    def save_data(self, data):
        if not data:
            logging.warning("No data to save.")
            return False

        if len(data) > (MAX_DATA_ENTRY_PER_FILE * MAX_ENTRY_LEN):
            logging.error("File too big, can't save to datastore: %dK",
                len(data) / 1024)
            return False

        start = 0
        # Use new_data_keys to store the new data. If all new data is saved
        # successfully, swap new_data_keys and data_keys so we can reuse the
        # data_keys entries in the next run. If we are unable to save the new
        # data for any reason, only the data pointed to by new_data_keys may
        # be corrupted; the existing data_keys data remains untouched. The
        # corrupted data in new_data_keys will be overwritten in the next
        # update.
        keys = self.new_data_keys
        self.new_data_keys = []

        while start < len(data):
            if keys:
                key = keys[0]
                data_entry = DataEntry.get(key)
                if not data_entry:
                    logging.warning("Found key, but no data entry: %s", key)
                    data_entry = DataEntry()
            else:
                data_entry = DataEntry()

            data_entry.data = db.Blob(data[start: start + MAX_ENTRY_LEN])
            try:
                data_entry.put()
            except Exception, err:
                logging.error("Failed to save data store entry: %s", err)
                if keys:
                    self.delete_data(keys)
                return False

            logging.info("Data saved: %s.", data_entry.key())
            self.new_data_keys.append(data_entry.key())
            if keys:
                keys.pop(0)

            start = start + MAX_ENTRY_LEN

        if keys:
            self.delete_data(keys)

        temp_keys = self.data_keys
        self.data_keys = self.new_data_keys
        self.new_data_keys = temp_keys
        self.data = data

        return True

    def load_data(self):
        if not self.data_keys:
            logging.warning("No data to load.")
            return None

        data = []
        for key in self.data_keys:
            logging.info("Loading data for key: %s.", key)
            data_entry = DataEntry.get(key)
            if not data_entry:
                logging.error("No data found for key: %s.", key)
                return None

            data.append(data_entry.data)

        self.data = "".join(data)

        return self.data
    151