# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

def IsDeadlineExceededError(error):
  '''A general way of determining whether |error| is a DeadlineExceededError,
  since there are 3 different types thrown by AppEngine and we might as well
  handle them all the same way. For more info see:
  https://developers.google.com/appengine/articles/deadlineexceedederrors
  '''
  # Matching on the class name avoids importing all 3 exception types.
  return type(error).__name__ == 'DeadlineExceededError'


def IsDownloadError(error):
  # Matched by name so it works with both the real urlfetch module's and the
  # fake FakeUrlFetch's DownloadError class.
  return type(error).__name__ == 'DownloadError'


# This will attempt to import the actual App Engine modules, and if it fails,
# they will be replaced with fake modules. This is useful during testing.
try:
  import google.appengine.api.app_identity as app_identity
  import google.appengine.api.files as files
  import google.appengine.api.logservice as logservice
  import google.appengine.api.memcache as memcache
  import google.appengine.api.taskqueue as taskqueue
  import google.appengine.api.urlfetch as urlfetch
  import google.appengine.ext.blobstore as blobstore
  from google.appengine.ext.blobstore.blobstore import BlobReferenceProperty
  import google.appengine.ext.db as db
  import webapp2
except ImportError:
  import re
  from StringIO import StringIO

  # Maps compiled URL regexes to fake fetcher objects. None until
  # ConfigureFakeUrlFetch() has been called.
  FAKE_URL_FETCHER_CONFIGURATION = None

  def ConfigureFakeUrlFetch(configuration):
    """|configuration| is a dictionary mapping strings to fake urlfetch classes.
    A fake urlfetch class just needs to have a fetch method. The keys of the
    dictionary are treated as regex, and they are matched with the URL to
    determine which fake urlfetch is used.
    """
    global FAKE_URL_FETCHER_CONFIGURATION
    FAKE_URL_FETCHER_CONFIGURATION = dict(
        (re.compile(k), v) for k, v in configuration.iteritems())

  def _GetConfiguration(key):
    # Returns the first configured fake fetcher whose regex matches |key|.
    # Raises ValueError if nothing has been configured or nothing matches.
    if not FAKE_URL_FETCHER_CONFIGURATION:
      raise ValueError('No fake fetch paths have been configured. '
                       'See ConfigureFakeUrlFetch in appengine_wrappers.py.')
    for k, v in FAKE_URL_FETCHER_CONFIGURATION.iteritems():
      if k.match(key):
        return v
    raise ValueError('No configuration found for %s' % key)

  class _RPC(object):
    '''A fake RPC whose result is available immediately; stands in for the
    asynchronous RPC objects returned by the real App Engine APIs.
    '''
    def __init__(self, result=None):
      self.result = result

    def get_result(self):
      return self.result

    def wait(self):
      # The result is computed eagerly, so there is never anything to wait on.
      pass

  class FakeAppIdentity(object):
    """A fake app_identity module that returns no access tokens."""
    def get_access_token(self, scope):
      # Mirrors the real (token, expiry) tuple shape with empty values.
      return (None, None)
  app_identity = FakeAppIdentity()

  class FakeUrlFetch(object):
    """A fake urlfetch module that uses the current
    |FAKE_URL_FETCHER_CONFIGURATION| to map urls to fake fetchers.
    """
    class DownloadError(Exception):
      pass

    class _Response(object):
      # Minimal stand-in for a urlfetch response object.
      def __init__(self, content):
        self.content = content
        self.headers = {'Content-Type': 'none'}
        self.status_code = 200

    def fetch(self, url, **kwargs):
      # The query string is stripped; fake fetchers are selected and invoked
      # on the bare URL alone.
      url = url.split('?', 1)[0]
      response = self._Response(_GetConfiguration(url).fetch(url))
      if response.content is None:
        response.status_code = 404
      return response

    def create_rpc(self, **kwargs):
      return _RPC()

    def make_fetch_call(self, rpc, url, **kwargs):
      # Fetches synchronously and stores the response on |rpc| so it can be
      # retrieved later via rpc.get_result().
      rpc.result = self.fetch(url)
  urlfetch = FakeUrlFetch()

  # Backing store shared by FakeBlobstore and FakeFiles: maps blob keys to
  # StringIO objects.
  _BLOBS = {}
  class FakeBlobstore(object):
    class BlobNotFoundError(Exception):
      pass

    class BlobReader(object):
      def __init__(self, blob_key):
        # Note: an unknown key raises KeyError here, not BlobNotFoundError.
        self._data = _BLOBS[blob_key].getvalue()

      def read(self):
        return self._data

  blobstore = FakeBlobstore()

  class FakeFileInterface(object):
    """This class allows a StringIO object to be used in a with block like a
    file.
    """
    def __init__(self, io):
      self._io = io

    def __exit__(self, *args):
      # Deliberately does not close the StringIO; its contents must remain
      # readable through _BLOBS after the with block ends.
      pass

    def write(self, data):
      self._io.write(data)

    def __enter__(self, *args):
      return self._io

  class FakeFiles(object):
    '''A fake files module backed by the in-memory _BLOBS dictionary.'''
    # Counter used to hand out unique integer blob keys.
    _next_blobstore_key = 0
    class blobstore(object):
      @staticmethod
      def create():
        # Returns monotonically increasing integers as blob keys.
        FakeFiles._next_blobstore_key += 1
        return FakeFiles._next_blobstore_key

      @staticmethod
      def get_blob_key(filename):
        return filename

    def open(self, filename, mode):
      # |mode| is ignored; opening always creates a fresh StringIO for
      # |filename| (i.e. always truncates).
      _BLOBS[filename] = StringIO()
      return FakeFileInterface(_BLOBS[filename])

    def GetBlobKeys(self):
      return _BLOBS.keys()

    def finalize(self, filename):
      # No-op: fake blobs are readable as soon as they are written.
      pass

  files = FakeFiles()

  class Logservice(object):
    '''A fake logservice module; flushing is a no-op.'''
    AUTOFLUSH_ENABLED = True

    def flush(self):
      pass

  logservice = Logservice()

  class InMemoryMemcache(object):
    """An in-memory memcache implementation.
    """
    def __init__(self):
      # Maps namespace name -> {key: value}.
      self._namespaces = {}

    class Client(object):
      '''Mimics memcache.Client by delegating to the module-level fake.'''
      def set_multi_async(self, mapping, namespace='', time=0):
        for k, v in mapping.iteritems():
          memcache.set(k, v, namespace=namespace, time=time)

      def get_multi_async(self, keys, namespace='', time=0):
        return _RPC(result=dict(
          (k, memcache.get(k, namespace=namespace, time=time)) for k in keys))

    def set(self, key, value, namespace='', time=0):
      # |time| (the expiry) is accepted for API compatibility but ignored:
      # entries never expire.
      self._GetNamespace(namespace)[key] = value

    def get(self, key, namespace='', time=0):
      # Returns None for a missing key, like the real memcache.
      return self._GetNamespace(namespace).get(key)

    def delete(self, key, namespace=''):
      self._GetNamespace(namespace).pop(key, None)

    def delete_multi(self, keys, namespace=''):
      for k in keys:
        self.delete(k, namespace=namespace)

    def _GetNamespace(self, namespace):
      # Creates the namespace dictionary on first use.
      if namespace not in self._namespaces:
        self._namespaces[namespace] = {}
      return self._namespaces[namespace]

  memcache = InMemoryMemcache()

  class webapp2(object):
    class RequestHandler(object):
      """A fake webapp2.RequestHandler class for Handler to extend.
      """
      def __init__(self, request, response):
        self.request = request
        self.response = response
        self.response.status = 200

      def redirect(self, path, permanent=False):
        # 301 Moved Permanently vs 302 Found, matching webapp2 semantics.
        self.response.status = 301 if permanent else 302
        self.response.headers['Location'] = path

  class _Db_Result(object):
    '''Wraps a value from the fake datastore in the result-object shape that
    callers of db.Model.gql() expect (result.get().value).
    '''
    def __init__(self, data):
      self._data = data

    class _Result(object):
      def __init__(self, value):
        self.value = value

    def get(self):
      return self._Result(self._data)

  class db(object):
    '''A fake db module backed by a single class-level dictionary.'''
    _store = {}

    class StringProperty(object):
      pass

    class BlobProperty(object):
      pass

    class Key(object):
      def __init__(self, key):
        self._key = key

      @staticmethod
      def from_path(model_name, path):
        return db.Key('%s/%s' % (model_name, path))

      def __eq__(self, obj):
        return self.__class__ == obj.__class__ and self._key == obj._key

      def __hash__(self):
        return hash(self._key)

      def __str__(self):
        return str(self._key)

    class Model(object):
      key = None

      def __init__(self, **optargs):
        # Only keyword arguments matching properties declared on the subclass
        # may be set; anything else is a programming error.
        cls = self.__class__
        for k, v in optargs.iteritems():
          assert hasattr(cls, k), '%s does not define property %s' % (
              cls.__name__, k)
          setattr(self, k, v)

      @staticmethod
      def gql(query, key):
        # |query| is ignored; the fake simply looks |key| up in the store.
        return _Db_Result(db._store.get(key))

      def put(self):
        # NOTE(review): this reads self.key_ and self.value, while put_async()
        # below uses value.key -- presumably subclasses define key_/value;
        # verify against callers before relying on this method.
        db._store[self.key_] = self.value

      @staticmethod
      def get_async(key):
        return _RPC(result=db._store.get(key))

      @staticmethod
      def delete_async(key):
        db._store.pop(key, None)
        return _RPC()

      @staticmethod
      def put_async(value):
        db._store[value.key] = value
        return _RPC()

  class BlobReferenceProperty(object):
    pass

  # Executes any queued tasks synchronously as they are queued.
281 _task_runner = None 282 283 def SetTaskRunnerForTest(task_runner): 284 global _task_runner 285 _task_runner = task_runner 286 287 class SynchronousTaskQueue(object): 288 class Task(object): 289 def __init__(self, url=None, params={}): 290 self.url_ = url 291 self.params_ = params 292 293 def GetUrl(self): 294 return self.url_ 295 296 def GetCommit(self): 297 return self.params_.get('commit') 298 299 class Queue(object): 300 def __init__(self, name='default'): 301 pass 302 303 def add(self, task): 304 global _task_runner 305 if _task_runner: 306 _task_runner(task.GetUrl(), task.GetCommit()) 307 return _RPC() 308 309 def purge(self): 310 return _RPC() 311 312 taskqueue = SynchronousTaskQueue() 313