# (code-search page header removed: "Home | History | Annotate | Download | only in testserver")
      1 # Copyright 2013 The Chromium Authors. All rights reserved.
      2 # Use of this source code is governed by a BSD-style license that can be
      3 # found in the LICENSE file.
      4 
      5 """An implementation of the server side of the Chromium sync protocol.
      6 
      7 The details of the protocol are described mostly by comments in the protocol
      8 buffer definition at chrome/browser/sync/protocol/sync.proto.
      9 """
     10 
     11 import base64
     12 import cgi
     13 import copy
     14 import google.protobuf.text_format
     15 import hashlib
     16 import operator
     17 import pickle
     18 import random
     19 import string
     20 import sys
     21 import threading
     22 import time
     23 import urlparse
     24 import uuid
     25 
     26 import app_list_specifics_pb2
     27 import app_notification_specifics_pb2
     28 import app_setting_specifics_pb2
     29 import app_specifics_pb2
     30 import article_specifics_pb2
     31 import autofill_specifics_pb2
     32 import bookmark_specifics_pb2
     33 import client_commands_pb2
     34 import dictionary_specifics_pb2
     35 import get_updates_caller_info_pb2
     36 import extension_setting_specifics_pb2
     37 import extension_specifics_pb2
     38 import favicon_image_specifics_pb2
     39 import favicon_tracking_specifics_pb2
     40 import history_delete_directive_specifics_pb2
     41 import managed_user_setting_specifics_pb2
     42 import managed_user_specifics_pb2
     43 import managed_user_shared_setting_specifics_pb2
     44 import nigori_specifics_pb2
     45 import password_specifics_pb2
     46 import preference_specifics_pb2
     47 import priority_preference_specifics_pb2
     48 import search_engine_specifics_pb2
     49 import session_specifics_pb2
     50 import sync_pb2
     51 import sync_enums_pb2
     52 import synced_notification_app_info_specifics_pb2
     53 import synced_notification_data_pb2
     54 import synced_notification_render_pb2
     55 import synced_notification_specifics_pb2
     56 import theme_specifics_pb2
     57 import typed_url_specifics_pb2
     58 
# An enumeration of the various kinds of data that can be synced.
# Over the wire, this enumeration is not used: a sync object's type is
# inferred by which EntitySpecifics field it has.  But in the context
# of a program, it is useful to have an enumeration.
# (The chained tuple assignment binds each name to its index 0..29 while
# also collecting all values into the ALL_TYPES tuple.)
ALL_TYPES = (
    TOP_LEVEL,  # The type of the 'Google Chrome' folder.
    APPS,
    APP_LIST,
    APP_NOTIFICATION,
    APP_SETTINGS,
    ARTICLE,
    AUTOFILL,
    AUTOFILL_PROFILE,
    BOOKMARK,
    DEVICE_INFO,
    DICTIONARY,
    EXPERIMENTS,
    EXTENSIONS,
    HISTORY_DELETE_DIRECTIVE,
    MANAGED_USER_SETTING,
    MANAGED_USER_SHARED_SETTING,
    MANAGED_USER,
    NIGORI,
    PASSWORD,
    PREFERENCE,
    PRIORITY_PREFERENCE,
    SEARCH_ENGINE,
    SESSION,
    SYNCED_NOTIFICATION,
    SYNCED_NOTIFICATION_APP_INFO,
    THEME,
    TYPED_URL,
    EXTENSION_SETTINGS,
    FAVICON_IMAGES,
    FAVICON_TRACKING) = range(30)

# An enumeration on the frequency at which the server should send errors
# to the client. This would be specified by the url that triggers the error.
# Note: This enum should be kept in the same order as the enum in sync_test.h.
SYNC_ERROR_FREQUENCY = (
    ERROR_FREQUENCY_NONE,
    ERROR_FREQUENCY_ALWAYS,
    ERROR_FREQUENCY_TWO_THIRDS) = range(3)

# Well-known server tag of the top level 'Google Chrome' folder.
TOP_LEVEL_FOLDER_TAG = 'google_chrome'

# Given a sync type from ALL_TYPES, find the FieldDescriptor corresponding
# to that datatype.  Note that TOP_LEVEL has no such token.
SYNC_TYPE_FIELDS = sync_pb2.EntitySpecifics.DESCRIPTOR.fields_by_name
SYNC_TYPE_TO_DESCRIPTOR = {
    APP_LIST: SYNC_TYPE_FIELDS['app_list'],
    APP_NOTIFICATION: SYNC_TYPE_FIELDS['app_notification'],
    APP_SETTINGS: SYNC_TYPE_FIELDS['app_setting'],
    APPS: SYNC_TYPE_FIELDS['app'],
    ARTICLE: SYNC_TYPE_FIELDS['article'],
    AUTOFILL: SYNC_TYPE_FIELDS['autofill'],
    AUTOFILL_PROFILE: SYNC_TYPE_FIELDS['autofill_profile'],
    BOOKMARK: SYNC_TYPE_FIELDS['bookmark'],
    DEVICE_INFO: SYNC_TYPE_FIELDS['device_info'],
    DICTIONARY: SYNC_TYPE_FIELDS['dictionary'],
    EXPERIMENTS: SYNC_TYPE_FIELDS['experiments'],
    EXTENSION_SETTINGS: SYNC_TYPE_FIELDS['extension_setting'],
    EXTENSIONS: SYNC_TYPE_FIELDS['extension'],
    FAVICON_IMAGES: SYNC_TYPE_FIELDS['favicon_image'],
    FAVICON_TRACKING: SYNC_TYPE_FIELDS['favicon_tracking'],
    HISTORY_DELETE_DIRECTIVE: SYNC_TYPE_FIELDS['history_delete_directive'],
    MANAGED_USER_SHARED_SETTING:
        SYNC_TYPE_FIELDS['managed_user_shared_setting'],
    MANAGED_USER_SETTING: SYNC_TYPE_FIELDS['managed_user_setting'],
    MANAGED_USER: SYNC_TYPE_FIELDS['managed_user'],
    NIGORI: SYNC_TYPE_FIELDS['nigori'],
    PASSWORD: SYNC_TYPE_FIELDS['password'],
    PREFERENCE: SYNC_TYPE_FIELDS['preference'],
    PRIORITY_PREFERENCE: SYNC_TYPE_FIELDS['priority_preference'],
    SEARCH_ENGINE: SYNC_TYPE_FIELDS['search_engine'],
    SESSION: SYNC_TYPE_FIELDS['session'],
    SYNCED_NOTIFICATION: SYNC_TYPE_FIELDS["synced_notification"],
    SYNCED_NOTIFICATION_APP_INFO:
        SYNC_TYPE_FIELDS["synced_notification_app_info"],
    THEME: SYNC_TYPE_FIELDS['theme'],
    TYPED_URL: SYNC_TYPE_FIELDS['typed_url'],
    }

# The parent ID used to indicate a top-level node.
ROOT_ID = '0'

# Unix time epoch +1 day in struct_time format. The tuple corresponds to
# UTC Thursday Jan 2 1970, 00:00:00, non-dst.
# We have to add one day after start of epoch, since in timezones with positive
# UTC offset time.mktime throws an OverflowError,
# rather than returning a negative number.
FIRST_DAY_UNIX_TIME_EPOCH = (1970, 1, 2, 0, 0, 0, 4, 2, 0)
ONE_DAY_SECONDS = 60 * 60 * 24

# The number of characters in the server-generated encryption key.
KEYSTORE_KEY_LENGTH = 16

# The hashed client tags for some experiment nodes.
KEYSTORE_ENCRYPTION_EXPERIMENT_TAG = "pis8ZRzh98/MKLtVEio2mr42LQA="
PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG = "Z1xgeh3QUBa50vdEPd8C/4c7jfE="
    160 
    161 class Error(Exception):
    162   """Error class for this module."""
    163 
    164 
    165 class ProtobufDataTypeFieldNotUnique(Error):
    166   """An entry should not have more than one data type present."""
    167 
    168 
    169 class DataTypeIdNotRecognized(Error):
    170   """The requested data type is not recognized."""
    171 
    172 
class MigrationDoneError(Error):
  """A server-side migration occurred; clients must re-sync some datatypes.

  Attributes:
    datatypes: a list of the datatypes (python enum) needing migration.
  """

  def __init__(self, datatypes):
    # The datatypes the client must throw away and re-sync from scratch.
    self.datatypes = datatypes
    182 
    183 
    184 class StoreBirthdayError(Error):
    185   """The client sent a birthday that doesn't correspond to this server."""
    186 
    187 
    188 class TransientError(Error):
    189   """The client would be sent a transient error."""
    190 
    191 
    192 class SyncInducedError(Error):
    193   """The client would be sent an error."""
    194 
    195 
    196 class InducedErrorFrequencyNotDefined(Error):
    197   """The error frequency defined is not handled."""
    198 
    199 
    200 class ClientNotConnectedError(Error):
    201   """The client is not connected to the server."""
    202 
    203 
    204 def GetEntryType(entry):
    205   """Extract the sync type from a SyncEntry.
    206 
    207   Args:
    208     entry: A SyncEntity protobuf object whose type to determine.
    209   Returns:
    210     An enum value from ALL_TYPES if the entry's type can be determined, or None
    211     if the type cannot be determined.
    212   Raises:
    213     ProtobufDataTypeFieldNotUnique: More than one type was indicated by
    214     the entry.
    215   """
    216   if entry.server_defined_unique_tag == TOP_LEVEL_FOLDER_TAG:
    217     return TOP_LEVEL
    218   entry_types = GetEntryTypesFromSpecifics(entry.specifics)
    219   if not entry_types:
    220     return None
    221 
    222   # If there is more than one, either there's a bug, or else the caller
    223   # should use GetEntryTypes.
    224   if len(entry_types) > 1:
    225     raise ProtobufDataTypeFieldNotUnique
    226   return entry_types[0]
    227 
    228 
    229 def GetEntryTypesFromSpecifics(specifics):
    230   """Determine the sync types indicated by an EntitySpecifics's field(s).
    231 
    232   If the specifics have more than one recognized data type field (as commonly
    233   happens with the requested_types field of GetUpdatesMessage), all types
    234   will be returned.  Callers must handle the possibility of the returned
    235   value having more than one item.
    236 
    237   Args:
    238     specifics: A EntitySpecifics protobuf message whose extensions to
    239       enumerate.
    240   Returns:
    241     A list of the sync types (values from ALL_TYPES) associated with each
    242     recognized extension of the specifics message.
    243   """
    244   return [data_type for data_type, field_descriptor
    245           in SYNC_TYPE_TO_DESCRIPTOR.iteritems()
    246           if specifics.HasField(field_descriptor.name)]
    247 
    248 
    249 def SyncTypeToProtocolDataTypeId(data_type):
    250   """Convert from a sync type (python enum) to the protocol's data type id."""
    251   return SYNC_TYPE_TO_DESCRIPTOR[data_type].number
    252 
    253 
    254 def ProtocolDataTypeIdToSyncType(protocol_data_type_id):
    255   """Convert from the protocol's data type id to a sync type (python enum)."""
    256   for data_type, field_descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems():
    257     if field_descriptor.number == protocol_data_type_id:
    258       return data_type
    259   raise DataTypeIdNotRecognized
    260 
    261 
    262 def DataTypeStringToSyncTypeLoose(data_type_string):
    263   """Converts a human-readable string to a sync type (python enum).
    264 
    265   Capitalization and pluralization don't matter; this function is appropriate
    266   for values that might have been typed by a human being; e.g., command-line
    267   flags or query parameters.
    268   """
    269   if data_type_string.isdigit():
    270     return ProtocolDataTypeIdToSyncType(int(data_type_string))
    271   name = data_type_string.lower().rstrip('s')
    272   for data_type, field_descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems():
    273     if field_descriptor.name.lower().rstrip('s') == name:
    274       return data_type
    275   raise DataTypeIdNotRecognized
    276 
    277 
    278 def MakeNewKeystoreKey():
    279   """Returns a new random keystore key."""
    280   return ''.join(random.choice(string.ascii_uppercase + string.digits)
    281         for x in xrange(KEYSTORE_KEY_LENGTH))
    282 
    283 
    284 def SyncTypeToString(data_type):
    285   """Formats a sync type enum (from ALL_TYPES) to a human-readable string."""
    286   return SYNC_TYPE_TO_DESCRIPTOR[data_type].name
    287 
    288 
    289 def CallerInfoToString(caller_info_source):
    290   """Formats a GetUpdatesSource enum value to a readable string."""
    291   return get_updates_caller_info_pb2.GetUpdatesCallerInfo \
    292       .DESCRIPTOR.enum_types_by_name['GetUpdatesSource'] \
    293       .values_by_number[caller_info_source].name
    294 
    295 
    296 def ShortDatatypeListSummary(data_types):
    297   """Formats compactly a list of sync types (python enums) for human eyes.
    298 
    299   This function is intended for use by logging.  If the list of datatypes
    300   contains almost all of the values, the return value will be expressed
    301   in terms of the datatypes that aren't set.
    302   """
    303   included = set(data_types) - set([TOP_LEVEL])
    304   if not included:
    305     return 'nothing'
    306   excluded = set(ALL_TYPES) - included - set([TOP_LEVEL])
    307   if not excluded:
    308     return 'everything'
    309   simple_text = '+'.join(sorted([SyncTypeToString(x) for x in included]))
    310   all_but_text = 'all except %s' % (
    311       '+'.join(sorted([SyncTypeToString(x) for x in excluded])))
    312   if len(included) < len(excluded) or len(simple_text) <= len(all_but_text):
    313     return simple_text
    314   else:
    315     return all_but_text
    316 
    317 
    318 def GetDefaultEntitySpecifics(data_type):
    319   """Get an EntitySpecifics having a sync type's default field value."""
    320   specifics = sync_pb2.EntitySpecifics()
    321   if data_type in SYNC_TYPE_TO_DESCRIPTOR:
    322     descriptor = SYNC_TYPE_TO_DESCRIPTOR[data_type]
    323     getattr(specifics, descriptor.name).SetInParent()
    324   return specifics
    325 
    326 
    327 class PermanentItem(object):
    328   """A specification of one server-created permanent item.
    329 
    330   Attributes:
    331     tag: A known-to-the-client value that uniquely identifies a server-created
    332       permanent item.
    333     name: The human-readable display name for this item.
    334     parent_tag: The tag of the permanent item's parent.  If ROOT_ID, indicates
    335       a top-level item.  Otherwise, this must be the tag value of some other
    336       server-created permanent item.
    337     sync_type: A value from ALL_TYPES, giving the datatype of this permanent
    338       item.  This controls which types of client GetUpdates requests will
    339       cause the permanent item to be created and returned.
    340     create_by_default: Whether the permanent item is created at startup or not.
    341       This value is set to True in the default case. Non-default permanent items
    342       are those that are created only when a client explicitly tells the server
    343       to do so.
    344   """
    345 
    346   def __init__(self, tag, name, parent_tag, sync_type, create_by_default=True):
    347     self.tag = tag
    348     self.name = name
    349     self.parent_tag = parent_tag
    350     self.sync_type = sync_type
    351     self.create_by_default = create_by_default
    352 
    353 
    354 class MigrationHistory(object):
    355   """A record of the migration events associated with an account.
    356 
    357   Each migration event invalidates one or more datatypes on all clients
    358   that had synced the datatype before the event.  Such clients will continue
    359   to receive MigrationDone errors until they throw away their progress and
    360   re-sync that datatype from the beginning.
    361   """
    362   def __init__(self):
    363     self._migrations = {}
    364     for datatype in ALL_TYPES:
    365       self._migrations[datatype] = [1]
    366     self._next_migration_version = 2
    367 
    368   def GetLatestVersion(self, datatype):
    369     return self._migrations[datatype][-1]
    370 
    371   def CheckAllCurrent(self, versions_map):
    372     """Raises an error if any the provided versions are out of date.
    373 
    374     This function intentionally returns migrations in the order that they were
    375     triggered.  Doing it this way allows the client to queue up two migrations
    376     in a row, so the second one is received while responding to the first.
    377 
    378     Arguments:
    379       version_map: a map whose keys are datatypes and whose values are versions.
    380 
    381     Raises:
    382       MigrationDoneError: if a mismatch is found.
    383     """
    384     problems = {}
    385     for datatype, client_migration in versions_map.iteritems():
    386       for server_migration in self._migrations[datatype]:
    387         if client_migration < server_migration:
    388           problems.setdefault(server_migration, []).append(datatype)
    389     if problems:
    390       raise MigrationDoneError(problems[min(problems.keys())])
    391 
    392   def Bump(self, datatypes):
    393     """Add a record of a migration, to cause errors on future requests."""
    394     for idx, datatype in enumerate(datatypes):
    395       self._migrations[datatype].append(self._next_migration_version)
    396     self._next_migration_version += 1
    397 
    398 
    399 class UpdateSieve(object):
    400   """A filter to remove items the client has already seen."""
    401   def __init__(self, request, migration_history=None):
    402     self._original_request = request
    403     self._state = {}
    404     self._migration_history = migration_history or MigrationHistory()
    405     self._migration_versions_to_check = {}
    406     if request.from_progress_marker:
    407       for marker in request.from_progress_marker:
    408         data_type = ProtocolDataTypeIdToSyncType(marker.data_type_id)
    409         if marker.HasField('timestamp_token_for_migration'):
    410           timestamp = marker.timestamp_token_for_migration
    411           if timestamp:
    412             self._migration_versions_to_check[data_type] = 1
    413         elif marker.token:
    414           (timestamp, version) = pickle.loads(marker.token)
    415           self._migration_versions_to_check[data_type] = version
    416         elif marker.HasField('token'):
    417           timestamp = 0
    418         else:
    419           raise ValueError('No timestamp information in progress marker.')
    420         data_type = ProtocolDataTypeIdToSyncType(marker.data_type_id)
    421         self._state[data_type] = timestamp
    422     elif request.HasField('from_timestamp'):
    423       for data_type in GetEntryTypesFromSpecifics(request.requested_types):
    424         self._state[data_type] = request.from_timestamp
    425         self._migration_versions_to_check[data_type] = 1
    426     if self._state:
    427       self._state[TOP_LEVEL] = min(self._state.itervalues())
    428 
    429   def SummarizeRequest(self):
    430     timestamps = {}
    431     for data_type, timestamp in self._state.iteritems():
    432       if data_type == TOP_LEVEL:
    433         continue
    434       timestamps.setdefault(timestamp, []).append(data_type)
    435     return ', '.join('<%s>@%d' % (ShortDatatypeListSummary(types), stamp)
    436                      for stamp, types in sorted(timestamps.iteritems()))
    437 
    438   def CheckMigrationState(self):
    439     self._migration_history.CheckAllCurrent(self._migration_versions_to_check)
    440 
    441   def ClientWantsItem(self, item):
    442     """Return true if the client hasn't already seen an item."""
    443     return self._state.get(GetEntryType(item), sys.maxint) < item.version
    444 
    445   def HasAnyTimestamp(self):
    446     """Return true if at least one datatype was requested."""
    447     return bool(self._state)
    448 
    449   def GetMinTimestamp(self):
    450     """Return true the smallest timestamp requested across all datatypes."""
    451     return min(self._state.itervalues())
    452 
    453   def GetFirstTimeTypes(self):
    454     """Return a list of datatypes requesting updates from timestamp zero."""
    455     return [datatype for datatype, timestamp in self._state.iteritems()
    456             if timestamp == 0]
    457 
    458   def GetCreateMobileBookmarks(self):
    459     """Return true if the client has requested to create the 'Mobile Bookmarks'
    460        folder.
    461     """
    462     return (self._original_request.HasField('create_mobile_bookmarks_folder')
    463             and self._original_request.create_mobile_bookmarks_folder)
    464 
    465   def SaveProgress(self, new_timestamp, get_updates_response):
    466     """Write the new_timestamp or new_progress_marker fields to a response."""
    467     if self._original_request.from_progress_marker:
    468       for data_type, old_timestamp in self._state.iteritems():
    469         if data_type == TOP_LEVEL:
    470           continue
    471         new_marker = sync_pb2.DataTypeProgressMarker()
    472         new_marker.data_type_id = SyncTypeToProtocolDataTypeId(data_type)
    473         final_stamp = max(old_timestamp, new_timestamp)
    474         final_migration = self._migration_history.GetLatestVersion(data_type)
    475         new_marker.token = pickle.dumps((final_stamp, final_migration))
    476         get_updates_response.new_progress_marker.add().MergeFrom(new_marker)
    477     elif self._original_request.HasField('from_timestamp'):
    478       if self._original_request.from_timestamp < new_timestamp:
    479         get_updates_response.new_timestamp = new_timestamp
    480 
    481 
class SyncDataModel(object):
  """Models the account state of one sync user."""
  # Number of entries served per GetUpdates batch (see usage later in class).
  _BATCH_SIZE = 100

  # Specify all the permanent items that a model might need.
  # Items with create_by_default=False are created only when a client
  # explicitly asks for them (see PermanentItem).
  _PERMANENT_ITEM_SPECS = [
      PermanentItem('google_chrome_apps', name='Apps',
                    parent_tag=ROOT_ID, sync_type=APPS),
      PermanentItem('google_chrome_app_list', name='App List',
                    parent_tag=ROOT_ID, sync_type=APP_LIST),
      PermanentItem('google_chrome_app_notifications', name='App Notifications',
                    parent_tag=ROOT_ID, sync_type=APP_NOTIFICATION),
      PermanentItem('google_chrome_app_settings',
                    name='App Settings',
                    parent_tag=ROOT_ID, sync_type=APP_SETTINGS),
      PermanentItem('google_chrome_bookmarks', name='Bookmarks',
                    parent_tag=ROOT_ID, sync_type=BOOKMARK),
      PermanentItem('bookmark_bar', name='Bookmark Bar',
                    parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK),
      PermanentItem('other_bookmarks', name='Other Bookmarks',
                    parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK),
      PermanentItem('synced_bookmarks', name='Synced Bookmarks',
                    parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK,
                    create_by_default=False),
      PermanentItem('google_chrome_autofill', name='Autofill',
                    parent_tag=ROOT_ID, sync_type=AUTOFILL),
      PermanentItem('google_chrome_autofill_profiles', name='Autofill Profiles',
                    parent_tag=ROOT_ID, sync_type=AUTOFILL_PROFILE),
      PermanentItem('google_chrome_device_info', name='Device Info',
                    parent_tag=ROOT_ID, sync_type=DEVICE_INFO),
      PermanentItem('google_chrome_experiments', name='Experiments',
                    parent_tag=ROOT_ID, sync_type=EXPERIMENTS),
      PermanentItem('google_chrome_extension_settings',
                    name='Extension Settings',
                    parent_tag=ROOT_ID, sync_type=EXTENSION_SETTINGS),
      PermanentItem('google_chrome_extensions', name='Extensions',
                    parent_tag=ROOT_ID, sync_type=EXTENSIONS),
      PermanentItem('google_chrome_history_delete_directives',
                    name='History Delete Directives',
                    parent_tag=ROOT_ID,
                    sync_type=HISTORY_DELETE_DIRECTIVE),
      PermanentItem('google_chrome_favicon_images',
                    name='Favicon Images',
                    parent_tag=ROOT_ID,
                    sync_type=FAVICON_IMAGES),
      PermanentItem('google_chrome_favicon_tracking',
                    name='Favicon Tracking',
                    parent_tag=ROOT_ID,
                    sync_type=FAVICON_TRACKING),
      PermanentItem('google_chrome_managed_user_settings',
                    name='Managed User Settings',
                    parent_tag=ROOT_ID, sync_type=MANAGED_USER_SETTING),
      PermanentItem('google_chrome_managed_users',
                    name='Managed Users',
                    parent_tag=ROOT_ID, sync_type=MANAGED_USER),
      PermanentItem('google_chrome_managed_user_shared_settings',
                    name='Managed User Shared Settings',
                    parent_tag=ROOT_ID, sync_type=MANAGED_USER_SHARED_SETTING),
      PermanentItem('google_chrome_nigori', name='Nigori',
                    parent_tag=ROOT_ID, sync_type=NIGORI),
      PermanentItem('google_chrome_passwords', name='Passwords',
                    parent_tag=ROOT_ID, sync_type=PASSWORD),
      PermanentItem('google_chrome_preferences', name='Preferences',
                    parent_tag=ROOT_ID, sync_type=PREFERENCE),
      PermanentItem('google_chrome_priority_preferences',
                    name='Priority Preferences',
                    parent_tag=ROOT_ID, sync_type=PRIORITY_PREFERENCE),
      PermanentItem('google_chrome_synced_notifications',
                    name='Synced Notifications',
                    parent_tag=ROOT_ID, sync_type=SYNCED_NOTIFICATION),
      PermanentItem('google_chrome_synced_notification_app_info',
                    name='Synced Notification App Info',
                    parent_tag=ROOT_ID, sync_type=SYNCED_NOTIFICATION_APP_INFO),
      PermanentItem('google_chrome_search_engines', name='Search Engines',
                    parent_tag=ROOT_ID, sync_type=SEARCH_ENGINE),
      PermanentItem('google_chrome_sessions', name='Sessions',
                    parent_tag=ROOT_ID, sync_type=SESSION),
      PermanentItem('google_chrome_themes', name='Themes',
                    parent_tag=ROOT_ID, sync_type=THEME),
      PermanentItem('google_chrome_typed_urls', name='Typed URLs',
                    parent_tag=ROOT_ID, sync_type=TYPED_URL),
      PermanentItem('google_chrome_dictionary', name='Dictionary',
                    parent_tag=ROOT_ID, sync_type=DICTIONARY),
      PermanentItem('google_chrome_articles', name='Articles',
                    parent_tag=ROOT_ID, sync_type=ARTICLE),
      ]
    568 
  def __init__(self):
    # Monotonically increasing version number.  The next object change will
    # take on this value + 1.
    self._version = 0

    # The definitive copy of this client's items: a map from ID string to a
    # SyncEntity protocol buffer.
    self._entries = {}

    # Initialize the store birthday (method defined later in this class).
    self.ResetStoreBirthday()
    # Per-account record of migration events; see MigrationHistory.
    self.migration_history = MigrationHistory()
    # Test-induced error state: the error to inject and how often.
    self.induced_error = sync_pb2.ClientToServerResponse.Error()
    self.induced_error_frequency = 0
    # NOTE(review): appears to count syncs served before induced errors
    # apply -- confirm against the request-handling code.
    self.sync_count_before_errors = 0
    self.acknowledge_managed_users = False
    # Server-generated keystore encryption keys.
    self._keys = [MakeNewKeystoreKey()]
    585 
    586   def _SaveEntry(self, entry):
    587     """Insert or update an entry in the change log, and give it a new version.
    588 
    589     The ID fields of this entry are assumed to be valid server IDs.  This
    590     entry will be updated with a new version number and sync_timestamp.
    591 
    592     Args:
    593       entry: The entry to be added or updated.
    594     """
    595     self._version += 1
    596     # Maintain a global (rather than per-item) sequence number and use it
    597     # both as the per-entry version as well as the update-progress timestamp.
    598     # This simulates the behavior of the original server implementation.
    599     entry.version = self._version
    600     entry.sync_timestamp = self._version
    601 
    602     # Preserve the originator info, which the client is not required to send
    603     # when updating.
    604     base_entry = self._entries.get(entry.id_string)
    605     if base_entry:
    606       entry.originator_cache_guid = base_entry.originator_cache_guid
    607       entry.originator_client_item_id = base_entry.originator_client_item_id
    608 
    609     self._entries[entry.id_string] = copy.deepcopy(entry)
    610 
    611   def _ServerTagToId(self, tag):
    612     """Determine the server ID from a server-unique tag.
    613 
    614     The resulting value is guaranteed not to collide with the other ID
    615     generation methods.
    616 
    617     Args:
    618       tag: The unique, known-to-the-client tag of a server-generated item.
    619     Returns:
    620       The string value of the computed server ID.
    621     """
    622     if not tag or tag == ROOT_ID:
    623       return tag
    624     spec = [x for x in self._PERMANENT_ITEM_SPECS if x.tag == tag][0]
    625     return self._MakeCurrentId(spec.sync_type, '<server tag>%s' % tag)
    626 
    627   def _ClientTagToId(self, datatype, tag):
    628     """Determine the server ID from a client-unique tag.
    629 
    630     The resulting value is guaranteed not to collide with the other ID
    631     generation methods.
    632 
    633     Args:
    634       datatype: The sync type (python enum) of the identified object.
    635       tag: The unique, opaque-to-the-server tag of a client-tagged item.
    636     Returns:
    637       The string value of the computed server ID.
    638     """
    639     return self._MakeCurrentId(datatype, '<client tag>%s' % tag)
    640 
    641   def _ClientIdToId(self, datatype, client_guid, client_item_id):
    642     """Compute a unique server ID from a client-local ID tag.
    643 
    644     The resulting value is guaranteed not to collide with the other ID
    645     generation methods.
    646 
    647     Args:
    648       datatype: The sync type (python enum) of the identified object.
    649       client_guid: A globally unique ID that identifies the client which
    650         created this item.
    651       client_item_id: An ID that uniquely identifies this item on the client
    652         which created it.
    653     Returns:
    654       The string value of the computed server ID.
    655     """
    656     # Using the client ID info is not required here (we could instead generate
    657     # a random ID), but it's useful for debugging.
    658     return self._MakeCurrentId(datatype,
    659         '<server ID originally>%s/%s' % (client_guid, client_item_id))
    660 
    661   def _MakeCurrentId(self, datatype, inner_id):
    662     return '%d^%d^%s' % (datatype,
    663                          self.migration_history.GetLatestVersion(datatype),
    664                          inner_id)
    665 
    666   def _ExtractIdInfo(self, id_string):
    667     if not id_string or id_string == ROOT_ID:
    668       return None
    669     datatype_string, separator, remainder = id_string.partition('^')
    670     migration_version_string, separator, inner_id = remainder.partition('^')
    671     return (int(datatype_string), int(migration_version_string), inner_id)
    672 
    673   def _WritePosition(self, entry, parent_id):
    674     """Ensure the entry has an absolute, numeric position and parent_id.
    675 
    676     Historically, clients would specify positions using the predecessor-based
    677     references in the insert_after_item_id field; starting July 2011, this
    678     was changed and Chrome now sends up the absolute position.  The server
    679     must store a position_in_parent value and must not maintain
    680     insert_after_item_id.
    681     Starting in Jan 2013, the client will also send up a unique_position field
    682     which should be saved and returned on subsequent GetUpdates.
    683 
    684     Args:
    685       entry: The entry for which to write a position.  Its ID field are
    686         assumed to be server IDs.  This entry will have its parent_id_string,
    687         position_in_parent and unique_position fields updated; its
    688         insert_after_item_id field will be cleared.
    689       parent_id: The ID of the entry intended as the new parent.
    690     """
    691 
    692     entry.parent_id_string = parent_id
    693     if not entry.HasField('position_in_parent'):
    694       entry.position_in_parent = 1337  # A debuggable, distinctive default.
    695     entry.ClearField('insert_after_item_id')
    696 
    697   def _ItemExists(self, id_string):
    698     """Determine whether an item exists in the changelog."""
    699     return id_string in self._entries
    700 
    701   def _CreatePermanentItem(self, spec):
    702     """Create one permanent item from its spec, if it doesn't exist.
    703 
    704     The resulting item is added to the changelog.
    705 
    706     Args:
    707       spec: A PermanentItem object holding the properties of the item to create.
    708     """
    709     id_string = self._ServerTagToId(spec.tag)
    710     if self._ItemExists(id_string):
    711       return
    712     print 'Creating permanent item: %s' % spec.name
    713     entry = sync_pb2.SyncEntity()
    714     entry.id_string = id_string
    715     entry.non_unique_name = spec.name
    716     entry.name = spec.name
    717     entry.server_defined_unique_tag = spec.tag
    718     entry.folder = True
    719     entry.deleted = False
    720     entry.specifics.CopyFrom(GetDefaultEntitySpecifics(spec.sync_type))
    721     self._WritePosition(entry, self._ServerTagToId(spec.parent_tag))
    722     self._SaveEntry(entry)
    723 
    724   def _CreateDefaultPermanentItems(self, requested_types):
    725     """Ensure creation of all default permanent items for a given set of types.
    726 
    727     Args:
    728       requested_types: A list of sync data types from ALL_TYPES.
    729         All default permanent items of only these types will be created.
    730     """
    731     for spec in self._PERMANENT_ITEM_SPECS:
    732       if spec.sync_type in requested_types and spec.create_by_default:
    733         self._CreatePermanentItem(spec)
    734 
    735   def ResetStoreBirthday(self):
    736     """Resets the store birthday to a random value."""
    737     # TODO(nick): uuid.uuid1() is better, but python 2.5 only.
    738     self.store_birthday = '%0.30f' % random.random()
    739 
    740   def StoreBirthday(self):
    741     """Gets the store birthday."""
    742     return self.store_birthday
    743 
  def GetChanges(self, sieve):
    """Get entries which have changed, oldest first.

    The returned entries are limited to being _BATCH_SIZE many.  The entries
    are returned in strict version order.

    Args:
      sieve: An update sieve to use to filter out updates the client
        has already seen.
    Returns:
      A tuple of (version, entries, changes_remaining).  Version is a new
      timestamp value, which should be used as the starting point for the
      next query.  Entries is the batch of entries meeting the current
      timestamp query.  Changes_remaining indicates the number of changes
      left on the server after this batch.
    """
    if not sieve.HasAnyTimestamp():
      # A request carrying no from_timestamp at all gets nothing back.
      return (0, [], 0)
    min_timestamp = sieve.GetMinTimestamp()
    # Lazily create the default permanent items (type root folders etc.)
    # for any types this client is syncing for the first time.
    first_time_types = sieve.GetFirstTimeTypes()
    self._CreateDefaultPermanentItems(first_time_types)
    # Mobile bookmark folder is not created by default, create it only when
    # client requested it.
    if (sieve.GetCreateMobileBookmarks() and
        first_time_types.count(BOOKMARK) > 0):
      self.TriggerCreateSyncedBookmarks()

    self.TriggerAcknowledgeManagedUsers()

    # The changelog is delivered in strict version order.
    change_log = sorted(self._entries.values(),
                        key=operator.attrgetter('version'))
    new_changes = [x for x in change_log if x.version > min_timestamp]
    # Pick batch_size new changes, and then filter them.  This matches
    # the RPC behavior of the production sync server.
    batch = new_changes[:self._BATCH_SIZE]
    if not batch:
      # Client is up to date.
      return (min_timestamp, [], 0)

    # Restrict batch to requested types.  Tombstones are untyped
    # and will always get included.
    filtered = [copy.deepcopy(item) for item in batch
                if item.deleted or sieve.ClientWantsItem(item)]

    # The new client timestamp is the timestamp of the last item in the
    # batch, even if that item was filtered out.
    return (batch[-1].version, filtered, len(new_changes) - len(batch))
    791 
    792   def GetKeystoreKeys(self):
    793     """Returns the encryption keys for this account."""
    794     print "Returning encryption keys: %s" % self._keys
    795     return self._keys
    796 
    797   def _CopyOverImmutableFields(self, entry):
    798     """Preserve immutable fields by copying pre-commit state.
    799 
    800     Args:
    801       entry: A sync entity from the client.
    802     """
    803     if entry.id_string in self._entries:
    804       if self._entries[entry.id_string].HasField(
    805           'server_defined_unique_tag'):
    806         entry.server_defined_unique_tag = (
    807             self._entries[entry.id_string].server_defined_unique_tag)
    808 
    809   def _CheckVersionForCommit(self, entry):
    810     """Perform an optimistic concurrency check on the version number.
    811 
    812     Clients are only allowed to commit if they report having seen the most
    813     recent version of an object.
    814 
    815     Args:
    816       entry: A sync entity from the client.  It is assumed that ID fields
    817         have been converted to server IDs.
    818     Returns:
    819       A boolean value indicating whether the client's version matches the
    820       newest server version for the given entry.
    821     """
    822     if entry.id_string in self._entries:
    823       # Allow edits/deletes if the version matches, and any undeletion.
    824       return (self._entries[entry.id_string].version == entry.version or
    825               self._entries[entry.id_string].deleted)
    826     else:
    827       # Allow unknown ID only if the client thinks it's new too.
    828       return entry.version == 0
    829 
    830   def _CheckParentIdForCommit(self, entry):
    831     """Check that the parent ID referenced in a SyncEntity actually exists.
    832 
    833     Args:
    834       entry: A sync entity from the client.  It is assumed that ID fields
    835         have been converted to server IDs.
    836     Returns:
    837       A boolean value indicating whether the entity's parent ID is an object
    838       that actually exists (and is not deleted) in the current account state.
    839     """
    840     if entry.parent_id_string == ROOT_ID:
    841       # This is generally allowed.
    842       return True
    843     if entry.parent_id_string not in self._entries:
    844       print 'Warning: Client sent unknown ID.  Should never happen.'
    845       return False
    846     if entry.parent_id_string == entry.id_string:
    847       print 'Warning: Client sent circular reference.  Should never happen.'
    848       return False
    849     if self._entries[entry.parent_id_string].deleted:
    850       # This can happen in a race condition between two clients.
    851       return False
    852     if not self._entries[entry.parent_id_string].folder:
    853       print 'Warning: Client sent non-folder parent.  Should never happen.'
    854       return False
    855     return True
    856 
    857   def _RewriteIdsAsServerIds(self, entry, cache_guid, commit_session):
    858     """Convert ID fields in a client sync entry to server IDs.
    859 
    860     A commit batch sent by a client may contain new items for which the
    861     server has not generated IDs yet.  And within a commit batch, later
    862     items are allowed to refer to earlier items.  This method will
    863     generate server IDs for new items, as well as rewrite references
    864     to items whose server IDs were generated earlier in the batch.
    865 
    866     Args:
    867       entry: The client sync entry to modify.
    868       cache_guid: The globally unique ID of the client that sent this
    869         commit request.
    870       commit_session: A dictionary mapping the original IDs to the new server
    871         IDs, for any items committed earlier in the batch.
    872     """
    873     if entry.version == 0:
    874       data_type = GetEntryType(entry)
    875       if entry.HasField('client_defined_unique_tag'):
    876         # When present, this should determine the item's ID.
    877         new_id = self._ClientTagToId(data_type, entry.client_defined_unique_tag)
    878       else:
    879         new_id = self._ClientIdToId(data_type, cache_guid, entry.id_string)
    880         entry.originator_cache_guid = cache_guid
    881         entry.originator_client_item_id = entry.id_string
    882       commit_session[entry.id_string] = new_id  # Remember the remapping.
    883       entry.id_string = new_id
    884     if entry.parent_id_string in commit_session:
    885       entry.parent_id_string = commit_session[entry.parent_id_string]
    886     if entry.insert_after_item_id in commit_session:
    887       entry.insert_after_item_id = commit_session[entry.insert_after_item_id]
    888 
    889   def ValidateCommitEntries(self, entries):
    890     """Raise an exception if a commit batch contains any global errors.
    891 
    892     Arguments:
    893       entries: an iterable containing commit-form SyncEntity protocol buffers.
    894 
    895     Raises:
    896       MigrationDoneError: if any of the entries reference a recently-migrated
    897         datatype.
    898     """
    899     server_ids_in_commit = set()
    900     local_ids_in_commit = set()
    901     for entry in entries:
    902       if entry.version:
    903         server_ids_in_commit.add(entry.id_string)
    904       else:
    905         local_ids_in_commit.add(entry.id_string)
    906       if entry.HasField('parent_id_string'):
    907         if entry.parent_id_string not in local_ids_in_commit:
    908           server_ids_in_commit.add(entry.parent_id_string)
    909 
    910     versions_present = {}
    911     for server_id in server_ids_in_commit:
    912       parsed = self._ExtractIdInfo(server_id)
    913       if parsed:
    914         datatype, version, _ = parsed
    915         versions_present.setdefault(datatype, []).append(version)
    916 
    917     self.migration_history.CheckAllCurrent(
    918          dict((k, min(v)) for k, v in versions_present.iteritems()))
    919 
    920   def CommitEntry(self, entry, cache_guid, commit_session):
    921     """Attempt to commit one entry to the user's account.
    922 
    923     Args:
    924       entry: A SyncEntity protobuf representing desired object changes.
    925       cache_guid: A string value uniquely identifying the client; this
    926         is used for ID generation and will determine the originator_cache_guid
    927         if the entry is new.
    928       commit_session: A dictionary mapping client IDs to server IDs for any
    929         objects committed earlier this session.  If the entry gets a new ID
    930         during commit, the change will be recorded here.
    931     Returns:
    932       A SyncEntity reflecting the post-commit value of the entry, or None
    933       if the entry was not committed due to an error.
    934     """
    935     entry = copy.deepcopy(entry)
    936 
    937     # Generate server IDs for this entry, and write generated server IDs
    938     # from earlier entries into the message's fields, as appropriate.  The
    939     # ID generation state is stored in 'commit_session'.
    940     self._RewriteIdsAsServerIds(entry, cache_guid, commit_session)
    941 
    942     # Perform the optimistic concurrency check on the entry's version number.
    943     # Clients are not allowed to commit unless they indicate that they've seen
    944     # the most recent version of an object.
    945     if not self._CheckVersionForCommit(entry):
    946       return None
    947 
    948     # Check the validity of the parent ID; it must exist at this point.
    949     # TODO(nick): Implement cycle detection and resolution.
    950     if not self._CheckParentIdForCommit(entry):
    951       return None
    952 
    953     self._CopyOverImmutableFields(entry);
    954 
    955     # At this point, the commit is definitely going to happen.
    956 
    957     # Deletion works by storing a limited record for an entry, called a
    958     # tombstone.  A sync server must track deleted IDs forever, since it does
    959     # not keep track of client knowledge (there's no deletion ACK event).
    960     if entry.deleted:
    961       def MakeTombstone(id_string, datatype):
    962         """Make a tombstone entry that will replace the entry being deleted.
    963 
    964         Args:
    965           id_string: Index of the SyncEntity to be deleted.
    966         Returns:
    967           A new SyncEntity reflecting the fact that the entry is deleted.
    968         """
    969         # Only the ID, version and deletion state are preserved on a tombstone.
    970         tombstone = sync_pb2.SyncEntity()
    971         tombstone.id_string = id_string
    972         tombstone.deleted = True
    973         tombstone.name = ''
    974         tombstone.specifics.CopyFrom(GetDefaultEntitySpecifics(datatype))
    975         return tombstone
    976 
    977       def IsChild(child_id):
    978         """Check if a SyncEntity is a child of entry, or any of its children.
    979 
    980         Args:
    981           child_id: Index of the SyncEntity that is a possible child of entry.
    982         Returns:
    983           True if it is a child; false otherwise.
    984         """
    985         if child_id not in self._entries:
    986           return False
    987         if self._entries[child_id].parent_id_string == entry.id_string:
    988           return True
    989         return IsChild(self._entries[child_id].parent_id_string)
    990 
    991       # Identify any children entry might have.
    992       child_ids = [child.id_string for child in self._entries.itervalues()
    993                    if IsChild(child.id_string)]
    994 
    995       # Mark all children that were identified as deleted.
    996       for child_id in child_ids:
    997         datatype = GetEntryType(self._entries[child_id])
    998         self._SaveEntry(MakeTombstone(child_id, datatype))
    999 
   1000       # Delete entry itself.
   1001       datatype = GetEntryType(self._entries[entry.id_string])
   1002       entry = MakeTombstone(entry.id_string, datatype)
   1003     else:
   1004       # Comments in sync.proto detail how the representation of positional
   1005       # ordering works.
   1006       #
   1007       # We've almost fully deprecated the 'insert_after_item_id' field.
   1008       # The 'position_in_parent' field is also deprecated, but as of Jan 2013
   1009       # is still in common use.  The 'unique_position' field is the latest
   1010       # and greatest in positioning technology.
   1011       #
   1012       # This server supports 'position_in_parent' and 'unique_position'.
   1013       self._WritePosition(entry, entry.parent_id_string)
   1014 
   1015     # Preserve the originator info, which the client is not required to send
   1016     # when updating.
   1017     base_entry = self._entries.get(entry.id_string)
   1018     if base_entry and not entry.HasField('originator_cache_guid'):
   1019       entry.originator_cache_guid = base_entry.originator_cache_guid
   1020       entry.originator_client_item_id = base_entry.originator_client_item_id
   1021 
   1022     # Store the current time since the Unix epoch in milliseconds.
   1023     entry.mtime = (int((time.mktime(time.gmtime()) -
   1024         (time.mktime(FIRST_DAY_UNIX_TIME_EPOCH) - ONE_DAY_SECONDS))*1000))
   1025 
   1026     # Commit the change.  This also updates the version number.
   1027     self._SaveEntry(entry)
   1028     return entry
   1029 
   1030   def _RewriteVersionInId(self, id_string):
   1031     """Rewrites an ID so that its migration version becomes current."""
   1032     parsed_id = self._ExtractIdInfo(id_string)
   1033     if not parsed_id:
   1034       return id_string
   1035     datatype, old_migration_version, inner_id = parsed_id
   1036     return self._MakeCurrentId(datatype, inner_id)
   1037 
  def TriggerMigration(self, datatypes):
    """Cause a migration to occur for a set of datatypes on this account.

    Clients will see the MIGRATION_DONE error for these datatypes until they
    resync them.

    Args:
      datatypes: A list of sync datatypes (python enums) to migrate.
    """
    # Bump's side effect (advancing the latest version) is what matters;
    # the returned remap is unused here.
    versions_to_remap = self.migration_history.Bump(datatypes)
    # NOTE: on Python 2, values() returns a list copy, so this snapshot
    # survives the clear() below.
    all_entries = self._entries.values()
    self._entries.clear()
    # Re-key every entry (and its parent pointer) so the migration version
    # embedded in the ID becomes current.
    for entry in all_entries:
      new_id = self._RewriteVersionInId(entry.id_string)
      entry.id_string = new_id
      if entry.HasField('parent_id_string'):
        entry.parent_id_string = self._RewriteVersionInId(
            entry.parent_id_string)
      self._entries[entry.id_string] = entry
   1054 
   1055   def TriggerSyncTabFavicons(self):
   1056     """Set the 'sync_tab_favicons' field to this account's nigori node.
   1057 
   1058     If the field is not currently set, will write a new nigori node entry
   1059     with the field set. Else does nothing.
   1060     """
   1061 
   1062     nigori_tag = "google_chrome_nigori"
   1063     nigori_original = self._entries.get(self._ServerTagToId(nigori_tag))
   1064     if (nigori_original.specifics.nigori.sync_tab_favicons):
   1065       return
   1066     nigori_new = copy.deepcopy(nigori_original)
   1067     nigori_new.specifics.nigori.sync_tabs = True
   1068     self._SaveEntry(nigori_new)
   1069 
   1070   def TriggerCreateSyncedBookmarks(self):
   1071     """Create the Synced Bookmarks folder under the Bookmarks permanent item.
   1072 
   1073     Clients will then receive the Synced Bookmarks folder on future
   1074     GetUpdates, and new bookmarks can be added within the Synced Bookmarks
   1075     folder.
   1076     """
   1077 
   1078     synced_bookmarks_spec, = [spec for spec in self._PERMANENT_ITEM_SPECS
   1079                               if spec.name == "Synced Bookmarks"]
   1080     self._CreatePermanentItem(synced_bookmarks_spec)
   1081 
   1082   def TriggerEnableKeystoreEncryption(self):
   1083     """Create the keystore_encryption experiment entity and enable it.
   1084 
   1085     A new entity within the EXPERIMENTS datatype is created with the unique
   1086     client tag "keystore_encryption" if it doesn't already exist. The
   1087     keystore_encryption message is then filled with |enabled| set to true.
   1088     """
   1089 
   1090     experiment_id = self._ServerTagToId("google_chrome_experiments")
   1091     keystore_encryption_id = self._ClientTagToId(
   1092         EXPERIMENTS,
   1093         KEYSTORE_ENCRYPTION_EXPERIMENT_TAG)
   1094     keystore_entry = self._entries.get(keystore_encryption_id)
   1095     if keystore_entry is None:
   1096       keystore_entry = sync_pb2.SyncEntity()
   1097       keystore_entry.id_string = keystore_encryption_id
   1098       keystore_entry.name = "Keystore Encryption"
   1099       keystore_entry.client_defined_unique_tag = (
   1100           KEYSTORE_ENCRYPTION_EXPERIMENT_TAG)
   1101       keystore_entry.folder = False
   1102       keystore_entry.deleted = False
   1103       keystore_entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS))
   1104       self._WritePosition(keystore_entry, experiment_id)
   1105 
   1106     keystore_entry.specifics.experiments.keystore_encryption.enabled = True
   1107 
   1108     self._SaveEntry(keystore_entry)
   1109 
   1110   def TriggerRotateKeystoreKeys(self):
   1111     """Rotate the current set of keystore encryption keys.
   1112 
   1113     |self._keys| will have a new random encryption key appended to it. We touch
   1114     the nigori node so that each client will receive the new encryption keys
   1115     only once.
   1116     """
   1117 
   1118     # Add a new encryption key.
   1119     self._keys += [MakeNewKeystoreKey(), ]
   1120 
   1121     # Increment the nigori node's timestamp, so clients will get the new keys
   1122     # on their next GetUpdates (any time the nigori node is sent back, we also
   1123     # send back the keystore keys).
   1124     nigori_tag = "google_chrome_nigori"
   1125     self._SaveEntry(self._entries.get(self._ServerTagToId(nigori_tag)))
   1126 
   1127   def TriggerAcknowledgeManagedUsers(self):
   1128     """Set the "acknowledged" flag for any managed user entities that don't have
   1129        it set already.
   1130     """
   1131 
   1132     if not self.acknowledge_managed_users:
   1133       return
   1134 
   1135     managed_users = [copy.deepcopy(entry) for entry in self._entries.values()
   1136                      if entry.specifics.HasField('managed_user')
   1137                      and not entry.specifics.managed_user.acknowledged]
   1138     for user in managed_users:
   1139       user.specifics.managed_user.acknowledged = True
   1140       self._SaveEntry(user)
   1141 
   1142   def TriggerEnablePreCommitGetUpdateAvoidance(self):
   1143     """Sets the experiment to enable pre-commit GetUpdate avoidance."""
   1144     experiment_id = self._ServerTagToId("google_chrome_experiments")
   1145     pre_commit_gu_avoidance_id = self._ClientTagToId(
   1146         EXPERIMENTS,
   1147         PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG)
   1148     entry = self._entries.get(pre_commit_gu_avoidance_id)
   1149     if entry is None:
   1150       entry = sync_pb2.SyncEntity()
   1151       entry.id_string = pre_commit_gu_avoidance_id
   1152       entry.name = "Pre-commit GU avoidance"
   1153       entry.client_defined_unique_tag = PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG
   1154       entry.folder = False
   1155       entry.deleted = False
   1156       entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS))
   1157       self._WritePosition(entry, experiment_id)
   1158     entry.specifics.experiments.pre_commit_update_avoidance.enabled = True
   1159     self._SaveEntry(entry)
   1160 
   1161   def SetInducedError(self, error, error_frequency,
   1162                       sync_count_before_errors):
   1163     self.induced_error = error
   1164     self.induced_error_frequency = error_frequency
   1165     self.sync_count_before_errors = sync_count_before_errors
   1166 
   1167   def GetInducedError(self):
   1168     return self.induced_error
   1169 
   1170   def AddSyncedNotification(self, serialized_notification):
   1171     """Adds a synced notification to the server data.
   1172 
   1173     The notification will be delivered to the client on the next GetUpdates
   1174     call.
   1175 
   1176     Args:
   1177       serialized_notification: A serialized CoalescedSyncedNotification.
   1178 
   1179     Returns:
   1180       The string representation of the added SyncEntity.
   1181 
   1182     Raises:
   1183       ClientNotConnectedError: if the client has not yet connected to this
   1184       server
   1185     """
   1186     # A unique string used wherever a unique ID for this notification is
   1187     # required.
   1188     unique_notification_id = str(uuid.uuid4())
   1189 
   1190     specifics = self._CreateSyncedNotificationEntitySpecifics(
   1191         unique_notification_id, serialized_notification)
   1192 
   1193     # Create the root SyncEntity representing a single notification.
   1194     entity = sync_pb2.SyncEntity()
   1195     entity.specifics.CopyFrom(specifics)
   1196     entity.parent_id_string = self._ServerTagToId(
   1197         'google_chrome_synced_notifications')
   1198     entity.name = 'Synced notification added for testing'
   1199     entity.version = self._GetNextVersionNumber()
   1200 
   1201     entity.client_defined_unique_tag = self._CreateSyncedNotificationClientTag(
   1202         specifics.synced_notification.coalesced_notification.key)
   1203     entity.id_string = self._ClientTagToId(GetEntryType(entity),
   1204                                            entity.client_defined_unique_tag)
   1205 
   1206     self._entries[entity.id_string] = copy.deepcopy(entity)
   1207 
   1208     return google.protobuf.text_format.MessageToString(entity)
   1209 
   1210   def _GetNextVersionNumber(self):
   1211     """Set the version to one more than the greatest version number seen."""
   1212     entries = sorted(self._entries.values(), key=operator.attrgetter('version'))
   1213     if len(entries) < 1:
   1214       raise ClientNotConnectedError
   1215     return entries[-1].version + 1
   1216 
   1217   def _CreateSyncedNotificationEntitySpecifics(self, unique_id,
   1218                                                serialized_notification):
   1219     """Create the EntitySpecifics proto for a synced notification."""
   1220     coalesced = synced_notification_data_pb2.CoalescedSyncedNotification()
   1221     google.protobuf.text_format.Merge(serialized_notification, coalesced)
   1222 
   1223     # Override the provided key so that we have a unique one.
   1224     coalesced.key = unique_id
   1225 
   1226     specifics = sync_pb2.EntitySpecifics()
   1227     notification_specifics = \
   1228         synced_notification_specifics_pb2.SyncedNotificationSpecifics()
   1229     notification_specifics.coalesced_notification.CopyFrom(coalesced)
   1230     specifics.synced_notification.CopyFrom(notification_specifics)
   1231 
   1232     return specifics
   1233 
   1234   def _CreateSyncedNotificationClientTag(self, key):
   1235     """Create the client_defined_unique_tag value for a SyncedNotification.
   1236 
   1237     Args:
   1238       key: The entity used to create the client tag.
   1239 
   1240     Returns:
   1241       The string value of the to be used as the client_defined_unique_tag.
   1242     """
   1243     serialized_type = sync_pb2.EntitySpecifics()
   1244     specifics = synced_notification_specifics_pb2.SyncedNotificationSpecifics()
   1245     serialized_type.synced_notification.CopyFrom(specifics)
   1246     hash_input = serialized_type.SerializeToString() + key
   1247     return base64.b64encode(hashlib.sha1(hash_input).digest())
   1248 
   1249   def AddSyncedNotificationAppInfo(self, app_info):
   1250     """Adds an app info struct to the server data.
   1251 
   1252     The notification will be delivered to the client on the next GetUpdates
   1253     call.
   1254 
   1255     Args:
   1256       app_info: A serialized AppInfo.
   1257 
   1258     Returns:
   1259       The string representation of the added SyncEntity.
   1260 
   1261     Raises:
   1262       ClientNotConnectedError: if the client has not yet connected to this
   1263       server
   1264     """
   1265     specifics = self._CreateSyncedNotificationAppInfoEntitySpecifics(app_info)
   1266 
   1267     # Create the root SyncEntity representing a single app info protobuf.
   1268     entity = sync_pb2.SyncEntity()
   1269     entity.specifics.CopyFrom(specifics)
   1270     entity.parent_id_string = self._ServerTagToId(
   1271         'google_chrome_synced_notification_app_info')
   1272     entity.name = 'App info added for testing'
   1273     entity.version = self._GetNextVersionNumber()
   1274 
   1275     # App Infos do not have a strong id, it only needs to be unique.
   1276     entity.client_defined_unique_tag = "foo"
   1277     entity.id_string = "foo"
   1278 
   1279     self._entries[entity.id_string] = copy.deepcopy(entity)
   1280 
   1281     print "entity before exit is ", entity
   1282 
   1283     return google.protobuf.text_format.MessageToString(entity)
   1284 
   1285   def _CreateSyncedNotificationAppInfoEntitySpecifics(
   1286     self, synced_notification_app_info):
   1287     """Create the EntitySpecifics proto for a synced notification app info."""
   1288     # Create a single, empty app_info object
   1289     app_info = \
   1290       synced_notification_app_info_specifics_pb2.SyncedNotificationAppInfo()
   1291     # Fill the app_info object from the text format protobuf.
   1292     google.protobuf.text_format.Merge(synced_notification_app_info, app_info)
   1293 
   1294     # Create a new specifics object with a contained app_info
   1295     specifics = sync_pb2.EntitySpecifics()
   1296     app_info_specifics = \
   1297         synced_notification_app_info_specifics_pb2.\
   1298         SyncedNotificationAppInfoSpecifics()
   1299 
   1300     # Copy the app info from the text format protobuf
   1301     contained_app_info = app_info_specifics.synced_notification_app_info.add()
   1302     contained_app_info.CopyFrom(app_info)
   1303 
   1304     # And put the new app_info_specifics into the specifics before returning.
   1305     specifics.synced_notification_app_info.CopyFrom(app_info_specifics)
   1306 
   1307     return specifics
   1308 
   1309 class TestServer(object):
   1310   """An object to handle requests for one (and only one) Chrome Sync account.
   1311 
   1312   TestServer consumes the sync command messages that are the outermost
   1313   layers of the protocol, performs the corresponding actions on its
   1314   SyncDataModel, and constructs an appropriate response message.
   1315   """
   1316 
  def __init__(self):
    # The implementation supports exactly one account; its state is here.
    self.account = SyncDataModel()
    # Serializes access to the shared account state above.
    self.account_lock = threading.Lock()
    # Clients that have talked to us: a map from the full client ID
    # to its nickname.
    self.clients = {}
    # Yields short names 'A'..'Y', then '+A'..'+Y', '++A', and so on
    # (xrange excludes its stop value, so 'Z' is never produced).
    self.client_name_generator = ('+' * times + chr(c)
        for times in xrange(0, sys.maxint) for c in xrange(ord('A'), ord('Z')))
    # When set, requests fail with a transient error (see CheckTransientError).
    self.transient_error = False
    # Number of syncs handled; compared against sync_count_before_errors
    # when deciding whether to raise an induced error.
    self.sync_count = 0
    # Gaia OAuth2 Token fields and their default values.
    self.response_code = 200
    self.request_token = 'rt1'
    self.access_token = 'at1'
    self.expires_in = 3600
    self.token_type = 'Bearer'
    # The ClientCommand to send back on each ServerToClientResponse. If set to
    # None, no ClientCommand should be sent.
    self._client_command = None
   1337 
   1338 
   1339   def GetShortClientName(self, query):
   1340     parsed = cgi.parse_qs(query[query.find('?')+1:])
   1341     client_id = parsed.get('client_id')
   1342     if not client_id:
   1343       return '?'
   1344     client_id = client_id[0]
   1345     if client_id not in self.clients:
   1346       self.clients[client_id] = self.client_name_generator.next()
   1347     return self.clients[client_id]
   1348 
   1349   def CheckStoreBirthday(self, request):
   1350     """Raises StoreBirthdayError if the request's birthday is a mismatch."""
   1351     if not request.HasField('store_birthday'):
   1352       return
   1353     if self.account.StoreBirthday() != request.store_birthday:
   1354       raise StoreBirthdayError
   1355 
   1356   def CheckTransientError(self):
   1357     """Raises TransientError if transient_error variable is set."""
   1358     if self.transient_error:
   1359       raise TransientError
   1360 
   1361   def CheckSendError(self):
   1362      """Raises SyncInducedError if needed."""
   1363      if (self.account.induced_error.error_type !=
   1364          sync_enums_pb2.SyncEnums.UNKNOWN):
   1365        # Always means return the given error for all requests.
   1366        if self.account.induced_error_frequency == ERROR_FREQUENCY_ALWAYS:
   1367          raise SyncInducedError
   1368        # This means the FIRST 2 requests of every 3 requests
   1369        # return an error. Don't switch the order of failures. There are
   1370        # test cases that rely on the first 2 being the failure rather than
   1371        # the last 2.
   1372        elif (self.account.induced_error_frequency ==
   1373              ERROR_FREQUENCY_TWO_THIRDS):
   1374          if (((self.sync_count -
   1375                self.account.sync_count_before_errors) % 3) != 0):
   1376            raise SyncInducedError
   1377        else:
   1378          raise InducedErrorFrequencyNotDefined
   1379 
   1380   def HandleMigrate(self, path):
   1381     query = urlparse.urlparse(path)[4]
   1382     code = 200
   1383     self.account_lock.acquire()
   1384     try:
   1385       datatypes = [DataTypeStringToSyncTypeLoose(x)
   1386                    for x in urlparse.parse_qs(query).get('type',[])]
   1387       if datatypes:
   1388         self.account.TriggerMigration(datatypes)
   1389         response = 'Migrated datatypes %s' % (
   1390             ' and '.join(SyncTypeToString(x).upper() for x in datatypes))
   1391       else:
   1392         response = 'Please specify one or more <i>type=name</i> parameters'
   1393         code = 400
   1394     except DataTypeIdNotRecognized, error:
   1395       response = 'Could not interpret datatype name'
   1396       code = 400
   1397     finally:
   1398       self.account_lock.release()
   1399     return (code, '<html><title>Migration: %d</title><H1>%d %s</H1></html>' %
   1400                 (code, code, response))
   1401 
   1402   def HandleSetInducedError(self, path):
   1403      query = urlparse.urlparse(path)[4]
   1404      self.account_lock.acquire()
   1405      code = 200
   1406      response = 'Success'
   1407      error = sync_pb2.ClientToServerResponse.Error()
   1408      try:
   1409        error_type = urlparse.parse_qs(query)['error']
   1410        action = urlparse.parse_qs(query)['action']
   1411        error.error_type = int(error_type[0])
   1412        error.action = int(action[0])
   1413        try:
   1414          error.url = (urlparse.parse_qs(query)['url'])[0]
   1415        except KeyError:
   1416          error.url = ''
   1417        try:
   1418          error.error_description =(
   1419          (urlparse.parse_qs(query)['error_description'])[0])
   1420        except KeyError:
   1421          error.error_description = ''
   1422        try:
   1423          error_frequency = int((urlparse.parse_qs(query)['frequency'])[0])
   1424        except KeyError:
   1425          error_frequency = ERROR_FREQUENCY_ALWAYS
   1426        self.account.SetInducedError(error, error_frequency, self.sync_count)
   1427        response = ('Error = %d, action = %d, url = %s, description = %s' %
   1428                    (error.error_type, error.action,
   1429                     error.url,
   1430                     error.error_description))
   1431      except error:
   1432        response = 'Could not parse url'
   1433        code = 400
   1434      finally:
   1435        self.account_lock.release()
   1436      return (code, '<html><title>SetError: %d</title><H1>%d %s</H1></html>' %
   1437                 (code, code, response))
   1438 
   1439   def HandleCreateBirthdayError(self):
   1440     self.account.ResetStoreBirthday()
   1441     return (
   1442         200,
   1443         '<html><title>Birthday error</title><H1>Birthday error</H1></html>')
   1444 
   1445   def HandleSetTransientError(self):
   1446     self.transient_error = True
   1447     return (
   1448         200,
   1449         '<html><title>Transient error</title><H1>Transient error</H1></html>')
   1450 
   1451   def HandleSetSyncTabFavicons(self):
   1452     """Set 'sync_tab_favicons' field of the nigori node for this account."""
   1453     self.account.TriggerSyncTabFavicons()
   1454     return (
   1455         200,
   1456         '<html><title>Tab Favicons</title><H1>Tab Favicons</H1></html>')
   1457 
   1458   def HandleCreateSyncedBookmarks(self):
   1459     """Create the Synced Bookmarks folder under Bookmarks."""
   1460     self.account.TriggerCreateSyncedBookmarks()
   1461     return (
   1462         200,
   1463         '<html><title>Synced Bookmarks</title><H1>Synced Bookmarks</H1></html>')
   1464 
   1465   def HandleEnableKeystoreEncryption(self):
   1466     """Enables the keystore encryption experiment."""
   1467     self.account.TriggerEnableKeystoreEncryption()
   1468     return (
   1469         200,
   1470         '<html><title>Enable Keystore Encryption</title>'
   1471             '<H1>Enable Keystore Encryption</H1></html>')
   1472 
   1473   def HandleRotateKeystoreKeys(self):
   1474     """Rotate the keystore encryption keys."""
   1475     self.account.TriggerRotateKeystoreKeys()
   1476     return (
   1477         200,
   1478         '<html><title>Rotate Keystore Keys</title>'
   1479             '<H1>Rotate Keystore Keys</H1></html>')
   1480 
   1481   def HandleEnableManagedUserAcknowledgement(self):
   1482     """Enable acknowledging newly created managed users."""
   1483     self.account.acknowledge_managed_users = True
   1484     return (
   1485         200,
   1486         '<html><title>Enable Managed User Acknowledgement</title>'
   1487             '<h1>Enable Managed User Acknowledgement</h1></html>')
   1488 
   1489   def HandleEnablePreCommitGetUpdateAvoidance(self):
   1490     """Enables the pre-commit GU avoidance experiment."""
   1491     self.account.TriggerEnablePreCommitGetUpdateAvoidance()
   1492     return (
   1493         200,
   1494         '<html><title>Enable pre-commit GU avoidance</title>'
   1495             '<H1>Enable pre-commit GU avoidance</H1></html>')
   1496 
  def HandleCommand(self, query, raw_request):
    """Decode and handle a sync command from a raw input of bytes.

    This is the main entry point for this class.  It is safe to call this
    method from multiple threads.

    Args:
      query: The HTTP request's query string; used only to derive a short
        client nickname for console logging.
      raw_request: An iterable byte sequence to be interpreted as a sync
        protocol command.
    Returns:
      A tuple (response_code, raw_response); the first value is an HTTP
      result code, while the second value is a string of bytes which is the
      serialized reply to the command.
    """
    self.account_lock.acquire()
    self.sync_count += 1
    # Logging helper: prefixes a console line with the client's short name
    # and the message direction ('->' request, '<-' response). Trailing
    # comma keeps the subsequent print on the same line.
    def print_context(direction):
      print '[Client %s %s %s.py]' % (self.GetShortClientName(query), direction,
                                      __name__),

    try:
      request = sync_pb2.ClientToServerMessage()
      request.MergeFromString(raw_request)
      contents = request.message_contents

      response = sync_pb2.ClientToServerResponse()
      response.error_code = sync_enums_pb2.SyncEnums.SUCCESS

      # Attach the configured ClientCommand (if any) to every response.
      if self._client_command:
        response.client_command.CopyFrom(self._client_command)

      # Each Check* call raises to short-circuit into one of the except
      # handlers below.
      self.CheckStoreBirthday(request)
      response.store_birthday = self.account.store_birthday
      self.CheckTransientError()
      self.CheckSendError()

      print_context('->')

      if contents == sync_pb2.ClientToServerMessage.AUTHENTICATE:
        print 'Authenticate'
        # We accept any authentication token, and support only one account.
        # TODO(nick): Mock out the GAIA authentication as well; hook up here.
        response.authenticate.user.email = 'syncjuser@chromium'
        response.authenticate.user.display_name = 'Sync J User'
      elif contents == sync_pb2.ClientToServerMessage.COMMIT:
        print 'Commit %d item(s)' % len(request.commit.entries)
        self.HandleCommit(request.commit, response.commit)
      elif contents == sync_pb2.ClientToServerMessage.GET_UPDATES:
        print 'GetUpdates',
        self.HandleGetUpdates(request.get_updates, response.get_updates)
        print_context('<-')
        print '%d update(s)' % len(response.get_updates.entries)
      else:
        print 'Unrecognizable sync request!'
        return (400, None)  # Bad request.
      return (200, response.SerializeToString())
    except MigrationDoneError, error:
      # Tell the client which types were migrated; it must resync them.
      print_context('<-')
      print 'MIGRATION_DONE: <%s>' % (ShortDatatypeListSummary(error.datatypes))
      response = sync_pb2.ClientToServerResponse()
      response.store_birthday = self.account.store_birthday
      response.error_code = sync_enums_pb2.SyncEnums.MIGRATION_DONE
      response.migrated_data_type_id[:] = [
          SyncTypeToProtocolDataTypeId(x) for x in error.datatypes]
      return (200, response.SerializeToString())
    except StoreBirthdayError, error:
      # Raised by CheckStoreBirthday: client's birthday doesn't match ours.
      print_context('<-')
      print 'NOT_MY_BIRTHDAY'
      response = sync_pb2.ClientToServerResponse()
      response.store_birthday = self.account.store_birthday
      response.error_code = sync_enums_pb2.SyncEnums.NOT_MY_BIRTHDAY
      return (200, response.SerializeToString())
    except TransientError, error:
      ### This is deprecated now. Would be removed once test cases are removed.
      # NOTE(review): unlike the handlers above, this one reuses the
      # 'response' object built in the try block rather than a fresh one.
      print_context('<-')
      print 'TRANSIENT_ERROR'
      response.store_birthday = self.account.store_birthday
      response.error_code = sync_enums_pb2.SyncEnums.TRANSIENT_ERROR
      return (200, response.SerializeToString())
    except SyncInducedError, error:
      # Replay the error previously configured via HandleSetInducedError.
      print_context('<-')
      print 'INDUCED_ERROR'
      response.store_birthday = self.account.store_birthday
      error = self.account.GetInducedError()
      response.error.error_type = error.error_type
      response.error.url = error.url
      response.error.error_description = error.error_description
      response.error.action = error.action
      return (200, response.SerializeToString())
    finally:
      self.account_lock.release()
   1588 
   1589   def HandleCommit(self, commit_message, commit_response):
   1590     """Respond to a Commit request by updating the user's account state.
   1591 
   1592     Commit attempts stop after the first error, returning a CONFLICT result
   1593     for any unattempted entries.
   1594 
   1595     Args:
   1596       commit_message: A sync_pb.CommitMessage protobuf holding the content
   1597         of the client's request.
   1598       commit_response: A sync_pb.CommitResponse protobuf into which a reply
   1599         to the client request will be written.
   1600     """
   1601     commit_response.SetInParent()
   1602     batch_failure = False
   1603     session = {}  # Tracks ID renaming during the commit operation.
   1604     guid = commit_message.cache_guid
   1605 
   1606     self.account.ValidateCommitEntries(commit_message.entries)
   1607 
   1608     for entry in commit_message.entries:
   1609       server_entry = None
   1610       if not batch_failure:
   1611         # Try to commit the change to the account.
   1612         server_entry = self.account.CommitEntry(entry, guid, session)
   1613 
   1614       # An entryresponse is returned in both success and failure cases.
   1615       reply = commit_response.entryresponse.add()
   1616       if not server_entry:
   1617         reply.response_type = sync_pb2.CommitResponse.CONFLICT
   1618         reply.error_message = 'Conflict.'
   1619         batch_failure = True  # One failure halts the batch.
   1620       else:
   1621         reply.response_type = sync_pb2.CommitResponse.SUCCESS
   1622         # These are the properties that the server is allowed to override
   1623         # during commit; the client wants to know their values at the end
   1624         # of the operation.
   1625         reply.id_string = server_entry.id_string
   1626         if not server_entry.deleted:
   1627           # Note: the production server doesn't actually send the
   1628           # parent_id_string on commit responses, so we don't either.
   1629           reply.position_in_parent = server_entry.position_in_parent
   1630           reply.version = server_entry.version
   1631           reply.name = server_entry.name
   1632           reply.non_unique_name = server_entry.non_unique_name
   1633         else:
   1634           reply.version = entry.version + 1
   1635 
   1636   def HandleGetUpdates(self, update_request, update_response):
   1637     """Respond to a GetUpdates request by querying the user's account.
   1638 
   1639     Args:
   1640       update_request: A sync_pb.GetUpdatesMessage protobuf holding the content
   1641         of the client's request.
   1642       update_response: A sync_pb.GetUpdatesResponse protobuf into which a reply
   1643         to the client request will be written.
   1644     """
   1645     update_response.SetInParent()
   1646     update_sieve = UpdateSieve(update_request, self.account.migration_history)
   1647 
   1648     print CallerInfoToString(update_request.caller_info.source),
   1649     print update_sieve.SummarizeRequest()
   1650 
   1651     update_sieve.CheckMigrationState()
   1652 
   1653     new_timestamp, entries, remaining = self.account.GetChanges(update_sieve)
   1654 
   1655     update_response.changes_remaining = remaining
   1656     sending_nigori_node = False
   1657     for entry in entries:
   1658       if entry.name == 'Nigori':
   1659         sending_nigori_node = True
   1660       reply = update_response.entries.add()
   1661       reply.CopyFrom(entry)
   1662     update_sieve.SaveProgress(new_timestamp, update_response)
   1663 
   1664     if update_request.need_encryption_key or sending_nigori_node:
   1665       update_response.encryption_keys.extend(self.account.GetKeystoreKeys())
   1666 
   1667   def HandleGetOauth2Token(self):
   1668     return (int(self.response_code),
   1669             '{\n'
   1670             '  \"refresh_token\": \"' + self.request_token + '\",\n'
   1671             '  \"access_token\": \"' + self.access_token + '\",\n'
   1672             '  \"expires_in\": ' + str(self.expires_in) + ',\n'
   1673             '  \"token_type\": \"' + self.token_type +'\"\n'
   1674             '}')
   1675 
   1676   def HandleSetOauth2Token(self, response_code, request_token, access_token,
   1677                            expires_in, token_type):
   1678     if response_code != 0:
   1679       self.response_code = response_code
   1680     if request_token != '':
   1681       self.request_token = request_token
   1682     if access_token != '':
   1683       self.access_token = access_token
   1684     if expires_in != 0:
   1685       self.expires_in = expires_in
   1686     if token_type != '':
   1687       self.token_type = token_type
   1688 
   1689     return (200,
   1690             '<html><title>Set OAuth2 Token</title>'
   1691             '<H1>This server will now return the OAuth2 Token:</H1>'
   1692             '<p>response_code: ' + str(self.response_code) + '</p>'
   1693             '<p>request_token: ' + self.request_token + '</p>'
   1694             '<p>access_token: ' + self.access_token + '</p>'
   1695             '<p>expires_in: ' + str(self.expires_in) + '</p>'
   1696             '<p>token_type: ' + self.token_type + '</p>'
   1697             '</html>')
   1698 
   1699   def CustomizeClientCommand(self, sessions_commit_delay_seconds):
   1700     """Customizes the value of the ClientCommand of ServerToClientResponse.
   1701 
   1702     Currently, this only allows for changing the sessions_commit_delay_seconds
   1703     field. This is useful for testing in conjunction with
   1704     AddSyncedNotification so that synced notifications are seen immediately
   1705     after triggering them with an HTTP call to the test server.
   1706 
   1707     Args:
   1708       sessions_commit_delay_seconds: The desired sync delay time for sessions.
   1709     """
   1710     if not self._client_command:
   1711       self._client_command = client_commands_pb2.ClientCommand()
   1712 
   1713     self._client_command.sessions_commit_delay_seconds = \
   1714         sessions_commit_delay_seconds
   1715     return self._client_command
   1716