# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os
import re
import sys


def LoadSupport(input_api):
  if 'cloud_storage' not in globals():
    # Avoid leaking changes to global sys.path.
    _old_sys_path = sys.path
    try:
      telemetry_path = os.path.join(os.path.dirname(os.path.dirname(
          input_api.PresubmitLocalPath())), 'telemetry')
      sys.path = [telemetry_path] + sys.path
      from telemetry.page import cloud_storage
      globals()['cloud_storage'] = cloud_storage
    finally:
      sys.path = _old_sys_path

  return globals()['cloud_storage']


def _SyncFilesToCloud(input_api, output_api):
  """Searches for .sha1 files and uploads the corresponding files to Cloud
  Storage.

  Validates each hash and skips the upload if the file is already in Cloud
  Storage.
  """

  cloud_storage = LoadSupport(input_api)

  # Look in both buckets, in case the user uploaded the file manually. But this
  # script focuses on WPR archives, so it only uploads to the internal bucket.
  hashes_in_cloud_storage = cloud_storage.List(cloud_storage.INTERNAL_BUCKET)
  hashes_in_cloud_storage += cloud_storage.List(cloud_storage.PUBLIC_BUCKET)

  results = []
  for affected_file in input_api.AffectedFiles(include_deletes=False):
    hash_path = affected_file.AbsoluteLocalPath()
    file_path, extension = os.path.splitext(hash_path)
    if extension != '.sha1':
      continue

    # The .sha1 file contains only the hex digest of the corresponding file.
    with open(hash_path, 'rb') as f:
      file_hash = f.read(1024).rstrip()
    if file_hash in hashes_in_cloud_storage:
      results.append(output_api.PresubmitNotifyResult(
          'File already in Cloud Storage, skipping upload: %s' % hash_path))
      continue

    if not re.match('^([A-Za-z0-9]{40})$', file_hash):
      results.append(output_api.PresubmitError(
          'Hash file does not contain a valid SHA-1 hash: %s' % hash_path))
      continue
    if not os.path.exists(file_path):
      results.append(output_api.PresubmitError(
          'Hash file exists, but file not found: %s' % hash_path))
      continue
    if cloud_storage.GetHash(file_path) != file_hash:
      results.append(output_api.PresubmitError(
          'Hash file does not match file\'s actual hash: %s' % hash_path))
      continue

    try:
      cloud_storage.Insert(cloud_storage.INTERNAL_BUCKET, file_hash, file_path)
      results.append(output_api.PresubmitNotifyResult(
          'Uploaded file to Cloud Storage: %s' % hash_path))
    except cloud_storage.CloudStorageError as e:
      results.append(output_api.PresubmitError(
          'Unable to upload to Cloud Storage: %s\n\n%s' % (hash_path, e)))

  return results


def CheckChangeOnUpload(input_api, output_api):
  return _SyncFilesToCloud(input_api, output_api)


def CheckChangeOnCommit(input_api, output_api):
  return _SyncFilesToCloud(input_api, output_api)
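
# A minimal sketch (illustrative only; 'data_001.wpr' is a hypothetical
# archive name, not part of this script) of how a .sha1 file that this
# check consumes would typically be produced:
#
#   import hashlib
#   with open('data_001.wpr', 'rb') as f:
#     digest = hashlib.sha1(f.read()).hexdigest()
#   with open('data_001.wpr.sha1', 'w') as f:
#     f.write(digest)
#
# On upload and commit, this presubmit reads the 40-character digest from the
# .sha1 file, verifies it against the local archive, and uploads the archive
# to the internal bucket (keyed by that digest) if it is not already present
# in Cloud Storage.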