<html><body>
<style>

body, h1, h2, h3, div, span, p, pre, a {
  margin: 0;
  padding: 0;
  border: 0;
  font-weight: inherit;
  font-style: inherit;
  font-size: 100%;
  font-family: inherit;
  vertical-align: baseline;
}

body {
  font-size: 13px;
  padding: 1em;
}

h1 {
  font-size: 26px;
  margin-bottom: 1em;
}

h2 {
  font-size: 24px;
  margin-bottom: 1em;
}

h3 {
  font-size: 20px;
  margin-bottom: 1em;
  margin-top: 1em;
}

pre, code {
  line-height: 1.5;
  font-family: Monaco, 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', 'Lucida Console', monospace;
}

pre {
  margin-top: 0.5em;
}

h1, h2, h3, p {
  font-family: Arial, sans-serif;
}

h1, h2, h3 {
  border-bottom: solid #CCC 1px;
}

.toc_element {
  margin-top: 0.5em;
}

.firstline {
  margin-left: 2em;
}

.method {
  margin-top: 1em;
  border: solid 1px #CCC;
  padding: 1em;
  background: #EEE;
}

.details {
  font-weight: bold;
  font-size: 14px;
}

</style>

<h1><a href="storagetransfer_v1.html">Google Storage Transfer API</a> . <a href="storagetransfer_v1.transferJobs.html">transferJobs</a></h1>
<h2>Instance Methods</h2>
<p class="toc_element">
  <code><a href="#create">create(body, x__xgafv=None)</a></code></p>
<p class="firstline">Creates a transfer job that runs periodically.</p>
<p class="toc_element">
  <code><a href="#get">get(jobName, projectId=None, x__xgafv=None)</a></code></p>
<p class="firstline">Gets a transfer job.</p>
<p class="toc_element">
  <code><a href="#list">list(pageSize=None, filter=None, pageToken=None, x__xgafv=None)</a></code></p>
<p class="firstline">Lists transfer jobs.</p>
<p class="toc_element">
  <code><a href="#list_next">list_next(previous_request, previous_response)</a></code></p>
<p class="firstline">Retrieves the next page of results.</p>
<p class="toc_element">
  <code><a href="#patch">patch(jobName, body, x__xgafv=None)</a></code></p>
<p class="firstline">Updates a transfer job. Updating a job's transfer spec does not affect transfer operations that are already running.</p>
<h3>Method Details</h3>
<div class="method">
    <code class="details" id="create">create(body, x__xgafv=None)</code>
  <pre>Creates a transfer job that runs periodically.

Args:
  body: object, The request body. (required)
    The object takes the form of:

{ # This resource represents the configuration of a transfer job that runs
    # periodically.
  "transferSpec": { # Configuration for running a transfer. # Transfer specification.
      # Required.
    "objectConditions": { # Conditions that determine which objects will be transferred. # Only objects that satisfy these object conditions are included in the set
        # of data source and data sink objects. Object conditions based on
        # objects' `lastModificationTime` do not exclude objects in a data sink.
      "maxTimeElapsedSinceLastModification": "A String", # `maxTimeElapsedSinceLastModification` is the complement to
          # `minTimeElapsedSinceLastModification`.
      "includePrefixes": [ # If `includePrefixes` is specified, objects that satisfy the object
          # conditions must have names that start with one of the `includePrefixes`
          # and that do not start with any of the `excludePrefixes`. If `includePrefixes`
          # is not specified, all objects except those that have names starting with
          # one of the `excludePrefixes` must satisfy the object conditions.
115 # 116 # Requirements: 117 # 118 # * Each include-prefix and exclude-prefix can contain any sequence of 119 # Unicode characters, of max length 1024 bytes when UTF8-encoded, and 120 # must not contain Carriage Return or Line Feed characters. Wildcard 121 # matching and regular expression matching are not supported. 122 # 123 # * Each include-prefix and exclude-prefix must omit the leading slash. 124 # For example, to include the `requests.gz` object in a transfer from 125 # `s3://my-aws-bucket/logs/y=2015/requests.gz`, specify the include 126 # prefix as `logs/y=2015/requests.gz`. 127 # 128 # * None of the include-prefix or the exclude-prefix values can be empty, 129 # if specified. 130 # 131 # * Each include-prefix must include a distinct portion of the object 132 # namespace, i.e., no include-prefix may be a prefix of another 133 # include-prefix. 134 # 135 # * Each exclude-prefix must exclude a distinct portion of the object 136 # namespace, i.e., no exclude-prefix may be a prefix of another 137 # exclude-prefix. 138 # 139 # * If `includePrefixes` is specified, then each exclude-prefix must start 140 # with the value of a path explicitly included by `includePrefixes`. 141 # 142 # The max size of `includePrefixes` is 1000. 143 "A String", 144 ], 145 "excludePrefixes": [ # `excludePrefixes` must follow the requirements described for 146 # `includePrefixes`. 147 # 148 # The max size of `excludePrefixes` is 1000. 149 "A String", 150 ], 151 "minTimeElapsedSinceLastModification": "A String", # If unspecified, `minTimeElapsedSinceLastModification` takes a zero value 152 # and `maxTimeElapsedSinceLastModification` takes the maximum possible 153 # value of Duration. Objects that satisfy the object conditions 154 # must either have a `lastModificationTime` greater or equal to 155 # `NOW` - `maxTimeElapsedSinceLastModification` and less than 156 # `NOW` - `minTimeElapsedSinceLastModification`, or not have a 157 # `lastModificationTime`. 158 }, 159 "gcsDataSource": { # In a GcsData, an object's name is the Google Cloud Storage object's name and # A Google Cloud Storage data source. 160 # its `lastModificationTime` refers to the object's updated time, which changes 161 # when the content or the metadata of the object is updated. 162 "bucketName": "A String", # Google Cloud Storage bucket name (see 163 # [Bucket Name Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). 164 # Required. 165 }, 166 "httpDataSource": { # An HttpData specifies a list of objects on the web to be transferred over # An HTTP URL data source. 167 # HTTP. The information of the objects to be transferred is contained in a 168 # file referenced by a URL. The first line in the file must be 169 # "TsvHttpData-1.0", which specifies the format of the file. Subsequent lines 170 # specify the information of the list of objects, one object per list entry. 171 # Each entry has the following tab-delimited fields: 172 # 173 # * HTTP URL - The location of the object. 174 # 175 # * Length - The size of the object in bytes. 176 # 177 # * MD5 - The base64-encoded MD5 hash of the object. 178 # 179 # For an example of a valid TSV file, see 180 # [Transferring data from URLs](https://cloud.google.com/storage/transfer/#urls) 181 # 182 # When transferring data based on a URL list, keep the following in mind: 183 # 184 # * When an object located at `http(s)://hostname:port/<URL-path>` is transferred 185 # to a data sink, the name of the object at the data sink is 186 # `<hostname>/<URL-path>`. 
187 # 188 # * If the specified size of an object does not match the actual size of the 189 # object fetched, the object will not be transferred. 190 # 191 # * If the specified MD5 does not match the MD5 computed from the transferred 192 # bytes, the object transfer will fail. For more information, see 193 # [Generating MD5 hashes](https://cloud.google.com/storage/transfer/#md5) 194 # 195 # * Ensure that each URL you specify is publicly accessible. For 196 # example, in Google Cloud Storage you can 197 # [share an object publicly] 198 # (https://cloud.google.com/storage/docs/cloud-console#_sharingdata) and get 199 # a link to it. 200 # 201 # * Storage Transfer Service obeys `robots.txt` rules and requires the source 202 # HTTP server to support `Range` requests and to return a `Content-Length` 203 # header in each response. 204 # 205 # * [ObjectConditions](#ObjectConditions) have no effect when filtering objects 206 # to transfer. 207 "listUrl": "A String", # The URL that points to the file that stores the object list entries. 208 # This file must allow public access. Currently, only URLs with HTTP and 209 # HTTPS schemes are supported. 210 # Required. 211 }, 212 "transferOptions": { # TransferOptions uses three boolean parameters to define the actions # If the option `deleteObjectsUniqueInSink` is `true`, object conditions 213 # based on objects' `lastModificationTime` are ignored and do not exclude 214 # objects in a data source or a data sink. 215 # to be performed on objects in a transfer. 216 "overwriteObjectsAlreadyExistingInSink": True or False, # Whether overwriting objects that already exist in the sink is allowed. 217 "deleteObjectsFromSourceAfterTransfer": True or False, # Whether objects should be deleted from the source after they are 218 # transferred to the sink. 219 "deleteObjectsUniqueInSink": True or False, # Whether objects that exist only in the sink should be deleted. 220 }, 221 "gcsDataSink": { # In a GcsData, an object's name is the Google Cloud Storage object's name and # A Google Cloud Storage data sink. 222 # its `lastModificationTime` refers to the object's updated time, which changes 223 # when the content or the metadata of the object is updated. 224 "bucketName": "A String", # Google Cloud Storage bucket name (see 225 # [Bucket Name Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). 226 # Required. 227 }, 228 "awsS3DataSource": { # An AwsS3Data can be a data source, but not a data sink. # An AWS S3 data source. 229 # In an AwsS3Data, an object's name is the S3 object's key name. 230 "awsAccessKey": { # AWS access key (see # AWS access key used to sign the API requests to the AWS S3 bucket. 231 # Permissions on the bucket must be granted to the access ID of the 232 # AWS access key. 233 # Required. 234 # [AWS Security Credentials](http://docs.aws.amazon.com/general/latest/gr/aws-security-credentials.html)). 235 "secretAccessKey": "A String", # AWS secret access key. This field is not returned in RPC responses. 236 # Required. 237 "accessKeyId": "A String", # AWS access key ID. 238 # Required. 239 }, 240 "bucketName": "A String", # S3 Bucket name (see 241 # [Creating a bucket](http://docs.aws.amazon.com/AmazonS3/latest/dev/create-bucket-get-location-example.html)). 242 # Required. 243 }, 244 }, 245 "status": "A String", # Status of the job. This value MUST be specified for 246 # `CreateTransferJobRequests`. 247 # 248 # NOTE: The effect of the new job status takes place during a subsequent job 249 # run. 
For example, if you change the job status from `ENABLED` to 250 # `DISABLED`, and an operation spawned by the transfer is running, the status 251 # change would not affect the current operation. 252 "deletionTime": "A String", # This field cannot be changed by user requests. 253 "description": "A String", # A description provided by the user for the job. Its max length is 1024 254 # bytes when Unicode-encoded. 255 "schedule": { # Transfers can be scheduled to recur or to run just once. # Schedule specification. 256 # Required. 257 "scheduleStartDate": { # Represents a whole calendar date, e.g. date of birth. The time of day and # The first day the recurring transfer is scheduled to run. If 258 # `scheduleStartDate` is in the past, the transfer will run for the first 259 # time on the following day. 260 # Required. 261 # time zone are either specified elsewhere or are not significant. The date 262 # is relative to the Proleptic Gregorian Calendar. The day may be 0 to 263 # represent a year and month where the day is not significant, e.g. credit card 264 # expiration date. The year may be 0 to represent a month and day independent 265 # of year, e.g. anniversary date. Related types are google.type.TimeOfDay 266 # and `google.protobuf.Timestamp`. 267 "year": 42, # Year of date. Must be from 1 to 9999, or 0 if specifying a date without 268 # a year. 269 "day": 42, # Day of month. Must be from 1 to 31 and valid for the year and month, or 0 270 # if specifying a year/month where the day is not significant. 271 "month": 42, # Month of year. Must be from 1 to 12. 272 }, 273 "startTimeOfDay": { # Represents a time of day. The date and time zone are either not significant # The time in UTC at which the transfer will be scheduled to start in a day. 274 # Transfers may start later than this time. If not specified, recurring and 275 # one-time transfers that are scheduled to run today will run immediately; 276 # recurring transfers that are scheduled to run on a future date will start 277 # at approximately midnight UTC on that date. Note that when configuring a 278 # transfer with the Cloud Platform Console, the transfer's start time in a 279 # day is specified in your local timezone. 280 # or are specified elsewhere. An API may choose to allow leap seconds. Related 281 # types are google.type.Date and `google.protobuf.Timestamp`. 282 "hours": 42, # Hours of day in 24 hour format. Should be from 0 to 23. An API may choose 283 # to allow the value "24:00:00" for scenarios like business closing time. 284 "nanos": 42, # Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999. 285 "seconds": 42, # Seconds of minutes of the time. Must normally be from 0 to 59. An API may 286 # allow the value 60 if it allows leap-seconds. 287 "minutes": 42, # Minutes of hour of day. Must be from 0 to 59. 288 }, 289 "scheduleEndDate": { # Represents a whole calendar date, e.g. date of birth. The time of day and # The last day the recurring transfer will be run. If `scheduleEndDate` 290 # is the same as `scheduleStartDate`, the transfer will be executed only 291 # once. 292 # time zone are either specified elsewhere or are not significant. The date 293 # is relative to the Proleptic Gregorian Calendar. The day may be 0 to 294 # represent a year and month where the day is not significant, e.g. credit card 295 # expiration date. The year may be 0 to represent a month and day independent 296 # of year, e.g. anniversary date. Related types are google.type.TimeOfDay 297 # and `google.protobuf.Timestamp`. 
298 "year": 42, # Year of date. Must be from 1 to 9999, or 0 if specifying a date without 299 # a year. 300 "day": 42, # Day of month. Must be from 1 to 31 and valid for the year and month, or 0 301 # if specifying a year/month where the day is not significant. 302 "month": 42, # Month of year. Must be from 1 to 12. 303 }, 304 }, 305 "projectId": "A String", # The ID of the Google Cloud Platform Console project that owns the job. 306 # Required. 307 "lastModificationTime": "A String", # This field cannot be changed by user requests. 308 "creationTime": "A String", # This field cannot be changed by user requests. 309 "name": "A String", # A globally unique name assigned by Storage Transfer Service when the 310 # job is created. This field should be left empty in requests to create a new 311 # transfer job; otherwise, the requests result in an `INVALID_ARGUMENT` 312 # error. 313 } 314 315 x__xgafv: string, V1 error format. 316 Allowed values 317 1 - v1 error format 318 2 - v2 error format 319 320 Returns: 321 An object of the form: 322 323 { # This resource represents the configuration of a transfer job that runs 324 # periodically. 325 "transferSpec": { # Configuration for running a transfer. # Transfer specification. 326 # Required. 327 "objectConditions": { # Conditions that determine which objects will be transferred. # Only objects that satisfy these object conditions are included in the set 328 # of data source and data sink objects. Object conditions based on 329 # objects' `lastModificationTime` do not exclude objects in a data sink. 330 "maxTimeElapsedSinceLastModification": "A String", # `maxTimeElapsedSinceLastModification` is the complement to 331 # `minTimeElapsedSinceLastModification`. 332 "includePrefixes": [ # If `includePrefixes` is specified, objects that satisfy the object 333 # conditions must have names that start with one of the `includePrefixes` 334 # and that do not start with any of the `excludePrefixes`. If `includePrefixes` 335 # is not specified, all objects except those that have names starting with 336 # one of the `excludePrefixes` must satisfy the object conditions. 337 # 338 # Requirements: 339 # 340 # * Each include-prefix and exclude-prefix can contain any sequence of 341 # Unicode characters, of max length 1024 bytes when UTF8-encoded, and 342 # must not contain Carriage Return or Line Feed characters. Wildcard 343 # matching and regular expression matching are not supported. 344 # 345 # * Each include-prefix and exclude-prefix must omit the leading slash. 346 # For example, to include the `requests.gz` object in a transfer from 347 # `s3://my-aws-bucket/logs/y=2015/requests.gz`, specify the include 348 # prefix as `logs/y=2015/requests.gz`. 349 # 350 # * None of the include-prefix or the exclude-prefix values can be empty, 351 # if specified. 352 # 353 # * Each include-prefix must include a distinct portion of the object 354 # namespace, i.e., no include-prefix may be a prefix of another 355 # include-prefix. 356 # 357 # * Each exclude-prefix must exclude a distinct portion of the object 358 # namespace, i.e., no exclude-prefix may be a prefix of another 359 # exclude-prefix. 360 # 361 # * If `includePrefixes` is specified, then each exclude-prefix must start 362 # with the value of a path explicitly included by `includePrefixes`. 363 # 364 # The max size of `includePrefixes` is 1000. 365 "A String", 366 ], 367 "excludePrefixes": [ # `excludePrefixes` must follow the requirements described for 368 # `includePrefixes`. 
369 # 370 # The max size of `excludePrefixes` is 1000. 371 "A String", 372 ], 373 "minTimeElapsedSinceLastModification": "A String", # If unspecified, `minTimeElapsedSinceLastModification` takes a zero value 374 # and `maxTimeElapsedSinceLastModification` takes the maximum possible 375 # value of Duration. Objects that satisfy the object conditions 376 # must either have a `lastModificationTime` greater or equal to 377 # `NOW` - `maxTimeElapsedSinceLastModification` and less than 378 # `NOW` - `minTimeElapsedSinceLastModification`, or not have a 379 # `lastModificationTime`. 380 }, 381 "gcsDataSource": { # In a GcsData, an object's name is the Google Cloud Storage object's name and # A Google Cloud Storage data source. 382 # its `lastModificationTime` refers to the object's updated time, which changes 383 # when the content or the metadata of the object is updated. 384 "bucketName": "A String", # Google Cloud Storage bucket name (see 385 # [Bucket Name Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). 386 # Required. 387 }, 388 "httpDataSource": { # An HttpData specifies a list of objects on the web to be transferred over # An HTTP URL data source. 389 # HTTP. The information of the objects to be transferred is contained in a 390 # file referenced by a URL. The first line in the file must be 391 # "TsvHttpData-1.0", which specifies the format of the file. Subsequent lines 392 # specify the information of the list of objects, one object per list entry. 393 # Each entry has the following tab-delimited fields: 394 # 395 # * HTTP URL - The location of the object. 396 # 397 # * Length - The size of the object in bytes. 398 # 399 # * MD5 - The base64-encoded MD5 hash of the object. 400 # 401 # For an example of a valid TSV file, see 402 # [Transferring data from URLs](https://cloud.google.com/storage/transfer/#urls) 403 # 404 # When transferring data based on a URL list, keep the following in mind: 405 # 406 # * When an object located at `http(s)://hostname:port/<URL-path>` is transferred 407 # to a data sink, the name of the object at the data sink is 408 # `<hostname>/<URL-path>`. 409 # 410 # * If the specified size of an object does not match the actual size of the 411 # object fetched, the object will not be transferred. 412 # 413 # * If the specified MD5 does not match the MD5 computed from the transferred 414 # bytes, the object transfer will fail. For more information, see 415 # [Generating MD5 hashes](https://cloud.google.com/storage/transfer/#md5) 416 # 417 # * Ensure that each URL you specify is publicly accessible. For 418 # example, in Google Cloud Storage you can 419 # [share an object publicly] 420 # (https://cloud.google.com/storage/docs/cloud-console#_sharingdata) and get 421 # a link to it. 422 # 423 # * Storage Transfer Service obeys `robots.txt` rules and requires the source 424 # HTTP server to support `Range` requests and to return a `Content-Length` 425 # header in each response. 426 # 427 # * [ObjectConditions](#ObjectConditions) have no effect when filtering objects 428 # to transfer. 429 "listUrl": "A String", # The URL that points to the file that stores the object list entries. 430 # This file must allow public access. Currently, only URLs with HTTP and 431 # HTTPS schemes are supported. 432 # Required. 
433 }, 434 "transferOptions": { # TransferOptions uses three boolean parameters to define the actions # If the option `deleteObjectsUniqueInSink` is `true`, object conditions 435 # based on objects' `lastModificationTime` are ignored and do not exclude 436 # objects in a data source or a data sink. 437 # to be performed on objects in a transfer. 438 "overwriteObjectsAlreadyExistingInSink": True or False, # Whether overwriting objects that already exist in the sink is allowed. 439 "deleteObjectsFromSourceAfterTransfer": True or False, # Whether objects should be deleted from the source after they are 440 # transferred to the sink. 441 "deleteObjectsUniqueInSink": True or False, # Whether objects that exist only in the sink should be deleted. 442 }, 443 "gcsDataSink": { # In a GcsData, an object's name is the Google Cloud Storage object's name and # A Google Cloud Storage data sink. 444 # its `lastModificationTime` refers to the object's updated time, which changes 445 # when the content or the metadata of the object is updated. 446 "bucketName": "A String", # Google Cloud Storage bucket name (see 447 # [Bucket Name Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). 448 # Required. 449 }, 450 "awsS3DataSource": { # An AwsS3Data can be a data source, but not a data sink. # An AWS S3 data source. 451 # In an AwsS3Data, an object's name is the S3 object's key name. 452 "awsAccessKey": { # AWS access key (see # AWS access key used to sign the API requests to the AWS S3 bucket. 453 # Permissions on the bucket must be granted to the access ID of the 454 # AWS access key. 455 # Required. 456 # [AWS Security Credentials](http://docs.aws.amazon.com/general/latest/gr/aws-security-credentials.html)). 457 "secretAccessKey": "A String", # AWS secret access key. This field is not returned in RPC responses. 458 # Required. 459 "accessKeyId": "A String", # AWS access key ID. 460 # Required. 461 }, 462 "bucketName": "A String", # S3 Bucket name (see 463 # [Creating a bucket](http://docs.aws.amazon.com/AmazonS3/latest/dev/create-bucket-get-location-example.html)). 464 # Required. 465 }, 466 }, 467 "status": "A String", # Status of the job. This value MUST be specified for 468 # `CreateTransferJobRequests`. 469 # 470 # NOTE: The effect of the new job status takes place during a subsequent job 471 # run. For example, if you change the job status from `ENABLED` to 472 # `DISABLED`, and an operation spawned by the transfer is running, the status 473 # change would not affect the current operation. 474 "deletionTime": "A String", # This field cannot be changed by user requests. 475 "description": "A String", # A description provided by the user for the job. Its max length is 1024 476 # bytes when Unicode-encoded. 477 "schedule": { # Transfers can be scheduled to recur or to run just once. # Schedule specification. 478 # Required. 479 "scheduleStartDate": { # Represents a whole calendar date, e.g. date of birth. The time of day and # The first day the recurring transfer is scheduled to run. If 480 # `scheduleStartDate` is in the past, the transfer will run for the first 481 # time on the following day. 482 # Required. 483 # time zone are either specified elsewhere or are not significant. The date 484 # is relative to the Proleptic Gregorian Calendar. The day may be 0 to 485 # represent a year and month where the day is not significant, e.g. credit card 486 # expiration date. The year may be 0 to represent a month and day independent 487 # of year, e.g. anniversary date. 
Related types are google.type.TimeOfDay 488 # and `google.protobuf.Timestamp`. 489 "year": 42, # Year of date. Must be from 1 to 9999, or 0 if specifying a date without 490 # a year. 491 "day": 42, # Day of month. Must be from 1 to 31 and valid for the year and month, or 0 492 # if specifying a year/month where the day is not significant. 493 "month": 42, # Month of year. Must be from 1 to 12. 494 }, 495 "startTimeOfDay": { # Represents a time of day. The date and time zone are either not significant # The time in UTC at which the transfer will be scheduled to start in a day. 496 # Transfers may start later than this time. If not specified, recurring and 497 # one-time transfers that are scheduled to run today will run immediately; 498 # recurring transfers that are scheduled to run on a future date will start 499 # at approximately midnight UTC on that date. Note that when configuring a 500 # transfer with the Cloud Platform Console, the transfer's start time in a 501 # day is specified in your local timezone. 502 # or are specified elsewhere. An API may choose to allow leap seconds. Related 503 # types are google.type.Date and `google.protobuf.Timestamp`. 504 "hours": 42, # Hours of day in 24 hour format. Should be from 0 to 23. An API may choose 505 # to allow the value "24:00:00" for scenarios like business closing time. 506 "nanos": 42, # Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999. 507 "seconds": 42, # Seconds of minutes of the time. Must normally be from 0 to 59. An API may 508 # allow the value 60 if it allows leap-seconds. 509 "minutes": 42, # Minutes of hour of day. Must be from 0 to 59. 510 }, 511 "scheduleEndDate": { # Represents a whole calendar date, e.g. date of birth. The time of day and # The last day the recurring transfer will be run. If `scheduleEndDate` 512 # is the same as `scheduleStartDate`, the transfer will be executed only 513 # once. 514 # time zone are either specified elsewhere or are not significant. The date 515 # is relative to the Proleptic Gregorian Calendar. The day may be 0 to 516 # represent a year and month where the day is not significant, e.g. credit card 517 # expiration date. The year may be 0 to represent a month and day independent 518 # of year, e.g. anniversary date. Related types are google.type.TimeOfDay 519 # and `google.protobuf.Timestamp`. 520 "year": 42, # Year of date. Must be from 1 to 9999, or 0 if specifying a date without 521 # a year. 522 "day": 42, # Day of month. Must be from 1 to 31 and valid for the year and month, or 0 523 # if specifying a year/month where the day is not significant. 524 "month": 42, # Month of year. Must be from 1 to 12. 525 }, 526 }, 527 "projectId": "A String", # The ID of the Google Cloud Platform Console project that owns the job. 528 # Required. 529 "lastModificationTime": "A String", # This field cannot be changed by user requests. 530 "creationTime": "A String", # This field cannot be changed by user requests. 531 "name": "A String", # A globally unique name assigned by Storage Transfer Service when the 532 # job is created. This field should be left empty in requests to create a new 533 # transfer job; otherwise, the requests result in an `INVALID_ARGUMENT` 534 # error. 535 }</pre> 536 </div> 537 538 <div class="method"> 539 <code class="details" id="get">get(jobName, projectId=None, x__xgafv=None)</code> 540 <pre>Gets a transfer job. 541 542 Args: 543 jobName: string, The job to get. 544 Required. 
(required) 545 projectId: string, The ID of the Google Cloud Platform Console project that owns the job. 546 Required. 547 x__xgafv: string, V1 error format. 548 Allowed values 549 1 - v1 error format 550 2 - v2 error format 551 552 Returns: 553 An object of the form: 554 555 { # This resource represents the configuration of a transfer job that runs 556 # periodically. 557 "transferSpec": { # Configuration for running a transfer. # Transfer specification. 558 # Required. 559 "objectConditions": { # Conditions that determine which objects will be transferred. # Only objects that satisfy these object conditions are included in the set 560 # of data source and data sink objects. Object conditions based on 561 # objects' `lastModificationTime` do not exclude objects in a data sink. 562 "maxTimeElapsedSinceLastModification": "A String", # `maxTimeElapsedSinceLastModification` is the complement to 563 # `minTimeElapsedSinceLastModification`. 564 "includePrefixes": [ # If `includePrefixes` is specified, objects that satisfy the object 565 # conditions must have names that start with one of the `includePrefixes` 566 # and that do not start with any of the `excludePrefixes`. If `includePrefixes` 567 # is not specified, all objects except those that have names starting with 568 # one of the `excludePrefixes` must satisfy the object conditions. 569 # 570 # Requirements: 571 # 572 # * Each include-prefix and exclude-prefix can contain any sequence of 573 # Unicode characters, of max length 1024 bytes when UTF8-encoded, and 574 # must not contain Carriage Return or Line Feed characters. Wildcard 575 # matching and regular expression matching are not supported. 576 # 577 # * Each include-prefix and exclude-prefix must omit the leading slash. 578 # For example, to include the `requests.gz` object in a transfer from 579 # `s3://my-aws-bucket/logs/y=2015/requests.gz`, specify the include 580 # prefix as `logs/y=2015/requests.gz`. 581 # 582 # * None of the include-prefix or the exclude-prefix values can be empty, 583 # if specified. 584 # 585 # * Each include-prefix must include a distinct portion of the object 586 # namespace, i.e., no include-prefix may be a prefix of another 587 # include-prefix. 588 # 589 # * Each exclude-prefix must exclude a distinct portion of the object 590 # namespace, i.e., no exclude-prefix may be a prefix of another 591 # exclude-prefix. 592 # 593 # * If `includePrefixes` is specified, then each exclude-prefix must start 594 # with the value of a path explicitly included by `includePrefixes`. 595 # 596 # The max size of `includePrefixes` is 1000. 597 "A String", 598 ], 599 "excludePrefixes": [ # `excludePrefixes` must follow the requirements described for 600 # `includePrefixes`. 601 # 602 # The max size of `excludePrefixes` is 1000. 603 "A String", 604 ], 605 "minTimeElapsedSinceLastModification": "A String", # If unspecified, `minTimeElapsedSinceLastModification` takes a zero value 606 # and `maxTimeElapsedSinceLastModification` takes the maximum possible 607 # value of Duration. Objects that satisfy the object conditions 608 # must either have a `lastModificationTime` greater or equal to 609 # `NOW` - `maxTimeElapsedSinceLastModification` and less than 610 # `NOW` - `minTimeElapsedSinceLastModification`, or not have a 611 # `lastModificationTime`. 612 }, 613 "gcsDataSource": { # In a GcsData, an object's name is the Google Cloud Storage object's name and # A Google Cloud Storage data source. 
614 # its `lastModificationTime` refers to the object's updated time, which changes 615 # when the content or the metadata of the object is updated. 616 "bucketName": "A String", # Google Cloud Storage bucket name (see 617 # [Bucket Name Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). 618 # Required. 619 }, 620 "httpDataSource": { # An HttpData specifies a list of objects on the web to be transferred over # An HTTP URL data source. 621 # HTTP. The information of the objects to be transferred is contained in a 622 # file referenced by a URL. The first line in the file must be 623 # "TsvHttpData-1.0", which specifies the format of the file. Subsequent lines 624 # specify the information of the list of objects, one object per list entry. 625 # Each entry has the following tab-delimited fields: 626 # 627 # * HTTP URL - The location of the object. 628 # 629 # * Length - The size of the object in bytes. 630 # 631 # * MD5 - The base64-encoded MD5 hash of the object. 632 # 633 # For an example of a valid TSV file, see 634 # [Transferring data from URLs](https://cloud.google.com/storage/transfer/#urls) 635 # 636 # When transferring data based on a URL list, keep the following in mind: 637 # 638 # * When an object located at `http(s)://hostname:port/<URL-path>` is transferred 639 # to a data sink, the name of the object at the data sink is 640 # `<hostname>/<URL-path>`. 641 # 642 # * If the specified size of an object does not match the actual size of the 643 # object fetched, the object will not be transferred. 644 # 645 # * If the specified MD5 does not match the MD5 computed from the transferred 646 # bytes, the object transfer will fail. For more information, see 647 # [Generating MD5 hashes](https://cloud.google.com/storage/transfer/#md5) 648 # 649 # * Ensure that each URL you specify is publicly accessible. For 650 # example, in Google Cloud Storage you can 651 # [share an object publicly] 652 # (https://cloud.google.com/storage/docs/cloud-console#_sharingdata) and get 653 # a link to it. 654 # 655 # * Storage Transfer Service obeys `robots.txt` rules and requires the source 656 # HTTP server to support `Range` requests and to return a `Content-Length` 657 # header in each response. 658 # 659 # * [ObjectConditions](#ObjectConditions) have no effect when filtering objects 660 # to transfer. 661 "listUrl": "A String", # The URL that points to the file that stores the object list entries. 662 # This file must allow public access. Currently, only URLs with HTTP and 663 # HTTPS schemes are supported. 664 # Required. 665 }, 666 "transferOptions": { # TransferOptions uses three boolean parameters to define the actions # If the option `deleteObjectsUniqueInSink` is `true`, object conditions 667 # based on objects' `lastModificationTime` are ignored and do not exclude 668 # objects in a data source or a data sink. 669 # to be performed on objects in a transfer. 670 "overwriteObjectsAlreadyExistingInSink": True or False, # Whether overwriting objects that already exist in the sink is allowed. 671 "deleteObjectsFromSourceAfterTransfer": True or False, # Whether objects should be deleted from the source after they are 672 # transferred to the sink. 673 "deleteObjectsUniqueInSink": True or False, # Whether objects that exist only in the sink should be deleted. 674 }, 675 "gcsDataSink": { # In a GcsData, an object's name is the Google Cloud Storage object's name and # A Google Cloud Storage data sink. 
676 # its `lastModificationTime` refers to the object's updated time, which changes 677 # when the content or the metadata of the object is updated. 678 "bucketName": "A String", # Google Cloud Storage bucket name (see 679 # [Bucket Name Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). 680 # Required. 681 }, 682 "awsS3DataSource": { # An AwsS3Data can be a data source, but not a data sink. # An AWS S3 data source. 683 # In an AwsS3Data, an object's name is the S3 object's key name. 684 "awsAccessKey": { # AWS access key (see # AWS access key used to sign the API requests to the AWS S3 bucket. 685 # Permissions on the bucket must be granted to the access ID of the 686 # AWS access key. 687 # Required. 688 # [AWS Security Credentials](http://docs.aws.amazon.com/general/latest/gr/aws-security-credentials.html)). 689 "secretAccessKey": "A String", # AWS secret access key. This field is not returned in RPC responses. 690 # Required. 691 "accessKeyId": "A String", # AWS access key ID. 692 # Required. 693 }, 694 "bucketName": "A String", # S3 Bucket name (see 695 # [Creating a bucket](http://docs.aws.amazon.com/AmazonS3/latest/dev/create-bucket-get-location-example.html)). 696 # Required. 697 }, 698 }, 699 "status": "A String", # Status of the job. This value MUST be specified for 700 # `CreateTransferJobRequests`. 701 # 702 # NOTE: The effect of the new job status takes place during a subsequent job 703 # run. For example, if you change the job status from `ENABLED` to 704 # `DISABLED`, and an operation spawned by the transfer is running, the status 705 # change would not affect the current operation. 706 "deletionTime": "A String", # This field cannot be changed by user requests. 707 "description": "A String", # A description provided by the user for the job. Its max length is 1024 708 # bytes when Unicode-encoded. 709 "schedule": { # Transfers can be scheduled to recur or to run just once. # Schedule specification. 710 # Required. 711 "scheduleStartDate": { # Represents a whole calendar date, e.g. date of birth. The time of day and # The first day the recurring transfer is scheduled to run. If 712 # `scheduleStartDate` is in the past, the transfer will run for the first 713 # time on the following day. 714 # Required. 715 # time zone are either specified elsewhere or are not significant. The date 716 # is relative to the Proleptic Gregorian Calendar. The day may be 0 to 717 # represent a year and month where the day is not significant, e.g. credit card 718 # expiration date. The year may be 0 to represent a month and day independent 719 # of year, e.g. anniversary date. Related types are google.type.TimeOfDay 720 # and `google.protobuf.Timestamp`. 721 "year": 42, # Year of date. Must be from 1 to 9999, or 0 if specifying a date without 722 # a year. 723 "day": 42, # Day of month. Must be from 1 to 31 and valid for the year and month, or 0 724 # if specifying a year/month where the day is not significant. 725 "month": 42, # Month of year. Must be from 1 to 12. 726 }, 727 "startTimeOfDay": { # Represents a time of day. The date and time zone are either not significant # The time in UTC at which the transfer will be scheduled to start in a day. 728 # Transfers may start later than this time. If not specified, recurring and 729 # one-time transfers that are scheduled to run today will run immediately; 730 # recurring transfers that are scheduled to run on a future date will start 731 # at approximately midnight UTC on that date. 
Note that when configuring a
          # transfer with the Cloud Platform Console, the transfer's start time in a
          # day is specified in your local timezone.
          # or are specified elsewhere. An API may choose to allow leap seconds. Related
          # types are google.type.Date and `google.protobuf.Timestamp`.
        "hours": 42, # Hours of day in 24 hour format. Should be from 0 to 23. An API may choose
            # to allow the value "24:00:00" for scenarios like business closing time.
        "nanos": 42, # Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999.
        "seconds": 42, # Seconds of minutes of the time. Must normally be from 0 to 59. An API may
            # allow the value 60 if it allows leap-seconds.
        "minutes": 42, # Minutes of hour of day. Must be from 0 to 59.
      },
      "scheduleEndDate": { # Represents a whole calendar date, e.g. date of birth. The time of day and # The last day the recurring transfer will be run. If `scheduleEndDate`
          # is the same as `scheduleStartDate`, the transfer will be executed only
          # once.
          # time zone are either specified elsewhere or are not significant. The date
          # is relative to the Proleptic Gregorian Calendar. The day may be 0 to
          # represent a year and month where the day is not significant, e.g. credit card
          # expiration date. The year may be 0 to represent a month and day independent
          # of year, e.g. anniversary date. Related types are google.type.TimeOfDay
          # and `google.protobuf.Timestamp`.
        "year": 42, # Year of date. Must be from 1 to 9999, or 0 if specifying a date without
            # a year.
        "day": 42, # Day of month. Must be from 1 to 31 and valid for the year and month, or 0
            # if specifying a year/month where the day is not significant.
        "month": 42, # Month of year. Must be from 1 to 12.
      },
    },
    "projectId": "A String", # The ID of the Google Cloud Platform Console project that owns the job.
        # Required.
    "lastModificationTime": "A String", # This field cannot be changed by user requests.
    "creationTime": "A String", # This field cannot be changed by user requests.
    "name": "A String", # A globally unique name assigned by Storage Transfer Service when the
        # job is created. This field should be left empty in requests to create a new
        # transfer job; otherwise, the requests result in an `INVALID_ARGUMENT`
        # error.
  }</pre>
</div>

<div class="method">
    <code class="details" id="list">list(pageSize=None, filter=None, pageToken=None, x__xgafv=None)</code>
  <pre>Lists transfer jobs.

Args:
  pageSize: integer, The list page size. The max allowed value is 256.
  filter: string, A list of query parameters specified as JSON text in the form of
{"project_id":"my_project_id",
"job_names":["jobid1","jobid2",...],
"job_statuses":["status1","status2",...]}.
Since `job_names` and `job_statuses` support multiple values, their values
must be specified with array notation. `project_id` is required. `job_names`
and `job_statuses` are optional. The valid values for `job_statuses` are
case-insensitive: `ENABLED`, `DISABLED`, and `DELETED`.
  pageToken: string, The list page token.
  x__xgafv: string, V1 error format.
    Allowed values
      1 - v1 error format
      2 - v2 error format

Returns:
  An object of the form:

    { # Response from ListTransferJobs.
    "nextPageToken": "A String", # The list next page token.
    "transferJobs": [ # A list of transfer jobs.
796 { # This resource represents the configuration of a transfer job that runs 797 # periodically. 798 "transferSpec": { # Configuration for running a transfer. # Transfer specification. 799 # Required. 800 "objectConditions": { # Conditions that determine which objects will be transferred. # Only objects that satisfy these object conditions are included in the set 801 # of data source and data sink objects. Object conditions based on 802 # objects' `lastModificationTime` do not exclude objects in a data sink. 803 "maxTimeElapsedSinceLastModification": "A String", # `maxTimeElapsedSinceLastModification` is the complement to 804 # `minTimeElapsedSinceLastModification`. 805 "includePrefixes": [ # If `includePrefixes` is specified, objects that satisfy the object 806 # conditions must have names that start with one of the `includePrefixes` 807 # and that do not start with any of the `excludePrefixes`. If `includePrefixes` 808 # is not specified, all objects except those that have names starting with 809 # one of the `excludePrefixes` must satisfy the object conditions. 810 # 811 # Requirements: 812 # 813 # * Each include-prefix and exclude-prefix can contain any sequence of 814 # Unicode characters, of max length 1024 bytes when UTF8-encoded, and 815 # must not contain Carriage Return or Line Feed characters. Wildcard 816 # matching and regular expression matching are not supported. 817 # 818 # * Each include-prefix and exclude-prefix must omit the leading slash. 819 # For example, to include the `requests.gz` object in a transfer from 820 # `s3://my-aws-bucket/logs/y=2015/requests.gz`, specify the include 821 # prefix as `logs/y=2015/requests.gz`. 822 # 823 # * None of the include-prefix or the exclude-prefix values can be empty, 824 # if specified. 825 # 826 # * Each include-prefix must include a distinct portion of the object 827 # namespace, i.e., no include-prefix may be a prefix of another 828 # include-prefix. 829 # 830 # * Each exclude-prefix must exclude a distinct portion of the object 831 # namespace, i.e., no exclude-prefix may be a prefix of another 832 # exclude-prefix. 833 # 834 # * If `includePrefixes` is specified, then each exclude-prefix must start 835 # with the value of a path explicitly included by `includePrefixes`. 836 # 837 # The max size of `includePrefixes` is 1000. 838 "A String", 839 ], 840 "excludePrefixes": [ # `excludePrefixes` must follow the requirements described for 841 # `includePrefixes`. 842 # 843 # The max size of `excludePrefixes` is 1000. 844 "A String", 845 ], 846 "minTimeElapsedSinceLastModification": "A String", # If unspecified, `minTimeElapsedSinceLastModification` takes a zero value 847 # and `maxTimeElapsedSinceLastModification` takes the maximum possible 848 # value of Duration. Objects that satisfy the object conditions 849 # must either have a `lastModificationTime` greater or equal to 850 # `NOW` - `maxTimeElapsedSinceLastModification` and less than 851 # `NOW` - `minTimeElapsedSinceLastModification`, or not have a 852 # `lastModificationTime`. 853 }, 854 "gcsDataSource": { # In a GcsData, an object's name is the Google Cloud Storage object's name and # A Google Cloud Storage data source. 855 # its `lastModificationTime` refers to the object's updated time, which changes 856 # when the content or the metadata of the object is updated. 857 "bucketName": "A String", # Google Cloud Storage bucket name (see 858 # [Bucket Name Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). 859 # Required. 
860 }, 861 "httpDataSource": { # An HttpData specifies a list of objects on the web to be transferred over # An HTTP URL data source. 862 # HTTP. The information of the objects to be transferred is contained in a 863 # file referenced by a URL. The first line in the file must be 864 # "TsvHttpData-1.0", which specifies the format of the file. Subsequent lines 865 # specify the information of the list of objects, one object per list entry. 866 # Each entry has the following tab-delimited fields: 867 # 868 # * HTTP URL - The location of the object. 869 # 870 # * Length - The size of the object in bytes. 871 # 872 # * MD5 - The base64-encoded MD5 hash of the object. 873 # 874 # For an example of a valid TSV file, see 875 # [Transferring data from URLs](https://cloud.google.com/storage/transfer/#urls) 876 # 877 # When transferring data based on a URL list, keep the following in mind: 878 # 879 # * When an object located at `http(s)://hostname:port/<URL-path>` is transferred 880 # to a data sink, the name of the object at the data sink is 881 # `<hostname>/<URL-path>`. 882 # 883 # * If the specified size of an object does not match the actual size of the 884 # object fetched, the object will not be transferred. 885 # 886 # * If the specified MD5 does not match the MD5 computed from the transferred 887 # bytes, the object transfer will fail. For more information, see 888 # [Generating MD5 hashes](https://cloud.google.com/storage/transfer/#md5) 889 # 890 # * Ensure that each URL you specify is publicly accessible. For 891 # example, in Google Cloud Storage you can 892 # [share an object publicly] 893 # (https://cloud.google.com/storage/docs/cloud-console#_sharingdata) and get 894 # a link to it. 895 # 896 # * Storage Transfer Service obeys `robots.txt` rules and requires the source 897 # HTTP server to support `Range` requests and to return a `Content-Length` 898 # header in each response. 899 # 900 # * [ObjectConditions](#ObjectConditions) have no effect when filtering objects 901 # to transfer. 902 "listUrl": "A String", # The URL that points to the file that stores the object list entries. 903 # This file must allow public access. Currently, only URLs with HTTP and 904 # HTTPS schemes are supported. 905 # Required. 906 }, 907 "transferOptions": { # TransferOptions uses three boolean parameters to define the actions # If the option `deleteObjectsUniqueInSink` is `true`, object conditions 908 # based on objects' `lastModificationTime` are ignored and do not exclude 909 # objects in a data source or a data sink. 910 # to be performed on objects in a transfer. 911 "overwriteObjectsAlreadyExistingInSink": True or False, # Whether overwriting objects that already exist in the sink is allowed. 912 "deleteObjectsFromSourceAfterTransfer": True or False, # Whether objects should be deleted from the source after they are 913 # transferred to the sink. 914 "deleteObjectsUniqueInSink": True or False, # Whether objects that exist only in the sink should be deleted. 915 }, 916 "gcsDataSink": { # In a GcsData, an object's name is the Google Cloud Storage object's name and # A Google Cloud Storage data sink. 917 # its `lastModificationTime` refers to the object's updated time, which changes 918 # when the content or the metadata of the object is updated. 919 "bucketName": "A String", # Google Cloud Storage bucket name (see 920 # [Bucket Name Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). 921 # Required. 
922 }, 923 "awsS3DataSource": { # An AwsS3Data can be a data source, but not a data sink. # An AWS S3 data source. 924 # In an AwsS3Data, an object's name is the S3 object's key name. 925 "awsAccessKey": { # AWS access key (see # AWS access key used to sign the API requests to the AWS S3 bucket. 926 # Permissions on the bucket must be granted to the access ID of the 927 # AWS access key. 928 # Required. 929 # [AWS Security Credentials](http://docs.aws.amazon.com/general/latest/gr/aws-security-credentials.html)). 930 "secretAccessKey": "A String", # AWS secret access key. This field is not returned in RPC responses. 931 # Required. 932 "accessKeyId": "A String", # AWS access key ID. 933 # Required. 934 }, 935 "bucketName": "A String", # S3 Bucket name (see 936 # [Creating a bucket](http://docs.aws.amazon.com/AmazonS3/latest/dev/create-bucket-get-location-example.html)). 937 # Required. 938 }, 939 }, 940 "status": "A String", # Status of the job. This value MUST be specified for 941 # `CreateTransferJobRequests`. 942 # 943 # NOTE: The effect of the new job status takes place during a subsequent job 944 # run. For example, if you change the job status from `ENABLED` to 945 # `DISABLED`, and an operation spawned by the transfer is running, the status 946 # change would not affect the current operation. 947 "deletionTime": "A String", # This field cannot be changed by user requests. 948 "description": "A String", # A description provided by the user for the job. Its max length is 1024 949 # bytes when Unicode-encoded. 950 "schedule": { # Transfers can be scheduled to recur or to run just once. # Schedule specification. 951 # Required. 952 "scheduleStartDate": { # Represents a whole calendar date, e.g. date of birth. The time of day and # The first day the recurring transfer is scheduled to run. If 953 # `scheduleStartDate` is in the past, the transfer will run for the first 954 # time on the following day. 955 # Required. 956 # time zone are either specified elsewhere or are not significant. The date 957 # is relative to the Proleptic Gregorian Calendar. The day may be 0 to 958 # represent a year and month where the day is not significant, e.g. credit card 959 # expiration date. The year may be 0 to represent a month and day independent 960 # of year, e.g. anniversary date. Related types are google.type.TimeOfDay 961 # and `google.protobuf.Timestamp`. 962 "year": 42, # Year of date. Must be from 1 to 9999, or 0 if specifying a date without 963 # a year. 964 "day": 42, # Day of month. Must be from 1 to 31 and valid for the year and month, or 0 965 # if specifying a year/month where the day is not significant. 966 "month": 42, # Month of year. Must be from 1 to 12. 967 }, 968 "startTimeOfDay": { # Represents a time of day. The date and time zone are either not significant # The time in UTC at which the transfer will be scheduled to start in a day. 969 # Transfers may start later than this time. If not specified, recurring and 970 # one-time transfers that are scheduled to run today will run immediately; 971 # recurring transfers that are scheduled to run on a future date will start 972 # at approximately midnight UTC on that date. Note that when configuring a 973 # transfer with the Cloud Platform Console, the transfer's start time in a 974 # day is specified in your local timezone. 975 # or are specified elsewhere. An API may choose to allow leap seconds. Related 976 # types are google.type.Date and `google.protobuf.Timestamp`. 977 "hours": 42, # Hours of day in 24 hour format. 
Should be from 0 to 23. An API may choose
              # to allow the value "24:00:00" for scenarios like business closing time.
          "nanos": 42, # Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999.
          "seconds": 42, # Seconds of minutes of the time. Must normally be from 0 to 59. An API may
              # allow the value 60 if it allows leap-seconds.
          "minutes": 42, # Minutes of hour of day. Must be from 0 to 59.
        },
        "scheduleEndDate": { # Represents a whole calendar date, e.g. date of birth. The time of day and # The last day the recurring transfer will be run. If `scheduleEndDate`
            # is the same as `scheduleStartDate`, the transfer will be executed only
            # once.
            # time zone are either specified elsewhere or are not significant. The date
            # is relative to the Proleptic Gregorian Calendar. The day may be 0 to
            # represent a year and month where the day is not significant, e.g. credit card
            # expiration date. The year may be 0 to represent a month and day independent
            # of year, e.g. anniversary date. Related types are google.type.TimeOfDay
            # and `google.protobuf.Timestamp`.
          "year": 42, # Year of date. Must be from 1 to 9999, or 0 if specifying a date without
              # a year.
          "day": 42, # Day of month. Must be from 1 to 31 and valid for the year and month, or 0
              # if specifying a year/month where the day is not significant.
          "month": 42, # Month of year. Must be from 1 to 12.
        },
      },
      "projectId": "A String", # The ID of the Google Cloud Platform Console project that owns the job.
          # Required.
      "lastModificationTime": "A String", # This field cannot be changed by user requests.
      "creationTime": "A String", # This field cannot be changed by user requests.
      "name": "A String", # A globally unique name assigned by Storage Transfer Service when the
          # job is created. This field should be left empty in requests to create a new
          # transfer job; otherwise, the requests result in an `INVALID_ARGUMENT`
          # error.
      },
    ],
  }</pre>
</div>

<div class="method">
    <code class="details" id="list_next">list_next(previous_request, previous_response)</code>
  <pre>Retrieves the next page of results.

Args:
  previous_request: The request for the previous page. (required)
  previous_response: The response from the request for the previous page. (required)

Returns:
  A request object that you can call 'execute()' on to request the next
  page. Returns None if there are no more items in the collection.
    </pre>
</div>

<div class="method">
    <code class="details" id="patch">patch(jobName, body, x__xgafv=None)</code>
  <pre>Updates a transfer job. Updating a job's transfer spec does not affect
transfer operations that are already running. Updating the scheduling
of a job is not allowed.

Args:
  jobName: string, The name of the job to update. Required. (required)
  body: object, The request body. (required)
    The object takes the form of:

{ # Request passed to UpdateTransferJob.
  "projectId": "A String", # The ID of the Google Cloud Platform Console project that owns the job.
      # Required.
  "updateTransferJobFieldMask": "A String", # The field mask of the fields in `transferJob` that are to be updated in
      # this request. Fields in `transferJob` that can be updated are:
      # `description`, `transferSpec`, and `status`.
To update the `transferSpec` 1045 # of the job, a complete transfer specification has to be provided. An 1046 # incomplete specification which misses any required fields will be rejected 1047 # with the error `INVALID_ARGUMENT`. 1048 "transferJob": { # This resource represents the configuration of a transfer job that runs # The job to update. 1049 # Required. 1050 # periodically. 1051 "transferSpec": { # Configuration for running a transfer. # Transfer specification. 1052 # Required. 1053 "objectConditions": { # Conditions that determine which objects will be transferred. # Only objects that satisfy these object conditions are included in the set 1054 # of data source and data sink objects. Object conditions based on 1055 # objects' `lastModificationTime` do not exclude objects in a data sink. 1056 "maxTimeElapsedSinceLastModification": "A String", # `maxTimeElapsedSinceLastModification` is the complement to 1057 # `minTimeElapsedSinceLastModification`. 1058 "includePrefixes": [ # If `includePrefixes` is specified, objects that satisfy the object 1059 # conditions must have names that start with one of the `includePrefixes` 1060 # and that do not start with any of the `excludePrefixes`. If `includePrefixes` 1061 # is not specified, all objects except those that have names starting with 1062 # one of the `excludePrefixes` must satisfy the object conditions. 1063 # 1064 # Requirements: 1065 # 1066 # * Each include-prefix and exclude-prefix can contain any sequence of 1067 # Unicode characters, of max length 1024 bytes when UTF8-encoded, and 1068 # must not contain Carriage Return or Line Feed characters. Wildcard 1069 # matching and regular expression matching are not supported. 1070 # 1071 # * Each include-prefix and exclude-prefix must omit the leading slash. 1072 # For example, to include the `requests.gz` object in a transfer from 1073 # `s3://my-aws-bucket/logs/y=2015/requests.gz`, specify the include 1074 # prefix as `logs/y=2015/requests.gz`. 1075 # 1076 # * None of the include-prefix or the exclude-prefix values can be empty, 1077 # if specified. 1078 # 1079 # * Each include-prefix must include a distinct portion of the object 1080 # namespace, i.e., no include-prefix may be a prefix of another 1081 # include-prefix. 1082 # 1083 # * Each exclude-prefix must exclude a distinct portion of the object 1084 # namespace, i.e., no exclude-prefix may be a prefix of another 1085 # exclude-prefix. 1086 # 1087 # * If `includePrefixes` is specified, then each exclude-prefix must start 1088 # with the value of a path explicitly included by `includePrefixes`. 1089 # 1090 # The max size of `includePrefixes` is 1000. 1091 "A String", 1092 ], 1093 "excludePrefixes": [ # `excludePrefixes` must follow the requirements described for 1094 # `includePrefixes`. 1095 # 1096 # The max size of `excludePrefixes` is 1000. 1097 "A String", 1098 ], 1099 "minTimeElapsedSinceLastModification": "A String", # If unspecified, `minTimeElapsedSinceLastModification` takes a zero value 1100 # and `maxTimeElapsedSinceLastModification` takes the maximum possible 1101 # value of Duration. Objects that satisfy the object conditions 1102 # must either have a `lastModificationTime` greater or equal to 1103 # `NOW` - `maxTimeElapsedSinceLastModification` and less than 1104 # `NOW` - `minTimeElapsedSinceLastModification`, or not have a 1105 # `lastModificationTime`. 1106 }, 1107 "gcsDataSource": { # In a GcsData, an object's name is the Google Cloud Storage object's name and # A Google Cloud Storage data source. 
1108 # its `lastModificationTime` refers to the object's updated time, which changes 1109 # when the content or the metadata of the object is updated. 1110 "bucketName": "A String", # Google Cloud Storage bucket name (see 1111 # [Bucket Name Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). 1112 # Required. 1113 }, 1114 "httpDataSource": { # An HttpData specifies a list of objects on the web to be transferred over # An HTTP URL data source. 1115 # HTTP. The information of the objects to be transferred is contained in a 1116 # file referenced by a URL. The first line in the file must be 1117 # "TsvHttpData-1.0", which specifies the format of the file. Subsequent lines 1118 # specify the information of the list of objects, one object per list entry. 1119 # Each entry has the following tab-delimited fields: 1120 # 1121 # * HTTP URL - The location of the object. 1122 # 1123 # * Length - The size of the object in bytes. 1124 # 1125 # * MD5 - The base64-encoded MD5 hash of the object. 1126 # 1127 # For an example of a valid TSV file, see 1128 # [Transferring data from URLs](https://cloud.google.com/storage/transfer/#urls) 1129 # 1130 # When transferring data based on a URL list, keep the following in mind: 1131 # 1132 # * When an object located at `http(s)://hostname:port/<URL-path>` is transferred 1133 # to a data sink, the name of the object at the data sink is 1134 # `<hostname>/<URL-path>`. 1135 # 1136 # * If the specified size of an object does not match the actual size of the 1137 # object fetched, the object will not be transferred. 1138 # 1139 # * If the specified MD5 does not match the MD5 computed from the transferred 1140 # bytes, the object transfer will fail. For more information, see 1141 # [Generating MD5 hashes](https://cloud.google.com/storage/transfer/#md5) 1142 # 1143 # * Ensure that each URL you specify is publicly accessible. For 1144 # example, in Google Cloud Storage you can 1145 # [share an object publicly] 1146 # (https://cloud.google.com/storage/docs/cloud-console#_sharingdata) and get 1147 # a link to it. 1148 # 1149 # * Storage Transfer Service obeys `robots.txt` rules and requires the source 1150 # HTTP server to support `Range` requests and to return a `Content-Length` 1151 # header in each response. 1152 # 1153 # * [ObjectConditions](#ObjectConditions) have no effect when filtering objects 1154 # to transfer. 1155 "listUrl": "A String", # The URL that points to the file that stores the object list entries. 1156 # This file must allow public access. Currently, only URLs with HTTP and 1157 # HTTPS schemes are supported. 1158 # Required. 1159 }, 1160 "transferOptions": { # TransferOptions uses three boolean parameters to define the actions # If the option `deleteObjectsUniqueInSink` is `true`, object conditions 1161 # based on objects' `lastModificationTime` are ignored and do not exclude 1162 # objects in a data source or a data sink. 1163 # to be performed on objects in a transfer. 1164 "overwriteObjectsAlreadyExistingInSink": True or False, # Whether overwriting objects that already exist in the sink is allowed. 1165 "deleteObjectsFromSourceAfterTransfer": True or False, # Whether objects should be deleted from the source after they are 1166 # transferred to the sink. 1167 "deleteObjectsUniqueInSink": True or False, # Whether objects that exist only in the sink should be deleted. 1168 }, 1169 "gcsDataSink": { # In a GcsData, an object's name is the Google Cloud Storage object's name and # A Google Cloud Storage data sink. 
1170 # its `lastModificationTime` refers to the object's updated time, which changes 1171 # when the content or the metadata of the object is updated. 1172 "bucketName": "A String", # Google Cloud Storage bucket name (see 1173 # [Bucket Name Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). 1174 # Required. 1175 }, 1176 "awsS3DataSource": { # An AwsS3Data can be a data source, but not a data sink. # An AWS S3 data source. 1177 # In an AwsS3Data, an object's name is the S3 object's key name. 1178 "awsAccessKey": { # AWS access key (see # AWS access key used to sign the API requests to the AWS S3 bucket. 1179 # Permissions on the bucket must be granted to the access ID of the 1180 # AWS access key. 1181 # Required. 1182 # [AWS Security Credentials](http://docs.aws.amazon.com/general/latest/gr/aws-security-credentials.html)). 1183 "secretAccessKey": "A String", # AWS secret access key. This field is not returned in RPC responses. 1184 # Required. 1185 "accessKeyId": "A String", # AWS access key ID. 1186 # Required. 1187 }, 1188 "bucketName": "A String", # S3 Bucket name (see 1189 # [Creating a bucket](http://docs.aws.amazon.com/AmazonS3/latest/dev/create-bucket-get-location-example.html)). 1190 # Required. 1191 }, 1192 }, 1193 "status": "A String", # Status of the job. This value MUST be specified for 1194 # `CreateTransferJobRequests`. 1195 # 1196 # NOTE: The effect of the new job status takes place during a subsequent job 1197 # run. For example, if you change the job status from `ENABLED` to 1198 # `DISABLED`, and an operation spawned by the transfer is running, the status 1199 # change would not affect the current operation. 1200 "deletionTime": "A String", # This field cannot be changed by user requests. 1201 "description": "A String", # A description provided by the user for the job. Its max length is 1024 1202 # bytes when Unicode-encoded. 1203 "schedule": { # Transfers can be scheduled to recur or to run just once. # Schedule specification. 1204 # Required. 1205 "scheduleStartDate": { # Represents a whole calendar date, e.g. date of birth. The time of day and # The first day the recurring transfer is scheduled to run. If 1206 # `scheduleStartDate` is in the past, the transfer will run for the first 1207 # time on the following day. 1208 # Required. 1209 # time zone are either specified elsewhere or are not significant. The date 1210 # is relative to the Proleptic Gregorian Calendar. The day may be 0 to 1211 # represent a year and month where the day is not significant, e.g. credit card 1212 # expiration date. The year may be 0 to represent a month and day independent 1213 # of year, e.g. anniversary date. Related types are google.type.TimeOfDay 1214 # and `google.protobuf.Timestamp`. 1215 "year": 42, # Year of date. Must be from 1 to 9999, or 0 if specifying a date without 1216 # a year. 1217 "day": 42, # Day of month. Must be from 1 to 31 and valid for the year and month, or 0 1218 # if specifying a year/month where the day is not significant. 1219 "month": 42, # Month of year. Must be from 1 to 12. 1220 }, 1221 "startTimeOfDay": { # Represents a time of day. The date and time zone are either not significant # The time in UTC at which the transfer will be scheduled to start in a day. 1222 # Transfers may start later than this time. 
If not specified, recurring and 1223 # one-time transfers that are scheduled to run today will run immediately; 1224 # recurring transfers that are scheduled to run on a future date will start 1225 # at approximately midnight UTC on that date. Note that when configuring a 1226 # transfer with the Cloud Platform Console, the transfer's start time in a 1227 # day is specified in your local timezone. 1228 # or are specified elsewhere. An API may choose to allow leap seconds. Related 1229 # types are google.type.Date and `google.protobuf.Timestamp`. 1230 "hours": 42, # Hours of day in 24 hour format. Should be from 0 to 23. An API may choose 1231 # to allow the value "24:00:00" for scenarios like business closing time. 1232 "nanos": 42, # Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999. 1233 "seconds": 42, # Seconds of minutes of the time. Must normally be from 0 to 59. An API may 1234 # allow the value 60 if it allows leap-seconds. 1235 "minutes": 42, # Minutes of hour of day. Must be from 0 to 59. 1236 }, 1237 "scheduleEndDate": { # Represents a whole calendar date, e.g. date of birth. The time of day and # The last day the recurring transfer will be run. If `scheduleEndDate` 1238 # is the same as `scheduleStartDate`, the transfer will be executed only 1239 # once. 1240 # time zone are either specified elsewhere or are not significant. The date 1241 # is relative to the Proleptic Gregorian Calendar. The day may be 0 to 1242 # represent a year and month where the day is not significant, e.g. credit card 1243 # expiration date. The year may be 0 to represent a month and day independent 1244 # of year, e.g. anniversary date. Related types are google.type.TimeOfDay 1245 # and `google.protobuf.Timestamp`. 1246 "year": 42, # Year of date. Must be from 1 to 9999, or 0 if specifying a date without 1247 # a year. 1248 "day": 42, # Day of month. Must be from 1 to 31 and valid for the year and month, or 0 1249 # if specifying a year/month where the day is not significant. 1250 "month": 42, # Month of year. Must be from 1 to 12. 1251 }, 1252 }, 1253 "projectId": "A String", # The ID of the Google Cloud Platform Console project that owns the job. 1254 # Required. 1255 "lastModificationTime": "A String", # This field cannot be changed by user requests. 1256 "creationTime": "A String", # This field cannot be changed by user requests. 1257 "name": "A String", # A globally unique name assigned by Storage Transfer Service when the 1258 # job is created. This field should be left empty in requests to create a new 1259 # transfer job; otherwise, the requests result in an `INVALID_ARGUMENT` 1260 # error. 1261 }, 1262 } 1263 1264 x__xgafv: string, V1 error format. 1265 Allowed values 1266 1 - v1 error format 1267 2 - v2 error format 1268 1269 Returns: 1270 An object of the form: 1271 1272 { # This resource represents the configuration of a transfer job that runs 1273 # periodically. 1274 "transferSpec": { # Configuration for running a transfer. # Transfer specification. 1275 # Required. 1276 "objectConditions": { # Conditions that determine which objects will be transferred. # Only objects that satisfy these object conditions are included in the set 1277 # of data source and data sink objects. Object conditions based on 1278 # objects' `lastModificationTime` do not exclude objects in a data sink. 1279 "maxTimeElapsedSinceLastModification": "A String", # `maxTimeElapsedSinceLastModification` is the complement to 1280 # `minTimeElapsedSinceLastModification`. 
1281 "includePrefixes": [ # If `includePrefixes` is specified, objects that satisfy the object 1282 # conditions must have names that start with one of the `includePrefixes` 1283 # and that do not start with any of the `excludePrefixes`. If `includePrefixes` 1284 # is not specified, all objects except those that have names starting with 1285 # one of the `excludePrefixes` must satisfy the object conditions. 1286 # 1287 # Requirements: 1288 # 1289 # * Each include-prefix and exclude-prefix can contain any sequence of 1290 # Unicode characters, of max length 1024 bytes when UTF8-encoded, and 1291 # must not contain Carriage Return or Line Feed characters. Wildcard 1292 # matching and regular expression matching are not supported. 1293 # 1294 # * Each include-prefix and exclude-prefix must omit the leading slash. 1295 # For example, to include the `requests.gz` object in a transfer from 1296 # `s3://my-aws-bucket/logs/y=2015/requests.gz`, specify the include 1297 # prefix as `logs/y=2015/requests.gz`. 1298 # 1299 # * None of the include-prefix or the exclude-prefix values can be empty, 1300 # if specified. 1301 # 1302 # * Each include-prefix must include a distinct portion of the object 1303 # namespace, i.e., no include-prefix may be a prefix of another 1304 # include-prefix. 1305 # 1306 # * Each exclude-prefix must exclude a distinct portion of the object 1307 # namespace, i.e., no exclude-prefix may be a prefix of another 1308 # exclude-prefix. 1309 # 1310 # * If `includePrefixes` is specified, then each exclude-prefix must start 1311 # with the value of a path explicitly included by `includePrefixes`. 1312 # 1313 # The max size of `includePrefixes` is 1000. 1314 "A String", 1315 ], 1316 "excludePrefixes": [ # `excludePrefixes` must follow the requirements described for 1317 # `includePrefixes`. 1318 # 1319 # The max size of `excludePrefixes` is 1000. 1320 "A String", 1321 ], 1322 "minTimeElapsedSinceLastModification": "A String", # If unspecified, `minTimeElapsedSinceLastModification` takes a zero value 1323 # and `maxTimeElapsedSinceLastModification` takes the maximum possible 1324 # value of Duration. Objects that satisfy the object conditions 1325 # must either have a `lastModificationTime` greater or equal to 1326 # `NOW` - `maxTimeElapsedSinceLastModification` and less than 1327 # `NOW` - `minTimeElapsedSinceLastModification`, or not have a 1328 # `lastModificationTime`. 1329 }, 1330 "gcsDataSource": { # In a GcsData, an object's name is the Google Cloud Storage object's name and # A Google Cloud Storage data source. 1331 # its `lastModificationTime` refers to the object's updated time, which changes 1332 # when the content or the metadata of the object is updated. 1333 "bucketName": "A String", # Google Cloud Storage bucket name (see 1334 # [Bucket Name Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). 1335 # Required. 1336 }, 1337 "httpDataSource": { # An HttpData specifies a list of objects on the web to be transferred over # An HTTP URL data source. 1338 # HTTP. The information of the objects to be transferred is contained in a 1339 # file referenced by a URL. The first line in the file must be 1340 # "TsvHttpData-1.0", which specifies the format of the file. Subsequent lines 1341 # specify the information of the list of objects, one object per list entry. 1342 # Each entry has the following tab-delimited fields: 1343 # 1344 # * HTTP URL - The location of the object. 1345 # 1346 # * Length - The size of the object in bytes. 
1347 # 1348 # * MD5 - The base64-encoded MD5 hash of the object. 1349 # 1350 # For an example of a valid TSV file, see 1351 # [Transferring data from URLs](https://cloud.google.com/storage/transfer/#urls) 1352 # 1353 # When transferring data based on a URL list, keep the following in mind: 1354 # 1355 # * When an object located at `http(s)://hostname:port/<URL-path>` is transferred 1356 # to a data sink, the name of the object at the data sink is 1357 # `<hostname>/<URL-path>`. 1358 # 1359 # * If the specified size of an object does not match the actual size of the 1360 # object fetched, the object will not be transferred. 1361 # 1362 # * If the specified MD5 does not match the MD5 computed from the transferred 1363 # bytes, the object transfer will fail. For more information, see 1364 # [Generating MD5 hashes](https://cloud.google.com/storage/transfer/#md5) 1365 # 1366 # * Ensure that each URL you specify is publicly accessible. For 1367 # example, in Google Cloud Storage you can 1368 # [share an object publicly] 1369 # (https://cloud.google.com/storage/docs/cloud-console#_sharingdata) and get 1370 # a link to it. 1371 # 1372 # * Storage Transfer Service obeys `robots.txt` rules and requires the source 1373 # HTTP server to support `Range` requests and to return a `Content-Length` 1374 # header in each response. 1375 # 1376 # * [ObjectConditions](#ObjectConditions) have no effect when filtering objects 1377 # to transfer. 1378 "listUrl": "A String", # The URL that points to the file that stores the object list entries. 1379 # This file must allow public access. Currently, only URLs with HTTP and 1380 # HTTPS schemes are supported. 1381 # Required. 1382 }, 1383 "transferOptions": { # TransferOptions uses three boolean parameters to define the actions # If the option `deleteObjectsUniqueInSink` is `true`, object conditions 1384 # based on objects' `lastModificationTime` are ignored and do not exclude 1385 # objects in a data source or a data sink. 1386 # to be performed on objects in a transfer. 1387 "overwriteObjectsAlreadyExistingInSink": True or False, # Whether overwriting objects that already exist in the sink is allowed. 1388 "deleteObjectsFromSourceAfterTransfer": True or False, # Whether objects should be deleted from the source after they are 1389 # transferred to the sink. 1390 "deleteObjectsUniqueInSink": True or False, # Whether objects that exist only in the sink should be deleted. 1391 }, 1392 "gcsDataSink": { # In a GcsData, an object's name is the Google Cloud Storage object's name and # A Google Cloud Storage data sink. 1393 # its `lastModificationTime` refers to the object's updated time, which changes 1394 # when the content or the metadata of the object is updated. 1395 "bucketName": "A String", # Google Cloud Storage bucket name (see 1396 # [Bucket Name Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). 1397 # Required. 1398 }, 1399 "awsS3DataSource": { # An AwsS3Data can be a data source, but not a data sink. # An AWS S3 data source. 1400 # In an AwsS3Data, an object's name is the S3 object's key name. 1401 "awsAccessKey": { # AWS access key (see # AWS access key used to sign the API requests to the AWS S3 bucket. 1402 # Permissions on the bucket must be granted to the access ID of the 1403 # AWS access key. 1404 # Required. 1405 # [AWS Security Credentials](http://docs.aws.amazon.com/general/latest/gr/aws-security-credentials.html)). 1406 "secretAccessKey": "A String", # AWS secret access key. This field is not returned in RPC responses. 
1407 # Required. 1408 "accessKeyId": "A String", # AWS access key ID. 1409 # Required. 1410 }, 1411 "bucketName": "A String", # S3 Bucket name (see 1412 # [Creating a bucket](http://docs.aws.amazon.com/AmazonS3/latest/dev/create-bucket-get-location-example.html)). 1413 # Required. 1414 }, 1415 }, 1416 "status": "A String", # Status of the job. This value MUST be specified for 1417 # `CreateTransferJobRequests`. 1418 # 1419 # NOTE: The effect of the new job status takes place during a subsequent job 1420 # run. For example, if you change the job status from `ENABLED` to 1421 # `DISABLED`, and an operation spawned by the transfer is running, the status 1422 # change would not affect the current operation. 1423 "deletionTime": "A String", # This field cannot be changed by user requests. 1424 "description": "A String", # A description provided by the user for the job. Its max length is 1024 1425 # bytes when Unicode-encoded. 1426 "schedule": { # Transfers can be scheduled to recur or to run just once. # Schedule specification. 1427 # Required. 1428 "scheduleStartDate": { # Represents a whole calendar date, e.g. date of birth. The time of day and # The first day the recurring transfer is scheduled to run. If 1429 # `scheduleStartDate` is in the past, the transfer will run for the first 1430 # time on the following day. 1431 # Required. 1432 # time zone are either specified elsewhere or are not significant. The date 1433 # is relative to the Proleptic Gregorian Calendar. The day may be 0 to 1434 # represent a year and month where the day is not significant, e.g. credit card 1435 # expiration date. The year may be 0 to represent a month and day independent 1436 # of year, e.g. anniversary date. Related types are google.type.TimeOfDay 1437 # and `google.protobuf.Timestamp`. 1438 "year": 42, # Year of date. Must be from 1 to 9999, or 0 if specifying a date without 1439 # a year. 1440 "day": 42, # Day of month. Must be from 1 to 31 and valid for the year and month, or 0 1441 # if specifying a year/month where the day is not significant. 1442 "month": 42, # Month of year. Must be from 1 to 12. 1443 }, 1444 "startTimeOfDay": { # Represents a time of day. The date and time zone are either not significant # The time in UTC at which the transfer will be scheduled to start in a day. 1445 # Transfers may start later than this time. If not specified, recurring and 1446 # one-time transfers that are scheduled to run today will run immediately; 1447 # recurring transfers that are scheduled to run on a future date will start 1448 # at approximately midnight UTC on that date. Note that when configuring a 1449 # transfer with the Cloud Platform Console, the transfer's start time in a 1450 # day is specified in your local timezone. 1451 # or are specified elsewhere. An API may choose to allow leap seconds. Related 1452 # types are google.type.Date and `google.protobuf.Timestamp`. 1453 "hours": 42, # Hours of day in 24 hour format. Should be from 0 to 23. An API may choose 1454 # to allow the value "24:00:00" for scenarios like business closing time. 1455 "nanos": 42, # Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999. 1456 "seconds": 42, # Seconds of minutes of the time. Must normally be from 0 to 59. An API may 1457 # allow the value 60 if it allows leap-seconds. 1458 "minutes": 42, # Minutes of hour of day. Must be from 0 to 59. 1459 }, 1460 "scheduleEndDate": { # Represents a whole calendar date, e.g. date of birth. The time of day and # The last day the recurring transfer will be run. 
If `scheduleEndDate`
        # is the same as `scheduleStartDate`, the transfer will be executed only
        # once.
        # time zone are either specified elsewhere or are not significant. The date
        # is relative to the Proleptic Gregorian Calendar. The day may be 0 to
        # represent a year and month where the day is not significant, e.g. credit card
        # expiration date. The year may be 0 to represent a month and day independent
        # of year, e.g. anniversary date. Related types are google.type.TimeOfDay
        # and `google.protobuf.Timestamp`.
      "year": 42, # Year of date. Must be from 1 to 9999, or 0 if specifying a date without
          # a year.
      "day": 42, # Day of month. Must be from 1 to 31 and valid for the year and month, or 0
          # if specifying a year/month where the day is not significant.
      "month": 42, # Month of year. Must be from 1 to 12.
    },
  },
  "projectId": "A String", # The ID of the Google Cloud Platform Console project that owns the job.
      # Required.
  "lastModificationTime": "A String", # This field cannot be changed by user requests.
  "creationTime": "A String", # This field cannot be changed by user requests.
  "name": "A String", # A globally unique name assigned by Storage Transfer Service when the
      # job is created. This field should be left empty in requests to create a new
      # transfer job; otherwise, the requests result in an `INVALID_ARGUMENT`
      # error.
}</pre>
</div>
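<div class="method">
  <code class="details" id="usage_example">Usage example (non-normative)</code>
  <p>The sketch below illustrates how the methods documented on this page can be combined from the Python client: it pages through jobs with <code>list()</code> and <code>list_next()</code>, then updates a job with <code>patch()</code> and <code>updateTransferJobFieldMask</code>. It is a minimal sketch, not part of the generated surface: it assumes Application Default Credentials, placeholder project, bucket, and job names, a <code>transferJobs</code> key in the list response, and the JSON-encoded <code>filter</code> shape described in the <code>list()</code> documentation.</p>
  <pre>
# A minimal sketch, assuming the google-api-python-client package and
# Application Default Credentials; project IDs, bucket names, and the job
# name below are placeholders.
import json

from googleapiclient.discovery import build

service = build('storagetransfer', 'v1')
project_id = 'my-project-id'  # placeholder

# --- Paging through jobs with list() / list_next() -----------------------
# list() takes its query parameters as a JSON-encoded `filter` string.
request = service.transferJobs().list(
    filter=json.dumps({'project_id': project_id}), pageSize=50)
while request is not None:
    response = request.execute()
    # The jobs are expected under the `transferJobs` key of the response.
    for job in response.get('transferJobs', []):
        print(job['name'], job.get('status'))
    # list_next() returns the request for the next page, or None when done.
    request = service.transferJobs().list_next(
        previous_request=request, previous_response=response)

job_name = 'transferJobs/1234567890'  # placeholder; use a job's `name` field

# --- Disabling a job with patch() ----------------------------------------
# Only the fields named in `updateTransferJobFieldMask` are touched, so the
# job's schedule and transferSpec are left as they are.
service.transferJobs().patch(jobName=job_name, body={
    'projectId': project_id,
    'updateTransferJobFieldMask': 'status',
    'transferJob': {'status': 'DISABLED'},
}).execute()

# --- Replacing the transfer spec ------------------------------------------
# When `transferSpec` is in the field mask, a complete specification must be
# supplied; an incomplete spec is rejected with INVALID_ARGUMENT.
new_spec = {
    'gcsDataSource': {'bucketName': 'source-bucket'},       # placeholder
    'gcsDataSink': {'bucketName': 'destination-bucket'},    # placeholder
    'objectConditions': {
        # Duration values are encoded as seconds with an "s" suffix; this
        # restricts the source to objects modified within the last 30 days.
        'maxTimeElapsedSinceLastModification': '2592000s',
    },
    'transferOptions': {
        'overwriteObjectsAlreadyExistingInSink': True,
        'deleteObjectsUniqueInSink': False,
        'deleteObjectsFromSourceAfterTransfer': False,
    },
}
service.transferJobs().patch(jobName=job_name, body={
    'projectId': project_id,
    'updateTransferJobFieldMask': 'transferSpec',
    'transferJob': {'transferSpec': new_spec},
}).execute()
</pre>
</div>

</body></html>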