#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** Type: MMv1 ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------

from __future__ import absolute_import, division, print_function

__metaclass__ = type

################################################################################
# Documentation
################################################################################

ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}

DOCUMENTATION = '''
---
|
|
module: gcp_cloudbuild_trigger
|
|
description:
|
|
- Configuration for an automated build in response to source repository changes.
|
|
short_description: Creates a GCP Trigger
|
|
author: Google Inc. (@googlecloudplatform)
|
|
requirements:
|
|
- python >= 2.6
|
|
- requests >= 2.18.4
|
|
- google-auth >= 1.3.0
|
|
options:
|
|
state:
|
|
description:
|
|
- Whether the given object should exist in GCP
|
|
choices:
|
|
- present
|
|
- absent
|
|
default: present
|
|
type: str
|
|
id:
|
|
description:
|
|
- The unique identifier for the trigger.
|
|
required: false
|
|
type: str
|
|
name:
|
|
description:
|
|
- Name of the trigger. Must be unique within the project.
|
|
required: false
|
|
type: str
|
|
description:
|
|
description:
|
|
- Human-readable description of the trigger.
|
|
required: false
|
|
type: str
|
|
tags:
|
|
description:
|
|
- Tags for annotation of a BuildTrigger.
|
|
elements: str
|
|
required: false
|
|
type: list
|
|
disabled:
|
|
description:
|
|
- Whether the trigger is disabled or not. If true, the trigger will never result
|
|
in a build.
|
|
required: false
|
|
type: bool
|
|
substitutions:
|
|
description:
|
|
- Substitutions data for Build resource.
|
|
required: false
|
|
type: dict
|
|
filename:
|
|
description:
|
|
- Path, from the source root, to a file whose contents are used for the template.
|
|
Either a filename or build template must be provided.
|
|
required: false
|
|
type: str
|
|
ignored_files:
|
|
description:
|
|
- ignoredFiles and includedFiles are file glob matches using U(https://golang.org/pkg/path/filepath/#Match)
|
|
extended with support for `**`.
|
|
- If ignoredFiles and changed files are both empty, then they are not used to
|
|
determine whether or not to trigger a build.
|
|
- If ignoredFiles is not empty, then we ignore any files that match any of the
|
|
ignored_file globs. If the change has no files that are outside of the ignoredFiles
|
|
globs, then we do not trigger a build.
|
|
elements: str
|
|
required: false
|
|
type: list
|
|
included_files:
|
|
description:
|
|
- ignoredFiles and includedFiles are file glob matches using U(https://golang.org/pkg/path/filepath/#Match)
|
|
extended with support for `**`.
|
|
- If any of the files altered in the commit pass the ignoredFiles filter and includedFiles
|
|
is empty, then as far as this filter is concerned, we should trigger the build.
|
|
- If any of the files altered in the commit pass the ignoredFiles filter and includedFiles
|
|
is not empty, then we make sure that at least one of those files matches an includedFiles
|
|
glob. If not, then we do not trigger a build.
|
|
elements: str
|
|
required: false
|
|
type: list
|
|
trigger_template:
|
|
description:
|
|
- Template describing the types of source changes to trigger a build.
|
|
- Branch and tag names in trigger templates are interpreted as regular expressions.
|
|
Any branch or tag change that matches that regular expression will trigger a
|
|
build.
|
|
required: false
|
|
type: dict
|
|
suboptions:
|
|
project_id:
|
|
description:
|
|
- ID of the project that owns the Cloud Source Repository. If omitted, the
|
|
project ID requesting the build is assumed.
|
|
required: false
|
|
type: str
|
|
repo_name:
|
|
description:
|
|
- Name of the Cloud Source Repository. If omitted, the name "default" is assumed.
|
|
required: false
|
|
default: default
|
|
type: str
|
|
dir:
|
|
description:
|
|
- Directory, relative to the source root, in which to run the build.
|
|
- This must be a relative path. If a step's dir is specified and is an absolute
|
|
path, this value is ignored for that step's execution.
|
|
required: false
|
|
type: str
|
|
invert_regex:
|
|
description:
|
|
- Only trigger a build if the revision regex does NOT match the revision regex.
|
|
required: false
|
|
type: bool
|
|
branch_name:
|
|
description:
|
|
- Name of the branch to build. Exactly one of a branch name, tag, or commit
|
|
SHA must be provided.
|
|
- This field is a regular expression.
|
|
required: false
|
|
type: str
|
|
tag_name:
|
|
description:
|
|
- Name of the tag to build. Exactly one of a branch name, tag, or commit SHA
|
|
must be provided.
|
|
- This field is a regular expression.
|
|
required: false
|
|
type: str
|
|
commit_sha:
|
|
description:
|
|
- Explicit commit SHA to build. Exactly one of a branch name, tag, or commit
|
|
SHA must be provided.
|
|
required: false
|
|
type: str
|
|
github:
|
|
description:
|
|
- Describes the configuration of a trigger that creates a build whenever a GitHub
|
|
event is received.
|
|
required: false
|
|
type: dict
|
|
suboptions:
|
|
owner:
|
|
description:
|
|
- 'Owner of the repository. For example: The owner for U(https://github.com/googlecloudplatform/cloud-builders)
|
|
is "googlecloudplatform".'
|
|
required: false
|
|
type: str
|
|
name:
|
|
description:
|
|
- 'Name of the repository. For example: The name for U(https://github.com/googlecloudplatform/cloud-builders)
|
|
is "cloud-builders".'
|
|
required: false
|
|
type: str
|
|
pull_request:
|
|
description:
|
|
- filter to match changes in pull requests. Specify only one of pullRequest
|
|
or push.
|
|
required: false
|
|
type: dict
|
|
suboptions:
|
|
branch:
|
|
description:
|
|
- Regex of branches to match.
|
|
required: true
|
|
type: str
|
|
comment_control:
|
|
description:
|
|
- Whether to block builds on a "/gcbrun" comment from a repository owner
|
|
or collaborator.
|
|
- 'Some valid choices include: "COMMENTS_DISABLED", "COMMENTS_ENABLED",
|
|
"COMMENTS_ENABLED_FOR_EXTERNAL_CONTRIBUTORS_ONLY"'
|
|
required: false
|
|
type: str
|
|
invert_regex:
|
|
description:
|
|
- If true, branches that do NOT match the git_ref will trigger a build.
|
|
required: false
|
|
type: bool
|
|
push:
|
|
description:
|
|
- filter to match changes in refs, like branches or tags. Specify only one
|
|
of pullRequest or push.
|
|
required: false
|
|
type: dict
|
|
suboptions:
|
|
invert_regex:
|
|
description:
|
|
- When true, only trigger a build if the revision regex does NOT match
|
|
the git_ref regex.
|
|
required: false
|
|
type: bool
|
|
branch:
|
|
description:
|
|
- Regex of branches to match. Specify only one of branch or tag.
|
|
required: false
|
|
type: str
|
|
tag:
|
|
description:
|
|
- Regex of tags to match. Specify only one of branch or tag.
|
|
required: false
|
|
type: str
|
|
pubsub_config:
|
|
description:
|
|
- PubsubConfig describes the configuration of a trigger that creates a build whenever
|
|
a Pub/Sub message is published.
|
|
required: false
|
|
type: dict
|
|
suboptions:
|
|
topic:
|
|
description:
|
|
- The name of the topic from which this subscription is receiving messages.
|
|
required: true
|
|
type: str
|
|
service_account_email:
|
|
description:
|
|
- Service account that will make the push request.
|
|
required: false
|
|
type: str
|
|
webhook_config:
|
|
description:
|
|
- WebhookConfig describes the configuration of a trigger that creates a build
|
|
whenever a webhook is sent to a trigger's webhook URL.
|
|
required: false
|
|
type: dict
|
|
suboptions:
|
|
secret:
|
|
description:
|
|
- Resource name for the secret required as a URL parameter.
|
|
required: true
|
|
type: str
|
|
build:
|
|
description:
|
|
- Contents of the build template. Either a filename or build template must be
|
|
provided.
|
|
required: false
|
|
type: dict
|
|
suboptions:
|
|
source:
|
|
description:
|
|
- The location of the source files to build.
|
|
required: false
|
|
type: dict
|
|
suboptions:
|
|
storage_source:
|
|
description:
|
|
- Location of the source in an archive file in Google Cloud Storage.
|
|
required: false
|
|
type: dict
|
|
suboptions:
|
|
bucket:
|
|
description:
|
|
- Google Cloud Storage bucket containing the source.
|
|
required: true
|
|
type: str
|
|
object:
|
|
description:
|
|
- Google Cloud Storage object containing the source.
|
|
- This object must be a gzipped archive file (.tar.gz) containing
|
|
source to build.
|
|
required: true
|
|
type: str
|
|
generation:
|
|
description:
|
|
- Google Cloud Storage generation for the object. If the generation
|
|
is omitted, the latest generation will be used.
|
|
required: false
|
|
type: str
|
|
repo_source:
|
|
description:
|
|
- Location of the source in a Google Cloud Source Repository.
|
|
required: false
|
|
type: dict
|
|
suboptions:
|
|
project_id:
|
|
description:
|
|
- ID of the project that owns the Cloud Source Repository. If omitted,
|
|
the project ID requesting the build is assumed.
|
|
required: false
|
|
type: str
|
|
repo_name:
|
|
description:
|
|
- Name of the Cloud Source Repository.
|
|
required: true
|
|
type: str
|
|
dir:
|
|
description:
|
|
- Directory, relative to the source root, in which to run the build.
|
|
- This must be a relative path. If a step's dir is specified and is
|
|
an absolute path, this value is ignored for that step's execution.
|
|
required: false
|
|
type: str
|
|
invert_regex:
|
|
description:
|
|
- Only trigger a build if the revision regex does NOT match the revision
|
|
regex.
|
|
required: false
|
|
type: bool
|
|
substitutions:
|
|
description:
|
|
- Substitutions to use in a triggered build. Should only be used with
|
|
triggers.run.
|
|
required: false
|
|
type: dict
|
|
branch_name:
|
|
description:
|
|
- Regex matching branches to build. Exactly one of a branch name,
|
|
tag, or commit SHA must be provided.
|
|
- The syntax of the regular expressions accepted is the syntax accepted
|
|
by RE2 and described at U(https://github.com/google/re2/wiki/Syntax)
|
|
.
|
|
required: false
|
|
type: str
|
|
tag_name:
|
|
description:
|
|
- Regex matching tags to build. Exactly one of a branch name, tag,
|
|
or commit SHA must be provided.
|
|
- The syntax of the regular expressions accepted is the syntax accepted
|
|
by RE2 and described at U(https://github.com/google/re2/wiki/Syntax)
|
|
.
|
|
required: false
|
|
type: str
|
|
commit_sha:
|
|
description:
|
|
- Explicit commit SHA to build. Exactly one of a branch name, tag,
|
|
or commit SHA must be provided.
|
|
required: false
|
|
type: str
|
|
tags:
|
|
description:
|
|
- Tags for annotation of a Build. These are not docker tags.
|
|
elements: str
|
|
required: false
|
|
type: list
|
|
images:
|
|
description:
|
|
- A list of images to be pushed upon the successful completion of all build
|
|
steps.
|
|
- The images are pushed using the builder service account's credentials.
|
|
- The digests of the pushed images will be stored in the Build resource's
|
|
results field.
|
|
- If any of the images fail to be pushed, the build status is marked FAILURE.
|
|
elements: str
|
|
required: false
|
|
type: list
|
|
substitutions:
|
|
description:
|
|
- Substitutions data for Build resource.
|
|
required: false
|
|
type: dict
|
|
queue_ttl:
|
|
description:
|
|
- TTL in queue for this build. If provided and the build is enqueued longer
|
|
than this value, the build will expire and the build status will be EXPIRED.
|
|
- The TTL starts ticking from createTime.
|
|
- 'A duration in seconds with up to nine fractional digits, terminated by
|
|
''s''. Example: "3.5s".'
|
|
required: false
|
|
type: str
|
|
logs_bucket:
|
|
description:
|
|
- Google Cloud Storage bucket where logs should be written. Logs file names
|
|
will be of the format ${logsBucket}/log-${build_id}.txt.
|
|
required: false
|
|
type: str
|
|
timeout:
|
|
description:
|
|
- Amount of time that this build should be allowed to run, to second granularity.
|
|
- If this amount of time elapses, work on the build will cease and the build
|
|
status will be TIMEOUT.
|
|
- This timeout must be equal to or greater than the sum of the timeouts for
|
|
build steps within the build.
|
|
- The expected format is the number of seconds followed by s.
|
|
- Default time is ten minutes (600s).
|
|
required: false
|
|
default: 600s
|
|
type: str
|
|
secrets:
|
|
description:
|
|
- Secrets to decrypt using Cloud Key Management Service.
|
|
elements: dict
|
|
required: false
|
|
type: list
|
|
suboptions:
|
|
kms_key_name:
|
|
description:
|
|
- Cloud KMS key name to use to decrypt these envs.
|
|
required: true
|
|
type: str
|
|
secret_env:
|
|
description:
|
|
- Map of environment variable name to its encrypted value.
|
|
- Secret environment variables must be unique across all of a build's
|
|
secrets, and must be used by at least one build step. Values can be
|
|
at most 64 KB in size. There can be at most 100 secret values across
|
|
all of a build's secrets.
|
|
required: false
|
|
type: dict
|
|
steps:
|
|
description:
|
|
- The operations to be performed on the workspace.
|
|
elements: dict
|
|
required: true
|
|
type: list
|
|
suboptions:
|
|
name:
|
|
description:
|
|
- The name of the container image that will run this particular build
|
|
step.
|
|
- If the image is available in the host's Docker daemon's cache, it will
|
|
be run directly. If not, the host will attempt to pull the image first,
|
|
using the builder service account's credentials if necessary.
|
|
- The Docker daemon's cache will already have the latest versions of all
|
|
of the officially supported build steps (see U(https://github.com/GoogleCloudPlatform/cloud-builders)
|
|
for images and examples).
|
|
- The Docker daemon will also have cached many of the layers for some
|
|
popular images, like "ubuntu", "debian", but they will be refreshed
|
|
at the time you attempt to use them.
|
|
- If you built an image in a previous build step, it will be stored in
|
|
the host's Docker daemon's cache and is available to use as the name
|
|
for a later build step.
|
|
required: true
|
|
type: str
|
|
args:
|
|
description:
|
|
- A list of arguments that will be presented to the step when it is started.
|
|
- If the image used to run the step's container has an entrypoint, the
|
|
args are used as arguments to that entrypoint. If the image does not
|
|
define an entrypoint, the first element in args is used as the entrypoint,
|
|
and the remainder will be used as arguments.
|
|
elements: str
|
|
required: false
|
|
type: list
|
|
env:
|
|
description:
|
|
- A list of environment variable definitions to be used when running a
|
|
step.
|
|
- The elements are of the form "KEY=VALUE" for the environment variable
|
|
"KEY" being given the value "VALUE".
|
|
elements: str
|
|
required: false
|
|
type: list
|
|
id:
|
|
description:
|
|
- Unique identifier for this build step, used in `wait_for` to reference
|
|
this build step as a dependency.
|
|
required: false
|
|
type: str
|
|
entrypoint:
|
|
description:
|
|
- Entrypoint to be used instead of the build step image's default entrypoint.
|
|
- If unset, the image's default entrypoint is used.
|
|
required: false
|
|
type: str
|
|
dir:
|
|
description:
|
|
- Working directory to use when running this step's container.
|
|
- If this value is a relative path, it is relative to the build's working
|
|
directory. If this value is absolute, it may be outside the build's
|
|
working directory, in which case the contents of the path may not be
|
|
persisted across build step executions, unless a `volume` for that path
|
|
is specified.
|
|
- If the build specifies a `RepoSource` with `dir` and a step with a `dir`,
|
|
which specifies an absolute path, the `RepoSource` `dir` is ignored
|
|
for the step's execution.
|
|
required: false
|
|
type: str
|
|
secret_env:
|
|
description:
|
|
- A list of environment variables which are encrypted using a Cloud Key
|
|
Management Service crypto key. These values must be specified in the
|
|
build's `Secret`.
|
|
elements: str
|
|
required: false
|
|
type: list
|
|
timeout:
|
|
description:
|
|
- Time limit for executing this build step. If not defined, the step has
|
|
no time limit and will be allowed to continue to run until either it
|
|
completes or the build itself times out.
|
|
required: false
|
|
type: str
|
|
timing:
|
|
description:
|
|
- Output only. Stores timing information for executing this build step.
|
|
required: false
|
|
type: str
|
|
volumes:
|
|
description:
|
|
- List of volumes to mount into the build step.
|
|
- Each volume is created as an empty volume prior to execution of the
|
|
build step. Upon completion of the build, volumes and their contents
|
|
are discarded.
|
|
- Using a named volume in only one step is not valid as it is indicative
|
|
of a build request with an incorrect configuration.
|
|
elements: dict
|
|
required: false
|
|
type: list
|
|
suboptions:
|
|
name:
|
|
description:
|
|
- Name of the volume to mount.
|
|
- Volume names must be unique per build step and must be valid names
|
|
for Docker volumes. Each named volume must be used by at least two
|
|
build steps.
|
|
required: true
|
|
type: str
|
|
path:
|
|
description:
|
|
- Path at which to mount the volume.
|
|
- Paths must be absolute and cannot conflict with other volume paths
|
|
on the same build step or with certain reserved volume paths.
|
|
required: true
|
|
type: str
|
|
wait_for:
|
|
description:
|
|
- The ID(s) of the step(s) that this build step depends on.
|
|
- This build step will not start until all the build steps in `wait_for`
|
|
have completed successfully. If `wait_for` is empty, this build step
|
|
will start when all previous build steps in the `Build.Steps` list have
|
|
completed successfully.
|
|
elements: str
|
|
required: false
|
|
type: list
|
|
artifacts:
|
|
description:
|
|
- Artifacts produced by the build that should be uploaded upon successful
|
|
completion of all build steps.
|
|
required: false
|
|
type: dict
|
|
suboptions:
|
|
images:
|
|
description:
|
|
- A list of images to be pushed upon the successful completion of all
|
|
build steps.
|
|
- The images will be pushed using the builder service account's credentials.
|
|
- The digests of the pushed images will be stored in the Build resource's
|
|
results field.
|
|
- If any of the images fail to be pushed, the build is marked FAILURE.
|
|
elements: str
|
|
required: false
|
|
type: list
|
|
objects:
|
|
description:
|
|
- A list of objects to be uploaded to Cloud Storage upon successful completion
|
|
of all build steps.
|
|
- Files in the workspace matching specified paths globs will be uploaded
|
|
to the Cloud Storage location using the builder service account's credentials.
|
|
- The location and generation of the uploaded objects will be stored in
|
|
the Build resource's results field.
|
|
- If any objects fail to be pushed, the build is marked FAILURE.
|
|
required: false
|
|
type: dict
|
|
suboptions:
|
|
location:
|
|
description:
|
|
- Cloud Storage bucket and optional object path, in the form "gs://bucket/path/to/somewhere/".
|
|
- Files in the workspace matching any path pattern will be uploaded
|
|
to Cloud Storage with this location as a prefix.
|
|
required: false
|
|
type: str
|
|
paths:
|
|
description:
|
|
- Path globs used to match files in the build's workspace.
|
|
elements: str
|
|
required: false
|
|
type: list
|
|
options:
|
|
description:
|
|
- Special options for this build.
|
|
required: false
|
|
type: dict
|
|
suboptions:
|
|
source_provenance_hash:
|
|
description:
|
|
- Requested hash for SourceProvenance.
|
|
elements: str
|
|
required: false
|
|
type: list
|
|
requested_verify_option:
|
|
description:
|
|
- Requested verifiability options.
|
|
- 'Some valid choices include: "NOT_VERIFIED", "VERIFIED"'
|
|
required: false
|
|
type: str
|
|
machine_type:
|
|
description:
|
|
- Compute Engine machine type on which to run the build.
|
|
- 'Some valid choices include: "UNSPECIFIED", "N1_HIGHCPU_8", "N1_HIGHCPU_32",
|
|
"E2_HIGHCPU_8", "E2_HIGHCPU_32"'
|
|
required: false
|
|
type: str
|
|
disk_size_gb:
|
|
description:
|
|
- Requested disk size for the VM that runs the build. Note that this is
|
|
NOT "disk free"; some of the space will be used by the operating system
|
|
and build utilities.
|
|
- Also note that this is the minimum disk size that will be allocated
|
|
for the build -- the build may run with a larger disk than requested.
|
|
At present, the maximum disk size is 1000GB; builds that request more
|
|
than the maximum are rejected with an error.
|
|
required: false
|
|
type: int
|
|
substitution_option:
|
|
description:
|
|
- Option to specify behavior when there is an error in the substitution
|
|
checks.
|
|
- NOTE this is always set to ALLOW_LOOSE for triggered builds and cannot
|
|
be overridden in the build configuration file.
|
|
- 'Some valid choices include: "MUST_MATCH", "ALLOW_LOOSE"'
|
|
required: false
|
|
type: str
|
|
dynamic_substitutions:
|
|
description:
|
|
- Option to specify whether or not to apply bash style string operations
|
|
to the substitutions.
|
|
- NOTE this is always enabled for triggered builds and cannot be overridden
|
|
in the build configuration file.
|
|
required: false
|
|
type: bool
|
|
log_streaming_option:
|
|
description:
|
|
- Option to define build log streaming behavior to Google Cloud Storage.
|
|
- 'Some valid choices include: "STREAM_DEFAULT", "STREAM_ON", "STREAM_OFF"'
|
|
required: false
|
|
type: str
|
|
worker_pool:
|
|
description:
|
|
- Option to specify a WorkerPool for the build. Format: projects/{project}/workerPools/{workerPool}.
|
|
This field is experimental.
|
|
required: false
|
|
type: str
|
|
logging:
|
|
description:
|
|
- Option to specify the logging mode, which determines if and where build
|
|
logs are stored.
|
|
- 'Some valid choices include: "LOGGING_UNSPECIFIED", "LEGACY", "GCS_ONLY",
|
|
"STACKDRIVER_ONLY", "NONE"'
|
|
required: false
|
|
type: str
|
|
env:
|
|
description:
|
|
- A list of global environment variable definitions that will exist for
|
|
all build steps in this build. If a variable is defined both globally
|
|
and in a build step, the variable will use the build step value.
|
|
- The elements are of the form "KEY=VALUE" for the environment variable
|
|
"KEY" being given the value "VALUE".
|
|
elements: str
|
|
required: false
|
|
type: list
|
|
secret_env:
|
|
description:
|
|
- A list of global environment variables, which are encrypted using a
|
|
Cloud Key Management Service crypto key. These values must be specified
|
|
in the build's Secret. These variables will be available to all build
|
|
steps in this build.
|
|
elements: str
|
|
required: false
|
|
type: list
|
|
volumes:
|
|
description:
|
|
- Global list of volumes to mount for ALL build steps. Each volume is created
|
|
as an empty volume prior to starting the build process.
|
|
- Upon completion of the build, volumes and their contents are discarded.
|
|
Global volume names and paths cannot conflict with the volumes defined
|
|
in a build step.
|
|
- Using a global volume in a build with only one step is not valid as
|
|
it is indicative of a build request with an incorrect configuration.
|
|
elements: dict
|
|
required: false
|
|
type: list
|
|
suboptions:
|
|
name:
|
|
description:
|
|
- Name of the volume to mount.
|
|
- Volume names must be unique per build step and must be valid names
|
|
for Docker volumes.
|
|
- Each named volume must be used by at least two build steps.
|
|
required: false
|
|
type: str
|
|
path:
|
|
description:
|
|
- Path at which to mount the volume.
|
|
- Paths must be absolute and cannot conflict with other volume paths
|
|
on the same build step or with certain reserved volume paths.
|
|
required: false
|
|
type: str
|
|
project:
|
|
description:
|
|
- The Google Cloud Platform project to use.
|
|
type: str
|
|
auth_kind:
|
|
description:
|
|
- The type of credential used.
|
|
type: str
|
|
required: true
|
|
choices:
|
|
- application
|
|
- machineaccount
|
|
- serviceaccount
|
|
- accesstoken
|
|
service_account_contents:
|
|
description:
|
|
- The contents of a Service Account JSON file, either in a dictionary or as a
|
|
JSON string that represents it.
|
|
type: jsonarg
|
|
service_account_file:
|
|
description:
|
|
- The path of a Service Account JSON file if serviceaccount is selected as type.
|
|
type: path
|
|
service_account_email:
|
|
description:
|
|
- An optional service account email address if machineaccount is selected and
|
|
the user does not wish to use the default email.
|
|
type: str
|
|
access_token:
|
|
description:
|
|
- An OAuth2 access token if credential type is accesstoken.
|
|
type: str
|
|
scopes:
|
|
description:
|
|
- Array of scopes to be used
|
|
type: list
|
|
elements: str
|
|
env_type:
|
|
description:
|
|
- Specifies which Ansible environment you're running this module within.
|
|
- This should not be set unless you know what you're doing.
|
|
- This only alters the User Agent string for any API requests.
|
|
type: str
|
|
notes:
|
|
- 'API Reference: U(https://cloud.google.com/cloud-build/docs/api/reference/rest/v1/projects.triggers)'
|
|
- 'Automating builds using build triggers: U(https://cloud.google.com/cloud-build/docs/running-builds/automate-builds)'
|
|
- For authentication, you can set service_account_file using the C(GCP_SERVICE_ACCOUNT_FILE)
|
|
env variable.
|
|
- For authentication, you can set service_account_contents using the C(GCP_SERVICE_ACCOUNT_CONTENTS)
|
|
env variable.
|
|
- For authentication, you can set service_account_email using the C(GCP_SERVICE_ACCOUNT_EMAIL)
|
|
env variable.
|
|
- For authentication, you can set access_token using the C(GCP_ACCESS_TOKEN)
|
|
env variable.
|
|
- For authentication, you can set auth_kind using the C(GCP_AUTH_KIND) env variable.
|
|
- For authentication, you can set scopes using the C(GCP_SCOPES) env variable.
|
|
- Environment variables values will only be used if the playbook values are not set.
|
|
- The I(service_account_email) and I(service_account_file) options are mutually exclusive.
|
|
- The id for this resource is created by the API after you create the resource the
|
|
first time. If you want to manage this resource after creation, you'll have to copy
|
|
the generated id into the playbook. If you do not, new triggers will be created
|
|
on subsequent runs.
|
|
'''
|
|
|
|
EXAMPLES = '''
- name: create a repository
  google.cloud.gcp_sourcerepo_repository:
    name: projects/{{ gcp_project }}/repos/{{ resource_name }}
    project: "{{ gcp_project }}"
    auth_kind: "{{ gcp_cred_kind }}"
    service_account_file: "{{ gcp_cred_file }}"
    state: present

- name: create a trigger
  google.cloud.gcp_cloudbuild_trigger:
    trigger_template:
      branch_name: master
      project_id: test_project
      repo_name: test_object
    filename: cloudbuild.yaml
    project: test_project
    auth_kind: serviceaccount
    service_account_file: "/tmp/auth.pem"
    state: present
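
# Illustrative sketch only, not part of the generated examples: the API-assigned
# id (see the notes above) is needed to manage or remove an existing trigger on
# later runs. This assumes the create task above registered its result as
# trigger_result.
- name: delete the trigger by its generated id
  google.cloud.gcp_cloudbuild_trigger:
    id: "{{ trigger_result.id }}"
    project: test_project
    auth_kind: serviceaccount
    service_account_file: "/tmp/auth.pem"
    state: absent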
'''
|
|
|
|
RETURN = '''
|
|
id:
|
|
description:
|
|
- The unique identifier for the trigger.
|
|
returned: success
|
|
type: str
|
|
name:
|
|
description:
|
|
- Name of the trigger. Must be unique within the project.
|
|
returned: success
|
|
type: str
|
|
description:
|
|
description:
|
|
- Human-readable description of the trigger.
|
|
returned: success
|
|
type: str
|
|
tags:
|
|
description:
|
|
- Tags for annotation of a BuildTrigger.
|
|
returned: success
|
|
type: list
|
|
disabled:
|
|
description:
|
|
- Whether the trigger is disabled or not. If true, the trigger will never result
|
|
in a build.
|
|
returned: success
|
|
type: bool
|
|
createTime:
|
|
description:
|
|
- Time when the trigger was created.
|
|
returned: success
|
|
type: str
|
|
substitutions:
|
|
description:
|
|
- Substitutions data for Build resource.
|
|
returned: success
|
|
type: dict
|
|
filename:
|
|
description:
|
|
- Path, from the source root, to a file whose contents are used for the template.
|
|
Either a filename or build template must be provided.
|
|
returned: success
|
|
type: str
|
|
ignoredFiles:
|
|
description:
|
|
- ignoredFiles and includedFiles are file glob matches using U(https://golang.org/pkg/path/filepath/#Match)
|
|
extended with support for `**`.
|
|
- If ignoredFiles and changed files are both empty, then they are not used to determine
|
|
whether or not to trigger a build.
|
|
- If ignoredFiles is not empty, then we ignore any files that match any of the ignored_file
|
|
globs. If the change has no files that are outside of the ignoredFiles globs,
|
|
then we do not trigger a build.
|
|
returned: success
|
|
type: list
|
|
includedFiles:
|
|
description:
|
|
- ignoredFiles and includedFiles are file glob matches using U(https://golang.org/pkg/path/filepath/#Match)
|
|
extended with support for `**`.
|
|
- If any of the files altered in the commit pass the ignoredFiles filter and includedFiles
|
|
is empty, then as far as this filter is concerned, we should trigger the build.
|
|
- If any of the files altered in the commit pass the ignoredFiles filter and includedFiles
|
|
is not empty, then we make sure that at least one of those files matches an includedFiles
|
|
glob. If not, then we do not trigger a build.
|
|
returned: success
|
|
type: list
|
|
triggerTemplate:
|
|
description:
|
|
- Template describing the types of source changes to trigger a build.
|
|
- Branch and tag names in trigger templates are interpreted as regular expressions.
|
|
Any branch or tag change that matches that regular expression will trigger a build.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
projectId:
|
|
description:
|
|
- ID of the project that owns the Cloud Source Repository. If omitted, the project
|
|
ID requesting the build is assumed.
|
|
returned: success
|
|
type: str
|
|
repoName:
|
|
description:
|
|
- Name of the Cloud Source Repository. If omitted, the name "default" is assumed.
|
|
returned: success
|
|
type: str
|
|
dir:
|
|
description:
|
|
- Directory, relative to the source root, in which to run the build.
|
|
- This must be a relative path. If a step's dir is specified and is an absolute
|
|
path, this value is ignored for that step's execution.
|
|
returned: success
|
|
type: str
|
|
invertRegex:
|
|
description:
|
|
- Only trigger a build if the revision regex does NOT match the revision regex.
|
|
returned: success
|
|
type: bool
|
|
branchName:
|
|
description:
|
|
- Name of the branch to build. Exactly one of a branch name, tag, or commit
|
|
SHA must be provided.
|
|
- This field is a regular expression.
|
|
returned: success
|
|
type: str
|
|
tagName:
|
|
description:
|
|
- Name of the tag to build. Exactly one of a branch name, tag, or commit SHA
|
|
must be provided.
|
|
- This field is a regular expression.
|
|
returned: success
|
|
type: str
|
|
commitSha:
|
|
description:
|
|
- Explicit commit SHA to build. Exactly one of a branch name, tag, or commit
|
|
SHA must be provided.
|
|
returned: success
|
|
type: str
|
|
github:
|
|
description:
|
|
- Describes the configuration of a trigger that creates a build whenever a GitHub
|
|
event is received.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
owner:
|
|
description:
|
|
- 'Owner of the repository. For example: The owner for U(https://github.com/googlecloudplatform/cloud-builders)
|
|
is "googlecloudplatform".'
|
|
returned: success
|
|
type: str
|
|
name:
|
|
description:
|
|
- 'Name of the repository. For example: The name for U(https://github.com/googlecloudplatform/cloud-builders)
|
|
is "cloud-builders".'
|
|
returned: success
|
|
type: str
|
|
pullRequest:
|
|
description:
|
|
- filter to match changes in pull requests. Specify only one of pullRequest
|
|
or push.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
branch:
|
|
description:
|
|
- Regex of branches to match.
|
|
returned: success
|
|
type: str
|
|
commentControl:
|
|
description:
|
|
- Whether to block builds on a "/gcbrun" comment from a repository owner
|
|
or collaborator.
|
|
returned: success
|
|
type: str
|
|
invertRegex:
|
|
description:
|
|
- If true, branches that do NOT match the git_ref will trigger a build.
|
|
returned: success
|
|
type: bool
|
|
push:
|
|
description:
|
|
- filter to match changes in refs, like branches or tags. Specify only one of
|
|
pullRequest or push.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
invertRegex:
|
|
description:
|
|
- When true, only trigger a build if the revision regex does NOT match the
|
|
git_ref regex.
|
|
returned: success
|
|
type: bool
|
|
branch:
|
|
description:
|
|
- Regex of branches to match. Specify only one of branch or tag.
|
|
returned: success
|
|
type: str
|
|
tag:
|
|
description:
|
|
- Regex of tags to match. Specify only one of branch or tag.
|
|
returned: success
|
|
type: str
|
|
pubsubConfig:
|
|
description:
|
|
- PubsubConfig describes the configuration of a trigger that creates a build whenever
|
|
a Pub/Sub message is published.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
subscription:
|
|
description:
|
|
- Output only. Name of the subscription.
|
|
returned: success
|
|
type: str
|
|
topic:
|
|
description:
|
|
- The name of the topic from which this subscription is receiving messages.
|
|
returned: success
|
|
type: str
|
|
service_account_email:
|
|
description:
|
|
- Service account that will make the push request.
|
|
returned: success
|
|
type: str
|
|
state:
|
|
description:
|
|
- Potential issues with the underlying Pub/Sub subscription configuration.
|
|
- Only populated on get requests.
|
|
returned: success
|
|
type: str
|
|
webhookConfig:
|
|
description:
|
|
- WebhookConfig describes the configuration of a trigger that creates a build whenever
|
|
a webhook is sent to a trigger's webhook URL.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
secret:
|
|
description:
|
|
- Resource name for the secret required as a URL parameter.
|
|
returned: success
|
|
type: str
|
|
state:
|
|
description:
|
|
- Potential issues with the underlying Pub/Sub subscription configuration.
|
|
- Only populated on get requests.
|
|
returned: success
|
|
type: str
|
|
build:
|
|
description:
|
|
- Contents of the build template. Either a filename or build template must be provided.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
source:
|
|
description:
|
|
- The location of the source files to build.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
storageSource:
|
|
description:
|
|
- Location of the source in an archive file in Google Cloud Storage.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
bucket:
|
|
description:
|
|
- Google Cloud Storage bucket containing the source.
|
|
returned: success
|
|
type: str
|
|
object:
|
|
description:
|
|
- Google Cloud Storage object containing the source.
|
|
- This object must be a gzipped archive file (.tar.gz) containing source
|
|
to build.
|
|
returned: success
|
|
type: str
|
|
generation:
|
|
description:
|
|
- Google Cloud Storage generation for the object. If the generation
|
|
is omitted, the latest generation will be used.
|
|
returned: success
|
|
type: str
|
|
repoSource:
|
|
description:
|
|
- Location of the source in a Google Cloud Source Repository.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
projectId:
|
|
description:
|
|
- ID of the project that owns the Cloud Source Repository. If omitted,
|
|
the project ID requesting the build is assumed.
|
|
returned: success
|
|
type: str
|
|
repoName:
|
|
description:
|
|
- Name of the Cloud Source Repository.
|
|
returned: success
|
|
type: str
|
|
dir:
|
|
description:
|
|
- Directory, relative to the source root, in which to run the build.
|
|
- This must be a relative path. If a step's dir is specified and is
|
|
an absolute path, this value is ignored for that step's execution.
|
|
returned: success
|
|
type: str
|
|
invertRegex:
|
|
description:
|
|
- Only trigger a build if the revision regex does NOT match the revision
|
|
regex.
|
|
returned: success
|
|
type: bool
|
|
substitutions:
|
|
description:
|
|
- Substitutions to use in a triggered build. Should only be used with
|
|
triggers.run.
|
|
returned: success
|
|
type: dict
|
|
branchName:
|
|
description:
|
|
- Regex matching branches to build. Exactly one of a branch name, tag,
|
|
or commit SHA must be provided.
|
|
- The syntax of the regular expressions accepted is the syntax accepted
|
|
by RE2 and described at U(https://github.com/google/re2/wiki/Syntax)
|
|
.
|
|
returned: success
|
|
type: str
|
|
tagName:
|
|
description:
|
|
- Regex matching tags to build. Exactly one of a branch name, tag, or
|
|
commit SHA must be provided.
|
|
- The syntax of the regular expressions accepted is the syntax accepted
|
|
by RE2 and described at U(https://github.com/google/re2/wiki/Syntax)
|
|
.
|
|
returned: success
|
|
type: str
|
|
commitSha:
|
|
description:
|
|
- Explicit commit SHA to build. Exactly one of a branch name, tag, or
|
|
commit SHA must be provided.
|
|
returned: success
|
|
type: str
|
|
tags:
|
|
description:
|
|
- Tags for annotation of a Build. These are not docker tags.
|
|
returned: success
|
|
type: list
|
|
images:
|
|
description:
|
|
- A list of images to be pushed upon the successful completion of all build
|
|
steps.
|
|
- The images are pushed using the builder service account's credentials.
|
|
- The digests of the pushed images will be stored in the Build resource's results
|
|
field.
|
|
- If any of the images fail to be pushed, the build status is marked FAILURE.
|
|
returned: success
|
|
type: list
|
|
substitutions:
|
|
description:
|
|
- Substitutions data for Build resource.
|
|
returned: success
|
|
type: dict
|
|
queueTtl:
|
|
description:
|
|
- TTL in queue for this build. If provided and the build is enqueued longer
|
|
than this value, the build will expire and the build status will be EXPIRED.
|
|
- The TTL starts ticking from createTime.
|
|
- 'A duration in seconds with up to nine fractional digits, terminated by ''s''.
|
|
Example: "3.5s".'
|
|
returned: success
|
|
type: str
|
|
logsBucket:
|
|
description:
|
|
- Google Cloud Storage bucket where logs should be written. Logs file names
|
|
will be of the format ${logsBucket}/log-${build_id}.txt.
|
|
returned: success
|
|
type: str
|
|
timeout:
|
|
description:
|
|
- Amount of time that this build should be allowed to run, to second granularity.
|
|
- If this amount of time elapses, work on the build will cease and the build
|
|
status will be TIMEOUT.
|
|
- This timeout must be equal to or greater than the sum of the timeouts for
|
|
build steps within the build.
|
|
- The expected format is the number of seconds followed by s.
|
|
- Default time is ten minutes (600s).
|
|
returned: success
|
|
type: str
|
|
secrets:
|
|
description:
|
|
- Secrets to decrypt using Cloud Key Management Service.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
kmsKeyName:
|
|
description:
|
|
- Cloud KMS key name to use to decrypt these envs.
|
|
returned: success
|
|
type: str
|
|
secretEnv:
|
|
description:
|
|
- Map of environment variable name to its encrypted value.
|
|
- Secret environment variables must be unique across all of a build's secrets,
|
|
and must be used by at least one build step. Values can be at most 64
|
|
KB in size. There can be at most 100 secret values across all of a build's
|
|
secrets.
|
|
returned: success
|
|
type: dict
|
|
steps:
|
|
description:
|
|
- The operations to be performed on the workspace.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
name:
|
|
description:
|
|
- The name of the container image that will run this particular build step.
|
|
- If the image is available in the host's Docker daemon's cache, it will
|
|
be run directly. If not, the host will attempt to pull the image first,
|
|
using the builder service account's credentials if necessary.
|
|
- The Docker daemon's cache will already have the latest versions of all
|
|
of the officially supported build steps (see U(https://github.com/GoogleCloudPlatform/cloud-builders)
|
|
for images and examples).
|
|
- The Docker daemon will also have cached many of the layers for some popular
|
|
images, like "ubuntu", "debian", but they will be refreshed at the time
|
|
you attempt to use them.
|
|
- If you built an image in a previous build step, it will be stored in the
|
|
host's Docker daemon's cache and is available to use as the name for a
|
|
later build step.
|
|
returned: success
|
|
type: str
|
|
args:
|
|
description:
|
|
- A list of arguments that will be presented to the step when it is started.
|
|
- If the image used to run the step's container has an entrypoint, the args
|
|
are used as arguments to that entrypoint. If the image does not define
|
|
an entrypoint, the first element in args is used as the entrypoint, and
|
|
the remainder will be used as arguments.
|
|
returned: success
|
|
type: list
|
|
env:
|
|
description:
|
|
- A list of environment variable definitions to be used when running a step.
|
|
- The elements are of the form "KEY=VALUE" for the environment variable
|
|
"KEY" being given the value "VALUE".
|
|
returned: success
|
|
type: list
|
|
id:
|
|
description:
|
|
- Unique identifier for this build step, used in `wait_for` to reference
|
|
this build step as a dependency.
|
|
returned: success
|
|
type: str
|
|
entrypoint:
|
|
description:
|
|
- Entrypoint to be used instead of the build step image's default entrypoint.
|
|
- If unset, the image's default entrypoint is used.
|
|
returned: success
|
|
type: str
|
|
dir:
|
|
description:
|
|
- Working directory to use when running this step's container.
|
|
- If this value is a relative path, it is relative to the build's working
|
|
directory. If this value is absolute, it may be outside the build's working
|
|
directory, in which case the contents of the path may not be persisted
|
|
across build step executions, unless a `volume` for that path is specified.
|
|
- If the build specifies a `RepoSource` with `dir` and a step with a `dir`,
|
|
which specifies an absolute path, the `RepoSource` `dir` is ignored for
|
|
the step's execution.
|
|
returned: success
|
|
type: str
|
|
secretEnv:
|
|
description:
|
|
- A list of environment variables which are encrypted using a Cloud Key
|
|
Management Service crypto key. These values must be specified in the build's
|
|
`Secret`.
|
|
returned: success
|
|
type: list
|
|
timeout:
|
|
description:
|
|
- Time limit for executing this build step. If not defined, the step has
|
|
no time limit and will be allowed to continue to run until either it completes
|
|
or the build itself times out.
|
|
returned: success
|
|
type: str
|
|
timing:
|
|
description:
|
|
- Output only. Stores timing information for executing this build step.
|
|
returned: success
|
|
type: str
|
|
volumes:
|
|
description:
|
|
- List of volumes to mount into the build step.
|
|
- Each volume is created as an empty volume prior to execution of the build
|
|
step. Upon completion of the build, volumes and their contents are discarded.
|
|
- Using a named volume in only one step is not valid as it is indicative
|
|
of a build request with an incorrect configuration.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
name:
|
|
description:
|
|
- Name of the volume to mount.
|
|
- Volume names must be unique per build step and must be valid names
|
|
for Docker volumes. Each named volume must be used by at least two
|
|
build steps.
|
|
returned: success
|
|
type: str
|
|
path:
|
|
description:
|
|
- Path at which to mount the volume.
|
|
- Paths must be absolute and cannot conflict with other volume paths
|
|
on the same build step or with certain reserved volume paths.
|
|
returned: success
|
|
type: str
|
|
waitFor:
|
|
description:
|
|
- The ID(s) of the step(s) that this build step depends on.
|
|
- This build step will not start until all the build steps in `wait_for`
|
|
have completed successfully. If `wait_for` is empty, this build step will
|
|
start when all previous build steps in the `Build.Steps` list have completed
|
|
successfully.
|
|
returned: success
|
|
type: list
|
|
artifacts:
|
|
description:
|
|
- Artifacts produced by the build that should be uploaded upon successful completion
|
|
of all build steps.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
images:
|
|
description:
|
|
- A list of images to be pushed upon the successful completion of all build
|
|
steps.
|
|
- The images will be pushed using the builder service account's credentials.
|
|
- The digests of the pushed images will be stored in the Build resource's
|
|
results field.
|
|
- If any of the images fail to be pushed, the build is marked FAILURE.
|
|
returned: success
|
|
type: list
|
|
objects:
|
|
description:
|
|
- A list of objects to be uploaded to Cloud Storage upon successful completion
|
|
of all build steps.
|
|
- Files in the workspace matching specified paths globs will be uploaded
|
|
to the Cloud Storage location using the builder service account's credentials.
|
|
- The location and generation of the uploaded objects will be stored in
|
|
the Build resource's results field.
|
|
- If any objects fail to be pushed, the build is marked FAILURE.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
location:
|
|
description:
|
|
- Cloud Storage bucket and optional object path, in the form "gs://bucket/path/to/somewhere/".
|
|
- Files in the workspace matching any path pattern will be uploaded
|
|
to Cloud Storage with this location as a prefix.
|
|
returned: success
|
|
type: str
|
|
paths:
|
|
description:
|
|
- Path globs used to match files in the build's workspace.
|
|
returned: success
|
|
type: list
|
|
timing:
|
|
description:
|
|
- Output only. Stores timing information for pushing all artifact objects.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
startTime:
|
|
description:
|
|
- Start of time span.
|
|
- 'A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution
|
|
and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z"
|
|
and "2014-10-02T15:01:23.045123456Z".'
|
|
returned: success
|
|
type: str
|
|
endTime:
|
|
description:
|
|
- End of time span.
|
|
- 'A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution
|
|
and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z"
|
|
and "2014-10-02T15:01:23.045123456Z".'
|
|
returned: success
|
|
type: str
|
|
options:
|
|
description:
|
|
- Special options for this build.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
sourceProvenanceHash:
|
|
description:
|
|
- Requested hash for SourceProvenance.
|
|
returned: success
|
|
type: list
|
|
requestedVerifyOption:
|
|
description:
|
|
- Requested verifiability options.
|
|
returned: success
|
|
type: str
|
|
machineType:
|
|
description:
|
|
- Compute Engine machine type on which to run the build.
|
|
returned: success
|
|
type: str
|
|
diskSizeGb:
|
|
description:
|
|
- Requested disk size for the VM that runs the build. Note that this is
|
|
NOT "disk free"; some of the space will be used by the operating system
|
|
and build utilities.
|
|
- Also note that this is the minimum disk size that will be allocated for
|
|
the build -- the build may run with a larger disk than requested. At present,
|
|
the maximum disk size is 1000GB; builds that request more than the maximum
|
|
are rejected with an error.
|
|
returned: success
|
|
type: int
|
|
substitutionOption:
|
|
description:
|
|
- Option to specify behavior when there is an error in the substitution
|
|
checks.
|
|
- NOTE this is always set to ALLOW_LOOSE for triggered builds and cannot
|
|
be overridden in the build configuration file.
|
|
returned: success
|
|
type: str
|
|
dynamicSubstitutions:
|
|
description:
|
|
- Option to specify whether or not to apply bash style string operations
|
|
to the substitutions.
|
|
- NOTE this is always enabled for triggered builds and cannot be overridden
|
|
in the build configuration file.
|
|
returned: success
|
|
type: bool
|
|
logStreamingOption:
|
|
description:
|
|
- Option to define build log streaming behavior to Google Cloud Storage.
|
|
returned: success
|
|
type: str
|
|
workerPool:
|
|
description:
|
|
- Option to specify a WorkerPool for the build. Format: projects/{project}/workerPools/{workerPool}.
|
|
This field is experimental.
|
|
returned: success
|
|
type: str
|
|
logging:
|
|
description:
|
|
- Option to specify the logging mode, which determines if and where build
|
|
logs are stored.
|
|
returned: success
|
|
type: str
|
|
env:
|
|
description:
|
|
- A list of global environment variable definitions that will exist for
|
|
all build steps in this build. If a variable is defined both globally
|
|
and in a build step, the variable will use the build step value.
|
|
- The elements are of the form "KEY=VALUE" for the environment variable
|
|
"KEY" being given the value "VALUE".
|
|
returned: success
|
|
type: list
|
|
secretEnv:
|
|
description:
|
|
- A list of global environment variables, which are encrypted using a Cloud
|
|
Key Management Service crypto key. These values must be specified in the
|
|
build's Secret. These variables will be available to all build steps in
|
|
this build.
|
|
returned: success
|
|
type: list
|
|
volumes:
|
|
description:
|
|
- Global list of volumes to mount for ALL build steps. Each volume is created
|
|
as an empty volume prior to starting the build process.
|
|
- Upon completion of the build, volumes and their contents are discarded.
|
|
Global volume names and paths cannot conflict with the volumes defined
|
|
in a build step.
|
|
- Using a global volume in a build with only one step is not valid as it
|
|
is indicative of a build request with an incorrect configuration.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
name:
|
|
description:
|
|
- Name of the volume to mount.
|
|
- Volume names must be unique per build step and must be valid names
|
|
for Docker volumes.
|
|
- Each named volume must be used by at least two build steps.
|
|
returned: success
|
|
type: str
|
|
path:
|
|
description:
|
|
- Path at which to mount the volume.
|
|
- Paths must be absolute and cannot conflict with other volume paths
|
|
on the same build step or with certain reserved volume paths.
|
|
returned: success
|
|
type: str
|
|
'''
|
|
|
|
################################################################################
# Imports
################################################################################

from ansible_collections.google.cloud.plugins.module_utils.gcp_utils import (
    navigate_hash,
    GcpSession,
    GcpModule,
    GcpRequest,
    remove_nones_from_dict,
)
import json

################################################################################
# Main
################################################################################


def main():
    """Main function"""

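    # The argument spec below mirrors the options documented in the DOCUMENTATION
    # block above; both are generated from the same Magic Modules source.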
    module = GcpModule(
        argument_spec=dict(
            state=dict(default='present', choices=['present', 'absent'], type='str'),
            id=dict(type='str'),
            name=dict(type='str'),
            description=dict(type='str'),
            tags=dict(type='list', elements='str'),
            disabled=dict(type='bool'),
            substitutions=dict(type='dict'),
            filename=dict(type='str'),
            ignored_files=dict(type='list', elements='str'),
            included_files=dict(type='list', elements='str'),
            trigger_template=dict(
                type='dict',
                options=dict(
                    project_id=dict(type='str'),
                    repo_name=dict(default='default', type='str'),
                    dir=dict(type='str'),
                    invert_regex=dict(type='bool'),
                    branch_name=dict(type='str'),
                    tag_name=dict(type='str'),
                    commit_sha=dict(type='str'),
                ),
            ),
            github=dict(
                type='dict',
                options=dict(
                    owner=dict(type='str'),
                    name=dict(type='str'),
                    pull_request=dict(
                        type='dict', options=dict(branch=dict(required=True, type='str'), comment_control=dict(type='str'), invert_regex=dict(type='bool'))
                    ),
                    push=dict(type='dict', options=dict(invert_regex=dict(type='bool'), branch=dict(type='str'), tag=dict(type='str'))),
                ),
            ),
            pubsub_config=dict(type='dict', options=dict(topic=dict(required=True, type='str'), service_account_email=dict(type='str'))),
            webhook_config=dict(type='dict', options=dict(secret=dict(required=True, type='str', no_log=True))),
            build=dict(
                type='dict',
                options=dict(
                    source=dict(
                        type='dict',
                        options=dict(
                            storage_source=dict(
                                type='dict',
                                options=dict(bucket=dict(required=True, type='str'), object=dict(required=True, type='str'), generation=dict(type='str')),
                            ),
                            repo_source=dict(
                                type='dict',
                                options=dict(
                                    project_id=dict(type='str'),
                                    repo_name=dict(required=True, type='str'),
                                    dir=dict(type='str'),
                                    invert_regex=dict(type='bool'),
                                    substitutions=dict(type='dict'),
                                    branch_name=dict(type='str'),
                                    tag_name=dict(type='str'),
                                    commit_sha=dict(type='str'),
                                ),
                            ),
                        ),
                    ),
                    tags=dict(type='list', elements='str'),
                    images=dict(type='list', elements='str'),
                    substitutions=dict(type='dict'),
                    queue_ttl=dict(type='str'),
                    logs_bucket=dict(type='str'),
                    timeout=dict(default='600s', type='str'),
                    secrets=dict(
                        type='list',
                        elements='dict',
                        no_log=True,
                        options=dict(kms_key_name=dict(required=True, type='str'), secret_env=dict(type='dict', no_log=True))),
                    steps=dict(
                        required=True,
                        type='list',
                        elements='dict',
                        options=dict(
                            name=dict(required=True, type='str'),
                            args=dict(type='list', elements='str'),
                            env=dict(type='list', elements='str'),
                            id=dict(type='str'),
                            entrypoint=dict(type='str'),
                            dir=dict(type='str'),
                            secret_env=dict(type='list', elements='str', no_log=True),
                            timeout=dict(type='str'),
                            timing=dict(type='str'),
                            volumes=dict(
                                type='list', elements='dict', options=dict(name=dict(required=True, type='str'), path=dict(required=True, type='str'))
                            ),
                            wait_for=dict(type='list', elements='str'),
                        ),
                    ),
                    artifacts=dict(
                        type='dict',
                        options=dict(
                            images=dict(type='list', elements='str'),
                            objects=dict(type='dict', options=dict(location=dict(type='str'), paths=dict(type='list', elements='str'))),
                        ),
                    ),
                    options=dict(
                        type='dict',
                        options=dict(
                            source_provenance_hash=dict(type='list', elements='str'),
                            requested_verify_option=dict(type='str'),
                            machine_type=dict(type='str'),
                            disk_size_gb=dict(type='int'),
                            substitution_option=dict(type='str'),
                            dynamic_substitutions=dict(type='bool'),
                            log_streaming_option=dict(type='str'),
                            worker_pool=dict(type='str'),
                            logging=dict(type='str'),
                            env=dict(type='list', elements='str'),
                            secret_env=dict(type='list', elements='str', no_log=True),
                            volumes=dict(type='list', elements='dict', options=dict(name=dict(type='str'), path=dict(type='str'))),
                        ),
                    ),
                ),
            ),
        )
    )

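    # Fall back to the cloud-platform scope when the playbook does not set scopes.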
    if not module.params['scopes']:
        module.params['scopes'] = ['https://www.googleapis.com/auth/cloud-platform']

    state = module.params['state']

    fetch = fetch_resource(module, self_link(module))
    changed = False

    if fetch:
        if state == 'present':
            if is_different(module, fetch):
                update(module, self_link(module))
                fetch = fetch_resource(module, self_link(module))
                changed = True
        else:
            delete(module, self_link(module))
            fetch = {}
            changed = True
    else:
        if state == 'present':
            fetch = create(module, collection(module))
            changed = True
        else:
            fetch = {}

    fetch.update({'changed': changed})

    module.exit_json(**fetch)


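# The three helpers below wrap the Cloud Build REST verbs: create POSTs to the
# trigger collection, update PATCHes a single trigger, and delete removes it.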
def create(module, link):
    auth = GcpSession(module, 'cloudbuild')
    return return_if_object(module, auth.post(link, resource_to_request(module)))


def update(module, link):
    auth = GcpSession(module, 'cloudbuild')
    return return_if_object(module, auth.patch(link, resource_to_request(module)))


def delete(module, link):
    auth = GcpSession(module, 'cloudbuild')
    return return_if_object(module, auth.delete(link))


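# Build the API request body from the module parameters, mapping snake_case
# option names onto the camelCase fields the API expects and dropping values
# that are unset (an explicit False is kept).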
def resource_to_request(module):
    request = {
        u'id': module.params.get('id'),
        u'name': module.params.get('name'),
        u'description': module.params.get('description'),
        u'tags': module.params.get('tags'),
        u'disabled': module.params.get('disabled'),
        u'substitutions': module.params.get('substitutions'),
        u'filename': module.params.get('filename'),
        u'ignoredFiles': module.params.get('ignored_files'),
        u'includedFiles': module.params.get('included_files'),
        u'triggerTemplate': TriggerTriggertemplate(module.params.get('trigger_template', {}), module).to_request(),
        u'github': TriggerGithub(module.params.get('github', {}), module).to_request(),
        u'pubsubConfig': TriggerPubsubconfig(module.params.get('pubsub_config', {}), module).to_request(),
        u'webhookConfig': TriggerWebhookconfig(module.params.get('webhook_config', {}), module).to_request(),
        u'build': TriggerBuild(module.params.get('build', {}), module).to_request(),
    }
    return_vals = {}
    for k, v in request.items():
        if v or v is False:
            return_vals[k] = v

    return return_vals


def fetch_resource(module, link, allow_not_found=True):
    auth = GcpSession(module, 'cloudbuild')
    return return_if_object(module, auth.get(link), allow_not_found)


def self_link(module):
    return "https://cloudbuild.googleapis.com/v1/projects/{project}/triggers/{id}".format(**module.params)


def collection(module):
    return "https://cloudbuild.googleapis.com/v1/projects/{project}/triggers".format(**module.params)


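# Decode and sanity-check an API response: a 404 (when allowed) or 204 yields
# None, while HTTP errors, invalid JSON, or an error payload fail the module.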
def return_if_object(module, response, allow_not_found=False):
    # If not found, return nothing.
    if allow_not_found and response.status_code == 404:
        return None

    # If no content, return nothing.
    if response.status_code == 204:
        return None

    try:
        module.raise_for_status(response)
        result = response.json()
    except getattr(json.decoder, 'JSONDecodeError', ValueError):
        module.fail_json(msg="Invalid JSON response with error: %s" % response.text)

    if navigate_hash(result, ['error', 'errors']):
        module.fail_json(msg=navigate_hash(result, ['error', 'errors']))

    return result


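# Compare the desired configuration with the current resource, looking only at
# fields present on both sides; GcpRequest performs the field-wise comparison.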
def is_different(module, response):
    request = resource_to_request(module)
    response = response_to_hash(module, response)

    # Remove all output-only from response.
    response_vals = {}
    for k, v in response.items():
        if k in request:
            response_vals[k] = v

    request_vals = {}
    for k, v in request.items():
        if k in response:
            request_vals[k] = v

    return GcpRequest(request_vals) != GcpRequest(response_vals)


# Remove unnecessary properties from the response.
# This is for doing comparisons with Ansible's current parameters.
def response_to_hash(module, response):
    return {
        u'id': response.get(u'id'),
        u'name': response.get(u'name'),
        u'description': response.get(u'description'),
        u'tags': response.get(u'tags'),
        u'disabled': response.get(u'disabled'),
        u'createTime': response.get(u'createTime'),
        u'substitutions': response.get(u'substitutions'),
        u'filename': response.get(u'filename'),
        u'ignoredFiles': response.get(u'ignoredFiles'),
        u'includedFiles': response.get(u'includedFiles'),
        u'triggerTemplate': TriggerTriggertemplate(response.get(u'triggerTemplate', {}), module).from_response(),
        u'github': TriggerGithub(response.get(u'github', {}), module).from_response(),
        u'pubsubConfig': TriggerPubsubconfig(response.get(u'pubsubConfig', {}), module).from_response(),
        u'webhookConfig': TriggerWebhookconfig(response.get(u'webhookConfig', {}), module).from_response(),
        u'build': TriggerBuild(response.get(u'build', {}), module).from_response(),
    }


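# Each nested Trigger* helper class below wraps one nested API message:
# to_request() converts the module's snake_case options into the API's
# camelCase fields, from_response() does the reverse, and None values are
# stripped in both directions.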
class TriggerTriggertemplate(object):
    def __init__(self, request, module):
        self.module = module
        if request:
            self.request = request
        else:
            self.request = {}

    def to_request(self):
        return remove_nones_from_dict(
            {
                u'projectId': self.request.get('project_id'),
                u'repoName': self.request.get('repo_name'),
                u'dir': self.request.get('dir'),
                u'invertRegex': self.request.get('invert_regex'),
                u'branchName': self.request.get('branch_name'),
                u'tagName': self.request.get('tag_name'),
                u'commitSha': self.request.get('commit_sha'),
            }
        )

    def from_response(self):
        return remove_nones_from_dict(
            {
                u'projectId': self.request.get(u'projectId'),
                u'repoName': self.request.get(u'repoName'),
                u'dir': self.request.get(u'dir'),
                u'invertRegex': self.request.get(u'invertRegex'),
                u'branchName': self.request.get(u'branchName'),
                u'tagName': self.request.get(u'tagName'),
                u'commitSha': self.request.get(u'commitSha'),
            }
        )


class TriggerGithub(object):
    def __init__(self, request, module):
        self.module = module
        if request:
            self.request = request
        else:
            self.request = {}

    def to_request(self):
        return remove_nones_from_dict(
            {
                u'owner': self.request.get('owner'),
                u'name': self.request.get('name'),
                u'pullRequest': TriggerPullrequest(self.request.get('pull_request', {}), self.module).to_request(),
                u'push': TriggerPush(self.request.get('push', {}), self.module).to_request(),
            }
        )

    def from_response(self):
        return remove_nones_from_dict(
            {
                u'owner': self.request.get(u'owner'),
                u'name': self.request.get(u'name'),
                u'pullRequest': TriggerPullrequest(self.request.get(u'pullRequest', {}), self.module).from_response(),
                u'push': TriggerPush(self.request.get(u'push', {}), self.module).from_response(),
            }
        )


class TriggerPullrequest(object):
    def __init__(self, request, module):
        self.module = module
        if request:
            self.request = request
        else:
            self.request = {}

    def to_request(self):
        return remove_nones_from_dict(
            {u'branch': self.request.get('branch'), u'commentControl': self.request.get('comment_control'), u'invertRegex': self.request.get('invert_regex')}
        )

    def from_response(self):
        return remove_nones_from_dict(
            {u'branch': self.request.get(u'branch'), u'commentControl': self.request.get(u'commentControl'), u'invertRegex': self.request.get(u'invertRegex')}
        )


class TriggerPush(object):
    def __init__(self, request, module):
        self.module = module
        if request:
            self.request = request
        else:
            self.request = {}

    def to_request(self):
        return remove_nones_from_dict(
            {u'invertRegex': self.request.get('invert_regex'), u'branch': self.request.get('branch'), u'tag': self.request.get('tag')}
        )

    def from_response(self):
        return remove_nones_from_dict(
            {u'invertRegex': self.request.get(u'invertRegex'), u'branch': self.request.get(u'branch'), u'tag': self.request.get(u'tag')}
        )


class TriggerPubsubconfig(object):
    def __init__(self, request, module):
        self.module = module
        if request:
            self.request = request
        else:
            self.request = {}

    def to_request(self):
        return remove_nones_from_dict({u'topic': self.request.get('topic'), u'service_account_email': self.request.get('service_account_email')})

    def from_response(self):
        return remove_nones_from_dict({u'topic': self.request.get(u'topic'), u'service_account_email': self.request.get(u'service_account_email')})


class TriggerWebhookconfig(object):
    def __init__(self, request, module):
        self.module = module
        if request:
            self.request = request
        else:
            self.request = {}

    def to_request(self):
        return remove_nones_from_dict({u'secret': self.request.get('secret')})

    def from_response(self):
        return remove_nones_from_dict({u'secret': self.request.get(u'secret')})


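# TriggerBuild assembles the full inline build definition, delegating nested
# objects and list-valued fields to the wrapper classes defined below.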
class TriggerBuild(object):
    def __init__(self, request, module):
        self.module = module
        if request:
            self.request = request
        else:
            self.request = {}

    def to_request(self):
        return remove_nones_from_dict(
            {
                u'source': TriggerSource(self.request.get('source', {}), self.module).to_request(),
                u'tags': self.request.get('tags'),
                u'images': self.request.get('images'),
                u'substitutions': self.request.get('substitutions'),
                u'queueTtl': self.request.get('queue_ttl'),
                u'logsBucket': self.request.get('logs_bucket'),
                u'timeout': self.request.get('timeout'),
                u'secrets': TriggerSecretsArray(self.request.get('secrets', []), self.module).to_request(),
                u'steps': TriggerStepsArray(self.request.get('steps', []), self.module).to_request(),
                u'artifacts': TriggerArtifacts(self.request.get('artifacts', {}), self.module).to_request(),
                u'options': TriggerOptions(self.request.get('options', {}), self.module).to_request(),
            }
        )

    def from_response(self):
        return remove_nones_from_dict(
            {
                u'source': TriggerSource(self.request.get(u'source', {}), self.module).from_response(),
                u'tags': self.request.get(u'tags'),
                u'images': self.request.get(u'images'),
                u'substitutions': self.request.get(u'substitutions'),
                u'queueTtl': self.request.get(u'queueTtl'),
                u'logsBucket': self.request.get(u'logsBucket'),
                u'timeout': self.request.get(u'timeout'),
                u'secrets': TriggerSecretsArray(self.request.get(u'secrets', []), self.module).from_response(),
                u'steps': TriggerStepsArray(self.request.get(u'steps', []), self.module).from_response(),
                u'artifacts': TriggerArtifacts(self.request.get(u'artifacts', {}), self.module).from_response(),
                u'options': TriggerOptions(self.request.get(u'options', {}), self.module).from_response(),
            }
        )


class TriggerSource(object):
    def __init__(self, request, module):
        self.module = module
        if request:
            self.request = request
        else:
            self.request = {}

    def to_request(self):
        return remove_nones_from_dict(
            {
                u'storageSource': TriggerStoragesource(self.request.get('storage_source', {}), self.module).to_request(),
                u'repoSource': TriggerReposource(self.request.get('repo_source', {}), self.module).to_request(),
            }
        )

    def from_response(self):
        return remove_nones_from_dict(
            {
                u'storageSource': TriggerStoragesource(self.request.get(u'storageSource', {}), self.module).from_response(),
                u'repoSource': TriggerReposource(self.request.get(u'repoSource', {}), self.module).from_response(),
            }
        )


class TriggerStoragesource(object):
    def __init__(self, request, module):
        self.module = module
        if request:
            self.request = request
        else:
            self.request = {}

    def to_request(self):
        return remove_nones_from_dict(
            {u'bucket': self.request.get('bucket'), u'object': self.request.get('object'), u'generation': self.request.get('generation')}
        )

    def from_response(self):
        return remove_nones_from_dict(
            {u'bucket': self.request.get(u'bucket'), u'object': self.request.get(u'object'), u'generation': self.request.get(u'generation')}
        )


class TriggerReposource(object):
    def __init__(self, request, module):
        self.module = module
        if request:
            self.request = request
        else:
            self.request = {}

    def to_request(self):
        return remove_nones_from_dict(
            {
                u'projectId': self.request.get('project_id'),
                u'repoName': self.request.get('repo_name'),
                u'dir': self.request.get('dir'),
                u'invertRegex': self.request.get('invert_regex'),
                u'substitutions': self.request.get('substitutions'),
                u'branchName': self.request.get('branch_name'),
                u'tagName': self.request.get('tag_name'),
                u'commitSha': self.request.get('commit_sha'),
            }
        )

    def from_response(self):
        return remove_nones_from_dict(
            {
                u'projectId': self.request.get(u'projectId'),
                u'repoName': self.request.get(u'repoName'),
                u'dir': self.request.get(u'dir'),
                u'invertRegex': self.request.get(u'invertRegex'),
                u'substitutions': self.request.get(u'substitutions'),
                u'branchName': self.request.get(u'branchName'),
                u'tagName': self.request.get(u'tagName'),
                u'commitSha': self.request.get(u'commitSha'),
            }
        )


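# The *Array wrappers apply the same field-name conversion to every element of
# a list-valued option.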
class TriggerSecretsArray(object):
    def __init__(self, request, module):
        self.module = module
        if request:
            self.request = request
        else:
            self.request = []

    def to_request(self):
        items = []
        for item in self.request:
            items.append(self._request_for_item(item))
        return items

    def from_response(self):
        items = []
        for item in self.request:
            items.append(self._response_from_item(item))
        return items

    def _request_for_item(self, item):
        return remove_nones_from_dict({u'kmsKeyName': item.get('kms_key_name'), u'secretEnv': item.get('secret_env')})

    def _response_from_item(self, item):
        return remove_nones_from_dict({u'kmsKeyName': item.get(u'kmsKeyName'), u'secretEnv': item.get(u'secretEnv')})


class TriggerStepsArray(object):
    def __init__(self, request, module):
        self.module = module
        if request:
            self.request = request
        else:
            self.request = []

    def to_request(self):
        items = []
        for item in self.request:
            items.append(self._request_for_item(item))
        return items

    def from_response(self):
        items = []
        for item in self.request:
            items.append(self._response_from_item(item))
        return items

    def _request_for_item(self, item):
        return remove_nones_from_dict(
            {
                u'name': item.get('name'),
                u'args': item.get('args'),
                u'env': item.get('env'),
                u'id': item.get('id'),
                u'entrypoint': item.get('entrypoint'),
                u'dir': item.get('dir'),
                u'secretEnv': item.get('secret_env'),
                u'timeout': item.get('timeout'),
                u'timing': item.get('timing'),
                u'volumes': TriggerVolumesArray(item.get('volumes', []), self.module).to_request(),
                u'waitFor': item.get('wait_for'),
            }
        )

    def _response_from_item(self, item):
        return remove_nones_from_dict(
            {
                u'name': item.get(u'name'),
                u'args': item.get(u'args'),
                u'env': item.get(u'env'),
                u'id': item.get(u'id'),
                u'entrypoint': item.get(u'entrypoint'),
                u'dir': item.get(u'dir'),
                u'secretEnv': item.get(u'secretEnv'),
                u'timeout': item.get(u'timeout'),
                u'timing': item.get(u'timing'),
                u'volumes': TriggerVolumesArray(item.get(u'volumes', []), self.module).from_response(),
                u'waitFor': item.get(u'waitFor'),
            }
        )


class TriggerVolumesArray(object):
    def __init__(self, request, module):
        self.module = module
        if request:
            self.request = request
        else:
            self.request = []

    def to_request(self):
        items = []
        for item in self.request:
            items.append(self._request_for_item(item))
        return items

    def from_response(self):
        items = []
        for item in self.request:
            items.append(self._response_from_item(item))
        return items

    def _request_for_item(self, item):
        return remove_nones_from_dict({u'name': item.get('name'), u'path': item.get('path')})

    def _response_from_item(self, item):
        return remove_nones_from_dict({u'name': item.get(u'name'), u'path': item.get(u'path')})


class TriggerArtifacts(object):
    def __init__(self, request, module):
        self.module = module
        if request:
            self.request = request
        else:
            self.request = {}

    def to_request(self):
        return remove_nones_from_dict(
            {u'images': self.request.get('images'), u'objects': TriggerObjects(self.request.get('objects', {}), self.module).to_request()}
        )

    def from_response(self):
        return remove_nones_from_dict(
            {u'images': self.request.get(u'images'), u'objects': TriggerObjects(self.request.get(u'objects', {}), self.module).from_response()}
        )


class TriggerObjects(object):
    def __init__(self, request, module):
        self.module = module
        if request:
            self.request = request
        else:
            self.request = {}

    def to_request(self):
        return remove_nones_from_dict({u'location': self.request.get('location'), u'paths': self.request.get('paths')})

    def from_response(self):
        return remove_nones_from_dict({u'location': self.request.get(u'location'), u'paths': self.request.get(u'paths')})


class TriggerTiming(object):
    def __init__(self, request, module):
        self.module = module
        if request:
            self.request = request
        else:
            self.request = {}

    def to_request(self):
        return remove_nones_from_dict({u'startTime': self.request.get('start_time'), u'endTime': self.request.get('end_time')})

    def from_response(self):
        return remove_nones_from_dict({u'startTime': self.request.get(u'startTime'), u'endTime': self.request.get(u'endTime')})


class TriggerOptions(object):
    def __init__(self, request, module):
        self.module = module
        if request:
            self.request = request
        else:
            self.request = {}

    def to_request(self):
        return remove_nones_from_dict(
            {
                u'sourceProvenanceHash': self.request.get('source_provenance_hash'),
                u'requestedVerifyOption': self.request.get('requested_verify_option'),
                u'machineType': self.request.get('machine_type'),
                u'diskSizeGb': self.request.get('disk_size_gb'),
                u'substitutionOption': self.request.get('substitution_option'),
                u'dynamicSubstitutions': self.request.get('dynamic_substitutions'),
                u'logStreamingOption': self.request.get('log_streaming_option'),
                u'workerPool': self.request.get('worker_pool'),
                u'logging': self.request.get('logging'),
                u'env': self.request.get('env'),
                u'secretEnv': self.request.get('secret_env'),
                u'volumes': TriggerVolumesArray(self.request.get('volumes', []), self.module).to_request(),
            }
        )

    def from_response(self):
        return remove_nones_from_dict(
            {
                u'sourceProvenanceHash': self.request.get(u'sourceProvenanceHash'),
                u'requestedVerifyOption': self.request.get(u'requestedVerifyOption'),
                u'machineType': self.request.get(u'machineType'),
                u'diskSizeGb': self.request.get(u'diskSizeGb'),
                u'substitutionOption': self.request.get(u'substitutionOption'),
                u'dynamicSubstitutions': self.request.get(u'dynamicSubstitutions'),
                u'logStreamingOption': self.request.get(u'logStreamingOption'),
                u'workerPool': self.request.get(u'workerPool'),
                u'logging': self.request.get(u'logging'),
                u'env': self.request.get(u'env'),
                u'secretEnv': self.request.get(u'secretEnv'),
                u'volumes': TriggerVolumesArray(self.request.get(u'volumes', []), self.module).from_response(),
            }
        )


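# Note: this second TriggerVolumesArray definition is identical to the one
# above; the generator appears to emit it again for the build options' volumes
# field, and rebinding the same name has no behavioral effect.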
class TriggerVolumesArray(object):
    def __init__(self, request, module):
        self.module = module
        if request:
            self.request = request
        else:
            self.request = []

    def to_request(self):
        items = []
        for item in self.request:
            items.append(self._request_for_item(item))
        return items

    def from_response(self):
        items = []
        for item in self.request:
            items.append(self._response_from_item(item))
        return items

    def _request_for_item(self, item):
        return remove_nones_from_dict({u'name': item.get('name'), u'path': item.get('path')})

    def _response_from_item(self, item):
        return remove_nones_from_dict({u'name': item.get(u'name'), u'path': item.get(u'path')})


if __name__ == '__main__':
    main()