
Commit d501017

Update makefile, fix doc_generator tags /#14
1 parent b59cf70 commit d501017

7 files changed (+82, −859 lines)

Makefile

Lines changed: 52 additions & 111 deletions
@@ -12,133 +12,74 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+# Please note that this file was generated from [terraform-google-module-template](https://github.com/terraform-google-modules/terraform-google-module-template).
+# Please make sure to contribute relevant changes upstream!
+
 # Make will use bash instead of sh
 SHELL := /usr/bin/env bash

-# Docker build config variables
-CREDENTIALS_PATH ?= /cft/workdir/credentials.json
-DOCKER_ORG := gcr.io/cloud-foundation-cicd
-DOCKER_TAG_BASE_KITCHEN_TERRAFORM ?= 2.3.0
-DOCKER_REPO_BASE_KITCHEN_TERRAFORM := ${DOCKER_ORG}/cft/kitchen-terraform:${DOCKER_TAG_BASE_KITCHEN_TERRAFORM}
-
-# All is the first target in the file so it will get picked up when you just run 'make' on its own
-all: check generate_docs
-
-.PHONY: check
-check: check_shell check_python check_golang check_terraform check_docker check_base_files test_check_headers check_headers check_trailing_whitespace
-
-
-# The .PHONY directive tells make that this isn't a real target and so
-# the presence of a file named 'check_shell' won't cause this target to stop
-# working
-.PHONY: check_shell
-check_shell:
-	@source test/make.sh && check_shell
-
-.PHONY: check_python
-check_python:
-	@source test/make.sh && check_python
-
-.PHONY: check_golang
-check_golang:
-	@source test/make.sh && golang
-
-.PHONY: check_terraform
-check_terraform:
-	@source test/make.sh && check_terraform
-
-.PHONY: check_docker
-check_docker:
-	@source test/make.sh && docker
-
-.PHONY: check_base_files
-check_base_files:
-	@source test/make.sh && basefiles
-
-.PHONY: check_trailing_whitespace
-check_trailing_whitespace:
-	@source test/make.sh && check_trailing_whitespace
-
-.PHONY: test_check_headers
-test_check_headers:
-	@echo "Testing the validity of the header check"
-	@python test/test_verify_boilerplate.py
+DOCKER_TAG_VERSION_DEVELOPER_TOOLS := 0.1.0
+DOCKER_IMAGE_DEVELOPER_TOOLS := cft/developer-tools
+REGISTRY_URL := gcr.io/cloud-foundation-cicd

-.PHONY: check_headers
-check_headers:
-	@source test/make.sh && check_headers
-
-# Integration tests
-.PHONY: test_integration
-test_integration:
-	./test/ci_integration.sh
-
-.PHONY: generate_docs
-generate_docs:
-	@source test/make.sh && generate_docs
-
-# Versioning
-.PHONY: version
-version:
-	@source helpers/version-repo.sh
-
-# Run docker
+# Enter docker container for local development
 .PHONY: docker_run
 docker_run:
 	docker run --rm -it \
-		-e PROJECT_ID \
 		-e SERVICE_ACCOUNT_JSON \
-		-e GOOGLE_APPLICATION_CREDENTIALS=${CREDENTIALS_PATH} \
-		-v $(CURDIR):/cft/workdir \
-		${DOCKER_REPO_BASE_KITCHEN_TERRAFORM} \
-		/bin/bash -c "source test/ci_integration.sh && setup_environment && exec /bin/bash"
+		-v $(CURDIR):/workspace \
+		$(REGISTRY_URL)/${DOCKER_IMAGE_DEVELOPER_TOOLS}:${DOCKER_TAG_VERSION_DEVELOPER_TOOLS} \
+		/bin/bash

-.PHONY: docker_create
-docker_create:
+# Execute prepare tests within the docker container
+.PHONY: docker_test_prepare
+docker_test_prepare:
 	docker run --rm -it \
-		-e PROJECT_ID \
 		-e SERVICE_ACCOUNT_JSON \
-		-e GOOGLE_APPLICATION_CREDENTIALS=${CREDENTIALS_PATH} \
-		-v $(CURDIR):/cft/workdir \
-		${DOCKER_REPO_BASE_KITCHEN_TERRAFORM} \
-		/bin/bash -c "source test/ci_integration.sh && setup_environment && kitchen create"
-
-.PHONY: docker_converge
-docker_converge:
+		-e TF_VAR_org_id \
+		-e TF_VAR_folder_id \
+		-e TF_VAR_billing_account \
+		-v $(CURDIR):/workspace \
+		$(REGISTRY_URL)/${DOCKER_IMAGE_DEVELOPER_TOOLS}:${DOCKER_TAG_VERSION_DEVELOPER_TOOLS} \
+		/usr/local/bin/execute_with_credentials.sh prepare_environment
+
+# Clean up test environment within the docker container
+.PHONY: docker_test_cleanup
+docker_test_cleanup:
 	docker run --rm -it \
-		-e PROJECT_ID \
 		-e SERVICE_ACCOUNT_JSON \
-		-e GOOGLE_APPLICATION_CREDENTIALS=${CREDENTIALS_PATH} \
-		-v $(CURDIR):/cft/workdir \
-		${DOCKER_REPO_BASE_KITCHEN_TERRAFORM} \
-		/bin/bash -c "source test/ci_integration.sh && setup_environment && kitchen converge"
-
-.PHONY: docker_verify
-docker_verify:
+		-e TF_VAR_org_id \
+		-e TF_VAR_folder_id \
+		-e TF_VAR_billing_account \
+		-v $(CURDIR):/workspace \
+		$(REGISTRY_URL)/${DOCKER_IMAGE_DEVELOPER_TOOLS}:${DOCKER_TAG_VERSION_DEVELOPER_TOOLS} \
+		/usr/local/bin/execute_with_credentials.sh cleanup_environment
+
+# Execute integration tests within the docker container
+.PHONY: docker_test_integration
+docker_test_integration:
 	docker run --rm -it \
-		-e PROJECT_ID \
 		-e SERVICE_ACCOUNT_JSON \
-		-e GOOGLE_APPLICATION_CREDENTIALS=${CREDENTIALS_PATH} \
-		-v $(CURDIR):/cft/workdir \
-		${DOCKER_REPO_BASE_KITCHEN_TERRAFORM} \
-		/bin/bash -c "source test/ci_integration.sh && setup_environment && kitchen verify"
+		-v $(CURDIR):/workspace \
+		$(REGISTRY_URL)/${DOCKER_IMAGE_DEVELOPER_TOOLS}:${DOCKER_TAG_VERSION_DEVELOPER_TOOLS} \
+		/usr/local/bin/test_integration.sh

-.PHONY: docker_destroy
-docker_destroy:
+# Execute lint tests within the docker container
+.PHONY: docker_test_lint
+docker_test_lint:
 	docker run --rm -it \
-		-e PROJECT_ID \
-		-e SERVICE_ACCOUNT_JSON \
-		-e GOOGLE_APPLICATION_CREDENTIALS=${CREDENTIALS_PATH} \
-		-v $(CURDIR):/cft/workdir \
-		${DOCKER_REPO_BASE_KITCHEN_TERRAFORM} \
-		/bin/bash -c "source test/ci_integration.sh && setup_environment && kitchen destroy"
+		-v $(CURDIR):/workspace \
+		$(REGISTRY_URL)/${DOCKER_IMAGE_DEVELOPER_TOOLS}:${DOCKER_TAG_VERSION_DEVELOPER_TOOLS} \
+		/usr/local/bin/test_lint.sh

-.PHONY: test_integration_docker
-test_integration_docker:
+# Generate documentation
+.PHONY: docker_generate_docs
+docker_generate_docs:
 	docker run --rm -it \
-		-e PROJECT_ID \
-		-e SERVICE_ACCOUNT_JSON \
-		-e GOOGLE_APPLICATION_CREDENTIALS=${CREDENTIALS_PATH} \
-		-v $(CURDIR):/cft/workdir \
-		${DOCKER_REPO_BASE_KITCHEN_TERRAFORM} \
-		make test_integration
+		-v $(CURDIR):/workspace \
+		$(REGISTRY_URL)/${DOCKER_IMAGE_DEVELOPER_TOOLS}:${DOCKER_TAG_VERSION_DEVELOPER_TOOLS} \
+		/bin/bash -c 'source /usr/local/bin/task_helper_functions.sh && generate_docs'
+
+# Alias for backwards compatibility
+.PHONY: generate_docs
+generate_docs: docker_generate_docs
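For reference, a typical local run of the new targets might look like the sketch below. Only the make target names and the exported variable names come from the Makefile above; the key path and the org/folder/billing values are placeholders.

```bash
# Placeholder values only -- substitute your own key path and IDs.
export SERVICE_ACCOUNT_JSON=$(< "$HOME/sa-key.json")   # assumed location of a service account key
export TF_VAR_org_id="000000000000"
export TF_VAR_folder_id="000000000000"
export TF_VAR_billing_account="AAAAAA-BBBBBB-CCCCCC"

make docker_test_lint          # lint checks (test_lint.sh) inside the developer-tools container
make docker_test_prepare       # provision test fixtures (execute_with_credentials.sh prepare_environment)
make docker_test_integration   # run the integration suite (test_integration.sh)
make docker_test_cleanup       # tear fixtures back down (execute_with_credentials.sh cleanup_environment)
make generate_docs             # regenerate README docs; alias for docker_generate_docs
```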

README.md

Lines changed: 11 additions & 11 deletions
@@ -33,40 +33,40 @@ Then perform the following commands on the root folder:
 - `terraform apply` to apply the infrastructure build
 - `terraform destroy` to destroy the built infrastructure

-[^]: (autogen_docs_start)
-
+<!-- BEGINNING OF PRE-COMMIT-TERRAFORM DOCS HOOK -->
 ## Inputs

 | Name | Description | Type | Default | Required |
 |------|-------------|:----:|:-----:|:-----:|
 | bucket\_name | The name to apply to the bucket. Will default to a string of <project-id>-scheduled-function-XXXX> with XXXX being random characters. | string | `""` | no |
-| function\_available\_memory\_mb | The amount of memory in megabytes allotted for the function to use. | string | `"256"` | no |
+| function\_available\_memory\_mb | The amount of memory in megabytes allotted for the function to use. | number | `"256"` | no |
 | function\_description | The description of the function. | string | `"Processes log export events provided through a Pub/Sub topic subscription."` | no |
 | function\_entry\_point | The name of a method in the function source which will be invoked when the function is executed. | string | n/a | yes |
-| function\_environment\_variables | A set of key/value environment variable pairs to assign to the function. | map | `<map>` | no |
-| function\_event\_trigger\_failure\_policy\_retry | A toggle to determine if the function should be retried on failure. | string | `"false"` | no |
-| function\_labels | A set of key/value label pairs to assign to the function. | map | `<map>` | no |
+| function\_environment\_variables | A set of key/value environment variable pairs to assign to the function. | map(string) | `<map>` | no |
+| function\_event\_trigger\_failure\_policy\_retry | A toggle to determine if the function should be retried on failure. | bool | `"false"` | no |
+| function\_labels | A set of key/value label pairs to assign to the function. | map(string) | `<map>` | no |
 | function\_name | The name to apply to the function | string | n/a | yes |
 | function\_runtime | The runtime in which the function will be executed. | string | `"nodejs6"` | no |
-| function\_source\_archive\_bucket\_labels | A set of key/value label pairs to assign to the function source archive bucket. | map | `<map>` | no |
+| function\_service\_account\_email | The service account to run the function as. | string | `""` | no |
+| function\_source\_archive\_bucket\_labels | A set of key/value label pairs to assign to the function source archive bucket. | map(string) | `<map>` | no |
 | function\_source\_directory | The contents of this directory will be archived and used as the function source. | string | n/a | yes |
-| function\_timeout\_s | The amount of time in seconds allotted for the execution of the function. | string | `"60"` | no |
+| function\_timeout\_s | The amount of time in seconds allotted for the execution of the function. | number | `"60"` | no |
 | job\_description | Addition text to describet the job | string | `""` | no |
 | job\_name | The name of the scheduled job to run | string | n/a | yes |
 | job\_schedule | The job frequency, in cron syntax | string | `"*/2 * * * *"` | no |
 | message\_data | The data to send in the topic message. | string | `"dGVzdA=="` | no |
-| project\_id | The ID of the project where this VPC will be created | string | n/a | yes |
+| project\_id | The ID of the project where the resources will be created | string | n/a | yes |
 | region | The region in which resources will be applied. | string | n/a | yes |
+| time\_zone | The timezone to use in scheduler | string | `"Etc/UTC"` | no |
 | topic\_name | Name of pubsub topic connecting the scheduled job and the function | string | `"test-topic"` | no |
-| time\_zone | The timezone to be used in scheduler job | string | `"Etc/UTC"` | no |

 ## Outputs

 | Name | Description |
 |------|-------------|
 | name | The name of the job created |

-[^]: (autogen_docs_end)
+<!-- END OF PRE-COMMIT-TERRAFORM DOCS HOOK -->

 ## Requirements
 ### Terraform plugins
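To see how the required inputs documented above fit together, a minimal run from the root folder could look like this sketch; the variable names come from the Inputs table, while every value is hypothetical.

```bash
# Hypothetical values -- only the variable names come from the Inputs table above.
cat > terraform.tfvars <<'EOF'
project_id                = "my-project"
region                    = "us-central1"
job_name                  = "example-scheduled-job"
function_name             = "example-function"
function_entry_point      = "handler"
function_source_directory = "./function_source"
EOF

terraform init
terraform plan
terraform apply
```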

examples/logs-slack-alerts/README.md

Lines changed: 8 additions & 9 deletions
@@ -12,19 +12,18 @@ A good example of exported logging in BigQuery can be found in [Stackdriver Logg
 If not using the default App Engine default service account (PROJECT_ID@appspot.gserviceaccount.com), which has the Editor role on the project, one can configure a service account for the cloud function which has the following IAM role - (roles/bigquery.dataViewer, roles/bigquery.jobUser). Additionally, the BigQuery API (https://bigquery.googleapis.com) needs to be enabled as well.


-[^]: (autogen_docs_start)
-
+<!-- BEGINNING OF PRE-COMMIT-TERRAFORM DOCS HOOK -->
 ## Inputs

 | Name | Description | Type | Default | Required |
 |------|-------------|:----:|:-----:|:-----:|
+| audit\_log\_table | BigQuery Table where logs are sent | string | n/a | yes |
+| dataset\_name | BigQuery Dataset where logs are sent | string | n/a | yes |
+| error\_message\_column | BigQuery Column in audit log table representing logging error | string | n/a | yes |
+| job\_schedule | The cron schedule for triggering the cloud function | string | `"55 * * * *"` | no |
 | project\_id | The project ID to host the network in | string | n/a | yes |
 | region | The region the project is in (App Engine specific) | string | `"us-central1"` | no |
-| slack_webhook | The Slack webhook to send alerts | string | n/a | yes |
-| dataset_name | The BigQuery Dataset where exported logging is sent | string | n/a | yes |
-| audit_log_table | The BigQuery Table within the dataset where logging is sent | string | n/a | yes |
-| time_column | The column within the BQ Table representing logging time | string | n/a | yes |
-| error_message_column | The column within the BQ Table representing logging errors | string | n/a | yes |
-
+| slack\_webhook | Slack webhook to send alerts | string | n/a | yes |
+| time\_column | BigQuery Column in audit log table representing logging time | string | n/a | yes |

-[^]: (autogen_docs_end)
+<!-- END OF PRE-COMMIT-TERRAFORM DOCS HOOK -->
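A similar sketch for this example, again with hypothetical values and only the variable names taken from the table above (the webhook is a secret, so supplying it via `TF_VAR_slack_webhook` avoids writing it to a tfvars file):

```bash
# Hypothetical values -- only the variable names come from the Inputs table above.
export TF_VAR_slack_webhook="https://hooks.slack.com/services/XXX/YYY/ZZZ"   # placeholder webhook

terraform apply \
  -var 'project_id=my-project' \
  -var 'dataset_name=my_dataset' \
  -var 'audit_log_table=my_audit_table' \
  -var 'time_column=log_time' \
  -var 'error_message_column=error_message'
```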

examples/pubsub_scheduled/README.md

Lines changed: 2 additions & 3 deletions
@@ -5,8 +5,7 @@ This example module schedules a job to publish a message to a Pub/Sub topic ever
 Running this module requires an App Engine app in the specified project/region, which is not handled by this example.
 More information is in the [root readme](../../README.md#app-engine).

-[^]: (autogen_docs_start)
-
+<!-- BEGINNING OF PRE-COMMIT-TERRAFORM DOCS HOOK -->
 ## Inputs

 | Name | Description | Type | Default | Required |
@@ -21,4 +20,4 @@ More information is in the [root readme](../../README.md#app-engine).
 | name | The name of the job created |
 | project\_id | The project ID |

-[^]: (autogen_docs_end)
+<!-- END OF PRE-COMMIT-TERRAFORM DOCS HOOK -->
