-
Notifications
You must be signed in to change notification settings - Fork 9
Expand file tree
/
Copy pathconcourse.yml
More file actions
90 lines (90 loc) · 3.25 KB
/
concourse.yml
File metadata and controls
90 lines (90 loc) · 3.25 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
---
# Declare Concourse resources to use as IN/OUT
resources:
  # 'code' resource is a GIT resource used to checkout source-code
  - name: code
    type: git
    source:
      uri: https://gitlab.gologic.ca/gologic-technos/continuous-deployment.git
      branch: master
  # 'storage' resource is S3 resource to store JAR between build and deploy since Concourse do not provide any internal storage tool
  - name: storage
    type: s3
    source:
      # Name of the bucket in S3 account
      bucket: gologic-concourse-demo-bucket
      region_name: ca-central-1
      # filename of the application to read/write in S3 (check S3 resource documentation for parameters)
      versioned_file: demo.jar
      # AWS Credentials are passed in command line on set-pipeline. Concourse can use an external vault system too to store credentials
      access_key_id: ((AWS_ACCESS_KEY_ID))
      secret_access_key: ((AWS_SECRET_ACCESS_KEY))

jobs:
  # First job: Package Application as a JAR and Upload to S3 Bucket for storage
  - name: Build
    plan:
      # Check for new commit (trigger=true), 'code' refers to GIT resource
      - get: code
        trigger: true
      # Package and copy application to output 'build' folder
      - task: compile
        config:
          # Use a docker image with Maven to build application
          platform: linux
          image_resource:
            type: docker-image
            source:
              repository: maven
          # 'code' folder contains checkout code
          inputs:
            - name: code
          # 'build' folder is used to store file for next PUT step after RUN step
          outputs:
            - name: build
          caches:
            - path: code/.m2
          # RUN step allows inline command and FILE step allow to use external task file
          run:
            path: sh
            args:
              - -c
              - |
                mvn -f code/pom.xml package -Dmaven.repo.local=code/.m2
                cp code/target/demo-1.0.jar build/demo.jar
      # Upload build/demo.jar to S3 bucket, 'storage' refers to S3 Resource
      - put: storage
        params:
          file: build/demo.jar
          # NOTE(review): 'name' is not a documented param of the s3 resource put
          # (only 'file', 'acl', 'content_type') — confirm it is intentional
          name: demo.jar
  # Second job: Retrieve application from S3 Bucket and Deploy to AWS Beanstalk
  - name: Deploy
    plan:
      # Download application from S3 bucket, 'storage' refers to S3 Resource
      - get: storage
        # Only if build job has passed
        passed:
          - Build
        trigger: true
      # Deploy to AWS using credentials
      - task: deploy-aws
        params:
          AWS_ACCESS_KEY_ID: ((AWS_ACCESS_KEY_ID))
          AWS_SECRET_ACCESS_KEY: ((AWS_SECRET_ACCESS_KEY))
        config:
          # Use a docker image with AWS eb-cli to init, create environment and deploy application
          platform: linux
          image_resource:
            type: docker-image
            source:
              repository: chriscamicas/awscli-awsebcli
          inputs:
            - name: storage
          # Run a set of AWS eb commands to deploy application to AWS (Check for AWS Beanstalk logs to check for creation and deployment)
          run:
            path: sh
            args:
              - -c
              - |
                eb init continuous-deployment-demo -p "corretto-17" --region "ca-central-1"
                eb create concourse-env --single
                eb deploy concourse-env
                eb status