Compare commits

10 Commits: 49a60993b4 ... a5f17567ee

| Author | SHA1 | Date |
|---|---|---|
|  | a5f17567ee |  |
|  | 02a77c976c |  |
|  | e6b1fb6128 |  |
|  | d8bb9c04a8 |  |
|  | 53e1e741e9 |  |
|  | f72b956ef2 |  |
|  | 3d0085f982 |  |
|  | 585651f460 |  |
|  | 2adc220615 |  |
|  | 1b8797929e |  |
@@ -1,193 +1,193 @@

The first changed file is an ignore list covering Terraform and Python artifacts (its path is not preserved in this extract). The compare view renders the whole file as removed and re-added; the two sides are identical here except that the trailing `#.idea/` entry appears only on the new side. New side:

    .env
    .vscode
    output/


    # Local .terraform directories
    **/.terraform/*

    # .tfstate files
    *.tfstate
    *.tfstate.*

    # Crash log files
    crash.log
    crash.*.log

    # Exclude all .tfvars files, which are likely to contain sensitive data, such as
    # password, private keys, and other secrets. These should not be part of version
    # control as they are data points which are potentially sensitive and subject
    # to change depending on the environment.
    *.tfvars
    *.tfvars.json

    # Ignore override files as they are usually used to override resources locally and so
    # are not checked in
    override.tf
    override.tf.json
    *_override.tf
    *_override.tf.json

    # Include override files you do wish to add to version control using negated pattern
    # !example_override.tf

    # Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan
    # example: *tfplan*

    # Ignore CLI configuration files
    .terraformrc
    terraform.rc


    # Byte-compiled / optimized / DLL files
    __pycache__/
    *.py[cod]
    *$py.class

    # C extensions
    *.so

    # Distribution / packaging
    .Python
    build/
    develop-eggs/
    dist/
    downloads/
    eggs/
    .eggs/
    lib/
    lib64/
    parts/
    sdist/
    var/
    wheels/
    share/python-wheels/
    *.egg-info/
    .installed.cfg
    *.egg
    MANIFEST

    # PyInstaller
    # Usually these files are written by a python script from a template
    # before PyInstaller builds the exe, so as to inject date/other infos into it.
    *.manifest
    *.spec

    # Installer logs
    pip-log.txt
    pip-delete-this-directory.txt

    # Unit test / coverage reports
    htmlcov/
    .tox/
    .nox/
    .coverage
    .coverage.*
    .cache
    nosetests.xml
    coverage.xml
    *.cover
    *.py,cover
    .hypothesis/
    .pytest_cache/
    cover/

    # Translations
    *.mo
    *.pot

    # Django stuff:
    *.log
    local_settings.py
    db.sqlite3
    db.sqlite3-journal

    # Flask stuff:
    instance/
    .webassets-cache

    # Scrapy stuff:
    .scrapy

    # Sphinx documentation
    docs/_build/

    # PyBuilder
    .pybuilder/
    target/

    # Jupyter Notebook
    .ipynb_checkpoints

    # IPython
    profile_default/
    ipython_config.py

    # pyenv
    # For a library or package, you might want to ignore these files since the code is
    # intended to run in multiple environments; otherwise, check them in:
    # .python-version

    # pipenv
    # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
    # However, in case of collaboration, if having platform-specific dependencies or dependencies
    # having no cross-platform support, pipenv may install dependencies that don't work, or not
    # install all needed dependencies.
    #Pipfile.lock

    # poetry
    # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
    # This is especially recommended for binary packages to ensure reproducibility, and is more
    # commonly ignored for libraries.
    # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
    #poetry.lock

    # PEP 582; used by e.g. github.com/David-OConnor/pyflow
    __pypackages__/

    # Celery stuff
    celerybeat-schedule
    celerybeat.pid

    # SageMath parsed files
    *.sage.py

    # Environments
    .env
    .venv
    env/
    venv/
    ENV/
    env.bak/
    venv.bak/

    # Spyder project settings
    .spyderproject
    .spyproject

    # Rope project settings
    .ropeproject

    # mkdocs documentation
    /site

    # mypy
    .mypy_cache/
    .dmypy.json
    dmypy.json

    # Pyre type checker
    .pyre/

    # pytype static type analyzer
    .pytype/

    # Cython debug symbols
    cython_debug/

    # PyCharm
    # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
    # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
    # and can be added to the global gitignore or merged into this file. For a more nuclear
    # option (not recommended) you can uncomment the following to ignore the entire idea folder.
    #.idea/
JENKINSFILE — 546 changed lines (281 removed, 265 added)
@@ -1,281 +1,265 @@

Old version (removed side of the diff):

    def params
    def String[] envs
    def String[] folders
    def rootdirlist = ''
    def envlist = ''
    environments=['EMEA_PROD', 'EMEA_PREPROD', 'NA_PROD', 'NA_PREPROD', 'CN_PROD', 'CN_PREPROD']


    @NonCPS // has to be NonCPS or the build breaks on the call to .each
    def export_config_all(list) {
        list.each { env ->
            sh "python3 export.py ${env}"
        }
    }
    pipeline {
        options {
            ansiColor('xterm')
        }
        //label libraryBuild is available in CN JAWS and ROW JAWS, therefore this one was used; no additional intents
        agent {label 'jaws-slaves'}
        parameters {
            //string(name: 'ENVIRONMENT', defaultValue: 'ALL', description: 'Possible values EMEA_PROD, EMEA_PREPROD, NA_PROD, NA_PREPROD, CN_PROD, CN_PREPROD')
            choice(name: 'ENVIRONMENT', choices: ['ALL', 'EMEA_PROD', 'EMEA_PREPROD', 'NA_PROD', 'NA_PREPROD', 'CN_PROD', 'CN_PREPROD'])
            string(name: 'BRANCH', defaultValue: 'master', description: 'Enter the branch you want to commit')
            string(name: 'CONFIGS', defaultValue: '["slo"]', description: 'enter the folders, you want to deploy e.g.["CD_ABC","slo","calculated_service_metrics"]')
            booleanParam(name: 'DRY_RUN_ONLY', defaultValue: true, description: 'Execute only a terraform init and plan, no config gets deployed')
        }

        //here comes the trigger according to crontabs - jenkins is in UTC
        /*triggers {
            //every 1st of every month at 00:00
            //cron('0 0 1 * *')

            //every day at 08:00
            //cron('0 8 * * *')

            //every monday at 08:00
            //cron('0 8 * * MON')


        }*/
        environment {
            //ProxySettings
            AUTH = credentials('proxy')
            proxy_user = "${AUTH_USR}"
            proxy_pw = "${AUTH_PSW}"
            //http_proxy="http://${proxy_user}:${proxy_pw}@proxy.muc:8080"
            //https_proxy="http://${proxy_user}:${proxy_pw}@proxy.muc:8080"
            //no_proxy="localhost,127.0.0.1,.muc,.bmwgroup.net"
            //HTTP_PROXY="${http_proxy}"
            //HTTPS_PROXY="${https_proxy}"
            //NO_PROXY="${no_proxy}"

            // EUPROD_TOKEN_VAR = credentials('EUPROD_TOKEN_VAR')
            // EUPREPROD_TOKEN_VAR = credentials('EUPREPROD_TOKEN_VAR')
            // NAPROD_TOKEN_VAR = credentials('NAPROD_TOKEN_VAR')
            // NAPREPROD_TOKEN_VAR = credentials('NAPREPROD_TOKEN_VAR')
            // CNPROD_TOKEN_VAR = credentials('CNPROD_TOKEN_VAR')
            // CNPREPROD_TOKEN_VAR = credentials('CNPREPROD_TOKEN_VAR')
            AWS_ACCESS_KEY_ID = credentials('AWS_TERRAFORM_KEY')
            AWS_SECRET_ACCESS_KEY = credentials('AWS_SECRET_ACCESS_KEY')
            AWS_S3_BUCKET="coco-dynatrace-tfstate"
            AWS_S3_REGION="eu-central-1"

            //TERRAFORM_RESOURCES="dynatrace_management_zone"

            //EMEA PROD
            TF_VAR_EMEA_PROD_ENV_URL="https://xxu26128.live.dynatrace.com"
            TF_VAR_EMEA_PROD_API_TOKEN=credentials('EUPROD_TOKEN_VAR')

            //EMEA PREPROD
            TF_VAR_EMEA_PREPROD_ENV_URL="https://qqk70169.live.dynatrace.com"
            TF_VAR_EMEA_PREPROD_API_TOKEN=credentials('EUPREPROD_TOKEN_VAR')

            //NA PROD
            TF_VAR_NA_PROD_ENV_URL="https://wgv50241.live.dynatrace.com/"
            TF_VAR_NA_PROD_API_TOKEN=credentials('NAPROD_TOKEN_VAR')

            //NA PREPROD
            TF_VAR_NA_PREPROD_ENV_URL="https://onb44935.live.dynatrace.com/"
            TF_VAR_NA_PREPROD_API_TOKEN=credentials('NAPREPROD_TOKEN_VAR')

            //CN PROD
            TF_VAR_CN_PROD_ENV_URL="https://dyna-synth-cn.bmwgroup.com.cn/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b"
            TF_VAR_CN_PROD_API_TOKEN=credentials('CNPROD_TOKEN_VAR')

            //CN PREPROD
            TF_VAR_CN_PREPROD_ENV_URL="https://dyna-synth-cn.bmwgroup.com.cn/e/ab88c03b-b7fc-45f0-9115-9e9ecc0ced35"
            TF_VAR_CN_PREPROD_API_TOKEN=credentials('CNPREPROD_TOKEN_VAR')

            //TERRAFORM
            TF_VAR_TERRAFORM_ENV_URL="https://rsb41808.live.dynatrace.com"
            TF_VAR_TERRAFORM_API_TOKEN=credentials('TERRAFORM_TOKEN_VAR')

        }

        stages {

            stage('Checkout branch') {
                steps {
                    git branch: '${branch}', credentialsId: 'jaws_dynatrace_bitbuket_user', url: 'https://atc.bmwgroup.net/bitbucket/scm/opapm/coco_terraform_config.git'
                }
            }
            /*
            stage('install required python packages') {
                steps {
                    sh '''
                    pip3 install --user -r requirements.txt
                    '''
                }
            }
            */
            stage('Install Terraform') {
                steps {
                    sh '''
                    cd /tmp
                    curl https://releases.hashicorp.com/terraform/1.1.6/terraform_1.1.6_linux_amd64.zip > terraform.zip
                    unzip terraform.zip
                    sudo mv /tmp/terraform /usr/local/bin
                    sudo chmod +x /usr/local/bin/terraform
                    terraform --version
                    cd ~
                    '''
                }
            }

            //Not required for using terraform, but for export config from dynatrace
            /*
            stage('Download & Make exporter executable') {
                steps {
                    sh '''
                    cd /tmp
                    curl -L https://github.com/dynatrace-oss/terraform-provider-dynatrace/releases/download/v1.10.0/terraform-provider-dynatrace_1.10.0_linux_amd64.zip > terraform_provider.zip
                    unzip terraform_provider.zip
                    sudo mv /tmp/terraform-provider-dynatrace* /usr/local/bin/terraform-provider-dynatrace
                    sudo chmod +x /usr/local/bin/terraform-provider-dynatrace
                    '''

                    sh 'python3 --version'
                }

            }
            */
            stage('Dry Run') {
                steps {
                    script {
                        ArrayList folderArr = evaluate("${CONFIGS}")
                        if(ENVIRONMENT != 'ALL'){
                            if(folderArr != null){
                                for (folder in folderArr)
                                {
                                    /*
                                    sh """
                                    cd $WORKSPACE
                                    cd $WORKSPACE/$environment/$folder
                                    """
                                    */
                                    init_status = sh(returnStatus: true, script: "cd $WORKSPACE/$environment/$folder && set +e; terraform init")
                                    if (init_status == 0) {
                                        println "Starting terraform plan..."
                                        plan_status = sh(returnStatus: true, script: "cd $WORKSPACE/$environment/$folder && set +e; terraform plan -out=tfplan -detailed-exitcode")
                                        if (plan_status != 0) {
                                            println "Failed to plan $folder for $environment"
                                            sh("curl -H 'Content-Type: application/json' -d '{\"text\":\"Failed to plan $folder for ${environment}\"}' https://bmwgroup.webhook.office.com/webhookb2/483edc00-c925-4672-8088-8299a0139fca@ce849bab-cc1c-465b-b62e-18f07c9ac198/JenkinsCI/1cf354c0e2e54e2baaf2f20b051b1dda/af36b177-c3fb-4707-a2d4-c75dbce454a2")
                                            currentBuild.result = 'UNSTABLE'
                                        }
                                    }
                                    else {
                                        println "Failed to init $folder for $environment"
                                        sh("curl -H 'Content-Type: application/json' -d '{\"text\":\"Failed to init $folder for ${environment}\"}' https://bmwgroup.webhook.office.com/webhookb2/483edc00-c925-4672-8088-8299a0139fca@ce849bab-cc1c-465b-b62e-18f07c9ac198/JenkinsCI/1cf354c0e2e54e2baaf2f20b051b1dda/af36b177-c3fb-4707-a2d4-c75dbce454a2")
                                        currentBuild.result = 'UNSTABLE'
                                    }
                                }
                            }
                        }
                        else {
                            for (environment in environments){
                                if(folderArr != null){
                                    for (folder in folderArr)
                                    {
                                        sh """
                                        cd $WORKSPACE
                                        cd $environment/$folder
                                        """
                                        status = sh(returnStatus: true, script: "set +e; terraform init")
                                        if (status == "0") {
                                            status = sh(returnStatus: true, script: "set +e; terraform plan -out=tfplan -detailed-exitcode")
                                            if (status != "0") {currentBuild.result = 'UNSTABLE'}
                                        }
                                        else {
                                            currentBuild.result = 'UNSTABLE'
                                        }
                                    }

                                }
                            }
                        }
                    }
                }
            }

            stage('Deploy Configuration') {
                when { expression { return !DRY_RUN_ONLY.toBoolean() }}
                steps {
                    script {
                        ArrayList folderArr = evaluate("${CONFIGS}")
                        if(ENVIRONMENT != 'ALL'){
                            if(folderArr != null){
                                for (folder in folderArr)
                                {
                                    sh """
                                    cd $WORKSPACE
                                    cd $ENVIRONMENT/$folder
                                    set +e; terraform apply tfplan
                                    """
                                }
                            }
                        }
                        else {
                            for (environment in environments){
                                if(folderArr != null){
                                    for (folder in folderArr)
                                    {
                                        sh """
                                        cd $WORKSPACE
                                        cd $environment/$folder
                                        terraform apply tfplan
                                        """

                                    }

                                }
                            }
                        }
                    }
                }

            }
            /*
            stage('Execute Export Script TERRAFORM') {
                steps {

                    export_config_all(environments)
                    //sh 'python3 export.py EMEA_PROD'
                    //sh 'python3 export.py TERRAFORM'

                    //Only required once CN is not reachable from EMEA
                    //loopEnvironments(environments)
                }


            }
            */

            /*
            stage('Send report') {
                steps {
                    script {
                        try {
                            emailext subject: env.JOB_NAME,
                            body: 'Please find the output of your reports attached',
                            to: 'rene.forstner@nttdata.com',
                            replyTo: 'coco-apm@bmw.de',
                            attachmentsPattern: '*.csv'

                        }
                        catch ( mailExc ){
                            echo "Sending Email Failed: ${mailExc}"
                        }
                    }
                }
            }

            */
        }

        post {
            always {
                cleanWs()
            }
        }
    }
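One detail of the removed Dry Run stage is worth spelling out, as a minimal illustrative sketch rather than project code: in the ALL branch above, `sh(returnStatus: true, ...)` returns the exit code as an Integer, but the old code compares it against the String `"0"`. In Groovy that comparison does not hold, so the plan step was effectively skipped and the ALL case fell into the UNSTABLE branch; the new version shown below compares against the integer `0` instead.

```groovy
// Illustrative sketch only (not part of the repository), assuming plain Groovy
// semantics as used by Jenkins Pipeline: sh(returnStatus: true, ...) yields an
// Integer exit code, never a String.
def status = 0            // what a successful `terraform init` would return
assert !(status == "0")   // the old check: Integer vs String, does not match
assert  (status == 0)     // the check the newer revision uses
```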
New version (added side of the diff):

    def params
    def String[] envs
    def String[] folders
    def rootdirlist = ''
    def envlist = ''
    environments=['EMEA_PROD', 'EMEA_PREPROD', 'NA_PROD', 'NA_PREPROD', 'CN_PROD', 'CN_PREPROD']


    @NonCPS // has to be NonCPS or the build breaks on the call to .each
    def export_config_all(list) {
        list.each { env ->
            sh "python3 export.py ${env}"
        }
    }
    pipeline {
        options {
            ansiColor('xterm')
        }
        //label libraryBuild is available in CN JAWS and ROW JAWS, therefore this one was used; no additional intents
        agent {label 'jaws-slaves'}
        parameters {
            //string(name: 'ENVIRONMENT', defaultValue: 'ALL', description: 'Possible values EMEA_PROD, EMEA_PREPROD, NA_PROD, NA_PREPROD, CN_PROD, CN_PREPROD')
            choice(name: 'ENVIRONMENT', choices: ['ALL', 'EMEA_PROD', 'EMEA_PREPROD', 'NA_PROD', 'NA_PREPROD', 'CN_PROD', 'CN_PREPROD'])
            string(name: 'BRANCH', defaultValue: 'master', description: 'Enter the branch you want to commit')
            string(name: 'CONFIGS', defaultValue: '["slo"]', description: 'enter the folders, you want to deploy e.g.["CD_ABC","slo","calculated_service_metrics"]')
            booleanParam(name: 'DRY_RUN_ONLY', defaultValue: true, description: 'Execute only a terraform init and plan, no config gets deployed')
        }

        //here comes the trigger according to crontabs - jenkins is in UTC
        /*triggers {
            //every 1st of every month at 00:00
            //cron('0 0 1 * *')

            //every day at 08:00
            //cron('0 8 * * *')

            //every monday at 08:00
            //cron('0 8 * * MON')


        }*/
        environment {
            //ProxySettings
            AUTH = credentials('proxy')
            proxy_user = '${AUTH_USR}'
            proxy_pw = '${AUTH_PSW}'
            //http_proxy="http://${proxy_user}:${proxy_pw}@proxy.muc:8080"
            //https_proxy="http://${proxy_user}:${proxy_pw}@proxy.muc:8080"
            //no_proxy="localhost,127.0.0.1,.muc,.bmwgroup.net"
            //HTTP_PROXY="${http_proxy}"
            //HTTPS_PROXY="${https_proxy}"
            //NO_PROXY="${no_proxy}"

            // EUPROD_TOKEN_VAR = credentials('EUPROD_TOKEN_VAR')
            // EUPREPROD_TOKEN_VAR = credentials('EUPREPROD_TOKEN_VAR')
            // NAPROD_TOKEN_VAR = credentials('NAPROD_TOKEN_VAR')
            // NAPREPROD_TOKEN_VAR = credentials('NAPREPROD_TOKEN_VAR')
            // CNPROD_TOKEN_VAR = credentials('CNPROD_TOKEN_VAR')
            // CNPREPROD_TOKEN_VAR = credentials('CNPREPROD_TOKEN_VAR')
            AWS_ACCESS_KEY_ID = credentials('AWS_TERRAFORM_KEY')
            AWS_SECRET_ACCESS_KEY = credentials('AWS_SECRET_ACCESS_KEY')
            AWS_S3_BUCKET="coco-dynatrace-tfstate"
            AWS_S3_REGION="eu-central-1"

            //TERRAFORM_RESOURCES="dynatrace_management_zone"

            //EMEA PROD
            TF_VAR_EMEA_PROD_ENV_URL="https://xxu26128.live.dynatrace.com"
            TF_VAR_EMEA_PROD_API_TOKEN=credentials('EUPROD_TOKEN_VAR')

            //EMEA PREPROD
            TF_VAR_EMEA_PREPROD_ENV_URL="https://qqk70169.live.dynatrace.com"
            TF_VAR_EMEA_PREPROD_API_TOKEN=credentials('EUPREPROD_TOKEN_VAR')

            //NA PROD
            TF_VAR_NA_PROD_ENV_URL="https://wgv50241.live.dynatrace.com/"
            TF_VAR_NA_PROD_API_TOKEN=credentials('NAPROD_TOKEN_VAR')

            //NA PREPROD
            TF_VAR_NA_PREPROD_ENV_URL="https://onb44935.live.dynatrace.com/"
            TF_VAR_NA_PREPROD_API_TOKEN=credentials('NAPREPROD_TOKEN_VAR')

            //CN PROD
            TF_VAR_CN_PROD_ENV_URL="https://dynatrace-cn-int.bmwgroup.com/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b"
            TF_VAR_CN_PROD_API_TOKEN=credentials('CNPROD_TOKEN_VAR')

            //CN PREPROD
            TF_VAR_CN_PREPROD_ENV_URL="https://dynatrace-cn-int.bmwgroup.com/e/ab88c03b-b7fc-45f0-9115-9e9ecc0ced35"
            TF_VAR_CN_PREPROD_API_TOKEN=credentials('CNPREPROD_TOKEN_VAR')

            //TERRAFORM
            TF_VAR_TERRAFORM_ENV_URL="https://rsb41808.live.dynatrace.com"
            TF_VAR_TERRAFORM_API_TOKEN=credentials('TERRAFORM_TOKEN_VAR')

        }

        stages {

            stage('Checkout branch') {
                steps {
                    git branch: '${branch}', credentialsId: 'jaws_dynatrace_bitbuket_user', url: 'https://atc.bmwgroup.net/bitbucket/scm/opapm/coco_terraform_config.git'
                }
            }

            stage('Install Terraform') {
                steps {
                    sh '''
                    cd /tmp
                    curl https://releases.hashicorp.com/terraform/1.1.6/terraform_1.1.6_linux_amd64.zip > terraform.zip
                    yes | sudo unzip terraform.zip
                    yes | sudo mv -f /tmp/terraform /usr/local/bin
                    sudo chmod +x /usr/local/bin/terraform
                    terraform --version
                    cd ~
                    '''
                }
            }

            //Not required for using terraform, but for export config from dynatrace
            /*
            stage('Download & Make exporter executable') {
                steps {
                    sh '''
                    cd /tmp
                    curl -L https://github.com/dynatrace-oss/terraform-provider-dynatrace/releases/download/v1.10.0/terraform-provider-dynatrace_1.10.0_linux_amd64.zip > terraform_provider.zip
                    unzip terraform_provider.zip
                    sudo mv /tmp/terraform-provider-dynatrace* /usr/local/bin/terraform-provider-dynatrace
                    sudo chmod +x /usr/local/bin/terraform-provider-dynatrace
                    '''

                    sh 'python3 --version'
                }

            }
            */
            stage('Dry Run') {
                steps {
                    script {
                        ArrayList folderArr = evaluate("${CONFIGS}")
                        if(ENVIRONMENT != 'ALL'){
                            if(folderArr != null){
                                for (folder in folderArr)
                                {
                                    init_status = sh(returnStatus: true, script: "cd $WORKSPACE/$environment/$folder && set +e; terraform init")
                                    if (init_status == 0) {
                                        println "Starting terraform plan..."
                                        plan_status = sh(returnStatus: true, script: "cd $WORKSPACE/$environment/$folder && set +e; terraform plan -out=tfplan -detailed-exitcode")
                                        if (plan_status == 1) {
                                            println "Failed to plan $folder for $environment"
                                            sh("curl -H 'Content-Type: application/json' -d '{\"text\":\"Failed to plan $folder for ${environment}\"}' https://bmwgroup.webhook.office.com/webhookb2/483edc00-c925-4672-8088-8299a0139fca@ce849bab-cc1c-465b-b62e-18f07c9ac198/JenkinsCI/1cf354c0e2e54e2baaf2f20b051b1dda/af36b177-c3fb-4707-a2d4-c75dbce454a2")
                                            currentBuild.result = 'UNSTABLE'
                                        }
                                    }
                                    else {
                                        println "Failed to init $folder for $environment"
                                        sh("curl -H 'Content-Type: application/json' -d '{\"text\":\"Failed to init $folder for ${environment}\"}' https://bmwgroup.webhook.office.com/webhookb2/483edc00-c925-4672-8088-8299a0139fca@ce849bab-cc1c-465b-b62e-18f07c9ac198/JenkinsCI/1cf354c0e2e54e2baaf2f20b051b1dda/af36b177-c3fb-4707-a2d4-c75dbce454a2")
                                        currentBuild.result = 'UNSTABLE'
                                    }
                                }
                            }
                        }
                        else {
                            for (environment in environments){
                                if(folderArr != null){
                                    for (folder in folderArr)
                                    {
                                        init_status = sh(returnStatus: true, script: "cd $WORKSPACE/$environment/$folder && set +e; terraform init")
                                        if (init_status == 0) {
                                            println "Starting terraform plan..."
                                            plan_status = sh(returnStatus: true, script: "cd $WORKSPACE/$environment/$folder && set +e; terraform plan -out=tfplan -detailed-exitcode")
                                            println "Statuscode: $plan_status"
                                            if (plan_status == 1) {
                                                println "Failed to plan $folder for $environment"
                                                sh("curl -H 'Content-Type: application/json' -d '{\"text\":\"Failed to plan $folder for ${environment}\"}' https://bmwgroup.webhook.office.com/webhookb2/483edc00-c925-4672-8088-8299a0139fca@ce849bab-cc1c-465b-b62e-18f07c9ac198/JenkinsCI/1cf354c0e2e54e2baaf2f20b051b1dda/af36b177-c3fb-4707-a2d4-c75dbce454a2")
                                                currentBuild.result = 'UNSTABLE'
                                            }
                                        }
                                        else {
                                            println "Failed to init $folder for $environment"
                                            sh("curl -H 'Content-Type: application/json' -d '{\"text\":\"Failed to init $folder for ${environment}\"}' https://bmwgroup.webhook.office.com/webhookb2/483edc00-c925-4672-8088-8299a0139fca@ce849bab-cc1c-465b-b62e-18f07c9ac198/JenkinsCI/1cf354c0e2e54e2baaf2f20b051b1dda/af36b177-c3fb-4707-a2d4-c75dbce454a2")
                                            currentBuild.result = 'UNSTABLE'
                                        }
                                    }

                                }
                            }
                        }
                    }
                }
            }

            stage('Deploy Configuration') {
                when { expression { return !DRY_RUN_ONLY.toBoolean() }}
                steps {
                    script {
                        ArrayList folderArr = evaluate("${CONFIGS}")
                        if(ENVIRONMENT != 'ALL')
                        {
                            if(folderArr != null)
                            {
                                for (folder in folderArr)
                                {
                                    apply_status = sh(returnStatus: true, script: "cd $WORKSPACE/$environment/$folder && set +e; terraform apply tfplan")
                                    if (apply_status != 0)
                                    {
                                        println "Failed to apply $folder for $environment"
                                        sh("curl -H 'Content-Type: application/json' -d '{\"text\":\"Failed to apply $folder for ${environment}\"}' https://bmwgroup.webhook.office.com/webhookb2/483edc00-c925-4672-8088-8299a0139fca@ce849bab-cc1c-465b-b62e-18f07c9ac198/JenkinsCI/1cf354c0e2e54e2baaf2f20b051b1dda/af36b177-c3fb-4707-a2d4-c75dbce454a2")
                                        currentBuild.result = 'UNSTABLE'
                                    }
                                }
                            }
                        }

                        else {
                            for (environment in environments)
                            {
                                if(folderArr != null)
                                {
                                    for (folder in folderArr)
                                    {
                                        // apply_status = sh(returnStatus: true, script: "cd $WORKSPACE/$environment/$folder && set +e; terraform plan -out=tfplan -detailed-exitcode")
                                        apply_status = sh(returnStatus: true, script: "cd $WORKSPACE/$environment/$folder && set +e; terraform apply tfplan")
                                        if (apply_status != 0)
                                        {
                                            println "Failed to apply $folder for $environment"
                                            sh("curl -H 'Content-Type: application/json' -d '{\"text\":\"Failed to apply $folder for ${environment}\"}' https://bmwgroup.webhook.office.com/webhookb2/483edc00-c925-4672-8088-8299a0139fca@ce849bab-cc1c-465b-b62e-18f07c9ac198/JenkinsCI/1cf354c0e2e54e2baaf2f20b051b1dda/af36b177-c3fb-4707-a2d4-c75dbce454a2")
                                            currentBuild.result = 'UNSTABLE'
                                        }
                                    }
                                }
                            }
                        }
                    }
                }

            }

            /*
            stage('Send report') {
                steps {
                    script {
                        try {
                            emailext subject: env.JOB_NAME,
                            body: 'Please find the output of your reports attached',
                            to: 'bmw.dynatrace@nttdata.com',
                            replyTo: 'coco-apm@bmw.de',
                            attachmentsPattern: '*.csv'

                        }
                        catch ( mailExc ){
                            echo "Sending Email Failed: ${mailExc}"
                        }
                    }
                }
            }

            */
        }

        post {
            always {
                cleanWs()
            }
        }
    }
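The exit-code handling in the new Dry Run stage deserves a brief note, offered here as a hedged sketch rather than project code: with `-detailed-exitcode`, `terraform plan` exits 0 when there are no changes, 1 on error, and 2 when the plan succeeds but changes are pending. The old revision treated any non-zero code as a failure, so a successful plan with pending changes marked the build UNSTABLE; the new revision treats only exit code 1 as a failure.

```groovy
// Minimal sketch (not part of the repository): interpreting
// `terraform plan -detailed-exitcode` the way the new Dry Run stage does.
// Exit codes: 0 = no changes, 1 = error, 2 = plan succeeded with pending changes.
def interpretPlanExit(int code) {
    switch (code) {
        case 0:  return 'no changes'
        case 2:  return 'changes pending (plan still succeeded)'
        default: return 'plan failed'
    }
}

assert interpretPlanExit(2) == 'changes pending (plan still succeeded)'
assert interpretPlanExit(1) == 'plan failed'
```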