Compare commits

..

No commits in common. "master" and "staging" have entirely different histories.

21 changed files with 49 additions and 1041 deletions

3
.gitignore vendored
View File

@ -1,3 +0,0 @@
output/
.env
.terraform.lock.hcl

38
.vscode/launch.json vendored
View File

@ -1,38 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Python: Aktuelle Datei",
"type": "python",
"request": "launch",
"program": "${file}",
"console": "integratedTerminal",
"args": [
"TERRAFORM"
],
},
{
"name": "Python: Aktuelle Datei EMEA_PROD",
"type": "python",
"request": "launch",
"program": "${file}",
"console": "integratedTerminal",
"args": [
"EMEA_PROD"
],
},
{
"name": "Python: Aktuelle Datei EMEA_PREPROD",
"type": "python",
"request": "launch",
"program": "${file}",
"console": "integratedTerminal",
"args": [
"EMEA_PREPROD"
],
}
]
}

149
Jenkinsfile vendored
View File

@ -1,159 +1,46 @@
//not required right now as CN is reachable from EMEA as well
environments=['EMEA_PROD', 'EMEA_PREPROD', 'NA_PROD', 'NA_PREPROD', 'CN_PROD', 'CN_PREPROD']
@NonCPS // has to be NonCPS or the build breaks on the call to .each
// Runs the configuration export once per Dynatrace environment name in `list`
// by shelling out to export.py with the environment as its only argument.
// NOTE(review): `sh` is a Jenkins pipeline step; calling pipeline steps from
// @NonCPS code is generally unsupported — confirm this actually runs on the agent.
def export_config_all(list) {
list.each { env ->
sh "python3 export.py ${env}"
}
}
pipeline {
options {
ansiColor('xterm')
}
//label libraryBuild is available in CN JAWS and ROW JAWS, therefore this one was used; no additional intents
agent {label 'jaws-slaves'}
agent{label 'libraryBuild'}
//here comes the trigger according to crontabs - jenkins is in UTC
//here comes the trigger according to crontabs
triggers {
//every 1st of every month at 00:00
cron('0 0 1 * *')
//every day at 08:00
//cron('0 8 * * *')
//every monday at 08:00
//cron('0 8 * * MON')
}
environment {
//ProxySettings
AUTH = credentials('proxy')
proxy_user = "${AUTH_USR}"
proxy_pw = "${AUTH_PSW}"
//http_proxy="http://${proxy_user}:${proxy_pw}@proxy.muc:8080"
//https_proxy="http://${proxy_user}:${proxy_pw}@proxy.muc:8080"
//no_proxy="localhost,127.0.0.1,.muc,.bmwgroup.net"
//HTTP_PROXY="${http_proxy}"
//HTTPS_PROXY="${https_proxy}"
//NO_PROXY="${no_proxy}"
http_proxy="http://${proxy_user}:${proxy_pw}@proxy.muc:8080"
https_proxy="http://${proxy_user}:${proxy_pw}@proxy.muc:8080"
no_proxy="localhost,127.0.0.1,.muc,.bmwgroup.net"
HTTP_PROXY="${http_proxy}"
HTTPS_PROXY="${https_proxy}"
NO_PROXY="${no_proxy}"
// EUPROD_TOKEN_VAR = credentials('EUPROD_TOKEN_VAR')
// EUPREPROD_TOKEN_VAR = credentials('EUPREPROD_TOKEN_VAR')
// NAPROD_TOKEN_VAR = credentials('NAPROD_TOKEN_VAR')
// NAPREPROD_TOKEN_VAR = credentials('NAPREPROD_TOKEN_VAR')
// CNPROD_TOKEN_VAR = credentials('CNPROD_TOKEN_VAR')
// CNPREPROD_TOKEN_VAR = credentials('CNPREPROD_TOKEN_VAR')
AWS_ACCESS_KEY_ID = credentials('AWS_TERRAFORM_KEY')
AWS_SECRET_ACCESS_KEY = credentials('AWS_SECRET_ACCESS_KEY')
AWS_S3_BUCKET="coco-dynatrace-tfstate"
AWS_S3_REGION="eu-central-1"
//TERRAFORM_RESOURCES="dynatrace_management_zone"
//EMEA PROD
TF_VAR_EMEA_PROD_ENV_URL="https://xxu26128.live.dynatrace.com"
TF_VAR_EMEA_PROD_API_TOKEN=credentials('EUPROD_TOKEN_VAR')
//EMEA PREPROD
TF_VAR_EMEA_PREPROD_ENV_URL="https://qqk70169.live.dynatrace.com"
TF_VAR_EMEA_PREPROD_API_TOKEN=credentials('EUPREPROD_TOKEN_VAR')
//NA PROD
TF_VAR_NA_PROD_ENV_URL="https://wgv50241.live.dynatrace.com/"
TF_VAR_NA_PROD_API_TOKEN=credentials('NAPROD_TOKEN_VAR')
//NA PREPROD
TF_VAR_NA_PREPROD_ENV_URL="https://onb44935.live.dynatrace.com/"
TF_VAR_NA_PREPROD_API_TOKEN=credentials('NAPREPROD_TOKEN_VAR')
//CN PROD
TF_VAR_CN_PROD_ENV_URL="https://dyna-synth-cn.bmwgroup.com.cn/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b"
TF_VAR_CN_PROD_API_TOKEN=credentials('CNPROD_TOKEN_VAR')
//CN PREPROD
TF_VAR_CN_PREPROD_ENV_URL="https://dyna-synth-cn.bmwgroup.com.cn/e/ab88c03b-b7fc-45f0-9115-9e9ecc0ced35"
TF_VAR_CN_PREPROD_API_TOKEN=credentials('CNPREPROD_TOKEN_VAR')
//TERRAFORM
TF_VAR_TERRAFORM_ENV_URL="https://rsb41808.live.dynatrace.com"
TF_VAR_TERRAFORM_API_TOKEN=credentials('TERRAFORM_TOKEN_VAR')
EUPROD_TOKEN_VAR = credentials('EUPROD_TOKEN_VAR')
EUPREPROD_TOKEN_VAR = credentials('EUPREPROD_TOKEN_VAR')
NAPROD_TOKEN_VAR = credentials('NAPROD_TOKEN_VAR')
NAPREPROD_TOKEN_VAR = credentials('NAPREPROD_TOKEN_VAR')
CNPROD_TOKEN_VAR = credentials('CNPROD_TOKEN_VAR')
CNPREPROD_TOKEN_VAR = credentials('CNPREPROD_TOKEN_VAR')
}
stages {
stage('install required python packages') {
stage('install required packages') {
steps {
sh '''
pip3 install --user -r requirements.txt
pip install --upgrade pip
pip install -r requirements.txt
'''
}
}
stage('Install Terraform') {
steps {
sh '''
cd /tmp
curl https://releases.hashicorp.com/terraform/1.1.4/terraform_1.1.4_linux_amd64.zip > terraform.zip
unzip terraform.zip
sudo mv /tmp/terraform /usr/local/bin
terraform --version
cd ~
'''
}
}
stage('Make exporter executable') {
steps {
sh 'chmod 755 ./bin/terraform-provider-dynatrace_v1.9.1'
sh 'python3 --version'
//sh 'sudo zypper refresh'
//sh 'chmod 755 ./updatepython.sh'
//sh './updatepython.sh'
//Only required once CN is not reachable from EMEA
//loopEnvironments(environments)
}
}
stage('Execute Export Script TERRAFORM') {
steps {
export_config_all(environments)
//sh 'python3 export.py EMEA_PROD'
//sh 'python3 export.py TERRAFORM'
//Only required once CN is not reachable from EMEA
//loopEnvironments(environments)
}
}
stage('Send report') {
steps {
script {
try {
emailext subject: env.JOB_NAME,
body: 'Please find the output of your reports attached',
to: 'rene.forstner@nttdata.com',
replyTo: 'coco-apm@bmw.de',
attachmentsPattern: '*.csv'
}
catch ( mailExc ){
echo "Sending Email Failed: ${mailExc}"
}
}
}
}
}
}
post {
always {
cleanWs()

197
README.md
View File

@ -1,196 +1 @@
# Dynatrace Reporting Pipeline
This repository is used as a template to create automated Dynatrace reports through Jenkins (JAWS) which are sent as an email attachment.
***
## Jenkins environments
EMEA & NA: https://jaws.bmwgroup.net/opapm/
CN: https://jaws-china.bmwgroup.net/opmaas/
### Request access
Access is granted manually through stephan.oertelt@bmw.de and Mohammed.Abadel@bmw.de
### Multi-Branch Pipelines
- master (=latest) --> for testing and developing stuff
- staging --> pre-release branch, if stuff tested successfully here merge it to production
- production --> actively used, productive reports
***
## Minimum Content of a Dynatrace Reporting Repo
### Repository & Pipeline Naming
- Repository and Pipelines must have identical names
- Naming must start with CoCo_APM_Reporting_**report name here**
### readme-File
The readme file must contain a useful description **what** is reported **when** to **whom**
Example: <br>
> This report is reporting all installed OneAgent versions including:
> - Hostnames
> - Environment Tag
> - PaaS Tag
>
> Sent to: coco-apm@bmw.de
>
> Executed on each Monday of every week <br>
> Executed on every Dynatrace environment
### requirements.txt
The requirements.txt file must contain **all** python packages which are used within the script through ```import``` <br>
e.g.: <br>
``` python-decouple
pyyaml
pandas
decouple
requests
datetime
argparse
```
### environments.yaml
The environments.yaml contains all environments on which the script should be executed, environments which should not be executed may be excluded through ```#```
**Do NOT change the environment names; the pipeline script is configured to distinguish between EMEA/NA and CN, as there are different Jenkins environments!**
The following snippet shows an environment file that is executed only on Dynatrace prod environments.
```
euprod:
- name: "euprod"
- env-url: "https://xxu26128.live.dynatrace.com"
- env-token-name: "EUPROD_TOKEN_VAR"
#eupreprod:
- name: "eupreprod"
- env-url: "https://qqk70169.live.dynatrace.com"
- env-token-name: "EUPREPROD_TOKEN_VAR"
#napreprod:
- name: "napreprod"
- env-url: "https://onb44935.live.dynatrace.com"
- env-token-name: "NAPREPROD_TOKEN_VAR"
naprod:
- name: "naprod"
- env-url: "https://wgv50241.live.dynatrace.com"
- env-token-name: "NAPROD_TOKEN_VAR"
cnprod:
- name: "cnprod"
- env-url: "https://dynatracemgd-cn.bmwgroup.net/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b"
- env-token-name: "CNPROD_TOKEN_VAR"
#cnpreprod:
- name: "cnpreprod"
- env-url: "https://dynatracemgd-cn.bmwgroup.net/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b"
- env-token-name: "CNPREPROD_TOKEN_VAR"
```
### Jenkinsfile
The Jenkinsfile is the pipeline script itself including:
- Time-Trigger
- Install package step
- Execute script step
- Send Mail step
### Python Script
The script itself will gather and aggregate the data from the Dynatrace environments.
Script output must be a csv or excel file in the **folder where the script is executed**
***
## First Usage - Create a new Pipeline
### Fork this repo
Do **NOT** clone this repo, create a fork instead.
1. On the left menu bar click on *Create fork*
![Bitbucket Fork Step 1](assets/bitbucket_fork_1.PNG)
2. Choose Project *Offboard Platform - APM - Application Performance Monitoring*
3. Choose a name starting with *CoCo_APM_Reporting_* and a useful suffix
4. Uncheck *Enable fork syncing*
![Bitbucket Fork Step 2](assets/bitbucket_fork_2.PNG)
5. Edit readme.md and describe your report
6. Change/Update the environment, requirements and script according to your needs
7. Login to Jenkins and select the folder *Coco APM Reporting*
8. Click the *New Item* Button
9. Enter the name of your repo (e.g. *CoCo_APM_Reporting_OneAgentVersion*)
10. Select *Multibranch Pipeline*
11. Click *OK* Button
![Jenkins Create Pipieline Step 1](assets/jenkins_create_pipeline_1.PNG)
12. On The Pipeline Settings go to *Branch Sources*, click *ADD SOURCE* and select *Bitbucket*
- Server: **ATC**
- Credentials: **qqjaws7**
- Owner: **OPAPM**
- Repository Name: **your forked repository**
- Behaviours: **According to screenshot**
<br>
![Jenkins Create Pipieline Step 2](assets/jenkins_create_pipeline_2.PNG)
<br>
Your pipeline will automatically test-run for all 3 branches.
---
# TerraformOnboarding
The purpose of the exportConfig.py script is to export Dynatrace-specific services as Terraform files. The importConfig.py script additionally exports the state for each exported Terraform file, since Terraform does not do that by default.
# Setup
Run the following command to install all necessary dependencies:
```python
pip install -r requirements.txt
```
In order to ensure full functionality a `.env` file is necessary with the following format:
```yml
# Environment URLs
CN_PREPROD_ENV_URL="https://dynatracemgd-cn.bmwgroup.net/e/ab88c03b-b7fc-45f0-9115-9e9ecc0ced35"
CN_PROD_ENV_URL="https://dynatracemgd-cn.bmwgroup.net/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b"
EMEA_PREPROD_ENV_URL="https://qqk70169.live.dynatrace.com"
EMEA_PROD_ENV_URL="https://xxu26128.live.dynatrace.com"
NA_PREPROD_ENV_URL="https://onb44935.live.dynatrace.com"
NA_PROD_ENV_URL="https://wgv50241.live.dynatrace.com"
# Environment Tokens
CN_PREPROD_API_TOKEN="<your-token>"
CN_PROD_API_TOKEN="<your-token>"
EMEA_PREPROD_API_TOKEN="<your-token>"
EMEA_PROD_API_TOKEN="<your-token>"
NA_PREPROD_API_TOKEN="<your-token>"
NA_PROD_API_TOKEN="<your-token>"
```
Place the `.env` file within the root directory of the project folder:
```bash
TerraformDynatrace Porter # Project Folder
├─── res
├─── templates
├─── .env # Add the environment file
├─── .gitignore
├─── README.md
├─── exportConfig.py
├─── importConfig.py
├─── main.tf
└─── requirements.txt
```
# Run
You can simply run the script by executing the following example command within the projects root directory:
```python
python exportConfig.py
python importConfig.py
```
**Note:** First run the exportConfig.py script and once that is done run the importConfig.py script.
# Version
```python
Python 3.9.9
```
init repo

View File

@ -1,28 +0,0 @@
#!/bin/bash
# Bootstrap script: installs pyenv, a Python toolchain and a virtualenv for the
# build agent. (Fixed: the shebang previously read "# !/bin/bash" — the space
# turned it into a plain comment, so the script ran under whatever shell invoked it.)
# Step 1. Install pyenv
git clone https://github.com/pyenv/pyenv.git ~/.pyenv
echo 'export PYENV_ROOT="$HOME/.pyenv"' >> ~/.bashrc
echo 'export PATH="$PYENV_ROOT/bin:$PATH"' >> ~/.bashrc
echo -e 'if command -v pyenv 1>/dev/null 2>&1; then\n  eval "$(pyenv init -)"\nfi' >> ~/.bashrc
source ~/.bashrc
# Step 2. Install missing headers for all the Python modules to be built and make sure gcc is installed
#sudo zypper install -y readline-devel sqlite3-devel libbz2-devel zlib-devel libopenssl-devel libffi-devel gcc
# Step 3. Install the desired Python version
#pyenv install 3.7.4
# Step 4. Install virtualenv
#sudo zypper install -y python3-virtualenv
pip3 install --user virtualenv
# Step 5. Create a virtual environment for the installed Python and activate it
#mkdir ~/pythons
#cd ~/pythons
virtualenv -p ~/.pyenv/versions/3.7.4/bin/python3.7 python3.7.4
source ./python3.7.4/bin/activate

Binary file not shown.

Before

Width:  |  Height:  |  Size: 40 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 26 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 141 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 53 KiB

View File

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -1,80 +0,0 @@
# Dynatrace Terraform Provider
## Requirements
- [Terraform](https://www.terraform.io/downloads.html) 0.13.x+
## Using the provider
If you want to run Terraform with the dynatrace provider plugin on your system, add the plug-in provider to the Terraform configuration file.
```hcl
terraform {
required_providers {
dynatrace = {
version = "1.9.0"
source = "dynatrace-oss/dynatrace"
}
}
}
```
In order to configure the provider, add a code section like this into your Terraform configuration file
```hcl
provider "dynatrace" {
dt_env_url = "https://#######.live.dynatrace.com"
dt_api_token = "##########################################"
}
```
where `dt_env_url` represents the URL of your Dynatrace Environment and `dt_api_token` needs to be an API Token with the permissions `Read configuration` and `Capture request data`.
## Currently supported configuration settings
* Dashboards
* Management Zones
* Custom Services
* Request Attributes
* Alerting Profiles
* Problem Notifications
* Auto Tag Configuration
* Kubernetes Credentials
* AWS Credentials
* Azure Credentials
* Maintenance Windows
* Service Level Objectives
* Service Naming
* Host Naming
* Process Group Naming
* Calculated Service Metrics
## Using API Parameters not supported by this module
By default, this terraform provider also includes an 'unknowns' operator for configuring properties that are not yet explicitly supported by this provider
To use this, simply pass a [jsonencoded](https://www.terraform.io/docs/language/functions/jsonencode.html) list of key-value parameters you want the provider to also manage via API call
```hcl
resource "dynatrace_k8s_credentials" "k8s_integration" {
unknowns = jsonencode({"activeGateGroup" = "myactivegategroup.nonp"})
}
```
## Exporting existing configuration from a Dynatrace Environment
In addition to acting as a Terraform Provider Plugin the executable `terraform-provider-dynatrace` (`terraform-provider-dynatrace.exe` on Windows) can also get directly invoked.
The utility then reaches out to the Dynatrace Environment specified by the command line arguments and fetches all currently supported configuration items. These results are automatically transformed into HCL (the configuration language used for `.tf` files), and each configuration item is placed into its own `.tf` file.
### Command Line Syntax
Invoking the export functionality requires
* The environment variable `DYNATRACE_ENV_URL` as the URL of your Dynatrace Environment
* The environment variable `DYNATRACE_API_TOKEN` as the API Token with the permissions `Read configuration` and `Capture request data`
* Optionally the environment variable `DYNATRACE_TARGET_FOLDER`. If it's not set, the output folder `./configuration` is assumed
#### Windows
`terraform-provider-dynatrace.exe export *[<resourcename>[=<id>]]`
#### Linux
`./terraform-provider-dynatrace export *[<resourcename>[=<id>]]`
#### Usage Examples
* `./terraform-provider-dynatrace export` downloads all available configuration settings
* `./terraform-provider-dynatrace export dynatrace_dashboard` downloads all available dashboards
* `./terraform-provider-dynatrace export dynatrace_dashboard dynatrace_slo` downloads all available dashboards and all available SLOs
* `./terraform-provider-dynatrace export dynatrace_dashboard=4f5942d4-3450-40a8-818f-c5faeb3563d0` downloads only the dashboard with the id `4f5942d4-3450-40a8-818f-c5faeb3563d0`
* `./terraform-provider-dynatrace export dynatrace_dashboard=4f5942d4-3450-40a8-818f-c5faeb3563d0 dynatrace_dashboard=9c4b75f1-9a64-4b44-a8e4-149154fd5325` downloads only the dashboards with the ids `4f5942d4-3450-40a8-818f-c5faeb3563d0` and `9c4b75f1-9a64-4b44-a8e4-149154fd5325`
* `./terraform-provider-dynatrace export dynatrace_slo dynatrace_dashboard=4f5942d4-3450-40a8-818f-c5faeb3563d0 dynatrace_dashboard=9c4b75f1-9a64-4b44-a8e4-149154fd5325` downloads all available SLOs and only the dashboards with the ids `4f5942d4-3450-40a8-818f-c5faeb3563d0` and `9c4b75f1-9a64-4b44-a8e4-149154fd5325`

24
environment.yaml Normal file
View File

@ -0,0 +1,24 @@
euprod:
- name: "euprod"
- env-url: "https://xxu26128.live.dynatrace.com"
- env-token-name: "EUPROD_TOKEN_VAR"
eupreprod:
- name: "eupreprod"
- env-url: "https://qqk70169.live.dynatrace.com"
- env-token-name: "EUPREPROD_TOKEN_VAR"
napreprod:
- name: "napreprod"
- env-url: "https://onb44935.live.dynatrace.com"
- env-token-name: "NAPREPROD_TOKEN_VAR"
naprod:
- name: "naprod"
- env-url: "https://wgv50241.live.dynatrace.com"
- env-token-name: "NAPROD_TOKEN_VAR"
cnprod:
- name: "cnprod"
- env-url: "https://dynatracemgd-cn.bmwgroup.net/e/b921f1b9-c00e-4031-b9d1-f5a0d530757b"
- env-token-name: "CNPROD_TOKEN_VAR"
cnpreprod:
- name: "cnpreprod"
# FIX: previously pointed at the cnprod environment id (b921f1b9-...);
# the preprod id below matches the CN_PREPROD URL used elsewhere in this repo.
- env-url: "https://dynatracemgd-cn.bmwgroup.net/e/ab88c03b-b7fc-45f0-9115-9e9ecc0ced35"
- env-token-name: "CNPREPROD_TOKEN_VAR"

268
export.py
View File

@ -1,268 +0,0 @@
import os
from subprocess import Popen, PIPE, STDOUT, TimeoutExpired
import sys
import time
import shutil
import hcl
from dotenv import load_dotenv
from glob import glob
#from git import Repo
import zipfile
import boto3
# [AA 2022.01.17] Set available resources.
# Pick the platform-specific exporter binary (Windows ships an .exe).
if os.name == 'nt':
    export_tool = ".\\bin\\terraform-provider-dynatrace_v1.9.1.exe"
else:
    export_tool = "./bin/terraform-provider-dynatrace_v1.9.1"

# Resource types to export. Overridable via the comma-separated
# TERRAFORM_RESOURCES environment variable; otherwise this default list is used.
Resources = os.getenv("TERRAFORM_RESOURCES").split(",") if os.getenv("TERRAFORM_RESOURCES") else [
    "dynatrace_custom_service",
    "dynatrace_dashboard",
    "dynatrace_management_zone",
    "dynatrace_maintenance_window",
    "dynatrace_request_attribute",
    "dynatrace_alerting_profile",
    "dynatrace_notification",
    # BUG FIX: the comma after "dynatrace_autotag" was missing, so Python's
    # implicit string concatenation fused it with the next entry into the
    # bogus resource "dynatrace_autotagdynatrace_aws_credentials".
    "dynatrace_autotag",
    "dynatrace_aws_credentials",
    "dynatrace_azure_credentials",
    "dynatrace_k8s_credentials",
    "dynatrace_service_anomalies",
    "dynatrace_application_anomalies",
    "dynatrace_host_anomalies",
    "dynatrace_database_anomalies",
    "dynatrace_custom_anomalies",
    "dynatrace_disk_anomalies",
    "dynatrace_calculated_service_metric",  # issue -> bug: windows specific due to path length limit
    "dynatrace_service_naming",
    "dynatrace_host_naming",
    "dynatrace_processgroup_naming",
    "dynatrace_slo",  # issue -> bug: whitespace issue
    "dynatrace_span_entry_point",
    "dynatrace_span_capture_rule",
    "dynatrace_span_context_propagation",
    "dynatrace_resource_attributes",
    "dynatrace_span_attribute",
    "dynatrace_mobile_application",
    # "dynatrace_credentials",  # issue -> bug: unknown issue? not supported?
    "dynatrace_browser_monitor",
    "dynatrace_http_monitor",
]
# [AA 2021.12.10] Method to set environments
def setEnv(env, time, path):
    """Point the Dynatrace export tool at one environment.

    Reads TF_VAR_<env>_ENV_URL / TF_VAR_<env>_API_TOKEN and mirrors them into
    the DYNATRACE_* variables the exporter binary expects; the target folder
    is "<path><time>_<env>".

    Raises:
        Exception: if either TF_VAR_* variable is unset.

    Returns:
        The (mutated) os.environ mapping.
    """
    url_var = "TF_VAR_" + env + "_ENV_URL"
    token_var = "TF_VAR_" + env + "_API_TOKEN"
    if not os.getenv(url_var):
        raise Exception("Environment variable missing: " + url_var)
    if not os.getenv(token_var):
        raise Exception("Environment variable missing: " + token_var)
    os.environ['DYNATRACE_ENV_URL'] = str(os.getenv(url_var))
    os.environ['DYNATRACE_API_TOKEN'] = str(os.getenv(token_var))
    os.environ['DYNATRACE_TARGET_FOLDER'] = str(path + time + "_" + env)
    return os.environ
# [AA 2021.12.10] Method to call process synchronously
def runExportProcess(process_name, input_params):
    """Run the export tool synchronously and fail loudly on any stdout output.

    The exporter is expected to be silent on success, so ANY stdout output is
    treated as an error (this preserves the original contract).

    Fixes vs. previous version:
    - typo in the error message ("Eception occured");
    - the timeout is passed to communicate() — previously wait(timeout=...)
      ran AFTER communicate() had already waited, making it a no-op;
    - the stale "10 minutes" comment (the timeout is 1 hour);
    - the child is killed if the timeout fires;
    - removed the unused `process_names` / `success` locals.

    Raises:
        Exception: if the process wrote anything to stdout.
        TimeoutExpired: if the process runs longer than 1 hour.
    """
    print("[DEBUG] Start run process: " + ' '.join(input_params))
    process = Popen(input_params, stdout=PIPE, stderr=PIPE)
    try:
        output, error_output = process.communicate(timeout=60 * 60)  # 1 hour
    except TimeoutExpired:
        process.kill()  # don't leave a zombie exporter behind
        raise
    if len(output) > 0:
        raise Exception("Exception occurred during export config: " + output.decode("utf-8"))
    print("[DEBUG]", "Process:", process_name, "Success:", True)
def runImportProcess(process_name, input_params):
    """Run a terraform import step synchronously, inheriting stdout/stderr.

    Fixes vs. previous version: typo in the error message ("Eception occured"),
    `returncode != 0` instead of `> 0` (a child killed by a signal has a
    NEGATIVE returncode and was previously treated as success), and removal of
    the unused `process_names` / `success` locals.

    Raises:
        Exception: if the process exits with a non-zero status.
        TimeoutExpired: if the process runs longer than 1 hour.
    """
    print("[DEBUG] Start run process: " + ' '.join(input_params))
    process = Popen(input_params)
    process.wait(timeout=60 * 60)  # 1 hour
    if process.returncode != 0:
        raise Exception("Exception occurred while generating state file!")
    print("[DEBUG]", "Process:", process_name, "Success:", True)
# [AA 2021.12.17] Methods needed to replace the matching keys
def replacedata(p, maplist):
    """Replace every occurrence of each key of `maplist` with its value in file p.

    Fix vs. previous version: the file was opened 'r+' and truncated BEFORE the
    replacements were computed, then rewritten via a second open('w+') — so a
    crash between the two opens destroyed the file, and the first truncation was
    redundant. Now the file is read fully, transformed in memory, and written once.
    """
    with open(p, 'r') as template:
        print("[DEBUG]", "Opening file at %s." % p)
        data = template.read()
    # [AA 2021.12.17] With the values for management_zone and msid in memory
    for key, val in maplist.items():
        print("[DEBUG]", "Replacing key values %s at %s." % (key, p))
        data = data.replace(key, val)
    # [AA 2021.12.14] Write data from memory into file
    with open(p, 'w') as template:
        template.write(data)
# [AA 2021.12.13] Fill dictionary
def readFile(path):
    """Parse one exported .tf file and return its (resource type, resource name).

    Each exported file contains exactly one resource block; the first key under
    'resource' is the resource type, and the first key under that is the name.
    """
    with open(path, 'r', encoding='utf8') as cfg:
        # Load the HCL content of this resource file (e.g. a management zone).
        parsed = hcl.load(cfg)
    resource_block = parsed['resource']
    resource_type = next(iter(resource_block))
    resource_name = next(iter(resource_block[resource_type]))
    return resource_type, resource_name
# [AA, EW 2022.01.17] Load all resources and add them to a dictionary
def createResourceDict():
    """Scan the export target folder for resource .tf files and index them in myDict.

    Builds myDict as {module_dir: {resource_type: [{"resourceName": ..,
    "resourceID": ..}, ...]}} from files named <type>.<id>.<...>.tf.
    """
    # Collect .tf files one and two levels below the target folder,
    # normalising separators so the lookup keys are stable across platforms.
    found = glob(targetFolder + "**/**.tf") + glob(targetFolder + "**/**/**.tf")
    tf_files = [os.path.normpath(f).replace('\\', '/') for f in found]
    for tf_file in tf_files:
        module_dir = "./" + ("./" + os.path.dirname(tf_file)).replace(targetFolder, "")
        name_parts = os.path.basename(tf_file).split(".")
        # Exported resource files have 5 dot-separated name parts; anything
        # else (main.tf, module.tf) is skipped.
        if len(name_parts) != 5:
            continue
        module_name, resource_name = readFile(tf_file)
        per_module = myDict.setdefault(module_dir, {}).setdefault(module_name, [])
        per_module.append({"resourceName": resource_name,
                           "resourceID": name_parts[1]})
# [AA, EW 2022.01.17] Copy main.tf into the target folder
def copyMainTemplate():
    """Copy the main.tf template into the target folder and fill its placeholders."""
    destination = targetFolder + "main.tf"
    shutil.copyfile(templatesFolder + "main.tf", destination)
    # Placeholder tokens in the template are replaced textually; the S3
    # values come from the process environment.
    placeholders = {
        "{$env}": env,
        "{$timestamp}": timestamp,
        "{$S3_BUCKET}": str(os.getenv("AWS_S3_BUCKET")),
        "{$S3_REGION}": str(os.getenv("AWS_S3_REGION")),
    }
    replacedata(destination, placeholders)
# [AA 2022.01.17] Copy module.tf in all folders and subfolders except where main.tf is
def copyModuleTemplate():
    """Copy the module.tf template into every (sub)folder under the target folder.

    The two glob patterns cover one and two directory levels below
    targetFolder; the target folder root itself (where main.tf lives) is
    not matched.
    """
    dirs = glob(targetFolder + "**/") + glob(targetFolder + "**/**/")
    # Plain iteration: the old enumerate() index was never used, and the loop
    # variable `dir` shadowed the builtin of the same name.
    for directory in dirs:
        shutil.copyfile(templatesFolder + "module.tf", directory + "module.tf")
# [AA 2021.12.13] Append correct configuration path
def writeFile(k, d):
    """Append a `module` entry pointing module name `k` at source dir `d` to ./main.tf.

    NOTE(review): appears unused in this script (editMainTF() writes the
    module entries instead) — confirm before removing.
    """
    # os.path.join instead of the old hard-coded ".\\main.tf", which only
    # worked on Windows (on POSIX it created a file literally named ".\main.tf").
    with open(os.path.join(".", "main.tf"), "a") as mf:
        mf.writelines("\n" + "module \"" + k + "\" { source = \"" + d + "\" }")
# [AA, EW 2022.01.17] Adjust the resource module name
def getModuleTag(str):
    """Turn a relative module path like "./a/b" into a Terraform module tag "a_b"."""
    # Drop every "./" occurrence, then rejoin the remaining path parts
    # with underscores (equivalent to replacing "/" with "_").
    stripped = str.replace("./", "")
    return "_".join(stripped.split("/"))
# [AA, EW 2022.01.17] Set the resource names
def editMainTF():
    """Append one `module` entry per discovered module directory to the generated main.tf.

    Reads the directory keys of the global myDict built by createResourceDict().
    """
    with open(targetFolder + "main.tf", "a") as mf:
        # Only the directory keys are needed; the old enumerate(...items())
        # loop produced an index and a value that were never used.
        for filedir in myDict:
            mf.writelines("\n" + "module \"" + getModuleTag(filedir) +
                          "\" { source = \"" + filedir + "\" }")
# [AA, EW 2022.01.17] Start importing
def importStates():
    """Run `terraform init` in the target folder, then `terraform import` each resource.

    Iterates the global myDict produced by createResourceDict() and imports
    every resource into the Terraform state, e.g.:
    terraform import module.alerting_profiles.dynatrace_alerting_profiles.CD_ABC 9348098098safs9f8

    Raises:
        Exception: propagated from runImportProcess() on any failing command.
    """
    os.chdir(targetFolder)
    try:
        runImportProcess("Terraform init", ["terraform", "init"])
        for filedir, resourceV in myDict.items():
            for resource, valueArray in resourceV.items():
                for rObject in valueArray:
                    address = ("module." + getModuleTag(filedir) + "." +
                               resource + "." + rObject["resourceName"])
                    runImportProcess("Import",
                                     ["terraform", "import", address, rObject["resourceID"]])
    finally:
        # Restore the working directory even when an import fails, so the
        # later zip/upload steps don't run inside the target folder.
        os.chdir(cwd)
def zipdir(path, ziph):
    """Recursively add every file under `path` to the open ZipFile handle `ziph`.

    Skips any directory whose path contains ".terraform" and any file whose
    name contains ".terraform.lock.hcl"; archive names are made relative to
    the parent of `path` so the top-level folder name is kept in the zip.
    """
    archive_base = os.path.join(path, '..')
    for root, dirs, files in os.walk(path):
        if ".terraform" in root:
            continue
        for filename in files:
            if ".terraform.lock.hcl" in filename:
                continue
            full_path = os.path.join(root, filename)
            ziph.write(full_path, os.path.relpath(full_path, archive_base))
# [AA 2022.01.17] Arguments passed
if len(sys.argv) == 2:
    try:
        # [AA 2021.11.29] Load environment file (.env) into the process environment
        load_dotenv()
        # [AA, EW 2022.01.17] Set global variables.
        # (The old `global timestamp, ...` statement was removed: at module
        # level it is a no-op, these assignments are already global.)
        env = sys.argv[1]
        timestamp = time.strftime("%Y%m%d-%H%M%S")
        cwd = os.getcwd()
        outputFolder = "./output/"
        targetFolder = outputFolder + timestamp + "_" + env + "/"
        templatesFolder = "./templates/"
        myDict = {}
        # [AA, EW 2022.01.17] Set env variables (DYNATRACE_* from TF_VAR_<env>_*)
        setEnv(env, timestamp, outputFolder)
        # [AA, EW 2022.01.17] Download resource files
        runExportProcess("Export", [export_tool, "export"] + Resources)
        # [AA, EW 2022.01.17] Create a dictionary to store information of resources
        createResourceDict()
        # [AA, EW 2022.01.17] Copy main.tf file and add module.tf files
        copyMainTemplate()
        copyModuleTemplate()
        # [AA, EW 2022.01.17] Print the module names with their associated module path into the main.tf file
        editMainTF()
        # [AA, EW 2022.01.17] Import the states for each module
        importStates()
        # The context manager closes the archive even if zipping fails, so a
        # half-written zip handle is never left open (the old code only
        # closed it on success).
        zip_path = outputFolder + "/" + timestamp + "_" + env + '.zip'
        with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
            zipdir(targetFolder, zipf)
        s3 = boto3.client('s3')
        # TODO: Make s3 bucket name configurable over environment variables
        with open(zip_path, 'rb') as data:
            s3.upload_fileobj(data, str(os.getenv("AWS_S3_BUCKET")),
                              'backups/' + timestamp + "_" + env + '.zip')
        print("Finished!")
        sys.exit(0)
    except Exception as err:
        print("Exception occurred: " + str(err))
        sys.exit(1)
else:
    print("Usage example: ")
    print("python .\\export.py [Environment]")
    print("List of available environments: CN_PREPROD, CN_PROD, EMEA_PREPROD, EMEA_PROD, NA_PREPROD, NA_PROD, etc.")
    sys.exit(1)

View File

@ -1,12 +1,6 @@
#python-decouple
#pyyaml
#pandas
#requests
#datetime
#argparse
#=======
typing
python-dotenv
pyhcl
subprocess32
boto3==1.17.0
python-decouple
pyyaml
pandas
requests
datetime
argparse

View File

@ -1,12 +0,0 @@
import subprocess
import sys

# Helper script: run `terraform init` in the current directory with a
# 10-minute timeout; only runs when invoked without extra arguments.
if len(sys.argv) == 1:
    try:
        process = subprocess.Popen(["terraform", "init"])
        # wait() returns the child's exit code
        returncode = process.wait(timeout=10 * 60)
        print("[DEBUG]", "Process return code:", returncode)
    except subprocess.TimeoutExpired as err:
        # Print the caught exception instance; the old code printed the
        # class object `subprocess.TimeoutExpired` itself.
        print("[DEBUG]", "Exception occured:", err)
        print("[DEBUG]", "Killing process.")
        process.kill()
        # Reap the killed child so it doesn't linger as a zombie.
        process.wait()

View File

@ -1,9 +0,0 @@
# Pins the Dynatrace Terraform provider so `terraform init` resolves the
# same plugin version in every generated module folder.
terraform {
required_providers {
dynatrace = {
version = "1.9.1"
source = "dynatrace-oss/dynatrace"
}
}
}

View File

@ -1,25 +0,0 @@
# main.tf template: every {$...} token below is replaced textually by
# copyMainTemplate()/replacedata() in export.py before `terraform init` runs.
terraform {
required_providers {
dynatrace = {
version = "1.9.1"
source = "dynatrace-oss/dynatrace"
}
}
# Remote state in S3; bucket/region are filled from the AWS_S3_BUCKET /
# AWS_S3_REGION environment variables at generation time. The
# dynamodb_table entry configures state locking per the s3 backend docs.
backend "s3" {
bucket = "{$S3_BUCKET}"
key = "backup/{$env}/{$timestamp}/terraform.tfstate"
region = "{$S3_REGION}"
dynamodb_table = "coco-dynatrace-tfstate"
encrypt = true
}
}
# Credential variables: populated via TF_VAR_<ENV>_ENV_URL /
# TF_VAR_<ENV>_API_TOKEN environment variables (see setEnv() in export.py).
variable {$env}_ENV_URL {}
variable {$env}_API_TOKEN {}
provider "dynatrace" {
dt_env_url = "${var.{$env}_ENV_URL}"
dt_api_token = "${var.{$env}_API_TOKEN}"
}

View File

@ -1,9 +0,0 @@
# Module-level copy of the provider pin: keeps `terraform init` resolving
# the same Dynatrace provider version inside each generated module folder.
terraform {
required_providers {
dynatrace = {
version = "1.9.1"
source = "dynatrace-oss/dynatrace"
}
}
}

View File

@ -1,29 +0,0 @@
#!/bin/bash
# Install Python 3.8 via zypper.
# (The old first line was "# !/bin/bash" — the space made it an ordinary
# comment instead of a valid shebang, and the misleading "Install pyenv"
# label did not match the zypper install below.)
sudo zypper refresh
sudo zypper update -y
sudo zypper install -y python38
# Alternative: build Python 3.8.7 from source (kept for reference, disabled)
#sudo zypper install -y readline-devel sqlite3-devel libbz2-devel zlib-devel libopenssl-devel libffi-devel gcc make
#cd /tmp
#wget https://www.python.org/ftp/python/3.8.7/Python-3.8.7.tgz
#tar -xvf Python-3.8.7.tgz
#cd Python-3.8.7/
#./configure
#make
#make install
#echo export PATH=”$PATH/root/Python-3.8.7/”’>> ~/.bashrc
#source ~/.bashrc
# Report the installed interpreter versions
python3 --version
python38 --version
# Optional follow-ups (disabled):
#pyenv install 3.7.4
#sudo zypper install -y python3-virtualenv