Skip to content

Commit e2625c5

Browse files
sshi100bmonkman
andauthored
add new user - more commits with modules (#99)
* initial commit, need check together with terraform-zero-modules/iam_users * fix with review * enhancement with map structure * enhancement with user group for AWS access * fix with template * enhancements with shared state and k8s rules etc. * refined modules-environment-root parameters etc. after review * Tweaked documentation a bit * remove unnecessary templating and use variables * replace space with tab * typo fix * recover templating for terraform block * typo comment fix * use remote modules and fixes * added teardown steps for shared env * refer to newer versions of modules * add shared-remote-state generation Co-authored-by: Bill Monkman <[email protected]>
1 parent 334b89f commit e2625c5

9 files changed

Lines changed: 453 additions & 28 deletions

File tree

templates/Makefile

Lines changed: 28 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,12 @@
11
SHELL = /usr/bin/env bash
# Target environment; override per-invocation, e.g. `ENVIRONMENT=prod make apply`.
ENVIRONMENT ?= stage
PROJECT = <% .Name %>
# IAM role suffix used when updating kubeconfig; override e.g. `ROLE=developer`.
ROLE ?= admin
export AWS_DEFAULT_REGION = <% index .Params `region` %>
export AWS_PAGER =
KUBE_CONTEXT := $(PROJECT)-$(ENVIRONMENT)-$(AWS_DEFAULT_REGION)

# Full bring-up: remote state backends first, then secrets, shared + per-env
# terraform, kubeconfig, and finally the in-cluster utilities.
apply: apply-remote-state apply-shared-remote-state apply-secrets apply-shared-env apply-env update-k8s-conf pre-k8s apply-k8s-utils post-apply-setup
910

1011
apply-remote-state:
1112
aws s3 ls $(PROJECT)-$(ENVIRONMENT)-terraform-state > /dev/null 2>&1 || ( \
@@ -14,13 +15,25 @@ apply-remote-state:
1415
terraform apply -var "environment=$(ENVIRONMENT)" $(AUTO_APPROVE) && \
1516
rm ./terraform.tfstate )
1617

18+
# Bootstrap the S3 bucket + DynamoDB lock table backing the "shared"
# environment's Terraform state. The `aws s3 ls` probe makes this idempotent:
# the whole subshell is skipped when the bucket already exists.
apply-shared-remote-state:
	aws s3 ls $(PROJECT)-shared-terraform-state > /dev/null 2>&1 || ( \
	cd terraform/bootstrap/remote-state && \
	terraform init && \
	terraform apply -var "environment=shared" $(AUTO_APPROVE) && \
	rm ./terraform.tfstate )
24+
1725
apply-secrets:
1826
aws iam list-access-keys --user-name $(PROJECT)-ci-user > /dev/null 2>&1 || ( \
1927
cd terraform/bootstrap/secrets && \
2028
terraform init && \
2129
terraform apply $(AUTO_APPROVE) && \
2230
rm ./terraform.tfstate )
2331

32+
# Apply the "shared" environment (cross-environment resources such as IAM
# users). Chained with `&&` after `cd` so that if the directory is missing the
# terraform commands cannot run in the wrong working directory (the original
# used `;`, which would have continued regardless).
apply-shared-env:
	cd terraform/environments/shared && \
	terraform init && \
	terraform apply $(AUTO_APPROVE)
36+
2437
apply-env:
2538
cd terraform/environments/$(ENVIRONMENT); \
2639
terraform init && \
@@ -40,12 +53,12 @@ apply-k8s-utils:
4053
terraform apply $(AUTO_APPROVE)
4154

4255
update-k8s-conf:
43-
aws eks --region $(AWS_DEFAULT_REGION) update-kubeconfig --role "arn:aws:iam::<% index .Params `accountId` %>:role/$(PROJECT)-kubernetes-admin-$(ENVIRONMENT)" --name $(KUBE_CONTEXT) --alias $(KUBE_CONTEXT)
56+
aws eks --region $(AWS_DEFAULT_REGION) update-kubeconfig --role "arn:aws:iam::<% index .Params `accountId` %>:role/$(PROJECT)-kubernetes-$(ROLE)-$(ENVIRONMENT)" --name $(KUBE_CONTEXT) --alias $(KUBE_CONTEXT)
4457

4558
post-apply-setup:
4659
cd scripts && ENVIRONMENT=$(ENVIRONMENT) PROJECT=$(PROJECT) sh post-apply.sh
4760

48-
teardown: teardown-k8s-utils teardown-env teardown-secrets teardown-remote-state
61+
teardown: teardown-k8s-utils teardown-env teardown-shared-env teardown-secrets teardown-remote-state teardown-shared-remote-state
4962

5063
teardown-remote-state:
5164
@echo "Deleting remote state is not reversible, are you sure you want to delete the resources? [y/N]:" ; read ans ; [ $${ans:-N} == "y" ] || exit 1
@@ -54,6 +67,13 @@ teardown-remote-state:
5467
# TODO : This doesn't work because bucket versioning is enabled, we would need to loop through all versions of files and delete them manually
5568
aws s3 rb s3://$(PROJECT)-$(ENVIRONMENT)-terraform-state --force
5669

70+
# Destroy the shared remote-state backend. Prompts for confirmation first,
# since deleting state is irreversible; then removes the lock table, empties
# the bucket, and deletes it.
teardown-shared-remote-state:
	@echo "Deleting shared remote state is not reversible, are you sure you want to delete the resources? [y/N]:" ; read ans ; [ $${ans:-N} == "y" ] || exit 1
	aws dynamodb delete-table --region $(AWS_DEFAULT_REGION) --table-name $(PROJECT)-shared-terraform-state-locks
	aws s3 rm s3://$(PROJECT)-shared-terraform-state --recursive
	# TODO : This doesn't work because bucket versioning is enabled, we would need to loop through all versions of files and delete them manually
	aws s3 rb s3://$(PROJECT)-shared-terraform-state --force
76+
5777
teardown-secrets:
5878
@echo "Deleting secrets is not reversible, are you sure you want to delete the secrets? [y/N]:" ; read ans ; [ $${ans:-N} == "y" ] || exit 1
5979
aws secretsmanager list-secrets --region $(AWS_DEFAULT_REGION) --query "SecretList[?Tags[?Key=='project' && Value=='$(PROJECT)']].[Name] | [0][0]" | xargs aws secretsmanager delete-secret --region $(AWS_DEFAULT_REGION) --secret-id || echo "Secret already removed"
@@ -69,8 +89,12 @@ teardown-env:
6989
cd terraform/environments/$(ENVIRONMENT) && \
7090
terraform destroy
7191

92+
# Destroy everything managed by the "shared" environment (IAM users, etc.).
# terraform will prompt interactively for confirmation.
teardown-shared-env:
	cd terraform/environments/shared && \
	terraform destroy
95+
7296
teardown-k8s-utils:
7397
cd kubernetes/terraform/environments/$(ENVIRONMENT) && \
7498
terraform destroy
7599

76-
.PHONY: apply apply-remote-state apply-secrets apply-env apply-k8s-utils teardown-k8s-utils teardown-env teardown-secrets teardown-remote-state
100+
.PHONY: apply apply-remote-state apply-secrets apply-env apply-k8s-utils teardown-k8s-utils teardown-env teardown-shared-env teardown-secrets teardown-remote-state teardown-shared-remote-state

templates/terraform/README.md

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -102,6 +102,26 @@
102102
make update-k8s-conf
103103
```
104104

105+
If a user has a role other than admin (e.g. developer or operator), they can specify it when updating their kubeconfig:
106+
```
107+
ROLE=<role> make update-k8s-conf
108+
```
109+
110+
## User Access
111+
112+
You may want to give members of your team access to the infrastructure.
113+
Individual roles and permissions are defined in `environments/<env>/user_access.tf`, these will define the amount of access a user in that role has to both AWS and Kubernetes.
114+
115+
1. Add users in `environments/shared/main.tf` and specify the role they should have in each environment, then run:
116+
```
117+
make apply-shared-env
118+
```
119+
120+
2. To do the assignment of users to roles in each environment, you must run this for each:
121+
```
122+
ENVIRONMENT=<env> make apply-env
123+
```
124+
This should detect that there was a new user created, and put them into the necessary group.
105125

106126

107127
## Upgrading an EKS Cluster

templates/terraform/environments/prod/main.tf

Lines changed: 45 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -9,9 +9,28 @@ terraform {
99
}
1010
}
1111

12+
# Project-wide constants, filled in by the templating engine at generation time.
locals {
  project     = "<% .Name %>"
  region      = "<% index .Params `region` %>"
  account_id  = "<% index .Params `accountId` %>"
  domain_name = "<% index .Params `productionHostRoot` %>"
}

provider "aws" {
  region              = local.region
  allowed_account_ids = [local.account_id]
}

# Remote state of the "shared" environment — exposes the user/role mapping
# consumed by this environment's root module.
data "terraform_remote_state" "shared" {
  backend = "s3"
  config = {
    bucket         = "${local.project}-shared-terraform-state"
    key            = "infrastructure/terraform/environments/shared/main"
    region         = local.region
    encrypt        = true
    dynamodb_table = "${local.project}-shared-terraform-state-locks"
  }
}
1635

1736
# Instantiate the production environment
@@ -20,9 +39,9 @@ module "prod" {
2039
environment = "prod"
2140

2241
# Project configuration
23-
project = "<% .Name %>"
24-
region = "<% index .Params `region` %>"
25-
allowed_account_ids = ["<% index .Params `accountId` %>"]
42+
project = local.project
43+
region = local.region
44+
allowed_account_ids = [local.account_id]
2645
random_seed = "<% index .Params `randomSeed` %>"
2746

2847
# ECR configuration
@@ -35,15 +54,15 @@ module "prod" {
3554
eks_worker_asg_max_size = 4
3655

3756
# EKS-Optimized AMI for your region: https://docs.aws.amazon.com/eks/latest/userguide/eks-optimized-ami.html
38-
# https://<% index .Params `region` %>.console.aws.amazon.com/systems-manager/parameters/%252Faws%252Fservice%252Feks%252Foptimized-ami%252F1.17%252Famazon-linux-2%252Frecommended%252Fimage_id/description?region=<% index .Params `region` %>
57+
# https://${local.region}.console.aws.amazon.com/systems-manager/parameters/%252Faws%252Fservice%252Feks%252Foptimized-ami%252F1.17%252Famazon-linux-2%252Frecommended%252Fimage_id/description?region=${local.region}
3958
eks_worker_ami = "<% index .Params `eksWorkerAMI` %>"
4059

4160
# Hosting configuration. Each domain will have a bucket created for it, but may have multiple aliases pointing to the same bucket.
4261
hosted_domains = [
43-
{ domain : "<% index .Params `productionHostRoot` %>", aliases : [] },
44-
{ domain : "<% index .Params `productionFrontendSubdomain` %><% index .Params `productionHostRoot` %>", aliases : [] },
62+
{ domain : local.domain_name, aliases : [] },
63+
{ domain : "<% index .Params `productionFrontendSubdomain` %>${local.domain_name}", aliases : [] },
4564
]
46-
domain_name = "<% index .Params `productionHostRoot` %>"
65+
domain_name = "${local.domain_name}"
4766
cf_signed_downloads = <% if eq (index .Params `fileUploads`) "yes" %>true<% else %>false<% end %>
4867

4968
# DB configuration
@@ -61,5 +80,21 @@ module "prod" {
6180
# See https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/aes-limits.html
6281

6382
sendgrid_enabled = <%if eq (index .Params `sendgridApiKey`) "" %>false<% else %>true<% end %>
64-
sendgrid_api_key_secret_name = "<% .Name %>-sendgrid-<% index .Params `randomSeed` %>"
83+
sendgrid_api_key_secret_name = "${local.project}-sendgrid-<% index .Params `randomSeed` %>"
84+
85+
# Roles configuration
86+
roles = [
87+
{
88+
name = "developer"
89+
aws_policy = data.aws_iam_policy_document.developer_access.json
90+
k8s_policies = local.k8s_developer_access
91+
},
92+
{
93+
name = "operator"
94+
aws_policy = data.aws_iam_policy_document.operator_access.json
95+
k8s_policies = local.k8s_operator_access
96+
}
97+
]
98+
99+
user_role_mapping = data.terraform_remote_state.shared.outputs.user_role_mapping
65100
}
Lines changed: 101 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,101 @@
1+
# AWS policy document for the "developer" role: read-only visibility into
# EKS, ECR, and the project's hosting buckets.
data "aws_iam_policy_document" "developer_access" {
  # EKS — listing clusters is account-wide; describe is limited to the
  # project's stage cluster(s).
  # NOTE(review): the environment is hard-coded to "stage" here — confirm that
  # is intentional for this environment's user_access.tf.
  statement {
    effect    = "Allow"
    actions   = ["eks:ListClusters"]
    resources = ["*"]
  }
  statement {
    effect    = "Allow"
    actions   = ["eks:DescribeCluster"]
    resources = ["arn:aws:eks:${local.region}:${local.account_id}:cluster/${local.project}-stage*"]
  }

  # ECR — read-only repository/image metadata.
  statement {
    effect = "Allow"
    actions = [
      "ecr:DescribeImages",
      "ecr:DescribeRepositories"
    ]
    resources = ["*"]
  }

  # S3 — read access to the buckets backing the hosted domains.
  statement {
    effect    = "Allow"
    actions   = ["s3:ListBucket"]
    resources = ["arn:aws:s3:::*${local.domain_name}"]
  }
  statement {
    effect    = "Allow"
    actions   = ["s3:GetObject"]
    resources = ["arn:aws:s3:::*${local.domain_name}/*"]
  }
}
37+
38+
# AWS policy document for the "operator" role: full EKS/ECR/S3 access plus
# permission to assume the project's Kubernetes operator role.
data "aws_iam_policy_document" "operator_access" {
  # IAM / STS. iam:ListRoles does not support resource-level permissions, so
  # granting it against a single role ARN is silently ineffective — it must be
  # allowed on "*". sts:AssumeRole stays scoped to the operator role only.
  statement {
    effect    = "Allow"
    actions   = ["iam:ListRoles"]
    resources = ["*"]
  }
  statement {
    effect    = "Allow"
    actions   = ["sts:AssumeRole"]
    resources = ["arn:aws:iam::${local.account_id}:role/${local.project}-kubernetes-operator-stage"]
  }

  # EKS — full control of the project's stage cluster(s).
  # NOTE(review): "stage" is hard-coded — confirm per-environment files vary this.
  statement {
    effect    = "Allow"
    actions   = ["eks:*"]
    resources = ["arn:aws:eks:${local.region}:${local.account_id}:cluster/${local.project}-stage*"]
  }

  # ECR — full control of container repositories.
  statement {
    effect    = "Allow"
    actions   = ["ecr:*"]
    resources = ["*"]
  }

  # S3 — full control of the hosting buckets and their objects.
  statement {
    effect    = "Allow"
    actions   = ["s3:*"]
    resources = ["arn:aws:s3:::*${local.domain_name}"]
  }
  statement {
    effect    = "Allow"
    actions   = ["s3:*"]
    resources = ["arn:aws:s3:::*${local.domain_name}/*"]
  }
}
76+
77+
78+
79+
locals {
  # Kubernetes access rules for the "developer" role.
  # NOTE(review): "exec" is not a standard Kubernetes RBAC verb — `kubectl exec`
  # is authorized via "create" on the pods/exec subresource. Confirm how the
  # consuming module maps these entries to RBAC rules.
  k8s_developer_access = [
    {
      verbs      = ["exec"]
      api_groups = [""]
      resources  = ["pods", "pods/exec", "pods/log", "pods/portforward"]
    }, {
      # NOTE(review): "deployments" live in the "apps" API group, not the core
      # ("") group — as written this rule would not grant deployment access.
      verbs      = ["get", "list", "watch"]
      api_groups = [""]
      resources  = ["deployments", "configmaps", "pods", "services", "endpoints"]
    }
  ]

  # Kubernetes access rules for the "operator" role (same API-group caveat for
  # "deployments" as above).
  k8s_operator_access = [
    {
      verbs      = ["exec", "create", "list", "get", "delete", "patch", "update"]
      api_groups = [""]
      resources  = ["deployments", "configmaps", "pods", "secrets", "services", "endpoints"]
    }
  ]
}
Lines changed: 63 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,63 @@
1+
# Terraform settings for the "shared" environment. Its state lives in a
# dedicated S3 bucket / DynamoDB lock table, created beforehand by
# `make apply-shared-remote-state`.
terraform {
  required_version = ">= 0.13"
  backend "s3" {
    bucket         = "<% .Name %>-shared-terraform-state"
    key            = "infrastructure/terraform/environments/shared/main"
    encrypt        = true
    region         = "<% index .Params `region` %>"
    dynamodb_table = "<% .Name %>-shared-terraform-state-locks"
  }
}

locals {
  project    = "<% .Name %>"
  region     = "<% index .Params `region` %>"
  account_id = "<% index .Params `accountId` %>"
}

provider "aws" {
  region              = local.region
  allowed_account_ids = [local.account_id]
}
22+
23+
# Instantiate the environment
locals {
  # Users configuration. Each entry creates one IAM user named
  # "<project>-<name>" and records the role(s) they hold per environment;
  # the examples below show the expected shape.
  users = [
    # {
    #   name = "dev1"
    #   roles = [
    #     { name = "developer", environments = ["stage", "prod"] }
    #   ]
    # }, {
    #   name = "devops1"
    #   roles = [
    #     { name = "developer", environments = ["stage", "prod"] },
    #     { name = "operator", environments = ["stage"] }
    #   ]
    # }, {
    #   name = "operator1"
    #   roles = [
    #     { name = "operator", environments = ["stage", "prod"] }
    #   ]
    # },
  ]
}

## Create users
resource "aws_iam_user" "access_user" {
  count = length(local.users)
  name  = "${local.project}-${local.users[count.index].name}"

  # One tag per role, e.g. `role:developer = "stage/prod"`, so role
  # membership is visible on the user itself.
  tags = {
    for r in local.users[count.index].roles : "role:${r.name}" => join("/", r.environments)
  }
}

output "iam_users" {
  value = aws_iam_user.access_user
}

# Consumed by each environment's root module via terraform_remote_state to
# assign users to per-environment roles.
output "user_role_mapping" {
  value = local.users
}

0 commit comments

Comments
 (0)