# d0edb77bbf20cb4c439e5e510fc474c981ac4ab2
# awsible / roles / aws-management-infrastructure / tasks / main.yml
---
# Fail fast if the required configuration variables are unset, with an
# explicit message instead of a bare assertion failure.
- name: check required vars
  assert:
    that:
      - MANAGEMENT_SUBNET != ''
      - DEFAULT_AMI != ''
    msg: "MANAGEMENT_SUBNET and DEFAULT_AMI must be set"
  tags: ['check_vars']

# Security group for the internal management ELB.
# NOTE(review): SSH (22/tcp) is open to 0.0.0.0/0 on a group described as
# internal — confirm this should not be restricted to the VPC CIDR.
- name: sg management-elb
  ec2_group:
    name: management-elb
    description: "sg for internal elb for monitoring management"
    state: present
    vpc_id: "{{ vpc.vpc.id }}"
    region: "{{ vpc_region }}"
    purge_rules: false
    rules:
      - proto: tcp
        from_port: 22
        to_port: 22
        cidr_ip: "0.0.0.0/0"
    rules_egress:
      - proto: all
        cidr_ip: "0.0.0.0/0"

# Security group applied to management instances: allows all traffic from
# members of the group itself and from the management ELB group.
- name: sg management
  ec2_group:
    name: management
    description: "sg for management"
    state: present
    vpc_id: "{{ vpc.vpc.id }}"
    region: "{{ vpc_region }}"
    purge_rules: false
    rules:
      - proto: all
        group_name: management
      - proto: all
        group_name: management-elb
  register: sg_management

# Internal ELB forwarding SSH (22/tcp) to the management instances.
- name: elb management-int-elb
  ec2_elb_lb:
    region: "{{ vpc_region }}"
    state: present
    name: management-int-elb
    # was `yes` — use canonical YAML booleans (yamllint truthy)
    cross_az_load_balancing: true
    scheme: internal
    subnets: "{{ MANAGEMENT_SUBNET }}"
    security_group_names:
      - management-elb
    listeners:
      - protocol: tcp
        load_balancer_port: 22
        instance_port: 22
    health_check:
      ping_protocol: tcp
      ping_port: 22
      response_timeout: 5
      interval: 30
      unhealthy_threshold: 2
      healthy_threshold: 2

# Upload the management SSH public key read from the local keys/ directory.
- name: management key
  ec2_key:
    region: "{{ vpc_region }}"
    name: "{{ MANAGEMENT_KEY_NAME }}"
    key_material: "{{ item }}"
  with_file: "keys/{{ MANAGEMENT_KEY_NAME }}.pub"

# IAM role assumed by management instances; policy attachment is handled
# separately (see the commented iam_role block below this task).
- name: management iam
  iam:
    name: management
    iam_type: role
    state: present

# Attaching managed policies to the role requires the iam_role module,
# which is only available in Ansible 2.3+:
# - name: management role policies
#   iam_role:
#     name: management
#     state: present
#     managed_policy:
#       - arn:aws:iam::{{ ACCT_ID }}:policy/base-policy
#       - arn:aws:iam::{{ ACCT_ID }}:policy/management-policy

# Launch configuration for management instances.
# LCs are immutable — bump the -NNNN suffix in `name` when changing the AMI.
- name: management lc
  ec2_lc:
    region: "{{ vpc_region }}"
    name: management-0000
    image_id: "{{ DEFAULT_AMI }}"
    key_name: "{{ MANAGEMENT_KEY_NAME }}"
    instance_profile_name: management
    security_groups:
      - "{{ sg_management.group_id }}"
      - "{{ sg_ssh.group_id }}"
    # Overridable instance size; defaults to the previously hard-coded value.
    instance_type: "{{ MANAGEMENT_INSTANCE_TYPE | default('m4.large') }}"
    volumes:
      # setting the root volume seems to prevent instances from launching
      # - device_name: /dev/sda1
      #   volume_size: 8
      #   volume_type: gp2
      #   delete_on_termination: true
      - device_name: /dev/sdb
        ephemeral: ephemeral0
      - device_name: /dev/sdc
        ephemeral: ephemeral1
      - device_name: /dev/sdd
        ephemeral: ephemeral2
      - device_name: /dev/sde
        ephemeral: ephemeral3
  register: mgmt_lc

# Single-instance ASG that keeps exactly one management host alive behind
# the internal ELB and notifies the management SNS topic on launch.
- name: management asg
  ec2_asg:
    region: "{{ vpc_region }}"
    name: management
    min_size: 1
    max_size: 1
    desired_capacity: 1
    default_cooldown: 10
    vpc_zone_identifier: "{{ MANAGEMENT_SUBNET }}"
    # default('checkmode') keeps check-mode runs from failing when the LC
    # task was skipped and mgmt_lc has no `name` attribute.
    launch_config_name: "{{ mgmt_lc.name|default('checkmode') }}"
    notification_topic: "{{ management_topic.sns_arn }}"
    notification_types:
      - autoscaling:EC2_INSTANCE_LAUNCH
    load_balancers:
      - management-int-elb
    tags:
      # was `yes` throughout — use canonical YAML booleans (yamllint truthy)
      - account: "{{ ACCT_NAME }}"
        propagate_at_launch: true
      - module: management
        propagate_at_launch: true
      - stack: ""
        propagate_at_launch: true
      - country: ""
        propagate_at_launch: true
      - phase: dev
        propagate_at_launch: true

# Placeholder reminder until role-policy attachment is automated
# (see the commented iam_role task above, Ansible 2.3+ only).
- name: not implemented yet
  debug:
    msg: |
      attach policies to iam role