fetchEC2VolumeDetailsUsingBoto3
import boto3
import csv
import logging
from botocore.exceptions import ClientError
##########
# 1. Add EC2 read-only access to the Lambda IAM role
# 2. Add S3 write access to the Lambda IAM role (for the upload)
# 3. Use the Lambda Python 3.8 runtime
##########
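# A minimal sketch of the policy statements the role needs (an assumption based
# on the API calls in this script; the bucket ARN matches the bucket used below,
# tighten it as needed):
# {
#   "Version": "2012-10-17",
#   "Statement": [
#     {"Effect": "Allow", "Action": ["ec2:DescribeInstances", "ec2:DescribeVolumes"], "Resource": "*"},
#     {"Effect": "Allow", "Action": "s3:PutObject", "Resource": "arn:aws:s3:::devaansh-webscale/*"}
#   ]
# }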
### For local testing, set profilename below and enable the profile-based session inside lambda_handler; switch back to the plain session before deploying to Lambda. ###
profilename = '<>'
aws_Acct = '/tmp/temp.csv'
#regions = ['us-east-1', 'us-east-2', 'us-west-1', 'us-west-2']
regions = ['us-east-1']
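# Optional sketch: discover every enabled region at runtime instead of the
# hardcoded list above (assumes the caller may invoke ec2:DescribeRegions):
# regions = [r['RegionName'] for r in boto3.client('ec2').describe_regions()['Regions']]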
# Name tag: return the value of the given tag on an instance, or '' if absent.
def get_tag(each, tag_name):
    if 'Tags' in each:
        for tag in each['Tags']:
            if tag['Key'] == tag_name:
                return tag['Value']
    return ''
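# Usage example (illustrative value): get_tag(instance, 'Name') -> 'web-server-01';
# returns '' when the instance has no Name tag.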
# Volumes: gather the id, size, and type of every volume attached to an instance.
def get_vol(each, ec2):
    resultVol = {
        "vol_id": "",
        "vol_size": "",
        "vol_type": ""
    }
    resp = ec2.describe_volumes(
        Filters=[{'Name': 'attachment.instance-id', 'Values': [each['InstanceId']]}]
    )
    for volume in resp["Volumes"]:
        resultVol['vol_id'] += str(volume["VolumeId"]) + "\n"
        resultVol['vol_size'] += str(volume["Size"]) + "\n"
        resultVol['vol_type'] += str(volume["VolumeType"]) + "\n"
    return resultVol
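# Example return for an instance with two attached volumes (illustrative values):
# {'vol_id': 'vol-0aaa\nvol-0bbb\n', 'vol_size': '8\n100\n', 'vol_type': 'gp2\ngp3\n'}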
# Security groups: gather the id and name of every security group on an instance.
def sec_gp(each, ec2):
    resultSG = {
        "sg_id": "",
        "sg_name": ""
    }
    for sg in each['SecurityGroups']:
        resultSG['sg_id'] += str(sg["GroupId"]) + "\n"
        resultSG['sg_name'] += str(sg["GroupName"]) + "\n"
    return resultSG
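# Example return (illustrative values): {'sg_id': 'sg-0123\n', 'sg_name': 'default\n'}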
def lambda_handler(event, context):
    try:
        logging.basicConfig(level=logging.INFO)
        logging.info('EC2 Inventory details')
        result = []  # local, so warm Lambda containers don't accumulate rows across invocations
        for region in regions:
            # Enable the profile-based session for local testing; keep the plain session for Lambda.
            #session = boto3.Session(profile_name=profilename, region_name=region)
            session = boto3.Session(region_name=region)
            ec2 = session.client('ec2')
            response = ec2.describe_instances()
            for item in response["Reservations"]:
                for each in item['Instances']:
                    volsss = get_vol(each, ec2)
                    sgss = sec_gp(each, ec2)  # unused until the SG columns below are re-enabled
                    #print(sgss)
                    result.append({
                        #'ImageId': each.get('ImageId', ''),
                        'InstanceType': each.get('InstanceType', ''),
                        #'PublicIp': each.get('PublicIpAddress', ''),
                        #'PrivateIp': each.get('PrivateIpAddress', ''),
                        #'InstanceId': each.get('InstanceId', ''),
                        #'SubnetId': each.get('SubnetId', ''),
                        #'VpcId': each.get('VpcId', ''),
                        'InstanceName': get_tag(each, 'Name'),
                        'volume.size': volsss['vol_size'],
                        'volume.id': volsss['vol_id'],
                        'volume.type': volsss['vol_type']
                        #'DeleteOnTermination': each.get('DeleteOnTermination', ''),
                        #'SGGroupName': sgss['sg_name'],
                        #'SGGroupID': sgss['sg_id'],
                        #'State': each['State']['Name'],
                        #'Region': each['Placement']['AvailabilityZone']
                    })
        # Write to a CSV file; DictWriter fills columns missing from result with ''.
        header = ['ImageId', 'InstanceType', 'InstanceId', 'InstanceName', 'PublicIp', 'PrivateIp', 'Region', 'State', 'volume.id', 'volume.size', 'volume.type', 'SubnetId', 'VpcId', 'SGGroupName', 'SGGroupID', 'DeleteOnTermination']
        with open(aws_Acct, 'w', newline='') as file:
            writer = csv.DictWriter(file, fieldnames=header)
            writer.writeheader()
            writer.writerows(result)
        file_name = aws_Acct
        bucket = "devaansh-webscale"
        object_name = "temp.csv"
        # Upload the file to the S3 bucket (upload_file returns None; it raises on failure).
        s3_client = boto3.client('s3')
        try:
            s3_client.upload_file(file_name, bucket, object_name)
            logging.info('Upload to S3 done')
        except ClientError as e:
            logging.error(e)
            return False
    except Exception as e:
        logging.error(
            'EC2 inventory with uncaught exception: {}'.format(e)
        )

if __name__ == '__main__':
    lambda_handler(None, None)
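
# Optional sketch, not wired into lambda_handler above: describe_instances
# returns results in pages, so accounts with many instances need NextToken
# handling. A boto3 paginator handles that transparently and could replace the
# manual Reservations loop:
def iter_instances(ec2):
    """Yield every instance in the region, following pagination."""
    paginator = ec2.get_paginator('describe_instances')
    for page in paginator.paginate():
        for reservation in page['Reservations']:
            for instance in reservation['Instances']:
                yield instance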