Hi,
I have a lambda function that makes a call to the external Azure API and return a JSON object. I am able to upload the same JSON object to S3 successfully but while reading the same file, getting an error about "errorType": "JSONDecodeError",
# Main Azure API Block
def get_subscription_assignment_roles(subscriptions,access_token):
try:
# print(f"get_subscription_assignment_roles for total {len(subscriptions)} subscriptions" )
assignment_roles = []
for subscription in subscriptions[:5]:
subscription_id = subscription['name']
url = f"https://management.azure.com/subscriptions/{subscription_id}/providers/Microsoft.Authorization/roleAssignments?api-version=2022-04-01"
# breare_toke = f"Bearer ${access_token}"
# print(breare_toke)
response = requests.get(url,headers={'Authorization': f'Bearer {access_token}'})
response.raise_for_status()
jsonResponse = response.json()
# print(f"get_subscription_assignment_roles.status_code: ${response.status_code}")
# print(f"get_subscription_assignment_roles.len: ",len(jsonResponse["value"]))
if(len(jsonResponse["value"]) > 0):
for assignment_role in jsonResponse["value"]:
if(assignment_role['properties']['principalType'] == "Group"):
assignment_roles.append(json.dumps({
'id':assignment_role['id'],
'principal_id':assignment_role['properties']['principalId'],
'account_id':subscription_id,
'role_definition_id':assignment_role['properties']['roleDefinitionId'],
'account_name':subscription['properties']['displayName'],
'created_on':assignment_role['properties']['createdOn'],
'updated_on':assignment_role['properties']['updatedOn']
}).encode('utf-8'))
return assignment_roles
except Exception as err:
print(f"get_subscription_assignment_roles.error: {err}")
raise err
# Function to upload file to S3 Bucket
def store_payload_into_bucket(assignment_roles):
# print("inside put_dynamodb_item")
try:
# print(f'store_payload_into_bucket: {assignment_roles}')
bucket_name = os.environ['permission_data_bucket']
today = date.today()
file_name = f"azure/datapayload_{today}.json" # platform/assignment_role_data_{datetime}
table_name = os.environ['dynamodb_id']
client = boto3.client('s3')
assignment_roles = str(assignment_roles)
try:
client.put_object(Bucket=bucket_name,Key=file_name,Body=assignment_roles)
return {
'statusCode':200,
'body': {'key':file_name,'bucket_id':bucket_name}
}
except Exception as err:
return {
'statusCode':500,
'body':f'Error: {str(err)}'
}
except Exception as err:
raise err
# Function to read the same file again:
def read_payload_from_bucket(event):
# print("inside put_dynamodb_item")
try:
print(f'read_payload_from_bucket: {event}')
bucket_name = os.environ['permission_data_bucket']
today = date.today()
file_name = f"azure/datapayload_{today}.json" # platform/assignment_role_data_{datetime}
table_name = os.environ['dynamodb_id']
client = boto3.client('s3')
response = client.get_object(Bucket=event['bucket_id'],Key=event['key'])
json_data = response['Body'].read()
parsed_data = json.loads(json_data)
print(f'Response data: {parsed_data}')
except Exception as err:
raise err
Here is the error. event tried decoding data before doing json.loads() but still no luck. can anyone help here.
{
"errorMessage": "Expecting value: line 1 column 2 (char 1)",
"errorType": "JSONDecodeError",
"stackTrace": [
" File \"/var/task/azure_get_assignment_role_data.py\", line 49, in lambda_handler\n read_payload_from_bucket(response['body'])\n",
" File \"/var/task/azure_get_assignment_role_data.py\", line 221, in read_payload_from_bucket\n raise err\n",
" File \"/var/task/azure_get_assignment_role_data.py\", line 217, in read_payload_from_bucket\n parsed_data = json.loads(json_data)\n",
" File \"/var/lang/lib/python3.8/json/__init__.py\", line 357, in loads\n return _default_decoder.decode(s)\n",
" File \"/var/lang/lib/python3.8/json/decoder.py\", line 337, in decode\n obj, end = self.raw_decode(s, idx=_w(s, 0).end())\n",
" File \"/var/lang/lib/python3.8/json/decoder.py\", line 355, in raw_decode\n raise JSONDecodeError(\"Expecting value\", s, err.value) from None\n"
]
}
@Nitin I am already returning json.dumps formatted data from the main function to store_payload_into_bucket.