Issue uploading a Lambda response to an S3 bucket and reading it back


Hi,

I have a Lambda function that calls an external Azure API and returns a JSON object. I am able to upload that JSON object to S3 successfully, but when reading the same file back I get an error with "errorType": "JSONDecodeError".

import json
import os
from datetime import date

import boto3
import requests


# Main Azure API block
def get_subscription_assignment_roles(subscriptions, access_token):
    try:
        # print(f"get_subscription_assignment_roles for total {len(subscriptions)} subscriptions" )
        assignment_roles = []
        for subscription in subscriptions[:5]:
            subscription_id = subscription['name']
            url = f"https://management.azure.com/subscriptions/{subscription_id}/providers/Microsoft.Authorization/roleAssignments?api-version=2022-04-01"
            # breare_toke = f"Bearer ${access_token}"
            # print(breare_toke)
            response = requests.get(url, headers={'Authorization': f'Bearer {access_token}'})
            response.raise_for_status()
            jsonResponse = response.json()
            # print(f"get_subscription_assignment_roles.status_code: ${response.status_code}")
            # print(f"get_subscription_assignment_roles.len: ",len(jsonResponse["value"]))
            if len(jsonResponse["value"]) > 0:
                for assignment_role in jsonResponse["value"]:
                    if assignment_role['properties']['principalType'] == "Group":
                        assignment_roles.append(json.dumps({
                            'id': assignment_role['id'],
                            'principal_id': assignment_role['properties']['principalId'],
                            'account_id': subscription_id,
                            'role_definition_id': assignment_role['properties']['roleDefinitionId'],
                            'account_name': subscription['properties']['displayName'],
                            'created_on': assignment_role['properties']['createdOn'],
                            'updated_on': assignment_role['properties']['updatedOn']
                        }).encode('utf-8'))
        
        return assignment_roles
    except Exception as err:
        print(f"get_subscription_assignment_roles.error: {err}")
        raise err


# Function to upload file to S3 Bucket
def store_payload_into_bucket(assignment_roles):
    # print("inside put_dynamodb_item")
    try:
        # print(f'store_payload_into_bucket: {assignment_roles}')
        bucket_name = os.environ['permission_data_bucket']
        today = date.today()
        file_name = f"azure/datapayload_{today}.json" # platform/assignment_role_data_{datetime}
        table_name = os.environ['dynamodb_id']
        client = boto3.client('s3')
        assignment_roles = str(assignment_roles)
        try:
            client.put_object(Bucket=bucket_name, Key=file_name, Body=assignment_roles)
            return {
                'statusCode': 200,
                'body': {'key': file_name, 'bucket_id': bucket_name}
            }
        except Exception as err:
            return {
                'statusCode': 500,
                'body': f'Error: {str(err)}'
            }
    except Exception as err:
        raise err


# Function to read the same file again:
def read_payload_from_bucket(event):
    # print("inside put_dynamodb_item")
    try:
        print(f'read_payload_from_bucket: {event}')
        bucket_name = os.environ['permission_data_bucket']
        today = date.today()
        file_name = f"azure/datapayload_{today}.json" # platform/assignment_role_data_{datetime}
        table_name = os.environ['dynamodb_id']
        client = boto3.client('s3')
        response = client.get_object(Bucket=event['bucket_id'], Key=event['key'])
        json_data = response['Body'].read()
        parsed_data = json.loads(json_data)
        print(f'Response data: {parsed_data}')

    except Exception as err:
        raise err


Here is the error. I even tried decoding the data before doing json.loads() (see the sketch after the traceback below), but still no luck. Can anyone help here?

{
  "errorMessage": "Expecting value: line 1 column 2 (char 1)",
  "errorType": "JSONDecodeError",
  "stackTrace": [
    "  File \"/var/task/azure_get_assignment_role_data.py\", line 49, in lambda_handler\n    read_payload_from_bucket(response['body'])\n",
    "  File \"/var/task/azure_get_assignment_role_data.py\", line 221, in read_payload_from_bucket\n    raise err\n",
    "  File \"/var/task/azure_get_assignment_role_data.py\", line 217, in read_payload_from_bucket\n    parsed_data = json.loads(json_data)\n",
    "  File \"/var/lang/lib/python3.8/json/__init__.py\", line 357, in loads\n    return _default_decoder.decode(s)\n",
    "  File \"/var/lang/lib/python3.8/json/decoder.py\", line 337, in decode\n    obj, end = self.raw_decode(s, idx=_w(s, 0).end())\n",
    "  File \"/var/lang/lib/python3.8/json/decoder.py\", line 355, in raw_decode\n    raise JSONDecodeError(\"Expecting value\", s, err.value) from None\n"
  ]
}
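
This is roughly the decoding variant I tried inside read_payload_from_bucket (a sketch, replacing the read/parse lines):

        json_data = response['Body'].read().decode('utf-8')  # decode the bytes to str first
        parsed_data = json.loads(json_data)                  # still raises the same JSONDecodeError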
2 Answers

Maybe try uploading in JSON format; see if this change works:

# Function to upload file to S3 Bucket
def store_payload_into_bucket(assignment_roles):
    # print("inside put_dynamodb_item")
    try:
        # print(f'store_payload_into_bucket: {assignment_roles}')
        bucket_name = os.environ['permission_data_bucket']
        today = date.today()
        file_name = f"azure/datapayload_{today}.json" # platform/assignment_role_data_{datetime}
        table_name = os.environ['dynamodb_id']
        client = boto3.client('s3')
        # assignment_roles = str(assignment_roles)
        # Convert assignment_roles list to a JSON string
        json_data = json.dumps(assignment_roles)
        try:
            # Save the JSON data with content type set to 'application/json'
            client.put_object(Bucket=bucket_name, Key=file_name, Body=json_data, ContentType='application/json')
            return {
                'statusCode': 200,
                'body': {'key': file_name, 'bucket_id': bucket_name}
            }
        except Exception as err:
            return {
                'statusCode': 500,
                'body': f'Error: {str(err)}'
            }
    except Exception as err:
        raise err
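
One caveat: this assumes the items in assignment_roles are JSON-serializable values (plain dicts, strings, and so on). If each item has already been through json.dumps(...).encode('utf-8') as in the question's code, json.dumps here will raise a TypeError (bytes are not JSON serializable), so that per-item serialization would need to be removed as well.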
answered 9 months ago
  • @Nitin I am already returning json.dumps-formatted data from the main function to store_payload_into_bucket.

Accepted Answer

I think that where you do assignment_roles = str(assignment_roles) you should be doing json.dumps instead of str. While you can convert most data types in Python to a string, in this case it won't encode the data as valid JSON.
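
A minimal sketch of the difference, assuming each element was built with json.dumps(...).encode('utf-8') as in the question:

import json

# What the current code effectively stores: the repr of a list of bytes objects
assignment_roles = [json.dumps({'id': 'a'}).encode('utf-8')]
body = str(assignment_roles)
print(body)   # [b'{"id": "a"}'] - a Python list repr, not JSON
try:
    json.loads(body)
except json.JSONDecodeError as err:
    print(err)   # Expecting value: line 1 column 2 (char 1) - the b prefix

# Instead, collect plain dicts and serialize the whole list once at upload time
assignment_roles = [{'id': 'a'}]
body = json.dumps(assignment_roles)
print(json.loads(body))   # [{'id': 'a'}] - round-trips cleanly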

AWS
EXPERT
answered 9 months ago
  • @Brettski-AWS I am already returning json.dumps-formatted data from the main function to store_payload_into_bucket.

  • Then you don't need to do a str() on it - you might find that it is putting additional quotes around the data. Try downloading the raw file from S3 using the CLI (or inspecting it from Python, as below) - I think you will see the slightest of changes there.
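
For example, something like this (the bucket name and key are placeholders for your actual values):

import boto3

# Hypothetical inspection snippet - print the first bytes of the raw object
s3 = boto3.client('s3')
obj = s3.get_object(Bucket='your-permission-data-bucket', Key='azure/datapayload_2023-01-01.json')
print(obj['Body'].read()[:200])   # output starting with [b'{ confirms a str()-ed list was stored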

  • Actually, that worked. I am still wondering why it did not before, since I was returning json.dumps(data), but anyway, thank you so much for your help.
