My Jupyter notebook (.ipynb) with the following code works perfectly and returns the expected results:
import boto3

# Bedrock Agent Runtime client (assumed to be created in an earlier notebook cell,
# along with kb_id, region_name, and model_id used further below)
bedrock_agent_client = boto3.client("bedrock-agent-runtime", region_name="us-east-1")

# Predefined variables
MX_TKN = 4000
TMPRT = 0.1
TOP_K = 1

# Defining the function
def retrieveAndGenerate(
    TMPRT,
    MX_TKN,
    TOP_K,
    input: str,
    kbId: str,
    region: str = "us-east-1",
    sessionId: str = None,
    mdl_id: str = "anthropic.claude-v2:1",
):
    model_arn = f"arn:aws:bedrock:{region}::foundation-model/{mdl_id}"
    if sessionId:
        return bedrock_agent_client.retrieve_and_generate(
            input={"text": input},
            retrieveAndGenerateConfiguration={
                "type": "KNOWLEDGE_BASE",
                "knowledgeBaseConfiguration": {
                    "knowledgeBaseId": kbId,
                    "modelArn": model_arn,
                    "generationConfiguration": {
                        "inferenceConfig": {
                            "textInferenceConfig": {
                                "temperature": TMPRT,
                                "topP": 0.5,
                                "maxTokens": MX_TKN,
                            }
                        },
                        "additionalModelRequestFields": {
                            "top_k": TOP_K
                        },
                    },
                },
            },
            sessionId=sessionId,
        )
    else:
        return bedrock_agent_client.retrieve_and_generate(
            input={"text": input},
            retrieveAndGenerateConfiguration={
                "type": "KNOWLEDGE_BASE",
                "knowledgeBaseConfiguration": {
                    "knowledgeBaseId": kbId,
                    "modelArn": model_arn,
                    "generationConfiguration": {
                        "inferenceConfig": {
                            "textInferenceConfig": {
                                "temperature": TMPRT,
                                "topP": 0.5,
                                "maxTokens": MX_TKN,
                            }
                        },
                        "additionalModelRequestFields": {
                            "top_k": TOP_K
                        },
                    },
                },
            },
        )
# List of questions:
questions = [
    "Give me a list of 100 Injection Molders company names by injection molding sales and rank?",
    "Give me the list of TOP Profile Extruders companies?",
    "Give me the list of TOP THERMOFORMERS companies?",
    "Give me a list of top 5 Injection Molders and their molding sales in $?",
]

for counter, query in enumerate(questions, start=1):
    Q_Num = "Question - " + str(counter)
    print("question number:", Q_Num)
    print("Question being processed: ", query)
    question = query
    # Calling the function (kb_id, region_name, and model_id come from earlier cells)
    response = retrieveAndGenerate(TMPRT, MX_TKN, TOP_K, question, kb_id, region_name, '', model_id)
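For reference, each response from retrieve_and_generate carries the generated answer under output.text and a sessionId that can be reused for follow-up turns. A minimal sketch of reading one response object (field names taken from the Bedrock Agent Runtime response shape):

# Minimal sketch: pull the generated answer and session id out of a response
answer = response["output"]["text"]
session_id = response.get("sessionId")
print("Answer:", answer)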
But when I save the Jupyter notebook as a .py file and execute it, I get the following error. I tried addressing the problem by changing the position of the inference parameters in a number of ways, but I was unable to solve it, so I need your help.
ParamValidationError: Parameter validation failed:
Unknown parameter in retrieveAndGenerateConfiguration.knowledgeBaseConfiguration.generationConfiguration: "inferenceConfig", must be one of: promptTemplate
Unknown parameter in retrieveAndGenerateConfiguration.knowledgeBaseConfiguration.generationConfiguration: "additionalModelRequestFields", must be one of: promptTemplate
Thanks in advance for your help.
Regards,
SSV
Excellent, the problem is resolved by upgrading to the latest boto3 version. Thanks a ton, Riku Kobayashi.
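For anyone who hits the same ParamValidationError: the notebook kernel and the plain Python interpreter can easily be loading different boto3/botocore installs, and the inferenceConfig / additionalModelRequestFields fields are only recognized by newer releases. A quick sketch to confirm and fix the mismatch (run it in both environments):

import boto3
import botocore

# Print the versions the current interpreter actually loads;
# compare the output from the notebook kernel and from `python your_script.py`
print("boto3:", boto3.__version__)
print("botocore:", botocore.__version__)

# If the .py run shows an older version, upgrade it for that interpreter:
#   python -m pip install --upgrade boto3 botocore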