@@ -33,18 +33,15 @@ exports.handler = awslambda.streamifyResponse(
3333 }
3434 } ;
3535
36- const
36+ const
3737 maxTokens = 2500 ,
3838 temperature = .7 ,
3939 topP = 1 ,
40- stopSequences = [ "\n\nHuman:" ] ,
41- anthropicVersion = "bedrock-2023-05-31" ,
42- modelId = 'anthropic.claude-v2' ,
40+ anthropicVersion = "bedrock-2023-05-31" ,
41+ modelId = 'anthropic.claude-3-haiku-20240307-v1:0' ,
4342 contentType = 'application/json' ,
4443 accept = '*/*' ;
4544
46- const formattedPrompt = `Human: ${ prompt } \n\nAssistant:`
47-
4845 try {
4946 responseStream = awslambda . HttpResponseStream . from ( responseStream , httpResponseMetadata ) ;
5047
@@ -58,12 +55,16 @@ exports.handler = awslambda.streamifyResponse(
5855 } ) ;
5956
6057 const llmRequestBody = {
61- prompt : formattedPrompt ,
62- max_tokens_to_sample : maxTokens ,
58+ max_tokens : maxTokens ,
59+ messages : [
60+ {
61+ role : "user" ,
62+ content : prompt
63+ }
64+ ] ,
6365 temperature,
64- top_p : topP ,
65- stop_sequences : stopSequences ,
66- anthropic_version : anthropicVersion
66+ top_p : topP ,
67+ anthropic_version : anthropicVersion ,
6768 } ;
6869
6970 const params = {
@@ -81,19 +82,21 @@ exports.handler = awslambda.streamifyResponse(
8182 responseStream = awslambda . HttpResponseStream . from ( responseStream , httpResponseMetadata ) ;
8283 const chunks = [ ]
8384 for await ( const value of actualStream ) {
84- const jsonString = new TextDecoder ( ) . decode ( value . body ) ; // body is a Uint8Array. jsonString->'{"bytes":"eyJjb21wbGV0aW9uIjoiIEkiLCJzdG9wX3JlYXNvbiI6bnVsbH0="}'
85- const base64encoded = JSON . parse ( jsonString ) . bytes ; // base64 encoded string.
86- const decodedString = Buffer . from ( base64encoded , "base64" ) . toString (
87- "utf-8"
88- ) ;
85+ const jsonString = new TextDecoder ( ) . decode ( value . body ) ;
86+ const base64encoded = JSON . parse ( jsonString ) . bytes ;
87+ const decodedString = Buffer . from ( base64encoded , "base64" ) . toString ( "utf-8" ) ;
88+
8989 try {
90- const streamingCompletion = JSON . parse ( decodedString ) . completion ;
91- chunks . push ( streamingCompletion )
92- responseStream . write ( streamingCompletion )
90+ const chunk = JSON . parse ( decodedString ) ;
91+ if ( chunk . type === 'content_block_delta' && chunk . delta ?. text ) {
92+ const text = chunk . delta . text ;
93+ chunks . push ( text ) ;
94+ responseStream . write ( text ) ;
95+ }
9396 } catch ( error ) {
9497 console . error ( error ) ;
9598 responseStream . write ( null ) ;
96- responseStream . end ( )
99+ responseStream . end ( ) ;
97100 }
98101 }
99102 console . log ( "stream ended: " , chunks . join ( '' ) )
0 commit comments