@@ -40,8 +40,10 @@ public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> con
         var contentHooks = _services.GetServices<IContentGeneratingHook>().ToList();

         // Before chat completion hook
-        Task.WaitAll(contentHooks.Select(hook =>
-            hook.BeforeGenerating(agent, conversations)).ToArray());
+        foreach (var hook in contentHooks)
+        {
+            hook.BeforeGenerating(agent, conversations).Wait();
+        }

         var client = ProviderHelper.GetClient(_model, _settings);
         var (prompt, chatCompletionsOptions) = PrepareOptions(agent, conversations);
@@ -74,14 +76,16 @@ public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> con
         }

         // After chat completion hook
-        Task.WaitAll(contentHooks.Select(hook =>
+        foreach (var hook in contentHooks)
+        {
             hook.AfterGenerated(responseMessage, new TokenStatsModel
             {
                 Prompt = prompt,
                 Model = _model,
                 PromptCount = response.Value.Usage.PromptTokens,
                 CompletionCount = response.Value.Usage.CompletionTokens
-            })).ToArray());
+            }).Wait();
+        }

         return responseMessage;
     }
@@ -94,8 +98,10 @@ public async Task<bool> GetChatCompletionsAsync(Agent agent,
         var hooks = _services.GetServices<IContentGeneratingHook>().ToList();

         // Before chat completion hook
-        Task.WaitAll(hooks.Select(hook =>
-            hook.BeforeGenerating(agent, conversations)).ToArray());
+        foreach (var hook in hooks)
+        {
+            await hook.BeforeGenerating(agent, conversations);
+        }

         var client = ProviderHelper.GetClient(_model, _settings);
         var (prompt, chatCompletionsOptions) = PrepareOptions(agent, conversations);
@@ -111,14 +117,16 @@ public async Task<bool> GetChatCompletionsAsync(Agent agent,
         };

         // After chat completion hook
-        Task.WaitAll(hooks.Select(hook =>
-            hook.AfterGenerated(msg, new TokenStatsModel
+        foreach (var hook in hooks)
+        {
+            await hook.AfterGenerated(msg, new TokenStatsModel
             {
                 Prompt = prompt,
                 Model = _model,
                 PromptCount = response.Value.Usage.PromptTokens,
                 CompletionCount = response.Value.Usage.CompletionTokens
-            })).ToArray());
+            });
+        }

         if (choice.FinishReason == CompletionsFinishReason.FunctionCall)
         {
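
For context, the change swaps a blocking `Task.WaitAll` over a LINQ `Select` for a plain loop: the synchronous `GetChatCompletions` path now calls `.Wait()` on each hook, while the async `GetChatCompletionsAsync` path awaits each hook in turn. Below is a minimal self-contained sketch of the two invocation patterns; the `IGeneratingHook` interface, `LoggingHook` class, and string-based parameters are simplified stand-ins for the repository's `IContentGeneratingHook`, `Agent`, and `RoleDialogModel` types, not the actual API.

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

// Simplified stand-in for the hook interface; the real IContentGeneratingHook
// takes Agent/RoleDialogModel parameters rather than strings.
public interface IGeneratingHook
{
    Task BeforeGenerating(string agent, IList<string> conversations);
}

public class LoggingHook : IGeneratingHook
{
    public async Task BeforeGenerating(string agent, IList<string> conversations)
    {
        await Task.Delay(10); // simulate async hook work
        Console.WriteLine($"BeforeGenerating: {agent} ({conversations.Count} message(s))");
    }
}

public static class HookInvocationDemo
{
    // Old pattern: start every hook task at once and block until all complete.
    public static void RunBlocking(IEnumerable<IGeneratingHook> hooks, string agent, IList<string> conversations)
    {
        Task.WaitAll(hooks.Select(h => h.BeforeGenerating(agent, conversations)).ToArray());
    }

    // New pattern (async path): await each hook one at a time, preserving
    // registration order and keeping the call fully asynchronous.
    public static async Task RunSequentialAsync(IEnumerable<IGeneratingHook> hooks, string agent, IList<string> conversations)
    {
        foreach (var hook in hooks)
        {
            await hook.BeforeGenerating(agent, conversations);
        }
    }

    public static async Task Main()
    {
        var hooks = new List<IGeneratingHook> { new LoggingHook(), new LoggingHook() };
        var conversations = new List<string> { "hello" };

        RunBlocking(hooks, "agent-1", conversations);              // what the old code did
        await RunSequentialAsync(hooks, "agent-1", conversations); // what the new async path does
    }
}
```

The sequential form also surfaces a failing hook's exception directly instead of wrapping it in the `AggregateException` that `Task.WaitAll` throws.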