@@ -12,6 +12,7 @@ import { getCacheConfig, getOriginConfig } from '../storage/config'
 import { sendResponse } from '../utils'
 import { isNotEmptyString } from '../utils/is'
 import type { ChatContext, ChatGPTUnofficialProxyAPIOptions, ModelConfig } from '../types'
+import { getChatByMessageId } from '../storage/mongo'
 import type { RequestOptions } from './types'

 const { HttpsProxyAgent } = httpsProxyAgent
@@ -45,6 +46,8 @@ export async function initApi() {
       apiKey: config.apiKey,
       completionParams: { model },
       debug: !config.apiDisableDebug,
+      messageStore: undefined,
+      getMessageById,
     }
     // increase max token limit if use gpt-4
     if (model.toLowerCase().includes('gpt-4')) {
@@ -261,6 +264,33 @@ async function setupProxy(options: ChatGPTAPIOptions | ChatGPTUnofficialProxyAPI
   }
 }

+async function getMessageById(id: string): Promise<ChatMessage | undefined> {
+  const isPrompt = id.startsWith('prompt_')
+  const chatInfo = await getChatByMessageId(isPrompt ? id.substring(7) : id)
+
+  if (chatInfo) {
+    if (isPrompt) { // prompt
+      return {
+        id,
+        conversationId: chatInfo.options.conversationId,
+        parentMessageId: chatInfo.options.parentMessageId,
+        role: 'user',
+        text: chatInfo.prompt,
+      }
+    }
+    else {
+      return { // completion
+        id,
+        conversationId: chatInfo.options.conversationId,
+        parentMessageId: `prompt_${id}`, // parent message is the prompt
+        role: 'assistant',
+        text: chatInfo.response,
+      }
+    }
+  }
+  else { return undefined }
+}
+
 initApi()

 export type { ChatContext, ChatMessage }
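The reason getMessageById is wired into the ChatGPTAPI options above is that the chatgpt package rebuilds conversation context by following parentMessageId links through this callback, so a Mongo-backed lookup lets chat history survive a server restart instead of living only in the default in-memory messageStore. A rough illustration of that traversal, reusing the getMessageById function and ChatMessage type from the file above; this is a sketch of the idea, not the library's actual code, and collectHistory is a made-up name:

// Sketch only: walk the parentMessageId chain via the getMessageById callback
// defined in the diff above, oldest message first.
async function collectHistory(latestMessageId: string): Promise<ChatMessage[]> {
  const history: ChatMessage[] = []
  let cursor: string | undefined = latestMessageId
  while (cursor) {
    const message = await getMessageById(cursor)
    if (!message)
      break
    history.unshift(message) // prepend so the result reads oldest to newest
    cursor = message.parentMessageId // follow the chain toward the first prompt
  }
  return history
}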
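For context, the diff imports getChatByMessageId from '../storage/mongo' without showing it. Below is a minimal sketch of what such a helper could look like, assuming one stored document per prompt/response exchange carrying the fields the diff reads (prompt, response, options.messageId, options.conversationId, options.parentMessageId); the collection name, connection setup, and MONGODB_URL variable are assumptions, not the project's confirmed schema:

// Hypothetical sketch of '../storage/mongo'; field and collection names are assumed.
import { MongoClient } from 'mongodb'

interface ChatInfo {
  prompt: string
  response: string
  options: {
    messageId: string
    conversationId?: string
    parentMessageId?: string
  }
}

const client = new MongoClient(process.env.MONGODB_URL ?? 'mongodb://localhost:27017')
const chatCol = client.db('chatgpt').collection<ChatInfo>('chat')

export async function getChatByMessageId(messageId: string) {
  // One document holds both halves of an exchange; getMessageById in the diff above
  // maps it to either a 'user' or an 'assistant' ChatMessage based on the id prefix.
  return await chatCol.findOne({ 'options.messageId': messageId })
}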