@@ -0,0 +1,100 @@
+export interface TestChatMessage {
+    role: string
+    content: string
+}
+
+export class TestChatCompletion {
+    messageList: Array<TestChatMessage>
+
+    constructor(messageList: Array<TestChatMessage>) {
+        this.messageList = messageList
+    }
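+
+    /**
+     * Streams a chat completion for the current messageList via server-sent events.
+     * The assistant reply is accumulated chunk by chunk into a new entry appended to
+     * messageList, so UI bindings can render the partial reply as it arrives.
+     * @param call optional callback invoked with `true` once the stream ends ("[DONE]").
+     */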
+    async createCompletionByStream(call?: (done: boolean) => void) {
+
+        let token = localStorage.getItem("token");
+        let bodyJson = {
+            "token": `Bearer ${token}`,
+            "messages": this.messageList,
+            "model": "gpt-3.5-turbo",
+            "temperature": 0.5,
+            "presence_penalty": 0,
+            "frequency_penalty": 0,
+            "top_p": 1,
+            "stream": true
+        };
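+
+        // POST the conversation to the streaming completions endpoint; "stream": true
+        // asks the server to reply with a server-sent-event stream of completion chunks.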
+        let response = await fetch("https://test.fmode.cn/api/apig/aigc/gpt/v1/chat/completions", {
+            "headers": {
+                "accept": "text/event-stream",
+                "sec-fetch-dest": "empty",
+                "sec-fetch-mode": "cors",
+                "sec-fetch-site": "same-site"
+            },
+            "referrer": "https://ai.fmode.cn/",
+            "referrerPolicy": "strict-origin-when-cross-origin",
+            "body": JSON.stringify(bodyJson),
+            "method": "POST",
+            "mode": "cors",
+            "credentials": "omit"
+        });
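+
+        // Read the SSE body incrementally: decode each chunk, append it to `buffer`,
+        // and process every complete "data: ..." line as soon as it arrives.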
+        let messageAiReply = ""
+        let messageIndex = this.messageList.length // slot where the streamed assistant reply is written
+        let reader = response.body?.getReader();
+        if (!reader) {
+            throw new Error("Failed to get the response reader.");
+        }
+
+        let decoder = new TextDecoder();
+        let buffer = "";
+
+        while (true) {
+            let { done, value } = await reader.read();
+            if (done) {
+                break;
+            }
+
+            // stream: true keeps multi-byte characters that span chunk boundaries intact
+            buffer += decoder.decode(value, { stream: true });
+
+            // Split the buffer by newlines to get individual messages
+            let messages = buffer.split("\n");
+
+            // Process each complete message (the last element may still be a partial line)
+            for (let i = 0; i < messages.length - 1; i++) {
+                let message = messages[i];
+
+                // Each line of the stream is an SSE event, e.g.:
+                /**
+                 * data: {"id":"chatcmpl-y2PLKqPDnwAFJIj2L5aqdH5TWK9Yv","object":"chat.completion.chunk","created":1696770162,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}]}
+                 * data: {"id":"chatcmpl-y2PLKqPDnwAFJIj2L5aqdH5TWK9Yv","object":"chat.completion.chunk","created":1696770162,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]}
+                 * data: [DONE]
+                 */
+                let dataText = message.replace("data: ", "")
+                if (dataText.startsWith("{")) {
+                    try {
+                        let dataJson = JSON.parse(dataText)
+                        console.log(dataJson)
+                        // Append this chunk's delta text to the accumulated reply
+                        messageAiReply += dataJson?.choices?.[0]?.delta?.content || ""
+                        this.messageList[messageIndex] = {
+                            role: "assistant",
+                            content: messageAiReply
+                        }
+                    } catch (err) {
+                        // Ignore lines that are not valid JSON (e.g. keep-alives or partial data)
+                    }
+                }
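+                // "data: [DONE]" marks the end of the stream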
+                if (dataText.startsWith("[")) {
+                    console.log(message)
+                    console.log("done")
+                    this.messageList[messageIndex] = {
+                        role: "assistant",
+                        content: messageAiReply
+                    }
+                    call && call(true)
+                    messageAiReply = ""
+                }
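+
+                // Note: the last element of `messages` (a possibly incomplete line) is left
+                // in the buffer so the next chunk can complete it.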
+                // Clear the processed message from the buffer
+                buffer = buffer.slice(message.length + 1);
+            }
+        }
+    }
+}
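+
+// Usage sketch (not part of the original file; assumes a valid "token" entry already
+// exists in localStorage and that the surrounding UI re-renders when messageList changes):
+//
+//     const messageList: Array<TestChatMessage> = [
+//         { role: "user", content: "Hello, who are you?" }
+//     ];
+//     const completion = new TestChatCompletion(messageList);
+//     await completion.createCompletionByStream((done) => {
+//         // Once done === true, messageList[1] holds the full assistant reply.
+//     });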