zhangqian
2024-11-22 165d75b97916335ec7b5046f7c476e494a12df51
app/service/basic.py
@@ -1,4 +1,8 @@
import json
import httpx
from Log import logger
class BasicService:
@@ -10,10 +14,7 @@
        if response.status_code not in [200, 201]:
            raise Exception(f"Failed to fetch data from API: {response.text}")
        response_data = response.json()
        status_code = response_data.get("status_code", 0)
        if status_code != 200:
            raise Exception(f"Failed to fetch data from API: {response.text}")
        return response_data.get("data", {})
        return response_data
    async def download_from_url(self, url, params=None):
        async with httpx.AsyncClient() as client:
@@ -43,12 +44,11 @@
        url = f"{self.base_url}/exceltalk/download/excel"
        return await self.download_from_url(url, params={'excel_name': file_id})
    # NOTE(review): this region is an unresolved diff -- two signatures of
    # excel_talk_upload appear back to back (old: filename/file_content,
    # new: files), a '@@' hunk header survives mid-body, and both the awaited
    # and non-awaited _check_response return lines are present. Code left
    # byte-identical; resolve against the repository's current revision.
    async def excel_talk_upload(self, chat_id: str, filename: str, file_content: bytes):
    async def excel_talk_upload(self, chat_id: str, files):
        url = f"{self.base_url}/exceltalk/upload/files"
        params = {'chat_id': chat_id, 'is_col': '0'}
        # Build the multipart form-data payload (single file, generic MIME type)
        files = [('files', (filename, file_content, 'application/octet-stream'))]
        async with httpx.AsyncClient() as client:
            response = await client.post(
@@ -56,22 +56,73 @@
                files=files,
                params=params
            )
            return await self._check_response(response)
            return self._check_response(response)
    async def excel_talk(self, question: str, chat_id: str):
        url = f"{self.base_url}/exceltalk/talk"
        params = {'chat_id': chat_id}
        data = {"query": question}
        headers = {'Content-Type': 'application/json'}
        buffer = bytearray()
        async with httpx.AsyncClient(timeout=300.0) as client:
            async with client.stream("POST", url, params=params, json=data, headers=headers) as response:
                if response.status_code == 200:
                    try:
                        async for answer in response.aiter_text():
                            print(f"response of ragflow chat: {answer}")
                            yield answer
                        async for chunk in response.aiter_bytes():
                            json_data = process_buffer(chunk, buffer)
                            if json_data:
                                yield json_data
                                buffer.clear()
                    except GeneratorExit as e:
                        print(e)
                        return
                        yield {"message": "内部错误", "type": "close"}
                    finally:
                        # 在所有数据接收完毕后记录日志
                        logger.info("All messages received and processed - over")
                        yield {"message": "", "type": "close"}
                else:
                    yield f"Error: {response.status_code}"
                    yield f"Error: {response.status_code}"
    async def questions_talk(self, question, chat_id: str):
        logger.error("---------------questions_talk--------------------------")
        url = f"{self.base_url}/questions/talk"
        params = {'chat_id': chat_id}
        headers = {'Content-Type': 'text/plain'}
        async with httpx.AsyncClient(timeout=1800) as client:
            response = await client.post(
                url,
                data=question,
                headers=headers,
                params=params
            )
            return self._check_response(response)
    async def questions_talk_word_download(self, file_id: str):
        url = f"{self.base_url}/questions/download/word"
        return await self.download_from_url(url, params={'word_name': file_id})
def process_buffer(data, buffer):
    """Accumulate a streamed byte chunk into *buffer* and try to decode JSON.

    Strips an optional SSE-style ``data:`` prefix from *data*, appends the
    whitespace-trimmed payload to *buffer* (a ``bytearray``), then attempts
    to parse the whole buffer as a single JSON document.

    Args:
        data: Raw ``bytes`` chunk from the HTTP stream.
        buffer: Mutable accumulator shared across calls; the caller is
            responsible for clearing it after a successful parse.

    Returns:
        The decoded Python object when *buffer* holds complete JSON,
        otherwise ``None`` (keep feeding chunks).

    Improvements: removed the dead ``else: pass`` branch and the inner
    success-tuple helper in favour of direct EAFP parsing.
    """
    if data.startswith(b'data:'):
        # Drop the 'data:' framing before accumulating the payload.
        data = data[5:]
    # NOTE(review): stripping each chunk could drop significant whitespace in
    # a string value split across chunks -- behavior preserved from original.
    buffer.extend(data.strip())
    try:
        return json.loads(buffer)
    except json.JSONDecodeError:
        # Incomplete JSON so far -- wait for more chunks.
        return None