其他备份

@router.post('/openai', response_class=EventSourceResponse)
async def simple_chat(user_input=Body(..., embed=True, alias='input'),
                      chat_history=Body(..., embed=True, alias='chat_history')):
    """Stream an OpenAI chat completion back to the client as Server-Sent Events.

    Body params (embedded JSON):
        input: the human's new message (aliased to ``user_input``).
        chat_history: the prior conversation transcript, interpolated
            verbatim into the prompt.

    Returns:
        EventSourceResponse that emits one SSE event per streamed token,
        each event's data being a JSON-encoded token string.
    """
    callback_handler = StreamingCallbackHandler()
    chat_prompt = PromptTemplate(
        input_variables=['human_input', 'chat_history'],
        template='''you are a robot having a conversation with a human being.
        previous conversation:
        {chat_history}
        
        human: {human_input}
        robot:'''
    )
    chain = LLMChain(
        llm=OpenAI(
            temperature=0.8,
            max_retries=1,
            max_tokens=2048,
            streaming=True,
            verbose=True,
        ),
        prompt=chat_prompt,
    )
    # FIX: the original called asyncio.run_coroutine_threadsafe(task, loop)
    # from inside the loop's own thread. That API is documented for submitting
    # work from OTHER threads and is not safe to call from the event-loop
    # thread; the correct in-loop API is create_task().
    asyncio.get_running_loop().create_task(
        chain.arun({'human_input': user_input, 'chat_history': chat_history},
                   callbacks=[callback_handler])
    )

    def resp():
        # Sync generator: sse-starlette iterates sync generators in a thread
        # pool, so the blocking queue get below does not stall the event loop.
        # NOTE(review): assumes callback_handler.tokens is a thread-safe
        # queue.Queue that the handler closes by pushing the StopIteration
        # class as a sentinel — confirm against StreamingCallbackHandler.
        while True:
            tk = callback_handler.tokens.get()
            if tk is StopIteration:
                # FIX: original raised StopIteration and caught it in the same
                # generator; under PEP 479 an escaping StopIteration becomes a
                # RuntimeError, so a plain break is both simpler and safer.
                break
            yield ServerSentEvent(data=json.dumps(tk, ensure_ascii=False))

    return EventSourceResponse(resp())

  1. 后台启动
nohup gunicorn main:app -w 4 -b 0.0.0.0:8081 --worker-class uvicorn.workers.UvicornWorker > test.log 2>&1 &
  2. libreoffice 安装
    参考:https://blog.csdn.net/weixin_43148701/article/details/129403372

posted on 2023-11-30 09:40  朝朝暮Mu  阅读(16)  评论(0)    收藏  举报