# 一. Introduction to Chainlit

# 1. Official documentation

- Official documentation
- GitHub
- LangChain integration
- Python
- Running Chainlit with Docker

# 2. Installing Python

Install Python 3.10 on a CentOS server.

Install the build dependencies, then download and unpack the source:

# install build dependencies
sudo yum install gcc openssl-devel bzip2-devel libffi-devel zlib-devel wget sqlite-devel

# download the Python source
wget https://www.python.org/ftp/python/3.10.0/Python-3.10.0.tgz

# unpack it
tar -zxvf Python-3.10.0.tgz

Build and install Python 3.10:

# enter the source directory
cd Python-3.10.0

# configure the build
./configure --enable-optimizations

# compile
make -j 8

# install (altinstall keeps the system python intact)
sudo make altinstall

Verify the installation:

# check the installed version
python3.10 --version

# 3. Installing Chainlit

Create and activate a virtual environment:

# create a virtual environment
python3.10 -m venv myenv

# activate it
source myenv/bin/activate

# leave the virtual environment when you are done
deactivate

Install the dependencies:

# install chainlit
pip install chainlit

# install langchain
pip install langchain
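To confirm the installation works, here is a minimal sketch of a Chainlit app that only echoes the user's input, using the same `@cl.on_message` / `cl.Message` API as the examples below (the file name `hello_demo.py` is just illustrative):

import chainlit as cl


@cl.on_message  # called every time the user sends a message in the UI
async def main(message: str):
    # simply echo the input back to the chat window
    await cl.Message(content=f"You said: {message}").send()

Run it with `chainlit run hello_demo.py` and open the printed URL in a browser.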

Method one: create an azure_demo.py file that talks to Azure OpenAI through LangChain, with the following content:

import os
import chainlit as cl

from langchain.chat_models import ChatOpenAI
from langchain.schema import (
    HumanMessage,
    SystemMessage
)

# Azure OpenAI credentials and endpoint
os.environ["OPENAI_API_KEY"] = 'xxxxx'
os.environ["OPENAI_API_BASE"] = 'https://gpt.openai.azure.com/'
os.environ["OPENAI_API_TYPE"] = 'azure'
os.environ["OPENAI_API_VERSION"] = '2023-05-15'

chat = ChatOpenAI(model_name="gpt-35-turbo", engine="gpt-35-turbo")
# note: this history list is module-level, so every connected user shares it
history = [SystemMessage(content="你是一个聊天机器人,请回答下列问题。\n")]


@cl.on_message  # this function will be called every time a user inputs a message in the UI
async def main(message: str):
    history.append(HumanMessage(content=message))
    # chat(history) is a blocking call; make_async runs it in a thread so the UI stays responsive
    res = await cl.make_async(sync_func)()
    history.append(res)
    # send back the final answer
    await cl.Message(content=f"{res.content}").send()


def sync_func():
    return chat(history)
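One caveat with method one: because `history` is a module-level list, all users connected to the same process share a single conversation. A sketch of keeping the history per session instead, reusing the `cl.user_session` API that method two below relies on (the Azure environment variables from azure_demo.py are assumed to be set), could look like this:

import chainlit as cl
from langchain.chat_models import ChatOpenAI
from langchain.schema import HumanMessage, SystemMessage

# same model setup as azure_demo.py; the Azure credentials must already be in the environment
chat = ChatOpenAI(model_name="gpt-35-turbo", engine="gpt-35-turbo")


@cl.on_chat_start
def start_chat():
    # give every connected user their own history list
    cl.user_session.set(
        "history",
        [SystemMessage(content="你是一个聊天机器人,请回答下列问题。\n")],
    )


@cl.on_message
async def main(message: str):
    history = cl.user_session.get("history")
    history.append(HumanMessage(content=message))
    # the blocking ChatOpenAI call runs in a worker thread so the event loop stays responsive
    res = await cl.make_async(chat)(history)
    history.append(res)
    await cl.Message(content=res.content).send()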

Method two: call the OpenAI API directly, stream the response token by token, and keep the message history in the Chainlit user session:

import openai
import chainlit as cl

# local HTTP proxy used to reach the OpenAI API
openai.proxy = 'http://127.0.0.1:7890'
openai.api_key = "xxxx"
# model_name = "text-davinci-003"
model_name = "gpt-3.5-turbo"
settings = {
    "temperature": 0.7,
    "max_tokens": 500,
    "top_p": 1,
    "frequency_penalty": 0,
    "presence_penalty": 0,
}


@cl.on_chat_start
def start_chat():
    # each user session gets its own message history
    cl.user_session.set(
        "message_history",
        [{"role": "system", "content": "You are a helpful assistant."}],
    )


@cl.on_message
async def main(message: str):
    message_history = cl.user_session.get("message_history")
    message_history.append({"role": "user", "content": message})
    msg = cl.Message(content="")
    # stream tokens to the UI as they arrive
    async for stream_resp in await openai.ChatCompletion.acreate(
            model=model_name,
            messages=message_history,
            stream=True,
            **settings
    ):
        token = stream_resp.choices[0]["delta"].get("content", "")
        await msg.stream_token(token)
    message_history.append({"role": "assistant", "content": msg.content})
    await msg.send()

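For comparison, here is a sketch of the same handler without streaming: it waits for the complete response from `openai.ChatCompletion.acreate` and sends it as a single message (all other setup from method two is assumed unchanged):

@cl.on_message
async def main(message: str):
    message_history = cl.user_session.get("message_history")
    message_history.append({"role": "user", "content": message})
    # wait for the full completion instead of streaming token by token
    response = await openai.ChatCompletion.acreate(
        model=model_name, messages=message_history, **settings
    )
    answer = response["choices"][0]["message"]["content"]
    message_history.append({"role": "assistant", "content": answer})
    await cl.Message(content=answer).send()

Streaming, as in method two, gives a noticeably better experience for long answers, since the first tokens appear almost immediately.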

# 4. Startup script

# run the app in the background
nohup chainlit run azure_demo.py &

# 5. One-click restart script

# print the PID of the running app, if any
echo `ps -ef | grep azure_demo | grep -v grep | awk '{print $2}'`
# kill the old process
kill -9 `ps -ef | grep azure_demo | grep -v grep | awk '{print $2}'`
cd /kwan/chainlit
# reuse (or create) the virtual environment and activate it
python3.10 -m venv myenv
source myenv/bin/activate
# restart the app in the background, discarding its output
nohup chainlit run azure_demo.py >/dev/null 2>&1 & exit

# 二. Docker deployment

# 1. Repository links

GitHub

GitCode

# 2. Dockerfile

# Build stage: install Python dependencies with Poetry
FROM python:3.11-slim-buster as builder

#RUN apt-get update && apt-get install -y git

# install poetry and extra runtime libraries from the Tsinghua PyPI mirror
RUN pip install poetry==1.4.2 -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple/ \
&& pip install DBUtils==3.0.3 -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple/ \
&& pip install PyMySQL==1.1.0 -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple/

ENV POETRY_NO_INTERACTION=1 \
    POETRY_VIRTUALENVS_IN_PROJECT=1 \
    POETRY_VIRTUALENVS_CREATE=1 \
    POETRY_CACHE_DIR=/tmp/poetry_cache

ENV HOST=0.0.0.0
ENV LISTEN_PORT 8000
EXPOSE 8000

WORKDIR /app

COPY pyproject.toml poetry.lock ./

# point poetry at a domestic PyPI mirror and a local cache directory
RUN poetry config repositories.clearlydefined https://pypi.tuna.tsinghua.edu.cn/simple/

RUN poetry config cache-dir /kwan/chainlit/demo

RUN poetry config virtualenvs.create false

# install only the runtime dependencies, then drop the poetry cache
RUN poetry install  --without dev --no-root && rm -rf $POETRY_CACHE_DIR


# The runtime image, used to just run the code provided its virtual environment
FROM python:3.11-slim-buster as runtime

ENV VIRTUAL_ENV=/app/.venv \
    PATH="/app/.venv/bin:$PATH"

# copy the virtual environment built in the previous stage
COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV}

# copy the application code, chainlit config and welcome page
COPY ./demo_app ./demo_app
COPY ./.chainlit ./.chainlit
COPY chainlit.md ./

CMD ["chainlit", "run", "demo_app/main.py"]

# 3. Adding dependencies

# add the dependencies in pyproject.toml
[tool.poetry.dependencies]
python = "^3.10"
langchain = "0.0.199"
openai = "0.27.8"
chainlit = "0.5.2"
DBUtils = "3.0.3"
PyMySQL = "1.1.0"

# running poetry update refreshes poetry.lock
poetry update

# 4. Deployment steps

#!/bin/bash

# create the working directory
mkdir -p /kwan/chainlit

# enter it
cd /kwan/chainlit

# fetch the source code
git clone https://gitcode.net/qyj19920704/langchain-chainlit-docker-deployment-template.git

# enter the project directory
cd /kwan/chainlit/langchain-chainlit-docker-deployment-template

# edit the application code as needed
cd /kwan/chainlit/langchain-chainlit-docker-deployment-template/demo_app

# build the image
cd /kwan/chainlit/langchain-chainlit-docker-deployment-template

DOCKER_BUILDKIT=1 docker build --target=runtime . -t langchain-chainlit-chat-app:latest

# remove any old container
docker rm -f langchain-chainlit-chat-app

# start the container
docker run -d --name langchain-chainlit-chat-app -p 8000:8000 langchain-chainlit-chat-app

# follow the container logs
docker logs -f langchain-chainlit-chat-app

# list all containers
docker ps -a

# 5. Configuration tweaks

# the chainlit configuration lives in the .chainlit directory
cd /kwan/chainlit/.chainlit

# the markdown welcome page (chainlit.md) lives in the project root
cd /kwan/chainlit

# 6. Verifying access

# public page
https://qinyingjie.top:8000/

# via the server's public IP
http://43.139.90.182:8888

# local access
http://localhost:8888
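Besides opening these URLs in a browser, a quick programmatic check can confirm the service responds. This is a sketch that assumes the app is reachable on localhost:8000 (the port mapped in the `docker run` command above); adjust the host and port if your deployment listens elsewhere.

import urllib.request

# assumes the chainlit app is listening on port 8000, as mapped by docker run above
with urllib.request.urlopen("http://localhost:8000", timeout=5) as resp:
    print(resp.status)  # 200 means the chainlit UI is being served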