Mirror of https://github.com/JasonYANG170/CodeGeeX4.git, synced 2024-11-23 12:16:33 +00:00
1. Fix some typos.
2. Eliminate PEP 8 warnings.
This commit is contained in:
parent 6d9d3c5ad4
commit a714607eaf
@@ -51,7 +51,7 @@ auto_tag_thread = true
 
 [UI]
 # Name of the assistant.
-name = "CodeGeeX4 RepoDome"
+name = "CodeGeeX4 RepoDemo"
 
 # Description of the assistant. This is used for HTML tags.
 description = "CodeGeeX4项目级能力展示"
@@ -1,6 +1,6 @@
 # CodeGeeX
 
-## Welcome to My Chat Dome Application
+## Welcome to My Chat Demo Application
 
 This is a simple demonstration application.
 
@@ -21,8 +21,8 @@
 1. Clone the repository to your local machine
 2. Set up the model: you can use either a local model or the API model; for a local model, set local_model_path in run_local.py
 3. To use web-connected Q&A, configure the Bing Search API by setting bingsearch_api_key in utils/bingsearch.py
-3. Install dependencies: `pip install -r requirements.txt`
-4. Run the app: `chainlit run run.py --port 8888`; with a local model: `chainlit run run_local.py --port 8888`
+4. Install dependencies: `pip install -r requirements.txt`
+5. Run the app: `chainlit run run.py --port 8888`; with a local model: `chainlit run run_local.py --port 8888`
 
 
 ## Note
@@ -1,6 +1,7 @@
-import requests
 import json
+
+import requests
 
 URL = "" # the url you deploy codegeex service
 
 
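For orientation, here is a minimal sketch of how a client might call such a deployed CodeGeeX service over HTTP with `requests`. The endpoint path, payload fields, and response shape below are illustrative assumptions, not the repository's actual API.

```python
import requests

# Assumption: replace with the URL where you deploy the codegeex service.
URL = "http://localhost:8080/v1/chat"


def call_codegeex(prompt: str, temperature: float = 0.2, top_p: float = 0.95) -> str:
    # Hypothetical JSON payload; the real service may expect different field names.
    payload = {"prompt": prompt, "temperature": temperature, "top_p": top_p}
    resp = requests.post(URL, json=payload, timeout=60)
    resp.raise_for_status()
    # Hypothetical response shape: {"response": "..."}
    return resp.json().get("response", "")
```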
@@ -1,7 +1,6 @@
-import torch
 from pydantic import Field
 from transformers import AutoModel, AutoTokenizer
 from typing import Iterator
+import torch
 
 
 class CodegeexChatModel:
@@ -34,17 +33,17 @@ class CodegeexChatModel:
             )
             return response
         except Exception as e:
-            return f"error:{e}"
+            return f"error: {e}"
 
     def stream_chat(self, prompt, temperature=0.2, top_p=0.95):
 
         try:
             for response, _ in self.model.stream_chat(
-                self.tokenizer,
-                query=prompt,
-                max_length=120000,
-                temperature=temperature,
-                top_p=top_p,
+                self.tokenizer,
+                query=prompt,
+                max_length=120000,
+                temperature=temperature,
+                top_p=top_p,
             ):
                 yield response
         except Exception as e:
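The hunk above shows the `stream_chat` call signature used by the demo's local model wrapper. As a rough, self-contained sketch of how such a wrapper fits together: the loading logic in `__init__` is an assumption (it is not shown in this diff); only the `model.stream_chat(...)` arguments and the `f"error: {e}"` convention come from the hunk itself.

```python
import torch
from transformers import AutoModel, AutoTokenizer


class CodegeexChatModel:
    def __init__(self, model_path: str):
        # Assumed loading logic; the repository's actual __init__ is outside this hunk.
        self.tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
        self.model = AutoModel.from_pretrained(model_path, trust_remote_code=True).eval()
        if torch.cuda.is_available():
            self.model = self.model.cuda()

    def stream_chat(self, prompt, temperature=0.2, top_p=0.95):
        try:
            # Call signature mirrors the diff above.
            for response, _ in self.model.stream_chat(
                self.tokenizer,
                query=prompt,
                max_length=120000,
                temperature=temperature,
                top_p=top_p,
            ):
                yield response
        except Exception as e:
            # Assumed error convention, matching the chat() method's "error: {e}" string.
            yield f"error: {e}"
```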
@@ -21,8 +21,8 @@
 1. Clone the repository to your local machine
 2. Set up the model: you can use either a local model or the API model; for a local model, set local_model_path in run_local.py
 3. To use web-connected Q&A, configure the Bing Search API by setting bingsearch_api_key in utils/bingsearch.py
-3. Install dependencies: `pip install -r requirements.txt`
-4. Run the app: `chainlit run run.py --port 8888`; with a local model: `chainlit run run_local.py --port 8888`
+4. Install dependencies: `pip install -r requirements.txt`
+5. Run the app: `chainlit run run.py --port 8888`; with a local model: `chainlit run run_local.py --port 8888`
 
 
 ## Note
@@ -1,13 +1,14 @@
 import chainlit as cl
 from chainlit.input_widget import Slider
+
 from llm.api.codegeex4 import codegeex4
 from prompts.base_prompt import (
     judge_task_prompt,
     get_cur_base_user_prompt,
     web_judge_task_prompt,
 )
-from utils.tools import unzip_file, get_project_files_with_content
 from utils.bingsearch import bing_search_prompt
+from utils.tools import unzip_file, get_project_files_with_content
 
 
 @cl.set_chat_profiles
@@ -15,7 +16,7 @@ async def chat_profile():
     return [
         cl.ChatProfile(
            name="chat聊天",
-            markdown_description="聊天demo:支持多轮对话。",
+            markdown_description="聊天demo:支持多轮对话。",
             starters=[
                 cl.Starter(
                     label="请你用python写一个快速排序。",
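For context, a minimal sketch of the Chainlit profile declaration this hunk belongs to. The decorator, class names, and the visible arguments are taken from the diff; the starter `message` value is an illustrative assumption, since it falls outside the hunk.

```python
import chainlit as cl


@cl.set_chat_profiles
async def chat_profile():
    return [
        cl.ChatProfile(
            name="chat聊天",
            markdown_description="聊天demo:支持多轮对话。",
            starters=[
                cl.Starter(
                    label="请你用python写一个快速排序。",
                    # Assumed starter message; the actual value is not shown in this hunk.
                    message="请你用python写一个快速排序。",
                ),
            ],
        ),
    ]
```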
@@ -1,14 +1,14 @@
 import chainlit as cl
 from chainlit.input_widget import Slider
-from llm.api.codegeex4 import codegeex4
+
+from llm.local.codegeex4 import CodegeexChatModel
 from prompts.base_prompt import (
     judge_task_prompt,
     get_cur_base_user_prompt,
     web_judge_task_prompt,
 )
-from utils.tools import unzip_file, get_project_files_with_content
 from utils.bingsearch import bing_search_prompt
-from llm.local.codegeex4 import CodegeexChatModel
+from utils.tools import unzip_file, get_project_files_with_content
 
 local_model_path = "<your_local_model_path>"
 llm = CodegeexChatModel(local_model_path)
@@ -19,7 +19,7 @@ class StreamProcessor:
         self.previous_str = ""
 
     def get_new_part(self, new_str):
-        new_part = new_str[len(self.previous_str) :]
+        new_part = new_str[len(self.previous_str):]
         self.previous_str = new_str
         return new_part
 
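To make the slicing fix concrete, here is a small self-contained sketch of the `StreamProcessor` pattern this hunk edits, with a usage example. Only `get_new_part` mirrors the diff; the surrounding demo wiring is assumed.

```python
class StreamProcessor:
    """Tracks the part of an accumulated stream that has not been emitted yet."""

    def __init__(self):
        self.previous_str = ""

    def get_new_part(self, new_str):
        # The model streams the full text so far; slice off what was already seen.
        new_part = new_str[len(self.previous_str):]
        self.previous_str = new_str
        return new_part


# Usage: turn cumulative chunks into incremental ones.
sp = StreamProcessor()
for chunk in ["你好", "你好,世", "你好,世界"]:
    print(sp.get_new_part(chunk), end="")  # prints "你好,世界" exactly once
```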
@@ -1,6 +1,5 @@
-import requests
 from bs4 import BeautifulSoup as BS4
 import requests
 
 BING_API_KEY = "<your_bing_api_key>"
 
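A hedged sketch of what a `bing_search_prompt` helper might look like on top of these imports. The endpoint and header are the standard Bing Web Search v7 API, but the result formatting is an assumption rather than the repository's implementation (the repo also imports BeautifulSoup, presumably to scrape page content, which this sketch omits).

```python
import requests

BING_API_KEY = "<your_bing_api_key>"
# Standard Bing Web Search v7 endpoint.
BING_ENDPOINT = "https://api.bing.microsoft.com/v7.0/search"


def bing_search_prompt(query: str, count: int = 5) -> str:
    # Assumed formatting: concatenate result snippets into a context block for the LLM.
    headers = {"Ocp-Apim-Subscription-Key": BING_API_KEY}
    params = {"q": query, "count": count, "mkt": "zh-CN"}
    resp = requests.get(BING_ENDPOINT, headers=headers, params=params, timeout=30)
    resp.raise_for_status()
    pages = resp.json().get("webPages", {}).get("value", [])
    snippets = [f"{p['name']}: {p['snippet']}" for p in pages]
    return "\n".join(snippets)
```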
@@ -1,6 +1,6 @@
-import zipfile
-import os
 import json
+import os
+import zipfile
 
 
 def unzip_file(zip_path, extract_dir):
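These imports back the `unzip_file(zip_path, extract_dir)` helper whose signature appears above. A minimal standard-library sketch; the body is an assumption consistent with that signature.

```python
import os
import zipfile


def unzip_file(zip_path, extract_dir):
    # Extract every member of the archive into extract_dir, creating it if needed.
    os.makedirs(extract_dir, exist_ok=True)
    with zipfile.ZipFile(zip_path) as zf:
        zf.extractall(extract_dir)
    return extract_dir
```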
@@ -103,7 +103,6 @@ def filter_data(obj):
         "delphi": "{ Delphi }",
         "scheme": "; Scheme",
         "basic": "' Basic",
         "assembly": "; Assembly",
         "groovy": "// Groovy",
         "abap": "* Abap",
         "gdscript": "# GDScript",
@@ -122,7 +121,6 @@ def filter_data(obj):
-        "dockerfile": "# Dockerfile",
         "markdown": "<!-- Markdown -->",
         "cmake": "# CMake",
         "dockerfile": "# Dockerfile",
     }
 
     programming_languages_to_file_extensions = json.load(
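The line removed here is a repeated dict key. In a Python dict literal, a duplicated key silently keeps only the last value, so the extra "dockerfile" entry was dead code and is exactly the kind of thing linters flag. A tiny illustration:

```python
mapping = {
    "dockerfile": "# Dockerfile",
    "cmake": "# CMake",
    "dockerfile": "# Dockerfile",  # duplicate key: overrides the first entry
}
print(len(mapping))  # 2 -- only one "dockerfile" key survives
```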