git clone https://github.com/TuGraph-family/chat2graph.git
Prepare the Python environment (Python 3.10) with conda:

conda create -n chat2graph_env python=3.10
conda activate chat2graph_env
cd chat2graph
./bin/build.sh
Create the .env file from the template and edit it:

cp .env.template .env && vim .env
LLM_NAME=deepseek-ai/DeepSeek-V3
LLM_ENDPOINT=https://api.siliconflow.cn/v1
LLM_APIKEY={your-siliconflow-api-key}
EMBEDDING_MODEL_NAME=BAAI/bge-large-zh-v1.5
EMBEDDING_MODEL_ENDPOINT=https://api.siliconflow.cn/v1/embeddings
EMBEDDING_MODEL_APIKEY={your-siliconflow-api-key}
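The LLM and embedding settings point at an OpenAI-compatible API (SiliconFlow in this example). Before starting the server, you can sanity-check the key and endpoint with a minimal Python sketch; the request body below assumes the standard OpenAI-compatible /chat/completions schema and is not part of Chat2Graph itself.

# Minimal sanity check of the LLM endpoint configured in .env.
# Assumes an OpenAI-compatible /chat/completions API (SiliconFlow follows this schema).
import requests

LLM_ENDPOINT = "https://api.siliconflow.cn/v1"
LLM_NAME = "deepseek-ai/DeepSeek-V3"
LLM_APIKEY = "{your-siliconflow-api-key}"   # same value as in .env

resp = requests.post(
    f"{LLM_ENDPOINT}/chat/completions",
    headers={"Authorization": f"Bearer {LLM_APIKEY}"},
    json={
        "model": LLM_NAME,
        "messages": [{"role": "user", "content": "ping"}],
        "max_tokens": 8,
    },
    timeout=30,
)
resp.raise_for_status()
print(resp.json()["choices"][0]["message"]["content"])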
./bin/start.sh
Starting server...
Web resources location: /Users/florian/code/chat2graph/app/server/web
System database url: sqlite:////Users/florian/.chat2graph/system/chat2graph.db
Loading AgenticService from app/core/sdk/chat2graph.yml with encoding utf-8
Init application: Chat2Graph
Init the Leader agent
Init the Expert agents
(ASCII art banner: Chat2Graph)
 * Serving Flask app 'bootstrap'
 * Debug mode: off
WARNING: This is a development server. Do not use it in a production deployment. Use a production WSGI server instead.
 * Running on all addresses (0.0.0.0)
 * Running on http://127.0.0.1:5010
 * Running on http://192.168.1.1:5010
Chat2Graph server started success !
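Once the banner appears, the web UI is served on port 5010. A quick way to confirm the server is reachable from another terminal (this only checks that the root page responds; no specific health endpoint is assumed):

# Check that the Chat2Graph server answers on port 5010.
import requests

resp = requests.get("http://127.0.0.1:5010/", timeout=5)
print(resp.status_code)   # expect 200 once startup has finished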
docker pull neo4j:latest
docker run -d -p 7474:7474 -p 7687:7687 --name neo4j-server \
    --env NEO4J_AUTH=none \
    --env NEO4J_PLUGINS='["apoc", "graph-data-science"]' \
    neo4j:latest
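With NEO4J_AUTH=none the database accepts unauthenticated Bolt connections on port 7687. A small sketch using the official neo4j Python driver (an extra dependency used here only for verification) to confirm the container is up:

# Verify the Neo4j container is reachable over Bolt (auth disabled via NEO4J_AUTH=none).
from neo4j import GraphDatabase

driver = GraphDatabase.driver("bolt://localhost:7687", auth=None)
driver.verify_connectivity()          # raises if the server is not reachable
with driver.session() as session:
    print(session.run("RETURN 1 AS ok").single()["ok"])
driver.close()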
docker pull tugraph/tugraph-runtime-centos7:4.5.1
docker run -d -p 7070:7070 -p 7687:7687 -p 9090:9090 --name tugraph-server \
    tugraph/tugraph-runtime-centos7:4.5.1 lgraph_server -d run --enable_plugin true
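A quick way to confirm the TuGraph container came up is to check that the three ports mapped in the docker run command above are accepting connections (this sketch only tests TCP reachability, nothing protocol-specific):

# Check that the TuGraph container's mapped ports are accepting connections.
import socket

for port in (7070, 7687, 9090):        # ports mapped in the docker run command above
    with socket.socket() as s:
        s.settimeout(3)
        result = s.connect_ex(("localhost", port))
        print(port, "open" if result == 0 else "closed")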
SystemEnv.LLM_NAME="deepseek-ai/DeepSeek-V3"
SystemEnv.LLM_ENDPOINT="https://api.siliconflow.cn/v1"
SystemEnv.LLM_APIKEY="{your-siliconflow-api-key}"
SystemEnv.EMBEDDING_MODEL_NAME="BAAI/bge-large-zh-v1.5"
SystemEnv.EMBEDDING_MODEL_ENDPOINT="https://api.siliconflow.cn/v1/embeddings"
SystemEnv.EMBEDDING_MODEL_APIKEY="{your-siliconflow-api-key}"
Load the agentic service from the chat2graph.yml configuration:
chat2graph = AgenticService.load("app/core/sdk/chat2graph.yml")
answer = chat2graph.execute("What is TuGraph ?").get_payload()
job = chat2graph.session().submit("What is TuGraph ?")
answer = job.wait().get_payload()
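Putting the SDK pieces together: a sketch of a complete script that sets the model configuration via SystemEnv before loading the service and then asks the same question both ways. The import paths below are assumptions for illustration; use whichever modules actually export SystemEnv and AgenticService in your checkout.

# End-to-end SDK usage sketch; the two import paths are illustrative assumptions.
from app.core.common.system_env import SystemEnv         # assumed module path
from app.core.sdk.agentic_service import AgenticService  # assumed module path

# Model configuration should be in place before the service is loaded.
SystemEnv.LLM_NAME = "deepseek-ai/DeepSeek-V3"
SystemEnv.LLM_ENDPOINT = "https://api.siliconflow.cn/v1"
SystemEnv.LLM_APIKEY = "{your-siliconflow-api-key}"
SystemEnv.EMBEDDING_MODEL_NAME = "BAAI/bge-large-zh-v1.5"
SystemEnv.EMBEDDING_MODEL_ENDPOINT = "https://api.siliconflow.cn/v1/embeddings"
SystemEnv.EMBEDDING_MODEL_APIKEY = "{your-siliconflow-api-key}"

chat2graph = AgenticService.load("app/core/sdk/chat2graph.yml")

# Synchronous call: blocks until the answer is ready.
print(chat2graph.execute("What is TuGraph ?").get_payload())

# Asynchronous call: submit through a session and wait for the job.
job = chat2graph.session().submit("What is TuGraph ?")
print(job.wait().get_payload())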