From e7e7646d768fa9e2cdfc3bec69ca32332d9edebd Mon Sep 17 00:00:00 2001
From: aries_ckt <916701291@qq.com>
Date: Sat, 11 May 2024 16:55:36 +0800
Subject: [PATCH 1/2] doc:ollama document

---
 .../installation/advanced_usage/ollama.md | 41 +++++++++++++++++++
 docs/sidebars.js                          |  4 ++
 2 files changed, 45 insertions(+)
 create mode 100644 docs/docs/installation/advanced_usage/ollama.md

diff --git a/docs/docs/installation/advanced_usage/ollama.md b/docs/docs/installation/advanced_usage/ollama.md
new file mode 100644
index 000000000..f432d7054
--- /dev/null
+++ b/docs/docs/installation/advanced_usage/ollama.md
@@ -0,0 +1,41 @@
+# Ollama
+Ollama is a model serving platform that lets you deploy and run local models in seconds.
+This guide shows how to use Ollama as both the LLM and the embedding provider for DB-GPT.
+
+### Install Ollama
+On Linux, install Ollama with:
+```bash
+curl -fsSL https://ollama.com/install.sh | sh
+```
+For other platforms, refer to the [official Ollama website](https://ollama.com/).
+### Pull models
+1. Pull the LLM:
+```bash
+ollama pull qwen:0.5b
+```
+2. Pull the embedding model:
+```bash
+ollama pull nomic-embed-text
+```
+
+3. Install the `ollama` Python package:
+```bash
+pip install ollama
+```
+
+### Use the Ollama proxy model in the DB-GPT `.env` file
+
+```bash
+LLM_MODEL=ollama_proxyllm
+PROXY_SERVER_URL=http://127.0.0.1:11434
+PROXYLLM_BACKEND="qwen:0.5b"
+PROXY_API_KEY=not_used
+EMBEDDING_MODEL=proxy_ollama
+proxy_ollama_proxy_server_url=http://127.0.0.1:11434
+proxy_ollama_proxy_backend="nomic-embed-text:latest"
+```
+
+### Run the DB-GPT server
+```bash
+python dbgpt/app/dbgpt_server.py
+```
\ No newline at end of file
diff --git a/docs/sidebars.js b/docs/sidebars.js
index 1ac43997c..9fa03854b 100755
--- a/docs/sidebars.js
+++ b/docs/sidebars.js
@@ -237,6 +237,10 @@ const sidebars = {
         type: 'doc',
         id: 'installation/advanced_usage/More_proxyllms',
       },
+      {
+        type: 'doc',
+        id: 'installation/advanced_usage/ollama',
+      },
       {
         type: 'doc',
         id: 'installation/advanced_usage/vLLM_inference',

From 871daaba040bbf384ae930542ce0b5cc47f955ad Mon Sep 17 00:00:00 2001
From: aries_ckt <916701291@qq.com>
Date: Sat, 11 May 2024 17:07:22 +0800
Subject: [PATCH 2/2] style:fmt

---
 dbgpt/storage/vector_store/chroma_store.py   | 2 +-
 dbgpt/storage/vector_store/pgvector_store.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/dbgpt/storage/vector_store/chroma_store.py b/dbgpt/storage/vector_store/chroma_store.py
index 28bff63c6..5e921377e 100644
--- a/dbgpt/storage/vector_store/chroma_store.py
+++ b/dbgpt/storage/vector_store/chroma_store.py
@@ -19,6 +19,7 @@
 
 CHROMA_COLLECTION_NAME = "langchain"
 
+
 @register_resource(
     _("Chroma Vector Store"),
     "chroma_vector_store",
@@ -152,7 +153,6 @@ def vector_name_exists(self) -> bool:
         files = list(filter(lambda f: f != "chroma.sqlite3", files))
         return len(files) > 0
 
-
     def load_document(self, chunks: List[Chunk]) -> List[str]:
         """Load document to vector store."""
         logger.info("ChromaStore load document")
diff --git a/dbgpt/storage/vector_store/pgvector_store.py b/dbgpt/storage/vector_store/pgvector_store.py
index 0f56fbe03..6f48aaf36 100644
--- a/dbgpt/storage/vector_store/pgvector_store.py
+++ b/dbgpt/storage/vector_store/pgvector_store.py
@@ -66,7 +66,7 @@ def __init__(self, vector_store_config: PGVectorConfig) -> None:
             embedding_function=self.embeddings,
             collection_name=self.collection_name,
             connection_string=self.connection_string,
-        ) # mypy: ignore
+        )  # mypy: ignore
 
     def similar_search(
         self, text: str, topk: int, filters: Optional[MetadataFilters] = None
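
As a sanity check for the setup documented in the first patch, here is a minimal sketch — assuming the `ollama` Python package is installed and a local Ollama server is running on the default `http://127.0.0.1:11434`, with the `qwen:0.5b` and `nomic-embed-text` models already pulled — that confirms both models respond before pointing DB-GPT's `.env` at them:

```python
# Minimal sanity check for the Ollama setup described in the doc above.
# Assumes `pip install ollama` and a local Ollama server on 127.0.0.1:11434.
import ollama

# Chat with the pulled LLM (qwen:0.5b in the doc).
chat_response = ollama.chat(
    model="qwen:0.5b",
    messages=[{"role": "user", "content": "Say hello in one short sentence."}],
)
print(chat_response["message"]["content"])

# Embed a test string with the pulled embedding model (nomic-embed-text).
embedding_response = ollama.embeddings(
    model="nomic-embed-text",
    prompt="DB-GPT with Ollama",
)
print(len(embedding_response["embedding"]))  # embedding dimensionality
```

If both calls return without error, the `PROXY_SERVER_URL` and `proxy_ollama_proxy_server_url` values in the `.env` example should work as written.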