@@ -1,6 +1,6 @@
 import torch.cuda
 import torch.backends
-
+import os
 
 embedding_model_dict = {
     "ernie-tiny": "nghuyong/ernie-3.0-nano-zh",
@@ -31,7 +31,7 @@ USE_PTUNING_V2 = False
 # LLM running device
 LLM_DEVICE = "cuda" if torch.cuda.is_available() else "mps" if torch.backends.mps.is_available() else "cpu"
 
-VS_ROOT_PATH = "./vector_store/"
+VS_ROOT_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), "vector_store")
 
-UPLOAD_ROOT_PATH = "./content/"
+UPLOAD_ROOT_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), "content")
 