prompt-based results

Author: Kuro0911
Date:   2025-11-03 21:08:45 +08:00
parent 8c76d2673e
commit 45bbb50b96
5 changed files with 7 additions and 4 deletions

.gitignore (vendored, new file, +3)

@@ -0,0 +1,3 @@
+__pycache__/
+.ipynb_checkpoints/


@@ -23,7 +23,7 @@ DEFAULT_MODELS = {
 DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
 DTYPE = torch.bfloat16 if torch.cuda.is_available() else torch.float32
-auth_token = "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"
+auth_token = "HF_TOKEN"
 _PREFERRED_Q4K_ORDER = ("Q4_K_M", "Q4_K_S", "Q4_K_L", "Q4_K")
 _ENV_LOCAL_GGUF = "HF_GGUF_LOCAL_PATH"
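
Note that the new line assigns the literal string "HF_TOKEN" rather than reading a token. If the intent is to load the token from an HF_TOKEN environment variable, a minimal sketch (an assumption on my part, not part of this commit) would be:

import os

# Hypothetical sketch: read the Hugging Face token from the HF_TOKEN
# environment variable instead of hardcoding it; os.getenv returns None when unset.
auth_token = os.getenv("HF_TOKEN")

Recent versions of huggingface_hub also pick up HF_TOKEN from the environment automatically when no explicit token is passed.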


@@ -70,7 +70,7 @@ def _get_hf_judge():
     device = 0 if torch.cuda.is_available() else -1
     dtype = _pick_dtype()
-    hf_token = "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"
+    hf_token = "HF_TOKEN"
     if hf_token is None:
         raise RuntimeError(
             "❌ Hugging Face token not found. Set it with:\n"


@@ -23,7 +23,7 @@ DEFAULT_MODELS = {
 DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
 DTYPE = torch.bfloat16 if torch.cuda.is_available() else torch.float32
-auth_token = "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"
+auth_token = "HF_TOKEN"
 _PREFERRED_Q4K_ORDER = ("Q4_K_M", "Q4_K_S", "Q4_K_L", "Q4_K")
 _ENV_LOCAL_GGUF = "HF_GGUF_LOCAL_PATH"


@@ -70,7 +70,7 @@ def _get_hf_judge():
     device = 0 if torch.cuda.is_available() else -1
     dtype = _pick_dtype()
-    hf_token = "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"
+    hf_token = "HF_TOKEN"
     if hf_token is None:
         raise RuntimeError(
             "❌ Hugging Face token not found. Set it with:\n"