Testing
Weves committed Apr 26, 2024
1 parent 648f2d0 commit 87ce2ca
Showing 6 changed files with 1,880 additions and 23 deletions.
9 changes: 0 additions & 9 deletions .github/workflows/docker-build-push-backend-container-on-tag.yml
@@ -31,14 +31,5 @@ jobs:
           push: true
           tags: |
             danswer/danswer-backend:${{ github.ref_name }}
-            danswer/danswer-backend:latest
           build-args: |
             DANSWER_VERSION=${{ github.ref_name }}
-
-      - name: Run Trivy vulnerability scanner
-        uses: aquasecurity/trivy-action@master
-        with:
-          # To run locally: trivy image --severity HIGH,CRITICAL danswer/danswer-backend
-          image-ref: docker.io/danswer/danswer-backend:${{ github.ref_name }}
-          severity: 'CRITICAL,HIGH'
-          trivyignores: ./backend/.trivyignore
7 changes: 0 additions & 7 deletions .github/workflows/docker-build-push-model-server-container-on-tag.yml
@@ -31,12 +31,5 @@ jobs:
           push: true
           tags: |
             danswer/danswer-model-server:${{ github.ref_name }}
-            danswer/danswer-model-server:latest
           build-args: |
             DANSWER_VERSION=${{ github.ref_name }}
-
-      - name: Run Trivy vulnerability scanner
-        uses: aquasecurity/trivy-action@master
-        with:
-          image-ref: docker.io/danswer/danswer-model-server:${{ github.ref_name }}
-          severity: 'CRITICAL,HIGH'
7 changes: 0 additions & 7 deletions .github/workflows/docker-build-push-web-container-on-tag.yml
@@ -31,12 +31,5 @@ jobs:
           push: true
           tags: |
             danswer/danswer-web-server:${{ github.ref_name }}
-            danswer/danswer-web-server:latest
           build-args: |
             DANSWER_VERSION=${{ github.ref_name }}
-
-      - name: Run Trivy vulnerability scanner
-        uses: aquasecurity/trivy-action@master
-        with:
-          image-ref: docker.io/danswer/danswer-web-server:${{ github.ref_name }}
-          severity: 'CRITICAL,HIGH'
13 changes: 13 additions & 0 deletions backend/danswer/search/search_nlp_models.py
@@ -5,7 +5,9 @@
 from typing import TYPE_CHECKING
 
 import requests
+from huggingface_hub.constants import HF_HOME
 from transformers import logging as transformer_logging  # type:ignore
+from transformers.utils import TRANSFORMERS_CACHE
 
 from danswer.configs.model_configs import DOC_EMBEDDING_CONTEXT_SIZE
 from danswer.configs.model_configs import DOCUMENT_ENCODER_MODEL
@@ -58,6 +60,17 @@ def get_default_tokenizer(model_name: str | None = None) -> "AutoTokenizer":
         # This could be inaccurate
         model_name = DOCUMENT_ENCODER_MODEL
 
+    print(f"HF Home: {HF_HOME}")
+    print(f"Cache dir: {TRANSFORMERS_CACHE}")
+
+    from model_server.hf import hf_hub_download
+    from transformers.tokenization_utils_base import TOKENIZER_CONFIG_FILE
+
+    hf_file_path = hf_hub_download(
+        model_name, TOKENIZER_CONFIG_FILE, local_files_only=True
+    )
+    print("hf_file_path", hf_file_path)
+
     _TOKENIZER = (AutoTokenizer.from_pretrained(model_name), model_name)
 
     if hasattr(_TOKENIZER[0], "is_fast") and _TOKENIZER[0].is_fast:
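For context on what the added lines are probing: hf_hub_download(..., local_files_only=True) resolves a file purely from the on-disk Hugging Face cache and raises if it is not there, which makes it a cheap check that the tokenizer config was actually cached. The commit imports it from a vendored model_server/hf.py (presumably the sixth changed file, not rendered in this view). A rough standalone sketch of the same probe, assuming the upstream huggingface_hub API in place of the vendored copy, with a hypothetical model name standing in for DOCUMENT_ENCODER_MODEL:

# Standalone sketch of the cache probe, using upstream huggingface_hub
# rather than the vendored model_server.hf copy (an assumption here).
from huggingface_hub import hf_hub_download
from huggingface_hub.utils import LocalEntryNotFoundError
from transformers.tokenization_utils_base import TOKENIZER_CONFIG_FILE  # "tokenizer_config.json"

model_name = "intfloat/e5-base-v2"  # hypothetical stand-in for DOCUMENT_ENCODER_MODEL

try:
    # local_files_only=True never touches the network: it either finds the
    # file in the local HF cache or raises LocalEntryNotFoundError.
    hf_file_path = hf_hub_download(model_name, TOKENIZER_CONFIG_FILE, local_files_only=True)
    print("hf_file_path", hf_file_path)
except LocalEntryNotFoundError:
    print(f"{TOKENIZER_CONFIG_FILE} is not in the local cache for {model_name}")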
15 changes: 15 additions & 0 deletions backend/model_server/custom_models.py
@@ -3,8 +3,10 @@
 import numpy as np
 import tensorflow as tf  # type: ignore
 from fastapi import APIRouter
+from huggingface_hub.constants import HF_HOME
 from transformers import AutoTokenizer  # type: ignore
 from transformers import TFDistilBertForSequenceClassification
+from transformers.utils import TRANSFORMERS_CACHE
 
 from model_server.constants import MODEL_WARM_UP_STRING
 from model_server.utils import simple_log_function_time
@@ -24,6 +26,17 @@
 def get_intent_model_tokenizer(
     model_name: str = INTENT_MODEL_VERSION,
 ) -> "AutoTokenizer":
+    print(f"HF Home: {HF_HOME}")
+    print(f"Cache dir: {TRANSFORMERS_CACHE}")
+
+    from model_server.hf import hf_hub_download
+    from transformers.tokenization_utils_base import TOKENIZER_CONFIG_FILE
+
+    hf_file_path = hf_hub_download(
+        model_name, TOKENIZER_CONFIG_FILE, local_files_only=True
+    )
+    print("hf_file_path", hf_file_path)
+
     global _INTENT_TOKENIZER
     if _INTENT_TOKENIZER is None:
         _INTENT_TOKENIZER = AutoTokenizer.from_pretrained(model_name)
@@ -36,6 +49,8 @@ def get_local_intent_model(
 ) -> TFDistilBertForSequenceClassification:
     global _INTENT_MODEL
     if _INTENT_MODEL is None or max_context_length != _INTENT_MODEL.max_seq_length:
+        print(f"HF Home: {HF_HOME}")
+        print(f"Cache dir: {TRANSFORMERS_CACHE}")
         _INTENT_MODEL = TFDistilBertForSequenceClassification.from_pretrained(
             model_name
         )
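The HF Home / Cache dir prints sprinkled through both files are pure diagnostics: HF_HOME and TRANSFORMERS_CACHE are resolved at import time from environment variables, so logging them confirms which cache directory each process will actually read. A minimal way to inspect the same state outside the server, assuming only that huggingface_hub and transformers are installed:

# Minimal cache-location check, mirroring the debug prints in this commit.
import os

from huggingface_hub.constants import HF_HOME
from transformers.utils import TRANSFORMERS_CACHE  # deprecated alias in newer transformers

print(f"HF Home: {HF_HOME}")  # typically ~/.cache/huggingface unless HF_HOME is set
print(f"Cache dir: {TRANSFORMERS_CACHE}")

# List cached repos (hub cache directories are named like models--org--name).
if os.path.isdir(TRANSFORMERS_CACHE):
    for entry in sorted(os.listdir(TRANSFORMERS_CACHE)):
        print(" ", entry)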
