Skip to content

Commit

Permalink
Renaming part2 (#2665)
Browse files Browse the repository at this point in the history
* rename the space

* rename hub-ci users/orgs + default databases
  • Loading branch information
severo authored Apr 8, 2024
1 parent d147ad6 commit 8efd4a5
Show file tree
Hide file tree
Showing 33 changed files with 78 additions and 80 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/_e2e_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ jobs:
WORKER_SLEEP_SECONDS: "1"
MONGO_PORT: ${{ env.mongo-port }}
MONGO_REPLICASET: ${{ env.replicaset }}
- COMMON_BLOCKED_DATASETS: "DSSUser/blocked-*"
+ COMMON_BLOCKED_DATASETS: "DVUser/blocked-*"
ADMIN_HF_ORGANIZATION: "valid_org"
E2E_ADMIN_USER_TOKEN: "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"
# the containers need to access S3 and Cloudfront
Expand Down Expand Up @@ -109,7 +109,7 @@ jobs:
PORT_REVERSE_PROXY: "8000"
PROMETHEUS_MULTIPROC_DIR: "/tmp"
WORKER_SLEEP_SECONDS: "1"
- COMMON_BLOCKED_DATASETS: "DSSUser/blocked-*"
+ COMMON_BLOCKED_DATASETS: "DVUser/blocked-*"
ADMIN_HF_ORGANIZATION: "valid_org"
E2E_ADMIN_USER_TOKEN: "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"
run: |
Expand Down
2 changes: 1 addition & 1 deletion e2e/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ export SEARCH_UVICORN_PORT := 9083
export COMMON_HF_ENDPOINT := https://hub-ci.huggingface.co
export COMMON_HF_TOKEN := hf_app_datasets-server_token
# ^ hard coded, see e2e/tests/fixtures/hub.py
- export COMMON_BLOCKED_DATASETS := DSSUser/blocked-*
+ export COMMON_BLOCKED_DATASETS := DVUser/blocked-*
export LOG_LEVEL := DEBUG
export MONGO_PORT := 27050
export PARQUET_AND_INFO_COMMITTER_HF_TOKEN := hf_app_datasets-server-parquet-converter_token
Expand Down
10 changes: 5 additions & 5 deletions e2e/tests/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,14 @@
# see https://github.com/huggingface/moon-landing/blob/main/server/scripts/staging-seed-db.ts
CI_APP_TOKEN = "hf_app_datasets-server_token"
CI_HUB_ENDPOINT = "https://hub-ci.huggingface.co"
- NORMAL_USER = "DSSUser"
+ NORMAL_USER = "DVUser"
NORMAL_USER_TOKEN = "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"
NORMAL_USER_COOKIE = "oMidckPVQYumfKrAHNYKqnbacRoLaMppHRRlfNbupNahzAHCzInBVbhgGosDneYXHVTKkkWygoMDxBfFUkFPIPiVWBtZtSTYIYTScnEKAJYkyGBAcbVTbokAygCCTWvH"
- NORMAL_ORG = "DSSNormalOrg"
- PRO_USER = "DSSProUser"
+ NORMAL_ORG = "DVNormalOrg"
+ PRO_USER = "DVProUser"
PRO_USER_TOKEN = "hf_pro_user_token"
- ENTERPRISE_ORG = "DSSEnterpriseOrg"
- ENTERPRISE_USER = "DSSEnterpriseUser"
+ ENTERPRISE_ORG = "DVEnterpriseOrg"
+ ENTERPRISE_USER = "DVEnterpriseUser"
ENTERPRISE_USER_TOKEN = "hf_enterprise_user_token"
ENTERPRISE_USER_TOKEN = "hf_enterprise_user_token"

DATA = [
Expand Down
4 changes: 2 additions & 2 deletions e2e/tests/test_11_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -223,7 +223,7 @@ def test_normal_user_blocked_private(csv_path: str) -> None:
token=NORMAL_USER_TOKEN,
files={"data.csv": csv_path},
dataset_prefix="blocked-",
- # ^ should be caught by COMMON_BLOCKED_DATASETS := "DSSUser/blocked-*"
+ # ^ should be caught by COMMON_BLOCKED_DATASETS := "DVUser/blocked-*"
repo_settings={"private": True},
) as dataset:
poll_parquet_until_ready_and_assert(
Expand All @@ -240,7 +240,7 @@ def test_normal_user_blocked_public(csv_path: str) -> None:
token=NORMAL_USER_TOKEN,
files={"data.csv": csv_path},
dataset_prefix="blocked-",
- # ^ should be caught by COMMON_BLOCKED_DATASETS := "DSSUser/blocked-*"
+ # ^ should be caught by COMMON_BLOCKED_DATASETS := "DVUser/blocked-*"
) as dataset:
poll_parquet_until_ready_and_assert(
dataset=dataset, expected_status_code=501, expected_error_code="DatasetInBlockListError"
Expand Down
2 changes: 1 addition & 1 deletion front/admin_ui/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ pinned: false

## Datasets-server Admin UI

- Deployed at (internal) https://huggingface.co/spaces/datasets-maintainers/datasets-server-admin-ui
+ Deployed at (internal) https://huggingface.co/spaces/datasets-maintainers/dataset-viewer-admin-ui

### Setup:

Expand Down
32 changes: 16 additions & 16 deletions front/admin_ui/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,14 @@

DEV = os.environ.get("DEV", False)
HF_ENDPOINT = os.environ.get("HF_ENDPOINT", "https://huggingface.co")
- PROD_DSS_ENDPOINT = os.environ.get(
-     "PROD_DSS_ENDPOINT", "https://datasets-server.huggingface.co"
+ PROD_DV_ENDPOINT = os.environ.get(
+     "PROD_DV_ENDPOINT", "https://datasets-server.huggingface.co"
)
- DEV_DSS_ENDPOINT = os.environ.get("DEV_DSS_ENDPOINT", "http://localhost:8100")
+ DEV_DV_ENDPOINT = os.environ.get("DEV_DV_ENDPOINT", "http://localhost:8100")
ADMIN_HF_ORGANIZATION = os.environ.get("ADMIN_HF_ORGANIZATION", "huggingface")
HF_TOKEN = os.environ.get("HF_TOKEN")

- DSS_ENDPOINT = DEV_DSS_ENDPOINT if DEV else PROD_DSS_ENDPOINT
+ DV_ENDPOINT = DEV_DV_ENDPOINT if DEV else PROD_DV_ENDPOINT


# global state (shared with all the user sessions)
Expand All @@ -34,13 +34,13 @@

def healthcheck():
try:
- response = requests.head(f"{DSS_ENDPOINT}/admin/healthcheck", timeout=10)
+ response = requests.head(f"{DV_ENDPOINT}/admin/healthcheck", timeout=10)
except requests.ConnectionError as error:
- return f"❌ Failed to connect to {DSS_ENDPOINT} (error {error})"
+ return f"❌ Failed to connect to {DV_ENDPOINT} (error {error})"
if response.status_code == 200:
- return f"*Connected to {DSS_ENDPOINT}*"
+ return f"*Connected to {DV_ENDPOINT}*"
else:
- return f"❌ Failed to connect to {DSS_ENDPOINT} (error {response.status_code})"
+ return f"❌ Failed to connect to {DV_ENDPOINT} (error {response.status_code})"


def draw_graph(width, height):
Expand Down Expand Up @@ -126,7 +126,7 @@ def fetch_home_dashboard(token):
}
headers = {"Authorization": f"Bearer {token}"}
response = requests.get(
- f"{DSS_ENDPOINT}/admin/num-dataset-infos-by-builder-name",
+ f"{DV_ENDPOINT}/admin/num-dataset-infos-by-builder-name",
headers=headers,
timeout=60,
)
Expand Down Expand Up @@ -165,7 +165,7 @@ def fetch_home_dashboard(token):
value=pd.DataFrame(
{
"Error": [
- f"❌ Failed to fetch dataset infos from {DSS_ENDPOINT} (error {response.status_code})"
+ f"❌ Failed to fetch dataset infos from {DV_ENDPOINT} (error {response.status_code})"
]
}
),
Expand All @@ -181,7 +181,7 @@ def fetch_home_dashboard(token):

def get_is_valid_response(dataset: str):
return requests.get(
- f"{DSS_ENDPOINT}/is-valid?dataset={dataset}",
+ f"{DV_ENDPOINT}/is-valid?dataset={dataset}",
headers=headers,
timeout=60,
)
Expand Down Expand Up @@ -289,7 +289,7 @@ def view_jobs(token):
global pending_jobs_df
headers = {"Authorization": f"Bearer {token}"}
response = requests.get(
- f"{DSS_ENDPOINT}/admin/pending-jobs",
+ f"{DV_ENDPOINT}/admin/pending-jobs",
headers=headers,
timeout=60,
)
Expand Down Expand Up @@ -346,7 +346,7 @@ def view_jobs(token):
value=pd.DataFrame(
{
"Error": [
- f"❌ Failed to view pending jobs to {DSS_ENDPOINT} (error {response.status_code})"
+ f"❌ Failed to view pending jobs to {DV_ENDPOINT} (error {response.status_code})"
]
}
),
Expand Down Expand Up @@ -478,7 +478,7 @@ def refresh_dataset(
params["difficulty"] = refresh_difficulty
params = urllib.parse.urlencode(params)
response = requests.post(
- f"{DSS_ENDPOINT}/admin/force-refresh/{refresh_type}?{params}",
+ f"{DV_ENDPOINT}/admin/force-refresh/{refresh_type}?{params}",
headers=headers,
timeout=60,
)
Expand Down Expand Up @@ -542,7 +542,7 @@ def delete_and_recreate_dataset(
}
params = urllib.parse.urlencode(params)
response = requests.post(
- f"{DSS_ENDPOINT}/admin/recreate-dataset?{params}",
+ f"{DV_ENDPOINT}/admin/recreate-dataset?{params}",
headers=headers,
timeout=60,
)
Expand Down Expand Up @@ -577,7 +577,7 @@ def delete_and_recreate_dataset(
def get_dataset_status(token, dataset):
headers = {"Authorization": f"Bearer {token}"}
response = requests.get(
- f"{DSS_ENDPOINT}/admin/dataset-status?dataset={dataset}",
+ f"{DV_ENDPOINT}/admin/dataset-status?dataset={dataset}",
headers=headers,
timeout=60,
)
Expand Down
4 changes: 2 additions & 2 deletions jobs/cache_maintenance/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,8 @@
@fixture(scope="session")
def monkeypatch_session() -> Iterator[MonkeyPatch]:
monkeypatch_session = MonkeyPatch()
- monkeypatch_session.setenv("CACHE_MONGO_DATABASE", "datasets_server_cache_test")
- monkeypatch_session.setenv("QUEUE_MONGO_DATABASE", "datasets_server_queue_test")
+ monkeypatch_session.setenv("CACHE_MONGO_DATABASE", "dataset_viewer_cache_test")
+ monkeypatch_session.setenv("QUEUE_MONGO_DATABASE", "dataset_viewer_queue_test")
monkeypatch_session.setenv("COMMON_HF_ENDPOINT", CI_HUB_ENDPOINT)
monkeypatch_session.setenv("COMMON_HF_TOKEN", CI_APP_TOKEN)
monkeypatch_session.setenv("DISCUSSIONS_BOT_ASSOCIATED_USER_NAME", CI_PARQUET_CONVERTER_USER)
Expand Down
4 changes: 2 additions & 2 deletions jobs/cache_maintenance/tests/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,9 @@

# see https://github.com/huggingface/moon-landing/blob/main/server/scripts/staging-seed-db.ts
CI_APP_TOKEN = "hf_app_datasets-server_token"
- CI_PARQUET_CONVERTER_USER = "DSSParquetConverterUser"
+ CI_PARQUET_CONVERTER_USER = "DVParquetConverterUser"
CI_PARQUET_CONVERTER_APP_TOKEN = "hf_app_datasets-server-parquet-converter_token"
CI_HUB_ENDPOINT = "https://hub-ci.huggingface.co"
CI_URL_TEMPLATE = CI_HUB_ENDPOINT + "/{repo_id}/resolve/{revision}/(unknown)"
- CI_USER = "DSSUser"
+ CI_USER = "DVUser"
CI_USER_TOKEN = "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"
2 changes: 1 addition & 1 deletion jobs/mongodb_migration/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ The script can be configured using environment variables. They are grouped by sc

Set environment variables to configure the job (`DATABASE_MIGRATIONS_` prefix):

- - `DATABASE_MIGRATIONS_MONGO_DATABASE`: the name of the database used for storing the migrations history. Defaults to `"datasets_server_maintenance"`.
+ - `DATABASE_MIGRATIONS_MONGO_DATABASE`: the name of the database used for storing the migrations history. Defaults to `"dataset_viewer_maintenance"`.
- `DATABASE_MIGRATIONS_MONGO_URL`: the URL used to connect to the mongo db server. Defaults to `"mongodb://localhost:27017"`.

### Common
Expand Down
2 changes: 1 addition & 1 deletion jobs/mongodb_migration/src/mongodb_migration/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
from environs import Env
from libcommon.config import CacheConfig, LogConfig, QueueConfig

- DATABASE_MIGRATIONS_MONGO_DATABASE = "datasets_server_maintenance"
+ DATABASE_MIGRATIONS_MONGO_DATABASE = "dataset_viewer_maintenance"
DATABASE_MIGRATIONS_MONGO_URL = "mongodb://localhost:27017"


Expand Down
2 changes: 1 addition & 1 deletion jobs/mongodb_migration/tests/test_plan.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

@pytest.fixture(autouse=True)
def migrations_mongo_resource(mongo_host: str) -> Iterator[MigrationsMongoResource]:
- database = "datasets_server_migrations_test"
+ database = "dataset_viewer_migrations_test"
if "test" not in database:
raise ValueError("Test must be launched on a test mongo database")
with MigrationsMongoResource(database=database, host=mongo_host) as resource:
Expand Down
4 changes: 2 additions & 2 deletions libs/libapi/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,8 +59,8 @@ def image_path() -> str:
@fixture(scope="session")
def monkeypatch_session(tmp_path_factory: TempPathFactory) -> Iterator[MonkeyPatch]:
monkeypatch_session = MonkeyPatch()
- monkeypatch_session.setenv("CACHE_MONGO_DATABASE", "datasets_server_cache_test")
- monkeypatch_session.setenv("QUEUE_MONGO_DATABASE", "datasets_server_queue_test")
+ monkeypatch_session.setenv("CACHE_MONGO_DATABASE", "dataset_viewer_cache_test")
+ monkeypatch_session.setenv("QUEUE_MONGO_DATABASE", "dataset_viewer_queue_test")
yield monkeypatch_session
monkeypatch_session.undo()

Expand Down
4 changes: 2 additions & 2 deletions libs/libcommon/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -47,14 +47,14 @@ Set the common environment variables to configure the logs:

Set environment variables to configure the storage of precomputed API responses in a MongoDB database (the "cache"):

- - `CACHE_MONGO_DATABASE`: name of the database used for storing the cache. Defaults to `datasets_server_cache`.
+ - `CACHE_MONGO_DATABASE`: name of the database used for storing the cache. Defaults to `dataset_viewer_cache`.
- `CACHE_MONGO_URL`: URL used to connect to the MongoDB server. Defaults to `mongodb://localhost:27017`.

## Queue configuration

Set environment variables to configure the job queues to precompute API responses. The job queues are stored in a MongoDB database.

- - `QUEUE_MONGO_DATABASE`: name of the database used for storing the queue. Defaults to `datasets_server_queue`.
+ - `QUEUE_MONGO_DATABASE`: name of the database used for storing the queue. Defaults to `dataset_viewer_queue`.
- `QUEUE_MONGO_URL`: URL used to connect to the MongoDB server. Defaults to `mongodb://localhost:27017`.

## S3 configuration
Expand Down
4 changes: 2 additions & 2 deletions libs/libcommon/src/libcommon/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ def from_env(cls) -> "LogConfig":
)


- CACHE_MONGO_DATABASE = "datasets_server_cache"
+ CACHE_MONGO_DATABASE = "dataset_viewer_cache"
CACHE_MONGO_URL = "mongodb://localhost:27017"


Expand All @@ -192,7 +192,7 @@ def from_env(cls) -> "CacheConfig":
)


- QUEUE_MONGO_DATABASE = "datasets_server_queue"
+ QUEUE_MONGO_DATABASE = "dataset_viewer_queue"
QUEUE_MONGO_URL = "mongodb://localhost:27017"


Expand Down
4 changes: 2 additions & 2 deletions libs/libcommon/src/libcommon/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@
CACHE_MONGOENGINE_ALIAS = "cache"
HF_DATASETS_CACHE_APPNAME = "hf_datasets_cache"
PARQUET_METADATA_CACHE_APPNAME = "datasets_server_parquet_metadata"
- DESCRIPTIVE_STATISTICS_CACHE_APPNAME = "datasets_server_descriptive_statistics"
- DUCKDB_INDEX_CACHE_APPNAME = "datasets_server_duckdb_index"
+ DESCRIPTIVE_STATISTICS_CACHE_APPNAME = "dataset_viewer_descriptive_statistics"
+ DUCKDB_INDEX_CACHE_APPNAME = "dataset_viewer_duckdb_index"
DUCKDB_INDEX_DOWNLOADS_SUBDIRECTORY = "downloads"
DUCKDB_INDEX_JOB_RUNNER_SUBDIRECTORY = "job_runner"
DUCKDB_VERSION = "0.10.0"
Expand Down
2 changes: 0 additions & 2 deletions libs/libcommon/src/libcommon/viewer_utils/asset.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,6 @@
from libcommon.storage import StrPath, remove_dir
from libcommon.storage_client import StorageClient

- ASSET_DIR_MODE = 0o755
- DATASETS_SERVER_MDATE_FILENAME = ".dss"
SUPPORTED_AUDIO_EXTENSION_TO_MEDIA_TYPE = {".wav": "audio/wav", ".mp3": "audio/mpeg"}


Expand Down
4 changes: 2 additions & 2 deletions libs/libcommon/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ def queue_mongo_host(env: Env) -> str:

@fixture
def queue_mongo_resource(queue_mongo_host: str) -> Iterator[QueueMongoResource]:
- database = "datasets_server_queue_test"
+ database = "dataset_viewer_queue_test"
host = queue_mongo_host
if "test" not in database:
raise ValueError("Test must be launched on a test mongo database")
Expand All @@ -61,7 +61,7 @@ def queue_mongo_resource(queue_mongo_host: str) -> Iterator[QueueMongoResource]:

@fixture
def cache_mongo_resource(cache_mongo_host: str) -> Iterator[CacheMongoResource]:
- database = "datasets_server_cache_test"
+ database = "dataset_viewer_cache_test"
host = cache_mongo_host
if "test" not in database:
raise ValueError("Test must be launched on a test mongo database")
Expand Down
10 changes: 5 additions & 5 deletions libs/libcommon/tests/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,13 @@
CI_HUB_ENDPOINT = "https://hub-ci.huggingface.co"
PROD_HUB_ENDPOINT = "https://huggingface.co"
#
- NORMAL_USER = "DSSUser"
+ NORMAL_USER = "DVUser"
NORMAL_USER_TOKEN = "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"
- NORMAL_ORG = "DSSNormalOrg"
- PRO_USER = "DSSProUser"
+ NORMAL_ORG = "DVNormalOrg"
+ PRO_USER = "DVProUser"
PRO_USER_TOKEN = "hf_pro_user_token"
- ENTERPRISE_ORG = "DSSEnterpriseOrg"
- ENTERPRISE_USER = "DSSEnterpriseUser"
+ ENTERPRISE_ORG = "DVEnterpriseOrg"
+ ENTERPRISE_USER = "DVEnterpriseUser"
ENTERPRISE_USER_TOKEN = "hf_enterprise_user_token"
ENTERPRISE_USER_TOKEN = "hf_enterprise_user_token"

DEFAULT_CONFIG = "default"
Expand Down
8 changes: 4 additions & 4 deletions libs/libcommon/tests/test_resources.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,10 @@


def test_database_resource(queue_mongo_host: str) -> None:
- database_1 = "datasets_server_1"
- database_2 = "datasets_server_2"
+ database_1 = "dataset_viewer_1"
+ database_2 = "dataset_viewer_2"
host = queue_mongo_host
- mongoengine_alias = "datasets_server_mongo_alias"
+ mongoengine_alias = "dataset_viewer_mongo_alias"
server_selection_timeout_ms = 5_000
resource_1 = MongoResource(
database=database_1,
Expand Down Expand Up @@ -63,7 +63,7 @@ def test_database_resource_errors(
) -> None:
if not host:
host = queue_mongo_host
- database = "datasets_server_test"
+ database = "dataset_viewer_test"
resource = MongoResource(
database=database,
host=host,
Expand Down
4 changes: 2 additions & 2 deletions services/admin/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@ def monkeypatch_session(hf_endpoint: str, hf_token: str, tmp_path_factory: TempP
assets_root = str(tmp_path_factory.mktemp("assets_root"))
monkeypatch_session.setenv("CACHED_ASSETS_STORAGE_ROOT", assets_root)
monkeypatch_session.setenv("ASSETS_STORAGE_ROOT", assets_root)
- monkeypatch_session.setenv("CACHE_MONGO_DATABASE", "datasets_server_cache_test")
- monkeypatch_session.setenv("QUEUE_MONGO_DATABASE", "datasets_server_queue_test")
+ monkeypatch_session.setenv("CACHE_MONGO_DATABASE", "dataset_viewer_cache_test")
+ monkeypatch_session.setenv("QUEUE_MONGO_DATABASE", "dataset_viewer_queue_test")
monkeypatch_session.setenv("COMMON_HF_ENDPOINT", hf_endpoint)
monkeypatch_session.setenv("COMMON_HF_TOKEN", hf_token)
monkeypatch_session.setenv("ADMIN_HF_TIMEOUT_SECONDS", "10")
Expand Down
2 changes: 1 addition & 1 deletion services/admin/tests/fixtures/hub.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
from huggingface_hub.utils._errors import hf_raise_for_status

# see https://github.com/huggingface/moon-landing/blob/main/server/scripts/staging-seed-db.ts
- CI_HUB_USER = "DSSUser"
+ CI_HUB_USER = "DVUser"
CI_HUB_USER_API_TOKEN = "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"

CI_HUB_ENDPOINT = "https://hub-ci.huggingface.co"
Expand Down
4 changes: 2 additions & 2 deletions services/admin/tests/test_app_real.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,8 @@
@fixture(scope="module")
def real_monkeypatch() -> Iterator[MonkeyPatch]:
monkeypatch = MonkeyPatch()
- monkeypatch.setenv("CACHE_MONGO_DATABASE", "datasets_server_cache_test")
- monkeypatch.setenv("QUEUE_MONGO_DATABASE", "datasets_server_queue_test")
+ monkeypatch.setenv("CACHE_MONGO_DATABASE", "dataset_viewer_cache_test")
+ monkeypatch.setenv("QUEUE_MONGO_DATABASE", "dataset_viewer_queue_test")
monkeypatch.setenv("COMMON_HF_ENDPOINT", "https://huggingface.co")
monkeypatch.setenv("COMMON_HF_TOKEN", "")
yield monkeypatch
Expand Down
Loading

0 comments on commit 8efd4a5

Please sign in to comment.