diff --git a/backend/app/alembic/versions/3389c67fdcb4_add_alter_columns_in_collections_table.py b/backend/app/alembic/versions/3389c67fdcb4_add_alter_columns_in_collections_table.py
new file mode 100644
index 000000000..9581de3ee
--- /dev/null
+++ b/backend/app/alembic/versions/3389c67fdcb4_add_alter_columns_in_collections_table.py
@@ -0,0 +1,76 @@
+"""add/alter columns in collections table
+
+Revision ID: 3389c67fdcb4
+Revises: 8757b005d681
+Create Date: 2025-06-20 18:08:16.585843
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+from sqlalchemy.dialects import postgresql
+
+
+# revision identifiers, used by Alembic.
+revision = "3389c67fdcb4"
+down_revision = "8757b005d681"
+branch_labels = None
+depends_on = None
+
+collection_status_enum = postgresql.ENUM(
+    "processing",
+    "successful",
+    "failed",
+    name="collectionstatus",
+    create_type=False,  # we create manually to avoid duplicate issues
+)
+
+
+def upgrade():
+    collection_status_enum.create(op.get_bind(), checkfirst=True)
+    op.add_column(
+        "collection", sa.Column("organization_id", sa.Integer(), nullable=False)
+    )
+    op.add_column("collection", sa.Column("project_id", sa.Integer(), nullable=True))
+    op.add_column(
+        "collection",
+        sa.Column(
+            "status",
+            collection_status_enum,
+            nullable=False,
+            server_default="processing",
+        ),
+    )
+    # server_default is required: adding a NOT NULL column to a populated
+    # table fails without a default for existing rows.
+    op.add_column(
+        "collection",
+        sa.Column(
+            "updated_at",
+            sa.DateTime(),
+            nullable=False,
+            server_default=sa.text("now()"),
+        ),
+    )
+    op.alter_column(
+        "collection", "llm_service_id", existing_type=sa.VARCHAR(), nullable=True
+    )
+    op.alter_column(
+        "collection", "llm_service_name", existing_type=sa.VARCHAR(), nullable=True
+    )
+    # Name the constraints explicitly so downgrade() can drop them by name;
+    # op.drop_constraint(None, ...) is not valid.
+    op.create_foreign_key(
+        "fk_collection_organization_id",
+        "collection",
+        "organization",
+        ["organization_id"],
+        ["id"],
+        ondelete="CASCADE",
+    )
+    op.create_foreign_key(
+        "fk_collection_project_id",
+        "collection",
+        "project",
+        ["project_id"],
+        ["id"],
+        ondelete="CASCADE",
+    )
+
+
+def downgrade():
+    op.drop_constraint("fk_collection_project_id", "collection", type_="foreignkey")
+    op.drop_constraint("fk_collection_organization_id", "collection",
type_="foreignkey") + op.alter_column( + "collection", "llm_service_name", existing_type=sa.VARCHAR(), nullable=False + ) + op.alter_column( + "collection", "llm_service_id", existing_type=sa.VARCHAR(), nullable=False + ) + op.drop_column("collection", "updated_at") + op.drop_column("collection", "status") + op.drop_column("collection", "project_id") + op.drop_column("collection", "organization_id") diff --git a/backend/app/api/deps.py b/backend/app/api/deps.py index 50455d54f..43810c384 100644 --- a/backend/app/api/deps.py +++ b/backend/app/api/deps.py @@ -107,6 +107,30 @@ def get_current_user_org( CurrentUserOrg = Annotated[UserOrganization, Depends(get_current_user_org)] +def get_current_user_org_project( + current_user: CurrentUser, session: SessionDep, request: Request +) -> UserProjectOrg: + api_key = request.headers.get("X-API-KEY") + organization_id = None + project_id = None + + if api_key: + api_key_record = get_api_key_by_value(session, api_key) + if api_key_record: + validate_organization(session, api_key_record.organization_id) + organization_id = api_key_record.organization_id + project_id = api_key_record.project_id + + return UserProjectOrg( + **current_user.model_dump(), + organization_id=organization_id, + project_id=project_id, + ) + + +CurrentUserOrgProject = Annotated[UserProjectOrg, Depends(get_current_user_org_project)] + + def get_current_active_superuser(current_user: CurrentUser) -> User: if not current_user.is_superuser: raise HTTPException( diff --git a/backend/app/api/routes/collections.py b/backend/app/api/routes/collections.py index 532bab721..7c903930b 100644 --- a/backend/app/api/routes/collections.py +++ b/backend/app/api/routes/collections.py @@ -1,5 +1,6 @@ import inspect import logging +import time import warnings from uuid import UUID, uuid4 from typing import Any, List, Optional @@ -11,13 +12,14 @@ from pydantic import BaseModel, Field, HttpUrl from sqlalchemy.exc import NoResultFound, MultipleResultsFound, SQLAlchemyError 
-from app.api.deps import CurrentUser, SessionDep +from app.api.deps import CurrentUser, SessionDep, CurrentUserOrgProject from app.core.cloud import AmazonCloudStorage from app.core.config import settings from app.core.util import now, raise_from_unknown, post_callback from app.crud import DocumentCrud, CollectionCrud, DocumentCollectionCrud from app.crud.rag import OpenAIVectorStoreCrud, OpenAIAssistantCrud from app.models import Collection, Document +from app.models.collection import CollectionStatus from app.utils import APIResponse, load_description router = APIRouter(prefix="/collections", tags=["collections"]) @@ -173,61 +175,77 @@ def do_create_collection( request: CreationRequest, payload: ResponsePayload, ): + start_time = time.time() client = OpenAI(api_key=settings.OPENAI_API_KEY) - if request.callback_url is None: - callback = SilentCallback(payload) - else: - callback = WebHookCallback(request.callback_url, payload) - - # - # Create the assistant and vector store - # - - vector_store_crud = OpenAIVectorStoreCrud(client) - try: - vector_store = vector_store_crud.create() - except OpenAIError as err: - callback.fail(str(err)) - return + callback = ( + SilentCallback(payload) + if request.callback_url is None + else WebHookCallback(request.callback_url, payload) + ) storage = AmazonCloudStorage(current_user) document_crud = DocumentCrud(session, current_user.id) assistant_crud = OpenAIAssistantCrud(client) + vector_store_crud = OpenAIVectorStoreCrud(client) + collection_crud = CollectionCrud(session, current_user.id) - docs = request(document_crud) - kwargs = dict(request.extract_super_type(AssistantOptions)) try: - updates = vector_store_crud.update(vector_store.id, storage, docs) - documents = list(updates) - assistant = assistant_crud.create(vector_store.id, **kwargs) - except Exception as err: # blanket to handle SQL and OpenAI errors - logging.error(f"File Search setup error: {err} ({type(err).__name__})") - vector_store_crud.delete(vector_store.id) 
- callback.fail(str(err)) - return + vector_store = vector_store_crud.create() - # - # Store the results - # + docs = list(request(document_crud)) + flat_docs = [doc for sublist in docs for doc in sublist] - collection_crud = CollectionCrud(session, current_user.id) - collection = Collection( - id=UUID(payload.key), - llm_service_id=assistant.id, - llm_service_name=request.model, - ) - try: - collection_crud.create(collection, documents) - except SQLAlchemyError as err: - _backout(assistant_crud, assistant.id) - callback.fail(str(err)) - return + file_exts = {doc.fname.split(".")[-1] for doc in flat_docs if "." in doc.fname} + file_sizes_kb = [ + storage.get_file_size_kb(doc.object_store_url) for doc in flat_docs + ] + + logging.info( + f"[VectorStore Update] Uploading {len(flat_docs)} documents to vector store {vector_store.id}" + ) + list(vector_store_crud.update(vector_store.id, storage, docs)) + logging.info(f"[VectorStore Upload] Upload completed") + + assistant_options = dict(request.extract_super_type(AssistantOptions)) + logging.info( + f"[Assistant Create] Creating assistant with options: {assistant_options}" + ) + assistant = assistant_crud.create(vector_store.id, **assistant_options) + logging.info(f"[Assistant Create] Assistant created: {assistant.id}") + + collection = collection_crud.read_one(UUID(payload.key)) + collection.llm_service_id = assistant.id + collection.llm_service_name = request.model + collection.status = CollectionStatus.successful + collection.updated_at = now() + + if flat_docs: + logging.info( + f"[DocumentCollection] Linking {len(flat_docs)} documents to collection {collection.id}" + ) + DocumentCollectionCrud(session).create(collection, flat_docs) + + collection_crud._update(collection) - # - # Send back successful response - # + elapsed = time.time() - start_time + logging.info( + f"Collection created: {collection.id} | Time: {elapsed:.2f}s | " + f"Files: {len(flat_docs)} | Sizes: {file_sizes_kb} KB | Types: {list(file_exts)}" + 
) + callback.success(collection.model_dump(mode="json")) - callback.success(collection.model_dump(mode="json")) + except Exception as err: + logging.error(f"[Collection Creation Failed] {err} ({type(err).__name__})") + if "assistant" in locals(): + _backout(assistant_crud, assistant.id) + try: + collection = collection_crud.read_one(UUID(payload.key)) + collection.status = CollectionStatus.failed + collection.updated_at = now() + collection_crud._update(collection) + except Exception as suberr: + logging.warning(f"[Collection Status Update Failed] {suberr}") + callback.fail(str(err)) @router.post( @@ -236,7 +254,7 @@ def do_create_collection( ) def create_collection( session: SessionDep, - current_user: CurrentUser, + current_user: CurrentUserOrgProject, request: CreationRequest, background_tasks: BackgroundTasks, ): @@ -244,6 +262,18 @@ def create_collection( route = router.url_path_for(this.f_code.co_name) payload = ResponsePayload("processing", route) + collection = Collection( + id=UUID(payload.key), + owner_id=current_user.id, + organization_id=current_user.organization_id, + project_id=current_user.project_id, + status=CollectionStatus.processing, + ) + + collection_crud = CollectionCrud(session, current_user.id) + collection_crud.create(collection) + + # 2. 
Launch background task background_tasks.add_task( do_create_collection, session, diff --git a/backend/app/core/cloud/storage.py b/backend/app/core/cloud/storage.py index 0e5c2065d..0e8eaba46 100644 --- a/backend/app/core/cloud/storage.py +++ b/backend/app/core/cloud/storage.py @@ -1,6 +1,5 @@ import os -# import logging import functools as ft from pathlib import Path from dataclasses import dataclass, asdict @@ -125,6 +124,13 @@ def stream(self, url: str) -> StreamingBody: except ClientError as err: raise CloudStorageError(f'AWS Error: "{err}" ({url})') from err + def get_file_size_kb(self, url: str) -> float: + name = SimpleStorageName.from_url(url) + kwargs = asdict(name) + response = self.aws.client.head_object(**kwargs) + size_bytes = response["ContentLength"] + return round(size_bytes / 1024, 2) + def delete(self, url: str) -> None: name = SimpleStorageName.from_url(url) kwargs = asdict(name) diff --git a/backend/app/crud/collection.py b/backend/app/crud/collection.py index 7b91f4fbd..fe3e1b032 100644 --- a/backend/app/crud/collection.py +++ b/backend/app/crud/collection.py @@ -1,11 +1,12 @@ import functools as ft from uuid import UUID from typing import Optional - +import logging from sqlmodel import Session, func, select, and_ from app.models import Document, Collection, DocumentCollection from app.core.util import now +from app.models.collection import CollectionStatus from .document_collection import DocumentCollectionCrud @@ -43,13 +44,24 @@ def _exists(self, collection: Collection): return bool(present) - def create(self, collection: Collection, documents: list[Document]): - if self._exists(collection): - raise FileExistsError("Collection already present") - - collection = self._update(collection) - dc_crud = DocumentCollectionCrud(self.session) - dc_crud.create(collection, documents) + def create( + self, + collection: Collection, + documents: Optional[list[Document]] = None, + ): + try: + existing = self.read_one(collection.id) + if existing.status == 
CollectionStatus.failed:
+                self._update(collection)
+            else:
+                raise FileExistsError("Collection already present")
+        except FileExistsError:
+            # Re-raise: a bare except here would swallow the duplicate error
+            # and fall through to inserting a second copy of the collection.
+            raise
+        except Exception:
+            # read_one found no existing row: insert the new collection.
+            self.session.add(collection)
+            self.session.commit()
+
+        if documents:
+            dc_crud = DocumentCollectionCrud(self.session)
+            dc_crud.create(collection, documents)
 
         return collection
diff --git a/backend/app/models/collection.py b/backend/app/models/collection.py
index 2d7ea2a5e..27bc66e40 100644
--- a/backend/app/models/collection.py
+++ b/backend/app/models/collection.py
@@ -1,27 +1,53 @@
 from uuid import UUID, uuid4
 from datetime import datetime
+from typing import Optional
 
 from sqlmodel import Field, Relationship, SQLModel
 
 from app.core.util import now
 from .user import User
+from .organization import Organization
+from .project import Project
+import enum
+from enum import Enum
+
+
+class CollectionStatus(str, enum.Enum):
+    processing = "processing"
+    successful = "successful"
+    failed = "failed"
 
 
 class Collection(SQLModel, table=True):
-    id: UUID = Field(
-        default_factory=uuid4,
-        primary_key=True,
-    )
+    id: UUID = Field(default_factory=uuid4, primary_key=True)
+
     owner_id: int = Field(
         foreign_key="user.id",
         nullable=False,
         ondelete="CASCADE",
     )
-    llm_service_id: str
-    llm_service_name: str
-    created_at: datetime = Field(
-        default_factory=now,
+
+    organization_id: int = Field(
+        foreign_key="organization.id",
+        nullable=False,
+        ondelete="CASCADE",
     )
-    deleted_at: datetime | None
+
+    # Optional[int] to match the nullable column; a plain `int` annotation
+    # would force callers to supply a value.
+    project_id: Optional[int] = Field(
+        default=None,
+        foreign_key="project.id",
+        nullable=True,
+        ondelete="CASCADE",
+    )
+
+    llm_service_id: Optional[str] = Field(default=None, nullable=True)
+    llm_service_name: Optional[str] = Field(default=None, nullable=True)
+
+    status: CollectionStatus = Field(default=CollectionStatus.processing)
+
+    created_at: datetime = Field(default_factory=now)
+    updated_at: datetime = Field(default_factory=now)
+    deleted_at: Optional[datetime] = None
 
     owner: User = Relationship(back_populates="collections")
+    organization: Organization =
Relationship(back_populates="collections") + project: Project = Relationship(back_populates="collections") diff --git a/backend/app/models/organization.py b/backend/app/models/organization.py index 7e3e2cd8b..fc52bf837 100644 --- a/backend/app/models/organization.py +++ b/backend/app/models/organization.py @@ -10,6 +10,7 @@ from .project import Project from .api_key import APIKey from .assistants import Assistant + from .collection import Collection # Shared properties for an Organization @@ -48,6 +49,9 @@ class Organization(OrganizationBase, table=True): assistants: list["Assistant"] = Relationship( back_populates="organization", sa_relationship_kwargs={"cascade": "all, delete"} ) + collections: list["Collection"] = Relationship( + back_populates="organization", sa_relationship_kwargs={"cascade": "all, delete"} + ) # Properties to return via API diff --git a/backend/app/models/project.py b/backend/app/models/project.py index 8a56ec81c..df63f0d41 100644 --- a/backend/app/models/project.py +++ b/backend/app/models/project.py @@ -44,6 +44,9 @@ class Project(ProjectBase, table=True): back_populates="project", sa_relationship_kwargs={"cascade": "all, delete"} ) organization: Optional["Organization"] = Relationship(back_populates="project") + collections: list["Collection"] = Relationship( + back_populates="project", sa_relationship_kwargs={"cascade": "all, delete"} + ) # Properties to return via API diff --git a/backend/app/tests/api/routes/collections/test_collection_info.py b/backend/app/tests/api/routes/collections/test_collection_info.py new file mode 100644 index 000000000..26e7ef913 --- /dev/null +++ b/backend/app/tests/api/routes/collections/test_collection_info.py @@ -0,0 +1,108 @@ +import pytest +from uuid import uuid4 +from datetime import datetime, timezone +from fastapi.testclient import TestClient +from sqlmodel import Session +from app.core.config import settings +from app.models import Collection +from app.crud.collection import CollectionCrud +from 
app.main import app +from app.tests.utils.utils import get_user_from_api_key +from app.seed_data.seed_data import seed_database +from app.models.collection import CollectionStatus + +client = TestClient(app) + + +@pytest.fixture(scope="function", autouse=True) +def load_seed_data(db): + """Load seed data before each test.""" + seed_database(db) + yield + + +original_api_key = "ApiKey No3x47A5qoIGhm0kVKjQ77dhCqEdWRIQZlEPzzzh7i8" + + +def create_collection( + db, + user, + status: CollectionStatus = CollectionStatus.processing, + with_llm: bool = False, +): + now = datetime.now(timezone.utc) + collection = Collection( + id=uuid4(), + owner_id=user.user_id, + organization_id=user.organization_id, + project_id=user.project_id, + status=status, + updated_at=now, + ) + if with_llm: + collection.llm_service_id = f"asst_{uuid4()}" + collection.llm_service_name = "gpt-4o" + + db.add(collection) + db.commit() + db.refresh(collection) + return collection + + +def test_collection_info_processing(db: Session): + headers = {"X-API-KEY": original_api_key} + user = get_user_from_api_key(db, headers) + collection = create_collection(db, user, status=CollectionStatus.processing) + + response = client.post( + f"{settings.API_V1_STR}/collections/info/{collection.id}", + headers=headers, + ) + + assert response.status_code == 200 + data = response.json()["data"] + + assert data["id"] == str(collection.id) + assert data["status"] == CollectionStatus.processing.value + assert data["llm_service_id"] is None + assert data["llm_service_name"] is None + + +def test_collection_info_successful(db: Session): + headers = {"X-API-KEY": original_api_key} + user = get_user_from_api_key(db, headers) + collection = create_collection( + db, user, status=CollectionStatus.successful, with_llm=True + ) + + response = client.post( + f"{settings.API_V1_STR}/collections/info/{collection.id}", + headers=headers, + ) + + assert response.status_code == 200 + data = response.json()["data"] + + assert data["id"] 
== str(collection.id) + assert data["status"] == CollectionStatus.successful.value + assert data["llm_service_id"] == collection.llm_service_id + assert data["llm_service_name"] == "gpt-4o" + + +def test_collection_info_failed(db: Session): + headers = {"X-API-KEY": original_api_key} + user = get_user_from_api_key(db, headers) + collection = create_collection(db, user, status=CollectionStatus.failed) + + response = client.post( + f"{settings.API_V1_STR}/collections/info/{collection.id}", + headers=headers, + ) + + assert response.status_code == 200 + data = response.json()["data"] + + assert data["id"] == str(collection.id) + assert data["status"] == CollectionStatus.failed.value + assert data["llm_service_id"] is None + assert data["llm_service_name"] is None diff --git a/backend/app/tests/api/routes/collections/test_create_collections.py b/backend/app/tests/api/routes/collections/test_create_collections.py new file mode 100644 index 000000000..6fb0855c2 --- /dev/null +++ b/backend/app/tests/api/routes/collections/test_create_collections.py @@ -0,0 +1,107 @@ +import pytest +from uuid import UUID +import io + +import openai_responses +from sqlmodel import Session, select +from fastapi.testclient import TestClient +from openai import OpenAIError + +from app.core.config import settings +from app.tests.utils.document import DocumentStore +from app.tests.utils.utils import openai_credentials, get_user_from_api_key +from app.main import app +from app.crud.collection import CollectionCrud +from app.api.routes.collections import CreationRequest, ResponsePayload +from app.seed_data.seed_data import seed_database +from app.models.collection import CollectionStatus + +client = TestClient(app) + + +@pytest.fixture(scope="function", autouse=True) +def load_seed_data(db): + """Load seed data before each test.""" + seed_database(db) + yield + + +@pytest.fixture(autouse=True) +def mock_s3(monkeypatch): + class FakeStorage: + def __init__(self, *args, **kwargs): + pass + + def 
upload(self, file_obj, path: str, **kwargs): + return f"s3://fake-bucket/{path or 'mock-file.txt'}" + + def stream(self, file_obj): + fake_file = io.BytesIO(b"dummy content") + fake_file.name = "fake.txt" + return fake_file + + def get_file_size_kb(self, url: str) -> float: + return 1.0 # Simulate 1KB files + + class FakeS3Client: + def head_object(self, Bucket, Key): + return {"ContentLength": 1024} + + monkeypatch.setattr("app.api.routes.collections.AmazonCloudStorage", FakeStorage) + monkeypatch.setattr("boto3.client", lambda service: FakeS3Client()) + + +@pytest.mark.usefixtures("openai_credentials") +class TestCollectionRouteCreate: + _n_documents = 5 + + @openai_responses.mock() + def test_create_collection_success( + self, + client: TestClient, + db: Session, + ): + store = DocumentStore(db) + documents = store.fill(self._n_documents) + doc_ids = [str(doc.id) for doc in documents] + + body = { + "documents": doc_ids, + "batch_size": 2, + "model": "gpt-4o", + "instructions": "Test collection assistant.", + "temperature": 0.1, + } + original_api_key = "ApiKey No3x47A5qoIGhm0kVKjQ77dhCqEdWRIQZlEPzzzh7i8" + + headers = {"X-API-KEY": original_api_key} + + response = client.post( + f"{settings.API_V1_STR}/collections/create", + json=body, + headers=headers, + ) + + assert response.status_code == 200 + json = response.json() + assert json["success"] is True + metadata = json.get("metadata", {}) + assert metadata["status"] == CollectionStatus.processing.value + assert UUID(metadata["key"]) + + collection_id = UUID(metadata["key"]) + + user = get_user_from_api_key(db, headers) + collection = CollectionCrud(db, user.user_id).read_one(collection_id) + + info_response = client.post( + f"{settings.API_V1_STR}/collections/info/{collection_id}", + headers=headers, + ) + assert info_response.status_code == 200 + info_data = info_response.json()["data"] + + assert collection.status == CollectionStatus.successful.value + assert collection.owner_id == user.user_id + assert 
collection.llm_service_id is not None
+        assert collection.llm_service_name == "gpt-4o"
diff --git a/backend/app/tests/conftest.py b/backend/app/tests/conftest.py
index a68c3eca0..e2a6464d7 100644
--- a/backend/app/tests/conftest.py
+++ b/backend/app/tests/conftest.py
@@ -16,6 +16,7 @@
     User,
     OpenAI_Thread,
     Credential,
+    Collection,
 )
 from app.tests.utils.user import authentication_token_from_email
 from app.tests.utils.utils import get_superuser_token_headers
@@ -35,6 +36,7 @@ def db() -> Generator[Session, None, None]:
         session.execute(delete(APIKey))
         session.execute(delete(User))
         session.execute(delete(OpenAI_Thread))
+        session.execute(delete(Collection))
         session.commit()
diff --git a/backend/app/tests/utils/collection.py b/backend/app/tests/utils/collection.py
index c4b4804dc..ded5c728d 100644
--- a/backend/app/tests/utils/collection.py
+++ b/backend/app/tests/utils/collection.py
@@ -1,11 +1,13 @@
 from uuid import UUID
+from uuid import uuid4
 
 from openai import OpenAI
 from sqlmodel import Session
 
 from app.core.config import settings
-from app.models import Collection
+from app.models import Collection, Organization, Project
 from app.tests.utils.utils import get_user_id_by_email
+from app.crud import create_api_key
 
 
 class constants:
@@ -21,27 +23,36 @@ def uuid_increment(value: UUID):
 
 def get_collection(db: Session, client=None):
     owner_id = get_user_id_by_email(db)
 
+    # Step 1: Create real organization and project entries
+    organization = Organization(name=f"Test Org {uuid4()}")
+    db.add(organization)
+    db.commit()
+    db.refresh(organization)
+
+    # f-string prefix required, otherwise the project is literally named
+    # "Test Project {uuid4()}".
+    project = Project(name=f"Test Project {uuid4()}", organization_id=organization.id)
+    db.add(project)
+    db.commit()
+    db.refresh(project)
+
+    # Step 2: Create API key for user with valid foreign keys
+    create_api_key(
+        db, organization_id=organization.id, user_id=owner_id, project_id=project.id
+    )
+
     if client is None:
         client = OpenAI(api_key=settings.OPENAI_API_KEY)
+
     vector_store = client.vector_stores.create()
     assistant =
client.beta.assistants.create( model=constants.openai_model, - tools=[ - { - "type": "file_search", - }, - ], - tool_resources={ - "file_search": { - "vector_store_ids": [ - vector_store.id, - ], - }, - }, + tools=[{"type": "file_search"}], + tool_resources={"file_search": {"vector_store_ids": [vector_store.id]}}, ) return Collection( owner_id=owner_id, + organization_id=organization.id, + project_id=project.id, llm_service_id=assistant.id, llm_service_name=constants.llm_service_name, ) diff --git a/backend/app/tests/utils/utils.py b/backend/app/tests/utils/utils.py index 2945e8b35..9cba7aaee 100644 --- a/backend/app/tests/utils/utils.py +++ b/backend/app/tests/utils/utils.py @@ -9,6 +9,9 @@ from app.core.config import settings from app.crud.user import get_user_by_email +from app.models import APIKeyPublic +from app.crud import create_api_key, get_api_key_by_value +from uuid import uuid4 T = TypeVar("T") @@ -39,16 +42,20 @@ def get_superuser_token_headers(client: TestClient) -> dict[str, str]: return headers -def get_user_id_by_email(db: Session): +def get_user_id_by_email(db: Session) -> int: user = get_user_by_email(session=db, email=settings.FIRST_SUPERUSER) return user.id +def get_user_from_api_key(db: Session, api_key_headers: dict[str, str]) -> APIKeyPublic: + key_value = api_key_headers["X-API-KEY"] + api_key = get_api_key_by_value(db, api_key_value=key_value) + if api_key is None: + raise ValueError("Invalid API Key") + return api_key + + def get_non_existent_id(session: Session, model: Type[T]) -> int: - """ - Returns an ID that does not exist for the given model. - It fetches the current max ID and adds 1. 
- """ result = session.exec(select(model.id).order_by(model.id.desc())).first() return (result or 0) + 1 @@ -60,10 +67,10 @@ def __init__(self, start=0): def __iter__(self): return self - def __next__(self): - uu_id = self.peek() + def __next__(self) -> UUID: + uu_id = UUID(int=self.start) self.start += 1 return uu_id - def peek(self): + def peek(self) -> UUID: return UUID(int=self.start)