Commit bc6515c

Add org check
1 parent f81efeb commit bc6515c

File tree

2 files changed: +9, -8 lines
  backend/btrixcloud/basecrawls.py
  backend/btrixcloud/colls.py

backend/btrixcloud/basecrawls.py

Lines changed: 4 additions & 2 deletions
@@ -602,10 +602,12 @@ async def bulk_presigned_files(

         return resources, pages_optimized

-    async def validate_all_crawls_successful(self, crawl_ids: List[str]):
+    async def validate_all_crawls_successful(
+        self, crawl_ids: List[str], org: Organization
+    ):
         """Validate that crawls in list exist and are successful or else raise exception"""
         for crawl_id in crawl_ids:
-            crawl = await self.get_base_crawl(crawl_id)
+            crawl = await self.get_base_crawl(crawl_id, org)
             if crawl.state in FAILED_STATES:
                 raise HTTPException(status_code=400, detail="invalid_failed_crawl")
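
Presumably the org check works because get_base_crawl can scope its crawl lookup to the supplied Organization. That lookup is not part of this diff; the following is only a rough sketch of the assumed query shape (the self.crawls attribute, the oid filter field, and the 404 detail string are all assumptions, not the actual implementation):

    # Illustrative sketch only, not the real get_base_crawl body:
    # when an org is supplied, restrict the crawl lookup to that org's id.
    query: dict = {"_id": crawl_id}
    if org:
        query["oid"] = org.id
    res = await self.crawls.find_one(query)
    if not res:
        raise HTTPException(status_code=404, detail="crawl_not_found")

Under that assumption, passing org here means a crawl id from a different organization can no longer be validated (or attached to a collection) by this method.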

backend/btrixcloud/colls.py

Lines changed: 5 additions & 6 deletions
@@ -121,10 +121,10 @@ async def init_index(self):
             [("oid", pymongo.ASCENDING), ("description", pymongo.ASCENDING)]
         )

-    async def add_collection(self, oid: UUID, coll_in: CollIn):
+    async def add_collection(self, org: Organization, coll_in: CollIn):
         """Add new collection"""
         crawl_ids = coll_in.crawlIds if coll_in.crawlIds else []
-        await self.crawl_ops.validate_all_crawls_successful(crawl_ids)
+        await self.crawl_ops.validate_all_crawls_successful(crawl_ids, org)

         coll_id = uuid4()
         created = dt_now()
@@ -133,7 +133,7 @@ async def add_collection(self, oid: UUID, coll_in: CollIn):

         coll = Collection(
             id=coll_id,
-            oid=oid,
+            oid=org.id,
             name=coll_in.name,
             slug=slug,
             description=coll_in.description,
@@ -146,7 +146,6 @@ async def add_collection(self, oid: UUID, coll_in: CollIn):
         )
         try:
             await self.collections.insert_one(coll.to_dict())
-            org = await self.orgs.get_org_by_id(oid)
             await self.clear_org_previous_slugs_matching_slug(slug, org)

             if crawl_ids:
@@ -231,7 +230,7 @@ async def add_crawls_to_collection(
         headers: Optional[dict] = None,
     ) -> CollOut:
         """Add crawls to collection"""
-        await self.crawl_ops.validate_all_crawls_successful(crawl_ids)
+        await self.crawl_ops.validate_all_crawls_successful(crawl_ids, org)

         modified = dt_now()
         result = await self.collections.find_one_and_update(
@@ -1023,7 +1022,7 @@ def init_collections_api(
     async def add_collection(
         new_coll: CollIn, org: Organization = Depends(org_crawl_dep)
     ):
-        return await colls.add_collection(org.id, new_coll)
+        return await colls.add_collection(org, new_coll)

     @app.get(
         "/orgs/{oid}/collections",

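With the route handler passing the full Organization resolved by org_crawl_dep, add_collection no longer needs its own get_org_by_id round trip, and the same org flows down into the crawl validation. A rough usage sketch under those assumptions (the standalone org_ops name and the CollIn field values are illustrative, not part of this diff):

    # Illustrative only: resolve the org once, then create a collection from its crawls.
    org = await org_ops.get_org_by_id(oid)  # roughly what org_crawl_dep yields per request (assumed)
    coll_out = await colls.add_collection(
        org, CollIn(name="My Collection", crawlIds=["crawl-a", "crawl-b"])
    )
    # Crawl ids that are in a failed state, or that cannot be found within this org,
    # are expected to raise an HTTPException instead of being added.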