backend/btrixcloud/basecrawls.py (2 additions, 2 deletions)
@@ -35,7 +35,7 @@
     User,
     StorageRef,
     RUNNING_AND_WAITING_STATES,
-    SUCCESSFUL_STATES,
+    SUCCESSFUL_AND_PAUSED_STATES,
     QARun,
     UpdatedResponse,
     DeletedResponseQuota,
@@ -460,7 +460,7 @@ async def _resolve_crawl_refs(

         if (
             files
-            and crawl.state in SUCCESSFUL_STATES
+            and crawl.state in SUCCESSFUL_AND_PAUSED_STATES
             and isinstance(crawl, CrawlOutWithResources)
         ):
             crawl.resources = await self._files_to_resources(files, org, crawl.id)
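Note: the practical effect of swapping SUCCESSFUL_STATES for SUCCESSFUL_AND_PAUSED_STATES here is that a paused crawl's files are surfaced as downloadable resources, just like a finished crawl's. A minimal sketch of the changed membership check (the helper name and the abridged state list are illustrative, not from the diff):

    # illustrative stand-ins; the real check lives in _resolve_crawl_refs
    SUCCESSFUL_AND_PAUSED_STATES = ["paused", "complete", "stopped_by_user"]  # abridged

    def should_attach_resources(state: str, has_files: bool) -> bool:
        # mirrors: files and crawl.state in SUCCESSFUL_AND_PAUSED_STATES
        return has_files and state in SUCCESSFUL_AND_PAUSED_STATES

    assert should_attach_resources("paused", True)
    assert not should_attach_resources("running", True)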
backend/btrixcloud/crawlconfigs.py (14 additions, 1 deletion)
@@ -11,7 +11,7 @@
 import re
 import os
 import traceback
-from datetime import datetime
+from datetime import datetime, timedelta
 from uuid import UUID, uuid4
 import urllib.parse
 
@@ -95,6 +95,8 @@ class CrawlConfigOps:
     crawler_images_map: dict[str, str]
     crawler_image_pull_policy_map: dict[str, str]
 
+    paused_expiry_delta: timedelta
+
     def __init__(
         self,
         dbclient,
@@ -121,6 +123,10 @@ def __init__(
"DEFAULT_CRAWLER_IMAGE_PULL_POLICY", "IfNotPresent"
)

self.paused_expiry_delta = timedelta(
minutes=int(os.environ.get("PAUSED_CRAWL_LIMIT_MINUTES", "10080"))
)

self.router = APIRouter(
prefix="/crawlconfigs",
tags=["crawlconfigs"],
@@ -765,6 +771,13 @@ async def _add_running_curr_crawl_stats(self, crawlconfig: CrawlConfigOut):
         crawlconfig.lastCrawlState = crawl.state
         crawlconfig.lastCrawlSize = crawl.stats.size if crawl.stats else 0
         crawlconfig.lastCrawlStopping = crawl.stopping
+        crawlconfig.lastCrawlShouldPause = crawl.shouldPause
+        crawlconfig.lastCrawlPausedAt = crawl.pausedAt
+        crawlconfig.lastCrawlPausedExpiry = None
+        if crawl.pausedAt:
+            crawlconfig.lastCrawlPausedExpiry = (
+                crawl.pausedAt + self.paused_expiry_delta
+            )
         crawlconfig.isCrawlRunning = True
 
     async def get_crawl_config_out(self, cid: UUID, org: Organization):
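Note: lastCrawlPausedExpiry is plain timedelta arithmetic over the PAUSED_CRAWL_LIMIT_MINUTES setting. A self-contained sketch under the diff's 10080-minute (7-day) default, with a hypothetical pausedAt value:

    import os
    from datetime import datetime, timedelta, timezone

    paused_expiry_delta = timedelta(
        minutes=int(os.environ.get("PAUSED_CRAWL_LIMIT_MINUTES", "10080"))  # 7 days
    )

    paused_at = datetime(2025, 1, 1, 12, 0, tzinfo=timezone.utc)  # hypothetical
    print(paused_at + paused_expiry_delta)  # 2025-01-08 12:00:00+00:00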
backend/btrixcloud/crawlmanager.py (9 additions, 1 deletion)
@@ -4,7 +4,7 @@
 import secrets
 
 from typing import Optional, Dict, Tuple
-from datetime import timedelta
+from datetime import datetime, timedelta
 
 from fastapi import HTTPException
 
@@ -386,6 +386,14 @@ async def shutdown_crawl(self, crawl_id: str, graceful=True) -> dict:

         return await self.delete_crawl_job(crawl_id)
 
+    async def pause_resume_crawl(
+        self, crawl_id: str, paused_at: Optional[datetime] = None
+    ) -> dict:
+        """pause or resume a crawl"""
+        return await self._patch_job(
+            crawl_id, {"pausedAt": date_to_str(paused_at) if paused_at else ""}
+        )
+
     async def delete_crawl_configs_for_org(self, org: str) -> None:
         """Delete all crawl configs for given org"""
         await self._delete_crawl_configs(f"btrix.org={org}")
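Note the sentinel convention in pause_resume_crawl: a datetime is serialized into the pausedAt patch, while None becomes an empty string, which signals the operator to resume. A sketch of the payload construction (date_to_str here is a stand-in; ISO 8601 output is an assumption):

    from datetime import datetime, timezone
    from typing import Optional

    def date_to_str(dt: datetime) -> str:
        # stand-in for btrixcloud's helper; assumed to emit ISO 8601
        return dt.isoformat()

    def pause_patch(paused_at: Optional[datetime]) -> dict:
        # mirrors the body of pause_resume_crawl above
        return {"pausedAt": date_to_str(paused_at) if paused_at else ""}

    print(pause_patch(datetime(2025, 1, 1, tzinfo=timezone.utc)))
    # {'pausedAt': '2025-01-01T00:00:00+00:00'}
    print(pause_patch(None))
    # {'pausedAt': ''}  -> clears the pause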
backend/btrixcloud/crawls.py (49 additions, 0 deletions)
@@ -769,6 +769,39 @@ async def get_crawl_stats(

         return crawls_data
 
+    async def pause_crawl(
+        self, crawl_id: str, org: Organization, pause: bool
+    ) -> Dict[str, bool]:
+        """pause or resume a crawl temporarily"""
+        crawl = await self.get_base_crawl(crawl_id, org)
+        if crawl and crawl.type != "crawl":
+            raise HTTPException(status_code=400, detail="not_a_crawl")
+
+        result = None
+
+        if pause:
+            paused_at = dt_now()
+        else:
+            paused_at = None
+
+        try:
+            result = await self.crawl_manager.pause_resume_crawl(
+                crawl_id, paused_at=paused_at
+            )
+
+            if result.get("success"):
+                await self.crawls.find_one_and_update(
+                    {"_id": crawl_id, "type": "crawl", "oid": org.id},
+                    {"$set": {"shouldPause": pause, "pausedAt": paused_at}},
+                )
+
+                return {"success": True}
+        # pylint: disable=bare-except
+        except:
+            pass
+
+        raise HTTPException(status_code=404, detail="crawl_not_found")
+
     async def shutdown_crawl(
         self, crawl_id: str, org: Organization, graceful: bool
     ) -> Dict[str, bool]:
@@ -1242,6 +1275,22 @@ async def crawl_cancel_immediately(
     async def crawl_graceful_stop(crawl_id, org: Organization = Depends(org_crawl_dep)):
         return await ops.shutdown_crawl(crawl_id, org, graceful=True)
 
+    @app.post(
+        "/orgs/{oid}/crawls/{crawl_id}/pause",
+        tags=["crawls"],
+        response_model=SuccessResponse,
+    )
+    async def pause_crawl(crawl_id, org: Organization = Depends(org_crawl_dep)):
+        return await ops.pause_crawl(crawl_id, org, pause=True)
+
+    @app.post(
+        "/orgs/{oid}/crawls/{crawl_id}/resume",
+        tags=["crawls"],
+        response_model=SuccessResponse,
+    )
+    async def resume_crawl(crawl_id, org: Organization = Depends(org_crawl_dep)):
+        return await ops.pause_crawl(crawl_id, org, pause=False)
+
     @app.post(
         "/orgs/{oid}/crawls/delete",
         tags=["crawls"],
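From a client's perspective the new endpoints are plain POSTs returning a SuccessResponse. A hedged usage sketch with httpx (base URL and bearer-token auth are assumptions about the deployment, not part of the diff):

    import httpx

    API = "https://app.example.com/api"            # assumed base URL
    HEADERS = {"Authorization": "Bearer <token>"}  # assumed auth scheme

    def pause(oid: str, crawl_id: str) -> bool:
        r = httpx.post(f"{API}/orgs/{oid}/crawls/{crawl_id}/pause", headers=HEADERS)
        r.raise_for_status()
        return r.json()["success"]

    def resume(oid: str, crawl_id: str) -> bool:
        r = httpx.post(f"{API}/orgs/{oid}/crawls/{crawl_id}/resume", headers=HEADERS)
        r.raise_for_status()
        return r.json()["success"]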
backend/btrixcloud/main.py (3 additions, 0 deletions)
@@ -124,6 +124,8 @@ class SettingsResponse(BaseModel):

     localesEnabled: Optional[List[str]]
 
+    pausedExpiryMinutes: int
+
 
 # ============================================================================
 # pylint: disable=too-many-locals, duplicate-code
@@ -158,6 +160,7 @@ def main() -> None:
             if os.environ.get("LOCALES_ENABLED")
             else None
         ),
+        pausedExpiryMinutes=int(os.environ.get("PAUSED_CRAWL_LIMIT_MINUTES", 10080)),
     )
 
     invites = init_invites(mdb, email)
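With pausedExpiryMinutes exposed in the settings response, a frontend can derive the pause window rather than hard-coding it. A small sketch with a hypothetical payload matching the 10080-minute default:

    # hypothetical slice of a settings payload, shaped per SettingsResponse
    settings = {"localesEnabled": None, "pausedExpiryMinutes": 10080}
    print(settings["pausedExpiryMinutes"] // 60 // 24, "days")  # 7 days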
backend/btrixcloud/models.py (11 additions, 1 deletion)
@@ -222,7 +222,9 @@ class UserOrgInfoOut(BaseModel):
 ]
 RUNNING_STATES = get_args(TYPE_RUNNING_STATES)
 
-TYPE_WAITING_STATES = Literal["starting", "waiting_capacity", "waiting_org_limit"]
+TYPE_WAITING_STATES = Literal[
+    "starting", "waiting_capacity", "waiting_org_limit", "paused"
+]
 WAITING_STATES = get_args(TYPE_WAITING_STATES)
 
 TYPE_FAILED_STATES = Literal[
@@ -236,11 +238,13 @@
 TYPE_SUCCESSFUL_STATES = Literal[
     "complete",
     "stopped_by_user",
+    "stopped_pause_expired",
     "stopped_storage_quota_reached",
     "stopped_time_quota_reached",
     "stopped_org_readonly",
 ]
 SUCCESSFUL_STATES = get_args(TYPE_SUCCESSFUL_STATES)
+SUCCESSFUL_AND_PAUSED_STATES = ["paused", *SUCCESSFUL_STATES]
 
 TYPE_RUNNING_AND_WAITING_STATES = Literal[TYPE_WAITING_STATES, TYPE_RUNNING_STATES]
 RUNNING_AND_WAITING_STATES = [*WAITING_STATES, *RUNNING_STATES]
@@ -478,6 +482,9 @@ class CrawlConfigOut(CrawlConfigCore, CrawlConfigAdditional):
     id: UUID
 
     lastCrawlStopping: Optional[bool] = False
+    lastCrawlShouldPause: Optional[bool] = False
+    lastCrawlPausedAt: Optional[datetime] = None
+    lastCrawlPausedExpiry: Optional[datetime] = None
     profileName: Optional[str] = None
     firstSeed: Optional[str] = None
     seedCount: int = 0
@@ -863,6 +870,8 @@ class CrawlOut(BaseMongoModel):
     seedCount: Optional[int] = None
     profileName: Optional[str] = None
     stopping: Optional[bool] = False
+    shouldPause: Optional[bool] = False
+    pausedAt: Optional[datetime] = None
     manual: bool = False
     cid_rev: Optional[int] = None
     scale: Scale = 1
@@ -1017,6 +1026,7 @@ class Crawl(BaseCrawl, CrawlConfigCore):
     manual: bool = False
 
     stopping: Optional[bool] = False
+    shouldPause: Optional[bool] = False
 
     qaCrawlExecSeconds: int = 0
 
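Worth noting how "paused" straddles the state taxonomy: it counts as a waiting state (the crawl can still resume) yet is grouped with successful states wherever finished files should be served. A runnable distillation of just the lists this diff touches:

    from typing import Literal, get_args

    TYPE_WAITING_STATES = Literal[
        "starting", "waiting_capacity", "waiting_org_limit", "paused"
    ]
    WAITING_STATES = get_args(TYPE_WAITING_STATES)

    TYPE_SUCCESSFUL_STATES = Literal[
        "complete",
        "stopped_by_user",
        "stopped_pause_expired",
        "stopped_storage_quota_reached",
        "stopped_time_quota_reached",
        "stopped_org_readonly",
    ]
    SUCCESSFUL_STATES = get_args(TYPE_SUCCESSFUL_STATES)
    SUCCESSFUL_AND_PAUSED_STATES = ["paused", *SUCCESSFUL_STATES]

    assert "paused" in WAITING_STATES
    assert "paused" not in SUCCESSFUL_STATES
    assert "paused" in SUCCESSFUL_AND_PAUSED_STATES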