Skip to content

Add lastCrawlExecSeconds to workflow #2612

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Draft
wants to merge 12 commits into
base: main
Choose a base branch
from
Draft
16 changes: 15 additions & 1 deletion backend/btrixcloud/crawlconfigs.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
import re
import os
import traceback
from datetime import datetime
from datetime import datetime, timedelta
from uuid import UUID, uuid4
import urllib.parse

Expand Down Expand Up @@ -95,6 +95,8 @@ class CrawlConfigOps:
crawler_images_map: dict[str, str]
crawler_image_pull_policy_map: dict[str, str]

paused_expiry_delta: timedelta

def __init__(
self,
dbclient,
Expand All @@ -121,6 +123,10 @@ def __init__(
"DEFAULT_CRAWLER_IMAGE_PULL_POLICY", "IfNotPresent"
)

self.paused_expiry_delta = timedelta(
minutes=int(os.environ.get("PAUSED_CRAWL_LIMIT_MINUTES", "10080"))
)

self.router = APIRouter(
prefix="/crawlconfigs",
tags=["crawlconfigs"],
Expand Down Expand Up @@ -765,6 +771,14 @@ async def _add_running_curr_crawl_stats(self, crawlconfig: CrawlConfigOut):
crawlconfig.lastCrawlState = crawl.state
crawlconfig.lastCrawlSize = crawl.stats.size if crawl.stats else 0
crawlconfig.lastCrawlStopping = crawl.stopping
crawlconfig.lastCrawlPausing = crawl.pausing
crawlconfig.lastCrawlPausedAt = crawl.pausedAt
crawlconfig.lastCrawlPausedExpiry = None
crawlconfig.lastCrawlExecSeconds = crawl.crawlExecSeconds
if crawl.pausedAt:
crawlconfig.lastCrawlPausedExpiry = (
crawl.pausedAt + self.paused_expiry_delta
)
crawlconfig.isCrawlRunning = True

async def get_crawl_config_out(self, cid: UUID, org: Organization):
Expand Down
10 changes: 9 additions & 1 deletion backend/btrixcloud/crawlmanager.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import secrets

from typing import Optional, Dict, Tuple
from datetime import timedelta
from datetime import datetime, timedelta

from fastapi import HTTPException

Expand Down Expand Up @@ -386,6 +386,14 @@ async def shutdown_crawl(self, crawl_id: str, graceful=True) -> dict:

return await self.delete_crawl_job(crawl_id)

async def pause_resume_crawl(
    self, crawl_id: str, paused_at: Optional[datetime] = None
) -> dict:
    """Patch the crawl job's pause state.

    Passing a datetime sets ``pausedAt`` (pause); passing None clears
    it with an empty string (resume). Returns the patch result dict.
    """
    # empty string signals "resume" to the operator; otherwise serialize
    # the pause timestamp
    paused_value = "" if paused_at is None else date_to_str(paused_at)
    return await self._patch_job(crawl_id, {"pausedAt": paused_value})

async def delete_crawl_configs_for_org(self, org: str) -> None:
"""Delete all crawl configs for given org"""
await self._delete_crawl_configs(f"btrix.org={org}")
Expand Down
49 changes: 49 additions & 0 deletions backend/btrixcloud/crawls.py
Original file line number Diff line number Diff line change
Expand Up @@ -769,6 +769,39 @@ async def get_crawl_stats(

return crawls_data

async def pause_crawl(
    self, crawl_id: str, org: Organization, pause: bool
) -> Dict[str, bool]:
    """Pause or resume a crawl temporarily.

    :param crawl_id: id of the crawl to pause or resume
    :param org: organization the crawl belongs to
    :param pause: True to pause the crawl, False to resume it
    :returns: {"success": True} when the crawl manager accepted the patch
    :raises HTTPException: 400 if the object is not a crawl; 404 if the
        crawl was not found or the pause/resume request failed
    """
    crawl = await self.get_base_crawl(crawl_id, org)
    if crawl and crawl.type != "crawl":
        raise HTTPException(status_code=400, detail="not_a_crawl")

    # pausedAt is set when pausing and cleared (None) when resuming
    paused_at = dt_now() if pause else None

    try:
        result = await self.crawl_manager.pause_resume_crawl(
            crawl_id, paused_at=paused_at
        )

        if result.get("success"):
            # persist the intended pause state so API reads reflect it
            # immediately, before the operator reconciles
            await self.crawls.find_one_and_update(
                {"_id": crawl_id, "type": "crawl", "oid": org.id},
                {"$set": {"pausing": pause, "pausedAt": paused_at}},
            )

            return {"success": True}
    # best-effort: any manager/db failure maps to 404 below, but catch
    # Exception (not bare except) so KeyboardInterrupt/SystemExit propagate
    # pylint: disable=broad-except
    except Exception:
        pass

    raise HTTPException(status_code=404, detail="crawl_not_found")

async def shutdown_crawl(
self, crawl_id: str, org: Organization, graceful: bool
) -> Dict[str, bool]:
Expand Down Expand Up @@ -1242,6 +1275,22 @@ async def crawl_cancel_immediately(
async def crawl_graceful_stop(crawl_id, org: Organization = Depends(org_crawl_dep)):
return await ops.shutdown_crawl(crawl_id, org, graceful=True)

@app.post(
    "/orgs/{oid}/crawls/{crawl_id}/pause",
    tags=["crawls"],
    response_model=SuccessResponse,
)
async def pause_crawl(crawl_id, org: Organization = Depends(org_crawl_dep)):
    """Pause a crawl: delegates to ops.pause_crawl with pause=True"""
    return await ops.pause_crawl(crawl_id, org, pause=True)

@app.post(
    "/orgs/{oid}/crawls/{crawl_id}/resume",
    tags=["crawls"],
    response_model=SuccessResponse,
)
async def resume_crawl(crawl_id, org: Organization = Depends(org_crawl_dep)):
    """Resume a paused crawl: delegates to ops.pause_crawl with pause=False"""
    return await ops.pause_crawl(crawl_id, org, pause=False)

@app.post(
"/orgs/{oid}/crawls/delete",
tags=["crawls"],
Expand Down
3 changes: 3 additions & 0 deletions backend/btrixcloud/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,8 @@ class SettingsResponse(BaseModel):

localesEnabled: Optional[List[str]]

pausedExpiryMinutes: int


# ============================================================================
# pylint: disable=too-many-locals, duplicate-code
Expand Down Expand Up @@ -158,6 +160,7 @@ def main() -> None:
if os.environ.get("LOCALES_ENABLED")
else None
),
pausedExpiryMinutes=int(os.environ.get("PAUSED_CRAWL_LIMIT_MINUTES", 10080)),
)

invites = init_invites(mdb, email)
Expand Down
12 changes: 11 additions & 1 deletion backend/btrixcloud/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -222,7 +222,9 @@ class UserOrgInfoOut(BaseModel):
]
RUNNING_STATES = get_args(TYPE_RUNNING_STATES)

TYPE_WAITING_STATES = Literal["starting", "waiting_capacity", "waiting_org_limit"]
TYPE_WAITING_STATES = Literal[
"starting", "waiting_capacity", "waiting_org_limit", "paused"
]
WAITING_STATES = get_args(TYPE_WAITING_STATES)

TYPE_FAILED_STATES = Literal[
Expand All @@ -236,6 +238,7 @@ class UserOrgInfoOut(BaseModel):
TYPE_SUCCESSFUL_STATES = Literal[
"complete",
"stopped_by_user",
"stopped_pause_expired",
"stopped_storage_quota_reached",
"stopped_time_quota_reached",
"stopped_org_readonly",
Expand Down Expand Up @@ -478,6 +481,10 @@ class CrawlConfigOut(CrawlConfigCore, CrawlConfigAdditional):
id: UUID

lastCrawlStopping: Optional[bool] = False
lastCrawlPausing: Optional[bool] = False
lastCrawlPausedAt: Optional[datetime] = None
lastCrawlPausedExpiry: Optional[datetime] = None
lastCrawlExecSeconds: Optional[int] = None
profileName: Optional[str] = None
firstSeed: Optional[str] = None
seedCount: int = 0
Expand Down Expand Up @@ -863,6 +870,8 @@ class CrawlOut(BaseMongoModel):
seedCount: Optional[int] = None
profileName: Optional[str] = None
stopping: Optional[bool] = False
pausing: Optional[bool] = False
pausedAt: Optional[datetime] = None
manual: bool = False
cid_rev: Optional[int] = None
scale: Scale = 1
Expand Down Expand Up @@ -1017,6 +1026,7 @@ class Crawl(BaseCrawl, CrawlConfigCore):
manual: bool = False

stopping: Optional[bool] = False
pausing: Optional[bool] = False

qaCrawlExecSeconds: int = 0

Expand Down
Loading