Initial set of backend APIs for event webhook notifications, covering the following events:

* Crawl started (includes a boolean indicating whether the crawl was scheduled)
* Crawl finished
* Upload finished
* Archived item added to collection
* Archived item removed from collection

Webhook URLs are configured via `/api/orgs/<oid>/event-webhook-urls`. If a URL is configured for a given event, a webhook notification is added to the database and delivery is attempted, with up to 5 tries per overall attempt and an increasing backoff between tries, implemented with the backoff library (which supports async).

Webhook status is available via `/api/orgs/<oid>/webhooks`.

(Additional testing + potential FastAPI integration left to separate follow-ups.)

Fixes #1041
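As a rough illustration of the configuration flow described above (the payload field name and deployment URL below are assumptions for the sake of example, not the actual schema), registering a webhook URL and then checking notification status might look like:

```python
import httpx

API = "https://app.example.com/api"  # hypothetical deployment URL
HEADERS = {"Authorization": "Bearer <token>"}

# Register a receiver URL for crawl-finished events; the
# "crawlFinished" field name is assumed for illustration.
resp = httpx.post(
    f"{API}/orgs/<oid>/event-webhook-urls",
    headers=HEADERS,
    json={"crawlFinished": "https://example.com/hooks/crawl-finished"},
)
resp.raise_for_status()

# List stored webhook notifications and their delivery status
status = httpx.get(f"{API}/orgs/<oid>/webhooks", headers=HEADERS).json()
```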
""" entrypoint module for operator """
|
|
|
|
import os
|
|
import sys
|
|
|
|
from fastapi import FastAPI
|
|
|
|
from .crawlmanager import CrawlManager
|
|
from .db import init_db
|
|
from .emailsender import EmailSender
|
|
from .operator import init_operator_api
|
|
from .utils import register_exit_handler
|
|
|
|
from .invites import InviteOps
|
|
from .users import init_user_manager
|
|
from .orgs import OrgOps
|
|
from .colls import CollectionOps
|
|
from .crawlconfigs import CrawlConfigOps
|
|
from .crawls import CrawlOps
|
|
from .profiles import ProfileOps
|
|
from .webhooks import EventWebhookOps
|
|
|
|
app_root = FastAPI()


# ============================================================================
# pylint: disable=too-many-function-args, duplicate-code
def main():
    """main init"""
    email = EmailSender()
    crawl_manager = None
    dbclient, mdb = init_db()

    invite_ops = InviteOps(mdb, email)

    user_manager = init_user_manager(mdb, email, invite_ops)

    org_ops = OrgOps(mdb, invite_ops)

    # event webhook ops are shared with collections, crawls, and the
    # operator API below
    event_webhook_ops = EventWebhookOps(mdb, org_ops)

    user_manager.set_org_ops(org_ops)
    # pylint: disable=import-outside-toplevel
    if not os.environ.get("KUBERNETES_SERVICE_HOST"):
        print(
            "Sorry, the Browsertrix Cloud Backend must be run inside a Kubernetes environment. "
            "Kubernetes not detected (KUBERNETES_SERVICE_HOST is not set), Exiting"
        )
        sys.exit(1)
    crawl_manager = CrawlManager()

    profile_ops = ProfileOps(mdb, crawl_manager)

    crawl_config_ops = CrawlConfigOps(
        dbclient,
        mdb,
        user_manager,
        org_ops,
        crawl_manager,
        profile_ops,
    )
    coll_ops = CollectionOps(mdb, crawl_manager, org_ops, event_webhook_ops)

    # CrawlOps is initialized for its side effects; the instance itself
    # is not needed here
    CrawlOps(
        mdb,
        user_manager,
        crawl_manager,
        crawl_config_ops,
        org_ops,
        coll_ops,
        event_webhook_ops,
    )

    # the operator API also receives event_webhook_ops so that webhook
    # notifications can be triggered from operator events
    init_operator_api(app_root, mdb, event_webhook_ops)
# ============================================================================
@app_root.on_event("startup")
async def startup():
    """init on startup"""
    register_exit_handler()
    main()
```
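The `EventWebhookOps` instance wired up above is responsible for persisting and delivering notifications. Per the description, each delivery is retried up to 5 times with increasing backoff via the backoff library, which supports async callables. A minimal sketch of that retry pattern (the function and parameter names here are hypothetical, not the actual implementation):

```python
import backoff
import httpx


# Retry on any httpx transport or HTTP-status error, up to 5 tries total,
# with exponential backoff between attempts; backoff natively supports
# decorating async functions.
@backoff.on_exception(backoff.expo, httpx.HTTPError, max_tries=5)
async def send_webhook_notification(url: str, body: dict) -> None:
    """POST one notification payload to a configured webhook URL."""
    async with httpx.AsyncClient() as client:
        resp = await client.post(url, json=body)
        # non-2xx responses raise and count as a failed try
        resp.raise_for_status()
```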