Consolidate ops class initialization (#2117)
Fixes #2111. The background job and operator entrypoints now use a shared function that initializes and returns the ops classes. This is not applied in the main entrypoint, as that also initializes the backend API, which we don't want in the other entrypoints.
This commit is contained in:
parent
0dc025e9fd
commit
55a758f342
@ -6,25 +6,9 @@ import sys
|
||||
import traceback
|
||||
from uuid import UUID
|
||||
|
||||
from .crawlmanager import CrawlManager
|
||||
from .db import init_db
|
||||
from .emailsender import EmailSender
|
||||
|
||||
# from .utils import register_exit_handler
|
||||
from .models import BgJobType
|
||||
from .ops import init_ops
|
||||
|
||||
from .basecrawls import BaseCrawlOps
|
||||
from .invites import InviteOps
|
||||
from .users import init_user_manager
|
||||
from .orgs import OrgOps
|
||||
from .colls import CollectionOps
|
||||
from .crawlconfigs import CrawlConfigOps
|
||||
from .crawls import CrawlOps
|
||||
from .profiles import ProfileOps
|
||||
from .storages import StorageOps
|
||||
from .webhooks import EventWebhookOps
|
||||
from .background_jobs import BackgroundJobOps
|
||||
from .pages import PageOps
|
||||
|
||||
job_type = os.environ.get("BG_JOB_TYPE")
|
||||
oid = os.environ.get("OID")
|
||||
@ -33,19 +17,7 @@ oid = os.environ.get("OID")
|
||||
# ============================================================================
|
||||
# pylint: disable=too-many-function-args, duplicate-code, too-many-locals
|
||||
async def main():
|
||||
"""main init"""
|
||||
email = EmailSender()
|
||||
crawl_manager = None
|
||||
|
||||
dbclient, mdb = init_db()
|
||||
|
||||
invite_ops = InviteOps(mdb, email)
|
||||
|
||||
user_manager = init_user_manager(mdb, email, invite_ops)
|
||||
|
||||
org_ops = OrgOps(mdb, invite_ops, user_manager)
|
||||
|
||||
event_webhook_ops = EventWebhookOps(mdb, org_ops)
|
||||
"""run background job with access to ops classes"""
|
||||
|
||||
# pylint: disable=import-outside-toplevel
|
||||
if not os.environ.get("KUBERNETES_SERVICE_HOST"):
|
||||
@ -55,66 +27,7 @@ async def main():
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
crawl_manager = CrawlManager()
|
||||
|
||||
storage_ops = StorageOps(org_ops, crawl_manager)
|
||||
|
||||
background_job_ops = BackgroundJobOps(
|
||||
mdb, email, user_manager, org_ops, crawl_manager, storage_ops
|
||||
)
|
||||
|
||||
profile_ops = ProfileOps(
|
||||
mdb, org_ops, crawl_manager, storage_ops, background_job_ops
|
||||
)
|
||||
|
||||
crawl_config_ops = CrawlConfigOps(
|
||||
dbclient,
|
||||
mdb,
|
||||
user_manager,
|
||||
org_ops,
|
||||
crawl_manager,
|
||||
profile_ops,
|
||||
)
|
||||
|
||||
coll_ops = CollectionOps(mdb, crawl_manager, org_ops, event_webhook_ops)
|
||||
|
||||
base_crawl_ops = BaseCrawlOps(
|
||||
mdb,
|
||||
user_manager,
|
||||
org_ops,
|
||||
crawl_config_ops,
|
||||
coll_ops,
|
||||
storage_ops,
|
||||
event_webhook_ops,
|
||||
background_job_ops,
|
||||
)
|
||||
|
||||
crawl_ops = CrawlOps(
|
||||
crawl_manager,
|
||||
mdb,
|
||||
user_manager,
|
||||
org_ops,
|
||||
crawl_config_ops,
|
||||
coll_ops,
|
||||
storage_ops,
|
||||
event_webhook_ops,
|
||||
background_job_ops,
|
||||
)
|
||||
|
||||
page_ops = PageOps(mdb, crawl_ops, org_ops, storage_ops)
|
||||
|
||||
base_crawl_ops.set_page_ops(page_ops)
|
||||
crawl_ops.set_page_ops(page_ops)
|
||||
|
||||
background_job_ops.set_ops(crawl_ops, profile_ops)
|
||||
|
||||
org_ops.set_ops(base_crawl_ops, profile_ops, coll_ops, background_job_ops)
|
||||
|
||||
user_manager.set_ops(org_ops, crawl_config_ops, base_crawl_ops)
|
||||
|
||||
background_job_ops.set_ops(base_crawl_ops, profile_ops)
|
||||
|
||||
crawl_config_ops.set_coll_ops(coll_ops)
|
||||
(org_ops, _, _, _, _, _, _, _, _, _, user_manager) = init_ops()
|
||||
|
||||
# Run job
|
||||
if job_type == BgJobType.DELETE_ORG:
|
||||
|
@ -5,23 +5,10 @@ import sys
|
||||
|
||||
from fastapi import FastAPI
|
||||
|
||||
from .crawlmanager import CrawlManager
|
||||
from .db import init_db
|
||||
from .emailsender import EmailSender
|
||||
from .operator import init_operator_api
|
||||
from .ops import init_ops
|
||||
from .utils import register_exit_handler
|
||||
|
||||
from .invites import InviteOps
|
||||
from .users import init_user_manager
|
||||
from .orgs import OrgOps
|
||||
from .colls import CollectionOps
|
||||
from .crawlconfigs import CrawlConfigOps
|
||||
from .crawls import CrawlOps
|
||||
from .profiles import ProfileOps
|
||||
from .storages import init_storages_api
|
||||
from .webhooks import EventWebhookOps
|
||||
from .background_jobs import BackgroundJobOps
|
||||
from .pages import PageOps
|
||||
|
||||
app_root = FastAPI()
|
||||
|
||||
@ -29,19 +16,7 @@ app_root = FastAPI()
|
||||
# ============================================================================
|
||||
# pylint: disable=too-many-function-args, duplicate-code
|
||||
def main():
|
||||
"""main init"""
|
||||
email = EmailSender()
|
||||
crawl_manager = None
|
||||
|
||||
dbclient, mdb = init_db()
|
||||
|
||||
invite_ops = InviteOps(mdb, email)
|
||||
|
||||
user_manager = init_user_manager(mdb, email, invite_ops)
|
||||
|
||||
org_ops = OrgOps(mdb, invite_ops, user_manager)
|
||||
|
||||
event_webhook_ops = EventWebhookOps(mdb, org_ops)
|
||||
"""init operator"""
|
||||
|
||||
# pylint: disable=import-outside-toplevel
|
||||
if not os.environ.get("KUBERNETES_SERVICE_HOST"):
|
||||
@ -51,48 +26,19 @@ def main():
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
crawl_manager = CrawlManager()
|
||||
|
||||
storage_ops = init_storages_api(org_ops, crawl_manager)
|
||||
|
||||
background_job_ops = BackgroundJobOps(
|
||||
mdb, email, user_manager, org_ops, crawl_manager, storage_ops
|
||||
)
|
||||
|
||||
profile_ops = ProfileOps(
|
||||
mdb, org_ops, crawl_manager, storage_ops, background_job_ops
|
||||
)
|
||||
|
||||
crawl_config_ops = CrawlConfigOps(
|
||||
dbclient,
|
||||
mdb,
|
||||
user_manager,
|
||||
org_ops,
|
||||
crawl_manager,
|
||||
profile_ops,
|
||||
)
|
||||
|
||||
user_manager.set_ops(org_ops, crawl_config_ops, None)
|
||||
|
||||
coll_ops = CollectionOps(mdb, crawl_manager, org_ops, event_webhook_ops)
|
||||
|
||||
crawl_ops = CrawlOps(
|
||||
crawl_manager,
|
||||
mdb,
|
||||
user_manager,
|
||||
(
|
||||
org_ops,
|
||||
crawl_config_ops,
|
||||
_,
|
||||
crawl_ops,
|
||||
page_ops,
|
||||
coll_ops,
|
||||
_,
|
||||
storage_ops,
|
||||
event_webhook_ops,
|
||||
background_job_ops,
|
||||
)
|
||||
|
||||
page_ops = PageOps(mdb, crawl_ops, org_ops, storage_ops)
|
||||
|
||||
crawl_ops.set_page_ops(page_ops)
|
||||
|
||||
background_job_ops.set_ops(crawl_ops, profile_ops)
|
||||
event_webhook_ops,
|
||||
_,
|
||||
) = init_ops()
|
||||
|
||||
return init_operator_api(
|
||||
app_root,
|
||||
|
124
backend/btrixcloud/ops.py
Normal file
124
backend/btrixcloud/ops.py
Normal file
@ -0,0 +1,124 @@
|
||||
""" shared helper to initialize ops classes """
|
||||
|
||||
from typing import Tuple
|
||||
|
||||
from .crawlmanager import CrawlManager
|
||||
from .db import init_db
|
||||
from .emailsender import EmailSender
|
||||
|
||||
from .background_jobs import BackgroundJobOps
|
||||
from .basecrawls import BaseCrawlOps
|
||||
from .colls import CollectionOps
|
||||
from .crawls import CrawlOps
|
||||
from .crawlconfigs import CrawlConfigOps
|
||||
from .invites import InviteOps
|
||||
from .orgs import OrgOps
|
||||
from .pages import PageOps
|
||||
from .profiles import ProfileOps
|
||||
from .storages import StorageOps
|
||||
from .users import UserManager
|
||||
from .webhooks import EventWebhookOps
|
||||
|
||||
|
||||
# pylint: disable=too-many-locals
def init_ops() -> Tuple[
    OrgOps,
    CrawlConfigOps,
    BaseCrawlOps,
    CrawlOps,
    PageOps,
    CollectionOps,
    ProfileOps,
    StorageOps,
    BackgroundJobOps,
    EventWebhookOps,
    UserManager,
]:
    """Initialize all ops classes in dependency order and return them.

    Shared by the background-job and operator entrypoints so that the
    ops-class wiring lives in exactly one place. Construction order matters:
    each class receives the already-built instances it depends on, and the
    circular references are closed afterwards via the ``set_*`` calls.
    """
    email = EmailSender()

    dbclient, mdb = init_db()

    # Core user/org layer.
    invite_ops = InviteOps(mdb, email)
    user_manager = UserManager(mdb, email, invite_ops)
    org_ops = OrgOps(mdb, invite_ops, user_manager)
    event_webhook_ops = EventWebhookOps(mdb, org_ops)

    # Kubernetes-backed crawl management and storage.
    crawl_manager = CrawlManager()
    storage_ops = StorageOps(org_ops, crawl_manager)

    background_job_ops = BackgroundJobOps(
        mdb, email, user_manager, org_ops, crawl_manager, storage_ops
    )

    profile_ops = ProfileOps(
        mdb, org_ops, crawl_manager, storage_ops, background_job_ops
    )

    crawl_config_ops = CrawlConfigOps(
        dbclient,
        mdb,
        user_manager,
        org_ops,
        crawl_manager,
        profile_ops,
    )

    coll_ops = CollectionOps(mdb, crawl_manager, org_ops, event_webhook_ops)

    base_crawl_ops = BaseCrawlOps(
        mdb,
        user_manager,
        org_ops,
        crawl_config_ops,
        coll_ops,
        storage_ops,
        event_webhook_ops,
        background_job_ops,
    )

    crawl_ops = CrawlOps(
        crawl_manager,
        mdb,
        user_manager,
        org_ops,
        crawl_config_ops,
        coll_ops,
        storage_ops,
        event_webhook_ops,
        background_job_ops,
    )

    page_ops = PageOps(mdb, crawl_ops, org_ops, storage_ops)

    # Close the circular dependencies that couldn't be passed at
    # construction time.
    base_crawl_ops.set_page_ops(page_ops)
    crawl_ops.set_page_ops(page_ops)

    background_job_ops.set_ops(crawl_ops, profile_ops)

    org_ops.set_ops(base_crawl_ops, profile_ops, coll_ops, background_job_ops)

    user_manager.set_ops(org_ops, crawl_config_ops, base_crawl_ops)

    # NOTE(review): background_job_ops.set_ops is invoked a second time here
    # with base_crawl_ops instead of crawl_ops — presumably the later call
    # overrides the earlier one, making the first call redundant; verify
    # against BackgroundJobOps.set_ops before removing either call.
    background_job_ops.set_ops(base_crawl_ops, profile_ops)

    crawl_config_ops.set_coll_ops(coll_ops)

    return (
        org_ops,
        crawl_config_ops,
        base_crawl_ops,
        crawl_ops,
        page_ops,
        coll_ops,
        profile_ops,
        storage_ops,
        background_job_ops,
        event_webhook_ops,
        user_manager,
    )
|
Loading…
Reference in New Issue
Block a user