Update user names in crawls and workflows after username update (#1299)
Fixes #1275
This commit is contained in:
parent fc6dc287c0
commit 733809b5a8
@@ -206,6 +206,12 @@ class BaseCrawlOps:
             {"$set": data},
         )
 
+    async def update_usernames(self, userid: uuid.UUID, updated_name: str) -> None:
+        """Update username references matching userid"""
+        await self.crawls.update_many(
+            {"userid": userid}, {"$set": {"userName": updated_name}}
+        )
+
     async def shutdown_crawl(self, crawl_id: str, org: Organization, graceful: bool):
         """stop or cancel specified crawl"""
         crawl = await self.get_crawl_raw(crawl_id, org)
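For context, BaseCrawlOps.update_usernames is a single denormalization pass: every crawl document whose userid matches has its cached userName rewritten in place. A minimal standalone sketch of the same update_many call using Motor (the connection string and database/collection names here are illustrative assumptions, not taken from the commit):

import asyncio
import uuid

from motor.motor_asyncio import AsyncIOMotorClient


async def rename_user_in_crawls(userid: uuid.UUID, updated_name: str) -> int:
    # Assumed local MongoDB; Browsertrix gets its real connection from config.
    client = AsyncIOMotorClient("mongodb://localhost:27017", uuidRepresentation="standard")
    crawls = client["btrix_demo"]["crawls"]  # hypothetical db/collection names

    # Same shape as the commit: one update_many matching every crawl owned
    # by the user, rewriting only the denormalized userName field.
    result = await crawls.update_many(
        {"userid": userid}, {"$set": {"userName": updated_name}}
    )
    return result.modified_count


if __name__ == "__main__":
    print(asyncio.run(rename_user_in_crawls(uuid.uuid4(), "New Admin")))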
@@ -764,3 +770,5 @@ def init_base_crawls_api(
         org: Organization = Depends(org_crawl_dep),
     ):
         return await ops.delete_crawls_all_types(delete_list, org, user)
+
+    return ops
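The only change to init_base_crawls_api itself is that it now returns its ops object so that main() can pass it on to the UserManager. A minimal FastAPI sketch of this register-then-return shape (everything apart from the function name is a stand-in, not the real Browsertrix code):

from fastapi import FastAPI


class BaseCrawlOps:
    # Stand-in for the real ops class.
    async def update_usernames(self, userid, updated_name) -> None:
        ...  # update_many against the crawls collection, as in the commit


def init_base_crawls_api(app: FastAPI) -> BaseCrawlOps:
    ops = BaseCrawlOps()

    @app.post("/crawls/delete")
    async def delete_crawls_all_types():
        ...  # route bodies elided

    # Returning ops lets the caller wire it into other components
    # (here, UserManager.set_ops) after the routes are registered.
    return ops


app = FastAPI()
base_crawl_ops = init_base_crawls_api(app)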
@@ -340,6 +340,14 @@ class CrawlConfigOps:
         ret["started"] = crawl_id
         return ret
 
+    async def update_usernames(self, userid: uuid.UUID, updated_name: str) -> None:
+        """Update username references matching userid"""
+        for workflow_field in ["createdBy", "modifiedBy", "lastStartedBy"]:
+            await self.crawl_configs.update_many(
+                {workflow_field: userid},
+                {"$set": {f"{workflow_field}Name": updated_name}},
+            )
+
     async def get_crawl_configs(
         self,
         org: Organization,
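Workflows denormalize the author's name in three places (createdByName, modifiedByName, lastStartedByName), so CrawlConfigOps.update_usernames issues one update_many per field. A hedged standalone sketch that mirrors the loop and reports how many workflow documents each pass touched (connection and db/collection names are assumptions, as above):

import asyncio
import uuid

from motor.motor_asyncio import AsyncIOMotorClient

WORKFLOW_FIELDS = ["createdBy", "modifiedBy", "lastStartedBy"]


async def rename_user_in_workflows(userid: uuid.UUID, updated_name: str) -> dict:
    client = AsyncIOMotorClient("mongodb://localhost:27017", uuidRepresentation="standard")
    crawl_configs = client["btrix_demo"]["crawl_configs"]  # hypothetical names

    touched = {}
    for field in WORKFLOW_FIELDS:
        # Each pass matches workflows where this field holds the user's id
        # and rewrites only the corresponding "<field>Name" value.
        result = await crawl_configs.update_many(
            {field: userid}, {"$set": {f"{field}Name": updated_name}}
        )
        touched[field] = result.modified_count
    return touched


if __name__ == "__main__":
    print(asyncio.run(rename_user_in_workflows(uuid.uuid4(), "New Admin")))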
@@ -76,8 +76,6 @@ def main():
 
     event_webhook_ops = init_event_webhooks_api(mdb, org_ops, app_root)
 
-    user_manager.set_org_ops(org_ops)
-
     # pylint: disable=import-outside-toplevel
     if not os.environ.get("KUBERNETES_SERVICE_HOST"):
         print(
@@ -106,7 +104,7 @@ def main():
 
     coll_ops = init_collections_api(app, mdb, org_ops, storage_ops, event_webhook_ops)
 
-    init_base_crawls_api(
+    base_crawl_ops = init_base_crawls_api(
         app,
         mdb,
         user_manager,
@@ -118,6 +116,8 @@ def main():
         current_active_user,
     )
 
+    user_manager.set_ops(org_ops, crawl_config_ops, base_crawl_ops)
+
     crawls = init_crawls_api(
         app,
         mdb,
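The startup change is the crux of the wiring: set_org_ops is replaced by a single set_ops call made only after the org, workflow, and base-crawl ops all exist, since UserManager is constructed before any of them. A minimal self-contained sketch of this setter-injection pattern (all class bodies are stand-ins, not the real Browsertrix classes):

class UserManager:
    def __init__(self):
        # Ops references start empty and are wired in later, once startup
        # has constructed everything (setter injection).
        self.org_ops = None
        self.crawl_config_ops = None
        self.base_crawl_ops = None

    def set_ops(self, org_ops, crawl_config_ops, base_crawl_ops):
        self.org_ops = org_ops
        self.crawl_config_ops = crawl_config_ops
        self.base_crawl_ops = base_crawl_ops


class OrgOps: ...
class CrawlConfigOps: ...
class BaseCrawlOps: ...


def main():
    user_manager = UserManager()  # needed first by the ops constructors
    org_ops = OrgOps()
    crawl_config_ops = CrawlConfigOps()
    base_crawl_ops = BaseCrawlOps()
    # Only now can the back-references be completed in one call.
    user_manager.set_ops(org_ops, crawl_config_ops, base_crawl_ops)
    return user_manager


main()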
@@ -41,8 +41,6 @@ def main():
 
     event_webhook_ops = EventWebhookOps(mdb, org_ops)
 
-    user_manager.set_org_ops(org_ops)
-
     # pylint: disable=import-outside-toplevel
     if not os.environ.get("KUBERNETES_SERVICE_HOST"):
         print(
@@ -66,6 +64,8 @@ def main():
         profile_ops,
     )
 
+    user_manager.set_ops(org_ops, crawl_config_ops, None)
+
     coll_ops = CollectionOps(mdb, crawl_manager, org_ops, event_webhook_ops)
 
     crawl_ops = CrawlOps(
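In this second entrypoint there is no base-crawl API, so set_ops receives None for base_crawl_ops. That is safe because this process never serves the PATCH /users/me route that triggers update_usernames; a defensive variant (not part of the commit) would guard the fan-out anyway. A runnable sketch of that hypothetical guard, with a trimmed stand-in class:

import asyncio
import uuid


class UserManager:
    # Trimmed stand-in showing only a guarded fan-out, not the real class.
    def __init__(self, base_crawl_ops=None, crawl_config_ops=None):
        self.base_crawl_ops = base_crawl_ops
        self.crawl_config_ops = crawl_config_ops

    async def propagate_name(self, userid: uuid.UUID, name: str) -> None:
        # Guarding each ops reference keeps the method callable in a
        # process where base_crawl_ops was wired as None.
        if self.base_crawl_ops:
            await self.base_crawl_ops.update_usernames(userid, name)
        if self.crawl_config_ops:
            await self.crawl_config_ops.update_usernames(userid, name)


# With both ops unset, the call is a no-op rather than an AttributeError.
asyncio.run(UserManager().propagate_name(uuid.uuid4(), "New Admin"))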
@@ -51,21 +51,26 @@ from .auth import (
 
 
 # ============================================================================
-# pylint: disable=raise-missing-from, too-many-public-methods
+# pylint: disable=raise-missing-from, too-many-public-methods, too-many-instance-attributes
 class UserManager:
     """Browsertrix UserManager"""
 
     def __init__(self, mdb, email, invites):
         self.users = mdb.get_collection("users")
+        self.crawl_config_ops = None
+        self.base_crawl_ops = None
         self.email = email
         self.invites = invites
         self.org_ops = None
 
         self.registration_enabled = is_bool(os.environ.get("REGISTRATION_ENABLED"))
 
-    def set_org_ops(self, ops):
+    # pylint: disable=attribute-defined-outside-init
+    def set_ops(self, org_ops, crawl_config_ops, base_crawl_ops):
         """set org ops"""
-        self.org_ops = ops
+        self.org_ops = org_ops
+        self.crawl_config_ops = crawl_config_ops
+        self.base_crawl_ops = base_crawl_ops
 
     async def init_index(self):
         """init lookup index"""
@@ -476,6 +481,10 @@ class UserManager:
 
         await self.update_email_name(user, user_update.email, user_update.name)
 
+        if user_update.name:
+            await self.base_crawl_ops.update_usernames(user.id, user_update.name)
+            await self.crawl_config_ops.update_usernames(user.id, user_update.name)
+
     async def update_verified(self, user: User) -> None:
         """Update verified status for user"""
         await self.users.find_one_and_update(
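This is the hook itself: after a successful name change via PATCH /users/me, UserManager fans the new name out to both collections. The commit awaits the two calls sequentially; since they touch different collections, a concurrent variant with asyncio.gather would also work. A sketch under that assumption, with stub ops classes:

import asyncio
import uuid


class StubOps:
    # Stand-in for BaseCrawlOps / CrawlConfigOps.
    def __init__(self, label: str):
        self.label = label

    async def update_usernames(self, userid: uuid.UUID, name: str) -> None:
        print(f"{self.label}: userid={userid} -> {name}")


async def propagate(userid: uuid.UUID, new_name: str) -> None:
    base_crawl_ops = StubOps("crawls")
    crawl_config_ops = StubOps("workflows")
    # Alternative to the commit's sequential awaits: run both independent
    # denormalization passes concurrently.
    await asyncio.gather(
        base_crawl_ops.update_usernames(userid, new_name),
        crawl_config_ops.update_usernames(userid, new_name),
    )


asyncio.run(propagate(uuid.uuid4(), "New Admin"))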
@@ -1,7 +1,13 @@
 import requests
 import time
 
-from .conftest import API_PREFIX, CRAWLER_USERNAME, ADMIN_PW, ADMIN_USERNAME
+from .conftest import (
+    API_PREFIX,
+    CRAWLER_USERNAME,
+    ADMIN_PW,
+    ADMIN_USERNAME,
+    FINISHED_STATES,
+)
 
 VALID_USER_EMAIL = "validpassword@example.com"
 VALID_USER_PW = "validpassw0rd!"
@@ -230,14 +236,67 @@ def test_reset_valid_password(admin_auth_headers, default_org_id):
     assert r.json()["updated"] == True
 
 
-def test_patch_me_endpoint(admin_auth_headers, default_org_id):
+def test_patch_me_endpoint(admin_auth_headers, default_org_id, admin_userid):
+    # Start a new crawl
+    crawl_data = {
+        "runNow": True,
+        "name": "name change test crawl",
+        "config": {
+            "seeds": [{"url": "https://specs.webrecorder.net/", "depth": 1}],
+        },
+    }
+    r = requests.post(
+        f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs/",
+        headers=admin_auth_headers,
+        json=crawl_data,
+    )
+    data = r.json()
+    crawl_id = data["run_now_job"]
+
+    # Wait for it to complete
+    while True:
+        r = requests.get(
+            f"{API_PREFIX}/orgs/{default_org_id}/crawls/{crawl_id}/replay.json",
+            headers=admin_auth_headers,
+        )
+        data = r.json()
+        if data["state"] in FINISHED_STATES:
+            break
+        time.sleep(5)
+
+    # Change user name and email
+    new_name = "New Admin"
     r = requests.patch(
         f"{API_PREFIX}/users/me",
         headers=admin_auth_headers,
-        json={"email": "admin2@example.com", "name": "New Admin"},
+        json={"email": "admin2@example.com", "name": new_name},
     )
     assert r.status_code == 200
 
+    # Verify that name was updated in workflows and crawls
+    for workflow_field in ["createdBy", "modifiedBy", "lastStartedBy"]:
+        r = requests.get(
+            f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs?{workflow_field}={admin_userid}",
+            headers=admin_auth_headers,
+        )
+        assert r.status_code == 200
+        data = r.json()
+        assert data["total"] > 0
+        for workflow in data["items"]:
+            if workflow[workflow_field] == admin_userid:
+                assert workflow[f"{workflow_field}Name"] == new_name
+
+    r = requests.get(
+        f"{API_PREFIX}/orgs/{default_org_id}/crawls?userid={admin_userid}",
+        headers=admin_auth_headers,
+    )
+    assert r.status_code == 200
+    data = r.json()
+    assert data["total"] > 0
+    for item in data["items"]:
+        if item["userid"] == admin_userid:
+            assert item["userName"] == new_name
+
+
 def test_patch_me_invalid_email_in_use(admin_auth_headers, default_org_id):
     r = requests.patch(
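One caveat in the test as committed: the while True polling loop has no deadline, so a crawl that never reaches a finished state would hang the suite. A bounded-polling helper is a common alternative; a sketch under the same assumptions as the test (API prefix, auth headers, and FINISHED_STATES passed in; the helper name is hypothetical):

import time

import requests


def wait_for_crawl(api_prefix, org_id, crawl_id, headers, finished_states,
                   timeout=300, interval=5):
    # Poll replay.json until the crawl reaches a finished state, failing
    # loudly instead of hanging if the deadline passes.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        r = requests.get(
            f"{api_prefix}/orgs/{org_id}/crawls/{crawl_id}/replay.json",
            headers=headers,
        )
        if r.json()["state"] in finished_states:
            return r.json()
        time.sleep(interval)
    raise TimeoutError(f"crawl {crawl_id} did not finish within {timeout}s")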