- Add default switch to Archive (org) model
- Set default org name via values.yaml
- Add check to ensure only one org with default org name exists
- Stop creating new orgs for new users
- Add new API endpoints for creating and renaming orgs (part of #457)
- Make Archive.name unique via index
- Wait for db connection on init, log if waiting
- Make archive-less invites invite user to default org with Owner role
- Rename default org from chart value if changed
- Don't create new org for invited users
commit 49460bb070 (parent a6c248f9dd)
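To exercise the new endpoints by hand, something like the following should work (a sketch only, mirroring the tests added below in backend/test/test_org.py; the API base URL and the superuser token are placeholders, not values from this change):

import requests

API_PREFIX = "http://localhost:30870/api"  # placeholder base URL, adjust to your deployment
auth_headers = {"Authorization": "Bearer <superuser token>"}  # placeholder token

# Create a new org (superuser only). The endpoint reuses the RenameArchive body,
# so only "name" is required.
r = requests.post(
    f"{API_PREFIX}/archives/create",
    headers=auth_headers,
    json={"name": "New Org"},
)
assert r.json()["added"]

# Rename an existing org by id (requires the Owner role on that org).
aid = "<archive id>"  # placeholder
r = requests.post(
    f"{API_PREFIX}/archives/{aid}/rename",
    headers=auth_headers,
    json={"name": "Renamed Org"},
)
assert r.json()["updated"]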
@@ -1,11 +1,15 @@
"""
Archive API handling
"""
+import asyncio
+import os
+import time
import uuid

from typing import Dict, Union, Literal, Optional

from pydantic import BaseModel
+from pymongo.errors import AutoReconnect, DuplicateKeyError
from fastapi import APIRouter, Depends, HTTPException, Request

from .db import BaseMongoModel
@@ -22,12 +26,21 @@ from .invites import (
# crawl scale for constraint
MAX_CRAWL_SCALE = 3

+DEFAULT_ORG = os.environ.get("DEFAULT_ORG", "My Organization")


# ============================================================================
class UpdateRole(InviteToArchiveRequest):
    """Update existing role for user"""


+# ============================================================================
+class RenameArchive(BaseModel):
+    """Request to rename an archive"""
+
+    name: str


# ============================================================================
class DefaultStorage(BaseModel):
    """Storage reference"""
@@ -63,6 +76,8 @@ class Archive(BaseMongoModel):

    usage: Dict[str, int] = {}

+    default: bool = False

    def is_owner(self, user):
        """Check if user is owner"""
        return self._is_auth(user, UserRole.OWNER)
@@ -135,9 +150,24 @@ class ArchiveOps:

        self.invites = invites

+    async def init_index(self):
+        """init lookup index"""
+        while True:
+            try:
+                return await self.archives.create_index("name", unique=True)
+            except AutoReconnect:
+                print(
+                    "Database connection unavailable to create index. Will try again in 5 seconds",
+                    flush=True,
+                )
+                time.sleep(5)

    async def add_archive(self, archive: Archive):
        """Add new archive"""
-        return await self.archives.insert_one(archive.to_dict())
+        try:
+            return await self.archives.insert_one(archive.to_dict())
+        except DuplicateKeyError:
+            print(f"Archive name {archive.name} already in use - skipping", flush=True)

    async def create_new_archive_for_user(
        self,
@@ -147,7 +177,6 @@
    ):
        # pylint: disable=too-many-arguments
        """Create new archive with default storage for new user"""

        id_ = uuid.uuid4()

        storage_path = str(id_) + "/"
@@ -159,7 +188,8 @@
            storage=DefaultStorage(name=storage_name, path=storage_path),
        )

-        print(f"Created New Archive with storage {storage_name} / {storage_path}")
+        storage_info = f"storage {storage_name} / {storage_path}"
+        print(f"Creating new archive {archive_name} with {storage_info}", flush=True)
        await self.add_archive(archive)

    async def get_archives_for_user(self, user: User, role: UserRole = UserRole.VIEWER):
@@ -188,9 +218,47 @@
        res = await self.archives.find_one({"_id": aid})
        return Archive.from_dict(res)

+    async def get_default_org(self):
+        """Get default organization"""
+        res = await self.archives.find_one({"default": True})
+        if res:
+            return Archive.from_dict(res)

+    async def create_default_org(self, storage_name="default"):
+        """Create default organization if it doesn't exist."""
+        await self.init_index()
+
+        default_org = await self.get_default_org()
+        if default_org:
+            if default_org.name == DEFAULT_ORG:
+                print("Default organization already exists - skipping", flush=True)
+            else:
+                default_org.name = DEFAULT_ORG
+                await self.update(default_org)
+                print(f'Default organization renamed to "{DEFAULT_ORG}"', flush=True)
+            return
+
+        id_ = uuid.uuid4()
+        storage_path = str(id_) + "/"
+        archive = Archive(
+            id=id_,
+            name=DEFAULT_ORG,
+            users={},
+            storage=DefaultStorage(name=storage_name, path=storage_path),
+            default=True,
+        )
+        storage_info = f"Storage: {storage_name} / {storage_path}"
+        print(
+            f'Creating Default Organization "{DEFAULT_ORG}". {storage_info}',
+            flush=True,
+        )
+        await self.add_archive(archive)

    async def update(self, archive: Archive):
        """Update existing archive"""
-        self.archives.replace_one({"_id": archive.id}, archive.to_dict())
+        return await self.archives.find_one_and_update(
+            {"_id": archive.id}, {"$set": archive.to_dict()}, upsert=True
+        )

    async def update_storage(
        self, archive: Archive, storage: Union[S3Storage, DefaultStorage]
@@ -284,12 +352,42 @@ def init_archives_api(app, mdb, user_manager, invites, user_dep: User):
            ]
        }

+    @app.post("/archives/create", tags=["archives"])
+    async def create_archive(
+        new_archive: RenameArchive,
+        user: User = Depends(user_dep),
+    ):
+        if not user.is_superuser:
+            raise HTTPException(status_code=403, detail="Not Allowed")
+
+        id_ = uuid.uuid4()
+        storage_path = str(id_) + "/"
+        archive = Archive(
+            id=id_,
+            name=new_archive.name,
+            users={},
+            storage=DefaultStorage(name="default", path=storage_path),
+        )
+        await ops.add_archive(archive)
+
+        return {"added": True}

    @router.get("", tags=["archives"])
    async def get_archive(
        archive: Archive = Depends(archive_dep), user: User = Depends(user_dep)
    ):
        return await archive.serialize_for_user(user, user_manager)

+    @router.post("/rename", tags=["archives"])
+    async def rename_archive(
+        rename: RenameArchive,
+        archive: Archive = Depends(archive_owner_dep),
+    ):
+        archive.name = rename.name
+        await ops.update(archive)
+
+        return {"updated": True}

    @router.patch("/user-role", tags=["archives"])
    async def set_role(
        update: UpdateRole,
@@ -355,4 +453,6 @@ def init_archives_api(app, mdb, user_manager, invites, user_dep: User):
        await set_role(update_role, archive, user)
        return {"added": True}

+    asyncio.create_task(ops.create_default_org())

    return ops
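The org-name uniqueness introduced above rests on the create_index("name", unique=True) call in init_index(), with add_archive() catching the resulting DuplicateKeyError. A minimal standalone sketch of that behavior, assuming a throwaway local MongoDB and a scratch database name chosen only for illustration:

from pymongo import MongoClient
from pymongo.errors import DuplicateKeyError

# Placeholder connection string and database name, for illustration only.
client = MongoClient("mongodb://localhost:27017")
archives = client["btrix_example"]["archives"]

# Same index as init_index() creates: org names must be unique.
archives.create_index("name", unique=True)

archives.insert_one({"_id": "org-1", "name": "My Organization", "default": True})
try:
    # A second org with the same name violates the unique index.
    archives.insert_one({"_id": "org-2", "name": "My Organization"})
except DuplicateKeyError:
    print("duplicate org name rejected - skipping, as add_archive() does")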
@@ -8,7 +8,6 @@ import uuid
from pydantic import BaseModel, UUID4
from fastapi import HTTPException


from .db import BaseMongoModel

@@ -61,6 +60,7 @@ class InviteOps:

    def __init__(self, mdb, email):
        self.invites = mdb["invites"]
+        self.archives = mdb["archives"]
        self.email = email

    async def add_new_user_invite(
@@ -77,6 +77,11 @@
                status_code=403, detail="This user has already been invited"
            )

+        # Invitations to a specific org via the API must include a role, so if
+        # it's absent, assume this is a general invitation from the superadmin.
+        if not new_user_invite.role:
+            new_user_invite.role = UserRole.OWNER
+
        await self.invites.insert_one(new_user_invite.to_dict())

        self.email.send_new_user_invite(
@@ -126,11 +131,13 @@
        if allow_existing is false, don't allow invites to existing users"""
        invite_code = uuid.uuid4().hex

-        aid = None
-        archive_name = None
        if archive:
            aid = archive.id
            archive_name = archive.name
+        else:
+            default_org = await self.archives.find_one({"default": True})
+            aid = default_org["_id"]
+            archive_name = default_org["name"]

        invite_pending = InvitePending(
            id=invite_code,
@@ -140,6 +140,9 @@ class UserManager(BaseUserManager[UserCreate, UserDB]):
            ):
                raise HTTPException(status_code=400, detail="Invalid Invite Token")

+            # Don't create a new org for registered users.
+            user.newArchive = False
+
        created_user = await super().create(user, safe, request)
        await self.on_after_register_custom(created_user, user, request)
        return created_user
@@ -170,7 +173,7 @@
                    email=email,
                    password=password,
                    is_superuser=True,
-                    newArchive=True,
+                    newArchive=False,
                    is_verified=True,
                )
            )
@@ -181,7 +184,10 @@
            print(f"User {email} already exists", flush=True)

    async def create_non_super_user(
-        self, email: str, password: str, name: str = "New user"
+        self,
+        email: str,
+        password: str,
+        name: str = "New user",
    ):
        """create a regular user with given credentials"""
        if not email:
@@ -197,7 +203,7 @@
            email=email,
            password=password,
            is_superuser=False,
-            newArchive=True,
+            newArchive=False,
            is_verified=True,
        )
        created_user = await super().create(user_create, safe=False, request=None)
@@ -214,7 +220,7 @@

        print(f"User {user.id} has registered.")

-        if user_create.newArchive:
+        if user_create.newArchive is True:
            print(f"Creating new archive for {user.id}")

            archive_name = (
@@ -44,9 +44,11 @@ def admin_aid(admin_auth_headers):
        r = requests.get(f"{API_PREFIX}/archives", headers=admin_auth_headers)
        data = r.json()
        try:
-            return data["archives"][0]["id"]
+            for archive in data["archives"]:
+                if archive["default"] is True:
+                    return archive["id"]
        except:
-            print("Waiting for admin_aid")
+            print("Waiting for default org id")
            time.sleep(5)

backend/test/test_org.py (new file)
@@ -0,0 +1,57 @@
import requests

from .conftest import API_PREFIX


def test_ensure_only_one_default_org(admin_auth_headers):
    r = requests.get(f"{API_PREFIX}/archives", headers=admin_auth_headers)
    data = r.json()

    orgs = data["archives"]
    default_orgs = [org for org in orgs if org["default"]]
    assert len(default_orgs) == 1

    default_org_name = default_orgs[0]["name"]
    orgs_with_same_name = [org for org in orgs if org["name"] == default_org_name]
    assert len(orgs_with_same_name) == 1


def test_rename_org(admin_auth_headers, admin_aid):
    UPDATED_NAME = "updated org name"
    rename_data = {"name": UPDATED_NAME}
    r = requests.post(
        f"{API_PREFIX}/archives/{admin_aid}/rename",
        headers=admin_auth_headers,
        json=rename_data,
    )

    assert r.status_code == 200
    data = r.json()
    assert data["updated"]

    # Verify that the name is now updated.
    r = requests.get(f"{API_PREFIX}/archives/{admin_aid}", headers=admin_auth_headers)
    assert r.status_code == 200
    data = r.json()
    assert data["name"] == UPDATED_NAME


def test_create_org(admin_auth_headers):
    NEW_ORG_NAME = "New Org"
    r = requests.post(
        f"{API_PREFIX}/archives/create",
        headers=admin_auth_headers,
        json={"name": NEW_ORG_NAME},
    )

    assert r.status_code == 200
    data = r.json()
    assert data["added"]

    # Verify that the org exists.
    r = requests.get(f"{API_PREFIX}/archives", headers=admin_auth_headers)
    assert r.status_code == 200
    data = r.json()
    org_names = []
    for org in data["archives"]:
        org_names.append(org["name"])
    assert NEW_ORG_NAME in org_names
@@ -21,12 +21,9 @@ def test_list_archives(admin_auth_headers, admin_aid):
    assert len(archives) > 0

    archive_ids = []
-    archive_names = []
    for archive in archives:
        archive_ids.append(archive["id"])
-        archive_names.append(archive["name"])
    assert admin_aid in archive_ids
-    assert "admin's Archive" in archive_names


def test_create_new_config(admin_auth_headers, admin_aid):
@@ -25,6 +25,8 @@ data:

  CRAWLER_LIVENESS_PORT: "{{ .Values.crawler_liveness_port | default 0 }}"

+  DEFAULT_ORG: "{{ .Values.default_org }}"

  JOB_IMAGE: "{{ .Values.api_image }}"

  {{- if .Values.crawler_pv_claim }}
@@ -29,6 +29,9 @@ superuser:
  # change or remove this
  password: PASSW0RD!

+# Set name for default organization created with superuser
+default_org: "My Organization"


# API Image
# =========================================
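If default_org is changed in values.yaml and the chart is redeployed, the backend renames the existing default org on startup instead of creating a second one. A rough external check of that behavior (a sketch only; the API base URL and the superuser auth header are placeholders for whatever your deployment uses):

import requests

API_PREFIX = "http://localhost:30870/api"  # placeholder, adjust to your deployment
auth_headers = {"Authorization": "Bearer <superuser token>"}  # placeholder

r = requests.get(f"{API_PREFIX}/archives", headers=auth_headers)
default_orgs = [org for org in r.json()["archives"] if org["default"]]

# Expect exactly one default org, carrying the name configured in values.yaml.
assert len(default_orgs) == 1
print("default org name:", default_orgs[0]["name"])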