Use archive_viewer_dep permissions to GET crawls (#443)
* Use archive_viewer_dep permissions to GET crawls
* Add is_viewer check to archive_dep
* Add API endpoint to add new user to archive directly (/archive/<id>/add-user)
* Add tests
* Refactor tests to use fixtures
* And remove login test that duplicates fixtures
parent 61ed51fa84
commit d1b59c9bd0

.github/workflows/k3d-ci.yaml (vendored): 2 lines changed
@@ -85,7 +85,7 @@ jobs:
 
       -
         name: Run Tests
-        run: py.test -vv ./backend/test/*.py
+        run: pytest -vv ./backend/test/*.py
 
       -
         name: Print Backend Logs

.github/workflows/microk8s-ci.yaml (vendored): 2 lines changed
@@ -66,7 +66,7 @@ jobs:
 
       -
         name: Run Tests
-        run: py.test -vv ./backend/test/*.py
+        run: pytest -vv ./backend/test/*.py
 
       -
         name: Print Backend Logs

@@ -12,7 +12,7 @@ from .db import BaseMongoModel
 
 from .users import User
 
-from .invites import InvitePending, InviteToArchiveRequest, UserRole
+from .invites import AddToArchiveRequest, InvitePending, InviteToArchiveRequest, UserRole
 
 
 # crawl scale for constraint
@@ -205,7 +205,6 @@ class ArchiveOps:
 
     async def add_user_by_invite(self, invite: InvitePending, user: User):
         """Add user to an Archive from an InvitePending, if any"""
 
         # if no archive to add to (eg. superuser invite), just return
        if not invite.aid:
            return
@@ -230,6 +229,11 @@ def init_archives_api(app, mdb, user_manager, invites, user_dep: User):
         archive = await ops.get_archive_for_user_by_id(uuid.UUID(aid), user)
         if not archive:
             raise HTTPException(status_code=404, detail=f"Archive '{aid}' not found")
+        if not archive.is_viewer(user):
+            raise HTTPException(
+                status_code=403,
+                detail="User does not have permission to view this archive",
+            )
 
         return archive
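
The hunk above adds the is_viewer check to the shared archive dependency, so any route that resolves an archive through it now returns 403 for a user with no role on that archive. The archive_viewer_dep used by the crawl routes further down is not shown in this diff; the sketch below is only an illustration, assuming FastAPI's Depends and an is_crawler helper analogous to is_viewer, of how role-scoped dependencies like it are typically layered on top of a base archive lookup.

# Illustrative sketch, not code from this commit: composing role-scoped
# archive dependencies on top of a base archive_dep with FastAPI.
from fastapi import Depends, HTTPException


def make_role_deps(archive_dep, user_dep):
    async def archive_viewer_dep(archive=Depends(archive_dep)):
        # archive_dep already rejects non-viewers with a 403 after this change,
        # so a viewer-level dependency can simply pass the archive through.
        return archive

    async def archive_crawl_dep(archive=Depends(archive_dep), user=Depends(user_dep)):
        # Crawler-level routes (creating or stopping crawls) need a stronger
        # role; is_crawler is assumed here, mirroring is_viewer above.
        if not archive.is_crawler(user):
            raise HTTPException(status_code=403, detail="User does not have permission to modify crawls")
        return archive

    return archive_viewer_dep, archive_crawl_dep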

@@ -329,4 +333,21 @@ def init_archives_api(app, mdb, user_manager, invites, user_dep: User):
         await user_manager.user_db.update(user)
         return {"added": True}
 
+    @router.post("/add-user", tags=["invites"])
+    async def add_new_user_to_archive(
+        invite: AddToArchiveRequest,
+        request: Request,
+        archive: Archive = Depends(archive_owner_dep),
+        user: User = Depends(user_dep),
+    ):
+        if not user.is_superuser:
+            raise HTTPException(status_code=403, detail="Not Allowed")
+
+        await user_manager.create_non_super_user(
+            invite.email, invite.password, invite.name
+        )
+        update_role = UpdateRole(role=invite.role, email=invite.email)
+        await set_role(update_role, archive, user)
+        return {"added": True}
+
     return ops
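
For reference, the new endpoint can be exercised directly once a superuser is authenticated; the snippet below mirrors what the test fixture in conftest.py does. The role value 10 is the viewer role used by the fixtures; the other UserRole values are defined in the invites module and are not shown in this diff.

# Example request against the new add-user endpoint (assumes a running
# backend at the test API_PREFIX and a superuser bearer token).
import requests

API_PREFIX = "http://127.0.0.1:30870/api"


def add_viewer(aid, admin_headers):
    r = requests.post(
        f"{API_PREFIX}/archives/{aid}/add-user",
        json={
            "email": "viewer@example.com",
            "password": "viewerPASSW0RD!",
            "name": "newviewer",
            "role": 10,  # viewer role, as used by the test fixtures
        },
        headers=admin_headers,
    )
    return r.json()  # expected: {"added": True}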

@@ -569,6 +569,7 @@ def init_crawls_api(
 
     ops = CrawlOps(mdb, users, crawl_manager, crawl_config_ops, archives)
 
+    archive_viewer_dep = archives.archive_viewer_dep
     archive_crawl_dep = archives.archive_crawl_dep
 
     @app.get("/archives/all/crawls", tags=["crawls"], response_model=ListCrawls)
@@ -579,7 +580,7 @@ def init_crawls_api(
         return ListCrawls(crawls=await ops.list_crawls(None, running_only=True))
 
     @app.get("/archives/{aid}/crawls", tags=["crawls"], response_model=ListCrawls)
-    async def list_crawls(archive: Archive = Depends(archive_crawl_dep)):
+    async def list_crawls(archive: Archive = Depends(archive_viewer_dep)):
         return ListCrawls(crawls=await ops.list_crawls(archive))
 
     @app.post(
@@ -632,7 +633,7 @@ def init_crawls_api(
         tags=["crawls"],
         response_model=CrawlOut,
     )
-    async def get_crawl(crawl_id, archive: Archive = Depends(archive_crawl_dep)):
+    async def get_crawl(crawl_id, archive: Archive = Depends(archive_viewer_dep)):
         return await ops.get_crawl(crawl_id, archive)
 
     @app.get(
@@ -657,7 +658,7 @@ def init_crawls_api(
         response_model=ListCrawlOut,
     )
     async def list_single_crawl(
-        crawl_id, archive: Archive = Depends(archive_crawl_dep)
+        crawl_id, archive: Archive = Depends(archive_viewer_dep)
     ):
         crawls = await ops.list_crawls(archive, crawl_id=crawl_id)
         if len(crawls) < 1:
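
Taken together, these hunks swap the GET routes over to archive_viewer_dep so a viewer-role token can read crawl state, while routes that modify crawls keep the stronger archive_crawl_dep. A hedged illustration of that split, assuming the viewer credentials from conftest.py and that the crawl cancel route (not shown in this diff) still requires crawler rights:

# Illustrative check of the viewer/crawler split; routes outside this
# diff (e.g. the cancel endpoint) are assumptions.
import requests

API_PREFIX = "http://127.0.0.1:30870/api"


def check_viewer_access(aid, crawl_id, viewer_headers):
    # Read access: now allowed for viewers via archive_viewer_dep.
    r = requests.get(f"{API_PREFIX}/archives/{aid}/crawls", headers=viewer_headers)
    assert r.status_code == 200

    # Write access: assumed to stay behind archive_crawl_dep, so a viewer
    # should be rejected.
    r = requests.post(
        f"{API_PREFIX}/archives/{aid}/crawls/{crawl_id}/cancel",
        headers=viewer_headers,
    )
    assert r.status_code in (403, 404)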

@@ -46,6 +46,15 @@ class InviteToArchiveRequest(InviteRequest):
     role: UserRole
 
 
+# ============================================================================
+class AddToArchiveRequest(InviteRequest):
+    """Request to add a new user to an archive directly"""
+
+    role: UserRole
+    password: str
+    name: str
+
+
 # ============================================================================
 class InviteOps:
     """invite users (optionally to an archive), send emails and delete invites"""

@@ -180,6 +180,33 @@ class UserManager(BaseUserManager[UserCreate, UserDB]):
         except (DuplicateKeyError, UserAlreadyExists):
             print(f"User {email} already exists", flush=True)
 
+    async def create_non_super_user(
+        self, email: str, password: str, name: str = "New user"
+    ):
+        if not email:
+            print("No user defined", flush=True)
+            return
+
+        if not password:
+            password = passlib.pwd.genword()
+
+        try:
+            user_create = UserCreate(
+                name=name,
+                email=email,
+                password=password,
+                is_superuser=False,
+                newArchive=True,
+                is_verified=True,
+            )
+            created_user = await super().create(user_create, safe=False, request=None)
+            await self.on_after_register_custom(created_user, user_create, request=None)
+            return created_user
+
+        except (DuplicateKeyError, UserAlreadyExists):
+            print(f"User {email} already exists", flush=True)
+
+
     async def on_after_register_custom(
         self, user: UserDB, user_create: UserCreate, request: Optional[Request]
     ):
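
When no password is supplied, create_non_super_user falls back to passlib's word generator instead of failing. A minimal sketch of that fallback, assuming passlib is already available in the backend environment:

# Sketch of the password fallback used by create_non_super_user above.
import passlib.pwd


def pick_password(password: str = "") -> str:
    # genword() returns a randomly generated password string
    if not password:
        password = passlib.pwd.genword()
    return password


print(pick_password())          # generated password
print(pick_password("s3cret"))  # explicit password passes through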

backend/test/__init__.py (new file, empty)

backend/test/conftest.py (new file, 83 lines)
@@ -0,0 +1,83 @@
import pytest
import requests
import time


API_PREFIX = "http://127.0.0.1:30870/api"

ADMIN_USERNAME = "admin@example.com"
ADMIN_PW = "PASSW0RD!"

VIEWER_USERNAME = "viewer@example.com"
VIEWER_PW = "viewerPASSW0RD!"


@pytest.fixture(scope="session")
def admin_auth_headers():
    r = requests.post(
        f"{API_PREFIX}/auth/jwt/login",
        data={
            "username": ADMIN_USERNAME,
            "password": ADMIN_PW,
            "grant_type": "password",
        },
    )
    data = r.json()
    access_token = data.get("access_token")
    return {"Authorization": f"Bearer {access_token}"}


@pytest.fixture(scope="session")
def admin_aid(admin_auth_headers):
    r = requests.get(f"{API_PREFIX}/archives", headers=admin_auth_headers)
    data = r.json()
    return data["archives"][0]["id"]


@pytest.fixture(scope="session")
def admin_crawl_id(admin_auth_headers, admin_aid):
    # Start crawl.
    crawl_data = {
        "runNow": True,
        "name": "Admin Test Crawl",
        "config": {"seeds": ["https://example.com/"]},
    }
    r = requests.post(
        f"{API_PREFIX}/archives/{admin_aid}/crawlconfigs/",
        headers=admin_auth_headers,
        json=crawl_data,
    )
    data = r.json()
    crawl_id = data["run_now_job"]
    # Wait for it to complete and then return crawl ID
    while True:
        r = requests.get(
            f"{API_PREFIX}/archives/{admin_aid}/crawls/{crawl_id}/replay.json",
            headers=admin_auth_headers,
        )
        data = r.json()
        if data["state"] == "complete":
            return crawl_id
        time.sleep(5)


@pytest.fixture(scope="session")
def viewer_auth_headers(admin_auth_headers, admin_aid):
    requests.post(
        f"{API_PREFIX}/archives/{admin_aid}/add-user",
        json={
            "email": VIEWER_USERNAME,
            "password": VIEWER_PW,
            "name": "newviewer",
            "role": 10,
        },
        headers=admin_auth_headers,
    )
    r = requests.post(
        f"{API_PREFIX}/auth/jwt/login",
        data={
            "username": VIEWER_USERNAME,
            "password": VIEWER_PW,
            "grant_type": "password",
        },
        headers=admin_auth_headers,
    )
    data = r.json()
    access_token = data.get("access_token")
    return {"Authorization": f"Bearer {access_token}"}
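
The session-scoped fixtures above form a chain: admin_auth_headers logs in the superuser, admin_aid resolves that user's default archive, admin_crawl_id runs a crawl to completion, and viewer_auth_headers provisions a viewer through the new add-user endpoint. A test only needs to name the fixtures it uses; as a hypothetical example (not part of this commit):

# Hypothetical test built on the conftest.py fixtures above.
import requests

from .conftest import API_PREFIX


def test_viewer_sees_shared_archive(viewer_auth_headers, admin_aid):
    # The viewer was added to the admin archive, so that archive should
    # appear in the listing returned for the viewer token.
    r = requests.get(f"{API_PREFIX}/archives", headers=viewer_auth_headers)
    data = r.json()
    assert admin_aid in [archive["id"] for archive in data["archives"]]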

@@ -1,14 +1,13 @@
 import requests
 
-api_prefix = "http://127.0.0.1:30870/api"
+from .conftest import API_PREFIX, ADMIN_USERNAME, ADMIN_PW
 
 
 def test_login_invalid():
-    username = "admin@example.com"
     password = "invalid"
     r = requests.post(
-        f"{api_prefix}/auth/jwt/login",
-        data={"username": username, "password": password, "grant_type": "password"},
+        f"{API_PREFIX}/auth/jwt/login",
+        data={"username": ADMIN_USERNAME, "password": password, "grant_type": "password"},
     )
     data = r.json()
 
@@ -17,11 +16,9 @@ def test_login_invalid():
 
 
 def test_login():
-    username = "admin@example.com"
-    password = "PASSW0RD!"
     r = requests.post(
-        f"{api_prefix}/auth/jwt/login",
-        data={"username": username, "password": password, "grant_type": "password"},
+        f"{API_PREFIX}/auth/jwt/login",
+        data={"username": ADMIN_USERNAME, "password": ADMIN_PW, "grant_type": "password"},
     )
     data = r.json()

backend/test/test_permissions.py (new file, 49 lines)
@@ -0,0 +1,49 @@
import requests

from .conftest import API_PREFIX


def test_admin_get_archive_crawls(admin_auth_headers, admin_aid, admin_crawl_id):
    r = requests.get(
        f"{API_PREFIX}/archives/{admin_aid}/crawls",
        headers=admin_auth_headers
    )
    data = r.json()
    assert len(data["crawls"]) > 0
    assert data["crawls"][0]["id"] == admin_crawl_id
    assert data["crawls"][0]["aid"] == admin_aid


def test_viewer_get_archive_crawls(viewer_auth_headers, admin_aid, admin_crawl_id):
    r = requests.get(
        f"{API_PREFIX}/archives/{admin_aid}/crawls",
        headers=viewer_auth_headers
    )
    data = r.json()
    crawls = data["crawls"]
    crawl_ids = []
    for crawl in crawls:
        crawl_ids.append(crawl["id"])
    assert len(crawls) > 0
    assert admin_crawl_id in crawl_ids


def test_viewer_get_crawl(viewer_auth_headers, admin_aid, admin_crawl_id):
    r = requests.get(
        f"{API_PREFIX}/archives/{admin_aid}/crawls/{admin_crawl_id}",
        headers=viewer_auth_headers
    )
    data = r.json()
    assert data["id"] == admin_crawl_id
    assert data["aid"] == admin_aid


def test_viewer_get_crawl_replay(viewer_auth_headers, admin_aid, admin_crawl_id):
    r = requests.get(
        f"{API_PREFIX}/archives/{admin_aid}/crawls/{admin_crawl_id}/replay.json",
        headers=viewer_auth_headers
    )
    data = r.json()
    assert data["id"] == admin_crawl_id
    assert data["aid"] == admin_aid
    assert data["resources"]

@@ -4,15 +4,9 @@ import time
 import io
 import zipfile
 
+from .conftest import API_PREFIX, ADMIN_USERNAME, ADMIN_PW
+
 host_prefix = "http://127.0.0.1:30870"
-api_prefix = f"{host_prefix}/api"
-
 
-access_token = None
-headers = None
-archive_id = None
-
-crawl_id = None
 
 wacz_path = None
 wacz_size = None
@@ -21,47 +15,31 @@ wacz_hash = None
 wacz_content = None
 
 
-def test_login():
-    username = "admin@example.com"
-    password = "PASSW0RD!"
-    r = requests.post(
-        f"{api_prefix}/auth/jwt/login",
-        data={"username": username, "password": password, "grant_type": "password"},
-    )
-    assert r.status_code == 200
+def test_list_archives(admin_auth_headers, admin_aid):
+    r = requests.get(f"{API_PREFIX}/archives", headers=admin_auth_headers)
     data = r.json()
 
-    assert data["token_type"] == "bearer"
+    archives = data["archives"]
+    assert len(archives) > 0
 
-    global access_token
-    access_token = data["access_token"]
+    archive_ids = []
+    archive_names = []
+    for archive in archives:
+        archive_ids.append(archive["id"])
+        archive_names.append(archive["name"])
 
-    global headers
-    headers = {"Authorization": f"Bearer {access_token}"}
+    assert admin_aid in archive_ids
+    assert "admin's Archive" in archive_names
 
 
-def test_list_archives():
-    r = requests.get(f"{api_prefix}/archives", headers=headers)
-    data = r.json()
-
-    assert len(data["archives"]) == 1
-    assert data["archives"][0]["id"]
-
-    global archive_id
-    archive_id = data["archives"][0]["id"]
-
-    assert data["archives"][0]["name"] == "admin's Archive"
-
-
-def test_create_new_config():
+def test_create_new_config(admin_auth_headers, admin_aid):
     crawl_data = {
         "runNow": True,
         "name": "Test Crawl",
         "config": {"seeds": ["https://example.com/"]},
     }
     r = requests.post(
-        f"{api_prefix}/archives/{archive_id}/crawlconfigs/",
-        headers=headers,
+        f"{API_PREFIX}/archives/{admin_aid}/crawlconfigs/",
+        headers=admin_auth_headers,
         json=crawl_data,
     )
 
@@ -71,18 +49,15 @@ def test_create_new_config():
     assert data["added"]
     assert data["run_now_job"]
 
-    global crawl_id
-    crawl_id = data["run_now_job"]
 
-
-def test_wait_for_complete():
+def test_wait_for_complete(admin_auth_headers, admin_aid, admin_crawl_id):
     print("")
     print("---- Running Crawl ----")
 
     while True:
         r = requests.get(
-            f"{api_prefix}/archives/{archive_id}/crawls/{crawl_id}/replay.json",
-            headers=headers,
+            f"{API_PREFIX}/archives/{admin_aid}/crawls/{admin_crawl_id}/replay.json",
+            headers=admin_auth_headers,
         )
         data = r.json()
         assert (
@@ -105,10 +80,10 @@ def test_wait_for_complete():
     wacz_size = data["resources"][0]["size"]
     wacz_hash = data["resources"][0]["hash"]
 
-def test_crawl_info():
+def test_crawl_info(admin_auth_headers, admin_aid, admin_crawl_id):
     r = requests.get(
-        f"{api_prefix}/archives/{archive_id}/crawls/{crawl_id}",
-        headers=headers,
+        f"{API_PREFIX}/archives/{admin_aid}/crawls/{admin_crawl_id}",
+        headers=admin_auth_headers,
     )
     data = r.json()
     assert data["fileSize"] == wacz_size

backend/test/test_users.py (new file, 18 lines)
@@ -0,0 +1,18 @@
import requests

from .conftest import API_PREFIX


def test_create_super_user(admin_auth_headers):
    assert admin_auth_headers
    auth = admin_auth_headers["Authorization"]
    token = auth.replace("Bearer ", "")
    assert token != "None"
    assert len(token) > 4

def test_create_non_super_user(viewer_auth_headers):
    assert viewer_auth_headers
    auth = viewer_auth_headers["Authorization"]
    token = auth.replace("Bearer ", "")
    assert token != "None"
    assert len(token) > 4