backend: fix accessing signed urls when using local minio service
- sign url with endpoint_url instead of access_endpoint_url, but replace the endpoint_url prefix with access_endpoint_url for access
- keep existing behavior of signing with access_endpoint_url only if the SIGN_ACCESS_ENDPOINT env var is set
commit ae51114a45
parent 502d687620
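In effect, the change always signs against endpoint_url and then rewrites the URL prefix to access_endpoint_url for client access, unless SIGN_ACCESS_ENDPOINT is set. A minimal sketch of that rewrite, using made-up endpoint values for a local MinIO deployment (neither URL below comes from this repository):

# Hypothetical values for illustration only.
endpoint_url = "http://local-minio.default:9000/btrix-data/"
access_endpoint_url = "https://btrix.example.com/data/"

# A URL as it would come back from generate_presigned_url, signed against
# the internal endpoint (query string shortened for readability).
presigned_url = endpoint_url + "crawls/crawl-1.wacz?X-Amz-Signature=abc123"

# The patch swaps the internal prefix for the externally reachable one when
# SIGN_ACCESS_ENDPOINT is not set and the two endpoints differ.
if access_endpoint_url and access_endpoint_url != endpoint_url:
    presigned_url = presigned_url.replace(endpoint_url, access_endpoint_url)

print(presigned_url)
# https://btrix.example.com/data/crawls/crawl-1.wacz?X-Amz-Signature=abc123

Only the prefix changes; the query-string signature produced during signing is left untouched.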
@@ -4,6 +4,7 @@ Storage API
 from typing import Union
 from urllib.parse import urlsplit
 from contextlib import asynccontextmanager
+import os
 
 from fastapi import Depends, HTTPException
 from aiobotocore.session import get_session
@@ -12,6 +13,10 @@ from archives import Archive, DefaultStorage, S3Storage
 from users import User
 
 
+# sign access endpoint
+sign_access_endpoint = os.environ.get("SIGN_ACCESS_ENDPOINT")
+
+
 # ============================================================================
 def init_storages_api(archive_ops, crawl_manager, user_dep):
     """ API for updating storage for an archive """
@@ -103,11 +108,20 @@ async def get_presigned_url(archive, crawlfile, crawl_manager, duration=3600):
     else:
         raise Exception("No Default Storage Found, Invalid Storage Type")
 
-    async with get_s3_client(s3storage, True) as (client, bucket, key):
+    async with get_s3_client(s3storage, sign_access_endpoint) as (client, bucket, key):
         key += crawlfile.filename
 
         presigned_url = await client.generate_presigned_url(
             "get_object", Params={"Bucket": bucket, "Key": key}, ExpiresIn=duration
         )
 
+        if (
+            not sign_access_endpoint
+            and s3storage.access_endpoint_url
+            and s3storage.access_endpoint_url != s3storage.endpoint_url
+        ):
+            presigned_url = presigned_url.replace(
+                s3storage.endpoint_url, s3storage.access_endpoint_url
+            )
+
     return presigned_url
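The second argument to get_s3_client (previously hard-coded to True) now carries the SIGN_ACCESS_ENDPOINT setting. The helper itself is not part of this diff, so the following is only a sketch of how such a flag could select which endpoint to sign against; the parameter name, the access_key/secret_key attributes, and the bucket/key parsing are assumptions for illustration, not the project's actual implementation.

from contextlib import asynccontextmanager
from urllib.parse import urlsplit

from aiobotocore.session import get_session


@asynccontextmanager
async def get_s3_client(storage, use_access_endpoint=False):
    # Assumption: sign against access_endpoint_url only when the flag is truthy
    # (i.e. when SIGN_ACCESS_ENDPOINT is set); otherwise sign against
    # endpoint_url and let the caller rewrite the prefix afterwards.
    url = storage.access_endpoint_url if use_access_endpoint else storage.endpoint_url
    if not url.endswith("/"):
        url += "/"

    # Split "scheme://host/bucket/key-prefix/" into endpoint, bucket, and key prefix.
    parts = urlsplit(url)
    bucket, key = parts.path[1:].split("/", 1)
    endpoint_url = parts.scheme + "://" + parts.netloc

    session = get_session()
    async with session.create_client(
        "s3",
        region_name="us-east-1",  # placeholder region for a local MinIO endpoint
        endpoint_url=endpoint_url,
        aws_access_key_id=storage.access_key,
        aws_secret_access_key=storage.secret_key,
    ) as client:
        yield client, bucket, key

Since os.environ.get returns None when the variable is unset, sign_access_endpoint is falsy by default, so signing against endpoint_url plus the prefix replacement in get_presigned_url becomes the default path.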