remove storage endpoints
paulineribeyre committed Dec 16, 2024
1 parent ec11731 commit 4a716bf
Showing 5 changed files with 24 additions and 463 deletions.
51 changes: 0 additions & 51 deletions docs/openapi.yaml
@@ -423,57 +423,6 @@ paths:
summary: S3 Endpoint
tags:
- S3
/storage/credentials:
get:
operationId: get_user_keys
responses:
'200':
content:
application/json:
schema: {}
description: Successful Response
security:
- HTTPBearer: []
summary: Get User Keys
tags:
- Storage
post:
operationId: generate_user_key
responses:
'201':
content:
application/json:
schema: {}
description: Successful Response
security:
- HTTPBearer: []
summary: Generate User Key
tags:
- Storage
/storage/credentials/{key_id}:
delete:
operationId: delete_user_key
parameters:
- in: path
name: key_id
required: true
schema:
title: Key Id
type: string
responses:
'204':
description: Successful Response
'422':
content:
application/json:
schema:
$ref: '#/components/schemas/HTTPValidationError'
description: Validation Error
security:
- HTTPBearer: []
summary: Delete User Key
tags:
- Storage
/storage/info:
get:
operationId: get_storage_info
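For reference, the /storage/credentials paths removed above were the HTTP surface for key management. A minimal client-side sketch of what is being deleted, assuming a running Gen3 Workflow deployment and a valid access token (the base URL, token, and key ID below are hypothetical placeholders, not values from this commit):

import httpx

BASE_URL = "https://gen3wf.example.com"  # hypothetical deployment URL
HEADERS = {"Authorization": "bearer <access_token>"}  # hypothetical token

# list the caller's existing keys (GET /storage/credentials, removed by this commit)
keys = httpx.get(f"{BASE_URL}/storage/credentials", headers=HEADERS).json()

# generate a new key pair (POST /storage/credentials, removed by this commit)
new_key = httpx.post(f"{BASE_URL}/storage/credentials", headers=HEADERS).json()

# delete a key by ID (DELETE /storage/credentials/{key_id}, removed by this commit)
httpx.delete(f"{BASE_URL}/storage/credentials/<key_id>", headers=HEADERS)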
128 changes: 1 addition & 127 deletions gen3workflow/aws_utils.py
@@ -46,134 +46,8 @@ def create_user_bucket(user_id):
Returns:
tuple: (bucket name, prefix where the user stores objects in the bucket, bucket region)
"""
# TODO lifetime policy and encryption
user_bucket_name = get_safe_name_from_user_id(user_id)
s3_client = boto3.client("s3")
s3_client.create_bucket(Bucket=user_bucket_name)
return user_bucket_name, "ga4gh-tes", config["USER_BUCKETS_REGION"]


def create_or_update_policy(policy_name, policy_document, path_prefix, tags):
# attempt to create the policy
try:
response = iam_client.create_policy(
PolicyName=policy_name,
Path=path_prefix,
PolicyDocument=json.dumps(policy_document),
Tags=tags,
)
assert "Arn" in response.get("Policy", {}), f"{iam_resp_err}: {response}"
return response["Policy"]["Arn"]
except ClientError as e:
if e.response["Error"]["Code"] != "EntityAlreadyExists":
raise

# policy already exists: update it.
# find the right policy
response = iam_client.list_policies(PathPrefix=path_prefix)
assert "Policies" in response, f"{iam_resp_err}: {response}"
for policy in response["Policies"]:
assert "PolicyName" in policy, f"{iam_resp_err}: {policy}"
assert "Arn" in policy, f"{iam_resp_err}: {policy}"
if policy["PolicyName"] == policy_name:
break

# there can only be up to 5 versions, so delete old versions of this policy
response = iam_client.list_policy_versions(PolicyArn=policy["Arn"])
assert "Versions" in response, f"{iam_resp_err}: {response}"
for version in response["Versions"]:
assert "VersionId" in version, f"{iam_resp_err}: {version}"
assert "IsDefaultVersion" in version, f"{iam_resp_err}: {version}"
if version["IsDefaultVersion"]:
continue # do not delete the latest versions
iam_client.delete_policy_version(
PolicyArn=policy["Arn"], VersionId=version["VersionId"]
)

# update the policy by creating a new version
iam_client.create_policy_version(
PolicyArn=policy["Arn"],
PolicyDocument=json.dumps(policy_document),
SetAsDefault=True,
)
return policy["Arn"]


def create_iam_user_and_key(user_id):
iam_user_name = get_safe_name_from_user_id(user_id)
escaped_hostname = config["HOSTNAME"].replace(".", "-")
iam_tags = [
{
"Key": "name",
"Value": f"gen3wf-{escaped_hostname}",
},
]

try:
iam_client.create_user(UserName=iam_user_name, Tags=iam_tags)
except ClientError as e:
# if the user already exists, ignore the error and proceed
if e.response["Error"]["Code"] != "EntityAlreadyExists":
raise

# grant the IAM user access to the user's s3 bucket
bucket_name, bucket_prefix, _ = create_user_bucket(user_id)
policy_document = {
"Version": "2012-10-17",
"Statement": [
{
"Sid": "AllowListingBucketFolder",
"Effect": "Allow",
"Action": ["s3:ListBucket"],
"Resource": [f"arn:aws:s3:::{bucket_name}"],
"Condition": {"StringLike": {"s3:prefix": [f"{bucket_prefix}/*"]}},
},
{
"Sid": "AllowManagingBucketFolder",
"Effect": "Allow",
"Action": ["s3:GetObject", "s3:PutObject", "s3:DeleteObject"],
"Resource": [f"arn:aws:s3:::{bucket_name}/{bucket_prefix}/*"],
},
],
}
path_prefix = f"/{iam_user_name}/" # used later to get existing policies' ARN
policy_arn = create_or_update_policy(
f"{iam_user_name}-policy", policy_document, path_prefix, iam_tags
)
iam_client.attach_user_policy(PolicyArn=policy_arn, UserName=iam_user_name)

# create a key for this user
key = iam_client.create_access_key(UserName=iam_user_name)
assert "AccessKeyId" in key.get("AccessKey", {}), f"{iam_resp_err}: {key}"
assert "SecretAccessKey" in key.get("AccessKey", {}), f"{iam_resp_err}: {key}"

return key["AccessKey"]["AccessKeyId"], key["AccessKey"]["SecretAccessKey"]


def list_iam_user_keys(user_id):
iam_user_name = get_safe_name_from_user_id(user_id)
try:
response = iam_client.list_access_keys(UserName=iam_user_name)
except ClientError as e:
if e.response["Error"]["Code"] == "NoSuchEntity":
return [] # user does not exist in IAM, so they have no IAM keys
else:
raise
assert "AccessKeyMetadata" in response, f"{iam_resp_err}: {response}"
for key in response["AccessKeyMetadata"]:
assert "AccessKeyId" in key, f"{iam_resp_err}: {key}"
assert "CreateDate" in key, f"{iam_resp_err}: {key}"
assert "Status" in key, f"{iam_resp_err}: {key}"
return response["AccessKeyMetadata"]


def delete_iam_user_key(user_id, key_id):
try:
iam_client.delete_access_key(
UserName=get_safe_name_from_user_id(user_id),
AccessKeyId=key_id,
)
except ClientError as e:
if e.response["Error"]["Code"] == "NoSuchEntity":
err_msg = f"No such key: '{key_id}'"
logger.error(err_msg)
raise HTTPException(HTTP_404_NOT_FOUND, err_msg)
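The policy deleted above scoped each IAM user to a single prefix in their own bucket. As a hedged illustration of what the removed create_iam_user_and_key flow enabled, a caller could have used the returned key pair with boto3 roughly like this (the bucket name, key pair, and object key are hypothetical placeholders):

import boto3

# hypothetical values; in the removed flow they came from create_iam_user_and_key()
key_id, key_secret = "<aws_key_id>", "<aws_key_secret>"
bucket, prefix = "gen3wf-example-hostname-42", "ga4gh-tes"

s3 = boto3.client(
    "s3",
    aws_access_key_id=key_id,
    aws_secret_access_key=key_secret,
)

# permitted by the removed policy: manage objects under the user's own prefix only
s3.put_object(Bucket=bucket, Key=f"{prefix}/example.txt", Body=b"hello")
print(s3.list_objects_v2(Bucket=bucket, Prefix=f"{prefix}/"))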
80 changes: 3 additions & 77 deletions gen3workflow/routes/storage.py
@@ -1,16 +1,8 @@
from datetime import datetime, timedelta, timezone
from fastapi import APIRouter, Depends, Request
from starlette.status import HTTP_200_OK

from fastapi import APIRouter, Depends, HTTPException, Request
from starlette.status import (
HTTP_200_OK,
HTTP_201_CREATED,
HTTP_204_NO_CONTENT,
HTTP_400_BAD_REQUEST,
)

from gen3workflow import aws_utils, logger
from gen3workflow import aws_utils
from gen3workflow.auth import Auth
from gen3workflow.config import config


router = APIRouter(prefix="/storage")
@@ -26,69 +18,3 @@ async def get_storage_info(request: Request, auth=Depends(Auth)):
"workdir": f"s3://{bucket_name}/{bucket_prefix}",
"region": bucket_region,
}


@router.post("/credentials", status_code=HTTP_201_CREATED)
async def generate_user_key(request: Request, auth=Depends(Auth)):
token_claims = await auth.get_token_claims()
user_id = token_claims.get("sub")

existing_keys = aws_utils.list_iam_user_keys(user_id)
if len(existing_keys) >= config["MAX_IAM_KEYS_PER_USER"]:
err_msg = f"Too many existing keys: only {config['MAX_IAM_KEYS_PER_USER']} are allowed per user. Delete an existing key before creating a new one"
logger.error(err_msg)
raise HTTPException(HTTP_400_BAD_REQUEST, err_msg)

key_id, key_secret = aws_utils.create_iam_user_and_key(user_id)
return {
"aws_key_id": key_id,
"aws_key_secret": key_secret,
}


def seconds_to_human_time(seconds):
if seconds < 0:
return None
m, s = divmod(seconds, 60)
h, m = divmod(m, 60)
d, h = divmod(h, 24)
if d:
return "{} days".format(int(d))
if h:
return "{} hours".format(int(h))
if m:
return "{} minutes".format(int(h))
return "{} seconds".format(int(s))


@router.get("/credentials", status_code=HTTP_200_OK)
async def get_user_keys(request: Request, auth=Depends(Auth)):
token_claims = await auth.get_token_claims()
user_id = token_claims.get("sub")
now = datetime.now(timezone.utc)

def get_key_expiration(key_status, key_creation_date):
if (
key_status == "Inactive"
or (now - key_creation_date).days > config["IAM_KEYS_LIFETIME_DAYS"]
):
return "expired"
expires_in = (
key_creation_date + timedelta(days=config["IAM_KEYS_LIFETIME_DAYS"]) - now
)
return f"expires in {seconds_to_human_time(expires_in.total_seconds())}"

return [
{
"aws_key_id": key["AccessKeyId"],
"status": get_key_expiration(key["Status"], key["CreateDate"]),
}
for key in aws_utils.list_iam_user_keys(user_id)
]


@router.delete("/credentials/{key_id}", status_code=HTTP_204_NO_CONTENT)
async def delete_user_key(request: Request, key_id: str, auth=Depends(Auth)):
token_claims = await auth.get_token_claims()
user_id = token_claims.get("sub")
aws_utils.delete_iam_user_key(user_id, key_id)
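After this change, GET /storage/info is the only route left on this router. A minimal sketch of calling it, with a hypothetical base URL and token (the response shape matches the handler above and the test below):

import httpx

res = httpx.get(
    "https://gen3wf.example.com/storage/info",  # hypothetical deployment URL
    headers={"Authorization": "bearer <access_token>"},  # hypothetical token
)
res.raise_for_status()
print(res.json())  # {"bucket": ..., "workdir": "s3://<bucket>/ga4gh-tes", "region": ...}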
20 changes: 20 additions & 0 deletions tests/test_misc.py
@@ -1,5 +1,9 @@
import boto3
from moto import mock_aws
import pytest

from conftest import TEST_USER_ID
from gen3workflow import aws_utils
from gen3workflow.aws_utils import get_safe_name_from_user_id
from gen3workflow.config import config

@@ -29,3 +33,19 @@ def test_get_safe_name_from_user_id(reset_config_hostname):
safe_name = get_safe_name_from_user_id(user_id)
assert len(safe_name) == 63
assert safe_name == f"gen3wf-{escaped_shortened_hostname}-{user_id}"


@pytest.mark.asyncio
async def test_storage_info(client, access_token_patcher):
with mock_aws():
aws_utils.iam_client = boto3.client("iam")
expected_bucket_name = f"gen3wf-{config['HOSTNAME']}-{TEST_USER_ID}"

res = await client.get("/storage/info", headers={"Authorization": "bearer 123"})
assert res.status_code == 200, res.text
storage_info = res.json()
assert storage_info == {
"bucket": expected_bucket_name,
"workdir": f"s3://{expected_bucket_name}/ga4gh-tes",
"region": config["USER_BUCKETS_REGION"],
}
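The new test runs entirely against moto's mocked AWS APIs, so no real credentials are required. Assuming pytest, pytest-asyncio, and moto are installed in the dev environment, it can be run on its own with:

python -m pytest tests/test_misc.py::test_storage_info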