Skip to content

Commit

Permalink
Merge pull request #366 from isb-cgc/isb-cgc-prod-sp
Browse files Browse the repository at this point in the history
Sprint 19 - Release
  • Loading branch information
s-paquette authored Sep 21, 2017
2 parents a8196e8 + eca3fbf commit 6bc2ade
Show file tree
Hide file tree
Showing 5 changed files with 77 additions and 26 deletions.
19 changes: 19 additions & 0 deletions accounts/migrations/0014_auto_20170828_1753.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2017-08-29 00:53
from __future__ import unicode_literals

from django.db import migrations


class Migration(migrations.Migration):
    # Auto-generated migration (Django 1.9.6, 2017-08-29): adds a composite
    # uniqueness constraint to accounts.UserAuthorizedDatasets so that a given
    # NIH user can be linked to a given authorized dataset at most once.

    dependencies = [
        # Must run after the migration that added AuthorizedDataset.duca_id.
        ('accounts', '0013_authorizeddataset_duca_id'),
    ]

    operations = [
        # Enforce uniqueness of the (nih_user, authorized_dataset) pair at
        # the database level; mirrors Meta.unique_together on the model.
        migrations.AlterUniqueTogether(
            name='userauthorizeddatasets',
            unique_together=set([('nih_user', 'authorized_dataset')]),
        ),
    ]
9 changes: 9 additions & 0 deletions accounts/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,15 @@ class UserAuthorizedDatasets(models.Model):
nih_user = models.ForeignKey(NIH_User, null=False)
authorized_dataset = models.ForeignKey(AuthorizedDataset, null=False)

    class Meta:
        # Each (NIH user, authorized dataset) pair may be recorded only once;
        # the matching DB constraint is added by migration 0014_auto_20170828_1753.
        unique_together = (("nih_user", "authorized_dataset"),)

def __str__(self):
return "UserAuthorizedDataset({}, {})".format(self.nih_user.NIH_username,self.authorized_dataset.whitelist_id)

def __repr__(self):
return self.__str__()


class ServiceAccount(models.Model):
google_project = models.ForeignKey(GoogleProject, null=False)
Expand Down
40 changes: 26 additions & 14 deletions accounts/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,11 +56,8 @@ def extended_logout_view(request):
nih_user = NIH_User.objects.get(user=user, linked=True)
nih_user.active = False
nih_user.save()
logger.info("[STATUS] NIH user {} has been de-activated.".format(nih_user.NIH_username))

user_auth_datasets = UserAuthorizedDatasets.objects.filter(nih_user=nih_user)
for dataset in user_auth_datasets:
dataset.delete()
logger.info("Authorized datasets removed for NIH user {}".format(nih_user.NIH_username))
except (ObjectDoesNotExist, MultipleObjectsReturned) as e:
if type(e) is MultipleObjectsReturned:
logger.error("[WARNING] More than one linked NIH User with user id %d - deactivating all of them!" % (str(e), request.user.id))
Expand All @@ -72,7 +69,7 @@ def extended_logout_view(request):
for dataset in user_auth_datasets:
dataset.delete()
else:
logger.info("[STATUS] No NIH user was found for user {} - no datasets revoked.".format(user.email))
logger.info("[STATUS] No NIH user was found for user {} - no one set to inactive.".format(user.email))

directory_service, http_auth = get_directory_resource()
user_email = user.email
Expand Down Expand Up @@ -157,6 +154,7 @@ def unlink_accounts_and_get_acl_tasks(user_id):
nih_account_to_unlink = NIH_User.objects.get(user_id=user_id, linked=True)
nih_account_to_unlink.linked = False
nih_account_to_unlink.save()

unlinked_nih_user_list.append((user_id, nih_account_to_unlink.NIH_username))

except MultipleObjectsReturned as e:
Expand Down Expand Up @@ -184,7 +182,6 @@ def unlink_accounts_and_get_acl_tasks(user_id):

@login_required
def unlink_accounts(request):
logger.info("[STATUS] In unlink accounts")
user_id = request.user.id

try:
Expand Down Expand Up @@ -485,26 +482,25 @@ def verify_service_account(gcp_id, service_account, datasets, user_email, is_ref
registered_user = bool(User.objects.filter(email=email).first())
roles[role].append({'email': email,
'registered_user': registered_user})

elif member.startswith('serviceAccount'):
if member.find(':'+service_account) > 0:
if member.split(':')[1] == service_account:
verified_sa = True

# 2. Verify that the current user is a member of the GCP project
if not is_email_in_iam_roles(roles, user_email):
logging.info('{0}: User email {1} is not the IAM policy of project {2}.'.format(service_account, user_email, gcp_id))
logger.info('[STATUS] While verifying SA {0}: User email {1} is not the IAM policy of project {2}.'.format(service_account, user_email, gcp_id))
st_logger.write_struct_log_entry(log_name, {
'message': '{0}: User email {1} is not the IAM policy of project {2}.'.format(service_account, user_email, gcp_id)
'message': 'While verifying SA {0}: User email {1} is not the IAM policy of project {2}.'.format(service_account, user_email, gcp_id)
})
return {'message': 'You must be a member of a project in order to register it'}

# 3. VERIFY SERVICE ACCOUNT IS IN THIS PROJECT
if not verified_sa:
logging.info('Provided service account does not exist in project.')
logger.info('[STATUS] While verifying SA {0}: Provided service account does not exist in project {1}.'.format(service_account, gcp_id))

st_logger.write_struct_log_entry(log_name, {'message': '{0}: Provided service account does not exist in project {1}.'.format(service_account, gcp_id)})
st_logger.write_struct_log_entry(log_name, {'message': 'While verifying SA {0}: Provided service account does not exist in project {1}.'.format(service_account, gcp_id)})
# return error that the service account doesn't exist in this project
return {'message': 'The provided service account does not exist in the selected project'}
return {'message': "Service Account ID '{}' does not exist in Google Cloud Project {}. Please double-check the service account you have entered.".format(service_account,gcp_id)}


# 4. VERIFY ALL USERS ARE REGISTERED AND HAVE ACCESS TO APPROPRIATE DATASETS
Expand All @@ -517,10 +513,20 @@ def verify_service_account(gcp_id, service_account, datasets, user_email, is_ref

# IF USER IS REGISTERED
if member['registered_user']:
# TODO: This should probably be a .get() with a try/except because multiple-users-same-email is a problem
user = User.objects.filter(email=member['email']).first()

nih_user = None

# FIND NIH_USER FOR USER
nih_user = NIH_User.objects.filter(user_id=user.id).first()
try:
nih_user = NIH_User.objects.get(user_id=user.id, linked=True)
except ObjectDoesNotExist:
nih_user = None
except MultipleObjectsReturned:
st_logger.write_struct_log_entry(log_name, {'message': 'Found more than one linked NIH_User for email address {}: {}'.format(member['email'], ",".join(nih_user.values_list('NIH_username',flat=True)))})
raise Exception('Found more than one linked NIH_User for email address {}: {}'.format(member['email'], ",".join(nih_user.values_list('NIH_username',flat=True))))

member['nih_registered'] = bool(nih_user)

# IF USER HAS LINKED ERA COMMONS ID
Expand Down Expand Up @@ -570,7 +576,13 @@ def verify_service_account(gcp_id, service_account, datasets, user_email, is_ref
# 4. VERIFY PI IS ON THE PROJECT

except HttpError as e:
logger.error("[STATUS] While verifying a service account {}: ".format(service_account))
logger.exception(e)
return {'message': 'There was an error accessing your project. Please verify that you have set the permissions correctly.'}
except Exception as e:
logger.error("[STATUS] While verifying a service account {}: ".format(service_account))
logger.exception(e)
return {'message': "There was an error while verifying this service account. Please contact the administrator."}

return_obj = {'roles': roles,
'all_user_datasets_verified': all_user_datasets_verified}
Expand Down
17 changes: 14 additions & 3 deletions cohorts/metadata_counting.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
from time import sleep

import django
import re
from metadata_helpers import *
from projects.models import Program, Project, User_Data_Tables, Public_Metadata_Tables
from google_helpers.bigquery_service import authorize_credentials_with_Google
Expand Down Expand Up @@ -64,7 +65,9 @@ def count_user_metadata(user, inc_filters=None, cohort_id=None):
project_counts = {}

for program in Program.get_user_programs(user):
user_data_counts['program']['values'].append({'id': program.id, 'value': program.id, 'displ_name': program.name, 'name': program.name, 'count': 0, 'program': program.id,})
# Supports #2018 for the user data case:
fully_qual = ("PROGRAM-" + str(program.id)).upper()
user_data_counts['program']['values'].append({'id': program.id, 'value': program.id, 'full_id': fully_qual, 'displ_name': program.name, 'name': program.name, 'count': 0, 'program': program.id,})
project_counts[program.id] = 0

for project in Project.get_user_projects(user):
Expand All @@ -87,8 +90,11 @@ def count_user_metadata(user, inc_filters=None, cohort_id=None):
project_ms_table = None

if project_ms_table is not None:
# Supports #2018 for the user data case:
fully_qual = ("PROJECT-" + str(project.id)).upper()
user_data_counts['project']['values'].append({'id': project.id,
'value': project.id,
'full_id': fully_qual,
'name': project.name,
'count': 0,
'metadata_samples': project_ms_table,
Expand Down Expand Up @@ -680,8 +686,13 @@ def count_public_metadata(user, cohort_id=None, inc_filters=None, program_id=Non
feature['values'] = normalize_by_200(counts[attr]['counts'])

for value, count in feature['values'].items():

val_obj = {'value': str(value), 'count': count, }
# Supports #2018. This value object is the only information that gets used to
# stock cohort checkboxes in the template. To support clicking on a treemap to
# trigger the checkbox, we need have an id that glues the attribute name to the
# value in a standard manner, and we really don't want to have to construct this
# with a unwieldy template statement. So we do it here:
fully_qual = (re.sub('\s+', '_', (attr + "-" + str(value)))).upper()
val_obj = {'value': str(value), 'count': count, 'full_id': fully_qual}

if value in metadata_attr_values[attr]['values'] and metadata_attr_values[attr]['values'][value] is not None \
and len(metadata_attr_values[attr]['values'][value]) > 0:
Expand Down
18 changes: 9 additions & 9 deletions cohorts/metadata_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,8 +212,7 @@ def get_sql_connection():
return db

except Exception as e:
logger.error("[ERROR] Exception in get_sql_connection(): ")
logger.error(traceback.format_exc())
logger.error("[ERROR] Exception in get_sql_connection(): "+e.message)
logger.exception(e)
if db and db.open: db.close()

Expand Down Expand Up @@ -261,9 +260,8 @@ def fetch_program_data_types(program, for_display=False):
return copy.deepcopy(METADATA_DATA_TYPES[program])

except Exception as e:
print >> sys.stdout, traceback.format_exc()
logger.error('[ERROR] Exception while trying to get data types for program #%s:' % str(program))
logger.error(traceback.format_exc())
logger.exception(e)
finally:
if cursor: cursor.close()
if db and db.open: db.close()
Expand Down Expand Up @@ -354,7 +352,7 @@ def get_public_programs():

except Exception as e:
logger.error('[ERROR] Excpetion while fetching public program list:')
logger.error(traceback.format_exc())
logger.exception(e)


# Given a public program's shorthand name, retrive its database ID for use in various queries
Expand All @@ -370,7 +368,7 @@ def get_public_program_id(program):

except Exception as e:
logger.error('[ERROR] Excpetion while fetching %s program ID:' % program)
logger.error(traceback.format_exc())
logger.exception(e)


# Get the list of possible metadata values and their display strings for non-continuous data based on their in-use
Expand All @@ -385,6 +383,10 @@ def fetch_metadata_value_set(program=None):
if not program:
program = get_public_program_id('TCGA')

# This is only valid for public programs
if not Program.objects.get(id=program).is_public:
return {}

if program not in METADATA_ATTR or len(METADATA_ATTR[program]) <= 0:
fetch_program_attr(program)

Expand Down Expand Up @@ -437,8 +439,6 @@ def fetch_metadata_value_set(program=None):

except Exception as e:
logger.error('[ERROR] Exception when fetching the metadata value set:')
print >> sys.stdout, traceback.format_exc()
logger.error(traceback.format_exc())
logger.exception(e)
finally:
if cursor: cursor.close()
Expand Down Expand Up @@ -499,7 +499,7 @@ def get_preformatted_values(program=None):
return copy.deepcopy(PREFORMATTED_VALUES[program])

except Exception as e:
print >> sys.stdout, traceback.format_exc()
logger.error("[ERROR] When getting preformatted values:")
logger.exception(e)
finally:
if cursor: cursor.close()
Expand Down

0 comments on commit 6bc2ade

Please sign in to comment.