Update the database to include timezone information, and change every point where we set the time on an event to use UTC. If a record in the database carries a timezone, it will display properly on the front end; by default, everything is stored in the database in UTC.

This commit is contained in:
Kelly McDonald 2021-04-29 10:25:28 -04:00
parent c029dad688
commit 2b9cee6b89
12 changed files with 20 additions and 20 deletions

View File

@ -55,7 +55,7 @@ def update_datastore(id, body):
raise ApiError('unknown_item', 'The item "' + id + '" is not recognized.')
DataStoreSchema().load(body, instance=item, session=session)
item.last_updated = datetime.now()
item.last_updated = datetime.utcnow()
session.add(item)
session.commit()
return DataStoreSchema().dump(item)
@ -87,7 +87,7 @@ def add_datastore(body):
'but not more than one of these')
item = DataStoreSchema().load(body)
item.last_updated = datetime.now()
item.last_updated = datetime.utcnow()
session.add(item)
session.commit()
return DataStoreSchema().dump(item)

View File

@ -23,7 +23,7 @@ def add_study(body):
study_model = StudyModel(user_uid=UserService.current_user().uid,
title=body['title'],
primary_investigator_id=body['primary_investigator_id'],
last_updated=datetime.now(),
last_updated=datetime.utcnow(),
status=StudyStatus.in_progress)
session.add(study_model)
StudyService.add_study_update_event(study_model,
@ -51,7 +51,7 @@ def update_study(study_id, body):
study: Study = StudyForUpdateSchema().load(body)
status = StudyStatus(study.status)
study_model.last_updated = datetime.now()
study_model.last_updated = datetime.utcnow()
if study_model.status != status:
study_model.status = status

View File

@ -76,7 +76,7 @@ class DataStoreBase(object):
workflow_id=workflow_id,
spec_id=workflow_spec_id)
study.value = args[1]
study.last_updated = datetime.now()
study.last_updated = datetime.utcnow()
overwritten = self.overwritten(study.value, prev_value)
session.add(study)
session.commit()

View File

@ -210,7 +210,7 @@ class FileService(object):
new_file_data_model = FileDataModel(
data=binary_data, file_model_id=file_model.id, file_model=file_model,
version=version, md5_hash=md5_checksum, date_created=datetime.now()
version=version, md5_hash=md5_checksum, date_created=datetime.utcnow()
)
session.add_all([file_model, new_file_data_model])
session.commit()

View File

@ -496,7 +496,7 @@ class StudyService(object):
workflow_model = WorkflowModel(status=WorkflowStatus.not_started,
study=study,
workflow_spec_id=spec.id,
last_updated=datetime.now())
last_updated=datetime.utcnow())
session.add(workflow_model)
session.commit()
return workflow_model

View File

@ -219,7 +219,7 @@ class WorkflowProcessor(object):
self.workflow_model.status = self.get_status()
self.workflow_model.total_tasks = len(tasks)
self.workflow_model.completed_tasks = sum(1 for t in tasks if t.state in complete_states)
self.workflow_model.last_updated = datetime.now()
self.workflow_model.last_updated = datetime.utcnow()
self.update_dependencies(self.spec_data_files)
session.add(self.workflow_model)
session.commit()

View File

@ -63,7 +63,7 @@ class WorkflowService(object):
db.session.commit()
workflow_model = WorkflowModel(status=WorkflowStatus.not_started,
workflow_spec_id=spec_id,
last_updated=datetime.now(),
last_updated=datetime.utcnow(),
study=study)
return workflow_model
@ -714,7 +714,7 @@ class WorkflowService(object):
mi_count=task.multi_instance_count, # This is the number of times the task could repeat.
mi_index=task.multi_instance_index, # And the index of the currently repeating task.
process_name=task.process_name,
date=datetime.now(),
date=datetime.utcnow(),
)
db.session.add(task_event)
db.session.commit()

View File

@ -70,7 +70,7 @@ class BaseTest(unittest.TestCase):
{
'id': 0,
'title': 'The impact of fried pickles on beer consumption in bipedal software developers.',
'last_updated': datetime.datetime.now(),
'last_updated': datetime.datetime.utcnow(),
'status': StudyStatus.in_progress,
'primary_investigator_id': 'dhf8r',
'sponsor': 'Sartography Pharmaceuticals',
@ -80,7 +80,7 @@ class BaseTest(unittest.TestCase):
{
'id': 1,
'title': 'Requirement of hippocampal neurogenesis for the behavioral effects of soft pretzels',
'last_updated': datetime.datetime.now(),
'last_updated': datetime.datetime.utcnow(),
'status': StudyStatus.in_progress,
'primary_investigator_id': 'dhf8r',
'sponsor': 'Makerspace & Co.',

View File

@ -25,7 +25,7 @@ class TestStudyApi(BaseTest):
TEST_STUDY = {
"title": "Phase III Trial of Genuine People Personalities (GPP) Autonomous Intelligent Emotional Agents "
"for Interstellar Spacecraft",
"last_updated": datetime.now(tz=timezone.utc),
"last_updated": datetime.utcnow(),
"primary_investigator_id": "tmm2x",
"user_uid": "dhf8r",
}

View File

@ -47,7 +47,7 @@ class TestStudyService(BaseTest):
self.assertIsNotNone(study.id)
workflow = WorkflowModel(workflow_spec_id="random_fact", study_id=study.id,
status=WorkflowStatus.not_started, last_updated=datetime.now())
status=WorkflowStatus.not_started, last_updated=datetime.utcnow())
db.session.add(workflow)
db.session.commit()
# Assure there is a master specification, one standard spec, and lookup tables.

View File

@ -273,7 +273,7 @@ class TestAuthentication(BaseTest):
def _make_fake_study(self, uid):
return {
"title": "blah",
"last_updated": datetime.now(tz=timezone.utc),
"last_updated": datetime.utcnow(),
"status": StudyStatus.in_progress,
"primary_investigator_id": uid,
"user_uid": uid,

View File

@ -52,7 +52,7 @@ class TestWorkflowSync(BaseTest):
self.load_example_data()
othersys = get_all_spec_state()
rf2pos = get_random_fact_pos(othersys)
othersys[rf2pos]['date_created'] = str(datetime.now())
othersys[rf2pos]['date_created'] = str(datetime.utcnow())
othersys[rf2pos]['md5_hash'] = '12345'
mock_get.return_value = othersys
response = get_changed_workflows('localhost:0000') #endpoint is not used due to mock
@ -69,7 +69,7 @@ class TestWorkflowSync(BaseTest):
self.load_example_data()
othersys = get_all_spec_state()
othersys.append({'workflow_spec_id':'my_new_workflow',
'date_created':str(datetime.now()),
'date_created':str(datetime.utcnow()),
'md5_hash': '12345'})
mock_get.return_value = othersys
response = get_changed_workflows('localhost:0000') #endpoint is not used due to mock
@ -121,7 +121,7 @@ class TestWorkflowSync(BaseTest):
self.load_example_data()
othersys = get_workflow_spec_files('random_fact')
rf2pos = get_random_fact_2_pos(othersys)
othersys[rf2pos]['date_created'] = str(datetime.now())
othersys[rf2pos]['date_created'] = str(datetime.utcnow())
othersys[rf2pos]['md5_hash'] = '12345'
mock_get.return_value = othersys
response = get_changed_files('localhost:0000','random_fact',as_df=False) #endpoint is not used due to mock
@ -145,7 +145,7 @@ class TestWorkflowSync(BaseTest):
# change the remote file date and hash
othersys = get_workflow_spec_files('random_fact')
rf2pos = get_random_fact_2_pos(othersys)
othersys[rf2pos]['date_created'] = str(datetime.now())
othersys[rf2pos]['date_created'] = str(datetime.utcnow())
othersys[rf2pos]['md5_hash'] = '12345'
spec_files_mock.return_value = othersys
# actually go get a different file
@ -179,7 +179,7 @@ class TestWorkflowSync(BaseTest):
'primary':False,
'content_type':'text/text',
'primary_process_id':None,
'date_created':str(datetime.now()),
'date_created':str(datetime.utcnow()),
'md5_hash':'12345'
}
othersys.append(newfile)