Removing "merge_similar_records", which has run, and is no longer needed.

Dan 2021-02-15 11:35:49 -05:00
parent f4dd5d8e71
commit 9de6602667
4 changed files with 3 additions and 112 deletions


@@ -7,18 +7,6 @@ info:
servers:
  - url: http://localhost:5000/v1.0
paths:
  /status:
    get:
      operationId: communicator.api.admin.status
      summary: provides a basic status endpoint, just get things off the ground.
      security: [] # Disable security for this endpoint only.
      responses:
        '200':
          description: Status indicator that the app is up and alive.
          content:
            application/json:
              schema:
                $ref: "#/components/schemas/Status"
  /update_data:
    get:
      operationId: communicator.api.admin.update_data
@@ -31,18 +19,7 @@ paths:
          text/plain:
            schema:
              type: string
  /merge_similar_records:
    get:
      operationId: communicator.api.admin.merge_similar_records
      summary: Checks the local file system and firecloud for data and loads it into the db.
      security: [] # Disable security for this endpoint only.
      responses:
        '200':
          description: Status indicator that the app is up and alive.
          content:
            text/plain:
              schema:
                type: string
  /split_location_column:
    get:
      operationId: communicator.api.admin.split_location_column


@@ -49,10 +49,6 @@ def verify_token(token, required_scopes):
        raise Exception("permission_denied", "API Token information is not correct")

def status():
    return {"status": "good"}

def add_sample(body):
    sample = Sample(barcode=body['barcode'],
                    student_id=body['student_id'],
@@ -135,12 +131,12 @@ def update_and_notify():
    executor.submit(_update_data)
    executor.submit(_notify_by_email)
    executor.submit(_notify_by_text)
    return "Task scheduled and running the background"
    return "Task scheduled and running in the background"

def update_data():
    executor.submit(_update_data)
    return "Task scheduled and running the background"
    return "Task scheduled and running in the background"
def _update_data():
@@ -167,11 +163,6 @@ def split_location_column():

def correct_computing_id():
    sample_service = SampleService()
    sample_service.correct_computing_id()

def merge_similar_records():
    sample_service = SampleService()
    sample_service.merge_similar_records()

def notify_by_email(file_name=None, retry=False):
    executor.submit(_notify_by_email, file_name, retry)


@@ -56,30 +56,3 @@ class SampleService(object):
                sample.computing_id = match.group(1).strip().lower()
                db.session.commit()

    def merge_similar_records(self):
        """ We have samples that are duplicates of each other because of the way the data was coming in
        earlier on. This is a one-time fix that will compare all records based on the student id, location
        and date, and merge them together using the new and correct bar code."""
        # Get all samples that do not contain an email (these were added via the api call)
        samples = db.session.query(Sample).filter(Sample.email == None).all()
        for sample in samples:
            sample2 = db.session.query(Sample).\
                filter(Sample.email != None).\
                filter(Sample.student_id == sample.student_id).\
                filter(Sample.date == sample.date).\
                filter(Sample.location == sample.location).\
                first()
            if sample2:
                sample.merge(sample2)
                # Move notifications over as well.
                notifications = sample2.notifications
                sample.notifications = notifications
                sample2.notifications = []
                db.session.add(sample)
                db.session.delete(sample2)
                db.session.commit()


@@ -70,56 +70,6 @@ class IvyServiceTest(BaseTest):
        self.assertEqual(7, len(db.session.query(Sample).all()))

    def test_merge_similar_records(self):
        service = SampleService()
        # 511908685 - 202010051136 - 0202
        s1 = Sample(barcode="111111111-AAA-202010050000-0000",
                    student_id=111111111,
                    date = parser.parse("202010050000"),
                    last_modified = parser.parse("202010050000"),
                    location=0)
        s2 = Sample(barcode="111111111-202010050000-0000",
                    student_id=111111111,
                    date = parser.parse("202010050000"),
                    last_modified = parser.parse("202010050000"),
                    location=0,
                    email="dan@sartography.com",
                    phone="555-555-5555")
        s2n = Notification(date=parser.parse("202010050000"), type="email", successful=True)
        s2.notifications = [s2n]
        db.session.add(s1)
        db.session.add(s2)
        db.session.commit()

        delta = datetime.now() - s1.last_modified
        self.assertGreater(delta.days, 1) # Last modified is in the past.
        self.assertEqual(2, len(db.session.query(Sample).all()))
        service.merge_similar_records()
        self.assertEqual(1, len(db.session.query(Sample).all()))
        sample = db.session.query(Sample).first()
        self.assertEqual("dan@sartography.com", sample.email)
        self.assertEqual("111111111-AAA-202010050000-0000", sample.barcode)
        self.assertEqual(1, len(sample.notifications))
        delta = datetime.now() - sample.last_modified
        self.assertEqual(0, delta.days) # Last modified is updated on merge.

    def test_merge_non_similar_records(self):
        service = SampleService()
        db.session.add(Sample(barcode="222222222-AAA-202010050000-0000",
                              student_id=222222222,
                              date = parser.parse("202010050000"),
                              location=0))
        db.session.add(Sample(barcode="111111111-202010050000-0000",
                              student_id=111111111,
                              date = parser.parse("202010050000"),
                              location=0,
                              email="dan@sartography.com",
                              phone="555-555-5555"))
        service.merge_similar_records()
        self.assertEqual(2, len(db.session.query(Sample).all()))

    def test_correct_computing_id(self):
        service = SampleService()
        db.session.add(Sample(barcode="222222222-AAA-202010050000-0000",