Generic info fields input into 'custom'.

This commit is contained in:
lwark
2024-09-24 15:23:51 -05:00
parent 7bc356e205
commit b0e9f9996d
13 changed files with 898 additions and 12 deletions

View File

@@ -932,6 +932,9 @@ class SubmissionType(BaseClass):
new_process.submission_types.append(submission_type)
new_process.kit_types.append(new_kit)
new_process.equipment_roles.append(new_role)
if 'orgs' in import_dict.keys():
logger.info("Found Organizations to be imported.")
Organization.import_from_json(filepath=filepath)
return submission_type

View File

@@ -2,12 +2,15 @@
All client organization related models.
'''
from __future__ import annotations
import json, yaml, logging
from pathlib import Path
from pprint import pformat
from sqlalchemy import Column, String, INTEGER, ForeignKey, Table
from sqlalchemy.orm import relationship, Query
from . import Base, BaseClass
from tools import check_authorization, setup_lookup
from typing import List
import logging
logger = logging.getLogger(f"submissions.{__name__}")
@@ -74,6 +77,42 @@ class Organization(BaseClass):
def save(self):
super().save()
@classmethod
@check_authorization
def import_from_json(cls, filepath: Path|str):
"""Imports organizations and their contacts from a JSON or YAML file."""
if isinstance(filepath, str):
filepath = Path(filepath)
if not filepath.exists():
logger.critical("Given file could not be found.")
return None
with open(filepath, "r") as f:
if filepath.suffix == ".json":
import_dict = json.load(fp=f)
elif filepath.suffix == ".yml":
import_dict = yaml.safe_load(stream=f)
else:
raise Exception(f"Filetype {filepath.suffix} not supported.")
data = import_dict['orgs']
logger.debug(pformat(import_dict))
for org in data:
organ = Organization.query(name=org['name'])
if organ is None:
organ = Organization(name=org['name'])
try:
organ.cost_centre = org['cost_centre']
except KeyError:
organ.cost_centre = "xxx"
for contact in org['contacts']:
cont = Contact.query(name=contact['name'])
if cont is None:
cont = Contact()
for k, v in contact.items():
setattr(cont, k, v)
organ.contacts.append(cont)
organ.save()
# logger.debug(pformat(organ.__dict__))
class Contact(BaseClass):
"""
@@ -119,7 +158,7 @@ class Contact(BaseClass):
match name:
case str():
# logger.debug(f"Looking up contact with name: {name}")
query = query.filter(cls.name == name)
query = query.filter(cls.name == name.title())
limit = 1
case _:
pass
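For reference, a minimal sketch of driving the new Organization.import_from_json directly; the file path is made up, and the 'orgs' structure mirrors the viral_culture.yml bundled later in this commit.

# Hypothetical usage sketch; assumes the caller is authorized, since
# import_from_json is wrapped in @check_authorization, and that Organization
# is imported from its models module (import omitted here).
from pathlib import Path

Organization.import_from_json(filepath=Path("resources/viral_culture.yml"))
# Each entry under the file's 'orgs' key is looked up by name (created if
# missing), given a cost_centre (falling back to "xxx"), and its contacts are
# created or reused before the organization is saved.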

View File

@@ -72,6 +72,7 @@ class BasicSubmission(BaseClass):
contact = relationship("Contact", back_populates="submissions") #: client org
contact_id = Column(INTEGER, ForeignKey("_contact.id", ondelete="SET NULL",
name="fk_BS_contact_id")) #: client lab id from _organizations
custom = Column(JSON)
submission_sample_associations = relationship(
"SubmissionSampleAssociation",
@@ -563,7 +564,11 @@ class BasicSubmission(BaseClass):
existing += value
else:
if value is not None:
existing.append(value)
if key == "custom":
existing = value
else:
existing.append(value)
self.__setattr__(key, existing)
flag_modified(self, key)
return
@@ -741,6 +746,28 @@ class BasicSubmission(BaseClass):
dict: Updated sample dictionary
"""
logger.info(f"Calling {cls.__mapper_args__['polymorphic_identity']} info parser.")
# logger.debug(f"Input dict: {input_dict}")
# logger.debug(f"Custom fields: {custom_fields}")
input_dict['custom'] = {}
for k,v in custom_fields.items():
logger.debug(f"Attempting custom parse of {k}: {v}")
match v['type']:
case "exempt":
continue
case "cell":
ws = xl[v['read']['sheet']]
input_dict['custom'][k] = ws.cell(row=v['read']['row'], column=v['read']['column']).value
case "range":
ws = xl[v['sheet']]
input_dict['custom'][k] = []
# NOTE: start_row/end_row and start_column/end_column are inclusive bounds.
for ii in range(v['start_row'], v['end_row'] + 1):
for jj in range(v['start_column'], v['end_column'] + 1):
input_dict['custom'][k].append(dict(value=ws.cell(row=ii, column=jj).value, row=ii, column=jj))
return input_dict
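For context, a hedged sketch of the custom_fields structure this parser consumes; the field names below are hypothetical, but the shapes match the 'cell' and 'range' branches above.

# Illustrative only: two made-up custom fields showing the expected layout.
custom_fields = {
    "culture_media": {                    # 'cell' type: one read location
        "type": "cell",
        "read": {"sheet": "Sample List", "row": 10, "column": 2},
        "write": [],
    },
    "incubation_temps": {                 # 'range' type: inclusive bounds
        "type": "range",
        "sheet": "Sample List",
        "start_row": 11, "end_row": 12,
        "start_column": 2, "end_column": 3,
    },
}
# The parser stores the single cell value for "culture_media" and a list of
# {"value": ..., "row": ..., "column": ...} dicts for "incubation_temps"
# under input_dict['custom'].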
@classmethod
@@ -790,6 +817,29 @@ class BasicSubmission(BaseClass):
Workbook: Updated workbook
"""
logger.info(f"Hello from {cls.__mapper_args__['polymorphic_identity']} autofill")
logger.debug(f"Input dict: {info}")
logger.debug(f"Custom fields: {custom_fields}")
for k,v in custom_fields.items():
try:
assert v['type'] in ['exempt', 'range', 'cell']
except (AssertionError, KeyError):
continue
match v['type']:
case "exempt":
continue
case "cell":
v['write'].append(v['read'])
for cell in v['write']:
ws = input_excel[cell['sheet']]
ws.cell(row=cell['row'], column=cell['column'], value=info['custom'][k])
case "range":
ws = input_excel[v['sheet']]
if v['start_row'] != v['end_row']:
v['end_row'] = v['end_row'] + 1
if v['start_column'] != v['end_column']:
v['end_column'] = v['end_column'] + 1
for item in info['custom'][k]:
ws.cell(row=item['row'], column=item['column'], value=item['value'])
return input_excel
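On the write side, each parsed range item already carries its coordinates, so autofill is a direct per-cell write; a minimal sketch with made-up values and a throwaway openpyxl workbook.

# Illustrative only: writing a parsed 'range' custom field back to a sheet.
from openpyxl import Workbook

wb = Workbook()
ws = wb.create_sheet("Sample List")
info = {"custom": {"incubation_temps": [
    {"value": 35, "row": 11, "column": 2},
    {"value": 37, "row": 11, "column": 3},
]}}
for item in info["custom"]["incubation_temps"]:
    ws.cell(row=item["row"], column=item["column"], value=item["value"])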
@classmethod

View File

@@ -65,20 +65,28 @@ class SheetParser(object):
"""
parser = InfoParser(xl=self.xl, submission_type=self.submission_type, sub_object=self.sub_object)
info = parser.parse_info()
logger.debug(f"Checking old submission type: {self.submission_type.name} against new: {info['submission_type']['value']}")
if self.submission_type.name != info['submission_type']['value']:
if info['submission_type']['value'] not in [None, "None", "", " "]:
self.submission_type = SubmissionType.query(name=info['submission_type']['value'])
logger.debug(f"Updated self.submission_type to {self.submission_type}. Rerunning parse.")
self.parse_info()
return
self.info_map = parser.map
for k, v in info.items():
match k:
# NOTE: exclude samples.
case "sample":
continue
case "submission_type":
self.sub[k] = v
# NOTE: Rescue submission type using scraped values to be used in Sample, Reagents, etc.
if v not in [None, "None", "", " "]:
self.submission_type = SubmissionType.query(name=v)
logger.debug(f"Updated self.submission_type to {self.submission_type}")
# case "submission_type":
# self.sub[k] = v
# # NOTE: Rescue submission type using scraped values to be used in Sample, Reagents, etc.
# if v not in [None, "None", "", " "]:
# self.submission_type = SubmissionType.query(name=v)
# logger.debug(f"Updated self.submission_type to {self.submission_type}")
case _:
self.sub[k] = v
print(f"\n\n {self.sub} \n\n")
def parse_reagents(self, extraction_kit: str | None = None):

View File

@@ -175,6 +175,8 @@ class InfoWriter(object):
"""
final_info = {}
for k, v in self.info:
if k == "custom":
continue
# NOTE: merge all comments to fit in single cell.
if k == "comment" and isinstance(v['value'], list):
json_join = [item['text'] for item in v['value'] if 'text' in item.keys()]

View File

@@ -734,11 +734,19 @@ class PydSubmission(BaseModel, extra='allow'):
# logger.debug(f"Here's our list of duplicate removed samples: {self.samples}")
for key, value in dicto.items():
if isinstance(value, dict):
value = value['value']
try:
value = value['value']
except KeyError:
if key == "custom":
pass
else:
continue
if value is None:
continue
# logger.debug(f"Setting {key} to {value}")
match key:
# case "custom":
# instance.custom = value
case "reagents":
if report.results[0].code == 1:
instance.submission_reagent_associations = []
@@ -782,9 +790,13 @@ class PydSubmission(BaseModel, extra='allow'):
ii = value.items()
except AttributeError:
ii = {}
logger.debug(f"ii is {ii}, value is {value}")
for k, v in ii:
logger.debug(f"k is {k}, v is {v}")
if isinstance(v, datetime):
value[k] = v.strftime("%Y-%m-%d %H:%M:%S")
else:
value[k] = v
instance.set_attribute(key=key, value=value)
case _:
try:

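For illustration, the datetime handling in the custom branch above reduces to this conversion (the field name is hypothetical).

from datetime import datetime

custom = {"collected": datetime(2024, 9, 24, 15, 23, 51)}
for k, v in custom.items():
    if isinstance(v, datetime):
        custom[k] = v.strftime("%Y-%m-%d %H:%M:%S")
# custom is now {"collected": "2024-09-24 15:23:51"}, safe to store in the
# JSON 'custom' column.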
View File

@@ -1,6 +1,7 @@
"""
Constructs main application.
"""
import yaml
from PyQt6.QtWidgets import (
QTabWidget, QWidget, QVBoxLayout,
QHBoxLayout, QScrollArea, QMainWindow,
@@ -10,8 +11,9 @@ from PyQt6.QtGui import QAction
from pathlib import Path
from markdown import markdown
from __init__ import project_path
from tools import check_if_app, Settings, Report, jinja_template_loading
from .functions import select_save_file
from datetime import date
from .pop_ups import HTMLPop
from .misc import LogParser
@@ -74,6 +76,7 @@ class App(QMainWindow):
helpMenu.addAction(self.docsAction)
helpMenu.addAction(self.githubAction)
fileMenu.addAction(self.importAction)
fileMenu.addAction(self.yamlAction)
methodsMenu.addAction(self.searchLog)
methodsMenu.addAction(self.searchSample)
reportMenu.addAction(self.generateReportAction)
@@ -108,6 +111,7 @@ class App(QMainWindow):
self.searchLog = QAction("Search Log", self)
self.searchSample = QAction("Search Sample", self)
self.githubAction = QAction("Github", self)
self.yamlAction = QAction("Export Type Template", self)
def _connectActions(self):
"""
@@ -124,6 +128,7 @@ class App(QMainWindow):
self.searchLog.triggered.connect(self.runSearch)
self.searchSample.triggered.connect(self.runSampleSearch)
self.githubAction.triggered.connect(self.openGithub)
self.yamlAction.triggered.connect(self.export_ST_yaml)
def showAbout(self):
"""
@@ -197,6 +202,17 @@ class App(QMainWindow):
logger.warning(f"Backup function not yet implemented for psql")
current_month_bak = current_month_bak.with_suffix(".psql")
def export_ST_yaml(self):
"""Saves the bundled submission type template (viral_culture.yml) to a user-selected YAML file."""
if check_if_app():
yaml_path = Path(sys._MEIPASS).joinpath("resources", "viral_culture.yml")
else:
yaml_path = project_path.joinpath("src", "submissions", "resources", "viral_culture.yml")
with open(yaml_path, "r") as f:
data = yaml.safe_load(f)
fname = select_save_file(obj=self, default_name="Submission Type Template.yml", extension="yml")
with open(fname, "w") as f:
yaml.safe_dump(data=data, stream=f)
class AddSubForm(QWidget):

View File

@@ -546,7 +546,10 @@ class SubmissionFormWidget(QWidget):
# logger.debug(f"Kits received for {submission_type}: {uses}")
if check_not_nan(value):
# logger.debug(f"The extraction kit in parser was: {value}")
uses.insert(0, uses.pop(uses.index(value)))
try:
uses.insert(0, uses.pop(uses.index(value)))
except ValueError:
logger.warning(f"Couldn't find kit in list, skipping move to top of list.")
obj.ext_kit = value
else:
logger.error(f"Couldn't find {obj.prsr.sub['extraction_kit']}")

View File

@@ -0,0 +1,429 @@
{
"name": "Viral Culture",
"defaults": {
"abbreviation": "VE",
"details_ignore": [
],
"form_ignore": [
"cost_centre"
],
"regex": "(?P<Viral_Culture>RSL(?:-|_)?VE(?:-|_)?20\\d{2}-?\\d{2}-?\\d{2}(?:(_|-)?\\d?([^_0123456789\\sA-QS-Z]|$)?R?\\d?)?)",
"sample_type": "Basic Sample"
},
"info": {
"comment": {
"read": [
{
"column": 2,
"row": 34,
"sheet": "Sample List"
}
],
"write": []
},
"contact": {
"read": [
{
"column": 2,
"row": 4,
"sheet": "Sample List"
}
],
"write": []
},
"contact_phone": {
"read": [],
"write": [
{
"column": 2,
"row": 5,
"sheet": "Sample List"
}
]
},
"cost_centre": {
"read": [
{
"column": 2,
"row": 6,
"sheet": "Sample List"
}
],
"write": []
},
"custom": {},
"extraction_kit": {
"read": [
{
"column": 4,
"row": 5,
"sheet": "Sample List"
}
],
"write": []
},
"rsl_plate_num": {
"read": [
{
"column": 2,
"row": 13,
"sheet": "Sample List"
}
],
"write": []
},
"sample_count": {
"read": [
{
"column": 4,
"row": 4,
"sheet": "Sample List"
}
],
"write": []
},
"signed_by": {
"read": [],
"write": [
{
"column": 2,
"row": 15,
"sheet": "Sample List"
}
]
},
"submission_category": {
"read": [
{
"column": 4,
"row": 6,
"sheet": "Sample List"
}
],
"write": []
},
"submission_type": {
"read": [
{
"column": 4,
"row": 3,
"sheet": "Sample List"
}
],
"write": []
},
"submitted_date": {
"read": [
{
"column": 2,
"row": 3,
"sheet": "Sample List"
}
],
"write": []
},
"submitter_plate_num": {
"read": [
{
"column": 2,
"row": 2,
"sheet": "Sample List"
}
],
"write": []
},
"submitting_lab": {
"read": [
{
"column": 4,
"row": 2,
"sheet": "Sample List"
}
],
"write": []
},
"technician": {
"read": [
{
"column": 2,
"row": 14,
"sheet": "Sample List"
}
],
"write": []
}
},
"samples": {
"lookup_table": {
"end_row": 132,
"merge_on_id": "submitter_id",
"sample_columns": {
"column": 6,
"concentration": 4,
"organism": 3,
"row": 5,
"submitter_id": 2
},
"sheet": "Sample List",
"start_row": 37
},
"plate_map": {
"end_column": 13,
"end_row": 14,
"sheet": "Plate Map",
"start_column": 2,
"start_row": 7
}
},
"kits": [
{
"constant_cost": 0.00,
"mutable_cost_column": 0.00,
"mutable_cost_sample": 0.00,
"kit_type": {
"name": "MagMAX-96 Viral RNA Isolation Kit",
"reagent roles": [
{
"expiry": {
"column": 4,
"row": 19
},
"lot": {
"column": 3,
"row": 19
},
"name": {
"column": 2,
"row": 19
},
"sheet": "Sample List",
"required": 1,
"role": "Wash Solution 1 (MagMAX-96 Viral)",
"extension_of_life": 0
},
{
"expiry": {
"column": 4,
"row": 20
},
"lot": {
"column": 3,
"row": 20
},
"name": {
"column": 2,
"row": 20
},
"sheet": "Sample List",
"required": 1,
"role": "Wash Solution 2 (MagMAX-96 Viral)",
"extension_of_life": 0
},
{
"expiry": {
"column": 4,
"row": 21
},
"lot": {
"column": 3,
"row": 21
},
"name": {
"column": 2,
"row": 21
},
"sheet": "Sample List",
"required": 1,
"role": "Lysis/Binding Solution (MagMAX-96 Viral)",
"extension_of_life": 0
},
{
"expiry": {
"column": 4,
"row": 22
},
"lot": {
"column": 3,
"row": 22
},
"name": {
"column": 2,
"row": 22
},
"sheet": "Sample List",
"required": 1,
"role": "RNA Binding Beads (MagMAX-96 Viral)",
"extension_of_life": 0
},
{
"expiry": {
"column": 4,
"row": 23
},
"lot": {
"column": 3,
"row": 23
},
"name": {
"column": 2,
"row": 23
},
"sheet": "Sample List",
"required": 1,
"role": "Lysis/Binding Enhancer (MagMAX-96 Viral)",
"extension_of_life": 0
},
{
"expiry": {
"column": 4,
"row": 24
},
"lot": {
"column": 3,
"row": 24
},
"name": {
"column": 2,
"row": 24
},
"sheet": "Sample List",
"required": 0,
"role": "Bacterial-Lysis Buffer",
"extension_of_life": 0
},
{
"expiry": {
"column": 4,
"row": 25
},
"lot": {
"column": 3,
"row": 25
},
"name": {
"column": 2,
"row": 25
},
"sheet": "Sample List",
"required": 1,
"role": "Elution Buffer (MagMAX-96 Viral)",
"extension_of_life": 0
},
{
"expiry": {
"column": 4,
"row": 30
},
"lot": {
"column": 3,
"row": 30
},
"name": {
"column": 2,
"row": 30
},
"sheet": "Sample List",
"required": 0,
"role": "Bacterial-Positive Control",
"extension_of_life": 0
},
{
"expiry": {
"column": 4,
"row": 31
},
"lot": {
"column": 3,
"row": 31
},
"name": {
"column": 2,
"row": 31
},
"sheet": "Sample List",
"required": 1,
"role": "Bead Plate",
"extension_of_life": 0
},
{
"expiry": {
"column": 4,
"row": 28
},
"lot": {
"column": 3,
"row": 28
},
"name": {
"column": 2,
"row": 28
},
"sheet": "Sample List",
"required": 1,
"role": "Isopropanol",
"extension_of_life": 0
},
{
"expiry": {
"column": 4,
"row": 29
},
"lot": {
"column": 3,
"row": 29
},
"name": {
"column": 2,
"row": 29
},
"sheet": "Sample List",
"required": 1,
"role": "Ethanol",
"extension_of_life": 0
},
{
"expiry": {
"column": 4,
"row": 32
},
"lot": {
"column": 3,
"row": 32
},
"name": {
"column": 2,
"row": 32
},
"sheet": "Sample List",
"required": 1,
"role": "Carrier RNA",
"extension_of_life": 0
}
],
"equipment roles": [
{
"static": 0,
"role": "Extractor",
"processes": [
"OBT_M4029_KFF_v1.0"
]
},
{
"static": 1,
"role": "Momentum - Extraction",
"processes": [
"Omega_MagBind_Universal_VarCol"
]
},
{
"static": 1,
"role": "Liquid Handler",
"processes": [
"Bacterial_Core_Submission"
]
}
]
}
}
]
}

View File

@@ -0,0 +1,310 @@
name: &NAME Viral Culture
# The defaults section helps us with some parsing and writing functions
defaults:
# The abbreviation gets put in the RSL-{abbreviation}-20XXXXXX-1 plate name
abbreviation: &ABBREV VE
# Details ignore tells us what to leave out of the details view.
# For basic submissions this is typically an empty list, i.e. '[]'
# To add entries, remove [] and add list entries preceded by dashes (see 'form_ignore' as an example)
details_ignore: []
# Form ignore tells us what to leave out of the form created when importing the submission.
# It is in list format. Each entry is preceded by a dash.
form_ignore:
- cost_centre
sample_type: Basic Sample
# The regex is used to identify a submission type by its file name, example: RSL-WW-20240924-1R1
# By default the regex will be programmatically constructed using the submission type name and abbreviation
# https://stackoverflow.com/a/23212501
regex: !regex [*NAME, *ABBREV]
# The info section holds a map of where submission info can be located in the submission form.
# For example, below, the 'comment' field is found on the 'Sample List' tab in column 2 of row 34.
# 'read' is a list of locations the info can be parsed from; 'write' is a list of locations it will be written to.
# By default, items in the 'read' list will be appended to the 'write' list.
# These fields are common to all submissions. Without programming changes, at present no new fields can be added.
info:
comment:
read:
- column: 2
row: 34
sheet: Sample List
write: []
contact:
read:
- column: 2
row: 4
sheet: Sample List
write: []
contact_phone:
read: []
write:
- column: 2
row: 5
sheet: Sample List
cost_centre:
read:
- column: 2
row: 6
sheet: Sample List
write: []
custom: {}
extraction_kit:
read:
- column: 4
row: 5
sheet: Sample List
write: []
rsl_plate_num:
read:
- column: 2
row: 13
sheet: Sample List
write: []
sample_count:
read:
- column: 4
row: 4
sheet: Sample List
write: []
signed_by:
read: []
write:
- column: 2
row: 15
sheet: Sample List
submission_category:
read:
- column: 4
row: 6
sheet: Sample List
write: []
submission_type:
read:
- column: 4
row: 3
sheet: Sample List
write: []
submitted_date:
read:
- column: 2
row: 3
sheet: Sample List
write: []
submitter_plate_num:
read:
- column: 2
row: 2
sheet: Sample List
write: []
submitting_lab:
read:
- column: 4
row: 2
sheet: Sample List
write: []
technician:
read:
- column: 2
row: 14
sheet: Sample List
write: []
kits:
- constant_cost: 0.0
kit_type:
name: MagMAX-96 Viral RNA Isolation Kit
equipment roles:
- role: Extractor
processes:
- OBT_M4029_KFF_v1.0
static: 0
- processes:
- Omega_MagBind_Universal_VarCol
role: Momentum - Extraction
static: 1
- processes:
- Bacterial_Core_Submission
role: Liquid Handler
static: 1
reagent roles:
- expiry:
column: 4
row: 19
extension_of_life: 0
lot:
column: 3
row: 19
name:
column: 2
row: 19
required: 1
role: Wash Solution 1 (MagMAX-96 Viral)
sheet: Sample List
- expiry:
column: 4
row: 20
extension_of_life: 0
lot:
column: 3
row: 20
name:
column: 2
row: 20
required: 1
role: Wash Solution 2 (MagMAX-96 Viral)
sheet: Sample List
- expiry:
column: 4
row: 21
extension_of_life: 0
lot:
column: 3
row: 21
name:
column: 2
row: 21
required: 1
role: Lysis/Binding Solution (MagMAX-96 Viral)
sheet: Sample List
- expiry:
column: 4
row: 22
extension_of_life: 0
lot:
column: 3
row: 22
name:
column: 2
row: 22
required: 1
role: RNA Binding Beads (MagMAX-96 Viral)
sheet: Sample List
- expiry:
column: 4
row: 23
extension_of_life: 0
lot:
column: 3
row: 23
name:
column: 2
row: 23
required: 1
role: Lysis/Binding Enhancer (MagMAX-96 Viral)
sheet: Sample List
- expiry:
column: 4
row: 24
extension_of_life: 0
lot:
column: 3
row: 24
name:
column: 2
row: 24
required: 0
role: Bacterial-Lysis Buffer
sheet: Sample List
- expiry:
column: 4
row: 25
extension_of_life: 0
lot:
column: 3
row: 25
name:
column: 2
row: 25
required: 1
role: Elution Buffer (MagMAX-96 Viral)
sheet: Sample List
- expiry:
column: 4
row: 30
extension_of_life: 0
lot:
column: 3
row: 30
name:
column: 2
row: 30
required: 0
role: Bacterial-Positive Control
sheet: Sample List
- expiry:
column: 4
row: 31
extension_of_life: 0
lot:
column: 3
row: 31
name:
column: 2
row: 31
required: 0
role: Bead Plate
sheet: Sample List
- expiry:
column: 4
row: 28
extension_of_life: 0
lot:
column: 3
row: 28
name:
column: 2
row: 28
required: 1
role: Isopropanol
sheet: Sample List
- expiry:
column: 4
row: 29
extension_of_life: 0
lot:
column: 3
row: 29
name:
column: 2
row: 29
required: 1
role: Ethanol
sheet: Sample List
- expiry:
column: 4
row: 32
extension_of_life: 0
lot:
column: 3
row: 32
name:
column: 2
row: 32
required: 1
role: Carrier RNA
sheet: Sample List
mutable_cost_column: 0.0
mutable_cost_sample: 0.0
samples:
lookup_table:
end_row: 132
merge_on_id: submitter_id
sample_columns:
column: 6
concentration: 4
organism: 3
row: 5
submitter_id: 2
sheet: Sample List
start_row: 37
plate_map:
end_column: 13
end_row: 14
sheet: Plate Map
start_column: 2
start_row: 7
orgs:
- name: IRVC-Genomics
cost_centre: xxx
contacts:
- name: Ruimin Gao
phone: (204) 789-5078
email: Ruimin.Gao@phac-aspc.gc.ca

View File

@@ -909,6 +909,15 @@ def remove_key_from_list_of_dicts(input: list, key: str) -> list:
return input
def yaml_regex_creator(loader, node):
"""Builds a submission type filename regex from a [name, abbreviation] YAML sequence."""
# Note: Add to import from json, NOT export yaml in app.
nodes = loader.construct_sequence(node)
name = nodes[0].replace(" ", "_")
abbr = nodes[1]
return rf"(?P<{name}>RSL(?:-|_)?{abbr}(?:-|_)?20\d{{2}}-?\d{{2}}-?\d{{2}}(?:(_|-)?\d?([^_0123456789\sA-QS-Z]|$)?R?\d?)?)"
ctx = get_config(None)
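For context, the !regex tag in the YAML template above only resolves if this constructor is registered on the loader before the file is read; a hedged sketch using PyYAML's standard registration API (registering on SafeLoader is an assumption about how the import path loads the file).

import yaml

# Register for import only, per the note above (not for the in-app export).
yaml.add_constructor("!regex", yaml_regex_creator, Loader=yaml.SafeLoader)

with open("viral_culture.yml") as f:   # path for illustration only
    template = yaml.safe_load(f)
# template["defaults"]["regex"] now holds the expanded pattern, e.g.
# "(?P<Viral_Culture>RSL(?:-|_)?VE(?:-|_)?20\d{2}-?\d{2}-?\d{2}...".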