Converted client manager to new Omni pydantic version.
@@ -1,5 +1,6 @@
|
||||
# 202504.01
|
||||
|
||||
- Added a method to back up submissions to xlsx (partial).
|
||||
- Added a checkbox to use all samples in the Concentrations tab (very slow).
|
||||
|
||||
# 202503.05
|
||||
|
||||
2
TODO.md
@@ -1,4 +1,4 @@
|
||||
- [ ] Change "Manage Organizations" to the Pydantic version.
|
||||
- [x] Change "Manage Organizations" to the Pydantic version.
|
||||
- [x] Can my "to_dict", "to_sub_dict", "to_pydantic" methods be rewritten as properties?
|
||||
- [ ] Stop displacing date on Irida controls and just do what Turnaround time does.
|
||||
- [x] Get Manager window working for KitType, maybe SubmissionType
|
||||
|
||||
@@ -60,28 +60,26 @@ class BaseClass(Base):
|
||||
try:
|
||||
return f"<{self.__class__.__name__}({self.name})>"
|
||||
except AttributeError:
|
||||
return f"<{self.__class__.__name__}(Unknown)>"
|
||||
|
||||
# @classproperty
|
||||
# def skip_on_edit(cls):
|
||||
# if "association" in cls.__name__.lower() or cls.__name__.lower() == "discount":
|
||||
# return True
|
||||
# else:
|
||||
# return False
|
||||
return f"<{self.__class__.__name__}(Name Unavailable)>"
|
||||
|
||||
@classproperty
|
||||
def aliases(cls):
|
||||
def aliases(cls) -> List[str]:
|
||||
"""
|
||||
List of other names this class might be known by.
|
||||
|
||||
Returns:
|
||||
List[str]: List of names
|
||||
"""
|
||||
return [cls.query_alias]
|
||||
|
||||
# @classproperty
|
||||
# def level(cls):
|
||||
# if "association" in cls.__name__.lower() or cls.__name__.lower() == "discount":
|
||||
# return 2
|
||||
# else:
|
||||
# return 1
|
||||
|
||||
@classproperty
|
||||
def query_alias(cls):
|
||||
def query_alias(cls) -> str:
|
||||
"""
|
||||
What to query this class as.
|
||||
|
||||
Returns:
|
||||
str: query name
|
||||
"""
|
||||
return cls.__name__.lower()
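For reference (not part of this commit), a minimal sketch of what these two classproperties yield for a concrete subclass, assuming an Organization model with no overrides:
# Hypothetical illustration of the defaults defined above.
assert Organization.query_alias == "organization"   # lowercased class name
assert Organization.aliases == ["organization"]     # defaults to [query_alias]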
|
||||
|
||||
@classmethod
|
||||
@@ -153,21 +151,23 @@ class BaseClass(Base):
|
||||
return dict(singles=singles)
|
||||
|
||||
@classmethod
|
||||
def find_regular_subclass(cls, name: str = "") -> Any:
|
||||
def find_regular_subclass(cls, name: str|None = None) -> Any:
|
||||
"""
|
||||
|
||||
Args:
|
||||
name (str): name of subclass of interest.
|
||||
|
||||
Returns:
|
||||
Any: Subclass of this object
|
||||
|
||||
Any: Subclass of this object.
|
||||
"""
|
||||
if name:
|
||||
if " " in name:
|
||||
search = name.title().replace(" ", "")
|
||||
else:
|
||||
search = name
|
||||
return next((item for item in cls.__subclasses__() if item.__name__ == search), cls)
|
||||
else:
|
||||
return cls.__subclasses__()
|
||||
|
||||
|
||||
@classmethod
|
||||
def fuzzy_search(cls, **kwargs) -> List[Any]:
|
||||
@@ -395,7 +395,8 @@ class BaseClass(Base):
|
||||
if check:
|
||||
logger.debug(f"Checking for subclass name.")
|
||||
self_value = self_value.name
|
||||
logger.debug(f"Checking self_value {self_value} of type {type(self_value)} against attribute {value} of type {type(value)}")
|
||||
logger.debug(
|
||||
f"Checking self_value {self_value} of type {type(self_value)} against attribute {value} of type {type(value)}")
|
||||
if self_value != value:
|
||||
output = False
|
||||
logger.debug(f"Value {key} is False, returning.")
|
||||
|
||||
@@ -425,17 +425,23 @@ class IridaControl(Control):
|
||||
kraken = self.kraken
|
||||
except TypeError:
|
||||
kraken = {}
|
||||
try:
|
||||
kraken_cnt_total = sum([item['kraken_count'] for item in kraken.values()])
|
||||
except AttributeError:
|
||||
kraken_cnt_total = 0
|
||||
try:
|
||||
new_kraken = [dict(name=key, kraken_count=value['kraken_count'],
|
||||
kraken_percent=f"{value['kraken_count'] / kraken_cnt_total:0.2%}",
|
||||
target=key in self.controltype.targets)
|
||||
for key, value in kraken.items()]
|
||||
new_kraken = sorted(new_kraken, key=itemgetter('kraken_count'), reverse=True)
|
||||
new_kraken = sorted(new_kraken, key=itemgetter('kraken_count'), reverse=True)[0:10]
|
||||
except (AttributeError, ZeroDivisionError):
|
||||
new_kraken = []
|
||||
output = dict(
|
||||
name=self.name,
|
||||
type=self.controltype.name,
|
||||
targets=", ".join(self.targets),
|
||||
kraken=new_kraken[0:10]
|
||||
kraken=new_kraken
|
||||
)
|
||||
return output
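As a worked example of the kraken_percent formatting above (illustrative numbers only):
# Illustrative only: kraken_count over kraken_cnt_total rendered as a percentage string.
kraken_cnt_total = 400
entry = {"kraken_count": 100}
print(f"{entry['kraken_count'] / kraken_cnt_total:0.2%}")  # prints "25.00%"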
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ from sqlalchemy.ext.hybrid import hybrid_property
|
||||
from sqlalchemy.orm import relationship, Query
|
||||
from . import Base, BaseClass
|
||||
from tools import check_authorization, setup_lookup, yaml_regex_creator
|
||||
from typing import List
|
||||
from typing import List, Tuple
|
||||
|
||||
logger = logging.getLogger(f"submissions.{__name__}")
|
||||
|
||||
@@ -123,6 +123,20 @@ class Organization(BaseClass):
|
||||
organ.contacts.append(cont)
|
||||
organ.save()
|
||||
|
||||
def to_omni(self, expand: bool = False):
|
||||
from backend.validators.omni_gui_objects import OmniOrganization
|
||||
if self.cost_centre:
|
||||
cost_centre = self.cost_centre
|
||||
else:
|
||||
cost_centre = "NA"
|
||||
if self.name:
|
||||
name = self.name
|
||||
else:
|
||||
name = "NA"
|
||||
return OmniOrganization(instance_object=self,
|
||||
name=name, cost_centre=cost_centre,
|
||||
contact=[item.to_omni() for item in self.contacts])
|
||||
|
||||
|
||||
class Contact(BaseClass):
|
||||
"""
|
||||
@@ -144,6 +158,20 @@ class Contact(BaseClass):
|
||||
def searchables(cls):
|
||||
return []
|
||||
|
||||
@classmethod
|
||||
def query_or_create(cls, **kwargs) -> Tuple[Contact, bool]:
|
||||
new = False
|
||||
disallowed = []
|
||||
sanitized_kwargs = {k: v for k, v in kwargs.items() if k not in disallowed}
|
||||
instance = cls.query(**sanitized_kwargs)
|
||||
if not instance or isinstance(instance, list):
|
||||
instance = cls()
|
||||
new = True
|
||||
for k, v in sanitized_kwargs.items():
|
||||
setattr(instance, k, v)
|
||||
logger.info(f"Instance from contact query or create: {instance}")
|
||||
return instance, new
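A short usage sketch of query_or_create (the contact details below are hypothetical):
# Sketch only: returns the matching Contact if one exists, otherwise a freshly built one.
contact, created = Contact.query_or_create(name="Jane Doe", email="jane.doe@example.com")
if created:
    contact.save()  # save() is assumed from BaseClass, as used elsewhere in this diff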
|
||||
|
||||
@classmethod
|
||||
@setup_lookup
|
||||
def query(cls,
|
||||
@@ -195,3 +223,22 @@ class Contact(BaseClass):
|
||||
def to_pydantic(self) -> "PydContact":
|
||||
from backend.validators import PydContact
|
||||
return PydContact(name=self.name, email=self.email, phone=self.phone)
|
||||
|
||||
def to_omni(self, expand: bool = False):
|
||||
from backend.validators.omni_gui_objects import OmniContact
|
||||
if self.email:
|
||||
email = self.email
|
||||
else:
|
||||
email = "NA"
|
||||
if self.name:
|
||||
name = self.name
|
||||
else:
|
||||
name = "NA"
|
||||
if self.phone:
|
||||
phone = self.phone
|
||||
else:
|
||||
phone = "NA"
|
||||
return OmniContact(instance_object=self,
|
||||
name=name, email=email,
|
||||
phone=phone)
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@ Models for the main submission and sample types.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import itertools
|
||||
import pickle
|
||||
from copy import deepcopy
|
||||
from getpass import getuser
|
||||
@@ -12,6 +13,8 @@ from zipfile import ZipFile, BadZipfile
|
||||
from tempfile import TemporaryDirectory, TemporaryFile
|
||||
from operator import itemgetter
|
||||
from pprint import pformat
|
||||
|
||||
from pandas import DataFrame
|
||||
from sqlalchemy.ext.hybrid import hybrid_property
|
||||
from . import BaseClass, Reagent, SubmissionType, KitType, Organization, Contact, LogMixin, SubmissionReagentAssociation
|
||||
from sqlalchemy import Column, String, TIMESTAMP, INTEGER, ForeignKey, JSON, FLOAT, case, func
|
||||
@@ -287,6 +290,7 @@ class BasicSubmission(BaseClass, LogMixin):
|
||||
Constructs dictionary used in submissions summary
|
||||
|
||||
Args:
|
||||
expand (bool, optional): indicates if generators to be expanded. Defaults to False.
|
||||
report (bool, optional): indicates if to be used for a report. Defaults to False.
|
||||
full_data (bool, optional): indicates if sample dicts to be constructed. Defaults to False.
|
||||
backup (bool, optional): passed to adjust_to_dict_samples. Defaults to False.
|
||||
@@ -393,6 +397,33 @@ class BasicSubmission(BaseClass, LogMixin):
|
||||
output["completed_date"] = self.completed_date
|
||||
return output
|
||||
|
||||
@classmethod
|
||||
def archive_submissions(cls, start_date: date | datetime | str | int | None = None,
|
||||
end_date: date | datetime | str | int | None = None,
|
||||
submissiontype: List[str] | None = None):
|
||||
if submissiontype:
|
||||
if isinstance(submissiontype, str):
|
||||
submissiontype = [submissiontype]
|
||||
query_out = []
|
||||
for sub_type in submissiontype:
|
||||
subs = cls.query(page_size=0, start_date=start_date, end_date=end_date, submissiontype=sub_type)
|
||||
# logger.debug(f"Sub results: {subs}")
|
||||
query_out.append(subs)
|
||||
query_out = list(itertools.chain.from_iterable(query_out))
|
||||
else:
|
||||
query_out = cls.query(page_size=0, start_date=start_date, end_date=end_date)
|
||||
records = []
|
||||
for sub in query_out:
|
||||
output = sub.to_dict(full_data=True)
|
||||
for k, v in output.items():
|
||||
if isinstance(v, types.GeneratorType):
|
||||
output[k] = [item for item in v]
|
||||
records.append(output)
|
||||
df = DataFrame.from_records(records)
|
||||
df.sort_values(by="id", inplace=True)
|
||||
df.set_index("id", inplace=True)
|
||||
return df
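A minimal usage sketch of the new archive_submissions helper, mirroring how App.submissions_to_excel (further down in this commit) consumes it; the date strings and submission type are hypothetical:
# Sketch only; assumes archive_submissions accepts the same date formats as cls.query.
from pandas import ExcelWriter

df = BasicSubmission.archive_submissions(start_date="2025-01-01", end_date="2025-03-31",
                                         submissiontype=["Wastewater"])
with ExcelWriter("submissions_archive.xlsx", engine="openpyxl") as writer:
    df.to_excel(writer)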
|
||||
|
||||
@property
|
||||
def column_count(self) -> int:
|
||||
"""
|
||||
@@ -590,61 +621,18 @@ class BasicSubmission(BaseClass, LogMixin):
|
||||
except AttributeError as e:
|
||||
logger.error(f"Could not set {self} attribute {key} to {value} due to \n{e}")
|
||||
|
||||
# def update_subsampassoc(self, sample: BasicSample, input_dict: dict) -> SubmissionSampleAssociation:
|
||||
# """
|
||||
# Update a joined submission sample association.
|
||||
#
|
||||
# Args:
|
||||
# sample (BasicSample): Associated sample.
|
||||
# input_dict (dict): values to be updated
|
||||
#
|
||||
# Returns:
|
||||
# SubmissionSampleAssociation: Updated association
|
||||
# """
|
||||
# try:
|
||||
# logger.debug(f"Searching for sample {sample} at column {input_dict['column']} and row {input_dict['row']}")
|
||||
# assoc = next((item for item in self.submission_sample_associations
|
||||
# if item.sample == sample and
|
||||
# item.row == input_dict['row'] and
|
||||
# item.column == input_dict['column']))
|
||||
# logger.debug(f"Found assoc {pformat(assoc.__dict__)}")
|
||||
# except StopIteration:
|
||||
# report = Report()
|
||||
# report.add_result(
|
||||
# Result(msg=f"Couldn't find submission sample association for {sample.submitter_id}", status="Warning"))
|
||||
# return report
|
||||
# for k, v in input_dict.items():
|
||||
# try:
|
||||
# # logger.debug(f"Setting assoc {assoc} with key {k} to value {v}")
|
||||
# setattr(assoc, k, v)
|
||||
# # NOTE: for some reason I don't think assoc.__setattr__(k, v) works here.
|
||||
# except AttributeError:
|
||||
# logger.error(f"Can't set {k} to {v}")
|
||||
# return assoc
|
||||
|
||||
def update_subsampassoc(self, assoc: SubmissionSampleAssociation, input_dict: dict) -> SubmissionSampleAssociation:
|
||||
"""
|
||||
Update a joined submission sample association.
|
||||
|
||||
Args:
|
||||
sample (BasicSample): Associated sample.
|
||||
input_dict (dict): values to be updated
|
||||
assoc (SubmissionSampleAssociation): Sample association to be updated.
|
||||
input_dict (dict): updated values to insert.
|
||||
|
||||
Returns:
|
||||
SubmissionSampleAssociation: Updated association
|
||||
"""
|
||||
# try:
|
||||
# logger.debug(f"Searching for sample {sample} at column {input_dict['column']} and row {input_dict['row']}")
|
||||
# assoc = next((item for item in self.submission_sample_associations
|
||||
# if item.sample == sample and
|
||||
# item.row == input_dict['row'] and
|
||||
# item.column == input_dict['column']))
|
||||
# logger.debug(f"Found assoc {pformat(assoc.__dict__)}")
|
||||
# except StopIteration:
|
||||
# report = Report()
|
||||
# report.add_result(
|
||||
# Result(msg=f"Couldn't find submission sample association for {sample.submitter_id}", status="Warning"))
|
||||
# return report
|
||||
# NOTE: No longer searches for association here, done in caller function
|
||||
for k, v in input_dict.items():
|
||||
try:
|
||||
# logger.debug(f"Setting assoc {assoc} with key {k} to value {v}")
|
||||
@@ -771,8 +759,8 @@ class BasicSubmission(BaseClass, LogMixin):
|
||||
return regex
|
||||
|
||||
@classmethod
|
||||
def find_polymorphic_subclass(cls, polymorphic_identity: str | SubmissionType | None = None,
|
||||
attrs: dict | None = None) -> BasicSubmission:
|
||||
def find_polymorphic_subclass(cls, polymorphic_identity: str | SubmissionType | list | None = None,
|
||||
attrs: dict | None = None) -> BasicSubmission | List[BasicSubmission]:
|
||||
"""
|
||||
Find subclass based on polymorphic identity or relevant attributes.
|
||||
|
||||
@@ -795,6 +783,13 @@ class BasicSubmission(BaseClass, LogMixin):
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Could not get polymorph {polymorphic_identity} of {cls} due to {e}, falling back to BasicSubmission")
|
||||
case list():
|
||||
output = []
|
||||
for identity in polymorphic_identity:
|
||||
if isinstance(identity, SubmissionType):
|
||||
identity = identity.name
|
||||
output.append(cls.__mapper__.polymorphic_map[identity].class_)
|
||||
return output
|
||||
case _:
|
||||
pass
|
||||
if attrs and any([not hasattr(cls, attr) for attr in attrs.keys()]):
|
||||
@@ -1855,18 +1850,6 @@ class Wastewater(BasicSubmission):
|
||||
result = assoc.save()
|
||||
if result:
|
||||
report.add_result(result)
|
||||
# for sample in self.samples:
|
||||
# logger.debug(f"Checking pcr_samples for {sample.rsl_number}, {sample.ww_full_sample_id}")
|
||||
# try:
|
||||
# # NOTE: Fix for ENs which have no rsl_number...
|
||||
# sample_dict = next(item for item in pcr_samples if item['sample'] == sample.rsl_number)
|
||||
# logger.debug(f"Found sample {sample_dict} at index {pcr_samples.index(sample_dict)}: {pcr_samples[pcr_samples.index(sample_dict)]}")
|
||||
# except StopIteration:
|
||||
# logger.error(f"Couldn't find {sample} in the Parser samples")
|
||||
# continue
|
||||
# assoc = self.update_subsampassoc(sample=sample, input_dict=sample_dict)
|
||||
# result = assoc.save()
|
||||
# report.add_result(result)
|
||||
controltype = ControlType.query(name="PCR Control")
|
||||
submitted_date = datetime.strptime(" ".join(parser.pcr_info['run_start_date/time'].split(" ")[:-1]),
|
||||
"%Y-%m-%d %I:%M:%S %p")
|
||||
@@ -1880,35 +1863,6 @@ class Wastewater(BasicSubmission):
|
||||
new_control.save()
|
||||
return report
|
||||
|
||||
# def update_subsampassoc(self, assoc: SubmissionSampleAssociation, input_dict: dict) -> SubmissionSampleAssociation:
|
||||
# """
|
||||
# Updates a joined submission sample association by assigning ct values to n1 or n2 based on alphabetical sorting.
|
||||
#
|
||||
# Args:
|
||||
# sample (BasicSample): Associated sample.
|
||||
# input_dict (dict): values to be updated
|
||||
#
|
||||
# Returns:
|
||||
# SubmissionSampleAssociation: Updated association
|
||||
# """
|
||||
# # logger.debug(f"Input dict: {pformat(input_dict)}")
|
||||
# #
|
||||
# assoc = super().update_subsampassoc(assoc=assoc, input_dict=input_dict)
|
||||
# # targets = {k: input_dict[k] for k in sorted(input_dict.keys()) if k.startswith("ct_")}
|
||||
# # assert 0 < len(targets) <= 2
|
||||
# # for k, v in targets.items():
|
||||
# # # logger.debug(f"Setting sample {sample} with key {k} to value {v}")
|
||||
# # # update_key = f"ct_n{i}"
|
||||
# # current_value = getattr(assoc, k)
|
||||
# # logger.debug(f"Current value came back as: {current_value}")
|
||||
# # if current_value is None:
|
||||
# # setattr(assoc, k, v)
|
||||
# # else:
|
||||
# # logger.debug(f"Have a value already, {current_value}... skipping.")
|
||||
# if assoc.column == 3:
|
||||
# logger.debug(f"Final association for association {assoc}:\n{pformat(assoc.__dict__)}")
|
||||
# return assoc
|
||||
|
||||
|
||||
class WastewaterArtic(BasicSubmission):
|
||||
"""
|
||||
@@ -2196,14 +2150,14 @@ class WastewaterArtic(BasicSubmission):
|
||||
# logger.debug(processed)
|
||||
# NOTE: Remove brackets at end
|
||||
processed = re.sub(r"\(.*\)$", "", processed).strip()
|
||||
logger.debug(processed)
|
||||
# logger.debug(processed)
|
||||
processed = re.sub(r"-RPT", "", processed, flags=re.IGNORECASE)
|
||||
# NOTE: Remove any non-R letters at end.
|
||||
processed = re.sub(r"[A-QS-Z]+\d*", "", processed)
|
||||
logger.debug(processed)
|
||||
# logger.debug(processed)
|
||||
# NOTE: Remove trailing '-' if any
|
||||
processed = processed.strip("-")
|
||||
logger.debug(processed)
|
||||
# logger.debug(processed)
|
||||
try:
|
||||
plate_num = re.search(r"\-\d{1}R?\d?$", processed).group()
|
||||
processed = rreplace(processed, plate_num, "")
|
||||
@@ -2221,20 +2175,20 @@ class WastewaterArtic(BasicSubmission):
|
||||
plate_num = re.sub(r"R", rf"R{repeat_num}", plate_num)
|
||||
except AttributeError:
|
||||
logger.error(f"Problem re-evaluating plate number for {processed}")
|
||||
logger.debug(processed)
|
||||
# logger.debug(processed)
|
||||
# NOTE: Remove any redundant -digits
|
||||
processed = re.sub(r"-\d$", "", processed)
|
||||
logger.debug(processed)
|
||||
# logger.debug(processed)
|
||||
day = re.search(r"\d{2}$", processed).group()
|
||||
processed = rreplace(processed, day, "")
|
||||
logger.debug(processed)
|
||||
# logger.debug(processed)
|
||||
month = re.search(r"\d{2}$", processed).group()
|
||||
processed = rreplace(processed, month, "")
|
||||
processed = processed.replace("--", "")
|
||||
logger.debug(processed)
|
||||
# logger.debug(processed)
|
||||
year = re.search(r'^(?:\d{2})?\d{2}', processed).group()
|
||||
year = f"20{year}"
|
||||
logger.debug(processed)
|
||||
# logger.debug(processed)
|
||||
final_en_name = f"PBS{year}{month}{day}-{plate_num}"
|
||||
return final_en_name
|
||||
|
||||
@@ -2881,7 +2835,7 @@ class BacterialCultureSample(BasicSample):
|
||||
sample['organism'] = self.organism
|
||||
try:
|
||||
sample['concentration'] = f"{float(self.concentration):.2f}"
|
||||
except TypeError:
|
||||
except (TypeError, ValueError):
|
||||
sample['concentration'] = 0.0
|
||||
if self.control is not None:
|
||||
sample['colour'] = [0, 128, 0]
|
||||
|
||||
@@ -593,3 +593,57 @@ class OmniKitType(BaseOmni):
|
||||
for item in kit.kit_reagentrole_associations:
|
||||
logger.debug(f"KTRRassoc: {item.__dict__}")
|
||||
return kit
|
||||
|
||||
|
||||
class OmniOrganization(BaseOmni):
|
||||
|
||||
class_object: ClassVar[Any] = Organization
|
||||
|
||||
name: str = Field(default="", description="property")
|
||||
cost_centre: str = Field(default="", description="property")
|
||||
# TODO: add in List[OmniContacts]
|
||||
contact: List[str] | List[OmniContact] = Field(default=[], description="relationship", title="Contact")
|
||||
|
||||
def __init__(self, instance_object: Any, **data):
|
||||
logger.debug(f"Incoming data: {data}")
|
||||
super().__init__(**data)
|
||||
self.instance_object = instance_object
|
||||
|
||||
def to_dataframe_dict(self):
|
||||
return dict(
|
||||
name=self.name,
|
||||
cost_centre=self.cost_centre,
|
||||
contacts=self.contact
|
||||
)
|
||||
|
||||
|
||||
class OmniContact(BaseOmni):
|
||||
|
||||
class_object: ClassVar[Any] = Contact
|
||||
|
||||
name: str = Field(default="", description="property")
|
||||
email: str = Field(default="", description="property")
|
||||
phone: str = Field(default="", description="property")
|
||||
|
||||
@property
|
||||
def list_searchables(self):
|
||||
return dict(name=self.name, email=self.email)
|
||||
|
||||
def __init__(self, instance_object: Any, **data):
|
||||
super().__init__(**data)
|
||||
self.instance_object = instance_object
|
||||
|
||||
def to_dataframe_dict(self):
|
||||
return dict(
|
||||
name=self.name,
|
||||
email=self.email,
|
||||
phone=self.phone
|
||||
)
|
||||
|
||||
def to_sql(self):
|
||||
contact, is_new = Contact.query_or_create(name=self.name, email=self.email, phone=self.phone)
|
||||
if is_new:
|
||||
logger.debug(f"New contact made: {contact}")
|
||||
else:
|
||||
logger.debug(f"Contact retrieved: {contact}")
|
||||
return contact
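Together with Contact.to_omni above, this gives a round trip between the SQLAlchemy model and its Omni wrapper; a minimal sketch with hypothetical values:
# Sketch only: Contact -> OmniContact -> back to the same Contact via query_or_create.
contact, _ = Contact.query_or_create(name="Jane Doe", email="jane.doe@example.com")
omni = contact.to_omni()       # OmniContact; missing fields are filled with "NA"
same_contact = omni.to_sql()   # query_or_create finds the existing record again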
|
||||
|
||||
@@ -9,17 +9,19 @@ from PyQt6.QtWidgets import (
|
||||
QHBoxLayout, QScrollArea, QMainWindow,
|
||||
QToolBar
|
||||
)
|
||||
# import pickle
|
||||
from PyQt6.QtGui import QAction
|
||||
from pathlib import Path
|
||||
from markdown import markdown
|
||||
from pandas import ExcelWriter
|
||||
from __init__ import project_path
|
||||
from backend import SubmissionType, Reagent, BasicSample, Organization, KitType
|
||||
from backend import SubmissionType, Reagent, BasicSample, Organization, KitType, BasicSubmission
|
||||
from tools import (
|
||||
check_if_app, Settings, Report, jinja_template_loading, check_authorization, page_size, is_power_user, under_development
|
||||
check_if_app, Settings, Report, jinja_template_loading, check_authorization, page_size, is_power_user,
|
||||
under_development
|
||||
)
|
||||
from .functions import select_save_file, select_open_file
|
||||
from .pop_ups import HTMLPop, AlertPop
|
||||
from .date_type_picker import DateTypePicker
|
||||
from .functions import select_save_file
|
||||
from .pop_ups import HTMLPop
|
||||
from .misc import Pagifier
|
||||
from .submission_table import SubmissionsSheet
|
||||
from .submission_widget import SubmissionFormContainer
|
||||
@@ -80,7 +82,7 @@ class App(QMainWindow):
|
||||
helpMenu.addAction(self.docsAction)
|
||||
helpMenu.addAction(self.githubAction)
|
||||
fileMenu.addAction(self.importAction)
|
||||
# fileMenu.addAction(self.yamlExportAction)
|
||||
fileMenu.addAction(self.archiveSubmissionsAction)
|
||||
# fileMenu.addAction(self.yamlImportAction)
|
||||
methodsMenu.addAction(self.searchSample)
|
||||
maintenanceMenu.addAction(self.joinExtractionAction)
|
||||
@@ -112,8 +114,7 @@ class App(QMainWindow):
|
||||
self.docsAction = QAction("&Docs", self)
|
||||
self.searchSample = QAction("Search Sample", self)
|
||||
self.githubAction = QAction("Github", self)
|
||||
# self.yamlExportAction = QAction("Export Type Example", self)
|
||||
# self.yamlImportAction = QAction("Import Type Template", self)
|
||||
self.archiveSubmissionsAction = QAction("Submissions to Excel", self)
|
||||
self.editReagentAction = QAction("Edit Reagent", self)
|
||||
self.manageOrgsAction = QAction("Manage Clients", self)
|
||||
self.manageKitsAction = QAction("Manage Kits", self)
|
||||
@@ -130,8 +131,7 @@ class App(QMainWindow):
|
||||
self.docsAction.triggered.connect(self.openDocs)
|
||||
self.searchSample.triggered.connect(self.runSampleSearch)
|
||||
self.githubAction.triggered.connect(self.openGithub)
|
||||
# self.yamlExportAction.triggered.connect(self.export_ST_yaml)
|
||||
# self.yamlImportAction.triggered.connect(self.import_ST_yaml)
|
||||
self.archiveSubmissionsAction.triggered.connect(self.submissions_to_excel)
|
||||
self.table_widget.pager.current_page.textChanged.connect(self.update_data)
|
||||
self.editReagentAction.triggered.connect(self.edit_reagent)
|
||||
self.manageOrgsAction.triggered.connect(self.manage_orgs)
|
||||
@@ -186,60 +186,18 @@ class App(QMainWindow):
|
||||
dlg = SearchBox(parent=self, object_type=Reagent, extras=[dict(name='Role', field="role")])
|
||||
dlg.exec()
|
||||
|
||||
# def export_ST_yaml(self):
|
||||
# """
|
||||
# Copies submission type yaml to file system for editing and re-import
|
||||
#
|
||||
# Returns:
|
||||
# None
|
||||
# """
|
||||
# if check_if_app():
|
||||
# yaml_path = Path(sys._MEIPASS).joinpath("files", "resources", "viral_culture.yml")
|
||||
# else:
|
||||
# yaml_path = project_path.joinpath("src", "submissions", "resources", "viral_culture.yml")
|
||||
# fname = select_save_file(obj=self, default_name="Submission Type Template.yml", extension="yml")
|
||||
# shutil.copyfile(yaml_path, fname)
|
||||
|
||||
# @check_authorization
|
||||
# def import_ST_yaml(self, *args, **kwargs):
|
||||
# """
|
||||
# Imports a yml form into a submission type.
|
||||
#
|
||||
# Args:
|
||||
# *args ():
|
||||
# **kwargs ():
|
||||
#
|
||||
# Returns:
|
||||
#
|
||||
# """
|
||||
# fname = select_open_file(obj=self, file_extension="yml")
|
||||
# if not fname:
|
||||
# logger.info(f"Import cancelled.")
|
||||
# return
|
||||
# ap = AlertPop(message="This function will proceed in the debug window.", status="Warning", owner=self)
|
||||
# ap.exec()
|
||||
# st = SubmissionType.import_from_json(filepath=fname)
|
||||
# if st:
|
||||
# # NOTE: Do not delete the print statement below.
|
||||
# choice = input("Save the above submission type? [y/N]: ")
|
||||
# if choice.lower() == "y":
|
||||
# pass
|
||||
# else:
|
||||
# logger.warning("Save of submission type cancelled.")
|
||||
|
||||
def update_data(self):
|
||||
self.table_widget.sub_wid.setData(page=self.table_widget.pager.page_anchor, page_size=page_size)
|
||||
|
||||
# TODO: Change this to the Pydantic version.
|
||||
def manage_orgs(self):
|
||||
from frontend.widgets.omni_manager_pydant import ManagerWindow as ManagerWindowPyd
|
||||
dlg = ManagerWindow(parent=self, object_type=Organization, extras=[], add_edit='edit', managers=set())
|
||||
# dlg = ManagerWindow(parent=self, object_type=Organization, extras=[], add_edit='edit', managers=set())
|
||||
dlg = ManagerWindowPyd(parent=self, object_type=Organization, extras=[], add_edit='edit', managers=set())
|
||||
if dlg.exec():
|
||||
new_org = dlg.parse_form()
|
||||
new_org.save()
|
||||
# logger.debug(new_org.__dict__)
|
||||
|
||||
@under_development
|
||||
def manage_kits(self, *args, **kwargs):
|
||||
from frontend.widgets.omni_manager_pydant import ManagerWindow as ManagerWindowPyd
|
||||
dlg = ManagerWindowPyd(parent=self, object_type=KitType, extras=[], add_edit='edit', managers=set())
|
||||
@@ -252,6 +210,17 @@ class App(QMainWindow):
|
||||
assert isinstance(sql, KitType)
|
||||
sql.save()
|
||||
|
||||
@under_development
|
||||
def submissions_to_excel(self, *args, **kwargs):
|
||||
dlg = DateTypePicker(self)
|
||||
if dlg.exec():
|
||||
output = dlg.parse_form()
|
||||
df = BasicSubmission.archive_submissions(**output)
|
||||
filepath = select_save_file(self, f"Submissions {output['start_date']}-{output['end_date']}", "xlsx")
|
||||
writer = ExcelWriter(filepath, "openpyxl")
|
||||
df.to_excel(writer)
|
||||
writer.close()
|
||||
|
||||
|
||||
class AddSubForm(QWidget):
|
||||
|
||||
|
||||
36
src/submissions/frontend/widgets/date_type_picker.py
Normal file
@@ -0,0 +1,36 @@
|
||||
from PyQt6.QtWidgets import (
|
||||
QLabel, QVBoxLayout, QDialog,
|
||||
QDialogButtonBox, QMessageBox, QComboBox
|
||||
)
|
||||
from .misc import CheckableComboBox, StartEndDatePicker
|
||||
from backend.db import SubmissionType
|
||||
|
||||
|
||||
class DateTypePicker(QDialog):
|
||||
|
||||
def __init__(self, parent):
|
||||
super().__init__(parent)
|
||||
self.layout = QVBoxLayout()
|
||||
self.setFixedWidth(500)
|
||||
self.typepicker = CheckableComboBox(parent=self)
|
||||
self.typepicker.setEditable(False)
|
||||
self.typepicker.addItem("Select", header=True)
|
||||
for sub_type_name in [sub_type.name for sub_type in SubmissionType.query()]:
self.typepicker.addItem(sub_type_name)
|
||||
self.datepicker = StartEndDatePicker(-180)
|
||||
self.layout.addWidget(self.typepicker)
|
||||
self.layout.addWidget(self.datepicker)
|
||||
QBtn = QDialogButtonBox.StandardButton.Ok | QDialogButtonBox.StandardButton.Cancel
|
||||
self.buttonBox = QDialogButtonBox(QBtn)
|
||||
self.buttonBox.accepted.connect(self.accept)
|
||||
self.buttonBox.rejected.connect(self.reject)
|
||||
self.layout.addWidget(self.buttonBox)
|
||||
self.setLayout(self.layout)
|
||||
|
||||
def parse_form(self):
|
||||
sub_types = [self.typepicker.itemText(i) for i in range(self.typepicker.count()) if self.typepicker.itemChecked(i)]
|
||||
start_date = self.datepicker.start_date.date().toPyDate()
|
||||
end_date = self.datepicker.end_date.date().toPyDate()
|
||||
return dict(submissiontype=sub_types, start_date=start_date, end_date=end_date)
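The keys returned here line up with the keyword arguments of BasicSubmission.archive_submissions, which is how App.submissions_to_excel (above) uses the dialog; a minimal sketch:
# Sketch only; parent_window is a hypothetical QMainWindow.
dlg = DateTypePicker(parent_window)
if dlg.exec():
    params = dlg.parse_form()  # {"submissiontype": [...], "start_date": date, "end_date": date}
    df = BasicSubmission.archive_submissions(**params)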
|
||||
|
||||
|
||||
@@ -269,6 +269,7 @@ class EditRelationship(QWidget):
|
||||
from backend.db import models
|
||||
super().__init__(parent)
|
||||
self.class_object = getattr(models, class_object)
|
||||
logger.debug(f"Attempt value: {value}")
|
||||
# logger.debug(f"Class object: {self.class_object}")
|
||||
self.setParent(parent)
|
||||
# logger.debug(f"Edit relationship class_object: {self.class_object}")
|
||||
@@ -388,7 +389,7 @@ class EditRelationship(QWidget):
|
||||
"""
|
||||
sets data in model
|
||||
"""
|
||||
# logger.debug(f"Self.data: {self.data}")
|
||||
logger.debug(f"Self.data: {self.data}")
|
||||
try:
|
||||
records = [item.to_dataframe_dict() for item in self.data]
|
||||
except AttributeError:
|
||||
|
||||
@@ -1,429 +0,0 @@
|
||||
{
|
||||
"name": "Viral Culture",
|
||||
"defaults": {
|
||||
"abbreviation": "VE",
|
||||
"details_ignore": [
|
||||
],
|
||||
"form_ignore": [
|
||||
"cost_centre"
|
||||
],
|
||||
"regex": "(?P<Viral_Culture>RSL(?:-|_)?VE(?:-|_)?20\\d{2}-?\\d{2}-?\\d{2}(?:(_|-)?\\d?([^_0123456789\\sA-QS-Z]|$)?R?\\d?)?)",
|
||||
"sample_type": "Basic Sample"
|
||||
},
|
||||
"info": {
|
||||
"comment": {
|
||||
"read": [
|
||||
{
|
||||
"column": 2,
|
||||
"row": 34,
|
||||
"sheet": "Sample List"
|
||||
}
|
||||
],
|
||||
"write": []
|
||||
},
|
||||
"contact": {
|
||||
"read": [
|
||||
{
|
||||
"column": 2,
|
||||
"row": 4,
|
||||
"sheet": "Sample List"
|
||||
}
|
||||
],
|
||||
"write": []
|
||||
},
|
||||
"contact_phone": {
|
||||
"read": [],
|
||||
"write": [
|
||||
{
|
||||
"column": 2,
|
||||
"row": 5,
|
||||
"sheet": "Sample List"
|
||||
}
|
||||
]
|
||||
},
|
||||
"cost_centre": {
|
||||
"read": [
|
||||
{
|
||||
"column": 2,
|
||||
"row": 6,
|
||||
"sheet": "Sample List"
|
||||
}
|
||||
],
|
||||
"write": []
|
||||
},
|
||||
"custom": {},
|
||||
"extraction_kit": {
|
||||
"read": [
|
||||
{
|
||||
"column": 4,
|
||||
"row": 5,
|
||||
"sheet": "Sample List"
|
||||
}
|
||||
],
|
||||
"write": []
|
||||
},
|
||||
"rsl_plate_num": {
|
||||
"read": [
|
||||
{
|
||||
"column": 2,
|
||||
"row": 13,
|
||||
"sheet": "Sample List"
|
||||
}
|
||||
],
|
||||
"write": []
|
||||
},
|
||||
"sample_count": {
|
||||
"read": [
|
||||
{
|
||||
"column": 4,
|
||||
"row": 4,
|
||||
"sheet": "Sample List"
|
||||
}
|
||||
],
|
||||
"write": []
|
||||
},
|
||||
"signed_by": {
|
||||
"read": [],
|
||||
"write": [
|
||||
{
|
||||
"column": 2,
|
||||
"row": 15,
|
||||
"sheet": "Sample List"
|
||||
}
|
||||
]
|
||||
},
|
||||
"submission_category": {
|
||||
"read": [
|
||||
{
|
||||
"column": 4,
|
||||
"row": 6,
|
||||
"sheet": "Sample List"
|
||||
}
|
||||
],
|
||||
"write": []
|
||||
},
|
||||
"submission_type": {
|
||||
"read": [
|
||||
{
|
||||
"column": 4,
|
||||
"row": 3,
|
||||
"sheet": "Sample List"
|
||||
}
|
||||
],
|
||||
"write": []
|
||||
},
|
||||
"submitted_date": {
|
||||
"read": [
|
||||
{
|
||||
"column": 2,
|
||||
"row": 3,
|
||||
"sheet": "Sample List"
|
||||
}
|
||||
],
|
||||
"write": []
|
||||
},
|
||||
"submitter_plate_num": {
|
||||
"read": [
|
||||
{
|
||||
"column": 2,
|
||||
"row": 2,
|
||||
"sheet": "Sample List"
|
||||
}
|
||||
],
|
||||
"write": []
|
||||
},
|
||||
"submitting_lab": {
|
||||
"read": [
|
||||
{
|
||||
"column": 4,
|
||||
"row": 2,
|
||||
"sheet": "Sample List"
|
||||
}
|
||||
],
|
||||
"write": []
|
||||
},
|
||||
"technician": {
|
||||
"read": [
|
||||
{
|
||||
"column": 2,
|
||||
"row": 14,
|
||||
"sheet": "Sample List"
|
||||
}
|
||||
],
|
||||
"write": []
|
||||
}
|
||||
},
|
||||
"samples": {
|
||||
"lookup_table": {
|
||||
"end_row": 132,
|
||||
"merge_on_id": "submitter_id",
|
||||
"sample_columns": {
|
||||
"column": 6,
|
||||
"concentration": 4,
|
||||
"organism": 3,
|
||||
"row": 5,
|
||||
"submitter_id": 2
|
||||
},
|
||||
"sheet": "Sample List",
|
||||
"start_row": 37
|
||||
},
|
||||
"plate_map": {
|
||||
"end_column": 13,
|
||||
"end_row": 14,
|
||||
"sheet": "Plate Map",
|
||||
"start_column": 2,
|
||||
"start_row": 7
|
||||
}
|
||||
},
|
||||
"kits": [
|
||||
{
|
||||
"constant_cost": 0.00,
|
||||
"mutable_cost_column": 0.00,
|
||||
"mutable_cost_sample": 0.00,
|
||||
"kit_type": {
|
||||
"name": "MagMAX-96 Viral RNA Isolation Kit",
|
||||
"reagent roles": [
|
||||
{
|
||||
"expiry": {
|
||||
"column": 4,
|
||||
"row": 19
|
||||
},
|
||||
"lot": {
|
||||
"column": 3,
|
||||
"row": 19
|
||||
},
|
||||
"name": {
|
||||
"column": 2,
|
||||
"row": 19
|
||||
},
|
||||
"sheet": "Sample List",
|
||||
"required": 1,
|
||||
"role": "Wash Solution 1 (MagMAX-96 Viral)",
|
||||
"extension_of_life": 0
|
||||
},
|
||||
{
|
||||
"expiry": {
|
||||
"column": 4,
|
||||
"row": 20
|
||||
},
|
||||
"lot": {
|
||||
"column": 3,
|
||||
"row": 20
|
||||
},
|
||||
"name": {
|
||||
"column": 2,
|
||||
"row": 20
|
||||
},
|
||||
"sheet": "Sample List",
|
||||
"required": 1,
|
||||
"role": "Wash Solution 2 (MagMAX-96 Viral)",
|
||||
"extension_of_life": 0
|
||||
},
|
||||
{
|
||||
"expiry": {
|
||||
"column": 4,
|
||||
"row": 21
|
||||
},
|
||||
"lot": {
|
||||
"column": 3,
|
||||
"row": 21
|
||||
},
|
||||
"name": {
|
||||
"column": 2,
|
||||
"row": 21
|
||||
},
|
||||
"sheet": "Sample List",
|
||||
"required": 1,
|
||||
"role": "Lysis/Binding Solution (MagMAX-96 Viral)",
|
||||
"extension_of_life": 0
|
||||
},
|
||||
{
|
||||
"expiry": {
|
||||
"column": 4,
|
||||
"row": 22
|
||||
},
|
||||
"lot": {
|
||||
"column": 3,
|
||||
"row": 22
|
||||
},
|
||||
"name": {
|
||||
"column": 2,
|
||||
"row": 22
|
||||
},
|
||||
"sheet": "Sample List",
|
||||
"required": 1,
|
||||
"role": "RNA Binding Beads (MagMAX-96 Viral)",
|
||||
"extension_of_life": 0
|
||||
},
|
||||
{
|
||||
"expiry": {
|
||||
"column": 4,
|
||||
"row": 23
|
||||
},
|
||||
"lot": {
|
||||
"column": 3,
|
||||
"row": 23
|
||||
},
|
||||
"name": {
|
||||
"column": 2,
|
||||
"row": 23
|
||||
},
|
||||
"sheet": "Sample List",
|
||||
"required": 1,
|
||||
"role": "Lysis/Binding Enhancer (MagMAX-96 Viral)",
|
||||
"extension_of_life": 0
|
||||
},
|
||||
{
|
||||
"expiry": {
|
||||
"column": 4,
|
||||
"row": 24
|
||||
},
|
||||
"lot": {
|
||||
"column": 3,
|
||||
"row": 24
|
||||
},
|
||||
"name": {
|
||||
"column": 2,
|
||||
"row": 24
|
||||
},
|
||||
"sheet": "Sample List",
|
||||
"required": 0,
|
||||
"role": "Bacterial-Lysis Buffer",
|
||||
"extension_of_life": 0
|
||||
},
|
||||
{
|
||||
"expiry": {
|
||||
"column": 4,
|
||||
"row": 25
|
||||
},
|
||||
"lot": {
|
||||
"column": 3,
|
||||
"row": 25
|
||||
},
|
||||
"name": {
|
||||
"column": 2,
|
||||
"row": 25
|
||||
},
|
||||
"sheet": "Sample List",
|
||||
"required": 1,
|
||||
"role": "Elution Buffer (MagMAX-96 Viral)",
|
||||
"extension_of_life": 0
|
||||
},
|
||||
{
|
||||
"expiry": {
|
||||
"column": 4,
|
||||
"row": 30
|
||||
},
|
||||
"lot": {
|
||||
"column": 3,
|
||||
"row": 30
|
||||
},
|
||||
"name": {
|
||||
"column": 2,
|
||||
"row": 30
|
||||
},
|
||||
"sheet": "Sample List",
|
||||
"required": 0,
|
||||
"role": "Bacterial-Positive Control",
|
||||
"extension_of_life": 0
|
||||
},
|
||||
{
|
||||
"expiry": {
|
||||
"column": 4,
|
||||
"row": 31
|
||||
},
|
||||
"lot": {
|
||||
"column": 3,
|
||||
"row": 31
|
||||
},
|
||||
"name": {
|
||||
"column": 2,
|
||||
"row": 31
|
||||
},
|
||||
"sheet": "Sample List",
|
||||
"required": 1,
|
||||
"role": "Bead Plate",
|
||||
"extension_of_life": 0
|
||||
},
|
||||
{
|
||||
"expiry": {
|
||||
"column": 4,
|
||||
"row": 28
|
||||
},
|
||||
"lot": {
|
||||
"column": 3,
|
||||
"row": 28
|
||||
},
|
||||
"name": {
|
||||
"column": 2,
|
||||
"row": 28
|
||||
},
|
||||
"sheet": "Sample List",
|
||||
"required": 1,
|
||||
"role": "Isopropanol",
|
||||
"extension_of_life": 0
|
||||
},
|
||||
{
|
||||
"expiry": {
|
||||
"column": 4,
|
||||
"row": 29
|
||||
},
|
||||
"lot": {
|
||||
"column": 3,
|
||||
"row": 29
|
||||
},
|
||||
"name": {
|
||||
"column": 2,
|
||||
"row": 29
|
||||
},
|
||||
"sheet": "Sample List",
|
||||
"required": 1,
|
||||
"role": "Ethanol",
|
||||
"extension_of_life": 0
|
||||
},
|
||||
{
|
||||
"expiry": {
|
||||
"column": 4,
|
||||
"row": 32
|
||||
},
|
||||
"lot": {
|
||||
"column": 3,
|
||||
"row": 32
|
||||
},
|
||||
"name": {
|
||||
"column": 2,
|
||||
"row": 32
|
||||
},
|
||||
"sheet": "Sample List",
|
||||
"required": 1,
|
||||
"role": "Carrier RNA",
|
||||
"extension_of_life": 0
|
||||
}
|
||||
],
|
||||
"equipment roles": [
|
||||
{
|
||||
"static": 0,
|
||||
"role": "Extractor",
|
||||
"processes": [
|
||||
"OBT_M4029_KFF_v1.0"
|
||||
]
|
||||
},
|
||||
{
|
||||
"static": 1,
|
||||
"role": "Momentum - Extraction",
|
||||
"processes": [
|
||||
"Omega_MagBind_Universal_VarCol"
|
||||
]
|
||||
},
|
||||
{
|
||||
"static": 1,
|
||||
"role": "Liquid Handler",
|
||||
"processes": [
|
||||
"Bacterial_Core_Submission"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,339 +0,0 @@
|
||||
name: &NAME Viral Culture
|
||||
# The defaults section helps us with some parsing and writing functions
|
||||
defaults:
|
||||
# The abbreviation gets put in the RSL-{abbreviation}-20XXXXXX-1 plate name
|
||||
abbreviation: &ABBREV VE
|
||||
# Details ignore tells us what to leave out of the details view.
|
||||
# For basic submissions this is typically an empty list or '[]'
|
||||
# To add entries, remove [] and add list entries preceded by dashes (see 'form_ignore' as an example)
|
||||
details_ignore: []
|
||||
# Form ignore tells us what to leave out of the form created when importing the submission.
|
||||
# It is in list format. Each entry is preceded by a dash.
|
||||
form_ignore:
|
||||
- cost_centre
|
||||
sample_type: Basic Sample
|
||||
# The regex is used to identify a submission type by its file name, example: RSL-WW-20240924-1R1
|
||||
# By default the regex will be programmatically constructed using the submission type name and abbreviation
|
||||
# https://stackoverflow.com/a/23212501
|
||||
regex: !regex [*NAME, *ABBREV]
|
||||
# The info section holds a map of where submission info can be located in the submission form.
|
||||
# For example, below the 'comment' field is found on the 'Sample List' tab in column 2 of row 34.
|
||||
# 'read' is a list of where the info can be parsed from, write is a list of where it will be written to.
|
||||
# By default, items in the 'read' list will be appended to the 'write' list.
|
||||
# These fields are common to all submissions. Without programming changes, at present no new fields can be added.
|
||||
info:
|
||||
comment:
|
||||
read:
|
||||
- column: 2
|
||||
row: 34
|
||||
sheet: Sample List
|
||||
write: []
|
||||
contact:
|
||||
read:
|
||||
- column: 2
|
||||
row: 4
|
||||
sheet: Sample List
|
||||
write: []
|
||||
contact_phone:
|
||||
read: []
|
||||
write:
|
||||
- column: 2
|
||||
row: 5
|
||||
sheet: Sample List
|
||||
cost_centre:
|
||||
read:
|
||||
- column: 2
|
||||
row: 6
|
||||
sheet: Sample List
|
||||
write: []
|
||||
# The 'custom' field is currently under development and will allow parsing, storage and writing of new fields in
|
||||
# generic submission types.
|
||||
# Examples of the types of fields are given below.
|
||||
custom: {
|
||||
# The 'cell' type will read a single cell from the excel sheet and write it to the same cell as well as
|
||||
# any specified in the write list.
|
||||
# "test cell": {
|
||||
# "read": {
|
||||
# "column": 3,
|
||||
# "row": 1,
|
||||
# "sheet": "Plate Map"
|
||||
# },
|
||||
# "type": "cell",
|
||||
# "write": [{
|
||||
# "column": 8,
|
||||
# "row": 8,
|
||||
# "sheet": "Sample List"
|
||||
# }]
|
||||
# },
|
||||
# The 'range' type will read a group of cells, store them as a list of values, row and column integers
|
||||
# and write to that range.
|
||||
# "test range": {
|
||||
# "sheet": "First Strand",
|
||||
# "start_row": 1,
|
||||
# "end_row":9,
|
||||
# "start_column": 1,
|
||||
# "end_column": 5
|
||||
# }
|
||||
}
|
||||
extraction_kit:
|
||||
read:
|
||||
- column: 4
|
||||
row: 5
|
||||
sheet: Sample List
|
||||
write: []
|
||||
rsl_plate_num:
|
||||
read:
|
||||
- column: 2
|
||||
row: 13
|
||||
sheet: Sample List
|
||||
write: []
|
||||
sample_count:
|
||||
read:
|
||||
- column: 4
|
||||
row: 4
|
||||
sheet: Sample List
|
||||
write: []
|
||||
signed_by:
|
||||
read: []
|
||||
write:
|
||||
- column: 2
|
||||
row: 15
|
||||
sheet: Sample List
|
||||
submission_category:
|
||||
read:
|
||||
- column: 4
|
||||
row: 6
|
||||
sheet: Sample List
|
||||
write: []
|
||||
submission_type:
|
||||
read:
|
||||
- column: 4
|
||||
row: 3
|
||||
sheet: Sample List
|
||||
write: []
|
||||
submitted_date:
|
||||
read:
|
||||
- column: 2
|
||||
row: 3
|
||||
sheet: Sample List
|
||||
write: []
|
||||
submitter_plate_num:
|
||||
read:
|
||||
- column: 2
|
||||
row: 2
|
||||
sheet: Sample List
|
||||
write: []
|
||||
submitting_lab:
|
||||
read:
|
||||
- column: 4
|
||||
row: 2
|
||||
sheet: Sample List
|
||||
write: []
|
||||
technician:
|
||||
read:
|
||||
- column: 2
|
||||
row: 14
|
||||
sheet: Sample List
|
||||
write: []
|
||||
# The 'kits' field holds reagents, equipment and tips information. It's a list of kit_type objects.
|
||||
kits:
|
||||
- kit_type:
|
||||
name: MagMAX-96 Viral RNA Isolation Kit
|
||||
equipment roles:
|
||||
- role: Extractor
|
||||
processes:
|
||||
- OBT_M4029_KFF_v1.0
|
||||
static: 0
|
||||
- processes:
|
||||
- Omega_MagBind_Universal_VarCol
|
||||
role: Momentum - Extraction
|
||||
static: 1
|
||||
- processes:
|
||||
- Bacterial_Core_Submission
|
||||
role: Liquid Handler
|
||||
static: 1
|
||||
reagent roles:
|
||||
- expiry:
|
||||
column: 4
|
||||
row: 19
|
||||
extension_of_life: 0
|
||||
lot:
|
||||
column: 3
|
||||
row: 19
|
||||
name:
|
||||
column: 2
|
||||
row: 19
|
||||
required: 1
|
||||
role: Wash Solution 1 (MagMAX-96 Viral)
|
||||
sheet: Sample List
|
||||
- expiry:
|
||||
column: 4
|
||||
row: 20
|
||||
extension_of_life: 0
|
||||
lot:
|
||||
column: 3
|
||||
row: 20
|
||||
name:
|
||||
column: 2
|
||||
row: 20
|
||||
required: 1
|
||||
role: Wash Solution 2 (MagMAX-96 Viral)
|
||||
sheet: Sample List
|
||||
- expiry:
|
||||
column: 4
|
||||
row: 21
|
||||
extension_of_life: 0
|
||||
lot:
|
||||
column: 3
|
||||
row: 21
|
||||
name:
|
||||
column: 2
|
||||
row: 21
|
||||
required: 1
|
||||
role: Lysis/Binding Solution (MagMAX-96 Viral)
|
||||
sheet: Sample List
|
||||
- expiry:
|
||||
column: 4
|
||||
row: 22
|
||||
extension_of_life: 0
|
||||
lot:
|
||||
column: 3
|
||||
row: 22
|
||||
name:
|
||||
column: 2
|
||||
row: 22
|
||||
required: 1
|
||||
role: RNA Binding Beads (MagMAX-96 Viral)
|
||||
sheet: Sample List
|
||||
- expiry:
|
||||
column: 4
|
||||
row: 23
|
||||
extension_of_life: 0
|
||||
lot:
|
||||
column: 3
|
||||
row: 23
|
||||
name:
|
||||
column: 2
|
||||
row: 23
|
||||
required: 1
|
||||
role: Lysis/Binding Enhancer (MagMAX-96 Viral)
|
||||
sheet: Sample List
|
||||
- expiry:
|
||||
column: 4
|
||||
row: 24
|
||||
extension_of_life: 0
|
||||
lot:
|
||||
column: 3
|
||||
row: 24
|
||||
name:
|
||||
column: 2
|
||||
row: 24
|
||||
required: 0
|
||||
role: Bacterial-Lysis Buffer
|
||||
sheet: Sample List
|
||||
- expiry:
|
||||
column: 4
|
||||
row: 25
|
||||
extension_of_life: 0
|
||||
lot:
|
||||
column: 3
|
||||
row: 25
|
||||
name:
|
||||
column: 2
|
||||
row: 25
|
||||
required: 1
|
||||
role: Elution Buffer (MagMAX-96 Viral)
|
||||
sheet: Sample List
|
||||
- expiry:
|
||||
column: 4
|
||||
row: 30
|
||||
extension_of_life: 0
|
||||
lot:
|
||||
column: 3
|
||||
row: 30
|
||||
name:
|
||||
column: 2
|
||||
row: 30
|
||||
required: 0
|
||||
role: Bacterial-Positive Control
|
||||
sheet: Sample List
|
||||
- expiry:
|
||||
column: 4
|
||||
row: 31
|
||||
extension_of_life: 0
|
||||
lot:
|
||||
column: 3
|
||||
row: 31
|
||||
name:
|
||||
column: 2
|
||||
row: 31
|
||||
required: 0
|
||||
role: Bead Plate
|
||||
sheet: Sample List
|
||||
- expiry:
|
||||
column: 4
|
||||
row: 28
|
||||
extension_of_life: 0
|
||||
lot:
|
||||
column: 3
|
||||
row: 28
|
||||
name:
|
||||
column: 2
|
||||
row: 28
|
||||
required: 1
|
||||
role: Isopropanol
|
||||
sheet: Sample List
|
||||
- expiry:
|
||||
column: 4
|
||||
row: 29
|
||||
extension_of_life: 0
|
||||
lot:
|
||||
column: 3
|
||||
row: 29
|
||||
name:
|
||||
column: 2
|
||||
row: 29
|
||||
required: 1
|
||||
role: Ethanol
|
||||
sheet: Sample List
|
||||
- expiry:
|
||||
column: 4
|
||||
row: 32
|
||||
extension_of_life: 0
|
||||
lot:
|
||||
column: 3
|
||||
row: 32
|
||||
name:
|
||||
column: 2
|
||||
row: 32
|
||||
required: 1
|
||||
role: Carrier RNA
|
||||
sheet: Sample List
|
||||
constant_cost: 0.0
|
||||
mutable_cost_column: 0.0
|
||||
mutable_cost_sample: 0.0
|
||||
samples:
|
||||
lookup_table:
|
||||
end_row: 132
|
||||
merge_on_id: submitter_id
|
||||
sample_columns:
|
||||
column: 6
|
||||
concentration: 4
|
||||
organism: 3
|
||||
row: 5
|
||||
submitter_id: 2
|
||||
sheet: Sample List
|
||||
start_row: 37
|
||||
plate_map:
|
||||
end_column: 13
|
||||
end_row: 14
|
||||
sheet: Plate Map
|
||||
start_column: 2
|
||||
start_row: 7
|
||||
orgs:
|
||||
- name: IRVC-Genomics
|
||||
cost_centre: xxx
|
||||
contacts:
|
||||
- name: Ruimin Gao
|
||||
phone: (204) 789-5078
|
||||
email: Ruimin.Gao@phac-aspc.gc.ca
|
||||