Various bug fixes and streamlining.
@@ -1,3 +1,8 @@
+## 202410.01
+
+- Reverted details exports from docx back to pdf.
+- Large scale speedups for control chart construction.
+
 ## 202409.05
 
 - Replaced some lists with generators to improve speed, added javascript to templates for click events.
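The "replaced some lists with generators" entry (and several hunks below, such as `BasicSubmission.parse_pcr`) follow one pattern: a method that used to build a list and return it now yields items as they are produced, and callers that still need a list materialize it explicitly. A minimal sketch of that pattern — the names here are simplified stand-ins, not the real parser:

```python
from typing import Generator


def parse_rows_as_list(rows: list[dict]) -> list[dict]:
    # Old shape: accumulate everything in memory, then return the list.
    samples = []
    for row in rows:
        samples.append({"sample": row["name"], "ct": row["ct"]})
    return samples


def parse_rows_as_generator(rows: list[dict]) -> Generator[dict, None, None]:
    # New shape: hand each record back as soon as it is parsed.
    for row in rows:
        yield {"sample": row["name"], "ct": row["ct"]}


# Callers that need repeated iteration or indexing materialize the generator,
# e.g. samples = list(parse_rows_as_generator(rows)).
```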
@@ -13,7 +13,7 @@ from submissions import __version__, __copyright__, __author__
 
 project = 'RSL Submissions'
 copyright = __copyright__
-author = f"{__author__['sub_type']} - {__author__['email']}"
+author = f"{__author__['name']} - {__author__['email']}"
 release = __version__
 
 # -- General configuration ---------------------------------------------------
@@ -27,7 +27,7 @@ from openpyxl.drawing.image import Image as OpenpyxlImage
 from tools import row_map, setup_lookup, jinja_template_loading, rreplace, row_keys, check_key_or_attr, Result, Report, \
     report_result
 from datetime import datetime, date
-from typing import List, Any, Tuple, Literal
+from typing import List, Any, Tuple, Literal, Generator
 from dateutil.parser import parse
 from pathlib import Path
 from jinja2.exceptions import TemplateNotFound
@@ -592,8 +592,8 @@ class BasicSubmission(BaseClass):
             case "ctx" | "csv" | "filepath" | "equipment":
                 return
             case item if item in self.jsons():
-                match value:
-                    case dict():
+                match key:
+                    case "custom":
                         existing = value
                     case _:
                         # logger.debug(f"Setting JSON attribute.")
@@ -611,9 +611,6 @@ class BasicSubmission(BaseClass):
                     existing += value
                 else:
                     if value is not None:
-                        if key == "custom":
-                            existing = value
-                        else:
                         existing.append(value)
                 self.__setattr__(key, existing)
                 flag_modified(self, key)
@@ -889,19 +886,6 @@ class BasicSubmission(BaseClass):
             ws.cell(row=item['row'], column=item['column'], value=item['value'])
         return input_excel
 
-    @classmethod
-    def custom_docx_writer(cls, input_dict: dict, tpl_obj=None):
-        """
-        Adds custom fields to docx template writer for exported details.
-
-        Args:
-            input_dict (dict): Incoming default dictionary.
-            tpl_obj (_type_, optional): Template object. Defaults to None.
-
-        Returns:
-            dict: Dictionary with information added.
-        """
-        return input_dict
 
     @classmethod
     def enforce_name(cls, instr: str, data: dict | None = {}) -> str:
@@ -962,7 +946,7 @@ class BasicSubmission(BaseClass):
         return re.sub(rf"{data['abbreviation']}(\d)", rf"{data['abbreviation']}-\1", outstr)
 
     @classmethod
-    def parse_pcr(cls, xl: Workbook, rsl_plate_num: str) -> list:
+    def parse_pcr(cls, xl: Workbook, rsl_plate_num: str) -> Generator[dict, None, None]:
         """
         Perform parsing of pcr info. Since most of our PC outputs are the same format, this should work for most.
 
@@ -977,7 +961,7 @@ class BasicSubmission(BaseClass):
         pcr_sample_map = cls.get_submission_type().sample_map['pcr_samples']
         # logger.debug(f'sample map: {pcr_sample_map}')
         main_sheet = xl[pcr_sample_map['main_sheet']]
-        samples = []
+        # samples = []
         fields = {k: v for k, v in pcr_sample_map.items() if k not in ['main_sheet', 'start_row']}
         for row in main_sheet.iter_rows(min_row=pcr_sample_map['start_row']):
             idx = row[0].row
@@ -985,8 +969,9 @@ class BasicSubmission(BaseClass):
             for k, v in fields.items():
                 sheet = xl[v['sheet']]
                 sample[k] = sheet.cell(row=idx, column=v['column']).value
-            samples.append(sample)
-        return samples
+            yield sample
+            # samples.append(sample)
+        # return samples
 
     @classmethod
     def filename_template(cls) -> str:
@@ -1533,17 +1518,17 @@ class Wastewater(BasicSubmission):
         return input_dict
 
     @classmethod
-    def parse_pcr(cls, xl: Workbook, rsl_plate_num: str) -> List[dict]:
+    def parse_pcr(cls, xl: Workbook, rsl_plate_num: str) -> Generator[dict, None, None]:
         """
         Parse specific to wastewater samples.
         """
-        samples = super().parse_pcr(xl=xl, rsl_plate_num=rsl_plate_num)
+        samples = [item for item in super().parse_pcr(xl=xl, rsl_plate_num=rsl_plate_num)]
         # logger.debug(f'Samples from parent pcr parser: {pformat(samples)}')
         output = []
         for sample in samples:
             # NOTE: remove '-{target}' from controls
             sample['sample'] = re.sub('-N\\d$', '', sample['sample'])
-            # NOTE: if sample is already in output skip
+            # # NOTE: if sample is already in output skip
             if sample['sample'] in [item['sample'] for item in output]:
                 logger.warning(f"Already have {sample['sample']}")
                 continue
@@ -1564,8 +1549,10 @@ class Wastewater(BasicSubmission):
                 del sample['assessment']
             except KeyError:
                 pass
+            # yield sample
             output.append(sample)
-        return output
+        for sample in output:
+            yield sample
 
     @classmethod
     def enforce_name(cls, instr: str, data: dict | None = {}) -> str:
@@ -1677,49 +1664,18 @@ class Wastewater(BasicSubmission):
             return report
         parser = PCRParser(filepath=fname)
         self.set_attribute("pcr_info", parser.pcr)
+        pcr_samples = [sample for sample in parser.samples]
         self.save(original=False)
         # logger.debug(f"Got {len(parser.samples)} samples to update!")
         # logger.debug(f"Parser samples: {parser.samples}")
         for sample in self.samples:
             # logger.debug(f"Running update on: {sample}")
             try:
-                sample_dict = next(item for item in parser.samples if item['sample'] == sample.rsl_number)
+                sample_dict = next(item for item in pcr_samples if item['sample'] == sample.rsl_number)
             except StopIteration:
                 continue
             self.update_subsampassoc(sample=sample, input_dict=sample_dict)
 
-    @classmethod
-    def custom_docx_writer(cls, input_dict: dict, tpl_obj=None) -> dict:
-        """
-        Adds custom fields to docx template writer for exported details. Extends parent.
-
-        Args:
-            input_dict (dict): Incoming default dictionary.
-            tpl_obj (_type_, optional): Template object. Defaults to None.
-
-        Returns:
-            dict: Dictionary with information added.
-        """
-        from backend.excel.writer import DocxWriter
-        input_dict = super().custom_docx_writer(input_dict)
-        well_24 = []
-        input_dict['samples'] = [item for item in input_dict['samples']]
-        samples_copy = deepcopy(input_dict['samples'])
-        for sample in sorted(samples_copy, key=itemgetter('column', 'row')):
-            try:
-                row = sample['source_row']
-            except KeyError:
-                continue
-            try:
-                column = sample['source_column']
-            except KeyError:
-                continue
-            copy = dict(submitter_id=sample['submitter_id'], row=row, column=column)
-            well_24.append(copy)
-        input_dict['origin_plate'] = [item for item in
-                                      DocxWriter.create_plate_map(sample_list=well_24, rows=4, columns=6)]
-        return input_dict
-
 
 class WastewaterArtic(BasicSubmission):
     """
@@ -2038,11 +1994,17 @@ class WastewaterArtic(BasicSubmission):
         """
         input_dict = super().custom_validation(pyd)
         # logger.debug(f"Incoming input_dict: {pformat(input_dict)}")
+        exclude_plates = [None, "", "none", "na"]
+        pyd.source_plates = [plate for plate in pyd.source_plates if plate['plate'].lower() not in exclude_plates]
         for sample in pyd.samples:
             # logger.debug(f"Sample: {sample}")
             if re.search(r"^NTC", sample.submitter_id):
-                sample.submitter_id = f"{sample.submitter_id}-WWG-{pyd.rsl_plate_num}"
-                # input_dict['csv'] = xl["hitpicks_csv_to_export"]
+                if isinstance(pyd.rsl_plate_num, dict):
+                    placeholder = pyd.rsl_plate_num['value']
+                else:
+                    placeholder = pyd.rsl_plate_num
+                sample.submitter_id = f"{sample.submitter_id}-WWG-{placeholder}"
+                # logger.debug(f"sample id: {sample.submitter_id}")
         return input_dict
 
     @classmethod
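The NTC branch above has to cope with `pyd.rsl_plate_num` arriving either as a bare string or as a `{'value': ..., 'missing': ...}` dict, the shape the parser uses for fields it had to guess. A reduced sketch of that unwrap — field shapes assumed from this diff, with a made-up plate number, not the full pydantic model:

```python
def unwrap(field):
    # Parsed fields arrive either as {"value": ..., "missing": ...} or as a bare value.
    if isinstance(field, dict):
        return field["value"]
    return field


print(unwrap({"value": "RSL-WW-20241001", "missing": False}))  # hypothetical plate number
print(unwrap("RSL-WW-20241001"))                               # same result for the bare form
```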
@@ -2075,6 +2037,7 @@ class WastewaterArtic(BasicSubmission):
         for iii, plate in enumerate(info['source_plates']['value']):
             # logger.debug(f"Plate: {plate}")
             row = start_row + iii
+            logger.debug(f"Writing {plate} to row {iii}")
             try:
                 worksheet.cell(row=row, column=source_plates_section['plate_column'], value=plate['plate'])
             except TypeError:
@@ -2209,30 +2172,6 @@ class WastewaterArtic(BasicSubmission):
                 zipf.write(img_path, self.gel_image)
         self.save()
 
-    @classmethod
-    def custom_docx_writer(cls, input_dict: dict, tpl_obj=None) -> dict:
-        """
-        Adds custom fields to docx template writer for exported details.
-
-        Args:
-            input_dict (dict): Incoming default dictionary/
-            tpl_obj (_type_, optional): Template object. Defaults to None.
-
-        Returns:
-            dict: Dictionary with information added.
-        """
-        input_dict = super().custom_docx_writer(input_dict)
-        # NOTE: if there's a gel image, extract it.
-        if check_key_or_attr(key='gel_image_path', interest=input_dict, check_none=True):
-            with ZipFile(cls.__directory_path__.joinpath("submission_imgs.zip")) as zipped:
-                img = zipped.read(input_dict['gel_image_path'])
-            with tempfile.TemporaryFile(mode="wb", suffix=".jpg", delete=False) as tmp:
-                tmp.write(img)
-            # logger.debug(f"Tempfile: {tmp.name}")
-            img = InlineImage(tpl_obj, image_descriptor=tmp.name, width=Inches(5.5)) #, width=5.5)#, height=400)
-            input_dict['gel_image'] = img
-        return input_dict
-
 
 # Sample Classes
 
@@ -2493,6 +2432,8 @@ class BasicSample(BaseClass):
                 model = cls.find_polymorphic_subclass(polymorphic_identity=sample_type)
             case BasicSample():
                 model = sample_type
+            case None:
+                model = cls
             case _:
                 model = cls.find_polymorphic_subclass(attrs=kwargs)
         # logger.debug(f"Length of kwargs: {len(kwargs)}")
@@ -2514,7 +2455,7 @@ class BasicSample(BaseClass):
         raise AttributeError(f"Delete not implemented for {self.__class__}")
 
     @classmethod
-    def get_searchables(cls):
+    def get_searchables(cls) -> List[dict]:
         """
         Delivers a list of fields that can be used in fuzzy search.
 
@@ -1,13 +1,15 @@
 '''
 Contains functions for generating summary reports
 '''
+from PyQt6.QtCore import QMarginsF
+from PyQt6.QtGui import QPageLayout, QPageSize
 from pandas import DataFrame, ExcelWriter
 import logging, re
 from pathlib import Path
 from datetime import date, timedelta
 from typing import List, Tuple, Any
 from backend.db.models import BasicSubmission
-from tools import jinja_template_loading, html_to_pdf, get_first_blank_df_row, \
+from tools import jinja_template_loading, get_first_blank_df_row, \
     row_map
 from PyQt6.QtWidgets import QWidget
 from openpyxl.worksheet.worksheet import Worksheet
@@ -99,11 +101,15 @@ class ReportMaker(object):
             filename = Path(filename)
         filename = filename.absolute()
         # NOTE: html_to_pdf doesn't function without a PyQt6 app
-        if isinstance(obj, QWidget):
-            logger.info(f"We're in PyQt environment, writing PDF to: {filename}")
-            html_to_pdf(html=self.html, output_file=filename)
-        else:
-            logger.info("Not in PyQt. Skipping PDF writing.")
+        # if isinstance(obj, QWidget):
+        #     logger.info(f"We're in PyQt environment, writing PDF to: {filename}")
+        #     page_layout = QPageLayout()
+        #     page_layout.setPageSize(QPageSize(QPageSize.PageSizeId.A4))
+        #     page_layout.setOrientation(QPageLayout.Orientation.Portrait)
+        #     page_layout.setMargins(QMarginsF(25, 25, 25, 25))
+        #     self.webview.page().printToPdf(fname.with_suffix(".pdf").__str__(), page_layout)
+        # else:
+        #     logger.info("Not in PyQt. Skipping PDF writing.")
         # logger.debug("Finished writing.")
         self.writer = ExcelWriter(filename.with_suffix(".xlsx"), engine='openpyxl')
         self.summary_df.to_excel(self.writer, sheet_name="Report")
@@ -3,9 +3,6 @@ contains writer objects for pushing values to submission sheet templates.
 """
 import logging
 from copy import copy
-from operator import itemgetter
-from pathlib import Path
-# from pathlib import Path
 from pprint import pformat
 from typing import List, Generator
 from openpyxl import load_workbook, Workbook
@@ -13,9 +10,6 @@ from backend.db.models import SubmissionType, KitType, BasicSubmission
 from backend.validators.pydant import PydSubmission
 from io import BytesIO
 from collections import OrderedDict
-from tools import jinja_template_loading
-from docxtpl import DocxTemplate
-from docx import Document
 
 logger = logging.getLogger(f"submissions.{__name__}")
 
@@ -147,7 +141,6 @@ class InfoWriter(object):
         Returns:
             dict: merged dictionary
         """
-        # output = {}
         for k, v in info_dict.items():
             if v is None:
                 continue
@@ -163,8 +156,6 @@ class InfoWriter(object):
             if len(dicto) > 0:
                 # output[k] = dicto
                 yield k, dicto
-        # logger.debug(f"Reconciled info: {pformat(output)}")
-        # return output
 
     def write_info(self) -> Workbook:
         """
@@ -217,7 +208,6 @@ class ReagentWriter(object):
         if isinstance(extraction_kit, str):
             kit_type = KitType.query(name=extraction_kit)
         reagent_map = {k: v for k, v in kit_type.construct_xl_map_for_use(submission_type)}
-        # self.reagents = {k: v for k, v in self.reconcile_map(reagent_list=reagent_list, reagent_map=reagent_map)}
         self.reagents = self.reconcile_map(reagent_list=reagent_list, reagent_map=reagent_map)
 
     def reconcile_map(self, reagent_list: List[dict], reagent_map: dict) -> Generator[dict, None, None]:
@@ -231,7 +221,6 @@ class ReagentWriter(object):
         Returns:
             List[dict]: merged dictionary
         """
-        # output = []
         for reagent in reagent_list:
             try:
                 mp_info = reagent_map[reagent['role']]
@@ -246,9 +235,7 @@ class ReagentWriter(object):
                 dicto = v
                 placeholder[k] = dicto
             placeholder['sheet'] = mp_info['sheet']
-            # output.append(placeholder)
             yield placeholder
-        # return output
 
     def write_reagents(self) -> Workbook:
         """
@@ -285,7 +272,6 @@ class SampleWriter(object):
         self.submission_type = submission_type
         self.xl = xl
         self.sample_map = submission_type.construct_sample_map()['lookup_table']
-        # self.samples = self.reconcile_map(sample_list)
         # NOTE: exclude any samples without a submission rank.
         samples = [item for item in self.reconcile_map(sample_list) if item['submission_rank'] > 0]
         self.samples = sorted(samples, key=lambda k: k['submission_rank'])
@@ -300,7 +286,6 @@ class SampleWriter(object):
         Returns:
             List[dict]: List of merged dictionaries
         """
-        # output = []
         multiples = ['row', 'column', 'assoc_id', 'submission_rank']
         for sample in sample_list:
             # logger.debug(f"Writing sample: {sample}")
@@ -311,7 +296,6 @@ class SampleWriter(object):
                     continue
                 new[k] = v
             yield new
-        # return sorted(output, key=lambda k: k['submission_rank'])
 
     def write_samples(self) -> Workbook:
         """
@@ -325,6 +309,11 @@ class SampleWriter(object):
         for sample in self.samples:
             row = self.sample_map['start_row'] + (sample['submission_rank'] - 1)
             for k, v in sample.items():
+                if isinstance(v, dict):
+                    try:
+                        v = v['value']
+                    except KeyError:
+                        logger.error(f"Cant convert {v} to single string.")
                 try:
                     column = columns[k]
                 except KeyError:
@@ -363,7 +352,6 @@ class EquipmentWriter(object):
         Returns:
             List[dict]: List of merged dictionaries
         """
-        # output = []
         if equipment_list is None:
             return
         for ii, equipment in enumerate(equipment_list, start=1):
@@ -388,10 +376,7 @@ class EquipmentWriter(object):
                 placeholder['sheet'] = mp_info['sheet']
             except KeyError:
                 placeholder['sheet'] = "Equipment"
-            # logger.debug(f"Final output of {equipment['role']} : {placeholder}")
             yield placeholder
-            # output.append(placeholder)
-        # return output
 
     def write_equipment(self) -> Workbook:
         """
@@ -452,19 +437,19 @@ class TipWriter(object):
         Returns:
             List[dict]: List of merged dictionaries
         """
-        # output = []
         if tips_list is None:
             return
         for ii, tips in enumerate(tips_list, start=1):
-            mp_info = tips_map[tips['role']]
+            # mp_info = tips_map[tips['role']]
+            mp_info = tips_map[tips.role]
             # logger.debug(f"{tips['role']} map: {mp_info}")
-            placeholder = copy(tips)
+            placeholder = {}
             if mp_info == {}:
-                for jj, (k, v) in enumerate(tips.items(), start=1):
+                for jj, (k, v) in enumerate(tips.__dict__.items(), start=1):
                     dicto = dict(value=v, row=ii, column=jj)
                     placeholder[k] = dicto
             else:
-                for jj, (k, v) in enumerate(tips.items(), start=1):
+                for jj, (k, v) in enumerate(tips.__dict__.items(), start=1):
                     try:
                         dicto = dict(value=v, row=mp_info[k]['row'], column=mp_info[k]['column'])
                     except KeyError as e:
@@ -477,8 +462,6 @@ class TipWriter(object):
                 placeholder['sheet'] = "Tips"
             # logger.debug(f"Final output of {tips['role']} : {placeholder}")
             yield placeholder
-            # output.append(placeholder)
-        # return output
 
     def write_tips(self) -> Workbook:
         """
@@ -507,72 +490,3 @@ class TipWriter(object):
                     logger.error(f"Couldn't write to {tips['sheet']}, row: {v['row']}, column: {v['column']}")
                     logger.error(e)
         return self.xl
-
-
-class DocxWriter(object):
-    """
-    Object to render
-    """
-
-    def __init__(self, base_dict: dict):
-        """
-        Args:
-            base_dict (dict): dictionary of info to be written to template.
-        """
-        logger.debug(f"Incoming base dict: {pformat(base_dict)}")
-        self.sub_obj = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=base_dict['submission_type'])
-        env = jinja_template_loading()
-        temp_name = f"{base_dict['submission_type'].replace(' ', '').lower()}_subdocument.docx"
-        path = Path(env.loader.__getattribute__("searchpath")[0])
-        main_template = path.joinpath("basicsubmission_document.docx")
-        subdocument = path.joinpath(temp_name)
-        if subdocument.exists():
-            main_template = self.create_merged_template(main_template, subdocument)
-        self.template = DocxTemplate(main_template)
-        base_dict['platemap'] = [item for item in self.create_plate_map(base_dict['samples'], rows=8, columns=12)]
-        # logger.debug(pformat(base_dict['platemap']))
-        try:
-            base_dict['excluded'] += ["platemap"]
-        except KeyError:
-            base_dict['excluded'] = ["platemap"]
-        base_dict = self.sub_obj.custom_docx_writer(base_dict, tpl_obj=self.template)
-        # logger.debug(f"Base dict: {pformat(base_dict)}")
-        self.template.render({"sub": base_dict})
-
-    @classmethod
-    def create_plate_map(self, sample_list: List[dict], rows: int = 0, columns: int = 0) -> List[list]:
-        sample_list = sorted(sample_list, key=itemgetter('column', 'row'))
-        # NOTE if rows or samples is default, set to maximum value in sample list
-        if rows == 0:
-            rows = max([sample['row'] for sample in sample_list])
-        if columns == 0:
-            columns = max([sample['column'] for sample in sample_list])
-        for row in range(0, rows):
-            # NOTE: Create a list with length equal to columns length, padding with '' where necessary
-            contents = [next((item['submitter_id'] for item in sample_list if item['row'] == row + 1 and
-                              item['column'] == column + 1), '') for column in range(0, columns)]
-            yield contents
-
-    def create_merged_template(self, *args) -> BytesIO:
-        """
-        Appends submission specific information
-
-        Returns:
-            BytesIO: Merged docx template
-        """
-        merged_document = Document()
-        output = BytesIO()
-        for index, file in enumerate(args):
-            sub_doc = Document(file)
-            # Don't add a page break if you've reached the last file.
-            # if index < len(args) - 1:
-            #     sub_doc.add_page_break()
-            for element in sub_doc.element.body:
-                merged_document.element.body.append(element)
-        merged_document.save(output)
-        return output
-
-    def save(self, filename: Path | str):
-        if isinstance(filename, str):
-            filename = Path(filename)
-        self.template.save(filename)
@@ -26,7 +26,7 @@ class RSLNamer(object):
         if self.submission_type is None:
             # logger.debug("Creating submission type because none exists")
             self.submission_type = self.retrieve_submission_type(filename=filename)
-        logger.debug(f"got submission type: {self.submission_type}")
+        logger.info(f"got submission type: {self.submission_type}")
         if self.submission_type is not None:
             # logger.debug("Retrieving BasicSubmission subclass")
             self.sub_object = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type)
@@ -48,36 +48,41 @@ class RSLNamer(object):
         Returns:
             str: parsed submission type
         """
-        match filename:
-            case Path():
+        def st_from_path(filename:Path) -> str:
             logger.debug(f"Using path method for {filename}.")
             if filename.exists():
                 wb = load_workbook(filename)
                 try:
-                    submission_type = [item.strip().title() for item in wb.properties.category.split(";")][0]
-                except AttributeError:
-                    try:
+                    # NOTE: Gets first category in the metadata.
+                    submission_type = next(item.strip().title() for item in wb.properties.category.split(";"))
+                except (StopIteration, AttributeError):
                     sts = {item.name: item.get_template_file_sheets() for item in SubmissionType.query()}
-                    for k, v in sts.items():
-                        # This gets the *first* submission type that matches the sheet names in the workbook
-                        if wb.sheetnames == v:
-                            submission_type = k.title()
-                            break
-                    except:
-                        # On failure recurse using filename as string for string method
+                    try:
+                        submission_type = next(k.title() for k,v in sts.items() if wb.sheetnames==v)
+                    except StopIteration:
+                        # NOTE: On failure recurse using filename as string for string method
                         submission_type = cls.retrieve_submission_type(filename=filename.stem.__str__())
             else:
                 submission_type = cls.retrieve_submission_type(filename=filename.stem.__str__())
-            case str():
+            return submission_type
+
+        def st_from_str(filename:str) -> str:
             regex = BasicSubmission.construct_regex()
             logger.debug(f"Using string method for {filename}.")
             logger.debug(f"Using regex: {regex}")
             m = regex.search(filename)
+            print(m)
             try:
                 submission_type = m.lastgroup
                 logger.debug(f"Got submission type: {submission_type}")
             except AttributeError as e:
+                submission_type = None
                 logger.critical(f"No submission type found or submission type found!: {e}")
+            return submission_type
+
+        match filename:
+            case Path():
+                submission_type = st_from_path(filename=filename)
+            case str():
+                submission_type = st_from_str(filename=filename)
             case _:
                 submission_type = None
         try:
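The refactor above replaces the indexed list comprehension and the manual `for`/`break` search with `next()` over generator expressions; `next()` raises `StopIteration` when nothing matches, which is why the new code catches it alongside `AttributeError`. A small, self-contained illustration with made-up sheet names (not from the repository):

```python
sts = {
    "Wastewater": ["Sample List", "Enrichment"],
    "Bacterial Culture": ["Plate Map"],
}
sheetnames = ["Plate Map"]
try:
    # First submission type whose template sheets match the workbook's sheets.
    submission_type = next(k.title() for k, v in sts.items() if v == sheetnames)
except StopIteration:
    submission_type = None  # fall back, e.g. to the filename-based method
print(submission_type)  # Bacterial Culture
```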
@@ -93,6 +98,7 @@ class RSLNamer(object):
                 message="Please select submission type from list below.", obj_type=SubmissionType)
             if dlg.exec():
                 submission_type = dlg.parse_form()
+                print(submission_type)
         submission_type = submission_type.replace("_", " ")
         return submission_type
 
@@ -9,7 +9,6 @@ from datetime import date, datetime, timedelta
 from dateutil.parser import parse
 from dateutil.parser import ParserError
 from typing import List, Tuple, Literal
-from types import GeneratorType
 from . import RSLNamer
 from pathlib import Path
 from tools import check_not_nan, convert_nans_to_nones, Report, Result
@@ -49,7 +48,6 @@ class PydReagent(BaseModel):
     def rescue_type_with_lookup(cls, value, values):
         if value is None and values.data['lot'] is not None:
             try:
-                # return lookup_reagents(ctx=values.data['ctx'], lot_number=values.data['lot']).name
                 return Reagent.query(lot_number=values.data['lot'].name)
             except AttributeError:
                 return value
@@ -222,7 +220,8 @@ class PydSample(BaseModel, extra='allow'):
         fields = list(self.model_fields.keys()) + list(self.model_extra.keys())
         return {k: getattr(self, k) for k in fields}
 
-    def toSQL(self, submission: BasicSubmission | str = None) -> Tuple[BasicSample, Result]:
+    def toSQL(self, submission: BasicSubmission | str = None) -> Tuple[
+        BasicSample, List[SubmissionSampleAssociation], Result | None]:
         """
         Converts this instance into a backend.db.models.submissions.Sample object
 
@@ -238,6 +237,7 @@ class PydSample(BaseModel, extra='allow'):
         instance = BasicSample.query_or_create(sample_type=self.sample_type, submitter_id=self.submitter_id)
         for key, value in self.__dict__.items():
             match key:
+                # NOTE: row, column go in the association
                 case "row" | "column":
                     continue
                 case _:
@@ -259,7 +259,6 @@ class PydSample(BaseModel, extra='allow'):
                 **self.model_extra)
             # logger.debug(f"Using submission_sample_association: {association}")
             try:
-                # instance.sample_submission_associations.append(association)
                 out_associations.append(association)
             except IntegrityError as e:
                 logger.error(f"Could not attach submission sample association due to: {e}")
@@ -316,10 +315,10 @@ class PydEquipment(BaseModel, extra='ignore'):
     def make_empty_list(cls, value):
         # logger.debug(f"Pydantic value: {value}")
         value = convert_nans_to_nones(value)
-        if value is None:
-            value = ['']
-        if len(value) == 0:
+        if not value:
             value = ['']
+        # if len(value) == 0:
+        #     value = ['']
         try:
             value = [item.strip() for item in value]
         except AttributeError:
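The validator change above folds the separate `None` and empty-list checks into one truthiness test: `not value` is true for `None`, `[]` and `""` alike, so both old branches collapse into one. A quick check of the equivalence (toy values, not real equipment records):

```python
for value in (None, [], ["MALDI-TOF "]):
    if not value:  # covers both None and an empty list
        value = ['']
    print([item.strip() for item in value])
```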
@@ -337,7 +336,7 @@ class PydEquipment(BaseModel, extra='ignore'):
             Tuple[Equipment, SubmissionEquipmentAssociation]: SQL objects
         """
         if isinstance(submission, str):
-            logger.info(f"Got string, querying {submission}")
+            # logger.debug(f"Got string, querying {submission}")
             submission = BasicSubmission.query(rsl_number=submission)
         equipment = Equipment.query(asset_number=self.asset_number)
         if equipment is None:
@@ -409,7 +408,7 @@ class PydSubmission(BaseModel, extra='allow'):
         if isinstance(value, dict):
             value = value['value']
         if isinstance(value, Generator):
-            logger.debug("We have a generator")
+            # logger.debug("We have a generator")
             return [PydTips(**tips) for tips in value]
         if not value:
             return []
@@ -466,7 +465,7 @@ class PydSubmission(BaseModel, extra='allow'):
                 return dict(value=datetime.fromordinal(datetime(1900, 1, 1).toordinal() + value['value'] - 2).date(),
                             missing=True)
             case str():
-                string = re.sub(r"(_|-)\d$", "", value['value'])
+                string = re.sub(r"(_|-)\d(R\d)?$", "", value['value'])
                 try:
                     output = dict(value=parse(string).date(), missing=True)
                 except ParserError as e:
@@ -568,6 +567,7 @@ class PydSubmission(BaseModel, extra='allow'):
             else:
                 raise ValueError(f"No extraction kit found.")
         if value is None:
+            # NOTE: Kit selection is done in the parser, so should not be necessary here.
             return dict(value=None, missing=True)
         return value
 
@@ -575,7 +575,7 @@ class PydSubmission(BaseModel, extra='allow'):
     @classmethod
     def make_submission_type(cls, value, values):
         if not isinstance(value, dict):
-            value = {"value": value}
+            value = dict(value=value)
         if check_not_nan(value['value']):
             value = value['value'].title()
         return dict(value=value, missing=False)
@@ -593,6 +593,8 @@ class PydSubmission(BaseModel, extra='allow'):
     @field_validator("submission_category")
     @classmethod
     def rescue_category(cls, value, values):
+        if isinstance(value['value'], str):
+            value['value'] = value['value'].title()
         if value['value'] not in ["Research", "Diagnostic", "Surveillance", "Validation"]:
             value['value'] = values.data['submission_type']['value']
         return value
@@ -600,18 +602,16 @@ class PydSubmission(BaseModel, extra='allow'):
     @field_validator("reagents", mode="before")
     @classmethod
     def expand_reagents(cls, value):
-        # print(f"\n{type(value)}\n")
         if isinstance(value, Generator):
-            logger.debug("We have a generator")
+            # logger.debug("We have a generator")
             return [PydReagent(**reagent) for reagent in value]
         return value
 
     @field_validator("samples", mode="before")
     @classmethod
     def expand_samples(cls, value):
-        # print(f"\n{type(value)}\n")
         if isinstance(value, Generator):
-            logger.debug("We have a generator")
+            # logger.debug("We have a generator")
             return [PydSample(**sample) for sample in value]
         return value
 
@@ -619,11 +619,10 @@ class PydSubmission(BaseModel, extra='allow'):
     @classmethod
     def assign_ids(cls, value):
         starting_id = SubmissionSampleAssociation.autoincrement_id()
-        output = []
        for iii, sample in enumerate(value, start=starting_id):
+            # NOTE: Why is this a list? Answer: to zip with the lists of rows and columns in case of multiple of the same sample.
             sample.assoc_id = [iii]
-            output.append(sample)
-        return output
+        return value
 
     @field_validator("cost_centre", mode="before")
     @classmethod
@@ -682,7 +681,6 @@ class PydSubmission(BaseModel, extra='allow'):
         if run_custom:
             self.submission_object.custom_validation(pyd=self)
-
 
     def set_attribute(self, key: str, value):
         """
         Better handling of attribute setting.
@@ -796,8 +794,6 @@ class PydSubmission(BaseModel, extra='allow'):
                 continue
             # logger.debug(f"Setting {key} to {value}")
             match key:
-                # case "custom":
-                #     instance.custom = value
                 case "reagents":
                     if report.results[0].code == 1:
                         instance.submission_reagent_associations = []
@@ -833,7 +829,6 @@ class PydSubmission(BaseModel, extra='allow'):
                     except AttributeError:
                         continue
                     if association is not None and association not in instance.submission_tips_associations:
-                        # association.save()
                         instance.submission_tips_associations.append(association)
                 case item if item in instance.jsons():
                     # logger.debug(f"{item} is a json.")
@@ -877,13 +872,6 @@ class PydSubmission(BaseModel, extra='allow'):
             instance.run_cost = instance.run_cost - sum(discounts)
         except Exception as e:
             logger.error(f"An unknown exception occurred when calculating discounts: {e}")
-        # We need to make sure there's a proper rsl plate number
-        # logger.debug(f"We've got a total cost of {instance.run_cost}")
-        # try:
-        #     logger.debug(f"Constructed instance: {instance}")
-        # except AttributeError as e:
-        #     logger.debug(f"Something went wrong constructing instance {self.rsl_plate_num}: {e}")
-        # logger.debug(f"Constructed submissions message: {msg}")
         return instance, report
 
     def to_form(self, parent: QWidget, disable: list | None = None):
@@ -1014,7 +1002,6 @@ class PydOrganization(BaseModel):
                     value = [item.to_sql() for item in getattr(self, field)]
                 case _:
                     value = getattr(self, field)
-            # instance.set_attribute(name=field, value=value)
             instance.__setattr__(name=field, value=value)
         return instance
 
@@ -1,9 +1,13 @@
 """
 Functions for constructing controls graphs using plotly.
 """
+from copy import deepcopy
+from pprint import pformat
+
 import plotly
 import plotly.express as px
 import pandas as pd
+from PyQt6.QtWidgets import QWidget
 from plotly.graph_objects import Figure
 import logging
 from tools import get_unique_values_in_df_column, divide_chunks
@@ -14,7 +18,7 @@ logger = logging.getLogger(f"submissions.{__name__}")
 
 class CustomFigure(Figure):
 
-    def __init__(self, df: pd.DataFrame, modes: list, ytitle: str | None = None):
+    def __init__(self, df: pd.DataFrame, modes: list, ytitle: str | None = None, parent: QWidget | None = None):
         super().__init__()
         self.construct_chart(df=df, modes=modes)
         self.generic_figure_markers(modes=modes, ytitle=ytitle)
@@ -140,7 +144,7 @@ class CustomFigure(Figure):
                 {"yaxis.title.text": mode},
             ])
 
-    def save_figure(self, group_name: str = "plotly_output"):
+    def save_figure(self, group_name: str = "plotly_output", parent:QWidget|None=None):
         """
         Writes plotly figure to html file.
 
@@ -150,12 +154,10 @@ class CustomFigure(Figure):
             fig (Figure): input figure object
             group_name (str): controltype
         """
-        output = select_save_file(None, default_name=group_name, extension="html")
-        with open(output, "w") as f:
-            try:
-                f.write(self.to_html())
-            except AttributeError:
-                logger.error(f"The following figure was a string: {self}")
+        output = select_save_file(obj=parent, default_name=group_name, extension="png")
+        self.write_image(output.absolute().__str__(), engine="kaleido")
 
     def to_html(self) -> str:
         """
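With this change the control-chart export goes through plotly's static-image path (the kaleido engine) instead of writing the figure's HTML. A minimal stand-alone sketch of that call — it assumes the `kaleido` package is installed and uses a throwaway figure rather than the real `CustomFigure`:

```python
import plotly.express as px

fig = px.line(x=[1, 2, 3], y=[4, 1, 7], title="demo")
# write_image() renders a static file; plotly hands the rendering off to kaleido.
fig.write_image("plotly_output.png", engine="kaleido")
```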
@@ -189,8 +189,8 @@ class App(QMainWindow):
         """
         month = date.today().strftime("%Y-%m")
         current_month_bak = Path(self.ctx.backup_path).joinpath(f"submissions_backup-{month}").resolve()
-        logger.debug(f"Here is the db directory: {self.ctx.database_path}")
-        logger.debug(f"Here is the backup directory: {self.ctx.backup_path}")
+        logger.info(f"Here is the db directory: {self.ctx.database_path}")
+        logger.info(f"Here is the backup directory: {self.ctx.backup_path}")
         match self.ctx.database_schema:
             case "sqlite":
                 db_path = self.ctx.database_path.joinpath(self.ctx.database_name).with_suffix(".db")
@@ -206,15 +206,17 @@ class App(QMainWindow):
                 current_month_bak = current_month_bak.with_suffix(".psql")
 
     def export_ST_yaml(self):
+        """
+        Copies submission type yaml to file system for editing and remport
+
+        Returns:
+            None
+        """
         if check_if_app():
             yaml_path = Path(sys._MEIPASS).joinpath("resources", "viral_culture.yml")
         else:
             yaml_path = project_path.joinpath("src", "submissions", "resources", "viral_culture.yml")
-        # with open(yaml_path, "r") as f:
-        #     data = yaml.safe_load(f)
         fname = select_save_file(obj=self, default_name="Submission Type Template.yml", extension="yml")
-        # with open(fname, "w") as f:
-        #     yaml.safe_dump(data=data, stream=f)
         shutil.copyfile(yaml_path, fname)
 
     @check_authorization
@@ -230,7 +232,6 @@ class App(QMainWindow):
         print(pformat(st.to_export_dict()))
         choice = input("Save the above submission type? [y/N]: ")
         if choice.lower() == "y":
-            # st.save()
             pass
         else:
             logger.warning("Save of submission type cancelled.")
@@ -2,12 +2,13 @@
 Handles display of control charts
 """
 import re
+import sys
 from datetime import timedelta
 from typing import Tuple
 from PyQt6.QtWebEngineWidgets import QWebEngineView
 from PyQt6.QtWidgets import (
     QWidget, QVBoxLayout, QComboBox, QHBoxLayout,
-    QDateEdit, QLabel, QSizePolicy
+    QDateEdit, QLabel, QSizePolicy, QPushButton
 )
 from PyQt6.QtCore import QSignalBlocker
 from backend.db import ControlType, Control
@@ -15,11 +16,11 @@ from PyQt6.QtCore import QDate, QSize
 import logging
 from pandas import DataFrame
 from tools import Report, Result, get_unique_values_in_df_column, Settings, report_result
-# from backend.excel.reports import convert_data_list_to_df
 from frontend.visualizations.control_charts import CustomFigure
 
 logger = logging.getLogger(f"submissions.{__name__}")
 
+
 class ControlsViewer(QWidget):
 
     def __init__(self, parent: QWidget) -> None:
@@ -29,7 +30,7 @@ class ControlsViewer(QWidget):
         self.report = Report()
         self.datepicker = ControlsDatePicker()
         self.webengineview = QWebEngineView()
-        # set tab2 layout
+        # NOTE: set tab2 layout
         self.layout = QVBoxLayout(self)
         self.control_typer = QComboBox()
         # NOTE: fetch types of controls
@@ -54,6 +55,10 @@ class ControlsViewer(QWidget):
         self.mode_typer.currentIndexChanged.connect(self.controls_getter)
         self.datepicker.start_date.dateChanged.connect(self.controls_getter)
         self.datepicker.end_date.dateChanged.connect(self.controls_getter)
+        self.datepicker.save_button.pressed.connect(self.save_chart_function)
+
+    def save_chart_function(self):
+        self.fig.save_figure(parent=self)
 
     def controls_getter(self):
         """
@@ -136,6 +141,7 @@ class ControlsViewer(QWidget):
         # NOTE: if no data found from query set fig to none for reporting in webview
         if controls is None:
             fig = None
+            self.datepicker.save_button.setEnabled(False)
         else:
             # NOTE: change each control to list of dictionaries
             data = [control.convert_by_mode(mode=self.mode) for control in controls]
@@ -153,8 +159,10 @@ class ControlsViewer(QWidget):
|
|||||||
title = f"{self.mode} - {self.subtype}"
|
title = f"{self.mode} - {self.subtype}"
|
||||||
# NOTE: send dataframe to chart maker
|
# NOTE: send dataframe to chart maker
|
||||||
df, modes = self.prep_df(ctx=self.app.ctx, df=df)
|
df, modes = self.prep_df(ctx=self.app.ctx, df=df)
|
||||||
fig = CustomFigure(df=df, ytitle=title, modes=modes)
|
fig = CustomFigure(df=df, ytitle=title, modes=modes, parent=self)
|
||||||
|
self.datepicker.save_button.setEnabled(True)
|
||||||
# logger.debug(f"Updating figure...")
|
# logger.debug(f"Updating figure...")
|
||||||
|
self.fig = fig
|
||||||
# NOTE: construct html for webview
|
# NOTE: construct html for webview
|
||||||
html = fig.to_html()
|
html = fig.to_html()
|
||||||
# logger.debug(f"The length of html code is: {len(html)}")
|
# logger.debug(f"The length of html code is: {len(html)}")
|
||||||
@@ -179,6 +187,11 @@ class ControlsViewer(QWidget):
|
|||||||
df = DataFrame.from_records(input_df)
|
df = DataFrame.from_records(input_df)
|
||||||
safe = ['name', 'submitted_date', 'genus', 'target']
|
safe = ['name', 'submitted_date', 'genus', 'target']
|
||||||
for column in df.columns:
|
for column in df.columns:
|
||||||
|
if column not in safe:
|
||||||
|
if self.subtype is not None and column != self.subtype:
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
safe.append(column)
|
||||||
if "percent" in column:
|
if "percent" in column:
|
||||||
# count_col = [item for item in df.columns if "count" in item][0]
|
# count_col = [item for item in df.columns if "count" in item][0]
|
||||||
try:
|
try:
|
||||||
@@ -187,9 +200,9 @@ class ControlsViewer(QWidget):
|
|||||||
continue
|
continue
|
||||||
# NOTE: The actual percentage from kraken was off due to exclusion of NaN, recalculating.
|
# NOTE: The actual percentage from kraken was off due to exclusion of NaN, recalculating.
|
||||||
df[column] = 100 * df[count_col] / df.groupby('name')[count_col].transform('sum')
|
df[column] = 100 * df[count_col] / df.groupby('name')[count_col].transform('sum')
|
||||||
if column not in safe:
|
logger.debug(df)
|
||||||
if self.subtype is not None and column != self.subtype:
|
logger.debug(safe)
|
||||||
del df[column]
|
df = df[[c for c in df.columns if c in safe]]
|
||||||
# NOTE: move date of sample submitted on same date as previous ahead one.
|
# NOTE: move date of sample submitted on same date as previous ahead one.
|
||||||
df = self.displace_date(df=df)
|
df = self.displace_date(df=df)
|
||||||
# NOTE: ad hoc method to make data labels more accurate.
|
# NOTE: ad hoc method to make data labels more accurate.
|
||||||
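The hunk above recalculates each percent column per sample because the kraken-reported percentages excluded NaN rows. A minimal standalone sketch of that renormalisation, with illustrative column and sample names:

    import pandas as pd

    toy = pd.DataFrame({
        "name": ["RSL-1", "RSL-1", "RSL-2"],      # illustrative sample names
        "target_count": [30, 70, 50],
    })
    # each row's percent is its count over the total count for the same sample name
    toy["target_percent"] = 100 * toy["target_count"] / toy.groupby("name")["target_count"].transform("sum")
    print(toy)   # RSL-1 rows become 30.0 and 70.0; the lone RSL-2 row becomes 100.0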
@@ -229,12 +242,13 @@ class ControlsViewer(QWidget):
|
|||||||
# NOTE: get submitted dates for each control
|
# NOTE: get submitted dates for each control
|
||||||
dict_list = [dict(name=item, date=df[df.name == item].iloc[0]['submitted_date']) for item in
|
dict_list = [dict(name=item, date=df[df.name == item].iloc[0]['submitted_date']) for item in
|
||||||
sorted(df['name'].unique())]
|
sorted(df['name'].unique())]
|
||||||
previous_dates = []
|
previous_dates = set()
|
||||||
for _, item in enumerate(dict_list):
|
# for _, item in enumerate(dict_list):
|
||||||
|
for item in dict_list:
|
||||||
df, previous_dates = self.check_date(df=df, item=item, previous_dates=previous_dates)
|
df, previous_dates = self.check_date(df=df, item=item, previous_dates=previous_dates)
|
||||||
return df
|
return df
|
||||||
|
|
||||||
def check_date(self, df: DataFrame, item: dict, previous_dates: list) -> Tuple[DataFrame, list]:
|
def check_date(self, df: DataFrame, item: dict, previous_dates: set) -> Tuple[DataFrame, set]:
|
||||||
"""
|
"""
|
||||||
Checks if an item's date is already present in df and adjusts df accordingly
|
Checks if an item's date is already present in df and adjusts df accordingly
|
||||||
|
|
||||||
@@ -250,7 +264,7 @@ class ControlsViewer(QWidget):
|
|||||||
check = item['date'] in previous_dates
|
check = item['date'] in previous_dates
|
||||||
except IndexError:
|
except IndexError:
|
||||||
check = False
|
check = False
|
||||||
previous_dates.append(item['date'])
|
previous_dates.add(item['date'])
|
||||||
if check:
|
if check:
|
||||||
# logger.debug(f"We found one! Increment date!\n\t{item['date']} to {item['date'] + timedelta(days=1)}")
|
# logger.debug(f"We found one! Increment date!\n\t{item['date']} to {item['date'] + timedelta(days=1)}")
|
||||||
# NOTE: get df locations where name == item name
|
# NOTE: get df locations where name == item name
|
||||||
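Switching previous_dates from a list to a set keeps the membership check cheap while the displacement logic stays the same: a control landing on an already-used date is pushed forward a day so chart points do not stack. A rough illustration of that idea (not the class method itself, names are made up):

    from datetime import date, timedelta

    items = [
        {"name": "EN-1", "date": date(2024, 9, 1)},   # illustrative control names
        {"name": "EN-2", "date": date(2024, 9, 1)},   # collides with EN-1
    ]
    previous_dates = set()
    for item in items:
        while item["date"] in previous_dates:         # displace until the date is unique
            item["date"] += timedelta(days=1)
        previous_dates.add(item["date"])
    print(items)   # EN-2 ends up on 2024-09-02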
@@ -273,7 +287,7 @@ class ControlsViewer(QWidget):
|
|||||||
df, previous_dates = self.check_date(df, item, previous_dates)
|
df, previous_dates = self.check_date(df, item, previous_dates)
|
||||||
return df, previous_dates
|
return df, previous_dates
|
||||||
|
|
||||||
def prep_df(self, ctx: Settings, df: DataFrame) -> DataFrame:
|
def prep_df(self, ctx: Settings, df: DataFrame) -> Tuple[DataFrame, list]:
|
||||||
"""
|
"""
|
||||||
Constructs figures based on parsed pandas dataframe.
|
Prepares the parsed pandas dataframe for figure construction and determines chart modes.
|
||||||
|
|
||||||
@@ -285,27 +299,17 @@ class ControlsViewer(QWidget):
|
|||||||
Returns:
|
Returns:
|
||||||
Figure: Plotly figure
|
Tuple[DataFrame, list]: Prepared dataframe and list of chart modes
|
||||||
"""
|
"""
|
||||||
# from backend.excel import drop_reruns_from_df
|
# NOTE: converts starred genera to normal and splits off list of starred
|
||||||
# converts starred genera to normal and splits off list of starred
|
|
||||||
genera = []
|
|
||||||
if df.empty:
|
if df.empty:
|
||||||
return None
|
return None
|
||||||
for item in df['genus'].to_list():
|
|
||||||
try:
|
|
||||||
if item[-1] == "*":
|
|
||||||
genera.append(item[-1])
|
|
||||||
else:
|
|
||||||
genera.append("")
|
|
||||||
except IndexError:
|
|
||||||
genera.append("")
|
|
||||||
df['genus'] = df['genus'].replace({'\*': ''}, regex=True).replace({"NaN": "Unknown"})
|
df['genera'] = [item[-1] if item and item[-1] == "*" else "" for item in df['genus'].to_list()]  # NOTE: derive genera before stripping "*"
|
||||||
df['genera'] = genera
|
df['genus'] = df['genus'].replace({r'\*': ''}, regex=True).replace({"NaN": "Unknown"})
|
||||||
# NOTE: remove original runs, using reruns if applicable
|
# NOTE: remove original runs, using reruns if applicable
|
||||||
df = self.drop_reruns_from_df(ctx=ctx, df=df)
|
df = self.drop_reruns_from_df(ctx=ctx, df=df)
|
||||||
# NOTE: sort by and exclude from
|
# NOTE: sort by and exclude from
|
||||||
sorts = ['submitted_date', "target", "genus"]
|
sorts = ['submitted_date', "target", "genus"]
|
||||||
exclude = ['name', 'genera']
|
exclude = ['name', 'genera']
|
||||||
modes = [item for item in df.columns if item not in sorts and item not in exclude] # and "_hashes" not in item]
|
modes = [item for item in df.columns if item not in sorts and item not in exclude]
|
||||||
# NOTE: Set descending for any columns that have "{mode}" in the header.
|
# NOTE: Sort descending on the "target" column, ascending on the others.
|
||||||
ascending = [False if item == "target" else True for item in sorts]
|
ascending = [False if item == "target" else True for item in sorts]
|
||||||
df = df.sort_values(by=sorts, ascending=ascending)
|
df = df.sort_values(by=sorts, ascending=ascending)
|
||||||
@@ -327,10 +331,12 @@ class ControlsViewer(QWidget):
|
|||||||
if 'rerun_regex' in ctx:
|
if 'rerun_regex' in ctx:
|
||||||
sample_names = get_unique_values_in_df_column(df, column_name="name")
|
sample_names = get_unique_values_in_df_column(df, column_name="name")
|
||||||
rerun_regex = re.compile(fr"{ctx.rerun_regex}")
|
rerun_regex = re.compile(fr"{ctx.rerun_regex}")
|
||||||
for sample in sample_names:
|
exclude = [re.sub(rerun_regex, "", sample) for sample in sample_names if rerun_regex.search(sample)]
|
||||||
if rerun_regex.search(sample):
|
df = df[~df["name"].isin(exclude)]
|
||||||
first_run = re.sub(rerun_regex, "", sample)
|
# for sample in sample_names:
|
||||||
df = df.drop(df[df.name == first_run].index)
|
# if rerun_regex.search(sample):
|
||||||
|
# first_run = re.sub(rerun_regex, "", sample)
|
||||||
|
# df = df.drop(df[df.name == first_run].index)
|
||||||
return df
|
return df
|
||||||
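The rewritten rerun filtering builds the list of superseded first-run names once and drops them in a single vectorised step. A small self-contained sketch, assuming a rerun suffix like "-R1" (the real pattern comes from ctx.rerun_regex):

    import re
    import pandas as pd

    df = pd.DataFrame({"name": ["EN-001", "EN-001-R1", "EN-002"], "value": [1, 2, 3]})
    rerun_regex = re.compile(r"-R\d$")    # assumed rerun suffix pattern
    sample_names = df["name"].unique()
    exclude = [rerun_regex.sub("", s) for s in sample_names if rerun_regex.search(s)]
    df = df[~df["name"].isin(exclude)]
    print(df)    # the original EN-001 run is dropped; EN-001-R1 and EN-002 remain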
|
|
||||||
|
|
||||||
@@ -338,12 +344,13 @@ class ControlsDatePicker(QWidget):
|
|||||||
"""
|
"""
|
||||||
custom widget to pick start and end dates for controls graphs
|
custom widget to pick start and end dates for controls graphs
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self) -> None:
|
def __init__(self) -> None:
|
||||||
super().__init__()
|
super().__init__()
|
||||||
self.start_date = QDateEdit(calendarPopup=True)
|
self.start_date = QDateEdit(calendarPopup=True)
|
||||||
# NOTE: start date is two months prior to end date by default
|
# NOTE: start date is six months prior to end date by default
|
||||||
twomonthsago = QDate.currentDate().addDays(-60)
|
sixmonthsago = QDate.currentDate().addDays(-180)
|
||||||
self.start_date.setDate(twomonthsago)
|
self.start_date.setDate(sixmonthsago)
|
||||||
self.end_date = QDateEdit(calendarPopup=True)
|
self.end_date = QDateEdit(calendarPopup=True)
|
||||||
self.end_date.setDate(QDate.currentDate())
|
self.end_date.setDate(QDate.currentDate())
|
||||||
self.layout = QHBoxLayout()
|
self.layout = QHBoxLayout()
|
||||||
@@ -353,6 +360,8 @@ class ControlsDatePicker(QWidget):
|
|||||||
self.layout.addWidget(self.end_date)
|
self.layout.addWidget(self.end_date)
|
||||||
self.setLayout(self.layout)
|
self.setLayout(self.layout)
|
||||||
self.setSizePolicy(QSizePolicy.Policy.Expanding, QSizePolicy.Policy.Fixed)
|
self.setSizePolicy(QSizePolicy.Policy.Expanding, QSizePolicy.Policy.Fixed)
|
||||||
|
self.save_button = QPushButton("Save Chart", parent=self)
|
||||||
|
self.layout.addWidget(self.save_button)
|
||||||
|
|
||||||
def sizeHint(self) -> QSize:
|
def sizeHint(self) -> QSize:
|
||||||
return QSize(80, 20)
|
return QSize(80, 20)
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ from PyQt6.QtWidgets import (QDialog, QComboBox, QCheckBox,
|
|||||||
from backend.db.models import Equipment, BasicSubmission, Process
|
from backend.db.models import Equipment, BasicSubmission, Process
|
||||||
from backend.validators.pydant import PydEquipment, PydEquipmentRole, PydTips
|
from backend.validators.pydant import PydEquipment, PydEquipmentRole, PydTips
|
||||||
import logging
|
import logging
|
||||||
from typing import List
|
from typing import List, Generator
|
||||||
|
|
||||||
logger = logging.getLogger(f"submissions.{__name__}")
|
logger = logging.getLogger(f"submissions.{__name__}")
|
||||||
|
|
||||||
@@ -45,26 +45,26 @@ class EquipmentUsage(QDialog):
|
|||||||
widg.update_processes()
|
widg.update_processes()
|
||||||
self.layout.addWidget(self.buttonBox)
|
self.layout.addWidget(self.buttonBox)
|
||||||
|
|
||||||
def parse_form(self) -> List[PydEquipment]:
|
def parse_form(self) -> Generator[PydEquipment, None, None]:
|
||||||
"""
|
"""
|
||||||
Pull info from all RoleComboBox widgets
|
Pull info from all RoleComboBox widgets
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
List[PydEquipment]: All equipment pulled from widgets
|
Generator[PydEquipment, None, None]: All equipment pulled from widgets
|
||||||
"""
|
"""
|
||||||
output = []
|
|
||||||
for widget in self.findChildren(QWidget):
|
for widget in self.findChildren(QWidget):
|
||||||
match widget:
|
match widget:
|
||||||
case RoleComboBox():
|
case RoleComboBox():
|
||||||
if widget.check.isChecked():
|
if widget.check.isChecked():
|
||||||
output.append(widget.parse_form())
|
item = widget.parse_form()
|
||||||
|
if item:
|
||||||
|
yield item
|
||||||
|
else:
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
continue
|
||||||
case _:
|
case _:
|
||||||
pass
|
continue
|
||||||
# logger.debug(f"parsed output of Equsage form: {pformat(output)}")
|
|
||||||
try:
|
|
||||||
return [item.strip() for item in output if item is not None]
|
|
||||||
except AttributeError:
|
|
||||||
return [item for item in output if item is not None]
|
|
||||||
|
|
||||||
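With parse_form now a generator, callers no longer receive a ready-made list and must iterate or materialise the results themselves. A toy sketch of the pattern, using plain dicts as stand-ins for the Qt widgets:

    from typing import Generator

    def parse_widgets(widgets: list) -> Generator[str, None, None]:
        # stand-in for the widget loop above: yield only non-empty parsed items
        for widget in widgets:
            item = widget.get("value")
            if item:
                yield item

    widgets = [{"value": "MagMAX"}, {"value": None}, {"value": "QIAcube"}]   # illustrative equipment names
    equipment = list(parse_widgets(widgets))    # callers must drain the generator
    print(equipment)                            # ['MagMAX', 'QIAcube']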
class LabelRow(QWidget):
|
class LabelRow(QWidget):
|
||||||
|
|
||||||
@@ -93,14 +93,10 @@ class RoleComboBox(QWidget):
|
|||||||
|
|
||||||
def __init__(self, parent, role: PydEquipmentRole, used: list) -> None:
|
def __init__(self, parent, role: PydEquipmentRole, used: list) -> None:
|
||||||
super().__init__(parent)
|
super().__init__(parent)
|
||||||
# self.layout = QHBoxLayout()
|
|
||||||
self.layout = QGridLayout()
|
self.layout = QGridLayout()
|
||||||
self.role = role
|
self.role = role
|
||||||
self.check = QCheckBox()
|
self.check = QCheckBox()
|
||||||
# if role.name in used:
|
|
||||||
self.check.setChecked(False)
|
self.check.setChecked(False)
|
||||||
# else:
|
|
||||||
# self.check.setChecked(True)
|
|
||||||
self.check.stateChanged.connect(self.toggle_checked)
|
self.check.stateChanged.connect(self.toggle_checked)
|
||||||
self.box = QComboBox()
|
self.box = QComboBox()
|
||||||
self.box.setMaximumWidth(200)
|
self.box.setMaximumWidth(200)
|
||||||
@@ -129,7 +125,6 @@ class RoleComboBox(QWidget):
|
|||||||
"""
|
"""
|
||||||
equip = self.box.currentText()
|
equip = self.box.currentText()
|
||||||
# logger.debug(f"Updating equipment: {equip}")
|
# logger.debug(f"Updating equipment: {equip}")
|
||||||
# equip2 = [item for item in self.role.equipment if item.name == equip][0]
|
|
||||||
equip2 = next((item for item in self.role.equipment if item.name == equip), self.role.equipment[0])
|
equip2 = next((item for item in self.role.equipment if item.name == equip), self.role.equipment[0])
|
||||||
# logger.debug(f"Using: {equip2}")
|
# logger.debug(f"Using: {equip2}")
|
||||||
self.process.clear()
|
self.process.clear()
|
||||||
@@ -158,7 +153,10 @@ class RoleComboBox(QWidget):
|
|||||||
widget.setMinimumWidth(200)
|
widget.setMinimumWidth(200)
|
||||||
widget.setMaximumWidth(200)
|
widget.setMaximumWidth(200)
|
||||||
self.layout.addWidget(widget, 0, 4)
|
self.layout.addWidget(widget, 0, 4)
|
||||||
|
try:
|
||||||
widget.setEnabled(self.check.isChecked())
|
widget.setEnabled(self.check.isChecked())
|
||||||
|
except NameError:
|
||||||
|
pass
|
||||||
|
|
||||||
def parse_form(self) -> PydEquipment | None:
|
def parse_form(self) -> PydEquipment | None:
|
||||||
"""
|
"""
|
||||||
|
|||||||
@@ -74,7 +74,7 @@ class GelBox(QDialog):
|
|||||||
self.buttonBox = QDialogButtonBox(QBtn)
|
self.buttonBox = QDialogButtonBox(QBtn)
|
||||||
self.buttonBox.accepted.connect(self.accept)
|
self.buttonBox.accepted.connect(self.accept)
|
||||||
self.buttonBox.rejected.connect(self.reject)
|
self.buttonBox.rejected.connect(self.reject)
|
||||||
layout.addWidget(self.buttonBox, 23, 1, 1, 1) #, alignment=Qt.AlignmentFlag.AlignTop)
|
layout.addWidget(self.buttonBox, 23, 1, 1, 1)
|
||||||
self.setLayout(layout)
|
self.setLayout(layout)
|
||||||
|
|
||||||
|
|
||||||
@@ -135,7 +135,7 @@ class ControlsForm(QWidget):
|
|||||||
self.layout.addWidget(self.comment_field, 1, 5, 4, 1)
|
self.layout.addWidget(self.comment_field, 1, 5, 4, 1)
|
||||||
self.setLayout(self.layout)
|
self.setLayout(self.layout)
|
||||||
|
|
||||||
def parse_form(self) -> List[dict]:
|
def parse_form(self) -> Tuple[List[dict], str]:
|
||||||
"""
|
"""
|
||||||
Pulls the controls statuses from the form.
|
Pulls the controls statuses from the form.
|
||||||
|
|
||||||
@@ -145,11 +145,7 @@ class ControlsForm(QWidget):
|
|||||||
output = []
|
output = []
|
||||||
for le in self.findChildren(QComboBox):
|
for le in self.findChildren(QComboBox):
|
||||||
label = [item.strip() for item in le.objectName().split(" : ")]
|
label = [item.strip() for item in le.objectName().split(" : ")]
|
||||||
try:
|
dicto = next((item for item in output if item['name'] == label[0]), dict(name=label[0], values=[]))
|
||||||
# dicto = [item for item in output if item['name'] == label[0]][0]
|
|
||||||
dicto = next(item for item in output if item['name'] == label[0])
|
|
||||||
except StopIteration:
|
|
||||||
dicto = dict(name=label[0], values=[])
|
|
||||||
dicto['values'].append(dict(name=label[1], value=le.currentText()))
|
dicto['values'].append(dict(name=label[1], value=le.currentText()))
|
||||||
if label[0] not in [item['name'] for item in output]:
|
if label[0] not in [item['name'] for item in output]:
|
||||||
output.append(dicto)
|
output.append(dicto)
|
||||||
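The change above swaps a try/except StopIteration for next() with a default, which either finds the existing entry or supplies a fresh one. A tiny illustration with made-up labels:

    output = [{"name": "EN-NOS", "values": []}]        # hypothetical existing entry
    label = ["MCS-NOS", "status"]                      # hypothetical combobox object-name parts
    dicto = next((item for item in output if item["name"] == label[0]),
                 dict(name=label[0], values=[]))
    print(dicto)   # a fresh dict, because "MCS-NOS" is not in output yet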
|
|||||||
@@ -6,7 +6,6 @@ from PyQt6.QtWidgets import (
|
|||||||
QDialogButtonBox, QMessageBox, QComboBox
|
QDialogButtonBox, QMessageBox, QComboBox
|
||||||
)
|
)
|
||||||
from PyQt6.QtWebEngineWidgets import QWebEngineView
|
from PyQt6.QtWebEngineWidgets import QWebEngineView
|
||||||
from PyQt6.QtCore import Qt
|
|
||||||
from tools import jinja_template_loading
|
from tools import jinja_template_loading
|
||||||
import logging
|
import logging
|
||||||
from backend.db import models
|
from backend.db import models
|
||||||
@@ -21,6 +20,7 @@ class QuestionAsker(QDialog):
|
|||||||
"""
|
"""
|
||||||
dialog to ask yes/no questions
|
dialog to ask yes/no questions
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, title: str, message: str):
|
def __init__(self, title: str, message: str):
|
||||||
super().__init__()
|
super().__init__()
|
||||||
self.setWindowTitle(title)
|
self.setWindowTitle(title)
|
||||||
@@ -41,7 +41,9 @@ class AlertPop(QMessageBox):
|
|||||||
"""
|
"""
|
||||||
Dialog to show an alert.
|
Dialog to show an alert.
|
||||||
"""
|
"""
|
||||||
def __init__(self, message:str, status:Literal['Information', 'Question', 'Warning', 'Critical'], owner:str|None=None):
|
|
||||||
|
def __init__(self, message: str, status: Literal['Information', 'Question', 'Warning', 'Critical'],
|
||||||
|
owner: str | None = None):
|
||||||
super().__init__()
|
super().__init__()
|
||||||
# NOTE: select icon by string
|
# NOTE: select icon by string
|
||||||
icon = getattr(QMessageBox.Icon, status)
|
icon = getattr(QMessageBox.Icon, status)
|
||||||
@@ -49,6 +51,7 @@ class AlertPop(QMessageBox):
|
|||||||
self.setInformativeText(message)
|
self.setInformativeText(message)
|
||||||
self.setWindowTitle(f"{owner} - {status.title()}")
|
self.setWindowTitle(f"{owner} - {status.title()}")
|
||||||
|
|
||||||
|
|
||||||
class HTMLPop(QDialog):
|
class HTMLPop(QDialog):
|
||||||
|
|
||||||
def __init__(self, html: str, owner: str | None = None, title: str = "python"):
|
def __init__(self, html: str, owner: str | None = None, title: str = "python"):
|
||||||
@@ -67,10 +70,14 @@ class ObjectSelector(QDialog):
|
|||||||
"""
|
"""
|
||||||
dialog to input BaseClass type manually
|
dialog to input BaseClass type manually
|
||||||
"""
|
"""
|
||||||
def __init__(self, title:str, message:str, obj_type:str|type[models.BaseClass]):
|
|
||||||
|
def __init__(self, title: str, message: str, obj_type: str | type[models.BaseClass], values: list | None = None):
|
||||||
super().__init__()
|
super().__init__()
|
||||||
self.setWindowTitle(title)
|
self.setWindowTitle(title)
|
||||||
self.widget = QComboBox()
|
self.widget = QComboBox()
|
||||||
|
if values:
|
||||||
|
items = values
|
||||||
|
else:
|
||||||
if isinstance(obj_type, str):
|
if isinstance(obj_type, str):
|
||||||
obj_type: models.BaseClass = getattr(models, obj_type)
|
obj_type: models.BaseClass = getattr(models, obj_type)
|
||||||
items = [item.name for item in obj_type.query()]
|
items = [item.name for item in obj_type.query()]
|
||||||
|
|||||||
@@ -55,6 +55,7 @@ class SearchBox(QDialog):
|
|||||||
widget = FieldSearch(parent=self, label=item['label'], field_name=item['field'])
|
widget = FieldSearch(parent=self, label=item['label'], field_name=item['field'])
|
||||||
self.layout.addWidget(widget, start_row+iii, 0)
|
self.layout.addWidget(widget, start_row+iii, 0)
|
||||||
widget.search_widget.textChanged.connect(self.update_data)
|
widget.search_widget.textChanged.connect(self.update_data)
|
||||||
|
self.update_data()
|
||||||
|
|
||||||
def parse_form(self) -> dict:
|
def parse_form(self) -> dict:
|
||||||
"""
|
"""
|
||||||
@@ -73,7 +74,8 @@ class SearchBox(QDialog):
|
|||||||
# logger.debug(f"Running update_data with sample type: {self.type}")
|
# logger.debug(f"Running update_data with sample type: {self.type}")
|
||||||
fields = self.parse_form()
|
fields = self.parse_form()
|
||||||
# logger.debug(f"Got fields: {fields}")
|
# logger.debug(f"Got fields: {fields}")
|
||||||
sample_list_creator = self.type.fuzzy_search(sample_type=self.type, **fields)
|
# sample_list_creator = self.type.fuzzy_search(sample_type=self.type, **fields)
|
||||||
|
sample_list_creator = self.type.fuzzy_search(**fields)
|
||||||
data = self.type.samples_to_df(sample_list=sample_list_creator)
|
data = self.type.samples_to_df(sample_list=sample_list_creator)
|
||||||
# logger.debug(f"Data: {data}")
|
# logger.debug(f"Data: {data}")
|
||||||
self.results.setData(df=data)
|
self.results.setData(df=data)
|
||||||
|
|||||||
@@ -9,7 +9,6 @@ from PyQt6.QtWebEngineWidgets import QWebEngineView
|
|||||||
from PyQt6.QtWebChannel import QWebChannel
|
from PyQt6.QtWebChannel import QWebChannel
|
||||||
from PyQt6.QtCore import Qt, pyqtSlot, QMarginsF
|
from PyQt6.QtCore import Qt, pyqtSlot, QMarginsF
|
||||||
from jinja2 import TemplateNotFound
|
from jinja2 import TemplateNotFound
|
||||||
|
|
||||||
from backend.db.models import BasicSubmission, BasicSample, Reagent, KitType
|
from backend.db.models import BasicSubmission, BasicSample, Reagent, KitType
|
||||||
from tools import is_power_user, html_to_pdf, jinja_template_loading
|
from tools import is_power_user, html_to_pdf, jinja_template_loading
|
||||||
from .functions import select_save_file
|
from .functions import select_save_file
|
||||||
@@ -18,9 +17,8 @@ import logging
|
|||||||
from getpass import getuser
|
from getpass import getuser
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from pprint import pformat
|
from pprint import pformat
|
||||||
|
|
||||||
from typing import List
|
from typing import List
|
||||||
from backend.excel.writer import DocxWriter
|
|
||||||
|
|
||||||
logger = logging.getLogger(f"submissions.{__name__}")
|
logger = logging.getLogger(f"submissions.{__name__}")
|
||||||
|
|
||||||
|
|||||||
@@ -11,8 +11,6 @@ from backend.excel import ReportMaker
|
|||||||
from tools import Report, Result, report_result
|
from tools import Report, Result, report_result
|
||||||
from .functions import select_save_file, select_open_file
|
from .functions import select_save_file, select_open_file
|
||||||
from .misc import ReportDatePicker
|
from .misc import ReportDatePicker
|
||||||
import pandas as pd
|
|
||||||
from openpyxl.worksheet.worksheet import Worksheet
|
|
||||||
|
|
||||||
logger = logging.getLogger(f"submissions.{__name__}")
|
logger = logging.getLogger(f"submissions.{__name__}")
|
||||||
|
|
||||||
@@ -222,10 +220,6 @@ class SubmissionsSheet(QTableView):
|
|||||||
# NOTE: if imported submission doesn't exist move on to next run
|
# NOTE: if imported submission doesn't exist move on to next run
|
||||||
if sub is None:
|
if sub is None:
|
||||||
continue
|
continue
|
||||||
# try:
|
|
||||||
# logger.debug(f"Found submission: {sub.rsl_plate_num}")
|
|
||||||
# except AttributeError:
|
|
||||||
# continue
|
|
||||||
sub.set_attribute('pcr_info', new_run)
|
sub.set_attribute('pcr_info', new_run)
|
||||||
# NOTE: check if pcr_info already exists
|
# NOTE: check if pcr_info already exists
|
||||||
sub.save()
|
sub.save()
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ from PyQt6.QtWidgets import (
|
|||||||
)
|
)
|
||||||
from PyQt6.QtCore import pyqtSignal, Qt
|
from PyQt6.QtCore import pyqtSignal, Qt
|
||||||
from . import select_open_file, select_save_file
|
from . import select_open_file, select_save_file
|
||||||
import logging, difflib, inspect
|
import logging, difflib
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from tools import Report, Result, check_not_nan, main_form_style, report_result, check_regex_match
|
from tools import Report, Result, check_not_nan, main_form_style, report_result, check_regex_match
|
||||||
from backend.excel.parser import SheetParser
|
from backend.excel.parser import SheetParser
|
||||||
@@ -163,7 +163,7 @@ class SubmissionFormContainer(QWidget):
|
|||||||
# NOTE: create form
|
# NOTE: create form
|
||||||
dlg = AddReagentForm(reagent_lot=reagent_lot, reagent_role=reagent_role, expiry=expiry, reagent_name=name)
|
dlg = AddReagentForm(reagent_lot=reagent_lot, reagent_role=reagent_role, expiry=expiry, reagent_name=name)
|
||||||
if dlg.exec():
|
if dlg.exec():
|
||||||
# extract form info
|
# NOTE: extract form info
|
||||||
info = dlg.parse_form()
|
info = dlg.parse_form()
|
||||||
# logger.debug(f"Reagent info: {info}")
|
# logger.debug(f"Reagent info: {info}")
|
||||||
# NOTE: create reagent object
|
# NOTE: create reagent object
|
||||||
@@ -180,7 +180,6 @@ class SubmissionFormWidget(QWidget):
|
|||||||
|
|
||||||
def __init__(self, parent: QWidget, submission: PydSubmission, disable: list | None = None) -> None:
|
def __init__(self, parent: QWidget, submission: PydSubmission, disable: list | None = None) -> None:
|
||||||
super().__init__(parent)
|
super().__init__(parent)
|
||||||
# self.report = Report()
|
|
||||||
# logger.debug(f"Disable: {disable}")
|
# logger.debug(f"Disable: {disable}")
|
||||||
if disable is None:
|
if disable is None:
|
||||||
disable = []
|
disable = []
|
||||||
@@ -268,7 +267,6 @@ class SubmissionFormWidget(QWidget):
|
|||||||
Tuple[QMainWindow, dict]: Updated application and result
|
Tuple[QMainWindow, dict]: Updated application and result
|
||||||
"""
|
"""
|
||||||
extraction_kit = args[0]
|
extraction_kit = args[0]
|
||||||
# caller = inspect.stack()[1].function.__repr__().replace("'", "")
|
|
||||||
report = Report()
|
report = Report()
|
||||||
# logger.debug(f"Extraction kit: {extraction_kit}")
|
# logger.debug(f"Extraction kit: {extraction_kit}")
|
||||||
# NOTE: Remove previous reagent widgets
|
# NOTE: Remove previous reagent widgets
|
||||||
|
|||||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -874,28 +874,6 @@ def rreplace(s: str, old: str, new: str) -> str:
|
|||||||
return (s[::-1].replace(old[::-1], new[::-1], 1))[::-1]
|
return (s[::-1].replace(old[::-1], new[::-1], 1))[::-1]
|
||||||
|
|
||||||
|
|
||||||
def html_to_pdf(html: str, output_file: Path | str):
|
|
||||||
"""
|
|
||||||
Attempts to print an html string as a PDF. (currently not working)
|
|
||||||
|
|
||||||
Args:
|
|
||||||
html (str): Input html string.
|
|
||||||
output_file (Path | str): Output PDF file path.
|
|
||||||
"""
|
|
||||||
if isinstance(output_file, str):
|
|
||||||
output_file = Path(output_file)
|
|
||||||
logger.debug(f"Printing PDF to {output_file}")
|
|
||||||
document = QWebEngineView()
|
|
||||||
document.setHtml(html)
|
|
||||||
# document.show()
|
|
||||||
printer = QPrinter(QPrinter.PrinterMode.HighResolution)
|
|
||||||
printer.setOutputFormat(QPrinter.OutputFormat.PdfFormat)
|
|
||||||
printer.setOutputFileName(output_file.absolute().__str__())
|
|
||||||
printer.setPageSize(QPageSize(QPageSize.PageSizeId.A4))
|
|
||||||
document.print(printer)
|
|
||||||
# document.close()
|
|
||||||
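The deleted helper was flagged as not working; a likely cause is that setHtml loads asynchronously, so printing fires before the page content exists. A hedged sketch of one alternative (not part of this commit, function name is illustrative) that prints only after loading finishes, via QWebEnginePage.printToPdf:

    import sys
    from pathlib import Path
    from PyQt6.QtWidgets import QApplication
    from PyQt6.QtWebEngineWidgets import QWebEngineView

    def html_to_pdf_sketch(html: str, output_file: Path | str):
        output_file = Path(output_file)
        app = QApplication.instance() or QApplication(sys.argv)
        view = QWebEngineView()

        def print_when_loaded(ok: bool):
            if ok:
                # printToPdf is asynchronous; pdfPrintingFinished reports the result
                view.page().printToPdf(str(output_file.absolute()))

        view.loadFinished.connect(print_when_loaded)
        view.page().pdfPrintingFinished.connect(lambda path, ok: print(f"wrote {path}: {ok}"))
        view.setHtml(html)     # loading is asynchronous, hence the signal above
        return app, view       # keep references alive; the Qt event loop must run until printing finishes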
|
|
||||||
|
|
||||||
def remove_key_from_list_of_dicts(input: list, key: str) -> list:
|
def remove_key_from_list_of_dicts(input: list, key: str) -> list:
|
||||||
"""
|
"""
|
||||||
Removes a key from all dictionaries in a list of dictionaries
|
Removes a key from all dictionaries in a list of dictionaries
|
||||||
|
|||||||