From 841af02eaa45c1b7395d5f4e4711de3c3661c146 Mon Sep 17 00:00:00 2001 From: Peter Amstutz Date: Fri, 3 Jul 2020 16:36:55 -0400 Subject: Fix single arg upload Arvados-DCO-1.1-Signed-off-by: Peter Amstutz --- bh20sequploader/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'bh20sequploader') diff --git a/bh20sequploader/main.py b/bh20sequploader/main.py index c442af0..89c30e8 100644 --- a/bh20sequploader/main.py +++ b/bh20sequploader/main.py @@ -62,7 +62,7 @@ def main(): parser = argparse.ArgumentParser(description='Upload SARS-CoV-19 sequences for analysis') parser.add_argument('metadata', type=argparse.FileType('r'), help='sequence metadata json') parser.add_argument('sequence_p1', type=argparse.FileType('rb'), help='sequence FASTA/FASTQ') - parser.add_argument('sequence_p2', type=argparse.FileType('rb'), default=None, help='sequence FASTQ pair') + parser.add_argument('sequence_p2', type=argparse.FileType('rb'), default=None, nargs='?', help='sequence FASTQ pair') parser.add_argument("--validate", action="store_true", help="Dry run, validate only") args = parser.parse_args() -- cgit v1.2.3 From c50e611f7be281a3b7955254be097c01e6461c20 Mon Sep 17 00:00:00 2001 From: Peter Amstutz Date: Fri, 3 Jul 2020 20:55:16 +0000 Subject: Fix qc_fasta bug closing stream. --- bh20sequploader/qc_fasta.py | 4 ++++ 1 file changed, 4 insertions(+) (limited to 'bh20sequploader') diff --git a/bh20sequploader/qc_fasta.py b/bh20sequploader/qc_fasta.py index e198430..1587def 100644 --- a/bh20sequploader/qc_fasta.py +++ b/bh20sequploader/qc_fasta.py @@ -51,6 +51,8 @@ def qc_fasta(arg_sequence): if seq_type == "text/fasta": # ensure that contains only one entry submitlabel, submitseq = read_fasta(sequence) + sequence.seek(0) + sequence.detach() with tempfile.NamedTemporaryFile() as tmp1: refstring = pkg_resources.resource_string(__name__, "SARS-CoV-2-reference.fasta") @@ -92,6 +94,8 @@ def qc_fasta(arg_sequence): return ("sequence.fasta"+gz, seqlabel) elif seq_type == "text/fastq": + sequence.seek(0) + sequence.detach() return ("reads.fastq"+gz, seqlabel) else: raise ValueError("Sequence file does not look like a DNA FASTA or FASTQ") -- cgit v1.2.3 From 04df498f5cd85015afce79e1e87a3979e596dcc6 Mon Sep 17 00:00:00 2001 From: Peter Amstutz Date: Fri, 3 Jul 2020 17:08:25 -0400 Subject: Adding --skip-qc Arvados-DCO-1.1-Signed-off-by: Peter Amstutz --- bh20sequploader/main.py | 9 +++++---- bh20sequploader/qc_fasta.py | 7 +++++-- 2 files changed, 10 insertions(+), 6 deletions(-) (limited to 'bh20sequploader') diff --git a/bh20sequploader/main.py b/bh20sequploader/main.py index 89c30e8..dc63bfc 100644 --- a/bh20sequploader/main.py +++ b/bh20sequploader/main.py @@ -22,10 +22,10 @@ ARVADOS_API_HOST='lugli.arvadosapi.com' ARVADOS_API_TOKEN='2fbebpmbo3rw3x05ueu2i6nx70zhrsb1p22ycu3ry34m4x4462' UPLOAD_PROJECT='lugli-j7d0g-n5clictpuvwk8aa' -def qa_stuff(metadata, sequence_p1, sequence_p2): +def qc_stuff(metadata, sequence_p1, sequence_p2, do_qc=True): try: log.debug("Checking metadata") - if not qc_metadata(metadata.name): + if do_qc and not qc_metadata(metadata.name): log.warning("Failed metadata qc") exit(1) except ValueError as e: @@ -37,7 +37,7 @@ def qa_stuff(metadata, sequence_p1, sequence_p2): target = [] try: log.debug("Checking FASTA/FASTQ QC") - target.append(qc_fasta(sequence_p1)) + target.append(qc_fasta(sequence_p1, check_with_clustalw=do_qc)) if sequence_p2: target.append(qc_fasta(sequence_p2)) target[0] = ("reads_1."+target[0][0][6:], target[0][1]) @@ -64,11 +64,12 @@ def main(): 
parser.add_argument('sequence_p1', type=argparse.FileType('rb'), help='sequence FASTA/FASTQ') parser.add_argument('sequence_p2', type=argparse.FileType('rb'), default=None, nargs='?', help='sequence FASTQ pair') parser.add_argument("--validate", action="store_true", help="Dry run, validate only") + parser.add_argument("--skip-qc", action="store_true", help="Skip local qc check") args = parser.parse_args() api = arvados.api(host=ARVADOS_API_HOST, token=ARVADOS_API_TOKEN, insecure=True) - target = qa_stuff(args.metadata, args.sequence_p1, args.sequence_p2) + target = qc_stuff(args.metadata, args.sequence_p1, args.sequence_p2, not args.skip_qc) seqlabel = target[0][1] if args.validate: diff --git a/bh20sequploader/qc_fasta.py b/bh20sequploader/qc_fasta.py index 1587def..944b52c 100644 --- a/bh20sequploader/qc_fasta.py +++ b/bh20sequploader/qc_fasta.py @@ -25,7 +25,7 @@ def read_fasta(sequence): raise ValueError("FASTA file contains multiple entries") return label, bases -def qc_fasta(arg_sequence): +def qc_fasta(arg_sequence, check_with_clustalw=True): log.debug("Starting qc_fasta") schema_resource = pkg_resources.resource_stream(__name__, "validation/formats") with tempfile.NamedTemporaryFile() as tmp: @@ -64,6 +64,9 @@ def qc_fasta(arg_sequence): refbp = 0 similarity = 0 try: + if not check_with_clustalw: + raise Exception("skipping QC") + cmd = ["clustalw", "-infile="+tmp1.name, "-quicktree", "-iteration=none", "-type=DNA"] print("QC checking similarity to reference") @@ -81,7 +84,7 @@ def qc_fasta(arg_sequence): print(g2.group(0)) print(g3.group(0)) except Exception as e: - logging.warn("Error trying to QC against reference sequence using 'clustalw': %s", e) + logging.warn("QC against reference sequence using 'clustalw': %s", e) if refbp and (subbp/refbp) < .7: raise ValueError("QC fail: submit sequence length is shorter than 70% reference") -- cgit v1.2.3 From 38340e0cedb465cd592ac40b11c9d22c75973fed Mon Sep 17 00:00:00 2001 From: Peter Amstutz Date: Fri, 3 Jul 2020 21:15:48 +0000 Subject: Add --skip-qc for faster batch import --- bh20sequploader/main.py | 4 ++-- bh20sequploader/qc_fasta.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) (limited to 'bh20sequploader') diff --git a/bh20sequploader/main.py b/bh20sequploader/main.py index dc63bfc..cdc4c3f 100644 --- a/bh20sequploader/main.py +++ b/bh20sequploader/main.py @@ -24,7 +24,7 @@ UPLOAD_PROJECT='lugli-j7d0g-n5clictpuvwk8aa' def qc_stuff(metadata, sequence_p1, sequence_p2, do_qc=True): try: - log.debug("Checking metadata") + log.debug("Checking metadata" if do_qc else "Skipping metadata check") if do_qc and not qc_metadata(metadata.name): log.warning("Failed metadata qc") exit(1) @@ -36,7 +36,7 @@ def qc_stuff(metadata, sequence_p1, sequence_p2, do_qc=True): target = [] try: - log.debug("Checking FASTA/FASTQ QC") + log.debug("FASTA/FASTQ QC" if do_qc else "Limited FASTA/FASTQ QC") target.append(qc_fasta(sequence_p1, check_with_clustalw=do_qc)) if sequence_p2: target.append(qc_fasta(sequence_p2)) diff --git a/bh20sequploader/qc_fasta.py b/bh20sequploader/qc_fasta.py index 944b52c..8c6ebd3 100644 --- a/bh20sequploader/qc_fasta.py +++ b/bh20sequploader/qc_fasta.py @@ -54,6 +54,9 @@ def qc_fasta(arg_sequence, check_with_clustalw=True): sequence.seek(0) sequence.detach() + if not check_with_clustalw: + return ("sequence.fasta"+gz, seqlabel) + with tempfile.NamedTemporaryFile() as tmp1: refstring = pkg_resources.resource_string(__name__, "SARS-CoV-2-reference.fasta") tmp1.write(refstring) @@ -64,9 +67,6 @@ def 
qc_fasta(arg_sequence, check_with_clustalw=True): refbp = 0 similarity = 0 try: - if not check_with_clustalw: - raise Exception("skipping QC") - cmd = ["clustalw", "-infile="+tmp1.name, "-quicktree", "-iteration=none", "-type=DNA"] print("QC checking similarity to reference") -- cgit v1.2.3 From 4f30e506055acb788a1ff1bbcb6359c4413a4eab Mon Sep 17 00:00:00 2001 From: Peter Amstutz Date: Sun, 5 Jul 2020 21:33:26 +0000 Subject: Add bh20-seq-analyzer --print-status Get status from new property "status: validated/rejected" Also store QC fail error message. --- bh20seqanalyzer/main.py | 99 +++++++++++++++++++++++++----------------- bh20sequploader/main.py | 9 ++-- bh20sequploader/qc_metadata.py | 21 +++------ 3 files changed, 70 insertions(+), 59 deletions(-) (limited to 'bh20sequploader') diff --git a/bh20seqanalyzer/main.py b/bh20seqanalyzer/main.py index 7839d20..2fefa86 100644 --- a/bh20seqanalyzer/main.py +++ b/bh20seqanalyzer/main.py @@ -20,28 +20,36 @@ def validate_upload(api, collection, validated_project, fastq_project, fastq_workflow_uuid): col = arvados.collection.Collection(collection["uuid"]) - # validate the collection here. Check metadata, etc. - valid = True + if collection.get("status") in ("validated", "rejected"): + return False + # validate the collection here. Check metadata, etc. logging.info("Validating upload '%s' (%s)" % (collection["name"], collection["uuid"])) - if "metadata.yaml" not in col: - logging.warn("Upload '%s' missing metadata.yaml", collection["name"]) - valid = False - else: - try: - metadata_content = ruamel.yaml.round_trip_load(col.open("metadata.yaml")) - metadata_content["id"] = "http://arvados.org/keep:%s/metadata.yaml" % collection["portable_data_hash"] - sample_id = metadata_content["sample"]["sample_id"] - add_lc_filename(metadata_content, metadata_content["id"]) - valid = qc_metadata(metadata_content) and valid - except Exception as e: - logging.warn(e) - valid = False - if not valid: - logging.warn("Failed metadata qc") - - if valid: + errors = [] + + dup = api.collections().list(filters=[["owner_uuid", "=", validated_project], + ["portable_data_hash", "=", col.portable_data_hash()]]).execute() + if dup["items"]: + # This exact collection has been uploaded before. 
+ errors.append("Duplicate of %s" % ([d["uuid"] for d in dup["items"]])) + + if not errors: + if "metadata.yaml" not in col: + errors.append("Missing metadata.yaml", collection["name"]) + else: + try: + metadata_content = ruamel.yaml.round_trip_load(col.open("metadata.yaml")) + metadata_content["id"] = "http://arvados.org/keep:%s/metadata.yaml" % collection["portable_data_hash"] + sample_id = metadata_content["sample"]["sample_id"] + add_lc_filename(metadata_content, metadata_content["id"]) + valid = qc_metadata(metadata_content) + if not valid: + errors.append("Failed metadata qc") + except Exception as e: + errors.append(str(e)) + + if not errors: try: tgt = None paired = {"reads_1.fastq": "reads.fastq", "reads_1.fastq.gz": "reads.fastq.gz"} @@ -51,36 +59,32 @@ def validate_upload(api, collection, validated_project, with col.open(n, 'rb') as qf: tgt = qc_fasta(qf)[0] if tgt != n and tgt != paired.get(n): - logging.info("Expected %s but magic says it should be %s", n, tgt) - valid = False + errors.append("Expected %s but magic says it should be %s", n, tgt) elif tgt in ("reads.fastq", "reads.fastq.gz", "reads_1.fastq", "reads_1.fastq.gz"): start_fastq_to_fasta(api, collection, fastq_project, fastq_workflow_uuid, n, sample_id) return False if tgt is None: - valid = False - logging.warn("Upload '%s' does not contain sequence.fasta, reads.fastq or reads_1.fastq", collection["name"]) - except ValueError as v: - valid = False + errors.append("Upload '%s' does not contain sequence.fasta, reads.fastq or reads_1.fastq", collection["name"]) + except Exception as v: + errors.append(str(v)) - dup = api.collections().list(filters=[["owner_uuid", "=", validated_project], - ["portable_data_hash", "=", col.portable_data_hash()]]).execute() - if dup["items"]: - # This exact collection has been uploaded before. - valid = False - logging.warn("Upload '%s' is duplicate" % collection["name"]) - if valid: + if not errors: logging.info("Added '%s' to validated sequences" % collection["name"]) # Move it to the "validated" project to be included in the next analysis + collection["properties"]["status"] = "validated" api.collections().update(uuid=collection["uuid"], body={ "owner_uuid": validated_project, "name": "%s (%s)" % (collection["name"], time.asctime(time.gmtime()))}).execute() + return True else: - # It is invalid, delete it. 
- logging.warn("Suggest deleting '%s' (%s)" % (collection["name"], collection["uuid"])) - #api.collections().delete(uuid=collection["uuid"]).execute() - - return valid + # It is invalid + logging.warn("'%s' (%s) has validation errors: %s" % ( + collection["name"], collection["uuid"], "\n".join(errors))) + collection["properties"]["status"] = "rejected" + collection["properties"]["errors"] = errors + api.collections().update(uuid=collection["uuid"], body={"properties": collection["properties"]}).execute() + return False def run_workflow(api, parent_project, workflow_uuid, name, inputobj): @@ -231,6 +235,18 @@ def upload_schema(api, workflow_def_project): return "keep:%s/schema.yml" % pdh +def print_status(api, uploader_project): + pending = arvados.util.list_all(api.collections().list, filters=[["owner_uuid", "=", uploader_project]]) + out = [] + for p in pending: + prop = p["properties"] + out.append(prop) + if "status" not in prop: + prop["status"] = "pending" + prop["created_at"] = p["created_at"] + prop["uuid"] = p["uuid"] + print(json.dumps(out, indent=2)) + def main(): parser = argparse.ArgumentParser(description='Analyze collections uploaded to a project') parser.add_argument('--uploader-project', type=str, default='lugli-j7d0g-n5clictpuvwk8aa', help='') @@ -248,6 +264,7 @@ def main(): parser.add_argument('--kickoff', action="store_true") parser.add_argument('--no-start-analysis', action="store_true") parser.add_argument('--once', action="store_true") + parser.add_argument('--print-status', action="store_true") args = parser.parse_args() api = arvados.api() @@ -266,14 +283,18 @@ def main(): args.exclude_list) return + if args.print_status: + print_status(api, args.uploader_project) + exit(0) + logging.info("Starting up, monitoring %s for uploads" % (args.uploader_project)) while True: move_fastq_to_fasta_results(api, args.fastq_project, args.uploader_project) - new_collections = api.collections().list(filters=[['owner_uuid', '=', args.uploader_project]]).execute() + new_collections = arvados.util.list_all(api.collections().list, filters=[["owner_uuid", "=", args.uploader_project]]) at_least_one_new_valid_seq = False - for c in new_collections["items"]: + for c in new_collections: at_least_one_new_valid_seq = validate_upload(api, c, args.validated_project, args.fastq_project, diff --git a/bh20sequploader/main.py b/bh20sequploader/main.py index cdc4c3f..8555e2b 100644 --- a/bh20sequploader/main.py +++ b/bh20sequploader/main.py @@ -26,11 +26,9 @@ def qc_stuff(metadata, sequence_p1, sequence_p2, do_qc=True): try: log.debug("Checking metadata" if do_qc else "Skipping metadata check") if do_qc and not qc_metadata(metadata.name): - log.warning("Failed metadata qc") - exit(1) - except ValueError as e: + raise Exception("Failed metadata qc") + except Exception as e: log.debug(e) - log.debug("Failed metadata qc") print(e) exit(1) @@ -42,9 +40,8 @@ def qc_stuff(metadata, sequence_p1, sequence_p2, do_qc=True): target.append(qc_fasta(sequence_p2)) target[0] = ("reads_1."+target[0][0][6:], target[0][1]) target[1] = ("reads_2."+target[1][0][6:], target[0][1]) - except ValueError as e: + except Exception as e: log.debug(e) - log.debug("Failed FASTA qc") print(e) exit(1) diff --git a/bh20sequploader/qc_metadata.py b/bh20sequploader/qc_metadata.py index 9122ace..2b57991 100644 --- a/bh20sequploader/qc_metadata.py +++ b/bh20sequploader/qc_metadata.py @@ -21,20 +21,13 @@ def qc_metadata(metadatafile): shex = pkg_resources.resource_stream(__name__, "bh20seq-shex.rdf").read().decode("utf-8") if not 
isinstance(avsc_names, schema_salad.avro.schema.Names): - print(avsc_names) - return False + raise Exception(avsc_names) - try: - doc, metadata = schema_salad.schema.load_and_validate(document_loader, avsc_names, metadatafile, True) - g = schema_salad.jsonld_context.makerdf("workflow", doc, document_loader.ctx) - rslt, reason = evaluate(g, shex, doc["id"], "https://raw.githubusercontent.com/arvados/bh20-seq-resource/master/bh20sequploader/bh20seq-shex.rdf#submissionShape") + doc, metadata = schema_salad.schema.load_and_validate(document_loader, avsc_names, metadatafile, True) + g = schema_salad.jsonld_context.makerdf("workflow", doc, document_loader.ctx) + rslt, reason = evaluate(g, shex, doc["id"], "https://raw.githubusercontent.com/arvados/bh20-seq-resource/master/bh20sequploader/bh20seq-shex.rdf#submissionShape") - if not rslt: - log.debug(reason) - print(reason) + if not rslt: + raise Exception(reason) - return rslt - except Exception as e: - traceback.print_exc() - log.warn(e) - return False + return True -- cgit v1.2.3 From 7cf561c1b92a44d488d36dd3d883750b261c6550 Mon Sep 17 00:00:00 2001 From: Peter Amstutz Date: Tue, 7 Jul 2020 15:52:46 -0400 Subject: Use minimap2 instead of clustalw for fasta QC Arvados-DCO-1.1-Signed-off-by: Peter Amstutz --- bh20sequploader/main.py | 9 +++++-- bh20sequploader/qc_fasta.py | 61 +++++++++++++++++++-------------------------- 2 files changed, 33 insertions(+), 37 deletions(-) (limited to 'bh20sequploader') diff --git a/bh20sequploader/main.py b/bh20sequploader/main.py index 8555e2b..fd0278d 100644 --- a/bh20sequploader/main.py +++ b/bh20sequploader/main.py @@ -23,14 +23,16 @@ ARVADOS_API_TOKEN='2fbebpmbo3rw3x05ueu2i6nx70zhrsb1p22ycu3ry34m4x4462' UPLOAD_PROJECT='lugli-j7d0g-n5clictpuvwk8aa' def qc_stuff(metadata, sequence_p1, sequence_p2, do_qc=True): + failed = False try: log.debug("Checking metadata" if do_qc else "Skipping metadata check") if do_qc and not qc_metadata(metadata.name): - raise Exception("Failed metadata qc") + log.warning("Failed metadata qc") + failed = True except Exception as e: log.debug(e) print(e) - exit(1) + failed = True target = [] try: @@ -43,6 +45,9 @@ def qc_stuff(metadata, sequence_p1, sequence_p2, do_qc=True): except Exception as e: log.debug(e) print(e) + failed = True + + if failed: exit(1) return target diff --git a/bh20sequploader/qc_fasta.py b/bh20sequploader/qc_fasta.py index 8c6ebd3..b08333e 100644 --- a/bh20sequploader/qc_fasta.py +++ b/bh20sequploader/qc_fasta.py @@ -58,42 +58,33 @@ def qc_fasta(arg_sequence, check_with_clustalw=True): return ("sequence.fasta"+gz, seqlabel) with tempfile.NamedTemporaryFile() as tmp1: - refstring = pkg_resources.resource_string(__name__, "SARS-CoV-2-reference.fasta") - tmp1.write(refstring) - tmp1.write(submitlabel.encode("utf8")) - tmp1.write(("".join(submitseq)).encode("utf8")) - tmp1.flush() - subbp = 0 - refbp = 0 - similarity = 0 - try: - cmd = ["clustalw", "-infile="+tmp1.name, - "-quicktree", "-iteration=none", "-type=DNA"] - print("QC checking similarity to reference") - print(" ".join(cmd)) - result = subprocess.run(cmd, stdout=subprocess.PIPE) - res = result.stdout.decode("utf-8") - g1 = re.search(r"^Sequence 1: [^ ]+ +(\d+) bp$", res, flags=re.MULTILINE) - refbp = float(g1.group(1)) - g2 = re.search(r"^Sequence 2: [^ ]+ +(\d+) bp$", res, flags=re.MULTILINE) - subbp = float(g2.group(1)) - g3 = re.search(r"^Sequences \(1:2\) Aligned\. 
Score: (\d+(\.\d+)?)$", res, flags=re.MULTILINE) - similarity = float(g3.group(1)) + with tempfile.NamedTemporaryFile() as tmp2: + refstring = pkg_resources.resource_string(__name__, "SARS-CoV-2-reference.fasta") + tmp1.write(refstring) + tmp1.flush() + tmp2.write(submitlabel.encode("utf8")) + tmp2.write(("".join(submitseq)).encode("utf8")) + tmp2.flush() + subbp = 0 + refbp = 0 + similarity = 0 + try: + cmd = ["minimap2", "-c", tmp1.name, tmp2.name] + print("QC checking similarity to reference") + print(" ".join(cmd)) + result = subprocess.run(cmd, stdout=subprocess.PIPE) + res = result.stdout.decode("utf-8") + mm = res.split("\t") + print(mm) + # divide Number of matching bases in the mapping / Target sequence length + similarity = (float(mm[9]) / float(mm[6])) * 100.0 + except Exception as e: + logging.warn("QC against reference sequence using 'minimap2': %s", e) - print(g1.group(0)) - print(g2.group(0)) - print(g3.group(0)) - except Exception as e: - logging.warn("QC against reference sequence using 'clustalw': %s", e) - - if refbp and (subbp/refbp) < .7: - raise ValueError("QC fail: submit sequence length is shorter than 70% reference") - if refbp and (subbp/refbp) > 1.3: - raise ValueError("QC fail: submit sequence length is greater than 130% reference") - if similarity and similarity < 70.0: - raise ValueError("QC fail: submit similarity is less than 70%") - if refbp == 0 or similarity == 0: - raise ValueError("QC fail") + if similarity and similarity < 70.0: + raise ValueError("QC fail: alignment to reference was less than 70%% (was %2.2f%%)" % (similarity)) + if similarity == 0: + raise ValueError("QC fail") return ("sequence.fasta"+gz, seqlabel) elif seq_type == "text/fastq": -- cgit v1.2.3 From 54e0ce85ef3abafb8870ef5a9244a03f16b5e3f5 Mon Sep 17 00:00:00 2001 From: Peter Amstutz Date: Tue, 7 Jul 2020 20:41:11 +0000 Subject: minimap2 returns nothing when there is no alignment. 
--- bh20sequploader/qc_fasta.py | 15 +++++++++------ scripts/docker/Dockerfile | 2 +- 2 files changed, 10 insertions(+), 7 deletions(-) (limited to 'bh20sequploader') diff --git a/bh20sequploader/qc_fasta.py b/bh20sequploader/qc_fasta.py index b08333e..37eb4e8 100644 --- a/bh20sequploader/qc_fasta.py +++ b/bh20sequploader/qc_fasta.py @@ -70,16 +70,19 @@ def qc_fasta(arg_sequence, check_with_clustalw=True): similarity = 0 try: cmd = ["minimap2", "-c", tmp1.name, tmp2.name] - print("QC checking similarity to reference") - print(" ".join(cmd)) + logging.info("QC checking similarity to reference") + logging.info(" ".join(cmd)) result = subprocess.run(cmd, stdout=subprocess.PIPE) + result.check_returncode() res = result.stdout.decode("utf-8") mm = res.split("\t") - print(mm) - # divide Number of matching bases in the mapping / Target sequence length - similarity = (float(mm[9]) / float(mm[6])) * 100.0 + if len(mm) >= 10: + # divide Number of matching bases in the mapping / Target sequence length + similarity = (float(mm[9]) / float(mm[6])) * 100.0 + else: + similarity = 0 except Exception as e: - logging.warn("QC against reference sequence using 'minimap2': %s", e) + logging.warn("QC against reference sequence using 'minimap2': %s", e, exc_info=e) if similarity and similarity < 70.0: raise ValueError("QC fail: alignment to reference was less than 70%% (was %2.2f%%)" % (similarity)) diff --git a/scripts/docker/Dockerfile b/scripts/docker/Dockerfile index 8811927..02829d4 100644 --- a/scripts/docker/Dockerfile +++ b/scripts/docker/Dockerfile @@ -3,7 +3,7 @@ FROM debian:10 RUN apt-get update && \ apt-get -yq --no-install-recommends -o Acquire::Retries=6 install \ python3 python3-pip python3-setuptools python3-dev python-pycurl \ - clustalw python3-biopython libcurl4-openssl-dev build-essential \ + minimap2 python3-biopython libcurl4-openssl-dev build-essential \ libssl-dev libmagic-dev python3-magic && \ apt-get clean -- cgit v1.2.3
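
A note on the earlier "Fix qc_fasta bug closing stream" commit: qc_fasta wraps the caller's binary stream in a text wrapper while sniffing the format, and discarding that wrapper would normally close the underlying stream too. The standalone sketch below is not code from the repository; it only illustrates why seek(0) followed by detach() hands the still-open, rewound buffer back to the caller. The io.BytesIO payload is a made-up example.

    import io

    raw = io.BytesIO(b">seq1\nACGT\n")   # stands in for the uploaded file object
    text = io.TextIOWrapper(raw)

    print(text.readline())               # read as text, e.g. for format sniffing
    text.seek(0)                         # rewind so the caller sees the whole stream
    buf = text.detach()                  # hand the binary buffer back; the wrapper
                                         # would otherwise close it when discarded
    print(buf.read())                    # b'>seq1\nACGT\n' -- still open, rewound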
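
The final two commits replace clustalw with minimap2 and compute similarity as matching bases over reference length taken from the PAF output (columns 10 and 7), treating empty output as "no alignment". Below is a minimal sketch of that check, assuming minimap2 is installed and on PATH; the function name and file paths are placeholders for illustration, not identifiers used by the uploader.

    import subprocess

    def similarity_to_reference(ref_fasta, query_fasta):
        # minimap2 -c prints one PAF record per alignment; no output means no alignment.
        result = subprocess.run(["minimap2", "-c", ref_fasta, query_fasta],
                                stdout=subprocess.PIPE)
        result.check_returncode()
        lines = result.stdout.decode("utf-8").splitlines()
        if not lines:
            return 0.0
        fields = lines[0].split("\t")
        if len(fields) < 10:
            return 0.0
        # PAF column 10 = number of matching bases, column 7 = target (reference) length.
        return (float(fields[9]) / float(fields[6])) * 100.0

    # qc_fasta rejects submissions below 70% similarity, e.g.:
    # if similarity_to_reference("SARS-CoV-2-reference.fasta", "sequence.fasta") < 70.0:
    #     raise ValueError("QC fail: alignment to reference was less than 70%")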