From cbb191cb23f4e40b1a1d3024401960939fda9bc3 Mon Sep 17 00:00:00 2001 From: Peter Amstutz Date: Wed, 8 Jul 2020 14:02:11 -0400 Subject: Get count from arvados Separate pending/rejected tables "Latest results" is latest successful run. Arvados-DCO-1.1-Signed-off-by: Peter Amstutz --- bh20seqanalyzer/main.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) (limited to 'bh20seqanalyzer') diff --git a/bh20seqanalyzer/main.py b/bh20seqanalyzer/main.py index 1746587..ce9a723 100644 --- a/bh20seqanalyzer/main.py +++ b/bh20seqanalyzer/main.py @@ -187,14 +187,15 @@ def get_workflow_output_from_project(api, uuid): cr = api.container_requests().list(filters=[['owner_uuid', '=', uuid], ["requesting_container_uuid", "=", None]]).execute() if cr["items"] and cr["items"][0]["output_uuid"]: - return cr["items"][0] - else: - return None + container = api.containers().get(uuid=cr["items"][0]["container_uuid"]).execute() + if container["state"] == "Complete" and container["exit_code"] == 0: + return cr["items"][0] + return None def copy_most_recent_result(api, analysis_project, latest_result_uuid): most_recent_analysis = api.groups().list(filters=[['owner_uuid', '=', analysis_project]], - order="created_at desc", limit=1).execute() + order="created_at desc").execute() for m in most_recent_analysis["items"]: wf = get_workflow_output_from_project(api, m["uuid"]) if wf: @@ -220,6 +221,7 @@ def move_fastq_to_fasta_results(api, analysis_project, uploader_project): body={"owner_uuid": uploader_project}).execute() p["properties"]["moved_output"] = True api.groups().update(uuid=p["uuid"], body={"properties": p["properties"]}).execute() + break def upload_schema(api, workflow_def_project): -- cgit v1.2.3 From cd25f56f4d9aea1d30cc17ee923a6d8dd6c63e5a Mon Sep 17 00:00:00 2001 From: Peter Amstutz Date: Wed, 8 Jul 2020 20:16:42 +0000 Subject: Add --revalidate and fix checking/updating properties. 
--- bh20seqanalyzer/main.py | 31 ++++++++++++++++++++----------- bh20simplewebuploader/main.py | 2 +- 2 files changed, 21 insertions(+), 12 deletions(-) (limited to 'bh20seqanalyzer') diff --git a/bh20seqanalyzer/main.py b/bh20seqanalyzer/main.py index ce9a723..0b52e6b 100644 --- a/bh20seqanalyzer/main.py +++ b/bh20seqanalyzer/main.py @@ -17,10 +17,11 @@ logging.basicConfig(format="[%(asctime)s] %(levelname)s %(message)s", datefmt="% logging.getLogger("googleapiclient.discovery").setLevel(logging.WARN) def validate_upload(api, collection, validated_project, - fastq_project, fastq_workflow_uuid): + fastq_project, fastq_workflow_uuid, + revalidate): col = arvados.collection.Collection(collection["uuid"]) - if collection.get("status") in ("validated", "rejected"): + if not revalidate and collection["properties"].get("status") in ("validated", "rejected"): return False # validate the collection here. Check metadata, etc. @@ -28,11 +29,12 @@ def validate_upload(api, collection, validated_project, errors = [] - dup = api.collections().list(filters=[["owner_uuid", "=", validated_project], - ["portable_data_hash", "=", col.portable_data_hash()]]).execute() - if dup["items"]: - # This exact collection has been uploaded before. - errors.append("Duplicate of %s" % ([d["uuid"] for d in dup["items"]])) + if collection["owner_uuid"] != validated_project: + dup = api.collections().list(filters=[["owner_uuid", "=", validated_project], + ["portable_data_hash", "=", col.portable_data_hash()]]).execute() + if dup["items"]: + # This exact collection has been uploaded before. 
+ errors.append("Duplicate of %s" % ([d["uuid"] for d in dup["items"]])) if not errors: if "metadata.yaml" not in col: @@ -70,12 +72,15 @@ def validate_upload(api, collection, validated_project, if not errors: - logging.info("Added '%s' to validated sequences" % collection["name"]) # Move it to the "validated" project to be included in the next analysis + if "errors" in collection["properties"]: + del collection["properties"]["errors"] collection["properties"]["status"] = "validated" api.collections().update(uuid=collection["uuid"], body={ "owner_uuid": validated_project, - "name": "%s (%s)" % (collection["name"], time.asctime(time.gmtime()))}).execute() + "name": "%s (%s)" % (collection["name"], time.asctime(time.gmtime())), + "properties": collection["properties"]}).execute() + logging.info("Added '%s' to validated sequences" % collection["name"]) return True else: # It is invalid @@ -155,7 +160,9 @@ def start_pangenome_analysis(api, validated_project, schema_ref, exclude_list): - validated = arvados.util.list_all(api.collections().list, filters=[["owner_uuid", "=", validated_project]]) + validated = arvados.util.list_all(api.collections().list, filters=[ + ["owner_uuid", "=", validated_project], + ["properties.status", "=", "validated"]]) inputobj = { "inputReads": [], "metadata": [], @@ -299,6 +306,7 @@ def main(): parser.add_argument('--no-start-analysis', action="store_true") parser.add_argument('--once', action="store_true") parser.add_argument('--print-status', type=str, default=None) + parser.add_argument('--revalidate', action="store_true", default=None) args = parser.parse_args() api = arvados.api() @@ -332,7 +340,8 @@ def main(): at_least_one_new_valid_seq = validate_upload(api, c, args.validated_project, args.fastq_project, - args.fastq_workflow_uuid) or at_least_one_new_valid_seq + args.fastq_workflow_uuid, + args.revalidate) or at_least_one_new_valid_seq if at_least_one_new_valid_seq and not args.no_start_analysis: start_pangenome_analysis(api, diff 
--git a/bh20simplewebuploader/main.py b/bh20simplewebuploader/main.py index d4adbda..9132453 100644 --- a/bh20simplewebuploader/main.py +++ b/bh20simplewebuploader/main.py @@ -422,7 +422,7 @@ def receive_files(): # Try and upload files to Arvados using the sequence uploader CLI - cmd = ['python3','bh20sequploader/main.py', fasta_dest, metadata_dest] + cmd = ['python3','bh20sequploader/main.py', metadata_dest, fasta_dest] print(" ".join(cmd),file=sys.stderr) result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) -- cgit v1.2.3