about summary refs log tree commit diff
path: root/bh20seqanalyzer
diff options
context:
space:
mode:
Diffstat (limited to 'bh20seqanalyzer')
-rw-r--r--  bh20seqanalyzer/main.py  31
1 file changed, 29 insertions, 2 deletions
diff --git a/bh20seqanalyzer/main.py b/bh20seqanalyzer/main.py
index dae8eca..78e32c9 100644
--- a/bh20seqanalyzer/main.py
+++ b/bh20seqanalyzer/main.py
@@ -6,6 +6,8 @@ import subprocess
import tempfile
import json
import logging
+import ruamel.yaml
+from bh20sequploader.qc_metadata import qc_metadata
logging.basicConfig(format="[%(asctime)s] %(levelname)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S",
level=logging.INFO)
@@ -20,9 +22,12 @@ def validate_upload(api, collection, validated_project):
if "sequence.fasta" not in col:
valid = False
logging.warn("Upload '%s' missing sequence.fasta", collection["name"])
- if "metadata.jsonld" not in col:
- logging.warn("Upload '%s' missing metadata.jsonld", collection["name"])
+ if "metadata.yaml" not in col:
+ logging.warn("Upload '%s' missing metadata.yaml", collection["name"])
valid = False
+ else:
+ metadata_content = ruamel.yaml.round_trip_load(col.open("metadata.yaml"))
+ valid = qc_metadata(metadata_content) and valid
dup = api.collections().list(filters=[["owner_uuid", "=", validated_project],
["portable_data_hash", "=", col.portable_data_hash()]]).execute()
@@ -79,12 +84,31 @@ def start_analysis(api,
logging.error(comp.stderr.decode('utf-8'))
+def copy_most_recent_result(api, analysis_project, latest_result_uuid):
+ most_recent_analysis = api.groups().list(filters=[['owner_uuid', '=', analysis_project]],
+ order="created_at desc", limit=1).execute()
+ for m in most_recent_analysis["items"]:
+ cr = api.container_requests().list(filters=[['owner_uuid', '=', m["uuid"]],
+ ["requesting_container_uuid", "=", None]]).execute()
+ if cr["items"] and cr["items"][0]["output_uuid"]:
+ wf = cr["items"][0]
+ src = api.collections().get(uuid=wf["output_uuid"]).execute()
+ dst = api.collections().get(uuid=latest_result_uuid).execute()
+ if src["portable_data_hash"] != dst["portable_data_hash"]:
+ logging.info("Copying latest result from '%s' to %s", m["name"], latest_result_uuid)
+ api.collections().update(uuid=latest_result_uuid,
+ body={"manifest_text": src["manifest_text"],
+ "description": "latest result from %s %s" % (m["name"], wf["uuid"])}).execute()
+ break
+
+
def main():
parser = argparse.ArgumentParser(description='Analyze collections uploaded to a project')
parser.add_argument('--uploader-project', type=str, default='lugli-j7d0g-n5clictpuvwk8aa', help='')
parser.add_argument('--analysis-project', type=str, default='lugli-j7d0g-y4k4uswcqi3ku56', help='')
parser.add_argument('--validated-project', type=str, default='lugli-j7d0g-5ct8p1i1wrgyjvp', help='')
parser.add_argument('--workflow-uuid', type=str, default='lugli-7fd4e-mqfu9y3ofnpnho1', help='')
+ parser.add_argument('--latest-result-uuid', type=str, default='lugli-4zz18-z513nlpqm03hpca', help='')
args = parser.parse_args()
api = arvados.api()
@@ -101,4 +125,7 @@ def main():
start_analysis(api, args.analysis_project,
args.workflow_uuid,
args.validated_project)
+
+ copy_most_recent_result(api, args.analysis_project, args.latest_result_uuid)
+
time.sleep(10)