 bh20seqanalyzer/main.py        | 99
 bh20sequploader/main.py        |  9
 bh20sequploader/qc_metadata.py | 21
 3 files changed, 70 insertions(+), 59 deletions(-)
diff --git a/bh20seqanalyzer/main.py b/bh20seqanalyzer/main.py
index 7839d20..2fefa86 100644
--- a/bh20seqanalyzer/main.py
+++ b/bh20seqanalyzer/main.py
@@ -20,28 +20,36 @@ def validate_upload(api, collection, validated_project,
                     fastq_project, fastq_workflow_uuid):
     col = arvados.collection.Collection(collection["uuid"])
 
-    # validate the collection here.  Check metadata, etc.
-    valid = True
+    if collection["properties"].get("status") in ("validated", "rejected"):
+        return False
 
+    # validate the collection here.  Check metadata, etc.
     logging.info("Validating upload '%s' (%s)" % (collection["name"], collection["uuid"]))
 
-    if "metadata.yaml" not in col:
-        logging.warn("Upload '%s' missing metadata.yaml", collection["name"])
-        valid = False
-    else:
-        try:
-            metadata_content = ruamel.yaml.round_trip_load(col.open("metadata.yaml"))
-            metadata_content["id"] = "http://arvados.org/keep:%s/metadata.yaml" % collection["portable_data_hash"]
-            sample_id = metadata_content["sample"]["sample_id"]
-            add_lc_filename(metadata_content, metadata_content["id"])
-            valid = qc_metadata(metadata_content) and valid
-        except Exception as e:
-            logging.warn(e)
-            valid = False
-        if not valid:
-            logging.warn("Failed metadata qc")
-
-    if valid:
+    errors = []
+
+    dup = api.collections().list(filters=[["owner_uuid", "=", validated_project],
+                                          ["portable_data_hash", "=", col.portable_data_hash()]]).execute()
+    if dup["items"]:
+        # This exact collection has been uploaded before.
+        errors.append("Duplicate of %s" % ([d["uuid"] for d in dup["items"]]))
+
+    if not errors:
+        if "metadata.yaml" not in col:
+            errors.append("Missing metadata.yaml", collection["name"])
+        else:
+            try:
+                metadata_content = ruamel.yaml.round_trip_load(col.open("metadata.yaml"))
+                metadata_content["id"] = "http://arvados.org/keep:%s/metadata.yaml" % collection["portable_data_hash"]
+                sample_id = metadata_content["sample"]["sample_id"]
+                add_lc_filename(metadata_content, metadata_content["id"])
+                valid = qc_metadata(metadata_content)
+                if not valid:
+                    errors.append("Failed metadata qc")
+            except Exception as e:
+                errors.append(str(e))
+
+    if not errors:
         try:
             tgt = None
             paired = {"reads_1.fastq": "reads.fastq", "reads_1.fastq.gz": "reads.fastq.gz"}
@@ -51,36 +59,33 @@ def validate_upload(api, collection, validated_project,
                 with col.open(n, 'rb') as qf:
                     tgt = qc_fasta(qf)[0]
                     if tgt != n and tgt != paired.get(n):
-                        logging.info("Expected %s but magic says it should be %s", n, tgt)
-                        valid = False
+                        errors.append("Expected %s but magic says it should be %s", n, tgt)
                     elif tgt in ("reads.fastq", "reads.fastq.gz", "reads_1.fastq", "reads_1.fastq.gz"):
                         start_fastq_to_fasta(api, collection, fastq_project, fastq_workflow_uuid, n, sample_id)
                         return False
             if tgt is None:
-                valid = False
-                logging.warn("Upload '%s' does not contain sequence.fasta, reads.fastq or reads_1.fastq", collection["name"])
-        except ValueError as v:
-            valid = False
+                errors.append("Upload '%s' does not contain sequence.fasta, reads.fastq or reads_1.fastq", collection["name"])
+        except Exception as v:
+            errors.append(str(v))
 
-    dup = api.collections().list(filters=[["owner_uuid", "=", validated_project],
-                                          ["portable_data_hash", "=", col.portable_data_hash()]]).execute()
-    if dup["items"]:
-        # This exact collection has been uploaded before.
-        valid = False
-        logging.warn("Upload '%s' is duplicate" % collection["name"])
 
-    if valid:
+    if not errors:
         logging.info("Added '%s' to validated sequences" % collection["name"])
         # Move it to the "validated" project to be included in the next analysis
+        collection["properties"]["status"] = "validated"
         api.collections().update(uuid=collection["uuid"], body={
             "owner_uuid": validated_project,
             "name": "%s (%s)" % (collection["name"], time.asctime(time.gmtime()))}).execute()
+        return True
     else:
-        # It is invalid, delete it.
-        logging.warn("Suggest deleting '%s' (%s)" % (collection["name"], collection["uuid"]))
-        #api.collections().delete(uuid=collection["uuid"]).execute()
-
-    return valid
+        # It is invalid
+        logging.warn("'%s' (%s) has validation errors: %s" % (
+            collection["name"], collection["uuid"], "\n".join(errors)))
+        collection["properties"]["status"] = "rejected"
+        collection["properties"]["errors"] = errors
+        api.collections().update(uuid=collection["uuid"], body={"properties": collection["properties"]}).execute()
+        return False
 
 
 def run_workflow(api, parent_project, workflow_uuid, name, inputobj):
@@ -231,6 +235,18 @@ def upload_schema(api, workflow_def_project):
     return "keep:%s/schema.yml" % pdh
 
 
+def print_status(api, uploader_project):
+    pending = arvados.util.list_all(api.collections().list, filters=[["owner_uuid", "=", uploader_project]])
+    out = []
+    for p in pending:
+        prop = p["properties"]
+        if "status" not in prop:
+            prop["status"] = "pending"
+        prop["created_at"] = p["created_at"]
+        prop["uuid"] = p["uuid"]
+        out.append(prop)
+    print(json.dumps(out, indent=2))
+
 def main():
     parser = argparse.ArgumentParser(description='Analyze collections uploaded to a project')
     parser.add_argument('--uploader-project', type=str, default='lugli-j7d0g-n5clictpuvwk8aa', help='')
@@ -248,6 +264,7 @@ def main():
     parser.add_argument('--kickoff', action="store_true")
     parser.add_argument('--no-start-analysis', action="store_true")
     parser.add_argument('--once', action="store_true")
+    parser.add_argument('--print-status', action="store_true")
     args = parser.parse_args()
 
     api = arvados.api()
@@ -266,14 +283,18 @@ def main():
                                  args.exclude_list)
         return
 
+    if args.print_status:
+        print_status(api, args.uploader_project)
+        exit(0)
+
     logging.info("Starting up, monitoring %s for uploads" % (args.uploader_project))
 
     while True:
         move_fastq_to_fasta_results(api, args.fastq_project, args.uploader_project)
 
-        new_collections = api.collections().list(filters=[['owner_uuid', '=', args.uploader_project]]).execute()
+        new_collections = arvados.util.list_all(api.collections().list, filters=[["owner_uuid", "=", args.uploader_project]])
         at_least_one_new_valid_seq = False
-        for c in new_collections["items"]:
+        for c in new_collections:
             at_least_one_new_valid_seq = validate_upload(api, c,
                                                          args.validated_project,
                                                          args.fastq_project,
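
The net effect of the bh20seqanalyzer changes above: each upload is validated
at most once, and the outcome is recorded on the collection itself --
properties["status"] becomes "validated" or "rejected", and rejected uploads
keep their list of validation errors in properties["errors"]. That makes
failures queryable directly, independent of the new --print-status report.
A minimal sketch (not part of this commit), assuming a configured Arvados
client and the default uploader project UUID from main(); "properties.status"
uses Arvados' standard subproperty filter syntax:

    import arvados
    import arvados.util

    api = arvados.api()  # reads ARVADOS_API_HOST / ARVADOS_API_TOKEN
    uploader_project = "lugli-j7d0g-n5clictpuvwk8aa"

    # List rejected uploads and the errors recorded by validate_upload().
    rejected = arvados.util.list_all(
        api.collections().list,
        filters=[["owner_uuid", "=", uploader_project],
                 ["properties.status", "=", "rejected"]])
    for c in rejected:
        print(c["uuid"], c["properties"].get("errors"))
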
diff --git a/bh20sequploader/main.py b/bh20sequploader/main.py
index cdc4c3f..8555e2b 100644
--- a/bh20sequploader/main.py
+++ b/bh20sequploader/main.py
@@ -26,11 +26,9 @@ def qc_stuff(metadata, sequence_p1, sequence_p2, do_qc=True):
     try:
         log.debug("Checking metadata" if do_qc else "Skipping metadata check")
         if do_qc and not qc_metadata(metadata.name):
-            log.warning("Failed metadata qc")
-            exit(1)
-    except ValueError as e:
+            raise Exception("Failed metadata qc")
+    except Exception as e:
         log.debug(e)
-        log.debug("Failed metadata qc")
         print(e)
         exit(1)
 
@@ -42,9 +40,8 @@ def qc_stuff(metadata, sequence_p1, sequence_p2, do_qc=True):
             target.append(qc_fasta(sequence_p2))
             target[0] = ("reads_1."+target[0][0][6:], target[0][1])
             target[1] = ("reads_2."+target[1][0][6:], target[0][1])
-    except ValueError as e:
+    except Exception as e:
         log.debug(e)
-        log.debug("Failed FASTA qc")
         print(e)
         exit(1)
 
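
The uploader now funnels every QC failure through the same path: qc_metadata()
either succeeds or raises, the raise is caught in the except branch, and the
error is printed before exiting non-zero. The paired-read renaming above is
terse; a small illustration of what the slice does (the helper below is
hypothetical, for illustration only -- qc_fasta() is taken to return a tuple
whose first element is "reads.fastq" or "reads.fastq.gz"):

    def paired_names(name):
        # name[6:] strips the leading "reads." (6 characters), so the
        # original extension survives under the reads_1/reads_2 names.
        suffix = name[6:]
        return "reads_1." + suffix, "reads_2." + suffix

    print(paired_names("reads.fastq"))     # ('reads_1.fastq', 'reads_2.fastq')
    print(paired_names("reads.fastq.gz"))  # ('reads_1.fastq.gz', 'reads_2.fastq.gz')
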
diff --git a/bh20sequploader/qc_metadata.py b/bh20sequploader/qc_metadata.py
index 9122ace..2b57991 100644
--- a/bh20sequploader/qc_metadata.py
+++ b/bh20sequploader/qc_metadata.py
@@ -21,20 +21,13 @@ def qc_metadata(metadatafile):
     shex = pkg_resources.resource_stream(__name__, "bh20seq-shex.rdf").read().decode("utf-8")
 
     if not isinstance(avsc_names, schema_salad.avro.schema.Names):
-        print(avsc_names)
-        return False
+        raise Exception(avsc_names)
 
-    try:
-        doc, metadata = schema_salad.schema.load_and_validate(document_loader, avsc_names, metadatafile, True)
-        g = schema_salad.jsonld_context.makerdf("workflow", doc, document_loader.ctx)
-        rslt, reason = evaluate(g, shex, doc["id"], "https://raw.githubusercontent.com/arvados/bh20-seq-resource/master/bh20sequploader/bh20seq-shex.rdf#submissionShape")
+    doc, metadata = schema_salad.schema.load_and_validate(document_loader, avsc_names, metadatafile, True)
+    g = schema_salad.jsonld_context.makerdf("workflow", doc, document_loader.ctx)
+    rslt, reason = evaluate(g, shex, doc["id"], "https://raw.githubusercontent.com/arvados/bh20-seq-resource/master/bh20sequploader/bh20seq-shex.rdf#submissionShape")
 
-        if not rslt:
-            log.debug(reason)
-            print(reason)
+    if not rslt:
+        raise Exception(reason)
 
-        return rslt
-    except Exception as e:
-        traceback.print_exc()
-        log.warn(e)
-    return False
+    return True
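
After this change qc_metadata() no longer reports failure through its return
value: a bad embedded schema, a schema_salad validation error, or ShEx
non-conformance all surface as exceptions, and the function returns True only
when every check passes. A caller sketch under the new contract (the file path
is a placeholder):

    from bh20sequploader.qc_metadata import qc_metadata

    try:
        qc_metadata("metadata.yaml")
        print("metadata passed qc")
    except Exception as e:
        print("metadata failed qc: %s" % e)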