author     Peter Amstutz  2020-07-08 17:14:46 -0400
committer  GitHub         2020-07-08 17:14:46 -0400
commit     6e0f9f18167377bac073d7715b89e7ddbf1fe72d (patch)
tree       1b72a737b50e60346aefaf009ac2488d45c8abe0 /bh20sequploader
parent     6fa25708b46a590be82a6b84266c0a3f25a0d890 (diff)
parent     e821857e7a9403739f321feb7418d33d6bd8b2c7 (diff)
Merge pull request #92 from arvados/upload-download-status
Split upload tab. Add upload status tab. Also a bunch of QC and uploader improvements.
Diffstat (limited to 'bh20sequploader')
-rw-r--r--  bh20sequploader/main.py        | 27
-rw-r--r--  bh20sequploader/qc_metadata.py | 15
2 files changed, 36 insertions(+), 6 deletions(-)
diff --git a/bh20sequploader/main.py b/bh20sequploader/main.py
index fd0278d..f744a8c 100644
--- a/bh20sequploader/main.py
+++ b/bh20sequploader/main.py
@@ -19,8 +19,10 @@ log = logging.getLogger(__name__ )
log.debug("Entering sequence uploader")
ARVADOS_API_HOST='lugli.arvadosapi.com'
-ARVADOS_API_TOKEN='2fbebpmbo3rw3x05ueu2i6nx70zhrsb1p22ycu3ry34m4x4462'
+UPLOADER_API_TOKEN='2fbebpmbo3rw3x05ueu2i6nx70zhrsb1p22ycu3ry34m4x4462'
+ANONYMOUS_API_TOKEN='5o42qdxpxp5cj15jqjf7vnxx5xduhm4ret703suuoa3ivfglfh'
UPLOAD_PROJECT='lugli-j7d0g-n5clictpuvwk8aa'
+VALIDATED_PROJECT='lugli-j7d0g-5ct8p1i1wrgyjvp'
def qc_stuff(metadata, sequence_p1, sequence_p2, do_qc=True):
failed = False
@@ -67,9 +69,14 @@ def main():
parser.add_argument('sequence_p2', type=argparse.FileType('rb'), default=None, nargs='?', help='sequence FASTQ pair')
parser.add_argument("--validate", action="store_true", help="Dry run, validate only")
parser.add_argument("--skip-qc", action="store_true", help="Skip local qc check")
+ parser.add_argument("--trusted", action="store_true", help="Trust local validation and add directly to validated project")
args = parser.parse_args()
- api = arvados.api(host=ARVADOS_API_HOST, token=ARVADOS_API_TOKEN, insecure=True)
+ if args.trusted:
+ # Use credentials from environment
+ api = arvados.api()
+ else:
+ api = arvados.api(host=ARVADOS_API_HOST, token=UPLOADER_API_TOKEN, insecure=True)
target = qc_stuff(args.metadata, args.sequence_p1, args.sequence_p2, not args.skip_qc)
seqlabel = target[0][1]
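For context, a minimal sketch of the credential selection this hunk introduces (the helper name is illustrative, not part of the commit): with `--trusted`, `arvados.api()` is called with no arguments, so the Arvados Python SDK resolves the host and token from the caller's environment (typically `ARVADOS_API_HOST` and `ARVADOS_API_TOKEN`); otherwise the uploader pins the public `lugli` instance with the token hard-coded in main.py.

```python
import arvados

ARVADOS_API_HOST = 'lugli.arvadosapi.com'
UPLOADER_API_TOKEN = '...'  # the uploader token defined in main.py

def make_api_client(trusted: bool):
    """Hypothetical helper mirroring the --trusted branch above."""
    if trusted:
        # Credentials are taken from the caller's environment, so only an
        # authenticated, trusted user can reach the validated project directly.
        return arvados.api()
    # Anonymous submitters always go through the pinned public instance
    # with the restricted uploader token.
    return arvados.api(host=ARVADOS_API_HOST, token=UPLOADER_API_TOKEN,
                       insecure=True)
```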
@@ -106,7 +113,21 @@ def main():
"upload_user": "%s@%s" % (username, socket.gethostname())
}
- col.save_new(owner_uuid=UPLOAD_PROJECT, name="%s uploaded by %s from %s" %
+ api2 = arvados.api(host=ARVADOS_API_HOST, token=ANONYMOUS_API_TOKEN, insecure=True)
+ dup = api2.collections().list(filters=[["owner_uuid", "in", [VALIDATED_PROJECT, UPLOAD_PROJECT]],
+ ["portable_data_hash", "=", col.portable_data_hash()]]).execute()
+ if dup["items"]:
+ # This exact collection has been uploaded before.
+ print("Duplicate of %s" % ([d["uuid"] for d in dup["items"]]))
+ exit(1)
+
+ if args.trusted:
+ properties["status"] = "validated"
+ owner_uuid = VALIDATED_PROJECT
+ else:
+ owner_uuid = UPLOAD_PROJECT
+
+ col.save_new(owner_uuid=owner_uuid, name="%s uploaded by %s from %s" %
(seqlabel, properties['upload_user'], properties['upload_ip']),
properties=properties, ensure_unique_name=True)
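The duplicate check added above relies on Arvados content addressing: a collection's portable_data_hash is derived from its contents, so listing collections in the upload and validated projects with a matching hash finds earlier submissions of exactly the same files. The diff runs this lookup through a separate anonymous-token client (api2), presumably so both projects are readable regardless of which credentials the upload itself used. A standalone sketch of the same query, assuming an already-configured API client and the project UUIDs from main.py (the function name is illustrative):

```python
def find_duplicates(api, portable_data_hash,
                    projects=(VALIDATED_PROJECT, UPLOAD_PROJECT)):
    """Return UUIDs of existing collections with identical content."""
    # portable_data_hash is computed from the collection contents, so an
    # exact match means the same sequence/metadata files were uploaded before.
    dup = api.collections().list(filters=[
        ["owner_uuid", "in", list(projects)],
        ["portable_data_hash", "=", portable_data_hash],
    ]).execute()
    return [d["uuid"] for d in dup["items"]]
```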
diff --git a/bh20sequploader/qc_metadata.py b/bh20sequploader/qc_metadata.py
index 2b57991..27657b1 100644
--- a/bh20sequploader/qc_metadata.py
+++ b/bh20sequploader/qc_metadata.py
@@ -8,15 +8,20 @@ import traceback
from rdflib import Graph, Namespace
from pyshex.evaluate import evaluate
+metadata_schema = None
def qc_metadata(metadatafile):
+ global metadata_schema
log = logging.getLogger(__name__ )
- schema_resource = pkg_resources.resource_stream(__name__, "bh20seq-schema.yml")
- cache = {"https://raw.githubusercontent.com/arvados/bh20-seq-resource/master/bh20sequploader/bh20seq-schema.yml": schema_resource.read().decode("utf-8")}
+ if metadata_schema is None:
+ schema_resource = pkg_resources.resource_stream(__name__, "bh20seq-schema.yml")
+ cache = {"https://raw.githubusercontent.com/arvados/bh20-seq-resource/master/bh20sequploader/bh20seq-schema.yml": schema_resource.read().decode("utf-8")}
+ metadata_schema = schema_salad.schema.load_schema("https://raw.githubusercontent.com/arvados/bh20-seq-resource/master/bh20sequploader/bh20seq-schema.yml", cache=cache)
+
(document_loader,
avsc_names,
schema_metadata,
- metaschema_loader) = schema_salad.schema.load_schema("https://raw.githubusercontent.com/arvados/bh20-seq-resource/master/bh20sequploader/bh20seq-schema.yml", cache=cache)
+ metaschema_loader) = metadata_schema
shex = pkg_resources.resource_stream(__name__, "bh20seq-shex.rdf").read().decode("utf-8")
@@ -27,6 +32,10 @@ def qc_metadata(metadatafile):
g = schema_salad.jsonld_context.makerdf("workflow", doc, document_loader.ctx)
rslt, reason = evaluate(g, shex, doc["id"], "https://raw.githubusercontent.com/arvados/bh20-seq-resource/master/bh20sequploader/bh20seq-shex.rdf#submissionShape")
+ # As part of QC make sure serialization works too, this will raise
+ # an exception if there are invalid URIs.
+ g.serialize(format="ntriples")
+
if not rslt:
raise Exception(reason)
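The qc_metadata.py change memoizes the schema-salad schema load in a module-level global, so repeated calls (for example, one per submission handled by the web uploader) parse bh20seq-schema.yml only once instead of reloading it on every validation. The same pattern in isolation, as a rough sketch with a hypothetical helper name:

```python
import pkg_resources
import schema_salad.schema

SCHEMA_URL = ("https://raw.githubusercontent.com/arvados/bh20-seq-resource/"
              "master/bh20sequploader/bh20seq-schema.yml")
_schema_cache = None

def load_schema_once():
    """Load the bh20seq schema on first use; later calls reuse the parsed result."""
    global _schema_cache
    if _schema_cache is None:
        schema_text = pkg_resources.resource_stream(
            __name__, "bh20seq-schema.yml").read().decode("utf-8")
        # Pre-seed the loader cache with the packaged copy so the schema URL
        # is never fetched over the network.
        _schema_cache = schema_salad.schema.load_schema(
            SCHEMA_URL, cache={SCHEMA_URL: schema_text})
    # Tuple of (document_loader, avsc_names, schema_metadata, metaschema_loader)
    return _schema_cache
```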