author    Pjotr Prins  2021-01-06 02:33:35 -0600
committer Pjotr Prins  2021-01-06 02:34:14 -0600
commit    c31835f787f3ae36e26bad0a1803f8557f8084e7 (patch)
tree      cdf74371fd5866d4a1a6ac94a06d2ac6ed026045
parent    911ba372cfc4b35c5b52d18a573a636ea78d16d7 (diff)
Pubseq fetch: sometimes a request times out, so retry at intervals.
-rwxr-xr-x  workflows/tools/pubseq-fetch-data.py | 33
1 file changed, 19 insertions(+), 14 deletions(-)
diff --git a/workflows/tools/pubseq-fetch-data.py b/workflows/tools/pubseq-fetch-data.py
index 2119fdf..ef4edde 100755
--- a/workflows/tools/pubseq-fetch-data.py
+++ b/workflows/tools/pubseq-fetch-data.py
@@ -5,6 +5,7 @@ import json
 import os
 import requests
 import sys
+import time
 
 parser = argparse.ArgumentParser(description="""
@@ -33,18 +34,22 @@ for id in ids:
     print(id)
     jsonfn = dir+"/"+id+".json"
     if not os.path.exists(jsonfn):
+        count = 0
         r = requests.get(f"http://covid19.genenetwork.org/api/sample/{id}.json")
-        if r:
-            m_url = r.json()[0]['metadata']
-            mr = requests.get(m_url)
-            with open(dir+"/"+id+".json","w") as outf:
-                outf.write(mr.text)
-            if args.fasta:
-                fastafn = dir+"/"+id+".fa"
-                if os.path.exists(fastafn): continue
-                fa_url = r.json()[0]['fasta']
-                fr = requests.get(fa_url)
-                with open(fastafn,"w") as outf:
-                    outf.write(fr.text)
-        else:
-            raise Exception(f"Can not find record for {id}")
+        while not r:
+            count += 1
+            if count>10: raise Exception(f"Can not find record for {id}")
+            time.sleep(15)
+            r = requests.get(f"http://covid19.genenetwork.org/api/sample/{id}.json")
+        m_url = r.json()[0]['metadata']
+        mr = requests.get(m_url)
+        with open(dir+"/"+id+".json","w") as outf:
+            outf.write(mr.text)
+        if args.fasta:
+            fastafn = dir+"/"+id+".fa"
+            if os.path.exists(fastafn): continue
+            fa_url = r.json()[0]['fasta']
+            fr = requests.get(fa_url)
+            with open(fastafn,"w") as outf:
+                outf.write(fr.text)
+
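For reference, the retry pattern this patch introduces can be factored into a small standalone helper. The sketch below is illustrative only and is not part of the patch: the function name fetch_with_retry, the timeout argument, and the default retries/delay values are assumptions chosen to mirror the patch's behaviour, and unlike the patch it also catches request exceptions rather than only falsy responses.

import time
import requests

def fetch_with_retry(url, retries=10, delay=15):
    """Fetch a URL, retrying at fixed intervals when a request fails.

    Mirrors the loop in pubseq-fetch-data.py: a failed attempt triggers a
    sleep and another request, up to `retries` attempts before giving up.
    """
    for attempt in range(retries):
        try:
            r = requests.get(url, timeout=30)
            if r.ok:                        # same check as `while not r` in the patch
                return r
        except requests.RequestException:   # timeout or connection error (not handled by the patch)
            pass
        time.sleep(delay)
    raise Exception(f"Can not find record at {url}")

# Example use with a PubSeq sample id (generic placeholder):
# r = fetch_with_retry(f"http://covid19.genenetwork.org/api/sample/{id}.json")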