Merged
Changes from all commits
File renamed without changes.
File renamed without changes.
File renamed without changes.
297 changes: 297 additions & 0 deletions archived_scripts/snic_util.py

Large diffs are not rendered by default.

86 changes: 86 additions & 0 deletions archived_scripts/statusdb_snicuser_checker.py
@@ -0,0 +1,86 @@
#!/usr/bin/env python
"""
Get open projects from statusdb, check whether the project PIs have a SNIC account, and
write the result back into statusdb.
"""
import argparse
import yaml
from couchdb import Server
import requests
from requests.auth import HTTPBasicAuth
import json
import sys

def update_statusdb(config, dryrun=True):
if not config['statusdb']:
print('Statusdb credentials not found')
sys.exit(1)
url_string = 'https://{}:{}@{}'.format(config['statusdb'].get('username'), config['statusdb'].get('password'),
config['statusdb'].get('url'))
couch = Server(url=url_string)
    assert couch, 'Could not connect to {}'.format(config['statusdb'].get('url'))

proj_db = couch['projects']
open_projs = proj_db.view('project/summary',include_docs=True, descending=True)[['open','Z']:['open','']]

for project in open_projs:
doc = project.doc
update_doc = False
if not project.value.get('delivery_type') == 'GRUS':
continue
if project.value['details'].get('snic_checked'):
if not project.value['details']['snic_checked']['status']:
email = project.value['order_details']['fields'].get('project_pi_email')
check = snic_check(email, config['SNIC'])
if check:
doc['details']['snic_checked']['status'] = check
update_doc = True

else:
snic_checked = {}
if project.value.get('order_details'):
email = project.value['order_details']['fields'].get('project_pi_email')
if email:
snic_checked['status'] = snic_check(email, config['SNIC'])
#Add the new field to project details
doc['details']['snic_checked'] = snic_checked
update_doc = True
#write to projects doc
if update_doc:
if not dryrun:
proj_db.save(doc)
else:
print(doc['project_name'], doc['details']['snic_checked'])

def snic_check(email, config):
url = 'https://api.supr.naiss.se/api/person/email_present/?email={}'.format(email)
response = requests.get(url, auth=HTTPBasicAuth(config.get('username'), config.get('password')))
if not response.ok and response.reason == 'Unauthorized':
        print('ERROR: either the SNIC credentials are wrong, or the SNIC API is IP restricted and this script must be run from ngi-internal')
sys.exit(1)
return json.loads(response.content)['email_present']

if __name__ == '__main__':
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--statusdb_config', metavar='Path to statusdb config file', help='Path to yaml file with credentials for statusdb')
parser.add_argument('--snic_config', metavar='Path to snic config file', help='Path to yaml file with credentials for SNIC API')
parser.add_argument('--check_email', metavar='Option to run script to check emails',
help='Check an individual email directly in SNIC')
parser.add_argument('-d', '--dryrun',
action='store_true', dest='dryrun', default=False,
help='Use this to print out what would have been saved to statusdb')

args = parser.parse_args()
config = {}
with open(args.statusdb_config) as config_file:
config = yaml.load(config_file, Loader=yaml.SafeLoader)
with open(args.snic_config) as config_file:
config.update(yaml.load(config_file, Loader=yaml.SafeLoader))
if args.check_email:
if config.get('SNIC'):
result = snic_check(args.check_email, config['SNIC'])
print('The email "{}" has {} associated SNIC account.'.format(args.check_email, 'an' if result else 'NO'))
else:
print('SNIC credentials not found')
else:
update_statusdb(config, args.dryrun)
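
For reference, the two YAML files passed as --statusdb_config and --snic_config are merged into a single dict; the sketch below shows the shape the script appears to expect, with the keys inferred from update_statusdb and snic_check and all values as placeholders:

# Hypothetical merged configuration; all values are placeholders.
config = {
    'statusdb': {
        'username': 'statusdb-user',
        'password': 'statusdb-password',
        'url': 'statusdb.example.org',  # host only; the script prepends https:// and the credentials
    },
    'SNIC': {
        'username': 'supr-api-user',
        'password': 'supr-api-password',
    },
}

# Dry-run over all open GRUS projects, or check a single address against SUPR:
# update_statusdb(config, dryrun=True)
# snic_check('pi@example.org', config['SNIC'])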
14 changes: 10 additions & 4 deletions quota_log.py
@@ -6,7 +6,7 @@
import subprocess
from platform import node as host_name
from pprint import pprint
import couchdb
from ibmcloudant import CouchDbSessionAuthenticator, cloudant_v1



@@ -46,7 +46,13 @@
if not args.server:
pprint(project_list)
else:
couch = couchdb.Server(args.server)
db = couch[args.db]
user_pass = args.server.split("@")[0]
couch = cloudant_v1.CloudantV1(
authenticator=CouchDbSessionAuthenticator(
user_pass.split(":")[0],
user_pass.split(":")[1]
)
)
couch.set_service_url(f"https://{args.server.split('@')[1]}")
for fs_dict in project_list:
db.save(fs_dict)
couch.post_document(db=args.db, document=fs_dict).get_result()
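
The quota_log.py change follows the migration pattern used throughout this PR: the user:password@host server string is split apart, the credentials go into a CouchDbSessionAuthenticator, and db.save(doc) becomes post_document. A minimal standalone sketch of that pattern, with placeholder credentials, host and database name:

from ibmcloudant import CouchDbSessionAuthenticator, cloudant_v1

# Placeholder credentials and host; quota_log.py derives these from --server.
couch = cloudant_v1.CloudantV1(
    authenticator=CouchDbSessionAuthenticator("someuser", "somepassword")
)
couch.set_service_url("https://statusdb.example.org")

# python-couchdb: db = couch["some_db"]; db.save(doc)
# ibmcloudant: post_document() returns a DetailedResponse; get_result() unwraps the JSON body.
doc = {"filesystem": "/proj/xyz", "usage_gb": 1024}
couch.post_document(db="some_db", document=doc).get_result()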
33 changes: 22 additions & 11 deletions repooler.py
@@ -9,7 +9,7 @@
import os
import yaml

import couchdb
from ibmcloudant import CouchDbSessionAuthenticator, cloudant_v1
import numpy
import click

@@ -44,10 +44,17 @@ def connection():
pw = config.get("password")
print("Database server used: https://{}".format(config.get("url")))
print("LIMS server used: " + BASEURI)
couch = couchdb.Server('https://{}:{}@{}'.format(user, pw, config.get("url")))
couch = cloudant_v1.CloudantV1(
authenticator=CouchDbSessionAuthenticator(
user,
pw
)
)
couch.set_service_url(f"https://{config.get('url')}")

try:
print("Connecting to statusDB...")
couch.version()
couch.get_server_information().get_result()
except:
sys.exit("Can't connect to couch server. Username & Password is incorrect, or network is inaccessible.")
print("Connected!")
@@ -56,17 +63,21 @@ def connection():

def proj_struct(couch, project, target_clusters):
""""Fetches the structure of a project"""
db = couch['x_flowcells']
view = db.view('names/project_ids_list')
db = "x_flowcells"
view = couch.post_view(
db=db,
ddoc='names',
view='project_ids_list'
).get_result()
fc_track = defaultdict(set)

    #Adds flowcells to ALL projects. Due to interactions it's easier to just get FCs for ALL projects
for rec in view.rows:
fc = ''.join(rec.key)
for rec in view["rows"]:
fc = ''.join(rec["key"])
fc = unicodedata.normalize('NFKD', fc).encode('ascii','ignore')
id = ''.join(rec.id)
id = ''.join(rec["id"])
id = unicodedata.normalize('NFKD', id).encode('ascii','ignore')
for projs in rec.value:
for projs in rec["value"]:
projs = ''.join(projs)
projs = unicodedata.normalize('NFKD', projs).encode('ascii','ignore')
if fc_track[projs] == set([]):
@@ -78,10 +89,10 @@ def proj_struct(couch, project, target_clusters):
raise Exception('Error: Project not logged in x_flowcells database!')
for fc, id in fc_track[project].items():
try:
entry = db[id]['illumina']
            entry_illumina = couch.get_document(db=db, doc_id=id).get_result()['illumina']
except KeyError:
print("Error: Illumina table for db entry" , id, "doesn't exist!")
entry = db[id]['illumina']['Demultiplex_Stats']['Barcode_lane_statistics']
entry = entry_illumina['Demultiplex_Stats']['Barcode_lane_statistics']
for index in range(0, len(entry)):
lane = entry[index]['Lane']
sample = entry[index]['Sample']
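
The view access in repooler.py changes in the same way: python-couchdb's db.view('names/project_ids_list') with attribute-style rows becomes post_view returning a plain dict. A short sketch of the new access pattern, assuming couch is an authenticated cloudant_v1.CloudantV1 client as in the sketch above:

# Assumes `couch` is an authenticated cloudant_v1.CloudantV1 client.
result = couch.post_view(
    db="x_flowcells",
    ddoc="names",
    view="project_ids_list",
).get_result()

# Rows come back as plain dicts, so key/id/value are indexed instead of read as attributes.
for rec in result["rows"]:
    flowcell = "".join(rec["key"])
    doc_id = rec["id"]
    project_ids = rec["value"]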
38 changes: 27 additions & 11 deletions upload_biomek_logs.py
@@ -5,14 +5,14 @@
import logging.handlers
import argparse
import os
import couchdb
from ibmcloudant import CouchDbSessionAuthenticator, cloudant_v1
import yaml
import re
import sys

error_line_regex = re.compile(r"^(?=\d{2}/\d{1,2}/\d{4} \d{2}:\d{2}:\d{2},\b)")

def create_doc_from_log_file(doc_option, handle, log_file_path, db=None):
def create_doc_from_log_file(doc_option, handle, couch, log_file_path, db=None):

error_lines = ""

@@ -36,7 +36,7 @@ def create_doc_from_log_file(doc_option, handle, log_file_path, db=None):
doc["run_finished"] = True
doc["errors"] = error_lines
elif doc_option == "update":
doc = db.get(handle)
doc = couch.get_document(db=db, doc_id=handle).get_result()
with open(os.path.join(log_file_path, doc["file_name"]), "r", encoding="utf-8-sig") as inp:
contents=inp.readlines()
for line in contents:
@@ -50,8 +50,14 @@

def setupServer(conf):
db_conf = conf['statusdb']
url="https://{0}:{1}@{2}".format(db_conf['username'], db_conf['password'], db_conf['url'])
return couchdb.Server(url)
    url = f"https://{db_conf['url']}"
couch = cloudant_v1.CloudantV1(
authenticator=CouchDbSessionAuthenticator(
db_conf['username'], db_conf['password']
)
)
couch.set_service_url(url)
return couch

def setupLog(name, logfile, log_level=logging.INFO, max_size=209715200, nb_files=5):
mainlog = logging.getLogger(name)
@@ -71,11 +77,18 @@ def main(args):

couch = setupServer(conf["couchdb_credentials"])
inst_id = conf["inst_id"]
biomek_logs_db = couch['biomek_logs']
db_view_run_finished = biomek_logs_db.view('names/run_finished')
biomek_logs_db = "biomek_logs"

db_view_run_finished = couch.post_view(
db=biomek_logs_db,
ddoc="names",
view="run_finished",
include_docs=False,
).get_result()["rows"]

run_finished_dict = {}
for row in db_view_run_finished:
run_finished_dict[(row.key[0], row.key[1])] = (row.value, row.id)
run_finished_dict[(row["key"][0], row["key"][1])] = (row["value"], row["id"])

log_files_list = os.listdir(args.log_file_path)
logs_to_create = []
@@ -90,12 +103,15 @@


for fname in logs_to_create:
save_docs.append(create_doc_from_log_file('create', fname, log_file_path=args.log_file_path))
save_docs.append(create_doc_from_log_file('create', fname, couch, log_file_path=args.log_file_path))

for doc_id in logs_to_update:
save_docs.append(create_doc_from_log_file('update', doc_id, log_file_path=args.log_file_path, db=biomek_logs_db))
save_docs.append(create_doc_from_log_file('update', doc_id, couch, log_file_path=args.log_file_path, db=biomek_logs_db))
try:
save_result = biomek_logs_db.update(save_docs)
couch.post_bulk_docs(
db=biomek_logs_db,
bulk_docs=cloudant_v1.BulkDocs(docs=save_docs, new_edits=True),
).get_result()
except Exception:
mainlog.error("Failed to upload to statusdb : {}".format(sys.exc_info()[0]))
else:
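
The bulk save at the end of upload_biomek_logs.py now goes through post_bulk_docs with a BulkDocs model instead of db.update(save_docs). A small sketch of that call, with placeholder documents and the same assumed client:

from ibmcloudant import cloudant_v1

# Assumes `couch` is an authenticated cloudant_v1.CloudantV1 client.
docs = [
    {"file_name": "biomek_log_1.csv", "run_finished": True, "errors": ""},   # placeholder docs
    {"file_name": "biomek_log_2.csv", "run_finished": False, "errors": ""},
]
couch.post_bulk_docs(
    db="biomek_logs",
    bulk_docs=cloudant_v1.BulkDocs(docs=docs, new_edits=True),
).get_result()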
20 changes: 13 additions & 7 deletions upload_robot_logs.py
@@ -10,16 +10,16 @@
import argparse
import sys
import datetime
import couchdb
from ibmcloudant import CouchDbSessionAuthenticator, cloudant_v1
import yaml
import socket

def save_to_statusdb(db, message, args):
def save_to_statusdb(couch, db, message, args):
data={'message':message}
data['timestamp']=datetime.datetime.now().isoformat()
data['instrument_name']=args.name

db.save(data)
couch.post_document(db=db, document=data).get_result()


def read_message(args):
@@ -36,8 +36,14 @@ def read_message(args):

def setupServer(conf):
db_conf = conf['statusdb']
url="https://{0}:{1}@{2}".format(db_conf['username'], db_conf['password'], db_conf['url'])
return couchdb.Server(url)
    url = f"https://{db_conf['url']}"
couch = cloudant_v1.CloudantV1(
authenticator=CouchDbSessionAuthenticator(
db_conf['username'], db_conf['password']
)
)
couch.set_service_url(url)
return couch

def setupLog(name, logfile, log_level=logging.INFO, max_size=209715200, nb_files=5):
mainlog = logging.getLogger(name)
@@ -56,12 +62,12 @@ def main(args):
conf=yaml.load(conf_file)

couch=setupServer(conf)
db=couch[conf['statusdb']['instrument_logs_db']]
db = conf['statusdb']['instrument_logs_db']

message=read_message(args)
mainlog.info("Read message : {}".format(message))
try:
save_to_statusdb(db, message, args)
save_to_statusdb(couch, db, message, args)
except Exception:
mainlog.error("Failed to upload to statusdb : {}".format(sys.exc_info()[0]))
else: