From 3fcd4824339ecc722d4c778534f5ff12365075d1 Mon Sep 17 00:00:00 2001
From: Anandashankar Anil
Date: Tue, 18 Nov 2025 11:08:44 +0100
Subject: [PATCH 1/2] Update to use cloudant; move unused scripts to
 archived_scripts

---
 .../couchdb_replication.py                    |   0
 .../project_status.py                         |   0
 .../project_status_extended.py                |   0
 .../sample_information_validator.py           |   0
 archived_scripts/snic_util.py                 | 297 ++++++++++++++++++
 archived_scripts/statusdb_snicuser_checker.py |  86 +++++
 quota_log.py                                  |  14 +-
 repooler.py                                   |  32 +-
 upload_biomek_logs.py                         |  37 ++-
 upload_robot_logs.py                          |  20 +-
 10 files changed, 455 insertions(+), 31 deletions(-)
 rename couchdb_replication.py => archived_scripts/couchdb_replication.py (100%)
 rename project_status.py => archived_scripts/project_status.py (100%)
 rename project_status_extended.py => archived_scripts/project_status_extended.py (100%)
 rename sample_information_validator.py => archived_scripts/sample_information_validator.py (100%)
 create mode 100644 archived_scripts/snic_util.py
 create mode 100644 archived_scripts/statusdb_snicuser_checker.py

diff --git a/couchdb_replication.py b/archived_scripts/couchdb_replication.py
similarity index 100%
rename from couchdb_replication.py
rename to archived_scripts/couchdb_replication.py
diff --git a/project_status.py b/archived_scripts/project_status.py
similarity index 100%
rename from project_status.py
rename to archived_scripts/project_status.py
diff --git a/project_status_extended.py b/archived_scripts/project_status_extended.py
similarity index 100%
rename from project_status_extended.py
rename to archived_scripts/project_status_extended.py
diff --git a/sample_information_validator.py b/archived_scripts/sample_information_validator.py
similarity index 100%
rename from sample_information_validator.py
rename to archived_scripts/sample_information_validator.py
diff --git a/archived_scripts/snic_util.py b/archived_scripts/snic_util.py
new file mode 100644
index 0000000..c6de39d
--- /dev/null
+++ b/archived_scripts/snic_util.py
@@ -0,0 +1,297 @@
+#!/usr/bin/env python
+
+description="A python wrapper for SNIC API utilities. It requires a config file with SNIC credentials."
+
+import argparse
+import datetime
+import logging
+import json
+import os
+import requests
+import sys
+import yaml
+from collections import OrderedDict
+from taca.utils.statusdb import StatusdbSession as sdb
+from ngi_pipeline.database.classes import CharonSession, CharonError
+
+# set logger object
+logging.basicConfig(level=logging.INFO, format='%(asctime)s %(levelname)s: %(message)s', datefmt="%Y-%m-%d %H:%M:%S")
+logger = logging.getLogger(__name__)
+
+# Expected config format
+config_format = ("snic:\n"
+                 "\tsnic_api_url: \n"
+                 "\tsnic_api_user: \n"
+                 "\tsnic_api_password: \n")
+
+def to_bool(s):
+    try:
+        return {"yes":True, "no":False}[s]
+    except KeyError:
+        raise argparse.ArgumentTypeError('Only "yes/no" is allowed')
+
+def proceed_or_not(question):
+    sys.stdout.write("{}".format(question))
+    while True:
+        if sys.version_info[0] == 3:
+            choice = input().lower()
+        elif sys.version_info[0] == 2:
+            choice = raw_input().lower()
+        if choice in ['yes','y']:
+            return True
+        elif choice in ['no','n']:
+            return False
+        else:
+            sys.stdout.write("Please respond with 'yes/y' or 'no/n'")
+
+
+class snic_util(object):
+    """ A class with SNIC utilities """
+    def __init__(self, config=None, params=None):
+        """ Instantiate the object with config properties """
+        try:
+            self.api_url = config['snic_api_url'].rstrip('/')
+            self.api_user = config['snic_api_user']
+            self.api_pass = config['snic_api_password']
+            self.api_cred = (self.api_user, self.api_pass)
+        except KeyError as e:
+            print("Config is missing key {}".format(e))
+            raise e
+        # if any params given set them as attributes
+        if params:
+            for _key, _val in params.items():
+                setattr(self, _key, _val)
+
+    def create_grus_project(self, proj_data={}):
+        """Create a GRUS delivery project with given info and return info of created project in JSON"""
+        pdata = proj_data or getattr(self, 'proj_data', {})
+        create_proj_url = "{}/ngi_delivery/project/create/".format(self.api_url)
+        response = requests.post(create_proj_url, data=json.dumps(pdata), auth=self.api_cred)
+        self._assert_response(response)
+        return response.json()
+
+    def update_grus_project(self, prj_snic_id=None, updata={}):
+        """Update a GRUS delivery project with given info and return info of updated project in JSON"""
+        psnic = prj_snic_id or getattr(self, 'prj_snic_id', None)
+        udata = updata or getattr(self, 'updata', {})
+        update_proj_url = "{}/ngi_delivery/project/{}/update/".format(self.api_url, psnic)
+        response = requests.post(update_proj_url, data=json.dumps(udata), auth=self.api_cred)
+        self._assert_response(response)
+        return response.json()
+
+    def get_user_info(self, user_email=None):
+        """Search for user in SNIC and return a list with matching hits. Each hit is in JSON format"""
+        uemail = user_email or getattr(self, 'user_email')
+        search_info = {"search_url": "{}/person/search/".format(self.api_url),
+                       "search_params": {"email_i": uemail}}
+        user_hits = self._search_snic(**search_info)
+        if len(user_hits) == 0:
+            logger.info("No user found with email {}".format(uemail))
+        return user_hits
+
+    def get_project_info(self, grus_project=None):
+        """Search for delivery project in SNIC and return a list with matching hits. Each hit is in JSON format"""
+        gproject = grus_project or getattr(self, 'grus_project')
+        search_info = {"search_url": "{}/project/search/".format(self.api_url),
+                       "search_params": {"name": gproject}}
+        project_hits = self._search_snic(**search_info)
+        if len(project_hits) == 0:
+            logger.info("No project was found with name {}".format(gproject))
+        return project_hits
+
+    def _search_snic(self, search_url, search_params):
+        response = requests.get(search_url, params=search_params, auth=self.api_cred)
+        self._assert_response(response)
+        return response.json()["matches"]
+
+    def _assert_response(self, response):
+        assert response.status_code == 200, "Failed to connect to {} via SNIC API".format(response.url)
+
+
+class _snic_wrapper(snic_util):
+    """A wrapper class that makes use of the snic_util class and helps out with relevant calls"""
+    def __init__(self, config=None, params=None, execute_mode=False):
+        super(_snic_wrapper, self).__init__(config, params)
+        if execute_mode:
+            getattr(self, "_{}".format(self.mode))()
+
+    def _create_project(self):
+        try:
+            pi_snic_id = self.get_user_info(user_email=self.pi_email)[0]['id']
+            logger.info("For PI email '{}' found SNIC ID '{}'".format(self.pi_email, pi_snic_id))
+        except IndexError as e:
+            logger.error("Could not find PI email '{}' in SNIC, so cannot create a delivery project with that PI".format(self.pi_email))
+            raise SystemExit
+        supr_date_format = '%Y-%m-%d'
+        today = datetime.date.today()
+        endday = today + datetime.timedelta(days=self.days)
+        mem_snic_ids = []
+        if self.members:
+            for mem in self.members:
+                try:
+                    mem_snic_id = self.get_user_info(user_email=mem)[0]['id']
+                    mem_snic_ids.append(mem_snic_id)
+                    logger.info("For email '{}' found SNIC ID '{}'".format(mem, mem_snic_id))
+                except IndexError as e:
+                    logger.error("Could not find email '{}' in SNIC, so cannot add it to the delivery project".format(mem))
+        prj_data = {'ngi_project_name': self.project,
+                    'title': self.title or "DELIVERY_{}_{}".format(self.project, today.strftime(supr_date_format)),
+                    'pi_id': pi_snic_id,
+                    'start_date': today.strftime(supr_date_format),
+                    'end_date': endday.strftime(supr_date_format),
+                    'ngi_ready': False,
+                    'ngi_sensitive_data': self.sensitive,
+                    'member_ids': mem_snic_ids}
+        question = ("\nA GRUS delivery project will be created with the following information, check and confirm\n\n{}\n\n"
+                    "NOTE: Sensitivity for the project is by default set to 'True'; it can be changed by passing '--sensitive no'. "
+                    "The parameters '--title / --members / --days' can be used to control the defaults, check the help\n\n"
+                    "Proceed with the project creation (yes/no) ? ".format(json.dumps(prj_data, indent=4)))
+        if proceed_or_not(question):
+            logger.info("Creating GRUS delivery project")
+            grus_proj_details = self.create_grus_project(prj_data)
+            logger.info("Created GRUS delivery project with id '{}'".format(grus_proj_details["name"]))
+            # Post to statusdb and Charon
+            if self.update_databases:
+                dbSess = DbConnections()
+                dbSess.add_delivery_proj_in_charon(grus_proj_details["name"], self.project)
+                dbSess.add_delivery_proj_in_statusdb(grus_proj_details["name"], self.project)
+        else:
+            logger.warning("Project will not be created. EXITING")
EXITING") + + def _extend_project(self): + ukey = "end_date" + endday = datetime.date.today() + datetime.timedelta(days=self.days) + uval = endday.strftime('%Y-%m-%d') + self._execute_project_update(ukey=ukey, uval=uval) + + def _change_pi(self): + ukey = "pi_id" + try: + pi_snic_id = self.get_user_info(user_email=self.pi_email)[0]['id'] + logger.info("For PI email '{}' found SNIC ID '{}'".format(self.pi_email, pi_snic_id)) + except IndexError as e: + logger.error("Could not find PI email '{}' in SNIC. So can not new PI for {}".format(self.pi_email, self.grus_project)) + raise SystemExit + uval = pi_snic_id + self._execute_project_update(ukey=ukey, uval=uval) + + def _change_sensitive(self): + ukey = "ngi_sensitive_data" + uval = self.sensitive + self._execute_project_update(ukey=ukey, uval=uval) + + def _project_info(self): + interested_keys = ["name", "id", "title", "ngi_project_name", "pi", "members", "ngi_sensitive_data", "start_date", "end_date"] + self._execute_search(exec_func=self.get_project_info, filter_keys=interested_keys) + + def _user_info(self): + interested_keys = ["first_name", "last_name", "id", "email", "department", "organization"] + self._execute_search(exec_func=self.get_user_info, filter_keys=interested_keys) + + def _execute_project_update(self, ukey, uval): + try: + prj_info = self.get_project_info(grus_project=self.grus_project)[0] + prj_snic_id = prj_info['id'] + except IndexError as e: + logger.error("Project with name '{}' does not exist in SNIC".format(self.grus_project)) + raise SystemExit + oval = prj_info.get('pi', {}).get('id') if ukey == 'pi_id' else prj_info.get(ukey) + question = "\nProject {}: old value for key '{}' is '{}'. Replace it with '{}' (yes/no) ? ".format(self.grus_project, ukey, oval, uval) + if proceed_or_not(question): + logger.info("Updating project {}".format(self.grus_project)) + updated_info = self.update_grus_project(prj_snic_id=prj_snic_id, updata={ukey:uval}) + logger.info("Updated project {}".format(self.grus_project)) + else: + logger.warning("Project '{}' will not be updated. EXITING".format(self.grus_project)) + + def _execute_search(self, exec_func, filter_keys=[], all_info=False): + all_info = all_info or getattr(self, "all_info", False) + search_hits = exec_func() + for ind, inf in enumerate(search_hits, 1): + oinf = inf if all_info else OrderedDict((k, inf.get(k)) for k in filter_keys) + print("Hit {}:\n{}".format(ind, json.dumps(oinf, indent=4))) + +class DbConnections(): + def __init__(self): + with open(os.getenv('STATUS_DB_CONFIG'), 'r') as db_cred_file: + db_conf = yaml.load(db_cred_file, Loader=yaml.SafeLoader)['statusdb'] + self.statusdbSess = sdb(db_conf, db="projects") + self.CharonSess = CharonSession() + + def add_delivery_proj_in_charon(self, delivery_proj, projectid): + '''Updates delivery_projects in Charon at project level + ''' + try: + #fetch the project + project_charon = self.CharonSess.project_get(projectid) + delivery_projects = project_charon['delivery_projects'] + if delivery_proj not in delivery_projects: + delivery_projects.append(delivery_proj) + self.CharonSess.project_update(projectid, delivery_projects=delivery_projects) + logger.info('Charon delivery_projects for project {} updated with value {}'.format(projectid, delivery_proj)) + else: + logger.warn('Charon delivery_projects for project {} not updated with value {} because the value was already present'.format(projectid, delivery_proj)) + except Exception as e: + logger.error('Failed to update delivery_projects in charon for {}. 
+            logger.exception(e)
+
+    def add_delivery_proj_in_statusdb(self, delivery_proj, projectid):
+        '''Updates delivery_projects in StatusDB at project level
+        '''
+        project_page = self.statusdbSess.get_project(projectid)
+        dprojs = project_page.get('delivery_projects', [])
+        dprojs.append(delivery_proj)
+        project_page['delivery_projects'] = dprojs
+        try:
+            self.statusdbSess.save_db_doc(project_page)
+            logger.info('Delivery_projects for project {} updated with value {} in statusdb'.format(projectid, delivery_proj))
+        except Exception as e:
+            logger.error('Failed to update delivery_projects in statusdb for {}. Error says: {}'.format(projectid, e))
+            logger.exception(e)
+
+if __name__ == "__main__":
+    # add main parser object with global parameters
+    parser = argparse.ArgumentParser(prog=__file__, description="SNIC utility tools")
+    parser.add_argument("-c", "--config", type=argparse.FileType('r'), metavar="",
+                        default=os.getenv('SNIC_API_STOCKHOLM'), help="Config with SNIC credentials")
+    subparser = parser.add_subparsers(title="subcommands", dest="mode", metavar="MODE", help="Available SNIC utility modes")
+    # add sub command for creating GRUS project
+    subparser_create_project = subparser.add_parser("create_project", help="Create a GRUS delivery project with given information")
+    subparser_create_project.add_argument("-p", "--project", required=True, type=str, help="NGI project name/id")
+    subparser_create_project.add_argument("-e", "--pi-email", required=True, type=str, help="PI email address")
+    subparser_create_project.add_argument("-s", "--sensitive", required=True, type=to_bool,
+                                          help="Choose if the project is sensitive or not (only 'yes/no' is allowed)")
+    subparser_create_project.add_argument("-d", "--days", type=int, default=90, help="Number of days the created GRUS delivery project should be active")
+    subparser_create_project.add_argument("-t", "--title", type=str, help="Custom title for GRUS delivery project")
+    subparser_create_project.add_argument("-m", "--members", type=str, action="append", help="Members to be added to the GRUS delivery project")
+    subparser_create_project.add_argument("--update-databases", action="store_true", help="Use to store the generated delivery project in Charon and statusdb")
+    # add sub command for extending a project
+    subparser_extend_project = subparser.add_parser("extend_project", help="Extend the end date of a GRUS project by the given number of days")
+    subparser_extend_project.add_argument("-g", "--grus-project", required=True, type=str, help="Grus project id, format should be 'deliveryNNNNN'")
+    subparser_extend_project.add_argument("-d", "--days", required=True, type=int, help="Number of days to extend a GRUS delivery project")
+    # add sub command to search a project
+    subparser_project_info = subparser.add_parser("project_info", help="Get information for specified GRUS project")
+    subparser_project_info.add_argument("-g", "--grus-project", required=True, type=str, help="Grus project id, format should be 'deliveryNNNNN'")
+    subparser_project_info.add_argument("-a", "--all-info", action="store_true", help="Display all information without default filtering")
+    # add sub command to search a user
+    subparser_user_info = subparser.add_parser("user_info", help="Get SNIC information for specified user")
+    subparser_user_info.add_argument("-u", "--user-email", required=True, type=str, help="User email address to fetch their SNIC details")
+    subparser_user_info.add_argument("-a", "--all-info", action="store_true", help="Display all information without default filtering")
+    # add sub command to change a PI for a project
+    subparser_change_pi = subparser.add_parser("change_pi", help="Change PI of mentioned GRUS project to given PI")
+    subparser_change_pi.add_argument("-g", "--grus-project", required=True, type=str, help="Grus project id, format should be 'deliveryNNNNN'")
+    subparser_change_pi.add_argument("-e", "--pi-email", required=True, type=str, help="Email address of user to set as new PI")
+    # add sub command to change sensitivity for a project
+    subparser_change_sensitive = subparser.add_parser("change_sensitive", help="Change sensitivity of GRUS project")
+    subparser_change_sensitive.add_argument("-g", "--grus-project", required=True, type=str, help="Grus project id, format should be 'deliveryNNNNN'")
+    subparser_change_sensitive.add_argument("-s", "--sensitive", required=True, type=to_bool,
+                                            help="Choose if the project is sensitive or not (only 'yes/no' is allowed)")
+
+    params = vars(parser.parse_args())
+    # try loading config file
+    try:
+        snic_config = yaml.load(params["config"], Loader=yaml.SafeLoader)["snic"]
+    except Exception:
+        logger.error("Error loading config file, make sure config is in following format\n\n{}".format(config_format))
+        raise
+    _snic_wrapper(config=snic_config, params=params, execute_mode=True)
diff --git a/archived_scripts/statusdb_snicuser_checker.py b/archived_scripts/statusdb_snicuser_checker.py
new file mode 100644
index 0000000..ba36b8b
--- /dev/null
+++ b/archived_scripts/statusdb_snicuser_checker.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+"""
+Get open projects from statusdb, check if the users have a SNIC account, and
+write the result back into statusdb.
+"""
+import argparse
+import yaml
+from couchdb import Server
+import requests
+from requests.auth import HTTPBasicAuth
+import json
+import sys
+
+def update_statusdb(config, dryrun=True):
+    if not config['statusdb']:
+        print('Statusdb credentials not found')
+        sys.exit(1)
+    url_string = 'https://{}:{}@{}'.format(config['statusdb'].get('username'), config['statusdb'].get('password'),
+                                           config['statusdb'].get('url'))
+    couch = Server(url=url_string)
+    assert couch, 'Could not connect to {}'.format(config['statusdb'].get('url'))
+
+    proj_db = couch['projects']
+    open_projs = proj_db.view('project/summary', include_docs=True, descending=True)[['open','Z']:['open','']]
+
+    for project in open_projs:
+        doc = project.doc
+        update_doc = False
+        if not project.value.get('delivery_type') == 'GRUS':
+            continue
+        if project.value['details'].get('snic_checked'):
+            if not project.value['details']['snic_checked']['status']:
+                email = project.value['order_details']['fields'].get('project_pi_email')
+                check = snic_check(email, config['SNIC'])
+                if check:
+                    doc['details']['snic_checked']['status'] = check
+                    update_doc = True
+
+        else:
+            snic_checked = {}
+            if project.value.get('order_details'):
+                email = project.value['order_details']['fields'].get('project_pi_email')
+                if email:
+                    snic_checked['status'] = snic_check(email, config['SNIC'])
+                    # Add the new field to project details
+                    doc['details']['snic_checked'] = snic_checked
+                    update_doc = True
+        # write to projects doc
+        if update_doc:
+            if not dryrun:
+                proj_db.save(doc)
+            else:
+                print(doc['project_name'], doc['details']['snic_checked'])
+
+def snic_check(email, config):
+    url = 'https://api.supr.naiss.se/api/person/email_present/?email={}'.format(email)
+    response = requests.get(url, auth=HTTPBasicAuth(config.get('username'), config.get('password')))
+    if not response.ok and response.reason == 'Unauthorized':
+        print('ERROR: SNIC API is IP restricted and this script can only be run from ngi-internal OR credentials are wrong')
+        sys.exit(1)
+    return json.loads(response.content)['email_present']
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument('--statusdb_config', metavar='Path to statusdb config file', help='Path to yaml file with credentials for statusdb')
+    parser.add_argument('--snic_config', metavar='Path to snic config file', help='Path to yaml file with credentials for SNIC API')
+    parser.add_argument('--check_email', metavar='Option to run script to check emails',
+                        help='Check an individual email directly in SNIC')
+    parser.add_argument('-d', '--dryrun',
+                        action='store_true', dest='dryrun', default=False,
+                        help='Use this to print out what would have been saved to statusdb')
+
+    args = parser.parse_args()
+    config = {}
+    with open(args.statusdb_config) as config_file:
+        config = yaml.load(config_file, Loader=yaml.SafeLoader)
+    with open(args.snic_config) as config_file:
+        config.update(yaml.load(config_file, Loader=yaml.SafeLoader))
+    if args.check_email:
+        if config.get('SNIC'):
+            result = snic_check(args.check_email, config['SNIC'])
+            print('The email "{}" has {} associated SNIC account.'.format(args.check_email, 'an' if result else 'NO'))
+        else:
+            print('SNIC credentials not found')
+    else:
+        update_statusdb(config, args.dryrun)
diff --git a/quota_log.py b/quota_log.py
index c470ecc..4b8b417 100755
--- a/quota_log.py
+++ b/quota_log.py
@@ -6,7 +6,7 @@ import subprocess
 from platform import node as host_name
 from pprint import pprint
 
-import couchdb
+from ibmcloudant import CouchDbSessionAuthenticator, cloudant_v1
 
@@ -46,7 +46,13 @@
 if not args.server:
     pprint(project_list)
 else:
-    couch = couchdb.Server(args.server)
-    db = couch[args.db]
+    user_pass = args.server.split("@")[0]
+    couch = cloudant_v1.CloudantV1(
+        authenticator=CouchDbSessionAuthenticator(
+            user_pass.split(":")[0],
+            user_pass.split(":")[1]
+        )
+    )
+    couch.set_service_url(f"https://{args.server.split('@')[1]}")
     for fs_dict in project_list:
-        db.save(fs_dict)
+        couch.post_document(db=args.db, document=fs_dict).get_result()
diff --git a/repooler.py b/repooler.py
index 34303b5..17a6419 100755
--- a/repooler.py
+++ b/repooler.py
@@ -9,7 +9,7 @@ import os
 
 import yaml
-import couchdb
+from ibmcloudant import CouchDbSessionAuthenticator, cloudant_v1
 import numpy
 import click
 
@@ -44,10 +44,17 @@ def connection():
     pw = config.get("password")
     print("Database server used: https://{}".format(config.get("url")))
     print("LIMS server used: " + BASEURI)
-    couch = couchdb.Server('https://{}:{}@{}'.format(user, pw, config.get("url")))
+    couch = cloudant_v1.CloudantV1(
+        authenticator=CouchDbSessionAuthenticator(
+            user,
+            pw
+        )
+    )
+    couch.set_service_url(f"https://{config.get('url')}")
+
     try:
         print("Connecting to statusDB...")
-        couch.version()
+        couch.get_server_information().get_result()
     except:
         sys.exit("Can't connect to couch server. Username & Password is incorrect, or network is inaccessible.")
     print("Connected!")
@@ -56,17 +63,22 @@ def connection():
 
 def proj_struct(couch, project, target_clusters):
     """"Fetches the structure of a project"""
-    db = couch['x_flowcells']
+    db ="x_flowcells"
     view = db.view('names/project_ids_list')
+    view = couch.post_view(
+        db=db,
+        ddoc='names',
+        view='project_ids_list'
+    ).get_result()
     fc_track = defaultdict(set) #Adds flowcells to ALL projects. Due to interactions its easier to just get FCs for ALL projects
-    for rec in view.rows:
-        fc = ''.join(rec.key)
+    for rec in view["rows"]:
+        fc = ''.join(rec["key"])
         fc = unicodedata.normalize('NFKD', fc).encode('ascii','ignore')
-        id = ''.join(rec.id)
+        id = ''.join(rec["id"])
         id = unicodedata.normalize('NFKD', id).encode('ascii','ignore')
-        for projs in rec.value:
+        for projs in rec["value"]:
             projs = ''.join(projs)
             projs = unicodedata.normalize('NFKD', projs).encode('ascii','ignore')
             if fc_track[projs] == set([]):
@@ -78,10 +90,10 @@
         raise Exception('Error: Project not logged in x_flowcells database!')
     for fc, id in fc_track[project].items():
         try:
-            entry = db[id]['illumina']
+            entry_illumina = couch.get_document(db=db, doc_id=id).get_result()['illumina']
         except KeyError:
             print("Error: Illumina table for db entry" , id, "doesn't exist!")
-        entry = db[id]['illumina']['Demultiplex_Stats']['Barcode_lane_statistics']
+        entry = entry_illumina['Demultiplex_Stats']['Barcode_lane_statistics']
         for index in range(0, len(entry)):
             lane = entry[index]['Lane']
             sample = entry[index]['Sample']
diff --git a/upload_biomek_logs.py b/upload_biomek_logs.py
index 6b41158..bd34fcd 100755
--- a/upload_biomek_logs.py
+++ b/upload_biomek_logs.py
@@ -5,14 +5,14 @@ import logging.handlers
 import argparse
 import os
-import couchdb
+from ibmcloudant import CouchDbSessionAuthenticator, cloudant_v1
 import yaml
 import re
 import sys
 
 error_line_regex = re.compile(r"^(?=\d{2}/\d{1,2}/\d{4} \d{2}:\d{2}:\d{2},\b)")
 
-def create_doc_from_log_file(doc_option, handle, log_file_path, db=None):
+def create_doc_from_log_file(doc_option, handle, couch, log_file_path, db=None):
 
     error_lines = ""
 
@@ -37,6 +37,7 @@ def create_doc_from_log_file(doc_option, handle, log_file_path, db=None):
         doc["errors"] = error_lines
     elif doc_option == "update":
         doc = db.get(handle)
+        doc = couch.get_document(db=db, doc_id=handle).get_result()
         with open(os.path.join(log_file_path, doc["file_name"]), "r", encoding="utf-8-sig") as inp:
             contents=inp.readlines()
             for line in contents:
@@ -50,8 +51,14 @@
 
 def setupServer(conf):
     db_conf = conf['statusdb']
-    url="https://{0}:{1}@{2}".format(db_conf['username'], db_conf['password'], db_conf['url'])
-    return couchdb.Server(url)
+    url=f"https://{db_conf['url']}"
+    couch = cloudant_v1.CloudantV1(
+        authenticator=CouchDbSessionAuthenticator(
+            db_conf['username'], db_conf['password']
+        )
+    )
+    couch.set_service_url(url)
+    return couch
 
 def setupLog(name, logfile, log_level=logging.INFO, max_size=209715200, nb_files=5):
     mainlog = logging.getLogger(name)
@@ -71,11 +78,18 @@ def main(args):
     couch = setupServer(conf["couchdb_credentials"])
     inst_id = conf["inst_id"]
 
-    biomek_logs_db = couch['biomek_logs']
-    db_view_run_finished = biomek_logs_db.view('names/run_finished')
+    biomek_logs_db = "biomek_logs"
+
+    db_view_run_finished = couch.post_view(
+        db=biomek_logs_db,
+        ddoc="names",
+        view="run_finished",
+        include_docs=False,
+    ).get_result()["rows"]
+
     run_finished_dict = {}
     for row in db_view_run_finished:
-        run_finished_dict[(row.key[0], row.key[1])] = (row.value, row.id)
+        run_finished_dict[(row["key"][0], row["key"][1])] = (row["value"], row["id"])
 
     log_files_list = os.listdir(args.log_file_path)
     logs_to_create = []
@@ -90,12 +104,15 @@
 
     for fname in logs_to_create:
-        save_docs.append(create_doc_from_log_file('create', fname, log_file_path=args.log_file_path))
+        save_docs.append(create_doc_from_log_file('create', fname, couch, log_file_path=args.log_file_path))
     for doc_id in logs_to_update:
-        save_docs.append(create_doc_from_log_file('update', doc_id, log_file_path=args.log_file_path, db=biomek_logs_db))
+        save_docs.append(create_doc_from_log_file('update', doc_id, couch, log_file_path=args.log_file_path, db=biomek_logs_db))
 
     try:
-        save_result = biomek_logs_db.update(save_docs)
+        couch.post_bulk_docs(
+            db=biomek_logs_db,
+            bulk_docs=cloudant_v1.BulkDocs(docs=save_docs, new_edits=True),
+        ).get_result()
     except Exception:
         mainlog.error("Failed to upload to statusdb : {}".format(sys.exc_info()[0]))
     else:
diff --git a/upload_robot_logs.py b/upload_robot_logs.py
index e7dd38f..f314cb5 100755
--- a/upload_robot_logs.py
+++ b/upload_robot_logs.py
@@ -10,16 +10,16 @@ import argparse
 import sys
 import datetime
-import couchdb
+from ibmcloudant import CouchDbSessionAuthenticator, cloudant_v1
 import yaml
 import socket
 
-def save_to_statusdb(db, message, args):
+def save_to_statusdb(couch, db, message, args):
     data={'message':message}
     data['timestamp']=datetime.datetime.now().isoformat()
     data['instrument_name']=args.name
-    db.save(data)
+    couch.post_document(db=db, document=data).get_result()
 
 def read_message(args):
@@ -36,8 +36,14 @@
 
 def setupServer(conf):
     db_conf = conf['statusdb']
-    url="https://{0}:{1}@{2}".format(db_conf['username'], db_conf['password'], db_conf['url'])
-    return couchdb.Server(url)
+    url=f"https://{db_conf['url']}"
+    couch = cloudant_v1.CloudantV1(
+        authenticator=CouchDbSessionAuthenticator(
+            db_conf['username'], db_conf['password']
+        )
+    )
+    couch.set_service_url(url)
+    return couch
 
 def setupLog(name, logfile, log_level=logging.INFO, max_size=209715200, nb_files=5):
     mainlog = logging.getLogger(name)
@@ -56,12 +62,12 @@ def main(args):
         conf=yaml.load(conf_file)
 
     couch=setupServer(conf)
-    db=couch[conf['statusdb']['instrument_logs_db']]
+    db = conf['statusdb']['instrument_logs_db']
     message=read_message(args)
     mainlog.info("Read message : {}".format(message))
 
     try:
-        save_to_statusdb(db, message, args)
+        save_to_statusdb(couch, db, message, args)
     except Exception:
         mainlog.error("Failed to upload to statusdb : {}".format(sys.exc_info()[0]))
     else:

From 6b89fad07217cc8cebb5d8bdc0b2a845bc2bc21b Mon Sep 17 00:00:00 2001
From: Anandashankar Anil
Date: Tue, 18 Nov 2025 14:10:06 +0100
Subject: [PATCH 2/2] Fix bugs

---
 repooler.py           | 3 +--
 upload_biomek_logs.py | 1 -
 2 files changed, 1 insertion(+), 3 deletions(-)

diff --git a/repooler.py b/repooler.py
index 17a6419..f6e6fc2 100755
--- a/repooler.py
+++ b/repooler.py
@@ -63,8 +63,7 @@ def connection():
 
 def proj_struct(couch, project, target_clusters):
     """"Fetches the structure of a project"""
-    db ="x_flowcells"
-    view = db.view('names/project_ids_list')
+    db = "x_flowcells"
     view = couch.post_view(
         db=db,
         ddoc='names',
diff --git a/upload_biomek_logs.py b/upload_biomek_logs.py
index bd34fcd..09e8dfe 100755
--- a/upload_biomek_logs.py
+++ b/upload_biomek_logs.py
@@ -36,7 +36,6 @@ def create_doc_from_log_file(doc_option, handle, couch, log_file_path, db=None):
         doc["run_finished"] = True
         doc["errors"] = error_lines
     elif doc_option == "update":
-        doc = db.get(handle)
         doc = couch.get_document(db=db, doc_id=handle).get_result()
         with open(os.path.join(log_file_path, doc["file_name"]), "r", encoding="utf-8-sig") as inp:
             contents=inp.readlines()
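
For reviewers unfamiliar with the ibm-cloudant SDK, the read-side pattern shared by the scripts above boils down to the sketch below: a session-authenticated client whose service URL is set separately, responses unwrapped with .get_result(), and view rows accessed as dicts instead of attributes. The URL, database, design document, view, and document id are placeholders, not values from this repo.

    # Minimal read-side sketch of the couchdb -> ibmcloudant migration (placeholder names)
    from ibmcloudant import CouchDbSessionAuthenticator, cloudant_v1

    couch = cloudant_v1.CloudantV1(
        authenticator=CouchDbSessionAuthenticator("user", "password")
    )
    couch.set_service_url("https://couchdb.example.com")

    # old: couch.version()
    couch.get_server_information().get_result()

    # old: db = couch["some_db"]; view = db.view("names/some_view")
    view = couch.post_view(db="some_db", ddoc="names", view="some_view").get_result()
    for row in view["rows"]:
        print(row["key"], row["value"], row["id"])  # old: row.key, row.value, row.id

    # old: doc = db["some_doc_id"]
    doc = couch.get_document(db="some_db", doc_id="some_doc_id").get_result()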
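
The write side follows the same shape; again a hedged sketch with placeholder names, where post_document replaces db.save(doc) and post_bulk_docs with a BulkDocs model replaces couchdb's db.update(list_of_docs):

    # Minimal write-side sketch (placeholder names)
    from ibmcloudant import CouchDbSessionAuthenticator, cloudant_v1

    couch = cloudant_v1.CloudantV1(
        authenticator=CouchDbSessionAuthenticator("user", "password")
    )
    couch.set_service_url("https://couchdb.example.com")

    # old: db.save(data)
    couch.post_document(db="some_db", document={"message": "hello"}).get_result()

    # old: db.update(save_docs)
    save_docs = [{"message": "a"}, {"message": "b"}]
    couch.post_bulk_docs(
        db="some_db",
        bulk_docs=cloudant_v1.BulkDocs(docs=save_docs, new_edits=True),
    ).get_result()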