Commit da57157a authored by Rudel Fankep's avatar Rudel Fankep

program run

parent e6f821cc
Directory structure during prepare
dbname+_+tool. No subdirectories: the scripts would be more complicated, since every
operation would have to change into each directory and back out again.
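A minimal sketch of the flat layout described above, mirroring the get_raw_directory_name/get_tool_directory_name helpers added further down in this diff; the database name, version string, and base directory are illustrative only:

import os

def raw_directory_name(database, version):
    # raw downloads land in <database>_<version>_raw
    return database + '_' + version + '_raw'

def tool_directory_name(database, version, tool):
    # the tool-specific build lands in <database>_<version>_<tool>, a sibling of the raw directory
    return database + '_' + version + '_' + tool

base = os.path.abspath('local_databases')  # default base, as in get_local_databases_directory
print(os.path.join(base, raw_directory_name('swissprot', '2019_08')))            # .../local_databases/swissprot_2019_08_raw
print(os.path.join(base, tool_directory_name('swissprot', '2019_08', 'blast')))  # .../local_databases/swissprot_2019_08_blast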
#!/bin/bash
-TOOL_DIR=$1
+TOOL_DIRECTORY=$1
TOOL_FILE=$2
-cd $TOOL_DIR
+cd $TOOL_DIRECTORY
makeblastdb -dbtype prot -in $TOOL_FILE
cd -
\ No newline at end of file
-#!/bin/bash
-WEB_FILE=$1
-s3cmd del $WEB_FILE
\ No newline at end of file
+#!/bin/bash
+REMOTE_FILE=$1
+s3cmd del $REMOTE_FILE
\ No newline at end of file
#!/bin/bash
-TARGET_FILE=$1
-TARGET_DIR=$2
-FILE_NAME=$3
-cd $TARGET_DIR
-s3cmd get $TARGET_FILE
-tar -xzvf $FILE_NAME
-rm $FILE_NAME
+REMOTE_TARFILE=$1
+LOCAL_DATABASE_DIRECTORY=$2
+TARFILE=$3
+cd $LOCAL_DATABASE_DIRECTORY
+s3cmd get $REMOTE_TARFILE
+tar -xzvf $TARFILE
+rm $TARFILE
cd -
\ No newline at end of file
#!/bin/bash
-TOOL_DIR=$1
-TOOL_FILE=$2
-cd $TOOL_DIR
-ghostx db -i $TOOL_FILE -o ghostx_db
+TOOL_DIRECTORY=$1
+RAW_FILE=$2
+cd $TOOL_DIRECTORY
+ghostx db -i $RAW_FILE -o ghostx_db
cd -
\ No newline at end of file
#!/bin/bash
-TOOL_DIR=$1
-TOOL_FILE=$2
+TOOL_DIRECTORY=$1
+RAW_FILE=$2
-cd $TOOL_DIR
-hmmpress $TOOL_FILE
+cd $TOOL_DIRECTORY
+hmmpress $RAW_FILE
cd -
\ No newline at end of file
[]
\ No newline at end of file
@@ -4,20 +4,11 @@ import argparse
import subprocess
import os
import shutil
data = {'swissprot': {'prepare': './prepare_swissprot.sh',
'tool': {'blast': './blast_db.sh', 'ghostx': './ghostx_db.sh'},
'filename':'uniprot_sprot.fasta'
},
'pfam': {'prepare': './prepare_pfam.sh',
'tool': {'hmmer': './hmmer_pfam.sh'},
'filename':'Pfam-A.hmm'
},
'card': {'prepare':'./prepare_card.sh',
'tool':{'blast':'./blast_db.sh','ghostx':'./ghostx_db.sh'},
'filename':'protein_fasta_protein_homolog_model.fasta'
}
}
import re
import wget
from datetime import date
import tarfile
import json
def myparser():
@@ -27,55 +18,60 @@ def myparser():
prepare_parser = subparsers.add_parser('prepare', help='Database download from the website, extraction and transformation for another program')
prepare_parser.add_argument('database', help='Database that has to be prepared', type=str)
prepare_parser.add_argument('tool', help='Program/tool for the post analysis',type=str,)
prepare_parser.add_argument('-d', '--directory', dest='dir', help='set the local directory to save/to get the data.', type=str)
prepare_parser.add_argument('tool', help='Program/tool for the post analysis', type=str,)
prepare_parser.add_argument('-d', '--directory', dest='dir', help='change the local directory to save/to get the data.', type=str)
prepare_parser.add_argument('-v', '--version', help='version of the needed database. Defaults to the current release')
prepare_parser.set_defaults(func=prepare)
upload_parser = subparsers.add_parser('upload', help='Database copy from the local directory to the web storage')
upload_parser.add_argument('database', help='database to be transferred from the local directory', type=str)
upload_parser.add_argument('tool', type=str, help='database type that has to be transferred')
upload_parser.add_argument('-s', '--s3store', dest='store', help='set the "S3" storage of the data ')
upload_parser.add_argument('-d', '--directory', dest='dir', help='set the local directory to save/to get the data.', type=str)
upload_parser.add_argument('-r', '--raw', help='Raw data needs to be uploaded', action='store_true')
upload_parser.add_argument('tool', type=str, help='database type that has to be transferred. Use "raw" instead of a tool if the raw files should be processed')
upload_parser.add_argument('-s', '--s3store', dest='store', help='change the "S3" storage of the data ')
upload_parser.add_argument('-d', '--directory', dest='dir', help='change the local directory to save/to get the data.', type=str)
upload_parser.add_argument('-v', '--version', help='version of the needed database. Defaults to the current release')
upload_parser.set_defaults(func=upload)
download_parser = subparsers.add_parser('download', help='Database copy from the web storage to the working computer')
download_parser.add_argument('database', help='database to be transferred from "S3"', type=str)
download_parser.add_argument('tool', type=str, help='database type that has to be transferred')
download_parser.add_argument('-s', '--s3store', dest='store', help='set the "S3" storage of the data ')
download_parser.add_argument('-d', '--directory', dest='dir', help='set the local directory to save/to get the data.',type=str)
download_parser.add_argument('-r', '--raw', help='Raw data needs to be downloaded', action='store_true')
download_parser.add_argument('tool', type=str, help='database type that has to be transferred. Use "raw" instead of a tool if the raw files should be processed')
download_parser.add_argument('-s', '--s3store', dest='store', help='change the "S3" storage of the data ')
download_parser.add_argument('-d', '--directory', dest='dir', help='change the local directory to save/to get the data.', type=str)
download_parser.add_argument('-v', '--version', help='version of the needed database. Defaults to the current release')
download_parser.set_defaults(func=download)
delete_parser = subparsers.add_parser('delete', help='delete existing files from the local directory or from "S3"')
delete_parser.add_argument('database', help='database that has to be deleted')
delete_parser.add_argument('tool', type=str, help='database type that has to be deleted')
delete_parser.add_argument('place', choices=['local', 'S3'], help='define the place where the database has to be deleted')
delete_parser.add_argument('-d', '--directory', dest='dir', help='set the local directory to save/to get the data.',type=str)
delete_parser.add_argument('-s', '--s3store', dest='store', help='set the "S3" storage of the data ')
delete_parser.add_argument('-r', '--raw', help='Raw data needs to be deleted', action='store_true')
delete_parser.add_argument('tool', type=str, help='database type that has to be deleted. Use "raw" instead of a tool if the raw files should be processed')
delete_parser.add_argument('place', choices=['local', 's3'], help='define the place where the database has to be deleted')
delete_parser.add_argument('-d', '--directory', dest='dir', help='change the local directory to save/to get the data.', type=str)
delete_parser.add_argument('-s', '--s3store', dest='store', help='change the "S3" storage of the data ')
delete_parser.add_argument('-v', '--version', help='version of the needed database. Defaults to the current release')
delete_parser.set_defaults(func=delete)
list_recipes_parser = subparsers.add_parser('list_recipes', help='print databases with the possible Tool')
list_local_databases_parser = subparsers.add_parser('list_local_databases', help='print the list of local databases with some features')
list_local_databases_parser.add_argument('-d', '--directory', dest='dir', help='change the local directory to save/to get the data.', type=str)
list_local_databases_parser.set_defaults(func=list_local_databases)
list_remote_databases_parser = subparsers.add_parser('list_remote_databases', help='print the list of remote databases with some features')
list_remote_databases_parser.add_argument('-d', '--directory', dest='dir', help='change the local directory to save/to get the data.', type=str)
list_remote_databases_parser.add_argument('-s', '--s3store', dest='store', help='change the "S3" storage of the data ')
list_remote_databases_parser.set_defaults(func=list_remote_databases)
list_recipes_parser = subparsers.add_parser('list_recipes', help='print databases with the possible Tool/s')
list_recipes_parser.set_defaults(func=list_recipes)
return parser.parse_args()
def targetdir_maker(args):
databasedir = "./output"
def get_local_databases_directory(args):
database_directory = os.path.abspath("local_databases")
if args.dir:
databasedir = args.dir
database_directory = os.path.abspath(args.dir)
elif "DBMAN_DBDIR" in os.environ:
databasedir = os.environ["DBMAN_DBDIR"]
return databasedir
def get_tool_dir(dbdir, db, tool):
tool_dir = path_maker(dbdir, db+'_'+tool)
return tool_dir
database_directory = os.environ["DBMAN_DBDIR"]
return database_directory
def s3dir_maker(args):
def get_remote_databases_directory(args):
web_dir = 's3://db_storage'
if args.store:
web_dir = args.store
@@ -84,6 +80,18 @@ def s3dir_maker(args):
return web_dir
def get_raw_directory_name(args, version):
return args.database+'_'+version+'_raw'
def get_tool_directory_name(args, version):
return args.database+'_'+version+'_'+args.tool
def get_path_tool_directory(args, version):
return path_maker(get_local_databases_directory(args), get_tool_directory_name(args, version))
def path_maker(directory, file):
filedir = ''
if directory[-1] == '/':
@@ -93,22 +101,173 @@ def path_maker(directory, file):
return filedir
def prepare(args):
if args.database in data.keys():
def get_version(args):
version = str(date.today())
if args.version:
version = args.version
elif data[args.database]['version']:
version = data[args.database]['version']()
return version
def get_swissprot_version():
file = wget.download("ftp://ftp.uniprot.org/pub/databases/uniprot/current_release/knowledgebase/complete/reldate.txt")
datei = open(file)
satz = ''
while 'Swiss-Prot' not in satz:
satz = datei.readline()
satz_search = re.search('[0-9]{4}_[0-9]{2}', satz)
version = satz_search.group(0)
datei.close()
os.remove(file)
return version
def get_pfam_version():
file = wget.download("ftp://ftp.ebi.ac.uk/pub/databases/Pfam/current_release/relnotes.txt")
datei = open(file)
satz = ''
while 'RELEASE' not in satz:
satz = datei.readline()
satz_search = re.search(r'[0-9]{2}\.[0-9]', satz)
version = satz_search.group(0)
datei.close()
os.remove(file)
return version
def get_card_version():
os.mkdir('version')
os.chdir('version')
wget.download("https://card.mcmaster.ca/latest/data")
tar = tarfile.open('card-data.tar.bz2')
tar.extractall()
tar.close()
with open("card.json") as f:
data = json.load(f)
version = data["_version"]
os.chdir('..')
shutil.rmtree('version')
return version
def get_local_json_version(args):
if args.version:
version = args.version
else:
with open(get_path_local_json(args)) as f:
metadata = json.load(f)
matched_dictionnary = []
for dictionnary in metadata:
if dictionnary['name'] == args.database and dictionnary['tool'] == args.tool:
matched_dictionnary.append(dictionnary)
if len(matched_dictionnary) == 1:
version = matched_dictionnary[0]['version']
elif len(matched_dictionnary) > 1:
version = sorted(matched_dictionnary, key=lambda i: i['created'], reverse=True)[0]['version']
else:
version = 'error'
return version
dbman_dir = targetdir_maker(args)
raw_dir = path_maker(dbman_dir, args.database+'_raw')
if not os.path.isdir(raw_dir):
os.mkdir(raw_dir)
subprocess.run([data[args.database]['prepare'], raw_dir])
print("The {} file is in: ".format(args.database) + os.path.abspath(raw_dir))
def get_remote_json_version(args):
if args.version:
version = args.version
else:
remote_metadata = get_remote_metadata(args)
if remote_metadata != []:
matched_dictionnary = []
for dictionnary in remote_metadata:
if dictionnary['name'] == args.database and dictionnary['tool'] == args.tool:
matched_dictionnary.append(dictionnary)
if len(matched_dictionnary) == 1:
version = matched_dictionnary[0]['version']
elif len(matched_dictionnary) > 1:
version = sorted(matched_dictionnary, key=lambda i: i['created'], reverse=True)[0]['version']
else:
version = 'error'
else:
print('The {} file already exists in: '.format(args.database) + os.path.abspath(raw_dir))
version = 'error'
return version
def get_path_local_json(args):
return path_maker(get_local_databases_directory(args), 'dbman_metadata.json')
def get_path_remote_json(args):
return path_maker(get_remote_databases_directory(args), 'dbman_remote_metadata.json.tar.gz')
def get_local_metadata(args):
if not os.path.isfile(get_path_local_json(args)):
with open(get_path_local_json(args), 'w') as f:
json.dump([], f)
metadata = []
else:
with open(get_path_local_json(args)) as f:
metadata = json.load(f)
return metadata
def save_local_metadata(args, metaliste):
with open(get_path_local_json(args), 'w') as f:
json.dump(metaliste, f)
def get_remote_metadata(args):
if get_path_remote_json(args) in get_remote_files():
subprocess.run(['./download_db.sh', get_path_remote_json(args), get_local_databases_directory(args), 'dbman_remote_metadata.json.tar.gz'])
with open(path_maker(get_local_databases_directory(args), 'dbman_remote_metadata.json')) as f:
metadata = json.load(f)
os.remove(path_maker(get_local_databases_directory(args), 'dbman_remote_metadata.json'))
else:
metadata = []
return metadata
def save_remote_metadata(args, metaliste):
with open('dbman_remote_metadata.json', 'w') as f:
json.dump(metaliste, f)
subprocess.run(['./upload_db.sh', './', 'dbman_remote_metadata.json.tar.gz', 'dbman_remote_metadata.json', get_remote_databases_directory(args)])
os.remove('dbman_remote_metadata.json')
def get_remote_filename(args, version):
return path_maker(get_remote_databases_directory(args), get_tar_filename(args, version))
def get_remote_files():
return subprocess.run(['s3cmd', 'la'], capture_output=True, text=True).stdout.split()
def get_tar_filename(args, version):
return get_tool_directory_name(args, version) + '.tar.gz'
def create_tar_file_and_upload(args, version):
subprocess.run(['./upload_db.sh', get_local_databases_directory(args), get_tar_filename(args, version), get_tool_directory_name(args, version), get_remote_databases_directory(args)])
def prepare(args):
if args.database in data.keys():
version = get_version(args)
print('')
raw_directory_path = path_maker(get_local_databases_directory(args), get_raw_directory_name(args, version))
if not os.path.isdir(raw_directory_path):
os.mkdir(raw_directory_path)
subprocess.run([data[args.database]['prepare'], raw_directory_path])
metadata = get_local_metadata(args)
details = {'name': args.database, 'tool': 'raw', 'version': version, 'created': str(date.today())}
metadata.append(details)
save_local_metadata(args, metadata)
print("The {} file is in: ".format(args.database) + os.path.abspath(raw_directory_path))
else:
print('The {} file already exists in: '.format(args.database) + os.path.abspath(raw_directory_path))
if args.tool in data[args.database]['tool'].keys():
tool_dir = get_tool_dir(dbman_dir, args.database, args.tool)
tool_file = path_maker(raw_dir, data[args.database]['filename'])
tool_dir = get_path_tool_directory(args, version)
tool_file = path_maker(raw_directory_path, data[args.database]['filename'])
tool_dir_file = path_maker(tool_dir, data[args.database]['filename'])
if not os.path.isdir(tool_dir):
os.mkdir(tool_dir)
@@ -116,45 +275,35 @@ def prepare(args):
subprocess.run([data[args.database]['tool'][args.tool], tool_dir, data[args.database]['filename']])
print('The {} files are in: '.format(args.tool) + os.path.abspath(tool_dir))
os.unlink(tool_dir_file)
metadata = get_local_metadata(args)
details = {'name': args.database, 'tool': args.tool, 'version': version, 'created': str(date.today())}
metadata.append(details)
save_local_metadata(args, metadata)
else:
print('The {} files already exist in: '.format(args.tool) + os.path.abspath(tool_dir))
else:
print('Tool error. The following possibilities exist: {}'.format([tool for tool in data[args.database]['tool'].keys()]))
else:
print('Database error. The following possibilities exist: {}'.format([database for database in data.keys()]))
print('Database error. The following possibilities exist: {}'.format([database for database in data.keys()]))
def upload(args):
if args.database in data.keys():
if args.tool in data[args.database]['tool'].keys():
upload_dir = s3dir_maker(args)
dbman_dir = targetdir_maker(args)
s3_list = subprocess.run(['s3cmd', 'la'], capture_output=True, text=True).stdout.split()
s3_dir = get_tool_dir(upload_dir, args.database, args.tool+'.tar.gz')
if s3_dir not in s3_list:
tool_dir = get_tool_dir(dbman_dir, args.database, args.tool)
upload_data = args.database+'_'+args.tool
upload_data_name = '{}'.format(args.database+'_'+args.tool+'.tar.gz')
if os.path.isdir(tool_dir):
subprocess.run(['./upload_db.sh', dbman_dir, upload_data_name, upload_data, upload_dir])
if args.tool in data[args.database]['tool'].keys() or args.tool == "raw":
version = get_local_json_version(args)
# 'error' when the data to upload was not found locally
if version != 'error':
if get_remote_filename(args, version) not in get_remote_files():
create_tar_file_and_upload(args, version)
remote_metadata = get_remote_metadata(args)
for dictionnary in get_local_metadata(args):
if dictionnary['name'] == args.database and dictionnary['tool'] == args.tool and dictionnary['version'] == version:
remote_metadata.append(dictionnary)
save_remote_metadata(args, remote_metadata)
else:
print('There is no {} data to upload in {}. Prepare the database first'.format(args.database+'_'+args.tool,os.path.abspath(dbman_dir)))
print('The {} files are already in {}'.format(get_tool_directory_name(args, version), get_remote_databases_directory(args)))
else:
print('The {} files are already in {}'.format(args.database+'_'+args.tool,upload_dir))
if args.raw:
s3_raw_dir=get_tool_dir(upload_dir,args.database,'raw.tar.gz')
if s3_raw_dir not in s3_list:
local_raw_dir = get_tool_dir(dbman_dir,args.database,'raw')
raw_data = args.database+'_raw'
tarraw_data_name = args.database+'_raw.tar.gz'
if os.path.isdir(local_raw_dir):
subprocess.run(['./upload_db.sh', dbman_dir, tarraw_data_name, raw_data, upload_dir])
else:
print('There is no {} data to upload in {}. Prepare the database first'.format(args.database+'_raw',os.path.abspath(dbman_dir)))
else:
print('The {} files are already in {}'.format(args.database+'_raw',upload_dir))
print('There is no {} data to upload in {}. Prepare the database first'.format(args.database+' '+args.tool, os.path.abspath(get_local_databases_directory(args))))
else:
print('Tool error. The following possibilities exist: {}'.format([tool for tool in data[args.database]['tool'].keys()]))
else:
@@ -163,33 +312,22 @@ def upload(args):
def download(args):
if args.database in data.keys():
if args.tool in data[args.database]['tool'].keys():
dbman_dir = targetdir_maker(args)
tool_dir = get_tool_dir(dbman_dir, args.database, args.tool)
if not os.path.isdir(tool_dir):
s3_list = subprocess.run(['s3cmd','la'],capture_output=True,text=True).stdout.split()
download_data_name = args.database+'_'+args.tool+'.tar.gz'
download_dir = s3dir_maker(args)
download_file = path_maker(download_dir, download_data_name)
if download_file in s3_list:
subprocess.run(['./download_db.sh', download_file, dbman_dir, download_data_name])
if args.tool in data[args.database]['tool'].keys() or args.tool == "raw":
version = get_remote_json_version(args)
if version != 'error':
if not os.path.isdir(get_path_tool_directory(args, version)):
download_file = path_maker(get_remote_databases_directory(args), get_tar_filename(args, version))
subprocess.run(['./download_db.sh', download_file, get_local_databases_directory(args), get_tar_filename(args, version)])
local_metadata = get_local_metadata(args)
for dictionnary in get_remote_metadata(args):
if dictionnary['name'] == args.database and dictionnary['tool'] == args.tool and dictionnary['version'] == version:
local_metadata.append(dictionnary)
save_local_metadata(args, local_metadata)
else:
print('There are no {} files in {}:'.format(args.database+'_'+args.tool, download_dir))
print('{} is already in the local directory {}'.format(get_tool_directory_name(args, version), os.path.abspath(get_local_databases_directory(args))))
else:
print('{} is already in the local directory {}'.format(args.database+'_'+args.tool,os.path.abspath(dbman_dir)))
if args.raw:
local_raw_dir = get_tool_dir(dbman_dir,args.database,'raw')
if not os.path.isdir(local_raw_dir):
download_dir = s3dir_maker(args)
s3_raw_dir = get_tool_dir(download_dir,args.database,'raw.tar.gz')
s3_list = subprocess.run(['s3cmd','la'],capture_output=True,text=True).stdout.split()
tarraw_data_name = args.database+'_raw.tar.gz'
if s3_raw_dir in s3_list:
subprocess.run(['./download_db.sh', s3_raw_dir, dbman_dir, tarraw_data_name])
else:
print('There are no {} files in {}:'.format(args.database+'_raw', download_dir))
else:
print('{} is already in the local directory {}'.format(args.database+'_raw',os.path.abspath(dbman_dir)))
print('There are no {} files to download in {}:'.format(args.database+' '+args.tool, get_remote_databases_directory(args)))
else:
print('Tool error. The following possibilities exist: {}'.format([tool for tool in data[args.database]['tool'].keys()]))
else:
@@ -198,39 +336,37 @@ def delete(args):
def delete(args):
if args.database in data.keys():
if args.tool in data[args.database]['tool'].keys():
if args.tool in data[args.database]['tool'].keys() or args.tool == "raw":
if args.place == 'local':
dbman_dir = targetdir_maker(args)
del_dir = path_maker(dbman_dir, args.database+'_'+args.tool)
if os.path.isdir(del_dir):
shutil.rmtree(del_dir)
print("The {} files were successfully delete from: ".format(args.database+'_'+args.tool) + os.path.abspath(dbman_dir))
version = get_local_json_version(args)
dbman_dir = get_local_databases_directory(args)
directory_to_delete = path_maker(dbman_dir, get_tool_directory_name(args, version))
if os.path.isdir(directory_to_delete):
shutil.rmtree(directory_to_delete)
metadata = get_local_metadata(args)
for position in range(0, len(metadata)):
if metadata[position]['name'] == args.database and metadata[position]['tool'] == args.tool and metadata[position]['version'] == version:
del metadata[position]
break
save_local_metadata(args, metadata)
print("The {} files were successfully delete from: ".format(get_tool_directory_name(args, version)) + os.path.abspath(dbman_dir))
else:
print("The {} files aren't existing in: ".format(args.database+'_'+args.tool) + os.path.abspath(dbman_dir))
if args.raw:
del_rawdir = path_maker(dbman_dir, args.database+'_raw')
if os.path.isdir(del_rawdir):
shutil.rmtree(del_rawdir)
print("The {} files were successfully delete from: ".format(args.database+'_raw') + os.path.abspath(dbman_dir))
else:
print("The {} files aren't existing in: ".format(args.database+'_raw') + os.path.abspath(dbman_dir))
elif args.place == 'S3':
web_store = s3dir_maker(args)
s3_list = subprocess.run(['s3cmd','la'],capture_output=True,text=True).stdout.split()
web_file = path_maker(web_store, args.database+'_'+args.tool+'.tar.gz')
if web_file in s3_list:
subprocess.run(['./delete_db.sh', web_file])
print("The {} files were successfully delete from: ".format(args.database+'_'+args.tool) + web_store)
print("The {} files aren't existing in: ".format(get_tool_directory_name(args, version)) + os.path.abspath(dbman_dir))
elif args.place == 's3':
version = get_remote_json_version(args)
web_store = get_remote_databases_directory(args)
web_file = path_maker(web_store, get_tar_filename(args, version))
if web_file in get_remote_files():
subprocess.run(['./delete_remote_file.sh', web_file])
metadata = get_remote_metadata(args)
for position in range(0, len(metadata)):
if metadata[position]['name'] == args.database and metadata[position]['tool'] == args.tool and metadata[position]['version'] == version:
del metadata[position]
break
save_remote_metadata(args, metadata)
print("The {} files were successfully delete from: ".format(get_tool_directory_name(args, version)) + web_store)
else:
print("The {} files aren't existing in: ".format(args.database+'_'+args.tool) + web_store)
if args.raw:
web_rawfile = path_maker(web_store, args.database+'_raw.tar.gz')
if web_rawfile in s3_list:
subprocess.run(['./delete_db.sh', web_rawfile])
print("The {} files were successfully delete from: ".format(args.database+'_raw') + web_store)
else:
print("The {} files aren't existing in: ".format(args.database+'_raw') + web_store)
print("The {} files aren't existing in: ".format(get_tool_directory_name(args, version)) + web_store)
else:
print('Tool error. The following possibilities exist: {}'.format([tool for tool in data[args.database]['tool'].keys()]))
else:
@@ -239,16 +375,46 @@ def delete(args):
def list_recipes(args):
for database in data.keys():
print('{}:{}'.format(database,[tool for tool in data[database]['tool'].keys()]))
print('{}:{}'.format(database, [tool for tool in data[database]['tool'].keys()]))
def list_local_databases(args):
metadata = get_local_metadata(args)
for dic_fichier in metadata:
print('\n{}[{}] Version: {} created on: {}'.format(dic_fichier['name'], dic_fichier['tool'], dic_fichier['version'], dic_fichier['created']))
def list_remote_databases(args):
metadata = get_remote_metadata(args)
for dic_fichier in metadata:
print('\n{}[{}] Version: {} created on: {}'.format(dic_fichier['name'], dic_fichier['tool'], dic_fichier['version'], dic_fichier['created']))