Skip to content
Snippets Groups Projects
Commit 2b012eee authored by Rudel Fankep's avatar Rudel Fankep
Browse files

pip_installation

parent cfea2f15
No related branches found
No related tags found
No related merge requests found
Showing
with 72 additions and 12 deletions
Metadata-Version: 1.0
Name: dbman
Version: 0.0.1.dev14
Summary: Download, convert and upload databases to cloud server
Home-page: UNKNOWN
Author: Rudel Fankep
Author-email: Rudel.C.NKouamedjo-fankep@bio.uni-giessen.de
License: MIT
Project-URL: Source Code, https://git.computational.bio.uni-giessen.de/cfankep/psotdb.git
Description: Download, convert and upload databases to cloud server
Keywords: tools, databases
Platform: UNKNOWN
README.rst
blast_db.sh
delete_remote_file.sh
download_db.sh
download_json.sh
ghostx_db.sh
hmmer_pfam.sh
prepare_card.sh
prepare_pfam.sh
prepare_swissprot.sh
setup.cfg
setup.py
test.py
testfile.txt
upload_db.sh
upload_json.sh
dbman/__init__.py
dbman/main.py
dbman.egg-info/PKG-INFO
dbman.egg-info/SOURCES.txt
dbman.egg-info/dependency_links.txt
dbman.egg-info/entry_points.txt
dbman.egg-info/not-zip-safe
dbman.egg-info/pbr.json
dbman.egg-info/requires.txt
dbman.egg-info/top_level.txt
local_databases/dbman_metadata.json
local_databases/swissprot_2019_06_blast/uniprot_sprot.fasta.phr
local_databases/swissprot_2019_06_blast/uniprot_sprot.fasta.pin
local_databases/swissprot_2019_06_blast/uniprot_sprot.fasta.psq
\ No newline at end of file
[console_scripts]
dbman = dbman.main:main
{"git_version": "cfea2f1", "is_release": false}
\ No newline at end of file
wget
dbman
File added
File added
......@@ -9,10 +9,14 @@ import wget
from datetime import date
import tarfile
import json
import pkg_resources
def myparser():
parser = argparse.ArgumentParser(description='Download, convert and share databases to working computer', prog='Database Manager')
parser = argparse.ArgumentParser(description='Download, convert and upload databases to cloud server', prog='Database Manager')
parser.set_defaults(func=help)
subparsers = parser.add_subparsers(title='Subcommands', description='Authorized Subcommands', help='Additional help')
......@@ -59,7 +63,13 @@ def myparser():
list_recipes_parser = subparsers.add_parser('list_recipes', help='print databases with the possible Tool/s')
list_recipes_parser.set_defaults(func=list_recipes)
return parser.parse_args()
args = parser.parse_args()
args.parser = parser
return args
def help(args):
    """Default subcommand handler: show the CLI usage text.

    Registered via ``parser.set_defaults(func=help)`` so that running the
    program with no subcommand prints help instead of doing nothing.
    NOTE: intentionally shadows the builtin ``help`` inside this module.
    """
    cli_parser = args.parser
    cli_parser.print_help()
def get_local_databases_directory(args):
......@@ -217,7 +227,7 @@ def save_local_metadata(args, metaliste):
def get_remote_metadata(args):
if get_path_remote_json(args) in get_remote_files():
subprocess.run(['./download_json.sh', get_path_remote_json(args), get_local_databases_directory(args)])
subprocess.run([pkg_resources.resource_filename(__name__, "scripts/download_json.sh"), get_path_remote_json(args), get_local_databases_directory(args)])
with open(path_maker(get_local_databases_directory(args), 'dbman_remote_metadata.json')) as f:
metadata = json.load(f)
os.remove(path_maker(get_local_databases_directory(args), 'dbman_remote_metadata.json'))
......@@ -229,7 +239,7 @@ def get_remote_metadata(args):
def save_remote_metadata(args, metaliste):
    """Upload *metaliste* as the remote metadata JSON and clean up locally.

    Writes the metadata list to ``dbman_remote_metadata.json`` inside the
    local databases directory, pushes it to the cloud store through the
    packaged ``upload_json.sh`` helper, then removes the temporary local
    copy.

    The previous revision left both the old CWD-relative ``./upload_json.sh``
    call and the new packaged-script call in place (a diff artifact), which
    would run the upload twice — and fail whenever the process was started
    outside the package directory. Only the packaged-script invocation is
    kept; ``pkg_resources.resource_filename`` resolves the script regardless
    of the caller's working directory.
    """
    meta_path = path_maker(get_local_databases_directory(args), 'dbman_remote_metadata.json')
    with open(meta_path, 'w') as f:
        json.dump(metaliste, f)
    subprocess.run([pkg_resources.resource_filename(__name__, "scripts/upload_json.sh"), get_local_databases_directory(args), 'dbman_remote_metadata.json', get_remote_databases_directory(args)])
    os.remove(meta_path)
......@@ -246,7 +256,7 @@ def get_tar_filename(args, version):
def create_tar_file_and_upload(args, version):
    """Archive the tool's database directory and push it to the remote store.

    Delegates the tar creation and upload to the packaged ``upload_db.sh``
    helper, passing the local databases directory, the archive filename, the
    tool directory name for *version*, and the remote target directory.

    The previous revision kept both the old CWD-relative ``./upload_db.sh``
    call and the packaged-script call (a diff artifact), which would upload
    the archive twice and break when run from outside the package directory.
    Only the packaged-script invocation remains.
    """
    subprocess.run([pkg_resources.resource_filename(__name__, "scripts/upload_db.sh"), get_local_databases_directory(args), get_tar_filename(args, version), get_tool_directory_name(args, version), get_remote_databases_directory(args)])
def prepare(args):
......@@ -317,7 +327,7 @@ def download(args):
if version != 'error':
if not os.path.isdir(get_path_tool_directory(args, version)):
download_file = path_maker(get_remote_databases_directory(args), get_tar_filename(args, version))
subprocess.run(['./download_db.sh', download_file, get_local_databases_directory(args), get_tar_filename(args, version)])
subprocess.run([pkg_resources.resource_filename(__name__, "scripts/download_db.sh"), download_file, get_local_databases_directory(args), get_tar_filename(args, version)])
local_metadata = get_local_metadata(args)
for dictionnary in get_remote_metadata(args):
if dictionnary['name'] == args.database and dictionnary['tool'] == args.tool and dictionnary['version'] == version:
......@@ -357,7 +367,7 @@ def delete(args):
web_store = get_remote_databases_directory(args)
web_file = path_maker(web_store, get_tar_filename(args, version))
if web_file in get_remote_files():
subprocess.run(['./delete_remote_file.sh', web_file])
subprocess.run([pkg_resources.resource_filename(__name__, "scripts/delete_remote_file.sh"), web_file])
metadata = get_remote_metadata(args)
for position in range(0, len(metadata)):
if metadata[position]['name'] == args.database and metadata[position]['tool'] == args.tool and metadata[position]['version'] == version:
......@@ -390,18 +400,18 @@ def list_remote_databases(args):
print('\n{}[{}] Version: {} erstellt am: {}'.format(dic_fichier['name'], dic_fichier['tool'], dic_fichier['version'], dic_fichier['created']))
data = {'swissprot': {'prepare': './prepare_swissprot.sh',
'tool': {'blast': './blast_db.sh', 'ghostx': './ghostx_db.sh'},
data = {'swissprot': {'prepare': pkg_resources.resource_filename(__name__, "scripts/prepare_swissprot.sh"),
'tool': {'blast': pkg_resources.resource_filename(__name__, "scripts/blast_db.sh"), 'ghostx': pkg_resources.resource_filename(__name__, "scripts/ghostx_db.sh")},
'filename': 'uniprot_sprot.fasta',
'version': get_swissprot_version
},
'pfam': {'prepare': './prepare_pfam.sh',
'tool': {'hmmer': './hmmer_pfam.sh'},
'tool': {'hmmer': pkg_resources.resource_filename(__name__, "scripts/hmmer_pfam.sh")},
'filename': 'Pfam-A.hmm',
'version': get_pfam_version
},
'card': {'prepare': './prepare_card.sh',
'tool': {'blast': './blast_db.sh', 'ghostx': './ghostx_db.sh'},
'card': {'prepare': pkg_resources.resource_filename(__name__, "scripts/prepare_card.sh"),
'tool': {'blast': pkg_resources.resource_filename(__name__, "scripts/blast_db.sh"), 'ghostx': pkg_resources.resource_filename(__name__, "scripts/ghostx_db.sh")},
'filename': 'protein_fasta_protein_homolog_model.fasta',
'version': get_card_version
}
......
File moved
File moved
File moved
File moved
File moved
File moved
File moved
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment