diff --git a/getmystuph/backends/__init__.py b/getmystuph/backends/__init__.py
index d6ca9e1..58c2a7a 100644
--- a/getmystuph/backends/__init__.py
+++ b/getmystuph/backends/__init__.py
@@ -1,93 +1,94 @@
 import logging
 import re

 from .. import export
 from .. import colored
 from .. import color_code

 __author__ = "Nicolas Richart"
 __copyright__ = "Copyright (C) 2016, EPFL (Ecole Polytechnique Fédérale " \
                 "de Lausanne) - SCITAS (Scientific IT and Application " \
                 "Support)"
 __credits__ = ["Nicolas Richart"]
 __license__ = "BSD"
 __version__ = "0.1"
 __maintainer__ = "Nicolas Richart"
 __email__ = "nicolas.richart@epfl.ch"

 _logger = logging.getLogger(__name__)

 __repo_backends = {}


 def _get_class(_type, backend):
     if not backend or backend not in __repo_backends:
         _logger.error("{0} not a known backend".format(backend))
         raise TypeError("{0} not a known backend".format(backend))

     if _type not in __repo_backends[backend]:
         _logger.error(("{0} is not a known type for the " +
                        "backend {1}").format(_type, backend))
         raise TypeError(("{0} is not a known type for the " +
                          "backend {1}").format(_type, backend))

     module_info = __repo_backends[backend][_type]
     _logger.debug("Importing {0} from module {1}".format(
         module_info['class'], module_info['module']))
     module = __import__('getmystuph.' + module_info['module'],
                         globals(), locals(), [module_info['class']], 0)
     _class = getattr(module, module_info['class'])
     return _class


 def _register_backend(name, backends):
     __repo_backends[name] = backends


 @export
 def color_phid(phid):
     _color = {'PROJ': color_code['group'],
               'USER': color_code['user'],
-              'REPO': color_code['repo']}
+              'REPO': color_code['repo'],
+              'PLCY': 'yellow'}

     regex = re.compile(r'PHID-([A-Z]{4})-.+')
     match = regex.match(phid)
     if match:
         return colored(phid, _color[match.group(1)])
     else:
         return colored(phid, attrs=['bold'])


 _register_backend(
     'c4science', {
         'git': {'module': 'backends.c4science.repo',
                 'class': 'C4ScienceRepo'},
         'svn': {'module': 'backends.c4science.repo',
                 'class': 'C4ScienceRepo'},
         'directory': {'module': 'backends.c4science.directory',
                       'class': 'C4ScienceDirectory'}
     })

 _register_backend(
     'epfl', {
         'git': {'module': 'backends.epfl.repo',
                 'class': 'RepoGitEPFL'},
         'svn': {'module': 'backends.epfl.repo',
                 'class': 'RepoSvnEPFL'},
         'directory': {'module': 'backends.epfl.directory',
                       'class': 'EPFLDirectory'}
     }
 )

 _register_backend(
     'phabricator', {
         'git': {'module': 'backends.repos.phabricator',
                 'class': 'PhabRepo'},
         'svn': {'module': 'backends.repos.phabricator',
                 'class': 'PhabRepo'},
         'directory': {'module': 'backends.directories.phabricator',
                       'class': 'PhabDirectory'}
     })
diff --git a/getmystuph/backends/directories/phabricator.py b/getmystuph/backends/directories/phabricator.py
index 5e28cf0..500a644 100644
--- a/getmystuph/backends/directories/phabricator.py
+++ b/getmystuph/backends/directories/phabricator.py
@@ -1,165 +1,166 @@
 import logging
 import re

 from ... import colored
 from ... import export
 from ... import dry_do
 from ... import Directory
 from ...utils import get_phabricator_instance
 from ..
import color_phid __author__ = "Nicolas Richart" __copyright__ = "Copyright (C) 2016, EPFL (Ecole Polytechnique Fédérale " \ "de Lausanne) - SCITAS (Scientific IT and Application " \ "Support)" __credits__ = ["Nicolas Richart"] __license__ = "BSD" __version__ = "0.1" __maintainer__ = "Nicolas Richart" __email__ = "nicolas.richart@epfl.ch" _logger = logging.getLogger(__name__) @export class PhabDirectory(Directory): def __init__(self, *args, host=None, username=None, token=None, **kwargs): super().__init__(**kwargs) - self._phab = get_phabricator_instance(host=host, + self._host = '{0}/api/'.format(host.rstrip('/')) + self._phab = get_phabricator_instance(host=self._host, username=username, token=token) def _set_default_policy(self, phid): _default_policy = [{"type": "edit", "value": self.whoami}, {"type": "view", "value": "obj.project.members"}, {"type": "join", "value": "no-one"}] _msg = "Setting default policy for project {0} to {1}".format( color_phid(phid), ', '.join(["{0}: {1}".format(colored(m["type"], attrs=['bold']), color_phid(m['value'])) for m in _default_policy])) _logger.debug(_msg) if not self._dry_run: self._phab.project.edit(transactions=_default_policy, objectIdentifier=phid) else: dry_do(_msg) def color_name(self, name, **kwargs): regex = re.compile(r'PHID-([A-Z]{4})-.+') match = regex.match(name) if match: return color_phid(name) else: return super().color_name(name, **kwargs) def is_valid_user(self, id): return self.get_user_name(id) is not None def is_valid_group(self, id): return self.get_group_name(id) is not None def get_users_from_group(self, id): _res = self._phab.project.search(constraints={'phids': [id]}, attachments={'members': True}) if _res['data']: return [member['phid'] for member in _res['data'][0]['attachments']['members']['members']] return [] def get_group_unique_id(self, name): _res = self._phab.project.query(names=[name]) if _res['data']: return list(_res['data'].keys())[0] return None def get_user_unique_id(self, email): _res = self._phab.user.query(emails=[email]) if _res: return _res[0]['phid'] return None def get_user_unique_id_from_login(self, name): _res = self._phab.user.query(emails=[name]) if _res: return _res[0]['phid'] return None def get_group_name(self, gid): _res = self._phab.project.query(phids=[gid]) if _res['data']: return _res[0]['data'][gid]['name'] return None def get_user_name(self, uid): _res = self._phab.user.query(phids=[uid]) if _res: return _res[0]['realName'] return None def get_user_email(self, uid): raise RuntimeError("This information is not accessible") def create_group(self, name, members=[]): _unique_members = list(set(members)) _msg = 'Creating group {0} with members [{1}]'.format( self.color_name(name, type='group'), ', '.join([color_phid(_id) for _id in _unique_members])) _logger.debug(_msg) if not self._dry_run: _res = self._phab.project.create(name=name, members=_unique_members) _phid = _res['phid'] else: _phid = "PHID-PROJ-notarealproject" self._set_default_policy(_phid) return _phid def set_group_users(self, gid, uids): _unique_uids = list(set(uids)) _msg = 'Setting users {0} as members of group {1}'.format( ', '.join([color_phid(_id) for _id in _unique_uids]), color_phid(gid)) _logger.debug(_msg) transactions = [{"type": "members.set", "value": _unique_uids}] if not self._dry_run: self._phab.project.edit(transactions=transactions, objectIdentifier=gid) else: dry_do(_msg) def create_subgroup(self, name, pgid, members=[]): _unique_members = list(set(members)) _msg = 'Creating group {0} as a subgroup of {1} with 
members {2}'.format(
             colored(name, 'red', attrs=['bold']),
             colored(pgid, attrs=['bold']),
             ', '.join([color_phid(_id) for _id in _unique_members]))
         _logger.debug(_msg)

         transactions = [{"type": "parent", "value": pgid},
                         {"type": "name", "value": name}]
         if members is not None:
             transactions.append({"type": "members.set",
                                  "value": _unique_members})

         if not self._dry_run:
             _res = self._phab.project.edit(transactions=transactions)
             _phid = _res['object']['phid']
         else:
             _phid = 'PHID-PROJ-notarealproject'

         self._set_default_policy(_phid)
         return _phid

     @property
     def whoami(self):
         me = self._phab.user.whoami()
         return me['phid']
diff --git a/getmystuph/backends/repos/git.py b/getmystuph/backends/repos/git.py
index ec46a0d..7769665 100644
--- a/getmystuph/backends/repos/git.py
+++ b/getmystuph/backends/repos/git.py
@@ -1,112 +1,141 @@
 # -*- coding: utf-8 -*-

 import os
 import re
 import git
 import logging

 from ... import colored
+from ... import color_code
+from ... import dry_do
 from ...repo import RepoQuery

 __author__ = "Nicolas Richart"
 __copyright__ = "Copyright (C) 2016, EPFL (Ecole Polytechnique Fédérale " \
                 "de Lausanne) - SCITAS (Scientific IT and Application " \
                 "Support)"
 __credits__ = ["Nicolas Richart"]
 __license__ = "BSD"
 __version__ = "0.1"
 __maintainer__ = "Nicolas Richart"
 __email__ = "nicolas.richart@epfl.ch"

 _logger = logging.getLogger(__name__)


 class RepoGit(RepoQuery):
     """This class handles the common parts of git repositories:
     cloning, retrieving tags/branches, doing subtrees
     """
     def __enter__(self):
         super().__enter__()

         _logger.info('Cloning repo {0} [{1}] in {2}'.format(
             self._repo_info.color_name(self._name), self._url,
             colored(self.working_dir, attrs=['bold'])))

         if not os.path.isdir(os.path.join(self.working_dir, '.git')):
             self._repo = git.Repo.clone_from(self._url, self.working_dir)
         else:
             _logger.warning('Repo {0} is already cloned in {1}'.format(
                 self._repo_info.color_name(self._name),
                 colored(self.working_dir, attrs=['bold'])))
             self._repo = git.Repo(self.working_dir)

         return self

     @property
     def tags(self):
         _tags = []
         for ref in self._repo.refs:
             if type(ref) == git.refs.tag.TagReference:
                 _tags.append(str(ref))
         return _tags

     @property
     def branches(self):
         _refs = []
         for ref in self._repo.refs:
             if type(ref) == git.refs.remote.RemoteReference and\
                ref.name != 'origin/HEAD':
                 _refs.append(str(ref))
         return _refs

     def add_remote(self, out_repo):
+        self._out_repo = out_repo
+        _logger.debug('Adding remote {0} [{1}] to clone of {2}'.format(
+            colored(out_repo.backend_name, attrs=['bold']), out_repo.url,
+            self._repo_info.color_name(self._name)))
         if out_repo.backend_name in [_r.name for _r in self._repo.remotes]:
+            _logger.debug('Remote {0} [{1}] already present in {2}'.format(
+                colored(out_repo.backend_name, attrs=['bold']), out_repo.url,
+                self._repo_info.color_name(self._name)))
+            return
+        self._repo.create_remote(out_repo.backend_name,
+                                 out_repo.url)
-            _logger.debug('Adding remote {0} [{1}] to clone of {2}'.format(
-                out_repo.backend_name, out_repo.url, self._name))

+    def push(self):
+        class PushProgress(git.remote.RemoteProgress):
+            def update(self, op_code, cur_count, max_count=None, message=''):
+                print('update({0}, {1}, {2}, {3})'.format(op_code, cur_count,
+                                                          max_count, message))

-            git.remote.Remote.create(
-                self._repo, out_repo.backend_name,
-                out_repo.url)
+        _remote = self._repo.remote(name=self._out_repo.backend_name)
+        _msg = 'Pushing repo {0} data to {1}'.format(
+            self._repo_info.color_name(self._name),
+            colored(self._out_repo.backend_name, attrs=['bold']))
+
+        _logger.info(_msg)
+        if self._dry_run:
+            dry_do(_msg)
+        else:
+            for ref in self._repo.refs:
+                if type(ref) == git.refs.remote.RemoteReference and\
+                   ref.name != 'origin/HEAD':
+                    self._repo.create_head(ref.remote_head, commit=ref.name)
+
+            _push_progress = PushProgress()
+            _remote.push(all=True, progress=_push_progress)
+            _remote.push(tags=True, progress=_push_progress)


 class RepoGitSvn(RepoGit):
     """This class handles the git svn"""
     def __enter__(self):
         self._create_stage()

         _logger.info('Cloning repo {0} [{1}] in {2}'.format(
             self._repo_info.color_name(self._name), self._url,
             colored(self.working_dir, attrs=['bold'])))

         if not os.path.isdir(os.path.join(self.working_dir, '.git')):
             _git = git.Git(self.working_dir)
             _git.svn('clone', '--preserve-empty-dirs', '--stdlayout',
                      self._url, self.working_dir)

         self._repo = git.Repo(self.working_dir)

         super().__enter__()

         _tag_re = re.compile('origin/tags/(.*)')
         for _ref in self._repo.refs:
             if type(_ref) == git.refs.remote.RemoteReference:
                 _match = _tag_re.match(_ref.name)
                 if _match is not None:
                     _tag_name = _match.group(1)
                     _logger.debug('Creating tag {0} from branch {1}'.format(
                         _tag_name, _ref.name))
                     git.refs.tag.TagReference.create(
                         self._repo, _tag_name, ref=_ref.name)
                     _logger.debug('Deleting remote branch {1}'.format(
                         _tag_name, _ref.name))
                     git.refs.remote.RemoteReference.delete(
                         self._repo, _ref)

         return self
diff --git a/getmystuph/backends/repos/phabricator.py b/getmystuph/backends/repos/phabricator.py
index 0d0ad1c..b6e66eb 100644
--- a/getmystuph/backends/repos/phabricator.py
+++ b/getmystuph/backends/repos/phabricator.py
@@ -1,151 +1,235 @@
 # -*- coding: utf-8 -*-

 import logging
 import copy
 import re
+import time

 from ... import export
-from ... import colored
 from ... import dry_do
 from ...repo import Repo
 from ...utils import get_phabricator_instance
 from .. import color_phid

 __author__ = "Nicolas Richart"
 __copyright__ = "Copyright (C) 2016, EPFL (Ecole Polytechnique Fédérale " \
                 "de Lausanne) - SCITAS (Scientific IT and Application " \
                 "Support)"
 __credits__ = ["Nicolas Richart"]
 __license__ = "BSD"
 __version__ = "0.1"
 __maintainer__ = "Nicolas Richart"
 __email__ = "nicolas.richart@epfl.ch"

 _logger = logging.getLogger(__name__)


 @export
 class PhabRepo(Repo):
     def __init__(self, *args, host=None, username=None, token=None, **kwargs):
         super().__init__(**kwargs, username=username)
         options = copy.copy(kwargs)
         self._repo_type = options.pop('repo_type', 'git')
         # _create = options.pop('create', False)
-        self._host = host
-        self._phab = get_phabricator_instance(host='{0}api/'.format(host),
+        self._host = '{0}/api/'.format(host.rstrip('/'))
+
+        self._server = None
+
+        _server_re = re.compile(r'https?://(.*)')
+        _match = _server_re.match(host.rstrip('/'))
+        if _match is not None:
+            self._server = _match.group(1)
+
+        if self._server is None:
+            raise RuntimeError(
+                'Cannot extract the server name for repo {0} from {1}'.format(
+                    self._colored_name, host))
+
+        self._phab = get_phabricator_instance(host=self._host,
                                               username=username,
                                               token=token)

         _data = self._phab.diffusion.repository.search(
             queryKey="all", constraints={"name": self._name})['data']

         self._phab_id = None
-        if len(_data) > 0:
-            self._phab_id = _data[0]['phid']
-            _logger.debug('Repositories {0} has id {1}'.format(
-                self._colored_name,
-                self._phab_id))
-        else:
+        for _repo in _data:
+            _repo_name = _repo['fields']['name']
+            if _repo_name == self._name:
+                self._phab_id = _repo['phid']
+                self._id = _repo['id']
+                _logger.debug('Repositories {0} has id {1}'.format(
+                    self._colored_name,
+                    self._phab_id))
+
+        if self._phab_id is None:
             _logger.debug('Repositories {0} not in phabricator'.format(
                 self._colored_name))

     def create(self):
         if self._phab_id is not None:
             _msg = 'The repository {0}:{1} already exists'.format(
                 self._colored_name, self._phab_id)
             _logger.error(_msg)
-            raise RuntimeError(_msg)
+            return
+            #raise RuntimeError(_msg)

         if self._dry_run:
             self._phab_id = "PHID-REPO-notarealrepo"
         else:
             _data = self._phab.diffusion.repository.edit(
                 transactions=[{'type': 'name', 'value': self._name},
                               {'type': 'vcs', 'value': self._repo_type}])
             self._creation_data = _data['object']
             self._phab_id = self._creation_data['phid']

         _msg = 'Created repository {0} id {1}'.format(self._colored_name,
                                                       color_phid(self._phab_id))
         _logger.info(_msg)
         if self._dry_run:
             dry_do(_msg)
             self._id = 666
-
-        _server_re = re.compile(r'https?://(.*)/?')
-        _match = _server_re.match(self._host)
-        if _match is not None:
-            _server = _match.group(1)
         else:
-            raise RuntimeError(
-                'Cannot extract the server name for repo {0}'.format(
-                    self.name))
+            self._id = self._creation_data['id']

+    @property
+    def url(self):
         if self.repo_type == 'git':
             self._url = 'git@{0}:/diffusion/{1}/{2}.git'.format(
-                _server, self._id, self._name)
+                self._server, self._id, self._name)
         elif self.repo_type == 'svn':
             self._url = 'svn+ssh://git@{0}/diffusion/{1}/{2}.git'.format(
-                _server, self._id)
+                self._server, self._id, self._name)
+        return self._url
+
+    def enable(self):
+        _msg = 'Activating repository {0} [{1}]'.format(
+            self._colored_name, color_phid(self._phab_id))
+        _logger.info(_msg)
+        if self._dry_run:
+            dry_do(_msg)
+        else:
+            self._phab.diffusion.repository.edit(
+                transactions=[{'type': 'status',
+                               'value': 'active'}],
+                objectIdentifier=self._phab_id)
+
+    def wait_enabled(self, timeout=3600):
+        _msg = 'Checking if {0} [{1}] is activated'.format(
+            self._colored_name, color_phid(self._phab_id))
+        _logger.info(_msg)
+        if self._dry_run:
+            dry_do(_msg)
+        else:
+            _time = 0
+            while True:
+                _data = self._phab.diffusion.repository.search(
+                    queryKey="all", constraints={"phids": [self._phab_id]})['data']
+                if not len(_data) == 1:
+                    raise RuntimeError('Cannot find the repo {0}'.format(self._colored_name))
+
+                _status = _data[0]['fields']['status']
+                if _status == 'active' or _time > timeout:
+                    return
+
+                time.sleep(1)
+                _time += 1

     def set_permissions(self, permissions):
         _perms = {'edit': [],
                   'push': [],
                   'view': []}
         _equivalent = {'edit': Repo.EDIT,
                        'view': Repo.VIEW,
                        'push': Repo.PUSH}
         _phab_perms = {'edit': 'edit',
                        'view': 'view',
                        'push': 'policy.push'}
         _special_perms = {'_author_': 'obj.repository.author',
                           '_users_': 'users',
                           '_public_': 'public'}
         for _type in {'group', 'user'}:
             _perms_ug = getattr(permissions, '{0}s'.format(_type))
             for _entity in _perms_ug:
                 _id = _entity['id']
                 if _id in _special_perms:
                     _id = _special_perms[_id]
                 for _phab, _gen in _equivalent.items():
                     if _entity['perm'] & _gen:
                         _perms[_phab].append(_id)

         if permissions.anonymous:
             _perms['view'] = ['public']

         for _type in ['push', 'view', 'edit']:
             if _type not in _perms:
                 continue

-            _perms[_type] = set(_perms[_type])
+            _perms[_type] = list(set(_perms[_type]))

             if len(_perms[_type]) > 1:
+
+                if 'public' in _perms[_type]:
+                    _perms[_type] = 'public'
+                    continue
+
+                if 'users' in _perms[_type]:
+                    _perms[_type] = 'users'
+                    continue
+
                 # create custom policy to replace the list
-                _logger.warning(
-                    'Cannot create complicated policies, {0}'.format(
-                        _perms[_type]) +
-                    ' changing permissions to user instead')
-                _perms[_type] = ['obj.repository.author']
+                regex = re.compile(r'PHID-([A-Z]{4})-.+')
+
+                _lists = {'PROJ': [],
+                          'USER': []}
+                for _p in _perms[_type]:
+                    match =
regex.match(_p) + if match is not None: + _lists[match.group(1)].append(_p) + elif _p == 'obj.repository.author': + _lists['USER'].append(self._directory.whoami) + + _policy = [ + {"action": "allow", "rule": 'PhabricatorUsersPolicyRule', "value": _lists['USER']}, + {"action": "allow", "rule": 'PhabricatorProjectsPolicyRule', "value": _lists['PROJ']}] + + _msg = 'Creating policy for users [{0}] and projects [{1}]'.format( + ', '.join([color_phid(_id) for _id in _lists['USER']]), + ', '.join([color_phid(_id) for _id in _lists['PROJ']])) + + _logger.debug(_msg) + if self._dry_run: + dry_do(_msg) + _phid = 'PHID-PLCY-notapolicy' + else: + _data = self._phab.policy.create(objectType='REPO', default='deny', + policy=_policy) + + _phid = _data['phid'] + + _logger.info('Replacing list {0} by policy {1}'.format( + _perms[_type], _phid)) + _perms[_type] = _phid + else: + _perms[_type] = _perms[_type][0] _msg = 'Setting \'{0}\' permissions for {1} to {2}:'.format( _type, self._colored_name, - ', '.join([color_phid(_id) for _id in _perms[_type]])) + color_phid(_perms[_type])) _logger.info(_msg) if not self._dry_run: - self._in_directory self._phab.diffusion.repository.edit( transactions=[{'type': _phab_perms[_type], - 'value': self._perms[_type][0]}], + 'value': _perms[_type]}], objectIdentifier=self._phab_id) else: dry_do(_msg) diff --git a/getmystuph/importers/group_importer.py b/getmystuph/importers/group_importer.py index 50b0152..5b19261 100644 --- a/getmystuph/importers/group_importer.py +++ b/getmystuph/importers/group_importer.py @@ -1,148 +1,148 @@ # -*- coding: utf-8 -*- import logging from . import Importer from .. import export from .. import colored from .. import Directory __author__ = "Nicolas Richart" __copyright__ = "Copyright (C) 2016, EPFL (Ecole Polytechnique Fédérale " \ "de Lausanne) - SCITAS (Scientific IT and Application " \ "Support)" __credits__ = ["Nicolas Richart"] __license__ = "BSD" __version__ = "0.1" __maintainer__ = "Nicolas Richart" __email__ = "nicolas.richart@epfl.ch" _logger = logging.getLogger(__name__) @export class GroupImporter(Importer): __default_import_scheme = {'type': 'project', 'name': '{original_name}', 'user-import-strategy': 'additive'} def __init__(self, name, config, **kwargs): super().__init__(name, config, self.__default_import_scheme, **kwargs) if 'user-import-strategy' not in self._config['import-scheme']: self._config['import-scheme']['user-import-strategy'] = 'additive' self._in_directory = self._backend_in['directory'] self._out_directory = self._backend_out['directory'] def _create_group(self, name, **kwargs): _logger.debug('Checking phid for group {0}'.format( self._out_directory.color_name(name, type='group'))) gid = self._out_directory.get_group_unique_id(name) if gid is not None: _logger.debug('Group {0} -> {1}'.format( self._out_directory.color_name(name, type='group'), self._out_directory.color_name(gid))) _logger.warning( '{0} already exists in \'{1}\' try to update it' .format( self._out_directory.color_name(name, type='group'), self._out_directory.backend_name)) return (gid, False) else: gid = self._out_directory.create_group(name, **kwargs) return (gid, True) def _get_users_ids(self, name): gid = self._in_directory.get_group_unique_id(name) if gid is None: _msg = '{0} is not a valid group in the directory {1}'.format( self._out_directory.color_name(name, type='group'), self._in_directory.backend_name) _logger.error(_msg) raise ValueError(_msg) _logger.debug(' --> group id {0} -> {1}' .format(self._in_directory.color_name(name, 
type='group'), self._in_directory.color_name(gid))) _users = self._in_directory.get_users_from_group(gid) self._user_db.add_users(_users) _ids = self._user_db.get_user_oids(_users) return (gid, _ids) def transfered_name(self, name): _name = name import_scheme = self._config['import-scheme'] if 'name' in import_scheme: _name = import_scheme['name'].format(original_name=name) return _name def transfer(self, name): _colored_name = self._in_directory.color_name(name, type='group') _logger.info('Locking for group: {0} ({1})'.format(_colored_name, self._colored_name)) _logger.debug(' --> group info {0}'.format(colored(self._config, attrs=['bold']))) import_scheme = self._config['import-scheme'] if import_scheme['type'] == 'ignore': return _in_gid, users_ids = self._get_users_ids(name) _newly_created = True _name = self.transfered_name(name) if 'type' not in import_scheme or \ import_scheme['type'] not in ['project', 'sub-project']: msg = "You should specify a type of " + \ "import-scheme for group {0}".format(_colored_name) _logger.error(msg) raise ValueError(msg) if import_scheme['type'] == 'project': _out_gid, _newly_created = self._create_group(_name, members=users_ids) elif import_scheme['type'] == 'sub-project': if 'project' not in import_scheme: _msg = 'To create {0} as a sub-project you ' + \ 'have to specify a parent project' \ .format(_colored_name) _logger.error(_msg) raise ValueError(_msg) _project_name = import_scheme['project'] _pgid, _newly_created = self._create_group(_project_name) _out_gid = self._out_directory.get_group_unique_id(_name) if _out_gid is not None: _logger.warning( '{0} already exists in \'{1}\' try to update it'.format( self._out_directory.color_name(_name, type='group'), self._out_directory.backend_name)) _newly_created = False else: _out_gid = self._out_directory.create_subgroup( _name, _pgid, members=users_ids) _newly_created = True if not _newly_created: if import_scheme['user-import-strategy'] == 'additive': _logger.debug( 'Looking for existing users in {0} ({1})'.format( self._out_directory.color_name(_name, type='group'), self._out_directory.color_name(_out_gid))) _existing_users = \ self._out_directory.get_users_from_group(_out_gid) if users_ids: users_ids.extend(_existing_users) else: users_ids = _existing_users self._out_directory.set_group_users(_out_gid, users_ids) - self._user_db.import_group(_in_gid, _out_gid) + self._user_db.import_group(_in_gid, _out_gid, name, _name) return _out_gid diff --git a/getmystuph/importers/import_user_db.py b/getmystuph/importers/import_user_db.py index c7d58bf..9d7f3e4 100644 --- a/getmystuph/importers/import_user_db.py +++ b/getmystuph/importers/import_user_db.py @@ -1,144 +1,164 @@ # -*- coding: utf-8 -*- import logging from .. import export from .. 
import colored __author__ = "Nicolas Richart" __copyright__ = "Copyright (C) 2016, EPFL (Ecole Polytechnique Fédérale " \ "de Lausanne) - SCITAS (Scientific IT and Application " \ "Support)" __credits__ = ["Nicolas Richart"] __license__ = "BSD" __version__ = "0.1" __maintainer__ = "Nicolas Richart" __email__ = "nicolas.richart@epfl.ch" _logger = logging.getLogger(__name__) @export class ImportUserDB: def __init__(self, in_directory, out_directory): self._in_directory = in_directory self._out_directory = out_directory self._users = {} self._imported_groups = {} self._default_importer = None - def import_group(self, _id, _imported_id): - self._imported_groups[_id] = _imported_id + def import_group(self, _id, _imported_id, _in_name, _out_name): + self._imported_groups[_id] = {'out_id': _imported_id, + 'in_name': _in_name, + 'out_name': _out_name} def add_users(self, users): for _user in users: if _user in self._users: _logger.debug( 'User {0} ({1}) from {2} already in cache [{3} - {4}]'.format( self._in_directory.color_name( self._users[_user]['name'], type='user'), self._in_directory.color_name(_user), self._in_directory.backend_name, self._out_directory.color_name( self._users[_user]['oid']), self._out_directory.backend_name)) continue _user_info = {'id': _user} try: _mail = self._in_directory.get_user_email(_user) _user_info['email'] = _mail _user_info['name'] = self._in_directory.get_user_name(_user) _out_id = self._out_directory.get_user_unique_id(_mail) if _out_id is not None: _logger.debug( 'Found {0} ({1}) in {2} as {3} in {4}'.format( self._in_directory.color_name(_user_info['name'], type='user'), self._in_directory.color_name(_user), self._in_directory.backend_name, self._out_directory.color_name(_out_id), self._out_directory.backend_name)) _user_info['oid'] = _out_id self._users[_user] = _user_info else: _logger.debug( 'Did not find {0} ({1}) from {2} in {3}'.format( self._in_directory.color_name(_user_info['name'], type='user'), self._in_directory.color_name(_user), self._in_directory.backend_name, self._out_directory.backend_name)) except: _logger.warning( "The user {0} does not exists in {1}".format( self._in_directory.color_name(_user), colored(self._in_directory.backend_name, attrs=['bold']))) def get_user_oids(self, users_ids): return [self._users[_user]['oid'] for _user in users_ids if (_user in self._users) and ('oid' in self._users[_user])] @property def default_importer(self): return self._default_importer @default_importer.setter def default_importer(self, importer): self._default_importer = importer @property def users(self): return self._users @property def directory(self): return self._out_directory @property def in_directory(self): return self._in_directory def group(self, _id, create=False): if _id in self._imported_groups: _logger.debug('Found group {0} in cache: {1}'.format( self._in_directory.color_name(_id), self._out_directory.color_name(self._imported_groups[_id]))) - return self._imported_groups[_id] + return self._imported_groups[_id]['out_id'] else: _name = self._in_directory.get_group_name(_id) if _name is not None: _t_name = self._default_importer.transfered_name(_name) _out = self._out_directory.get_group_unique_id(_t_name) if (_out is None) and create: _out = self._default_importer.transfer(_name) else: _logger.error( ' Could not find {1} ({0}) in directory {2} {3}'.format( self._in_directory.color_name(_id), self._in_directory.color_name(_t_name, type='group'), self._out_directory.backend_name, create)) return _out else: _logger.error( ' Could not find 
{0} in directory {1}'.format( _id, self._in_directory.backend_name)) return None + def group_by_in_name(self, _name): + for _id, data in self._imported_groups.items(): + if data['in_name'] == _name: + return data['out_id'] + return None + + def group_by_out_name(self, _name): + for _id, data in self._imported_groups.items(): + if data['out_name'] == _name: + return data['out_id'] + return None + + def get_group_unique_id(self, _name): + _gid = self.group_by_out_name(_name) + if _gid is None: + return self._out_directory.get_group_unique_id(_name) + return _gid + def user(self, _id, **kwargs): if _id in self._users: _logger.debug('Found user {0} in cache: {1}'.format( self._in_directory.color_name(_id), self._out_directory.color_name(self._users[_id]['oid']))) return self._users[_id]['oid'] else: _email = self._in_directory.get_user_email(_id) if _email is not None: return self._out_directory.get_user_unique_id(_email) return None diff --git a/getmystuph/importers/repo_importer.py b/getmystuph/importers/repo_importer.py index 3db611e..6e2c1a4 100644 --- a/getmystuph/importers/repo_importer.py +++ b/getmystuph/importers/repo_importer.py @@ -1,199 +1,202 @@ # -*- coding: utf-8 -*- import logging import copy from .. import export from .. import colored from .. import color_code from ..repo import Repo from .importer import Importer __author__ = "Nicolas Richart" __copyright__ = "Copyright (C) 2016, EPFL (Ecole Polytechnique Fédérale " \ "de Lausanne) - SCITAS (Scientific IT and Application " \ "Support)" __credits__ = ["Nicolas Richart"] __license__ = "BSD" __version__ = "0.1" __maintainer__ = "Nicolas Richart" __email__ = "nicolas.richart@epfl.ch" _logger = logging.getLogger(__name__) @export class RepoImporter(Importer): __default_import_scheme = {'type': 'git', 'permissions': {'scheme': 'import'}} def __init__(self, name, config, **kwargs): super().__init__(name, config, self.__default_import_scheme, **kwargs) _logger.info( 'Initializing importer for Repo {0}' ' with configuration: {1}'.format(self._colored_name, self._config)) def transfer(self, name): _colored_name = colored(name, color_code['repo'], attrs=['bold']) _logger.info('Locking for repo: {0} ({1})'.format(_colored_name, self._colored_name)) _import_scheme = copy.copy(self._config['import-scheme']) _logger.debug(' --> repo info {0}'.format(colored(self._config, attrs=['bold']))) _config = copy.copy(self._config) _type = _config.pop('type', self.__default_import_scheme['type']) _in_repo = Repo(name=name, keyring=self._keyring, dry_run=self._dry_run, type=_type, **self._backend_in) _type = _import_scheme['type'] if _type == 'same': _type = _in_repo.repo_type _name = _import_scheme.pop('name', self._name).format( original_name=self._name) _out_repo = Repo(name=_name, keyring=self._keyring, dry_run=self._dry_run, type=_type, **self._backend_out) _queries = {'git': {'git': 'RepoGit', 'svn': 'RepoGitSvn'}, #'svn': {'svn': 'svn.RepoSvnSync'}} } _query_class = None if _type in _queries and _in_repo.repo_type in _queries[_type]: _class_name = _queries[_type][_in_repo.repo_type] _module = __import__('getmystuph.backends.repos.{0}'.format(_type), globals(), locals(), [_class_name], 0) _query_class = getattr(_module, _class_name) else: _msg = 'Cannot import a {0} repo in a {1} repo'.format( _type, _in_repo.repo_type) _logger.error(_msg) #raise RuntimeError(_msg) _out_repo.create() _permissions_scheme = _import_scheme['permissions'] _out_perms = Repo.Permissions(_out_repo) if _permissions_scheme['scheme'] == 'import': _in_perms = 
_in_repo.permissions _logger.debug("Replicating permissions {0}".format(_in_perms)) for _type in {'group', 'user'}: _perms_ug = getattr(_in_perms, '{0}s'.format(_type)) for _entity in _perms_ug: _in_id = _entity['id'] _out_id = getattr( self._user_db, _type)(_in_id, create=True) if _type == 'user' and \ _out_id == self._user_db.directory.whoami: _out_id = '_author_' if _out_id is not None: getattr(_out_perms, 'add_{0}'.format(_type))(_out_id, _entity['perm']) else: _logger.warning( 'No permissions to replicate for repository {0}'.format( _colored_name)) elif _permissions_scheme['scheme'] == 'project': if 'project' in _permissions_scheme: - _gid = self._user_db.directory.get_group_unique_id( + _gid = self._user_db.get_group_unique_id( _permissions_scheme['project']) if _gid is not None: _out_perms.groups = [ {'id': _gid, 'perm': Repo.EDIT + Repo.PUSH + Repo.VIEW}] else: _msg = str('The project {0} you specified in the ' + 'permissions of repo {1} does not exists' + ' in {2}').format( colored(_permissions_scheme['project'], color_code['group'], attrs=['bold']), _colored_name, self._user_db.directory.backend_name) _logger.error(_msg) raise RuntimeError(_msg) else: _msg = 'You should specify a project name in the ' + \ 'permissions of repo {0}' .format(_colored_name) + \ ' to be able to use the \'project\' import scheme' _logger.error(_msg) raise RuntimeError(_msg) elif _permissions_scheme['scheme'] == 'user': _out_perms.groups = [ {'id': '_author_', 'perm': Repo.EDIT + Repo.PUSH + Repo.VIEW}] elif _permissions_scheme['scheme'] == 'static': for _perm_type in ['edit', 'view', 'push']: if _perm_type not in _permissions_scheme: _msg = 'You should specify a \'{0}\' in the ' + \ 'permissions of repo {1} to be able to use the ' + \ '\'project\' import scheme'.format(_perm_type, _colored_name) _logger.error(_msg) raise RuntimeError(_msg) _equivalent = {'edit': Repo.EDIT, 'view': Repo.VIEW, 'push': Repo.PUSH} for _perm_type in _equivalent.keys(): if _perm_type in _permissions_scheme: _perm_list = _permissions_scheme[_perm_type] if type(_perm_list) is not list: _perm_list = [_perm_list] if _equivalent[_perm_type] is Repo.VIEW: _out_perms.anonymous = False _out_perms.remove_permission(_equivalent[_perm_type]) for _entity in _perm_list: if _entity == '_author_' or \ _entity == '_users_' or \ _entity == '_public_': _out_perms.add_user(_entity, _equivalent[_perm_type]) else: _id = self._user_db.directory.get_group_unique_id( _entity) if _id is not None: - _out_perms.add_group(_entity, + _out_perms.add_group(_id, _equivalent[_perm_type]) else: _logger.error( 'The project {0} was not found in {1}'.format( _entity, self._user_db.directory.backend_name)) if _out_perms is not None: _out_repo.set_permissions(_out_perms) - if _query_class: - with _query_class(_in_repo) as _clone: + if _query_class is not None: + _out_repo.enable() + with _query_class(_in_repo, dry_run=self._dry_run) as _clone: _branches = _clone.branches _tags = _clone.tags for b in _branches: _logger.debug("Branch: {0}".format(colored(b, attrs=['bold']))) for t in _tags: _logger.debug("Tag: {0}".format(colored(t, attrs=['bold']))) _clone.add_remote(_out_repo) + _out_repo.wait_enabled() + _clone.push() diff --git a/getmystuph/repo.py b/getmystuph/repo.py index db792a9..5cf3756 100644 --- a/getmystuph/repo.py +++ b/getmystuph/repo.py @@ -1,236 +1,240 @@ # -*- coding: utf-8 -*- import copy import logging import tempfile from . import export from . import colored from . 
import color_code from .backends import _get_class from .directory import Directory __author__ = "Nicolas Richart" __copyright__ = "Copyright (C) 2016, EPFL (Ecole Polytechnique Fédérale " \ "de Lausanne) - SCITAS (Scientific IT and Application " \ "Support)" __credits__ = ["Nicolas Richart"] __license__ = "BSD" __version__ = "0.1" __maintainer__ = "Nicolas Richart" __email__ = "nicolas.richart@epfl.ch" _logger = logging.getLogger(__name__) @export class Repo(object): '''Interface class to define for your backend''' VIEW = 0x1 PUSH = 0x2 EDIT = 0x4 _repo_backends = dict() def __new__(cls, *args, **kwargs): """ Factory constructor depending on the chosen backend """ option = copy.copy(kwargs) backend = option.pop('backend', None) repo_type = option.pop('type', 'git') _class = _get_class(repo_type, backend) return super(Repo, cls).__new__(_class) def __init__(self, name, *args, **kwargs): self._name = name self._colored_name = self.color_name(name) options = copy.copy(kwargs) self._username = options.pop('username', None) self._type = options.pop('type', None) self._dry_run = options.pop("dry_run", False) self._backend_name = options.pop("backend", None) self._directory = options.pop('directory', None) if self._directory is None: self._directory = Directory(type='directory', backend=self._backend_name, username=self._username, dry_run=self._dry_run, **options) + + def enable(self): + pass + @property def backend_name(self): return self._backend_name @property def repo_type(self): return self._type def color_name(self, name): return colored(name, color_code['repo'], attrs=['bold']) @property def directory(self): return self._directory class Permissions(object): def __init__(self, repo): self.__groups = [] self.__users = [] self.__anonymous = False self.__repo = repo @property def groups(self): return self.__groups @groups.setter def groups(self, groups_perms): self.__groups = copy.copy(groups_perms) def add_group(self, group_id, perm): self.__groups.append({'id': group_id, 'perm': perm}) def add_user(self, user_id, perm): self.__users.append({'id': user_id, 'perm': perm}) def remove_permission(self, perm): _lists = {'group': self.__groups, 'user': self.__users} for _type in _lists.keys(): for _entity in _lists[_type]: _entity['perm'] = \ _entity['perm'] ^ (_entity['perm'] & perm) def user_perm(self, _id): for _user in self.__users: if _user['id'] == _id: return _user['perm'] return 0 @property def users(self): return self.__users @users.setter def users(self, users_perms): self.__users = copy.copy(users_perms) @property def anonymous(self): return self.__anonymous @anonymous.setter def anonymous(self, anonymous): self.__anonymous = anonymous @property def all_users(self): _users = [u['id'] for u in self.__users] _directory = self.__repo.directory for g in self._groups: _users.extend(_directory.get_users_from_group(g['id'])) return set(_users) def __repr__(self): return ''.format( self.__groups, self.__users, self.__anonymous) def set_permissions(self, permissions): pass @property def permissions(self): ''' Returns a dictionary of permissions of the form: {'groups': [{'id': id, 'perm': perm, ...}, ...], 'users': [{'id': id, 'perm': perm, ...}, ...], 'anonymous': True/False} perm should be read, write, admin, or None ''' return self.Permissions(self) @property def name(self): return self._name @property def url(self): return self._url @property def username(self): return self._username def get_query(self): if self._type == 'git': from .repo_backends import RepoGit return RepoGit(self) else: 
raise RuntimeError( 'No backend for \'{0}\' implemented yet'.format(self._type)) class RepoQuery(object): class debug_mktemp: def __init__(self, name): import os self._path = '{0}/getmystuph/{1}'.format(os.environ['TMPDIR'], name) try: os.makedirs(self._path) except FileExistsError: pass @property def name(self): return self._path def cleanup(self): pass def __init__(self, repo, **kwargs): self._repo_info = repo self._name = repo.name self._url = repo.url self._username = repo.username self._dry_run = kwargs.pop('dry_run', False) def __enter__(self): if self._stage_path is not None: self._create_stage() return self def _create_stage(self): #self._stage_path = tempfile.TemporaryDirectory( # prefix=self._name + '-') self._stage_path = RepoQuery.debug_mktemp(self._name) _logger.debug('Creating stage folder {0} for repo {1}'.format( colored(self.working_dir, attrs=['bold']), self._repo_info.color_name(self._name))) def __exit__(self, *arg, **kwargs): _logger.debug('Cleaning staged folder {0}'.format( colored(self.working_dir, attrs=['bold']))) self._stage_path.cleanup() def add_remote(self, out_repo): pass @property def tags(self): return [] @property def branches(self): return [] @property def working_dir(self): return self._stage_path.name