diff --git a/pagure/__init__.py b/pagure/__init__.py index b5e4a74..b5e5398 100644 --- a/pagure/__init__.py +++ b/pagure/__init__.py @@ -11,5 +11,5 @@ from __future__ import unicode_literals -__version__ = '4.0.1' -__api_version__ = '0.23' +__version__ = "4.0.1" +__api_version__ = "0.23" diff --git a/pagure/api/__init__.py b/pagure/api/__init__.py index baf70f3..58c5eeb 100644 --- a/pagure/api/__init__.py +++ b/pagure/api/__init__.py @@ -27,7 +27,7 @@ import flask import markupsafe from six.moves.urllib_parse import urljoin -API = flask.Blueprint('api_ns', __name__, url_prefix='/api/0') +API = flask.Blueprint("api_ns", __name__, url_prefix="/api/0") import pagure.lib # noqa: E402 @@ -42,11 +42,11 @@ _log = logging.getLogger(__name__) def preload_docs(endpoint): - ''' Utility to load an RST file and turn it into fancy HTML. ''' + """ Utility to load an RST file and turn it into fancy HTML. """ here = os.path.dirname(os.path.abspath(__file__)) - fname = os.path.join(here, '..', 'doc', endpoint + '.rst') - with codecs.open(fname, 'r', 'utf-8') as stream: + fname = os.path.join(here, "..", "doc", endpoint + ".rst") + with codecs.open(fname, "r", "utf-8") as stream: rst = stream.read() rst = modify_rst(rst) @@ -56,60 +56,75 @@ def preload_docs(endpoint): return api_docs -APIDOC = preload_docs('api') +APIDOC = preload_docs("api") class APIERROR(enum.Enum): """ Clast listing as Enum all the possible error thrown by the API. """ - ENOCODE = 'Variable message describing the issue' - ENOPROJECT = 'Project not found' - ENOPROJECTS = 'No projects found' - ETRACKERDISABLED = 'Issue tracker disabled for this project' - EDBERROR = 'An error occurred at the database level and prevent the ' \ - 'action from reaching completion' - EINVALIDREQ = 'Invalid or incomplete input submitted' - EINVALIDTOK = 'Invalid or expired token. 
Please visit %s to get or '\ - 'renew your API token.'\ - % urljoin(pagure_config['APP_URL'], 'settings#api-keys') - ENOISSUE = 'Issue not found' - EISSUENOTALLOWED = 'You are not allowed to view this issue' - EPULLREQUESTSDISABLED = 'Pull-Request have been deactivated for this '\ - 'project' - ENOREQ = 'Pull-Request not found' - ENOPRCLOSE = 'You are not allowed to merge/close pull-request for '\ - 'this project' - EPRSCORE = 'This request does not have the minimum review score '\ - 'necessary to be merged' - ENOTASSIGNEE = 'Only the assignee can merge this review' - ENOTASSIGNED = 'This request must be assigned to be merged' - ENOUSER = 'No such user found' - ENOCOMMENT = 'Comment not found' - ENEWPROJECTDISABLED = 'Creating project have been disabled for this '\ - 'instance' - ETIMESTAMP = 'Invalid timestamp format' - EDATETIME = 'Invalid datetime format' - EINVALIDISSUEFIELD = 'Invalid custom field submitted' - EINVALIDISSUEFIELD_LINK = 'Invalid custom field submitted, the value '\ - 'is not a link' - EINVALIDPRIORITY = 'Invalid priority submitted' - ENOGROUP = 'Group not found' - ENOTMAINADMIN = 'Only the main admin can set the main admin of a project' - EMODIFYPROJECTNOTALLOWED = 'You are not allowed to modify this project' - EINVALIDPERPAGEVALUE = 'The per_page value must be between 1 and 100' - EGITERROR = 'An error occurred during a git operation' - ENOCOMMIT = 'No such commit found in this repository' - ENOTHIGHENOUGH = 'You do not have sufficient permissions to perform '\ - 'this action' - ENOSIGNEDOFF = 'This repo enforces that all commits are signed off ' \ - 'by their author.' 
- ETRACKERREADONLY = 'The issue tracker of this project is read-only' + + ENOCODE = "Variable message describing the issue" + ENOPROJECT = "Project not found" + ENOPROJECTS = "No projects found" + ETRACKERDISABLED = "Issue tracker disabled for this project" + EDBERROR = ( + "An error occurred at the database level and prevent the " + + "action from reaching completion" + ) + EINVALIDREQ = "Invalid or incomplete input submitted" + EINVALIDTOK = ( + "Invalid or expired token. Please visit %s to get or " + "renew your API token." + % urljoin(pagure_config["APP_URL"], "settings#api-keys") + ) + ENOISSUE = "Issue not found" + EISSUENOTALLOWED = "You are not allowed to view this issue" + EPULLREQUESTSDISABLED = ( + "Pull-Request have been deactivated for this " "project" + ) + ENOREQ = "Pull-Request not found" + ENOPRCLOSE = ( + "You are not allowed to merge/close pull-request for " "this project" + ) + EPRSCORE = ( + "This request does not have the minimum review score " + "necessary to be merged" + ) + ENOTASSIGNEE = "Only the assignee can merge this review" + ENOTASSIGNED = "This request must be assigned to be merged" + ENOUSER = "No such user found" + ENOCOMMENT = "Comment not found" + ENEWPROJECTDISABLED = ( + "Creating project have been disabled for this " "instance" + ) + ETIMESTAMP = "Invalid timestamp format" + EDATETIME = "Invalid datetime format" + EINVALIDISSUEFIELD = "Invalid custom field submitted" + EINVALIDISSUEFIELD_LINK = ( + "Invalid custom field submitted, the value " "is not a link" + ) + EINVALIDPRIORITY = "Invalid priority submitted" + ENOGROUP = "Group not found" + ENOTMAINADMIN = "Only the main admin can set the main admin of a project" + EMODIFYPROJECTNOTALLOWED = "You are not allowed to modify this project" + EINVALIDPERPAGEVALUE = "The per_page value must be between 1 and 100" + EGITERROR = "An error occurred during a git operation" + ENOCOMMIT = "No such commit found in this repository" + ENOTHIGHENOUGH = ( + "You do not have sufficient 
permissions to perform " "this action" + ) + ENOSIGNEDOFF = ( + "This repo enforces that all commits are signed off " + "by their author." + ) + ETRACKERREADONLY = "The issue tracker of this project is read-only" def get_authorized_api_project(session, repo, user=None, namespace=None): - ''' Helper function to get an authorized_project with optional lock. ''' + """ Helper function to get an authorized_project with optional lock. """ repo = pagure.lib.get_authorized_project( - flask.g.session, repo, user=user, namespace=namespace) + flask.g.session, repo, user=user, namespace=namespace + ) flask.g.repo = repo return repo @@ -119,9 +134,9 @@ def get_request_data(): def check_api_acls(acls, optional=False): - ''' Checks if the user provided an API token with its request and if + """ Checks if the user provided an API token with its request and if this token allows the user to access the endpoint desired. - ''' + """ if authenticated(): return @@ -130,10 +145,10 @@ def check_api_acls(acls, optional=False): token = None token_str = None - if 'Authorization' in flask.request.headers: - authorization = flask.request.headers['Authorization'] - if 'token' in authorization: - token_str = authorization.split('token', 1)[1].strip() + if "Authorization" in flask.request.headers: + authorization = flask.request.headers["Authorization"] + if "token" in authorization: + token_str = authorization.split("token", 1)[1].strip() token_auth = False if token_str: @@ -161,8 +176,8 @@ def check_api_acls(acls, optional=False): if not token_auth: output = { - 'error_code': APIERROR.EINVALIDTOK.name, - 'error': APIERROR.EINVALIDTOK.value, + "error_code": APIERROR.EINVALIDTOK.name, + "error": APIERROR.EINVALIDTOK.value, } jsonout = flask.jsonify(output) jsonout.status_code = 401 @@ -170,16 +185,16 @@ def check_api_acls(acls, optional=False): def api_login_required(acls=None): - ''' Decorator used to indicate that authentication is required for some + """ Decorator used to indicate that 
authentication is required for some API endpoint. - ''' + """ def decorator(function): - ''' The decorator of the function ''' + """ The decorator of the function """ @functools.wraps(function) def decorated_function(*args, **kwargs): - ''' Actually does the job with the arguments provided. ''' + """ Actually does the job with the arguments provided. """ response = check_api_acls(acls) if response: @@ -192,16 +207,16 @@ def api_login_required(acls=None): def api_login_optional(acls=None): - ''' Decorator used to indicate that authentication is optional for some + """ Decorator used to indicate that authentication is optional for some API endpoint. - ''' + """ def decorator(function): - ''' The decorator of the function ''' + """ The decorator of the function """ @functools.wraps(function) def decorated_function(*args, **kwargs): - ''' Actually does the job with the arguments provided. ''' + """ Actually does the job with the arguments provided. """ response = check_api_acls(acls, optional=True) if response: @@ -214,11 +229,11 @@ def api_login_optional(acls=None): def api_method(function): - ''' Runs an API endpoint and catch all the APIException thrown. ''' + """ Runs an API endpoint and catch all the APIException thrown. """ @functools.wraps(function) def wrapper(*args, **kwargs): - ''' Actually does the job with the arguments provided. ''' + """ Actually does the job with the arguments provided. 
""" try: result = function(*args, **kwargs) except APIError as err: @@ -227,17 +242,17 @@ def api_method(function): if err.error_code in [APIERROR.ENOCODE]: output = { - 'error': err.error, - 'error_code': err.error_code.name + "error": err.error, + "error_code": err.error_code.name, } else: output = { - 'error': err.error_code.value, - 'error_code': err.error_code.name, + "error": err.error_code.value, + "error_code": err.error_code.name, } if err.errors: - output['errors'] = err.errors + output["errors"] = err.errors response = flask.jsonify(output) response.status_code = err.status_code else: @@ -255,7 +270,7 @@ def get_page(): raises APIERROR.EINVALIDREQ if the page provided is lower than 1 """ - page = flask.request.values.get('page', None) + page = flask.request.values.get("page", None) if not page: page = 1 else: @@ -263,11 +278,13 @@ def get_page(): page = int(page) except (TypeError, ValueError): raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ) + 400, error_code=APIERROR.EINVALIDREQ + ) if page < 1: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ) + 400, error_code=APIERROR.EINVALIDREQ + ) return page @@ -279,37 +296,39 @@ def get_per_page(): raises APIERROR.EINVALIDPERPAGEVALUE if the page provided is lower than 1 or greater than 100 """ - per_page = flask.request.values.get('per_page', None) or 20 + per_page = flask.request.values.get("per_page", None) or 20 if per_page: try: per_page = int(per_page) except (TypeError, ValueError): raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ) + 400, error_code=APIERROR.EINVALIDREQ + ) if per_page < 1 or per_page > 100: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDPERPAGEVALUE) + 400, error_code=APIERROR.EINVALIDPERPAGEVALUE + ) return per_page -if pagure_config.get('ENABLE_TICKETS', True): +if pagure_config.get("ENABLE_TICKETS", True): from pagure.api import issue # noqa: E402 from pagure.api import fork # noqa: E402 
from pagure.api import project # noqa: E402 from pagure.api import user # noqa: E402 from pagure.api import group # noqa: E402 -if pagure_config.get('PAGURE_CI_SERVICES', False): +if pagure_config.get("PAGURE_CI_SERVICES", False): from pagure.api.ci import jenkins # noqa: E402 -@API.route('/version/') -@API.route('/version') -@API.route('/-/version') +@API.route("/version/") +@API.route("/version") +@API.route("/-/version") def api_version(): - ''' + """ API Version ----------- Get the current API version. @@ -327,14 +346,14 @@ def api_version(): "version": "1" } - ''' - return flask.jsonify({'version': pagure.__api_version__}) + """ + return flask.jsonify({"version": pagure.__api_version__}) -@API.route('/users/') -@API.route('/users') +@API.route("/users/") +@API.route("/users") def api_users(): - ''' + """ List users ----------- Retrieve users that have logged into the Pagure instance. @@ -364,31 +383,35 @@ def api_users(): "users": ["user1", "user2"] } - ''' - pattern = flask.request.args.get('pattern', None) - if pattern is not None and not pattern.endswith('*'): - pattern += '*' + """ + pattern = flask.request.args.get("pattern", None) + if pattern is not None and not pattern.endswith("*"): + pattern += "*" users = pagure.lib.search_user(flask.g.session, pattern=pattern) return flask.jsonify( { - 'total_users': len(users), - 'users': [usr.username for usr in users], - 'mention': [{ - 'username': usr.username, - 'name': usr.fullname, - 'image': pagure.lib.avatar_url_from_email( - usr.default_email, size=16) - } for usr in users] + "total_users": len(users), + "users": [usr.username for usr in users], + "mention": [ + { + "username": usr.username, + "name": usr.fullname, + "image": pagure.lib.avatar_url_from_email( + usr.default_email, size=16 + ), + } + for usr in users + ], } ) -@API.route('/-/whoami', methods=['POST']) +@API.route("/-/whoami", methods=["POST"]) @api_login_optional() def api_whoami(): - ''' + """ Who am I? 
--------- This API endpoint will return the username associated with the provided @@ -408,44 +431,45 @@ def api_whoami(): "username": "user1" } - ''' + """ if authenticated(): - return flask.jsonify({'username': flask.g.fas_user.username}) + return flask.jsonify({"username": flask.g.fas_user.username}) else: output = { - 'error_code': APIERROR.EINVALIDTOK.name, - 'error': APIERROR.EINVALIDTOK.value, + "error_code": APIERROR.EINVALIDTOK.name, + "error": APIERROR.EINVALIDTOK.value, } jsonout = flask.jsonify(output) jsonout.status_code = 401 return jsonout -@API.route('/task//status') -@API.route('/task//status/') +@API.route("/task//status") +@API.route("/task//status/") def api_task_status(taskid): - ''' + """ Return the status of a async task - ''' + """ result = pagure.lib.tasks.get_result(taskid) if not result.ready: - output = {'ready': False, - 'status': result.status} + output = {"ready": False, "status": result.status} else: - output = {'ready': True, - 'succesful': result.succesful(), - 'status': result.status} + output = { + "ready": True, + "succesful": result.succesful(), + "status": result.status, + } return flask.jsonify(output) -@API.route('//tags') -@API.route('//tags/') -@API.route('/fork///tags') -@API.route('/fork///tags/') +@API.route("//tags") +@API.route("//tags/") +@API.route("/fork///tags") +@API.route("/fork///tags/") def api_project_tags(repo, username=None): - ''' + """ List all the tags of a project ------------------------------ List the tags made on the project's issues. 
@@ -478,35 +502,33 @@ def api_project_tags(repo, username=None): "tags": ["tag1", "tag2"] } - ''' + """ - pattern = flask.request.args.get('pattern', None) - if pattern is not None and not pattern.endswith('*'): - pattern += '*' + pattern = flask.request.args.get("pattern", None) + if pattern is not None and not pattern.endswith("*"): + pattern += "*" project_obj = get_authorized_api_project(flask.g.session, repo, username) if not project_obj: - output = {'output': 'notok', 'error': 'Project not found'} + output = {"output": "notok", "error": "Project not found"} jsonout = flask.jsonify(output) jsonout.status_code = 404 return jsonout tags = pagure.lib.get_tags_of_project( - flask.g.session, project_obj, pattern=pattern) + flask.g.session, project_obj, pattern=pattern + ) return flask.jsonify( - { - 'total_tags': len(tags), - 'tags': [tag.tag for tag in tags] - } + {"total_tags": len(tags), "tags": [tag.tag for tag in tags]} ) -@API.route('/error_codes/') -@API.route('/error_codes') -@API.route('/-/error_codes') +@API.route("/error_codes/") +@API.route("/error_codes") +@API.route("/-/error_codes") def api_error_codes(): - ''' + """ Error codes ------------ Get a dictionary (hash) of all error codes. @@ -525,18 +547,17 @@ def api_error_codes(): ENOPROJECT: 'Project not found', } - ''' + """ errors = { - val.name: val.value - for val in APIERROR.__members__.values() + val.name: val.value for val in APIERROR.__members__.values() } # pylint: disable=no-member return flask.jsonify(errors) -@API.route('/') +@API.route("/") def api(): - ''' Display the api information page. ''' + """ Display the api information page. 
""" api_project_doc = load_doc(project.api_project) api_projects_doc = load_doc(project.api_projects) api_project_watchers_doc = load_doc(project.api_project_watchers) @@ -552,10 +573,11 @@ def api(): api_commit_flags_doc = load_doc(project.api_commit_flags) api_commit_add_flag_doc = load_doc(project.api_commit_add_flag) api_update_project_watchers_doc = load_doc( - project.api_update_project_watchers) + project.api_update_project_watchers + ) issues = [] - if pagure_config.get('ENABLE_TICKETS', True): + if pagure_config.get("ENABLE_TICKETS", True): issues.append(load_doc(issue.api_new_issue)) issues.append(load_doc(issue.api_view_issues)) issues.append(load_doc(issue.api_view_issue)) @@ -570,8 +592,8 @@ def api(): issues.append(load_doc(user.api_view_user_issues)) ci_doc = [] - if pagure_config.get('PAGURE_CI_SERVICES', True): - if 'jenkins' in pagure_config['PAGURE_CI_SERVICES']: + if pagure_config.get("PAGURE_CI_SERVICES", True): + if "jenkins" in pagure_config["PAGURE_CI_SERVICES"]: ci_doc.append(load_doc(jenkins.jenkins_ci_notification)) api_pull_request_views_doc = load_doc(fork.api_pull_request_views) @@ -579,7 +601,8 @@ def api(): api_pull_request_merge_doc = load_doc(fork.api_pull_request_merge) api_pull_request_close_doc = load_doc(fork.api_pull_request_close) api_pull_request_add_comment_doc = load_doc( - fork.api_pull_request_add_comment) + fork.api_pull_request_add_comment + ) api_pull_request_add_flag_doc = load_doc(fork.api_pull_request_add_flag) api_version_doc = load_doc(api_version) @@ -587,32 +610,32 @@ def api(): api_users_doc = load_doc(api_users) api_view_user_doc = load_doc(user.api_view_user) api_view_user_activity_stats_doc = load_doc( - user.api_view_user_activity_stats) + user.api_view_user_activity_stats + ) api_view_user_activity_date_doc = load_doc( - user.api_view_user_activity_date) + user.api_view_user_activity_date + ) api_view_user_requests_filed_doc = load_doc( - user.api_view_user_requests_filed) + 
user.api_view_user_requests_filed + ) api_view_user_requests_actionable_doc = load_doc( - user.api_view_user_requests_actionable) + user.api_view_user_requests_actionable + ) api_view_group_doc = load_doc(group.api_view_group) api_groups_doc = load_doc(group.api_groups) - if pagure_config.get('ENABLE_TICKETS', True): + if pagure_config.get("ENABLE_TICKETS", True): api_project_tags_doc = load_doc(api_project_tags) api_error_codes_doc = load_doc(api_error_codes) - extras = [ - api_whoami_doc, - api_version_doc, - api_error_codes_doc, - ] + extras = [api_whoami_doc, api_version_doc, api_error_codes_doc] - if pagure_config.get('ENABLE_TICKETS', True): + if pagure_config.get("ENABLE_TICKETS", True): extras.append(api_project_tags_doc) return flask.render_template( - 'api.html', + "api.html", version=pagure.__api_version__, api_doc=APIDOC, projects=[ @@ -649,10 +672,7 @@ def api(): api_view_user_requests_filed_doc, api_view_user_requests_actionable_doc, ], - groups=[ - api_groups_doc, - api_view_group_doc - ], + groups=[api_groups_doc, api_view_group_doc], ci=ci_doc, extras=extras, ) diff --git a/pagure/api/ci/jenkins.py b/pagure/api/ci/jenkins.py index c5aa9a8..29e562e 100644 --- a/pagure/api/ci/jenkins.py +++ b/pagure/api/ci/jenkins.py @@ -27,17 +27,26 @@ from pagure.api import API, APIERROR, api_method _log = logging.getLogger(__name__) -@API.route('/ci/jenkins///build-finished', - methods=['POST']) -@API.route('/ci/jenkins////build-finished', - methods=['POST']) -@API.route('/ci/jenkins/forks///' - '/build-finished', methods=['POST']) -@API.route('/ci/jenkins/forks////' - '/build-finished', methods=['POST']) +@API.route( + "/ci/jenkins///build-finished", methods=["POST"] +) +@API.route( + "/ci/jenkins////build-finished", + methods=["POST"], +) +@API.route( + "/ci/jenkins/forks///" "/build-finished", + methods=["POST"], +) +@API.route( + "/ci/jenkins/forks////" + "/build-finished", + methods=["POST"], +) @api_method def jenkins_ci_notification( - repo, 
pagure_ci_token, username=None, namespace=None): + repo, pagure_ci_token, username=None, namespace=None +): """ Jenkins Build Notification -------------------------- @@ -51,15 +60,16 @@ def jenkins_ci_notification( """ project = pagure.lib._get_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, namespace=namespace + ) flask.g.repo_locked = True flask.g.repo = project if not project: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) if not constant_time.bytes_eq( - to_bytes(pagure_ci_token), - to_bytes(project.ci_hook.pagure_ci_token)): + to_bytes(pagure_ci_token), to_bytes(project.ci_hook.pagure_ci_token) + ): raise pagure.exceptions.APIError(401, error_code=APIERROR.EINVALIDTOK) data = flask.request.get_json() @@ -67,19 +77,20 @@ def jenkins_ci_notification( _log.debug("Bad Request: No JSON retrieved") raise pagure.exceptions.APIError(400, error_code=APIERROR.EINVALIDREQ) - build_id = data.get('build', {}).get('number') + build_id = data.get("build", {}).get("number") if not build_id: _log.debug("Bad Request: No build ID retrieved") raise pagure.exceptions.APIError(400, error_code=APIERROR.EINVALIDREQ) - build_phase = data.get('build', {}).get('phase') + build_phase = data.get("build", {}).get("phase") if not build_phase: _log.debug("Bad Request: No build phase retrieved") raise pagure.exceptions.APIError(400, error_code=APIERROR.EINVALIDREQ) if build_phase not in ["STARTED", "FINALIZED"]: _log.debug( "Ignoring phase: %s - not in the list: STARTED, FINALIZED", - build_phase) + build_phase, + ) raise pagure.exceptions.APIError(400, error_code=APIERROR.EINVALIDREQ) try: @@ -87,15 +98,17 @@ def jenkins_ci_notification( flask.g.session, project, build_id, - requestfolder=pagure.config.config['REQUESTS_FOLDER'] + requestfolder=pagure.config.config["REQUESTS_FOLDER"], ) except pagure.exceptions.NoCorrespondingPR as err: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.ENOCODE, 
error=str(err)) + 400, error_code=APIERROR.ENOCODE, error=str(err) + ) except pagure.exceptions.PagureException as err: - _log.error('Error processing jenkins notification', exc_info=err) + _log.error("Error processing jenkins notification", exc_info=err) raise pagure.exceptions.APIError( - 400, error_code=APIERROR.ENOCODE, error=str(err)) + 400, error_code=APIERROR.ENOCODE, error=str(err) + ) - _log.info('Successfully proccessed jenkins notification') - return ('', 204) + _log.info("Successfully proccessed jenkins notification") + return ("", 204) diff --git a/pagure/api/fork.py b/pagure/api/fork.py index 42b8678..a87e6ac 100644 --- a/pagure/api/fork.py +++ b/pagure/api/fork.py @@ -21,21 +21,32 @@ import pagure import pagure.exceptions import pagure.lib import pagure.lib.tasks -from pagure.api import (API, api_method, api_login_required, APIERROR, - get_authorized_api_project, get_request_data, - get_page, get_per_page) +from pagure.api import ( + API, + api_method, + api_login_required, + APIERROR, + get_authorized_api_project, + get_request_data, + get_page, + get_per_page, +) from pagure.config import config as pagure_config -from pagure.utils import authenticated, is_repo_committer, is_true, \ - api_authenticated +from pagure.utils import ( + authenticated, + is_repo_committer, + is_true, + api_authenticated, +) _log = logging.getLogger(__name__) -@API.route('//pull-requests') -@API.route('///pull-requests') -@API.route('/fork///pull-requests') -@API.route('/fork////pull-requests') +@API.route("//pull-requests") +@API.route("///pull-requests") +@API.route("/fork///pull-requests") +@API.route("/fork////pull-requests") @api_method def api_pull_request_views(repo, username=None, namespace=None): """ @@ -137,36 +148,40 @@ def api_pull_request_views(repo, username=None, namespace=None): """ repo = get_authorized_api_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, namespace=namespace + ) if repo is None: 
raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) - if not repo.settings.get('pull_requests', True): + if not repo.settings.get("pull_requests", True): raise pagure.exceptions.APIError( - 404, error_code=APIERROR.EPULLREQUESTSDISABLED) + 404, error_code=APIERROR.EPULLREQUESTSDISABLED + ) - status = flask.request.args.get('status', True) - assignee = flask.request.args.get('assignee', None) - author = flask.request.args.get('author', None) + status = flask.request.args.get("status", True) + assignee = flask.request.args.get("assignee", None) + author = flask.request.args.get("author", None) status_text = ("%s" % status).lower() requests = [] - if status_text in ['0', 'false', 'closed']: + if status_text in ["0", "false", "closed"]: requests = pagure.lib.search_pull_requests( flask.g.session, project_id=repo.id, status=False, assignee=assignee, - author=author) + author=author, + ) - elif status_text == 'all': + elif status_text == "all": requests = pagure.lib.search_pull_requests( flask.g.session, project_id=repo.id, status=None, assignee=assignee, - author=author) + author=author, + ) else: requests = pagure.lib.search_pull_requests( @@ -174,41 +189,39 @@ def api_pull_request_views(repo, username=None, namespace=None): project_id=repo.id, assignee=assignee, author=author, - status=status) + status=status, + ) page = get_page() per_page = get_per_page() pagination_metadata = pagure.lib.get_pagination_metadata( - flask.request, page, per_page, len(requests)) + flask.request, page, per_page, len(requests) + ) start = (page - 1) * per_page if start + per_page > len(requests): requests_page = requests[start:] else: - requests_page = requests[start:(start + per_page)] + requests_page = requests[start : (start + per_page)] jsonout = { - 'total_requests': len(requests), - 'requests': [ - request.to_json(public=True, api=True) - for request in requests_page], - 'args': { - 'status': status, - 'assignee': assignee, - 'author': author, - } + 
"total_requests": len(requests), + "requests": [ + request.to_json(public=True, api=True) for request in requests_page + ], + "args": {"status": status, "assignee": assignee, "author": author}, } if pagination_metadata: - jsonout['args']['page'] = page - jsonout['args']['per_page'] = per_page - jsonout['pagination'] = pagination_metadata + jsonout["args"]["page"] = page + jsonout["args"]["per_page"] = per_page + jsonout["pagination"] = pagination_metadata return flask.jsonify(jsonout) -@API.route('//pull-request/') -@API.route('///pull-request/') -@API.route('/fork///pull-request/') -@API.route('/fork////pull-request/') +@API.route("//pull-request/") +@API.route("///pull-request/") +@API.route("/fork///pull-request/") +@API.route("/fork////pull-request/") @api_method def api_pull_request_view(repo, requestid, username=None, namespace=None): """ @@ -279,17 +292,20 @@ def api_pull_request_view(repo, requestid, username=None, namespace=None): """ repo = get_authorized_api_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, namespace=namespace + ) if repo is None: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) - if not repo.settings.get('pull_requests', True): + if not repo.settings.get("pull_requests", True): raise pagure.exceptions.APIError( - 404, error_code=APIERROR.EPULLREQUESTSDISABLED) + 404, error_code=APIERROR.EPULLREQUESTSDISABLED + ) request = pagure.lib.search_pull_requests( - flask.g.session, project_id=repo.id, requestid=requestid) + flask.g.session, project_id=repo.id, requestid=requestid + ) if not request: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOREQ) @@ -298,16 +314,19 @@ def api_pull_request_view(repo, requestid, username=None, namespace=None): return jsonout -@API.route('//pull-request//merge', methods=['POST']) +@API.route("//pull-request//merge", methods=["POST"]) @API.route( - '///pull-request//merge', - methods=['POST']) 
-@API.route('/fork///pull-request//merge', - methods=['POST']) + "///pull-request//merge", methods=["POST"] +) @API.route( - '/fork////pull-request//merge', - methods=['POST']) -@api_login_required(acls=['pull_request_merge']) + "/fork///pull-request//merge", + methods=["POST"], +) +@API.route( + "/fork////pull-request//merge", + methods=["POST"], +) +@api_login_required(acls=["pull_request_merge"]) @api_method def api_pull_request_merge(repo, requestid, username=None, namespace=None): """ @@ -347,20 +366,23 @@ def api_pull_request_merge(repo, requestid, username=None, namespace=None): output = {} repo = get_authorized_api_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, namespace=namespace + ) if repo is None: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) - if not repo.settings.get('pull_requests', True): + if not repo.settings.get("pull_requests", True): raise pagure.exceptions.APIError( - 404, error_code=APIERROR.EPULLREQUESTSDISABLED) + 404, error_code=APIERROR.EPULLREQUESTSDISABLED + ) if flask.g.token.project and repo != flask.g.token.project: raise pagure.exceptions.APIError(401, error_code=APIERROR.EINVALIDTOK) request = pagure.lib.search_pull_requests( - flask.g.session, project_id=repo.id, requestid=requestid) + flask.g.session, project_id=repo.id, requestid=requestid + ) if not request: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOREQ) @@ -368,43 +390,47 @@ def api_pull_request_merge(repo, requestid, username=None, namespace=None): if not is_repo_committer(repo): raise pagure.exceptions.APIError(403, error_code=APIERROR.ENOPRCLOSE) - if repo.settings.get('Only_assignee_can_merge_pull-request', False): + if repo.settings.get("Only_assignee_can_merge_pull-request", False): if not request.assignee: raise pagure.exceptions.APIError( - 403, error_code=APIERROR.ENOTASSIGNED) + 403, error_code=APIERROR.ENOTASSIGNED + ) if request.assignee.username != 
flask.g.fas_user.username: raise pagure.exceptions.APIError( - 403, error_code=APIERROR.ENOTASSIGNEE) + 403, error_code=APIERROR.ENOTASSIGNEE + ) - threshold = repo.settings.get('Minimum_score_to_merge_pull-request', -1) + threshold = repo.settings.get("Minimum_score_to_merge_pull-request", -1) if threshold > 0 and int(request.score) < int(threshold): raise pagure.exceptions.APIError(403, error_code=APIERROR.EPRSCORE) task = pagure.lib.tasks.merge_pull_request.delay( - repo.name, namespace, username, requestid, - flask.g.fas_user.username) - output = {'message': 'Merging queued', - 'taskid': task.id} + repo.name, namespace, username, requestid, flask.g.fas_user.username + ) + output = {"message": "Merging queued", "taskid": task.id} - if get_request_data().get('wait', True): + if get_request_data().get("wait", True): task.get() - output = {'message': 'Changes merged!'} + output = {"message": "Changes merged!"} jsonout = flask.jsonify(output) return jsonout -@API.route('//pull-request//close', methods=['POST']) +@API.route("//pull-request//close", methods=["POST"]) @API.route( - '///pull-request//close', - methods=['POST']) -@API.route('/fork///pull-request//close', - methods=['POST']) + "///pull-request//close", methods=["POST"] +) @API.route( - '/fork////pull-request//close', - methods=['POST']) -@api_login_required(acls=['pull_request_close']) + "/fork///pull-request//close", + methods=["POST"], +) +@API.route( + "/fork////pull-request//close", + methods=["POST"], +) +@api_login_required(acls=["pull_request_close"]) @api_method def api_pull_request_close(repo, requestid, username=None, namespace=None): """ @@ -435,20 +461,23 @@ def api_pull_request_close(repo, requestid, username=None, namespace=None): output = {} repo = get_authorized_api_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, namespace=namespace + ) if repo is None: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) - if 
not repo.settings.get('pull_requests', True): + if not repo.settings.get("pull_requests", True): raise pagure.exceptions.APIError( - 404, error_code=APIERROR.EPULLREQUESTSDISABLED) + 404, error_code=APIERROR.EPULLREQUESTSDISABLED + ) if repo != flask.g.token.project: raise pagure.exceptions.APIError(401, error_code=APIERROR.EINVALIDTOK) request = pagure.lib.search_pull_requests( - flask.g.session, project_id=repo.id, requestid=requestid) + flask.g.session, project_id=repo.id, requestid=requestid + ) if not request: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOREQ) @@ -458,11 +487,14 @@ def api_pull_request_close(repo, requestid, username=None, namespace=None): try: pagure.lib.close_pull_request( - flask.g.session, request, flask.g.fas_user.username, - requestfolder=pagure_config['REQUESTS_FOLDER'], - merged=False) + flask.g.session, + request, + flask.g.fas_user.username, + requestfolder=pagure_config["REQUESTS_FOLDER"], + merged=False, + ) flask.g.session.commit() - output['message'] = 'Pull-request closed!' + output["message"] = "Pull-request closed!" 
except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() _log.exception(err) @@ -472,19 +504,24 @@ def api_pull_request_close(repo, requestid, username=None, namespace=None): return jsonout -@API.route('//pull-request//comment', - methods=['POST']) -@API.route('///pull-request//comment', - methods=['POST']) -@API.route('/fork///pull-request//comment', - methods=['POST']) +@API.route("//pull-request//comment", methods=["POST"]) +@API.route( + "///pull-request//comment", + methods=["POST"], +) +@API.route( + "/fork///pull-request//comment", + methods=["POST"], +) @API.route( - '/fork////pull-request//comment', - methods=['POST']) -@api_login_required(acls=['pull_request_comment']) + "/fork////pull-request//comment", + methods=["POST"], +) +@api_login_required(acls=["pull_request_comment"]) @api_method def api_pull_request_add_comment( - repo, requestid, username=None, namespace=None): + repo, requestid, username=None, namespace=None +): """ Comment on a pull-request ------------------------- @@ -538,22 +575,25 @@ def api_pull_request_add_comment( """ # noqa repo = get_authorized_api_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, namespace=namespace + ) output = {} if repo is None: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) - if not repo.settings.get('pull_requests', True): + if not repo.settings.get("pull_requests", True): raise pagure.exceptions.APIError( - 404, error_code=APIERROR.EPULLREQUESTSDISABLED) + 404, error_code=APIERROR.EPULLREQUESTSDISABLED + ) if flask.g.token.project and repo != flask.g.token.project: raise pagure.exceptions.APIError(401, error_code=APIERROR.EINVALIDTOK) request = pagure.lib.search_pull_requests( - flask.g.session, project_id=repo.id, requestid=requestid) + flask.g.session, project_id=repo.id, requestid=requestid + ) if not request: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOREQ) @@ -576,13 +616,14 @@ def 
api_pull_request_add_comment( row=row, comment=comment, user=flask.g.fas_user.username, - requestfolder=pagure_config['REQUESTS_FOLDER'], + requestfolder=pagure_config["REQUESTS_FOLDER"], ) flask.g.session.commit() - output['message'] = message + output["message"] = message except pagure.exceptions.PagureException as err: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.ENOCODE, error=str(err)) + 400, error_code=APIERROR.ENOCODE, error=str(err) + ) except SQLAlchemyError as err: # pragma: no cover _log.exception(err) flask.g.session.rollback() @@ -590,22 +631,26 @@ def api_pull_request_add_comment( else: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors) + 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors + ) jsonout = flask.jsonify(output) return jsonout -@API.route('//pull-request//flag', - methods=['POST']) -@API.route('///pull-request//flag', - methods=['POST']) -@API.route('/fork///pull-request//flag', - methods=['POST']) +@API.route("//pull-request//flag", methods=["POST"]) @API.route( - '/fork////pull-request//flag', - methods=['POST']) -@api_login_required(acls=['pull_request_flag']) + "///pull-request//flag", methods=["POST"] +) +@API.route( + "/fork///pull-request//flag", + methods=["POST"], +) +@API.route( + "/fork////pull-request//flag", + methods=["POST"], +) +@api_login_required(acls=["pull_request_flag"]) @api_method def api_pull_request_add_flag(repo, requestid, username=None, namespace=None): """ @@ -720,29 +765,30 @@ def api_pull_request_add_flag(repo, requestid, username=None, namespace=None): """ # noqa repo = get_authorized_api_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, namespace=namespace + ) output = {} if repo is None: - raise pagure.exceptions.APIError( - 404, error_code=APIERROR.ENOPROJECT) + raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) - if not repo.settings.get('pull_requests', True): + 
if not repo.settings.get("pull_requests", True): raise pagure.exceptions.APIError( - 404, error_code=APIERROR.EPULLREQUESTSDISABLED) + 404, error_code=APIERROR.EPULLREQUESTSDISABLED + ) if flask.g.token.project and repo != flask.g.token.project: - raise pagure.exceptions.APIError( - 401, error_code=APIERROR.EINVALIDTOK) + raise pagure.exceptions.APIError(401, error_code=APIERROR.EINVALIDTOK) request = pagure.lib.search_pull_requests( - flask.g.session, project_id=repo.id, requestid=requestid) + flask.g.session, project_id=repo.id, requestid=requestid + ) if not request: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOREQ) - if 'status' in get_request_data(): + if "status" in get_request_data(): form = pagure.forms.AddPullRequestFlagForm(csrf_enabled=False) else: form = pagure.forms.AddPullRequestFlagFormV1(csrf_enabled=False) @@ -752,14 +798,17 @@ def api_pull_request_add_flag(repo, requestid, username=None, namespace=None): comment = form.comment.data.strip() url = form.url.data.strip() uid = form.uid.data.strip() if form.uid.data else None - if 'status' in get_request_data(): + if "status" in get_request_data(): status = form.status.data.strip() else: if percent is None: - status = pagure_config['FLAG_PENDING'] + status = pagure_config["FLAG_PENDING"] else: - status = pagure_config['FLAG_SUCCESS'] if percent != '0' else \ - pagure_config['FLAG_FAILURE'] + status = ( + pagure_config["FLAG_SUCCESS"] + if percent != "0" + else pagure_config["FLAG_FAILURE"] + ) try: # New Flag message, uid = pagure.lib.add_pull_request_flag( @@ -773,17 +822,19 @@ def api_pull_request_add_flag(repo, requestid, username=None, namespace=None): uid=uid, user=flask.g.fas_user.username, token=flask.g.token.id, - requestfolder=pagure_config['REQUESTS_FOLDER'], + requestfolder=pagure_config["REQUESTS_FOLDER"], ) flask.g.session.commit() pr_flag = pagure.lib.get_pull_request_flag_by_uid( - flask.g.session, request, uid) - output['message'] = message - output['uid'] = uid - 
output['flag'] = pr_flag.to_json() + flask.g.session, request, uid + ) + output["message"] = message + output["uid"] = uid + output["flag"] = pr_flag.to_json() except pagure.exceptions.PagureException as err: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.ENOCODE, error=str(err)) + 400, error_code=APIERROR.ENOCODE, error=str(err) + ) except SQLAlchemyError as err: # pragma: no cover _log.exception(err) flask.g.session.rollback() @@ -791,33 +842,36 @@ def api_pull_request_add_flag(repo, requestid, username=None, namespace=None): else: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors) + 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors + ) - output['avatar_url'] = pagure.lib.avatar_url_from_email( - flask.g.fas_user.default_email, size=30) + output["avatar_url"] = pagure.lib.avatar_url_from_email( + flask.g.fas_user.default_email, size=30 + ) - output['user'] = flask.g.fas_user.username + output["user"] = flask.g.fas_user.username jsonout = flask.jsonify(output) return jsonout +@API.route("//pull-request//subscribe", methods=["POST"]) @API.route( - '//pull-request//subscribe', - methods=['POST']) -@API.route( - '///pull-request//subscribe', - methods=['POST']) + "///pull-request//subscribe", + methods=["POST"], +) @API.route( - '/fork///pull-request//subscribe', - methods=['POST']) + "/fork///pull-request//subscribe", + methods=["POST"], +) @API.route( - '/fork////pull-request/' - '/subscribe', methods=['POST']) -@api_login_required(acls=['pull_request_subscribe']) + "/fork////pull-request/" + "/subscribe", + methods=["POST"], +) +@api_login_required(acls=["pull_request_subscribe"]) @api_method -def api_subscribe_pull_request( - repo, requestid, username=None, namespace=None): +def api_subscribe_pull_request(repo, requestid, username=None, namespace=None): """ Subscribe to an pull-request ---------------------------- @@ -860,25 +914,30 @@ def api_subscribe_pull_request( """ # noqa repo = 
get_authorized_api_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, namespace=namespace + ) output = {} if repo is None: - raise pagure.exceptions.APIError( - 404, error_code=APIERROR.ENOPROJECT) - - if not repo.settings.get('pull_requests', True): - raise pagure.exceptions.APIError( - 404, error_code=APIERROR.EPULLREQUESTSDISABLED) + raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) - if (api_authenticated() and flask.g.token and flask.g.token.project - and repo != flask.g.token.project) or not authenticated(): + if not repo.settings.get("pull_requests", True): raise pagure.exceptions.APIError( - 401, error_code=APIERROR.EINVALIDTOK) + 404, error_code=APIERROR.EPULLREQUESTSDISABLED + ) + + if ( + api_authenticated() + and flask.g.token + and flask.g.token.project + and repo != flask.g.token.project + ) or not authenticated(): + raise pagure.exceptions.APIError(401, error_code=APIERROR.EINVALIDTOK) request = pagure.lib.search_pull_requests( - flask.g.session, project_id=repo.id, requestid=requestid) + flask.g.session, project_id=repo.id, requestid=requestid + ) if not request: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOREQ) @@ -892,15 +951,17 @@ def api_subscribe_pull_request( flask.g.session, user=flask.g.fas_user.username, obj=request, - watch_status=status + watch_status=status, ) flask.g.session.commit() - output['message'] = message + output["message"] = message user_obj = pagure.lib.get_user( - flask.g.session, flask.g.fas_user.username) - output['avatar_url'] = pagure.lib.avatar_url_from_email( - user_obj.default_email, size=30) - output['user'] = flask.g.fas_user.username + flask.g.session, flask.g.fas_user.username + ) + output["avatar_url"] = pagure.lib.avatar_url_from_email( + user_obj.default_email, size=30 + ) + output["user"] = flask.g.fas_user.username except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() 
_log.logger.exception(err) @@ -910,12 +971,13 @@ def api_subscribe_pull_request( return jsonout -@API.route('//pull-request/new', methods=['POST']) -@API.route('///pull-request/new', methods=['POST']) -@API.route('/fork///pull-request/new', methods=['POST']) -@API.route('/fork////pull-request/new', - methods=['POST']) -@api_login_required(acls=['pull_request_create']) +@API.route("//pull-request/new", methods=["POST"]) +@API.route("///pull-request/new", methods=["POST"]) +@API.route("/fork///pull-request/new", methods=["POST"]) +@API.route( + "/fork////pull-request/new", methods=["POST"] +) +@api_login_required(acls=["pull_request_create"]) @api_method def api_pull_request_create(repo, username=None, namespace=None): """ @@ -1010,62 +1072,75 @@ def api_pull_request_create(repo, username=None, namespace=None): """ repo = get_authorized_api_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, namespace=namespace + ) if repo is None: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) if flask.g.token.project and repo != flask.g.token.project: - raise pagure.exceptions.APIError( - 401, error_code=APIERROR.EINVALIDTOK) + raise pagure.exceptions.APIError(401, error_code=APIERROR.EINVALIDTOK) form = pagure.forms.RequestPullForm(csrf_enabled=False) if not form.validate_on_submit(): raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors) - branch_to = get_request_data().get('branch_to') + 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors + ) + branch_to = get_request_data().get("branch_to") if not branch_to: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ, - errors={'branch_to': ['This field is required.']}) - branch_from = get_request_data().get('branch_from') + 400, + error_code=APIERROR.EINVALIDREQ, + errors={"branch_to": ["This field is required."]}, + ) + branch_from = get_request_data().get("branch_from") if not branch_from: 
raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ, - errors={'branch_from': ['This field is required.']}) + 400, + error_code=APIERROR.EINVALIDREQ, + errors={"branch_from": ["This field is required."]}, + ) parent = repo if repo.parent: parent = repo.parent - if not parent.settings.get('pull_requests', True): + if not parent.settings.get("pull_requests", True): raise pagure.exceptions.APIError( - 404, error_code=APIERROR.EPULLREQUESTSDISABLED) + 404, error_code=APIERROR.EPULLREQUESTSDISABLED + ) repo_committer = pagure.utils.is_repo_committer(repo) if not repo_committer: raise pagure.exceptions.APIError( - 401, error_code=APIERROR.ENOTHIGHENOUGH) + 401, error_code=APIERROR.ENOTHIGHENOUGH + ) repo_obj = pygit2.Repository( - os.path.join(pagure_config['GIT_FOLDER'], repo.path)) + os.path.join(pagure_config["GIT_FOLDER"], repo.path) + ) orig_repo = pygit2.Repository( - os.path.join(pagure_config['GIT_FOLDER'], parent.path)) + os.path.join(pagure_config["GIT_FOLDER"], parent.path) + ) try: diff, diff_commits, orig_commit = pagure.lib.git.get_diff_info( - repo_obj, orig_repo, branch_from, branch_to) + repo_obj, orig_repo, branch_from, branch_to + ) except pagure.exceptions.PagureException as err: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ, errors=str(err)) + 400, error_code=APIERROR.EINVALIDREQ, errors=str(err) + ) if parent.settings.get( - 'Enforce_signed-off_commits_in_pull-request', False): + "Enforce_signed-off_commits_in_pull-request", False + ): for commit in diff_commits: - if 'signed-off-by' not in commit.message.lower(): + if "signed-off-by" not in commit.message.lower(): raise pagure.exceptions.APIError( - 400, error_code=APIERROR.ENOSIGNEDOFF) + 400, error_code=APIERROR.ENOSIGNEDOFF + ) if orig_commit: orig_commit = orig_commit.oid.hex @@ -1086,7 +1161,7 @@ def api_pull_request_create(repo, username=None, namespace=None): title=form.title.data, initial_comment=initial_comment, 
user=flask.g.fas_user.username, - requestfolder=pagure_config['REQUESTS_FOLDER'], + requestfolder=pagure_config["REQUESTS_FOLDER"], commit_start=commit_start, commit_stop=commit_stop, ) diff --git a/pagure/api/group.py b/pagure/api/group.py index 0b41dd2..6b28aeb 100644 --- a/pagure/api/group.py +++ b/pagure/api/group.py @@ -17,14 +17,20 @@ import pagure import pagure.exceptions import pagure.lib from pagure.api import ( - API, APIERROR, api_method, api_login_optional, get_page, get_per_page) + API, + APIERROR, + api_method, + api_login_optional, + get_page, + get_per_page, +) from pagure.utils import is_true -@API.route('/groups/') -@API.route('/groups') +@API.route("/groups/") +@API.route("/groups") def api_groups(): - ''' + """ List groups ----------- Retrieve groups on this Pagure instance. @@ -72,32 +78,31 @@ def api_groups(): "groups": ["group1", "group2"] } - ''' # noqa - pattern = flask.request.args.get('pattern', None) - extended = is_true(flask.request.args.get('extended', False)) + """ # noqa + pattern = flask.request.args.get("pattern", None) + extended = is_true(flask.request.args.get("extended", False)) - if pattern is not None and not pattern.endswith('*'): - pattern += '*' + if pattern is not None and not pattern.endswith("*"): + pattern += "*" page = get_page() per_page = get_per_page() group_cnt = pagure.lib.search_groups( - flask.g.session, pattern=pattern, count=True) + flask.g.session, pattern=pattern, count=True + ) pagination_metadata = pagure.lib.get_pagination_metadata( - flask.request, page, per_page, group_cnt) + flask.request, page, per_page, group_cnt + ) query_start = (page - 1) * per_page query_limit = per_page groups = pagure.lib.search_groups( - flask.g.session, pattern=pattern, - limit=query_limit, offset=query_start) + flask.g.session, pattern=pattern, limit=query_limit, offset=query_start + ) if extended: groups = [ - { - 'name': grp.group_name, - 'description': grp.description - } + {"name": grp.group_name, "description": 
grp.description} for grp in groups ] else: @@ -105,14 +110,14 @@ def api_groups(): return flask.jsonify( { - 'total_groups': group_cnt, - 'groups': groups, - 'pagination': pagination_metadata, + "total_groups": group_cnt, + "groups": groups, + "pagination": pagination_metadata, } ) -@API.route('/group/') +@API.route("/group/") @api_login_optional() @api_method def api_view_group(group): @@ -205,13 +210,15 @@ def api_view_group(group): """ # noqa - projects = flask.request.values.get( - 'projects', '').strip().lower() in ['1', 'true'] - acl = flask.request.values.get('acl', '').strip().lower() or None - if acl == 'ticket': - acl = ['admin', 'commit', 'ticket'] - elif acl == 'commit': - acl = ['commit', 'admin'] + projects = flask.request.values.get("projects", "").strip().lower() in [ + "1", + "true", + ] + acl = flask.request.values.get("acl", "").strip().lower() or None + if acl == "ticket": + acl = ["admin", "commit", "ticket"] + elif acl == "commit": + acl = ["commit", "admin"] elif acl: acl = [acl] @@ -221,12 +228,11 @@ def api_view_group(group): output = group.to_json(public=(not pagure.utils.api_authenticated())) if projects and not acl: - output['projects'] = [ - project.to_json(public=True) - for project in group.projects + output["projects"] = [ + project.to_json(public=True) for project in group.projects ] elif projects and acl: - output['projects'] = [ + output["projects"] = [ pg.project.to_json(public=True) for pg in group.projects_groups if pg.access in acl diff --git a/pagure/api/issue.py b/pagure/api/issue.py index 2b0e5b9..436bf65 100644 --- a/pagure/api/issue.py +++ b/pagure/api/issue.py @@ -20,8 +20,15 @@ from sqlalchemy.exc import SQLAlchemyError import pagure.exceptions import pagure.lib from pagure.api import ( - API, api_method, api_login_required, api_login_optional, APIERROR, - get_authorized_api_project, get_request_data, get_page, get_per_page + API, + api_method, + api_login_required, + api_login_optional, + APIERROR, + 
get_authorized_api_project, + get_request_data, + get_page, + get_per_page, ) from pagure.config import config as pagure_config from pagure.utils import ( @@ -46,11 +53,11 @@ def _get_repo(repo_name, username=None, namespace=None): :return: repository name """ repo = get_authorized_api_project( - flask.g.session, repo_name, user=username, namespace=namespace) + flask.g.session, repo_name, user=username, namespace=namespace + ) if repo is None: - raise pagure.exceptions.APIError( - 404, error_code=APIERROR.ENOPROJECT) + raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) return repo @@ -60,15 +67,18 @@ def _check_issue_tracker(repo): :param repo: repository :raises pagure.exceptions.APIError: when issue tracker is disabled """ - if not repo.settings.get('issue_tracker', True): + if not repo.settings.get("issue_tracker", True): raise pagure.exceptions.APIError( - 404, error_code=APIERROR.ETRACKERDISABLED) + 404, error_code=APIERROR.ETRACKERDISABLED + ) # forbid all POST requests if the issue tracker is made read-only - if flask.request.method == 'POST' and \ - repo.settings.get('issue_tracker_read_only', False): + if flask.request.method == "POST" and repo.settings.get( + "issue_tracker_read_only", False + ): raise pagure.exceptions.APIError( - 401, error_code=APIERROR.ETRACKERREADONLY) + 401, error_code=APIERROR.ETRACKERREADONLY + ) def _check_token(repo, project_token=True): @@ -81,11 +91,12 @@ def _check_token(repo, project_token=True): if api_authenticated(): # if there is a project associated with the token, check it # if there is no project associated, check if it is required - if (flask.g.token.project is not None - and repo != flask.g.token.project) \ - or (flask.g.token.project is None and project_token): + if ( + flask.g.token.project is not None and repo != flask.g.token.project + ) or (flask.g.token.project is None and project_token): raise pagure.exceptions.APIError( - 401, error_code=APIERROR.EINVALIDTOK) + 401, 
error_code=APIERROR.EINVALIDTOK + ) def _get_issue(repo, issueid, issueuid=None): @@ -97,7 +108,8 @@ def _get_issue(repo, issueid, issueuid=None): :return: issue """ issue = pagure.lib.search_issues( - flask.g.session, repo, issueid=issueid, issueuid=issueuid) + flask.g.session, repo, issueid=issueid, issueuid=issueuid + ) if issue is None or issue.project != repo: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOISSUE) @@ -120,7 +132,8 @@ def _check_private_issue_access(issue): ) ): raise pagure.exceptions.APIError( - 403, error_code=APIERROR.EISSUENOTALLOWED) + 403, error_code=APIERROR.EISSUENOTALLOWED + ) def _check_ticket_access(issue, assignee=False): @@ -137,13 +150,16 @@ def _check_ticket_access(issue, assignee=False): error = not is_repo_user(issue.project) if assignee: - if issue.assignee is not None \ - and issue.assignee.user == flask.g.fas_user.username: + if ( + issue.assignee is not None + and issue.assignee.user == flask.g.fas_user.username + ): error = False if error: raise pagure.exceptions.APIError( - 403, error_code=APIERROR.EISSUENOTALLOWED) + 403, error_code=APIERROR.EISSUENOTALLOWED + ) def _check_link_custom_field(field, links): @@ -153,20 +169,21 @@ def _check_link_custom_field(field, links): :param links : Value of the custom field. :raises pagure.exceptions.APIERROR when invalid. 
""" - if field.key_type == 'link': - links = links.split(',') + if field.key_type == "link": + links = links.split(",") for link in links: - link = link.replace(' ', '') + link = link.replace(" ", "") if not urlpattern.match(link): raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDISSUEFIELD_LINK) + 400, error_code=APIERROR.EINVALIDISSUEFIELD_LINK + ) -@API.route('//new_issue', methods=['POST']) -@API.route('///new_issue', methods=['POST']) -@API.route('/fork///new_issue', methods=['POST']) -@API.route('/fork////new_issue', methods=['POST']) -@api_login_required(acls=['issue_create']) +@API.route("//new_issue", methods=["POST"]) +@API.route("///new_issue", methods=["POST"]) +@API.route("/fork///new_issue", methods=["POST"]) +@API.route("/fork////new_issue", methods=["POST"]) +@api_login_required(acls=["issue_create"]) @api_method def api_new_issue(repo, username=None, namespace=None): """ @@ -255,28 +272,27 @@ def api_new_issue(repo, username=None, namespace=None): _check_issue_tracker(repo) _check_token(repo, project_token=False) - user_obj = pagure.lib.get_user( - flask.g.session, flask.g.fas_user.username) + user_obj = pagure.lib.get_user(flask.g.session, flask.g.fas_user.username) if not user_obj: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOUSER) form = pagure.forms.IssueFormSimplied( priorities=repo.priorities, milestones=repo.milestones, - csrf_enabled=False) + csrf_enabled=False, + ) if form.validate_on_submit(): title = form.title.data content = form.issue_content.data milestone = form.milestone.data or None private = is_true(form.private.data) priority = form.priority.data or None - assignee = get_request_data().get( - 'assignee', '').strip() or None + assignee = get_request_data().get("assignee", "").strip() or None tags = [ tag.strip() - for tag in get_request_data().get( - 'tag', '').split(',') - if tag.strip()] + for tag in get_request_data().get("tag", "").split(",") + if tag.strip() + ] try: issue = 
pagure.lib.new_issue( @@ -290,16 +306,16 @@ def api_new_issue(repo, username=None, namespace=None): priority=priority, tags=tags, user=flask.g.fas_user.username, - ticketfolder=pagure_config['TICKETS_FOLDER'], + ticketfolder=pagure_config["TICKETS_FOLDER"], ) flask.g.session.flush() # If there is a file attached, attach it. - filestream = flask.request.files.get('filestream') - if filestream and '' in issue.content: + filestream = flask.request.files.get("filestream") + if filestream and "" in issue.content: new_filename = pagure.lib.add_attachment( repo=repo, issue=issue, - attachmentfolder=pagure_config['ATTACHMENTS_FOLDER'], + attachmentfolder=pagure_config["ATTACHMENTS_FOLDER"], user=user_obj, filename=filestream.filename, filestream=filestream.stream, @@ -307,21 +323,24 @@ def api_new_issue(repo, username=None, namespace=None): # Replace the tag in the comment with the link # to the actual image filelocation = flask.url_for( - 'ui_ns.view_issue_raw_file', + "ui_ns.view_issue_raw_file", repo=repo.name, username=username, - filename='files/%s' % new_filename, + filename="files/%s" % new_filename, + ) + new_filename = new_filename.split("-", 1)[1] + url = "[![%s](%s)](%s)" % ( + new_filename, + filelocation, + filelocation, ) - new_filename = new_filename.split('-', 1)[1] - url = '[![%s](%s)](%s)' % ( - new_filename, filelocation, filelocation) - issue.content = issue.content.replace('', url) + issue.content = issue.content.replace("", url) flask.g.session.add(issue) flask.g.session.flush() flask.g.session.commit() - output['message'] = 'Issue created' - output['issue'] = issue.to_json(public=True) + output["message"] = "Issue created" + output["issue"] = issue.to_json(public=True) except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() _log.exception(err) @@ -329,16 +348,17 @@ def api_new_issue(repo, username=None, namespace=None): else: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors) + 400, 
error_code=APIERROR.EINVALIDREQ, errors=form.errors + ) jsonout = flask.jsonify(output) return jsonout -@API.route('///issues') -@API.route('/fork///issues') -@API.route('//issues') -@API.route('/fork////issues') +@API.route("///issues") +@API.route("/fork///issues") +@API.route("//issues") +@API.route("/fork////issues") @api_login_optional() @api_method def api_view_issues(repo, username=None, namespace=None): @@ -478,17 +498,17 @@ def api_view_issues(repo, username=None, namespace=None): _check_issue_tracker(repo) _check_token(repo) - assignee = flask.request.args.get('assignee', None) - author = flask.request.args.get('author', None) - milestone = flask.request.args.getlist('milestones', None) - no_stones = flask.request.args.get('no_stones', None) + assignee = flask.request.args.get("assignee", None) + author = flask.request.args.get("author", None) + milestone = flask.request.args.getlist("milestones", None) + no_stones = flask.request.args.get("no_stones", None) if no_stones is not None: no_stones = is_true(no_stones) - priority = flask.request.args.get('priority', None) - since = flask.request.args.get('since', None) - order = flask.request.args.get('order', None) - status = flask.request.args.get('status', None) - tags = flask.request.args.getlist('tags') + priority = flask.request.args.get("priority", None) + since = flask.request.args.get("since", None) + order = flask.request.args.get("order", None) + status = flask.request.args.get("status", None) + tags = flask.request.args.getlist("tags") tags = [tag.strip() for tag in tags if tag.strip()] priority_key = None @@ -506,7 +526,8 @@ def api_view_issues(repo, username=None, namespace=None): if not found: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDPRIORITY) + 400, error_code=APIERROR.EINVALIDPRIORITY + ) # Hide private tickets private = False @@ -518,27 +539,27 @@ def api_view_issues(repo, username=None, namespace=None): private = None params = { - 'session': flask.g.session, - 
'repo': repo, - 'tags': tags, - 'assignee': assignee, - 'author': author, - 'private': private, - 'milestones': milestone, - 'priority': priority_key, - 'order': order, - 'no_milestones': no_stones, + "session": flask.g.session, + "repo": repo, + "tags": tags, + "assignee": assignee, + "author": author, + "private": private, + "milestones": milestone, + "priority": priority_key, + "order": order, + "no_milestones": no_stones, } if status is not None: - if status.lower() == 'all': - params.update({'status': None}) - elif status.lower() == 'closed': - params.update({'closed': True}) + if status.lower() == "all": + params.update({"status": None}) + elif status.lower() == "closed": + params.update({"closed": True}) else: - params.update({'status': status}) + params.update({"status": status}) else: - params.update({'status': 'Open'}) + params.update({"status": "Open"}) updated_after = None if since: @@ -549,54 +570,59 @@ def api_view_issues(repo, username=None, namespace=None): updated_after = arrow.get(int(since)).datetime except ValueError: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.ETIMESTAMP) + 400, error_code=APIERROR.ETIMESTAMP + ) else: # We assume datetime format, so validate it try: - updated_after = datetime.datetime.strptime(since, '%Y-%m-%d') + updated_after = datetime.datetime.strptime(since, "%Y-%m-%d") except ValueError: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EDATETIME) + 400, error_code=APIERROR.EDATETIME + ) - params.update({'updated_after': updated_after}) + params.update({"updated_after": updated_after}) page = get_page() per_page = get_per_page() - params['count'] = True + params["count"] = True issue_cnt = pagure.lib.search_issues(**params) pagination_metadata = pagure.lib.get_pagination_metadata( - flask.request, page, per_page, issue_cnt) + flask.request, page, per_page, issue_cnt + ) query_start = (page - 1) * per_page query_limit = per_page - params['count'] = False - params['limit'] = query_limit - 
params['offset'] = query_start + params["count"] = False + params["limit"] = query_limit + params["offset"] = query_start issues = pagure.lib.search_issues(**params) - jsonout = flask.jsonify({ - 'total_issues': len(issues), - 'issues': [issue.to_json(public=True) for issue in issues], - 'args': { - 'assignee': assignee, - 'author': author, - 'milestones': milestone, - 'no_stones': no_stones, - 'order': order, - 'priority': priority, - 'since': since, - 'status': status, - 'tags': tags, - }, - 'pagination': pagination_metadata, - }) + jsonout = flask.jsonify( + { + "total_issues": len(issues), + "issues": [issue.to_json(public=True) for issue in issues], + "args": { + "assignee": assignee, + "author": author, + "milestones": milestone, + "no_stones": no_stones, + "order": order, + "priority": priority, + "since": since, + "status": status, + "tags": tags, + }, + "pagination": pagination_metadata, + } + ) return jsonout -@API.route('//issue/') -@API.route('///issue/') -@API.route('/fork///issue/') -@API.route('/fork////issue/') +@API.route("//issue/") +@API.route("///issue/") +@API.route("/fork///issue/") +@API.route("/fork////issue/") @api_login_optional() @api_method def api_view_issue(repo, issueid, username=None, namespace=None): @@ -643,7 +669,7 @@ def api_view_issue(repo, issueid, username=None, namespace=None): } """ - comments = is_true(flask.request.args.get('comments', True)) + comments = is_true(flask.request.args.get("comments", True)) repo = _get_repo(repo, username, namespace) _check_issue_tracker(repo) @@ -658,21 +684,22 @@ def api_view_issue(repo, issueid, username=None, namespace=None): issue = _get_issue(repo, issue_id, issueuid=issue_uid) _check_private_issue_access(issue) - jsonout = flask.jsonify( - issue.to_json(public=True, with_comments=comments)) + jsonout = flask.jsonify(issue.to_json(public=True, with_comments=comments)) return jsonout -@API.route('//issue//comment/') -@API.route('///issue//comment/') -@API.route('/fork///issue//comment/') 
+@API.route("//issue//comment/") +@API.route("///issue//comment/") +@API.route("/fork///issue//comment/") @API.route( - '/fork////issue//' - 'comment/') + "/fork////issue//" + "comment/" +) @api_login_optional() @api_method def api_view_issue_comment( - repo, issueid, commentid, username=None, namespace=None): + repo, issueid, commentid, username=None, namespace=None +): """ Comment of an issue -------------------- @@ -726,28 +753,30 @@ def api_view_issue_comment( _check_private_issue_access(issue) comment = pagure.lib.get_issue_comment( - flask.g.session, issue.uid, commentid) + flask.g.session, issue.uid, commentid + ) if not comment: - raise pagure.exceptions.APIError( - 404, error_code=APIERROR.ENOCOMMENT) + raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOCOMMENT) output = comment.to_json(public=True) - output['avatar_url'] = pagure.lib.avatar_url_from_email( - comment.user.default_email, size=16) - output['comment_date'] = comment.date_created.strftime( - '%Y-%m-%d %H:%M:%S') + output["avatar_url"] = pagure.lib.avatar_url_from_email( + comment.user.default_email, size=16 + ) + output["comment_date"] = comment.date_created.strftime("%Y-%m-%d %H:%M:%S") jsonout = flask.jsonify(output) return jsonout -@API.route('//issue//status', methods=['POST']) -@API.route('///issue//status', methods=['POST']) +@API.route("//issue//status", methods=["POST"]) +@API.route("///issue//status", methods=["POST"]) @API.route( - '/fork///issue//status', methods=['POST']) + "/fork///issue//status", methods=["POST"] +) @API.route( - '/fork////issue//status', - methods=['POST']) -@api_login_required(acls=['issue_change_status', 'issue_update']) + "/fork////issue//status", + methods=["POST"], +) +@api_login_required(acls=["issue_change_status", "issue_update"]) @api_method def api_change_status_issue(repo, issueid, username=None, namespace=None): """ @@ -800,9 +829,8 @@ def api_change_status_issue(repo, issueid, username=None, namespace=None): status = 
pagure.lib.get_issue_statuses(flask.g.session) form = pagure.forms.StatusForm( - status=status, - close_status=repo.close_status, - csrf_enabled=False) + status=status, close_status=repo.close_status, csrf_enabled=False + ) close_status = None if form.close_status.raw_data: @@ -810,7 +838,7 @@ def api_change_status_issue(repo, issueid, username=None, namespace=None): new_status = form.status.data.strip() if new_status in repo.close_status and not close_status: close_status = new_status - new_status = 'Closed' + new_status = "Closed" form.status.data = new_status if form.validate_on_submit(): @@ -822,13 +850,13 @@ def api_change_status_issue(repo, issueid, username=None, namespace=None): status=new_status, close_status=close_status, user=flask.g.fas_user.username, - ticketfolder=pagure_config['TICKETS_FOLDER'], + ticketfolder=pagure_config["TICKETS_FOLDER"], ) flask.g.session.commit() if message: - output['message'] = message + output["message"] = message else: - output['message'] = 'No changes' + output["message"] = "No changes" if message: pagure.lib.add_metadata_update_notif( @@ -836,33 +864,37 @@ def api_change_status_issue(repo, issueid, username=None, namespace=None): obj=issue, messages=message, user=flask.g.fas_user.username, - gitfolder=pagure_config['TICKETS_FOLDER'] + gitfolder=pagure_config["TICKETS_FOLDER"], ) except pagure.exceptions.PagureException as err: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.ENOCODE, error=str(err)) + 400, error_code=APIERROR.ENOCODE, error=str(err) + ) except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() raise pagure.exceptions.APIError(400, error_code=APIERROR.EDBERROR) else: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors) + 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors + ) jsonout = flask.jsonify(output) return jsonout -@API.route('//issue//milestone', methods=['POST']) +@API.route("//issue//milestone", methods=["POST"]) 
@API.route( - '///issue//milestone', methods=['POST']) + "///issue//milestone", methods=["POST"] +) @API.route( - '/fork///issue//milestone', - methods=['POST']) + "/fork///issue//milestone", methods=["POST"] +) @API.route( - '/fork////issue//milestone', - methods=['POST']) -@api_login_required(acls=['issue_update_milestone', 'issue_update']) + "/fork////issue//milestone", + methods=["POST"], +) +@api_login_required(acls=["issue_update_milestone", "issue_update"]) @api_method def api_change_milestone_issue(repo, issueid, username=None, namespace=None): """ @@ -912,8 +944,8 @@ def api_change_milestone_issue(repo, issueid, username=None, namespace=None): _check_ticket_access(issue) form = pagure.forms.MilestoneForm( - milestones=repo.milestones.keys(), - csrf_enabled=False) + milestones=repo.milestones.keys(), csrf_enabled=False + ) if form.validate_on_submit(): new_milestone = form.milestone.data or None @@ -924,13 +956,13 @@ def api_change_milestone_issue(repo, issueid, username=None, namespace=None): issue=issue, milestone=new_milestone, user=flask.g.fas_user.username, - ticketfolder=pagure_config['TICKETS_FOLDER'], + ticketfolder=pagure_config["TICKETS_FOLDER"], ) flask.g.session.commit() if message: - output['message'] = message + output["message"] = message else: - output['message'] = 'No changes' + output["message"] = "No changes" if message: pagure.lib.add_metadata_update_notif( @@ -938,31 +970,35 @@ def api_change_milestone_issue(repo, issueid, username=None, namespace=None): obj=issue, messages=message, user=flask.g.fas_user.username, - gitfolder=pagure_config['TICKETS_FOLDER'] + gitfolder=pagure_config["TICKETS_FOLDER"], ) except pagure.exceptions.PagureException as err: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.ENOCODE, error=str(err)) + 400, error_code=APIERROR.ENOCODE, error=str(err) + ) except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() raise pagure.exceptions.APIError(400, error_code=APIERROR.EDBERROR) 
else: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors) + 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors + ) jsonout = flask.jsonify(output) return jsonout -@API.route('//issue//comment', methods=['POST']) -@API.route('///issue//comment', methods=['POST']) +@API.route("//issue//comment", methods=["POST"]) +@API.route("///issue//comment", methods=["POST"]) @API.route( - '/fork///issue//comment', methods=['POST']) + "/fork///issue//comment", methods=["POST"] +) @API.route( - '/fork////issue//comment', - methods=['POST']) -@api_login_required(acls=['issue_comment', 'issue_update']) + "/fork////issue//comment", + methods=["POST"], +) +@api_login_required(acls=["issue_comment", "issue_update"]) @api_method def api_comment_issue(repo, issueid, username=None, namespace=None): """ @@ -1018,10 +1054,10 @@ def api_comment_issue(repo, issueid, username=None, namespace=None): issue=issue, comment=comment, user=flask.g.fas_user.username, - ticketfolder=pagure_config['TICKETS_FOLDER'], + ticketfolder=pagure_config["TICKETS_FOLDER"], ) flask.g.session.commit() - output['message'] = message + output["message"] = message except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() _log.exception(err) @@ -1029,25 +1065,29 @@ def api_comment_issue(repo, issueid, username=None, namespace=None): else: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors) + 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors + ) - output['avatar_url'] = pagure.lib.avatar_url_from_email( - flask.g.fas_user.default_email, size=30) + output["avatar_url"] = pagure.lib.avatar_url_from_email( + flask.g.fas_user.default_email, size=30 + ) - output['user'] = flask.g.fas_user.username + output["user"] = flask.g.fas_user.username jsonout = flask.jsonify(output) return jsonout -@API.route('//issue//assign', methods=['POST']) -@API.route('///issue//assign', methods=['POST']) 
+@API.route("//issue//assign", methods=["POST"]) +@API.route("///issue//assign", methods=["POST"]) @API.route( - '/fork///issue//assign', methods=['POST']) + "/fork///issue//assign", methods=["POST"] +) @API.route( - '/fork////issue//assign', - methods=['POST']) -@api_login_required(acls=['issue_assign', 'issue_update']) + "/fork////issue//assign", + methods=["POST"], +) +@api_login_required(acls=["issue_assign", "issue_update"]) @api_method def api_assign_issue(repo, issueid, username=None, namespace=None): """ @@ -1104,7 +1144,7 @@ def api_assign_issue(repo, issueid, username=None, namespace=None): issue=issue, assignee=assignee, user=flask.g.fas_user.username, - ticketfolder=pagure_config['TICKETS_FOLDER'], + ticketfolder=pagure_config["TICKETS_FOLDER"], ) flask.g.session.commit() if message: @@ -1113,14 +1153,15 @@ def api_assign_issue(repo, issueid, username=None, namespace=None): obj=issue, messages=message, user=flask.g.fas_user.username, - gitfolder=pagure_config['TICKETS_FOLDER'] + gitfolder=pagure_config["TICKETS_FOLDER"], ) - output['message'] = message + output["message"] = message else: - output['message'] = 'Nothing to change' + output["message"] = "Nothing to change" except pagure.exceptions.PagureException as err: # pragma: no cover raise pagure.exceptions.APIError( - 400, error_code=APIERROR.ENOCODE, error=str(err)) + 400, error_code=APIERROR.ENOCODE, error=str(err) + ) except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() _log.exception(err) @@ -1130,15 +1171,18 @@ def api_assign_issue(repo, issueid, username=None, namespace=None): return jsonout -@API.route('//issue//subscribe', methods=['POST']) +@API.route("//issue//subscribe", methods=["POST"]) @API.route( - '///issue//subscribe', methods=['POST']) + "///issue//subscribe", methods=["POST"] +) @API.route( - '/fork///issue//subscribe', methods=['POST']) + "/fork///issue//subscribe", methods=["POST"] +) @API.route( - '/fork////issue//subscribe', - methods=['POST']) 
-@api_login_required(acls=['issue_subscribe']) + "/fork////issue//subscribe", + methods=["POST"], +) +@api_login_required(acls=["issue_subscribe"]) @api_method def api_subscribe_issue(repo, issueid, username=None, namespace=None): """ @@ -1198,15 +1242,17 @@ def api_subscribe_issue(repo, issueid, username=None, namespace=None): flask.g.session, user=flask.g.fas_user.username, obj=issue, - watch_status=status + watch_status=status, ) flask.g.session.commit() - output['message'] = message + output["message"] = message user_obj = pagure.lib.get_user( - flask.g.session, flask.g.fas_user.username) - output['avatar_url'] = pagure.lib.avatar_url_from_email( - user_obj.default_email, size=30) - output['user'] = flask.g.fas_user.username + flask.g.session, flask.g.fas_user.username + ) + output["avatar_url"] = pagure.lib.avatar_url_from_email( + user_obj.default_email, size=30 + ) + output["user"] = flask.g.fas_user.username except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() _log.exception(err) @@ -1216,20 +1262,23 @@ def api_subscribe_issue(repo, issueid, username=None, namespace=None): return jsonout -@API.route('//issue//custom/', methods=['POST']) +@API.route("//issue//custom/", methods=["POST"]) @API.route( - '///issue//custom/', - methods=['POST']) + "///issue//custom/", methods=["POST"] +) @API.route( - '/fork///issue//custom/', - methods=['POST']) + "/fork///issue//custom/", + methods=["POST"], +) @API.route( - '/fork////issue//custom/', - methods=['POST']) -@api_login_required(acls=['issue_update_custom_fields', 'issue_update']) + "/fork////issue//custom/", + methods=["POST"], +) +@api_login_required(acls=["issue_update_custom_fields", "issue_update"]) @api_method def api_update_custom_field( - repo, issueid, field, username=None, namespace=None): + repo, issueid, field, username=None, namespace=None +): """ Update custom field ------------------- @@ -1276,31 +1325,34 @@ def api_update_custom_field( fields = {k.name: k for k in 
repo.issue_keys} if field not in fields: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDISSUEFIELD) + 400, error_code=APIERROR.EINVALIDISSUEFIELD + ) key = fields[field] - value = get_request_data().get('value') + value = get_request_data().get("value") if value: _check_link_custom_field(key, value) try: message = pagure.lib.set_custom_key_value( - flask.g.session, issue, key, value) + flask.g.session, issue, key, value + ) flask.g.session.commit() if message: - output['message'] = message + output["message"] = message pagure.lib.add_metadata_update_notif( session=flask.g.session, obj=issue, messages=message, user=flask.g.fas_user.username, - gitfolder=pagure_config['TICKETS_FOLDER'] + gitfolder=pagure_config["TICKETS_FOLDER"], ) else: - output['message'] = 'No changes' + output["message"] = "No changes" except pagure.exceptions.PagureException as err: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.ENOCODE, error=str(err)) + 400, error_code=APIERROR.ENOCODE, error=str(err) + ) except SQLAlchemyError as err: # pragma: no cover print(err) flask.g.session.rollback() @@ -1310,20 +1362,18 @@ def api_update_custom_field( return jsonout -@API.route('//issue//custom', methods=['POST']) -@API.route( - '///issue//custom', - methods=['POST']) +@API.route("//issue//custom", methods=["POST"]) +@API.route("///issue//custom", methods=["POST"]) @API.route( - '/fork///issue//custom', - methods=['POST']) + "/fork///issue//custom", methods=["POST"] +) @API.route( - '/fork////issue//custom', - methods=['POST']) -@api_login_required(acls=['issue_update_custom_fields', 'issue_update']) + "/fork////issue//custom", + methods=["POST"], +) +@api_login_required(acls=["issue_update_custom_fields", "issue_update"]) @api_method -def api_update_custom_fields( - repo, issueid, username=None, namespace=None): +def api_update_custom_fields(repo, issueid, username=None, namespace=None): """ Update custom fields -------------------- @@ -1381,7 +1431,7 @@ def 
api_update_custom_fields( } """ # noqa - output = {'messages': []} + output = {"messages": []} repo = _get_repo(repo, username, namespace) _check_issue_tracker(repo) _check_token(repo) @@ -1392,14 +1442,14 @@ def api_update_custom_fields( fields = get_request_data() if not fields: - raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ) + raise pagure.exceptions.APIError(400, error_code=APIERROR.EINVALIDREQ) repo_fields = {k.name: k for k in repo.issue_keys} if not all(key in repo_fields.keys() for key in fields.keys()): raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDISSUEFIELD) + 400, error_code=APIERROR.EINVALIDISSUEFIELD + ) for field in fields: key = repo_fields[field] @@ -1408,23 +1458,25 @@ def api_update_custom_fields( _check_link_custom_field(key, value) try: message = pagure.lib.set_custom_key_value( - flask.g.session, issue, key, value) + flask.g.session, issue, key, value + ) flask.g.session.commit() if message: - output['messages'].append({key.name: message}) + output["messages"].append({key.name: message}) pagure.lib.add_metadata_update_notif( session=flask.g.session, obj=issue, messages=message, user=flask.g.fas_user.username, - gitfolder=pagure_config['TICKETS_FOLDER'] + gitfolder=pagure_config["TICKETS_FOLDER"], ) else: - output['messages'].append({key.name: 'No changes'}) + output["messages"].append({key.name: "No changes"}) except pagure.exceptions.PagureException as err: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.ENOCODE, error=str(err)) + 400, error_code=APIERROR.ENOCODE, error=str(err) + ) except SQLAlchemyError as err: # pragma: no cover print(err) flask.g.session.rollback() @@ -1434,10 +1486,10 @@ def api_update_custom_fields( return jsonout -@API.route('//issues/history/stats') -@API.route('///issues/history/stats') -@API.route('/fork///issues/history/stats') -@API.route('/fork////issues/history/stats') +@API.route("//issues/history/stats") +@API.route("///issues/history/stats") 
+@API.route("/fork///issues/history/stats") +@API.route("/fork////issues/history/stats") @api_method def api_view_issues_history_stats(repo, username=None, namespace=None): """ @@ -1478,5 +1530,5 @@ def api_view_issues_history_stats(repo, username=None, namespace=None): _check_issue_tracker(repo) stats = pagure.lib.issues_history_stats(flask.g.session, repo) - jsonout = flask.jsonify({'stats': stats}) + jsonout = flask.jsonify({"stats": stats}) return jsonout diff --git a/pagure/api/project.py b/pagure/api/project.py index 9470a23..af602f7 100644 --- a/pagure/api/project.py +++ b/pagure/api/project.py @@ -23,19 +23,27 @@ import pagure.exceptions import pagure.lib import pagure.lib.git import pagure.utils -from pagure.api import (API, api_method, APIERROR, api_login_required, - get_authorized_api_project, api_login_optional, - get_request_data, get_page, get_per_page) +from pagure.api import ( + API, + api_method, + APIERROR, + api_login_required, + get_authorized_api_project, + api_login_optional, + get_request_data, + get_page, + get_per_page, +) from pagure.config import config as pagure_config _log = logging.getLogger(__name__) -@API.route('//git/tags') -@API.route('///git/tags') -@API.route('/fork///git/tags') -@API.route('/fork////git/tags') +@API.route("//git/tags") +@API.route("///git/tags") +@API.route("/fork///git/tags") +@API.route("/fork////git/tags") @api_method def api_git_tags(repo, username=None, namespace=None): """ @@ -85,29 +93,28 @@ def api_git_tags(repo, username=None, namespace=None): """ with_commits = pagure.utils.is_true( - flask.request.values.get('with_commits', False)) + flask.request.values.get("with_commits", False) + ) repo = get_authorized_api_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, namespace=namespace + ) if repo is None: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) tags = pagure.lib.git.get_git_tags(repo, with_commits=with_commits) - 
jsonout = flask.jsonify({ - 'total_tags': len(tags), - 'tags': tags - }) + jsonout = flask.jsonify({"total_tags": len(tags), "tags": tags}) return jsonout -@API.route('//watchers') -@API.route('///watchers') -@API.route('/fork///watchers') -@API.route('/fork////watchers') +@API.route("//watchers") +@API.route("///watchers") +@API.route("/fork///watchers") +@API.route("/fork////watchers") @api_method def api_project_watchers(repo, username=None, namespace=None): - ''' + """ Project watchers ---------------- List the watchers on the project. @@ -136,63 +143,71 @@ def api_project_watchers(repo, username=None, namespace=None): ] } } - ''' + """ repo = get_authorized_api_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, namespace=namespace + ) if repo is None: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) implicit_watch_users = {repo.user.username} for access_type in repo.access_users.keys(): - implicit_watch_users = \ - implicit_watch_users | set( - [user.username for user in repo.access_users[access_type]]) + implicit_watch_users = implicit_watch_users | set( + [user.username for user in repo.access_users[access_type]] + ) watching_users_to_watch_level = {} for implicit_watch_user in implicit_watch_users: user_watch_level = pagure.lib.get_watch_level_on_repo( - flask.g.session, implicit_watch_user, repo) + flask.g.session, implicit_watch_user, repo + ) watching_users_to_watch_level[implicit_watch_user] = user_watch_level for access_type in repo.access_groups.keys(): - group_names = ['@' + group.group_name - for group in repo.access_groups[access_type]] + group_names = [ + "@" + group.group_name for group in repo.access_groups[access_type] + ] for group_name in group_names: if group_name not in watching_users_to_watch_level: watching_users_to_watch_level[group_name] = set() # By the logic in pagure.lib.get_watch_level_on_repo, group members # only by default watch issues. 
If they want to watch commits they # have to explicitly subscribe. - watching_users_to_watch_level[group_name].add('issues') + watching_users_to_watch_level[group_name].add("issues") for key in watching_users_to_watch_level: watching_users_to_watch_level[key] = list( - watching_users_to_watch_level[key]) + watching_users_to_watch_level[key] + ) # Get the explicit watch statuses for watcher in repo.watchers: if watcher.watch_issues or watcher.watch_commits: - watching_users_to_watch_level[watcher.user.username] = \ - pagure.lib.get_watch_level_on_repo( - flask.g.session, watcher.user.username, repo) + watching_users_to_watch_level[ + watcher.user.username + ] = pagure.lib.get_watch_level_on_repo( + flask.g.session, watcher.user.username, repo + ) else: if watcher.user.username in watching_users_to_watch_level: watching_users_to_watch_level.pop(watcher.user.username, None) - return flask.jsonify({ - 'total_watchers': len(watching_users_to_watch_level), - 'watchers': watching_users_to_watch_level - }) + return flask.jsonify( + { + "total_watchers": len(watching_users_to_watch_level), + "watchers": watching_users_to_watch_level, + } + ) -@API.route('//git/urls') -@API.route('///git/urls') -@API.route('/fork///git/urls') -@API.route('/fork////git/urls') +@API.route("//git/urls") +@API.route("///git/urls") +@API.route("/fork///git/urls") +@API.route("/fork////git/urls") @api_login_optional() @api_method def api_project_git_urls(repo, username=None, namespace=None): - ''' + """ Project Git URLs ---------------- List the Git URLS on the project. 
@@ -219,40 +234,40 @@ def api_project_git_urls(repo, username=None, namespace=None): "git": "https://pagure.io/mprahl-test123.git" } } - ''' + """ repo = get_authorized_api_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, namespace=namespace + ) if repo is None: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) git_urls = {} - git_url_ssh = pagure_config.get('GIT_URL_SSH') + git_url_ssh = pagure_config.get("GIT_URL_SSH") if pagure.utils.api_authenticated() and git_url_ssh: try: git_url_ssh = git_url_ssh.format( - username=flask.g.fas_user.username) + username=flask.g.fas_user.username + ) except (KeyError, IndexError): pass if git_url_ssh: - git_urls['ssh'] = '{0}{1}.git'.format(git_url_ssh, repo.fullname) - if pagure_config.get('GIT_URL_GIT'): - git_urls['git'] = '{0}{1}.git'.format( - pagure_config['GIT_URL_GIT'], repo.fullname) + git_urls["ssh"] = "{0}{1}.git".format(git_url_ssh, repo.fullname) + if pagure_config.get("GIT_URL_GIT"): + git_urls["git"] = "{0}{1}.git".format( + pagure_config["GIT_URL_GIT"], repo.fullname + ) - return flask.jsonify({ - 'total_urls': len(git_urls), - "urls": git_urls - }) + return flask.jsonify({"total_urls": len(git_urls), "urls": git_urls}) -@API.route('//git/branches') -@API.route('///git/branches') -@API.route('/fork///git/branches') -@API.route('/fork////git/branches') +@API.route("//git/branches") +@API.route("///git/branches") +@API.route("/fork///git/branches") +@API.route("/fork////git/branches") @api_method def api_git_branches(repo, username=None, namespace=None): - ''' + """ List project branches --------------------- List the branches associated with a Pagure git repository @@ -277,23 +292,21 @@ def api_git_branches(repo, username=None, namespace=None): "branches": ["master", "dev"] } - ''' + """ repo = get_authorized_api_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, 
namespace=namespace + ) if repo is None: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) branches = pagure.lib.git.get_git_branches(repo) return flask.jsonify( - { - 'total_branches': len(branches), - 'branches': branches - } + {"total_branches": len(branches), "branches": branches} ) -@API.route('/projects') +@API.route("/projects") @api_method def api_projects(): """ @@ -448,54 +461,68 @@ def api_projects(): "total_projects": 1000 } """ - tags = flask.request.values.getlist('tags') - username = flask.request.values.get('username', None) - fork = flask.request.values.get('fork', None) - namespace = flask.request.values.get('namespace', None) - owner = flask.request.values.get('owner', None) - pattern = flask.request.values.get('pattern', None) - short = pagure.utils.is_true( - flask.request.values.get('short', False)) + tags = flask.request.values.getlist("tags") + username = flask.request.values.get("username", None) + fork = flask.request.values.get("fork", None) + namespace = flask.request.values.get("namespace", None) + owner = flask.request.values.get("owner", None) + pattern = flask.request.values.get("pattern", None) + short = pagure.utils.is_true(flask.request.values.get("short", False)) if fork is not None: fork = pagure.utils.is_true(fork) private = False - if pagure.utils.authenticated() \ - and username == flask.g.fas_user.username: + if pagure.utils.authenticated() and username == flask.g.fas_user.username: private = flask.g.fas_user.username project_count = pagure.lib.search_projects( - flask.g.session, username=username, fork=fork, tags=tags, - pattern=pattern, private=private, namespace=namespace, owner=owner, - count=True) + flask.g.session, + username=username, + fork=fork, + tags=tags, + pattern=pattern, + private=private, + namespace=namespace, + owner=owner, + count=True, + ) # Pagination code inspired by Flask-SQLAlchemy page = get_page() per_page = get_per_page() pagination_metadata = pagure.lib.get_pagination_metadata( 
- flask.request, page, per_page, project_count) + flask.request, page, per_page, project_count + ) query_start = (page - 1) * per_page query_limit = per_page projects = pagure.lib.search_projects( - flask.g.session, username=username, fork=fork, tags=tags, - pattern=pattern, private=private, namespace=namespace, owner=owner, - limit=query_limit, start=query_start) + flask.g.session, + username=username, + fork=fork, + tags=tags, + pattern=pattern, + private=private, + namespace=namespace, + owner=owner, + limit=query_limit, + start=query_start, + ) # prepare the output json jsonout = { - 'total_projects': project_count, - 'projects': projects, - 'args': { - 'tags': tags, - 'username': username, - 'fork': fork, - 'pattern': pattern, - 'namespace': namespace, - 'owner': owner, - 'short': short, - } + "total_projects": project_count, + "projects": projects, + "args": { + "tags": tags, + "username": username, + "fork": fork, + "pattern": pattern, + "namespace": namespace, + "owner": owner, + "short": short, + }, } if not short: @@ -503,27 +530,28 @@ def api_projects(): else: projects = [ { - 'name': p.name, - 'namespace': p.namespace, - 'fullname': p.fullname.replace('forks/', 'fork/', 1) - if p.fullname.startswith('forks/') else p.fullname, - 'description': p.description, + "name": p.name, + "namespace": p.namespace, + "fullname": p.fullname.replace("forks/", "fork/", 1) + if p.fullname.startswith("forks/") + else p.fullname, + "description": p.description, } for p in projects ] - jsonout['projects'] = projects + jsonout["projects"] = projects if pagination_metadata: - jsonout['args']['page'] = page - jsonout['args']['per_page'] = per_page - jsonout['pagination'] = pagination_metadata + jsonout["args"]["page"] = page + jsonout["args"]["per_page"] = per_page + jsonout["pagination"] = pagination_metadata return flask.jsonify(jsonout) -@API.route('/') -@API.route('//') -@API.route('/fork//') -@API.route('/fork///') +@API.route("/") +@API.route("//") 
+@API.route("/fork//") +@API.route("/fork///") @api_method def api_project(repo, username=None, namespace=None): """ @@ -600,10 +628,11 @@ def api_project(repo, username=None, namespace=None): """ repo = get_authorized_api_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, namespace=namespace + ) expand_group = pagure.utils.is_true( - flask.request.values.get('expand_group', False) + flask.request.values.get("expand_group", False) ) if repo is None: @@ -615,16 +644,17 @@ def api_project(repo, username=None, namespace=None): group_details = {} for grp in repo.projects_groups: group_details[grp.group.group_name] = [ - user.username for user in grp.group.users] - output['group_details'] = group_details + user.username for user in grp.group.users + ] + output["group_details"] = group_details jsonout = flask.jsonify(output) return jsonout -@API.route('/new/', methods=['POST']) -@API.route('/new', methods=['POST']) -@api_login_required(acls=['create_project']) +@API.route("/new/", methods=["POST"]) +@API.route("/new", methods=["POST"]) +@api_login_required(acls=["create_project"]) @api_method def api_new_project(): """ @@ -699,19 +729,20 @@ def api_new_project(): """ user = pagure.lib.search_user( - flask.g.session, username=flask.g.fas_user.username) + flask.g.session, username=flask.g.fas_user.username + ) output = {} - if not pagure_config.get('ENABLE_NEW_PROJECTS', True): + if not pagure_config.get("ENABLE_NEW_PROJECTS", True): raise pagure.exceptions.APIError( - 404, error_code=APIERROR.ENEWPROJECTDISABLED) + 404, error_code=APIERROR.ENEWPROJECTDISABLED + ) - namespaces = pagure_config['ALLOWED_PREFIX'][:] + namespaces = pagure_config["ALLOWED_PREFIX"][:] if user: namespaces.extend([grp for grp in user.groups]) - form = pagure.forms.ProjectForm( - namespaces=namespaces, csrf_enabled=False) + form = pagure.forms.ProjectForm(namespaces=namespaces, csrf_enabled=False) if form.validate_on_submit(): name = 
form.name.data description = form.description.data @@ -724,7 +755,7 @@ def api_new_project(): namespace = namespace.strip() private = False - if pagure_config.get('PRIVATE_PROJECTS', False): + if pagure_config.get("PRIVATE_PROJECTS", False): private = form.private.data try: @@ -737,46 +768,50 @@ def api_new_project(): url=url, avatar_email=avatar_email, user=flask.g.fas_user.username, - blacklist=pagure_config['BLACKLISTED_PROJECTS'], - allowed_prefix=pagure_config['ALLOWED_PREFIX'], - gitfolder=pagure_config['GIT_FOLDER'], - docfolder=pagure_config.get('DOCS_FOLDER'), - ticketfolder=pagure_config.get('TICKETS_FOLDER'), - requestfolder=pagure_config['REQUESTS_FOLDER'], + blacklist=pagure_config["BLACKLISTED_PROJECTS"], + allowed_prefix=pagure_config["ALLOWED_PREFIX"], + gitfolder=pagure_config["GIT_FOLDER"], + docfolder=pagure_config.get("DOCS_FOLDER"), + ticketfolder=pagure_config.get("TICKETS_FOLDER"), + requestfolder=pagure_config["REQUESTS_FOLDER"], add_readme=create_readme, userobj=user, prevent_40_chars=pagure_config.get( - 'OLD_VIEW_COMMIT_ENABLED', False), - user_ns=pagure_config.get('USER_NAMESPACE', False), + "OLD_VIEW_COMMIT_ENABLED", False + ), + user_ns=pagure_config.get("USER_NAMESPACE", False), ) flask.g.session.commit() - output = {'message': 'Project creation queued', - 'taskid': task.id} + output = {"message": "Project creation queued", "taskid": task.id} - if get_request_data().get('wait', True): + if get_request_data().get("wait", True): result = task.get() project = pagure.lib._get_project( - flask.g.session, name=result['repo'], - namespace=result['namespace']) - output = {'message': 'Project "%s" created' % project.fullname} + flask.g.session, + name=result["repo"], + namespace=result["namespace"], + ) + output = {"message": 'Project "%s" created' % project.fullname} except pagure.exceptions.PagureException as err: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.ENOCODE, error=str(err)) + 400, error_code=APIERROR.ENOCODE, 
error=str(err) + ) except SQLAlchemyError as err: # pragma: no cover _log.exception(err) flask.g.session.rollback() raise pagure.exceptions.APIError(400, error_code=APIERROR.EDBERROR) else: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors) + 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors + ) jsonout = flask.jsonify(output) return jsonout -@API.route('/', methods=['PATCH']) -@API.route('//', methods=['PATCH']) -@api_login_required(acls=['modify_project']) +@API.route("/", methods=["PATCH"]) +@API.route("//", methods=["PATCH"]) +@api_login_required(acls=["modify_project"]) @api_method def api_modify_project(repo, namespace=None): """ @@ -846,36 +881,38 @@ def api_modify_project(repo, namespace=None): """ project = get_authorized_api_project( - flask.g.session, repo, namespace=namespace) + flask.g.session, repo, namespace=namespace + ) if not project: - raise pagure.exceptions.APIError( - 404, error_code=APIERROR.ENOPROJECT) + raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) if flask.g.token.project and project != flask.g.token.project: - raise pagure.exceptions.APIError( - 401, error_code=APIERROR.EINVALIDTOK) + raise pagure.exceptions.APIError(401, error_code=APIERROR.EINVALIDTOK) is_site_admin = pagure.utils.is_admin() - admins = [u.username for u in project.get_project_users('admin')] + admins = [u.username for u in project.get_project_users("admin")] # Only allow the main admin, the admins of the project, and Pagure site # admins to modify projects, even if the user has the right ACLs on their # token - if flask.g.fas_user.username not in admins \ - and flask.g.fas_user.username != project.user.username \ - and not is_site_admin: + if ( + flask.g.fas_user.username not in admins + and flask.g.fas_user.username != project.user.username + and not is_site_admin + ): raise pagure.exceptions.APIError( - 401, error_code=APIERROR.EMODIFYPROJECTNOTALLOWED) + 401, 
error_code=APIERROR.EMODIFYPROJECTNOTALLOWED + ) - valid_keys = ['main_admin', 'retain_access'] + valid_keys = ["main_admin", "retain_access"] # Check if it's JSON or form data - if flask.request.headers.get('Content-Type') == 'application/json': + if flask.request.headers.get("Content-Type") == "application/json": # Set force to True to ignore the mimetype. Set silent so that None is # returned if it's invalid JSON. args = flask.request.get_json(force=True, silent=True) or {} - retain_access = args.get('retain_access', False) + retain_access = args.get("retain_access", False) else: args = get_request_data() - retain_access = args.get('retain_access', '').lower() in ['true', '1'] + retain_access = args.get("retain_access", "").lower() in ["true", "1"] if not args: raise pagure.exceptions.APIError(400, error_code=APIERROR.EINVALIDREQ) @@ -884,46 +921,52 @@ def api_modify_project(repo, namespace=None): for key in args.keys(): if key not in valid_keys: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ) + 400, error_code=APIERROR.EINVALIDREQ + ) - if 'main_admin' in args: - if flask.g.fas_user.username != project.user.username \ - and not is_site_admin: + if "main_admin" in args: + if ( + flask.g.fas_user.username != project.user.username + and not is_site_admin + ): raise pagure.exceptions.APIError( - 401, error_code=APIERROR.ENOTMAINADMIN) + 401, error_code=APIERROR.ENOTMAINADMIN + ) # If the main_admin is already set correctly, don't do anything if flask.g.fas_user.username == project.user: return flask.jsonify(project.to_json(public=False, api=True)) try: new_main_admin = pagure.lib.get_user( - flask.g.session, args['main_admin']) + flask.g.session, args["main_admin"] + ) except pagure.exceptions.PagureException: raise pagure.exceptions.APIError(400, error_code=APIERROR.ENOUSER) old_main_admin = project.user.user - pagure.lib.set_project_owner( - flask.g.session, project, new_main_admin) + pagure.lib.set_project_owner(flask.g.session, 
project, new_main_admin) if retain_access and flask.g.fas_user.username == old_main_admin: pagure.lib.add_user_to_project( - flask.g.session, project, new_user=flask.g.fas_user.username, - user=flask.g.fas_user.username) + flask.g.session, + project, + new_user=flask.g.fas_user.username, + user=flask.g.fas_user.username, + ) try: flask.g.session.commit() except SQLAlchemyError: # pragma: no cover flask.g.session.rollback() - raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EDBERROR) + raise pagure.exceptions.APIError(400, error_code=APIERROR.EDBERROR) pagure.lib.git.generate_gitolite_acls(project=project) return flask.jsonify(project.to_json(public=False, api=True)) -@API.route('/fork/', methods=['POST']) -@API.route('/fork', methods=['POST']) -@api_login_required(acls=['fork_project']) +@API.route("/fork/", methods=["POST"]) +@API.route("/fork", methods=["POST"]) +@api_login_required(acls=["fork_project"]) @api_method def api_fork_project(): """ @@ -987,52 +1030,56 @@ def api_fork_project(): namespace = form.namespace.data.strip() or None repo = get_authorized_api_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, namespace=namespace + ) if repo is None: raise pagure.exceptions.APIError( - 404, error_code=APIERROR.ENOPROJECT) + 404, error_code=APIERROR.ENOPROJECT + ) try: task = pagure.lib.fork_project( flask.g.session, user=flask.g.fas_user.username, repo=repo, - gitfolder=pagure_config['GIT_FOLDER'], - docfolder=pagure_config.get('DOCS_FOLDER'), - ticketfolder=pagure_config.get('TICKETS_FOLDER'), - requestfolder=pagure_config['REQUESTS_FOLDER'], + gitfolder=pagure_config["GIT_FOLDER"], + docfolder=pagure_config.get("DOCS_FOLDER"), + ticketfolder=pagure_config.get("TICKETS_FOLDER"), + requestfolder=pagure_config["REQUESTS_FOLDER"], ) flask.g.session.commit() - output = {'message': 'Project forking queued', - 'taskid': task.id} + output = {"message": "Project forking queued", "taskid": task.id} 
- if get_request_data().get('wait', True): + if get_request_data().get("wait", True): task.get() - output = {'message': 'Repo "%s" cloned to "%s/%s"' - % (repo.fullname, flask.g.fas_user.username, - repo.fullname)} + output = { + "message": 'Repo "%s" cloned to "%s/%s"' + % (repo.fullname, flask.g.fas_user.username, repo.fullname) + } except pagure.exceptions.PagureException as err: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.ENOCODE, error=str(err)) + 400, error_code=APIERROR.ENOCODE, error=str(err) + ) except SQLAlchemyError as err: # pragma: no cover _log.exception(err) flask.g.session.rollback() - raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EDBERROR) + raise pagure.exceptions.APIError(400, error_code=APIERROR.EDBERROR) else: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors) + 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors + ) jsonout = flask.jsonify(output) return jsonout -@API.route('//git/generateacls', methods=['POST']) -@API.route('///git/generateacls', methods=['POST']) -@API.route('/fork///git/generateacls', methods=['POST']) -@API.route('/fork////git/generateacls', - methods=['POST']) -@api_login_required(acls=['generate_acls_project']) +@API.route("//git/generateacls", methods=["POST"]) +@API.route("///git/generateacls", methods=["POST"]) +@API.route("/fork///git/generateacls", methods=["POST"]) +@API.route( + "/fork////git/generateacls", methods=["POST"] +) +@api_login_required(acls=["generate_acls_project"]) @api_method def api_generate_acls(repo, username=None, namespace=None): """ @@ -1079,48 +1126,48 @@ def api_generate_acls(repo, username=None, namespace=None): """ project = get_authorized_api_project( - flask.g.session, repo, namespace=namespace) + flask.g.session, repo, namespace=namespace + ) if not project: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) if flask.g.token.project and project != flask.g.token.project: - raise 
pagure.exceptions.APIError( - 401, error_code=APIERROR.EINVALIDTOK) + raise pagure.exceptions.APIError(401, error_code=APIERROR.EINVALIDTOK) # Check if it's JSON or form data - if flask.request.headers.get('Content-Type') == 'application/json': + if flask.request.headers.get("Content-Type") == "application/json": # Set force to True to ignore the mimetype. Set silent so that None is # returned if it's invalid JSON. json = flask.request.get_json(force=True, silent=True) or {} - wait = json.get('wait', False) + wait = json.get("wait", False) else: - wait = pagure.utils.is_true(get_request_data().get('wait')) + wait = pagure.utils.is_true(get_request_data().get("wait")) try: - task = pagure.lib.git.generate_gitolite_acls( - project=project, - ) + task = pagure.lib.git.generate_gitolite_acls(project=project) if wait: task.get() - output = {'message': 'Project ACLs generated'} + output = {"message": "Project ACLs generated"} else: - output = {'message': 'Project ACL generation queued', - 'taskid': task.id} + output = { + "message": "Project ACL generation queued", + "taskid": task.id, + } except pagure.exceptions.PagureException as err: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.ENOCODE, error=str(err)) + 400, error_code=APIERROR.ENOCODE, error=str(err) + ) jsonout = flask.jsonify(output) return jsonout -@API.route('//git/branch', methods=['POST']) -@API.route('///git/branch', methods=['POST']) -@API.route('/fork///git/branch', methods=['POST']) -@API.route('/fork////git/branch', - methods=['POST']) -@api_login_required(acls=['create_branch']) +@API.route("//git/branch", methods=["POST"]) +@API.route("///git/branch", methods=["POST"]) +@API.route("/fork///git/branch", methods=["POST"]) +@API.route("/fork////git/branch", methods=["POST"]) +@api_login_required(acls=["create_branch"]) @api_method def api_new_branch(repo, username=None, namespace=None): """ @@ -1164,52 +1211,57 @@ def api_new_branch(repo, username=None, namespace=None): """ project = 
get_authorized_api_project( - flask.g.session, repo, namespace=namespace) + flask.g.session, repo, namespace=namespace + ) if not project: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) if flask.g.token.project and project != flask.g.token.project: - raise pagure.exceptions.APIError( - 401, error_code=APIERROR.EINVALIDTOK) + raise pagure.exceptions.APIError(401, error_code=APIERROR.EINVALIDTOK) # Check if it's JSON or form data - if flask.request.headers.get('Content-Type') == 'application/json': + if flask.request.headers.get("Content-Type") == "application/json": # Set force to True to ignore the mimetype. Set silent so that None is # returned if it's invalid JSON. args = flask.request.get_json(force=True, silent=True) or {} else: args = get_request_data() - branch = args.get('branch') - from_branch = args.get('from_branch') - from_commit = args.get('from_commit') + branch = args.get("branch") + from_branch = args.get("from_branch") + from_commit = args.get("from_commit") if from_branch and from_commit: raise pagure.exceptions.APIError(400, error_code=APIERROR.EINVALIDREQ) - if not branch or not isinstance(branch, string_types) or \ - (from_branch and not isinstance(from_branch, string_types)) or \ - (from_commit and not isinstance(from_commit, string_types)): + if ( + not branch + or not isinstance(branch, string_types) + or (from_branch and not isinstance(from_branch, string_types)) + or (from_commit and not isinstance(from_commit, string_types)) + ): raise pagure.exceptions.APIError(400, error_code=APIERROR.EINVALIDREQ) try: - pagure.lib.git.new_git_branch(project, branch, from_branch=from_branch, - from_commit=from_commit) + pagure.lib.git.new_git_branch( + project, branch, from_branch=from_branch, from_commit=from_commit + ) except GitError: # pragma: no cover raise pagure.exceptions.APIError(400, error_code=APIERROR.EGITERROR) except pagure.exceptions.PagureException as error: raise pagure.exceptions.APIError( - 400, 
error_code=APIERROR.ENOCODE, error=str(error)) + 400, error_code=APIERROR.ENOCODE, error=str(error) + ) - output = {'message': 'Project branch was created'} + output = {"message": "Project branch was created"} jsonout = flask.jsonify(output) return jsonout -@API.route('//c//flag') -@API.route('///c//flag') -@API.route('/fork///c//flag') -@API.route('/fork////c//flag') +@API.route("//c//flag") +@API.route("///c//flag") +@API.route("/fork///c//flag") +@API.route("/fork////c//flag") @api_method def api_commit_flags(repo, commit_hash, username=None, namespace=None): """ @@ -1266,7 +1318,8 @@ def api_commit_flags(repo, commit_hash, username=None, namespace=None): """ repo = get_authorized_api_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, namespace=namespace + ) if repo is None: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) @@ -1275,26 +1328,21 @@ def api_commit_flags(repo, commit_hash, username=None, namespace=None): try: repo_obj.get(commit_hash) except ValueError: - raise pagure.exceptions.APIError( - 404, error_code=APIERROR.ENOCOMMIT) + raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOCOMMIT) flags = pagure.lib.get_commit_flag(flask.g.session, repo, commit_hash) flags = [f.to_json(public=True) for f in flags] - return flask.jsonify( - { - 'total_flags': len(flags), - 'flags': flags - } - ) + return flask.jsonify({"total_flags": len(flags), "flags": flags}) -@API.route('//c//flag', methods=['POST']) -@API.route('///c//flag', methods=['POST']) -@API.route('/fork///c//flag', methods=['POST']) +@API.route("//c//flag", methods=["POST"]) +@API.route("///c//flag", methods=["POST"]) +@API.route("/fork///c//flag", methods=["POST"]) @API.route( - '/fork////c//flag', - methods=['POST']) -@api_login_required(acls=['commit_flag']) + "/fork////c//flag", + methods=["POST"], +) +@api_login_required(acls=["commit_flag"]) @api_method def api_commit_add_flag(repo, commit_hash, 
username=None, namespace=None): """ @@ -1403,25 +1451,23 @@ def api_commit_add_flag(repo, commit_hash, username=None, namespace=None): """ # noqa repo = get_authorized_api_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, namespace=namespace + ) output = {} if repo is None: - raise pagure.exceptions.APIError( - 404, error_code=APIERROR.ENOPROJECT) + raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) if flask.g.token.project and repo != flask.g.token.project: - raise pagure.exceptions.APIError( - 401, error_code=APIERROR.EINVALIDTOK) + raise pagure.exceptions.APIError(401, error_code=APIERROR.EINVALIDTOK) reponame = pagure.utils.get_repo_path(repo) repo_obj = Repository(reponame) try: repo_obj.get(commit_hash) except ValueError: - raise pagure.exceptions.APIError( - 404, error_code=APIERROR.ENOCOMMIT) + raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOCOMMIT) form = pagure.forms.AddPullRequestFlagForm(csrf_enabled=False) if form.validate_on_submit(): @@ -1448,35 +1494,38 @@ def api_commit_add_flag(repo, commit_hash, username=None, namespace=None): ) flask.g.session.commit() c_flag = pagure.lib.get_commit_flag_by_uid( - flask.g.session, commit_hash, uid) - output['message'] = message - output['uid'] = uid - output['flag'] = c_flag.to_json() + flask.g.session, commit_hash, uid + ) + output["message"] = message + output["uid"] = uid + output["flag"] = c_flag.to_json() except pagure.exceptions.PagureException as err: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.ENOCODE, error=str(err)) + 400, error_code=APIERROR.ENOCODE, error=str(err) + ) except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() _log.exception(err) - raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EDBERROR) + raise pagure.exceptions.APIError(400, error_code=APIERROR.EDBERROR) else: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ, 
errors=form.errors) + 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors + ) jsonout = flask.jsonify(output) return jsonout -@API.route('//watchers/update', methods=['POST']) -@API.route('///watchers/update', methods=['POST']) -@API.route('/fork///watchers/update', methods=['POST']) +@API.route("//watchers/update", methods=["POST"]) +@API.route("///watchers/update", methods=["POST"]) +@API.route("/fork///watchers/update", methods=["POST"]) @API.route( - '/fork////watchers/update', methods=['POST']) -@api_login_required(acls=['update_watch_status']) + "/fork////watchers/update", methods=["POST"] +) +@api_login_required(acls=["update_watch_status"]) @api_method def api_update_project_watchers(repo, username=None, namespace=None): - ''' + """ Update project watchers ----------------------- Allows anyone to update their own watch status on the project. @@ -1541,73 +1590,71 @@ def api_update_project_watchers(repo, username=None, namespace=None): "message": "You are now watching issues and PRs on this project", "status": "ok" } - ''' + """ project = get_authorized_api_project( - flask.g.session, repo, namespace=namespace) + flask.g.session, repo, namespace=namespace + ) if not project: - raise pagure.exceptions.APIError( - 404, error_code=APIERROR.ENOPROJECT) + raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) if flask.g.token.project and project != flask.g.token.project: - raise pagure.exceptions.APIError( - 401, error_code=APIERROR.EINVALIDTOK) + raise pagure.exceptions.APIError(401, error_code=APIERROR.EINVALIDTOK) # Get the input submitted data = get_request_data() - watcher = data.get('watcher') + watcher = data.get("watcher") if not watcher: - _log.debug( - 'api_update_project_watchers: Invalid watcher: %s', - watcher) - raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ) + _log.debug("api_update_project_watchers: Invalid watcher: %s", watcher) + raise pagure.exceptions.APIError(400, error_code=APIERROR.EINVALIDREQ) 
is_site_admin = pagure.utils.is_admin() # Only allow the main admin, and the user themselves to update their # status if not is_site_admin and flask.g.fas_user.username != watcher: raise pagure.exceptions.APIError( - 401, error_code=APIERROR.EMODIFYPROJECTNOTALLOWED) + 401, error_code=APIERROR.EMODIFYPROJECTNOTALLOWED + ) try: pagure.lib.get_user(flask.g.session, watcher) except pagure.exceptions.PagureException as err: _log.debug( - 'api_update_project_watchers: Invalid user watching: %s', - watcher) - raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ) + "api_update_project_watchers: Invalid user watching: %s", watcher + ) + raise pagure.exceptions.APIError(400, error_code=APIERROR.EINVALIDREQ) - watch_status = data.get('status') + watch_status = data.get("status") try: msg = pagure.lib.update_watch_status( session=flask.g.session, project=project, user=watcher, - watch=watch_status) + watch=watch_status, + ) flask.g.session.commit() except pagure.exceptions.PagureException as err: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.ENOCODE, error=str(err)) + 400, error_code=APIERROR.ENOCODE, error=str(err) + ) except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() _log.exception(err) - raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EDBERROR) + raise pagure.exceptions.APIError(400, error_code=APIERROR.EDBERROR) - return flask.jsonify({'message': msg, 'status': 'ok'}) + return flask.jsonify({"message": msg, "status": "ok"}) -@API.route('//git/modifyacls', methods=['POST']) -@API.route('///git/modifyacls', methods=['POST']) -@API.route('/fork///git/modifyacls', methods=['POST']) -@API.route('/fork////git/modifyacls', - methods=['POST']) -@api_login_required(acls=['modify_project']) +@API.route("//git/modifyacls", methods=["POST"]) +@API.route("///git/modifyacls", methods=["POST"]) +@API.route("/fork///git/modifyacls", methods=["POST"]) +@API.route( + "/fork////git/modifyacls", methods=["POST"] +) 
+@api_login_required(acls=["modify_project"]) @api_method def api_modify_acls(repo, namespace=None, username=None): """ @@ -1701,91 +1748,110 @@ def api_modify_acls(repo, namespace=None, username=None): """ output = {} project = get_authorized_api_project( - flask.g.session, repo, namespace=namespace) + flask.g.session, repo, namespace=namespace + ) if not project: raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOPROJECT) if flask.g.token.project and project != flask.g.token.project: - raise pagure.exceptions.APIError( - 401, error_code=APIERROR.EINVALIDTOK) + raise pagure.exceptions.APIError(401, error_code=APIERROR.EINVALIDTOK) form = pagure.forms.ModifyACLForm(csrf_enabled=False) if form.validate_on_submit(): acl = form.acl.data group = None user = None - if form.user_type.data == 'user': + if form.user_type.data == "user": user = form.name.data else: group = form.name.data is_site_admin = pagure.utils.is_admin() - admins = [u.username for u in project.get_project_users('admin')] + admins = [u.username for u in project.get_project_users("admin")] if not acl: - if user and flask.g.fas_user.username != user \ - and flask.g.fas_user.username not in admins \ - and flask.g.fas_user.username != project.user.username \ - and not is_site_admin: + if ( + user + and flask.g.fas_user.username != user + and flask.g.fas_user.username not in admins + and flask.g.fas_user.username != project.user.username + and not is_site_admin + ): raise pagure.exceptions.APIError( - 401, error_code=APIERROR.EMODIFYPROJECTNOTALLOWED) - elif flask.g.fas_user.username not in admins \ - and flask.g.fas_user.username != project.user.username \ - and not is_site_admin: + 401, error_code=APIERROR.EMODIFYPROJECTNOTALLOWED + ) + elif ( + flask.g.fas_user.username not in admins + and flask.g.fas_user.username != project.user.username + and not is_site_admin + ): raise pagure.exceptions.APIError( - 401, error_code=APIERROR.EMODIFYPROJECTNOTALLOWED) + 401, 
error_code=APIERROR.EMODIFYPROJECTNOTALLOWED + ) if user: user_obj = pagure.lib.search_user(flask.g.session, username=user) if not user_obj: raise pagure.exceptions.APIError( - 404, error_code=APIERROR.ENOUSER) + 404, error_code=APIERROR.ENOUSER + ) elif group: group_obj = pagure.lib.search_groups( - flask.g.session, group_name=group) + flask.g.session, group_name=group + ) if not group_obj: raise pagure.exceptions.APIError( - 404, error_code=APIERROR.ENOGROUP) + 404, error_code=APIERROR.ENOGROUP + ) if acl: - if user and user_obj not in project.access_users[acl] and \ - user_obj.user != project.user.user: + if ( + user + and user_obj not in project.access_users[acl] + and user_obj.user != project.user.user + ): _log.info( - 'Adding user %s to project: %s', user, project.fullname) + "Adding user %s to project: %s", user, project.fullname + ) pagure.lib.add_user_to_project( session=flask.g.session, project=project, new_user=user, user=flask.g.fas_user.username, - access=acl + access=acl, ) elif group and group_obj not in project.access_groups[acl]: _log.info( - 'Adding group %s to project: %s', group, - project.fullname) + "Adding group %s to project: %s", group, project.fullname + ) pagure.lib.add_group_to_project( session=flask.g.session, project=project, new_group=group, user=flask.g.fas_user.username, access=acl, - create=pagure_config.get('ENABLE_GROUP_MNGT', False), + create=pagure_config.get("ENABLE_GROUP_MNGT", False), is_admin=pagure.utils.is_admin(), ) else: if user: _log.info( - 'Looking at removing user %s from project %s', user, - project.fullname) + "Looking at removing user %s from project %s", + user, + project.fullname, + ) try: msg = pagure.lib.remove_user_of_project( - flask.g.session, user_obj, project, - flask.g.fas_user.username) + flask.g.session, + user_obj, + project, + flask.g.fas_user.username, + ) except pagure.exceptions.PagureException as err: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ, - errors='%s' % err) 
+ 400, error_code=APIERROR.EINVALIDREQ, errors="%s" % err + ) elif group: pass @@ -1794,18 +1860,18 @@ def api_modify_acls(repo, namespace=None, username=None): except pagure.exceptions.PagureException as msg: flask.g.session.rollback() _log.debug(msg) - flask.flash(str(msg), 'error') + flask.flash(str(msg), "error") except SQLAlchemyError as err: _log.exception(err) flask.g.session.rollback() - raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EDBERROR) + raise pagure.exceptions.APIError(400, error_code=APIERROR.EDBERROR) pagure.lib.git.generate_gitolite_acls(project=project) output = project.to_json(api=True, public=True) else: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors) + 400, error_code=APIERROR.EINVALIDREQ, errors=form.errors + ) jsonout = flask.jsonify(output) return jsonout diff --git a/pagure/api/user.py b/pagure/api/user.py index 3e9a3f2..64b61ee 100644 --- a/pagure/api/user.py +++ b/pagure/api/user.py @@ -33,7 +33,7 @@ def _get_user(username): raise pagure.exceptions.APIError(404, error_code=APIERROR.ENOUSER) -@API.route('/user/') +@API.route("/user/") @api_method def api_view_user(username): """ @@ -123,26 +123,25 @@ def api_view_user(username): user = _get_user(username=username) per_page = get_per_page() - repopage = flask.request.args.get('repopage', 1) + repopage = flask.request.args.get("repopage", 1) try: repopage = int(repopage) except ValueError: repopage = 1 - forkpage = flask.request.args.get('forkpage', 1) + forkpage = flask.request.args.get("forkpage", 1) try: forkpage = int(forkpage) except ValueError: forkpage = 1 repos_cnt = pagure.lib.search_projects( - flask.g.session, - username=username, - fork=False, - count=True) + flask.g.session, username=username, fork=False, count=True + ) pagination_metadata_repo = pagure.lib.get_pagination_metadata( - flask.request, repopage, per_page, repos_cnt, key_page='repopage') + flask.request, repopage, per_page, repos_cnt, key_page="repopage" 
+ ) repopage_start = (repopage - 1) * per_page repopage_limit = per_page @@ -151,16 +150,16 @@ def api_view_user(username): username=username, fork=False, start=repopage_start, - limit=repopage_limit) + limit=repopage_limit, + ) forks_cnt = pagure.lib.search_projects( - flask.g.session, - username=username, - fork=True, - count=True) + flask.g.session, username=username, fork=True, count=True + ) pagination_metadata_fork = pagure.lib.get_pagination_metadata( - flask.request, forkpage, per_page, forks_cnt, key_page='forkpage') + flask.request, forkpage, per_page, forks_cnt, key_page="forkpage" + ) forkpage_start = (forkpage - 1) * per_page forkpage_limit = per_page @@ -169,20 +168,21 @@ def api_view_user(username): username=username, fork=True, start=forkpage_start, - limit=forkpage_limit) + limit=forkpage_limit, + ) - output['user'] = user.to_json(public=True) - output['repos'] = [repo.to_json(public=True) for repo in repos] - output['forks'] = [repo.to_json(public=True) for repo in forks] - output['repos_pagination'] = pagination_metadata_repo - output['forks_pagination'] = pagination_metadata_fork + output["user"] = user.to_json(public=True) + output["repos"] = [repo.to_json(public=True) for repo in repos] + output["forks"] = [repo.to_json(public=True) for repo in forks] + output["repos_pagination"] = pagination_metadata_repo + output["forks_pagination"] = pagination_metadata_fork jsonout = flask.jsonify(output) jsonout.status_code = httpcode return jsonout -@API.route('/user//issues') +@API.route("/user//issues") @api_method def api_view_user_issues(username): """ @@ -357,48 +357,54 @@ def api_view_user_issues(username): """ # noqa - milestone = flask.request.args.getlist('milestones', None) - no_stones = flask.request.args.get('no_stones', None) + milestone = flask.request.args.getlist("milestones", None) + no_stones = flask.request.args.get("no_stones", None) if no_stones is not None: no_stones = is_true(no_stones) - since = flask.request.args.get('since', 
None) - order = flask.request.args.get('order', None) - order_key = flask.request.args.get('order_key', None) - status = flask.request.args.get('status', None) - tags = flask.request.args.getlist('tags') + since = flask.request.args.get("since", None) + order = flask.request.args.get("order", None) + order_key = flask.request.args.get("order_key", None) + status = flask.request.args.get("status", None) + tags = flask.request.args.getlist("tags") tags = [tag.strip() for tag in tags if tag.strip()] page = get_page() per_page = get_per_page() - assignee = flask.request.args.get('assignee', '').lower()\ - not in ['false', '0', 'f'] - author = flask.request.args.get('author', '').lower() \ - not in ['false', '0', 'f'] + assignee = flask.request.args.get("assignee", "").lower() not in [ + "false", + "0", + "f", + ] + author = flask.request.args.get("author", "").lower() not in [ + "false", + "0", + "f", + ] offset = (page - 1) * per_page limit = per_page params = { - 'session': flask.g.session, - 'tags': tags, - 'milestones': milestone, - 'order': order, - 'order_key': order_key, - 'no_milestones': no_stones, - 'offset': offset, - 'limit': limit, + "session": flask.g.session, + "tags": tags, + "milestones": milestone, + "order": order, + "order_key": order_key, + "no_milestones": no_stones, + "offset": offset, + "limit": limit, } if status is not None: - if status.lower() == 'all': - params.update({'status': None}) - elif status.lower() == 'closed': - params.update({'closed': True}) + if status.lower() == "all": + params.update({"status": None}) + elif status.lower() == "closed": + params.update({"closed": True}) else: - params.update({'status': status}) + params.update({"status": status}) else: - params.update({'status': 'Open'}) + params.update({"status": "Open"}) updated_after = None if since: @@ -409,16 +415,18 @@ def api_view_user_issues(username): updated_after = datetime.datetime.fromtimestamp(int(since)) except ValueError: raise pagure.exceptions.APIError( - 400, 
error_code=APIERROR.ETIMESTAMP) + 400, error_code=APIERROR.ETIMESTAMP + ) else: # We assume datetime format, so validate it try: - updated_after = datetime.datetime.strptime(since, '%Y-%m-%d') + updated_after = datetime.datetime.strptime(since, "%Y-%m-%d") except ValueError: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.EDATETIME) + 400, error_code=APIERROR.EDATETIME + ) - params.update({'updated_after': updated_after}) + params.update({"updated_after": updated_after}) issues_created = [] issues_created_pages = 1 @@ -429,10 +437,11 @@ def api_view_user_issues(username): params_created = params.copy() params_created.update({"author": username}) issues_created = pagure.lib.search_issues(**params_created) - params_created.update({"offset": None, 'limit': None, 'count': True}) + params_created.update({"offset": None, "limit": None, "count": True}) issues_created_cnt = pagure.lib.search_issues(**params_created) pagination_issues_created = pagure.lib.get_pagination_metadata( - flask.request, page, per_page, issues_created_cnt) + flask.request, page, per_page, issues_created_cnt + ) issues_assigned = [] issues_assigned_pages = 1 @@ -443,39 +452,46 @@ def api_view_user_issues(username): params_assigned = params.copy() params_assigned.update({"assignee": username}) issues_assigned = pagure.lib.search_issues(**params_assigned) - params_assigned.update({"offset": None, 'limit': None, 'count': True}) + params_assigned.update({"offset": None, "limit": None, "count": True}) issues_assigned_cnt = pagure.lib.search_issues(**params_assigned) pagination_issues_assigned = pagure.lib.get_pagination_metadata( - flask.request, page, per_page, issues_assigned_cnt) - - jsonout = flask.jsonify({ - 'pagination_issues_created': pagination_issues_created, - 'pagination_issues_assigned': pagination_issues_assigned, - 'total_issues_created_pages': issues_created_pages, - 'total_issues_assigned_pages': issues_assigned_pages, - 'total_issues_created': issues_created_cnt, - 
'total_issues_assigned': issues_assigned_cnt, - 'issues_created': [issue.to_json(public=True, with_project=True) - for issue in issues_created], - 'issues_assigned': [issue.to_json(public=True, with_project=True) - for issue in issues_assigned], - 'args': { - 'milestones': milestone, - 'no_stones': no_stones, - 'order': order, - 'order_key': order_key, - 'since': since, - 'status': status, - 'tags': tags, - 'page': page, - 'assignee': assignee, - 'author': author, + flask.request, page, per_page, issues_assigned_cnt + ) + + jsonout = flask.jsonify( + { + "pagination_issues_created": pagination_issues_created, + "pagination_issues_assigned": pagination_issues_assigned, + "total_issues_created_pages": issues_created_pages, + "total_issues_assigned_pages": issues_assigned_pages, + "total_issues_created": issues_created_cnt, + "total_issues_assigned": issues_assigned_cnt, + "issues_created": [ + issue.to_json(public=True, with_project=True) + for issue in issues_created + ], + "issues_assigned": [ + issue.to_json(public=True, with_project=True) + for issue in issues_assigned + ], + "args": { + "milestones": milestone, + "no_stones": no_stones, + "order": order, + "order_key": order_key, + "since": since, + "status": status, + "tags": tags, + "page": page, + "assignee": assignee, + "author": author, + }, } - }) + ) return jsonout -@API.route('/user//activity/stats') +@API.route("/user//activity/stats") @api_method def api_view_user_activity_stats(username): """ @@ -551,8 +567,8 @@ def api_view_user_activity_stats(username): } """ - date_format = flask.request.args.get('format', 'isoformat') - tz = flask.request.args.get('tz', 'UTC') + date_format = flask.request.args.get("format", "isoformat") + tz = flask.request.args.get("tz", "UTC") user = _get_user(username=username) @@ -560,11 +576,11 @@ def api_view_user_activity_stats(username): flask.g.session, user, datetime.datetime.utcnow().date() + datetime.timedelta(days=1), - tz=tz + tz=tz, ) def format_date(d, tz): - if 
date_format == 'timestamp': + if date_format == "timestamp": # the reason we have this at all is the cal-heatmap js lib # wants times as timestamps. We're trying to feed it a # timestamp it will count as having happened on date 'd'. @@ -588,7 +604,7 @@ def api_view_user_activity_stats(username): return jsonout -@API.route('/user//activity/') +@API.route("/user//activity/") @api_method def api_view_user_activity_date(username, date): """ @@ -660,15 +676,16 @@ def api_view_user_activity_date(username, date): } """ # noqa - grouped = is_true(flask.request.args.get('grouped')) - tz = flask.request.args.get('tz', 'UTC') + grouped = is_true(flask.request.args.get("grouped")) + tz = flask.request.args.get("tz", "UTC") try: date = arrow.get(date) - date = date.strftime('%Y-%m-%d') + date = date.strftime("%Y-%m-%d") except arrow.parser.ParserError as err: raise pagure.exceptions.APIError( - 400, error_code=APIERROR.ENOCODE, error=str(err)) + 400, error_code=APIERROR.ENOCODE, error=str(err) + ) user = _get_user(username=username) @@ -680,7 +697,7 @@ def api_view_user_activity_date(username, date): commits = collections.defaultdict(list) acts = [] for activity in activities: - if activity.log_type == 'committed': + if activity.log_type == "committed": commits[activity.project.fullname].append(activity) else: acts.append(activity) @@ -688,14 +705,14 @@ def api_view_user_activity_date(username, date): if len(commits[project]) == 1: tmp = dict( description_mk=pagure.lib.text2markdown( - six.text_type(commits[project][0])) + six.text_type(commits[project][0]) + ) ) else: tmp = dict( description_mk=pagure.lib.text2markdown( - '@%s pushed %s commits to %s' % ( - username, len(commits[project]), project - ) + "@%s pushed %s commits to %s" + % (username, len(commits[project]), project) ) ) js_act.append(tmp) @@ -703,20 +720,16 @@ def api_view_user_activity_date(username, date): for act in activities: activity = act.to_json(public=True) - activity['description_mk'] = 
pagure.lib.text2markdown( - six.text_type(act)) + activity["description_mk"] = pagure.lib.text2markdown( + six.text_type(act) + ) js_act.append(activity) - jsonout = flask.jsonify( - dict( - activities=js_act, - date=date, - ) - ) + jsonout = flask.jsonify(dict(activities=js_act, date=date)) return jsonout -@API.route('/user//requests/filed') +@API.route("/user//requests/filed") @api_method def api_view_user_requests_filed(username): """ @@ -911,7 +924,7 @@ def api_view_user_requests_filed(username): } """ # noqa - status = flask.request.args.get('status', 'open') + status = flask.request.args.get("status", "open") page = get_page() per_page = get_per_page() @@ -919,19 +932,17 @@ def api_view_user_requests_filed(username): limit = per_page orig_status = status - if status.lower() == 'all': + if status.lower() == "all": status = None else: status = status.capitalize() pullrequests_cnt = pagure.lib.get_pull_request_of_user( - flask.g.session, - username=username, - status=status, - count=True, + flask.g.session, username=username, status=status, count=True ) pagination = pagure.lib.get_pagination_metadata( - flask.request, page, per_page, pullrequests_cnt) + flask.request, page, per_page, pullrequests_cnt + ) pullrequests = pagure.lib.get_pull_request_of_user( flask.g.session, @@ -943,23 +954,24 @@ def api_view_user_requests_filed(username): ) pullrequestslist = [ - pr.to_json(public=True, api=True) - for pr in pullrequests + pr.to_json(public=True, api=True) for pr in pullrequests ] - return flask.jsonify({ - 'total_requests': len(pullrequestslist), - 'requests': pullrequestslist, - 'args': { - 'username': username, - 'status': orig_status, - 'page': page, - }, - 'pagination': pagination, - }) + return flask.jsonify( + { + "total_requests": len(pullrequestslist), + "requests": pullrequestslist, + "args": { + "username": username, + "status": orig_status, + "page": page, + }, + "pagination": pagination, + } + ) -@API.route('/user//requests/actionable') 
+@API.route("/user//requests/actionable") @api_method def api_view_user_requests_actionable(username): """ @@ -1150,7 +1162,7 @@ def api_view_user_requests_actionable(username): } """ # noqa - status = flask.request.args.get('status', 'open') + status = flask.request.args.get("status", "open") page = get_page() per_page = get_per_page() @@ -1158,19 +1170,17 @@ def api_view_user_requests_actionable(username): limit = per_page orig_status = status - if status.lower() == 'all': + if status.lower() == "all": status = None else: status = status.capitalize() pullrequests_cnt = pagure.lib.get_pull_request_of_user( - flask.g.session, - username=username, - status=status, - count=True, + flask.g.session, username=username, status=status, count=True ) pagination = pagure.lib.get_pagination_metadata( - flask.request, page, per_page, pullrequests_cnt) + flask.request, page, per_page, pullrequests_cnt + ) pullrequests = pagure.lib.get_pull_request_of_user( flask.g.session, @@ -1182,17 +1192,18 @@ def api_view_user_requests_actionable(username): ) pullrequestslist = [ - pr.to_json(public=True, api=True) - for pr in pullrequests + pr.to_json(public=True, api=True) for pr in pullrequests ] - return flask.jsonify({ - 'total_requests': len(pullrequestslist), - 'requests': pullrequestslist, - 'args': { - 'username': username, - 'status': orig_status, - 'page': page, - }, - 'pagination': pagination, - }) + return flask.jsonify( + { + "total_requests": len(pullrequestslist), + "requests": pullrequestslist, + "args": { + "username": username, + "status": orig_status, + "page": page, + }, + "pagination": pagination, + } + ) diff --git a/pagure/cli/admin.py b/pagure/cli/admin.py index ddce4a2..2ecf261 100644 --- a/pagure/cli/admin.py +++ b/pagure/cli/admin.py @@ -19,10 +19,11 @@ import sys import arrow from six.moves import input -if 'PAGURE_CONFIG' not in os.environ \ - and os.path.exists('/etc/pagure/pagure.cfg'): - print('Using configuration file `/etc/pagure/pagure.cfg`') - 
os.environ['PAGURE_CONFIG'] = '/etc/pagure/pagure.cfg' +if "PAGURE_CONFIG" not in os.environ and os.path.exists( + "/etc/pagure/pagure.cfg" +): + print("Using configuration file `/etc/pagure/pagure.cfg`") + os.environ["PAGURE_CONFIG"] = "/etc/pagure/pagure.cfg" import pagure.config # noqa: E402 import pagure.exceptions # noqa: E402 @@ -33,16 +34,16 @@ from pagure.flask_app import generate_user_key_files # noqa: E402 _config = pagure.config.reload_config() -session = pagure.lib.create_session(_config['DB_URL']) +session = pagure.lib.create_session(_config["DB_URL"]) _log = logging.getLogger(__name__) WATCH = { - '-1': 'reset the watch status to default', - '0': 'unwatch, don\'t notify the user of anything', - '1': 'watch issues and PRs', - '2': 'watch commits', - '3': 'watch issues, PRs and commits', + "-1": "reset the watch status to default", + "0": "unwatch, don't notify the user of anything", + "1": "watch issues and PRs", + "2": "watch commits", + "3": "watch issues, PRs and commits", } @@ -54,18 +55,24 @@ def _parser_refresh_gitolite(subparser): """ local_parser = subparser.add_parser( - 'refresh-gitolite', - help='Re-generate the gitolite config file') - local_parser.add_argument( - '--user', help="User of the project (to use only on forks)") + "refresh-gitolite", help="Re-generate the gitolite config file" + ) local_parser.add_argument( - '--project', help="Project to update (as namespace/project if there " - "is a namespace)") + "--user", help="User of the project (to use only on forks)" + ) local_parser.add_argument( - '--group', help="Group to refresh") + "--project", + help="Project to update (as namespace/project if there " + "is a namespace)", + ) + local_parser.add_argument("--group", help="Group to refresh") local_parser.add_argument( - '--all', dest="all_", default=False, action='store_true', - help="Refresh all the projects") + "--all", + dest="all_", + default=False, + action="store_true", + help="Refresh all the projects", + ) 
local_parser.set_defaults(func=do_generate_acl) @@ -77,8 +84,9 @@ def _parser_refresh_ssh(subparser): """ local_parser = subparser.add_parser( - 'refresh-ssh', - help="Re-write to disk every user's ssh key stored in the database") + "refresh-ssh", + help="Re-write to disk every user's ssh key stored in the database", + ) local_parser.set_defaults(func=do_refresh_ssh) @@ -90,8 +98,9 @@ def _parser_clear_hook_token(subparser): """ local_parser = subparser.add_parser( - 'clear-hook-token', - help='Generate a new hook token for every project in this instance') + "clear-hook-token", + help="Generate a new hook token for every project in this instance", + ) local_parser.set_defaults(func=do_generate_hook_token) @@ -103,21 +112,30 @@ def _parser_admin_token_list(subparser): """ local_parser = subparser.add_parser( - 'list', help="List the API admin token") - local_parser.add_argument( - '--user', - help="User to associate or associated with the token") + "list", help="List the API admin token" + ) local_parser.add_argument( - '--token', help="API token") + "--user", help="User to associate or associated with the token" + ) + local_parser.add_argument("--token", help="API token") local_parser.add_argument( - '--active', default=False, action='store_true', - help="Only list active API token") + "--active", + default=False, + action="store_true", + help="Only list active API token", + ) local_parser.add_argument( - '--expired', default=False, action='store_true', - help="Only list expired API token") + "--expired", + default=False, + action="store_true", + help="Only list expired API token", + ) local_parser.add_argument( - '--all', default=False, action='store_true', - help="Only list all API token instead of only those with admin ACLs") + "--all", + default=False, + action="store_true", + help="Only list all API token instead of only those with admin ACLs", + ) local_parser.set_defaults(func=do_list_admin_token) @@ -129,9 +147,9 @@ def _parser_admin_token_info(subparser): 
""" local_parser = subparser.add_parser( - 'info', help="Provide some information about a specific API token") - local_parser.add_argument( - 'token', help="API token") + "info", help="Provide some information about a specific API token" + ) + local_parser.add_argument("token", help="API token") local_parser.set_defaults(func=do_info_admin_token) @@ -144,9 +162,9 @@ def _parser_admin_token_expire(subparser): """ # Expire admin token local_parser = subparser.add_parser( - 'expire', help="Expire a specific API token") - local_parser.add_argument( - 'token', help="API token") + "expire", help="Expire a specific API token" + ) + local_parser.add_argument("token", help="API token") local_parser.set_defaults(func=do_expire_admin_token) @@ -159,9 +177,9 @@ def _parser_admin_token_create(subparser): """ # Create admin token local_parser = subparser.add_parser( - 'create', help="Create a new API token") - local_parser.add_argument( - 'user', help="User to associate with the token") + "create", help="Create a new API token" + ) + local_parser.add_argument("user", help="User to associate with the token") local_parser.set_defaults(func=do_create_admin_token) @@ -174,11 +192,10 @@ def _parser_admin_token_update(subparser): """ # Update admin token local_parser = subparser.add_parser( - 'update', help="Update the expiration date of an API token") - local_parser.add_argument( - 'token', help="API token") - local_parser.add_argument( - 'date', help="New expiration date") + "update", help="Update the expiration date of an API token" + ) + local_parser.add_argument("token", help="API token") + local_parser.add_argument("date", help="New expiration date") local_parser.set_defaults(func=do_update_admin_token) @@ -190,10 +207,10 @@ def _parser_admin_token(subparser): """ local_parser = subparser.add_parser( - 'admin-token', - help='Manages the admin tokens for this instance') + "admin-token", help="Manages the admin tokens for this instance" + ) - subsubparser = 
local_parser.add_subparsers(title='actions') + subsubparser = local_parser.add_subparsers(title="actions") # list _parser_admin_token_list(subsubparser) @@ -216,12 +233,14 @@ def _parser_get_watch(subparser): """ # Update watch status local_parser = subparser.add_parser( - 'get-watch', help="Get someone's watch status on a project") - local_parser.add_argument( - 'project', help="Project (as namespace/project if there " - "is a namespace) -- Fork not supported") + "get-watch", help="Get someone's watch status on a project" + ) local_parser.add_argument( - 'user', help="User to get the watch status of") + "project", + help="Project (as namespace/project if there " + "is a namespace) -- Fork not supported", + ) + local_parser.add_argument("user", help="User to get the watch status of") local_parser.set_defaults(func=do_get_watch_status) @@ -234,14 +253,19 @@ def _parser_update_watch(subparser): """ # Update watch status local_parser = subparser.add_parser( - 'update-watch', help="Update someone's watch status on a project") + "update-watch", help="Update someone's watch status on a project" + ) local_parser.add_argument( - 'project', help="Project to update (as namespace/project if there " - "is a namespace) -- Fork not supported") + "project", + help="Project to update (as namespace/project if there " + "is a namespace) -- Fork not supported", + ) local_parser.add_argument( - 'user', help="User to update the watch status of") + "user", help="User to update the watch status of" + ) local_parser.add_argument( - '-s', '--status', help="Watch status to update to") + "-s", "--status", help="Watch status to update to" + ) local_parser.set_defaults(func=do_update_watch_status) @@ -253,17 +277,21 @@ def _parser_read_only(subparser): """ local_parser = subparser.add_parser( - 'read-only', - help='Get or set the read-only flag on a project') + "read-only", help="Get or set the read-only flag on a project" + ) local_parser.add_argument( - '--user', help="User of the project 
(to use only on forks)") + "--user", help="User of the project (to use only on forks)" + ) local_parser.add_argument( - 'project', help="Project to update (as namespace/project if there " - "is a namespace)") + "project", + help="Project to update (as namespace/project if there " + "is a namespace)", + ) local_parser.add_argument( - '--ro', + "--ro", help="Read-Only status to set (has to be: true or false), do not " - "specify to get the current status") + "specify to get the current status", + ) local_parser.set_defaults(func=do_read_only) @@ -275,17 +303,18 @@ def _parser_new_group(subparser): """ local_parser = subparser.add_parser( - 'new-group', - help='Create a new group on this pagure instance') - local_parser.add_argument('group_name', help="Name of the group") + "new-group", help="Create a new group on this pagure instance" + ) + local_parser.add_argument("group_name", help="Name of the group") local_parser.add_argument( - 'username', + "username", help="Name of the user creating the group " - "(will be added to the group once created)") - local_parser.add_argument( - '--display', help="Display name of the group") + "(will be added to the group once created)", + ) + local_parser.add_argument("--display", help="Display name of the group") local_parser.add_argument( - '--description', help="Short description of the group") + "--description", help="Short description of the group" + ) local_parser.set_defaults(func=do_new_group) @@ -297,31 +326,38 @@ def _parser_block_user(subparser): """ local_parser = subparser.add_parser( - 'block-user', - help='Prevents an user to interact with this pagure instance until ' - 'the specified date') - local_parser.add_argument('username', help='Name of the user to block') + "block-user", + help="Prevents an user to interact with this pagure instance until " + "the specified date", + ) + local_parser.add_argument("username", help="Name of the user to block") local_parser.add_argument( - 'date', default=None, - help='Date 
before which the user is not welcome on this pagure ' - 'instance') + "date", + default=None, + help="Date before which the user is not welcome on this pagure " + "instance", + ) local_parser.set_defaults(func=do_block_user) def parse_arguments(args=None): """ Set-up the argument parsing. """ parser = argparse.ArgumentParser( - description='The admin CLI for this pagure instance') + description="The admin CLI for this pagure instance" + ) parser.add_argument( - '-c', '--config', default=None, - help='Specify a configuration to use') + "-c", "--config", default=None, help="Specify a configuration to use" + ) parser.add_argument( - '--debug', default=False, action='store_true', - help='Increase the verbosity of the information displayed') + "--debug", + default=False, + action="store_true", + help="Increase the verbosity of the information displayed", + ) - subparser = parser.add_subparsers(title='actions') + subparser = parser.add_subparsers(title="actions") # refresh-gitolite _parser_refresh_gitolite(subparser) @@ -354,32 +390,33 @@ def parse_arguments(args=None): def _ask_confirmation(): - ''' Ask to confirm an action. - ''' - action = input('Do you want to continue? [y/N]') - return action.lower() in ['y', 'yes'] + """ Ask to confirm an action. + """ + action = input("Do you want to continue? [y/N]") + return action.lower() in ["y", "yes"] def _get_input(text): - ''' Ask the user for input. ''' + """ Ask the user for input. """ return input(text) def _get_project(arg_project, user=None): - ''' From the project specified to the CLI, extract the actual project. - ''' + """ From the project specified to the CLI, extract the actual project. 
+ """ namespace = None - if '/' in arg_project: - if arg_project.count('/') > 1: + if "/" in arg_project: + if arg_project.count("/") > 1: raise pagure.exceptions.PagureException( - 'Invalid project name, has more than one "/": %s' % - arg_project) - namespace, name = arg_project.split('/') + 'Invalid project name, has more than one "/": %s' % arg_project + ) + namespace, name = arg_project.split("/") else: name = arg_project return pagure.lib._get_project( - session, namespace=namespace, name=name, user=user) + session, namespace=namespace, name=name, user=user + ) def do_generate_acl(args): @@ -389,10 +426,10 @@ def do_generate_acl(args): :arg args: the argparse object returned by ``parse_arguments()``. """ - _log.debug('group: %s', args.group) - _log.debug('project: %s', args.project) - _log.debug('user: %s', args.user) - _log.debug('all: %s', args.all_) + _log.debug("group: %s", args.group) + _log.debug("project: %s", args.project) + _log.debug("user: %s", args.user) + _log.debug("all: %s", args.all_) title = None project = None @@ -400,34 +437,40 @@ def do_generate_acl(args): project = _get_project(args.project, user=args.user) title = project.fullname if args.all_: - title = 'all' + title = "all" project = -1 if not args.all_ and not args.project: print( - 'Please note that you have not selected a project or --all. ' - 'Do you want to recompile the existing config file?') + "Please note that you have not selected a project or --all. " + "Do you want to recompile the existing config file?" 
+ ) if not _ask_confirmation(): return helper = pagure.lib.git_auth.get_git_auth_helper( - pagure.config.config['GITOLITE_BACKEND']) - _log.debug('Got helper: %s', helper) + pagure.config.config["GITOLITE_BACKEND"] + ) + _log.debug("Got helper: %s", helper) group_obj = None if args.group: group_obj = pagure.lib.search_groups(session, group_name=args.group) _log.debug( - 'Calling helper: %s with arg: project=%s, group=%s', - helper, project, group_obj) + "Calling helper: %s with arg: project=%s, group=%s", + helper, + project, + group_obj, + ) print( - 'Do you want to re-generate the gitolite.conf file for group: %s ' - 'and project: %s?' % (group_obj, title)) + "Do you want to re-generate the gitolite.conf file for group: %s " + "and project: %s?" % (group_obj, title) + ) if _ask_confirmation(): helper.generate_acls(project=project, group=group_obj) pagure.lib.tasks.gc_clean() - print('Gitolite ACLs updated') + print("Gitolite ACLs updated") def do_refresh_ssh(_): @@ -438,12 +481,13 @@ def do_refresh_ssh(_): """ print( - 'Do you want to re-generate all the ssh keys for every user in ' - 'the database? (Depending on your instance this may take a while ' - 'and result in an outage while it lasts)') + "Do you want to re-generate all the ssh keys for every user in " + "the database? (Depending on your instance this may take a while " + "and result in an outage while it lasts)" + ) if _ask_confirmation(): generate_user_key_files() - print('User key files regenerated') + print("User key files regenerated") do_generate_acl() @@ -455,12 +499,13 @@ def do_generate_hook_token(_): """ print( - 'Do you want to re-generate all the hook token for every user in ' - 'the database? This will break every web-hook set-up on this ' - 'instance. You should only ever run this for a security issue') + "Do you want to re-generate all the hook token for every user in " + "the database? This will break every web-hook set-up on this " + "instance. 
You should only ever run this for a security issue" + ) if _ask_confirmation(): pagure.lib.generate_hook_token(session) - print('Hook token all re-generated') + print("Hook token all re-generated") def do_list_admin_token(args): @@ -469,26 +514,23 @@ def do_list_admin_token(args): :arg args: the argparse object returned by ``parse_arguments()``. """ - _log.debug('user: %s', args.user) - _log.debug('token: %s', args.token) - _log.debug('active: %s', args.active) - _log.debug('expire: %s', args.expired) - _log.debug('all: %s', args.all) + _log.debug("user: %s", args.user) + _log.debug("token: %s", args.token) + _log.debug("active: %s", args.active) + _log.debug("expire: %s", args.expired) + _log.debug("all: %s", args.all) - acls = pagure.config.config['ADMIN_API_ACLS'] + acls = pagure.config.config["ADMIN_API_ACLS"] if args.all: acls = None tokens = pagure.lib.search_token( - session, acls, - user=args.user, - active=args.active, - expired=args.expired) + session, acls, user=args.user, active=args.active, expired=args.expired + ) for token in tokens: - print('%s -- %s -- %s' % ( - token.id, token.user.user, token.expiration)) + print("%s -- %s -- %s" % (token.id, token.user.user, token.expiration)) if not tokens: - print('No admin tokens found') + print("No admin tokens found") def do_info_admin_token(args): @@ -497,17 +539,16 @@ def do_info_admin_token(args): :arg args: the argparse object returned by ``parse_arguments()``. 
""" - _log.debug('token: %s', args.token) + _log.debug("token: %s", args.token) token = pagure.lib.search_token(session, acls=None, token=args.token) if not token: - raise pagure.exceptions.PagureException('No such admin token found') + raise pagure.exceptions.PagureException("No such admin token found") - print('%s -- %s -- %s' % ( - token.id, token.user.user, token.expiration)) - print('ACLs:') + print("%s -- %s -- %s" % (token.id, token.user.user, token.expiration)) + print("ACLs:") for acl in token.acls: - print(' - %s' % acl.name) + print(" - %s" % acl.name) def do_expire_admin_token(args): @@ -516,25 +557,24 @@ def do_expire_admin_token(args): :arg args: the argparse object returned by ``parse_arguments()``. """ - _log.debug('token: %s', args.token) + _log.debug("token: %s", args.token) - acls = pagure.config.config['ADMIN_API_ACLS'] + acls = pagure.config.config["ADMIN_API_ACLS"] token = pagure.lib.search_token(session, acls, token=args.token) if not token: - raise pagure.exceptions.PagureException('No such admin token found') + raise pagure.exceptions.PagureException("No such admin token found") - print('%s -- %s -- %s' % ( - token.id, token.user.user, token.expiration)) - print('ACLs:') + print("%s -- %s -- %s" % (token.id, token.user.user, token.expiration)) + print("ACLs:") for acl in token.acls: - print(' - %s' % acl.name) + print(" - %s" % acl.name) - print('Do you really want to expire this API token?') + print("Do you really want to expire this API token?") if _ask_confirmation(): token.expiration = datetime.datetime.utcnow() session.add(token) session.commit() - print('Token expired') + print("Token expired") def do_update_admin_token(args): @@ -543,43 +583,43 @@ def do_update_admin_token(args): :arg args: the argparse object returned by ``parse_arguments()``. 
""" - _log.debug('token: %s', args.token) - _log.debug('new date: %s', args.date) + _log.debug("token: %s", args.token) + _log.debug("new date: %s", args.date) - acls = pagure.config.config['ADMIN_API_ACLS'] + acls = pagure.config.config["ADMIN_API_ACLS"] token = pagure.lib.search_token(session, acls, token=args.token) if not token: - raise pagure.exceptions.PagureException('No such admin token found') + raise pagure.exceptions.PagureException("No such admin token found") try: - date = arrow.get(args.date, 'YYYY-MM-DD').replace(tzinfo='UTC') + date = arrow.get(args.date, "YYYY-MM-DD").replace(tzinfo="UTC") except Exception as err: _log.exception(err) raise pagure.exceptions.PagureException( - 'Invalid new expiration date submitted: %s, not of the format ' - 'YYYY-MM-DD' % args.date + "Invalid new expiration date submitted: %s, not of the format " + "YYYY-MM-DD" % args.date ) if date.naive.date() <= datetime.datetime.utcnow().date(): raise pagure.exceptions.PagureException( - 'You are about to expire this API token using the wrong ' - 'command, please use: pagure-admin admin-token expire' + "You are about to expire this API token using the wrong " + "command, please use: pagure-admin admin-token expire" ) - print('%s -- %s -- %s' % ( - token.id, token.user.user, token.expiration)) - print('ACLs:') + print("%s -- %s -- %s" % (token.id, token.user.user, token.expiration)) + print("ACLs:") for acl in token.acls: - print(' - %s' % acl.name) + print(" - %s" % acl.name) print( - 'Do you really want to update this API token to expire on %s?' % - args.date) + "Do you really want to update this API token to expire on %s?" + % args.date + ) if _ask_confirmation(): token.expiration = date.naive session.add(token) session.commit() - print('Token updated') + print("Token updated") def do_create_admin_token(args): @@ -588,24 +628,24 @@ def do_create_admin_token(args): :arg args: the argparse object returned by ``parse_arguments()``. 
""" - _log.debug('user: %s', args.user) + _log.debug("user: %s", args.user) # Validate user first pagure.lib.get_user(session, args.user) - acls_list = pagure.config.config['ADMIN_API_ACLS'] + acls_list = pagure.config.config["ADMIN_API_ACLS"] for idx, acl in enumerate(acls_list): - print('%s. %s' % (idx, acl)) + print("%s. %s" % (idx, acl)) - print('Which ACLs do you want to associated with this token?') - acls = _get_input('(Comma separated list): ') - acls_idx = [int(acl.strip()) for acl in acls.split(',')] + print("Which ACLs do you want to associated with this token?") + acls = _get_input("(Comma separated list): ") + acls_idx = [int(acl.strip()) for acl in acls.split(",")] acls = [acls_list[acl] for acl in acls_idx] - print('ACLs selected:') + print("ACLs selected:") for idx, acl in enumerate(acls_idx): - print('%s. %s' % (acls_idx[idx], acls[idx])) + print("%s. %s" % (acls_idx[idx], acls[idx])) - print('Do you want to create this API token?') + print("Do you want to create this API token?") if _ask_confirmation(): print(pagure.lib.add_token_to_user(session, None, acls, args.user)) @@ -616,8 +656,8 @@ def do_get_watch_status(args): :arg args: the argparse object returned by ``parse_arguments()``. 
""" - _log.debug('user: %s', args.user) - _log.debug('project: %s', args.project) + _log.debug("user: %s", args.user) + _log.debug("project: %s", args.project) # Validate user pagure.lib.get_user(session, args.user) @@ -626,21 +666,28 @@ def do_get_watch_status(args): if project is None: raise pagure.exceptions.PagureException( - 'No project found with: %s' % args.project) + "No project found with: %s" % args.project + ) - level = pagure.lib.get_watch_level_on_repo( - session=session, - user=args.user, - repo=project.name, - repouser=None, - namespace=project.namespace) or [] + level = ( + pagure.lib.get_watch_level_on_repo( + session=session, + user=args.user, + repo=project.name, + repouser=None, + namespace=project.namespace, + ) + or [] + ) # Specify that issues == 'issues & PRs' - if 'issues' in level: - level.append('pull-requests') + if "issues" in level: + level.append("pull-requests") - print('On %s user: %s is watching the following items: %s' % ( - project.fullname, args.user, ', '.join(level) or None)) + print( + "On %s user: %s is watching the following items: %s" + % (project.fullname, args.user, ", ".join(level) or None) + ) def do_update_watch_status(args): @@ -650,41 +697,43 @@ def do_update_watch_status(args): """ - _log.debug('user: %s', args.user) - _log.debug('status: %s', args.status) - _log.debug('project: %s', args.project) + _log.debug("user: %s", args.user) + _log.debug("status: %s", args.status) + _log.debug("project: %s", args.project) # Validate user pagure.lib.get_user(session, args.user) # Ask the status if none were given if args.status is None: - print('The watch status can be one of the following: ') + print("The watch status can be one of the following: ") for lvl in WATCH: - print('%s: %s' % (lvl, WATCH[lvl])) - args.status = _get_input('Status:') + print("%s: %s" % (lvl, WATCH[lvl])) + args.status = _get_input("Status:") # Validate the status if args.status not in WATCH: raise pagure.exceptions.PagureException( - 'Invalid status 
provided: %s not in %s' % ( - args.status, ', '.join(sorted(WATCH.keys())))) + "Invalid status provided: %s not in %s" + % (args.status, ", ".join(sorted(WATCH.keys()))) + ) # Get the project project = _get_project(args.project) if project is None: raise pagure.exceptions.PagureException( - 'No project found with: %s' % args.project) + "No project found with: %s" % args.project + ) - print('Updating watch status of %s to %s (%s) on %s' % ( - args.user, args.status, WATCH[args.status], args.project)) + print( + "Updating watch status of %s to %s (%s) on %s" + % (args.user, args.status, WATCH[args.status], args.project) + ) pagure.lib.update_watch_status( - session=session, - project=project, - user=args.user, - watch=args.status) + session=session, project=project, user=args.user, watch=args.status + ) session.commit() @@ -695,9 +744,9 @@ def do_read_only(args): """ - _log.debug('project: %s', args.project) - _log.debug('user: %s', args.user) - _log.debug('read-only: %s', args.ro) + _log.debug("project: %s", args.project) + _log.debug("user: %s", args.user) + _log.debug("read-only: %s", args.ro) # Validate user pagure.lib.get_user(session, args.user) @@ -707,26 +756,30 @@ def do_read_only(args): if project is None: raise pagure.exceptions.PagureException( - 'No project found with: %s' % args.project) + "No project found with: %s" % args.project + ) # Validate ro flag - if args.ro and args.ro.lower() not in ['true', 'false']: + if args.ro and args.ro.lower() not in ["true", "false"]: raise pagure.exceptions.PagureException( - 'Invalid read-only status specified: %s is not in: ' - 'true, false' % args.ro.lower()) + "Invalid read-only status specified: %s is not in: " + "true, false" % args.ro.lower() + ) if not args.ro: print( - 'The current read-only flag of the project %s is set to %s' % ( - project.fullname, project.read_only)) + "The current read-only flag of the project %s is set to %s" + % (project.fullname, project.read_only) + ) else: 
pagure.lib.update_read_only_mode( - session, project, read_only=(args.ro.lower() == 'true') + session, project, read_only=(args.ro.lower() == "true") ) session.commit() print( - 'The read-only flag of the project %s has been set to %s' % ( - project.fullname, args.ro.lower() == 'true')) + "The read-only flag of the project %s has been set to %s" + % (project.fullname, args.ro.lower() == "true") + ) def do_new_group(args): @@ -736,24 +789,26 @@ def do_new_group(args): """ - _log.debug('name: %s', args.group_name) - _log.debug('display-name: %s', args.display) - _log.debug('description: %s', args.description) - _log.debug('username: %s', args.username) + _log.debug("name: %s", args.group_name) + _log.debug("display-name: %s", args.display) + _log.debug("description: %s", args.description) + _log.debug("username: %s", args.username) # Validate user pagure.lib.get_user(session, args.username) if not args.username: raise pagure.exceptions.PagureException( - 'An username must be provided to associate with the group') + "An username must be provided to associate with the group" + ) if not args.display: raise pagure.exceptions.PagureException( - 'A display name must be provided for the group') + "A display name must be provided for the group" + ) - if pagure.config.config.get('ENABLE_GROUP_MNGT') is False: - print('Group management has been turned off for this pagure instance') + if pagure.config.config.get("ENABLE_GROUP_MNGT") is False: + print("Group management has been turned off for this pagure instance") if not _ask_confirmation(): return @@ -762,13 +817,13 @@ def do_new_group(args): group_name=args.group_name, display_name=args.display, description=args.description, - group_type='user', + group_type="user", user=args.username, is_admin=True, - blacklist=pagure.config.config['BLACKLISTED_GROUPS'], + blacklist=pagure.config.config["BLACKLISTED_GROUPS"], ) session.commit() - print('Group `%s` created.' % args.group_name) + print("Group `%s` created." 
% args.group_name) print(msg) @@ -780,27 +835,32 @@ def do_block_user(args): """ - _log.debug('username: %s', args.username) - _log.debug('date: %s', args.date) + _log.debug("username: %s", args.username) + _log.debug("date: %s", args.date) if not args.username: raise pagure.exceptions.PagureException( - 'An username must be specified') + "An username must be specified" + ) try: - date = arrow.get(args.date, 'YYYY-MM-DD').replace(tzinfo='UTC') + date = arrow.get(args.date, "YYYY-MM-DD").replace(tzinfo="UTC") except Exception as err: _log.exception(err) raise pagure.exceptions.PagureException( - 'Invalid date submitted: %s, not of the format ' - 'YYYY-MM-DD' % args.date + "Invalid date submitted: %s, not of the format " + "YYYY-MM-DD" % args.date ) # Validate user user = pagure.lib.get_user(session, args.username) - print('The user `%s` will be blocked from all interaction with this ' - 'pagure instance until: %s.', user.username, date.isoformat()) + print( + "The user `%s` will be blocked from all interaction with this " + "pagure instance until: %s.", + user.username, + date.isoformat(), + ) if not _ask_confirmation(): return @@ -817,13 +877,13 @@ def main(): if args.config: config = args.config - if not config.startswith('/'): + if not config.startswith("/"): config = os.path.join(os.getcwd(), config) - os.environ['PAGURE_CONFIG'] = config + os.environ["PAGURE_CONFIG"] = config global session, _config _config = pagure.config.reload_config() - session = pagure.lib.create_session(_config['DB_URL']) + session = pagure.lib.create_session(_config["DB_URL"]) logging.basicConfig() if args.debug: @@ -840,7 +900,7 @@ def main(): print(err) return_code = 3 except Exception as err: - print('Error: {0}'.format(err)) + print("Error: {0}".format(err)) logging.exception("Generic error catched:") return_code = 2 finally: @@ -849,5 +909,5 @@ def main(): return return_code -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main()) diff --git a/pagure/config.py 
b/pagure/config.py index 64bfdd5..9c60bae 100644 --- a/pagure/config.py +++ b/pagure/config.py @@ -17,34 +17,26 @@ import flask # noqa: E402 def reload_config(): """ Reload the configuration. """ config = flask.config.Config( - os.path.dirname(os.path.abspath(__file__)), - flask.Flask.default_config + os.path.dirname(os.path.abspath(__file__)), flask.Flask.default_config ) - config.from_object('pagure.default_config') + config.from_object("pagure.default_config") - if 'PAGURE_CONFIG' in os.environ: - config.from_envvar('PAGURE_CONFIG') + if "PAGURE_CONFIG" in os.environ: + config.from_envvar("PAGURE_CONFIG") # These were previously respected config values, but as explained # in https://pagure.io/pagure/issue/2991 they don't really work # as expected and their values must be based on GIT_FOLDER. # To prevent large changes throughout the codebase, we omitted them # from config and we add them here. - if config['ENABLE_DOCS']: - config['DOCS_FOLDER'] = os.path.join( - config['GIT_FOLDER'], - 'docs' + if config["ENABLE_DOCS"]: + config["DOCS_FOLDER"] = os.path.join(config["GIT_FOLDER"], "docs") + if config["ENABLE_TICKETS"]: + config["TICKETS_FOLDER"] = os.path.join( + config["GIT_FOLDER"], "tickets" ) - if config['ENABLE_TICKETS']: - config['TICKETS_FOLDER'] = os.path.join( - config['GIT_FOLDER'], - 'tickets' - ) - config['REQUESTS_FOLDER'] = os.path.join( - config['GIT_FOLDER'], - 'requests' - ) + config["REQUESTS_FOLDER"] = os.path.join(config["GIT_FOLDER"], "requests") return config diff --git a/pagure/decorators.py b/pagure/decorators.py index 5e65215..bae71e1 100644 --- a/pagure/decorators.py +++ b/pagure/decorators.py @@ -21,15 +21,17 @@ def has_issue_tracker(function): issue tracker active If not active returns a 404 page """ + @wraps(function) def check_issue_tracker(*args, **kwargs): repo = flask.g.repo - if not repo.settings.get('issue_tracker', True): - flask.abort(404, 'No issue tracker found for this project') + if not repo.settings.get("issue_tracker", 
True): + flask.abort(404, "No issue tracker found for this project") # forbid all POST requests if the issue tracker is made read-only - if flask.request.method == 'POST' and \ - repo.settings.get('issue_tracker_read_only', False): - flask.abort(401, 'The issue tracker for this project is read-only') + if flask.request.method == "POST" and repo.settings.get( + "issue_tracker_read_only", False + ): + flask.abort(401, "The issue tracker for this project is read-only") return function(*args, **kwargs) return check_issue_tracker @@ -41,12 +43,17 @@ def is_repo_admin(function): the project. If not active returns a 403 page """ + @wraps(function) def check_repo_admin(*args, **kwargs): if not flask.g.repo_admin: - flask.abort(403, 'You are not allowed to change the ' - 'settings for this project') + flask.abort( + 403, + "You are not allowed to change the " + "settings for this project", + ) return function(*args, **kwargs) + return check_repo_admin @@ -55,12 +62,15 @@ def is_admin_sess_timedout(function): Decorator that checks if the admin session has timeout. 
If not true redirect to the login page """ + @wraps(function) def check_session_timeout(*args, **kwargs): if admin_session_timedout(): - if flask.request.method == 'POST': - flask.flash('Action canceled, try it again', 'error') + if flask.request.method == "POST": + flask.flash("Action canceled, try it again", "error") return flask.redirect( - flask.url_for('auth_login', next=flask.request.url)) + flask.url_for("auth_login", next=flask.request.url) + ) return function(*args, **kwargs) + return check_session_timeout diff --git a/pagure/default_config.py b/pagure/default_config.py index 5304200..bf9415d 100644 --- a/pagure/default_config.py +++ b/pagure/default_config.py @@ -20,14 +20,14 @@ from pagure.mail_logging import ContextInjector, MSG_FORMAT ADMIN_SESSION_LIFETIME = timedelta(minutes=20) # secret key used to generate unique csrf token -SECRET_KEY = '' +SECRET_KEY = "" # url to the database server: -DB_URL = 'sqlite:////var/tmp/pagure_dev.sqlite' +DB_URL = "sqlite:////var/tmp/pagure_dev.sqlite" # Name the instance, used in the welcome screen upon first login (not # working with `local` auth) -INSTANCE_NAME = 'Pagure' +INSTANCE_NAME = "Pagure" # url to datagrepper (optional): # DATAGREPPER_URL = 'https://apps.fedoraproject.org/datagrepper' @@ -37,7 +37,7 @@ INSTANCE_NAME = 'Pagure' FEDMSG_NOTIFICATIONS = False # The FAS group in which the admin of pagure are -ADMIN_GROUP = 'sysadmin-main' +ADMIN_GROUP = "sysadmin-main" # Hard-code a list of users that are global admins PAGURE_ADMIN_USERS = [] @@ -46,10 +46,10 @@ PAGURE_ADMIN_USERS = [] EMAIL_SEND = False # The email address to which the flask.log will send the errors (tracebacks) -EMAIL_ERROR = 'root@localhost.localdomain' +EMAIL_ERROR = "root@localhost.localdomain" # The URL at which the project is available. 
-APP_URL = 'http://localhost.localdomain/' +APP_URL = "http://localhost.localdomain/" # Enables / Disables tickets for project for the entire pagure instance ENABLE_TICKETS = True @@ -90,22 +90,22 @@ DEPLOY_KEY = True PR_TARGET_MATCHING_BRANCH = False # Enables / Disables showing all the projects by default on the front page -SHOW_PROJECTS_INDEX = ['repos', 'myrepos', 'myforks'] +SHOW_PROJECTS_INDEX = ["repos", "myrepos", "myforks"] # The URL to use to clone the git repositories. -GIT_URL_SSH = 'ssh://git@localhost.localdomain/' -GIT_URL_GIT = 'git://localhost.localdomain/' +GIT_URL_SSH = "ssh://git@localhost.localdomain/" +GIT_URL_GIT = "git://localhost.localdomain/" # Set to True if git ssh URLs should be displayed even if user # doesn't have SSH key uploaded ALWAYS_RENDER_SSH_CLONE_URL = False # Default queue names for the different services -WEBHOOK_CELERY_QUEUE = 'pagure_webhook' -LOGCOM_CELERY_QUEUE = 'pagure_logcom' -LOADJSON_CELERY_QUEUE = 'pagure_loadjson' -CI_CELERY_QUEUE = 'pagure_ci' -MIRRORING_QUEUE = 'pagure_mirror' +WEBHOOK_CELERY_QUEUE = "pagure_webhook" +LOGCOM_CELERY_QUEUE = "pagure_logcom" +LOADJSON_CELERY_QUEUE = "pagure_loadjson" +CI_CELERY_QUEUE = "pagure_ci" +MIRRORING_QUEUE = "pagure_mirror" # Number of items displayed per page ITEM_PER_PAGE = 48 @@ -114,7 +114,7 @@ ITEM_PER_PAGE = 48 MAX_CONTENT_LENGTH = 4 * 1024 * 1024 # 4 megabytes # IP addresses allowed to access the internal endpoints -IP_ALLOWED_INTERNAL = ['127.0.0.1', 'localhost', '::1'] +IP_ALLOWED_INTERNAL = ["127.0.0.1", "localhost", "::1"] # Worker configuration CELERY_CONFIG = {} @@ -122,37 +122,28 @@ CELERY_CONFIG = {} # Redis configuration EVENTSOURCE_SOURCE = None WEBHOOK = False -REDIS_HOST = '0.0.0.0' +REDIS_HOST = "0.0.0.0" REDIS_PORT = 6379 REDIS_DB = 0 EVENTSOURCE_PORT = 8080 # Folder where to place the ssh keys for the mirroring feature -MIRROR_SSHKEYS_FOLDER = '/var/lib/pagure/sshkeys/' +MIRROR_SSHKEYS_FOLDER = "/var/lib/pagure/sshkeys/" # Folder containing to the git 
repos # Note that this must be exactly the same as GL_REPO_BASE in gitolite.rc GIT_FOLDER = os.path.join( - os.path.abspath(os.path.dirname(__file__)), - '..', - 'lcl', - 'repos' + os.path.abspath(os.path.dirname(__file__)), "..", "lcl", "repos" ) # Folder containing the clones for the remote pull-requests REMOTE_GIT_FOLDER = os.path.join( - os.path.abspath(os.path.dirname(__file__)), - '..', - 'lcl', - 'remotes' + os.path.abspath(os.path.dirname(__file__)), "..", "lcl", "remotes" ) # Folder containing attachments ATTACHMENTS_FOLDER = os.path.join( - os.path.abspath(os.path.dirname(__file__)), - '..', - 'lcl', - 'attachments' + os.path.abspath(os.path.dirname(__file__)), "..", "lcl", "attachments" ) # Whether to enable scanning for viruses in attachments @@ -160,19 +151,13 @@ VIRUS_SCAN_ATTACHMENTS = False # Configuration file for gitolite GITOLITE_CONFIG = os.path.join( - os.path.abspath(os.path.dirname(__file__)), - '..', - 'lcl', - 'gitolite.conf' + os.path.abspath(os.path.dirname(__file__)), "..", "lcl", "gitolite.conf" ) # Configuration keys to specify where the upload folder is and what is its # name UPLOAD_FOLDER_PATH = os.path.join( - os.path.abspath(os.path.dirname(__file__)), - '..', - 'lcl', - 'releases' + os.path.abspath(os.path.dirname(__file__)), "..", "lcl", "releases" ) @@ -186,7 +171,7 @@ GITOLITE_VERSION = 3 GITOLITE_KEYDIR = None # Backend to use to write down the gitolite configuration file -GITOLITE_BACKEND = 'gitolite3' +GITOLITE_BACKEND = "gitolite3" # Whether or not this installation of Pagure should use `gitolite compile-1` # to improve speed of some gitolite operations. 
See documentation for more @@ -200,7 +185,7 @@ GL_BINDIR = None # SMTP settings -SMTP_SERVER = 'localhost' +SMTP_SERVER = "localhost" SMTP_PORT = 25 SMTP_SSL = False @@ -210,10 +195,10 @@ SMTP_PASSWORD = None # Email used to sent emails -FROM_EMAIL = 'pagure@localhost.localdomain' +FROM_EMAIL = "pagure@localhost.localdomain" -DOMAIN_EMAIL_NOTIFICATIONS = 'localhost.localdomain' -SALT_EMAIL = '' +DOMAIN_EMAIL_NOTIFICATIONS = "localhost.localdomain" +SALT_EMAIL = "" # Specify which authentication method to use. # Refer to @@ -222,7 +207,7 @@ SALT_EMAIL = '' # Available options: `fas`, `openid`, `oidc`, `local` # Default: ``local``. -PAGURE_AUTH = 'local' +PAGURE_AUTH = "local" # If PAGURE_AUTH is set to 'oidc', the following variables must be set: # The path to JSON file with client secrets (provided by your IdP) @@ -257,7 +242,7 @@ PAGURE_AUTH = 'local' # be set to True in production. # Default: ``True``. SESSION_COOKIE_SECURE = False -SESSION_COOKIE_NAME = 'pagure' +SESSION_COOKIE_NAME = "pagure" # Boolean specifying whether to check the user's IP address when retrieving # its session. 
This make things more secure (thus is on by default) but @@ -269,72 +254,93 @@ CHECK_SESSION_IP = True SHORT_LENGTH = 6 # Used by SESSION_COOKIE_PATH -APPLICATION_ROOT = '/' +APPLICATION_ROOT = "/" # List of blacklisted project names BLACKLISTED_PROJECTS = [ - 'static', 'pv', 'releases', 'new', 'api', 'settings', 'search', 'fork', - 'logout', 'login', 'user', 'users', 'groups', 'projects', 'ssh_info', - 'issues', 'pull-requests', 'commits', 'tree', 'forks', 'admin', 'c', - 'wait', 'docs/*, tickets/*, requests/*' + "static", + "pv", + "releases", + "new", + "api", + "settings", + "search", + "fork", + "logout", + "login", + "user", + "users", + "groups", + "projects", + "ssh_info", + "issues", + "pull-requests", + "commits", + "tree", + "forks", + "admin", + "c", + "wait", + "docs/*, tickets/*, requests/*", ] # List of prefix allowed in project names ALLOWED_PREFIX = [] # List of blacklisted group names -BLACKLISTED_GROUPS = ['forks', 'group'] +BLACKLISTED_GROUPS = ["forks", "group"] ACLS = { - 'create_branch': 'Create a git branch on a project', - 'create_project': 'Create a new project', - 'commit_flag': 'Flag a commit', - 'fork_project': 'Fork a project', - 'generate_acls_project': 'Generate the Gitolite ACLs on a project', - 'issue_assign': 'Assign issue to someone', - 'issue_change_status': 'Change the status of a ticket', - 'issue_comment': 'Comment on a ticket', - 'issue_create': 'Create a new ticket', - 'issue_subscribe': 'Subscribe the user with this token to an issue', - 'issue_update': 'Update an issue, status, comments, custom fields...', - 'issue_update_custom_fields': 'Update the custom fields of an issue', - 'issue_update_milestone': 'Update the milestone of an issue', - 'modify_project': 'Modify an existing project', - 'pull_request_create': 'Open a new pull-request', - 'pull_request_close': 'Close a pull-request', - 'pull_request_comment': 'Comment on a pull-request', - 'pull_request_flag': 'Flag a pull-request', - 'pull_request_merge': 'Merge a 
pull-request', - 'pull_request_subscribe': - 'Subscribe the user with this token to a pull-request', - 'update_watch_status': 'Update the watch status on a project', + "create_branch": "Create a git branch on a project", + "create_project": "Create a new project", + "commit_flag": "Flag a commit", + "fork_project": "Fork a project", + "generate_acls_project": "Generate the Gitolite ACLs on a project", + "issue_assign": "Assign issue to someone", + "issue_change_status": "Change the status of a ticket", + "issue_comment": "Comment on a ticket", + "issue_create": "Create a new ticket", + "issue_subscribe": "Subscribe the user with this token to an issue", + "issue_update": "Update an issue, status, comments, custom fields...", + "issue_update_custom_fields": "Update the custom fields of an issue", + "issue_update_milestone": "Update the milestone of an issue", + "modify_project": "Modify an existing project", + "pull_request_create": "Open a new pull-request", + "pull_request_close": "Close a pull-request", + "pull_request_comment": "Comment on a pull-request", + "pull_request_flag": "Flag a pull-request", + "pull_request_merge": "Merge a pull-request", + "pull_request_subscribe": ( + "Subscribe the user with this token to a pull-request" + ), + "update_watch_status": "Update the watch status on a project", } # List of ACLs which a regular user is allowed to associate to an API token # from the ACLs above -USER_ACLS = [key for key in ACLS.keys() if key != 'generate_acls_project'] +USER_ACLS = [key for key in ACLS.keys() if key != "generate_acls_project"] # From the ACLs above lists which ones are tolerated to be associated with # an API token that isn't linked to a particular project. 
CROSS_PROJECT_ACLS = [ - 'create_project', - 'fork_project', - 'modify_project', - 'update_watch_status', + "create_project", + "fork_project", + "modify_project", + "update_watch_status", ] # ACLs with which admins are allowed to create project-less API tokens ADMIN_API_ACLS = [ - 'issue_comment', - 'issue_create', - 'issue_change_status', - 'pull_request_flag', - 'pull_request_comment', - 'pull_request_merge', - 'generate_acls_project', - 'commit_flag', - 'create_branch', + "issue_comment", + "issue_create", + "issue_change_status", + "pull_request_flag", + "pull_request_comment", + "pull_request_merge", + "generate_acls_project", + "commit_flag", + "create_branch", ] # List of the type of CI service supported by this pagure instance @@ -347,18 +353,18 @@ USER_NAMESPACE = False # unless the user has direct access to it. EXCLUDE_GROUP_INDEX = [] -TRIGGER_CI = ['pretty please pagure-ci rebuild'] +TRIGGER_CI = ["pretty please pagure-ci rebuild"] FLAG_STATUSES_LABELS = { - 'success': 'badge-success', - 'failure': 'badge-danger', - 'error': 'badge-danger', - 'pending': 'badge-info', - 'canceled': 'badge-warning', + "success": "badge-success", + "failure": "badge-danger", + "error": "badge-danger", + "pending": "badge-info", + "canceled": "badge-warning", } -FLAG_SUCCESS = 'success' -FLAG_FAILURE = 'failure' -FLAG_PENDING = 'pending' +FLAG_SUCCESS = "success" +FLAG_FAILURE = "failure" +FLAG_PENDING = "pending" # Never enable this option, this is intended for tests only, and can allow # easy denial of service to the system if enabled. 
@@ -374,87 +380,74 @@ STOMP_CREDS_PASSWORD = None STOMP_HIERARCHY = None LOGGING = { - 'version': 1, - 'disable_existing_loggers': False, - 'formatters': { - 'standard': { - 'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s' + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "standard": { + "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s" }, - 'email_format': { - 'format': MSG_FORMAT - } + "email_format": {"format": MSG_FORMAT}, }, - 'filters': { - 'myfilter': { - '()': ContextInjector, - } - }, - 'handlers': { - 'console': { - 'level': 'INFO', - 'formatter': 'standard', - 'class': 'logging.StreamHandler', - 'stream': 'ext://sys.stdout', + "filters": {"myfilter": {"()": ContextInjector}}, + "handlers": { + "console": { + "level": "INFO", + "formatter": "standard", + "class": "logging.StreamHandler", + "stream": "ext://sys.stdout", }, - 'email': { - 'level': 'ERROR', - 'formatter': 'email_format', - 'class': 'logging.handlers.SMTPHandler', - 'mailhost': 'localhost', - 'fromaddr': 'pagure@localhost', - 'toaddrs': 'root@localhost', - 'subject': 'ERROR on pagure', - 'filters': ['myfilter'], + "email": { + "level": "ERROR", + "formatter": "email_format", + "class": "logging.handlers.SMTPHandler", + "mailhost": "localhost", + "fromaddr": "pagure@localhost", + "toaddrs": "root@localhost", + "subject": "ERROR on pagure", + "filters": ["myfilter"], }, }, # The root logger configuration; this is a catch-all configuration # that applies to all log messages not handled by a different logger - 'root': { - 'level': 'INFO', - 'handlers': ['console'], - }, - 'loggers': { - 'pagure': { - 'handlers': ['console'], - 'level': 'DEBUG', - 'propagate': True + "root": {"level": "INFO", "handlers": ["console"]}, + "loggers": { + "pagure": { + "handlers": ["console"], + "level": "DEBUG", + "propagate": True, }, - 'flask': { - 'handlers': ['console'], - 'level': 'INFO', - 'propagate': False + "flask": { + "handlers": ["console"], + "level": 
"INFO", + "propagate": False, }, - 'sqlalchemy': { - 'handlers': ['console'], - 'level': 'WARN', - 'propagate': False + "sqlalchemy": { + "handlers": ["console"], + "level": "WARN", + "propagate": False, }, - 'binaryornot': { - 'handlers': ['console'], - 'level': 'WARN', - 'propagate': True + "binaryornot": { + "handlers": ["console"], + "level": "WARN", + "propagate": True, }, - 'MARKDOWN': { - 'handlers': ['console'], - 'level': 'WARN', - 'propagate': True + "MARKDOWN": { + "handlers": ["console"], + "level": "WARN", + "propagate": True, }, - 'PIL': { - 'handlers': ['console'], - 'level': 'WARN', - 'propagate': True + "PIL": {"handlers": ["console"], "level": "WARN", "propagate": True}, + "chardet": { + "handlers": ["console"], + "level": "WARN", + "propagate": True, }, - 'chardet': { - 'handlers': ['console'], - 'level': 'WARN', - 'propagate': True + "pagure.lib.encoding_utils": { + "handlers": ["console"], + "level": "WARN", + "propagate": False, }, - 'pagure.lib.encoding_utils': { - 'handlers': ['console'], - 'level': 'WARN', - 'propagate': False - }, - } + }, } # Gives commit access to all, all but some or just some project based on @@ -468,10 +461,10 @@ REQUIRED_GROUPS = {} # Predefined reactions. Selecting others is possible by typing their name. The # order here will be preserved in the web UI picker for reactions. REACTIONS = [ - ("Thumbs up", "emojione-1F44D"), # Thumbs up + ("Thumbs up", "emojione-1F44D"), # Thumbs up ("Thumbs down", "emojione-1F44E"), # Thumbs down - ("Confused", "emojione-1F615"), # Confused - ("Heart", "emojione-2764"), # Heart + ("Confused", "emojione-1F615"), # Confused + ("Heart", "emojione-2764"), # Heart ] # This is used for faster indexing. Do not change. 
_REACTIONS_DICT = dict(REACTIONS) diff --git a/pagure/doc_utils.py b/pagure/doc_utils.py index efcbf41..b54643a 100644 --- a/pagure/doc_utils.py +++ b/pagure/doc_utils.py @@ -27,17 +27,14 @@ import pagure.lib.encoding_utils def modify_rst(rst, view_file_url=None): """ Downgrade some of our rst directives if docutils is too old. """ if view_file_url: - rst = rst.replace( - '.. image:: ', - '.. image:: %s' % view_file_url - ) + rst = rst.replace(".. image:: ", ".. image:: %s" % view_file_url) # We catch Exception if we want :-p # pylint: disable=broad-except try: # The rst features we need were introduced in this version minimum = [0, 9] - version = [int(cpt) for cpt in docutils.__version__.split('.')] + version = [int(cpt) for cpt in docutils.__version__.split(".")] # If we're at or later than that version, no need to downgrade if version >= minimum: @@ -49,9 +46,7 @@ def modify_rst(rst, view_file_url=None): # On Fedora this will never work as the docutils version is to recent # Otherwise, make code-blocks into just literal blocks. - substitutions = { # pragma: no cover - '.. code-block:: javascript': '::', - } + substitutions = {".. 
code-block:: javascript": "::"} # pragma: no cover for old, new in substitutions.items(): # pragma: no cover rst = rst.replace(old, new) @@ -64,13 +59,14 @@ def modify_html(html): """ substitutions = { - '': '', - '': '', - '$$FLAG_STATUSES_COMMAS$$': - ', '.join(sorted(pagure_config['FLAG_STATUSES_LABELS'].keys())), - '$$FLAG_SUCCESS$$': pagure_config['FLAG_SUCCESS'], - '$$FLAG_FAILURE$$': pagure_config['FLAG_FAILURE'], - '$$FLAG_PENDING$$': pagure_config['FLAG_PENDING'], + '': "", + "": "", + "$$FLAG_STATUSES_COMMAS$$": ", ".join( + sorted(pagure_config["FLAG_STATUSES_LABELS"].keys()) + ), + "$$FLAG_SUCCESS$$": pagure_config["FLAG_SUCCESS"], + "$$FLAG_FAILURE$$": pagure_config["FLAG_FAILURE"], + "$$FLAG_PENDING$$": pagure_config["FLAG_PENDING"], } for old, new in substitutions.items(): html = html.replace(old, new) @@ -82,18 +78,17 @@ def convert_doc(rst_string, view_file_url=None): """ Utility to load an RST file and turn it into fancy HTML. """ rst = modify_rst(rst_string, view_file_url) - overrides = {'report_level': 'quiet'} + overrides = {"report_level": "quiet"} try: html = docutils.core.publish_parts( - source=rst, - writer_name='html', - settings_overrides=overrides) + source=rst, writer_name="html", settings_overrides=overrides + ) except Exception: - return '
%s
' % jinja2.escape(rst) + return "
%s
" % jinja2.escape(rst) else: - html_string = html['html_body'] + html_string = html["html_body"] html_string = modify_html(html_string) @@ -102,20 +97,20 @@ def convert_doc(rst_string, view_file_url=None): def convert_readme(content, ext, view_file_url=None): - ''' Convert the provided content according to the extension of the file + """ Convert the provided content according to the extension of the file provided. - ''' + """ output = pagure.lib.encoding_utils.decode(ktc.to_bytes(content)) safe = False - if ext and ext in ['.rst']: + if ext and ext in [".rst"]: safe = True output = convert_doc(output, view_file_url) - elif ext and ext in ['.mk', '.md', '.markdown']: + elif ext and ext in [".mk", ".md", ".markdown"]: output = pagure.lib.text2markdown(output, readme=True) safe = True - elif not ext or (ext and ext in ['.text', '.txt']): + elif not ext or (ext and ext in [".text", ".txt"]): safe = True - output = '
%s
' % jinja2.escape(output) + output = "
%s
" % jinja2.escape(output) return output, safe diff --git a/pagure/docs_server.py b/pagure/docs_server.py index c516f3e..d4ebe63 100644 --- a/pagure/docs_server.py +++ b/pagure/docs_server.py @@ -31,23 +31,27 @@ APP = flask.Flask(__name__) # set up FAS APP.config = pagure.config.reload_config() -SESSION = pagure.lib.create_session(APP.config['DB_URL']) +SESSION = pagure.lib.create_session(APP.config["DB_URL"]) if not APP.debug: - APP.logger.addHandler(pagure.mail_logging.get_mail_handler( - smtp_server=APP.config.get('SMTP_SERVER', '127.0.0.1'), - mail_admin=APP.config.get('MAIL_ADMIN', APP.config['EMAIL_ERROR']), - from_email=APP.config.get('FROM_EMAIL', 'pagure@fedoraproject.org') - )) + APP.logger.addHandler( + pagure.mail_logging.get_mail_handler( + smtp_server=APP.config.get("SMTP_SERVER", "127.0.0.1"), + mail_admin=APP.config.get("MAIL_ADMIN", APP.config["EMAIL_ERROR"]), + from_email=APP.config.get( + "FROM_EMAIL", "pagure@fedoraproject.org" + ), + ) + ) # Send classic logs into syslog SHANDLER = logging.StreamHandler() -SHANDLER.setLevel(APP.config.get('log_level', 'INFO')) +SHANDLER.setLevel(APP.config.get("log_level", "INFO")) APP.logger.addHandler(SHANDLER) _log = logging.getLogger(__name__) -TMPL_HTML = ''' +TMPL_HTML = """ @@ -63,50 +67,57 @@ TMPL_HTML = ''' {content} -''' +""" def __get_tree(repo_obj, tree, filepath, index=0, extended=False): - ''' Retrieve the entry corresponding to the provided filename in a + """ Retrieve the entry corresponding to the provided filename in a given tree. 
- ''' + """ filename = filepath[index] if isinstance(tree, pygit2.Blob): # pragma: no cover # If we were given a blob, then let's just return it return (tree, None, None) for element in tree: - if element.name == filename or \ - (not filename and element.name.startswith('index')): + if element.name == filename or ( + not filename and element.name.startswith("index") + ): # If we have a folder we must go one level deeper if element.filemode == 16384: if (index + 1) == len(filepath): - filepath.append('') + filepath.append("") return __get_tree( - repo_obj, repo_obj[element.oid], filepath, - index=index + 1, extended=True) + repo_obj, + repo_obj[element.oid], + filepath, + index=index + 1, + extended=True, + ) else: return (element, tree, False) - if filename == '': + if filename == "": return (None, tree, extended) else: raise pagure.exceptions.FileNotFoundException( - 'File %s not found' % ('/'.join(filepath),)) + "File %s not found" % ("/".join(filepath),) + ) def __get_tree_and_content(repo_obj, commit, path): - ''' Return the tree and the content of the specified file. ''' + """ Return the tree and the content of the specified file. 
""" (blob_or_tree, tree_obj, extended) = __get_tree( - repo_obj, commit.tree, path) + repo_obj, commit.tree, path + ) if blob_or_tree is None: return (tree_obj, None, None) if not repo_obj[blob_or_tree.oid]: # Not tested and no idea how to test it, but better safe than sorry - flask.abort(404, 'File not found') + flask.abort(404, "File not found") if isinstance(blob_or_tree, pygit2.TreeEntry): # Returned a file filename = blob_or_tree.name @@ -115,9 +126,10 @@ def __get_tree_and_content(repo_obj, commit, path): if not is_binary_string(blob_obj.data): try: content, safe = pagure.doc_utils.convert_readme( - blob_obj.data, ext) + blob_obj.data, ext + ) if safe: - filename = name + '.html' + filename = name + ".html" except pagure.exceptions.PagureEncodingException: content = blob_obj.data else: @@ -127,32 +139,33 @@ def __get_tree_and_content(repo_obj, commit, path): return (tree, content, filename) -@APP.route('//') -@APP.route('/./') -@APP.route('//') -@APP.route('/./') -@APP.route('/fork///') -@APP.route('/fork/.//') -@APP.route('/fork///') -@APP.route('/fork/.//') +@APP.route("//") +@APP.route("/./") +@APP.route("//") +@APP.route("/./") +@APP.route("/fork///") +@APP.route("/fork/.//") +@APP.route("/fork///") +@APP.route("/fork/.//") def view_docs(repo, username=None, namespace=None, filename=None): """ Display the documentation """ - if '.' in repo: - namespace, repo = repo.split('.', 1) + if "." 
in repo: + namespace, repo = repo.split(".", 1) repo = pagure.lib.get_authorized_project( - SESSION, repo, user=username, namespace=namespace) + SESSION, repo, user=username, namespace=namespace + ) if not repo: - flask.abort(404, 'Project not found') + flask.abort(404, "Project not found") - if not repo.settings.get('project_documentation', True): - flask.abort(404, 'This project has documentation disabled') + if not repo.settings.get("project_documentation", True): + flask.abort(404, "This project has documentation disabled") - reponame = os.path.join(APP.config['DOCS_FOLDER'], repo.path) + reponame = os.path.join(APP.config["DOCS_FOLDER"], repo.path) if not os.path.exists(reponame): - flask.abort(404, 'Documentation not found') + flask.abort(404, "Documentation not found") repo_obj = pygit2.Repository(reponame) @@ -162,43 +175,44 @@ def view_docs(repo, username=None, namespace=None, filename=None): flask.abort( 404, flask.Markup( - 'No content found in the repository, you may want to read ' + "No content found in the repository, you may want to read " 'the ' - 'Using the doc repository of your project documentation.' - ) + "Using the doc repository of your project documentation." + ), ) content = None tree = None if not filename: - path = [''] + path = [""] else: - path = [it for it in filename.split('/') if it] + path = [it for it in filename.split("/") if it] if commit: try: (tree, content, filename) = __get_tree_and_content( - repo_obj, commit, path) + repo_obj, commit, path + ) except pagure.exceptions.FileNotFoundException as err: - flask.flash('%s' % err, 'error') + flask.flash("%s" % err, "error") except Exception as err: _log.exception(err) - flask.abort(500, 'Unkown error encountered and reported') + flask.abort(500, "Unkown error encountered and reported") if not content: if not tree or not len(tree): - flask.abort(404, 'No content found in the repository') - html = '
  • ' + flask.abort(404, "No content found in the repository") + html = "
  • " for el in tree: name = el.name # Append a trailing '/' to the folders if el.filemode == 16384: - name += '/' + name += "/" html += ''.format(name, name) - html += '
  • ' + html += "" content = TMPL_HTML.format(content=html) - mimetype = 'text/html' + mimetype = "text/html" else: mimetype, _ = pagure.lib.mimetype.guess_type(filename, content) diff --git a/pagure/exceptions.py b/pagure/exceptions.py index 1c375fc..5ff4655 100644 --- a/pagure/exceptions.py +++ b/pagure/exceptions.py @@ -12,44 +12,49 @@ from __future__ import unicode_literals class PagureException(Exception): - ''' Parent class of all the exception for all Pagure specific + """ Parent class of all the exception for all Pagure specific exceptions. - ''' + """ + pass class RepoExistsException(PagureException): - ''' Exception thrown when trying to create a repository that already + """ Exception thrown when trying to create a repository that already exists. - ''' + """ + pass class ProjectBlackListedException(PagureException): - ''' Exception thrown when trying to create a repository but, that repository + """ Exception thrown when trying to create a repository but, that repository name has been blacklisted - ''' + """ + pass class AccessLevelNotFound(PagureException): - ''' Exception raised when the access level asked is not allowed on pagure - ''' + """ Exception raised when the access level asked is not allowed on pagure + """ + pass class FileNotFoundException(PagureException): - ''' Exception thrown when the desired file is not found. + """ Exception thrown when the desired file is not found. This exception is found when the file is searched in a git repo or when setting up one of the git hook. - ''' + """ + pass class APIError(PagureException): - ''' Exception raised by the API when something goes wrong. ''' + """ Exception raised by the API when something goes wrong. 
""" def __init__(self, status_code, error_code, error=None, errors=None): self.status_code = status_code @@ -59,43 +64,50 @@ class APIError(PagureException): class BranchNotFoundException(PagureException): - ''' Exception thrown when trying to use a branch that could not be + """ Exception thrown when trying to use a branch that could not be found in a repository. - ''' + """ + pass class PagureEvException(PagureException): - ''' Exceptions used in the pagure_stream_server. - ''' + """ Exceptions used in the pagure_stream_server. + """ + pass class GitConflictsException(PagureException): - ''' Exception used when trying to pull on a repo and that leads to + """ Exception used when trying to pull on a repo and that leads to conflicts. - ''' + """ + pass class HookInactiveException(PagureException): - ''' Exception raised when the hook is inactive. ''' + """ Exception raised when the hook is inactive. """ + pass class NoCorrespondingPR(PagureException): - ''' Exception raised when no pull-request is found with the given - information. ''' + """ Exception raised when no pull-request is found with the given + information. """ + pass class InvalidObjectException(PagureException): - ''' Exception raised when a given object is not what was expected. ''' + """ Exception raised when a given object is not what was expected. 
""" + pass class PagureEncodingException(PagureException, ValueError): - ''' Exception raised none of the encoding guessed could be applied to + """ Exception raised none of the encoding guessed could be applied to the content examined - ''' + """ + pass diff --git a/pagure/flask_app.py b/pagure/flask_app.py index 58e6ea6..aa04b66 100644 --- a/pagure/flask_app.py +++ b/pagure/flask_app.py @@ -31,7 +31,7 @@ import pagure.utils from pagure.config import config as pagure_config from pagure.utils import get_repo_path -if os.environ.get('PAGURE_PERFREPO'): +if os.environ.get("PAGURE_PERFREPO"): import pagure.perfrepo as perfrepo else: perfrepo = None @@ -40,18 +40,20 @@ else: logger = logging.getLogger(__name__) REDIS = None -if pagure_config['EVENTSOURCE_SOURCE'] \ - or pagure_config['WEBHOOK'] \ - or pagure_config.get('PAGURE_CI_SERVICES'): +if ( + pagure_config["EVENTSOURCE_SOURCE"] + or pagure_config["WEBHOOK"] + or pagure_config.get("PAGURE_CI_SERVICES") +): pagure.lib.set_redis( - host=pagure_config['REDIS_HOST'], - port=pagure_config['REDIS_PORT'], - dbname=pagure_config['REDIS_DB'] + host=pagure_config["REDIS_HOST"], + port=pagure_config["REDIS_PORT"], + dbname=pagure_config["REDIS_DB"], ) -if pagure_config.get('PAGURE_CI_SERVICES'): - pagure.lib.set_pagure_ci(pagure_config['PAGURE_CI_SERVICES']) +if pagure_config.get("PAGURE_CI_SERVICES"): + pagure.lib.set_pagure_ci(pagure_config["PAGURE_CI_SERVICES"]) def create_app(config=None): @@ -62,8 +64,9 @@ def create_app(config=None): if config: app.config.update(config) - if app.config.get('SESSION_TYPE', None) is not None: + if app.config.get("SESSION_TYPE", None) is not None: import flask_session + flask_session.Session(app) pagure.utils.set_up_logging(app=app) @@ -78,19 +81,22 @@ def create_app(config=None): # request. 
app.before_request(perfrepo.reset_stats) - auth = pagure_config.get('PAGURE_AUTH', None) - if auth in ['fas', 'openid']: + auth = pagure_config.get("PAGURE_AUTH", None) + if auth in ["fas", "openid"]: # Only import and set flask_fas_openid if it is needed from pagure.ui.fas_login import FAS + FAS.init_app(app) - elif auth == 'oidc': + elif auth == "oidc": # Only import and set flask_fas_openid if it is needed from pagure.ui.oidc_login import oidc, fas_user_from_oidc + oidc.init_app(app) app.before_request(fas_user_from_oidc) - if auth == 'local': + if auth == "local": # Only import the login controller if the app is set up for local login import pagure.ui.login as login + app.before_request(login._check_session_cookie) app.after_request(login._send_session_cookie) @@ -99,25 +105,31 @@ def create_app(config=None): # Back port 'equalto' to older version of jinja2 app.jinja_env.tests.setdefault( - 'equalto', lambda value, other: value == other) + "equalto", lambda value, other: value == other + ) # Import the application from pagure.api import API # noqa: E402 + app.register_blueprint(API) from pagure.ui import UI_NS # noqa: E402 + app.register_blueprint(UI_NS) from pagure.internal import PV # noqa: E402 + app.register_blueprint(PV) - themename = pagure_config.get('THEME', 'default') + themename = pagure_config.get("THEME", "default") themeblueprint = flask.Blueprint( - 'theme', __name__, - static_url_path='/theme/static', + "theme", + __name__, + static_url_path="/theme/static", static_folder="themes/" + themename + "/static/", - template_folder="themes/" + themename + "/templates/") + template_folder="themes/" + themename + "/templates/", + ) app.register_blueprint(themeblueprint) app.before_request(set_request) @@ -127,8 +139,8 @@ def create_app(config=None): # Do this at the very end, so that this after_request comes last. 
app.after_request(perfrepo.print_stats) - app.add_url_rule('/login/', view_func=auth_login, methods=['GET', 'POST']) - app.add_url_rule('/logout/', view_func=auth_logout) + app.add_url_rule("/login/", view_func=auth_login, methods=["GET", "POST"]) + app.add_url_rule("/logout/", view_func=auth_logout) return app @@ -136,13 +148,15 @@ def create_app(config=None): def generate_user_key_files(): """ Regenerate the key files used by gitolite. """ - gitolite_home = pagure_config.get('GITOLITE_HOME', None) + gitolite_home = pagure_config.get("GITOLITE_HOME", None) if gitolite_home: users = pagure.lib.search_user(flask.g.session) for user in users: pagure.lib.update_user_ssh( - flask.g.session, user, user.public_ssh_key, - pagure_config.get('GITOLITE_KEYDIR', None), + flask.g.session, + user, + user.public_ssh_key, + pagure_config.get("GITOLITE_KEYDIR", None), update_only=True, ) pagure.lib.git.generate_gitolite_acls(project=None) @@ -161,10 +175,9 @@ def admin_session_timedout(): # This is because flask_fas_openid will store this as a posix timestamp if not isinstance(login_time, datetime.datetime): login_time = datetime.datetime.utcfromtimestamp(login_time) - if (datetime.datetime.utcnow() - login_time) > \ - pagure_config.get( - 'ADMIN_SESSION_LIFETIME', - datetime.timedelta(minutes=15)): + if (datetime.datetime.utcnow() - login_time) > pagure_config.get( + "ADMIN_SESSION_LIFETIME", datetime.timedelta(minutes=15) + ): timedout = True logout() return timedout @@ -173,32 +186,36 @@ def admin_session_timedout(): def logout(): """ Log out the user currently logged in in the application """ - auth = pagure_config.get('PAGURE_AUTH', None) - if auth in ['fas', 'openid']: - if hasattr(flask.g, 'fas_user') and flask.g.fas_user is not None: + auth = pagure_config.get("PAGURE_AUTH", None) + if auth in ["fas", "openid"]: + if hasattr(flask.g, "fas_user") and flask.g.fas_user is not None: from pagure.ui.fas_login import FAS + FAS.logout() - elif auth == 'oidc': + elif auth == 
"oidc": from pagure.ui.oidc_login import oidc_logout + oidc_logout() - elif auth == 'local': + elif auth == "local": import pagure.ui.login as login + login.logout() def set_request(): """ Prepare every request. """ flask.session.permanent = True - if not hasattr(flask.g, 'session') or not flask.g.session: + if not hasattr(flask.g, "session") or not flask.g.session: flask.g.session = pagure.lib.create_session( - flask.current_app.config['DB_URL']) + flask.current_app.config["DB_URL"] + ) flask.g.version = pagure.__version__ flask.g.confirmationform = pagure.forms.ConfirmationForm() # The API namespace has its own way of getting repo and username and # of handling errors - if flask.request.blueprint == 'api_ns': + if flask.request.blueprint == "api_ns": return flask.g.forkbuttonform = None @@ -212,19 +229,21 @@ def set_request(): if not isinstance(login_time, datetime.datetime): login_time = datetime.datetime.utcfromtimestamp(login_time) user = _get_user(username=flask.g.fas_user.username) - if (user.refuse_sessions_before - and login_time < user.refuse_sessions_before): + if ( + user.refuse_sessions_before + and login_time < user.refuse_sessions_before + ): logout() - return flask.redirect(flask.url_for('ui_ns.index')) + return flask.redirect(flask.url_for("ui_ns.index")) - flask.g.justlogedout = flask.session.get('_justloggedout', False) + flask.g.justlogedout = flask.session.get("_justloggedout", False) if flask.g.justlogedout: - flask.session['_justloggedout'] = None + flask.session["_justloggedout"] = None flask.g.new_user = False - if flask.session.get('_new_user'): + if flask.session.get("_new_user"): flask.g.new_user = True - flask.session['_new_user'] = False + flask.session["_new_user"] = False flask.g.authenticated = pagure.utils.authenticated() flask.g.admin = pagure.utils.is_admin() @@ -232,34 +251,45 @@ def set_request(): # Retrieve the variables in the URL args = flask.request.view_args or {} # Check if there is a `repo` and an `username` - repo = 
args.get('repo') - username = args.get('username') - namespace = args.get('namespace') + repo = args.get("repo") + username = args.get("username") + namespace = args.get("namespace") # If there isn't a `repo` in the URL path, or if there is but the # endpoint called is part of the API, just don't do anything if repo: flask.g.repo = pagure.lib.get_authorized_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, namespace=namespace + ) if flask.g.authenticated: flask.g.repo_forked = pagure.lib.get_authorized_project( - flask.g.session, repo, user=flask.g.fas_user.username, - namespace=namespace) - flask.g.repo_starred = pagure.lib.has_starred( - flask.g.session, flask.g.repo, + flask.g.session, + repo, user=flask.g.fas_user.username, + namespace=namespace, + ) + flask.g.repo_starred = pagure.lib.has_starred( + flask.g.session, flask.g.repo, user=flask.g.fas_user.username ) - if not flask.g.repo \ - and namespace \ - and pagure_config.get('OLD_VIEW_COMMIT_ENABLED', False) \ - and len(repo) == 40: - return flask.redirect(flask.url_for( - 'ui_ns.view_commit', repo=namespace, commitid=repo, - username=username, namespace=None)) + if ( + not flask.g.repo + and namespace + and pagure_config.get("OLD_VIEW_COMMIT_ENABLED", False) + and len(repo) == 40 + ): + return flask.redirect( + flask.url_for( + "ui_ns.view_commit", + repo=namespace, + commitid=repo, + username=username, + namespace=None, + ) + ) if flask.g.repo is None: - flask.abort(404, 'Project not found') + flask.abort(404, "Project not found") flask.g.reponame = get_repo_path(flask.g.repo) flask.g.repo_obj = pygit2.Repository(flask.g.reponame) @@ -271,15 +301,19 @@ def set_request(): repouser = flask.g.repo.user.user if flask.g.repo.is_fork else None fas_user = flask.g.fas_user if pagure.utils.authenticated() else None flask.g.repo_watch_levels = pagure.lib.get_watch_level_on_repo( - flask.g.session, fas_user, flask.g.repo.name, - repouser=repouser, 
namespace=namespace) + flask.g.session, + fas_user, + flask.g.repo.name, + repouser=repouser, + namespace=namespace, + ) - items_per_page = pagure_config['ITEM_PER_PAGE'] + items_per_page = pagure_config["ITEM_PER_PAGE"] flask.g.offset = 0 flask.g.page = 1 flask.g.limit = items_per_page - page = flask.request.args.get('page') - limit = flask.request.args.get('n') + page = flask.request.args.get("page") + limit = flask.request.args.get("n") if limit: try: limit = int(limit) @@ -304,16 +338,17 @@ def set_request(): def auth_login(): # pragma: no cover """ Method to log into the application using FAS OpenID. """ - return_point = flask.url_for('ui_ns.index') - if 'next' in flask.request.args: - if pagure.utils.is_safe_url(flask.request.args['next']): - return_point = flask.request.args['next'] + return_point = flask.url_for("ui_ns.index") + if "next" in flask.request.args: + if pagure.utils.is_safe_url(flask.request.args["next"]): + return_point = flask.request.args["next"] authenticated = pagure.utils.authenticated() - auth = pagure_config.get('PAGURE_AUTH', None) + auth = pagure_config.get("PAGURE_AUTH", None) - if not authenticated and auth == 'oidc': + if not authenticated and auth == "oidc": from pagure.ui.oidc_login import oidc, fas_user_from_oidc, set_user + # If oidc is used and user hits this endpoint, it will redirect # to IdP with destination=/login?next= # After confirming user identity, the IdP will redirect user here @@ -323,7 +358,7 @@ def auth_login(): # pragma: no cover if not oidc.user_loggedin: return oidc.redirect_to_auth_server(flask.request.url) else: - flask.session['oidc_logintime'] = time.time() + flask.session["oidc_logintime"] = time.time() fas_user_from_oidc() authenticated = pagure.utils.authenticated() set_user() @@ -331,47 +366,47 @@ def auth_login(): # pragma: no cover if authenticated: return flask.redirect(return_point) - admins = pagure_config['ADMIN_GROUP'] + admins = pagure_config["ADMIN_GROUP"] if isinstance(admins, list): admins = 
set(admins) else: # pragma: no cover admins = set([admins]) - if auth in ['fas', 'openid']: + if auth in ["fas", "openid"]: from pagure.ui.fas_login import FAS + groups = set() - if not pagure_config.get('ENABLE_GROUP_MNGT', False): + if not pagure_config.get("ENABLE_GROUP_MNGT", False): groups = [ group.group_name for group in pagure.lib.search_groups( - flask.g.session, group_type='user') + flask.g.session, group_type="user" + ) ] groups = set(groups).union(admins) - ext_committer = set(pagure_config.get('EXTERNAL_COMMITTER', {})) + ext_committer = set(pagure_config.get("EXTERNAL_COMMITTER", {})) groups = set(groups).union(ext_committer) return FAS.login(return_url=return_point, groups=groups) - elif auth == 'local': + elif auth == "local": form = pagure.login_forms.LoginForm() return flask.render_template( - 'login/login.html', - next_url=return_point, - form=form, + "login/login.html", next_url=return_point, form=form ) def auth_logout(): # pragma: no cover """ Method to log out from the application. 
""" - return_point = flask.url_for('ui_ns.index') - if 'next' in flask.request.args: - if pagure.utils.is_safe_url(flask.request.args['next']): - return_point = flask.request.args['next'] + return_point = flask.url_for("ui_ns.index") + if "next" in flask.request.args: + if pagure.utils.is_safe_url(flask.request.args["next"]): + return_point = flask.request.args["next"] if not pagure.utils.authenticated(): return flask.redirect(return_point) logout() flask.flash("You have been logged out") - flask.session['_justloggedout'] = True + flask.session["_justloggedout"] = True return flask.redirect(return_point) @@ -394,4 +429,4 @@ def _get_user(username): try: return pagure.lib.get_user(flask.g.session, username) except pagure.exceptions.PagureException as e: - flask.abort(404, '%s' % e) + flask.abort(404, "%s" % e) diff --git a/pagure/forms.py b/pagure/forms.py index 208bd29..bacaeba 100644 --- a/pagure/forms.py +++ b/pagure/forms.py @@ -19,6 +19,7 @@ import re import flask import flask_wtf as wtf + try: from flask_wtf import FlaskForm except ImportError: @@ -32,28 +33,28 @@ from pagure.config import config as pagure_config from pagure.utils import urlpattern -STRICT_REGEX = '^[a-zA-Z0-9-_]+$' -TAGS_REGEX = '^[a-zA-Z0-9-_, .:]+$' -FALSE_VALUES = ('false', '', False, 'False', 0, '0') +STRICT_REGEX = "^[a-zA-Z0-9-_]+$" +TAGS_REGEX = "^[a-zA-Z0-9-_, .:]+$" +FALSE_VALUES = ("false", "", False, "False", 0, "0") WTF_VERSION = tuple() -if hasattr(wtf, '__version__'): - WTF_VERSION = tuple(int(v) for v in wtf.__version__.split('.')) +if hasattr(wtf, "__version__"): + WTF_VERSION = tuple(int(v) for v in wtf.__version__.split(".")) class PagureForm(FlaskForm): """ Local form allowing us to form set the time limit. 
""" def __init__(self, *args, **kwargs): - delta = pagure_config.get('WTF_CSRF_TIME_LIMIT', 3600) + delta = pagure_config.get("WTF_CSRF_TIME_LIMIT", 3600) if delta and WTF_VERSION < (0, 10, 0): self.TIME_LIMIT = datetime.timedelta(seconds=delta) else: self.TIME_LIMIT = delta - if 'csrf_enabled' in kwargs and kwargs['csrf_enabled'] is False: - kwargs['meta'] = {'csrf': False} + if "csrf_enabled" in kwargs and kwargs["csrf_enabled"] is False: + kwargs["meta"] = {"csrf": False} if WTF_VERSION >= (0, 14, 0): - kwargs.pop('csrf_enabled') + kwargs.pop("csrf_enabled") super(PagureForm, self).__init__(*args, **kwargs) @@ -61,7 +62,7 @@ def convert_value(val): """ Convert the provided values to strings when possible. """ if val: if not isinstance(val, (list, tuple, six.text_type)): - return val.decode('utf-8') + return val.decode("utf-8") elif isinstance(val, six.string_types): return val @@ -70,38 +71,42 @@ class MultipleEmail(wtforms.validators.Email): """ Split the value by comma and run them through the email validator of wtforms. """ + def __call__(self, form, field): - message = field.gettext('One or more invalid email address.') - for data in field.data.split(','): + message = field.gettext("One or more invalid email address.") + for data in field.data.split(","): data = data.strip() - if not self.regex.match(data or ''): + if not self.regex.match(data or ""): raise wtforms.validators.ValidationError(message) def user_namespace_if_private(form, field): - ''' Check if the data in the field is the same as in the password field. - ''' + """ Check if the data in the field is the same as in the password field. + """ if form.private.data: field.data = flask.g.fas_user.username def file_virus_validator(form, field): - ''' Checks for virus in the file from flask request object, - raises wtf.ValidationError if virus is found else None. ''' + """ Checks for virus in the file from flask request object, + raises wtf.ValidationError if virus is found else None. 
""" - if not pagure_config['VIRUS_SCAN_ATTACHMENTS']: + if not pagure_config["VIRUS_SCAN_ATTACHMENTS"]: return from pyclamd import ClamdUnixSocket - if field.name not in flask.request.files or \ - flask.request.files[field.name].filename == '': + if ( + field.name not in flask.request.files + or flask.request.files[field.name].filename == "" + ): # If no file was uploaded, this field is correct return uploaded = flask.request.files[field.name] clam = ClamdUnixSocket() if not clam.ping(): raise wtforms.ValidationError( - 'Unable to communicate with virus scanner') + "Unable to communicate with virus scanner" + ) results = clam.scan_stream(uploaded.stream.read()) if results is None: uploaded.stream.seek(0) @@ -109,64 +114,58 @@ def file_virus_validator(form, field): else: result = results.values() res_type, res_msg = result - if res_type == 'FOUND': - raise wtforms.ValidationError('Virus found: %s' % res_msg) + if res_type == "FOUND": + raise wtforms.ValidationError("Virus found: %s" % res_msg) else: - raise wtforms.ValidationError('Error scanning uploaded file') + raise wtforms.ValidationError("Error scanning uploaded file") def ssh_key_validator(form, field): - ''' Form for ssh key validation ''' + """ Form for ssh key validation """ if not pagure.lib.are_valid_ssh_keys(field.data): - raise wtforms.ValidationError('Invalid SSH keys') + raise wtforms.ValidationError("Invalid SSH keys") class ProjectFormSimplified(PagureForm): - ''' Form to edit the description of a project. ''' + """ Form to edit the description of a project. 
""" + description = wtforms.TextField( 'Description *', - [wtforms.validators.Required()] + [wtforms.validators.Required()], ) url = wtforms.TextField( - 'URL', + "URL", [ wtforms.validators.optional(), wtforms.validators.Regexp(urlpattern, flags=re.IGNORECASE), - ] + ], ) avatar_email = wtforms.TextField( - 'Avatar email', - [wtforms.validators.optional()] + "Avatar email", [wtforms.validators.optional()] ) tags = wtforms.TextField( - 'Project tags', - [ - wtforms.validators.optional(), - wtforms.validators.Length(max=255), - ] + "Project tags", + [wtforms.validators.optional(), wtforms.validators.Length(max=255)], ) private = wtforms.BooleanField( - 'Private', - [wtforms.validators.Optional()], - false_values=FALSE_VALUES, + "Private", [wtforms.validators.Optional()], false_values=FALSE_VALUES ) class ProjectForm(ProjectFormSimplified): - ''' Form to create or edit project. ''' - name = wtforms.TextField( - 'Project name *', - ) + """ Form to create or edit project. """ + + name = wtforms.TextField('Project name *') create_readme = wtforms.BooleanField( - 'Create README', + "Create README", [wtforms.validators.optional()], false_values=FALSE_VALUES, ) namespace = wtforms.SelectField( - 'Project Namespace', + "Project Namespace", [user_namespace_if_private, wtforms.validators.optional()], choices=[], - coerce=convert_value + coerce=convert_value, ) def __init__(self, *args, **kwargs): @@ -177,46 +176,44 @@ class ProjectForm(ProjectFormSimplified): super(ProjectForm, self).__init__(*args, **kwargs) # set the name validator regex = pagure_config.get( - 'PROJECT_NAME_REGEX', '^[a-zA-z0-9_][a-zA-Z0-9-_]*$') + "PROJECT_NAME_REGEX", "^[a-zA-z0-9_][a-zA-Z0-9-_]*$" + ) self.name.validators = [ wtforms.validators.Required(), - wtforms.validators.Regexp(regex, flags=re.IGNORECASE) + wtforms.validators.Regexp(regex, flags=re.IGNORECASE), ] # Set the list of namespace - if 'namespaces' in kwargs: + if "namespaces" in kwargs: self.namespace.choices = [ - (namespace, namespace) 
for namespace in kwargs['namespaces'] + (namespace, namespace) for namespace in kwargs["namespaces"] ] - if not pagure_config.get('USER_NAMESPACE', False): - self.namespace.choices.insert(0, ('', '')) + if not pagure_config.get("USER_NAMESPACE", False): + self.namespace.choices.insert(0, ("", "")) class IssueFormSimplied(PagureForm): - ''' Form to create or edit an issue. ''' + """ Form to create or edit an issue. """ + title = wtforms.TextField( - 'Title*', - [wtforms.validators.Required()] + 'Title*', [wtforms.validators.Required()] ) issue_content = wtforms.TextAreaField( - 'Content*', - [wtforms.validators.Required()] + 'Content*', [wtforms.validators.Required()] ) private = wtforms.BooleanField( - 'Private', - [wtforms.validators.optional()], - false_values=FALSE_VALUES, + "Private", [wtforms.validators.optional()], false_values=FALSE_VALUES ) milestone = wtforms.SelectField( - 'Milestone', + "Milestone", [wtforms.validators.Optional()], choices=[], - coerce=convert_value + coerce=convert_value, ) priority = wtforms.SelectField( - 'Priority', + "Priority", [wtforms.validators.Optional()], choices=[], - coerce=convert_value + coerce=convert_value, ) def __init__(self, *args, **kwargs): @@ -227,25 +224,22 @@ class IssueFormSimplied(PagureForm): super(IssueFormSimplied, self).__init__(*args, **kwargs) self.priority.choices = [] - if 'priorities' in kwargs: - for key in sorted(kwargs['priorities']): - self.priority.choices.append( - (key, kwargs['priorities'][key]) - ) + if "priorities" in kwargs: + for key in sorted(kwargs["priorities"]): + self.priority.choices.append((key, kwargs["priorities"][key])) self.milestone.choices = [] - if 'milestones' in kwargs and kwargs['milestones']: - for key in kwargs['milestones']: + if "milestones" in kwargs and kwargs["milestones"]: + for key in kwargs["milestones"]: self.milestone.choices.append((key, key)) - self.milestone.choices.insert(0, ('', '')) + self.milestone.choices.insert(0, ("", "")) class 
IssueForm(IssueFormSimplied): - ''' Form to create or edit an issue. ''' + """ Form to create or edit an issue. """ + status = wtforms.SelectField( - 'Status', - [wtforms.validators.Required()], - choices=[] + "Status", [wtforms.validators.Required()], choices=[] ) def __init__(self, *args, **kwargs): @@ -254,73 +248,70 @@ class IssueForm(IssueFormSimplied): drop-down list. """ super(IssueForm, self).__init__(*args, **kwargs) - if 'status' in kwargs: + if "status" in kwargs: self.status.choices = [ - (status, status) for status in kwargs['status'] + (status, status) for status in kwargs["status"] ] class RequestPullForm(PagureForm): - ''' Form to create a pull request. ''' + """ Form to create a pull request. """ + title = wtforms.TextField( - 'Title*', - [wtforms.validators.Required()] + 'Title*', [wtforms.validators.Required()] ) initial_comment = wtforms.TextAreaField( - 'Initial Comment', [wtforms.validators.Optional()]) + "Initial Comment", [wtforms.validators.Optional()] + ) class RemoteRequestPullForm(RequestPullForm): - ''' Form to create a remote pull request. ''' + """ Form to create a remote pull request. """ + git_repo = wtforms.TextField( 'Git repo address*', - [wtforms.validators.Required()] + [wtforms.validators.Required()], ) branch_from = wtforms.TextField( 'Git branch*', - [wtforms.validators.Required()] + [wtforms.validators.Required()], ) branch_to = wtforms.TextField( 'Git branch to merge in*', - [wtforms.validators.Required()] + [wtforms.validators.Required()], ) class DeleteIssueTagForm(PagureForm): - ''' Form to remove a tag to from a project. ''' + """ Form to remove a tag to from a project. """ + tag = wtforms.TextField( - 'Tag', + "Tag", [ wtforms.validators.Optional(), wtforms.validators.Regexp(TAGS_REGEX, flags=re.IGNORECASE), wtforms.validators.Length(max=255), - ] + ], ) class AddIssueTagForm(DeleteIssueTagForm): - ''' Form to add a tag to a project. ''' + """ Form to add a tag to a project. 
""" + tag_description = wtforms.TextField( - 'Tag Description', - [wtforms.validators.Optional()], - ) - tag_color = wtforms.TextField( - 'Tag Color', - [wtforms.validators.Required()], + "Tag Description", [wtforms.validators.Optional()] ) + tag_color = wtforms.TextField("Tag Color", [wtforms.validators.Required()]) class StatusForm(PagureForm): - ''' Form to add/change the status of an issue. ''' + """ Form to add/change the status of an issue. """ + status = wtforms.SelectField( - 'Status', - [wtforms.validators.Required()], - choices=[] + "Status", [wtforms.validators.Required()], choices=[] ) close_status = wtforms.SelectField( - 'Closed as', - [wtforms.validators.Optional()], - choices=[] + "Closed as", [wtforms.validators.Optional()], choices=[] ) def __init__(self, *args, **kwargs): @@ -329,24 +320,25 @@ class StatusForm(PagureForm): drop-down list. """ super(StatusForm, self).__init__(*args, **kwargs) - if 'status' in kwargs: + if "status" in kwargs: self.status.choices = [ - (status, status) for status in kwargs['status'] + (status, status) for status in kwargs["status"] ] self.close_status.choices = [] - if 'close_status' in kwargs: - for key in sorted(kwargs['close_status']): + if "close_status" in kwargs: + for key in sorted(kwargs["close_status"]): self.close_status.choices.append((key, key)) - self.close_status.choices.insert(0, ('', '')) + self.close_status.choices.insert(0, ("", "")) class MilestoneForm(PagureForm): - ''' Form to change the milestone of an issue. ''' + """ Form to change the milestone of an issue. 
""" + milestone = wtforms.SelectField( - 'Milestone', + "Milestone", [wtforms.validators.Optional()], choices=[], - coerce=convert_value + coerce=convert_value, ) def __init__(self, *args, **kwargs): @@ -356,21 +348,20 @@ class MilestoneForm(PagureForm): """ super(MilestoneForm, self).__init__(*args, **kwargs) self.milestone.choices = [] - if 'milestones' in kwargs and kwargs['milestones']: - for key in kwargs['milestones']: + if "milestones" in kwargs and kwargs["milestones"]: + for key in kwargs["milestones"]: self.milestone.choices.append((key, key)) - self.milestone.choices.insert(0, ('', '')) + self.milestone.choices.insert(0, ("", "")) class NewTokenForm(PagureForm): - ''' Form to add/change the status of an issue. ''' + """ Form to add/change the status of an issue. """ + description = wtforms.TextField( - 'description', [wtforms.validators.Optional()] + "description", [wtforms.validators.Optional()] ) acls = wtforms.SelectMultipleField( - 'ACLs', - [wtforms.validators.Required()], - choices=[] + "ACLs", [wtforms.validators.Required()], choices=[] ) def __init__(self, *args, **kwargs): @@ -379,60 +370,53 @@ class NewTokenForm(PagureForm): drop-down list. """ super(NewTokenForm, self).__init__(*args, **kwargs) - if 'acls' in kwargs: + if "acls" in kwargs: self.acls.choices = [ - (acl.name, acl.name) for acl in kwargs['acls'] + (acl.name, acl.name) for acl in kwargs["acls"] ] class UpdateIssueForm(PagureForm): - ''' Form to add a comment to an issue. ''' + """ Form to add a comment to an issue. 
""" + tag = wtforms.TextField( - 'tag', + "tag", [ wtforms.validators.Optional(), wtforms.validators.Regexp(TAGS_REGEX, flags=re.IGNORECASE), wtforms.validators.Length(max=255), - ] + ], ) depending = wtforms.TextField( - 'depending issue', [wtforms.validators.Optional()] + "depending issue", [wtforms.validators.Optional()] ) blocking = wtforms.TextField( - 'blocking issue', [wtforms.validators.Optional()] - ) - comment = wtforms.TextAreaField( - 'Comment', [wtforms.validators.Optional()] + "blocking issue", [wtforms.validators.Optional()] ) + comment = wtforms.TextAreaField("Comment", [wtforms.validators.Optional()]) assignee = wtforms.TextAreaField( - 'Assigned to', [wtforms.validators.Optional()] + "Assigned to", [wtforms.validators.Optional()] ) status = wtforms.SelectField( - 'Status', - [wtforms.validators.Optional()], - choices=[] + "Status", [wtforms.validators.Optional()], choices=[] ) priority = wtforms.SelectField( - 'Priority', - [wtforms.validators.Optional()], - choices=[] + "Priority", [wtforms.validators.Optional()], choices=[] ) milestone = wtforms.SelectField( - 'Milestone', + "Milestone", [wtforms.validators.Optional()], choices=[], - coerce=convert_value + coerce=convert_value, ) private = wtforms.BooleanField( - 'Private', - [wtforms.validators.optional()], - false_values=FALSE_VALUES, + "Private", [wtforms.validators.optional()], false_values=FALSE_VALUES ) close_status = wtforms.SelectField( - 'Closed as', + "Closed as", [wtforms.validators.Optional()], choices=[], - coerce=convert_value + coerce=convert_value, ) def __init__(self, *args, **kwargs): @@ -441,247 +425,249 @@ class UpdateIssueForm(PagureForm): drop-down list. 
""" super(UpdateIssueForm, self).__init__(*args, **kwargs) - if 'status' in kwargs: + if "status" in kwargs: self.status.choices = [ - (status, status) for status in kwargs['status'] + (status, status) for status in kwargs["status"] ] self.priority.choices = [] - if 'priorities' in kwargs: - for key in sorted(kwargs['priorities']): - self.priority.choices.append( - (key, kwargs['priorities'][key]) - ) + if "priorities" in kwargs: + for key in sorted(kwargs["priorities"]): + self.priority.choices.append((key, kwargs["priorities"][key])) self.milestone.choices = [] - if 'milestones' in kwargs and kwargs['milestones']: - for key in kwargs['milestones']: + if "milestones" in kwargs and kwargs["milestones"]: + for key in kwargs["milestones"]: self.milestone.choices.append((key, key)) - self.milestone.choices.insert(0, ('', '')) + self.milestone.choices.insert(0, ("", "")) self.close_status.choices = [] - if 'close_status' in kwargs: - for key in sorted(kwargs['close_status']): + if "close_status" in kwargs: + for key in sorted(kwargs["close_status"]): self.close_status.choices.append((key, key)) - self.close_status.choices.insert(0, ('', '')) + self.close_status.choices.insert(0, ("", "")) class AddPullRequestCommentForm(PagureForm): - ''' Form to add a comment to a pull-request. ''' - commit = wtforms.HiddenField('commit identifier') - filename = wtforms.HiddenField('file changed') - row = wtforms.HiddenField('row') - requestid = wtforms.HiddenField('requestid') - tree_id = wtforms.HiddenField('treeid') + """ Form to add a comment to a pull-request. 
""" + + commit = wtforms.HiddenField("commit identifier") + filename = wtforms.HiddenField("file changed") + row = wtforms.HiddenField("row") + requestid = wtforms.HiddenField("requestid") + tree_id = wtforms.HiddenField("treeid") comment = wtforms.TextAreaField( - 'Comment*', - [wtforms.validators.Required()] + 'Comment*', [wtforms.validators.Required()] ) class AddPullRequestFlagFormV1(PagureForm): - ''' Form to add a flag to a pull-request or commit. ''' - username = wtforms.TextField( - 'Username', [wtforms.validators.Required()]) + """ Form to add a flag to a pull-request or commit. """ + + username = wtforms.TextField("Username", [wtforms.validators.Required()]) percent = wtforms.TextField( - 'Percentage of completion', - [wtforms.validators.optional()]) - comment = wtforms.TextAreaField( - 'Comment', [wtforms.validators.Required()]) + "Percentage of completion", [wtforms.validators.optional()] + ) + comment = wtforms.TextAreaField("Comment", [wtforms.validators.Required()]) url = wtforms.TextField( - 'URL', [ + "URL", + [ wtforms.validators.Required(), wtforms.validators.Regexp(urlpattern, flags=re.IGNORECASE), - ]) - uid = wtforms.TextField( - 'UID', [wtforms.validators.optional()]) + ], + ) + uid = wtforms.TextField("UID", [wtforms.validators.optional()]) class AddPullRequestFlagForm(AddPullRequestFlagFormV1): - ''' Form to add a flag to a pull-request or commit. ''' + """ Form to add a flag to a pull-request or commit. 
""" + def __init__(self, *args, **kwargs): # we need to instantiate dynamically because the configuration # values may change during tests and we want to always respect # the currently set value super(AddPullRequestFlagForm, self).__init__(*args, **kwargs) - self.status.choices = list(zip( - pagure_config['FLAG_STATUSES_LABELS'].keys(), - pagure_config['FLAG_STATUSES_LABELS'].keys() - )) + self.status.choices = list( + zip( + pagure_config["FLAG_STATUSES_LABELS"].keys(), + pagure_config["FLAG_STATUSES_LABELS"].keys(), + ) + ) status = wtforms.SelectField( - 'status', - [wtforms.validators.Required()], - choices=[], + "status", [wtforms.validators.Required()], choices=[] ) class UserSettingsForm(PagureForm): - ''' Form to create or edit project. ''' + """ Form to create or edit project. """ + ssh_key = wtforms.TextAreaField( 'Public SSH keys *', - [wtforms.validators.Required(), - ssh_key_validator] + [wtforms.validators.Required(), ssh_key_validator], ) class AddDeployKeyForm(PagureForm): - ''' Form to add a deploy key to a project. ''' + """ Form to add a deploy key to a project. """ + ssh_key = wtforms.TextField( 'SSH Key *', [wtforms.validators.Required()] # TODO: Add an ssh key validator? ) pushaccess = wtforms.BooleanField( - 'Push access', + "Push access", [wtforms.validators.optional()], false_values=FALSE_VALUES, ) class AddUserForm(PagureForm): - ''' Form to add a user to a project. ''' + """ Form to add a user to a project. """ + user = wtforms.TextField( 'Username *', - [wtforms.validators.Required()] + [wtforms.validators.Required()], ) access = wtforms.TextField( 'Access Level *', - [wtforms.validators.Required()] + [wtforms.validators.Required()], ) class AddUserToGroupForm(PagureForm): - ''' Form to add a user to a pagure group. ''' + """ Form to add a user to a pagure group. 
""" + user = wtforms.TextField( 'Username *', - [wtforms.validators.Required()] + [wtforms.validators.Required()], ) class AssignIssueForm(PagureForm): - ''' Form to assign an user to an issue. ''' + """ Form to assign an user to an issue. """ + assignee = wtforms.TextField( 'Assignee *', - [wtforms.validators.Optional()] + [wtforms.validators.Optional()], ) class AddGroupForm(PagureForm): - ''' Form to add a group to a project. ''' + """ Form to add a group to a project. """ + group = wtforms.TextField( 'Group *', [ wtforms.validators.Required(), - wtforms.validators.Regexp(STRICT_REGEX, flags=re.IGNORECASE) - ] + wtforms.validators.Regexp(STRICT_REGEX, flags=re.IGNORECASE), + ], ) access = wtforms.TextField( 'Access Level *', - [wtforms.validators.Required()] + [wtforms.validators.Required()], ) class ConfirmationForm(PagureForm): - ''' Simple form used just for CSRF protection. ''' + """ Simple form used just for CSRF protection. """ + pass class ModifyACLForm(PagureForm): - ''' Form to change ACL of a user or a group to a project. ''' + """ Form to change ACL of a user or a group to a project. """ + user_type = wtforms.SelectField( - 'User type', + "User type", [wtforms.validators.Required()], - choices=[('user', 'User'), ('group', 'Group')] + choices=[("user", "User"), ("group", "Group")], ) name = wtforms.TextField( 'User- or Groupname *', - [wtforms.validators.Required()] + [wtforms.validators.Required()], ) acl = wtforms.SelectField( - 'ACL type', + "ACL type", [wtforms.validators.Optional()], - choices=[('admin', 'Admin'), ('ticket', 'Ticket'), - ('commit', 'Commit'), (None, None)], - coerce=convert_value + choices=[ + ("admin", "Admin"), + ("ticket", "Ticket"), + ("commit", "Commit"), + (None, None), + ], + coerce=convert_value, ) class UploadFileForm(PagureForm): - ''' Form to upload a file. ''' + """ Form to upload a file. 
""" + filestream = wtforms.FileField( - 'File', - [wtforms.validators.Required(), file_virus_validator]) + "File", [wtforms.validators.Required(), file_virus_validator] + ) class UserEmailForm(PagureForm): - ''' Form to edit the description of a project. ''' - email = wtforms.TextField( - 'email', [wtforms.validators.Required()] - ) + """ Form to edit the description of a project. """ + + email = wtforms.TextField("email", [wtforms.validators.Required()]) def __init__(self, *args, **kwargs): super(UserEmailForm, self).__init__(*args, **kwargs) - if 'emails' in kwargs: - if kwargs['emails']: + if "emails" in kwargs: + if kwargs["emails"]: self.email.validators.append( - wtforms.validators.NoneOf(kwargs['emails']) + wtforms.validators.NoneOf(kwargs["emails"]) ) else: self.email.validators = [wtforms.validators.Required()] class ProjectCommentForm(PagureForm): - ''' Form to represent project. ''' + """ Form to represent project. """ + objid = wtforms.TextField( - 'Ticket/Request id', - [wtforms.validators.Required()] - ) - useremail = wtforms.TextField( - 'Email', - [wtforms.validators.Required()] + "Ticket/Request id", [wtforms.validators.Required()] ) + useremail = wtforms.TextField("Email", [wtforms.validators.Required()]) class CommentForm(PagureForm): - ''' Form to upload a file. ''' + """ Form to upload a file. """ + comment = wtforms.FileField( - 'Comment', - [wtforms.validators.Required(), file_virus_validator]) + "Comment", [wtforms.validators.Required(), file_virus_validator] + ) class EditGroupForm(PagureForm): """ Form to ask for a password change. 
""" + display_name = wtforms.TextField( - 'Group name to display', - [ - wtforms.validators.Required(), - wtforms.validators.Length(max=255), - ] + "Group name to display", + [wtforms.validators.Required(), wtforms.validators.Length(max=255)], ) description = wtforms.TextField( - 'Description', - [ - wtforms.validators.Required(), - wtforms.validators.Length(max=255), - ] + "Description", + [wtforms.validators.Required(), wtforms.validators.Length(max=255)], ) class NewGroupForm(EditGroupForm): """ Form to ask for a password change. """ + group_name = wtforms.TextField( 'Group name *', [ wtforms.validators.Required(), wtforms.validators.Length(max=255), - wtforms.validators.Regexp(STRICT_REGEX, flags=re.IGNORECASE) - ] + wtforms.validators.Regexp(STRICT_REGEX, flags=re.IGNORECASE), + ], ) group_type = wtforms.SelectField( - 'Group type', - [wtforms.validators.Required()], - choices=[] + "Group type", [wtforms.validators.Required()], choices=[] ) def __init__(self, *args, **kwargs): @@ -690,26 +676,24 @@ class NewGroupForm(EditGroupForm): drop-down list. """ super(NewGroupForm, self).__init__(*args, **kwargs) - if 'group_types' in kwargs: + if "group_types" in kwargs: self.group_type.choices = [ - (grptype, grptype) for grptype in kwargs['group_types'] + (grptype, grptype) for grptype in kwargs["group_types"] ] class EditFileForm(PagureForm): """ Form used to edit a file. 
""" - content = wtforms.TextAreaField( - 'content', [wtforms.validators.Optional()]) - commit_title = wtforms.TextField( - 'Title', [wtforms.validators.Required()]) + + content = wtforms.TextAreaField("content", [wtforms.validators.Optional()]) + commit_title = wtforms.TextField("Title", [wtforms.validators.Required()]) commit_message = wtforms.TextAreaField( - 'Commit message', [wtforms.validators.optional()]) + "Commit message", [wtforms.validators.optional()] + ) email = wtforms.SelectField( - 'Email', [wtforms.validators.Required()], - choices=[] + "Email", [wtforms.validators.Required()], choices=[] ) - branch = wtforms.TextField( - 'Branch', [wtforms.validators.Required()]) + branch = wtforms.TextField("Branch", [wtforms.validators.Required()]) def __init__(self, *args, **kwargs): """ Calls the default constructor with the normal argument but @@ -717,18 +701,17 @@ class EditFileForm(PagureForm): drop-down list. """ super(EditFileForm, self).__init__(*args, **kwargs) - if 'emails' in kwargs: + if "emails" in kwargs: self.email.choices = [ - (email.email, email.email) for email in kwargs['emails'] + (email.email, email.email) for email in kwargs["emails"] ] class DefaultBranchForm(PagureForm): """Form to change the default branh for a repository""" + branches = wtforms.SelectField( - 'default_branch', - [wtforms.validators.Required()], - choices=[] + "default_branch", [wtforms.validators.Required()], choices=[] ) def __init__(self, *args, **kwargs): @@ -737,18 +720,17 @@ class DefaultBranchForm(PagureForm): drop-down list. 
""" super(DefaultBranchForm, self).__init__(*args, **kwargs) - if 'branches' in kwargs: + if "branches" in kwargs: self.branches.choices = [ - (branch, branch) for branch in kwargs['branches'] + (branch, branch) for branch in kwargs["branches"] ] class DefaultPriorityForm(PagureForm): """Form to change the default priority for a repository""" + priority = wtforms.SelectField( - 'default_priority', - [wtforms.validators.optional()], - choices=[] + "default_priority", [wtforms.validators.optional()], choices=[] ) def __init__(self, *args, **kwargs): @@ -757,63 +739,65 @@ class DefaultPriorityForm(PagureForm): drop-down list. """ super(DefaultPriorityForm, self).__init__(*args, **kwargs) - if 'priorities' in kwargs: + if "priorities" in kwargs: self.priority.choices = [ - (priority, priority) for priority in kwargs['priorities'] + (priority, priority) for priority in kwargs["priorities"] ] class EditCommentForm(PagureForm): """ Form to verify that comment is not empty """ + update_comment = wtforms.TextAreaField( - 'Comment*', - [wtforms.validators.Required()] + 'Comment*', [wtforms.validators.Required()] ) class ForkRepoForm(PagureForm): - ''' Form to fork a project in the API. ''' + """ Form to fork a project in the API. 
""" + repo = wtforms.TextField( - 'The project name', - [wtforms.validators.Required()] + "The project name", [wtforms.validators.Required()] ) username = wtforms.TextField( - 'User who forked the project', - [wtforms.validators.optional()]) + "User who forked the project", [wtforms.validators.optional()] + ) namespace = wtforms.TextField( - 'The project namespace', - [wtforms.validators.optional()] + "The project namespace", [wtforms.validators.optional()] ) class AddReportForm(PagureForm): """ Form to verify that comment is not empty """ + report_name = wtforms.TextAreaField( 'Report name*', - [wtforms.validators.Required()] + [wtforms.validators.Required()], ) class PublicNotificationForm(PagureForm): """ Form to verify that comment is not empty """ + issue_notifs = wtforms.TextAreaField( 'Public issue notification*', - [wtforms.validators.optional(), MultipleEmail()] + [wtforms.validators.optional(), MultipleEmail()], ) pr_notifs = wtforms.TextAreaField( 'Public PR notification*', - [wtforms.validators.optional(), MultipleEmail()] + [wtforms.validators.optional(), MultipleEmail()], ) class SubscribtionForm(PagureForm): - ''' Form to subscribe to or unsubscribe from an issue or a PR. ''' + """ Form to subscribe to or unsubscribe from an issue or a PR. 
""" + status = wtforms.BooleanField( - 'Subscription status', + "Subscription status", [wtforms.validators.optional()], false_values=FALSE_VALUES, ) @@ -821,7 +805,7 @@ class SubscribtionForm(PagureForm): class MergePRForm(PagureForm): delete_branch = wtforms.BooleanField( - 'Delete branch after merging', + "Delete branch after merging", [wtforms.validators.optional()], false_values=FALSE_VALUES, ) diff --git a/pagure/hooks/__init__.py b/pagure/hooks/__init__.py index a58f6b8..cc6fc29 100644 --- a/pagure/hooks/__init__.py +++ b/pagure/hooks/__init__.py @@ -35,14 +35,15 @@ class RequiredIf(wtforms.validators.Required): for fieldname in self.fields: nfield = form._fields.get(fieldname) if nfield is None: - raise Exception( - 'no field named "%s" in form' % fieldname) + raise Exception('no field named "%s" in form' % fieldname) if bool(nfield.data): - if not field.data \ - or isinstance(field.data, six.string_types) \ - and not field.data.strip(): + if ( + not field.data + or isinstance(field.data, six.string_types) + and not field.data.strip() + ): if self.message is None: - message = field.gettext('This field is required.') + message = field.gettext("This field is required.") else: message = self.message @@ -51,87 +52,88 @@ class RequiredIf(wtforms.validators.Required): class BaseHook(object): - ''' Base class for pagure's hooks. ''' + """ Base class for pagure's hooks. """ name = None form = None description = None - hook_type = 'post-receive' + hook_type = "post-receive" @classmethod def set_up(cls, project): - ''' Install the generic post-receive hook that allow us to call + """ Install the generic post-receive hook that allow us to call multiple post-receive hooks as set per plugin. 
- ''' + """ repopaths = [get_repo_path(project)] for folder in [ - pagure_config.get('DOCS_FOLDER'), - pagure_config.get('REQUESTS_FOLDER')]: + pagure_config.get("DOCS_FOLDER"), + pagure_config.get("REQUESTS_FOLDER"), + ]: if folder: - repopaths.append( - os.path.join(folder, project.path) - ) + repopaths.append(os.path.join(folder, project.path)) hook_files = os.path.join( - os.path.dirname(os.path.realpath(__file__)), 'files') + os.path.dirname(os.path.realpath(__file__)), "files" + ) for repopath in repopaths: # Make sure the hooks folder exists - hookfolder = os.path.join(repopath, 'hooks') + hookfolder = os.path.join(repopath, "hooks") if not os.path.exists(hookfolder): os.makedirs(hookfolder) # Install the main post-receive file postreceive = os.path.join(hookfolder, cls.hook_type) if not os.path.exists(postreceive): - os.symlink(os.path.join(hook_files, cls.hook_type), - postreceive) + os.symlink( + os.path.join(hook_files, cls.hook_type), postreceive + ) @classmethod def base_install(cls, repopaths, dbobj, hook_name, filein): - ''' Method called to install the hook for a project. + """ Method called to install the hook for a project. :arg project: a ``pagure.model.Project`` object to which the hook should be installed :arg dbobj: the DB object the hook uses to store the settings information. - ''' + """ for repopath in repopaths: if not os.path.exists(repopath): - raise FileNotFoundException('Repo %s not found' % repopath) + raise FileNotFoundException("Repo %s not found" % repopath) hook_files = os.path.join( - os.path.dirname(os.path.realpath(__file__)), 'files') + os.path.dirname(os.path.realpath(__file__)), "files" + ) # Make sure the hooks folder exists - hookfolder = os.path.join(repopath, 'hooks') + hookfolder = os.path.join(repopath, "hooks") if not os.path.exists(hookfolder): os.makedirs(hookfolder) # Install the hook itself - hook_file = os.path.join(repopath, 'hooks', cls.hook_type + '.' 
- + hook_name) + hook_file = os.path.join( + repopath, "hooks", cls.hook_type + "." + hook_name + ) if not os.path.exists(hook_file): - os.symlink( - os.path.join(hook_files, filein), - hook_file - ) + os.symlink(os.path.join(hook_files, filein), hook_file) @classmethod def base_remove(cls, repopaths, hook_name): - ''' Method called to remove the hook of a project. + """ Method called to remove the hook of a project. :arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ for repopath in repopaths: if not os.path.exists(repopath): - raise FileNotFoundException('Repo %s not found' % repopath) + raise FileNotFoundException("Repo %s not found" % repopath) - hook_path = os.path.join(repopath, 'hooks', cls.hook_type + '.' - + hook_name) + hook_path = os.path.join( + repopath, "hooks", cls.hook_type + "." + hook_name + ) if os.path.exists(hook_path): os.unlink(hook_path) diff --git a/pagure/hooks/default.py b/pagure/hooks/default.py index f511ea6..51d625c 100644 --- a/pagure/hooks/default.py +++ b/pagure/hooks/default.py @@ -12,6 +12,7 @@ from __future__ import unicode_literals import sqlalchemy as sa import wtforms + try: from flask_wtf import FlaskForm except ImportError: @@ -30,32 +31,34 @@ class DefaultTable(BASE): Table -- hook_default """ - __tablename__ = 'hook_default' + __tablename__ = "hook_default" id = sa.Column(sa.Integer, primary_key=True) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False, unique=True, - index=True) + index=True, + ) active = sa.Column(sa.Boolean, nullable=False, default=False) project = relation( - 'Project', remote_side=[Project.id], + "Project", + remote_side=[Project.id], backref=backref( - 'default_hook', cascade="delete, delete-orphan", - single_parent=True, uselist=False) + "default_hook", + cascade="delete, delete-orphan", + single_parent=True, + 
uselist=False, + ), ) class DefaultForm(FlaskForm): - ''' Form to configure the default hook. ''' - active = wtforms.BooleanField( - 'Active', - [wtforms.validators.Optional()] - ) + """ Form to configure the default hook. """ + + active = wtforms.BooleanField("Active", [wtforms.validators.Optional()]) def __init__(self, *args, **kwargs): """ Calls the default constructor with the normal argument but @@ -66,37 +69,38 @@ class DefaultForm(FlaskForm): class Default(BaseHook): - ''' Default hooks. ''' + """ Default hooks. """ - name = 'default' - description = 'Default hooks that should be enabled for each and '\ - 'every project.' + name = "default" + description = ( + "Default hooks that should be enabled for each and every project." + ) form = DefaultForm db_object = DefaultTable - backref = 'default_hook' - form_fields = ['active'] + backref = "default_hook" + form_fields = ["active"] @classmethod def install(cls, project, dbobj): - ''' Method called to install the hook for a project. + """ Method called to install the hook for a project. :arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ repopaths = [get_repo_path(project)] - cls.base_install(repopaths, dbobj, 'default', 'default_hook.py') + cls.base_install(repopaths, dbobj, "default", "default_hook.py") @classmethod def remove(cls, project): - ''' Method called to remove the hook of a project. + """ Method called to remove the hook of a project. 
:arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ repopaths = [get_repo_path(project)] - cls.base_remove(repopaths, 'default') + cls.base_remove(repopaths, "default") diff --git a/pagure/hooks/fedmsg.py b/pagure/hooks/fedmsg.py index bafd652..65efc76 100644 --- a/pagure/hooks/fedmsg.py +++ b/pagure/hooks/fedmsg.py @@ -12,6 +12,7 @@ from __future__ import unicode_literals import sqlalchemy as sa import wtforms + try: from flask_wtf import FlaskForm except ImportError: @@ -29,36 +30,38 @@ class FedmsgTable(BASE): Table -- hook_fedmsg """ - __tablename__ = 'hook_fedmsg' + __tablename__ = "hook_fedmsg" id = sa.Column(sa.Integer, primary_key=True) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False, unique=True, - index=True) + index=True, + ) active = sa.Column(sa.Boolean, nullable=False, default=False) project = relation( - 'Project', remote_side=[Project.id], + "Project", + remote_side=[Project.id], backref=backref( - 'fedmsg_hook', cascade="delete, delete-orphan", - single_parent=True, uselist=False) + "fedmsg_hook", + cascade="delete, delete-orphan", + single_parent=True, + uselist=False, + ), ) class FedmsgForm(FlaskForm): - ''' Form to configure the fedmsg hook. ''' - active = wtforms.BooleanField( - 'Active', - [wtforms.validators.Optional()] - ) + """ Form to configure the fedmsg hook. """ + active = wtforms.BooleanField("Active", [wtforms.validators.Optional()]) -DESCRIPTION = ''' + +DESCRIPTION = """ This hook pushes commit notification to the fedmsg bus to be consumed by other applications. @@ -67,22 +70,22 @@ which publishes notifications about events happening in the project via pagure's (web) user interface, for example: new tickets, new comments, new pull-requests and so on. This hook on the other only acts on commits. 
-''' +""" class Fedmsg(BaseHook): - ''' Fedmsg hooks. ''' + """ Fedmsg hooks. """ - name = 'Fedmsg' + name = "Fedmsg" description = DESCRIPTION form = FedmsgForm db_object = FedmsgTable - backref = 'fedmsg_hook' - form_fields = ['active'] + backref = "fedmsg_hook" + form_fields = ["active"] @classmethod def install(cls, project, dbobj): - ''' Method called to install the hook for a project. + """ Method called to install the hook for a project. :arg project: a ``pagure.model.Project`` object to which the hook should be installed @@ -92,12 +95,12 @@ class Fedmsg(BaseHook): sending fedmsg notifications on commit push, but other than that this plugin doesn't do much anymore. - ''' + """ pass @classmethod def remove(cls, project): - ''' Method called to remove the hook of a project. + """ Method called to remove the hook of a project. :arg project: a ``pagure.model.Project`` object to which the hook should be installed @@ -107,5 +110,5 @@ class Fedmsg(BaseHook): sending fedmsg notifications on commit push, but other than that this plugin doesn't do much anymore. 
- ''' + """ pass diff --git a/pagure/hooks/files/default_hook.py b/pagure/hooks/files/default_hook.py index 0dd4376..5c4b056 100755 --- a/pagure/hooks/files/default_hook.py +++ b/pagure/hooks/files/default_hook.py @@ -10,9 +10,10 @@ import logging import sys -if 'PAGURE_CONFIG' not in os.environ \ - and os.path.exists('/etc/pagure/pagure.cfg'): - os.environ['PAGURE_CONFIG'] = '/etc/pagure/pagure.cfg' +if "PAGURE_CONFIG" not in os.environ and os.path.exists( + "/etc/pagure/pagure.cfg" +): + os.environ["PAGURE_CONFIG"] = "/etc/pagure/pagure.cfg" import pygit2 # noqa: E402 @@ -28,18 +29,19 @@ import pagure.lib.tasks_services # noqa: E402 _config = pagure.config.reload_config() _log = logging.getLogger(__name__) -abspath = os.path.abspath(os.environ['GIT_DIR']) +abspath = os.path.abspath(os.environ["GIT_DIR"]) def send_fedmsg_notifications(project, topic, msg): - ''' If the user asked for fedmsg notifications on commit, this will + """ If the user asked for fedmsg notifications on commit, this will do it. - ''' + """ import fedmsg + config = fedmsg.config.load_config([], None) - config['active'] = True - config['endpoints']['relay_inbound'] = config['relay_inbound'] - fedmsg.init(name='relay_inbound', **config) + config["active"] = True + config["endpoints"]["relay_inbound"] = config["relay_inbound"] + fedmsg.init(name="relay_inbound", **config) pagure.lib.notify.log( project=project, @@ -50,9 +52,9 @@ def send_fedmsg_notifications(project, topic, msg): def send_webhook_notifications(project, topic, msg): - ''' If the user asked for webhook notifications on commit, this will + """ If the user asked for webhook notifications on commit, this will do it. 
- ''' + """ pagure.lib.tasks_services.webhook_notification.delay( topic=topic, @@ -64,9 +66,9 @@ def send_webhook_notifications(project, topic, msg): def send_notifications(session, project, refname, revs, forced): - ''' Send out-going notifications about the commits that have just been + """ Send out-going notifications about the commits that have just been pushed. - ''' + """ auths = set() for rev in revs: @@ -85,7 +87,7 @@ def send_notifications(session, project, refname, revs, forced): revs.reverse() print("* Publishing information for %i commits" % len(revs)) - topic = 'git.receive' + topic = "git.receive" msg = dict( total_commits=len(revs), start_commit=revs[0], @@ -93,60 +95,70 @@ def send_notifications(session, project, refname, revs, forced): branch=refname, forced=forced, authors=list(authors), - agent=os.environ['GL_USER'], + agent=os.environ["GL_USER"], repo=project.to_json(public=True) - if not isinstance(project, six.string_types) else project, + if not isinstance(project, six.string_types) + else project, ) - fedmsg_hook = pagure.lib.plugins.get_plugin('Fedmsg') + fedmsg_hook = pagure.lib.plugins.get_plugin("Fedmsg") fedmsg_hook.db_object() - always_fedmsg = _config.get('ALWAYS_FEDMSG_ON_COMMITS') or None + always_fedmsg = _config.get("ALWAYS_FEDMSG_ON_COMMITS") or None - if always_fedmsg \ - or (project.fedmsg_hook and project.fedmsg_hook.active): + if always_fedmsg or ( + project.fedmsg_hook and project.fedmsg_hook.active + ): try: print(" - to fedmsg") send_fedmsg_notifications(project, topic, msg) except Exception: _log.exception( - 'Error sending fedmsg notifications on commit push') - if project.settings.get('Web-hooks') and not project.private: + "Error sending fedmsg notifications on commit push" + ) + if project.settings.get("Web-hooks") and not project.private: try: print(" - to web-hooks") send_webhook_notifications(project, topic, msg) except Exception: _log.exception( - 'Error sending web-hook notifications on commit push') - - if 
_config.get('PAGURE_CI_SERVICES') \ - and project.ci_hook \ - and project.ci_hook.active_commit \ - and not project.private: + "Error sending web-hook notifications on commit push" + ) + + if ( + _config.get("PAGURE_CI_SERVICES") + and project.ci_hook + and project.ci_hook.active_commit + and not project.private + ): pagure.lib.tasks_services.trigger_ci_build.delay( project_name=project.fullname, cause=revs[-1], branch=refname, - ci_type=project.ci_hook.ci_type + ci_type=project.ci_hook.ci_type, ) def inform_pull_request_urls( - session, project, commits, refname, default_branch): - ''' Inform the user about the URLs to open a new pull-request or visit + session, project, commits, refname, default_branch +): + """ Inform the user about the URLs to open a new pull-request or visit the existing one. - ''' + """ target_repo = project if project.is_fork: target_repo = project.parent - if commits and refname != default_branch\ - and target_repo.settings.get('pull_requests', True): + if ( + commits + and refname != default_branch + and target_repo.settings.get("pull_requests", True) + ): print() prs = pagure.lib.search_pull_requests( session, project_id_from=project.id, - status='Open', + status="Open", branch_from=refname, ) # Link to existing PRs if there are any @@ -156,20 +168,22 @@ def inform_pull_request_urls( pagure.lib.tasks.link_pr_to_ticket.delay(pr.uid) # Inform the user about the PR - print('View pull-request for %s' % refname) - print(' %s/%s/pull-request/%s' % ( - _config['APP_URL'].rstrip('/'), - pr.project.url_path, - pr.id) + print("View pull-request for %s" % refname) + print( + " %s/%s/pull-request/%s" + % (_config["APP_URL"].rstrip("/"), pr.project.url_path, pr.id) ) # If no existing PRs, provide the link to open one if not seen: - print('Create a pull-request for %s' % refname) - print(' %s/%s/diff/%s..%s' % ( - _config['APP_URL'].rstrip('/'), - project.url_path, - default_branch, - refname) + print("Create a pull-request for %s" % refname) + print( 
+ " %s/%s/diff/%s..%s" + % ( + _config["APP_URL"].rstrip("/"), + project.url_path, + default_branch, + refname, + ) ) print() @@ -179,27 +193,28 @@ def run_as_post_receive_hook(): repo = pagure.lib.git.get_repo_name(abspath) username = pagure.lib.git.get_username(abspath) namespace = pagure.lib.git.get_repo_namespace(abspath) - if _config.get('HOOK_DEBUG', False): - print('repo:', repo) - print('user:', username) - print('namespace:', namespace) + if _config.get("HOOK_DEBUG", False): + print("repo:", repo) + print("user:", username) + print("namespace:", namespace) - session = pagure.lib.create_session(_config['DB_URL']) + session = pagure.lib.create_session(_config["DB_URL"]) project = pagure.lib._get_project( - session, repo, user=username, namespace=namespace) + session, repo, user=username, namespace=namespace + ) for line in sys.stdin: - if _config.get('HOOK_DEBUG', False): + if _config.get("HOOK_DEBUG", False): print(line) - (oldrev, newrev, refname) = line.strip().split(' ', 2) + (oldrev, newrev, refname) = line.strip().split(" ", 2) - if _config.get('HOOK_DEBUG', False): - print(' -- Old rev') + if _config.get("HOOK_DEBUG", False): + print(" -- Old rev") print(oldrev) - print(' -- New rev') + print(" -- New rev") print(newrev) - print(' -- Ref name') + print(" -- Ref name") print(refname) # Retrieve the default branch @@ -209,27 +224,32 @@ def run_as_post_receive_hook(): default_branch = repo_obj.head.shorthand forced = False - if set(newrev) == set(['0']): - print("Deleting a reference/branch, so we won't run the " - "pagure hook") + if set(newrev) == set(["0"]): + print( + "Deleting a reference/branch, so we won't run the " + "pagure hook" + ) return - elif set(oldrev) == set(['0']): - oldrev = '^%s' % oldrev + elif set(oldrev) == set(["0"]): + oldrev = "^%s" % oldrev elif pagure.lib.git.is_forced_push(oldrev, newrev, abspath): forced = True base = pagure.lib.git.get_base_revision(oldrev, newrev, abspath) if base: oldrev = base[0] - refname = 
refname.replace('refs/heads/', '') + refname = refname.replace("refs/heads/", "") commits = pagure.lib.git.get_revs_between( - oldrev, newrev, abspath, refname) + oldrev, newrev, abspath, refname + ) if refname == default_branch: - print('Sending to redis to log activity and send commit ' - 'notification emails') + print( + "Sending to redis to log activity and send commit " + "notification emails" + ) else: - print('Sending to redis to send commit notification emails') + print("Sending to redis to send commit notification emails") # This is logging the commit to the log table in the DB so we can # render commits in the calendar heatmap. @@ -252,14 +272,15 @@ def run_as_post_receive_hook(): # Now display to the user if this isn't the default branch links to # open a new pr or review the existing one inform_pull_request_urls( - session, project, commits, refname, default_branch) + session, project, commits, refname, default_branch + ) # Schedule refresh of all opened PRs parent = project.parent or project pagure.lib.tasks.refresh_pr_cache.delay( parent.name, parent.namespace, - parent.user.user if parent.is_fork else None + parent.user.user if parent.is_fork else None, ) session.remove() @@ -269,5 +290,5 @@ def main(args): run_as_post_receive_hook() -if __name__ == '__main__': +if __name__ == "__main__": main(sys.argv[1:]) diff --git a/pagure/hooks/files/git_multimail.py b/pagure/hooks/files/git_multimail.py index 30eb1c6..31e0c03 100755 --- a/pagure/hooks/files/git_multimail.py +++ b/pagure/hooks/files/git_multimail.py @@ -79,33 +79,31 @@ except ImportError: DEBUG = False -ZEROS = '0' * 40 -LOGBEGIN = '- Log -------------------------------------------------------'\ - '----------\n' -LOGEND = '---------------------------------------------------------------'\ - '--------\n' +ZEROS = "0" * 40 +LOGBEGIN = "- Log -------------------------------------------------------" "----------\n" +LOGEND = "---------------------------------------------------------------" 
"--------\n" -ADDR_HEADERS = set(['from', 'to', 'cc', 'bcc', 'reply-to', 'sender']) +ADDR_HEADERS = set(["from", "to", "cc", "bcc", "reply-to", "sender"]) # It is assumed in many places that the encoding is uniformly UTF-8, # so changing these constants is unsupported. But define them here # anyway, to make it easier to find (at least most of) the places # where the encoding is important. -(ENCODING, CHARSET) = ('UTF-8', 'utf-8') +(ENCODING, CHARSET) = ("UTF-8", "utf-8") REF_CREATED_SUBJECT_TEMPLATE = ( - '%(emailprefix)s%(refname_type)s %(short_refname)s created' - ' (now %(newrev_short)s)' - ) + "%(emailprefix)s%(refname_type)s %(short_refname)s created" + " (now %(newrev_short)s)" +) REF_UPDATED_SUBJECT_TEMPLATE = ( - '%(emailprefix)s%(refname_type)s %(short_refname)s updated' - ' (%(oldrev_short)s -> %(newrev_short)s)' - ) + "%(emailprefix)s%(refname_type)s %(short_refname)s updated" + " (%(oldrev_short)s -> %(newrev_short)s)" +) REF_DELETED_SUBJECT_TEMPLATE = ( - '%(emailprefix)s%(refname_type)s %(short_refname)s deleted' - ' (was %(oldrev_short)s)' - ) + "%(emailprefix)s%(refname_type)s %(short_refname)s deleted" + " (was %(oldrev_short)s)" +) REFCHANGE_HEADER_TEMPLATE = """\ Date: %(send_date)s @@ -272,8 +270,8 @@ class CommandError(Exception): self.retcode = retcode Exception.__init__( self, - 'Command "%s" failed with retcode %s' % (' '.join(cmd), retcode,) - ) + 'Command "%s" failed with retcode %s' % (" ".join(cmd), retcode), + ) class ConfigurationException(Exception): @@ -281,7 +279,7 @@ class ConfigurationException(Exception): # The "git" program (this could be changed to include a full path): -GIT_EXECUTABLE = 'git' +GIT_EXECUTABLE = "git" # How "git" should be invoked (including global arguments), as a list @@ -303,11 +301,13 @@ def choose_git_command(): # output of "git --version", though if we needed more # specific version information this would be the place to # do it. 
- cmd = [GIT_EXECUTABLE, '-c', 'foo.bar=baz', '--version'] + cmd = [GIT_EXECUTABLE, "-c", "foo.bar=baz", "--version"] read_output(cmd) GIT_CMD = [ - GIT_EXECUTABLE, '-c', - 'i18n.logoutputencoding=%s' % (ENCODING,)] + GIT_EXECUTABLE, + "-c", + "i18n.logoutputencoding=%s" % (ENCODING,), + ] except CommandError: GIT_CMD = [GIT_EXECUTABLE] @@ -318,8 +318,7 @@ def read_git_output(args, input=None, keepends=False, **kw): if GIT_CMD is None: choose_git_command() - return read_output( - GIT_CMD + args, input=input, keepends=keepends, **kw) + return read_output(GIT_CMD + args, input=input, keepends=keepends, **kw) def read_output(cmd, input=None, keepends=False, **kw): @@ -328,15 +327,14 @@ def read_output(cmd, input=None, keepends=False, **kw): else: stdin = None p = subprocess.Popen( - cmd, stdin=stdin, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, **kw - ) + cmd, stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kw + ) (out, err) = p.communicate(input) retcode = p.wait() if retcode: raise CommandError(cmd, retcode) if not keepends: - out = out.rstrip('\n\r') + out = out.rstrip("\n\r") return out @@ -353,11 +351,12 @@ def header_encode(text, header_name=None): try: if isinstance(text, str): - text = text.decode(ENCODING, 'replace') + text = text.decode(ENCODING, "replace") return Header(text, header_name=header_name).encode() except UnicodeEncodeError: - return Header(text, header_name=header_name, charset=CHARSET, - errors='replace').encode() + return Header( + text, header_name=header_name, charset=CHARSET, errors="replace" + ).encode() def addr_header_encode(text, header_name=None): @@ -365,12 +364,12 @@ def addr_header_encode(text, header_name=None): email addresses.""" return Header( - ', '.join( + ", ".join( formataddr((header_encode(name), emailaddr)) for name, emailaddr in getaddresses([text]) - ), - header_name=header_name - ).encode() + ), + header_name=header_name, + ).encode() class Config(object): @@ -385,7 +384,7 @@ class Config(object): 
self.section = section if git_config: self.env = os.environ.copy() - self.env['GIT_CONFIG'] = git_config + self.env["GIT_CONFIG"] = git_config else: self.env = None @@ -393,16 +392,24 @@ class Config(object): def _split(s): """Split NUL-terminated values.""" - words = s.split('\0') - assert words[-1] == '' + words = s.split("\0") + assert words[-1] == "" return words[:-1] def get(self, name, default=None): try: - values = self._split(read_git_output( - ['config', '--get', '--null', '%s.%s' % (self.section, name)], - env=self.env, keepends=True, - )) + values = self._split( + read_git_output( + [ + "config", + "--get", + "--null", + "%s.%s" % (self.section, name), + ], + env=self.env, + keepends=True, + ) + ) assert len(values) == 1 return values[0] except CommandError: @@ -411,12 +418,12 @@ class Config(object): def get_bool(self, name, default=None): try: value = read_git_output( - ['config', '--get', '--bool', '%s.%s' % (self.section, name)], + ["config", "--get", "--bool", "%s.%s" % (self.section, name)], env=self.env, - ) + ) except CommandError: return default - return value == 'true' + return value == "true" def get_all(self, name, default=None): """Read a (possibly multivalued) setting from the configuration. 
@@ -425,11 +432,18 @@ class Config(object): is unset.""" try: - return self._split(read_git_output( - ['config', '--get-all', '--null', '%s.%s' % ( - self.section, name)], - env=self.env, keepends=True, - )) + return self._split( + read_git_output( + [ + "config", + "--get-all", + "--null", + "%s.%s" % (self.section, name), + ], + env=self.env, + keepends=True, + ) + ) except CommandError as e: if e.retcode == 1: # "the section or key is invalid"; i.e., there is no @@ -448,19 +462,18 @@ class Config(object): lines = self.get_all(name, default=None) if lines is None: return default - return ', '.join(line.strip() for line in lines) + return ", ".join(line.strip() for line in lines) def set(self, name, value): read_git_output( - ['config', '%s.%s' % (self.section, name), value], - env=self.env, - ) + ["config", "%s.%s" % (self.section, name), value], env=self.env + ) def add(self, name, value): read_git_output( - ['config', '--add', '%s.%s' % (self.section, name), value], + ["config", "--add", "%s.%s" % (self.section, name), value], env=self.env, - ) + ) def has_key(self, name): return self.get_all(name, default=None) is not None @@ -468,9 +481,9 @@ class Config(object): def unset_all(self, name): try: read_git_output( - ['config', '--unset-all', '%s.%s' % (self.section, name)], + ["config", "--unset-all", "%s.%s" % (self.section, name)], env=self.env, - ) + ) except CommandError as e: if e.retcode == 5: # The name doesn't exist, which is what we wanted anyway... 
@@ -492,11 +505,9 @@ def generate_summaries(*log_args): commit specified by log_args (subject is the first line of the commit message as a string without EOLs).""" - cmd = [ - 'log', '--abbrev', '--format=%h %s', - ] + list(log_args) + ['--'] + cmd = ["log", "--abbrev", "--format=%h %s"] + list(log_args) + ["--"] for line in read_git_lines(cmd): - yield tuple(line.split(' ', 1)) + yield tuple(line.split(" ", 1)) def limit_lines(lines, max_lines): @@ -505,14 +516,14 @@ def limit_lines(lines, max_lines): yield line if index >= max_lines: - yield '... %d lines suppressed ...\n' % (index + 1 - max_lines,) + yield "... %d lines suppressed ...\n" % (index + 1 - max_lines,) def limit_linelength(lines, max_linelength): for line in lines: # Don't forget that lines always include a trailing newline. if len(line) > max_linelength + 1: - line = line[:max_linelength - 7] + ' [...]\n' + line = line[: max_linelength - 7] + " [...]\n" yield line @@ -544,31 +555,30 @@ class GitObject(object): self.sha1 = self.type = self.commit_sha1 = None else: self.sha1 = sha1 - self.type = type or read_git_output( - ['cat-file', '-t', self.sha1]) + self.type = type or read_git_output(["cat-file", "-t", self.sha1]) - if self.type == 'commit': + if self.type == "commit": self.commit_sha1 = self.sha1 - elif self.type == 'tag': + elif self.type == "tag": try: self.commit_sha1 = read_git_output( - ['rev-parse', '--verify', '%s^0' % (self.sha1,)] - ) + ["rev-parse", "--verify", "%s^0" % (self.sha1,)] + ) except CommandError: # Cannot deref tag to determine commit_sha1 self.commit_sha1 = None else: self.commit_sha1 = None - self.short = read_git_output(['rev-parse', '--short', sha1]) + self.short = read_git_output(["rev-parse", "--short", sha1]) def get_summary(self): """Return (sha1_short, subject) for this commit.""" if not self.sha1: - raise ValueError('Empty commit has no summary') + raise ValueError("Empty commit has no summary") - return next(iter(generate_summaries('--no-walk', self.sha1))) + 
return next(iter(generate_summaries("--no-walk", self.sha1))) def __eq__(self, other): return isinstance(other, GitObject) and self.sha1 == other.sha1 @@ -647,25 +657,23 @@ class Change(object): values = self.get_values(**extra_values) for line in template.splitlines(): - (name, value) = line.split(':', 1) + (name, value) = line.split(":", 1) try: value = value % values except KeyError as e: if DEBUG: sys.stderr.write( - 'Warning: unknown variable %r in the following ' - 'line; line skipped:\n' - ' %s\n' - % (e.args[0], line,) - ) + "Warning: unknown variable %r in the following " + "line; line skipped:\n" + " %s\n" % (e.args[0], line) + ) else: if name.lower() in ADDR_HEADERS: value = addr_header_encode(value, name) else: value = header_encode(value, name) - for splitline in ( - '%s: %s\n' % (name, value)).splitlines(True): + for splitline in ("%s: %s\n" % (name, value)).splitlines(True): yield splitline def generate_email_header(self): @@ -714,7 +722,7 @@ class Change(object): for line in self.generate_email_header(**extra_header_values): yield line - yield '\n' + yield "\n" for line in self.generate_email_intro(): yield line @@ -740,38 +748,40 @@ class Revision(Change): self.num = num self.tot = tot self.author = read_git_output( - ['log', '--no-walk', '--format=%aN <%aE>', self.rev.sha1]) + ["log", "--no-walk", "--format=%aN <%aE>", self.rev.sha1] + ) self.recipients = self.environment.get_revision_recipients(self) def _compute_values(self): values = Change._compute_values(self) oneline = read_git_output( - ['log', '--format=%s', '--no-walk', self.rev.sha1] - ) + ["log", "--format=%s", "--no-walk", self.rev.sha1] + ) - values['rev'] = self.rev.sha1 - values['rev_short'] = self.rev.short - values['change_type'] = self.change_type - values['refname'] = self.refname - values['short_refname'] = self.reference_change.short_refname - values['refname_type'] = self.reference_change.refname_type - values['reply_to_msgid'] = self.reference_change.msgid - values['num'] = 
self.num - values['tot'] = self.tot - values['recipients'] = self.recipients - values['oneline'] = oneline - values['author'] = self.author + values["rev"] = self.rev.sha1 + values["rev_short"] = self.rev.short + values["change_type"] = self.change_type + values["refname"] = self.refname + values["short_refname"] = self.reference_change.short_refname + values["refname_type"] = self.reference_change.refname_type + values["reply_to_msgid"] = self.reference_change.msgid + values["num"] = self.num + values["tot"] = self.tot + values["recipients"] = self.recipients + values["oneline"] = oneline + values["author"] = self.author reply_to = self.environment.get_reply_to_commit(self) if reply_to: - values['reply_to'] = reply_to + values["reply_to"] = reply_to return values def generate_email_header(self, **extra_values): for line in self.expand_header_lines( - REVISION_HEADER_TEMPLATE, **extra_values): + REVISION_HEADER_TEMPLATE, **extra_values + ): yield line def generate_email_intro(self): @@ -782,9 +792,9 @@ class Revision(Change): """Show this revision.""" return read_git_lines( - ['log'] + self.environment.commitlogopts + ['-1', self.rev.sha1], + ["log"] + self.environment.commitlogopts + ["-1", self.rev.sha1], keepends=True, - ) + ) def generate_email_footer(self): return self.expand_lines(REVISION_FOOTER_TEMPLATE) @@ -803,7 +813,7 @@ class ReferenceChange(Change): create() method, which has the logic to decide which derived class to instantiate.""" - REF_RE = re.compile(r'^refs\/(?P[^\/]+)\/(?P.*)$') + REF_RE = re.compile(r"^refs\/(?P[^\/]+)\/(?P.*)$") @staticmethod def create(environment, oldrev, newrev, refname): @@ -824,57 +834,60 @@ class ReferenceChange(Change): # - annotated tag m = ReferenceChange.REF_RE.match(refname) if m: - area = m.group('area') - short_refname = m.group('shortname') + area = m.group("area") + short_refname = m.group("shortname") else: - area = '' + area = "" short_refname = refname - if rev.type == 'tag': + if rev.type == "tag": # 
Annotated tag: klass = AnnotatedTagChange - elif rev.type == 'commit': - if area == 'tags': + elif rev.type == "commit": + if area == "tags": # Non-annotated tag: klass = NonAnnotatedTagChange - elif area == 'heads': + elif area == "heads": # Branch: klass = BranchChange - elif area == 'remotes': + elif area == "remotes": # Tracking branch: sys.stderr.write( - '*** Push-update of tracking branch %r\n' - '*** - incomplete email generated.\n' % (refname,) - ) + "*** Push-update of tracking branch %r\n" + "*** - incomplete email generated.\n" % (refname,) + ) klass = OtherReferenceChange else: # Some other reference namespace: sys.stderr.write( - '*** Push-update of strange reference %r\n' - '*** - incomplete email generated.\n' % (refname,) - ) + "*** Push-update of strange reference %r\n" + "*** - incomplete email generated.\n" % (refname,) + ) klass = OtherReferenceChange else: # Anything else (is there anything else?) sys.stderr.write( - '*** Unknown type of update to %r (%s)\n' - '*** - incomplete email generated.\n' % (refname, rev.type,) - ) + "*** Unknown type of update to %r (%s)\n" + "*** - incomplete email generated.\n" % (refname, rev.type) + ) klass = OtherReferenceChange return klass( environment, - refname=refname, short_refname=short_refname, - old=old, new=new, rev=rev, - ) + refname=refname, + short_refname=short_refname, + old=old, + new=new, + rev=rev, + ) def __init__(self, environment, refname, short_refname, old, new, rev): Change.__init__(self, environment) self.change_type = { - (False, True): 'create', - (True, True): 'update', - (True, False): 'delete', - }[bool(old), bool(new)] + (False, True): "create", + (True, True): "update", + (True, False): "delete", + }[bool(old), bool(new)] self.refname = refname self.short_refname = short_refname self.old = old @@ -889,42 +902,43 @@ class ReferenceChange(Change): def _compute_values(self): values = Change._compute_values(self) - values['change_type'] = self.change_type - values['refname_type'] = 
self.refname_type - values['refname'] = self.refname - values['short_refname'] = self.short_refname - values['msgid'] = self.msgid - values['recipients'] = self.recipients - values['oldrev'] = "%s" % self.old - values['oldrev_short'] = self.old.short - values['newrev'] = "%s" % self.new - values['newrev_short'] = self.new.short + values["change_type"] = self.change_type + values["refname_type"] = self.refname_type + values["refname"] = self.refname + values["short_refname"] = self.short_refname + values["msgid"] = self.msgid + values["recipients"] = self.recipients + values["oldrev"] = "%s" % self.old + values["oldrev_short"] = self.old.short + values["newrev"] = "%s" % self.new + values["newrev_short"] = self.new.short if self.old: - values['oldrev_type'] = self.old.type + values["oldrev_type"] = self.old.type if self.new: - values['newrev_type'] = self.new.type + values["newrev_type"] = self.new.type reply_to = self.environment.get_reply_to_refchange(self) if reply_to: - values['reply_to'] = reply_to + values["reply_to"] = reply_to return values def get_subject(self): template = { - 'create': REF_CREATED_SUBJECT_TEMPLATE, - 'update': REF_UPDATED_SUBJECT_TEMPLATE, - 'delete': REF_DELETED_SUBJECT_TEMPLATE, - }[self.change_type] + "create": REF_CREATED_SUBJECT_TEMPLATE, + "update": REF_UPDATED_SUBJECT_TEMPLATE, + "delete": REF_DELETED_SUBJECT_TEMPLATE, + }[self.change_type] return self.expand(template) def generate_email_header(self, **extra_values): - if 'subject' not in extra_values: - extra_values['subject'] = self.get_subject() + if "subject" not in extra_values: + extra_values["subject"] = self.get_subject() for line in self.expand_header_lines( - REFCHANGE_HEADER_TEMPLATE, **extra_values): + REFCHANGE_HEADER_TEMPLATE, **extra_values + ): yield line def generate_email_intro(self): @@ -938,10 +952,10 @@ class ReferenceChange(Change): generate_update_summary() / generate_delete_summary().""" change_summary = { - 'create': self.generate_create_summary, - 'delete': 
self.generate_delete_summary, - 'update': self.generate_update_summary, - }[self.change_type](push) + "create": self.generate_create_summary, + "delete": self.generate_delete_summary, + "update": self.generate_update_summary, + }[self.change_type](push) for line in change_summary: yield line @@ -953,14 +967,15 @@ class ReferenceChange(Change): def generate_revision_change_log(self, new_commits_list): if self.showlog: - yield '\n' - yield 'Detailed log of new commits:\n\n' + yield "\n" + yield "Detailed log of new commits:\n\n" for line in read_git_lines( - ['log', '--no-walk'] - + self.logopts - + new_commits_list - + ['--'], - keepends=True,): + ["log", "--no-walk"] + + self.logopts + + new_commits_list + + ["--"], + keepends=True, + ): yield line def generate_revision_change_summary(self, push): @@ -976,25 +991,27 @@ class ReferenceChange(Change): sha1s.reverse() tot = len(sha1s) new_revisions = [ - Revision(self, GitObject(sha1), num=i+1, tot=tot) + Revision(self, GitObject(sha1), num=i + 1, tot=tot) for (i, sha1) in enumerate(sha1s) - ] + ] if new_revisions: yield self.expand( - 'This %(refname_type)s includes the following new ' - 'commits:\n') - yield '\n' + "This %(refname_type)s includes the following new " + "commits:\n" + ) + yield "\n" for r in new_revisions: (sha1, subject) = r.rev.get_summary() yield r.expand( - BRIEF_SUMMARY_TEMPLATE, action='new', text=subject, - ) - yield '\n' + BRIEF_SUMMARY_TEMPLATE, action="new", text=subject + ) + yield "\n" for line in self.expand_lines(NEW_REVISIONS_TEMPLATE, tot=tot): yield line for line in self.generate_revision_change_log( - [r.rev.sha1 for r in new_revisions]): + [r.rev.sha1 for r in new_revisions] + ): yield line else: for line in self.expand_lines(NO_NEW_REVISIONS_TEMPLATE): @@ -1011,17 +1028,22 @@ class ReferenceChange(Change): # have already had notification emails; we want such # revisions in the summary even though we will not send # new notification emails for them. 
- adds = list(generate_summaries( - '--topo-order', '--reverse', '%s..%s' - % (self.old.commit_sha1, self.new.commit_sha1,) - )) + adds = list( + generate_summaries( + "--topo-order", + "--reverse", + "%s..%s" % (self.old.commit_sha1, self.new.commit_sha1), + ) + ) # List of the revisions that were removed from the branch # by this update. This will be empty except for # non-fast-forward updates. - discards = list(generate_summaries( - '%s..%s' % (self.new.commit_sha1, self.old.commit_sha1,) - )) + discards = list( + generate_summaries( + "%s..%s" % (self.new.commit_sha1, self.old.commit_sha1) + ) + ) if adds: new_commits_list = push.get_new_commits(self) @@ -1030,72 +1052,83 @@ class ReferenceChange(Change): new_commits = CommitSet(new_commits_list) if discards: - discarded_commits = CommitSet( - push.get_discarded_commits(self)) + discarded_commits = CommitSet(push.get_discarded_commits(self)) else: discarded_commits = CommitSet([]) if discards and adds: for (sha1, subject) in discards: if sha1 in discarded_commits: - action = 'discards' + action = "discards" else: - action = 'omits' + action = "omits" yield self.expand( - BRIEF_SUMMARY_TEMPLATE, action=action, - rev_short=sha1, text=subject, - ) + BRIEF_SUMMARY_TEMPLATE, + action=action, + rev_short=sha1, + text=subject, + ) for (sha1, subject) in adds: if sha1 in new_commits: - action = 'new' + action = "new" else: - action = 'adds' + action = "adds" yield self.expand( - BRIEF_SUMMARY_TEMPLATE, action=action, - rev_short=sha1, text=subject, - ) - yield '\n' + BRIEF_SUMMARY_TEMPLATE, + action=action, + rev_short=sha1, + text=subject, + ) + yield "\n" for line in self.expand_lines(NON_FF_TEMPLATE): yield line elif discards: for (sha1, subject) in discards: if sha1 in discarded_commits: - action = 'discards' + action = "discards" else: - action = 'omits' + action = "omits" yield self.expand( - BRIEF_SUMMARY_TEMPLATE, action=action, - rev_short=sha1, text=subject, - ) - yield '\n' + BRIEF_SUMMARY_TEMPLATE, + 
action=action, + rev_short=sha1, + text=subject, + ) + yield "\n" for line in self.expand_lines(REWIND_ONLY_TEMPLATE): yield line elif adds: (sha1, subject) = self.old.get_summary() yield self.expand( - BRIEF_SUMMARY_TEMPLATE, action='from', - rev_short=sha1, text=subject, - ) + BRIEF_SUMMARY_TEMPLATE, + action="from", + rev_short=sha1, + text=subject, + ) for (sha1, subject) in adds: if sha1 in new_commits: - action = 'new' + action = "new" else: - action = 'adds' + action = "adds" yield self.expand( - BRIEF_SUMMARY_TEMPLATE, action=action, - rev_short=sha1, text=subject, - ) + BRIEF_SUMMARY_TEMPLATE, + action=action, + rev_short=sha1, + text=subject, + ) - yield '\n' + yield "\n" if new_commits: for line in self.expand_lines( - NEW_REVISIONS_TEMPLATE, tot=len(new_commits)): + NEW_REVISIONS_TEMPLATE, tot=len(new_commits) + ): yield line for line in self.generate_revision_change_log( - new_commits_list): + new_commits_list + ): yield line else: for line in self.expand_lines(NO_NEW_REVISIONS_TEMPLATE): @@ -1108,14 +1141,14 @@ class ReferenceChange(Change): # random revision at this point - the user will be interested # in what this revision changed - including the undoing of # previous revisions in the case of non-fast-forward updates. 
- yield '\n' - yield 'Summary of changes:\n' + yield "\n" + yield "Summary of changes:\n" for line in read_git_lines( - ['diff-tree'] - + self.diffopts - + ['%s..%s' % ( - self.old.commit_sha1, self.new.commit_sha1,)], - keepends=True,): + ["diff-tree"] + + self.diffopts + + ["%s..%s" % (self.old.commit_sha1, self.new.commit_sha1)], + keepends=True, + ): yield line elif self.old.commit_sha1 and not self.new.commit_sha1: @@ -1125,23 +1158,21 @@ class ReferenceChange(Change): sha1s = list(push.get_discarded_commits(self)) tot = len(sha1s) discarded_revisions = [ - Revision(self, GitObject(sha1), num=i+1, tot=tot) + Revision(self, GitObject(sha1), num=i + 1, tot=tot) for (i, sha1) in enumerate(sha1s) - ] + ] if discarded_revisions: for line in self.expand_lines(DISCARDED_REVISIONS_TEMPLATE): yield line - yield '\n' + yield "\n" for r in discarded_revisions: (sha1, subject) = r.rev.get_summary() yield r.expand( - BRIEF_SUMMARY_TEMPLATE, action='discards', - text=subject, - ) + BRIEF_SUMMARY_TEMPLATE, action="discards", text=subject + ) else: - for line in self.expand_lines( - NO_DISCARDED_REVISIONS_TEMPLATE): + for line in self.expand_lines(NO_DISCARDED_REVISIONS_TEMPLATE): yield line elif not self.old.commit_sha1 and not self.new.commit_sha1: @@ -1154,10 +1185,9 @@ class ReferenceChange(Change): # This is a new reference and so oldrev is not valid (sha1, subject) = self.new.get_summary() yield self.expand( - BRIEF_SUMMARY_TEMPLATE, action='at', - rev_short=sha1, text=subject, - ) - yield '\n' + BRIEF_SUMMARY_TEMPLATE, action="at", rev_short=sha1, text=subject + ) + yield "\n" def generate_update_summary(self, push): """Called for the change of a pre-existing branch.""" @@ -1169,42 +1199,46 @@ class ReferenceChange(Change): (sha1, subject) = self.old.get_summary() yield self.expand( - BRIEF_SUMMARY_TEMPLATE, action='was', - rev_short=sha1, text=subject, - ) - yield '\n' + BRIEF_SUMMARY_TEMPLATE, action="was", rev_short=sha1, text=subject + ) + yield "\n" class 
BranchChange(ReferenceChange): - refname_type = 'branch' + refname_type = "branch" def __init__(self, environment, refname, short_refname, old, new, rev): ReferenceChange.__init__( - self, environment, - refname=refname, short_refname=short_refname, - old=old, new=new, rev=rev, - ) + self, + environment, + refname=refname, + short_refname=short_refname, + old=old, + new=new, + rev=rev, + ) self.recipients = environment.get_refchange_recipients(self) class AnnotatedTagChange(ReferenceChange): - refname_type = 'annotated tag' + refname_type = "annotated tag" def __init__(self, environment, refname, short_refname, old, new, rev): ReferenceChange.__init__( - self, environment, - refname=refname, short_refname=short_refname, - old=old, new=new, rev=rev, - ) + self, + environment, + refname=refname, + short_refname=short_refname, + old=old, + new=new, + rev=rev, + ) self.recipients = environment.get_announce_recipients(self) self.show_shortlog = environment.announce_show_shortlog ANNOTATED_TAG_FORMAT = ( - '%(*objectname)\n' - '%(*objecttype)\n' - '%(taggername)\n' - '%(taggerdate)' - ) + "%(*objectname)\n" "%(*objecttype)\n" "%(taggername)\n" "%(taggerdate)" + ) def describe_tag(self, push): """Describe the new value of an annotated tag.""" @@ -1212,69 +1246,81 @@ class AnnotatedTagChange(ReferenceChange): # Use git for-each-ref to pull out the individual fields from # the tag [tagobject, tagtype, tagger, tagged] = read_git_lines( - ['for-each-ref', '--format=%s' % ( - self.ANNOTATED_TAG_FORMAT,), self.refname], - ) + [ + "for-each-ref", + "--format=%s" % (self.ANNOTATED_TAG_FORMAT,), + self.refname, + ] + ) yield self.expand( - BRIEF_SUMMARY_TEMPLATE, action='tagging', - rev_short=tagobject, text='(%s)' % (tagtype,), - ) - if tagtype == 'commit': + BRIEF_SUMMARY_TEMPLATE, + action="tagging", + rev_short=tagobject, + text="(%s)" % (tagtype,), + ) + if tagtype == "commit": # If the tagged object is a commit, then we assume this is a # release, and so we calculate which 
tag this tag is # replacing try: prevtag = read_git_output( - ['describe', '--abbrev=0', '%s^' % (self.new,)]) + ["describe", "--abbrev=0", "%s^" % (self.new,)] + ) except CommandError: prevtag = None if prevtag: - yield ' replaces %s\n' % (prevtag,) + yield " replaces %s\n" % (prevtag,) else: prevtag = None - yield ' length %s bytes\n' % (read_git_output( - ['cat-file', '-s', tagobject]),) + yield " length %s bytes\n" % ( + read_git_output(["cat-file", "-s", tagobject]), + ) - yield ' tagged by %s\n' % (tagger,) - yield ' on %s\n' % (tagged,) - yield '\n' + yield " tagged by %s\n" % (tagger,) + yield " on %s\n" % (tagged,) + yield "\n" # Show the content of the tag message; this might contain a # change log or release notes so is worth displaying. yield LOGBEGIN - contents = list(read_git_lines( - ['cat-file', 'tag', self.new.sha1], keepends=True)) - contents = contents[contents.index('\n') + 1:] - if contents and contents[-1][-1:] != '\n': - contents.append('\n') + contents = list( + read_git_lines(["cat-file", "tag", self.new.sha1], keepends=True) + ) + contents = contents[contents.index("\n") + 1 :] + if contents and contents[-1][-1:] != "\n": + contents.append("\n") for line in contents: yield line - if self.show_shortlog and tagtype == 'commit': + if self.show_shortlog and tagtype == "commit": # Only commit tags make sense to have rev-list operations # performed on them - yield '\n' + yield "\n" if prevtag: # Show changes since the previous release revlist = read_git_output( - ['rev-list', '--pretty=short', - '%s..%s' % (prevtag, self.new,)], + [ + "rev-list", + "--pretty=short", + "%s..%s" % (prevtag, self.new), + ], keepends=True, - ) + ) else: # No previous tag, show all the changes since time # began revlist = read_git_output( - ['rev-list', '--pretty=short', '%s' % (self.new,)], + ["rev-list", "--pretty=short", "%s" % (self.new,)], keepends=True, - ) + ) for line in read_git_lines( - ['shortlog'], input=revlist, keepends=True): + ["shortlog"], 
input=revlist, keepends=True + ): yield line yield LOGEND - yield '\n' + yield "\n" def generate_create_summary(self, push): """Called for the creation of an annotated tag.""" @@ -1302,19 +1348,23 @@ class AnnotatedTagChange(ReferenceChange): for line in self.expand_lines(TAG_DELETED_TEMPLATE): yield line - yield self.expand(' tag was %(oldrev_short)s\n') - yield '\n' + yield self.expand(" tag was %(oldrev_short)s\n") + yield "\n" class NonAnnotatedTagChange(ReferenceChange): - refname_type = 'tag' + refname_type = "tag" def __init__(self, environment, refname, short_refname, old, new, rev): ReferenceChange.__init__( - self, environment, - refname=refname, short_refname=short_refname, - old=old, new=new, rev=rev, - ) + self, + environment, + refname=refname, + short_refname=short_refname, + old=old, + new=new, + rev=rev, + ) self.recipients = environment.get_refchange_recipients(self) def generate_create_summary(self, push): @@ -1340,17 +1390,21 @@ class NonAnnotatedTagChange(ReferenceChange): class OtherReferenceChange(ReferenceChange): - refname_type = 'reference' + refname_type = "reference" def __init__(self, environment, refname, short_refname, old, new, rev): # We use the full refname as short_refname, because otherwise # the full name of the reference would not be obvious from the # text of the email. 
ReferenceChange.__init__( - self, environment, - refname=refname, short_refname=refname, - old=old, new=new, rev=rev, - ) + self, + environment, + refname=refname, + short_refname=refname, + old=old, + new=new, + rev=rev, + ) self.recipients = environment.get_refchange_recipients(self) @@ -1375,10 +1429,7 @@ class Mailer(object): class SendMailer(Mailer): """Send emails using 'sendmail -oi -t'.""" - SENDMAIL_CANDIDATES = [ - '/usr/sbin/sendmail', - '/usr/lib/sendmail', - ] + SENDMAIL_CANDIDATES = ["/usr/sbin/sendmail", "/usr/lib/sendmail"] @staticmethod def find_sendmail(): @@ -1387,9 +1438,9 @@ class SendMailer(Mailer): return path else: raise ConfigurationException( - 'No sendmail executable found. ' - 'Try setting multimailhook.sendmailCommand.' - ) + "No sendmail executable found. " + "Try setting multimailhook.sendmailCommand." + ) def __init__(self, command=None, envelopesender=None): """Construct a SendMailer instance. @@ -1402,29 +1453,29 @@ class SendMailer(Mailer): if command: self.command = command[:] else: - self.command = [self.find_sendmail(), '-oi', '-t'] + self.command = [self.find_sendmail(), "-oi", "-t"] if envelopesender: - self.command.extend(['-f', envelopesender]) + self.command.extend(["-f", envelopesender]) def send(self, lines, to_addrs): try: p = subprocess.Popen(self.command, stdin=subprocess.PIPE) except OSError as e: sys.stderr.write( - '*** Cannot execute command: %s\n' % ' '.join(self.command) - + '*** %s\n' % e + "*** Cannot execute command: %s\n" % " ".join(self.command) + + "*** %s\n" % e + '*** Try setting multimailhook.mailer to "smtp"\n' - '*** to send emails without using the sendmail command.\n' - ) + "*** to send emails without using the sendmail command.\n" + ) sys.exit(1) try: p.stdin.writelines(lines) except: sys.stderr.write( - '*** Error while generating commit email\n' - '*** - mail sending aborted.\n' - ) + "*** Error while generating commit email\n" + "*** - mail sending aborted.\n" + ) p.terminate() raise else: @@ 
-1440,11 +1491,11 @@ class SMTPMailer(Mailer): def __init__(self, envelopesender, smtpserver): if not envelopesender: sys.stderr.write( - 'fatal: git_multimail: cannot use SMTPMailer without a ' - 'sender address.\n' - 'please set either multimailhook.envelopeSender or ' - 'user.email\n' - ) + "fatal: git_multimail: cannot use SMTPMailer without a " + "sender address.\n" + "please set either multimailhook.envelopeSender or " + "user.email\n" + ) sys.exit(1) self.envelopesender = envelopesender self.smtpserver = smtpserver @@ -1452,9 +1503,10 @@ class SMTPMailer(Mailer): self.smtp = smtplib.SMTP(self.smtpserver) except Exception as e: sys.stderr.write( - '*** Error establishing SMTP connection to %s***\n' % - self.smtpserver) - sys.stderr.write('*** %s\n' % e) + "*** Error establishing SMTP connection to %s***\n" + % self.smtpserver + ) + sys.stderr.write("*** %s\n" % e) sys.exit(1) def __del__(self): @@ -1462,15 +1514,16 @@ class SMTPMailer(Mailer): def send(self, lines, to_addrs): try: - msg = ''.join(lines) + msg = "".join(lines) # turn comma-separated list into Python list if needed. 
if isinstance(to_addrs, six.string_types): to_addrs = [ - email for (name, email) in getaddresses([to_addrs])] + email for (name, email) in getaddresses([to_addrs]) + ] self.smtp.sendmail(self.envelopesender, to_addrs, msg) except Exception as e: - sys.stderr.write('*** Error sending email***\n') - sys.stderr.write('*** %s\n' % e) + sys.stderr.write("*** Error sending email***\n") + sys.stderr.write("*** %s\n" % e) self.smtp.quit() sys.exit(1) @@ -1481,7 +1534,7 @@ class OutputMailer(Mailer): This is intended for debugging purposes.""" - SEPARATOR = '=' * 75 + '\n' + SEPARATOR = "=" * 75 + "\n" def __init__(self, f): self.f = f @@ -1499,9 +1552,9 @@ def get_git_dir(): from the working directory, using Git's usual rules.""" try: - return read_git_output(['rev-parse', '--git-dir']) + return read_git_output(["rev-parse", "--git-dir"]) except CommandError: - sys.stderr.write('fatal: git_multimail: not in a git directory\n') + sys.stderr.write("fatal: git_multimail: not in a git directory\n") sys.exit(1) @@ -1609,28 +1662,28 @@ class Environment(object): """ - REPO_NAME_RE = re.compile(r'^(?P.+?)(?:\.git)$') + REPO_NAME_RE = re.compile(r"^(?P.+?)(?:\.git)$") def __init__(self, osenv=None): self.osenv = osenv or os.environ self.announce_show_shortlog = False self.maxcommitemails = 500 - self.diffopts = ['--stat', '--summary', '--find-copies-harder'] + self.diffopts = ["--stat", "--summary", "--find-copies-harder"] self.logopts = [] self.refchange_showlog = False - self.commitlogopts = ['-C', '--stat', '-p', '--cc'] + self.commitlogopts = ["-C", "--stat", "-p", "--cc"] self.COMPUTED_KEYS = [ - 'administrator', - 'charset', - 'emailprefix', - 'fromaddr', - 'pusher', - 'pusher_email', - 'repo_path', - 'repo_shortname', - 'sender', - ] + "administrator", + "charset", + "emailprefix", + "fromaddr", + "pusher", + "pusher_email", + "repo_path", + "repo_shortname", + "sender", + ] self._values = None @@ -1641,7 +1694,7 @@ class Environment(object): basename = 
os.path.basename(os.path.abspath(self.get_repo_path())) m = self.REPO_NAME_RE.match(basename) if m: - return m.group('name') + return m.group("name") else: return basename @@ -1652,16 +1705,16 @@ class Environment(object): return None def get_administrator(self): - return 'the administrator of this repository' + return "the administrator of this repository" def get_emailprefix(self): - return '' + return "" def get_repo_path(self): - if read_git_output(['rev-parse', '--is-bare-repository']) == 'true': + if read_git_output(["rev-parse", "--is-bare-repository"]) == "true": path = get_git_dir() else: - path = read_git_output(['rev-parse', '--show-toplevel']) + path = read_git_output(["rev-parse", "--show-toplevel"]) return os.path.abspath(path) def get_charset(self): @@ -1681,7 +1734,7 @@ class Environment(object): values = {} for key in self.COMPUTED_KEYS: - value = getattr(self, 'get_%s' % (key,))() + value = getattr(self, "get_%s" % (key,))() if value is not None: values[key] = value @@ -1760,84 +1813,83 @@ class ConfigOptionsEnvironmentMixin(ConfigEnvironmentMixin): def __init__(self, config, **kw): super(ConfigOptionsEnvironmentMixin, self).__init__( config=config, **kw - ) + ) self.announce_show_shortlog = config.get_bool( - 'announceshortlog', default=self.announce_show_shortlog - ) + "announceshortlog", default=self.announce_show_shortlog + ) self.refchange_showlog = config.get_bool( - 'refchangeshowlog', default=self.refchange_showlog - ) + "refchangeshowlog", default=self.refchange_showlog + ) - maxcommitemails = config.get('maxcommitemails') + maxcommitemails = config.get("maxcommitemails") if maxcommitemails is not None: try: self.maxcommitemails = int(maxcommitemails) except ValueError: sys.stderr.write( - '*** Malformed value for multimailhook.maxCommitEmails: ' - '%s\n' % maxcommitemails - + '*** Expected a number. Ignoring.\n' - ) + "*** Malformed value for multimailhook.maxCommitEmails: " + "%s\n" % maxcommitemails + + "*** Expected a number. 
Ignoring.\n" + ) - diffopts = config.get('diffopts') + diffopts = config.get("diffopts") if diffopts is not None: self.diffopts = shlex.split(diffopts) - logopts = config.get('logopts') + logopts = config.get("logopts") if logopts is not None: self.logopts = shlex.split(logopts) - commitlogopts = config.get('commitlogopts') + commitlogopts = config.get("commitlogopts") if commitlogopts is not None: self.commitlogopts = shlex.split(commitlogopts) - reply_to = config.get('replyTo') + reply_to = config.get("replyTo") self.__reply_to_refchange = config.get( - 'replyToRefchange', default=reply_to) + "replyToRefchange", default=reply_to + ) if ( self.__reply_to_refchange is not None - and self.__reply_to_refchange.lower() == 'author'): + and self.__reply_to_refchange.lower() == "author" + ): raise ConfigurationException( '"author" is not an allowed setting for replyToRefchange' - ) - self.__reply_to_commit = config.get( - 'replyToCommit', default=reply_to) + ) + self.__reply_to_commit = config.get("replyToCommit", default=reply_to) def get_administrator(self): return ( - self.config.get('administrator') + self.config.get("administrator") or self.get_sender() - or super( - ConfigOptionsEnvironmentMixin, self).get_administrator() - ) + or super(ConfigOptionsEnvironmentMixin, self).get_administrator() + ) def get_repo_shortname(self): return ( - self.config.get('reponame') - or super( - ConfigOptionsEnvironmentMixin, self).get_repo_shortname() - ) + self.config.get("reponame") + or super(ConfigOptionsEnvironmentMixin, self).get_repo_shortname() + ) def get_emailprefix(self): - emailprefix = self.config.get('emailprefix') + emailprefix = self.config.get("emailprefix") if emailprefix and emailprefix.strip(): - return emailprefix.strip() + ' ' + return emailprefix.strip() + " " else: - return '[%s] ' % (self.get_repo_shortname(),) + return "[%s] " % (self.get_repo_shortname(),) def get_sender(self): - return self.config.get('envelopesender') + return 
self.config.get("envelopesender") def get_fromaddr(self): - fromaddr = self.config.get('from') + fromaddr = self.config.get("from") if fromaddr: return fromaddr else: - config = Config('user') - fromname = config.get('name', default='Pagure') - fromemail = config.get('email', default='') + config = Config("user") + fromname = config.get("name", default="Pagure") + fromemail = config.get("email", default="") if fromemail: return formataddr([fromname, fromemail]) else: @@ -1848,9 +1900,9 @@ class ConfigOptionsEnvironmentMixin(ConfigEnvironmentMixin): return super( ConfigOptionsEnvironmentMixin, self ).get_reply_to_refchange(refchange) - elif self.__reply_to_refchange.lower() == 'pusher': + elif self.__reply_to_refchange.lower() == "pusher": return self.get_pusher_email() - elif self.__reply_to_refchange.lower() == 'none': + elif self.__reply_to_refchange.lower() == "none": return None else: return self.__reply_to_refchange @@ -1860,11 +1912,11 @@ class ConfigOptionsEnvironmentMixin(ConfigEnvironmentMixin): return super( ConfigOptionsEnvironmentMixin, self ).get_reply_to_commit(revision) - elif self.__reply_to_commit.lower() == 'author': + elif self.__reply_to_commit.lower() == "author": return revision.get_author() - elif self.__reply_to_commit.lower() == 'pusher': + elif self.__reply_to_commit.lower() == "pusher": return self.get_pusher_email() - elif self.__reply_to_commit.lower() == 'none': + elif self.__reply_to_commit.lower() == "none": return None else: return self.__reply_to_commit @@ -1896,12 +1948,12 @@ class FilterLinesEnvironmentMixin(Environment): def filter_body(self, lines): lines = super(FilterLinesEnvironmentMixin, self).filter_body(lines) if self.__strict_utf8: - lines = (line.decode(ENCODING, 'replace') for line in lines) + lines = (line.decode(ENCODING, "replace") for line in lines) # Limit the line length in Unicode-space to avoid # splitting characters: if self.__emailmaxlinelength: lines = limit_linelength(lines, self.__emailmaxlinelength) - 
lines = (line.encode(ENCODING, 'replace') for line in lines) + lines = (line.encode(ENCODING, "replace") for line in lines) elif self.__emailmaxlinelength: lines = limit_linelength(lines, self.__emailmaxlinelength) @@ -1909,22 +1961,22 @@ class FilterLinesEnvironmentMixin(Environment): class ConfigFilterLinesEnvironmentMixin( - ConfigEnvironmentMixin, - FilterLinesEnvironmentMixin,): + ConfigEnvironmentMixin, FilterLinesEnvironmentMixin +): """Handle encoding and maximum line length based on config.""" def __init__(self, config, **kw): - strict_utf8 = config.get_bool('emailstrictutf8', default=None) + strict_utf8 = config.get_bool("emailstrictutf8", default=None) if strict_utf8 is not None: - kw['strict_utf8'] = strict_utf8 + kw["strict_utf8"] = strict_utf8 - emailmaxlinelength = config.get('emailmaxlinelength') + emailmaxlinelength = config.get("emailmaxlinelength") if emailmaxlinelength is not None: - kw['emailmaxlinelength'] = int(emailmaxlinelength) + kw["emailmaxlinelength"] = int(emailmaxlinelength) super(ConfigFilterLinesEnvironmentMixin, self).__init__( config=config, **kw - ) + ) class MaxlinesEnvironmentMixin(Environment): @@ -1942,17 +1994,15 @@ class MaxlinesEnvironmentMixin(Environment): class ConfigMaxlinesEnvironmentMixin( - ConfigEnvironmentMixin, - MaxlinesEnvironmentMixin,): + ConfigEnvironmentMixin, MaxlinesEnvironmentMixin +): """Limit the email body to the number of lines specified in config.""" def __init__(self, config, **kw): - emailmaxlines = int(config.get('emailmaxlines', default='0')) + emailmaxlines = int(config.get("emailmaxlines", default="0")) super(ConfigMaxlinesEnvironmentMixin, self).__init__( - config=config, - emailmaxlines=emailmaxlines, - **kw - ) + config=config, emailmaxlines=emailmaxlines, **kw + ) class FQDNEnvironmentMixin(Environment): @@ -1960,7 +2010,7 @@ class FQDNEnvironmentMixin(Environment): def __init__(self, fqdn, **kw): super(FQDNEnvironmentMixin, self).__init__(**kw) - self.COMPUTED_KEYS += ['fqdn'] + 
self.COMPUTED_KEYS += ["fqdn"] self.__fqdn = fqdn def get_fqdn(self): @@ -1971,18 +2021,14 @@ class FQDNEnvironmentMixin(Environment): return self.__fqdn -class ConfigFQDNEnvironmentMixin( - ConfigEnvironmentMixin, - FQDNEnvironmentMixin,): +class ConfigFQDNEnvironmentMixin(ConfigEnvironmentMixin, FQDNEnvironmentMixin): """Read the FQDN from the config.""" def __init__(self, config, **kw): - fqdn = config.get('fqdn') + fqdn = config.get("fqdn") super(ConfigFQDNEnvironmentMixin, self).__init__( - config=config, - fqdn=fqdn, - **kw - ) + config=config, fqdn=fqdn, **kw + ) class ComputeFQDNEnvironmentMixin(FQDNEnvironmentMixin): @@ -1990,9 +2036,8 @@ class ComputeFQDNEnvironmentMixin(FQDNEnvironmentMixin): def __init__(self, **kw): super(ComputeFQDNEnvironmentMixin, self).__init__( - fqdn=socket.getfqdn(), - **kw - ) + fqdn=socket.getfqdn(), **kw + ) class PusherDomainEnvironmentMixin(ConfigEnvironmentMixin): @@ -2000,23 +2045,26 @@ class PusherDomainEnvironmentMixin(ConfigEnvironmentMixin): def __init__(self, **kw): super(PusherDomainEnvironmentMixin, self).__init__(**kw) - self.__emaildomain = self.config.get('emaildomain') + self.__emaildomain = self.config.get("emaildomain") def get_pusher_email(self): if self.__emaildomain: # Derive the pusher's full email address in the default way: - return '%s@%s' % (self.get_pusher(), self.__emaildomain) + return "%s@%s" % (self.get_pusher(), self.__emaildomain) else: - return super( - PusherDomainEnvironmentMixin, self).get_pusher_email() + return super(PusherDomainEnvironmentMixin, self).get_pusher_email() class StaticRecipientsEnvironmentMixin(Environment): """Set recipients statically based on constructor parameters.""" def __init__( - self, refchange_recipients, announce_recipients, - revision_recipients, **kw): + self, + refchange_recipients, + announce_recipients, + revision_recipients, + **kw + ): super(StaticRecipientsEnvironmentMixin, self).__init__(**kw) # The recipients for various types of notification emails, as 
@@ -2026,10 +2074,10 @@ class StaticRecipientsEnvironmentMixin(Environment): # actual *contents* of the change being reported, we only # choose based on the *type* of the change. Therefore we can # compute them once and for all: - if not (refchange_recipients - or announce_recipients - or revision_recipients): - raise ConfigurationException('No email recipients configured!') + if not ( + refchange_recipients or announce_recipients or revision_recipients + ): + raise ConfigurationException("No email recipients configured!") self.__refchange_recipients = refchange_recipients self.__announce_recipients = announce_recipients self.__revision_recipients = revision_recipients @@ -2045,24 +2093,24 @@ class StaticRecipientsEnvironmentMixin(Environment): class ConfigRecipientsEnvironmentMixin( - ConfigEnvironmentMixin, - StaticRecipientsEnvironmentMixin): + ConfigEnvironmentMixin, StaticRecipientsEnvironmentMixin +): """Determine recipients statically based on config.""" def __init__(self, config, **kw): super(ConfigRecipientsEnvironmentMixin, self).__init__( config=config, refchange_recipients=self._get_recipients( - config, 'refchangelist', 'mailinglist', - ), + config, "refchangelist", "mailinglist" + ), announce_recipients=self._get_recipients( - config, 'announcelist', 'refchangelist', 'mailinglist', - ), + config, "announcelist", "refchangelist", "mailinglist" + ), revision_recipients=self._get_recipients( - config, 'commitlist', 'mailinglist', - ), + config, "commitlist", "mailinglist" + ), **kw - ) + ) def _get_recipients(self, config, *names): """Return the recipients for a particular type of message. 
@@ -2080,7 +2128,7 @@ class ConfigRecipientsEnvironmentMixin( if retval is not None: return retval else: - return '' + return "" class ProjectdescEnvironmentMixin(Environment): @@ -2091,40 +2139,41 @@ class ProjectdescEnvironmentMixin(Environment): def __init__(self, **kw): super(ProjectdescEnvironmentMixin, self).__init__(**kw) - self.COMPUTED_KEYS += ['projectdesc'] + self.COMPUTED_KEYS += ["projectdesc"] def get_projectdesc(self): """Return a one-line descripition of the project.""" git_dir = get_git_dir() try: - with open(os.path.join(git_dir, 'description')) as f: + with open(os.path.join(git_dir, "description")) as f: projectdesc = f.readline().strip() if projectdesc and not projectdesc.startswith( - 'Unnamed repository'): + "Unnamed repository" + ): return projectdesc except IOError: pass - return 'UNNAMED PROJECT' + return "UNNAMED PROJECT" class GenericEnvironmentMixin(Environment): def get_pusher(self): - return self.osenv.get('USER', 'unknown user') + return self.osenv.get("USER", "unknown user") class GenericEnvironment( - ProjectdescEnvironmentMixin, - ConfigMaxlinesEnvironmentMixin, - ComputeFQDNEnvironmentMixin, - ConfigFilterLinesEnvironmentMixin, - ConfigRecipientsEnvironmentMixin, - PusherDomainEnvironmentMixin, - ConfigOptionsEnvironmentMixin, - GenericEnvironmentMixin, - Environment, - ): + ProjectdescEnvironmentMixin, + ConfigMaxlinesEnvironmentMixin, + ComputeFQDNEnvironmentMixin, + ConfigFilterLinesEnvironmentMixin, + ConfigRecipientsEnvironmentMixin, + PusherDomainEnvironmentMixin, + ConfigOptionsEnvironmentMixin, + GenericEnvironmentMixin, + Environment, +): pass @@ -2134,12 +2183,12 @@ class GitoliteEnvironmentMixin(Environment): # repo_shortname (though it's probably not as good as a value # the user might have explicitly put in his config). 
return ( - self.osenv.get('GL_REPO', None) + self.osenv.get("GL_REPO", None) or super(GitoliteEnvironmentMixin, self).get_repo_shortname() - ) + ) def get_pusher(self): - return self.osenv.get('GL_USER', 'unknown user') + return self.osenv.get("GL_USER", "unknown user") class IncrementalDateTime(six.Iterator): @@ -2160,16 +2209,16 @@ class IncrementalDateTime(six.Iterator): class GitoliteEnvironment( - ProjectdescEnvironmentMixin, - ConfigMaxlinesEnvironmentMixin, - ComputeFQDNEnvironmentMixin, - ConfigFilterLinesEnvironmentMixin, - ConfigRecipientsEnvironmentMixin, - PusherDomainEnvironmentMixin, - ConfigOptionsEnvironmentMixin, - GitoliteEnvironmentMixin, - Environment, - ): + ProjectdescEnvironmentMixin, + ConfigMaxlinesEnvironmentMixin, + ComputeFQDNEnvironmentMixin, + ConfigFilterLinesEnvironmentMixin, + ConfigRecipientsEnvironmentMixin, + PusherDomainEnvironmentMixin, + ConfigOptionsEnvironmentMixin, + GitoliteEnvironmentMixin, + Environment, +): pass @@ -2239,21 +2288,24 @@ class Push(object): # following order thus causes commits to be grouped with branch # changes (as opposed to tag changes) if possible. 
SORT_ORDER = dict( - (value, i) for (i, value) in enumerate([ - (BranchChange, 'update'), - (BranchChange, 'create'), - (AnnotatedTagChange, 'update'), - (AnnotatedTagChange, 'create'), - (NonAnnotatedTagChange, 'update'), - (NonAnnotatedTagChange, 'create'), - (BranchChange, 'delete'), - (AnnotatedTagChange, 'delete'), - (NonAnnotatedTagChange, 'delete'), - (OtherReferenceChange, 'update'), - (OtherReferenceChange, 'create'), - (OtherReferenceChange, 'delete'), - ]) + (value, i) + for (i, value) in enumerate( + [ + (BranchChange, "update"), + (BranchChange, "create"), + (AnnotatedTagChange, "update"), + (AnnotatedTagChange, "create"), + (NonAnnotatedTagChange, "update"), + (NonAnnotatedTagChange, "create"), + (BranchChange, "delete"), + (AnnotatedTagChange, "delete"), + (NonAnnotatedTagChange, "delete"), + (OtherReferenceChange, "update"), + (OtherReferenceChange, "create"), + (OtherReferenceChange, "delete"), + ] ) + ) def __init__(self, changes): self.changes = sorted(changes, key=self._sort_key) @@ -2266,43 +2318,41 @@ class Push(object): other_ref_sha1s.union( change.old.sha1 for change in self.changes - if change.old.type in ['commit', 'tag'] - ) + if change.old.type in ["commit", "tag"] ) + ) self._new_rev_exclusion_spec = self._compute_rev_exclusion_spec( other_ref_sha1s.union( change.new.sha1 for change in self.changes - if change.new.type in ['commit', 'tag'] - ) + if change.new.type in ["commit", "tag"] ) + ) @classmethod def _sort_key(klass, change): return ( klass.SORT_ORDER[change.__class__, change.change_type], - change.refname,) + change.refname, + ) def _compute_other_ref_sha1s(self): """Return the GitObjects referred to by references unaffected by this push.""" # The refnames being changed by this push: - updated_refs = set( - change.refname - for change in self.changes - ) + updated_refs = set(change.refname for change in self.changes) # The SHA-1s of commits referred to by all references in this # repository *except* updated_refs: sha1s = set() 
fmt = ( - '%(objectname) %(objecttype) %(refname)\n' - '%(*objectname) %(*objecttype) %(refname)' - ) - for line in read_git_lines(['for-each-ref', '--format=%s' % (fmt,)]): - (sha1, type, name) = line.split(' ', 2) - if sha1 and type == 'commit' and name not in updated_refs: + "%(objectname) %(objecttype) %(refname)\n" + "%(*objectname) %(*objecttype) %(refname)" + ) + for line in read_git_lines(["for-each-ref", "--format=%s" % (fmt,)]): + (sha1, type, name) = line.split(" ", 2) + if sha1 and type == "commit" and name not in updated_refs: sha1s.add(sha1) return sha1s @@ -2315,9 +2365,7 @@ class Push(object): rev-list --stdin' to exclude all of the commits referred to by git_objects.""" - return ''.join( - ['^%s\n' % (sha1,) for sha1 in sorted(sha1s)] - ) + return "".join(["^%s\n" % (sha1,) for sha1 in sorted(sha1s)]) def get_new_commits(self, reference_change=None): """Return a list of commits added by this push. @@ -2329,16 +2377,14 @@ class Push(object): if not reference_change: new_revs = sorted( - change.new.sha1 - for change in self.changes - if change.new - ) + change.new.sha1 for change in self.changes if change.new + ) elif not reference_change.new.commit_sha1: return [] else: new_revs = [reference_change.new.commit_sha1] - cmd = ['rev-list', '--stdin'] + new_revs + cmd = ["rev-list", "--stdin"] + new_revs return read_git_lines(cmd, input=self._old_rev_exclusion_spec) def get_discarded_commits(self, reference_change): @@ -2353,7 +2399,7 @@ class Push(object): else: old_revs = [reference_change.old.commit_sha1] - cmd = ['rev-list', '--stdin'] + old_revs + cmd = ["rev-list", "--stdin"] + old_revs return read_git_lines(cmd, input=self._new_rev_exclusion_spec) def send_emails(self, mailer, body_filter=None): @@ -2375,19 +2421,20 @@ class Push(object): # Check if we've got anyone to send to if not change.recipients: sys.stderr.write( - '*** no recipients configured so no email will be sent\n' - '*** for %r update %s->%s\n' - % (change.refname, change.old.sha1, 
change.new.sha1,) - ) + "*** no recipients configured so no email will be sent\n" + "*** for %r update %s->%s\n" + % (change.refname, change.old.sha1, change.new.sha1) + ) else: sys.stderr.write( - 'Sending notification emails to: %s\n' % ( - change.recipients,)) - extra_values = {'send_date': next(send_date)} + "Sending notification emails to: %s\n" + % (change.recipients,) + ) + extra_values = {"send_date": next(send_date)} mailer.send( change.generate_email(self, body_filter, extra_values), change.recipients, - ) + ) sha1s = [] for sha1 in reversed(list(self.get_new_commits(change))): @@ -2398,41 +2445,41 @@ class Push(object): max_emails = change.environment.maxcommitemails if max_emails and len(sha1s) > max_emails: sys.stderr.write( - '*** Too many new commits (%d), not sending commit ' - 'emails.\n' % len(sha1s) - + '*** Try setting multimailhook.maxCommitEmails to a ' - 'greater value\n' - '*** Currently, multimailhook.maxCommitEmails=%d\n' % - max_emails - ) + "*** Too many new commits (%d), not sending commit " + "emails.\n" % len(sha1s) + + "*** Try setting multimailhook.maxCommitEmails to a " + "greater value\n" + "*** Currently, multimailhook.maxCommitEmails=%d\n" + % max_emails + ) return for (num, sha1) in enumerate(sha1s): rev = Revision( - change, GitObject(sha1), num=num+1, tot=len(sha1s)) + change, GitObject(sha1), num=num + 1, tot=len(sha1s) + ) if rev.recipients: - extra_values = {'send_date': next(send_date)} + extra_values = {"send_date": next(send_date)} mailer.send( rev.generate_email(self, body_filter, extra_values), rev.recipients, - ) + ) # Consistency check: if unhandled_sha1s: sys.stderr.write( - 'ERROR: No emails were sent for the following new commits:\n' - ' %s\n' - % ('\n '.join(sorted(unhandled_sha1s)),) - ) + "ERROR: No emails were sent for the following new commits:\n" + " %s\n" % ("\n ".join(sorted(unhandled_sha1s)),) + ) def run_as_post_receive_hook(environment, mailer): changes = [] for line in sys.stdin: - (oldrev, newrev, 
refname) = line.strip().split(' ', 2) + (oldrev, newrev, refname) = line.strip().split(" ", 2) changes.append( ReferenceChange.create(environment, oldrev, newrev, refname) - ) + ) push = Push(changes) push.send_emails(mailer, body_filter=environment.filter_body) @@ -2441,46 +2488,46 @@ def run_as_update_hook(environment, mailer, refname, oldrev, newrev): changes = [ ReferenceChange.create( environment, - read_git_output(['rev-parse', '--verify', oldrev]), - read_git_output(['rev-parse', '--verify', newrev]), + read_git_output(["rev-parse", "--verify", oldrev]), + read_git_output(["rev-parse", "--verify", newrev]), refname, - ), - ] + ) + ] push = Push(changes) push.send_emails(mailer, body_filter=environment.filter_body) def choose_mailer(config, environment): - mailer = config.get('mailer', default='sendmail') + mailer = config.get("mailer", default="sendmail") - if mailer == 'smtp': - smtpserver = config.get('smtpserver', default='localhost') + if mailer == "smtp": + smtpserver = config.get("smtpserver", default="localhost") mailer = SMTPMailer( envelopesender=( environment.get_sender() or environment.get_fromaddr() ), smtpserver=smtpserver, - ) - elif mailer == 'sendmail': - command = config.get('sendmailcommand') + ) + elif mailer == "sendmail": + command = config.get("sendmailcommand") if command: command = shlex.split(command) mailer = SendMailer( - command=command, envelopesender=environment.get_sender()) + command=command, envelopesender=environment.get_sender() + ) else: sys.stderr.write( - 'fatal: multimailhook.mailer is set to an incorrect value: ' - '"%s"\n' % mailer - + 'please use one of "smtp" or "sendmail".\n' - ) + "fatal: multimailhook.mailer is set to an incorrect value: " + '"%s"\n' % mailer + 'please use one of "smtp" or "sendmail".\n' + ) sys.exit(1) return mailer KNOWN_ENVIRONMENTS = { - 'generic': GenericEnvironmentMixin, - 'gitolite': GitoliteEnvironmentMixin, - } + "generic": GenericEnvironmentMixin, + "gitolite": GitoliteEnvironmentMixin, 
+} def choose_environment(config, osenv=None, env=None, recipients=None): @@ -2494,85 +2541,91 @@ def choose_environment(config, osenv=None, env=None, recipients=None): ConfigFilterLinesEnvironmentMixin, PusherDomainEnvironmentMixin, ConfigOptionsEnvironmentMixin, - ] - environment_kw = { - 'osenv': osenv, - 'config': config, - } + ] + environment_kw = {"osenv": osenv, "config": config} if not env: - env = config.get('environment') + env = config.get("environment") if not env: - if 'GL_USER' in osenv and 'GL_REPO' in osenv: - env = 'gitolite' + if "GL_USER" in osenv and "GL_REPO" in osenv: + env = "gitolite" else: - env = 'generic' + env = "generic" environment_mixins.append(KNOWN_ENVIRONMENTS[env]) if recipients: environment_mixins.insert(0, StaticRecipientsEnvironmentMixin) - environment_kw['refchange_recipients'] = recipients - environment_kw['announce_recipients'] = recipients - environment_kw['revision_recipients'] = recipients + environment_kw["refchange_recipients"] = recipients + environment_kw["announce_recipients"] = recipients + environment_kw["revision_recipients"] = recipients else: environment_mixins.insert(0, ConfigRecipientsEnvironmentMixin) environment_klass = type( - 'EffectiveEnvironment', - tuple(environment_mixins) + (Environment,), - {}, - ) + "EffectiveEnvironment", tuple(environment_mixins) + (Environment,), {} + ) return environment_klass(**environment_kw) def main(args): parser = optparse.OptionParser( description=__doc__, - usage='%prog [OPTIONS]\n or: %prog [OPTIONS] REFNAME OLDREV NEWREV', - ) + usage="%prog [OPTIONS]\n or: %prog [OPTIONS] REFNAME OLDREV NEWREV", + ) parser.add_option( - '--environment', '--env', action='store', type='choice', - choices=['generic', 'gitolite'], default=None, + "--environment", + "--env", + action="store", + type="choice", + choices=["generic", "gitolite"], + default=None, help=( - 'Choose type of environment is in use. Default is taken from ' + "Choose type of environment is in use. 
Default is taken from " 'multimailhook.environment if set; otherwise "generic".' - ), - ) + ), + ) parser.add_option( - '--stdout', action='store_true', default=False, - help='Output emails to stdout rather than sending them.', - ) + "--stdout", + action="store_true", + default=False, + help="Output emails to stdout rather than sending them.", + ) parser.add_option( - '--recipients', action='store', default=None, - help='Set list of email recipients for all types of emails.', - ) + "--recipients", + action="store", + default=None, + help="Set list of email recipients for all types of emails.", + ) parser.add_option( - '--show-env', action='store_true', default=False, + "--show-env", + action="store_true", + default=False, help=( - 'Write to stderr the values determined for the environment ' - '(intended for debugging purposes).' - ), - ) + "Write to stderr the values determined for the environment " + "(intended for debugging purposes)." + ), + ) (options, args) = parser.parse_args(args) - config = Config('multimailhook') + config = Config("multimailhook") try: environment = choose_environment( - config, osenv=os.environ, + config, + osenv=os.environ, env=options.environment, recipients=options.recipients, - ) + ) if options.show_env: - sys.stderr.write('Environment values:\n') + sys.stderr.write("Environment values:\n") for (k, v) in sorted(environment.get_values().items()): - sys.stderr.write(' %s : %r\n' % (k, v)) - sys.stderr.write('\n') + sys.stderr.write(" %s : %r\n" % (k, v)) + sys.stderr.write("\n") if options.stdout: mailer = OutputMailer(sys.stdout) @@ -2583,7 +2636,7 @@ def main(args): # like an update hook; otherwise, run as a post-receive hook. 
if args: if len(args) != 3: - parser.error('Need zero or three non-option arguments') + parser.error("Need zero or three non-option arguments") (refname, oldrev, newrev) = args run_as_update_hook(environment, mailer, refname, oldrev, newrev) else: @@ -2593,5 +2646,5 @@ def main(args): sys.exit(1) -if __name__ == '__main__': +if __name__ == "__main__": main(sys.argv[1:]) diff --git a/pagure/hooks/files/mirror.py b/pagure/hooks/files/mirror.py index 769a3a5..1a5b0b6 100755 --- a/pagure/hooks/files/mirror.py +++ b/pagure/hooks/files/mirror.py @@ -11,9 +11,10 @@ import os import sys -if 'PAGURE_CONFIG' not in os.environ \ - and os.path.exists('/etc/pagure/pagure.cfg'): - os.environ['PAGURE_CONFIG'] = '/etc/pagure/pagure.cfg' +if "PAGURE_CONFIG" not in os.environ and os.path.exists( + "/etc/pagure/pagure.cfg" +): + os.environ["PAGURE_CONFIG"] = "/etc/pagure/pagure.cfg" import pagure.config # noqa: E402 @@ -25,7 +26,7 @@ import pagure.ui.plugins # noqa: E402 _log = logging.getLogger(__name__) _config = pagure.config.config -abspath = os.path.abspath(os.environ['GIT_DIR']) +abspath = os.path.abspath(os.environ["GIT_DIR"]) def main(args): @@ -33,28 +34,30 @@ def main(args): repo = pagure.lib.git.get_repo_name(abspath) username = pagure.lib.git.get_username(abspath) namespace = pagure.lib.git.get_repo_namespace(abspath) - if _config.get('HOOK_DEBUG', False): - print('repo:', repo) - print('user:', username) - print('namespace:', namespace) + if _config.get("HOOK_DEBUG", False): + print("repo:", repo) + print("user:", username) + print("namespace:", namespace) - session = pagure.lib.create_session(_config['DB_URL']) + session = pagure.lib.create_session(_config["DB_URL"]) project = pagure.lib._get_project( - session, repo, user=username, namespace=namespace) + session, repo, user=username, namespace=namespace + ) if not project: - print('Could not find a project corresponding to this git repo') + print("Could not find a project corresponding to this git repo") 
session.close() return 1 pagure.lib.tasks_mirror.mirror_project.delay( username=project.user.user if project.is_fork else None, namespace=project.namespace, - name=project.name) + name=project.name, + ) session.close() return 0 -if __name__ == '__main__': +if __name__ == "__main__": main(sys.argv[1:]) diff --git a/pagure/hooks/files/pagure_block_unsigned.py b/pagure/hooks/files/pagure_block_unsigned.py index 00430a4..bca674e 100755 --- a/pagure/hooks/files/pagure_block_unsigned.py +++ b/pagure/hooks/files/pagure_block_unsigned.py @@ -11,9 +11,10 @@ import os import sys -if 'PAGURE_CONFIG' not in os.environ \ - and os.path.exists('/etc/pagure/pagure.cfg'): - os.environ['PAGURE_CONFIG'] = '/etc/pagure/pagure.cfg' +if "PAGURE_CONFIG" not in os.environ and os.path.exists( + "/etc/pagure/pagure.cfg" +): + os.environ["PAGURE_CONFIG"] = "/etc/pagure/pagure.cfg" import pagure # noqa: E402 @@ -22,42 +23,46 @@ import pagure.lib.link # noqa: E402 import pagure.ui.plugins # noqa: E402 _config = pagure.config.config -abspath = os.path.abspath(os.environ['GIT_DIR']) +abspath = os.path.abspath(os.environ["GIT_DIR"]) def run_as_pre_receive_hook(): for line in sys.stdin: - if _config.get('HOOK_DEBUG', False): + if _config.get("HOOK_DEBUG", False): print(line) - (oldrev, newrev, refname) = line.strip().split(' ', 2) + (oldrev, newrev, refname) = line.strip().split(" ", 2) - if _config.get('HOOK_DEBUG', False): - print(' -- Old rev') + if _config.get("HOOK_DEBUG", False): + print(" -- Old rev") print(oldrev) - print(' -- New rev') + print(" -- New rev") print(newrev) - print(' -- Ref name') + print(" -- Ref name") print(refname) - if set(newrev) == set(['0']): - print("Deleting a reference/branch, so we won't run the " - "hook to block unsigned commits") + if set(newrev) == set(["0"]): + print( + "Deleting a reference/branch, so we won't run the " + "hook to block unsigned commits" + ) return commits = pagure.lib.git.get_revs_between( - oldrev, newrev, abspath, refname) + oldrev, 
newrev, abspath, refname + ) for commit in commits: - if _config.get('HOOK_DEBUG', False): - print('Processing commit: %s' % commit) + if _config.get("HOOK_DEBUG", False): + print("Processing commit: %s" % commit) signed = False for line in pagure.lib.git.read_git_lines( - ['log', '--no-walk', commit], abspath): - if line.lower().strip().startswith('signed-off-by'): + ["log", "--no-walk", commit], abspath + ): + if line.lower().strip().startswith("signed-off-by"): signed = True break - if _config.get('HOOK_DEBUG', False): - print(' - Commit: %s is signed: %s' % (commit, signed)) + if _config.get("HOOK_DEBUG", False): + print(" - Commit: %s is signed: %s" % (commit, signed)) if not signed: print("Commit %s is not signed" % commit) sys.exit(1) @@ -67,5 +72,5 @@ def main(args): run_as_pre_receive_hook() -if __name__ == '__main__': +if __name__ == "__main__": main(sys.argv[1:]) diff --git a/pagure/hooks/files/pagure_force_commit_hook.py b/pagure/hooks/files/pagure_force_commit_hook.py index 495f441..8bb246c 100755 --- a/pagure/hooks/files/pagure_force_commit_hook.py +++ b/pagure/hooks/files/pagure_force_commit_hook.py @@ -10,9 +10,10 @@ import os import sys -if 'PAGURE_CONFIG' not in os.environ \ - and os.path.exists('/etc/pagure/pagure.cfg'): - os.environ['PAGURE_CONFIG'] = '/etc/pagure/pagure.cfg' +if "PAGURE_CONFIG" not in os.environ and os.path.exists( + "/etc/pagure/pagure.cfg" +): + os.environ["PAGURE_CONFIG"] = "/etc/pagure/pagure.cfg" import pagure # noqa: E402 @@ -22,54 +23,58 @@ import pagure.lib.plugins # noqa: E402 _config = pagure.config.config -abspath = os.path.abspath(os.environ['GIT_DIR']) +abspath = os.path.abspath(os.environ["GIT_DIR"]) def run_as_pre_receive_hook(): reponame = pagure.lib.git.get_repo_name(abspath) namespace = pagure.lib.git.get_repo_namespace(abspath) username = pagure.lib.git.get_username(abspath) - session = pagure.lib.create_session(_config['DB_URL']) - if _config.get('HOOK_DEBUG', False): - print('repo: ', reponame) - 
print('user: ', username) - print('namspaces:', namespace) + session = pagure.lib.create_session(_config["DB_URL"]) + if _config.get("HOOK_DEBUG", False): + print("repo: ", reponame) + print("user: ", username) + print("namspaces:", namespace) repo = pagure.lib._get_project( - session, reponame, user=username, namespace=namespace) + session, reponame, user=username, namespace=namespace + ) if not repo: - print('Unknown repo %s of username: %s in namespace %s' % ( - reponame, username, namespace)) + print( + "Unknown repo %s of username: %s in namespace %s" + % (reponame, username, namespace) + ) session.close() sys.exit(1) - plugin = pagure.lib.plugins.get_plugin('Block non fast-forward pushes') + plugin = pagure.lib.plugins.get_plugin("Block non fast-forward pushes") plugin.db_object() # Get the list of branches branches = [] if repo.pagure_force_commit_hook: branches = [ branch.strip() - for branch in repo.pagure_force_commit_hook.branches.split(',') - if branch.strip()] + for branch in repo.pagure_force_commit_hook.branches.split(",") + if branch.strip() + ] for line in sys.stdin: - if _config.get('HOOK_DEBUG', False): + if _config.get("HOOK_DEBUG", False): print(line) - (oldrev, newrev, refname) = line.strip().split(' ', 2) + (oldrev, newrev, refname) = line.strip().split(" ", 2) - refname = refname.replace('refs/heads/', '') - if refname in branches or branches == ['*']: - if _config.get('HOOK_DEBUG', False): - print(' -- Old rev') + refname = refname.replace("refs/heads/", "") + if refname in branches or branches == ["*"]: + if _config.get("HOOK_DEBUG", False): + print(" -- Old rev") print(oldrev) - print(' -- New rev') + print(" -- New rev") print(newrev) - print(' -- Ref name') + print(" -- Ref name") print(refname) - if set(newrev) == set(['0']): + if set(newrev) == set(["0"]): print("Deletion is forbidden") session.close() sys.exit(1) @@ -85,5 +90,5 @@ def main(args): run_as_pre_receive_hook() -if __name__ == '__main__': +if __name__ == "__main__": 
main(sys.argv[1:]) diff --git a/pagure/hooks/files/pagure_hook.py b/pagure/hooks/files/pagure_hook.py index 5747824..6718761 100755 --- a/pagure/hooks/files/pagure_hook.py +++ b/pagure/hooks/files/pagure_hook.py @@ -15,9 +15,10 @@ import pygit2 from sqlalchemy.exc import SQLAlchemyError -if 'PAGURE_CONFIG' not in os.environ \ - and os.path.exists('/etc/pagure/pagure.cfg'): - os.environ['PAGURE_CONFIG'] = '/etc/pagure/pagure.cfg' +if "PAGURE_CONFIG" not in os.environ and os.path.exists( + "/etc/pagure/pagure.cfg" +): + os.environ["PAGURE_CONFIG"] = "/etc/pagure/pagure.cfg" import pagure.config # noqa: E402 @@ -28,65 +29,70 @@ import pagure.lib.link # noqa: E402 _log = logging.getLogger(__name__) _config = pagure.config.config -abspath = os.path.abspath(os.environ['GIT_DIR']) +abspath = os.path.abspath(os.environ["GIT_DIR"]) def generate_revision_change_log(new_commits_list): - print('Detailed log of new commits:\n\n') + print("Detailed log of new commits:\n\n") commitid = None for line in pagure.lib.git.read_git_lines( - ['log', '--no-walk'] + new_commits_list + ['--'], abspath): - if line.startswith('commit'): - commitid = line.split('commit ')[-1] + ["log", "--no-walk"] + new_commits_list + ["--"], abspath + ): + if line.startswith("commit"): + commitid = line.split("commit ")[-1] line = line.strip() - session = pagure.lib.create_session(_config['DB_URL']) - print('*', line) + session = pagure.lib.create_session(_config["DB_URL"]) + print("*", line) for relation in pagure.lib.link.get_relation( - session, - pagure.lib.git.get_repo_name(abspath), - pagure.lib.git.get_username(abspath), - pagure.lib.git.get_repo_namespace(abspath), - line, - 'fixes', - include_prs=True): - if _config.get('HOOK_DEBUG', False): + session, + pagure.lib.git.get_repo_name(abspath), + pagure.lib.git.get_username(abspath), + pagure.lib.git.get_repo_namespace(abspath), + line, + "fixes", + include_prs=True, + ): + if _config.get("HOOK_DEBUG", False): print(commitid, relation) - 
fixes_relation(commitid, relation, session, - _config.get('APP_URL')) + fixes_relation(commitid, relation, session, _config.get("APP_URL")) for issue in pagure.lib.link.get_relation( - session, - pagure.lib.git.get_repo_name(abspath), - pagure.lib.git.get_username(abspath), - pagure.lib.git.get_repo_namespace(abspath), - line, - 'relates'): - if _config.get('HOOK_DEBUG', False): + session, + pagure.lib.git.get_repo_name(abspath), + pagure.lib.git.get_username(abspath), + pagure.lib.git.get_repo_namespace(abspath), + line, + "relates", + ): + if _config.get("HOOK_DEBUG", False): print(commitid, issue) - relates_commit(commitid, issue, session, _config.get('APP_URL')) + relates_commit(commitid, issue, session, _config.get("APP_URL")) session.close() def relates_commit(commitid, issue, session, app_url=None): - ''' Add a comment to an issue that this commit relates to it. ''' + """ Add a comment to an issue that this commit relates to it. """ - url = '../%s' % commitid[:8] + url = "../%s" % commitid[:8] if app_url: - if app_url.endswith('/'): + if app_url.endswith("/"): app_url = app_url[:-1] project = issue.project.fullname if issue.project.is_fork: - project = 'fork/%s' % project - url = '%s/%s/c/%s' % (app_url, project, commitid[:8]) + project = "fork/%s" % project + url = "%s/%s/c/%s" % (app_url, project, commitid[:8]) - comment = ''' Commit [%s](%s) relates to this ticket''' % ( - commitid[:8], url) + comment = """ Commit [%s](%s) relates to this ticket""" % ( + commitid[:8], + url, + ) user = os.environ.get( - 'GL_USER', pagure.lib.git.get_author_email(commitid, abspath)) + "GL_USER", pagure.lib.git.get_author_email(commitid, abspath) + ) try: pagure.lib.add_issue_comment( @@ -94,7 +100,7 @@ def relates_commit(commitid, issue, session, app_url=None): issue=issue, comment=comment, user=user, - ticketfolder=_config['TICKETS_FOLDER'], + ticketfolder=_config["TICKETS_FOLDER"], ) session.commit() except pagure.exceptions.PagureException as err: @@ -105,34 +111,38 @@ 
def relates_commit(commitid, issue, session, app_url=None): def fixes_relation(commitid, relation, session, app_url=None): - ''' Add a comment to an issue or PR that this commit fixes it and update - the status if the commit is in the master branch. ''' + """ Add a comment to an issue or PR that this commit fixes it and update + the status if the commit is in the master branch. """ - url = '../c/%s' % commitid[:8] + url = "../c/%s" % commitid[:8] if app_url: - if app_url.endswith('/'): + if app_url.endswith("/"): app_url = app_url[:-1] project = relation.project.fullname if relation.project.is_fork: - project = 'fork/%s' % project - url = '%s/%s/c/%s' % (app_url, project, commitid[:8]) + project = "fork/%s" % project + url = "%s/%s/c/%s" % (app_url, project, commitid[:8]) - comment = ''' Commit [%s](%s) fixes this %s''' % ( - commitid[:8], url, relation.isa) + comment = """ Commit [%s](%s) fixes this %s""" % ( + commitid[:8], + url, + relation.isa, + ) user = os.environ.get( - 'GL_USER', pagure.lib.git.get_author_email(commitid, abspath)) + "GL_USER", pagure.lib.git.get_author_email(commitid, abspath) + ) try: - if relation.isa == 'issue': + if relation.isa == "issue": pagure.lib.add_issue_comment( session, issue=relation, comment=comment, user=user, - ticketfolder=_config['TICKETS_FOLDER'], + ticketfolder=_config["TICKETS_FOLDER"], ) - elif relation.isa == 'pull-request': + elif relation.isa == "pull-request": pagure.lib.add_pull_request_comment( session, request=relation, @@ -142,7 +152,7 @@ def fixes_relation(commitid, relation, session, app_url=None): row=None, comment=comment, user=user, - requestfolder=_config['REQUESTS_FOLDER'], + requestfolder=_config["REQUESTS_FOLDER"], ) session.commit() except pagure.exceptions.PagureException as err: @@ -152,42 +162,45 @@ def fixes_relation(commitid, relation, session, app_url=None): _log.exception(err) try: - if relation.isa == 'issue': + if relation.isa == "issue": pagure.lib.edit_issue( session, relation, - 
ticketfolder=_config['TICKETS_FOLDER'], + ticketfolder=_config["TICKETS_FOLDER"], user=user, - status='Closed', close_status='Fixed') - elif relation.isa == 'pull-request': + status="Closed", + close_status="Fixed", + ) + elif relation.isa == "pull-request": pagure.lib.close_pull_request( session, relation, - requestfolder=_config['REQUESTS_FOLDER'], + requestfolder=_config["REQUESTS_FOLDER"], user=user, - merged=True) + merged=True, + ) session.commit() except pagure.exceptions.PagureException as err: print(err) except SQLAlchemyError as err: # pragma: no cover session.rollback() - print('ERROR', err) + print("ERROR", err) _log.exception(err) def run_as_post_receive_hook(): for line in sys.stdin: - if _config.get('HOOK_DEBUG', False): + if _config.get("HOOK_DEBUG", False): print(line) - (oldrev, newrev, refname) = line.strip().split(' ', 2) + (oldrev, newrev, refname) = line.strip().split(" ", 2) - if _config.get('HOOK_DEBUG', False): - print(' -- Old rev') + if _config.get("HOOK_DEBUG", False): + print(" -- Old rev") print(oldrev) - print(' -- New rev') + print(" -- New rev") print(newrev) - print(' -- Ref name') + print(" -- Ref name") print(refname) # Retrieve the default branch @@ -197,27 +210,30 @@ def run_as_post_receive_hook(): default_branch = repo_obj.head.shorthand # Skip all branch but the default one - refname = refname.replace('refs/heads/', '') + refname = refname.replace("refs/heads/", "") if refname != default_branch: continue - if set(newrev) == set(['0']): - print("Deleting a reference/branch, so we won't run the " - "pagure hook") + if set(newrev) == set(["0"]): + print( + "Deleting a reference/branch, so we won't run the " + "pagure hook" + ) return generate_revision_change_log( - pagure.lib.git.get_revs_between(oldrev, newrev, abspath, refname)) + pagure.lib.git.get_revs_between(oldrev, newrev, abspath, refname) + ) - if _config.get('HOOK_DEBUG', False): - print('ns :', pagure.lib.git.get_repo_namespace(abspath)) - print('repo:', 
pagure.lib.git.get_repo_name(abspath)) - print('user:', pagure.lib.git.get_username(abspath)) + if _config.get("HOOK_DEBUG", False): + print("ns :", pagure.lib.git.get_repo_namespace(abspath)) + print("repo:", pagure.lib.git.get_repo_name(abspath)) + print("user:", pagure.lib.git.get_username(abspath)) def main(args): run_as_post_receive_hook() -if __name__ == '__main__': +if __name__ == "__main__": main(sys.argv[1:]) diff --git a/pagure/hooks/files/pagure_hook_requests.py b/pagure/hooks/files/pagure_hook_requests.py index 2cce399..137449c 100755 --- a/pagure/hooks/files/pagure_hook_requests.py +++ b/pagure/hooks/files/pagure_hook_requests.py @@ -12,9 +12,10 @@ import sys # We need to access the database -if 'PAGURE_CONFIG' not in os.environ \ - and os.path.exists('/etc/pagure/pagure.cfg'): - os.environ['PAGURE_CONFIG'] = '/etc/pagure/pagure.cfg' +if "PAGURE_CONFIG" not in os.environ and os.path.exists( + "/etc/pagure/pagure.cfg" +): + os.environ["PAGURE_CONFIG"] = "/etc/pagure/pagure.cfg" import pagure.config # noqa: E402 @@ -22,18 +23,19 @@ import pagure.lib.tasks_services # noqa: E402 _config = pagure.config.config -abspath = os.path.abspath(os.environ['GIT_DIR']) +abspath = os.path.abspath(os.environ["GIT_DIR"]) def get_files_to_load(new_commits_list): - print('Files changed by new commits:\n') + print("Files changed by new commits:\n") file_list = [] new_commits_list.reverse() for commit in new_commits_list: filenames = pagure.lib.git.read_git_lines( - ['diff-tree', '--no-commit-id', '--name-only', '-r', commit], - abspath) + ["diff-tree", "--no-commit-id", "--name-only", "-r", commit], + abspath, + ) for line in filenames: if line.strip(): file_list.append(line.strip()) @@ -46,39 +48,43 @@ def run_as_post_receive_hook(): repo = pagure.lib.git.get_repo_name(abspath) username = pagure.lib.git.get_username(abspath) namespace = pagure.lib.git.get_repo_namespace( - abspath, gitfolder=_config['TICKETS_FOLDER']) - if _config.get('HOOK_DEBUG', False): - 
print('repo:', repo) - print('user:', username) - print('namespace:', namespace) + abspath, gitfolder=_config["TICKETS_FOLDER"] + ) + if _config.get("HOOK_DEBUG", False): + print("repo:", repo) + print("user:", username) + print("namespace:", namespace) for line in sys.stdin: - if _config.get('HOOK_DEBUG', False): + if _config.get("HOOK_DEBUG", False): print(line) - (oldrev, newrev, refname) = line.strip().split(' ', 2) + (oldrev, newrev, refname) = line.strip().split(" ", 2) - if _config.get('HOOK_DEBUG', False): - print(' -- Old rev') + if _config.get("HOOK_DEBUG", False): + print(" -- Old rev") print(oldrev) - print(' -- New rev') + print(" -- New rev") print(newrev) - print(' -- Ref name') + print(" -- Ref name") print(refname) - if set(newrev) == set(['0']): - print("Deleting a reference/branch, so we won't run the " - "pagure hook") + if set(newrev) == set(["0"]): + print( + "Deleting a reference/branch, so we won't run the " + "pagure hook" + ) return commits = pagure.lib.git.get_revs_between( - oldrev, newrev, abspath, refname) + oldrev, newrev, abspath, refname + ) pagure.lib.tasks_services.load_json_commits_to_db.delay( name=repo, commits=commits, abspath=abspath, - data_type='pull-request', - agent=os.environ.get('GL_USER'), + data_type="pull-request", + agent=os.environ.get("GL_USER"), namespace=namespace, username=username, ) @@ -88,5 +94,5 @@ def main(args): run_as_post_receive_hook() -if __name__ == '__main__': +if __name__ == "__main__": main(sys.argv[1:]) diff --git a/pagure/hooks/files/pagure_hook_tickets.py b/pagure/hooks/files/pagure_hook_tickets.py index a435d6d..c9577b2 100755 --- a/pagure/hooks/files/pagure_hook_tickets.py +++ b/pagure/hooks/files/pagure_hook_tickets.py @@ -11,16 +11,17 @@ import sys # We need to access the database -if 'PAGURE_CONFIG' not in os.environ \ - and os.path.exists('/etc/pagure/pagure.cfg'): - os.environ['PAGURE_CONFIG'] = '/etc/pagure/pagure.cfg' +if "PAGURE_CONFIG" not in os.environ and os.path.exists( + 
"/etc/pagure/pagure.cfg" +): + os.environ["PAGURE_CONFIG"] = "/etc/pagure/pagure.cfg" import pagure.config # noqa: E402 import pagure.lib.tasks_services # noqa: E402 _config = pagure.config.config -abspath = os.path.abspath(os.environ['GIT_DIR']) +abspath = os.path.abspath(os.environ["GIT_DIR"]) def run_as_post_receive_hook(): @@ -28,39 +29,43 @@ def run_as_post_receive_hook(): repo = pagure.lib.git.get_repo_name(abspath) username = pagure.lib.git.get_username(abspath) namespace = pagure.lib.git.get_repo_namespace( - abspath, gitfolder=_config['TICKETS_FOLDER']) - if _config.get('HOOK_DEBUG', False): - print('repo:', repo) - print('user:', username) - print('namespace:', namespace) + abspath, gitfolder=_config["TICKETS_FOLDER"] + ) + if _config.get("HOOK_DEBUG", False): + print("repo:", repo) + print("user:", username) + print("namespace:", namespace) for line in sys.stdin: - if _config.get('HOOK_DEBUG', False): + if _config.get("HOOK_DEBUG", False): print(line) - (oldrev, newrev, refname) = line.strip().split(' ', 2) + (oldrev, newrev, refname) = line.strip().split(" ", 2) - if _config.get('HOOK_DEBUG', False): - print(' -- Old rev') + if _config.get("HOOK_DEBUG", False): + print(" -- Old rev") print(oldrev) - print(' -- New rev') + print(" -- New rev") print(newrev) - print(' -- Ref name') + print(" -- Ref name") print(refname) - if set(newrev) == set(['0']): - print("Deleting a reference/branch, so we won't run the " - "pagure hook") + if set(newrev) == set(["0"]): + print( + "Deleting a reference/branch, so we won't run the " + "pagure hook" + ) return commits = pagure.lib.git.get_revs_between( - oldrev, newrev, abspath, refname) + oldrev, newrev, abspath, refname + ) pagure.lib.tasks_services.load_json_commits_to_db.delay( name=repo, commits=commits, abspath=abspath, - data_type='ticket', - agent=os.environ.get('GL_USER'), + data_type="ticket", + agent=os.environ.get("GL_USER"), namespace=namespace, username=username, ) @@ -70,5 +75,5 @@ def main(args): 
run_as_post_receive_hook() -if __name__ == '__main__': +if __name__ == "__main__": main(sys.argv[1:]) diff --git a/pagure/hooks/files/rtd_hook.py b/pagure/hooks/files/rtd_hook.py index fd583d6..00b0b03 100755 --- a/pagure/hooks/files/rtd_hook.py +++ b/pagure/hooks/files/rtd_hook.py @@ -12,9 +12,10 @@ import sys import requests -if 'PAGURE_CONFIG' not in os.environ \ - and os.path.exists('/etc/pagure/pagure.cfg'): - os.environ['PAGURE_CONFIG'] = '/etc/pagure/pagure.cfg' +if "PAGURE_CONFIG" not in os.environ and os.path.exists( + "/etc/pagure/pagure.cfg" +): + os.environ["PAGURE_CONFIG"] = "/etc/pagure/pagure.cfg" import pagure # noqa: E402 @@ -23,78 +24,78 @@ import pagure.lib.link # noqa: E402 import pagure.lib.plugins # noqa: E402 _config = pagure.config.config -abspath = os.path.abspath(os.environ['GIT_DIR']) +abspath = os.path.abspath(os.environ["GIT_DIR"]) def run_as_post_receive_hook(): reponame = pagure.lib.git.get_repo_name(abspath) username = pagure.lib.git.get_username(abspath) namespace = pagure.lib.git.get_repo_namespace(abspath) - session = pagure.lib.create_session(_config['DB_URL']) - if _config.get('HOOK_DEBUG', False): - print('repo: ', reponame) - print('user: ', username) - print('namespace:', namespace) + session = pagure.lib.create_session(_config["DB_URL"]) + if _config.get("HOOK_DEBUG", False): + print("repo: ", reponame) + print("user: ", username) + print("namespace:", namespace) repo = pagure.lib.get_authorized_project( - session, reponame, user=username, namespace=namespace) + session, reponame, user=username, namespace=namespace + ) if not repo: - print('Unknown repo %s of username: %s' % (reponame, username)) + print("Unknown repo %s of username: %s" % (reponame, username)) session.close() sys.exit(1) - hook = pagure.lib.plugins.get_plugin('Read the Doc') + hook = pagure.lib.plugins.get_plugin("Read the Doc") hook.db_object() # Get the list of branches branches = [ branch.strip() - for branch in repo.rtd_hook.branches.split(',') - if 
repo.rtd_hook] + for branch in repo.rtd_hook.branches.split(",") + if repo.rtd_hook + ] # Remove empty branches - branches = [ - branch.strip() - for branch in branches - if branch] + branches = [branch.strip() for branch in branches if branch] url = repo.rtd_hook.api_url if not url: - print('No API url specified to trigger the build, please update ' - 'the configuration') + print( + "No API url specified to trigger the build, please update " + "the configuration" + ) session.close() return 1 if not repo.rtd_hook.api_token: - print('No API token specified to trigger the build, please update ' - 'the configuration') + print( + "No API token specified to trigger the build, please update " + "the configuration" + ) session.close() return 1 for line in sys.stdin: - if _config.get('HOOK_DEBUG', False): + if _config.get("HOOK_DEBUG", False): print(line) - (oldrev, newrev, refname) = line.strip().split(' ', 2) + (oldrev, newrev, refname) = line.strip().split(" ", 2) - refname = refname.replace('refs/heads/', '') + refname = refname.replace("refs/heads/", "") if branches: if refname in branches: - print('Starting RTD build at %s' % (url)) + print("Starting RTD build at %s" % (url)) requests.post( url, data={ - 'branches': refname, - 'token': repo.rtd_hook.api_token + "branches": refname, + "token": repo.rtd_hook.api_token, }, timeout=60, ) else: - print('Starting RTD build at %s' % (url)) + print("Starting RTD build at %s" % (url)) requests.post( url, - data={ - 'branches': refname, - 'token': repo.rtd_hook.api_token - }, + data={"branches": refname, "token": repo.rtd_hook.api_token}, timeout=60, ) @@ -105,5 +106,5 @@ def main(args): run_as_post_receive_hook() -if __name__ == '__main__': +if __name__ == "__main__": main(sys.argv[1:]) diff --git a/pagure/hooks/irc.py b/pagure/hooks/irc.py index 6564bce..6b29e34 100644 --- a/pagure/hooks/irc.py +++ b/pagure/hooks/irc.py @@ -13,6 +13,7 @@ from __future__ import unicode_literals import sqlalchemy as sa import pygit2 import 
wtforms + try: from flask_wtf import FlaskForm except ImportError: @@ -31,16 +32,16 @@ class IrcTable(BASE): Table -- hook_irc """ - __tablename__ = 'hook_irc' + __tablename__ = "hook_irc" id = sa.Column(sa.Integer, primary_key=True) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False, unique=True, - index=True) + index=True, + ) server = sa.Column(sa.Text, nullable=False) port = sa.Column(sa.Text, nullable=False) @@ -52,72 +53,71 @@ class IrcTable(BASE): ssl = sa.Column(sa.Boolean, nullable=False, default=True) project = relation( - 'Project', remote_side=[Project.id], + "Project", + remote_side=[Project.id], backref=backref( - 'irc_hook', cascade="delete, delete-orphan", - single_parent=True, uselist=False) + "irc_hook", + cascade="delete, delete-orphan", + single_parent=True, + uselist=False, + ), ) class IrcForm(FlaskForm): - ''' Form to configure the irc hook. ''' + """ Form to configure the irc hook. 
""" + server = wtforms.TextField( - 'Server *', - [RequiredIf('active')] + 'Server *', [RequiredIf("active")] ) port = wtforms.TextField( - 'Port *', - [RequiredIf('active')] + 'Port *', [RequiredIf("active")] ) room = wtforms.TextField( - 'Room *', - [RequiredIf('active')] - ) - nick = wtforms.TextField( - 'Nick', - [wtforms.validators.Optional()] + 'Room *', [RequiredIf("active")] ) + nick = wtforms.TextField("Nick", [wtforms.validators.Optional()]) nick_pass = wtforms.TextField( - 'Nickserv Password', - [wtforms.validators.Optional()] + "Nickserv Password", [wtforms.validators.Optional()] ) - active = wtforms.BooleanField( - 'Active', - [wtforms.validators.Optional()] - ) + active = wtforms.BooleanField("Active", [wtforms.validators.Optional()]) join = wtforms.BooleanField( - 'Message Without Join', - [wtforms.validators.Optional()] - ) - ssl = wtforms.BooleanField( - 'Use SSL', - [wtforms.validators.Optional()] + "Message Without Join", [wtforms.validators.Optional()] ) + ssl = wtforms.BooleanField("Use SSL", [wtforms.validators.Optional()]) class Hook(BaseHook): - ''' IRC hooks. ''' + """ IRC hooks. """ - name = 'IRC' - description = 'This hook sends message to the mention channel regarding'\ - ' the changes made by the pushes to the git repository.' + name = "IRC" + description = ( + "This hook sends message to the mention channel regarding" + " the changes made by the pushes to the git repository." + ) form = IrcForm db_object = IrcTable - backref = 'irc_hook' + backref = "irc_hook" form_fields = [ - 'server', 'port', 'room', 'nick', 'nick_pass', 'active', 'join', - 'ssl' + "server", + "port", + "room", + "nick", + "nick_pass", + "active", + "join", + "ssl", ] @classmethod def install(cls, project, dbobj): - ''' Method called to install the hook for a project. + """ Method called to install the hook for a project. 
:arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ repopaths = [get_repo_path(project)] repo_obj = pygit2.Repository(repopaths[0]) # noqa @@ -130,12 +130,12 @@ class Hook(BaseHook): @classmethod def remove(cls, project): - ''' Method called to remove the hook of a project. + """ Method called to remove the hook of a project. :arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ repopaths = [get_repo_path(project)] # noqa # cls.base_remove(repopaths, 'irc') diff --git a/pagure/hooks/mail.py b/pagure/hooks/mail.py index 7319423..583f458 100644 --- a/pagure/hooks/mail.py +++ b/pagure/hooks/mail.py @@ -13,6 +13,7 @@ from __future__ import unicode_literals import sqlalchemy as sa import pygit2 import wtforms + try: from flask_wtf import FlaskForm except ImportError: @@ -31,82 +32,82 @@ class MailTable(BASE): Table -- hook_mail """ - __tablename__ = 'hook_mail' + __tablename__ = "hook_mail" id = sa.Column(sa.Integer, primary_key=True) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False, unique=True, - index=True) + index=True, + ) mail_to = sa.Column(sa.Text, nullable=False) active = sa.Column(sa.Boolean, nullable=False, default=False) project = relation( - 'Project', remote_side=[Project.id], + "Project", + remote_side=[Project.id], backref=backref( - 'mail_hook', cascade="delete, delete-orphan", - single_parent=True, uselist=False) + "mail_hook", + cascade="delete, delete-orphan", + single_parent=True, + uselist=False, + ), ) class MailForm(FlaskForm): - ''' Form to configure the mail hook. ''' - mail_to = wtforms.TextField( - 'Mail to', - [RequiredIf('active')] - ) - active = wtforms.BooleanField( - 'Active', - [wtforms.validators.Optional()] - ) + """ Form to configure the mail hook. 
""" + + mail_to = wtforms.TextField("Mail to", [RequiredIf("active")]) + active = wtforms.BooleanField("Active", [wtforms.validators.Optional()]) class Mail(BaseHook): - ''' Mail hooks. ''' + """ Mail hooks. """ - name = 'Mail' - description = 'Generate notification emails for pushes to a git '\ - 'repository. This hook sends emails describing changes introduced '\ - 'by pushes to a git repository.' + name = "Mail" + description = ( + "Generate notification emails for pushes to a git " + "repository. This hook sends emails describing changes introduced " + "by pushes to a git repository." + ) form = MailForm db_object = MailTable - backref = 'mail_hook' - form_fields = ['mail_to', 'active'] + backref = "mail_hook" + form_fields = ["mail_to", "active"] @classmethod def install(cls, project, dbobj): - ''' Method called to install the hook for a project. + """ Method called to install the hook for a project. :arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ repopaths = [get_repo_path(project)] repo_obj = pygit2.Repository(repopaths[0]) # Configure the hook repo_obj.config.set_multivar( - 'multimailhook.mailingList', - '', - dbobj.mail_to + "multimailhook.mailingList", "", dbobj.mail_to ) repo_obj.config.set_multivar( - 'multimailhook.environment', '', 'gitolite') + "multimailhook.environment", "", "gitolite" + ) # Install the hook itself - cls.base_install(repopaths, dbobj, 'mail', 'git_multimail.py') + cls.base_install(repopaths, dbobj, "mail", "git_multimail.py") @classmethod def remove(cls, project): - ''' Method called to remove the hook of a project. + """ Method called to remove the hook of a project. 
:arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ repopaths = [get_repo_path(project)] - cls.base_remove(repopaths, 'mail') + cls.base_remove(repopaths, "mail") diff --git a/pagure/hooks/mirror_hook.py b/pagure/hooks/mirror_hook.py index b874738..40dbeb7 100644 --- a/pagure/hooks/mirror_hook.py +++ b/pagure/hooks/mirror_hook.py @@ -31,16 +31,16 @@ class MirrorTable(BASE): Table -- mirror_pagure """ - __tablename__ = 'hook_mirror' + __tablename__ = "hook_mirror" id = sa.Column(sa.Integer, primary_key=True) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False, unique=True, - index=True) + index=True, + ) active = sa.Column(sa.Boolean, nullable=False, default=False) @@ -49,19 +49,22 @@ class MirrorTable(BASE): last_log = sa.Column(sa.Text, nullable=True) project = relation( - 'Project', remote_side=[Project.id], + "Project", + remote_side=[Project.id], backref=backref( - 'mirror_hook', cascade="delete, delete-orphan", - single_parent=True, uselist=False) + "mirror_hook", + cascade="delete, delete-orphan", + single_parent=True, + uselist=False, + ), ) class CustomRegexp(wtforms.validators.Regexp): - def __init__(self, *args, **kwargs): - self.optional = kwargs.get('optional') or False + self.optional = kwargs.get("optional") or False if self.optional: - kwargs.pop('optional') + kwargs.pop("optional") super(CustomRegexp, self).__init__(*args, **kwargs) def __call__(self, form, field): @@ -73,31 +76,24 @@ class CustomRegexp(wtforms.validators.Regexp): class MirrorForm(FlaskForm): - ''' Form to configure the mirror hook. ''' - active = wtforms.BooleanField( - 'Active', - [wtforms.validators.Optional()] - ) + """ Form to configure the mirror hook. 
""" + + active = wtforms.BooleanField("Active", [wtforms.validators.Optional()]) target = wtforms.TextField( - 'Git repo to mirror to', - [ - RequiredIf('active'), - CustomRegexp(ssh_urlpattern, optional=True), - ] + "Git repo to mirror to", + [RequiredIf("active"), CustomRegexp(ssh_urlpattern, optional=True)], ) public_key = wtforms.TextAreaField( - 'Public SSH key', - [wtforms.validators.Optional()] + "Public SSH key", [wtforms.validators.Optional()] ) last_log = wtforms.TextAreaField( - 'Log of the last sync:', - [wtforms.validators.Optional()] + "Log of the last sync:", [wtforms.validators.Optional()] ) -DESCRIPTION = ''' +DESCRIPTION = """ Pagure specific hook to mirror a repo hosted on pagure to another location. The first field below should contain the URL to be set in the git configuration @@ -111,48 +107,50 @@ page shortly after the activation of this hook. Just refresh the page until it shows up. Finally the log of the last sync at the bottom is meant. -''' +""" class MirrorHook(BaseHook): - ''' Mirror hook. ''' + """ Mirror hook. """ - name = 'Mirroring' + name = "Mirroring" description = DESCRIPTION form = MirrorForm db_object = MirrorTable - backref = 'mirror_hook' - form_fields = ['active', 'target', 'public_key', 'last_log'] - form_fields_readonly = ['public_key', 'last_log'] + backref = "mirror_hook" + form_fields = ["active", "target", "public_key", "last_log"] + form_fields_readonly = ["public_key", "last_log"] @classmethod def install(cls, project, dbobj): - ''' Method called to install the hook for a project. + """ Method called to install the hook for a project. 
:arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ pagure.lib.tasks_mirror.setup_mirroring.delay( username=project.user.user if project.is_fork else None, namespace=project.namespace, - name=project.name) + name=project.name, + ) repopaths = [get_repo_path(project)] - cls.base_install(repopaths, dbobj, 'mirror', 'mirror.py') + cls.base_install(repopaths, dbobj, "mirror", "mirror.py") @classmethod def remove(cls, project): - ''' Method called to remove the hook of a project. + """ Method called to remove the hook of a project. :arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ pagure.lib.tasks_mirror.teardown_mirroring.delay( username=project.user.user if project.is_fork else None, namespace=project.namespace, - name=project.name) + name=project.name, + ) repopaths = [get_repo_path(project)] - cls.base_remove(repopaths, 'mirror') + cls.base_remove(repopaths, "mirror") diff --git a/pagure/hooks/pagure_ci.py b/pagure/hooks/pagure_ci.py index 0febfdd..fbcc74a 100644 --- a/pagure/hooks/pagure_ci.py +++ b/pagure/hooks/pagure_ci.py @@ -13,6 +13,7 @@ from __future__ import unicode_literals import flask import sqlalchemy as sa import wtforms + try: from flask_wtf import FlaskForm except ImportError: @@ -31,40 +32,33 @@ class PagureCITable(BASE): Table -- hook_pagure_ci """ - __tablename__ = 'hook_pagure_ci' + __tablename__ = "hook_pagure_ci" id = sa.Column(sa.Integer, primary_key=True) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False, unique=True, - index=True) - pagure_ci_token = sa.Column( - sa.String(32), - nullable=True, - index=True) - ci_type = sa.Column( - sa.String(255), - nullable=True) - ci_url = sa.Column( - sa.String(255), - nullable=True, - unique=False) - ci_job = sa.Column( - sa.String(255), - nullable=True, - 
unique=False) + index=True, + ) + pagure_ci_token = sa.Column(sa.String(32), nullable=True, index=True) + ci_type = sa.Column(sa.String(255), nullable=True) + ci_url = sa.Column(sa.String(255), nullable=True, unique=False) + ci_job = sa.Column(sa.String(255), nullable=True, unique=False) active = sa.Column(sa.Boolean, nullable=False, default=False) active_commit = sa.Column(sa.Boolean, nullable=False, default=False) active_pr = sa.Column(sa.Boolean, nullable=False, default=False) project = relation( - 'Project', remote_side=[Project.id], + "Project", + remote_side=[Project.id], backref=backref( - 'ci_hook', cascade="delete, delete-orphan", - single_parent=True, uselist=False) + "ci_hook", + cascade="delete, delete-orphan", + single_parent=True, + uselist=False, + ), ) @@ -95,23 +89,28 @@ activation. class PagureCiForm(FlaskForm): - ''' Form to configure the CI hook. ''' + """ Form to configure the CI hook. """ + ci_type = wtforms.SelectField( - 'Type of CI service', - [RequiredIf(['active_commit', 'active_pr'])], - choices=[] + "Type of CI service", + [RequiredIf(["active_commit", "active_pr"])], + choices=[], ) ci_url = wtforms.TextField( - 'URL to the project on the CI service', - [RequiredIf(['active_commit', 'active_pr']), - wtforms.validators.Length(max=255)], + "URL to the project on the CI service", + [ + RequiredIf(["active_commit", "active_pr"]), + wtforms.validators.Length(max=255), + ], ) ci_job = wtforms.TextField( - 'Name of the job to trigger', - [RequiredIf(['active_commit', 'active_pr']), - wtforms.validators.Length(max=255)], + "Name of the job to trigger", + [ + RequiredIf(["active_commit", "active_pr"]), + wtforms.validators.Length(max=255), + ], ) # The active field is not render in the UI it used @@ -119,18 +118,15 @@ class PagureCiForm(FlaskForm): # and active_commit. 
# The value of active is set in pagure.ui.plugins.view_plugin active = wtforms.BooleanField( - 'Activate Pagure CI service', - [wtforms.validators.Optional()] + "Activate Pagure CI service", [wtforms.validators.Optional()] ) active_commit = wtforms.BooleanField( - 'Trigger CI job on commits', - [wtforms.validators.Optional()] + "Trigger CI job on commits", [wtforms.validators.Optional()] ) active_pr = wtforms.BooleanField( - 'Trigger CI job on pull-requests', - [wtforms.validators.Optional()] + "Trigger CI job on pull-requests", [wtforms.validators.Optional()] ) def __init__(self, *args, **kwargs): @@ -140,52 +136,52 @@ class PagureCiForm(FlaskForm): """ super(PagureCiForm, self).__init__(*args, **kwargs) - types = pagure.config.config.get('PAGURE_CI_SERVICES', []) - self.ci_type.choices = [ - (ci_type, ci_type) for ci_type in types - ] + types = pagure.config.config.get("PAGURE_CI_SERVICES", []) + self.ci_type.choices = [(ci_type, ci_type) for ci_type in types] class PagureCi(BaseHook): - ''' Continuous Integration (CI) hooks. ''' + """ Continuous Integration (CI) hooks. """ - name = 'Pagure CI' - description = 'Integrate continuous integration (CI) services into your '\ - 'pagure project, providing you notifications for every pull-request '\ - 'opened in the project.' + name = "Pagure CI" + description = ( + "Integrate continuous integration (CI) services into your " + "pagure project, providing you notifications for every pull-request " + "opened in the project." + ) extra_info = tmpl form = PagureCiForm db_object = PagureCITable - backref = 'ci_hook' - form_fields = ['ci_type', 'ci_url', 'ci_job', 'active_commit', 'active_pr'] + backref = "ci_hook" + form_fields = ["ci_type", "ci_url", "ci_job", "active_commit", "active_pr"] @classmethod def set_up(cls, project): - ''' Install the generic post-receive hook that allow us to call + """ Install the generic post-receive hook that allow us to call multiple post-receive hooks as set per plugin. 
- ''' + """ pass @classmethod def install(cls, project, dbobj): - ''' Method called to install the hook for a project. + """ Method called to install the hook for a project. :arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ if not dbobj.pagure_ci_token: dbobj.pagure_ci_token = pagure.lib.login.id_generator(32) flask.g.session.commit() @classmethod def remove(cls, project): - ''' Method called to remove the hook of a project. + """ Method called to remove the hook of a project. :arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ if project.ci_hook is not None: project.ci_hook.pagure_ci_token = None flask.g.session.commit() diff --git a/pagure/hooks/pagure_force_commit.py b/pagure/hooks/pagure_force_commit.py index 68dd4be..6f80f13 100644 --- a/pagure/hooks/pagure_force_commit.py +++ b/pagure/hooks/pagure_force_commit.py @@ -13,6 +13,7 @@ from __future__ import unicode_literals import sqlalchemy as sa import pygit2 import wtforms + try: from flask_wtf import FlaskForm except ImportError: @@ -31,78 +32,84 @@ class PagureForceCommitTable(BASE): Table -- hook_pagure_force_commit """ - __tablename__ = 'hook_pagure_force_commit' + __tablename__ = "hook_pagure_force_commit" id = sa.Column(sa.Integer, primary_key=True) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False, unique=True, - index=True) + index=True, + ) branches = sa.Column(sa.Text, nullable=False) active = sa.Column(sa.Boolean, nullable=False, default=False) project = relation( - 'Project', foreign_keys=[project_id], remote_side=[Project.id], + "Project", + foreign_keys=[project_id], + remote_side=[Project.id], backref=backref( - 'pagure_force_commit_hook', cascade="delete, delete-orphan", - single_parent=True, uselist=False) + "pagure_force_commit_hook", + cascade="delete, 
delete-orphan", + single_parent=True, + uselist=False, + ), ) class PagureForceCommitForm(FlaskForm): - ''' Form to configure the pagure hook. ''' - branches = wtforms.TextField( - 'Branches', - [RequiredIf('active')] - ) + """ Form to configure the pagure hook. """ - active = wtforms.BooleanField( - 'Active', - [wtforms.validators.Optional()] - ) + branches = wtforms.TextField("Branches", [RequiredIf("active")]) + active = wtforms.BooleanField("Active", [wtforms.validators.Optional()]) -class PagureForceCommitHook(BaseHook): - ''' PagurPagureForceCommit hook. ''' - name = 'Block non fast-forward pushes' - description = 'Using this hook you can block any non-fast-forward '\ - 'commit forced pushed to one or more branches.\n'\ - 'You can specify one or more branch names (sperated them using '\ - 'commas) or block all the branches by specifying: ``*``' +class PagureForceCommitHook(BaseHook): + """ PagurPagureForceCommit hook. """ + + name = "Block non fast-forward pushes" + description = ( + "Using this hook you can block any non-fast-forward " + "commit forced pushed to one or more branches.\n" + "You can specify one or more branch names (sperated them using " + "commas) or block all the branches by specifying: ``*``" + ) form = PagureForceCommitForm db_object = PagureForceCommitTable - backref = 'pagure_force_commit_hook' - form_fields = ['branches', 'active'] - hook_type = 'pre-receive' + backref = "pagure_force_commit_hook" + form_fields = ["branches", "active"] + hook_type = "pre-receive" @classmethod def install(cls, project, dbobj): - ''' Method called to install the hook for a project. + """ Method called to install the hook for a project. 
:arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ # Init the git repo in case repopaths = [get_repo_path(project)] pygit2.Repository(repopaths[0]) - cls.base_install(repopaths, dbobj, 'pagureforcecommit', - 'pagure_force_commit_hook.py') + cls.base_install( + repopaths, + dbobj, + "pagureforcecommit", + "pagure_force_commit_hook.py", + ) @classmethod def remove(cls, project): - ''' Method called to remove the hook of a project. + """ Method called to remove the hook of a project. :arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ repopaths = [get_repo_path(project)] - cls.base_remove(repopaths, 'pagureforcecommit') + cls.base_remove(repopaths, "pagureforcecommit") diff --git a/pagure/hooks/pagure_hook.py b/pagure/hooks/pagure_hook.py index af3f84e..f536029 100644 --- a/pagure/hooks/pagure_hook.py +++ b/pagure/hooks/pagure_hook.py @@ -14,6 +14,7 @@ import os import sqlalchemy as sa import wtforms + try: from flask_wtf import FlaskForm except ImportError: @@ -33,36 +34,38 @@ class PagureTable(BASE): Table -- hook_pagure """ - __tablename__ = 'hook_pagure' + __tablename__ = "hook_pagure" id = sa.Column(sa.Integer, primary_key=True) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False, unique=True, - index=True) + index=True, + ) active = sa.Column(sa.Boolean, nullable=False, default=False) project = relation( - 'Project', remote_side=[Project.id], + "Project", + remote_side=[Project.id], backref=backref( - 'pagure_hook', cascade="delete, delete-orphan", - single_parent=True, uselist=False) + "pagure_hook", + cascade="delete, delete-orphan", + single_parent=True, + uselist=False, + ), ) class PagureForm(FlaskForm): - ''' Form to configure the pagure hook. 
''' - active = wtforms.BooleanField( - 'Active', - [wtforms.validators.Optional()] - ) + """ Form to configure the pagure hook. """ + active = wtforms.BooleanField("Active", [wtforms.validators.Optional()]) -DESCRIPTION = ''' + +DESCRIPTION = """ Pagure specific hook to add a comment to issues or pull requests if the pushed commits fix them or relate to them. This is determined based on the commit message. @@ -94,53 +97,51 @@ Capitalization does not matter; neither does the colon between keyword and number. -''' +""" class PagureHook(BaseHook): - ''' Pagure hook. ''' + """ Pagure hook. """ - name = 'Pagure' + name = "Pagure" description = DESCRIPTION form = PagureForm db_object = PagureTable - backref = 'pagure_hook' - form_fields = ['active'] + backref = "pagure_hook" + form_fields = ["active"] @classmethod def install(cls, project, dbobj): - ''' Method called to install the hook for a project. + """ Method called to install the hook for a project. :arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ repopaths = [get_repo_path(project)] for folder in [ - pagure_config.get('DOCS_FOLDER'), - pagure_config.get('REQUESTS_FOLDER')]: + pagure_config.get("DOCS_FOLDER"), + pagure_config.get("REQUESTS_FOLDER"), + ]: if folder: - repopaths.append( - os.path.join(folder, project.path) - ) + repopaths.append(os.path.join(folder, project.path)) - cls.base_install(repopaths, dbobj, 'pagure', 'pagure_hook.py') + cls.base_install(repopaths, dbobj, "pagure", "pagure_hook.py") @classmethod def remove(cls, project): - ''' Method called to remove the hook of a project. + """ Method called to remove the hook of a project. 
:arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ repopaths = [get_repo_path(project)] for folder in [ - pagure_config.get('DOCS_FOLDER'), - pagure_config.get('REQUESTS_FOLDER')]: + pagure_config.get("DOCS_FOLDER"), + pagure_config.get("REQUESTS_FOLDER"), + ]: if folder: - repopaths.append( - os.path.join(folder, project.path) - ) + repopaths.append(os.path.join(folder, project.path)) - cls.base_remove(repopaths, 'pagure') + cls.base_remove(repopaths, "pagure") diff --git a/pagure/hooks/pagure_no_new_branches.py b/pagure/hooks/pagure_no_new_branches.py index 22bde1e..e0b2e41 100644 --- a/pagure/hooks/pagure_no_new_branches.py +++ b/pagure/hooks/pagure_no_new_branches.py @@ -12,6 +12,7 @@ from __future__ import unicode_literals import sqlalchemy as sa import wtforms + try: from flask_wtf import FlaskForm except ImportError: @@ -30,68 +31,73 @@ class PagureNoNewBranchesTable(BASE): Table -- hook_pagure_no_new_branches """ - __tablename__ = 'hook_pagure_no_new_branches' + __tablename__ = "hook_pagure_no_new_branches" id = sa.Column(sa.Integer, primary_key=True) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False, unique=True, - index=True) + index=True, + ) active = sa.Column(sa.Boolean, nullable=False, default=False) project = relation( - 'Project', remote_side=[Project.id], + "Project", + remote_side=[Project.id], backref=backref( - 'pagure_hook_no_new_branches', + "pagure_hook_no_new_branches", cascade="delete, delete-orphan", - single_parent=True, uselist=False) + single_parent=True, + uselist=False, + ), ) class PagureNoNewBranchesForm(FlaskForm): - ''' Form to configure the pagure hook. ''' - active = wtforms.BooleanField( - 'Active', - [wtforms.validators.Optional()] - ) + """ Form to configure the pagure hook. 
""" + + active = wtforms.BooleanField("Active", [wtforms.validators.Optional()]) class PagureNoNewBranchesHook(BaseHook): - ''' PagureNoNewBranches hook. ''' + """ PagureNoNewBranches hook. """ - name = 'Prevent creating new branches by git push' - description = 'This hook prevents creating new branches by git push.' + name = "Prevent creating new branches by git push" + description = "This hook prevents creating new branches by git push." form = PagureNoNewBranchesForm db_object = PagureNoNewBranchesTable - backref = 'pagure_hook_no_new_branches' - form_fields = ['active'] - hook_type = 'pre-receive' + backref = "pagure_hook_no_new_branches" + form_fields = ["active"] + hook_type = "pre-receive" @classmethod def install(cls, project, dbobj): - ''' Method called to install the hook for a project. + """ Method called to install the hook for a project. :arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ repopaths = [get_repo_path(project)] - cls.base_install(repopaths, dbobj, 'pagure_no_new_branches', - 'pagure_no_new_branches') + cls.base_install( + repopaths, + dbobj, + "pagure_no_new_branches", + "pagure_no_new_branches", + ) @classmethod def remove(cls, project): - ''' Method called to remove the hook of a project. + """ Method called to remove the hook of a project. 
:arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ repopaths = [get_repo_path(project)] - cls.base_remove(repopaths, 'pagure_no_new_branches') + cls.base_remove(repopaths, "pagure_no_new_branches") diff --git a/pagure/hooks/pagure_request_hook.py b/pagure/hooks/pagure_request_hook.py index c1a7785..7242eeb 100644 --- a/pagure/hooks/pagure_request_hook.py +++ b/pagure/hooks/pagure_request_hook.py @@ -15,6 +15,7 @@ import os import flask import sqlalchemy as sa import wtforms + try: from flask_wtf import FlaskForm except ImportError: @@ -34,93 +35,101 @@ class PagureRequestsTable(BASE): Table -- hook_pagure_requests """ - __tablename__ = 'hook_pagure_requests' + __tablename__ = "hook_pagure_requests" id = sa.Column(sa.Integer, primary_key=True) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False, unique=True, - index=True) + index=True, + ) active = sa.Column(sa.Boolean, nullable=False, default=False) project = relation( - 'Project', remote_side=[Project.id], + "Project", + remote_side=[Project.id], backref=backref( - 'pagure_hook_requests', cascade="delete, delete-orphan", - single_parent=True, uselist=False) + "pagure_hook_requests", + cascade="delete, delete-orphan", + single_parent=True, + uselist=False, + ), ) class PagureRequestsForm(FlaskForm): - ''' Form to configure the pagure hook. ''' - active = wtforms.BooleanField( - 'Active', - [wtforms.validators.Optional()] - ) + """ Form to configure the pagure hook. """ + + active = wtforms.BooleanField("Active", [wtforms.validators.Optional()]) class PagureRequestHook(BaseHook): - ''' Pagure request hook. ''' + """ Pagure request hook. """ - name = 'Pagure requests' - description = 'Pagure specific hook to update pull-requests stored '\ - 'in the database based on the information pushed in the requests '\ - 'git repository.' 
+ name = "Pagure requests" + description = ( + "Pagure specific hook to update pull-requests stored " + "in the database based on the information pushed in the requests " + "git repository." + ) form = PagureRequestsForm db_object = PagureRequestsTable - backref = 'pagure_hook_requests' - form_fields = ['active'] + backref = "pagure_hook_requests" + form_fields = ["active"] @classmethod def set_up(cls, project): - ''' Install the generic post-receive hook that allow us to call + """ Install the generic post-receive hook that allow us to call multiple post-receive hooks as set per plugin. - ''' - repopath = os.path.join(pagure_config['REQUESTS_FOLDER'], project.path) + """ + repopath = os.path.join(pagure_config["REQUESTS_FOLDER"], project.path) if not os.path.exists(repopath): - flask.abort(404, 'No git repo found') + flask.abort(404, "No git repo found") hook_files = os.path.join( - os.path.dirname(os.path.realpath(__file__)), 'files') + os.path.dirname(os.path.realpath(__file__)), "files" + ) # Make sure the hooks folder exists - hookfolder = os.path.join(repopath, 'hooks') + hookfolder = os.path.join(repopath, "hooks") if not os.path.exists(hookfolder): os.makedirs(hookfolder) # Install the main post-receive file - postreceive = os.path.join(hookfolder, 'post-receive') - hook_file = os.path.join(hook_files, 'post-receive') + postreceive = os.path.join(hookfolder, "post-receive") + hook_file = os.path.join(hook_files, "post-receive") if not os.path.exists(postreceive): os.symlink(hook_file, postreceive) @classmethod def install(cls, project, dbobj): - ''' Method called to install the hook for a project. + """ Method called to install the hook for a project. 
:arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' - repopaths = [os.path.join( - pagure_config['REQUESTS_FOLDER'], project.path)] + """ + repopaths = [ + os.path.join(pagure_config["REQUESTS_FOLDER"], project.path) + ] - cls.base_install(repopaths, dbobj, 'pagure-requests', - 'pagure_hook_requests.py') + cls.base_install( + repopaths, dbobj, "pagure-requests", "pagure_hook_requests.py" + ) @classmethod def remove(cls, project): - ''' Method called to remove the hook of a project. + """ Method called to remove the hook of a project. :arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' - repopaths = [os.path.join( - pagure_config['REQUESTS_FOLDER'], project.path)] + """ + repopaths = [ + os.path.join(pagure_config["REQUESTS_FOLDER"], project.path) + ] - cls.base_remove(repopaths, 'pagure-requests') + cls.base_remove(repopaths, "pagure-requests") diff --git a/pagure/hooks/pagure_ticket_hook.py b/pagure/hooks/pagure_ticket_hook.py index 6e2b9ed..8e30ad8 100644 --- a/pagure/hooks/pagure_ticket_hook.py +++ b/pagure/hooks/pagure_ticket_hook.py @@ -15,6 +15,7 @@ import os import flask import sqlalchemy as sa import wtforms + try: from flask_wtf import FlaskForm except ImportError: @@ -33,93 +34,101 @@ class PagureTicketsTable(BASE): Table -- hook_pagure_tickets """ - __tablename__ = 'hook_pagure_tickets' + __tablename__ = "hook_pagure_tickets" id = sa.Column(sa.Integer, primary_key=True) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False, unique=True, - index=True) + index=True, + ) active = sa.Column(sa.Boolean, nullable=False, default=False) project = relation( - 'Project', remote_side=[Project.id], + "Project", + remote_side=[Project.id], backref=backref( - 'pagure_hook_tickets', cascade="delete, delete-orphan", - single_parent=True, 
uselist=False) + "pagure_hook_tickets", + cascade="delete, delete-orphan", + single_parent=True, + uselist=False, + ), ) class PagureTicketsForm(FlaskForm): - ''' Form to configure the pagure hook. ''' - active = wtforms.BooleanField( - 'Active', - [wtforms.validators.Optional()] - ) + """ Form to configure the pagure hook. """ + + active = wtforms.BooleanField("Active", [wtforms.validators.Optional()]) class PagureTicketHook(BaseHook): - ''' Pagure ticket hook. ''' + """ Pagure ticket hook. """ - name = 'Pagure tickets' - description = 'Pagure specific hook to update tickets stored in the '\ - 'database based on the information pushed in the tickets git '\ - 'repository.' + name = "Pagure tickets" + description = ( + "Pagure specific hook to update tickets stored in the " + "database based on the information pushed in the tickets git " + "repository." + ) form = PagureTicketsForm db_object = PagureTicketsTable - backref = 'pagure_hook_tickets' - form_fields = ['active'] + backref = "pagure_hook_tickets" + form_fields = ["active"] @classmethod def set_up(cls, project): - ''' Install the generic post-receive hook that allow us to call + """ Install the generic post-receive hook that allow us to call multiple post-receive hooks as set per plugin. 
- ''' - repopath = os.path.join(pagure_config['TICKETS_FOLDER'], project.path) + """ + repopath = os.path.join(pagure_config["TICKETS_FOLDER"], project.path) if not os.path.exists(repopath): - flask.abort(404, 'No git repo found') + flask.abort(404, "No git repo found") hook_files = os.path.join( - os.path.dirname(os.path.realpath(__file__)), 'files') + os.path.dirname(os.path.realpath(__file__)), "files" + ) # Make sure the hooks folder exists - hookfolder = os.path.join(repopath, 'hooks') + hookfolder = os.path.join(repopath, "hooks") if not os.path.exists(hookfolder): os.makedirs(hookfolder) # Install the main post-receive file - postreceive = os.path.join(hookfolder, 'post-receive') - hook_file = os.path.join(hook_files, 'post-receive') + postreceive = os.path.join(hookfolder, "post-receive") + hook_file = os.path.join(hook_files, "post-receive") if not os.path.exists(postreceive): os.symlink(hook_file, postreceive) @classmethod def install(cls, project, dbobj): - ''' Method called to install the hook for a project. + """ Method called to install the hook for a project. :arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' - repopaths = [os.path.join( - pagure_config['TICKETS_FOLDER'], project.path)] + """ + repopaths = [ + os.path.join(pagure_config["TICKETS_FOLDER"], project.path) + ] - cls.base_install(repopaths, dbobj, 'pagure-ticket', - 'pagure_hook_tickets.py') + cls.base_install( + repopaths, dbobj, "pagure-ticket", "pagure_hook_tickets.py" + ) @classmethod def remove(cls, project): - ''' Method called to remove the hook of a project. + """ Method called to remove the hook of a project. 
:arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' - repopaths = [os.path.join( - pagure_config['TICKETS_FOLDER'], project.path)] + """ + repopaths = [ + os.path.join(pagure_config["TICKETS_FOLDER"], project.path) + ] - cls.base_remove(repopaths, 'pagure-ticket') + cls.base_remove(repopaths, "pagure-ticket") diff --git a/pagure/hooks/pagure_unsigned_commits.py b/pagure/hooks/pagure_unsigned_commits.py index 1ec9580..56d80c4 100644 --- a/pagure/hooks/pagure_unsigned_commits.py +++ b/pagure/hooks/pagure_unsigned_commits.py @@ -12,6 +12,7 @@ from __future__ import unicode_literals import sqlalchemy as sa import wtforms + try: from flask_wtf import FlaskForm except ImportError: @@ -30,69 +31,77 @@ class PagureUnsignedCommitTable(BASE): Table -- hook_pagure_unsigned_commit """ - __tablename__ = 'hook_pagure_unsigned_commit' + __tablename__ = "hook_pagure_unsigned_commit" id = sa.Column(sa.Integer, primary_key=True) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False, unique=True, - index=True) + index=True, + ) active = sa.Column(sa.Boolean, nullable=False, default=False) project = relation( - 'Project', foreign_keys=[project_id], remote_side=[Project.id], + "Project", + foreign_keys=[project_id], + remote_side=[Project.id], backref=backref( - 'pagure_unsigned_commit_hook', cascade="delete, delete-orphan", - single_parent=True, uselist=False) + "pagure_unsigned_commit_hook", + cascade="delete, delete-orphan", + single_parent=True, + uselist=False, + ), ) class PagureUnsignedCommitForm(FlaskForm): - ''' Form to configure the pagure hook. ''' + """ Form to configure the pagure hook. 
""" - active = wtforms.BooleanField( - 'Active', - [wtforms.validators.Optional()] - ) + active = wtforms.BooleanField("Active", [wtforms.validators.Optional()]) class PagureUnsignedCommitHook(BaseHook): - ''' PagurPagureUnsignedCommit hook. ''' + """ PagurPagureUnsignedCommit hook. """ - name = 'Block Un-Signed commits' - description = 'Using this hook you can block any push with commits '\ + name = "Block Un-Signed commits" + description = ( + "Using this hook you can block any push with commits " 'missing a "Signed-Off-By"' + ) form = PagureUnsignedCommitForm db_object = PagureUnsignedCommitTable - backref = 'pagure_unsigned_commit_hook' - form_fields = ['active'] - hook_type = 'pre-receive' + backref = "pagure_unsigned_commit_hook" + form_fields = ["active"] + hook_type = "pre-receive" @classmethod def install(cls, project, dbobj): - ''' Method called to install the hook for a project. + """ Method called to install the hook for a project. :arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ repopaths = [get_repo_path(project)] - cls.base_install(repopaths, dbobj, 'pagureunsignedcommit', - 'pagure_block_unsigned.py') + cls.base_install( + repopaths, + dbobj, + "pagureunsignedcommit", + "pagure_block_unsigned.py", + ) @classmethod def remove(cls, project): - ''' Method called to remove the hook of a project. + """ Method called to remove the hook of a project. 
:arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ repopaths = [get_repo_path(project)] - cls.base_remove(repopaths, 'pagureunsignedcommit') + cls.base_remove(repopaths, "pagureunsignedcommit") diff --git a/pagure/hooks/rtd.py b/pagure/hooks/rtd.py index d4aeb22..37a333c 100644 --- a/pagure/hooks/rtd.py +++ b/pagure/hooks/rtd.py @@ -32,16 +32,16 @@ class RtdTable(BASE): Table -- hook_rtd """ - __tablename__ = 'hook_rtd' + __tablename__ = "hook_rtd" id = sa.Column(sa.Integer, primary_key=True) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False, unique=True, - index=True) + index=True, + ) active = sa.Column(sa.Boolean, nullable=False, default=False) @@ -50,35 +50,36 @@ class RtdTable(BASE): api_token = sa.Column(sa.Text, nullable=False) project = relation( - 'Project', remote_side=[Project.id], + "Project", + remote_side=[Project.id], backref=backref( - 'rtd_hook', cascade="delete, delete-orphan", - single_parent=True, uselist=False) + "rtd_hook", + cascade="delete, delete-orphan", + single_parent=True, + uselist=False, + ), ) class RtdForm(FlaskForm): - ''' Form to configure the pagure hook. ''' + """ Form to configure the pagure hook. 
""" + api_url = wtforms.TextField( - 'URL endpoint used to trigger the builds', - [wtforms.validators.Optional()] + "URL endpoint used to trigger the builds", + [wtforms.validators.Optional()], ) api_token = wtforms.TextField( - 'API token provided by readthedocs', - [wtforms.validators.Optional()] + "API token provided by readthedocs", [wtforms.validators.Optional()] ) branches = wtforms.TextField( - 'Restrict build to these branches only (comma separated)', - [wtforms.validators.Optional()] + "Restrict build to these branches only (comma separated)", + [wtforms.validators.Optional()], ) - active = wtforms.BooleanField( - 'Active', - [wtforms.validators.Optional()] - ) + active = wtforms.BooleanField("Active", [wtforms.validators.Optional()]) -DESCRIPTION = ''' +DESCRIPTION = """ Git hook to trigger building documentation on the readthedocs.org service when a commit is pushed to the repository. @@ -92,39 +93,39 @@ add a new ``Generic API incoming webhook``. This will give you access to one URL and one API token, both of which you will have to provide below. -''' +""" class RtdHook(BaseHook): - ''' Read The Doc hook. ''' + """ Read The Doc hook. """ - name = 'Read the Doc' + name = "Read the Doc" description = DESCRIPTION form = RtdForm db_object = RtdTable - backref = 'rtd_hook' - form_fields = ['active', 'api_url', 'api_token', 'branches'] + backref = "rtd_hook" + form_fields = ["active", "api_url", "api_token", "branches"] @classmethod def install(cls, project, dbobj): - ''' Method called to install the hook for a project. + """ Method called to install the hook for a project. :arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ repopaths = [get_repo_path(project)] - cls.base_install(repopaths, dbobj, 'rtd', 'rtd_hook.py') + cls.base_install(repopaths, dbobj, "rtd", "rtd_hook.py") @classmethod def remove(cls, project): - ''' Method called to remove the hook of a project. 
+ """ Method called to remove the hook of a project. :arg project: a ``pagure.model.Project`` object to which the hook should be installed - ''' + """ repopaths = [get_repo_path(project)] - cls.base_remove(repopaths, 'rtd') + cls.base_remove(repopaths, "rtd") diff --git a/pagure/internal/__init__.py b/pagure/internal/__init__.py index c62cee8..29c4f4d 100644 --- a/pagure/internal/__init__.py +++ b/pagure/internal/__init__.py @@ -22,7 +22,7 @@ import pygit2 from functools import wraps from sqlalchemy.exc import SQLAlchemyError -PV = flask.Blueprint('internal_ns', __name__, url_prefix='/pv') +PV = flask.Blueprint("internal_ns", __name__, url_prefix="/pv") import pagure # noqa: E402 import pagure.exceptions # noqa: E402 @@ -38,67 +38,69 @@ _log = logging.getLogger(__name__) MERGE_OPTIONS = { - 'NO_CHANGE': { - 'short_code': 'No changes', - 'message': 'Nothing to change, git is up to date' + "NO_CHANGE": { + "short_code": "No changes", + "message": "Nothing to change, git is up to date", }, - 'FFORWARD': { - 'short_code': 'Ok', - 'message': 'The pull-request can be merged and fast-forwarded' + "FFORWARD": { + "short_code": "Ok", + "message": "The pull-request can be merged and fast-forwarded", }, - 'CONFLICTS': { - 'short_code': 'Conflicts', - 'message': 'The pull-request cannot be merged due to conflicts' + "CONFLICTS": { + "short_code": "Conflicts", + "message": "The pull-request cannot be merged due to conflicts", + }, + "MERGE": { + "short_code": "With merge", + "message": "The pull-request can be merged with a merge commit", }, - 'MERGE': { - 'short_code': 'With merge', - 'message': 'The pull-request can be merged with a merge commit' - } } def localonly(function): - ''' Decorator used to check if the request is local or not. - ''' + """ Decorator used to check if the request is local or not. + """ + @wraps(function) def decorated_function(*args, **kwargs): - ''' Wrapped function actually checking if the request is local. 
- ''' + """ Wrapped function actually checking if the request is local. + """ ip_allowed = pagure.config.config.get( - 'IP_ALLOWED_INTERNAL', ['127.0.0.1', 'localhost', '::1']) + "IP_ALLOWED_INTERNAL", ["127.0.0.1", "localhost", "::1"] + ) if flask.request.remote_addr not in ip_allowed: - _log.debug('IP: %s is not in the list of allowed IPs: %s' % ( - flask.request.remote_addr, ip_allowed)) + _log.debug( + "IP: %s is not in the list of allowed IPs: %s" + % (flask.request.remote_addr, ip_allowed) + ) flask.abort(403) else: return function(*args, **kwargs) + return decorated_function -@PV.route('/pull-request/comment/', methods=['PUT']) +@PV.route("/pull-request/comment/", methods=["PUT"]) @localonly def pull_request_add_comment(): """ Add a comment to a pull-request. """ pform = pagure.forms.ProjectCommentForm(csrf_enabled=False) if not pform.validate_on_submit(): - flask.abort(400, 'Invalid request') + flask.abort(400, "Invalid request") objid = pform.objid.data useremail = pform.useremail.data - request = pagure.lib.get_request_by_uid( - flask.g.session, - request_uid=objid, - ) + request = pagure.lib.get_request_by_uid(flask.g.session, request_uid=objid) if not request: - flask.abort(404, 'Pull-request not found') + flask.abort(404, "Pull-request not found") form = pagure.forms.AddPullRequestCommentForm(csrf_enabled=False) if not form.validate_on_submit(): - flask.abort(400, 'Invalid request') + flask.abort(400, "Invalid request") commit = form.commit.data or None tree_id = form.tree_id.data or None @@ -116,52 +118,55 @@ def pull_request_add_comment(): row=row, comment=comment, user=useremail, - requestfolder=pagure.config.config['REQUESTS_FOLDER'], + requestfolder=pagure.config.config["REQUESTS_FOLDER"], ) flask.g.session.commit() except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() _log.exception(err) - flask.abort(500, 'Error when saving the request to the database') + flask.abort(500, "Error when saving the request to the database") - 
return flask.jsonify({'message': message}) + return flask.jsonify({"message": message}) -@PV.route('/ticket/comment/', methods=['PUT']) +@PV.route("/ticket/comment/", methods=["PUT"]) @localonly def ticket_add_comment(): """ Add a comment to an issue. """ pform = pagure.forms.ProjectCommentForm(csrf_enabled=False) if not pform.validate_on_submit(): - flask.abort(400, 'Invalid request') + flask.abort(400, "Invalid request") objid = pform.objid.data useremail = pform.useremail.data - issue = pagure.lib.get_issue_by_uid( - flask.g.session, - issue_uid=objid - ) + issue = pagure.lib.get_issue_by_uid(flask.g.session, issue_uid=objid) if issue is None: - flask.abort(404, 'Issue not found') + flask.abort(404, "Issue not found") user_obj = pagure.lib.search_user(flask.g.session, email=useremail) admin = False if user_obj: admin = user_obj.user == issue.project.user.user or ( - user_obj.user in [user.user for user in issue.project.committers]) + user_obj.user in [user.user for user in issue.project.committers] + ) - if issue.private and user_obj and not admin \ - and not issue.user.user == user_obj.username: + if ( + issue.private + and user_obj + and not admin + and not issue.user.user == user_obj.username + ): flask.abort( - 403, 'This issue is private and you are not allowed to view it') + 403, "This issue is private and you are not allowed to view it" + ) form = pagure.forms.CommentForm(csrf_enabled=False) if not form.validate_on_submit(): - flask.abort(400, 'Invalid request') + flask.abort(400, "Invalid request") comment = form.comment.data @@ -171,52 +176,57 @@ def ticket_add_comment(): issue=issue, comment=comment, user=useremail, - ticketfolder=pagure.config.config['TICKETS_FOLDER'], - notify=True) + ticketfolder=pagure.config.config["TICKETS_FOLDER"], + notify=True, + ) flask.g.session.commit() except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() _log.exception(err) - flask.abort(500, 'Error when saving the request to the database') + 
flask.abort(500, "Error when saving the request to the database") - return flask.jsonify({'message': message}) + return flask.jsonify({"message": message}) -@PV.route('/pull-request/merge', methods=['POST']) +@PV.route("/pull-request/merge", methods=["POST"]) def mergeable_request_pull(): """ Returns if the specified pull-request can be merged or not. """ - force = flask.request.form.get('force', False) + force = flask.request.form.get("force", False) if force is not False: force = True form = pagure.forms.ConfirmationForm() if not form.validate_on_submit(): - response = flask.jsonify({ - 'code': 'CONFLICTS', - 'message': 'Invalid input submitted', - }) + response = flask.jsonify( + {"code": "CONFLICTS", "message": "Invalid input submitted"} + ) response.status_code = 400 return response - requestid = flask.request.form.get('requestid') + requestid = flask.request.form.get("requestid") request = pagure.lib.get_request_by_uid( - flask.g.session, request_uid=requestid) + flask.g.session, request_uid=requestid + ) if not request: - response = flask.jsonify({ - 'code': 'CONFLICTS', - 'message': 'Pull-request not found', - }) + response = flask.jsonify( + {"code": "CONFLICTS", "message": "Pull-request not found"} + ) response.status_code = 404 return response if request.merge_status and not force: - return flask.jsonify({ - 'code': request.merge_status, - 'short_code': MERGE_OPTIONS[request.merge_status]['short_code'], - 'message': MERGE_OPTIONS[request.merge_status]['message']}) + return flask.jsonify( + { + "code": request.merge_status, + "short_code": MERGE_OPTIONS[request.merge_status][ + "short_code" + ], + "message": MERGE_OPTIONS[request.merge_status]["message"], + } + ) try: merge_status = pagure.lib.git.merge_pull_request( @@ -224,25 +234,27 @@ def mergeable_request_pull(): request=request, username=None, request_folder=None, - domerge=False) + domerge=False, + ) except pygit2.GitError as err: - response = flask.jsonify({ - 'code': 'CONFLICTS', 'message': '%s' 
% err}) + response = flask.jsonify({"code": "CONFLICTS", "message": "%s" % err}) response.status_code = 409 return response except pagure.exceptions.PagureException as err: - response = flask.jsonify({ - 'code': 'CONFLICTS', 'message': '%s' % err}) + response = flask.jsonify({"code": "CONFLICTS", "message": "%s" % err}) response.status_code = 500 return response - return flask.jsonify({ - 'code': merge_status, - 'short_code': MERGE_OPTIONS[merge_status]['short_code'], - 'message': MERGE_OPTIONS[merge_status]['message']}) + return flask.jsonify( + { + "code": merge_status, + "short_code": MERGE_OPTIONS[merge_status]["short_code"], + "message": MERGE_OPTIONS[merge_status]["message"], + } + ) -@PV.route('/pull-request/ready', methods=['POST']) +@PV.route("/pull-request/ready", methods=["POST"]) def get_pull_request_ready_branch(): """ Return the list of branches that have commits not in the main branch/repo (thus for which one could open a PR) and the number of @@ -250,46 +262,52 @@ def get_pull_request_ready_branch(): """ form = pagure.forms.ConfirmationForm() if not form.validate_on_submit(): - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'Invalid input submitted', - }) + response = flask.jsonify( + {"code": "ERROR", "message": "Invalid input submitted"} + ) response.status_code = 400 return response repo = pagure.lib.get_authorized_project( flask.g.session, - flask.request.form.get('repo', '').strip() or None, - namespace=flask.request.form.get('namespace', '').strip() or None, - user=flask.request.form.get('repouser', '').strip() or None) + flask.request.form.get("repo", "").strip() or None, + namespace=flask.request.form.get("namespace", "").strip() or None, + user=flask.request.form.get("repouser", "").strip() or None, + ) if not repo: - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'No repo found with the information provided', - }) + response = flask.jsonify( + { + "code": "ERROR", + "message": "No repo found with the information 
provided", + } + ) response.status_code = 404 return response reponame = pagure.utils.get_repo_path(repo) repo_obj = pygit2.Repository(reponame) if repo.is_fork and repo.parent: - if not repo.parent.settings.get('pull_requests', True): - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'Pull-request have been disabled for this repo', - }) + if not repo.parent.settings.get("pull_requests", True): + response = flask.jsonify( + { + "code": "ERROR", + "message": "Pull-request have been disabled for this repo", + } + ) response.status_code = 400 return response parentreponame = pagure.utils.get_repo_path(repo.parent) parent_repo_obj = pygit2.Repository(parentreponame) else: - if not repo.settings.get('pull_requests', True): - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'Pull-request have been disabled for this repo', - }) + if not repo.settings.get("pull_requests", True): + response = flask.jsonify( + { + "code": "ERROR", + "message": "Pull-request have been disabled for this repo", + } + ) response.status_code = 400 return response @@ -299,110 +317,120 @@ def get_pull_request_ready_branch(): if not repo_obj.is_empty and len(repo_obj.listall_branches()) > 0: for branchname in repo_obj.listall_branches(): compare_branch = None - if not parent_repo_obj.is_empty \ - and not parent_repo_obj.head_is_unborn: + if ( + not parent_repo_obj.is_empty + and not parent_repo_obj.head_is_unborn + ): try: - if pagure.config.config.get('PR_TARGET_MATCHING_BRANCH', - False): + if pagure.config.config.get( + "PR_TARGET_MATCHING_BRANCH", False + ): # find parent branch which is the longest substring of # branch that we're processing - compare_branch = '' + compare_branch = "" for parent_branch in parent_repo_obj.branches: - if not repo.is_fork \ - and branchname == parent_branch: + if ( + not repo.is_fork + and branchname == parent_branch + ): continue - if branchname.startswith(parent_branch) and \ - len(parent_branch) > len(compare_branch): + if 
branchname.startswith(parent_branch) and len( + parent_branch + ) > len(compare_branch): compare_branch = parent_branch - compare_branch = compare_branch \ - or repo_obj.head.shorthand + compare_branch = ( + compare_branch or repo_obj.head.shorthand + ) else: compare_branch = repo_obj.head.shorthand except pygit2.GitError: pass # let compare_branch be None # Do not compare a branch to itself - if not repo.is_fork \ - and compare_branch \ - and compare_branch == branchname: + if ( + not repo.is_fork + and compare_branch + and compare_branch == branchname + ): continue diff_commits = None try: _, diff_commits, _ = pagure.lib.git.get_diff_info( - repo_obj, parent_repo_obj, branchname, compare_branch) + repo_obj, parent_repo_obj, branchname, compare_branch + ) except pagure.exceptions.PagureException: pass if diff_commits: branches[branchname] = { - 'commits': [c.oid.hex for c in diff_commits], - 'target_branch': compare_branch or 'master', + "commits": [c.oid.hex for c in diff_commits], + "target_branch": compare_branch or "master", } prs = pagure.lib.search_pull_requests( - flask.g.session, - project_id_from=repo.id, - status='Open' + flask.g.session, project_id_from=repo.id, status="Open" ) branches_pr = {} for pr in prs: if pr.branch_from in branches: - branches_pr[pr.branch_from] = '%s/pull-request/%s' % ( - pr.project.url_path, pr.id) - del(branches[pr.branch_from]) + branches_pr[pr.branch_from] = "%s/pull-request/%s" % ( + pr.project.url_path, + pr.id, + ) + del (branches[pr.branch_from]) return flask.jsonify( { - 'code': 'OK', - 'message': { - 'new_branch': branches, - 'branch_w_pr': branches_pr, - }, + "code": "OK", + "message": {"new_branch": branches, "branch_w_pr": branches_pr}, } ) -@PV.route('//issue/template', methods=['POST']) -@PV.route('///issue/template', methods=['POST']) -@PV.route('/fork///issue/template', methods=['POST']) -@PV.route('/fork////issue/template', - methods=['POST']) +@PV.route("//issue/template", methods=["POST"]) 
+@PV.route("///issue/template", methods=["POST"]) +@PV.route("/fork///issue/template", methods=["POST"]) +@PV.route( + "/fork////issue/template", methods=["POST"] +) def get_ticket_template(repo, namespace=None, username=None): """ Return the template asked for the specified project """ form = pagure.forms.ConfirmationForm() if not form.validate_on_submit(): - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'Invalid input submitted', - }) + response = flask.jsonify( + {"code": "ERROR", "message": "Invalid input submitted"} + ) response.status_code = 400 return response - template = flask.request.args.get('template', None) + template = flask.request.args.get("template", None) if not template: - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'No template provided', - }) + response = flask.jsonify( + {"code": "ERROR", "message": "No template provided"} + ) response.status_code = 400 return response repo = pagure.lib.get_authorized_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, namespace=namespace + ) - if not repo.settings.get('issue_tracker', True): - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'No issue tracker found for this project', - }) + if not repo.settings.get("issue_tracker", True): + response = flask.jsonify( + { + "code": "ERROR", + "message": "No issue tracker found for this project", + } + ) response.status_code = 404 return response ticketrepopath = os.path.join( - pagure.config.config['TICKETS_FOLDER'], repo.path) + pagure.config.config["TICKETS_FOLDER"], repo.path + ) content = None if os.path.exists(ticketrepopath): ticketrepo = pygit2.Repository(ticketrepopath) @@ -410,68 +438,71 @@ def get_ticket_template(repo, namespace=None, username=None): commit = ticketrepo[ticketrepo.head.target] # Get the asked template content_file = pagure.utils.__get_file_in_tree( - ticketrepo, commit.tree, ['templates', '%s.md' % template], - bail_on_tree=True) + 
ticketrepo, + commit.tree, + ["templates", "%s.md" % template], + bail_on_tree=True, + ) if content_file: content, _ = pagure.doc_utils.convert_readme( - content_file.data, 'md') + content_file.data, "md" + ) if content: - response = flask.jsonify({ - 'code': 'OK', - 'message': content, - }) + response = flask.jsonify({"code": "OK", "message": content}) else: - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'No such template found', - }) + response = flask.jsonify( + {"code": "ERROR", "message": "No such template found"} + ) response.status_code = 404 return response -@PV.route('/branches/commit/', methods=['POST']) +@PV.route("/branches/commit/", methods=["POST"]) def get_branches_of_commit(): """ Return the list of branches that have the specified commit in """ form = pagure.forms.ConfirmationForm() if not form.validate_on_submit(): - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'Invalid input submitted', - }) + response = flask.jsonify( + {"code": "ERROR", "message": "Invalid input submitted"} + ) response.status_code = 400 return response - commit_id = flask.request.form.get('commit_id', '').strip() or None + commit_id = flask.request.form.get("commit_id", "").strip() or None if not commit_id: - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'No commit id submitted', - }) + response = flask.jsonify( + {"code": "ERROR", "message": "No commit id submitted"} + ) response.status_code = 400 return response repo = pagure.lib.get_authorized_project( flask.g.session, - flask.request.form.get('repo', '').strip() or None, - user=flask.request.form.get('repouser', '').strip() or None, - namespace=flask.request.form.get('namespace', '').strip() or None,) + flask.request.form.get("repo", "").strip() or None, + user=flask.request.form.get("repouser", "").strip() or None, + namespace=flask.request.form.get("namespace", "").strip() or None, + ) if not repo: - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'No repo found with 
the information provided', - }) + response = flask.jsonify( + { + "code": "ERROR", + "message": "No repo found with the information provided", + } + ) response.status_code = 404 return response - repopath = os.path.join(pagure.config.config['GIT_FOLDER'], repo.path) + repopath = os.path.join(pagure.config.config["GIT_FOLDER"], repo.path) if not os.path.exists(repopath): - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'No git repo found with the information provided', - }) + response = flask.jsonify( + { + "code": "ERROR", + "message": "No git repo found with the information provided", + } + ) response.status_code = 404 return response @@ -480,17 +511,18 @@ def get_branches_of_commit(): try: commit_id in repo_obj except ValueError: - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'This commit could not be found in this repo', - }) + response = flask.jsonify( + { + "code": "ERROR", + "message": "This commit could not be found in this repo", + } + ) response.status_code = 404 return response branches = [] if not repo_obj.head_is_unborn: - compare_branch = repo_obj.lookup_branch( - repo_obj.head.shorthand) + compare_branch = repo_obj.lookup_branch(repo_obj.head.shorthand) else: compare_branch = None @@ -503,8 +535,7 @@ def get_branches_of_commit(): if compare_branch: merge_commit_obj = repo_obj.merge_base( - compare_branch.get_object().hex, - branch.get_object().hex + compare_branch.get_object().hex, branch.get_object().hex ) if merge_commit_obj: @@ -513,7 +544,8 @@ def get_branches_of_commit(): repo_commit = repo_obj[branch.get_object().hex] for commit in repo_obj.walk( - repo_commit.oid.hex, pygit2.GIT_SORT_TIME): + repo_commit.oid.hex, pygit2.GIT_SORT_TIME + ): if commit.oid.hex == merge_commit: break if commit.oid.hex == commit_id: @@ -525,15 +557,10 @@ def get_branches_of_commit(): if not branches and compare_branch: branches.append(compare_branch.branch_name) - return flask.jsonify( - { - 'code': 'OK', - 'branches': branches, - } - ) + 
return flask.jsonify({"code": "OK", "branches": branches}) -@PV.route('/branches/heads/', methods=['POST']) +@PV.route("/branches/heads/", methods=["POST"]) def get_branches_head(): """ Return the heads of each branch in the repo, using the following structure: @@ -551,34 +578,38 @@ def get_branches_head(): """ form = pagure.forms.ConfirmationForm() if not form.validate_on_submit(): - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'Invalid input submitted', - }) + response = flask.jsonify( + {"code": "ERROR", "message": "Invalid input submitted"} + ) response.status_code = 400 return response repo = pagure.lib.get_authorized_project( flask.g.session, - flask.request.form.get('repo', '').strip() or None, - namespace=flask.request.form.get('namespace', '').strip() or None, - user=flask.request.form.get('repouser', '').strip() or None) + flask.request.form.get("repo", "").strip() or None, + namespace=flask.request.form.get("namespace", "").strip() or None, + user=flask.request.form.get("repouser", "").strip() or None, + ) if not repo: - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'No repo found with the information provided', - }) + response = flask.jsonify( + { + "code": "ERROR", + "message": "No repo found with the information provided", + } + ) response.status_code = 404 return response - repopath = os.path.join(pagure.config.config['GIT_FOLDER'], repo.path) + repopath = os.path.join(pagure.config.config["GIT_FOLDER"], repo.path) if not os.path.exists(repopath): - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'No git repo found with the information provided', - }) + response = flask.jsonify( + { + "code": "ERROR", + "message": "No git repo found with the information provided", + } + ) response.status_code = 404 return response @@ -595,16 +626,10 @@ def get_branches_head(): for branch, commit in branches.items(): heads[commit].append(branch) - return flask.jsonify( - { - 'code': 'OK', - 'branches': branches, - 'heads': heads, 
- } - ) + return flask.jsonify({"code": "OK", "branches": branches, "heads": heads}) -@PV.route('/task/', methods=['GET']) +@PV.route("/task/", methods=["GET"]) def task_info(taskid): """ Return the results of the specified task or a 418 if the task is still being processed. @@ -615,100 +640,103 @@ def task_info(taskid): result = task.get(timeout=0, propagate=False) if isinstance(result, Exception): result = "%s" % result - return flask.jsonify({'results': result}) + return flask.jsonify({"results": result}) else: flask.abort(418) -@PV.route('/stats/commits/authors', methods=['POST']) +@PV.route("/stats/commits/authors", methods=["POST"]) def get_stats_commits(): """ Return statistics about the commits made on the specified repo. """ form = pagure.forms.ConfirmationForm() if not form.validate_on_submit(): - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'Invalid input submitted', - }) + response = flask.jsonify( + {"code": "ERROR", "message": "Invalid input submitted"} + ) response.status_code = 400 return response repo = pagure.lib.get_authorized_project( flask.g.session, - flask.request.form.get('repo', '').strip() or None, - namespace=flask.request.form.get('namespace', '').strip() or None, - user=flask.request.form.get('repouser', '').strip() or None) + flask.request.form.get("repo", "").strip() or None, + namespace=flask.request.form.get("namespace", "").strip() or None, + user=flask.request.form.get("repouser", "").strip() or None, + ) if not repo: - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'No repo found with the information provided', - }) + response = flask.jsonify( + { + "code": "ERROR", + "message": "No repo found with the information provided", + } + ) response.status_code = 404 return response - repopath = os.path.join(pagure.config.config['GIT_FOLDER'], repo.path) + repopath = os.path.join(pagure.config.config["GIT_FOLDER"], repo.path) task = pagure.lib.tasks.commits_author_stats.delay(repopath) return flask.jsonify( { - 
'code': 'OK', - 'message': 'Stats asked', - 'url': flask.url_for('internal_ns.task_info', taskid=task.id), - 'task_id': task.id, + "code": "OK", + "message": "Stats asked", + "url": flask.url_for("internal_ns.task_info", taskid=task.id), + "task_id": task.id, } ) -@PV.route('/stats/commits/trend', methods=['POST']) +@PV.route("/stats/commits/trend", methods=["POST"]) def get_stats_commits_trend(): """ Return evolution of the commits made on the specified repo. """ form = pagure.forms.ConfirmationForm() if not form.validate_on_submit(): - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'Invalid input submitted', - }) + response = flask.jsonify( + {"code": "ERROR", "message": "Invalid input submitted"} + ) response.status_code = 400 return response repo = pagure.lib.get_authorized_project( flask.g.session, - flask.request.form.get('repo', '').strip() or None, - namespace=flask.request.form.get('namespace', '').strip() or None, - user=flask.request.form.get('repouser', '').strip() or None) + flask.request.form.get("repo", "").strip() or None, + namespace=flask.request.form.get("namespace", "").strip() or None, + user=flask.request.form.get("repouser", "").strip() or None, + ) if not repo: - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'No repo found with the information provided', - }) + response = flask.jsonify( + { + "code": "ERROR", + "message": "No repo found with the information provided", + } + ) response.status_code = 404 return response - repopath = os.path.join(pagure.config.config['GIT_FOLDER'], repo.path) + repopath = os.path.join(pagure.config.config["GIT_FOLDER"], repo.path) task = pagure.lib.tasks.commits_history_stats.delay(repopath) return flask.jsonify( { - 'code': 'OK', - 'message': 'Stats asked', - 'url': flask.url_for('internal_ns.task_info', taskid=task.id), - 'task_id': task.id, + "code": "OK", + "message": "Stats asked", + "url": flask.url_for("internal_ns.task_info", taskid=task.id), + "task_id": task.id, } ) 
-@PV.route('//family', methods=['POST']) -@PV.route('///family', methods=['POST']) -@PV.route('/fork///family', methods=['POST']) -@PV.route('/fork////family', - methods=['POST']) +@PV.route("//family", methods=["POST"]) +@PV.route("///family", methods=["POST"]) +@PV.route("/fork///family", methods=["POST"]) +@PV.route("/fork////family", methods=["POST"]) def get_project_family(repo, namespace=None, username=None): """ Return the family of projects for the specified project @@ -719,52 +747,52 @@ def get_project_family(repo, namespace=None, username=None): } """ - allows_pr = flask.request.form.get( - 'allows_pr', '').lower().strip() in ['1', 'true'] + allows_pr = flask.request.form.get("allows_pr", "").lower().strip() in [ + "1", + "true", + ] allows_issues = flask.request.form.get( - 'allows_issues', '').lower().strip() in ['1', 'true'] + "allows_issues", "" + ).lower().strip() in ["1", "true"] form = pagure.forms.ConfirmationForm() if not form.validate_on_submit(): - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'Invalid input submitted', - }) + response = flask.jsonify( + {"code": "ERROR", "message": "Invalid input submitted"} + ) response.status_code = 400 return response repo = pagure.lib.get_authorized_project( - flask.g.session, repo, user=username, namespace=namespace) + flask.g.session, repo, user=username, namespace=namespace + ) if not repo: - response = flask.jsonify({ - 'code': 'ERROR', - 'message': 'No repo found with the information provided', - }) + response = flask.jsonify( + { + "code": "ERROR", + "message": "No repo found with the information provided", + } + ) response.status_code = 404 return response if allows_pr: family = [ - p.url_path for p in - pagure.lib.get_project_family(flask.g.session, repo) - if p.settings.get('pull_requests', True) + p.url_path + for p in pagure.lib.get_project_family(flask.g.session, repo) + if p.settings.get("pull_requests", True) ] elif allows_issues: family = [ - p.url_path for p in - 
pagure.lib.get_project_family(flask.g.session, repo) - if p.settings.get('issue_tracker', True) + p.url_path + for p in pagure.lib.get_project_family(flask.g.session, repo) + if p.settings.get("issue_tracker", True) ] else: family = [ - p.url_path for p in - pagure.lib.get_project_family(flask.g.session, repo) + p.url_path + for p in pagure.lib.get_project_family(flask.g.session, repo) ] - return flask.jsonify( - { - 'code': 'OK', - 'family': family - } - ) + return flask.jsonify({"code": "OK", "family": family}) diff --git a/pagure/lib/__init__.py b/pagure/lib/__init__.py index 2e2905e..c6c9b24 100644 --- a/pagure/lib/__init__.py +++ b/pagure/lib/__init__.py @@ -74,6 +74,7 @@ class Unspecified(object): """ Custom None object used to indicate that the caller has not made a choice for a particular argument. """ + pass @@ -98,9 +99,7 @@ def get_user(session, key): user_obj = search_user(session, email=key) if not user_obj: - raise pagure.exceptions.PagureException( - 'No user "%s" found' % key - ) + raise pagure.exceptions.PagureException('No user "%s" found' % key) return user_obj @@ -108,11 +107,7 @@ def get_user(session, key): def get_user_by_id(session, userid): """ Searches for a user in the database for a given username or email. """ - query = session.query( - model.User - ).filter( - model.User.id == userid - ) + query = session.query(model.User).filter(model.User.id == userid) return query.first() @@ -121,7 +116,7 @@ SESSIONMAKER = None def create_session(db_url=None, debug=False, pool_recycle=3600): - ''' Create the Session object to use to query the database. + """ Create the Session object to use to query the database. :arg db_url: URL used to connect to the database. The URL contains information with regards to the database engine, the host to connect @@ -131,28 +126,34 @@ def create_session(db_url=None, debug=False, pool_recycle=3600): output of sqlalchemy or not. :return a Session that can be used to query the database. 
- ''' + """ global SESSIONMAKER if SESSIONMAKER is None or ( - db_url and db_url != ("%s" % SESSIONMAKER.kw['bind'].engine.url)): + db_url and db_url != ("%s" % SESSIONMAKER.kw["bind"].engine.url) + ): if db_url is None: raise ValueError("First call to create_session needs db_url") - if db_url.startswith('postgres'): # pragma: no cover + if db_url.startswith("postgres"): # pragma: no cover engine = sqlalchemy.create_engine( - db_url, echo=debug, pool_recycle=pool_recycle, - client_encoding='utf8') + db_url, + echo=debug, + pool_recycle=pool_recycle, + client_encoding="utf8", + ) else: # pragma: no cover engine = sqlalchemy.create_engine( - db_url, echo=debug, pool_recycle=pool_recycle) + db_url, echo=debug, pool_recycle=pool_recycle + ) - if db_url.startswith('sqlite:'): + if db_url.startswith("sqlite:"): # Ignore the warning about con_record # pylint: disable=unused-argument def _fk_pragma_on_connect(dbapi_con, _): # pragma: no cover - ''' Tries to enforce referential constraints on sqlite. ''' - dbapi_con.execute('pragma foreign_keys=ON') - sqlalchemy.event.listen(engine, 'connect', _fk_pragma_on_connect) + """ Tries to enforce referential constraints on sqlite. """ + dbapi_con.execute("pragma foreign_keys=ON") + + sqlalchemy.event.listen(engine, "connect", _fk_pragma_on_connect) SESSIONMAKER = sessionmaker(bind=engine) scopedsession = scoped_session(SESSIONMAKER) @@ -164,23 +165,15 @@ def get_next_id(session, projectid): """ Returns the next identifier of a project ticket or pull-request based on the identifier already in the database. 
""" - query1 = session.query( - func.max(model.Issue.id) - ).filter( + query1 = session.query(func.max(model.Issue.id)).filter( model.Issue.project_id == projectid ) - query2 = session.query( - func.max(model.PullRequest.id) - ).filter( + query2 = session.query(func.max(model.PullRequest.id)).filter( model.PullRequest.project_id == projectid ) - ids = [ - el[0] - for el in query1.union(query2).all() - if el[0] is not None - ] + ids = [el[0] for el in query1.union(query2).all() if el[0] is not None] nid = 0 if ids: nid = max(ids) @@ -189,7 +182,7 @@ def get_next_id(session, projectid): def search_user(session, username=None, email=None, token=None, pattern=None): - ''' Searches the database for the user or users matching the given + """ Searches the database for the user or users matching the given criterias. :arg session: the session to use to connect to the database. @@ -205,35 +198,23 @@ def search_user(session, username=None, email=None, token=None, pattern=None): specified, a list of User objects otherwise. 
:rtype: User or [User] - ''' - query = session.query( - model.User - ).order_by( - model.User.user - ) + """ + query = session.query(model.User).order_by(model.User.user) if username is not None: - query = query.filter( - model.User.user == username - ) + query = query.filter(model.User.user == username) if email is not None: - query = query.filter( - model.UserEmail.user_id == model.User.id - ).filter( + query = query.filter(model.UserEmail.user_id == model.User.id).filter( model.UserEmail.email == email ) if token is not None: - query = query.filter( - model.User.token == token - ) + query = query.filter(model.User.token == token) if pattern: - pattern = pattern.replace('*', '%') - query = query.filter( - model.User.user.like(pattern) - ) + pattern = pattern.replace("*", "%") + query = query.filter(model.User.user.like(pattern)) if any([username, email, token]): output = query.first() @@ -253,16 +234,14 @@ def is_valid_ssh_key(key): if not key: return None with tempfile.TemporaryFile() as f: - f.write(key.encode('utf-8')) + f.write(key.encode("utf-8")) f.seek(0) - cmd = ['/usr/bin/ssh-keygen', '-l', '-f', - '/dev/stdin'] + cmd = ["/usr/bin/ssh-keygen", "-l", "-f", "/dev/stdin"] if _is_valid_ssh_key_force_md5: - cmd.extend(['-E', 'md5']) - proc = subprocess.Popen(cmd, - stdin=f, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + cmd.extend(["-E", "md5"]) + proc = subprocess.Popen( + cmd, stdin=f, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) stdout, stderr = proc.communicate() if proc.returncode != 0: return False @@ -280,8 +259,8 @@ def is_valid_ssh_key(key): # Example line: # with hash: 1024 SHA256:ztcRX... root@test (RSA) # without : 1024 f9:a2:... 
key (RSA) - keyparts = stdout.split('\n')[0].split(' ')[1].split(':') - if len(keyparts) == 2 or keyparts[0].upper() in ('MD5', 'SHA256'): + keyparts = stdout.split("\n")[0].split(" ")[1].split(":") + if len(keyparts) == 2 or keyparts[0].upper() in ("MD5", "SHA256"): # This means that we get a keyid of HASH: rather than just # , which indicates this is a system that supports multiple # hash methods. Record this, and recall ourselves. @@ -297,27 +276,30 @@ def is_valid_ssh_key(key): def are_valid_ssh_keys(keys): """ Checks if all the ssh keys are valid or not. """ - return all([is_valid_ssh_key(key) is not False - for key in keys.split('\n')]) + return all( + [is_valid_ssh_key(key) is not False for key in keys.split("\n")] + ) def create_deploykeys_ssh_keys_on_disk(project, gitolite_keydir): - ''' Create the ssh keys for the projects' deploy keys on the key dir. + """ Create the ssh keys for the projects' deploy keys on the key dir. This method does NOT support multiple ssh keys per deploy key. - ''' + """ if not gitolite_keydir: # Nothing to do here, move right along return # First remove deploykeys that no longer exist - keyfiles = ['deploykey_%s_%s.pub' % - (werkzeug.secure_filename(project.fullname), - key.id) - for key in project.deploykeys] + keyfiles = [ + "deploykey_%s_%s.pub" + % (werkzeug.secure_filename(project.fullname), key.id) + for key in project.deploykeys + ] - project_key_dir = os.path.join(gitolite_keydir, 'deploykeys', - project.fullname) + project_key_dir = os.path.join( + gitolite_keydir, "deploykeys", project.fullname + ) if not os.path.exists(project_key_dir): os.makedirs(project_key_dir) @@ -329,25 +311,26 @@ def create_deploykeys_ssh_keys_on_disk(project, gitolite_keydir): for deploykey in project.deploykeys: # See the comment in lib/git.py:write_gitolite_acls about why this # name for a file is sane and does not inject a new security risk. 
- keyfile = 'deploykey_%s_%s.pub' % ( + keyfile = "deploykey_%s_%s.pub" % ( werkzeug.secure_filename(project.fullname), - deploykey.id) + deploykey.id, + ) if not os.path.exists(os.path.join(project_key_dir, keyfile)): # We only take the very first key - deploykeys must be single keys - key = deploykey.public_ssh_key.split('\n')[0] + key = deploykey.public_ssh_key.split("\n")[0] if not key: continue if not is_valid_ssh_key(key): continue - with open(os.path.join(project_key_dir, keyfile), 'w') as f: + with open(os.path.join(project_key_dir, keyfile), "w") as f: f.write(deploykey.public_ssh_key) def create_user_ssh_keys_on_disk(user, gitolite_keydir): - ''' Create the ssh keys for the user on the specific folder. + """ Create the ssh keys for the user on the specific folder. This is the method allowing to have multiple ssh keys per user. - ''' + """ if gitolite_keydir: # First remove any old keyfiles for the user # Assumption: we populated the keydir. This means that files @@ -358,37 +341,45 @@ def create_user_ssh_keys_on_disk(user, gitolite_keydir): # i being any integer, the user is most certainly not in # keys_/.pub. 
i = 0 - keyline_file = os.path.join(gitolite_keydir, - 'keys_%i' % i, - '%s.pub' % user.user) + keyline_file = os.path.join( + gitolite_keydir, "keys_%i" % i, "%s.pub" % user.user + ) while os.path.exists(keyline_file): os.unlink(keyline_file) i += 1 - keyline_file = os.path.join(gitolite_keydir, - 'keys_%i' % i, - '%s.pub' % user.user) + keyline_file = os.path.join( + gitolite_keydir, "keys_%i" % i, "%s.pub" % user.user + ) if not user.public_ssh_key: return # Now let's create new keyfiles for the user - keys = user.public_ssh_key.split('\n') + keys = user.public_ssh_key.split("\n") for i in range(len(keys)): if not keys[i]: continue if not is_valid_ssh_key(keys[i]): continue - keyline_dir = os.path.join(gitolite_keydir, 'keys_%i' % i) + keyline_dir = os.path.join(gitolite_keydir, "keys_%i" % i) if not os.path.exists(keyline_dir): os.mkdir(keyline_dir) - keyfile = os.path.join(keyline_dir, '%s.pub' % user.user) - with open(keyfile, 'w') as stream: + keyfile = os.path.join(keyline_dir, "%s.pub" % user.user) + with open(keyfile, "w") as stream: stream.write(keys[i].strip()) -def add_issue_comment(session, issue, comment, user, ticketfolder, - notify=True, date_created=None, notification=False): - ''' Add a comment to an issue. ''' +def add_issue_comment( + session, + issue, + comment, + user, + ticketfolder, + notify=True, + date_created=None, + notification=False, +): + """ Add a comment to an issue. 
""" user_obj = get_user(session, user) issue_comment = model.IssueComment( @@ -405,10 +396,11 @@ def add_issue_comment(session, issue, comment, user, ticketfolder, session.commit() pagure.lib.git.update_git( - issue, repo=issue.project, repofolder=ticketfolder) + issue, repo=issue.project, repofolder=ticketfolder + ) if not notification: - log_action(session, 'commented', issue, user_obj) + log_action(session, "commented", issue, user_obj) if notify: pagure.lib.notify.notify_new_comment(issue_comment, user=user_obj) @@ -416,7 +408,7 @@ def add_issue_comment(session, issue, comment, user, ticketfolder, if not issue.private: pagure.lib.notify.log( issue.project, - topic='issue.comment.added', + topic="issue.comment.added", msg=dict( issue=issue.to_json(public=True), project=issue.project.to_json(public=True), @@ -431,29 +423,38 @@ def add_issue_comment(session, issue, comment, user, ticketfolder, # so until we figure this out, we won't do live-refresh if REDIS and notify: if issue.private: - REDIS.publish('pagure.%s' % issue.uid, json.dumps({ - 'issue': 'private', - 'comment_id': issue_comment.id, - })) + REDIS.publish( + "pagure.%s" % issue.uid, + json.dumps( + {"issue": "private", "comment_id": issue_comment.id} + ), + ) else: - REDIS.publish('pagure.%s' % issue.uid, json.dumps({ - 'comment_id': issue_comment.id, - 'issue_id': issue.id, - 'project': issue.project.fullname, - 'comment_added': text2markdown(issue_comment.comment), - 'comment_user': issue_comment.user.user, - 'avatar_url': avatar_url_from_email( - issue_comment.user.default_email, size=16), - 'comment_date': issue_comment.date_created.strftime( - '%Y-%m-%d %H:%M:%S'), - 'notification': notification, - })) - - return 'Comment added' + REDIS.publish( + "pagure.%s" % issue.uid, + json.dumps( + { + "comment_id": issue_comment.id, + "issue_id": issue.id, + "project": issue.project.fullname, + "comment_added": text2markdown(issue_comment.comment), + "comment_user": issue_comment.user.user, + "avatar_url": 
avatar_url_from_email( + issue_comment.user.default_email, size=16 + ), + "comment_date": issue_comment.date_created.strftime( + "%Y-%m-%d %H:%M:%S" + ), + "notification": notification, + } + ), + ) + + return "Comment added" def add_tag_obj(session, obj, tags, user, gitfolder): - ''' Add a tag to an object (either an issue or a project). ''' + """ Add a tag to an object (either an issue or a project). """ user_obj = get_user(session, user) if isinstance(tags, six.string_types): @@ -471,7 +472,7 @@ def add_tag_obj(session, obj, tags, user, gitfolder): if known: continue - if obj.isa == 'project': + if obj.isa == "project": tagobj = get_tag(session, objtag) if not tagobj: tagobj = model.Tag(tag=objtag) @@ -479,30 +480,24 @@ def add_tag_obj(session, obj, tags, user, gitfolder): session.add(tagobj) session.flush() - dbobjtag = model.TagProject( - project_id=obj.id, - tag=tagobj.tag, - ) + dbobjtag = model.TagProject(project_id=obj.id, tag=tagobj.tag) else: tagobj = get_colored_tag(session, objtag, obj.project.id) if not tagobj: tagobj = model.TagColored( - tag=objtag, - project_id=obj.project.id + tag=objtag, project_id=obj.project.id ) session.add(tagobj) session.flush() - if obj.isa == 'issue': + if obj.isa == "issue": dbobjtag = model.TagIssueColored( - issue_uid=obj.uid, - tag_id=tagobj.id + issue_uid=obj.uid, tag_id=tagobj.id ) else: dbobjtag = model.TagPullRequest( - request_uid=obj.uid, - tag_id=tagobj.id + request_uid=obj.uid, tag_id=tagobj.id ) added_tags_color.append(tagobj.tag_color) @@ -513,13 +508,12 @@ def add_tag_obj(session, obj, tags, user, gitfolder): added_tags.append(tagobj.tag) if isinstance(obj, model.Issue): - pagure.lib.git.update_git( - obj, repo=obj.project, repofolder=gitfolder) + pagure.lib.git.update_git(obj, repo=obj.project, repofolder=gitfolder) if not obj.private: pagure.lib.notify.log( obj.project, - topic='issue.tag.added', + topic="issue.tag.added", msg=dict( issue=obj.to_json(public=True), project=obj.project.to_json(public=True), 
@@ -531,20 +525,22 @@ def add_tag_obj(session, obj, tags, user, gitfolder): # Send notification for the event-source server if REDIS and not obj.project.private: - REDIS.publish('pagure.%s' % obj.uid, json.dumps( - { - 'added_tags': added_tags, - 'added_tags_color': added_tags_color, - } - )) + REDIS.publish( + "pagure.%s" % obj.uid, + json.dumps( + { + "added_tags": added_tags, + "added_tags_color": added_tags_color, + } + ), + ) elif isinstance(obj, model.PullRequest): - pagure.lib.git.update_git( - obj, repo=obj.project, repofolder=gitfolder) + pagure.lib.git.update_git(obj, repo=obj.project, repofolder=gitfolder) if not obj.private: pagure.lib.notify.log( obj.project, - topic='pull-request.tag.added', + topic="pull-request.tag.added", msg=dict( pull_request=obj.to_json(public=True), project=obj.project.to_json(public=True), @@ -556,23 +552,29 @@ def add_tag_obj(session, obj, tags, user, gitfolder): # Send notification for the event-source server if REDIS and not obj.project.private: - REDIS.publish('pagure.%s' % obj.uid, json.dumps( - { - 'added_tags': added_tags, - 'added_tags_color': added_tags_color, - } - )) + REDIS.publish( + "pagure.%s" % obj.uid, + json.dumps( + { + "added_tags": added_tags, + "added_tags_color": added_tags_color, + } + ), + ) if added_tags: - return '%s tagged with: %s' % ( - obj.isa.capitalize(), ', '.join(added_tags)) + return "%s tagged with: %s" % ( + obj.isa.capitalize(), + ", ".join(added_tags), + ) else: - return 'Nothing to add' + return "Nothing to add" -def add_issue_assignee(session, issue, assignee, user, ticketfolder, - notify=True): - ''' Add an assignee to an issue, in other words, assigned an issue. ''' +def add_issue_assignee( + session, issue, assignee, user, ticketfolder, notify=True +): + """ Add an assignee to an issue, in other words, assigned an issue. 
""" user_obj = get_user(session, user) old_assignee = issue.assignee @@ -583,7 +585,8 @@ def add_issue_assignee(session, issue, assignee, user, ticketfolder, session.add(issue) session.commit() pagure.lib.git.update_git( - issue, repo=issue.project, repofolder=ticketfolder) + issue, repo=issue.project, repofolder=ticketfolder + ) if notify: pagure.lib.notify.notify_assigned_issue(issue, None, user_obj) @@ -591,7 +594,7 @@ def add_issue_assignee(session, issue, assignee, user, ticketfolder, if not issue.private: pagure.lib.notify.log( issue.project, - topic='issue.assigned.reset', + topic="issue.assigned.reset", msg=dict( issue=issue.to_json(public=True), project=issue.project.to_json(public=True), @@ -602,10 +605,11 @@ def add_issue_assignee(session, issue, assignee, user, ticketfolder, # Send notification for the event-source server if REDIS and not issue.project.private: - REDIS.publish('pagure.%s' % issue.uid, json.dumps( - {'unassigned': '-'})) + REDIS.publish( + "pagure.%s" % issue.uid, json.dumps({"unassigned": "-"}) + ) - return 'Assignee reset' + return "Assignee reset" elif not assignee and issue.assignee is None: return @@ -618,16 +622,18 @@ def add_issue_assignee(session, issue, assignee, user, ticketfolder, session.add(issue) session.commit() pagure.lib.git.update_git( - issue, repo=issue.project, repofolder=ticketfolder) + issue, repo=issue.project, repofolder=ticketfolder + ) if notify: pagure.lib.notify.notify_assigned_issue( - issue, assignee_obj, user_obj) + issue, assignee_obj, user_obj + ) if not issue.private: pagure.lib.notify.log( issue.project, - topic='issue.assigned.added', + topic="issue.assigned.added", msg=dict( issue=issue.to_json(public=True), project=issue.project.to_json(public=True), @@ -639,18 +645,19 @@ def add_issue_assignee(session, issue, assignee, user, ticketfolder, # Send notification for the event-source server if REDIS and not issue.project.private: - REDIS.publish('pagure.%s' % issue.uid, json.dumps( - {'assigned': 
assignee_obj.to_json(public=True)})) + REDIS.publish( + "pagure.%s" % issue.uid, + json.dumps({"assigned": assignee_obj.to_json(public=True)}), + ) - output = 'Issue assigned to %s' % assignee + output = "Issue assigned to %s" % assignee if old_assignee: - output += ' (was: %s)' % old_assignee.username + output += " (was: %s)" % old_assignee.username return output -def add_pull_request_assignee( - session, request, assignee, user, requestfolder): - ''' Add an assignee to a request, in other words, assigned an issue. ''' +def add_pull_request_assignee(session, request, assignee, user, requestfolder): + """ Add an assignee to a request, in other words, assigned an issue. """ get_user(session, assignee) user_obj = get_user(session, user) @@ -660,13 +667,14 @@ def add_pull_request_assignee( session.add(request) session.commit() pagure.lib.git.update_git( - request, repo=request.project, repofolder=requestfolder) + request, repo=request.project, repofolder=requestfolder + ) pagure.lib.notify.notify_assigned_request(request, None, user_obj) pagure.lib.notify.log( request.project, - topic='request.assigned.reset', + topic="request.assigned.reset", msg=dict( request=request.to_json(public=True), project=request.project.to_json(public=True), @@ -675,7 +683,7 @@ def add_pull_request_assignee( redis=REDIS, ) - return 'Request reset' + return "Request reset" elif assignee is None and request.assignee is None: return @@ -688,14 +696,16 @@ def add_pull_request_assignee( session.add(request) session.flush() pagure.lib.git.update_git( - request, repo=request.project, repofolder=requestfolder) + request, repo=request.project, repofolder=requestfolder + ) pagure.lib.notify.notify_assigned_request( - request, assignee_obj, user_obj) + request, assignee_obj, user_obj + ) pagure.lib.notify.log( request.project, - topic='request.assigned.added', + topic="request.assigned.added", msg=dict( request=request.to_json(public=True), project=request.project.to_json(public=True), @@ -704,40 
+714,36 @@ def add_pull_request_assignee( redis=REDIS, ) - return 'Request assigned' + return "Request assigned" -def add_issue_dependency( - session, issue, issue_blocked, user, ticketfolder): - ''' Add a dependency between two issues. ''' +def add_issue_dependency(session, issue, issue_blocked, user, ticketfolder): + """ Add a dependency between two issues. """ user_obj = get_user(session, user) if issue.uid == issue_blocked.uid: raise pagure.exceptions.PagureException( - 'An issue cannot depend on itself' + "An issue cannot depend on itself" ) if issue_blocked not in issue.children: i2i = model.IssueToIssue( - parent_issue_id=issue.uid, - child_issue_id=issue_blocked.uid + parent_issue_id=issue.uid, child_issue_id=issue_blocked.uid ) session.add(i2i) # Make sure we won't have SQLAlchemy error before we continue session.flush() pagure.lib.git.update_git( - issue, - repo=issue.project, - repofolder=ticketfolder) + issue, repo=issue.project, repofolder=ticketfolder + ) pagure.lib.git.update_git( - issue_blocked, - repo=issue_blocked.project, - repofolder=ticketfolder) + issue_blocked, repo=issue_blocked.project, repofolder=ticketfolder + ) if not issue.private: pagure.lib.notify.log( issue.project, - topic='issue.dependency.added', + topic="issue.dependency.added", msg=dict( issue=issue.to_json(public=True), project=issue.project.to_json(public=True), @@ -749,28 +755,37 @@ def add_issue_dependency( # Send notification for the event-source server if REDIS and not issue.project.private: - REDIS.publish('pagure.%s' % issue.uid, json.dumps({ - 'added_dependency': issue_blocked.id, - 'issue_uid': issue.uid, - 'type': 'children', - })) - REDIS.publish('pagure.%s' % issue_blocked.uid, json.dumps({ - 'added_dependency': issue.id, - 'issue_uid': issue_blocked.uid, - 'type': 'parent', - })) - - return 'Issue marked as depending on: #%s' % issue_blocked.id - - -def remove_issue_dependency( - session, issue, issue_blocked, user, ticketfolder): - ''' Remove a dependency between 
two issues. ''' + REDIS.publish( + "pagure.%s" % issue.uid, + json.dumps( + { + "added_dependency": issue_blocked.id, + "issue_uid": issue.uid, + "type": "children", + } + ), + ) + REDIS.publish( + "pagure.%s" % issue_blocked.uid, + json.dumps( + { + "added_dependency": issue.id, + "issue_uid": issue_blocked.uid, + "type": "parent", + } + ), + ) + + return "Issue marked as depending on: #%s" % issue_blocked.id + + +def remove_issue_dependency(session, issue, issue_blocked, user, ticketfolder): + """ Remove a dependency between two issues. """ user_obj = get_user(session, user) if issue.uid == issue_blocked.uid: raise pagure.exceptions.PagureException( - 'An issue cannot depend on itself' + "An issue cannot depend on itself" ) if issue_blocked in issue.parents: @@ -783,18 +798,16 @@ def remove_issue_dependency( # Make sure we won't have SQLAlchemy error before we continue session.flush() pagure.lib.git.update_git( - issue, - repo=issue.project, - repofolder=ticketfolder) + issue, repo=issue.project, repofolder=ticketfolder + ) pagure.lib.git.update_git( - issue_blocked, - repo=issue_blocked.project, - repofolder=ticketfolder) + issue_blocked, repo=issue_blocked.project, repofolder=ticketfolder + ) if not issue.private: pagure.lib.notify.log( issue.project, - topic='issue.dependency.removed', + topic="issue.dependency.removed", msg=dict( issue=issue.to_json(public=True), project=issue.project.to_json(public=True), @@ -806,23 +819,34 @@ def remove_issue_dependency( # Send notification for the event-source server if REDIS and not issue.project.private: - REDIS.publish('pagure.%s' % issue.uid, json.dumps({ - 'removed_dependency': parent_del, - 'issue_uid': issue.uid, - 'type': 'children', - })) - REDIS.publish('pagure.%s' % issue_blocked.uid, json.dumps({ - 'removed_dependency': issue.id, - 'issue_uid': issue_blocked.uid, - 'type': 'parent', - })) + REDIS.publish( + "pagure.%s" % issue.uid, + json.dumps( + { + "removed_dependency": parent_del, + "issue_uid": issue.uid, 
+ "type": "children", + } + ), + ) + REDIS.publish( + "pagure.%s" % issue_blocked.uid, + json.dumps( + { + "removed_dependency": issue.id, + "issue_uid": issue_blocked.uid, + "type": "parent", + } + ), + ) - return 'Issue **un**marked as depending on: #%s' % ' #'.join( - [("%s" % id) for id in parent_del]) + return "Issue **un**marked as depending on: #%s" % " #".join( + [("%s" % id) for id in parent_del] + ) def remove_tags(session, project, tags, gitfolder, user): - ''' Removes the specified tag of a project. ''' + """ Removes the specified tag of a project. """ user_obj = get_user(session, user) if not isinstance(tags, list): @@ -839,12 +863,13 @@ def remove_tags(session, project, tags, gitfolder, user): if tagobj: tag_found = True removed_tags.append(tag) - msgs.append('Tag: %s has been deleted' % tag) + msgs.append("Tag: %s has been deleted" % tag) session.delete(tagobj) if not tag_found: raise pagure.exceptions.PagureException( - 'Tags not found: %s' % ', '.join(tags)) + "Tags not found: %s" % ", ".join(tags) + ) for issue in issues: for issue_tag in issue.tags: @@ -852,11 +877,12 @@ def remove_tags(session, project, tags, gitfolder, user): tag = issue_tag.tag session.delete(issue_tag) pagure.lib.git.update_git( - issue, repo=issue.project, repofolder=gitfolder) + issue, repo=issue.project, repofolder=gitfolder + ) pagure.lib.notify.log( project, - topic='project.tag.removed', + topic="project.tag.removed", msg=dict( project=project.to_json(public=True), tags=removed_tags, @@ -869,26 +895,26 @@ def remove_tags(session, project, tags, gitfolder, user): def remove_tags_obj(session, obj, tags, gitfolder, user): - ''' Removes the specified tag(s) of a given object. ''' + """ Removes the specified tag(s) of a given object. 
""" user_obj = get_user(session, user) if isinstance(tags, six.string_types): tags = [tags] removed_tags = [] - if obj.isa == 'project': + if obj.isa == "project": for objtag in obj.tags: if objtag.tag in tags: tag = objtag.tag removed_tags.append(tag) session.delete(objtag) - elif obj.isa == 'issue': + elif obj.isa == "issue": for objtag in obj.tags_issues_colored: if objtag.tag.tag in tags: tag = objtag.tag.tag removed_tags.append(tag) session.delete(objtag) - elif obj.isa == 'pull-request': + elif obj.isa == "pull-request": for objtag in obj.tags_pr_colored: if objtag.tag.tag in tags: tag = objtag.tag.tag @@ -896,12 +922,11 @@ def remove_tags_obj(session, obj, tags, gitfolder, user): session.delete(objtag) if isinstance(obj, model.Issue): - pagure.lib.git.update_git( - obj, repo=obj.project, repofolder=gitfolder) + pagure.lib.git.update_git(obj, repo=obj.project, repofolder=gitfolder) pagure.lib.notify.log( obj.project, - topic='issue.tag.removed', + topic="issue.tag.removed", msg=dict( issue=obj.to_json(public=True), project=obj.project.to_json(public=True), @@ -913,15 +938,16 @@ def remove_tags_obj(session, obj, tags, gitfolder, user): # Send notification for the event-source server if REDIS and not obj.project.private: - REDIS.publish('pagure.%s' % obj.uid, json.dumps( - {'removed_tags': removed_tags})) + REDIS.publish( + "pagure.%s" % obj.uid, + json.dumps({"removed_tags": removed_tags}), + ) elif isinstance(obj, model.PullRequest): - pagure.lib.git.update_git( - obj, repo=obj.project, repofolder=gitfolder) + pagure.lib.git.update_git(obj, repo=obj.project, repofolder=gitfolder) pagure.lib.notify.log( obj.project, - topic='pull-request.tag.removed', + topic="pull-request.tag.removed", msg=dict( pull_request=obj.to_json(public=True), project=obj.project.to_json(public=True), @@ -933,17 +959,28 @@ def remove_tags_obj(session, obj, tags, gitfolder, user): # Send notification for the event-source server if REDIS and not obj.project.private: - 
REDIS.publish('pagure.%s' % obj.uid, json.dumps( - {'removed_tags': removed_tags})) + REDIS.publish( + "pagure.%s" % obj.uid, + json.dumps({"removed_tags": removed_tags}), + ) - return '%s **un**tagged with: %s' % ( - obj.isa.capitalize(), ', '.join(removed_tags)) + return "%s **un**tagged with: %s" % ( + obj.isa.capitalize(), + ", ".join(removed_tags), + ) def edit_issue_tags( - session, project, old_tag, new_tag, new_tag_description, - new_tag_color, ticketfolder, user): - ''' Removes the specified tag of a project. ''' + session, + project, + old_tag, + new_tag, + new_tag_description, + new_tag_color, + ticketfolder, + user, +): + """ Removes the specified tag of a project. """ user_obj = get_user(session, user) old_tag_name = old_tag @@ -952,66 +989,78 @@ def edit_issue_tags( if not old_tag: raise pagure.exceptions.PagureException( - 'No tag "%s" found related to this project' % (old_tag_name)) + 'No tag "%s" found related to this project' % (old_tag_name) + ) old_tag_name = old_tag.tag old_tag_description = old_tag.tag_description old_tag_color = old_tag.tag_color # check for change - no_change_in_tag = old_tag.tag == new_tag \ - and old_tag_description == new_tag_description \ + no_change_in_tag = ( + old_tag.tag == new_tag + and old_tag_description == new_tag_description and old_tag_color == new_tag_color + ) if no_change_in_tag: raise pagure.exceptions.PagureException( 'No change. 
Old tag "%s(%s)[%s]" is the same as ' - 'new tag "%s(%s)[%s]"' % ( - old_tag, old_tag_description, old_tag_color, new_tag, - new_tag_description, new_tag_color)) + 'new tag "%s(%s)[%s]"' + % ( + old_tag, + old_tag_description, + old_tag_color, + new_tag, + new_tag_description, + new_tag_color, + ) + ) elif old_tag.tag != new_tag: # Check if new tag already exists existing_tag = get_colored_tag(session, new_tag, project.id) if existing_tag: raise pagure.exceptions.PagureException( - 'Can not rename a tag to an existing tag name: %s' % new_tag) + "Can not rename a tag to an existing tag name: %s" % new_tag + ) - session.query( - model.TagColored - ).filter( + session.query(model.TagColored).filter( model.TagColored.tag == old_tag.tag - ).filter( - model.TagColored.project_id == project.id - ).update( + ).filter(model.TagColored.project_id == project.id).update( { model.TagColored.tag: new_tag, model.TagColored.tag_description: new_tag_description, - model.TagColored.tag_color: new_tag_color + model.TagColored.tag_color: new_tag_color, } ) - issues = session.query( - model.Issue - ).filter( - model.TagIssueColored.tag_id == old_tag.id - ).filter( - model.TagIssueColored.issue_uid == model.Issue.uid - ).all() + issues = ( + session.query(model.Issue) + .filter(model.TagIssueColored.tag_id == old_tag.id) + .filter(model.TagIssueColored.issue_uid == model.Issue.uid) + .all() + ) for issue in issues: # Update the git version pagure.lib.git.update_git( - issue, repo=issue.project, repofolder=ticketfolder) + issue, repo=issue.project, repofolder=ticketfolder + ) msgs = [] msgs.append( - 'Edited tag: %s(%s)[%s] to %s(%s)[%s]' % ( - old_tag_name, old_tag_description, old_tag_color, - new_tag, new_tag_description, new_tag_color + "Edited tag: %s(%s)[%s] to %s(%s)[%s]" + % ( + old_tag_name, + old_tag_description, + old_tag_color, + new_tag, + new_tag_description, + new_tag_color, ) ) pagure.lib.notify.log( project, - topic='project.tag.edited', + topic="project.tag.edited", 
msg=dict( project=project.to_json(public=True), old_tag=old_tag.tag, @@ -1029,32 +1078,32 @@ def edit_issue_tags( def add_deploykey_to_project(session, project, ssh_key, pushaccess, user): - ''' Add a deploy key to a specified project. ''' + """ Add a deploy key to a specified project. """ ssh_key = ssh_key.strip() - if '\n' in ssh_key: + if "\n" in ssh_key: raise pagure.exceptions.PagureException( - 'Deploy key can only be single keys.' + "Deploy key can only be single keys." ) ssh_short_key = is_valid_ssh_key(ssh_key) if ssh_short_key in [None, False]: - raise pagure.exceptions.PagureException( - 'Deploy key invalid.' - ) + raise pagure.exceptions.PagureException("Deploy key invalid.") # We are sure that this only contains a single key, but ssh-keygen still # return a \n at the end - ssh_short_key = ssh_short_key.split('\n')[0] + ssh_short_key = ssh_short_key.split("\n")[0] # Make sure that this key is not a deploy key in this or another project. # If we dupe keys, gitolite might choke. - ssh_search_key = ssh_short_key.split(' ')[1] - if session.query(model.DeployKey).filter( - model.DeployKey.ssh_search_key == ssh_search_key).count() != 0: - raise pagure.exceptions.PagureException( - 'Deploy key already exists.' 
- ) + ssh_search_key = ssh_short_key.split(" ")[1] + if ( + session.query(model.DeployKey) + .filter(model.DeployKey.ssh_search_key == ssh_search_key) + .count() + != 0 + ): + raise pagure.exceptions.PagureException("Deploy key already exists.") user_obj = get_user(session, user) new_key_obj = model.DeployKey( @@ -1063,7 +1112,8 @@ def add_deploykey_to_project(session, project, ssh_key, pushaccess, user): public_ssh_key=ssh_key, ssh_short_key=ssh_short_key, ssh_search_key=ssh_search_key, - creator_user_id=user_obj.id) + creator_user_id=user_obj.id, + ) session.add(new_key_obj) # Make sure we won't have SQLAlchemy error before we continue @@ -1071,40 +1121,42 @@ def add_deploykey_to_project(session, project, ssh_key, pushaccess, user): # We do not send any notifications on purpose - return 'Deploy key added' + return "Deploy key added" def add_user_to_project( - session, project, new_user, user, access='admin', - required_groups=None): - ''' Add a specified user to a specified project with a specified access - ''' + session, project, new_user, user, access="admin", required_groups=None +): + """ Add a specified user to a specified project with a specified access + """ new_user_obj = get_user(session, new_user) - if required_groups and access != 'ticket': + if required_groups and access != "ticket": for key in required_groups: if fnmatch.fnmatch(project.fullname, key): user_grps = set(new_user_obj.groups) req_grps = set(required_groups[key]) if not user_grps.intersection(req_grps): raise pagure.exceptions.PagureException( - 'This user must be in one of the following groups ' - 'to be allowed to be added to this project: %s' % - ', '.join(req_grps) + "This user must be in one of the following groups " + "to be allowed to be added to this project: %s" + % ", ".join(req_grps) ) user_obj = get_user(session, user) - users = set([ - user_.user - for user_ in project.get_project_users(access, combine=False) - ]) + users = set( + [ + user_.user + for user_ in 
project.get_project_users(access, combine=False) + ] + ) users.add(project.user.user) if new_user in users: raise pagure.exceptions.PagureException( - 'This user is already listed on this project with the same access' + "This user is already listed on this project with the same access" ) # user has some access on project, so update to new access @@ -1119,7 +1171,7 @@ def add_user_to_project( pagure.lib.notify.log( project, - topic='project.user.access.updated', + topic="project.user.access.updated", msg=dict( project=project.to_json(public=True), new_user=new_user_obj.username, @@ -1129,12 +1181,10 @@ def add_user_to_project( redis=REDIS, ) - return 'User access updated' + return "User access updated" project_user = model.ProjectUser( - project_id=project.id, - user_id=new_user_obj.id, - access=access, + project_id=project.id, user_id=new_user_obj.id, access=access ) project.date_modified = datetime.datetime.utcnow() session.add(project_user) @@ -1146,7 +1196,7 @@ def add_user_to_project( pagure.lib.notify.log( project, - topic='project.user.added', + topic="project.user.added", msg=dict( project=project.to_json(public=True), new_user=new_user_obj.username, @@ -1156,19 +1206,23 @@ def add_user_to_project( redis=REDIS, ) - return 'User added' + return "User added" def add_group_to_project( - session, project, new_group, user, access='admin', - create=False, is_admin=False): - ''' Add a specified group to a specified project with some access ''' + session, + project, + new_group, + user, + access="admin", + create=False, + is_admin=False, +): + """ Add a specified group to a specified project with some access """ user_obj = search_user(session, username=user) if not user_obj: - raise pagure.exceptions.PagureException( - 'No user %s found.' % user - ) + raise pagure.exceptions.PagureException("No user %s found." 
% user) group_obj = search_groups(session, group_name=new_group) @@ -1177,31 +1231,35 @@ def add_group_to_project( group_obj = pagure.lib.model.PagureGroup( group_name=new_group, display_name=new_group, - group_type='user', + group_type="user", user_id=user_obj.id, ) session.add(group_obj) session.flush() else: raise pagure.exceptions.PagureException( - 'No group %s found.' % new_group + "No group %s found." % new_group ) - if user_obj not in project.users \ - and user_obj != project.user \ - and not is_admin: + if ( + user_obj not in project.users + and user_obj != project.user + and not is_admin + ): raise pagure.exceptions.PagureException( - 'You are not allowed to add a group of users to this project' + "You are not allowed to add a group of users to this project" ) - groups = set([ - group.group_name - for group in project.get_project_groups(access, combine=False) - ]) + groups = set( + [ + group.group_name + for group in project.get_project_groups(access, combine=False) + ] + ) if new_group in groups: raise pagure.exceptions.PagureException( - 'This group already has this access on this project' + "This group already has this access on this project" ) # the group already has some access, update to new access @@ -1216,7 +1274,7 @@ def add_group_to_project( pagure.lib.notify.log( project, - topic='project.group.access.updated', + topic="project.group.access.updated", msg=dict( project=project.to_json(public=True), new_group=group_obj.group_name, @@ -1226,12 +1284,10 @@ def add_group_to_project( redis=REDIS, ) - return 'Group access updated' + return "Group access updated" project_group = model.ProjectGroup( - project_id=project.id, - group_id=group_obj.id, - access=access, + project_id=project.id, group_id=group_obj.id, access=access ) session.add(project_group) # Make sure we won't have SQLAlchemy error before we continue @@ -1243,7 +1299,7 @@ def add_group_to_project( pagure.lib.notify.log( project, - topic='project.group.added', + 
topic="project.group.added", msg=dict( project=project.to_json(public=True), new_group=group_obj.group_name, @@ -1253,14 +1309,24 @@ def add_group_to_project( redis=REDIS, ) - return 'Group added' - - -def add_pull_request_comment(session, request, commit, tree_id, filename, - row, comment, user, requestfolder, - notify=True, notification=False, - trigger_ci=None): - ''' Add a comment to a pull-request. ''' + return "Group added" + + +def add_pull_request_comment( + session, + request, + commit, + tree_id, + filename, + row, + comment, + user, + requestfolder, + notify=True, + notification=False, + trigger_ci=None, +): + """ Add a comment to a pull-request. """ user_obj = get_user(session, user) pr_comment = model.PullRequestComment( @@ -1280,9 +1346,10 @@ def add_pull_request_comment(session, request, commit, tree_id, filename, request.last_updated = datetime.datetime.utcnow() pagure.lib.git.update_git( - request, repo=request.project, repofolder=requestfolder) + request, repo=request.project, repofolder=requestfolder + ) - log_action(session, 'commented', request, user_obj) + log_action(session, "commented", request, user_obj) if notify: pagure.lib.notify.notify_pull_request_comment(pr_comment, user_obj) @@ -1291,65 +1358,74 @@ def add_pull_request_comment(session, request, commit, tree_id, filename, if REDIS and not request.project.private: comment_text = text2markdown(pr_comment.comment) - REDIS.publish('pagure.%s' % request.uid, json.dumps({ - 'request_id': request.id, - 'comment_added': comment_text, - 'comment_user': pr_comment.user.user, - 'comment_id': pr_comment.id, - 'project': request.project.fullname, - 'avatar_url': avatar_url_from_email( - pr_comment.user.default_email, size=16), - 'comment_date': pr_comment.date_created.strftime( - '%Y-%m-%d %H:%M:%S'), - 'commit_id': commit, - 'filename': filename, - 'line': row, - 'notification': notification, - })) + REDIS.publish( + "pagure.%s" % request.uid, + json.dumps( + { + "request_id": request.id, + 
"comment_added": comment_text, + "comment_user": pr_comment.user.user, + "comment_id": pr_comment.id, + "project": request.project.fullname, + "avatar_url": avatar_url_from_email( + pr_comment.user.default_email, size=16 + ), + "comment_date": pr_comment.date_created.strftime( + "%Y-%m-%d %H:%M:%S" + ), + "commit_id": commit, + "filename": filename, + "line": row, + "notification": notification, + } + ), + ) # Send notification to the CI server, if the comment added was a # notification and the PR is still open and project is not private - if notification \ - and request.status == 'Open' \ - and pagure_config.get('PAGURE_CI_SERVICES') \ - and request.project.ci_hook \ - and request.project.ci_hook.active_pr \ - and not request.project.private: + if ( + notification + and request.status == "Open" + and pagure_config.get("PAGURE_CI_SERVICES") + and request.project.ci_hook + and request.project.ci_hook.active_pr + and not request.project.private + ): tasks_services.trigger_ci_build.delay( project_name=request.project_from.fullname, cause=request.id, branch=request.branch_from, - ci_type=request.project.ci_hook.ci_type + ci_type=request.project.ci_hook.ci_type, ) pagure.lib.notify.log( request.project, - topic='pull-request.comment.added', + topic="pull-request.comment.added", msg=dict( - pullrequest=request.to_json(public=True), - agent=user_obj.username, + pullrequest=request.to_json(public=True), agent=user_obj.username ), redis=REDIS, ) - if trigger_ci \ - and comment.strip().lower() in trigger_ci \ - and pagure_config.get('PAGURE_CI_SERVICES') \ - and request.project.ci_hook \ - and request.project.ci_hook.active_pr: + if ( + trigger_ci + and comment.strip().lower() in trigger_ci + and pagure_config.get("PAGURE_CI_SERVICES") + and request.project.ci_hook + and request.project.ci_hook.active_pr + ): tasks_services.trigger_ci_build.delay( project_name=request.project_from.fullname, cause=request.id, branch=request.branch_from, - 
ci_type=request.project.ci_hook.ci_type + ci_type=request.project.ci_hook.ci_type, ) - return 'Comment added' + return "Comment added" -def edit_comment(session, parent, comment, user, - updated_comment, folder): - ''' Edit a comment. ''' +def edit_comment(session, parent, comment, user, updated_comment, folder): + """ Edit a comment. """ user_obj = get_user(session, user) comment.comment = updated_comment comment.edited_on = datetime.datetime.utcnow() @@ -1361,21 +1437,20 @@ def edit_comment(session, parent, comment, user, # Make sure we won't have SQLAlchemy error before we continue session.flush() - pagure.lib.git.update_git( - parent, repo=parent.project, repofolder=folder) + pagure.lib.git.update_git(parent, repo=parent.project, repofolder=folder) - topic = 'unknown' - key = 'unknown' - id_ = 'unknown' + topic = "unknown" + key = "unknown" + id_ = "unknown" private = False - if parent.isa == 'pull-request': - topic = 'pull-request.comment.edited' - key = 'pullrequest' - id_ = 'request_id' - elif parent.isa == 'issue': - topic = 'issue.comment.edited' - key = 'issue' - id_ = 'issue_id' + if parent.isa == "pull-request": + topic = "pull-request.comment.edited" + key = "pullrequest" + id_ = "request_id" + elif parent.isa == "issue": + topic = "issue.comment.edited" + key = "issue" + id_ = "issue_id" private = parent.private if not private: @@ -1384,46 +1459,66 @@ def edit_comment(session, parent, comment, user, topic=topic, msg={ key: parent.to_json(public=True, with_comments=False), - 'project': parent.project.to_json(public=True), - 'comment': comment.to_json(public=True), - 'agent': user_obj.username, + "project": parent.project.to_json(public=True), + "comment": comment.to_json(public=True), + "agent": user_obj.username, }, redis=REDIS, ) if REDIS and not parent.project.private: if private: - REDIS.publish('pagure.%s' % comment.parent.uid, json.dumps({ - 'comment_updated': 'private', - 'comment_id': comment.id, - })) + REDIS.publish( + "pagure.%s" % 
comment.parent.uid, + json.dumps( + {"comment_updated": "private", "comment_id": comment.id} + ), + ) else: - REDIS.publish('pagure.%s' % parent.uid, json.dumps({ - id_: len(parent.comments), - 'comment_updated': text2markdown(comment.comment), - 'comment_id': comment.id, - 'parent_id': comment.parent.id, - 'comment_editor': user_obj.user, - 'avatar_url': avatar_url_from_email( - comment.user.default_email, size=16), - 'comment_date': comment.edited_on.strftime( - '%Y-%m-%d %H:%M:%S'), - })) + REDIS.publish( + "pagure.%s" % parent.uid, + json.dumps( + { + id_: len(parent.comments), + "comment_updated": text2markdown(comment.comment), + "comment_id": comment.id, + "parent_id": comment.parent.id, + "comment_editor": user_obj.user, + "avatar_url": avatar_url_from_email( + comment.user.default_email, size=16 + ), + "comment_date": comment.edited_on.strftime( + "%Y-%m-%d %H:%M:%S" + ), + } + ), + ) return "Comment updated" -def add_pull_request_flag(session, request, username, percent, comment, url, - status, uid, user, token, requestfolder): - ''' Add a flag to a pull-request. ''' +def add_pull_request_flag( + session, + request, + username, + percent, + comment, + url, + status, + uid, + user, + token, + requestfolder, +): + """ Add a flag to a pull-request. 
""" user_obj = get_user(session, user) - action = 'added' + action = "added" pr_flag = None if uid: pr_flag = get_pull_request_flag_by_uid(session, request, uid) if pr_flag: - action = 'updated' + action = "updated" pr_flag.comment = comment pr_flag.status = status pr_flag.percent = percent @@ -1444,15 +1539,16 @@ def add_pull_request_flag(session, request, username, percent, comment, url, # Make sure we won't have SQLAlchemy error before we continue session.flush() - if request.project.settings.get('notify_on_pull-request_flag'): + if request.project.settings.get("notify_on_pull-request_flag"): pagure.lib.notify.notify_pull_request_flag(pr_flag, username) pagure.lib.git.update_git( - request, repo=request.project, repofolder=requestfolder) + request, repo=request.project, repofolder=requestfolder + ) pagure.lib.notify.log( request.project, - topic='pull-request.flag.%s' % action, + topic="pull-request.flag.%s" % action, msg=dict( pullrequest=request.to_json(public=True), flag=pr_flag.to_json(public=True), @@ -1461,19 +1557,29 @@ def add_pull_request_flag(session, request, username, percent, comment, url, redis=REDIS, ) - return ('Flag %s' % action, pr_flag.uid) + return ("Flag %s" % action, pr_flag.uid) def add_commit_flag( - session, repo, commit_hash, username, status, percent, comment, url, - uid, user, token): - ''' Add a flag to a add_commit_flag. ''' + session, + repo, + commit_hash, + username, + status, + percent, + comment, + url, + uid, + user, + token, +): + """ Add a flag to a add_commit_flag. 
""" user_obj = get_user(session, user) - action = 'added' + action = "added" c_flag = get_commit_flag_by_uid(session, commit_hash, uid) if c_flag: - action = 'updated' + action = "updated" c_flag.comment = comment c_flag.percent = percent c_flag.status = status @@ -1495,12 +1601,12 @@ def add_commit_flag( # Make sure we won't have SQLAlchemy error before we continue session.flush() - if repo.settings.get('notify_on_commit_flag'): + if repo.settings.get("notify_on_commit_flag"): pagure.lib.notify.notify_commit_flag(c_flag, username) pagure.lib.notify.log( repo, - topic='commit.flag.%s' % action, + topic="commit.flag.%s" % action, msg=dict( repo=repo.to_json(public=True), flag=c_flag.to_json(public=True), @@ -1509,11 +1615,11 @@ def add_commit_flag( redis=REDIS, ) - return ('Flag %s' % action, c_flag.uid) + return ("Flag %s" % action, c_flag.uid) def get_commit_flag(session, project, commit_hash): - ''' Return the commit flags corresponding to the specified git hash + """ Return the commit flags corresponding to the specified git hash (commitid) in the specified repository. 
:arg session: the session with which to connect to the database @@ -1522,34 +1628,48 @@ def get_commit_flag(session, project, commit_hash): :arg commit_hash: the hash of the commit who has been flagged :return: list of pagure.lib.model.CommitFlag objects or an empty list - ''' - query = session.query( - model.CommitFlag - ).filter( - model.CommitFlag.project_id == project.id - ).filter( - model.CommitFlag.commit_hash == commit_hash + """ + query = ( + session.query(model.CommitFlag) + .filter(model.CommitFlag.project_id == project.id) + .filter(model.CommitFlag.commit_hash == commit_hash) ) return query.all() -def new_project(session, user, name, blacklist, allowed_prefix, - gitfolder, docfolder, ticketfolder, requestfolder, - description=None, url=None, avatar_email=None, - parent_id=None, add_readme=False, userobj=None, - prevent_40_chars=False, namespace=None, user_ns=False, - ignore_existing_repo=False, private=False): - ''' Create a new project based on the information provided. +def new_project( + session, + user, + name, + blacklist, + allowed_prefix, + gitfolder, + docfolder, + ticketfolder, + requestfolder, + description=None, + url=None, + avatar_email=None, + parent_id=None, + add_readme=False, + userobj=None, + prevent_40_chars=False, + namespace=None, + user_ns=False, + ignore_existing_repo=False, + private=False, +): + """ Create a new project based on the information provided. Is an async operation, and returns task ID. 
- ''' - ns_name = name if not namespace else '%s/%s' % (namespace, name) + """ + ns_name = name if not namespace else "%s/%s" % (namespace, name) matched = any(map(functools.partial(fnmatch.fnmatch, ns_name), blacklist)) if matched: raise pagure.exceptions.ProjectBlackListedException( 'No project "%s" are allowed to be created due to potential ' - 'conflicts in URLs with pagure itself' % ns_name + "conflicts in URLs with pagure itself" % ns_name ) user_obj = get_user(session, user) @@ -1564,9 +1684,9 @@ def new_project(session, user, name, blacklist, allowed_prefix, if namespace and namespace not in allowed_prefix: raise pagure.exceptions.PagureException( - 'The namespace of your project must be in the list of allowed ' - 'namespaces set by the admins of this pagure instance, or the ' - 'name of a group of which you are a member.' + "The namespace of your project must be in the list of allowed " + "namespaces set by the admins of this pagure instance, or the " + "name of a group of which you are a member." ) if len(name) == 40 and prevent_40_chars: @@ -1577,16 +1697,16 @@ def new_project(session, user, name, blacklist, allowed_prefix, # endpoint redirecting / to /c/ # available as an option. 
raise pagure.exceptions.PagureException( - 'Your project name cannot have exactly 40 characters after ' - 'the `/`' + "Your project name cannot have exactly 40 characters after " + "the `/`" ) path = name if namespace: - path = '%s/%s' % (namespace, name) + path = "%s/%s" % (namespace, name) # Repo exists on disk - gitrepo = os.path.join(gitfolder, '%s.git' % path) + gitrepo = os.path.join(gitfolder, "%s.git" % path) if os.path.exists(gitrepo): if not ignore_existing_repo: raise pagure.exceptions.RepoExistsException( @@ -1611,39 +1731,52 @@ def new_project(session, user, name, blacklist, allowed_prefix, user_id=user_obj.id, parent_id=parent_id, private=private, - hook_token=pagure.lib.login.id_generator(40) + hook_token=pagure.lib.login.id_generator(40), ) session.add(project) # Flush so that a project ID is generated session.flush() for ltype in model.ProjectLock.lock_type.type.enums: - lock = model.ProjectLock( - project_id=project.id, - lock_type=ltype) + lock = model.ProjectLock(project_id=project.id, lock_type=ltype) session.add(lock) session.commit() # Register creation et al - log_action(session, 'created', project, user_obj) + log_action(session, "created", project, user_obj) pagure.lib.notify.log( project, - topic='project.new', + topic="project.new", msg=dict( - project=project.to_json(public=True), - agent=user_obj.username, + project=project.to_json(public=True), agent=user_obj.username ), ) - return tasks.create_project.delay(user_obj.username, namespace, name, - add_readme, ignore_existing_repo) + return tasks.create_project.delay( + user_obj.username, namespace, name, add_readme, ignore_existing_repo + ) -def new_issue(session, repo, title, content, user, ticketfolder, issue_id=None, - issue_uid=None, private=False, status=None, close_status=None, - notify=True, date_created=None, milestone=None, priority=None, - assignee=None, tags=None): - ''' Create a new issue for the specified repo. 
''' +def new_issue( + session, + repo, + title, + content, + user, + ticketfolder, + issue_id=None, + issue_uid=None, + private=False, + status=None, + close_status=None, + notify=True, + date_created=None, + milestone=None, + priority=None, + assignee=None, + tags=None, +): + """ Create a new issue for the specified repo. """ user_obj = get_user(session, user) # Only store the priority if there is one in the project @@ -1652,12 +1785,15 @@ def new_issue(session, repo, title, content, user, ticketfolder, issue_id=None, priority = int(priority) except (ValueError, TypeError): priority = None - if priorities \ - and priority is not None \ - and ("%s" % priority) not in priorities: + if ( + priorities + and priority is not None + and ("%s" % priority) not in priorities + ): raise pagure.exceptions.PagureException( - 'You are trying to create an issue with a priority that does ' - 'not exist in the project.') + "You are trying to create an issue with a priority that does " + "not exist in the project." 
+ ) assignee_id = None if assignee is not None: @@ -1692,25 +1828,20 @@ def new_issue(session, repo, title, content, user, ticketfolder, issue_id=None, for lbl in tags: tagobj = get_colored_tag(session, lbl, repo.id) if not tagobj: - tagobj = model.TagColored( - tag=lbl, - project_id=repo.id - ) + tagobj = model.TagColored(tag=lbl, project_id=repo.id) session.add(tagobj) session.flush() dbobjtag = model.TagIssueColored( - issue_uid=issue.uid, - tag_id=tagobj.id + issue_uid=issue.uid, tag_id=tagobj.id ) session.add(dbobjtag) session.commit() - pagure.lib.git.update_git( - issue, repo=repo, repofolder=ticketfolder) + pagure.lib.git.update_git(issue, repo=repo, repofolder=ticketfolder) - log_action(session, 'created', issue, user_obj) + log_action(session, "created", issue, user_obj) if notify: pagure.lib.notify.notify_new_issue(issue, user=user_obj) @@ -1718,7 +1849,7 @@ def new_issue(session, repo, title, content, user, ticketfolder, issue_id=None, if not private: pagure.lib.notify.log( issue.project, - topic='issue.new', + topic="issue.new", msg=dict( issue=issue.to_json(public=True), project=issue.project.to_json(public=True), @@ -1731,7 +1862,7 @@ def new_issue(session, repo, title, content, user, ticketfolder, issue_id=None, def drop_issue(session, issue, user, ticketfolder): - ''' Delete a specified issue. ''' + """ Delete a specified issue. 
""" user_obj = get_user(session, user) private = issue.private @@ -1741,12 +1872,13 @@ def drop_issue(session, issue, user, ticketfolder): session.flush() pagure.lib.git.clean_git( - issue, repo=issue.project, repofolder=ticketfolder) + issue, repo=issue.project, repofolder=ticketfolder + ) if not private: pagure.lib.notify.log( issue.project, - topic='issue.drop', + topic="issue.drop", msg=dict( issue=issue.to_json(public=True), project=issue.project.to_json(public=True), @@ -1758,18 +1890,30 @@ def drop_issue(session, issue, user, ticketfolder): return issue -def new_pull_request(session, branch_from, - repo_to, branch_to, title, user, - requestfolder, initial_comment=None, - repo_from=None, remote_git=None, - requestuid=None, requestid=None, - status='Open', notify=True, - commit_start=None, commit_stop=None): - ''' Create a new pull request on the specified repo. ''' +def new_pull_request( + session, + branch_from, + repo_to, + branch_to, + title, + user, + requestfolder, + initial_comment=None, + repo_from=None, + remote_git=None, + requestuid=None, + requestid=None, + status="Open", + notify=True, + commit_start=None, + commit_stop=None, +): + """ Create a new pull request on the specified repo. 
""" if not repo_from and not remote_git: raise pagure.exceptions.PagureException( - 'Invalid input, you must specify either a local repo or a ' - 'remote one') + "Invalid input, you must specify either a local repo or a " + "remote one" + ) user_obj = get_user(session, user) @@ -1795,35 +1939,37 @@ def new_pull_request(session, branch_from, session.flush() pagure.lib.git.update_git( - request, repo=request.project, repofolder=requestfolder) + request, repo=request.project, repofolder=requestfolder + ) pagure.lib.tasks.link_pr_to_ticket.delay(request.uid) - log_action(session, 'created', request, user_obj) + log_action(session, "created", request, user_obj) if notify: pagure.lib.notify.notify_new_pull_request(request) pagure.lib.notify.log( request.project, - topic='pull-request.new', + topic="pull-request.new", msg=dict( - pullrequest=request.to_json(public=True), - agent=user_obj.username, + pullrequest=request.to_json(public=True), agent=user_obj.username ), redis=REDIS, ) # Send notification to the CI server - if pagure_config.get('PAGURE_CI_SERVICES') \ - and request.project.ci_hook \ - and request.project.ci_hook.active_pr \ - and not request.project.private: + if ( + pagure_config.get("PAGURE_CI_SERVICES") + and request.project.ci_hook + and request.project.ci_hook.active_pr + and not request.project.private + ): tasks_services.trigger_ci_build.delay( project_name=request.project_from.fullname, cause=request.id, branch=request.branch_from, - ci_type=request.project.ci_hook.ci_type + ci_type=request.project.ci_hook.ci_type, ) # Create the ref from the start @@ -1831,19 +1977,19 @@ def new_pull_request(session, branch_from, request.project.name, request.project.namespace, request.project.user.username if request.project.is_fork else None, - request.id + request.id, ) return request def new_tag(session, tag_name, tag_description, tag_color, project_id): - ''' Return a new tag object ''' + """ Return a new tag object """ tagobj = model.TagColored( tag=tag_name, 
tag_description=tag_description, tag_color=tag_color, - project_id=project_id + project_id=project_id, ) session.add(tagobj) session.flush() @@ -1851,11 +1997,21 @@ def new_tag(session, tag_name, tag_description, tag_color, project_id): return tagobj -def edit_issue(session, issue, ticketfolder, user, repo=None, - title=None, content=None, status=None, - close_status=Unspecified, priority=Unspecified, - milestone=Unspecified, private=None): - ''' Edit the specified issue. +def edit_issue( + session, + issue, + ticketfolder, + user, + repo=None, + title=None, + content=None, + status=None, + close_status=Unspecified, + priority=Unspecified, + milestone=Unspecified, + private=None, +): + """ Edit the specified issue. :arg session: the session to use to connect to the database. :arg issue: the pagure.lib.model.Issue object to edit. @@ -1872,46 +2028,48 @@ def edit_issue(session, issue, ticketfolder, user, repo=None, :kwarg milestone: the new milestone of the issue if it's being changed :kwarg private: the new private of the issue if it's being changed - ''' + """ user_obj = get_user(session, user) - if status and status != 'Open' and issue.parents: + if status and status != "Open" and issue.parents: for parent in issue.parents: - if parent.status == 'Open': + if parent.status == "Open": raise pagure.exceptions.PagureException( - 'You cannot close a ticket that has ticket ' - 'depending that are still open.') + "You cannot close a ticket that has ticket " + "depending that are still open." 
+ ) edit = [] messages = [] if title and title != issue.title: issue.title = title - edit.append('title') + edit.append("title") if content and content != issue.content: issue.content = content - edit.append('content') + edit.append("content") if status and status != issue.status: old_status = issue.status issue.status = status - if status.lower() != 'open': + if status.lower() != "open": issue.closed_at = datetime.datetime.utcnow() elif issue.close_status: issue.close_status = None close_status = Unspecified - edit.append('close_status') - edit.append('status') + edit.append("close_status") + edit.append("status") messages.append( - 'Issue status updated to: %s (was: %s)' % (status, old_status)) + "Issue status updated to: %s (was: %s)" % (status, old_status) + ) if close_status != Unspecified and close_status != issue.close_status: old_status = issue.close_status issue.close_status = close_status - edit.append('close_status') - msg = 'Issue close_status updated to: %s' % close_status + edit.append("close_status") + msg = "Issue close_status updated to: %s" % close_status if old_status: - msg += ' (was: %s)' % old_status - if issue.status.lower() == 'open' and close_status: - issue.status = 'Closed' + msg += " (was: %s)" % old_status + if issue.status.lower() == "open" and close_status: + issue.status = "Closed" issue.closed_at = datetime.datetime.utcnow() - edit.append('status') + edit.append("status") messages.append(msg) if priority != Unspecified: priorities = issue.project.priorities @@ -1927,44 +2085,47 @@ def edit_issue(session, issue, ticketfolder, user, repo=None, if priority != issue.priority: old_priority = issue.priority issue.priority = priority - edit.append('priority') - msg = 'Issue priority set to: %s' % ( - priorities[priority_string] if priority else None) + edit.append("priority") + msg = "Issue priority set to: %s" % ( + priorities[priority_string] if priority else None + ) if old_priority: - msg += ' (was: %s)' % priorities.get( - "%s" % 
old_priority, old_priority) + msg += " (was: %s)" % priorities.get( + "%s" % old_priority, old_priority + ) messages.append(msg) if private in [True, False] and private != issue.private: old_private = issue.private issue.private = private - edit.append('private') - msg = 'Issue private status set to: %s' % private + edit.append("private") + msg = "Issue private status set to: %s" % private if old_private: - msg += ' (was: %s)' % old_private + msg += " (was: %s)" % old_private messages.append(msg) if milestone != Unspecified and milestone != issue.milestone: old_milestone = issue.milestone issue.milestone = milestone - edit.append('milestone') - msg = 'Issue set to the milestone: %s' % milestone + edit.append("milestone") + msg = "Issue set to the milestone: %s" % milestone if old_milestone: - msg += ' (was: %s)' % old_milestone + msg += " (was: %s)" % old_milestone messages.append(msg) issue.last_updated = datetime.datetime.utcnow() # uniquify the list of edited fields edit = list(set(edit)) pagure.lib.git.update_git( - issue, repo=issue.project, repofolder=ticketfolder) + issue, repo=issue.project, repofolder=ticketfolder + ) - if 'status' in edit: + if "status" in edit: log_action(session, issue.status.lower(), issue, user_obj) pagure.lib.notify.notify_status_change_issue(issue, user_obj) if not issue.private and edit: pagure.lib.notify.log( issue.project, - topic='issue.edit', + topic="issue.edit", msg=dict( issue=issue.to_json(public=True), project=issue.project.to_json(public=True), @@ -1976,16 +2137,23 @@ def edit_issue(session, issue, ticketfolder, user, repo=None, if REDIS and edit and not issue.project.private: if issue.private: - REDIS.publish('pagure.%s' % issue.uid, json.dumps({ - 'issue': 'private', - 'fields': edit, - })) + REDIS.publish( + "pagure.%s" % issue.uid, + json.dumps({"issue": "private", "fields": edit}), + ) else: - REDIS.publish('pagure.%s' % issue.uid, json.dumps({ - 'fields': edit, - 'issue': issue.to_json(public=True, 
with_comments=False), - 'priorities': issue.project.priorities, - })) + REDIS.publish( + "pagure.%s" % issue.uid, + json.dumps( + { + "fields": edit, + "issue": issue.to_json( + public=True, with_comments=False + ), + "priorities": issue.project.priorities, + } + ), + ) if edit: session.add(issue) @@ -1994,34 +2162,34 @@ def edit_issue(session, issue, ticketfolder, user, repo=None, def update_project_settings(session, repo, settings, user): - ''' Update the settings of a project. ''' + """ Update the settings of a project. """ user_obj = get_user(session, user) update = [] new_settings = repo.settings for key in new_settings: if key in settings: - if key == 'Minimum_score_to_merge_pull-request': + if key == "Minimum_score_to_merge_pull-request": try: - settings[key] = int(settings[key]) \ - if settings[key] else -1 + settings[key] = int(settings[key]) if settings[key] else -1 except (ValueError, TypeError): raise pagure.exceptions.PagureException( "Please enter a numeric value for the 'minimum " - "score to merge pull request' field.") - elif key == 'Web-hooks': + "score to merge pull request' field." + ) + elif key == "Web-hooks": settings[key] = settings[key] or None else: # All the remaining keys are boolean, so True is provided # as 'y' by the html, let's convert it back - settings[key] = settings[key] in ['y', True] + settings[key] = settings[key] in ["y", True] if new_settings[key] != settings[key]: update.append(key) new_settings[key] = settings[key] else: val = False - if key == 'Web-hooks': + if key == "Web-hooks": val = None # Ensure the default value is different from what is stored. 
@@ -2030,7 +2198,7 @@ def update_project_settings(session, repo, settings, user): new_settings[key] = val if not update: - return 'No settings to change' + return "No settings to change" else: repo.settings = new_settings repo.date_modified = datetime.datetime.utcnow() @@ -2039,7 +2207,7 @@ def update_project_settings(session, repo, settings, user): pagure.lib.notify.log( repo, - topic='project.edit', + topic="project.edit", msg=dict( project=repo.to_json(public=True), fields=update, @@ -2048,17 +2216,17 @@ def update_project_settings(session, repo, settings, user): redis=REDIS, ) - if 'pull_request_access_only' in update: + if "pull_request_access_only" in update: update_read_only_mode(session, repo, read_only=True) session.add(repo) session.flush() pagure.lib.git.generate_gitolite_acls(project=repo) - return 'Edited successfully settings of repo: %s' % repo.fullname + return "Edited successfully settings of repo: %s" % repo.fullname def update_user_settings(session, settings, user): - ''' Update the settings of a project. ''' + """ Update the settings of a project. """ user_obj = get_user(session, user) update = [] @@ -2074,26 +2242,39 @@ def update_user_settings(session, settings, user): new_settings[key] = False if not update: - return 'No settings to change' + return "No settings to change" else: user_obj.settings = new_settings session.add(user_obj) session.flush() - return 'Successfully edited your settings' - - -def fork_project(session, user, repo, gitfolder, - docfolder, ticketfolder, requestfolder, - editbranch=None, editfile=None): - ''' Fork a given project into the user's forks. ''' - forkreponame = '%s.git' % os.path.join( - gitfolder, 'forks', user, - repo.namespace if repo.namespace else '', repo.name) + return "Successfully edited your settings" + + +def fork_project( + session, + user, + repo, + gitfolder, + docfolder, + ticketfolder, + requestfolder, + editbranch=None, + editfile=None, +): + """ Fork a given project into the user's forks. 
""" + forkreponame = "%s.git" % os.path.join( + gitfolder, + "forks", + user, + repo.namespace if repo.namespace else "", + repo.name, + ) if os.path.exists(forkreponame): raise pagure.exceptions.RepoExistsException( - 'Repo "forks/%s/%s" already exists' % (user, repo.name)) + 'Repo "forks/%s/%s" already exists' % (user, repo.name) + ) user_obj = get_user(session, user) @@ -2105,13 +2286,13 @@ def fork_project(session, user, repo, gitfolder, user_id=user_obj.id, parent_id=repo.id, is_fork=True, - hook_token=pagure.lib.login.id_generator(40) + hook_token=pagure.lib.login.id_generator(40), ) # disable issues, PRs in the fork by default default_repo_settings = project.settings - default_repo_settings['issue_tracker'] = False - default_repo_settings['pull_requests'] = False + default_repo_settings["issue_tracker"] = False + default_repo_settings["pull_requests"] = False project.settings = default_repo_settings session.add(project) @@ -2119,29 +2300,41 @@ def fork_project(session, user, repo, gitfolder, session.flush() session.commit() - task = tasks.fork.delay(repo.name, - repo.namespace, - repo.user.username if repo.is_fork else None, - user, - editbranch, - editfile) + task = tasks.fork.delay( + repo.name, + repo.namespace, + repo.user.username if repo.is_fork else None, + user, + editbranch, + editfile, + ) return task def search_projects( - session, username=None, - fork=None, tags=None, namespace=None, pattern=None, - start=None, limit=None, count=False, sort=None, - exclude_groups=None, private=None, owner=None): - '''List existing projects - ''' - projects = session.query( - sqlalchemy.distinct(model.Project.id) - ) + session, + username=None, + fork=None, + tags=None, + namespace=None, + pattern=None, + start=None, + limit=None, + count=False, + sort=None, + exclude_groups=None, + private=None, + owner=None, +): + """List existing projects + """ + projects = session.query(sqlalchemy.distinct(model.Project.id)) if owner is not None and username is not None: - 
raise RuntimeError('You cannot supply both a username and an owner ' - 'as parameters in the `search_projects` function') + raise RuntimeError( + "You cannot supply both a username and an owner " + "as parameters in the `search_projects` function" + ) elif owner is not None: projects = projects.join(model.User).filter(model.User.user == owner) elif username is not None: @@ -2152,51 +2345,45 @@ def search_projects( model.User.id == model.Project.user_id, ) ) - sub_q2 = session.query( - model.Project.id - ).filter( + sub_q2 = session.query(model.Project.id).filter( # User got admin or commit right sqlalchemy.and_( model.User.user == username, model.User.id == model.ProjectUser.user_id, model.ProjectUser.project_id == model.Project.id, sqlalchemy.or_( - model.ProjectUser.access == 'admin', - model.ProjectUser.access == 'commit', - ) + model.ProjectUser.access == "admin", + model.ProjectUser.access == "commit", + ), ) ) - sub_q3 = session.query( - model.Project.id - ).filter( + sub_q3 = session.query(model.Project.id).filter( # User created a group that has admin or commit right sqlalchemy.and_( model.User.user == username, model.PagureGroup.user_id == model.User.id, - model.PagureGroup.group_type == 'user', + model.PagureGroup.group_type == "user", model.PagureGroup.id == model.ProjectGroup.group_id, model.Project.id == model.ProjectGroup.project_id, sqlalchemy.or_( - model.ProjectGroup.access == 'admin', - model.ProjectGroup.access == 'commit', - ) + model.ProjectGroup.access == "admin", + model.ProjectGroup.access == "commit", + ), ) ) - sub_q4 = session.query( - model.Project.id - ).filter( + sub_q4 = session.query(model.Project.id).filter( # User is part of a group that has admin or commit right sqlalchemy.and_( model.User.user == username, model.PagureUserGroup.user_id == model.User.id, model.PagureUserGroup.group_id == model.PagureGroup.id, - model.PagureGroup.group_type == 'user', + model.PagureGroup.group_type == "user", model.PagureGroup.id == 
model.ProjectGroup.group_id, model.Project.id == model.ProjectGroup.project_id, sqlalchemy.or_( - model.ProjectGroup.access == 'admin', - model.ProjectGroup.access == 'commit', - ) + model.ProjectGroup.access == "admin", + model.ProjectGroup.access == "commit", + ), ) ) @@ -2223,20 +2410,16 @@ def search_projects( subquery0 = session.query( sqlalchemy.distinct(model.Project.id) ).filter( - model.Project.private == False, # noqa: E712 + model.Project.private == False # noqa: E712 ) - sub_q1 = session.query( - sqlalchemy.distinct(model.Project.id) - ).filter( + sub_q1 = session.query(sqlalchemy.distinct(model.Project.id)).filter( sqlalchemy.and_( model.Project.private == True, # noqa: E712 model.User.id == model.Project.user_id, model.User.user == private, ) ) - sub_q2 = session.query( - model.Project.id - ).filter( + sub_q2 = session.query(model.Project.id).filter( # User got admin or commit right sqlalchemy.and_( model.Project.private == True, # noqa: E712 @@ -2244,44 +2427,40 @@ def search_projects( model.User.id == model.ProjectUser.user_id, model.ProjectUser.project_id == model.Project.id, sqlalchemy.or_( - model.ProjectUser.access == 'admin', - model.ProjectUser.access == 'commit', - ) + model.ProjectUser.access == "admin", + model.ProjectUser.access == "commit", + ), ) ) - sub_q3 = session.query( - model.Project.id - ).filter( + sub_q3 = session.query(model.Project.id).filter( # User created a group that has admin or commit right sqlalchemy.and_( model.Project.private == True, # noqa: E712 model.User.user == private, model.PagureGroup.user_id == model.User.id, - model.PagureGroup.group_type == 'user', + model.PagureGroup.group_type == "user", model.PagureGroup.id == model.ProjectGroup.group_id, model.Project.id == model.ProjectGroup.project_id, sqlalchemy.or_( - model.ProjectGroup.access == 'admin', - model.ProjectGroup.access == 'commit', - ) + model.ProjectGroup.access == "admin", + model.ProjectGroup.access == "commit", + ), ) ) - sub_q4 = session.query( - 
model.Project.id - ).filter( + sub_q4 = session.query(model.Project.id).filter( # User is part of a group that has admin or commit right sqlalchemy.and_( model.Project.private == True, # noqa: E712 model.User.user == private, model.PagureUserGroup.user_id == model.User.id, model.PagureUserGroup.group_id == model.PagureGroup.id, - model.PagureGroup.group_type == 'user', + model.PagureGroup.group_type == "user", model.PagureGroup.id == model.ProjectGroup.group_id, model.Project.id == model.ProjectGroup.project_id, sqlalchemy.or_( - model.ProjectGroup.access == 'admin', - model.ProjectGroup.access == 'commit', - ) + model.ProjectGroup.access == "admin", + model.ProjectGroup.access == "commit", + ), ) ) @@ -2297,8 +2476,10 @@ def search_projects( projects = projects.filter( model.Project.id.in_( - subquery0.union(sub_q1).union(sub_q2).union(sub_q3).union( - sub_q4) + subquery0.union(sub_q1) + .union(sub_q2) + .union(sub_q3) + .union(sub_q4) ) ) @@ -2318,44 +2499,28 @@ def search_projects( projects = projects.filter( model.Project.id == model.TagProject.project_id - ).filter( - model.TagProject.tag.in_(tags) - ) + ).filter(model.TagProject.tag.in_(tags)) if pattern: - pattern = pattern.replace('*', '%') - if '%' in pattern: - projects = projects.filter( - model.Project.name.ilike(pattern) - ) + pattern = pattern.replace("*", "%") + if "%" in pattern: + projects = projects.filter(model.Project.name.ilike(pattern)) else: - projects = projects.filter( - model.Project.name == pattern - ) + projects = projects.filter(model.Project.name == pattern) if namespace: - projects = projects.filter( - model.Project.namespace == namespace - ) + projects = projects.filter(model.Project.namespace == namespace) - query = session.query( - model.Project - ).filter( + query = session.query(model.Project).filter( model.Project.id.in_(projects.subquery()) ) - if sort == 'latest': - query = query.order_by( - model.Project.date_created.desc() - ) - elif sort == 'oldest': - query = 
query.order_by( - model.Project.date_created.asc() - ) + if sort == "latest": + query = query.order_by(model.Project.date_created.desc()) + elif sort == "oldest": + query = query.order_by(model.Project.date_created.asc()) else: - query = query.order_by( - asc(func.lower(model.Project.name)) - ) + query = query.order_by(asc(func.lower(model.Project.name))) if start is not None: query = query.offset(start) @@ -2370,18 +2535,26 @@ def search_projects( def list_users_projects( - session, username, - fork=None, tags=None, namespace=None, pattern=None, - start=None, limit=None, count=False, sort=None, - exclude_groups=None, private=None, acls=None): - '''List a users projects - ''' - projects = session.query( - sqlalchemy.distinct(model.Project.id) - ) + session, + username, + fork=None, + tags=None, + namespace=None, + pattern=None, + start=None, + limit=None, + count=False, + sort=None, + exclude_groups=None, + private=None, + acls=None, +): + """List a users projects + """ + projects = session.query(sqlalchemy.distinct(model.Project.id)) if acls is None: - acls = ['main admin', 'admin', 'commit', 'ticket'] + acls = ["main admin", "admin", "commit", "ticket"] if username is not None: @@ -2392,47 +2565,39 @@ def list_users_projects( model.User.id == model.Project.user_id, ) ) - if 'main admin' not in acls: - projects = projects.filter( - model.User.id != model.Project.user_id, - ) + if "main admin" not in acls: + projects = projects.filter(model.User.id != model.Project.user_id) - sub_q2 = session.query( - model.Project.id - ).filter( + sub_q2 = session.query(model.Project.id).filter( # User got admin or commit right sqlalchemy.and_( model.User.user == username, model.User.id == model.ProjectUser.user_id, model.ProjectUser.project_id == model.Project.id, - model.ProjectUser.access.in_(acls) + model.ProjectUser.access.in_(acls), ) ) - sub_q3 = session.query( - model.Project.id - ).filter( + sub_q3 = session.query(model.Project.id).filter( # User created a group that has 
admin or commit right sqlalchemy.and_( model.User.user == username, model.PagureGroup.user_id == model.User.id, - model.PagureGroup.group_type == 'user', + model.PagureGroup.group_type == "user", model.PagureGroup.id == model.ProjectGroup.group_id, model.Project.id == model.ProjectGroup.project_id, - model.ProjectGroup.access.in_(acls) + model.ProjectGroup.access.in_(acls), ) ) - sub_q4 = session.query( - model.Project.id - ).filter( + sub_q4 = session.query(model.Project.id).filter( # User is part of a group that has admin or commit right sqlalchemy.and_( model.User.user == username, model.PagureUserGroup.user_id == model.User.id, model.PagureUserGroup.group_id == model.PagureGroup.id, - model.PagureGroup.group_type == 'user', + model.PagureGroup.group_type == "user", model.PagureGroup.id == model.ProjectGroup.group_id, model.Project.id == model.ProjectGroup.project_id, - model.ProjectGroup.access.in_(acls) + model.ProjectGroup.access.in_(acls), ) ) @@ -2459,56 +2624,48 @@ def list_users_projects( subquery0 = session.query( sqlalchemy.distinct(model.Project.id) ).filter( - model.Project.private == False, # noqa: E712 + model.Project.private == False # noqa: E712 ) - sub_q1 = session.query( - sqlalchemy.distinct(model.Project.id) - ).filter( + sub_q1 = session.query(sqlalchemy.distinct(model.Project.id)).filter( sqlalchemy.and_( model.Project.private == True, # noqa: E712 model.User.id == model.Project.user_id, model.User.user == private, ) ) - sub_q2 = session.query( - model.Project.id - ).filter( + sub_q2 = session.query(model.Project.id).filter( # User got admin or commit right sqlalchemy.and_( model.Project.private == True, # noqa: E712 model.User.user == private, model.User.id == model.ProjectUser.user_id, model.ProjectUser.project_id == model.Project.id, - model.ProjectUser.access.in_(acls) + model.ProjectUser.access.in_(acls), ) ) - sub_q3 = session.query( - model.Project.id - ).filter( + sub_q3 = session.query(model.Project.id).filter( # User created a 
group that has admin or commit right sqlalchemy.and_( model.Project.private == True, # noqa: E712 model.User.user == private, model.PagureGroup.user_id == model.User.id, - model.PagureGroup.group_type == 'user', + model.PagureGroup.group_type == "user", model.PagureGroup.id == model.ProjectGroup.group_id, model.Project.id == model.ProjectGroup.project_id, - model.ProjectGroup.access.in_(acls) + model.ProjectGroup.access.in_(acls), ) ) - sub_q4 = session.query( - model.Project.id - ).filter( + sub_q4 = session.query(model.Project.id).filter( # User is part of a group that has admin or commit right sqlalchemy.and_( model.Project.private == True, # noqa: E712 model.User.user == private, model.PagureUserGroup.user_id == model.User.id, model.PagureUserGroup.group_id == model.PagureGroup.id, - model.PagureGroup.group_type == 'user', + model.PagureGroup.group_type == "user", model.PagureGroup.id == model.ProjectGroup.group_id, model.Project.id == model.ProjectGroup.project_id, - model.ProjectGroup.access.in_(acls) + model.ProjectGroup.access.in_(acls), ) ) @@ -2524,8 +2681,10 @@ def list_users_projects( projects = projects.filter( model.Project.id.in_( - subquery0.union(sub_q1).union(sub_q2).union(sub_q3).union( - sub_q4) + subquery0.union(sub_q1) + .union(sub_q2) + .union(sub_q3) + .union(sub_q4) ) ) @@ -2545,44 +2704,28 @@ def list_users_projects( projects = projects.filter( model.Project.id == model.TagProject.project_id - ).filter( - model.TagProject.tag.in_(tags) - ) + ).filter(model.TagProject.tag.in_(tags)) if pattern: - pattern = pattern.replace('*', '%') - if '%' in pattern: - projects = projects.filter( - model.Project.name.ilike(pattern) - ) + pattern = pattern.replace("*", "%") + if "%" in pattern: + projects = projects.filter(model.Project.name.ilike(pattern)) else: - projects = projects.filter( - model.Project.name == pattern - ) + projects = projects.filter(model.Project.name == pattern) if namespace: - projects = projects.filter( - model.Project.namespace 
== namespace - ) + projects = projects.filter(model.Project.namespace == namespace) - query = session.query( - model.Project - ).filter( + query = session.query(model.Project).filter( model.Project.id.in_(projects.subquery()) ) - if sort == 'latest': - query = query.order_by( - model.Project.date_created.desc() - ) - elif sort == 'oldest': - query = query.order_by( - model.Project.date_created.asc() - ) + if sort == "latest": + query = query.order_by(model.Project.date_created.desc()) + elif sort == "oldest": + query = query.order_by(model.Project.date_created.asc()) else: - query = query.order_by( - asc(func.lower(model.Project.name)) - ) + query = query.order_by(asc(func.lower(model.Project.name))) if start is not None: query = query.offset(start) @@ -2597,22 +2740,16 @@ def list_users_projects( def _get_project(session, name, user=None, namespace=None): - '''Get a project from the database - ''' - case = pagure_config.get('CASE_SENSITIVE', False) + """Get a project from the database + """ + case = pagure_config.get("CASE_SENSITIVE", False) - query = session.query( - model.Project - ) + query = session.query(model.Project) if not case: - query = query.filter( - func.lower(model.Project.name) == name.lower() - ) + query = query.filter(func.lower(model.Project.name) == name.lower()) else: - query = query.filter( - model.Project.name == name - ) + query = query.filter(model.Project.name == name) if namespace: if not case: @@ -2620,36 +2757,46 @@ def _get_project(session, name, user=None, namespace=None): func.lower(model.Project.namespace) == namespace.lower() ) else: - query = query.filter( - model.Project.namespace == namespace - ) + query = query.filter(model.Project.namespace == namespace) else: query = query.filter(model.Project.namespace == namespace) if user is not None: - query = query.filter( - model.User.user == user - ).filter( - model.User.id == model.Project.user_id - ).filter( - model.Project.is_fork == True # noqa: E712 + query = ( + 
query.filter(model.User.user == user) + .filter(model.User.id == model.Project.user_id) + .filter(model.Project.is_fork == True) # noqa: E712 ) else: - query = query.filter( - model.Project.is_fork == False # noqa: E712 - ) + query = query.filter(model.Project.is_fork == False) # noqa: E712 return query.first() def search_issues( - session, repo=None, issueid=None, issueuid=None, status=None, - closed=False, tags=None, assignee=None, author=None, private=None, - priority=None, milestones=None, count=False, offset=None, - limit=None, search_pattern=None, custom_search=None, - updated_after=None, no_milestones=None, order='desc', - order_key=None): - ''' Retrieve one or more issues associated to a project with the given + session, + repo=None, + issueid=None, + issueuid=None, + status=None, + closed=False, + tags=None, + assignee=None, + author=None, + private=None, + priority=None, + milestones=None, + count=False, + offset=None, + limit=None, + search_pattern=None, + custom_search=None, + updated_after=None, + no_milestones=None, + order="desc", + order_key=None, +): + """ Retrieve one or more issues associated to a project with the given criterias. Watch out that the closed argument is incompatible with the status @@ -2714,88 +2861,62 @@ def search_issues( objects otherwise. 
:rtype: Project or [Project] - ''' - query = session.query( - sqlalchemy.distinct(model.Issue.uid) - ) + """ + query = session.query(sqlalchemy.distinct(model.Issue.uid)) if repo is not None: - query = query.filter( - model.Issue.project_id == repo.id - ) + query = query.filter(model.Issue.project_id == repo.id) if updated_after: - query = query.filter( - model.Issue.last_updated >= updated_after - ) + query = query.filter(model.Issue.last_updated >= updated_after) if issueid is not None: - query = query.filter( - model.Issue.id == issueid - ) + query = query.filter(model.Issue.id == issueid) if issueuid is not None: - query = query.filter( - model.Issue.uid == issueuid - ) + query = query.filter(model.Issue.uid == issueuid) if status is not None: - if status in ['Open', 'Closed']: - query = query.filter( - model.Issue.status == status - ) + if status in ["Open", "Closed"]: + query = query.filter(model.Issue.status == status) else: - query = query.filter( - model.Issue.close_status == status - ) + query = query.filter(model.Issue.close_status == status) if closed: - query = query.filter( - model.Issue.status != 'Open' - ) + query = query.filter(model.Issue.status != "Open") if priority: - query = query.filter( - model.Issue.priority == priority - ) + query = query.filter(model.Issue.priority == priority) if tags is not None and tags != []: if isinstance(tags, six.string_types): tags = [tags] notags = [] ytags = [] for tag in tags: - if tag.startswith('!'): + if tag.startswith("!"): notags.append(tag[1:]) else: ytags.append(tag) if ytags: - sub_q2 = session.query( - sqlalchemy.distinct(model.Issue.uid) - ) + sub_q2 = session.query(sqlalchemy.distinct(model.Issue.uid)) if repo is not None: - sub_q2 = sub_q2.filter( - model.Issue.project_id == repo.id + sub_q2 = sub_q2.filter(model.Issue.project_id == repo.id) + sub_q2 = ( + sub_q2.filter( + model.Issue.uid == model.TagIssueColored.issue_uid ) - sub_q2 = sub_q2.filter( - model.Issue.uid == 
model.TagIssueColored.issue_uid - ).filter( - model.TagIssueColored.tag_id == model.TagColored.id - ).filter( - model.TagColored.tag.in_(ytags) + .filter(model.TagIssueColored.tag_id == model.TagColored.id) + .filter(model.TagColored.tag.in_(ytags)) ) if notags: - sub_q3 = session.query( - sqlalchemy.distinct(model.Issue.uid) - ) + sub_q3 = session.query(sqlalchemy.distinct(model.Issue.uid)) if repo is not None: - sub_q3 = sub_q3.filter( - model.Issue.project_id == repo.id + sub_q3 = sub_q3.filter(model.Issue.project_id == repo.id) + sub_q3 = ( + sub_q3.filter( + model.Issue.uid == model.TagIssueColored.issue_uid ) - sub_q3 = sub_q3.filter( - model.Issue.uid == model.TagIssueColored.issue_uid - ).filter( - model.TagIssueColored.tag_id == model.TagColored.id - ).filter( - model.TagColored.tag.in_(notags) + .filter(model.TagIssueColored.tag_id == model.TagColored.id) + .filter(model.TagColored.tag.in_(notags)) ) # Adjust the main query based on the parameters specified if ytags and not notags: @@ -2803,9 +2924,7 @@ def search_issues( elif not ytags and notags: query = query.filter(~model.Issue.uid.in_(sub_q3)) elif ytags and notags: - final_set = set( - [i[0] for i in sub_q2.all()] - ) - set( + final_set = set([i[0] for i in sub_q2.all()]) - set( [i[0] for i in sub_q3.all()] ) if final_set: @@ -2813,71 +2932,57 @@ def search_issues( if assignee is not None: assignee = "%s" % assignee - if not pagure.utils.is_true(assignee, ['false', '0', 'true', '1']): + if not pagure.utils.is_true(assignee, ["false", "0", "true", "1"]): reverseassignee = False - if assignee.startswith('!'): + if assignee.startswith("!"): reverseassignee = True assignee = assignee[1:] - userassignee = session.query( - model.User.id - ).filter( - model.User.user == assignee - ).subquery() + userassignee = ( + session.query(model.User.id) + .filter(model.User.user == assignee) + .subquery() + ) if reverseassignee: - sub = session.query( - model.Issue.uid - ).filter( + sub = 
session.query(model.Issue.uid).filter( model.Issue.assignee_id == userassignee ) - query = query.filter( - ~model.Issue.uid.in_(sub) - ) + query = query.filter(~model.Issue.uid.in_(sub)) else: - query = query.filter( - model.Issue.assignee_id == userassignee - ) + query = query.filter(model.Issue.assignee_id == userassignee) elif pagure.utils.is_true(assignee): - query = query.filter( - model.Issue.assignee_id.isnot(None) - ) + query = query.filter(model.Issue.assignee_id.isnot(None)) else: - query = query.filter( - model.Issue.assignee_id.is_(None) - ) + query = query.filter(model.Issue.assignee_id.is_(None)) if author is not None: - userauthor = session.query( - model.User.id - ).filter( - model.User.user == author - ).subquery() - query = query.filter( - model.Issue.user_id == userauthor + userauthor = ( + session.query(model.User.id) + .filter(model.User.user == author) + .subquery() ) + query = query.filter(model.Issue.user_id == userauthor) if private is False: - query = query.filter( - model.Issue.private == False # noqa: E712 - ) + query = query.filter(model.Issue.private == False) # noqa: E712 elif isinstance(private, six.string_types): - userprivate = session.query( - model.User.id - ).filter( - model.User.user == private - ).subquery() + userprivate = ( + session.query(model.User.id) + .filter(model.User.user == private) + .subquery() + ) query = query.filter( sqlalchemy.or_( model.Issue.private == False, # noqa: E712 sqlalchemy.and_( model.Issue.private == True, # noqa: E712 - model.Issue.user_id == userprivate + model.Issue.user_id == userprivate, ), sqlalchemy.and_( model.Issue.private == True, # noqa: E712 - model.Issue.assignee_id == userprivate - ) + model.Issue.assignee_id == userprivate, + ), ) ) @@ -2886,103 +2991,92 @@ def search_issues( if isinstance(milestones, six.string_types): milestones = [milestones] query = query.filter( - (model.Issue.milestone.is_(None)) | - (model.Issue.milestone.in_(milestones)) + (model.Issue.milestone.is_(None)) + 
| (model.Issue.milestone.in_(milestones)) ) elif no_milestones: # Asking for issues without a milestone - query = query.filter( - model.Issue.milestone.is_(None) - ) + query = query.filter(model.Issue.milestone.is_(None)) elif milestones is not None and milestones != []: # Asking for a single specific milestone if isinstance(milestones, six.string_types): milestones = [milestones] - query = query.filter( - model.Issue.milestone.in_(milestones) - ) + query = query.filter(model.Issue.milestone.in_(milestones)) elif no_milestones is False: # Asking for all ticket with a milestone - query = query.filter( - model.Issue.milestone.isnot(None) - ) + query = query.filter(model.Issue.milestone.isnot(None)) if custom_search: constraints = [] for key in custom_search: value = custom_search[key] - if '*' in value: - value = value.replace('*', '%') + if "*" in value: + value = value.replace("*", "%") constraints.append( sqlalchemy.and_( model.IssueKeys.name == key, - model.IssueValues.value.ilike(value) + model.IssueValues.value.ilike(value), ) ) else: constraints.append( sqlalchemy.and_( model.IssueKeys.name == key, - model.IssueValues.value == value + model.IssueValues.value == value, ) ) if constraints: query = query.filter( model.Issue.uid == model.IssueValues.issue_uid - ).filter( - model.IssueValues.key_id == model.IssueKeys.id - ) + ).filter(model.IssueValues.key_id == model.IssueKeys.id) query = query.filter( - sqlalchemy.or_( - (const for const in constraints) - ) + sqlalchemy.or_((const for const in constraints)) ) - query = session.query( - model.Issue - ).filter( + query = session.query(model.Issue).filter( model.Issue.uid.in_(query.subquery()) ) if repo is not None: - query = query.filter( - model.Issue.project_id == repo.id - ) + query = query.filter(model.Issue.project_id == repo.id) if search_pattern is not None: query = query.filter( - model.Issue.title.ilike('%%%s%%' % search_pattern) + model.Issue.title.ilike("%%%s%%" % search_pattern) ) column = 
model.Issue.date_created if order_key: # If we are ordering by assignee, then order by the assignees' # usernames - if order_key == 'assignee': + if order_key == "assignee": # We must do a LEFT JOIN on model.Issue.assignee because there are # two foreign keys on model.Issue tied to model.User. This tells # SQLAlchemy which foreign key on model.User to order on. query = query.outerjoin( - model.User, model.Issue.assignee_id == model.User.id) + model.User, model.Issue.assignee_id == model.User.id + ) column = model.User.user # If we are ordering by user, then order by reporters' usernames - elif order_key == 'user': + elif order_key == "user": # We must do a LEFT JOIN on model.Issue.user because there are # two foreign keys on model.Issue tied to model.User. This tells # SQLAlchemy which foreign key on model.User to order on. query = query.outerjoin( - model.User, model.Issue.user_id == model.User.id) + model.User, model.Issue.user_id == model.User.id + ) column = model.User.user elif order_key in model.Issue.__table__.columns.keys(): column = getattr(model.Issue, order_key) - if ("%s" % column.type) == 'TEXT': + if ("%s" % column.type) == "TEXT": column = func.lower(column) # The priority is sorted differently because it is by weight and the lower # the number, the higher the priority - if (order_key != 'priority' and order == 'asc') or \ - (order_key == 'priority' and order == 'desc'): + if (order_key != "priority" and order == "asc") or ( + order_key == "priority" and order == "desc" + ): query = query.order_by(asc(column)) else: query = query.order_by(desc(column)) @@ -3002,82 +3096,79 @@ def search_issues( def get_tags_of_project(session, project, pattern=None): - ''' Returns the list of tags associated with the issues of a project. 
- ''' - query = session.query( - model.TagColored - ).filter( - model.TagColored.tag != "" - ).filter( - model.TagColored.project_id == project.id - ).order_by( - model.TagColored.tag + """ Returns the list of tags associated with the issues of a project. + """ + query = ( + session.query(model.TagColored) + .filter(model.TagColored.tag != "") + .filter(model.TagColored.project_id == project.id) + .order_by(model.TagColored.tag) ) if pattern: query = query.filter( - model.TagColored.tag.ilike(pattern.replace('*', '%')) + model.TagColored.tag.ilike(pattern.replace("*", "%")) ) return query.all() def get_tag(session, tag): - ''' Returns a Tag object for the given tag text. - ''' - query = session.query( - model.Tag - ).filter( - model.Tag.tag == tag - ) + """ Returns a Tag object for the given tag text. + """ + query = session.query(model.Tag).filter(model.Tag.tag == tag) return query.first() def get_colored_tag(session, tag, project_id): - ''' Returns a TagColored object for the given tag text. - ''' - query = session.query( - model.TagColored - ).filter( - model.TagColored.tag == tag - ).filter( - model.TagColored.project_id == project_id + """ Returns a TagColored object for the given tag text. + """ + query = ( + session.query(model.TagColored) + .filter(model.TagColored.tag == tag) + .filter(model.TagColored.project_id == project_id) ) return query.first() def search_pull_requests( - session, requestid=None, project_id=None, project_id_from=None, - status=None, author=None, assignee=None, count=False, - offset=None, limit=None, updated_after=None, branch_from=None, - order='desc', order_key=None, search_pattern=None): - ''' Retrieve the specified pull-requests. 
- ''' + session, + requestid=None, + project_id=None, + project_id_from=None, + status=None, + author=None, + assignee=None, + count=False, + offset=None, + limit=None, + updated_after=None, + branch_from=None, + order="desc", + order_key=None, + search_pattern=None, +): + """ Retrieve the specified pull-requests. + """ query = session.query(model.PullRequest) # by default sort request by date_created. column = model.PullRequest.date_created - if order_key == 'last_updated': + if order_key == "last_updated": column = model.PullRequest.last_updated if requestid: - query = query.filter( - model.PullRequest.id == requestid - ) + query = query.filter(model.PullRequest.id == requestid) if updated_after: - query = query.filter( - model.PullRequest.last_updated >= updated_after - ) + query = query.filter(model.PullRequest.last_updated >= updated_after) if project_id: - query = query.filter( - model.PullRequest.project_id == project_id - ) + query = query.filter(model.PullRequest.project_id == project_id) if project_id_from: query = query.filter( @@ -3087,73 +3178,51 @@ def search_pull_requests( if status is not None: if isinstance(status, bool): if status: - query = query.filter( - model.PullRequest.status == 'Open' - ) + query = query.filter(model.PullRequest.status == "Open") else: - query = query.filter( - model.PullRequest.status != 'Open' - ) + query = query.filter(model.PullRequest.status != "Open") else: - query = query.filter( - model.PullRequest.status == status - ) + query = query.filter(model.PullRequest.status == status) if assignee is not None: assignee = "%s" % assignee - if not pagure.utils.is_true(assignee, ['false', '0', 'true', '1']): + if not pagure.utils.is_true(assignee, ["false", "0", "true", "1"]): user2 = aliased(model.User) - if assignee.startswith('!'): - sub = session.query( - model.PullRequest.uid - ).filter( - model.PullRequest.assignee_id == user2.id - ).filter( - user2.user == assignee[1:] + if assignee.startswith("!"): + sub = ( + 
session.query(model.PullRequest.uid) + .filter(model.PullRequest.assignee_id == user2.id) + .filter(user2.user == assignee[1:]) ) - query = query.filter( - ~model.PullRequest.uid.in_(sub) - ) + query = query.filter(~model.PullRequest.uid.in_(sub)) else: query = query.filter( model.PullRequest.assignee_id == user2.id - ).filter( - user2.user == assignee - ) + ).filter(user2.user == assignee) elif pagure.utils.is_true(assignee): - query = query.filter( - model.PullRequest.assignee_id.isnot(None) - ) + query = query.filter(model.PullRequest.assignee_id.isnot(None)) else: - query = query.filter( - model.PullRequest.assignee_id.is_(None) - ) + query = query.filter(model.PullRequest.assignee_id.is_(None)) if author is not None: user3 = aliased(model.User) - query = query.filter( - model.PullRequest.user_id == user3.id - ).filter( + query = query.filter(model.PullRequest.user_id == user3.id).filter( user3.user == author ) if branch_from is not None: - query = query.filter( - model.PullRequest.branch_from == branch_from - ) + query = query.filter(model.PullRequest.branch_from == branch_from) if search_pattern is not None: - if '*' in search_pattern: - search_pattern = search_pattern.replace('*', '%') + if "*" in search_pattern: + search_pattern = search_pattern.replace("*", "%") else: - search_pattern = '%%%s%%' % search_pattern - query = query.filter( - model.PullRequest.title.ilike(search_pattern) - ) + search_pattern = "%%%s%%" % search_pattern + query = query.filter(model.PullRequest.title.ilike(search_pattern)) # Depending on the order, the query is sorted(default is desc) - if order == 'asc': + if order == "asc": query = query.order_by(asc(column)) else: query = query.order_by(desc(column)) @@ -3173,49 +3242,53 @@ def search_pull_requests( def reopen_pull_request(session, request, user, requestfolder): - ''' Re-Open the provided pull request - ''' - if request.status != 'Closed': + """ Re-Open the provided pull request + """ + if request.status != "Closed": raise 
pagure.exceptions.PagureException( - 'Trying to reopen a pull request that is not closed' + "Trying to reopen a pull request that is not closed" ) user_obj = get_user(session, user) - request.status = 'Open' + request.status = "Open" session.add(request) session.flush() log_action(session, request.status.lower(), request, user_obj) pagure.lib.notify.notify_reopen_pull_request(request, user_obj) pagure.lib.git.update_git( - request, repo=request.project, repofolder=requestfolder) + request, repo=request.project, repofolder=requestfolder + ) pagure.lib.add_pull_request_comment( - session, request, - commit=None, tree_id=None, filename=None, row=None, - comment='Pull-Request has been reopened by %s' % ( - user), + session, + request, + commit=None, + tree_id=None, + filename=None, + row=None, + comment="Pull-Request has been reopened by %s" % (user), user=user, requestfolder=requestfolder, - notify=False, notification=True + notify=False, + notification=True, ) pagure.lib.notify.log( request.project, - topic='pull-request.reopened', + topic="pull-request.reopened", msg=dict( - pullrequest=request.to_json(public=True), - agent=user_obj.username, + pullrequest=request.to_json(public=True), agent=user_obj.username ), redis=REDIS, ) def close_pull_request(session, request, user, requestfolder, merged=True): - ''' Close the provided pull-request. - ''' + """ Close the provided pull-request. 
+ """ user_obj = get_user(session, user) if merged is True: - request.status = 'Merged' + request.status = "Merged" else: - request.status = 'Closed' + request.status = "Closed" request.closed_by_id = user_obj.id request.closed_at = datetime.datetime.utcnow() session.add(request) @@ -3229,21 +3302,27 @@ def close_pull_request(session, request, user, requestfolder, merged=True): pagure.lib.notify.notify_cancelled_pull_request(request, user_obj) pagure.lib.git.update_git( - request, repo=request.project, repofolder=requestfolder) + request, repo=request.project, repofolder=requestfolder + ) pagure.lib.add_pull_request_comment( - session, request, - commit=None, tree_id=None, filename=None, row=None, - comment='Pull-Request has been %s by %s' % ( - request.status.lower(), user), + session, + request, + commit=None, + tree_id=None, + filename=None, + row=None, + comment="Pull-Request has been %s by %s" + % (request.status.lower(), user), user=user, requestfolder=requestfolder, - notify=False, notification=True + notify=False, + notification=True, ) pagure.lib.notify.log( request.project, - topic='pull-request.closed', + topic="pull-request.closed", msg=dict( pullrequest=request.to_json(public=True), merged=merged, @@ -3254,15 +3333,11 @@ def close_pull_request(session, request, user, requestfolder, merged=True): def reset_status_pull_request(session, project): - ''' Reset the status of all opened Pull-Requests of a project. - ''' - session.query( - model.PullRequest - ).filter( + """ Reset the status of all opened Pull-Requests of a project. 
+ """ + session.query(model.PullRequest).filter( model.PullRequest.project_id == project.id - ).filter( - model.PullRequest.status == 'Open' - ).update( + ).filter(model.PullRequest.status == "Open").update( {model.PullRequest.merge_status: None} ) @@ -3270,17 +3345,19 @@ def reset_status_pull_request(session, project): def add_attachment(repo, issue, attachmentfolder, user, filename, filestream): - ''' Add a file to the attachments folder of repo and update git. ''' + """ Add a file to the attachments folder of repo and update git. """ _log.info( - 'Adding file: %s to the git repo: %s', - repo.path, werkzeug.secure_filename(filename)) + "Adding file: %s to the git repo: %s", + repo.path, + werkzeug.secure_filename(filename), + ) # Prefix the filename with a timestamp: - filename = '%s-%s' % ( + filename = "%s-%s" % ( hashlib.sha256(filestream.read()).hexdigest(), - werkzeug.secure_filename(filename) + werkzeug.secure_filename(filename), ) - filedir = os.path.join(attachmentfolder, repo.fullname, 'files') + filedir = os.path.join(attachmentfolder, repo.fullname, "files") filepath = os.path.join(filedir, filename) if os.path.exists(filepath): @@ -3291,20 +3368,24 @@ def add_attachment(repo, issue, attachmentfolder, user, filename, filestream): # Write file filestream.seek(0) - with open(filepath, 'wb') as stream: + with open(filepath, "wb") as stream: stream.write(filestream.read()) tasks.add_file_to_git.delay( - repo.name, repo.namespace, + repo.name, + repo.namespace, repo.user.username if repo.is_fork else None, - user.username, issue.uid, filename) + user.username, + issue.uid, + filename, + ) return filename def get_issue_statuses(session): - ''' Return the complete list of status an issue can have. - ''' + """ Return the complete list of status an issue can have. 
+ """ output = [] statuses = session.query(model.StatusIssue).all() for status in statuses: @@ -3313,52 +3394,46 @@ def get_issue_statuses(session): def get_issue_comment(session, issue_uid, comment_id): - ''' Return a specific comment of a specified issue. - ''' - query = session.query( - model.IssueComment - ).filter( - model.IssueComment.issue_uid == issue_uid - ).filter( - model.IssueComment.id == comment_id + """ Return a specific comment of a specified issue. + """ + query = ( + session.query(model.IssueComment) + .filter(model.IssueComment.issue_uid == issue_uid) + .filter(model.IssueComment.id == comment_id) ) return query.first() def get_issue_comment_by_user_and_comment( - session, issue_uid, user_id, content): - ''' Return a specific comment of a specified issue. - ''' - query = session.query( - model.IssueComment - ).filter( - model.IssueComment.issue_uid == issue_uid - ).filter( - model.IssueComment.user_id == user_id - ).filter( - model.IssueComment.comment == content + session, issue_uid, user_id, content +): + """ Return a specific comment of a specified issue. + """ + query = ( + session.query(model.IssueComment) + .filter(model.IssueComment.issue_uid == issue_uid) + .filter(model.IssueComment.user_id == user_id) + .filter(model.IssueComment.comment == content) ) return query.first() def get_request_comment(session, request_uid, comment_id): - ''' Return a specific comment of a specified request. - ''' - query = session.query( - model.PullRequestComment - ).filter( - model.PullRequestComment.pull_request_uid == request_uid - ).filter( - model.PullRequestComment.id == comment_id + """ Return a specific comment of a specified request. + """ + query = ( + session.query(model.PullRequestComment) + .filter(model.PullRequestComment.pull_request_uid == request_uid) + .filter(model.PullRequestComment.id == comment_id) ) return query.first() def get_issue_by_uid(session, issue_uid): - ''' Return the issue corresponding to the specified unique identifier. 
+ """ Return the issue corresponding to the specified unique identifier. :arg session: the session to use to connect to the database. :arg issue_uid: the unique identifier of an issue. This identifier is @@ -3369,17 +3444,13 @@ def get_issue_by_uid(session, issue_uid): :return: A single Issue object. :rtype: pagure.lib.model.Issue - ''' - query = session.query( - model.Issue - ).filter( - model.Issue.uid == issue_uid - ) + """ + query = session.query(model.Issue).filter(model.Issue.uid == issue_uid) return query.first() def get_request_by_uid(session, request_uid): - ''' Return the request corresponding to the specified unique identifier. + """ Return the request corresponding to the specified unique identifier. :arg session: the session to use to connect to the database. :arg request_uid: the unique identifier of a request. This identifier is @@ -3390,17 +3461,15 @@ def get_request_by_uid(session, request_uid): :return: A single Issue object. :rtype: pagure.lib.model.PullRequest - ''' - query = session.query( - model.PullRequest - ).filter( + """ + query = session.query(model.PullRequest).filter( model.PullRequest.uid == request_uid ) return query.first() def get_pull_request_flag_by_uid(session, request, flag_uid): - ''' Return the flag corresponding to the specified unique identifier. + """ Return the flag corresponding to the specified unique identifier. :arg session: the session to use to connect to the database. :arg request: the pull-request that was flagged @@ -3412,19 +3481,17 @@ def get_pull_request_flag_by_uid(session, request, flag_uid): :return: A single Issue object. 
:rtype: pagure.lib.model.PullRequestFlag - ''' - query = session.query( - model.PullRequestFlag - ).filter( - model.PullRequestFlag.pull_request_uid == request.uid - ).filter( - model.PullRequestFlag.uid == flag_uid.strip() + """ + query = ( + session.query(model.PullRequestFlag) + .filter(model.PullRequestFlag.pull_request_uid == request.uid) + .filter(model.PullRequestFlag.uid == flag_uid.strip()) ) return query.first() def get_commit_flag_by_uid(session, commit_hash, flag_uid): - ''' Return the flag corresponding to the specified unique identifier. + """ Return the flag corresponding to the specified unique identifier. :arg session: the session to use to connect to the database. :arg commit_hash: the hash of the commit that got flagged @@ -3436,26 +3503,29 @@ def get_commit_flag_by_uid(session, commit_hash, flag_uid): :return: A single Issue object. :rtype: pagure.lib.model.PullRequestFlag - ''' - query = session.query( - model.CommitFlag - ).filter( - model.CommitFlag.commit_hash == commit_hash - ).filter( - model.CommitFlag.uid == flag_uid.strip() if flag_uid else None + """ + query = ( + session.query(model.CommitFlag) + .filter(model.CommitFlag.commit_hash == commit_hash) + .filter(model.CommitFlag.uid == flag_uid.strip() if flag_uid else None) ) return query.first() -def set_up_user(session, username, fullname, default_email, - emails=None, ssh_key=None, keydir=None): - ''' Set up a new user into the database or update its information. ''' +def set_up_user( + session, + username, + fullname, + default_email, + emails=None, + ssh_key=None, + keydir=None, +): + """ Set up a new user into the database or update its information. 
""" user = search_user(session, username=username) if not user: user = model.User( - user=username, - fullname=fullname, - default_email=default_email + user=username, fullname=fullname, default_email=default_email ) session.add(user) session.flush() @@ -3480,12 +3550,10 @@ def set_up_user(session, username, fullname, default_email, def add_email_to_user(session, user, user_email): - ''' Add the provided email to the specified user. ''' + """ Add the provided email to the specified user. """ emails = [email.email for email in user.emails] if user_email not in emails: - useremail = model.UserEmail( - user_id=user.id, - email=user_email) + useremail = model.UserEmail(user_id=user.id, email=user_email) session.add(useremail) session.flush() if email_logs_count(session, user_email): @@ -3493,7 +3561,7 @@ def add_email_to_user(session, user, user_email): def update_user_ssh(session, user, ssh_key, keydir, update_only=False): - ''' Set up a new user into the database or update its information. ''' + """ Set up a new user into the database or update its information. """ if isinstance(user, six.string_types): user = get_user(session, user) @@ -3508,7 +3576,7 @@ def update_user_ssh(session, user, ssh_key, keydir, update_only=False): session.flush() -def avatar_url_from_email(email, size=64, default='retro', dns=False): +def avatar_url_from_email(email, size=64, default="retro", dns=False): """ Our own implementation since fas doesn't support this nicely yet. """ @@ -3516,17 +3584,15 @@ def avatar_url_from_email(email, size=64, default='retro', dns=False): # This makes an extra DNS SRV query, which can slow down our webapps. # It is necessary for libravatar federation, though. 
import libravatar + return libravatar.libravatar_url( - openid=email, - size=size, - default=default, + openid=email, size=size, default=default ) else: - query = urlencode({'s': size, 'd': default}) - email = email.encode('utf-8') + query = urlencode({"s": size, "d": default}) + email = email.encode("utf-8") hashhex = hashlib.sha256(email).hexdigest() - return "https://seccdn.libravatar.org/avatar/%s?%s" % ( - hashhex, query) + return "https://seccdn.libravatar.org/avatar/%s?%s" % (hashhex, query) def update_tags(session, obj, tags, username, gitfolder): @@ -3542,25 +3608,21 @@ def update_tags(session, obj, tags, username, gitfolder): messages = [] if toadd: add_tag_obj( - session, - obj=obj, - tags=toadd, - user=username, - gitfolder=gitfolder, + session, obj=obj, tags=toadd, user=username, gitfolder=gitfolder + ) + messages.append( + "%s tagged with: %s" + % (obj.isa.capitalize(), ", ".join(sorted(toadd))) ) - messages.append('%s tagged with: %s' % ( - obj.isa.capitalize(), ', '.join(sorted(toadd)))) if torm: remove_tags_obj( - session, - obj=obj, - tags=torm, - user=username, - gitfolder=gitfolder, + session, obj=obj, tags=torm, user=username, gitfolder=gitfolder + ) + messages.append( + "%s **un**tagged with: %s" + % (obj.isa.capitalize(), ", ".join(sorted(torm))) ) - messages.append('%s **un**tagged with: %s' % ( - obj.isa.capitalize(), ', '.join(sorted(torm)))) session.commit() @@ -3568,7 +3630,8 @@ def update_tags(session, obj, tags, username, gitfolder): def update_dependency_issue( - session, repo, issue, depends, username, ticketfolder): + session, repo, issue, depends, username, ticketfolder +): """ Update the dependency of a specified issue (adding or removing them) """ @@ -3621,8 +3684,7 @@ def update_dependency_issue( return messages -def update_blocked_issue( - session, repo, issue, blocks, username, ticketfolder): +def update_blocked_issue(session, repo, issue, blocks, username, ticketfolder): """ Update the upstream dependency of a specified issue 
(adding or removing them) @@ -3679,24 +3741,24 @@ def update_blocked_issue( def add_user_pending_email(session, userobj, email): - ''' Add the provided user to the specified user. - ''' + """ Add the provided user to the specified user. + """ other_user = search_user(session, email=email) if other_user and other_user != userobj: raise pagure.exceptions.PagureException( - 'Someone else has already registered this email' + "Someone else has already registered this email" ) pending_email = search_pending_email(session, email=email) if pending_email: raise pagure.exceptions.PagureException( - 'This email is already pending confirmation' + "This email is already pending confirmation" ) tmpemail = pagure.lib.model.UserEmailPending( user_id=userobj.id, token=pagure.lib.login.id_generator(40), - email=email + email=email, ) session.add(tmpemail) session.flush() @@ -3705,19 +3767,19 @@ def add_user_pending_email(session, userobj, email): def resend_pending_email(session, userobj, email): - ''' Resend to the user the confirmation email for the provided email + """ Resend to the user the confirmation email for the provided email address. - ''' + """ other_user = search_user(session, email=email) if other_user and other_user != userobj: raise pagure.exceptions.PagureException( - 'Someone else has already registered this email address' + "Someone else has already registered this email address" ) pending_email = search_pending_email(session, email=email) if not pending_email: raise pagure.exceptions.PagureException( - 'This email address has already been confirmed' + "This email address has already been confirmed" ) pending_email.token = pagure.lib.login.id_generator(40) @@ -3728,7 +3790,7 @@ def resend_pending_email(session, userobj, email): def search_pending_email(session, email=None, token=None): - ''' Searches the database for the pending email matching the given + """ Searches the database for the pending email matching the given criterias. 
:arg session: the session to use to connect to the database. @@ -3739,20 +3801,14 @@ def search_pending_email(session, email=None, token=None): :return: A single UserEmailPending object :rtype: UserEmailPending - ''' - query = session.query( - model.UserEmailPending - ) + """ + query = session.query(model.UserEmailPending) if email is not None: - query = query.filter( - model.UserEmailPending.email == email - ) + query = query.filter(model.UserEmailPending.email == email) if token is not None: - query = query.filter( - model.UserEmailPending.token == token - ) + query = query.filter(model.UserEmailPending.token == token) output = query.first() @@ -3760,9 +3816,9 @@ def search_pending_email(session, email=None, token=None): def generate_hook_token(session): - ''' For each project in the database, re-generate a unique hook_token. + """ For each project in the database, re-generate a unique hook_token. - ''' + """ for project in search_projects(session): project.hook_token = pagure.lib.login.id_generator(40) @@ -3771,57 +3827,53 @@ def generate_hook_token(session): def get_group_types(session, group_type=None): - ''' Return the list of type a group can have. + """ Return the list of type a group can have. - ''' - query = session.query( - model.PagureGroupType - ).order_by( + """ + query = session.query(model.PagureGroupType).order_by( model.PagureGroupType.group_type ) if group_type: - query = query.filter( - model.PagureGroupType.group_type == group_type - ) + query = query.filter(model.PagureGroupType.group_type == group_type) return query.all() -def search_groups(session, pattern=None, group_name=None, group_type=None, - display_name=None, offset=None, limit=None, count=False): - ''' Return the groups based on the criteria specified. +def search_groups( + session, + pattern=None, + group_name=None, + group_type=None, + display_name=None, + offset=None, + limit=None, + count=False, +): + """ Return the groups based on the criteria specified. 
- ''' - query = session.query( - model.PagureGroup - ).order_by( + """ + query = session.query(model.PagureGroup).order_by( model.PagureGroup.group_type ) if pattern: - pattern = pattern.replace('*', '%') + pattern = pattern.replace("*", "%") query = query.filter( sqlalchemy.or_( model.PagureGroup.group_name.ilike(pattern), - model.PagureGroup.display_name.ilike(pattern) + model.PagureGroup.display_name.ilike(pattern), ) ) if group_name: - query = query.filter( - model.PagureGroup.group_name == group_name - ) + query = query.filter(model.PagureGroup.group_name == group_name) if display_name: - query = query.filter( - model.PagureGroup.display_name == display_name - ) + query = query.filter(model.PagureGroup.display_name == display_name) if group_type: - query = query.filter( - model.PagureGroup.group_type == group_type - ) + query = query.filter(model.PagureGroup.group_type == group_type) if offset: query = query.offset(offset) @@ -3836,76 +3888,86 @@ def search_groups(session, pattern=None, group_name=None, group_type=None, return query.all() -def add_user_to_group(session, username, group, user, is_admin, - from_external=False): - ''' Add the specified user to the given group. +def add_user_to_group( + session, username, group, user, is_admin, from_external=False +): + """ Add the specified user to the given group. from_external indicates whether this is a remotely synced group. 
- ''' + """ new_user = search_user(session, username=username) if not new_user: raise pagure.exceptions.PagureException( - 'No user `%s` found' % username) + "No user `%s` found" % username + ) action_user = user user = search_user(session, username=user) if not user: raise pagure.exceptions.PagureException( - 'No user `%s` found' % action_user) + "No user `%s` found" % action_user + ) - if not from_external and \ - group.group_name not in user.groups and not is_admin\ - and user.username != group.creator.username: + if ( + not from_external + and group.group_name not in user.groups + and not is_admin + and user.username != group.creator.username + ): raise pagure.exceptions.PagureException( - 'You are not allowed to add user to this group') + "You are not allowed to add user to this group" + ) for guser in group.users: if guser.username == new_user.username: - return 'User `%s` already in the group, nothing to change.' % ( - new_user.username) + return "User `%s` already in the group, nothing to change." % ( + new_user.username + ) - grp = model.PagureUserGroup( - group_id=group.id, - user_id=new_user.id - ) + grp = model.PagureUserGroup(group_id=group.id, user_id=new_user.id) session.add(grp) session.flush() - return 'User `%s` added to the group `%s`.' % ( - new_user.username, group.group_name) + return "User `%s` added to the group `%s`." % ( + new_user.username, + group.group_name, + ) -def edit_group_info( - session, group, display_name, description, user, is_admin): - ''' Edit the information regarding a given group. - ''' +def edit_group_info(session, group, display_name, description, user, is_admin): + """ Edit the information regarding a given group. 
+ """ action_user = user user = search_user(session, username=user) if not user: raise pagure.exceptions.PagureException( - 'No user `%s` found' % action_user) + "No user `%s` found" % action_user + ) - if group.group_name not in user.groups \ - and not is_admin \ - and user.username != group.creator.username: + if ( + group.group_name not in user.groups + and not is_admin + and user.username != group.creator.username + ): raise pagure.exceptions.PagureException( - 'You are not allowed to edit this group') + "You are not allowed to edit this group" + ) edits = [] if display_name and display_name != group.display_name: group.display_name = display_name - edits.append('display_name') + edits.append("display_name") if description and description != group.description: group.description = description - edits.append('description') + edits.append("description") session.add(group) session.flush() - msg = 'Nothing changed' + msg = "Nothing changed" if edits: pagure.lib.notify.log( None, - topic='group.edit', + topic="group.edit", msg=dict( group=group.to_json(public=True), fields=edits, @@ -3913,96 +3975,119 @@ def edit_group_info( ), redis=REDIS, ) - msg = 'Group "%s" (%s) edited' % ( - group.display_name, group.group_name) + msg = 'Group "%s" (%s) edited' % (group.display_name, group.group_name) return msg -def delete_user_of_group(session, username, groupname, user, is_admin, - force=False, from_external=False): - ''' Removes the specified user from the given group. - ''' +def delete_user_of_group( + session, + username, + groupname, + user, + is_admin, + force=False, + from_external=False, +): + """ Removes the specified user from the given group. 
+ """ group_obj = search_groups(session, group_name=groupname) if not group_obj: raise pagure.exceptions.PagureException( - 'No group `%s` found' % groupname) + "No group `%s` found" % groupname + ) drop_user = search_user(session, username=username) if not drop_user: raise pagure.exceptions.PagureException( - 'No user `%s` found' % username) + "No user `%s` found" % username + ) action_user = user user = search_user(session, username=user) if not user: raise pagure.exceptions.PagureException( - 'Could not find user %s' % action_user) + "Could not find user %s" % action_user + ) - if not from_external and \ - group_obj.group_name not in user.groups and not is_admin: + if ( + not from_external + and group_obj.group_name not in user.groups + and not is_admin + ): raise pagure.exceptions.PagureException( - 'You are not allowed to remove user from this group') + "You are not allowed to remove user from this group" + ) if drop_user.username == group_obj.creator.username and not force: raise pagure.exceptions.PagureException( - 'The creator of a group cannot be removed') + "The creator of a group cannot be removed" + ) user_grp = get_user_group(session, drop_user.id, group_obj.id) if not user_grp: raise pagure.exceptions.PagureException( - 'User `%s` could not be found in the group `%s`' % ( - username, groupname)) + "User `%s` could not be found in the group `%s`" + % (username, groupname) + ) session.delete(user_grp) session.flush() def add_group( - session, group_name, display_name, description, - group_type, user, is_admin, blacklist): - ''' Creates a new group with the given information. - ''' - if ' ' in group_name: + session, + group_name, + display_name, + description, + group_type, + user, + is_admin, + blacklist, +): + """ Creates a new group with the given information. 
+ """ + if " " in group_name: raise pagure.exceptions.PagureException( - 'Spaces are not allowed in group names: %s' % group_name) + "Spaces are not allowed in group names: %s" % group_name + ) if group_name in blacklist: raise pagure.exceptions.PagureException( - 'This group name has been blacklisted, ' - 'please choose another one') + "This group name has been blacklisted, " + "please choose another one" + ) - group_types = ['user'] + group_types = ["user"] if is_admin: - group_types = [ - grp.group_type - for grp in get_group_types(session) - ] + group_types = [grp.group_type for grp in get_group_types(session)] if not is_admin: - group_type = 'user' + group_type = "user" if group_type not in group_types: - raise pagure.exceptions.PagureException( - 'Invalide type for this group') + raise pagure.exceptions.PagureException("Invalide type for this group") username = user user = search_user(session, username=user) if not user: raise pagure.exceptions.PagureException( - 'Could not find user %s' % username) + "Could not find user %s" % username + ) group = search_groups(session, group_name=group_name) if group: raise pagure.exceptions.PagureException( - 'There is already a group named %s' % group_name) + "There is already a group named %s" % group_name + ) display = search_groups(session, display_name=display_name) if display: raise pagure.exceptions.PagureException( - 'There is already a group with display name `%s` created.' % - display_name) + "There is already a group with display name `%s` created." + % display_name + ) grp = pagure.lib.model.PagureGroup( group_name=group_name, @@ -4015,22 +4100,21 @@ def add_group( session.flush() return add_user_to_group( - session, user.username, grp, user.username, is_admin) + session, user.username, grp, user.username, is_admin + ) def get_user_group(session, userid, groupid): - ''' Return a specific user_group for the specified group and user + """ Return a specific user_group for the specified group and user identifiers. 
:arg session: the session with which to connect to the database. - ''' - query = session.query( - model.PagureUserGroup - ).filter( - model.PagureUserGroup.user_id == userid - ).filter( - model.PagureUserGroup.group_id == groupid + """ + query = ( + session.query(model.PagureUserGroup) + .filter(model.PagureUserGroup.user_id == userid) + .filter(model.PagureUserGroup.group_id == groupid) ) return query.first() @@ -4052,11 +4136,7 @@ def get_api_token(session, token_str): """ Return the Token object corresponding to the provided token string if there is any, returns None otherwise. """ - query = session.query( - model.Token - ).filter( - model.Token.id == token_str - ) + query = session.query(model.Token).filter(model.Token.id == token_str) return query.first() @@ -4065,20 +4145,12 @@ def get_acls(session, restrict=None): """ Returns all the possible ACLs a token can have according to the database. """ - query = session.query( - model.ACL - ).order_by( - model.ACL.name - ) + query = session.query(model.ACL).order_by(model.ACL.name) if restrict: if isinstance(restrict, list): - query = query.filter( - model.ACL.name.in_(restrict) - ) + query = query.filter(model.ACL.name.in_(restrict)) else: - query = query.filter( - model.ACL.name == restrict - ) + query = query.filter(model.ACL.name == restrict) return query.all() @@ -4087,11 +4159,7 @@ def add_token_to_user(session, project, acls, username, description=None): """ Create a new token for the specified user on the specified project with the given ACLs. 
""" - acls_obj = session.query( - model.ACL - ).filter( - model.ACL.name.in_(acls) - ).all() + acls_obj = session.query(model.ACL).filter(model.ACL.name.in_(acls)).all() user = search_user(session, username=username) @@ -4100,21 +4168,18 @@ def add_token_to_user(session, project, acls, username, description=None): user_id=user.id, project_id=project.id if project else None, description=description, - expiration=datetime.datetime.utcnow() + datetime.timedelta(days=60) + expiration=datetime.datetime.utcnow() + datetime.timedelta(days=60), ) session.add(token) session.flush() for acl in acls_obj: - item = pagure.lib.model.TokenAcl( - token_id=token.id, - acl_id=acl.id, - ) + item = pagure.lib.model.TokenAcl(token_id=token.id, acl_id=acl.id) session.add(item) session.commit() - return 'Token created' + return "Token created" def _convert_markdown(md_processor, text): @@ -4130,39 +4195,34 @@ def text2markdown(text, extended=True, readme=False): """ Simple text to html converter using the markdown library. 
""" extensions = [ - 'markdown.extensions.def_list', - 'markdown.extensions.fenced_code', - 'markdown.extensions.tables', - 'markdown.extensions.smart_strong', + "markdown.extensions.def_list", + "markdown.extensions.fenced_code", + "markdown.extensions.tables", + "markdown.extensions.smart_strong", # All of the above are the .extra extensions # w/o the "attribute lists" one - 'markdown.extensions.admonition', - 'markdown.extensions.codehilite', - 'markdown.extensions.sane_lists', - 'markdown.extensions.toc', + "markdown.extensions.admonition", + "markdown.extensions.codehilite", + "markdown.extensions.sane_lists", + "markdown.extensions.toc", ] # Some extensions are enabled for READMEs and disabled otherwise if readme: - extensions.extend([ - 'markdown.extensions.abbr', - 'markdown.extensions.footnotes', - ]) - else: - extensions.append( - 'markdown.extensions.nl2br', + extensions.extend( + ["markdown.extensions.abbr", "markdown.extensions.footnotes"] ) + else: + extensions.append("markdown.extensions.nl2br") if extended: # Install our markdown modifications - extensions.append('pagure.pfmarkdown') + extensions.append("pagure.pfmarkdown") md_processor = markdown.Markdown( extensions=extensions, extension_configs={ - 'markdown.extensions.codehilite': { - 'guess_lang': False, - } + "markdown.extensions.codehilite": {"guess_lang": False} }, - output_format='xhtml5', + output_format="xhtml5", ) if text: @@ -4170,21 +4230,22 @@ def text2markdown(text, extended=True, readme=False): text = _convert_markdown(md_processor, text) except Exception: _log.debug( - 'A markdown error occured while processing: ``%s``', - text) + "A markdown error occured while processing: ``%s``", text + ) return clean_input(text) - return '' + return "" def filter_img_src(name, value): - ''' Filter in img html tags images coming from a different domain. ''' - if name in ('alt', 'height', 'width', 'class', 'data-src'): + """ Filter in img html tags images coming from a different domain. 
""" + if name in ("alt", "height", "width", "class", "data-src"): return True - if name == 'src': + if name == "src": parsed = urlparse(value) return (not parsed.netloc) or parsed.netloc == urlparse( - pagure_config['APP_URL']).netloc + pagure_config["APP_URL"] + ).netloc return False @@ -4195,7 +4256,7 @@ def clean_input(text, ignore=None): if ignore and not isinstance(ignore, (tuple, set, list)): ignore = [ignore] - bleach_v = bleach.__version__.split('.') + bleach_v = bleach.__version__.split(".") for idx, val in enumerate(bleach_v): try: val = int(val) @@ -4204,38 +4265,59 @@ def clean_input(text, ignore=None): bleach_v[idx] = val attrs = bleach.ALLOWED_ATTRIBUTES.copy() - attrs['table'] = ['class'] - attrs['span'] = ['class', 'id'] - attrs['div'] = ['class'] - attrs['td'] = ['align'] - attrs['th'] = ['align'] - if not ignore or 'img' not in ignore: + attrs["table"] = ["class"] + attrs["span"] = ["class", "id"] + attrs["div"] = ["class"] + attrs["td"] = ["align"] + attrs["th"] = ["align"] + if not ignore or "img" not in ignore: # newer bleach need three args for attribute callable if tuple(bleach_v) >= (2, 0, 0): # pragma: no cover - attrs['img'] = lambda tag, name, val: filter_img_src(name, val) + attrs["img"] = lambda tag, name, val: filter_img_src(name, val) else: - attrs['img'] = filter_img_src + attrs["img"] = filter_img_src tags = bleach.ALLOWED_TAGS + [ - 'p', 'br', 'div', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', - 'table', 'td', 'tr', 'th', 'thead', 'tbody', - 'col', 'pre', 'img', 'hr', 'dl', 'dt', 'dd', 'span', - 'kbd', 'var', 'del', 'cite', 'noscript' + "p", + "br", + "div", + "h1", + "h2", + "h3", + "h4", + "h5", + "h6", + "table", + "td", + "tr", + "th", + "thead", + "tbody", + "col", + "pre", + "img", + "hr", + "dl", + "dt", + "dd", + "span", + "kbd", + "var", + "del", + "cite", + "noscript", ] if ignore: for tag in ignore: if tag in tags: tags.remove(tag) - kwargs = { - 'tags': tags, - 'attributes': attrs - } + kwargs = {"tags": tags, "attributes": 
attrs} # newer bleach allow to customize the protocol supported if tuple(bleach_v) >= (1, 5, 0): # pragma: no cover - protocols = bleach.ALLOWED_PROTOCOLS + ['irc', 'ircs'] - kwargs['protocols'] = protocols + protocols = bleach.ALLOWED_PROTOCOLS + ["irc", "ircs"] + kwargs["protocols"] = protocols return bleach.clean(text, **kwargs) @@ -4244,16 +4326,23 @@ def could_be_text(text): """ Returns whether we think this chain of character could be text or not """ try: - text.decode('utf-8') + text.decode("utf-8") return True except (UnicodeDecodeError, UnicodeEncodeError): return False def get_pull_request_of_user( - session, username, status=None, filed=None, actionable=None, - offset=None, limit=None, count=False): - '''List the opened pull-requests of an user. + session, + username, + status=None, + filed=None, + actionable=None, + offset=None, + limit=None, + count=False, +): + """List the opened pull-requests of an user. These pull-requests have either been opened by that user or against projects that user has commit on. @@ -4261,108 +4350,91 @@ def get_pull_request_of_user( returned. If actionable: only the PRs not opened/filed by the specified username will be returned. 
- ''' - projects = session.query( - sqlalchemy.distinct(model.Project.id) - ) + """ + projects = session.query(sqlalchemy.distinct(model.Project.id)) projects = projects.filter( # User created the project sqlalchemy.and_( - model.User.user == username, - model.User.id == model.Project.user_id, + model.User.user == username, model.User.id == model.Project.user_id ) ) - sub_q2 = session.query( - sqlalchemy.distinct(model.Project.id) - ).filter( + sub_q2 = session.query(sqlalchemy.distinct(model.Project.id)).filter( # User got commit right sqlalchemy.and_( model.User.user == username, model.User.id == model.ProjectUser.user_id, model.ProjectUser.project_id == model.Project.id, sqlalchemy.or_( - model.ProjectUser.access == 'admin', - model.ProjectUser.access == 'commit', - ) + model.ProjectUser.access == "admin", + model.ProjectUser.access == "commit", + ), ) ) - sub_q3 = session.query( - sqlalchemy.distinct(model.Project.id) - ).filter( + sub_q3 = session.query(sqlalchemy.distinct(model.Project.id)).filter( # User created a group that has commit right sqlalchemy.and_( model.User.user == username, model.PagureGroup.user_id == model.User.id, - model.PagureGroup.group_type == 'user', + model.PagureGroup.group_type == "user", model.PagureGroup.id == model.ProjectGroup.group_id, model.Project.id == model.ProjectGroup.project_id, sqlalchemy.or_( - model.ProjectGroup.access == 'admin', - model.ProjectGroup.access == 'commit', - ) + model.ProjectGroup.access == "admin", + model.ProjectGroup.access == "commit", + ), ) ) - sub_q4 = session.query( - sqlalchemy.distinct(model.Project.id) - ).filter( + sub_q4 = session.query(sqlalchemy.distinct(model.Project.id)).filter( # User is part of a group that has commit right sqlalchemy.and_( model.User.user == username, model.PagureUserGroup.user_id == model.User.id, model.PagureUserGroup.group_id == model.PagureGroup.id, - model.PagureGroup.group_type == 'user', + model.PagureGroup.group_type == "user", model.PagureGroup.id == 
model.ProjectGroup.group_id, model.Project.id == model.ProjectGroup.project_id, sqlalchemy.or_( - model.ProjectGroup.access == 'admin', - model.ProjectGroup.access == 'commit', - ) + model.ProjectGroup.access == "admin", + model.ProjectGroup.access == "commit", + ), ) ) projects = projects.union(sub_q2).union(sub_q3).union(sub_q4) - query = session.query( - sqlalchemy.distinct(model.PullRequest.uid) - ).filter( + query = session.query(sqlalchemy.distinct(model.PullRequest.uid)).filter( model.PullRequest.project_id.in_(projects.subquery()) ) - query_2 = session.query( - sqlalchemy.distinct(model.PullRequest.uid) - ).filter( + query_2 = session.query(sqlalchemy.distinct(model.PullRequest.uid)).filter( # User open the PR sqlalchemy.and_( model.PullRequest.user_id == model.User.id, - model.User.user == username + model.User.user == username, ) ) final_sub = query.union(query_2) - query = session.query( - model.PullRequest - ).filter( - model.PullRequest.uid.in_(final_sub.subquery()) - ).order_by( - model.PullRequest.date_created.desc() + query = ( + session.query(model.PullRequest) + .filter(model.PullRequest.uid.in_(final_sub.subquery())) + .order_by(model.PullRequest.date_created.desc()) ) if status: - query = query.filter( - model.PullRequest.status == status - ) + query = query.filter(model.PullRequest.status == status) if filed: query = query.filter( model.PullRequest.user_id == model.User.id, - model.User.user == filed + model.User.user == filed, ) elif actionable: query = query.filter( model.PullRequest.user_id == model.User.id, - model.User.user != actionable + model.User.user != actionable, ) if offset: @@ -4377,7 +4449,7 @@ def get_pull_request_of_user( def update_watch_status(session, project, user, watch): - ''' Update the user status for watching a project. + """ Update the user status for watching a project. 
The watch status can be: -1: reset the watch status to default @@ -4386,37 +4458,40 @@ def update_watch_status(session, project, user, watch): 2: watch commits 3: watch issues, PRs and commits - ''' - if watch not in ['-1', '0', '1', '2', '3']: + """ + if watch not in ["-1", "0", "1", "2", "3"]: raise pagure.exceptions.PagureException( - 'The watch value of "%s" is invalid' % watch) + 'The watch value of "%s" is invalid' % watch + ) user_obj = get_user(session, user) - watcher = session.query( - model.Watcher - ).filter( - sqlalchemy.and_( - model.Watcher.project_id == project.id, - model.Watcher.user_id == user_obj.id, + watcher = ( + session.query(model.Watcher) + .filter( + sqlalchemy.and_( + model.Watcher.project_id == project.id, + model.Watcher.user_id == user_obj.id, + ) ) - ).first() + .first() + ) - if watch == '-1': + if watch == "-1": if not watcher: - return 'Watch status is already reset' + return "Watch status is already reset" session.delete(watcher) session.flush() - return 'Watch status reset' + return "Watch status reset" should_watch_issues = False should_watch_commits = False - if watch == '1': + if watch == "1": should_watch_issues = True - elif watch == '2': + elif watch == "2": should_watch_commits = True - elif watch == '3': + elif watch == "3": should_watch_issues = True should_watch_commits = True @@ -4425,7 +4500,7 @@ def update_watch_status(session, project, user, watch): project_id=project.id, user_id=user_obj.id, watch_issues=should_watch_issues, - watch_commits=should_watch_commits + watch_commits=should_watch_commits, ) else: watcher.watch_issues = should_watch_issues @@ -4435,19 +4510,20 @@ def update_watch_status(session, project, user, watch): session.flush() if should_watch_issues and should_watch_commits: - return 'You are now watching issues, PRs, and commits on this project' + return "You are now watching issues, PRs, and commits on this project" elif should_watch_issues: - return 'You are now watching issues and PRs on this 
project' + return "You are now watching issues and PRs on this project" elif should_watch_commits: - return 'You are now watching commits on this project' + return "You are now watching commits on this project" else: - return 'You are no longer watching this project' + return "You are no longer watching this project" -def get_watch_level_on_repo(session, user, repo, repouser=None, - namespace=None): - ''' Get a list representing the watch level of the user on the project. - ''' +def get_watch_level_on_repo( + session, user, repo, repouser=None, namespace=None +): + """ Get a list representing the watch level of the user on the project. + """ # If a user wasn't passed in, we can't determine their watch level if user is None: return [] @@ -4467,22 +4543,24 @@ def get_watch_level_on_repo(session, user, repo, repouser=None, # If the project passed in a string, then assume it is a project name elif isinstance(repo, six.string_types): project = _get_project( - session, repo, user=repouser, namespace=namespace) + session, repo, user=repouser, namespace=namespace + ) else: - raise RuntimeError('The passed in repo is an invalid type of "{0}"' - .format(type(repo).__name__)) + raise RuntimeError( + 'The passed in repo is an invalid type of "{0}"'.format( + type(repo).__name__ + ) + ) # If the project is not found, we can't determine the involvement of the # user in the project if not project: return [] - query = session.query( - model.Watcher - ).filter( - model.Watcher.user_id == user_obj.id - ).filter( - model.Watcher.project_id == project.id + query = ( + session.query(model.Watcher) + .filter(model.Watcher.user_id == user_obj.id) + .filter(model.Watcher.project_id == project.id) ) watcher = query.first() @@ -4490,11 +4568,11 @@ def get_watch_level_on_repo(session, user, repo, repouser=None, # level on the project if watcher: if watcher.watch_issues and watcher.watch_commits: - return ['issues', 'commits'] + return ["issues", "commits"] elif watcher.watch_issues: - return 
['issues'] + return ["issues"] elif watcher.watch_commits: - return ['commits'] + return ["commits"] else: # If a watcher entry is set and both are set to False, that # means the user explicitly asked to not be notified @@ -4503,18 +4581,18 @@ def get_watch_level_on_repo(session, user, repo, repouser=None, # If the user is the project owner, by default they will be watching # issues and PRs if user_obj.username == project.user.username: - return ['issues'] + return ["issues"] # If the user is a contributor, by default they will be watching issues # and PRs for contributor in project.users: if user_obj.username == contributor.username: - return ['issues'] + return ["issues"] # If the user is in a project group, by default they will be watching # issues and PRs for group in project.groups: for guser in group.users: if user_obj.username == guser.username: - return ['issues'] + return ["issues"] # If no other condition is true, then they are not explicitly watching # the project or are not involved in the project to the point that # comes with aq default watch level @@ -4522,34 +4600,28 @@ def get_watch_level_on_repo(session, user, repo, repouser=None, def user_watch_list(session, user, exclude_groups=None): - ''' Returns list of all the projects which the user is watching ''' + """ Returns list of all the projects which the user is watching """ user_obj = search_user(session, username=user) if not user_obj: return [] - unwatched = session.query( - model.Watcher - ).filter( - model.Watcher.user_id == user_obj.id - ).filter( - model.Watcher.watch_issues == False # noqa: E712 - ).filter( - model.Watcher.watch_commits == False # noqa: E712 + unwatched = ( + session.query(model.Watcher) + .filter(model.Watcher.user_id == user_obj.id) + .filter(model.Watcher.watch_issues == False) # noqa: E712 + .filter(model.Watcher.watch_commits == False) # noqa: E712 ) unwatched_list = [] if unwatched: unwatched_list = [unwatch.project for unwatch in unwatched.all()] - watched = 
session.query( - model.Watcher - ).filter( - model.Watcher.user_id == user_obj.id - ).filter( - model.Watcher.watch_issues == True # noqa: E712 - ).filter( - model.Watcher.watch_commits == True # noqa: E712 + watched = ( + session.query(model.Watcher) + .filter(model.Watcher.user_id == user_obj.id) + .filter(model.Watcher.watch_issues == True) # noqa: E712 + .filter(model.Watcher.watch_commits == True) # noqa: E712 ) watched_list = [] @@ -4557,7 +4629,8 @@ def user_watch_list(session, user, exclude_groups=None): watched_list = [watch.project for watch in watched.all()] user_projects = search_projects( - session, username=user_obj.user, exclude_groups=exclude_groups) + session, username=user_obj.user, exclude_groups=exclude_groups + ) watch = set(watched_list + user_projects) for project in user_projects: @@ -4568,28 +4641,24 @@ def user_watch_list(session, user, exclude_groups=None): def set_watch_obj(session, user, obj, watch_status): - ''' Set the watch status of the user on the specified object. + """ Set the watch status of the user on the specified object. 
Objects can be either an issue or a pull-request - ''' + """ user_obj = get_user(session, user) if obj.isa == "issue": - query = session.query( - model.IssueWatcher - ).filter( - model.IssueWatcher.user_id == user_obj.id - ).filter( - model.IssueWatcher.issue_uid == obj.uid + query = ( + session.query(model.IssueWatcher) + .filter(model.IssueWatcher.user_id == user_obj.id) + .filter(model.IssueWatcher.issue_uid == obj.uid) ) elif obj.isa == "pull-request": - query = session.query( - model.PullRequestWatcher - ).filter( - model.PullRequestWatcher.user_id == user_obj.id - ).filter( - model.PullRequestWatcher.pull_request_uid == obj.uid + query = ( + session.query(model.PullRequestWatcher) + .filter(model.PullRequestWatcher.user_id == user_obj.id) + .filter(model.PullRequestWatcher.pull_request_uid == obj.uid) ) else: raise pagure.exceptions.InvalidObjectException( @@ -4601,9 +4670,7 @@ def set_watch_obj(session, user, obj, watch_status): if not dbobj: if obj.isa == "issue": dbobj = model.IssueWatcher( - user_id=user_obj.id, - issue_uid=obj.uid, - watch=watch_status, + user_id=user_obj.id, issue_uid=obj.uid, watch=watch_status ) elif obj.isa == "pull-request": dbobj = model.PullRequestWatcher( @@ -4616,9 +4683,9 @@ def set_watch_obj(session, user, obj, watch_status): session.add(dbobj) - output = 'You are no longer watching this %s' % obj.isa + output = "You are no longer watching this %s" % obj.isa if watch_status: - output = 'You are now watching this %s' % obj.isa + output = "You are now watching this %s" % obj.isa return output @@ -4628,15 +4695,11 @@ def get_watch_list(session, obj): private = False if obj.isa == "issue": private = obj.private - obj_watchers_query = session.query( - model.IssueWatcher - ).filter( + obj_watchers_query = session.query(model.IssueWatcher).filter( model.IssueWatcher.issue_uid == obj.uid ) elif obj.isa == "pull-request": - obj_watchers_query = session.query( - model.PullRequestWatcher - ).filter( + obj_watchers_query = 
session.query(model.PullRequestWatcher).filter( model.PullRequestWatcher.pull_request_uid == obj.uid ) else: @@ -4644,9 +4707,7 @@ def get_watch_list(session, obj): 'Unsupported object found: "%s"' % obj ) - project_watchers_query = session.query( - model.Watcher - ).filter( + project_watchers_query = session.query(model.Watcher).filter( model.Watcher.project_id == obj.project.id ) @@ -4696,7 +4757,7 @@ def save_report(session, repo, name, url, username): """ Save the report of issues based on the given URL of the project. """ url_obj = urlparse(url) - url = url_obj.geturl().replace(url_obj.query, '') + url = url_obj.geturl().replace(url_obj.query, "") query = {} for k, v in parse_qsl(url_obj.query): if k in query: @@ -4712,8 +4773,7 @@ def save_report(session, repo, name, url, username): session.add(repo) -def set_custom_key_fields( - session, project, fields, types, data, notify=None): +def set_custom_key_fields(session, project, fields, types, data, notify=None): """ Set or update the custom key fields of a project with the values provided. "data" is currently only used for lists """ @@ -4728,10 +4788,7 @@ def set_custom_key_fields( data[idx] = None else: if data[idx]: - data[idx] = [ - item.strip() - for item in data[idx].split(',') - ] + data[idx] = [item.strip() for item in data[idx].split(",")] if notify and notify[idx] == "on": notify_flag = True @@ -4749,7 +4806,7 @@ def set_custom_key_fields( name=key, key_type=types[idx], data=data[idx], - key_notify=notify_flag + key_notify=notify_flag, ) session.add(issuekey) @@ -4758,19 +4815,17 @@ def set_custom_key_fields( if key not in fields: session.delete(current_keys[key]) - return 'List of custom fields updated' + return "List of custom fields updated" def set_custom_key_value(session, issue, key, value): """ Set or update the value of the specified custom key. 
""" - query = session.query( - model.IssueValues - ).filter( - model.IssueValues.key_id == key.id - ).filter( - model.IssueValues.issue_uid == issue.uid + query = ( + session.query(model.IssueValues) + .filter(model.IssueValues.key_id == key.id) + .filter(model.IssueValues.issue_uid == issue.uid) ) current_field = query.first() @@ -4779,9 +4834,9 @@ def set_custom_key_value(session, issue, key, value): old_value = None if current_field: old_value = current_field.value - if current_field.key.key_type == 'boolean': + if current_field.key.key_type == "boolean": value = value or False - if value is None or value == '': + if value is None or value == "": session.delete(current_field) updated = True delete = True @@ -4789,13 +4844,11 @@ def set_custom_key_value(session, issue, key, value): current_field.value = value updated = True else: - if value is None or value == '': + if value is None or value == "": delete = True else: current_field = model.IssueValues( - issue_uid=issue.uid, - key_id=key.id, - value=value, + issue_uid=issue.uid, key_id=key.id, value=value ) updated = True @@ -4804,26 +4857,33 @@ def set_custom_key_value(session, issue, key, value): if REDIS and updated: if issue.private: - REDIS.publish('pagure.%s' % issue.uid, json.dumps({ - 'issue': 'private', - 'custom_fields': [key.name], - })) + REDIS.publish( + "pagure.%s" % issue.uid, + json.dumps({"issue": "private", "custom_fields": [key.name]}), + ) else: - REDIS.publish('pagure.%s' % issue.uid, json.dumps({ - 'custom_fields': [key.name], - 'issue': issue.to_json(public=True, with_comments=False), - })) + REDIS.publish( + "pagure.%s" % issue.uid, + json.dumps( + { + "custom_fields": [key.name], + "issue": issue.to_json( + public=True, with_comments=False + ), + } + ), + ) if updated and value: - output = 'Custom field %s adjusted to %s' % (key.name, value) + output = "Custom field %s adjusted to %s" % (key.name, value) if old_value: - output += ' (was: %s)' % old_value + output += " (was: %s)" % 
old_value return output elif updated and old_value: - return 'Custom field %s reset (from %s)' % (key.name, old_value) + return "Custom field %s reset (from %s)" % (key.name, old_value) -def get_yearly_stats_user(session, user, date, tz='UTC'): +def get_yearly_stats_user(session, user, date, tz="UTC"): """ Return the activity of the specified user in the year preceding the specified date. 'offset' is intended to be a timezone offset from UTC, in minutes: you can discover the offset for a timezone and pass that @@ -4836,13 +4896,12 @@ def get_yearly_stats_user(session, user, date, tz='UTC'): """ start_date = datetime.datetime(date.year - 1, date.month, date.day) - events = session.query( - model.PagureLog - ).filter( - model.PagureLog.date_created.between(start_date, date) - ).filter( - model.PagureLog.user_id == user.id - ).all() + events = ( + session.query(model.PagureLog) + .filter(model.PagureLog.date_created.between(start_date, date)) + .filter(model.PagureLog.user_id == user.id) + .all() + ) # Counter very handily does exactly what we want here: it gives # us a dict with the dates as keys and the number of times each # date occurs in the data as the values, we return its items as @@ -4850,7 +4909,7 @@ def get_yearly_stats_user(session, user, date, tz='UTC'): return list(Counter([event.date_tz(tz) for event in events]).items()) -def get_user_activity_day(session, user, date, tz='UTC'): +def get_user_activity_day(session, user, date, tz="UTC"): """ Return the activity of the specified user on the specified date. 'offset' is intended to be a timezone offset from UTC, in minutes: you can discover the offset for a timezone and pass that, so this @@ -4862,7 +4921,7 @@ def get_user_activity_day(session, user, date, tz='UTC'): the opposite of what Javascript getTimezoneOffset() does, so you have to invert any value you get from that. 
""" - dt = datetime.datetime.strptime(date, '%Y-%m-%d') + dt = datetime.datetime.strptime(date, "%Y-%m-%d") # if the offset is *negative* some of the events we want may be # on the next day in UTC terms. if the offset is *positive* some # of the events we want may be on the previous day in UTC terms. @@ -4873,14 +4932,11 @@ def get_user_activity_day(session, user, date, tz='UTC'): # in UTC time. prevday = dt - datetime.timedelta(days=1) nextday = dt + datetime.timedelta(days=2) - query = session.query( - model.PagureLog - ).filter( - model.PagureLog.date_created.between(prevday, nextday) - ).filter( - model.PagureLog.user_id == user.id - ).order_by( - model.PagureLog.id.asc() + query = ( + session.query(model.PagureLog) + .filter(model.PagureLog.date_created.between(prevday, nextday)) + .filter(model.PagureLog.user_id == user.id) + .order_by(model.PagureLog.id.asc()) ) events = query.all() # Now we filter down to the events that *really* occurred on the @@ -4894,12 +4950,10 @@ def get_watchlist_messages(session, user, limit=None): watched_list = [watch.id for watch in watched] - events = session.query( - model.PagureLog - ).filter( - model.PagureLog.project_id.in_(watched_list) - ).order_by( - model.PagureLog.id.desc() + events = ( + session.query(model.PagureLog) + .filter(model.PagureLog.project_id.in_(watched_list)) + .order_by(model.PagureLog.id.desc()) ) if limit is not None: @@ -4911,13 +4965,13 @@ def get_watchlist_messages(session, user, limit=None): def log_action(session, action, obj, user_obj): - ''' Log an user action on a project/issue/PR. ''' + """ Log an user action on a project/issue/PR. 
""" project_id = None - if obj.isa in ['issue', 'pull-request']: + if obj.isa in ["issue", "pull-request"]: project_id = obj.project_id if obj.project.private: return - elif obj.isa == 'project': + elif obj.isa == "project": project_id = obj.id if obj.private: return @@ -4933,12 +4987,12 @@ def log_action(session, action, obj, user_obj): user_id=user_obj.id, project_id=project_id, log_type=action, - ref_id=obj.id + ref_id=obj.id, ) - if obj.isa == 'issue': - setattr(log, 'issue_uid', obj.uid) - elif obj.isa == 'pull-request': - setattr(log, 'pull_request_uid', obj.uid) + if obj.isa == "issue": + setattr(log, "issue_uid", obj.uid) + elif obj.isa == "pull-request": + setattr(log, "pull_request_uid", obj.uid) session.add(log) session.commit() @@ -4946,9 +5000,7 @@ def log_action(session, action, obj, user_obj): def email_logs_count(session, email): """ Returns the number of logs associated with a given email.""" - query = session.query( - model.PagureLog - ).filter( + query = session.query(model.PagureLog).filter( model.PagureLog.user_email == email ) @@ -4959,51 +5011,41 @@ def update_log_email_user(session, email, user): """ Update the logs with the provided email to point to the specified user. 
""" - session.query( - model.PagureLog - ).filter( + session.query(model.PagureLog).filter( model.PagureLog.user_email == email - ).update( - {model.PagureLog.user_id: user.id}, - synchronize_session=False - ) + ).update({model.PagureLog.user_id: user.id}, synchronize_session=False) def get_custom_key(session, project, keyname): - ''' Returns custom key object given it's name and the project ''' - - query = session.query( - model.IssueKeys - ).filter( - model.IssueKeys.project_id == project.id - ).filter( - model.IssueKeys.name == keyname + """ Returns custom key object given it's name and the project """ + + query = ( + session.query(model.IssueKeys) + .filter(model.IssueKeys.project_id == project.id) + .filter(model.IssueKeys.name == keyname) ) return query.first() def get_active_milestones(session, project): - ''' Returns the list of all the active milestones for a given project. - ''' - - query = session.query( - model.Issue.milestone - ).filter( - model.Issue.project_id == project.id - ).filter( - model.Issue.status == 'Open' - ).filter( - model.Issue.milestone.isnot(None) + """ Returns the list of all the active milestones for a given project. + """ + + query = ( + session.query(model.Issue.milestone) + .filter(model.Issue.project_id == project.id) + .filter(model.Issue.status == "Open") + .filter(model.Issue.milestone.isnot(None)) ) return sorted([item[0] for item in query.distinct()]) def add_metadata_update_notif(session, obj, messages, user, gitfolder): - ''' Add a notification to the specified issue with the given messages + """ Add a notification to the specified issue with the given messages which should reflect changes made to the meta-data of the issue. 
- ''' + """ if not messages: return @@ -5015,19 +5057,19 @@ def add_metadata_update_notif(session, obj, messages, user, gitfolder): user_obj = get_user(session, user) user_id = user_obj.id - if obj.isa == 'issue': + if obj.isa == "issue": obj_comment = model.IssueComment( issue_uid=obj.uid, - comment='**Metadata Update from @%s**:\n- %s' % ( - user, '\n- '.join(sorted(messages))), + comment="**Metadata Update from @%s**:\n- %s" + % (user, "\n- ".join(sorted(messages))), user_id=user_id, notification=True, ) - elif obj.isa == 'pull-request': + elif obj.isa == "pull-request": obj_comment = model.PullRequestComment( pull_request_uid=obj.uid, - comment='**Metadata Update from @%s**:\n- %s' % ( - user, '\n- '.join(sorted(messages))), + comment="**Metadata Update from @%s**:\n- %s" + % (user, "\n- ".join(sorted(messages))), user_id=user_id, notification=True, ) @@ -5039,23 +5081,27 @@ def add_metadata_update_notif(session, obj, messages, user, gitfolder): if REDIS: REDIS.publish( - 'pagure.%s' % obj.uid, json.dumps({ - 'comment_id': obj_comment.id, - '%s_id' % obj.isa: obj.id, - 'project': obj.project.fullname, - 'comment_added': text2markdown(obj_comment.comment), - 'comment_user': obj_comment.user.user, - 'avatar_url': avatar_url_from_email( - obj_comment.user.default_email, size=16), - 'comment_date': obj_comment.date_created.strftime( - '%Y-%m-%d %H:%M:%S'), - 'notification': True, - }) + "pagure.%s" % obj.uid, + json.dumps( + { + "comment_id": obj_comment.id, + "%s_id" % obj.isa: obj.id, + "project": obj.project.fullname, + "comment_added": text2markdown(obj_comment.comment), + "comment_user": obj_comment.user.user, + "avatar_url": avatar_url_from_email( + obj_comment.user.default_email, size=16 + ), + "comment_date": obj_comment.date_created.strftime( + "%Y-%m-%d %H:%M:%S" + ), + "notification": True, + } + ), ) if gitfolder: - pagure.lib.git.update_git( - obj, repo=obj.project, repofolder=gitfolder) + pagure.lib.git.update_git(obj, repo=obj.project, 
repofolder=gitfolder) def tokenize_search_string(pattern): @@ -5067,25 +5113,25 @@ def tokenize_search_string(pattern): return {}, None def finalize_token(token, custom_search): - if ':' in token: + if ":" in token: # This was a "key:value" parameter - key, value = token.split(':', 1) + key, value = token.split(":", 1) custom_search[key] = value - return '' + return "" else: # This was a token without colon, thus a search pattern - return '%s ' % token + return "%s " % token custom_search = {} # Remaining is the remaining real search_pattern (aka, non-key:values) - remaining = '' + remaining = "" # Token is the current "search token" we are processing - token = '' + token = "" in_quotes = False for char in pattern: - if char == ' ' and not in_quotes: + if char == " " and not in_quotes: remaining += finalize_token(token, custom_search) - token = '' + token = "" elif char == '"': in_quotes = not in_quotes else: @@ -5098,72 +5144,61 @@ def tokenize_search_string(pattern): def get_access_levels(session): - ''' Returns all the access levels a user/group can have for a project ''' + """ Returns all the access levels a user/group can have for a project """ access_level_objs = session.query(model.AccessLevels).all() return [access_level.access for access_level in access_level_objs] def get_obj_access(session, project_obj, obj): - ''' Returns the level of access the user/group has on the project. + """ Returns the level of access the user/group has on the project. :arg session: the session to use to connect to the database. 
:arg project_obj: SQLAlchemy object of Project class :arg obj: SQLAlchemy object of either User or PagureGroup class - ''' + """ if isinstance(obj, model.User): - query = session.query( - model.ProjectUser - ).filter( - model.ProjectUser.project_id == project_obj.id - ).filter( - model.ProjectUser.user_id == obj.id + query = ( + session.query(model.ProjectUser) + .filter(model.ProjectUser.project_id == project_obj.id) + .filter(model.ProjectUser.user_id == obj.id) ) else: - query = session.query( - model.ProjectGroup - ).filter( - model.ProjectGroup.project_id == project_obj.id - ).filter( - model.ProjectGroup.group_id == obj.id + query = ( + session.query(model.ProjectGroup) + .filter(model.ProjectGroup.project_id == project_obj.id) + .filter(model.ProjectGroup.group_id == obj.id) ) return query.first() def search_token( - session, acls, user=None, token=None, active=False, expired=False): - ''' Searches the API tokens corresponding to the criterias specified. + session, acls, user=None, token=None, active=False, expired=False +): + """ Searches the API tokens corresponding to the criterias specified. :arg session: the session to use to connect to the database. 
:arg acls: List of the ACL associated with these API tokens :arg user: restrict the API tokens to this given user :arg token: restrict the API tokens to this specified token (if it exists) - ''' - query = session.query( - model.Token - ).filter( - model.Token.id == model.TokenAcl.token_id - ).filter( - model.TokenAcl.acl_id == model.ACL.id + """ + query = ( + session.query(model.Token) + .filter(model.Token.id == model.TokenAcl.token_id) + .filter(model.TokenAcl.acl_id == model.ACL.id) ) if acls: if isinstance(acls, list): - query = query.filter( - model.ACL.name.in_(acls) - ) + query = query.filter(model.ACL.name.in_(acls)) else: - query = query.filter( - model.ACL.name == acls - ) + query = query.filter(model.ACL.name == acls) if user: - query = query.filter( - model.Token.user_id == model.User.id - ).filter( + query = query.filter(model.Token.user_id == model.User.id).filter( model.User.user == user ) @@ -5177,16 +5212,14 @@ def search_token( ) if token: - query = query.filter( - model.Token.id == token - ) + query = query.filter(model.Token.id == token) return query.first() else: return query.all() def set_project_owner(session, project, user, required_groups=None): - ''' Set the ownership of a project + """ Set the ownership of a project :arg session: the session to use to connect to the database. :arg project: a Project object representing the project's ownership to change. @@ -5195,7 +5228,7 @@ def set_project_owner(session, project, user, required_groups=None): should be in to become owner if one of the pattern matches the project fullname. 
:return: None - ''' + """ if required_groups: for key in required_groups: @@ -5204,9 +5237,9 @@ def set_project_owner(session, project, user, required_groups=None): req_grps = set(required_groups[key]) if not user_grps.intersection(req_grps): raise pagure.exceptions.PagureException( - 'This user must be in one of the following groups ' - 'to be allowed to be added to this project: %s' % - ', '.join(req_grps) + "This user must be in one of the following groups " + "to be allowed to be added to this project: %s" + % ", ".join(req_grps) ) for contributor in project.users: @@ -5218,7 +5251,8 @@ def set_project_owner(session, project, user, required_groups=None): def get_pagination_metadata( - flask_request, page, per_page, total, key_page='page'): + flask_request, page, per_page, total, key_page="page" +): """ Returns pagination metadata for an API. The code was inspired by Flask-SQLAlchemy. @@ -5234,11 +5268,11 @@ def get_pagination_metadata( # Remove pagination related args because those are handled elsewhere # Also, remove any args that url_for accepts in case the user entered # those in - for key in [key_page, 'per_page', 'endpoint']: + for key in [key_page, "per_page", "endpoint"]: if key in request_args_wo_page: request_args_wo_page.pop(key) for key in flask_request.args: - if key.startswith('_'): + if key.startswith("_"): request_args_wo_page.pop(key) request_args_wo_page.update(flask_request.view_args) @@ -5247,39 +5281,51 @@ def get_pagination_metadata( if page < pages: request_args_wo_page.update({key_page: page + 1}) next_page = url_for( - flask_request.endpoint, per_page=per_page, - _external=True, **request_args_wo_page) + flask_request.endpoint, + per_page=per_page, + _external=True, + **request_args_wo_page + ) prev_page = None if page > 1: request_args_wo_page.update({key_page: page - 1}) prev_page = url_for( - flask_request.endpoint, per_page=per_page, - _external=True, **request_args_wo_page) + flask_request.endpoint, + per_page=per_page, + 
_external=True, + **request_args_wo_page + ) request_args_wo_page.update({key_page: 1}) first_page = url_for( - flask_request.endpoint, per_page=per_page, _external=True, - **request_args_wo_page) + flask_request.endpoint, + per_page=per_page, + _external=True, + **request_args_wo_page + ) request_args_wo_page.update({key_page: pages}) last_page = url_for( - flask_request.endpoint, per_page=per_page, - _external=True, **request_args_wo_page) + flask_request.endpoint, + per_page=per_page, + _external=True, + **request_args_wo_page + ) return { key_page: page, - 'pages': pages, - 'per_page': per_page, - 'prev': prev_page, - 'next': next_page, - 'first': first_page, - 'last': last_page + "pages": pages, + "per_page": per_page, + "prev": prev_page, + "next": next_page, + "first": first_page, + "last": last_page, } def update_star_project(session, repo, star, user): - ''' Unset or set the star status depending on the star value. + """ Unset or set the star status depending on the star value. :arg session: the session to use to connect to the database. 
:arg repo: a model.Project object representing the project to star/unstar @@ -5287,54 +5333,43 @@ def update_star_project(session, repo, star, user): :arg user: string representing the user :return: None or string containing 'You starred this project' or 'You unstarred this project' - ''' + """ if not all([repo, user, star]): return user_obj = get_user(session, user) msg = None - if star == '1': - msg = _star_project( - session, - repo=repo, - user=user_obj, - ) - elif star == '0': - msg = _unstar_project( - session, - repo=repo, - user=user_obj, - ) + if star == "1": + msg = _star_project(session, repo=repo, user=user_obj) + elif star == "0": + msg = _unstar_project(session, repo=repo, user=user_obj) return msg def _star_project(session, repo, user): - ''' Star a project + """ Star a project :arg session: Session object to connect to db with :arg repo: model.Project object representing the repo to star :arg user: model.User object who is starring this repo :return: None or string containing 'You starred this project' - ''' + """ if not all([repo, user]): return - stargazer_obj = model.Star( - project_id=repo.id, - user_id=user.id, - ) + stargazer_obj = model.Star(project_id=repo.id, user_id=user.id) session.add(stargazer_obj) - return 'You starred this project' + return "You starred this project" def _unstar_project(session, repo, user): - ''' Unstar a project + """ Unstar a project :arg session: Session object to connect to db with :arg repo: model.Project object representing the repo to unstar :arg user: model.User object who is unstarring this repo :return: None or string containing 'You unstarred this project' or 'You never starred the project' - ''' + """ if not all([repo, user]): return @@ -5342,41 +5377,39 @@ def _unstar_project(session, repo, user): stargazer_obj = _get_stargazer_obj(session, repo, user) if isinstance(stargazer_obj, model.Star): session.delete(stargazer_obj) - msg = 'You unstarred this project' + msg = "You unstarred this project" else: - 
msg = 'You never starred the project' + msg = "You never starred the project" return msg def _get_stargazer_obj(session, repo, user): - ''' Query the db to find stargazer object with given repo and user + """ Query the db to find stargazer object with given repo and user :arg session: Session object to connect to db with :arg repo: model.Project object :arg user: model.User object :return: None or model.Star object - ''' + """ if not all([repo, user]): return - stargazer_obj = session.query( - model.Star, - ).filter( - model.Star.project_id == repo.id, - ).filter( - model.Star.user_id == user.id, + stargazer_obj = ( + session.query(model.Star) + .filter(model.Star.project_id == repo.id) + .filter(model.Star.user_id == user.id) ) return stargazer_obj.first() def has_starred(session, repo, user): - ''' Check if a given user has starred a particular project + """ Check if a given user has starred a particular project :arg session: The session object to query the db with :arg repo: model.Project object for which the star is checked :arg user: The username of the user in question :return: True if user has starred the project, False otherwise - ''' + """ if not all([repo, user]): return @@ -5388,18 +5421,19 @@ def has_starred(session, repo, user): def update_read_only_mode(session, repo, read_only=True): - ''' Remove the read only mode from the project + """ Remove the read only mode from the project :arg session: The session object to query the db with :arg repo: model.Project object to mark/unmark read only :arg read_only: True if project is to be made read only, False otherwise - ''' + """ if ( - not repo - or not isinstance(repo, model.Project) - or read_only not in [True, False]): + not repo + or not isinstance(repo, model.Project) + or read_only not in [True, False] + ): return if repo.read_only != read_only: repo.read_only = read_only @@ -5407,48 +5441,40 @@ def update_read_only_mode(session, repo, read_only=True): def issues_history_stats(session, project): - ''' 
Returns the number of opened issues on the specified project over + """ Returns the number of opened issues on the specified project over the last 365 days :arg session: The session object to query the db with :arg repo: model.Project object to get the issues stats about - ''' + """ # Some ticket got imported as closed but without a closed_at date, so # let's ignore them all - to_ignore = session.query( - model.Issue - ).filter( - model.Issue.project_id == project.id - ).filter( - model.Issue.closed_at == None, # noqa - ).filter( - model.Issue.status == 'Closed' - ).count() + to_ignore = ( + session.query(model.Issue) + .filter(model.Issue.project_id == project.id) + .filter(model.Issue.closed_at == None) # noqa + .filter(model.Issue.status == "Closed") + .count() + ) # For each week from tomorrow, get the number of open tickets tomorrow = datetime.datetime.utcnow() + datetime.timedelta(days=1) output = {} for week in range(53): start = tomorrow - datetime.timedelta(days=(week * 7)) - closed_ticket = session.query( - model.Issue - ).filter( - model.Issue.project_id == project.id - ).filter( - model.Issue.closed_at >= start - ).filter( - model.Issue.date_created <= start - ) - open_ticket = session.query( - model.Issue - ).filter( - model.Issue.project_id == project.id - ).filter( - model.Issue.status == 'Open' - ).filter( - model.Issue.date_created <= start + closed_ticket = ( + session.query(model.Issue) + .filter(model.Issue.project_id == project.id) + .filter(model.Issue.closed_at >= start) + .filter(model.Issue.date_created <= start) + ) + open_ticket = ( + session.query(model.Issue) + .filter(model.Issue.project_id == project.id) + .filter(model.Issue.status == "Open") + .filter(model.Issue.date_created <= start) ) cnt = open_ticket.count() + closed_ticket.count() - to_ignore if cnt < 0: @@ -5458,9 +5484,8 @@ def issues_history_stats(session, project): return output -def get_authorized_project( - session, project_name, user=None, namespace=None): - ''' 
Retrieving the project with user permission constraint +def get_authorized_project(session, project_name, user=None, namespace=None): + """ Retrieving the project with user permission constraint :arg session: The SQLAlchemy session to use :type session: sqlalchemy.orm.session.Session @@ -5474,10 +5499,8 @@ def get_authorized_project( permissions for the project else it returns None :rtype: Project - ''' - repo = pagure.lib._get_project( - session, project_name, user, namespace, - ) + """ + repo = pagure.lib._get_project(session, project_name, user, namespace) if repo and repo.private and not pagure.utils.is_repo_user(repo): return None @@ -5486,7 +5509,7 @@ def get_authorized_project( def get_project_family(session, project): - ''' Retrieve the family of the specified project, ie: all the forks + """ Retrieve the family of the specified project, ie: all the forks of the main project. If the specified project is a fork, let's work our way up the chain until we find the main project so we can go down and get all the forks @@ -5497,34 +5520,31 @@ def get_project_family(session, project): :arg project: The project whose family is searched :type project: pagure.lib.model.Project - ''' + """ parent = project while parent.is_fork: parent = parent.parent - sub = session.query( - sqlalchemy.distinct(model.Project.id), - ).filter( - model.Project.parent_id == parent.id, + sub = session.query(sqlalchemy.distinct(model.Project.id)).filter( + model.Project.parent_id == parent.id ) - query = session.query( - model.Project, - ).filter( - sqlalchemy.or_( - model.Project.parent_id.in_(sub.subquery()), - model.Project.parent_id == parent.id, - ) - ).filter( - model.Project.user_id == model.User.id - ).order_by( - model.User.user + query = ( + session.query(model.Project) + .filter( + sqlalchemy.or_( + model.Project.parent_id.in_(sub.subquery()), + model.Project.parent_id == parent.id, + ) + ) + .filter(model.Project.user_id == model.User.id) + .order_by(model.User.user) ) return 
[parent] + query.all() def link_pr_issue(session, issue, request): - ''' Associate the specified issue with the specified pull-requets. + """ Associate the specified issue with the specified pull-requets. :arg session: The SQLAlchemy session to use :type session: sqlalchemy.orm.session.Session @@ -5534,20 +5554,19 @@ def link_pr_issue(session, issue, request): :arg request: A pull-request to associate the specified issue with :type request: pagure.lib.model.PullRequest - ''' + """ associated_issues = [iss.uid for iss in request.related_issues] if issue.uid not in associated_issues: obj = model.PrToIssue( - pull_request_uid=request.uid, - issue_uid=issue.uid + pull_request_uid=request.uid, issue_uid=issue.uid ) session.add(obj) session.flush() def remove_user_of_project(session, user, project, agent): - ''' Remove the specified user from the given project. + """ Remove the specified user from the given project. :arg session: the session with which to connect to the database. :arg user: an pagure.lib.model.User object to remove from the project. @@ -5555,13 +5574,14 @@ def remove_user_of_project(session, user, project, agent): the specified user. :arg agent: the username of the user performing the action. 
- ''' + """ userids = [u.id for u in project.users] if user.id not in userids: raise pagure.exceptions.PagureException( - 'User does not have any access on the repo') + "User does not have any access on the repo" + ) for u in project.users: if u.id == user.id: @@ -5576,12 +5596,12 @@ def remove_user_of_project(session, user, project, agent): pagure.lib.git.generate_gitolite_acls(project=project) pagure.lib.notify.log( project, - topic='project.user.removed', + topic="project.user.removed", msg=dict( project=project.to_json(public=True), removed_user=user.username, - agent=agent - ) + agent=agent, + ), ) - return 'User removed' + return "User removed" diff --git a/pagure/lib/encoding_utils.py b/pagure/lib/encoding_utils.py index aaeb5a3..66f7dce 100644 --- a/pagure/lib/encoding_utils.py +++ b/pagure/lib/encoding_utils.py @@ -22,7 +22,7 @@ from pagure.exceptions import PagureEncodingException _log = logging.getLogger(__name__) -Guess = namedtuple('Guess', ['encoding', 'confidence']) +Guess = namedtuple("Guess", ["encoding", "confidence"]) def detect_encodings(data): @@ -40,7 +40,7 @@ def detect_encodings(data): """ if not data: # It's an empty string so we can safely say it's ascii - return {'ascii': 1.0} + return {"ascii": 1.0} # We can't use ``chardet.detect`` because we want to dig in the internals # of the detector to bias the utf-8 result. @@ -49,11 +49,11 @@ def detect_encodings(data): detector.feed(data) result = detector.close() if not result: - return {'utf-8': 1.0} - encodings = {result['encoding']: result['confidence']} - if ch_version[0] == '3': + return {"utf-8": 1.0} + encodings = {result["encoding"]: result["confidence"]} + if ch_version[0] == "3": for prober in detector._charset_probers: - if hasattr(prober, 'probers'): + if hasattr(prober, "probers"): for prober in prober.probers: encodings[prober.charset_name] = prober.get_confidence() else: @@ -93,14 +93,17 @@ def guess_encodings(data): # the expected range from chardet. 
This requires chardet to be very # unconfident in utf-8 and very confident in something else for utf-8 to # not be selected. - if 'utf-8' in encodings and encodings['utf-8'] > 0.0: - encodings['utf-8'] = (encodings['utf-8'] + 2.0) / 3.0 - encodings = [Guess(encoding, confidence) - for encoding, confidence in encodings.items()] + if "utf-8" in encodings and encodings["utf-8"] > 0.0: + encodings["utf-8"] = (encodings["utf-8"] + 2.0) / 3.0 + encodings = [ + Guess(encoding, confidence) + for encoding, confidence in encodings.items() + ] sorted_encodings = sorted( - encodings, key=lambda guess: guess.confidence, reverse=True) + encodings, key=lambda guess: guess.confidence, reverse=True + ) - _log.debug('Possible encodings: %s' % sorted_encodings) + _log.debug("Possible encodings: %s" % sorted_encodings) return sorted_encodings @@ -129,7 +132,7 @@ def guess_encoding(data): encodings = guess_encodings(data) for encoding in encodings: - _log.debug('Trying encoding: %s', encoding) + _log.debug("Trying encoding: %s", encoding) try: data.decode(encoding.encoding) return encoding.encoding @@ -137,7 +140,7 @@ def guess_encoding(data): # The first error is thrown when we failed to decode in that # encoding, the second when encoding.encoding returned None pass - raise PagureEncodingException('No encoding could be guessed for this file') + raise PagureEncodingException("No encoding could be guessed for this file") def decode(data): diff --git a/pagure/lib/git.py b/pagure/lib/git.py index c88fb64..adf0f99 100644 --- a/pagure/lib/git.py +++ b/pagure/lib/git.py @@ -44,9 +44,9 @@ _log = logging.getLogger(__name__) def commit_to_patch( - repo_obj, commits, diff_view=False, find_similar=False, - separated=False): - ''' For a given commit (PyGit2 commit object) of a specified git repo, + repo_obj, commits, diff_view=False, find_similar=False, separated=False +): + """ For a given commit (PyGit2 commit object) of a specified git repo, returns a string representation of the changes the 
commit did in a format that allows it to be used as patch. @@ -69,7 +69,7 @@ def commit_to_patch( :return: the patch or diff representation of the provided commits :rtype: str - ''' + """ if not isinstance(commits, list): commits = [commits] @@ -86,26 +86,26 @@ def commit_to_patch( if diff_view: if separated: - for el in diff.patch.split('\ndiff --git a/'): - if el and not el.startswith('diff --git a/'): - patch.append('\ndiff --git a/' + el) + for el in diff.patch.split("\ndiff --git a/"): + if el and not el.startswith("diff --git a/"): + patch.append("\ndiff --git a/" + el) elif el: patch.append(el) else: patch.append(diff.patch) else: - subject = message = '' - if '\n' in commit.message: - subject, message = commit.message.split('\n', 1) + subject = message = "" + if "\n" in commit.message: + subject, message = commit.message.split("\n", 1) else: subject = commit.message if len(commits) > 1: - subject = '[PATCH %s/%s] %s' % ( - cnt + 1, len(commits), subject) + subject = "[PATCH %s/%s] %s" % (cnt + 1, len(commits), subject) - patch.append("""From {commit} Mon Sep 17 00:00:00 2001 + patch.append( + """From {commit} Mon Sep 17 00:00:00 2001 From: {author_name} <{author_email}> Date: {date} Subject: {subject} @@ -115,19 +115,22 @@ Subject: {subject} {patch} """.format( - commit=commit.oid.hex, - author_name=commit.author.name, - author_email=commit.author.email, - date=datetime.datetime.utcfromtimestamp( - commit.commit_time).strftime('%b %d %Y %H:%M:%S +0000'), - subject=subject, - msg=message, - patch=diff.patch)) + commit=commit.oid.hex, + author_name=commit.author.name, + author_email=commit.author.email, + date=datetime.datetime.utcfromtimestamp( + commit.commit_time + ).strftime("%b %d %Y %H:%M:%S +0000"), + subject=subject, + msg=message, + patch=diff.patch, + ) + ) if separated: return patch else: - return ''.join(patch) + return "".join(patch) def generate_gitolite_acls(project=None, group=None): @@ -151,7 +154,7 @@ def generate_gitolite_acls(project=None, 
group=None): namespace=project.namespace if project else None, name=project.name if project else None, user=project.user.user if project and project.is_fork else None, - group=group + group=group, ) else: task = tasks.generate_gitolite_acls.delay(name=-1, group=group) @@ -165,17 +168,20 @@ def update_git(obj, repo, repofolder): ticketuid = None requestuid = None - if obj.isa == 'issue': + if obj.isa == "issue": ticketuid = obj.uid - elif obj.isa == 'pull-request': + elif obj.isa == "pull-request": requestuid = obj.uid else: - raise NotImplementedError('Unknown object type %s' % obj.isa) + raise NotImplementedError("Unknown object type %s" % obj.isa) queued = pagure.lib.tasks.update_git.delay( - repo.name, repo.namespace, + repo.name, + repo.namespace, repo.user.username if repo.is_fork else None, - ticketuid, requestuid) + ticketuid, + requestuid, + ) _maybe_wait(queued) return queued @@ -205,7 +211,7 @@ def _update_git(obj, repo, repofolder): changes commit them and push them back to the original repo. 
""" - _log.info('Update the git repo: %s for: %s', repo.path, obj) + _log.info("Update the git repo: %s for: %s", repo.path, obj) if not repofolder: return @@ -214,7 +220,7 @@ def _update_git(obj, repo, repofolder): repopath = os.path.join(repofolder, repo.path) # Clone the repo into a temp folder - newpath = tempfile.mkdtemp(prefix='pagure-') + newpath = tempfile.mkdtemp(prefix="pagure-") new_repo = pygit2.clone_repository(repopath, newpath) file_path = os.path.join(newpath, obj.uid) @@ -228,10 +234,12 @@ def _update_git(obj, repo, repofolder): added = True # Write down what changed - with open(file_path, 'w') as stream: - stream.write(json.dumps( - obj.to_json(), sort_keys=True, indent=4, - separators=(',', ': '))) + with open(file_path, "w") as stream: + stream.write( + json.dumps( + obj.to_json(), sort_keys=True, indent=4, separators=(",", ": ") + ) + ) # Retrieve the list of files that changed diff = new_repo.diff() @@ -262,22 +270,23 @@ def _update_git(obj, repo, repofolder): parents.append(parent) # Author/commiter will always be this one - author = _make_signature(name='pagure', email='pagure') + author = _make_signature(name="pagure", email="pagure") # Actually commit new_repo.create_commit( - 'refs/heads/master', + "refs/heads/master", author, author, - 'Updated %s %s: %s' % (obj.isa, obj.uid, obj.title), + "Updated %s %s: %s" % (obj.isa, obj.uid, obj.title), new_repo.index.write_tree(), - parents) + parents, + ) index.write() # Push to origin ori_remote = new_repo.remotes[0] - master_ref = new_repo.lookup_reference('HEAD').resolve() - refname = '%s:%s' % (master_ref.name, master_ref.name) + master_ref = new_repo.lookup_reference("HEAD").resolve() + refname = "%s:%s" % (master_ref.name, master_ref.name) PagureRepo.push(ori_remote, refname) @@ -292,9 +301,11 @@ def clean_git(obj, repo, repofolder): ticketuid = obj.uid return pagure.lib.tasks.clean_git.delay( - repo.name, repo.namespace, + repo.name, + repo.namespace, repo.user.username if repo.is_fork else 
None, - ticketuid) + ticketuid, + ) def _clean_git(obj, repo, repofolder): @@ -305,13 +316,13 @@ def _clean_git(obj, repo, repofolder): if not repofolder: return - _log.info('Update the git repo: %s to remove: %s', repo.path, obj) + _log.info("Update the git repo: %s to remove: %s", repo.path, obj) # Get the fork repopath = os.path.join(repofolder, repo.path) # Clone the repo into a temp folder - newpath = tempfile.mkdtemp(prefix='pagure-') + newpath = tempfile.mkdtemp(prefix="pagure-") new_repo = pygit2.clone_repository(repopath, newpath) file_path = os.path.join(newpath, obj.uid) @@ -340,22 +351,23 @@ def _clean_git(obj, repo, repofolder): parents.append(parent) # Author/commiter will always be this one - author = _make_signature(name='pagure', email='pagure') + author = _make_signature(name="pagure", email="pagure") # Actually commit new_repo.create_commit( - 'refs/heads/master', + "refs/heads/master", author, author, - 'Removed %s %s: %s' % (obj.isa, obj.uid, obj.title), + "Removed %s %s: %s" % (obj.isa, obj.uid, obj.title), new_repo.index.write_tree(), - parents) + parents, + ) index.write() # Push to origin ori_remote = new_repo.remotes[0] - master_ref = new_repo.lookup_reference('HEAD').resolve() - refname = '%s:%s' % (master_ref.name, master_ref.name) + master_ref = new_repo.lookup_reference("HEAD").resolve() + refname = "%s:%s" % (master_ref.name, master_ref.name) PagureRepo.push(ori_remote, refname) @@ -363,7 +375,7 @@ def _clean_git(obj, repo, repofolder): shutil.rmtree(newpath) -def get_user_from_json(session, jsondata, key='user'): +def get_user_from_json(session, jsondata, key="user"): """ From the given json blob, retrieve the user info and search for it in the db and create the user if it does not already exist. 
""" @@ -374,10 +386,10 @@ def get_user_from_json(session, jsondata, key='user'): data = jsondata.get(key, None) if data: - username = data.get('name') - fullname = data.get('fullname') - useremails = data.get('emails') - default_email = data.get('default_email') + username = data.get("name") + fullname = data.get("fullname") + useremails = data.get("emails") + default_email = data.get("default_email") if not default_email and useremails: default_email = useremails[0] @@ -399,7 +411,7 @@ def get_user_from_json(session, jsondata, key='user'): fullname=fullname or username, default_email=default_email, emails=useremails, - keydir=pagure_config.get('GITOLITE_KEYDIR', None), + keydir=pagure_config.get("GITOLITE_KEYDIR", None), ) session.commit() @@ -413,66 +425,73 @@ def get_project_from_json(session, jsondata): project = None user = get_user_from_json(session, jsondata) - name = jsondata.get('name') - namespace = jsondata.get('namespace') + name = jsondata.get("name") + namespace = jsondata.get("namespace") project_user = None - if jsondata.get('parent'): + if jsondata.get("parent"): project_user = user.username project = pagure.lib._get_project( - session, name, user=project_user, namespace=namespace) + session, name, user=project_user, namespace=namespace + ) if not project: parent = None - if jsondata.get('parent'): - parent = get_project_from_json( - session, jsondata.get('parent')) + if jsondata.get("parent"): + parent = get_project_from_json(session, jsondata.get("parent")) pagure.lib.fork_project( session=session, repo=parent, - gitfolder=pagure_config['GIT_FOLDER'], - docfolder=pagure_config.get('DOCS_FOLDER'), - ticketfolder=pagure_config.get('TICKETS_FOLDER'), - requestfolder=pagure_config['REQUESTS_FOLDER'], - user=user.username) + gitfolder=pagure_config["GIT_FOLDER"], + docfolder=pagure_config.get("DOCS_FOLDER"), + ticketfolder=pagure_config.get("TICKETS_FOLDER"), + requestfolder=pagure_config["REQUESTS_FOLDER"], + user=user.username, + ) else: - gitfolder 
= os.path.join( - pagure_config['GIT_FOLDER'], 'forks', user.username) \ - if parent else pagure_config['GIT_FOLDER'] + gitfolder = ( + os.path.join( + pagure_config["GIT_FOLDER"], "forks", user.username + ) + if parent + else pagure_config["GIT_FOLDER"] + ) pagure.lib.new_project( session, user=user.username, name=name, namespace=namespace, - description=jsondata.get('description'), + description=jsondata.get("description"), parent_id=parent.id if parent else None, - blacklist=pagure_config.get('BLACKLISTED_PROJECTS', []), - allowed_prefix=pagure_config.get('ALLOWED_PREFIX', []), + blacklist=pagure_config.get("BLACKLISTED_PROJECTS", []), + allowed_prefix=pagure_config.get("ALLOWED_PREFIX", []), gitfolder=gitfolder, - docfolder=pagure_config.get('DOCS_FOLDER'), - ticketfolder=pagure_config.get('TICKETS_FOLDER'), - requestfolder=pagure_config['REQUESTS_FOLDER'], + docfolder=pagure_config.get("DOCS_FOLDER"), + ticketfolder=pagure_config.get("TICKETS_FOLDER"), + requestfolder=pagure_config["REQUESTS_FOLDER"], prevent_40_chars=pagure_config.get( - 'OLD_VIEW_COMMIT_ENABLED', False), + "OLD_VIEW_COMMIT_ENABLED", False + ), ) session.commit() project = pagure.lib._get_project( - session, name, user=user.username, namespace=namespace) + session, name, user=user.username, namespace=namespace + ) - tags = jsondata.get('tags', None) + tags = jsondata.get("tags", None) if tags: pagure.lib.add_tag_obj( - session, project, tags=tags, user=user.username, - gitfolder=None) + session, project, tags=tags, user=user.username, gitfolder=None + ) return project def update_custom_field_from_json(session, repo, issue, json_data): - ''' Update the custom fields according to the custom fields of + """ Update the custom fields according to the custom fields of the issue. If the custom field is not present for the repo in it's settings, this will create them. 
@@ -481,10 +500,10 @@ def update_custom_field_from_json(session, repo, issue, json_data): :arg issue: the sqlalchemy object of the issue :arg json_data: the json representation of the issue taken from the git and used to update the data in the database. - ''' + """ # Update custom key value, if present - custom_fields = json_data.get('custom_fields') + custom_fields = json_data.get("custom_fields") if not custom_fields: return @@ -493,11 +512,11 @@ def update_custom_field_from_json(session, repo, issue, json_data): current_keys.append(key.name) for new_key in custom_fields: - if new_key['name'] not in current_keys: + if new_key["name"] not in current_keys: issuekey = model.IssueKeys( project_id=repo.id, - name=new_key['name'], - key_type=new_key['key_type'], + name=new_key["name"], + key_type=new_key["key_type"], ) try: session.add(issuekey) @@ -507,16 +526,13 @@ def update_custom_field_from_json(session, repo, issue, json_data): continue # The key should be present in the database now - key_obj = pagure.lib.get_custom_key(session, repo, new_key['name']) + key_obj = pagure.lib.get_custom_key(session, repo, new_key["name"]) - value = new_key.get('value') + value = new_key.get("value") if value: value = value.strip() pagure.lib.set_custom_key_value( - session, - issue=issue, - key=key_obj, - value=value, + session, issue=issue, key=key_obj, value=value ) try: session.commit() @@ -525,7 +541,8 @@ def update_custom_field_from_json(session, repo, issue, json_data): def update_ticket_from_git( - session, reponame, namespace, username, issue_uid, json_data, agent): + session, reponame, namespace, username, issue_uid, json_data, agent +): """ Update the specified issue (identified by its unique identifier) with the data present in the json blob provided. 
@@ -543,12 +560,14 @@ def update_ticket_from_git( """ repo = pagure.lib._get_project( - session, reponame, user=username, namespace=namespace) + session, reponame, user=username, namespace=namespace + ) if not repo: raise pagure.exceptions.PagureException( - 'Unknown repo %s of username: %s in namespace: %s' % ( - reponame, username, namespace)) + "Unknown repo %s of username: %s in namespace: %s" + % (reponame, username, namespace) + ) user = get_user_from_json(session, json_data) # rely on the agent provided, but if something goes wrong, behave as @@ -562,18 +581,19 @@ def update_ticket_from_git( pagure.lib.new_issue( session, repo=repo, - title=json_data.get('title'), - content=json_data.get('content'), - priority=json_data.get('priority'), + title=json_data.get("title"), + content=json_data.get("content"), + priority=json_data.get("priority"), user=user.username, ticketfolder=None, - issue_id=json_data.get('id'), + issue_id=json_data.get("id"), issue_uid=issue_uid, - private=json_data.get('private'), - status=json_data.get('status'), - close_status=json_data.get('close_status'), + private=json_data.get("private"), + status=json_data.get("status"), + close_status=json_data.get("close_status"), date_created=datetime.datetime.utcfromtimestamp( - float(json_data.get('date_created'))), + float(json_data.get("date_created")) + ), notify=False, ) @@ -584,12 +604,12 @@ def update_ticket_from_git( issue=issue, ticketfolder=None, user=agent.username, - title=json_data.get('title'), - content=json_data.get('content'), - priority=json_data.get('priority'), - status=json_data.get('status'), - close_status=json_data.get('close_status'), - private=json_data.get('private'), + title=json_data.get("title"), + content=json_data.get("content"), + priority=json_data.get("priority"), + status=json_data.get("status"), + close_status=json_data.get("close_status"), + private=json_data.get("private"), ) if msgs: messages.extend(msgs) @@ -599,14 +619,11 @@ def update_ticket_from_git( 
issue = pagure.lib.get_issue_by_uid(session, issue_uid=issue_uid) update_custom_field_from_json( - session, - repo=repo, - issue=issue, - json_data=json_data, + session, repo=repo, issue=issue, json_data=json_data ) # Update milestone - milestone = json_data.get('milestone') + milestone = json_data.get("milestone") # If milestone is not in the repo settings, add it if milestone: @@ -626,11 +643,11 @@ def update_ticket_from_git( ticketfolder=None, user=agent.username, milestone=milestone, - title=json_data.get('title'), - content=json_data.get('content'), - status=json_data.get('status'), - close_status=json_data.get('close_status'), - private=json_data.get('private'), + title=json_data.get("title"), + content=json_data.get("content"), + status=json_data.get("status"), + close_status=json_data.get("close_status"), + private=json_data.get("private"), ) if msgs: messages.extend(msgs) @@ -638,7 +655,7 @@ def update_ticket_from_git( session.rollback() # Update close_status - close_status = json_data.get('close_status') + close_status = json_data.get("close_status") if close_status: if close_status.strip() not in repo.close_status: @@ -650,51 +667,69 @@ def update_ticket_from_git( session.rollback() # Update tags - tags = json_data.get('tags', []) + tags = json_data.get("tags", []) msgs = pagure.lib.update_tags( - session, issue, tags, username=user.user, gitfolder=None) + session, issue, tags, username=user.user, gitfolder=None + ) if msgs: messages.extend(msgs) # Update assignee - assignee = get_user_from_json(session, json_data, key='assignee') + assignee = get_user_from_json(session, json_data, key="assignee") if assignee: msg = pagure.lib.add_issue_assignee( - session, issue, assignee.username, - user=agent.user, ticketfolder=None, notify=False) + session, + issue, + assignee.username, + user=agent.user, + ticketfolder=None, + notify=False, + ) if msg: messages.append(msg) # Update depends - depends = json_data.get('depends', []) + depends = json_data.get("depends", 
[]) msgs = pagure.lib.update_dependency_issue( - session, issue.project, issue, depends, - username=agent.user, ticketfolder=None) + session, + issue.project, + issue, + depends, + username=agent.user, + ticketfolder=None, + ) if msgs: messages.extend(msgs) # Update blocks - blocks = json_data.get('blocks', []) + blocks = json_data.get("blocks", []) msgs = pagure.lib.update_blocked_issue( - session, issue.project, issue, blocks, - username=agent.user, ticketfolder=None) + session, + issue.project, + issue, + blocks, + username=agent.user, + ticketfolder=None, + ) if msgs: messages.extend(msgs) - for comment in json_data['comments']: + for comment in json_data["comments"]: usercomment = get_user_from_json(session, comment) commentobj = pagure.lib.get_issue_comment_by_user_and_comment( - session, issue_uid, usercomment.id, comment['comment']) + session, issue_uid, usercomment.id, comment["comment"] + ) if not commentobj: pagure.lib.add_issue_comment( session, issue=issue, - comment=comment['comment'], + comment=comment["comment"], user=usercomment.username, ticketfolder=None, notify=False, date_created=datetime.datetime.fromtimestamp( - float(comment['date_created'])), + float(comment["date_created"]) + ), ) if messages: @@ -703,13 +738,14 @@ def update_ticket_from_git( obj=issue, messages=messages, user=agent.username, - gitfolder=None + gitfolder=None, ) session.commit() def update_request_from_git( - session, reponame, namespace, username, request_uid, json_data): + session, reponame, namespace, username, request_uid, json_data +): """ Update the specified request (identified by its unique identifier) with the data present in the json blob provided. 
@@ -724,78 +760,79 @@ def update_request_from_git( """ repo = pagure.lib._get_project( - session, reponame, user=username, namespace=namespace) + session, reponame, user=username, namespace=namespace + ) if not repo: raise pagure.exceptions.PagureException( - 'Unknown repo %s of username: %s in namespace: %s' % ( - reponame, username, namespace)) + "Unknown repo %s of username: %s in namespace: %s" + % (reponame, username, namespace) + ) user = get_user_from_json(session, json_data) - request = pagure.lib.get_request_by_uid( - session, request_uid=request_uid) + request = pagure.lib.get_request_by_uid(session, request_uid=request_uid) if not request: - repo_from = get_project_from_json( - session, json_data.get('repo_from') - ) + repo_from = get_project_from_json(session, json_data.get("repo_from")) - repo_to = get_project_from_json( - session, json_data.get('project') - ) + repo_to = get_project_from_json(session, json_data.get("project")) - status = json_data.get('status') + status = json_data.get("status") if pagure.utils.is_true(status): - status = 'Open' - elif pagure.utils.is_true(status, ['false']): - status = 'Merged' + status = "Open" + elif pagure.utils.is_true(status, ["false"]): + status = "Merged" # Create new request pagure.lib.new_pull_request( session, repo_from=repo_from, - branch_from=json_data.get('branch_from'), + branch_from=json_data.get("branch_from"), repo_to=repo_to if repo_to else None, - remote_git=json_data.get('remote_git'), - branch_to=json_data.get('branch'), - title=json_data.get('title'), + remote_git=json_data.get("remote_git"), + branch_to=json_data.get("branch"), + title=json_data.get("title"), user=user.username, - requestuid=json_data.get('uid'), - requestid=json_data.get('id'), + requestuid=json_data.get("uid"), + requestid=json_data.get("id"), status=status, requestfolder=None, notify=False, ) session.commit() - request = pagure.lib.get_request_by_uid( - session, request_uid=request_uid) + request = 
pagure.lib.get_request_by_uid(session, request_uid=request_uid) # Update start and stop commits - request.commit_start = json_data.get('commit_start') - request.commit_stop = json_data.get('commit_stop') + request.commit_start = json_data.get("commit_start") + request.commit_stop = json_data.get("commit_stop") # Update assignee - assignee = get_user_from_json(session, json_data, key='assignee') + assignee = get_user_from_json(session, json_data, key="assignee") if assignee: pagure.lib.add_pull_request_assignee( - session, request, assignee.username, - user=user.user, requestfolder=None) + session, + request, + assignee.username, + user=user.user, + requestfolder=None, + ) - for comment in json_data['comments']: + for comment in json_data["comments"]: user = get_user_from_json(session, comment) commentobj = pagure.lib.get_request_comment( - session, request_uid, comment['id']) + session, request_uid, comment["id"] + ) if not commentobj: pagure.lib.add_pull_request_comment( session, request, - commit=comment['commit'], - tree_id=comment.get('tree_id') or None, - filename=comment['filename'], - row=comment['line'], - comment=comment['comment'], + commit=comment["commit"], + tree_id=comment.get("tree_id") or None, + filename=comment["filename"], + row=comment["line"], + comment=comment["comment"], user=user.username, requestfolder=None, notify=False, @@ -803,9 +840,10 @@ def update_request_from_git( session.commit() -def _add_file_to_git(repo, issue, attachmentfolder, ticketfolder, user, - filename): - ''' Add a given file to the specified ticket git repository. +def _add_file_to_git( + repo, issue, attachmentfolder, ticketfolder, user, filename +): + """ Add a given file to the specified ticket git repository. 
:arg repo: the Project object from the database :arg attachmentfolder: the folder on the filesystem where the attachments @@ -815,15 +853,15 @@ def _add_file_to_git(repo, issue, attachmentfolder, ticketfolder, user, :arg user: the user object with its username and email :arg filename: the name of the file to save - ''' + """ # Get the fork repopath = os.path.join(ticketfolder, repo.path) # Clone the repo into a temp folder - newpath = tempfile.mkdtemp(prefix='pagure-') + newpath = tempfile.mkdtemp(prefix="pagure-") new_repo = pygit2.clone_repository(repopath, newpath) - folder_path = os.path.join(newpath, 'files') + folder_path = os.path.join(newpath, "files") file_path = os.path.join(folder_path, filename) # Get the current index @@ -833,13 +871,13 @@ def _add_file_to_git(repo, issue, attachmentfolder, ticketfolder, user, if os.path.exists(file_path): # File exists, remove the clone and return shutil.rmtree(newpath) - return os.path.join('files', filename) + return os.path.join("files", filename) if not os.path.exists(folder_path): os.mkdir(folder_path) # Copy from attachments directory - src = os.path.join(attachmentfolder, repo.fullname, 'files', filename) + src = os.path.join(attachmentfolder, repo.fullname, "files", filename) shutil.copyfile(src, file_path) # Retrieve the list of files that changed @@ -847,7 +885,7 @@ def _add_file_to_git(repo, issue, attachmentfolder, ticketfolder, user, files = [patch.new_file_path for patch in diff] # Add the changes to the index - index.add(os.path.join('files', filename)) + index.add(os.path.join("files", filename)) for filename in files: index.add(filename) @@ -863,40 +901,45 @@ def _add_file_to_git(repo, issue, attachmentfolder, ticketfolder, user, parents.append(parent) # Author/commiter will always be this one - author = _make_signature( - name=user.username, - email=user.default_email, - ) + author = _make_signature(name=user.username, email=user.default_email) # Actually commit new_repo.create_commit( - 
'refs/heads/master', + "refs/heads/master", author, author, - 'Add file %s to ticket %s: %s' % ( - filename, issue.uid, issue.title), + "Add file %s to ticket %s: %s" % (filename, issue.uid, issue.title), new_repo.index.write_tree(), - parents) + parents, + ) index.write() # Push to origin ori_remote = new_repo.remotes[0] - master_ref = new_repo.lookup_reference('HEAD').resolve() - refname = '%s:%s' % (master_ref.name, master_ref.name) + master_ref = new_repo.lookup_reference("HEAD").resolve() + refname = "%s:%s" % (master_ref.name, master_ref.name) - _log.info('Pushing to %s: %s', ori_remote.name, refname) + _log.info("Pushing to %s: %s", ori_remote.name, refname) PagureRepo.push(ori_remote, refname) # Remove the clone shutil.rmtree(newpath) - return os.path.join('files', filename) + return os.path.join("files", filename) def _update_file_in_git( - repo, branch, branchto, filename, content, message, user, email, - runhook=False): - ''' Update a specific file in the specified repository with the content + repo, + branch, + branchto, + filename, + content, + message, + user, + email, + runhook=False, +): + """ Update a specific file in the specified repository with the content given and commit the change under the user's name. 
:arg repo: the Project object from the database @@ -910,16 +953,17 @@ def _update_file_in_git( :kwarg runhook: boolean specifying if the post-update hook should be called or not - ''' - _log.info('Updating file: %s in the repo: %s', filename, repo.path) + """ + _log.info("Updating file: %s in the repo: %s", filename, repo.path) # Get the fork repopath = pagure.utils.get_repo_path(repo) # Clone the repo into a temp folder - newpath = tempfile.mkdtemp(prefix='pagure-') + newpath = tempfile.mkdtemp(prefix="pagure-") new_repo = pygit2.clone_repository( - repopath, newpath, checkout_branch=branch) + repopath, newpath, checkout_branch=branch + ) file_path = os.path.join(newpath, filename) @@ -927,8 +971,8 @@ def _update_file_in_git( index = new_repo.index # Write down what changed - with open(file_path, 'wb') as stream: - stream.write(content.replace('\r', '').encode('utf-8')) + with open(file_path, "wb") as stream: + stream.write(content.replace("\r", "").encode("utf-8")) # Retrieve the list of files that changed diff = new_repo.diff() @@ -971,35 +1015,35 @@ def _update_file_in_git( author, message.strip(), new_repo.index.write_tree(), - parents) + parents, + ) index.write() # Push to origin ori_remote = new_repo.remotes[0] - refname = '%s:refs/heads/%s' % ( + refname = "%s:refs/heads/%s" % ( nbranch_ref.name if nbranch_ref else branch_ref.name, - branchto) + branchto, + ) try: PagureRepo.push(ori_remote, refname) except pygit2.GitError as err: # pragma: no cover shutil.rmtree(newpath) raise pagure.exceptions.PagureException( - 'Commit could not be done: %s' % err) + "Commit could not be done: %s" % err + ) if runhook: gitrepo_obj = PagureRepo(repopath) gitrepo_obj.run_hook( - parent.hex, - commit.hex, - 'refs/heads/%s' % branchto, - user.username + parent.hex, commit.hex, "refs/heads/%s" % branchto, user.username ) # Remove the clone shutil.rmtree(newpath) - return os.path.join('files', filename) + return os.path.join("files", filename) def read_output(cmd, abspath, 
input=None, keepends=False, error=False, **kw): @@ -1035,17 +1079,18 @@ def read_output(cmd, abspath, input=None, keepends=False, error=False, **kw): stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=abspath, - **kw) + **kw + ) (out, err) = procs.communicate(input) - out = out.decode('utf-8') - err = err.decode('utf-8') + out = out.decode("utf-8") + err = err.decode("utf-8") retcode = procs.wait() if retcode: - print('ERROR: %s =-- %s' % (cmd, retcode)) + print("ERROR: %s =-- %s" % (cmd, retcode)) print(out) print(err) if not keepends: - out = out.rstrip('\n\r') + out = out.rstrip("\n\r") if error: return (out, err) @@ -1054,12 +1099,18 @@ def read_output(cmd, abspath, input=None, keepends=False, error=False, **kw): def read_git_output( - args, abspath, input=None, keepends=False, error=False, **kw): + args, abspath, input=None, keepends=False, error=False, **kw +): """Read the output of a Git command.""" return read_output( - ['git'] + args, abspath, input=input, - keepends=keepends, error=error, **kw) + ["git"] + args, + abspath, + input=input, + keepends=keepends, + error=error, + **kw + ) def read_git_lines(args, abspath, keepends=False, error=False, **kw): @@ -1080,17 +1131,17 @@ def read_git_lines(args, abspath, keepends=False, error=False, **kw): def get_revs_between(oldrev, newrev, abspath, refname, forced=False): """ Yield revisions between HEAD and BASE. 
""" - cmd = ['rev-list', '%s...%s' % (oldrev, newrev)] + cmd = ["rev-list", "%s...%s" % (oldrev, newrev)] if forced: head = get_default_branch(abspath) - cmd.append('^%s' % head) - if set(newrev) == set('0'): - cmd = ['rev-list', '%s' % oldrev] - elif set(oldrev) == set('0') or set(oldrev) == set('^0'): + cmd.append("^%s" % head) + if set(newrev) == set("0"): + cmd = ["rev-list", "%s" % oldrev] + elif set(oldrev) == set("0") or set(oldrev) == set("^0"): head = get_default_branch(abspath) - cmd = ['rev-list', '%s' % newrev, '^%s' % head] + cmd = ["rev-list", "%s" % newrev, "^%s" % head] if head in refname: - cmd = ['rev-list', '%s' % newrev] + cmd = ["rev-list", "%s" % newrev] return pagure.lib.git.read_git_lines(cmd, abspath) @@ -1099,11 +1150,11 @@ def is_forced_push(oldrev, newrev, abspath): Doc: http://stackoverflow.com/a/12258773 """ - if set(oldrev) == set('0'): + if set(oldrev) == set("0"): # This is a push that's creating a new branch => certainly ok return False # Returns if there was any commits deleted in the changeset - cmd = ['rev-list', '%s' % oldrev, '^%s' % newrev] + cmd = ["rev-list", "%s" % oldrev, "^%s" % newrev] out = pagure.lib.git.read_git_lines(cmd, abspath) return len(out) > 0 @@ -1112,113 +1163,118 @@ def get_base_revision(torev, fromrev, abspath): """ Return the base revision between HEAD and BASE. This is useful in case of force-push. """ - cmd = ['merge-base', fromrev, torev] + cmd = ["merge-base", fromrev, torev] return pagure.lib.git.read_git_lines(cmd, abspath) def get_default_branch(abspath): """ Return the default branch of a repo. """ - cmd = ['rev-parse', '--abbrev-ref', 'HEAD'] + cmd = ["rev-parse", "--abbrev-ref", "HEAD"] out = pagure.lib.git.read_git_lines(cmd, abspath) if out: return out[0] else: - return 'master' + return "master" def get_author(commit, abspath): - ''' Return the name of the person that authored the commit. ''' + """ Return the name of the person that authored the commit. 
""" user = pagure.lib.git.read_git_lines( - ['log', '-1', '--pretty=format:"%an"', commit], - abspath)[0].replace('"', '') + ["log", "-1", '--pretty=format:"%an"', commit], abspath + )[0].replace('"', "") return user def get_author_email(commit, abspath): - ''' Return the email of the person that authored the commit. ''' + """ Return the email of the person that authored the commit. """ user = pagure.lib.git.read_git_lines( - ['log', '-1', '--pretty=format:"%ae"', commit], - abspath)[0].replace('"', '') + ["log", "-1", '--pretty=format:"%ae"', commit], abspath + )[0].replace('"', "") return user def get_commit_subject(commit, abspath): - ''' Return the subject of the commit. ''' + """ Return the subject of the commit. """ subject = pagure.lib.git.read_git_lines( - ['log', '-1', '--pretty=format:"%s"', commit], - abspath)[0].replace('"', '') + ["log", "-1", '--pretty=format:"%s"', commit], abspath + )[0].replace('"', "") return subject def get_repo_name(abspath): - ''' Return the name of the git repo based on its path. - ''' - repo_name = '.'.join( - abspath.rsplit(os.path.sep, 1)[-1].rsplit('.', 1)[:-1]) + """ Return the name of the git repo based on its path. + """ + repo_name = ".".join( + abspath.rsplit(os.path.sep, 1)[-1].rsplit(".", 1)[:-1] + ) return repo_name def get_repo_namespace(abspath, gitfolder=None): - ''' Return the name of the git repo based on its path. - ''' + """ Return the name of the git repo based on its path. 
+ """ namespace = None if not gitfolder: - gitfolder = pagure_config['GIT_FOLDER'] + gitfolder = pagure_config["GIT_FOLDER"] - short_path = os.path.realpath(abspath).replace( - os.path.realpath(gitfolder), '').strip('/') + short_path = ( + os.path.realpath(abspath) + .replace(os.path.realpath(gitfolder), "") + .strip("/") + ) - if short_path.startswith('forks/'): - username, projectname = short_path.split('forks/', 1)[1].split('/', 1) + if short_path.startswith("forks/"): + username, projectname = short_path.split("forks/", 1)[1].split("/", 1) else: projectname = short_path - if '/' in projectname: - namespace = projectname.rsplit('/', 1)[0] + if "/" in projectname: + namespace = projectname.rsplit("/", 1)[0] return namespace def get_username(abspath): - ''' Return the username of the git repo based on its path. - ''' + """ Return the username of the git repo based on its path. + """ username = None - repo = os.path.abspath(os.path.join(abspath, '..')) - if '/forks/' in repo: - username = repo.split('/forks/', 1)[1].split('/', 1)[0] + repo = os.path.abspath(os.path.join(abspath, "..")) + if "/forks/" in repo: + username = repo.split("/forks/", 1)[1].split("/", 1)[0] return username def get_branch_ref(repo, branchname): - ''' Return the reference to the specified branch or raises an exception. - ''' + """ Return the reference to the specified branch or raises an exception. + """ location = pygit2.GIT_BRANCH_LOCAL if branchname not in repo.listall_branches(): - branchname = 'origin/%s' % branchname + branchname = "origin/%s" % branchname location = pygit2.GIT_BRANCH_REMOTE branch_ref = repo.lookup_branch(branchname, location) if not branch_ref or not branch_ref.resolve(): raise pagure.exceptions.PagureException( - 'No refs found for %s' % branchname) + "No refs found for %s" % branchname + ) return branch_ref.resolve() def merge_pull_request( - session, request, username, request_folder, domerge=True): - ''' Merge the specified pull-request. 
- ''' + session, request, username, request_folder, domerge=True +): + """ Merge the specified pull-request. + """ if domerge: - _log.info( - '%s asked to merge the pull-request: %s', username, request) + _log.info("%s asked to merge the pull-request: %s", username, request) else: - _log.info( - '%s asked to diff the pull-request: %s', username, request) + _log.info("%s asked to diff the pull-request: %s", username, request) if request.remote: # Get the fork repopath = pagure.utils.get_remote_repo_path( - request.remote_git, request.branch_from) + request.remote_git, request.branch_from + ) elif request.project_from: # Get the fork repopath = pagure.utils.get_repo_path(request.project_from) @@ -1231,8 +1287,8 @@ def merge_pull_request( parentpath = pagure.utils.get_repo_path(request.project) # Clone the original repo into a temp folder - newpath = tempfile.mkdtemp(prefix='pagure-pr-merge') - _log.info(' working directory: %s', newpath) + newpath = tempfile.mkdtemp(prefix="pagure-pr-merge") + _log.info(" working directory: %s", newpath) new_repo = pygit2.clone_repository(parentpath, newpath) # Main repo, bare version @@ -1241,19 +1297,26 @@ def merge_pull_request( # Update the start and stop commits in the DB, one last time diff_commits = diff_pull_request( - session, request, fork_obj, PagureRepo(parentpath), - requestfolder=request_folder, with_diff=False) - _log.info(' %s commit to merge', len(diff_commits)) + session, + request, + fork_obj, + PagureRepo(parentpath), + requestfolder=request_folder, + with_diff=False, + ) + _log.info(" %s commit to merge", len(diff_commits)) if request.project.settings.get( - 'Enforce_signed-off_commits_in_pull-request', False): + "Enforce_signed-off_commits_in_pull-request", False + ): for commit in diff_commits: - if 'signed-off-by' not in commit.message.lower(): + if "signed-off-by" not in commit.message.lower(): shutil.rmtree(newpath) - _log.info(' Missing a required: signed-off-by: Bailing') + _log.info(" Missing a required: 
signed-off-by: Bailing") raise pagure.exceptions.PagureException( - 'This repo enforces that all commits are ' - 'signed off by their author. ') + "This repo enforces that all commits are " + "signed off by their author. " + ) # Check/Get the branch from try: @@ -1262,18 +1325,22 @@ def merge_pull_request( branch = None if not branch: shutil.rmtree(newpath) - _log.info(' Branch of origin could not be found') + _log.info(" Branch of origin could not be found") raise pagure.exceptions.BranchNotFoundException( - 'Branch %s could not be found in the repo %s' % ( - request.branch_from, request.project_from.fullname - if request.project_from else request.remote_git - )) + "Branch %s could not be found in the repo %s" + % ( + request.branch_from, + request.project_from.fullname + if request.project_from + else request.remote_git, + ) + ) ori_remote = new_repo.remotes[0] # Add the fork as remote repo - reponame = '%s_%s' % (request.user.user, request.uid) + reponame = "%s_%s" % (request.user.user, request.uid) - _log.info(' Adding remote: %s pointing to: %s', reponame, repopath) + _log.info(" Adding remote: %s pointing to: %s", reponame, repopath) remote = new_repo.create_remote(reponame, repopath) # Fetch the commits @@ -1285,50 +1352,51 @@ def merge_pull_request( # Checkout the correct branch if new_repo.is_empty or new_repo.head_is_unborn: _log.debug( - ' target repo is empty, so PR can be merged using ' - 'fast-forward, reporting it') + " target repo is empty, so PR can be merged using " + "fast-forward, reporting it" + ) if domerge: - _log.info(' PR merged using fast-forward') - if not request.project.settings.get('always_merge', False): + _log.info(" PR merged using fast-forward") + if not request.project.settings.get("always_merge", False): new_repo.create_branch(request.branch, repo_commit) commit = repo_commit.oid.hex else: tree = new_repo.index.write_tree() user_obj = pagure.lib.get_user(session, username) commitname = user_obj.fullname or user_obj.user - author 
= _make_signature( - commitname, - user_obj.default_email) + author = _make_signature(commitname, user_obj.default_email) commit = new_repo.create_commit( - 'refs/heads/%s' % request.branch, + "refs/heads/%s" % request.branch, author, author, - 'Merge #%s `%s`' % (request.id, request.title), + "Merge #%s `%s`" % (request.id, request.title), tree, - [repo_commit.oid.hex]) + [repo_commit.oid.hex], + ) - _log.info(' New head: %s', commit) - refname = 'refs/heads/%s:refs/heads/%s' % ( - request.branch, request.branch) + _log.info(" New head: %s", commit) + refname = "refs/heads/%s:refs/heads/%s" % ( + request.branch, + request.branch, + ) PagureRepo.push(ori_remote, refname) bare_main_repo.run_hook( - '0' * 40, commit, 'refs/heads/%s' % request.branch, - username) + "0" * 40, commit, "refs/heads/%s" % request.branch, username + ) # Update status - _log.info(' Closing the PR in the DB') + _log.info(" Closing the PR in the DB") pagure.lib.close_pull_request( - session, request, username, - requestfolder=request_folder, + session, request, username, requestfolder=request_folder ) shutil.rmtree(newpath) - return 'Changes merged!' + return "Changes merged!" 
else: - _log.info(' PR merged using fast-forward, reporting it') - request.merge_status = 'FFORWARD' + _log.info(" PR merged using fast-forward, reporting it") + request.merge_status = "FFORWARD" session.commit() shutil.rmtree(newpath) - return 'FFORWARD' + return "FFORWARD" try: branch_ref = get_branch_ref(new_repo, request.branch) @@ -1336,70 +1404,70 @@ def merge_pull_request( branch_ref = None if not branch_ref: shutil.rmtree(newpath) - _log.info(' Target branch could not be found') + _log.info(" Target branch could not be found") raise pagure.exceptions.BranchNotFoundException( - 'Branch %s could not be found in the repo %s' % ( - request.branch, request.project.fullname - )) + "Branch %s could not be found in the repo %s" + % (request.branch, request.project.fullname) + ) new_repo.checkout(branch_ref) merge = new_repo.merge(repo_commit.oid) - _log.debug(' Merge: %s', merge) + _log.debug(" Merge: %s", merge) if merge is None: mergecode = new_repo.merge_analysis(repo_commit.oid)[0] - _log.debug(' Mergecode: %s', mergecode) + _log.debug(" Mergecode: %s", mergecode) # Wait until the last minute then check if the PR was already closed # by someone else in the mean while and if so, just bail - if request.status != 'Open': + if request.status != "Open": shutil.rmtree(newpath) _log.info( - ' This pull-request has already been merged or closed by %s ' - 'on %s' % (request.closed_by.user, request.closed_at)) + " This pull-request has already been merged or closed by %s " + "on %s" % (request.closed_by.user, request.closed_at) + ) raise pagure.exceptions.PagureException( - 'This pull-request was merged or closed by %s' % - request.closed_by.user) + "This pull-request was merged or closed by %s" + % request.closed_by.user + ) - refname = '%s:refs/heads/%s' % (branch_ref.name, request.branch) - if ( - (merge is not None and merge.is_uptodate) - or # noqa - (merge is None and - mergecode & pygit2.GIT_MERGE_ANALYSIS_UP_TO_DATE)): + refname = "%s:refs/heads/%s" % 
(branch_ref.name, request.branch) + if (merge is not None and merge.is_uptodate) or ( # noqa + merge is None and mergecode & pygit2.GIT_MERGE_ANALYSIS_UP_TO_DATE + ): if domerge: - _log.info(' PR up to date, closing it') + _log.info(" PR up to date, closing it") pagure.lib.close_pull_request( - session, request, username, - requestfolder=request_folder) + session, request, username, requestfolder=request_folder + ) shutil.rmtree(newpath) try: session.commit() except SQLAlchemyError as err: # pragma: no cover session.rollback() - _log.exception(' Could not merge the PR in the DB') + _log.exception(" Could not merge the PR in the DB") raise pagure.exceptions.PagureException( - 'Could not close this pull-request') + "Could not close this pull-request" + ) raise pagure.exceptions.PagureException( - 'Nothing to do, changes were already merged') + "Nothing to do, changes were already merged" + ) else: - _log.info(' PR up to date, reporting it') - request.merge_status = 'NO_CHANGE' + _log.info(" PR up to date, reporting it") + request.merge_status = "NO_CHANGE" session.commit() shutil.rmtree(newpath) - return 'NO_CHANGE' + return "NO_CHANGE" - elif ( - (merge is not None and merge.is_fastforward) - or # noqa - (merge is None and - mergecode & pygit2.GIT_MERGE_ANALYSIS_FASTFORWARD)): + elif (merge is not None and merge.is_fastforward) or ( # noqa + merge is None and mergecode & pygit2.GIT_MERGE_ANALYSIS_FASTFORWARD + ): if domerge: - _log.info(' PR merged using fast-forward') - head = new_repo.lookup_reference('HEAD').get_object() - if not request.project.settings.get('always_merge', False): + _log.info(" PR merged using fast-forward") + head = new_repo.lookup_reference("HEAD").get_object() + if not request.project.settings.get("always_merge", False): if merge is not None: # This is depending on the pygit2 version branch_ref.target = merge.fastforward_oid @@ -1410,94 +1478,90 @@ def merge_pull_request( tree = new_repo.index.write_tree() user_obj = 
pagure.lib.get_user(session, username) commitname = user_obj.fullname or user_obj.user - author = _make_signature( - commitname, - user_obj.default_email) + author = _make_signature(commitname, user_obj.default_email) commit = new_repo.create_commit( - 'refs/heads/%s' % request.branch, + "refs/heads/%s" % request.branch, author, author, - 'Merge #%s `%s`' % (request.id, request.title), + "Merge #%s `%s`" % (request.id, request.title), tree, - [head.hex, repo_commit.oid.hex]) + [head.hex, repo_commit.oid.hex], + ) - _log.info(' New head: %s', commit) + _log.info(" New head: %s", commit) PagureRepo.push(ori_remote, refname) bare_main_repo.run_hook( - head.hex, commit, 'refs/heads/%s' % request.branch, - username) + head.hex, commit, "refs/heads/%s" % request.branch, username + ) else: - _log.info(' PR merged using fast-forward, reporting it') - request.merge_status = 'FFORWARD' + _log.info(" PR merged using fast-forward, reporting it") + request.merge_status = "FFORWARD" session.commit() shutil.rmtree(newpath) - return 'FFORWARD' + return "FFORWARD" else: tree = None try: tree = new_repo.index.write_tree() except pygit2.GitError as err: - _log.debug( - ' Could not write down the new tree: merge conflicts') + _log.debug(" Could not write down the new tree: merge conflicts") _log.debug(err) shutil.rmtree(newpath) if domerge: - _log.info(' Merge conflict: Bailing') - raise pagure.exceptions.PagureException('Merge conflicts!') + _log.info(" Merge conflict: Bailing") + raise pagure.exceptions.PagureException("Merge conflicts!") else: - _log.info(' Merge conflict, reporting it') - request.merge_status = 'CONFLICTS' + _log.info(" Merge conflict, reporting it") + request.merge_status = "CONFLICTS" session.commit() - return 'CONFLICTS' + return "CONFLICTS" if domerge: - _log.info(' Writing down merge commit') - head = new_repo.lookup_reference('HEAD').get_object() - _log.info(' Basing on: %s - %s', head.hex, repo_commit.oid.hex) + _log.info(" Writing down merge commit") + 
head = new_repo.lookup_reference("HEAD").get_object() + _log.info(" Basing on: %s - %s", head.hex, repo_commit.oid.hex) user_obj = pagure.lib.get_user(session, username) commitname = user_obj.fullname or user_obj.user - author = _make_signature( - commitname, - user_obj.default_email) + author = _make_signature(commitname, user_obj.default_email) commit = new_repo.create_commit( - 'refs/heads/%s' % request.branch, + "refs/heads/%s" % request.branch, author, author, - 'Merge #%s `%s`' % (request.id, request.title), + "Merge #%s `%s`" % (request.id, request.title), tree, - [head.hex, repo_commit.oid.hex]) + [head.hex, repo_commit.oid.hex], + ) - _log.info(' New head: %s', commit) - local_ref = 'refs/heads/_pagure_topush' + _log.info(" New head: %s", commit) + local_ref = "refs/heads/_pagure_topush" new_repo.create_reference(local_ref, commit) - refname = '%s:refs/heads/%s' % (local_ref, request.branch) + refname = "%s:refs/heads/%s" % (local_ref, request.branch) PagureRepo.push(ori_remote, refname) - _log.info(' Pushing to: %s to %s', refname, ori_remote) + _log.info(" Pushing to: %s to %s", refname, ori_remote) bare_main_repo.run_hook( - head.hex, commit, 'refs/heads/%s' % request.branch, - username) + head.hex, commit, "refs/heads/%s" % request.branch, username + ) else: - _log.info(' PR can be merged with a merge commit, reporting it') - request.merge_status = 'MERGE' + _log.info(" PR can be merged with a merge commit, reporting it") + request.merge_status = "MERGE" session.commit() shutil.rmtree(newpath) - return 'MERGE' + return "MERGE" # Update status - _log.info(' Closing the PR in the DB') + _log.info(" Closing the PR in the DB") pagure.lib.close_pull_request( - session, request, username, - requestfolder=request_folder, + session, request, username, requestfolder=request_folder ) shutil.rmtree(newpath) - return 'Changes merged!' + return "Changes merged!" 
def get_diff_info(repo_obj, orig_repo, branch_from, branch_to, prid=None): - ''' Return the info needed to see a diff or make a Pull-Request between + """ Return the info needed to see a diff or make a Pull-Request between the two specified repo. :arg repo_obj: The pygit2.Repository object of the first git repo @@ -1508,16 +1572,16 @@ def get_diff_info(repo_obj, orig_repo, branch_from, branch_to, prid=None): changes in the second git repo :kwarg prid: the identifier of the pull-request to - ''' + """ try: frombranch = repo_obj.lookup_branch(branch_from) except ValueError: raise pagure.exceptions.BranchNotFoundException( - 'Branch %s does not exist' % branch_from + "Branch %s does not exist" % branch_from ) if not frombranch and not repo_obj.is_empty and prid is None: raise pagure.exceptions.BranchNotFoundException( - 'Branch %s does not exist' % branch_from + "Branch %s does not exist" % branch_from ) branch = None @@ -1526,12 +1590,12 @@ def get_diff_info(repo_obj, orig_repo, branch_from, branch_to, prid=None): branch = orig_repo.lookup_branch(branch_to) except ValueError: raise pagure.exceptions.BranchNotFoundException( - 'Branch %s does not exist' % branch_to + "Branch %s does not exist" % branch_to ) local_branches = orig_repo.listall_branches(pygit2.GIT_BRANCH_LOCAL) if not branch and local_branches: raise pagure.exceptions.BranchNotFoundException( - 'Branch %s could not be found in the target repo' % branch_to + "Branch %s could not be found in the target repo" % branch_to ) commitid = None @@ -1548,7 +1612,7 @@ def get_diff_info(repo_obj, orig_repo, branch_from, branch_to, prid=None): if not commitid and not repo_obj.is_empty: raise pagure.exceptions.PagureException( - 'No branch from which to pull or local PR reference were found' + "No branch from which to pull or local PR reference were found" ) diff_commits = [] @@ -1560,15 +1624,17 @@ def get_diff_info(repo_obj, orig_repo, branch_from, branch_to, prid=None): repo_obj = orig_repo if not repo_obj.is_empty 
and not orig_repo.is_empty: - _log.info('pagure.lib.get_diff_info: Pulling into a non-empty repo') + _log.info("pagure.lib.get_diff_info: Pulling into a non-empty repo") if branch: orig_commit = orig_repo[branch.get_object().hex] main_walker = orig_repo.walk( - orig_commit.oid.hex, pygit2.GIT_SORT_TIME) + orig_commit.oid.hex, pygit2.GIT_SORT_TIME + ) repo_commit = repo_obj[commitid] branch_walker = repo_obj.walk( - repo_commit.oid.hex, pygit2.GIT_SORT_TIME) + repo_commit.oid.hex, pygit2.GIT_SORT_TIME + ) main_commits = set() branch_commits = set() @@ -1606,50 +1672,51 @@ def get_diff_info(repo_obj, orig_repo, branch_from, branch_to, prid=None): break diff_commits = diff_commits[:i] - _log.info('Diff commits: %s', diff_commits) + _log.info("Diff commits: %s", diff_commits) if diff_commits: first_commit = repo_obj[diff_commits[-1].oid.hex] if len(first_commit.parents) > 0: diff = repo_obj.diff( repo_obj.revparse_single(first_commit.parents[0].oid.hex), - repo_obj.revparse_single(diff_commits[0].oid.hex) + repo_obj.revparse_single(diff_commits[0].oid.hex), ) elif first_commit.oid.hex == diff_commits[0].oid.hex: _log.info( - 'pagure.lib.get_diff_info: First commit is also the last ' - 'commit') + "pagure.lib.get_diff_info: First commit is also the last " + "commit" + ) diff = diff_commits[0].tree.diff_to_tree(swap=True) elif orig_repo.is_empty and not repo_obj.is_empty: - _log.info('pagure.lib.get_diff_info: Pulling into an empty repo') - if 'master' in repo_obj.listall_branches(): + _log.info("pagure.lib.get_diff_info: Pulling into an empty repo") + if "master" in repo_obj.listall_branches(): repo_commit = repo_obj[repo_obj.head.target] else: branch = repo_obj.lookup_branch(branch_from) repo_commit = branch.get_object() - for commit in repo_obj.walk( - repo_commit.oid.hex, pygit2.GIT_SORT_TIME): + for commit in repo_obj.walk(repo_commit.oid.hex, pygit2.GIT_SORT_TIME): diff_commits.append(commit) - _log.info('Diff commits: %s', diff_commits) + _log.info("Diff commits: 
%s", diff_commits) diff = repo_commit.tree.diff_to_tree(swap=True) else: raise pagure.exceptions.PagureException( - 'Fork is empty, there are no commits to create a pull ' - 'request with' + "Fork is empty, there are no commits to create a pull " + "request with" ) _log.info( - 'pagure.lib.get_diff_info: diff_commits length: %s', len(diff_commits)) - _log.info('pagure.lib.get_diff_info: original commit: %s', orig_commit) + "pagure.lib.get_diff_info: diff_commits length: %s", len(diff_commits) + ) + _log.info("pagure.lib.get_diff_info: original commit: %s", orig_commit) - return(diff, diff_commits, orig_commit) + return (diff, diff_commits, orig_commit) def diff_pull_request( - session, request, repo_obj, orig_repo, requestfolder, - with_diff=True): + session, request, repo_obj, orig_repo, requestfolder, with_diff=True +): """ Returns the diff and the list of commits between the two git repos mentionned in the given pull-request. @@ -1668,15 +1735,21 @@ def diff_pull_request( diff = None diff_commits = [] diff, diff_commits, _ = get_diff_info( - repo_obj, orig_repo, request.branch_from, request.branch, - prid=request.id) + repo_obj, + orig_repo, + request.branch_from, + request.branch, + prid=request.id, + ) - if request.status == 'Open' and diff_commits: + if request.status == "Open" and diff_commits: first_commit = repo_obj[diff_commits[-1].oid.hex] # Check if we can still rely on the merge_status commenttext = None - if request.commit_start != first_commit.oid.hex or\ - request.commit_stop != diff_commits[0].oid.hex: + if ( + request.commit_start != first_commit.oid.hex + or request.commit_stop != diff_commits[0].oid.hex + ): request.merge_status = None if request.commit_start: new_commits_count = 0 @@ -1685,17 +1758,25 @@ def diff_pull_request( if i.oid.hex == request.commit_stop: break new_commits_count = new_commits_count + 1 - commenttext = '%s * ``%s``\n' % ( - commenttext, i.message.strip().split('\n')[0]) + commenttext = "%s * ``%s``\n" % ( + commenttext, 
+ i.message.strip().split("\n")[0], + ) if new_commits_count == 1: commenttext = "**%d new commit added**\n\n%s" % ( - new_commits_count, commenttext) + new_commits_count, + commenttext, + ) else: commenttext = "**%d new commits added**\n\n%s" % ( - new_commits_count, commenttext) - if request.commit_start and \ - request.commit_start != first_commit.oid.hex: - commenttext = 'rebased onto %s' % first_commit.oid.hex + new_commits_count, + commenttext, + ) + if ( + request.commit_start + and request.commit_start != first_commit.oid.hex + ): + commenttext = "rebased onto %s" % first_commit.oid.hex request.commit_start = first_commit.oid.hex request.commit_stop = diff_commits[0].oid.hex session.add(request) @@ -1705,23 +1786,28 @@ def diff_pull_request( request.project.name, request.project.namespace, request.project.user.username if request.project.is_fork else None, - request.id + request.id, ) if commenttext: pagure.lib.add_pull_request_comment( - session, request, - commit=None, tree_id=None, filename=None, row=None, - comment='%s' % commenttext, + session, + request, + commit=None, + tree_id=None, + filename=None, + row=None, + comment="%s" % commenttext, user=request.user.username, requestfolder=requestfolder, - notify=False, notification=True + notify=False, + notification=True, ) session.commit() tasks.link_pr_to_ticket.delay(request.uid) pagure.lib.git.update_git( - request, repo=request.project, - repofolder=requestfolder) + request, repo=request.project, repofolder=requestfolder + ) if with_diff: return (diff_commits, diff) @@ -1734,18 +1820,21 @@ def update_pull_ref(request, repo): """ repopath = pagure.utils.get_repo_path(request.project) - reponame = '%s_%s' % (request.user.user, request.uid) + reponame = "%s_%s" % (request.user.user, request.uid) - _log.info( - ' Adding remote: %s pointing to: %s', reponame, repopath) + _log.info(" Adding remote: %s pointing to: %s", reponame, repopath) rc = RemoteCollection(repo) remote = rc.create(reponame, repopath) 
try: _log.info( - ' Pushing refs/heads/%s to refs/pull/%s/head', - request.branch_from, request.id) - refname = '+refs/heads/%s:refs/pull/%s/head' % ( - request.branch_from, request.id) + " Pushing refs/heads/%s to refs/pull/%s/head", + request.branch_from, + request.id, + ) + refname = "+refs/heads/%s:refs/pull/%s/head" % ( + request.branch_from, + request.id, + ) PagureRepo.push(remote, refname) finally: rc.delete(reponame) @@ -1761,17 +1850,17 @@ def get_git_tags(project, with_commits=False): if with_commits: tags = {} for tag in repo_obj.listall_references(): - if tag.startswith('refs/tags/'): + if tag.startswith("refs/tags/"): ref = repo_obj.lookup_reference(tag) if ref: com = ref.get_object() if com: - tags[tag.split('refs/tags/')[1]] = com.oid.hex + tags[tag.split("refs/tags/")[1]] = com.oid.hex else: tags = [ - tag.split('refs/tags/')[1] + tag.split("refs/tags/")[1] for tag in repo_obj.listall_references() - if tag.startswith('refs/tags/') + if tag.startswith("refs/tags/") ] return tags @@ -1785,7 +1874,7 @@ def get_git_tags_objects(project): repo_obj = PagureRepo(repopath) tags = {} for tag in repo_obj.listall_references(): - if 'refs/tags/' in tag and repo_obj.lookup_reference(tag): + if "refs/tags/" in tag and repo_obj.lookup_reference(tag): commit_time = None try: theobject = repo_obj[repo_obj.lookup_reference(tag).target] @@ -1807,12 +1896,14 @@ def get_git_tags_objects(project): "head_msg": None, "body_msg": None, } - if objecttype == 'tag': + if objecttype == "tag": head_msg, _, body_msg = tags[commit_time][ - "object"].message.partition('\n') - if body_msg.strip().endswith('\n-----END PGP SIGNATURE-----'): + "object" + ].message.partition("\n") + if body_msg.strip().endswith("\n-----END PGP SIGNATURE-----"): body_msg = body_msg.rsplit( - '-----BEGIN PGP SIGNATURE-----', 1)[0].strip() + "-----BEGIN PGP SIGNATURE-----", 1 + )[0].strip() tags[commit_time]["head_msg"] = head_msg tags[commit_time]["body_msg"] = body_msg sorted_tags = [] @@ -1844,21 
+1935,21 @@ def log_commits_to_db(session, project, commits, gitdir): user_id=author_obj.id if author_obj else None, user_email=commit.author.email if not author_obj else None, project_id=project.id, - log_type='committed', + log_type="committed", ref_id=commit.oid.hex, date=date_created.date(), - date_created=date_created.datetime + date_created=date_created.datetime, ) session.add(log) def reinit_git(project, repofolder): - ''' Delete and recreate a git folder + """ Delete and recreate a git folder :args project: SQLAlchemy object of the project :args folder: The folder which contains the git repos like TICKETS_FOLDER for tickets and REQUESTS_FOLDER for pull requests - ''' + """ repo_path = os.path.join(repofolder, project.path) if not os.path.exists(repo_path): @@ -1869,27 +1960,26 @@ def reinit_git(project, repofolder): # create it again pygit2.init_repository( - repo_path, bare=True, - mode=pygit2.C.GIT_REPOSITORY_INIT_SHARED_GROUP + repo_path, bare=True, mode=pygit2.C.GIT_REPOSITORY_INIT_SHARED_GROUP ) def get_git_branches(project): - ''' Return a list of branches for the project + """ Return a list of branches for the project :arg project: The Project instance to get the branches for - ''' + """ repo_path = pagure.utils.get_repo_path(project) repo_obj = pygit2.Repository(repo_path) return repo_obj.listall_branches() def new_git_branch(project, branch, from_branch=None, from_commit=None): - ''' Create a new git branch on the project + """ Create a new git branch on the project :arg project: The Project instance to get the branches for :arg from_branch: The branch to branch off of - ''' + """ if not from_branch and not from_commit: - from_branch = 'master' + from_branch = "master" repo_path = pagure.utils.get_repo_path(project) repo_obj = pygit2.Repository(repo_path) branches = repo_obj.listall_branches() @@ -1897,16 +1987,19 @@ def new_git_branch(project, branch, from_branch=None, from_commit=None): if from_branch: if from_branch not in branches: raise 
pagure.exceptions.PagureException( - 'The "{0}" branch does not exist'.format(from_branch)) + 'The "{0}" branch does not exist'.format(from_branch) + ) parent = get_branch_ref(repo_obj, from_branch).get_object() else: if from_commit not in repo_obj: raise pagure.exceptions.PagureException( - 'The commit "{0}" does not exist'.format(from_commit)) + 'The commit "{0}" does not exist'.format(from_commit) + ) parent = repo_obj[from_commit] if branch not in branches: repo_obj.create_branch(branch, parent) else: raise pagure.exceptions.PagureException( - 'The branch "{0}" already exists'.format(branch)) + 'The branch "{0}" already exists'.format(branch) + ) diff --git a/pagure/lib/git_auth.py b/pagure/lib/git_auth.py index 6b8d8a8..f997aa1 100644 --- a/pagure/lib/git_auth.py +++ b/pagure/lib/git_auth.py @@ -40,8 +40,8 @@ def get_git_auth_helper(backend, *args, **kwargs): :type backend: str """ - _log.info('Looking for backend: %s', backend) - points = pkg_resources.iter_entry_points('pagure.git_auth.helpers') + _log.info("Looking for backend: %s", backend) + points = pkg_resources.iter_entry_points("pagure.git_auth.helpers") classes = dict([(point.name, point) for point in points]) _log.debug("Found the following installed helpers %r" % classes) cls = classes[backend].load() @@ -102,7 +102,7 @@ def _read_file(filename): Returns None if it could not read the file for any reason. """ if not os.path.exists(filename): - _log.info('Could not find file: %s', filename) + _log.info("Could not find file: %s", filename) else: with open(filename) as stream: return stream.read() @@ -130,46 +130,49 @@ class Gitolite2Auth(GitAuthHelper): :return type: list """ - _log.debug(' Processing project: %s', project.fullname) + _log.debug(" Processing project: %s", project.fullname) # Check if the project or the pagure instance enforce the PR only # development model. 
- pr_only = project.settings.get('pull_request_access_only', False) + pr_only = project.settings.get("pull_request_access_only", False) - repos_to_create = ['repos'] - if pagure_config.get('ENABLE_DOCS', True): - repos_to_create.append('docs/') - if pagure_config.get('ENABLE_TICKETS', True): - repos_to_create.append('tickets/') + repos_to_create = ["repos"] + if pagure_config.get("ENABLE_DOCS", True): + repos_to_create.append("docs/") + if pagure_config.get("ENABLE_TICKETS", True): + repos_to_create.append("tickets/") # no setting yet to disable pull-requests - repos_to_create.append('requests/') + repos_to_create.append("requests/") for repos in repos_to_create: - if repos == 'repos': + if repos == "repos": # Do not grant access to project enforcing the PR model if pr_only or (global_pr_only and not project.is_fork): continue - repos = '' + repos = "" - config.append('repo %s%s' % (repos, project.fullname)) - if not project.private and repos not in ['tickets/', 'requests/']: - config.append(' R = @all') + config.append("repo %s%s" % (repos, project.fullname)) + if not project.private and repos not in ["tickets/", "requests/"]: + config.append(" R = @all") if project.committer_groups: - config.append(' RW+ = @%s' % ' @'.join( - [ - group.group_name - for group in project.committer_groups - ] - )) - config.append(' RW+ = %s' % project.user.user) + config.append( + " RW+ = @%s" + % " @".join( + [ + group.group_name + for group in project.committer_groups + ] + ) + ) + config.append(" RW+ = %s" % project.user.user) for user in project.committers: # This should never be the case (that the project.user # is in the committers) but better safe than sorry if user.user != project.user.user: - config.append(' RW+ = %s' % user.user) + config.append(" RW+ = %s" % user.user) for deploykey in project.deploykeys: - access = 'R' + access = "R" if deploykey.pushaccess: - access = 'RW+' + access = "RW+" # Note: the replace of / with _ is because gitolite # users can't contain a /. 
At first, this might look # like deploy keys in a project called @@ -180,17 +183,20 @@ class Gitolite2Auth(GitAuthHelper): # unique. The project name is solely there to make it # easier to determine what project created the deploykey # for admins. - config.append(' %s = deploykey_%s_%s' % - (access, - werkzeug.secure_filename(project.fullname), - deploykey.id)) - config.append('') + config.append( + " %s = deploykey_%s_%s" + % ( + access, + werkzeug.secure_filename(project.fullname), + deploykey.id, + ) + ) + config.append("") return config @classmethod - def _clean_current_config( - cls, current_config, project): + def _clean_current_config(cls, current_config, project): """ Remove the specified project from the current configuration file :arg current_config: the content of the current/actual gitolite @@ -201,8 +207,8 @@ class Gitolite2Auth(GitAuthHelper): """ keys = [ - 'repo %s%s' % (repos, project.fullname) - for repos in ['', 'docs/', 'tickets/', 'requests/'] + "repo %s%s" % (repos, project.fullname) + for repos in ["", "docs/", "tickets/", "requests/"] ] keep = True @@ -214,7 +220,7 @@ class Gitolite2Auth(GitAuthHelper): keep = False continue - if keep is False and line == '': + if keep is False and line == "": keep = True if keep: @@ -239,32 +245,32 @@ class Gitolite2Auth(GitAuthHelper): output = [ row.rstrip() for row in config - if not row.startswith('@') - and row.strip() != '# end of groups'] + if not row.startswith("@") and row.strip() != "# end of groups" + ] else: end_grp = None seen = False output = [] for idx, row in enumerate(config): - if end_grp is None and row.startswith('repo '): + if end_grp is None and row.startswith("repo "): end_grp = idx - if row.startswith('@%s ' % group.group_name): + if row.startswith("@%s " % group.group_name): seen = True - row = '@%s = %s' % ( + row = "@%s = %s" % ( group.group_name, - ' '.join(sorted( - [user.username for user in group.users]) - ) + " ".join( + sorted([user.username for user in group.users]) + ), ) 
output.append(row) if not seen: - row = '@%s = %s' % ( + row = "@%s = %s" % ( group.group_name, - ' '.join(sorted([user.username for user in group.users])) + " ".join(sorted([user.username for user in group.users])), ) - output.insert(end_grp, '') + output.insert(end_grp, "") output.insert(end_grp, row) return output @@ -278,9 +284,7 @@ class Gitolite2Auth(GitAuthHelper): :return type: list """ - query = session.query( - model.PagureGroup - ).order_by( + query = session.query(model.PagureGroup).order_by( model.PagureGroup.group_name ) @@ -306,39 +310,47 @@ class Gitolite2Auth(GitAuthHelper): :type postconf: None or str """ - _log.info('Reading in the current configuration: %s', configfile) + _log.info("Reading in the current configuration: %s", configfile) with open(configfile) as stream: current_config = [line.rstrip() for line in stream] - if current_config and current_config[-1] == '# end of body': + if current_config and current_config[-1] == "# end of body": current_config = current_config[:-1] if preconfig: idx = None for idx, row in enumerate(current_config): - if row.strip() == '# end of header': + if row.strip() == "# end of header": break if idx is not None: idx = idx + 1 - _log.info('Removing the first %s lines', idx) + _log.info("Removing the first %s lines", idx) current_config = current_config[idx:] if postconfig: idx = None for idx, row in enumerate(current_config): - if row.strip() == '# end of body': + if row.strip() == "# end of body": break if idx is not None: _log.info( - 'Keeping the first %s lines out of %s', - idx, len(current_config)) + "Keeping the first %s lines out of %s", + idx, + len(current_config), + ) current_config = current_config[:idx] return current_config @classmethod def write_gitolite_acls( - cls, session, configfile, project, preconf=None, postconf=None, - group=None): + cls, + session, + configfile, + project, + preconf=None, + postconf=None, + group=None, + ): """ Generate the configuration file for gitolite for all 
projects on the forge. @@ -367,83 +379,84 @@ class Gitolite2Auth(GitAuthHelper): :type group: None or pagure.lib.model.PagureGroup """ - _log.info('Write down the gitolite configuration file') + _log.info("Write down the gitolite configuration file") preconfig = None if preconf: _log.info( - 'Loading the file to include at the top of the generated one') + "Loading the file to include at the top of the generated one" + ) preconfig = _read_file(preconf) postconfig = None if postconf: _log.info( - 'Loading the file to include at the end of the generated one') + "Loading the file to include at the end of the generated one" + ) postconfig = _read_file(postconf) - global_pr_only = pagure_config.get('PR_ONLY', False) + global_pr_only = pagure_config.get("PR_ONLY", False) config = [] groups = {} if group is None: groups = cls._generate_groups_config(session) if project == -1 or not os.path.exists(configfile): - _log.info('Refreshing the configuration for all projects') + _log.info("Refreshing the configuration for all projects") query = session.query(model.Project).order_by(model.Project.id) for project in query.all(): - config = cls._process_project( - project, config, global_pr_only) + config = cls._process_project(project, config, global_pr_only) elif project: - _log.info('Refreshing the configuration for one project') + _log.info("Refreshing the configuration for one project") config = cls._process_project(project, config, global_pr_only) current_config = cls._get_current_config( - configfile, preconfig, postconfig) + configfile, preconfig, postconfig + ) - current_config = cls._clean_current_config( - current_config, project) + current_config = cls._clean_current_config(current_config, project) config = current_config + config if config: - _log.info('Cleaning the group %s from the loaded config', group) + _log.info("Cleaning the group %s from the loaded config", group) config = cls._clean_groups(config, group=group) else: current_config = cls._get_current_config( - 
configfile, preconfig, postconfig) + configfile, preconfig, postconfig + ) - _log.info( - 'Cleaning the group %s from the config on disk', group) + _log.info("Cleaning the group %s from the config on disk", group) config = cls._clean_groups(current_config, group=group) if not config: return - _log.info('Writing the configuration to: %s', configfile) - with open(configfile, 'w', encoding="utf-8") as stream: + _log.info("Writing the configuration to: %s", configfile) + with open(configfile, "w", encoding="utf-8") as stream: if preconfig: - stream.write(preconfig + '\n') - stream.write('# end of header\n') + stream.write(preconfig + "\n") + stream.write("# end of header\n") if groups: for key in sorted(groups): - stream.write('@%s = %s\n' % (key, ' '.join(groups[key]))) - stream.write('# end of groups\n\n') + stream.write("@%s = %s\n" % (key, " ".join(groups[key]))) + stream.write("# end of groups\n\n") prev = None for row in config: if prev is None: prev = row - if prev == row == '': + if prev == row == "": continue - stream.write(row + '\n') + stream.write(row + "\n") prev = row - stream.write('# end of body\n') + stream.write("# end of body\n") if postconfig: - stream.write(postconfig + '\n') + stream.write(postconfig + "\n") @classmethod def _remove_from_gitolite_cache(cls, cache_file, project): @@ -468,19 +481,18 @@ class Gitolite2Auth(GitAuthHelper): :arg project: the project to remove from gitolite cache file :type project: pagure.lib.model.Project """ - _log.info('Remove project from the gitolite cache file') + _log.info("Remove project from the gitolite cache file") cf = None try: # unfortunately dbm_gnu.open isn't a context manager in Python 2 :( - cf = dbm_gnu.open(cache_file, 'ws') - for repo in ['', 'docs/', 'tickets/', 'requests/']: + cf = dbm_gnu.open(cache_file, "ws") + for repo in ["", "docs/", "tickets/", "requests/"]: to_remove = repo + project.fullname - if to_remove.encode('ascii') in cf: + if to_remove.encode("ascii") in cf: del cf[to_remove] 
except dbm_gnu.error as e: - msg = ( - 'Failed to remove project from gitolite cache: {msg}' - .format(msg=e[1]) + msg = "Failed to remove project from gitolite cache: {msg}".format( + msg=e[1] ) raise pagure.exceptions.PagureException(msg) finally: @@ -499,87 +511,91 @@ class Gitolite2Auth(GitAuthHelper): :type project: pagure.lib.model.Project """ - _log.info('Remove project from the gitolite configuration file') + _log.info("Remove project from the gitolite configuration file") if not project: - raise RuntimeError('Project undefined') + raise RuntimeError("Project undefined") - configfile = pagure_config['GITOLITE_CONFIG'] - preconf = pagure_config.get('GITOLITE_PRE_CONFIG') or None - postconf = pagure_config.get('GITOLITE_POST_CONFIG') or None + configfile = pagure_config["GITOLITE_CONFIG"] + preconf = pagure_config.get("GITOLITE_PRE_CONFIG") or None + postconf = pagure_config.get("GITOLITE_POST_CONFIG") or None if not os.path.exists(configfile): _log.info( - 'Not configuration file found at: %s... bailing' % configfile) + "Not configuration file found at: %s... 
bailing" % configfile + ) return preconfig = None if preconf: _log.info( - 'Loading the file to include at the top of the generated one') + "Loading the file to include at the top of the generated one" + ) preconfig = _read_file(preconf) postconfig = None if postconf: _log.info( - 'Loading the file to include at the end of the generated one') + "Loading the file to include at the end of the generated one" + ) postconfig = _read_file(postconf) config = [] groups = cls._generate_groups_config(session) - _log.info('Removing the project from the configuration') + _log.info("Removing the project from the configuration") current_config = cls._get_current_config( - configfile, preconfig, postconfig) + configfile, preconfig, postconfig + ) - current_config = cls._clean_current_config( - current_config, project) + current_config = cls._clean_current_config(current_config, project) config = current_config + config if config: - _log.info('Cleaning the groups from the loaded config') + _log.info("Cleaning the groups from the loaded config") config = cls._clean_groups(config) else: current_config = cls._get_current_config( - configfile, preconfig, postconfig) + configfile, preconfig, postconfig + ) - _log.info( - 'Cleaning the groups from the config on disk') + _log.info("Cleaning the groups from the config on disk") config = cls._clean_groups(config) if not config: return - _log.info('Writing the configuration to: %s', configfile) - with open(configfile, 'w', encoding="utf-8") as stream: + _log.info("Writing the configuration to: %s", configfile) + with open(configfile, "w", encoding="utf-8") as stream: if preconfig: - stream.write(preconfig + '\n') - stream.write('# end of header\n') + stream.write(preconfig + "\n") + stream.write("# end of header\n") if groups: for key in sorted(groups): - stream.write('@%s = %s\n' % (key, ' '.join(groups[key]))) - stream.write('# end of groups\n\n') + stream.write("@%s = %s\n" % (key, " ".join(groups[key]))) + stream.write("# end of 
groups\n\n") prev = None for row in config: if prev is None: prev = row - if prev == row == '': + if prev == row == "": continue - stream.write(row + '\n') + stream.write(row + "\n") prev = row - stream.write('# end of body\n') + stream.write("# end of body\n") if postconfig: - stream.write(postconfig + '\n') + stream.write(postconfig + "\n") gl_cache_path = os.path.join( - os.path.dirname(configfile), '..', 'gl-conf.cache') + os.path.dirname(configfile), "..", "gl-conf.cache" + ) if os.path.exists(gl_cache_path): cls._remove_from_gitolite_cache(gl_cache_path, project) @@ -588,14 +604,14 @@ class Gitolite2Auth(GitAuthHelper): """ Return the gitolite command to run based on the info in the configuration file. """ - _log.info('Compiling the gitolite configuration') - gitolite_folder = pagure_config.get('GITOLITE_HOME', None) + _log.info("Compiling the gitolite configuration") + gitolite_folder = pagure_config.get("GITOLITE_HOME", None) if gitolite_folder: - cmd = 'GL_RC=%s GL_BINDIR=%s gl-compile-conf' % ( - pagure_config.get('GL_RC'), - pagure_config.get('GL_BINDIR') + cmd = "GL_RC=%s GL_BINDIR=%s gl-compile-conf" % ( + pagure_config.get("GL_RC"), + pagure_config.get("GL_BINDIR"), ) - _log.debug('Command: %s', cmd) + _log.debug("Command: %s", cmd) return cmd @classmethod @@ -605,12 +621,12 @@ class Gitolite2Auth(GitAuthHelper): """ repos = [] for l in lines: - if l.startswith('repo '): + if l.startswith("repo "): repos.append([l]) else: repos[-1].append(l) for i, repo_lines in enumerate(repos): - repos[i] = '\n'.join(repo_lines) + repos[i] = "\n".join(repo_lines) return repos @classmethod @@ -624,14 +640,16 @@ class Gitolite2Auth(GitAuthHelper): shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, - cwd=pagure_config['GITOLITE_HOME'] + cwd=pagure_config["GITOLITE_HOME"], ) stdout, stderr = proc.communicate() if proc.returncode != 0: error_msg = ( 'The command "{0}" failed with' - '\n\n out: "{1}\n\n err:"{2}"' - .format(cmd, stdout, stderr)) + '\n\n out: 
"{1}\n\n err:"{2}"'.format( + cmd, stdout, stderr + ) + ) raise pagure.exceptions.PagureException(error_msg) @classmethod @@ -651,41 +669,44 @@ class Gitolite2Auth(GitAuthHelper): :type group: None or pagure.lib.model.PagureGroup """ - _log.info('Refresh gitolite configuration') + _log.info("Refresh gitolite configuration") if project is not None or group is not None: - session = pagure.lib.create_session(pagure_config['DB_URL']) + session = pagure.lib.create_session(pagure_config["DB_URL"]) cls.write_gitolite_acls( session, project=project, - configfile=pagure_config['GITOLITE_CONFIG'], - preconf=pagure_config.get('GITOLITE_PRE_CONFIG') or None, - postconf=pagure_config.get('GITOLITE_POST_CONFIG') or None, + configfile=pagure_config["GITOLITE_CONFIG"], + preconf=pagure_config.get("GITOLITE_PRE_CONFIG") or None, + postconf=pagure_config.get("GITOLITE_POST_CONFIG") or None, group=group, ) session.remove() - if not group and project not in [None, -1] and \ - hasattr(cls, '_individual_repos_command') and \ - pagure_config.get('GITOLITE_HAS_COMPILE_1', False): + if ( + not group + and project not in [None, -1] + and hasattr(cls, "_individual_repos_command") + and pagure_config.get("GITOLITE_HAS_COMPILE_1", False) + ): # optimization for adding single repo - we don't want to recompile # whole gitolite.conf repos_config = [] cls._process_project( - project, - repos_config, - pagure_config.get('PR_ONLY', False) + project, repos_config, pagure_config.get("PR_ONLY", False) ) # repos_config will contain lines for repo itself as well as # docs, requests, tickets; compile-1 only accepts one repo, # so we have to run it separately for all of them for repo in cls._repos_from_lines(repos_config): - repopath = repo.splitlines()[0][len('repo '):].strip() - repotype = repopath.split('/')[0] - if (repotype == 'docs' and not - pagure_config.get('ENABLE_DOCS')) or \ - (repotype == 'tickets' and - not pagure_config.get('ENABLE_TICKETS')): + repopath = repo.splitlines()[0][len("repo ") 
:].strip() + repotype = repopath.split("/")[0] + if ( + repotype == "docs" and not pagure_config.get("ENABLE_DOCS") + ) or ( + repotype == "tickets" + and not pagure_config.get("ENABLE_TICKETS") + ): continue with tempfile.NamedTemporaryFile() as f: f.write(repo) @@ -702,13 +723,17 @@ class Gitolite3Auth(Gitolite2Auth): @staticmethod def _individual_repos_command(config_file): - _log.info('Compiling gitolite configuration %s for single repository', - config_file) - gitolite_folder = pagure_config.get('GITOLITE_HOME', None) + _log.info( + "Compiling gitolite configuration %s for single repository", + config_file, + ) + gitolite_folder = pagure_config.get("GITOLITE_HOME", None) if gitolite_folder: - cmd = 'HOME=%s gitolite compile-1 %s' % ( - gitolite_folder, config_file) - _log.debug('Command: %s', cmd) + cmd = "HOME=%s gitolite compile-1 %s" % ( + gitolite_folder, + config_file, + ) + _log.debug("Command: %s", cmd) return cmd @staticmethod @@ -716,12 +741,14 @@ class Gitolite3Auth(Gitolite2Auth): """ Return the gitolite command to run based on the info in the configuration file. """ - _log.info('Compiling the gitolite configuration') - gitolite_folder = pagure_config.get('GITOLITE_HOME', None) + _log.info("Compiling the gitolite configuration") + gitolite_folder = pagure_config.get("GITOLITE_HOME", None) if gitolite_folder: - cmd = 'HOME=%s gitolite compile && HOME=%s gitolite trigger '\ - 'POST_COMPILE' % (gitolite_folder, gitolite_folder) - _log.debug('Command: %s', cmd) + cmd = ( + "HOME=%s gitolite compile && HOME=%s gitolite trigger " + "POST_COMPILE" % (gitolite_folder, gitolite_folder) + ) + _log.debug("Command: %s", cmd) return cmd @classmethod @@ -730,11 +757,11 @@ class Gitolite3Auth(Gitolite2Auth): any other gitolite configuration. Most importantly, this will process SSH keys used by gitolite. 
""" - _log.info('Triggering gitolite POST_COMPILE') - gitolite_folder = pagure_config.get('GITOLITE_HOME', None) + _log.info("Triggering gitolite POST_COMPILE") + gitolite_folder = pagure_config.get("GITOLITE_HOME", None) if gitolite_folder: - cmd = 'HOME=%s gitolite trigger POST_COMPILE' % gitolite_folder - _log.debug('Command: %s', cmd) + cmd = "HOME=%s gitolite trigger POST_COMPILE" % gitolite_folder + _log.debug("Command: %s", cmd) cls._run_gitolite_cmd(cmd) @@ -752,8 +779,10 @@ class GitAuthTestHelper(GitAuthHelper): :type group: None or pagure.lib.model.PagureGroup """ - out = 'Called GitAuthTestHelper.generate_acls() ' \ - 'with args: project=%s, group=%s' % (project, group) + out = ( + "Called GitAuthTestHelper.generate_acls() " + "with args: project=%s, group=%s" % (project, group) + ) print(out) return out @@ -771,7 +800,9 @@ class GitAuthTestHelper(GitAuthHelper): """ - out = 'Called GitAuthTestHelper.remove_acls() ' \ - 'with args: project=%s' % (project.fullname) + out = ( + "Called GitAuthTestHelper.remove_acls() " + "with args: project=%s" % (project.fullname) + ) print(out) return out diff --git a/pagure/lib/lib_ci.py b/pagure/lib/lib_ci.py index 5bab661..cd40c14 100644 --- a/pagure/lib/lib_ci.py +++ b/pagure/lib/lib_ci.py @@ -26,84 +26,95 @@ from pagure.config import config as pagure_config _log = logging.getLogger(__name__) BUILD_STATS = { - 'SUCCESS': ('Build successful', pagure_config['FLAG_SUCCESS'], 100), - 'FAILURE': ('Build failed', pagure_config['FLAG_FAILURE'], 0), - 'ABORTED': ('Build aborted', 'error', 0), - 'BUILDING': ('Build in progress', pagure_config['FLAG_PENDING'], 0), + "SUCCESS": ("Build successful", pagure_config["FLAG_SUCCESS"], 100), + "FAILURE": ("Build failed", pagure_config["FLAG_FAILURE"], 0), + "ABORTED": ("Build aborted", "error", 0), + "BUILDING": ("Build in progress", pagure_config["FLAG_PENDING"], 0), } def process_jenkins_build( - session, project, build_id, requestfolder, iteration=0): + session, project, build_id, 
requestfolder, iteration=0 +): """ Gets the build info from jenkins and flags that particular pull-request. """ import jenkins + # Jenkins Base URL - _log.info('Querying jenkins at: %s', project.ci_hook.ci_url) + _log.info("Querying jenkins at: %s", project.ci_hook.ci_url) jenk = jenkins.Jenkins(project.ci_hook.ci_url) jenkins_name = project.ci_hook.ci_job _log.info( - 'Querying jenkins for project: %s, build: %s', - jenkins_name, build_id) + "Querying jenkins for project: %s, build: %s", jenkins_name, build_id + ) try: build_info = jenk.get_build_info(jenkins_name, build_id) except jenkins.NotFoundException: - _log.debug('Could not find build %s at: %s', build_id, jenkins_name) + _log.debug("Could not find build %s at: %s", build_id, jenkins_name) raise pagure.exceptions.PagureException( - 'Could not find build %s at: %s' % (build_id, jenkins_name)) + "Could not find build %s at: %s" % (build_id, jenkins_name) + ) - if build_info.get('building') is True: + if build_info.get("building") is True: if iteration < 5: - _log.info('Build is still going, let\'s wait a sec and try again') + _log.info("Build is still going, let's wait a sec and try again") time.sleep(1) return process_jenkins_build( - session, project, build_id, requestfolder, - iteration=iteration + 1) + session, + project, + build_id, + requestfolder, + iteration=iteration + 1, + ) _log.info( "We've been waiting for 5 seconds and the build is still " - "not finished, so let's keep going.") + "not finished, so let's keep going." 
+ ) - result = build_info.get('result') - if not result and build_info.get('building') is True: - result = 'BUILDING' + result = build_info.get("result") + if not result and build_info.get("building") is True: + result = "BUILDING" - _log.info('Result from jenkins: %s', result) - url = build_info['url'] - _log.info('URL from jenkins: %s', url) + _log.info("Result from jenkins: %s", result) + url = build_info["url"] + _log.info("URL from jenkins: %s", url) pr_id = None - for action in build_info['actions']: - for cause in action.get('causes', []): + for action in build_info["actions"]: + for cause in action.get("causes", []): try: - pr_id = int(cause['note']) + pr_id = int(cause["note"]) except (KeyError, ValueError): continue if not pr_id: - raise pagure.exceptions.NoCorrespondingPR( - 'No corresponding PR found') + raise pagure.exceptions.NoCorrespondingPR("No corresponding PR found") if not result or result not in BUILD_STATS: raise pagure.exceptions.PagureException( - 'Unknown build status: %s' % result) + "Unknown build status: %s" % result + ) request = pagure.lib.search_pull_requests( - session, project_id=project.id, requestid=pr_id) + session, project_id=project.id, requestid=pr_id + ) if not request: - raise pagure.exceptions.PagureException('Request not found') + raise pagure.exceptions.PagureException("Request not found") comment, state, percent = BUILD_STATS[result] # Adding build ID to the CI type username = "%s #%s" % (project.ci_hook.ci_type, build_id) if request.commit_stop: - comment += ' (commit: %s)' % (request.commit_stop[:8]) + comment += " (commit: %s)" % (request.commit_stop[:8]) uid = None for flag in request.flags: - if flag.status == pagure_config['FLAG_PENDING'] \ - and flag.username == username: + if ( + flag.status == pagure_config["FLAG_PENDING"] + and flag.username == username + ): uid = flag.uid break @@ -129,32 +140,21 @@ def trigger_jenkins_build(project_path, url, job, token, branch, cause): try: import jenkins except ImportError: 
- _log.error( - 'Pagure-CI: Failed to load the jenkins module, bailing') + _log.error("Pagure-CI: Failed to load the jenkins module, bailing") return - _log.info('Jenkins CI') + _log.info("Jenkins CI") - repo = '%s/%s' % ( - pagure_config['GIT_URL_GIT'].rstrip('/'), - project_path) + repo = "%s/%s" % (pagure_config["GIT_URL_GIT"].rstrip("/"), project_path) - data = { - 'cause': cause, - 'REPO': repo, - 'BRANCH': branch - } + data = {"cause": cause, "REPO": repo, "BRANCH": branch} server = jenkins.Jenkins(url) _log.info( - 'Pagure-CI: Triggering at: %s for: %s - data: %s', - url, job, data) + "Pagure-CI: Triggering at: %s for: %s - data: %s", url, job, data + ) try: - server.build_job( - name=job, - parameters=data, - token=token - ) - _log.info('Pagure-CI: Build triggered') + server.build_job(name=job, parameters=data, token=token) + _log.info("Pagure-CI: Build triggered") except Exception as err: - _log.info('Pagure-CI:An error occured: %s', err) + _log.info("Pagure-CI:An error occured: %s", err) diff --git a/pagure/lib/link.py b/pagure/lib/link.py index a983f4e..2d50bfb 100644 --- a/pagure/lib/link.py +++ b/pagure/lib/link.py @@ -18,32 +18,47 @@ import pagure.exceptions FIXES = [ - re.compile(r'(?:.*\s+)?fixe?[sd]?:?\s*?#(\d+)', re.I), + re.compile(r"(?:.*\s+)?fixe?[sd]?:?\s*?#(\d+)", re.I), re.compile( - r'(?:.*\s+)?fixe?[sd]?:?\s*?https?://.*/([a-zA-z0-9_][a-zA-Z0-9-_]*)' - '/(?:issue|pull-request)/(\d+)', re.I), - re.compile(r'(?:.*\s+)?merge?[sd]?:?\s*?#(\d+)', re.I), + r"(?:.*\s+)?fixe?[sd]?:?\s*?https?://.*/([a-zA-z0-9_][a-zA-Z0-9-_]*)" + "/(?:issue|pull-request)/(\d+)", + re.I, + ), + re.compile(r"(?:.*\s+)?merge?[sd]?:?\s*?#(\d+)", re.I), re.compile( - r'(?:.*\s+)?merge?[sd]?:?\s*?https?://.*/([a-zA-z0-9_][a-zA-Z0-9-_]*)' - '/(?:issue|pull-request)/(\d+)', re.I), - re.compile(r'(?:.*\s+)?close?[sd]?:?\s*?#(\d+)', re.I), + r"(?:.*\s+)?merge?[sd]?:?\s*?https?://.*/([a-zA-z0-9_][a-zA-Z0-9-_]*)" + "/(?:issue|pull-request)/(\d+)", + re.I, + ), + 
re.compile(r"(?:.*\s+)?close?[sd]?:?\s*?#(\d+)", re.I), re.compile( - r'(?:.*\s+)?close?[sd]?:?\s*?https?://.*/([a-zA-z0-9_][a-zA-Z0-9-_]*)' - '/(?:issue|pull-request)/(\d+)', re.I), + r"(?:.*\s+)?close?[sd]?:?\s*?https?://.*/([a-zA-z0-9_][a-zA-Z0-9-_]*)" + "/(?:issue|pull-request)/(\d+)", + re.I, + ), ] RELATES = [ - re.compile(r'(?:.*\s+)?relate[sd]?:?\s*?(?:to)?\s*?#(\d+)', re.I), - re.compile(r'(?:.*\s+)?relate[sd]?:?\s?#(\d+)', re.I), + re.compile(r"(?:.*\s+)?relate[sd]?:?\s*?(?:to)?\s*?#(\d+)", re.I), + re.compile(r"(?:.*\s+)?relate[sd]?:?\s?#(\d+)", re.I), re.compile( - r'(?:.*\s+)?relate[sd]?:?\s*?(?:to)?\s*?' - 'https?://.*/([a-zA-z0-9_][a-zA-Z0-9-_]*)/issue/(\d+)', re.I), + r"(?:.*\s+)?relate[sd]?:?\s*?(?:to)?\s*?" + "https?://.*/([a-zA-z0-9_][a-zA-Z0-9-_]*)/issue/(\d+)", + re.I, + ), ] -def get_relation(session, reponame, username, namespace, text, - reftype='relates', include_prs=False): - ''' For a given text, searches using regex if the text contains +def get_relation( + session, + reponame, + username, + namespace, + text, + reftype="relates", + include_prs=False, +): + """ For a given text, searches using regex if the text contains reference to another issue in this project or another one. Returns the list of issues referenced (possibly empty). @@ -56,15 +71,16 @@ def get_relation(session, reponame, username, namespace, text, example: ``this commits fixes #3``. 
- ''' + """ repo = pagure.lib.get_authorized_project( - session, reponame, user=username, namespace=namespace) + session, reponame, user=username, namespace=namespace + ) if not repo: return [] regex = RELATES - if reftype == 'fixes': + if reftype == "fixes": regex = FIXES relations = [] @@ -80,14 +96,18 @@ def get_relation(session, reponame, username, namespace, text, if relid: relation = pagure.lib.search_issues( - session, repo=repo, issueid=relid) + session, repo=repo, issueid=relid + ) if relation is None and include_prs: relation = pagure.lib.search_pull_requests( - session, project_id=repo.id, requestid=relid) + session, project_id=repo.id, requestid=relid + ) - if relation is None or relation.project.name not in [project, - repo.name]: + if relation is None or relation.project.name not in [ + project, + repo.name, + ]: continue if relation not in relations: diff --git a/pagure/lib/login.py b/pagure/lib/login.py index 516ae12..4d9b33e 100644 --- a/pagure/lib/login.py +++ b/pagure/lib/login.py @@ -32,18 +32,16 @@ def id_generator(size=15, chars=string.ascii_uppercase + string.digits): :arg chars: the list of characters that can be used in the idenfitier. """ - return ''.join(random.choice(chars) for x in range(size)) + return "".join(random.choice(chars) for x in range(size)) def get_session_by_visitkey(session, sessionid): - ''' Return a specified VisitUser via its session identifier (visit_key). + """ Return a specified VisitUser via its session identifier (visit_key). :arg session: the session with which to connect to the database. - ''' - query = session.query( - model.PagureUserVisit - ).filter( + """ + query = session.query(model.PagureUserVisit).filter( model.PagureUserVisit.visit_key == sessionid ) @@ -51,22 +49,23 @@ def get_session_by_visitkey(session, sessionid): def generate_hashed_value(password): - ''' Generate hash value for password. + """ Generate hash value for password. :arg password: password for which the hash has to be generated. 
:type password: str (Python 3) or unicode (Python 2) :return: a hashed string of characters. :rtype: an encoded string(bytes). - ''' + """ if not isinstance(password, six.text_type): raise ValueError("Password supplied is not unicode text") - return (b'$2$' + bcrypt.hashpw(password.encode('utf-8'), - bcrypt.gensalt())).decode('utf-8') + return ( + b"$2$" + bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt()) + ).decode("utf-8") def check_password(entered_password, user_password, seed=None): - ''' Version checking and returning the password + """ Version checking and returning the password :arg entered_password: password entered by the user. :type entered_password: str (Python 3) or unicode (Python 2) @@ -74,31 +73,34 @@ def check_password(entered_password, user_password, seed=None): :type user_password: bytes :return: a Boolean depending upon the entered_password, True if the password matches - ''' + """ if not isinstance(entered_password, six.text_type): raise ValueError("Entered password is not unicode text") if isinstance(user_password, six.text_type): user_password = user_password.encode("utf-8") - if not user_password.count(b'$') >= 2: + if not user_password.count(b"$") >= 2: raise pagure.exceptions.PagureException( - 'Password of unknown version found in the database' + "Password of unknown version found in the database" ) - _, version, user_password = user_password.split(b'$', 2) + _, version, user_password = user_password.split(b"$", 2) - if version == b'2': + if version == b"2": password = bcrypt.hashpw( - entered_password.encode('utf-8'), - user_password) - elif version == b'1': - password = '%s%s' % (entered_password, seed) - password = hashlib.sha512( - password.encode('utf-8')).hexdigest().encode("utf-8") + entered_password.encode("utf-8"), user_password + ) + elif version == b"1": + password = "%s%s" % (entered_password, seed) + password = ( + hashlib.sha512(password.encode("utf-8")) + .hexdigest() + .encode("utf-8") + ) else: raise 
pagure.exceptions.PagureException( - 'Password of unknown version found in the database' + "Password of unknown version found in the database" ) return constant_time.bytes_eq(password, user_password) diff --git a/pagure/lib/mimetype.py b/pagure/lib/mimetype.py index 4798a75..5f83a36 100644 --- a/pagure/lib/mimetype.py +++ b/pagure/lib/mimetype.py @@ -14,7 +14,7 @@ _log = logging.getLogger(__name__) def guess_type(filename, data): - ''' + """ Guess the type of a file based on its filename and data. Return value is a tuple (type, encoding) where type or encoding is None @@ -22,31 +22,32 @@ def guess_type(filename, data): :param filename: file name string :param data: file data string - ''' + """ mimetype = None encoding = None if filename: mimetype, encoding = mimetypes.guess_type(filename) if data: if not mimetype: - if not isinstance(data, six.text_type) and b'\0' in data: - mimetype = 'application/octet-stream' + if not isinstance(data, six.text_type) and b"\0" in data: + mimetype = "application/octet-stream" else: - mimetype = 'text/plain' + mimetype = "text/plain" - if mimetype.startswith('text/') and not encoding: + if mimetype.startswith("text/") and not encoding: try: encoding = pagure.lib.encoding_utils.guess_encoding( - ktc.to_bytes(data)) + ktc.to_bytes(data) + ) except pagure.exceptions.PagureException: # pragma: no cover # We cannot decode the file, so bail but warn the admins - _log.exception('File could not be decoded') + _log.exception("File could not be decoded") return mimetype, encoding def get_type_headers(filename, data): - ''' + """ Get the HTTP headers used for downloading or previewing the file. 
If the file is html, it will return headers which make browser start @@ -54,15 +55,15 @@ def get_type_headers(filename, data): :param filename: file name string :param data: file data string - ''' + """ mimetype, encoding = guess_type(filename, data) if not mimetype: return None - headers = {str('X-Content-Type-Options'): 'nosniff'} - if 'html' in mimetype or 'javascript' in mimetype or 'svg' in mimetype: - mimetype = 'application/octet-stream' - headers[str('Content-Disposition')] = 'attachment' + headers = {str("X-Content-Type-Options"): "nosniff"} + if "html" in mimetype or "javascript" in mimetype or "svg" in mimetype: + mimetype = "application/octet-stream" + headers[str("Content-Disposition")] = "attachment" if encoding: - mimetype += '; charset={encoding}'.format(encoding=encoding) - headers[str('Content-Type')] = mimetype + mimetype += "; charset={encoding}".format(encoding=encoding) + headers[str("Content-Type")] = mimetype return headers diff --git a/pagure/lib/model.py b/pagure/lib/model.py index 81a62ef..55d34f2 100644 --- a/pagure/lib/model.py +++ b/pagure/lib/model.py @@ -10,7 +10,7 @@ from __future__ import unicode_literals -__requires__ = ['SQLAlchemy >= 0.8', 'jinja2 >= 2.4'] # noqa +__requires__ = ["SQLAlchemy >= 0.8", "jinja2 >= 2.4"] # noqa import pkg_resources # noqa: E402,F401 import arrow @@ -37,7 +37,7 @@ from pagure.utils import is_true CONVENTION = { - "ix": 'ix_%(table_name)s_%(column_0_label)s', + "ix": "ix_%(table_name)s_%(column_0_label)s", # Checks are currently buggy and prevent us from naming them correctly # "ck": "ck_%(table_name)s_%(constraint_name)s", "fk": "%(table_name)s_%(column_0_name)s_fkey", @@ -71,22 +71,24 @@ def create_tables(db_url, alembic_ini=None, acls=None, debug=False): :return a session that can be used to query the database. 
""" - if db_url.startswith('postgres'): # pragma: no cover - engine = create_engine(db_url, echo=debug, client_encoding='utf8') + if db_url.startswith("postgres"): # pragma: no cover + engine = create_engine(db_url, echo=debug, client_encoding="utf8") else: # pragma: no cover engine = create_engine(db_url, echo=debug) from pagure.lib.plugins import get_plugin_tables + get_plugin_tables() BASE.metadata.create_all(engine) # engine.execute(collection_package_create_view(driver=engine.driver)) - if db_url.startswith('sqlite:'): + if db_url.startswith("sqlite:"): # Ignore the warning about con_record # pylint: disable=unused-argument def _fk_pragma_on_connect(dbapi_con, _): # pragma: no cover - ''' Tries to enforce referential constraints on sqlite. ''' - dbapi_con.execute('pragma foreign_keys=ON') - sa.event.listen(engine, 'connect', _fk_pragma_on_connect) + """ Tries to enforce referential constraints on sqlite. """ + dbapi_con.execute("pragma foreign_keys=ON") + + sa.event.listen(engine, "connect", _fk_pragma_on_connect) if alembic_ini is not None: # pragma: no cover # then, load the Alembic configuration and generate the @@ -96,6 +98,7 @@ def create_tables(db_url, alembic_ini=None, acls=None, debug=False): # pylint: disable=import-error from alembic.config import Config from alembic import command + alembic_cfg = Config(alembic_ini) command.stamp(alembic_cfg, "head") @@ -110,7 +113,7 @@ def create_default_status(session, acls=None): """ Insert the defaults status in the status tables. 
""" - statuses = ['Open', 'Closed'] + statuses = ["Open", "Closed"] for status in statuses: ticket_stat = StatusIssue(status=status) session.add(ticket_stat) @@ -118,48 +121,45 @@ def create_default_status(session, acls=None): session.commit() except SQLAlchemyError: # pragma: no cover session.rollback() - _log.debug('Status %s could not be added', ticket_stat) + _log.debug("Status %s could not be added", ticket_stat) - for status in ['Open', 'Closed', 'Merged']: + for status in ["Open", "Closed", "Merged"]: pr_stat = StatusPullRequest(status=status) session.add(pr_stat) try: session.commit() except SQLAlchemyError: # pragma: no cover session.rollback() - _log.debug('Status %s could not be added', pr_stat) + _log.debug("Status %s could not be added", pr_stat) - for grptype in ['user', 'admin']: + for grptype in ["user", "admin"]: grp_type = PagureGroupType(group_type=grptype) session.add(grp_type) try: session.commit() except SQLAlchemyError: # pragma: no cover session.rollback() - _log.debug('Type %s could not be added', grptype) + _log.debug("Type %s could not be added", grptype) acls = acls or {} keys = sorted(list(acls.keys())) for acl in keys: - item = ACL( - name=acl, - description=acls[acl] - ) + item = ACL(name=acl, description=acls[acl]) session.add(item) try: session.commit() except SQLAlchemyError: # pragma: no cover session.rollback() - _log.debug('ACL %s could not be added', acl) + _log.debug("ACL %s could not be added", acl) - for access in ['ticket', 'commit', 'admin']: + for access in ["ticket", "commit", "admin"]: access_obj = AccessLevels(access=access) session.add(access_obj) try: session.commit() except SQLAlchemyError: session.rollback() - _log.debug('Access level %s could not be added', access) + _log.debug("Access level %s could not be added", access) def arrow_ts(value): @@ -167,8 +167,9 @@ def arrow_ts(value): class AccessLevels(BASE): - ''' Different access levels a user/group can have for a project ''' - __tablename__ = 'access_levels' + 
""" Different access levels a user/group can have for a project """ + + __tablename__ = "access_levels" access = sa.Column(sa.String(255), primary_key=True) @@ -178,7 +179,8 @@ class StatusIssue(BASE): Table -- status_issue """ - __tablename__ = 'status_issue' + + __tablename__ = "status_issue" id = sa.Column(sa.Integer, primary_key=True) status = sa.Column(sa.String(255), nullable=False, unique=True) @@ -189,7 +191,8 @@ class StatusPullRequest(BASE): Table -- status_issue """ - __tablename__ = 'status_pull_requests' + + __tablename__ = "status_pull_requests" id = sa.Column(sa.Integer, primary_key=True) status = sa.Column(sa.String(255), nullable=False, unique=True) @@ -201,7 +204,7 @@ class User(BASE): Table -- users """ - __tablename__ = 'users' + __tablename__ = "users" id = sa.Column(sa.Integer, primary_key=True) user = sa.Column(sa.String(255), nullable=False, unique=True, index=True) fullname = sa.Column(sa.String(255), nullable=False, index=True) @@ -211,20 +214,17 @@ class User(BASE): password = sa.Column(sa.Text, nullable=True) token = sa.Column(sa.String(50), nullable=True) - created = sa.Column( - sa.DateTime, - nullable=False, - default=sa.func.now()) + created = sa.Column(sa.DateTime, nullable=False, default=sa.func.now()) updated_on = sa.Column( sa.DateTime, nullable=False, default=sa.func.now(), - onupdate=sa.func.now()) + onupdate=sa.func.now(), + ) refuse_sessions_before = sa.Column( - sa.DateTime, - nullable=True, - default=None) + sa.DateTime, nullable=True, default=None + ) # Relations group_objs = relation( @@ -238,14 +238,14 @@ class User(BASE): @property def username(self): - ''' Return the username. ''' + """ Return the username. """ return self.user @property def html_title(self): - ''' Return the ``fullname (username)`` or simply ``username`` to be + """ Return the ``fullname (username)`` or simply ``username`` to be used in the html templates. 
- ''' + """ if self.fullname: return "%s (%s)" % (self.fullname, self.user) else: @@ -253,7 +253,7 @@ class User(BASE): @property def groups(self): - ''' Return the list of Group.group_name in which the user is. ''' + """ Return the list of Group.group_name in which the user is. """ return [group.group_name for group in self.group_objs] @property @@ -261,9 +261,7 @@ class User(BASE): """ Return the dict stored as string in the database as an actual dict object. """ - default = { - 'cc_me_to_my_actions': False, - } + default = {"cc_me_to_my_actions": False} if self._settings: current = json.loads(self._settings) @@ -279,23 +277,20 @@ class User(BASE): @settings.setter def settings(self, settings): - ''' Ensures the settings are properly saved. ''' + """ Ensures the settings are properly saved. """ self._settings = json.dumps(settings) def __repr__(self): - ''' Return a string representation of this object. ''' + """ Return a string representation of this object. """ - return 'User: %s - name %s' % (self.id, self.user) + return "User: %s - name %s" % (self.id, self.user) def to_json(self, public=False): - ''' Return a representation of the User in a dictionary. ''' - output = { - 'name': self.user, - 'fullname': self.fullname, - } + """ Return a representation of the User in a dictionary. 
""" + output = {"name": self.user, "fullname": self.fullname} if not public: - output['default_email'] = self.default_email - output['emails'] = sorted([email.email for email in self.emails]) + output["default_email"] = self.default_email + output["emails"] = sorted([email.email for email in self.emails]) return output @@ -306,22 +301,23 @@ class UserEmail(BASE): Table -- user_emails """ - __tablename__ = 'user_emails' + __tablename__ = "user_emails" id = sa.Column(sa.Integer, primary_key=True) user_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'users.id', onupdate='CASCADE', - ), + sa.ForeignKey("users.id", onupdate="CASCADE"), nullable=False, - index=True) + index=True, + ) email = sa.Column(sa.String(255), nullable=False, unique=True) user = relation( - 'User', foreign_keys=[user_id], remote_side=[User.id], + "User", + foreign_keys=[user_id], + remote_side=[User.id], backref=backref( - 'emails', cascade="delete, delete-orphan", single_parent=True - ) + "emails", cascade="delete, delete-orphan", single_parent=True + ), ) @@ -331,29 +327,27 @@ class UserEmailPending(BASE): Table -- user_emails_pending """ - __tablename__ = 'user_emails_pending' + __tablename__ = "user_emails_pending" id = sa.Column(sa.Integer, primary_key=True) user_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'users.id', onupdate='CASCADE', - ), + sa.ForeignKey("users.id", onupdate="CASCADE"), nullable=False, - index=True) + index=True, + ) email = sa.Column(sa.String(255), nullable=False, unique=True) token = sa.Column(sa.String(50), nullable=True) - created = sa.Column( - sa.DateTime, - nullable=False, - default=sa.func.now()) + created = sa.Column(sa.DateTime, nullable=False, default=sa.func.now()) user = relation( - 'User', foreign_keys=[user_id], remote_side=[User.id], + "User", + foreign_keys=[user_id], + remote_side=[User.id], backref=backref( - 'emails_pending', + "emails_pending", cascade="delete, delete-orphan", - single_parent=True - ) + single_parent=True, + ), ) @@ -363,16 +357,15 
@@ class Project(BASE): Table -- projects """ - __tablename__ = 'projects' + __tablename__ = "projects" id = sa.Column(sa.Integer, primary_key=True) user_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'users.id', onupdate='CASCADE', - ), + sa.ForeignKey("users.id", onupdate="CASCADE"), nullable=False, - index=True) + index=True, + ) namespace = sa.Column(sa.String(255), nullable=True, index=True) name = sa.Column(sa.String(255), nullable=False, index=True) description = sa.Column(sa.Text, nullable=True) @@ -385,10 +378,9 @@ class Project(BASE): read_only = sa.Column(sa.Boolean, default=True, nullable=False) parent_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', - ), - nullable=True) + sa.ForeignKey("projects.id", onupdate="CASCADE"), + nullable=True, + ) _priorities = sa.Column(sa.Text, nullable=True) default_priority = sa.Column(sa.Text, nullable=True) _milestones = sa.Column(sa.Text, nullable=True) @@ -398,49 +390,54 @@ class Project(BASE): _notifications = sa.Column(sa.Text, nullable=True) _close_status = sa.Column(sa.Text, nullable=True) - date_created = sa.Column(sa.DateTime, nullable=False, - default=datetime.datetime.utcnow) - date_modified = sa.Column(sa.DateTime, nullable=False, - default=datetime.datetime.utcnow) + date_created = sa.Column( + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) + date_modified = sa.Column( + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) parent = relation( - 'Project', + "Project", remote_side=[id], backref=backref( - "forks", - order_by=str("(projects.c.date_created).desc()") - ) + "forks", order_by=str("(projects.c.date_created).desc()") + ), + ) + user = relation( + "User", + foreign_keys=[user_id], + remote_side=[User.id], + backref="projects", ) - user = relation('User', foreign_keys=[user_id], - remote_side=[User.id], backref='projects') private = sa.Column(sa.Boolean, nullable=False, default=False) users = relation( - 'User', + "User", 
secondary="user_projects", primaryjoin="projects.c.id==user_projects.c.project_id", secondaryjoin="users.c.id==user_projects.c.user_id", - backref='co_projects', + backref="co_projects", ) admins = relation( - 'User', + "User", secondary="user_projects", primaryjoin="projects.c.id==user_projects.c.project_id", secondaryjoin="and_(users.c.id==user_projects.c.user_id,\ user_projects.c.access=='admin')", - backref='co_projects_admins', - viewonly=True + backref="co_projects_admins", + viewonly=True, ) committers = relation( - 'User', + "User", secondary="user_projects", primaryjoin="projects.c.id==user_projects.c.project_id", secondaryjoin="and_(users.c.id==user_projects.c.user_id,\ or_(user_projects.c.access=='commit',\ user_projects.c.access=='admin'))", - backref='co_projects_committers', - viewonly=True + backref="co_projects_committers", + viewonly=True, ) groups = relation( @@ -450,8 +447,10 @@ class Project(BASE): secondaryjoin="pagure_group.c.id==projects_groups.c.group_id", backref=backref( "projects", - order_by=str("func.lower(projects.c.namespace).desc(), " - "func.lower(projects.c.name)") + order_by=str( + "func.lower(projects.c.namespace).desc(), " + "func.lower(projects.c.name)" + ), ), order_by="PagureGroup.group_name.asc()", ) @@ -464,7 +463,7 @@ class Project(BASE): projects_groups.c.access=='admin')", backref="projects_admin_groups", order_by="PagureGroup.group_name.asc()", - viewonly=True + viewonly=True, ) committer_groups = relation( @@ -476,53 +475,53 @@ class Project(BASE): projects_groups.c.access=='commit'))", backref="projects_committer_groups", order_by="PagureGroup.group_name.asc()", - viewonly=True + viewonly=True, ) @property def isa(self): - ''' A string to allow finding out that this is a project. ''' - return 'project' + """ A string to allow finding out that this is a project. 
""" + return "project" @property def mail_id(self): - ''' Return a unique representation of the project as string that + """ Return a unique representation of the project as string that can be used when sending emails. - ''' - return '%s-project-%s' % (self.fullname, self.id) + """ + return "%s-project-%s" % (self.fullname, self.id) @property def path(self): - ''' Return the name of the git repo on the filesystem. ''' - return '%s.git' % self.fullname + """ Return the name of the git repo on the filesystem. """ + return "%s.git" % self.fullname @property def fullname(self): - ''' Return the name of the git repo as user/project if it is a + """ Return the name of the git repo as user/project if it is a project forked, otherwise it returns the project name. - ''' + """ str_name = self.name if self.namespace: - str_name = '%s/%s' % (self.namespace, str_name) + str_name = "%s/%s" % (self.namespace, str_name) if self.is_fork: str_name = "forks/%s/%s" % (self.user.user, str_name) return str_name @property def url_path(self): - ''' Return the path at which this project can be accessed in the + """ Return the path at which this project can be accessed in the web UI. - ''' + """ path = self.name if self.namespace: - path = '%s/%s' % (self.namespace, path) + path = "%s/%s" % (self.namespace, path) if self.is_fork: path = "fork/%s/%s" % (self.user.user, path) return path @property def tags_text(self): - ''' Return the list of tags in a simple text form. ''' + """ Return the list of tags in a simple text form. """ return [tag.tag for tag in self.tags] @property @@ -531,22 +530,22 @@ class Project(BASE): dict object. 
""" default = { - 'issue_tracker': True, - 'project_documentation': False, - 'pull_requests': True, - 'Only_assignee_can_merge_pull-request': False, - 'Minimum_score_to_merge_pull-request': -1, - 'Web-hooks': None, - 'Enforce_signed-off_commits_in_pull-request': False, - 'always_merge': False, - 'issues_default_to_private': False, - 'fedmsg_notifications': True, - 'stomp_notifications': True, - 'pull_request_access_only': False, - 'roadmap_on_issues_page': False, - 'notify_on_pull-request_flag': False, - 'notify_on_commit_flag': False, - 'issue_tracker_read_only': False, + "issue_tracker": True, + "project_documentation": False, + "pull_requests": True, + "Only_assignee_can_merge_pull-request": False, + "Minimum_score_to_merge_pull-request": -1, + "Web-hooks": None, + "Enforce_signed-off_commits_in_pull-request": False, + "always_merge": False, + "issues_default_to_private": False, + "fedmsg_notifications": True, + "stomp_notifications": True, + "pull_request_access_only": False, + "roadmap_on_issues_page": False, + "notify_on_pull-request_flag": False, + "notify_on_commit_flag": False, + "issue_tracker_read_only": False, } if self._settings: @@ -555,7 +554,7 @@ class Project(BASE): for key in default: if key not in current: current[key] = default[key] - elif key == 'Minimum_score_to_merge_pull-request': + elif key == "Minimum_score_to_merge_pull-request": current[key] = int(current[key]) elif is_true(current[key]): current[key] = True @@ -565,7 +564,7 @@ class Project(BASE): @settings.setter def settings(self, settings): - ''' Ensures the settings are properly saved. ''' + """ Ensures the settings are properly saved. 
""" self._settings = json.dumps(settings) @property @@ -576,21 +575,25 @@ class Project(BASE): milestones = {} if self._milestones: + def _convert_to_dict(value): if isinstance(value, dict): return value else: - return {'date': value, 'active': True} - milestones = dict([ - (k, _convert_to_dict(v)) for k, v in - json.loads(self._milestones).items() - ]) + return {"date": value, "active": True} + + milestones = dict( + [ + (k, _convert_to_dict(v)) + for k, v in json.loads(self._milestones).items() + ] + ) return milestones @milestones.setter def milestones(self, milestones): - ''' Ensures the milestones are properly saved. ''' + """ Ensures the milestones are properly saved. """ self._milestones = json.dumps(milestones) @property @@ -606,7 +609,7 @@ class Project(BASE): @milestones_keys.setter def milestones_keys(self, milestones_keys): - ''' Ensures the milestones keys are properly saved. ''' + """ Ensures the milestones keys are properly saved. """ self._milestones_keys = json.dumps(milestones_keys) @property @@ -623,7 +626,7 @@ class Project(BASE): @priorities.setter def priorities(self, priorities): - ''' Ensures the priorities are properly saved. ''' + """ Ensures the priorities are properly saved. """ self._priorities = json.dumps(priorities) @property @@ -657,7 +660,7 @@ class Project(BASE): @notifications.setter def notifications(self, notifications): - ''' Ensures the notifications are properly saved. ''' + """ Ensures the notifications are properly saved. """ self._notifications = json.dumps(notifications) @property @@ -674,7 +677,7 @@ class Project(BASE): @reports.setter def reports(self, reports): - ''' Ensures the reports are properly saved. ''' + """ Ensures the reports are properly saved. """ self._reports = json.dumps(reports) @property @@ -691,43 +694,39 @@ class Project(BASE): @close_status.setter def close_status(self, close_status): - ''' Ensures the different close status are properly saved. 
''' + """ Ensures the different close status are properly saved. """ self._close_status = json.dumps(close_status) @property def open_requests(self): - ''' Returns the number of open pull-requests for this project. ''' - return BASE.metadata.bind.query( - PullRequest - ).filter( - self.id == PullRequest.project_id - ).filter( - PullRequest.status == 'Open' - ).count() + """ Returns the number of open pull-requests for this project. """ + return ( + BASE.metadata.bind.query(PullRequest) + .filter(self.id == PullRequest.project_id) + .filter(PullRequest.status == "Open") + .count() + ) @property def open_tickets(self): - ''' Returns the number of open tickets for this project. ''' - return BASE.metadata.bind.query( - Issue - ).filter( - self.id == Issue.project_id - ).filter( - Issue.status == 'Open' - ).count() + """ Returns the number of open tickets for this project. """ + return ( + BASE.metadata.bind.query(Issue) + .filter(self.id == Issue.project_id) + .filter(Issue.status == "Open") + .count() + ) @property def open_tickets_public(self): - ''' Returns the number of open tickets for this project. ''' - return BASE.metadata.bind.query( - Issue - ).filter( - self.id == Issue.project_id - ).filter( - Issue.status == 'Open' - ).filter( - Issue.private == False # noqa: E712 - ).count() + """ Returns the number of open tickets for this project. """ + return ( + BASE.metadata.bind.query(Issue) + .filter(self.id == Issue.project_id) + .filter(Issue.status == "Open") + .filter(Issue.private == False) # noqa: E712 + .count() + ) @property def contributors(self): @@ -754,7 +753,7 @@ class Project(BASE): return contributors def get_project_users(self, access, combine=True): - ''' Returns the list of users/groups of the project according + """ Returns the list of users/groups of the project according to the given access. 
:arg access: the access level to query for, can be: 'admin', @@ -772,34 +771,35 @@ class Project(BASE): it would have returned only the users with ticket access and would not have included committers and admins. :type combine: boolean - ''' + """ - if access not in ['admin', 'commit', 'ticket']: + if access not in ["admin", "commit", "ticket"]: raise pagure.exceptions.AccessLevelNotFound( - 'The access level does not exist') + "The access level does not exist" + ) if combine: - if access == 'admin': + if access == "admin": return self.admins - elif access == 'commit': + elif access == "commit": return self.committers - elif access == 'ticket': + elif access == "ticket": return self.users else: - if access == 'admin': + if access == "admin": return self.admins - elif access == 'commit': + elif access == "commit": committers = set(self.committers) admins = set(self.admins) return list(committers - admins) - elif access == 'ticket': + elif access == "ticket": committers = set(self.committers) admins = set(self.admins) users = set(self.users) return list(users - committers - admins) def get_project_groups(self, access, combine=True): - ''' Returns the list of groups of the project according + """ Returns the list of groups of the project according to the given access. :arg access: the access level to query for, can be: 'admin', @@ -817,27 +817,28 @@ class Project(BASE): it would have returned only the groups with ticket access and would not have included committer_groups and admin_groups. 
:type combine: boolean - ''' + """ - if access not in ['admin', 'commit', 'ticket']: + if access not in ["admin", "commit", "ticket"]: raise pagure.exceptions.AccessLevelNotFound( - 'The access level does not exist') + "The access level does not exist" + ) if combine: - if access == 'admin': + if access == "admin": return self.admin_groups - elif access == 'commit': + elif access == "commit": return self.committer_groups - elif access == 'ticket': + elif access == "ticket": return self.groups else: - if access == 'admin': + if access == "admin": return self.admin_groups - elif access == 'commit': + elif access == "commit": committers = set(self.committer_groups) admins = set(self.admin_groups) return list(committers - admins) - elif access == 'ticket': + elif access == "ticket": committers = set(self.committer_groups) admins = set(self.admin_groups) groups = set(self.groups) @@ -845,17 +846,17 @@ class Project(BASE): @property def access_users(self): - ''' Return a dictionary with all user access - ''' + """ Return a dictionary with all user access + """ return { - 'admin': self.get_project_users(access='admin', combine=False), - 'commit': self.get_project_users(access='commit', combine=False), - 'ticket': self.get_project_users(access='ticket', combine=False), + "admin": self.get_project_users(access="admin", combine=False), + "commit": self.get_project_users(access="commit", combine=False), + "ticket": self.get_project_users(access="ticket", combine=False), } @property def access_users_json(self): - json_access_users = {'owner': [self.user.username]} + json_access_users = {"owner": [self.user.username]} for access, users in self.access_users.items(): json_access_users[access] = [] for user in users: @@ -875,12 +876,12 @@ class Project(BASE): @property def access_groups(self): - ''' Return a dictionary with all group access - ''' + """ Return a dictionary with all group access + """ return { - 'admin': self.get_project_groups(access='admin', combine=False), - 
'commit': self.get_project_groups(access='commit', combine=False), - 'ticket': self.get_project_groups(access='ticket', combine=False), + "admin": self.get_project_groups(access="admin", combine=False), + "commit": self.get_project_groups(access="commit", combine=False), + "ticket": self.get_project_groups(access="ticket", combine=False), } def lock(self, ltype): @@ -889,34 +890,33 @@ class Project(BASE): return ProjectLocker(self, ltype) def to_json(self, public=False, api=False): - ''' Return a representation of the project as JSON. - ''' - custom_keys = [ - [key.name, key.key_type] for key in self.issue_keys - ] + """ Return a representation of the project as JSON. + """ + custom_keys = [[key.name, key.key_type] for key in self.issue_keys] output = { - 'id': self.id, - 'name': self.name, - 'fullname': self.fullname, - 'url_path': self.url_path, - 'description': self.description, - 'namespace': self.namespace, - 'parent': self.parent.to_json( - public=public, api=api) if self.parent else None, - 'date_created': arrow_ts(self.date_created), - 'date_modified': arrow_ts(self.date_modified), - 'user': self.user.to_json(public=public), - 'access_users': self.access_users_json, - 'access_groups': self.access_groups_json, - 'tags': self.tags_text, - 'priorities': self.priorities, - 'custom_keys': custom_keys, - 'close_status': self.close_status, - 'milestones': self.milestones, + "id": self.id, + "name": self.name, + "fullname": self.fullname, + "url_path": self.url_path, + "description": self.description, + "namespace": self.namespace, + "parent": self.parent.to_json(public=public, api=api) + if self.parent + else None, + "date_created": arrow_ts(self.date_created), + "date_modified": arrow_ts(self.date_modified), + "user": self.user.to_json(public=public), + "access_users": self.access_users_json, + "access_groups": self.access_groups_json, + "tags": self.tags_text, + "priorities": self.priorities, + "custom_keys": custom_keys, + "close_status": self.close_status, + 
"milestones": self.milestones, } if not api and not public: - output['settings'] = self.settings + output["settings"] = self.settings return output @@ -926,22 +926,22 @@ class ProjectLock(BASE): Table -- project_locks """ - __tablename__ = 'project_locks' + + __tablename__ = "project_locks" project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', ondelete='CASCADE' - ), + sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False, - primary_key=True) + primary_key=True, + ) lock_type = sa.Column( sa.Enum( - 'WORKER', 'WORKER_TICKET', 'WORKER_REQUEST', - name='lock_type_enum', + "WORKER", "WORKER_TICKET", "WORKER_REQUEST", name="lock_type_enum" ), nullable=False, - primary_key=True) + primary_key=True, + ) class ProjectLocker(object): @@ -950,6 +950,7 @@ class ProjectLocker(object): This is used as a context manager to make it very explicit when we unlock the project, and so that we unlock even if an exception occurs. """ + def __init__(self, project, ltype): self.session = None self.lock = None @@ -961,32 +962,29 @@ class ProjectLocker(object): self.session = create_session() - _log.info('Grabbing lock for %d', self.project_id) - query = self.session.query( - ProjectLock - ).filter( - ProjectLock.project_id == self.project_id - ).filter( - ProjectLock.lock_type == self.ltype - ).with_for_update(nowait=False, - read=False) + _log.info("Grabbing lock for %d", self.project_id) + query = ( + self.session.query(ProjectLock) + .filter(ProjectLock.project_id == self.project_id) + .filter(ProjectLock.lock_type == self.ltype) + .with_for_update(nowait=False, read=False) + ) try: self.lock = query.one() except Exception: - pl = ProjectLock( - project_id=self.project_id, lock_type=self.ltype) + pl = ProjectLock(project_id=self.project_id, lock_type=self.ltype) self.session.add(pl) self.session.commit() self.lock = query.one() assert self.lock is not None - _log.info('Got lock for %d: %s', self.project_id, self.lock) + 
_log.info("Got lock for %d: %s", self.project_id, self.lock) def __exit__(self, *exargs): - _log.info('Releasing lock for %d', self.project_id) + _log.info("Releasing lock for %d", self.project_id) self.session.remove() - _log.info('Released lock for %d', self.project_id) + _log.info("Released lock for %d", self.project_id) class ProjectUser(BASE): @@ -995,41 +993,38 @@ class ProjectUser(BASE): Table -- user_projects """ - __tablename__ = 'user_projects' - __table_args__ = ( - sa.UniqueConstraint('project_id', 'user_id', 'access'), - ) + __tablename__ = "user_projects" + __table_args__ = (sa.UniqueConstraint("project_id", "user_id", "access"),) id = sa.Column(sa.Integer, primary_key=True) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', - ), - nullable=False) + sa.ForeignKey("projects.id", onupdate="CASCADE"), + nullable=False, + ) user_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'users.id', onupdate='CASCADE', - ), + sa.ForeignKey("users.id", onupdate="CASCADE"), nullable=False, - index=True) + index=True, + ) access = sa.Column( sa.String(255), sa.ForeignKey( - 'access_levels.access', onupdate='CASCADE', ondelete='CASCADE', + "access_levels.access", onupdate="CASCADE", ondelete="CASCADE" ), - nullable=False) + nullable=False, + ) project = relation( - 'Project', remote_side=[Project.id], + "Project", + remote_side=[Project.id], backref=backref( - 'user_projects', cascade="delete,delete-orphan", - single_parent=True - ) + "user_projects", cascade="delete,delete-orphan", single_parent=True + ), ) - user = relation('User', backref='user_projects') + user = relation("User", backref="user_projects") class DeployKey(BASE): @@ -1038,40 +1033,39 @@ class DeployKey(BASE): Table -- deploykeys """ - __tablename__ = 'deploykeys' + __tablename__ = "deploykeys" id = sa.Column(sa.Integer, primary_key=True) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', ondelete='CASCADE', - )) + 
sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), + ) pushaccess = sa.Column(sa.Boolean, nullable=False, default=False) public_ssh_key = sa.Column(sa.Text, nullable=False) ssh_short_key = sa.Column(sa.Text, nullable=False) ssh_search_key = sa.Column(sa.Text, nullable=False) creator_user_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'users.id', onupdate='CASCADE', - ), + sa.ForeignKey("users.id", onupdate="CASCADE"), nullable=False, - index=True) - date_created = sa.Column(sa.DateTime, nullable=False, - default=datetime.datetime.utcnow) + index=True, + ) + date_created = sa.Column( + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) # Relations project = relation( - 'Project', foreign_keys=[project_id], remote_side=[Project.id], + "Project", + foreign_keys=[project_id], + remote_side=[Project.id], backref=backref( - 'deploykeys', cascade="delete, delete-orphan", - single_parent=True - ) + "deploykeys", cascade="delete, delete-orphan", single_parent=True + ), ) creator_user = relation( - 'User', - foreign_keys=[creator_user_id], - remote_side=[User.id]) + "User", foreign_keys=[creator_user_id], remote_side=[User.id] + ) class Issue(BASE): @@ -1080,65 +1074,64 @@ class Issue(BASE): Table -- issues """ - __tablename__ = 'issues' + __tablename__ = "issues" id = sa.Column(sa.Integer, primary_key=True) uid = sa.Column(sa.String(32), unique=True, nullable=False) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', - ), - primary_key=True) - title = sa.Column( - sa.Text, - nullable=False) - content = sa.Column( - sa.Text(), - nullable=False) + sa.ForeignKey("projects.id", onupdate="CASCADE"), + primary_key=True, + ) + title = sa.Column(sa.Text, nullable=False) + content = sa.Column(sa.Text(), nullable=False) user_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'users.id', onupdate='CASCADE', - ), + sa.ForeignKey("users.id", onupdate="CASCADE"), nullable=False, - index=True) + index=True, + ) 
assignee_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'users.id', onupdate='CASCADE', - ), + sa.ForeignKey("users.id", onupdate="CASCADE"), nullable=True, - index=True) + index=True, + ) status = sa.Column( sa.String(255), - sa.ForeignKey( - 'status_issue.status', onupdate='CASCADE', - ), - default='Open', - nullable=False) + sa.ForeignKey("status_issue.status", onupdate="CASCADE"), + default="Open", + nullable=False, + ) private = sa.Column(sa.Boolean, nullable=False, default=False) priority = sa.Column(sa.Integer, nullable=True, default=None) milestone = sa.Column(sa.String(255), nullable=True, default=None) close_status = sa.Column(sa.Text, nullable=True) - date_created = sa.Column(sa.DateTime, nullable=False, - default=datetime.datetime.utcnow) - last_updated = sa.Column(sa.DateTime, nullable=False, - default=datetime.datetime.utcnow) + date_created = sa.Column( + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) + last_updated = sa.Column( + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) closed_at = sa.Column(sa.DateTime, nullable=True) project = relation( - 'Project', foreign_keys=[project_id], remote_side=[Project.id], - backref=backref( - 'issues', cascade="delete, delete-orphan", - ), - single_parent=True + "Project", + foreign_keys=[project_id], + remote_side=[Project.id], + backref=backref("issues", cascade="delete, delete-orphan"), + single_parent=True, ) - user = relation('User', foreign_keys=[user_id], - remote_side=[User.id], backref='issues') - assignee = relation('User', foreign_keys=[assignee_id], - remote_side=[User.id], backref='assigned_issues') + user = relation( + "User", foreign_keys=[user_id], remote_side=[User.id], backref="issues" + ) + assignee = relation( + "User", + foreign_keys=[assignee_id], + remote_side=[User.id], + backref="assigned_issues", + ) parents = relation( "Issue", @@ -1153,25 +1146,28 @@ class Issue(BASE): secondary="tags_issues_colored", 
primaryjoin="issues.c.uid==tags_issues_colored.c.issue_uid", secondaryjoin="tags_issues_colored.c.tag_id==tags_colored.c.id", - viewonly=True + viewonly=True, ) def __repr__(self): - return 'Issue(%s, project:%s, user:%s, title:%s)' % ( - self.id, self.project.name, self.user.user, self.title + return "Issue(%s, project:%s, user:%s, title:%s)" % ( + self.id, + self.project.name, + self.user.user, + self.title, ) @property def attachments(self): - ''' Return a list of attachment tuples: (LINK, FILENAME, DISPLAY_NAME, - DATE) ''' + """ Return a list of attachment tuples: (LINK, FILENAME, DISPLAY_NAME, + DATE) """ def extract_info(text): - ''' Return a tuple containing the link, file name, and the - "display" file name from the markdown attachment link ''' - pattern_md = re.compile('^\[\!(.*)\]') - pattern_link = re.compile('\(([^)]+)\)') - pattern_file = re.compile('\[([^]]+)\]') + """ Return a tuple containing the link, file name, and the + "display" file name from the markdown attachment link """ + pattern_md = re.compile("^\[\!(.*)\]") + pattern_link = re.compile("\(([^)]+)\)") + pattern_file = re.compile("\[([^]]+)\]") try: md_link = pattern_md.search(text).group(1) @@ -1193,14 +1189,18 @@ class Issue(BASE): attachments = [] if self.content: # Check the initial issue description for attachments - lines = self.content.split('\n') + lines = self.content.split("\n") for line in lines: if line and line != "" and line.startswith("[!["): link, filename, display_name = extract_info(line) attachments.append( - (link, filename, display_name, - self.date_created.strftime('%Y-%m-%d %H:%M:%S'), - None) + ( + link, + filename, + display_name, + self.date_created.strftime("%Y-%m-%d %H:%M:%S"), + None, + ) ) if self.comments: # Check the comments for attachments @@ -1209,93 +1209,98 @@ class Issue(BASE): comment_text = comment.content else: comment_text = comment.comment - lines = comment_text.split('\n') + lines = comment_text.split("\n") for line in lines: if line and line != 
"" and line.startswith("[!["): link, filename, display_name = extract_info(line) attachments.append( - (link, filename, display_name, - comment.date_created.strftime( - '%Y-%m-%d %H:%M:%S'), - "%s" % comment.id) + ( + link, + filename, + display_name, + comment.date_created.strftime( + "%Y-%m-%d %H:%M:%S" + ), + "%s" % comment.id, + ) ) return attachments @property def isa(self): - ''' A string to allow finding out that this is an issue. ''' - return 'issue' + """ A string to allow finding out that this is an issue. """ + return "issue" @property def mail_id(self): - ''' Return a unique reprensetation of the issue as string that + """ Return a unique reprensetation of the issue as string that can be used when sending emails. - ''' - return '%s-ticket-%s' % (self.project.name, self.uid) + """ + return "%s-ticket-%s" % (self.project.name, self.uid) @property def tags_text(self): - ''' Return the list of tags in a simple text form. ''' + """ Return the list of tags in a simple text form. """ return [tag.tag for tag in self.tags] @property def depending_text(self): - ''' Return the list of issue this issue depends on in simple text. ''' + """ Return the list of issue this issue depends on in simple text. """ return [issue.id for issue in self.parents] @property def blocking_text(self): - ''' Return the list of issue this issue blocks on in simple text. ''' + """ Return the list of issue this issue blocks on in simple text. """ return [issue.id for issue in self.children] @property def user_comments(self): - ''' Return user comments only, filter it from notifications - ''' + """ Return user comments only, filter it from notifications + """ return [ - comment - for comment in self.comments - if not comment.notification] + comment for comment in self.comments if not comment.notification + ] @property def sortable_priority(self): - ''' Return an empty string if no priority is set allowing issues to - be sorted using this attribute. 
''' - return self.priority if self.priority else '' + """ Return an empty string if no priority is set allowing issues to + be sorted using this attribute. """ + return self.priority if self.priority else "" def to_json(self, public=False, with_comments=True, with_project=False): - ''' Returns a dictionary representation of the issue. + """ Returns a dictionary representation of the issue. - ''' + """ custom_fields = [ dict( name=field.key.name, key_type=field.key.key_type, value=field.value, - key_data=field.key.key_data + key_data=field.key.key_data, ) for field in self.other_fields ] output = { - 'id': self.id, - 'title': self.title, - 'content': self.content, - 'status': self.status, - 'close_status': self.close_status, - 'date_created': arrow_ts(self.date_created), - 'last_updated': arrow_ts(self.last_updated), - 'closed_at': arrow_ts(self.closed_at) if self.closed_at else None, - 'user': self.user.to_json(public=public), - 'private': self.private, - 'tags': self.tags_text, - 'depends': ["%s" % item for item in self.depending_text], - 'blocks': ["%s" % item for item in self.blocking_text], - 'assignee': self.assignee.to_json( - public=public) if self.assignee else None, - 'priority': self.priority, - 'milestone': self.milestone, - 'custom_fields': custom_fields, + "id": self.id, + "title": self.title, + "content": self.content, + "status": self.status, + "close_status": self.close_status, + "date_created": arrow_ts(self.date_created), + "last_updated": arrow_ts(self.last_updated), + "closed_at": arrow_ts(self.closed_at) if self.closed_at else None, + "user": self.user.to_json(public=public), + "private": self.private, + "tags": self.tags_text, + "depends": ["%s" % item for item in self.depending_text], + "blocks": ["%s" % item for item in self.blocking_text], + "assignee": self.assignee.to_json(public=public) + if self.assignee + else None, + "priority": self.priority, + "milestone": self.milestone, + "custom_fields": custom_fields, } comments = [] @@ -1303,10 
+1308,10 @@ class Issue(BASE): for comment in self.comments: comments.append(comment.to_json(public=public)) - output['comments'] = comments + output["comments"] = comments if with_project: - output['project'] = self.project.to_json(public=public, api=True) + output["project"] = self.project.to_json(public=public, api=True) return output @@ -1317,20 +1322,18 @@ class IssueToIssue(BASE): Table -- issue_to_issue """ - __tablename__ = 'issue_to_issue' + __tablename__ = "issue_to_issue" parent_issue_id = sa.Column( sa.String(32), - sa.ForeignKey( - 'issues.uid', ondelete='CASCADE', onupdate='CASCADE', - ), - primary_key=True) + sa.ForeignKey("issues.uid", ondelete="CASCADE", onupdate="CASCADE"), + primary_key=True, + ) child_issue_id = sa.Column( sa.String(32), - sa.ForeignKey( - 'issues.uid', ondelete='CASCADE', onupdate='CASCADE', - ), - primary_key=True) + sa.ForeignKey("issues.uid", ondelete="CASCADE", onupdate="CASCADE"), + primary_key=True, + ) class PrToIssue(BASE): @@ -1339,20 +1342,20 @@ class PrToIssue(BASE): Table -- pr_to_issue """ - __tablename__ = 'pr_to_issue' + __tablename__ = "pr_to_issue" pull_request_uid = sa.Column( sa.String(32), sa.ForeignKey( - 'pull_requests.uid', ondelete='CASCADE', onupdate='CASCADE', + "pull_requests.uid", ondelete="CASCADE", onupdate="CASCADE" ), - primary_key=True) + primary_key=True, + ) issue_uid = sa.Column( sa.String(32), - sa.ForeignKey( - 'issues.uid', ondelete='CASCADE', onupdate='CASCADE', - ), - primary_key=True) + sa.ForeignKey("issues.uid", ondelete="CASCADE", onupdate="CASCADE"), + primary_key=True, + ) class IssueComment(BASE): @@ -1361,105 +1364,105 @@ class IssueComment(BASE): Table -- issue_comments """ - __tablename__ = 'issue_comments' + __tablename__ = "issue_comments" id = sa.Column(sa.Integer, primary_key=True) issue_uid = sa.Column( sa.String(32), - sa.ForeignKey( - 'issues.uid', ondelete='CASCADE', onupdate='CASCADE', - ), - index=True) - comment = sa.Column( - sa.Text(), - nullable=False) + 
sa.ForeignKey("issues.uid", ondelete="CASCADE", onupdate="CASCADE"), + index=True, + ) + comment = sa.Column(sa.Text(), nullable=False) parent_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'issue_comments.id', onupdate='CASCADE', - ), - nullable=True) + sa.ForeignKey("issue_comments.id", onupdate="CASCADE"), + nullable=True, + ) user_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'users.id', onupdate='CASCADE', - ), + sa.ForeignKey("users.id", onupdate="CASCADE"), nullable=False, - index=True) + index=True, + ) notification = sa.Column(sa.Boolean, default=False, nullable=False) edited_on = sa.Column(sa.DateTime, nullable=True) editor_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'users.id', onupdate='CASCADE', - ), - nullable=True) + sa.ForeignKey("users.id", onupdate="CASCADE"), + nullable=True, + ) - date_created = sa.Column(sa.DateTime, nullable=False, - default=datetime.datetime.utcnow) + date_created = sa.Column( + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) issue = relation( - 'Issue', foreign_keys=[issue_uid], remote_side=[Issue.uid], + "Issue", + foreign_keys=[issue_uid], + remote_side=[Issue.uid], backref=backref( - 'comments', cascade="delete, delete-orphan", - order_by=str("IssueComment.date_created") + "comments", + cascade="delete, delete-orphan", + order_by=str("IssueComment.date_created"), ), ) user = relation( - 'User', + "User", foreign_keys=[user_id], remote_side=[User.id], - backref='comment_issues') - editor = relation( - 'User', - foreign_keys=[editor_id], - remote_side=[User.id]) + backref="comment_issues", + ) + editor = relation("User", foreign_keys=[editor_id], remote_side=[User.id]) _reactions = sa.Column(sa.Text, nullable=True) @property def mail_id(self): - ''' Return a unique reprensetation of the issue as string that + """ Return a unique reprensetation of the issue as string that can be used when sending emails. 
- ''' - return '%s-ticket-%s-%s' % ( - self.issue.project.name, self.issue.uid, self.id) + """ + return "%s-ticket-%s-%s" % ( + self.issue.project.name, + self.issue.uid, + self.id, + ) @property def parent(self): - ''' Return the parent, in this case the issue object. ''' + """ Return the parent, in this case the issue object. """ return self.issue @property def reactions(self): - ''' Return the reactions stored as a string in the database parsed as + """ Return the reactions stored as a string in the database parsed as an actual dict object. - ''' + """ if self._reactions: return json.loads(self._reactions) return {} @reactions.setter def reactions(self, reactions): - ''' Ensures that reactions are properly saved. ''' + """ Ensures that reactions are properly saved. """ self._reactions = json.dumps(reactions) def to_json(self, public=False): - ''' Returns a dictionary representation of the issue. + """ Returns a dictionary representation of the issue. - ''' + """ output = { - 'id': self.id, - 'comment': self.comment, - 'parent': self.parent_id, - 'date_created': arrow_ts(self.date_created), - 'user': self.user.to_json(public=public), - 'edited_on': arrow_ts(self.edited_on) if self.edited_on else None, - 'editor': self.editor.to_json(public=public) - if self.editor_id else None, - 'notification': self.notification, - 'reactions': self.reactions, + "id": self.id, + "comment": self.comment, + "parent": self.parent_id, + "date_created": arrow_ts(self.date_created), + "user": self.user.to_json(public=public), + "edited_on": arrow_ts(self.edited_on) if self.edited_on else None, + "editor": self.editor.to_json(public=public) + if self.editor_id + else None, + "notification": self.notification, + "reactions": self.reactions, } return output @@ -1470,37 +1473,37 @@ class IssueKeys(BASE): Table -- issue_keys """ - __tablename__ = 'issue_keys' + __tablename__ = "issue_keys" id = sa.Column(sa.Integer, primary_key=True) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 
'projects.id', onupdate='CASCADE', ondelete='CASCADE', - ), - nullable=False) + sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), + nullable=False, + ) name = sa.Column(sa.String(255), nullable=False) key_type = sa.Column(sa.String(255), nullable=False) key_data = sa.Column(sa.Text()) key_notify = sa.Column(sa.Boolean, default=False, nullable=False) - __table_args__ = (sa.UniqueConstraint('project_id', 'name'),) + __table_args__ = (sa.UniqueConstraint("project_id", "name"),) project = relation( - 'Project', foreign_keys=[project_id], remote_side=[Project.id], + "Project", + foreign_keys=[project_id], + remote_side=[Project.id], backref=backref( - 'issue_keys', cascade="delete, delete-orphan", - single_parent=True - ) + "issue_keys", cascade="delete, delete-orphan", single_parent=True + ), ) def __lt__(self, other): - if hasattr(other, 'name'): + if hasattr(other, "name"): return self.name.__lt__(other.name) @property def data(self): - ''' Return the list of items ''' + """ Return the list of items """ if self.key_data: return json.loads(self.key_data) else: @@ -1508,7 +1511,7 @@ class IssueKeys(BASE): @data.setter def data(self, data_obj): - ''' Store the list data in JSON. ''' + """ Store the list data in JSON. 
""" if data_obj is None: self.key_data = None else: @@ -1521,34 +1524,34 @@ class IssueValues(BASE): Table -- issue_values """ - __tablename__ = 'issue_values' + __tablename__ = "issue_values" key_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'issue_keys.id', ondelete='CASCADE', onupdate='CASCADE', - ), - primary_key=True) + sa.ForeignKey("issue_keys.id", ondelete="CASCADE", onupdate="CASCADE"), + primary_key=True, + ) issue_uid = sa.Column( sa.String(32), - sa.ForeignKey( - 'issues.uid', ondelete='CASCADE', onupdate='CASCADE', - ), - primary_key=True) + sa.ForeignKey("issues.uid", ondelete="CASCADE", onupdate="CASCADE"), + primary_key=True, + ) value = sa.Column(sa.Text(), nullable=False) issue = relation( - 'Issue', foreign_keys=[issue_uid], remote_side=[Issue.uid], + "Issue", + foreign_keys=[issue_uid], + remote_side=[Issue.uid], backref=backref( - 'other_fields', - cascade="delete, delete-orphan", - single_parent=True - ) + "other_fields", cascade="delete, delete-orphan", single_parent=True + ), ) key = relation( - 'IssueKeys', foreign_keys=[key_id], remote_side=[IssueKeys.id], - backref=backref('values', cascade="delete, delete-orphan") + "IssueKeys", + foreign_keys=[key_id], + remote_side=[IssueKeys.id], + backref=backref("values", cascade="delete, delete-orphan"), ) @@ -1558,11 +1561,12 @@ class Tag(BASE): Table -- tags """ - __tablename__ = 'tags' + __tablename__ = "tags" tag = sa.Column(sa.String(255), primary_key=True) - date_created = sa.Column(sa.DateTime, nullable=False, - default=datetime.datetime.utcnow) + date_created = sa.Column( + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) class TagIssue(BASE): @@ -1571,32 +1575,33 @@ class TagIssue(BASE): Table -- tags_issues """ - __tablename__ = 'tags_issues' + __tablename__ = "tags_issues" tag = sa.Column( sa.String(255), - sa.ForeignKey( - 'tags.tag', ondelete='CASCADE', onupdate='CASCADE', - ), - primary_key=True) + sa.ForeignKey("tags.tag", ondelete="CASCADE", onupdate="CASCADE"), + 
primary_key=True, + ) issue_uid = sa.Column( sa.String(32), - sa.ForeignKey( - 'issues.uid', ondelete='CASCADE', onupdate='CASCADE', - ), - primary_key=True) - date_created = sa.Column(sa.DateTime, nullable=False, - default=datetime.datetime.utcnow) + sa.ForeignKey("issues.uid", ondelete="CASCADE", onupdate="CASCADE"), + primary_key=True, + ) + date_created = sa.Column( + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) issue = relation( - 'Issue', foreign_keys=[issue_uid], remote_side=[Issue.uid], + "Issue", + foreign_keys=[issue_uid], + remote_side=[Issue.uid], backref=backref( - 'old_tags', cascade="delete, delete-orphan", single_parent=True - ) + "old_tags", cascade="delete, delete-orphan", single_parent=True + ), ) def __repr__(self): - return 'TagIssue(issue:%s, tag:%s)' % (self.issue.id, self.tag) + return "TagIssue(issue:%s, tag:%s)" % (self.issue.id, self.tag) class TagColored(BASE): @@ -1605,35 +1610,39 @@ class TagColored(BASE): Table -- tags_colored """ - __tablename__ = 'tags_colored' + __tablename__ = "tags_colored" id = sa.Column(sa.Integer, primary_key=True) tag = sa.Column(sa.String(255), nullable=False) tag_description = sa.Column(sa.String(255), default="") project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', ondelete='CASCADE', onupdate='CASCADE', - ), + sa.ForeignKey("projects.id", ondelete="CASCADE", onupdate="CASCADE"), nullable=False, ) tag_color = sa.Column(sa.String(25), default="DeepSkyBlue") - date_created = sa.Column(sa.DateTime, nullable=False, - default=datetime.datetime.utcnow) + date_created = sa.Column( + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) - __table_args__ = (sa.UniqueConstraint('project_id', 'tag'),) + __table_args__ = (sa.UniqueConstraint("project_id", "tag"),) project = relation( - 'Project', foreign_keys=[project_id], remote_side=[Project.id], + "Project", + foreign_keys=[project_id], + remote_side=[Project.id], backref=backref( - 'tags_colored', 
cascade="delete,delete-orphan", - single_parent=True - ) + "tags_colored", cascade="delete,delete-orphan", single_parent=True + ), ) def __repr__(self): - return 'TagColored(id: %s, tag:%s, tag_description:%s, color:%s)' % ( - self.id, self.tag, self.tag_description, self.tag_color) + return "TagColored(id: %s, tag:%s, tag_description:%s, color:%s)" % ( + self.id, + self.tag, + self.tag_description, + self.tag_color, + ) class TagIssueColored(BASE): @@ -1642,36 +1651,42 @@ class TagIssueColored(BASE): Table -- tags_issues_colored """ - __tablename__ = 'tags_issues_colored' + __tablename__ = "tags_issues_colored" tag_id = sa.Column( sa.Integer, sa.ForeignKey( - 'tags_colored.id', ondelete='CASCADE', onupdate='CASCADE', + "tags_colored.id", ondelete="CASCADE", onupdate="CASCADE" ), - primary_key=True) + primary_key=True, + ) issue_uid = sa.Column( sa.String(32), - sa.ForeignKey( - 'issues.uid', ondelete='CASCADE', onupdate='CASCADE', - ), - primary_key=True) - date_created = sa.Column(sa.DateTime, nullable=False, - default=datetime.datetime.utcnow) + sa.ForeignKey("issues.uid", ondelete="CASCADE", onupdate="CASCADE"), + primary_key=True, + ) + date_created = sa.Column( + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) issue = relation( - 'Issue', foreign_keys=[issue_uid], remote_side=[Issue.uid], + "Issue", + foreign_keys=[issue_uid], + remote_side=[Issue.uid], backref=backref( - 'tags_issues_colored', cascade="delete, delete-orphan" - ) + "tags_issues_colored", cascade="delete, delete-orphan" + ), ) tag = relation( - 'TagColored', foreign_keys=[tag_id], remote_side=[TagColored.id], + "TagColored", foreign_keys=[tag_id], remote_side=[TagColored.id] ) def __repr__(self): - return 'TagIssueColored(issue:%s, tag:%s, project:%s)' % ( - self.issue.id, self.tag.tag, self.tag.project.fullname) + return "TagIssueColored(issue:%s, tag:%s, project:%s)" % ( + self.issue.id, + self.tag.tag, + self.tag.project.fullname, + ) class TagProject(BASE): @@ -1680,33 
+1695,36 @@ class TagProject(BASE): Table -- tags_projects """ - __tablename__ = 'tags_projects' + __tablename__ = "tags_projects" tag = sa.Column( sa.String(255), - sa.ForeignKey( - 'tags.tag', ondelete='CASCADE', onupdate='CASCADE', - ), - primary_key=True) + sa.ForeignKey("tags.tag", ondelete="CASCADE", onupdate="CASCADE"), + primary_key=True, + ) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', ondelete='CASCADE', onupdate='CASCADE', - ), - primary_key=True) - date_created = sa.Column(sa.DateTime, nullable=False, - default=datetime.datetime.utcnow) + sa.ForeignKey("projects.id", ondelete="CASCADE", onupdate="CASCADE"), + primary_key=True, + ) + date_created = sa.Column( + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) project = relation( - 'Project', foreign_keys=[project_id], remote_side=[Project.id], + "Project", + foreign_keys=[project_id], + remote_side=[Project.id], backref=backref( - 'tags', cascade="delete, delete-orphan", single_parent=True - ) + "tags", cascade="delete, delete-orphan", single_parent=True + ), ) def __repr__(self): - return 'TagProject(project:%s, tag:%s)' % ( - self.project.fullname, self.tag) + return "TagProject(project:%s, tag:%s)" % ( + self.project.fullname, + self.tag, + ) class PullRequest(BASE): @@ -1715,117 +1733,116 @@ class PullRequest(BASE): Table -- pull_requests """ - __tablename__ = 'pull_requests' + __tablename__ = "pull_requests" id = sa.Column(sa.Integer, primary_key=True) uid = sa.Column(sa.String(32), unique=True, nullable=False) - title = sa.Column( - sa.Text, - nullable=False) + title = sa.Column(sa.Text, nullable=False) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', ondelete='CASCADE', onupdate='CASCADE', - ), - primary_key=True) - branch = sa.Column( - sa.Text(), - nullable=False) + sa.ForeignKey("projects.id", ondelete="CASCADE", onupdate="CASCADE"), + primary_key=True, + ) + branch = sa.Column(sa.Text(), nullable=False) project_id_from = sa.Column( 
sa.Integer, - sa.ForeignKey( - 'projects.id', ondelete='SET NULL', onupdate='CASCADE', - ), - nullable=True) - remote_git = sa.Column( - sa.Text(), - nullable=True) - branch_from = sa.Column( - sa.Text(), - nullable=False) - commit_start = sa.Column( - sa.Text(), - nullable=True) - commit_stop = sa.Column( - sa.Text(), - nullable=True) - initial_comment = sa.Column( - sa.Text(), - nullable=True) + sa.ForeignKey("projects.id", ondelete="SET NULL", onupdate="CASCADE"), + nullable=True, + ) + remote_git = sa.Column(sa.Text(), nullable=True) + branch_from = sa.Column(sa.Text(), nullable=False) + commit_start = sa.Column(sa.Text(), nullable=True) + commit_stop = sa.Column(sa.Text(), nullable=True) + initial_comment = sa.Column(sa.Text(), nullable=True) user_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'users.id', onupdate='CASCADE', - ), + sa.ForeignKey("users.id", onupdate="CASCADE"), nullable=False, - index=True) + index=True, + ) assignee_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'users.id', onupdate='CASCADE', - ), + sa.ForeignKey("users.id", onupdate="CASCADE"), nullable=True, - index=True) + index=True, + ) merge_status = sa.Column( sa.Enum( - 'NO_CHANGE', 'FFORWARD', 'CONFLICTS', 'MERGE', - name='merge_status_enum', + "NO_CHANGE", + "FFORWARD", + "CONFLICTS", + "MERGE", + name="merge_status_enum", ), - nullable=True) + nullable=True, + ) # While present this column isn't used anywhere yet private = sa.Column(sa.Boolean, nullable=False, default=False) status = sa.Column( sa.String(255), - sa.ForeignKey( - 'status_pull_requests.status', onupdate='CASCADE', - ), - default='Open', - nullable=False) + sa.ForeignKey("status_pull_requests.status", onupdate="CASCADE"), + default="Open", + nullable=False, + ) closed_by_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'users.id', onupdate='CASCADE', - ), - nullable=True) - closed_at = sa.Column( - sa.DateTime, - nullable=True) + sa.ForeignKey("users.id", onupdate="CASCADE"), + nullable=True, + ) + closed_at = 
sa.Column(sa.DateTime, nullable=True) - date_created = sa.Column(sa.DateTime, nullable=False, - default=datetime.datetime.utcnow) + date_created = sa.Column( + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) updated_on = sa.Column( + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) + + last_updated = sa.Column( sa.DateTime, nullable=False, - default=datetime.datetime.utcnow) - - last_updated = sa.Column(sa.DateTime, nullable=False, - default=datetime.datetime.utcnow, - onupdate=datetime.datetime.utcnow) + default=datetime.datetime.utcnow, + onupdate=datetime.datetime.utcnow, + ) project = relation( - 'Project', foreign_keys=[project_id], remote_side=[Project.id], - backref=backref( - 'requests', cascade="delete, delete-orphan", - ), - single_parent=True) + "Project", + foreign_keys=[project_id], + remote_side=[Project.id], + backref=backref("requests", cascade="delete, delete-orphan"), + single_parent=True, + ) project_from = relation( - 'Project', foreign_keys=[project_id_from], remote_side=[Project.id]) + "Project", foreign_keys=[project_id_from], remote_side=[Project.id] + ) - user = relation('User', foreign_keys=[user_id], - remote_side=[User.id], backref='pull_requests') - assignee = relation('User', foreign_keys=[assignee_id], - remote_side=[User.id], backref='assigned_requests') - closed_by = relation('User', foreign_keys=[closed_by_id], - remote_side=[User.id], backref='closed_requests') + user = relation( + "User", + foreign_keys=[user_id], + remote_side=[User.id], + backref="pull_requests", + ) + assignee = relation( + "User", + foreign_keys=[assignee_id], + remote_side=[User.id], + backref="assigned_requests", + ) + closed_by = relation( + "User", + foreign_keys=[closed_by_id], + remote_side=[User.id], + backref="closed_requests", + ) tags = relation( "TagColored", secondary="tags_pull_requests", primaryjoin="pull_requests.c.uid==tags_pull_requests.c.request_uid", 
secondaryjoin="tags_pull_requests.c.tag_id==tags_colored.c.id", - viewonly=True + viewonly=True, ) related_issues = relation( @@ -1834,59 +1851,59 @@ class PullRequest(BASE): primaryjoin="pull_requests.c.uid==pr_to_issue.c.pull_request_uid", secondaryjoin="pr_to_issue.c.issue_uid==issues.c.uid", backref=backref( - "related_prs", order_by=str("pull_requests.c.id.desc()")) + "related_prs", order_by=str("pull_requests.c.id.desc()") + ), ) def __repr__(self): - return 'PullRequest(%s, project:%s, user:%s, title:%s)' % ( - self.id, self.project.name, self.user.user, self.title + return "PullRequest(%s, project:%s, user:%s, title:%s)" % ( + self.id, + self.project.name, + self.user.user, + self.title, ) @property def isa(self): - ''' A string to allow finding out that this is an pull-request. ''' - return 'pull-request' + """ A string to allow finding out that this is an pull-request. """ + return "pull-request" @property def mail_id(self): - ''' Return a unique reprensetation of the issue as string that + """ Return a unique reprensetation of the issue as string that can be used when sending emails. - ''' - return '%s-pull-request-%s' % (self.project.name, self.uid) + """ + return "%s-pull-request-%s" % (self.project.name, self.uid) @property def tags_text(self): - ''' Return the list of tags in a simple text form. ''' + """ Return the list of tags in a simple text form. """ return [tag.tag for tag in self.tags] @property def discussion(self): - ''' Return the list of comments related to the pull-request itself, + """ Return the list of comments related to the pull-request itself, ie: not related to a specific commit. 
- ''' - return [ - comment - for comment in self.comments - if not comment.commit_id - ] + """ + return [comment for comment in self.comments if not comment.commit_id] @property def score(self): - ''' Return the review score of the pull-request by checking the + """ Return the review score of the pull-request by checking the number of +1, -1, :thumbup: and :thumbdown: in the comment of the pull-request. This includes only the main comments not the inline ones. An user can only give one +1 and one -1. - ''' + """ positive = set() negative = set() for comment in self.discussion: - for word in ['+1', ':thumbsup:']: + for word in ["+1", ":thumbsup:"]: if word in comment.comment: positive.add(comment.user_id) break - for word in ['-1', ':thumbsdown:']: + for word in ["-1", ":thumbsdown:"]: if word in comment.comment: negative.add(comment.user_id) break @@ -1895,48 +1912,50 @@ class PullRequest(BASE): @property def remote(self): - ''' Return whether the current PullRequest is a remote pull-request + """ Return whether the current PullRequest is a remote pull-request or not. - ''' + """ return self.remote_git is not None @property def user_comments(self): - ''' Return user comments only, filter it from notifications - ''' + """ Return user comments only, filter it from notifications + """ return [ - comment - for comment in self.comments - if not comment.notification] + comment for comment in self.comments if not comment.notification + ] def to_json(self, public=False, api=False, with_comments=True): - ''' Returns a dictionary representation of the pull-request. + """ Returns a dictionary representation of the pull-request. 
- ''' + """ output = { - 'id': self.id, - 'uid': self.uid, - 'title': self.title, - 'branch': self.branch, - 'project': self.project.to_json(public=public, api=api), - 'branch_from': self.branch_from, - 'repo_from': self.project_from.to_json( - public=public, api=api) if self.project_from else None, - 'remote_git': self.remote_git, - 'date_created': arrow_ts(self.date_created), - 'updated_on': arrow_ts(self.updated_on), - 'last_updated': arrow_ts(self.last_updated), - 'closed_at': arrow_ts(self.closed_at) if self.closed_at else None, - 'user': self.user.to_json(public=public), - 'assignee': self.assignee.to_json( - public=public) if self.assignee else None, - 'status': self.status, - 'commit_start': self.commit_start, - 'commit_stop': self.commit_stop, - 'closed_by': self.closed_by.to_json( - public=public) if self.closed_by else None, - 'initial_comment': self.initial_comment, - 'cached_merge_status': self.merge_status or 'unknown' + "id": self.id, + "uid": self.uid, + "title": self.title, + "branch": self.branch, + "project": self.project.to_json(public=public, api=api), + "branch_from": self.branch_from, + "repo_from": self.project_from.to_json(public=public, api=api) + if self.project_from + else None, + "remote_git": self.remote_git, + "date_created": arrow_ts(self.date_created), + "updated_on": arrow_ts(self.updated_on), + "last_updated": arrow_ts(self.last_updated), + "closed_at": arrow_ts(self.closed_at) if self.closed_at else None, + "user": self.user.to_json(public=public), + "assignee": self.assignee.to_json(public=public) + if self.assignee + else None, + "status": self.status, + "commit_start": self.commit_start, + "commit_stop": self.commit_stop, + "closed_by": self.closed_by.to_json(public=public) + if self.closed_by + else None, + "initial_comment": self.initial_comment, + "cached_merge_status": self.merge_status or "unknown", } comments = [] @@ -1944,7 +1963,7 @@ class PullRequest(BASE): for comment in self.comments: 
comments.append(comment.to_json(public=public)) - output['comments'] = comments + output["comments"] = comments return output @@ -1955,120 +1974,116 @@ class PullRequestComment(BASE): Table -- pull_request_comments """ - __tablename__ = 'pull_request_comments' + __tablename__ = "pull_request_comments" id = sa.Column(sa.Integer, primary_key=True) pull_request_uid = sa.Column( sa.String(32), sa.ForeignKey( - 'pull_requests.uid', ondelete='CASCADE', onupdate='CASCADE', + "pull_requests.uid", ondelete="CASCADE", onupdate="CASCADE" ), - nullable=False) - commit_id = sa.Column( - sa.String(40), - nullable=True, - index=True) + nullable=False, + ) + commit_id = sa.Column(sa.String(40), nullable=True, index=True) user_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'users.id', onupdate='CASCADE', - ), + sa.ForeignKey("users.id", onupdate="CASCADE"), nullable=False, - index=True) - filename = sa.Column( - sa.Text, - nullable=True) - line = sa.Column( - sa.Integer, - nullable=True) - tree_id = sa.Column( - sa.String(40), - nullable=True) - comment = sa.Column( - sa.Text(), - nullable=False) + index=True, + ) + filename = sa.Column(sa.Text, nullable=True) + line = sa.Column(sa.Integer, nullable=True) + tree_id = sa.Column(sa.String(40), nullable=True) + comment = sa.Column(sa.Text(), nullable=False) parent_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'pull_request_comments.id', onupdate='CASCADE', - ), - nullable=True) + sa.ForeignKey("pull_request_comments.id", onupdate="CASCADE"), + nullable=True, + ) notification = sa.Column(sa.Boolean, default=False, nullable=False) edited_on = sa.Column(sa.DateTime, nullable=True) editor_id = sa.Column( sa.Integer, - sa.ForeignKey('users.id', onupdate='CASCADE'), - nullable=True) + sa.ForeignKey("users.id", onupdate="CASCADE"), + nullable=True, + ) - date_created = sa.Column(sa.DateTime, nullable=False, - default=datetime.datetime.utcnow) + date_created = sa.Column( + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) - 
user = relation('User', foreign_keys=[user_id], - remote_side=[User.id], - backref=backref( - 'pull_request_comments', - order_by=str("PullRequestComment.date_created"))) + user = relation( + "User", + foreign_keys=[user_id], + remote_side=[User.id], + backref=backref( + "pull_request_comments", + order_by=str("PullRequestComment.date_created"), + ), + ) pull_request = relation( - 'PullRequest', + "PullRequest", backref=backref( - 'comments', + "comments", cascade="delete, delete-orphan", - order_by=str("PullRequestComment.date_created") + order_by=str("PullRequestComment.date_created"), ), foreign_keys=[pull_request_uid], - remote_side=[PullRequest.uid]) - editor = relation( - 'User', - foreign_keys=[editor_id], - remote_side=[User.id]) + remote_side=[PullRequest.uid], + ) + editor = relation("User", foreign_keys=[editor_id], remote_side=[User.id]) _reactions = sa.Column(sa.Text, nullable=True) @property def mail_id(self): - ''' Return a unique representation of the issue as string that + """ Return a unique representation of the issue as string that can be used when sending emails. - ''' - return '%s-pull-request-%s-%s' % ( - self.pull_request.project.name, self.pull_request.uid, self.id) + """ + return "%s-pull-request-%s-%s" % ( + self.pull_request.project.name, + self.pull_request.uid, + self.id, + ) @property def parent(self): - ''' Return the parent, in this case the pull_request object. ''' + """ Return the parent, in this case the pull_request object. """ return self.pull_request @property def reactions(self): - ''' Return the reactions stored as a string in the database parsed as + """ Return the reactions stored as a string in the database parsed as an actual dict object. - ''' + """ if self._reactions: return json.loads(self._reactions) return {} @reactions.setter def reactions(self, reactions): - ''' Ensures that reactions are properly saved. ''' + """ Ensures that reactions are properly saved. 
""" self._reactions = json.dumps(reactions) def to_json(self, public=False): - ''' Return a dict representation of the pull-request comment. ''' + """ Return a dict representation of the pull-request comment. """ return { - 'id': self.id, - 'commit': self.commit_id, - 'tree': self.tree_id, - 'filename': self.filename, - 'line': self.line, - 'comment': self.comment, - 'parent': self.parent_id, - 'date_created': arrow_ts(self.date_created), - 'user': self.user.to_json(public=public), - 'edited_on': arrow_ts(self.edited_on) if self.edited_on else None, - 'editor': self.editor.to_json(public=public) - if self.editor_id else None, - 'notification': self.notification, - 'reactions': self.reactions, + "id": self.id, + "commit": self.commit_id, + "tree": self.tree_id, + "filename": self.filename, + "line": self.line, + "comment": self.comment, + "parent": self.parent_id, + "date_created": arrow_ts(self.date_created), + "user": self.user.to_json(public=public), + "edited_on": arrow_ts(self.edited_on) if self.edited_on else None, + "editor": self.editor.to_json(public=public) + if self.editor_id + else None, + "notification": self.notification, + "reactions": self.reactions, } @@ -2078,84 +2093,78 @@ class PullRequestFlag(BASE): Table -- pull_request_flags """ - __tablename__ = 'pull_request_flags' + __tablename__ = "pull_request_flags" id = sa.Column(sa.Integer, primary_key=True) uid = sa.Column(sa.String(32), nullable=False) pull_request_uid = sa.Column( sa.String(32), sa.ForeignKey( - 'pull_requests.uid', ondelete='CASCADE', onupdate='CASCADE', + "pull_requests.uid", ondelete="CASCADE", onupdate="CASCADE" ), - nullable=False) + nullable=False, + ) token_id = sa.Column( - sa.String(64), sa.ForeignKey( - 'tokens.id', - ), - nullable=True) - status = sa.Column( - sa.String(32), - nullable=False) + sa.String(64), sa.ForeignKey("tokens.id"), nullable=True + ) + status = sa.Column(sa.String(32), nullable=False) user_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'users.id', 
onupdate='CASCADE', - ), + sa.ForeignKey("users.id", onupdate="CASCADE"), nullable=False, - index=True) - username = sa.Column( - sa.Text(), - nullable=False) - percent = sa.Column( - sa.Integer(), - nullable=True) - comment = sa.Column( - sa.Text(), - nullable=False) - url = sa.Column( - sa.Text(), - nullable=False) - - date_created = sa.Column(sa.DateTime, nullable=False, - default=datetime.datetime.utcnow) - - __table_args__ = (sa.UniqueConstraint('uid', 'pull_request_uid'),) - - user = relation('User', foreign_keys=[user_id], - remote_side=[User.id], - backref=backref( - 'pull_request_flags', - order_by=str("PullRequestFlag.date_created"))) + index=True, + ) + username = sa.Column(sa.Text(), nullable=False) + percent = sa.Column(sa.Integer(), nullable=True) + comment = sa.Column(sa.Text(), nullable=False) + url = sa.Column(sa.Text(), nullable=False) - pull_request = relation( - 'PullRequest', + date_created = sa.Column( + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) + + __table_args__ = (sa.UniqueConstraint("uid", "pull_request_uid"),) + + user = relation( + "User", + foreign_keys=[user_id], + remote_side=[User.id], backref=backref( - 'flags', cascade="delete, delete-orphan", + "pull_request_flags", order_by=str("PullRequestFlag.date_created") ), + ) + + pull_request = relation( + "PullRequest", + backref=backref("flags", cascade="delete, delete-orphan"), foreign_keys=[pull_request_uid], - remote_side=[PullRequest.uid]) + remote_side=[PullRequest.uid], + ) @property def mail_id(self): - ''' Return a unique representation of the flag as string that + """ Return a unique representation of the flag as string that can be used when sending emails. 
- ''' - return '%s-pull-request-%s-%s' % ( - self.pull_request.project.name, self.pull_request.uid, self.id) + """ + return "%s-pull-request-%s-%s" % ( + self.pull_request.project.name, + self.pull_request.uid, + self.id, + ) def to_json(self, public=False): - ''' Returns a dictionary representation of the pull-request. + """ Returns a dictionary representation of the pull-request. - ''' + """ output = { - 'pull_request_uid': self.pull_request_uid, - 'username': self.username, - 'percent': self.percent, - 'comment': self.comment, - 'status': self.status, - 'url': self.url, - 'date_created': arrow_ts(self.date_created), - 'user': self.user.to_json(public=public), + "pull_request_uid": self.pull_request_uid, + "username": self.username, + "percent": self.percent, + "comment": self.comment, + "status": self.status, + "url": self.url, + "date_created": arrow_ts(self.date_created), + "user": self.user.to_json(public=public), } return output @@ -2167,93 +2176,84 @@ class CommitFlag(BASE): Table -- commit_flags """ - __tablename__ = 'commit_flags' + __tablename__ = "commit_flags" id = sa.Column(sa.Integer, primary_key=True) commit_hash = sa.Column(sa.String(40), index=True, nullable=False) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', ondelete='CASCADE', - ), - nullable=False, index=True) + sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), + nullable=False, + index=True, + ) token_id = sa.Column( - sa.String(64), sa.ForeignKey( - 'tokens.id', - ), - nullable=False) + sa.String(64), sa.ForeignKey("tokens.id"), nullable=False + ) user_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'users.id', onupdate='CASCADE', - ), + sa.ForeignKey("users.id", onupdate="CASCADE"), nullable=False, - index=True) + index=True, + ) uid = sa.Column(sa.String(32), nullable=False) - status = sa.Column( - sa.String(32), - nullable=False) - username = sa.Column( - sa.Text(), - nullable=False) - percent = sa.Column( - sa.Integer(), - 
nullable=True) - comment = sa.Column( - sa.Text(), - nullable=False) - url = sa.Column( - sa.Text(), - nullable=False) - - date_created = sa.Column(sa.DateTime, nullable=False, - default=datetime.datetime.utcnow) - - __table_args__ = (sa.UniqueConstraint('commit_hash', 'uid'),) + status = sa.Column(sa.String(32), nullable=False) + username = sa.Column(sa.Text(), nullable=False) + percent = sa.Column(sa.Integer(), nullable=True) + comment = sa.Column(sa.Text(), nullable=False) + url = sa.Column(sa.Text(), nullable=False) + + date_created = sa.Column( + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) + + __table_args__ = (sa.UniqueConstraint("commit_hash", "uid"),) project = relation( - 'Project', foreign_keys=[project_id], remote_side=[Project.id], - backref=backref( - 'commit_flags', cascade="delete, delete-orphan", - ), - single_parent=True) + "Project", + foreign_keys=[project_id], + remote_side=[Project.id], + backref=backref("commit_flags", cascade="delete, delete-orphan"), + single_parent=True, + ) user = relation( - 'User', + "User", foreign_keys=[user_id], remote_side=[User.id], backref=backref( - 'commit_flags', - order_by=str("CommitFlag.date_created") - ) + "commit_flags", order_by=str("CommitFlag.date_created") + ), ) @property def isa(self): - ''' A string to allow finding out that this is a commit flag. ''' - return 'commit-flag' + """ A string to allow finding out that this is a commit flag. """ + return "commit-flag" @property def mail_id(self): - ''' Return a unique representation of the flag as string that + """ Return a unique representation of the flag as string that can be used when sending emails. - ''' - return '%s-commit-%s-%s' % ( - self.project.name, self.project.id, self.id) + """ + return "%s-commit-%s-%s" % ( + self.project.name, + self.project.id, + self.id, + ) def to_json(self, public=False): - ''' Returns a dictionary representation of the commit flag. + """ Returns a dictionary representation of the commit flag. 
- ''' + """ output = { - 'commit_hash': self.commit_hash, - 'username': self.username, - 'percent': self.percent, - 'comment': self.comment, - 'status': self.status, - 'url': self.url, - 'date_created': arrow_ts(self.date_created), - 'user': self.user.to_json(public=public), + "commit_hash": self.commit_hash, + "username": self.username, + "percent": self.percent, + "comment": self.comment, + "status": self.status, + "url": self.url, + "date_created": arrow_ts(self.date_created), + "user": self.user.to_json(public=public), } return output @@ -2265,38 +2265,41 @@ class TagPullRequest(BASE): Table -- tags_pull_requests """ - __tablename__ = 'tags_pull_requests' + __tablename__ = "tags_pull_requests" tag_id = sa.Column( sa.Integer, sa.ForeignKey( - 'tags_colored.id', ondelete='CASCADE', onupdate='CASCADE', + "tags_colored.id", ondelete="CASCADE", onupdate="CASCADE" ), - primary_key=True) + primary_key=True, + ) request_uid = sa.Column( sa.String(32), sa.ForeignKey( - 'pull_requests.uid', ondelete='CASCADE', onupdate='CASCADE', + "pull_requests.uid", ondelete="CASCADE", onupdate="CASCADE" ), - primary_key=True) - date_created = sa.Column(sa.DateTime, nullable=False, - default=datetime.datetime.utcnow) + primary_key=True, + ) + date_created = sa.Column( + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) pull_request = relation( - 'PullRequest', + "PullRequest", foreign_keys=[request_uid], remote_side=[PullRequest.uid], - backref=backref( - 'tags_pr_colored', cascade="delete, delete-orphan" - ) + backref=backref("tags_pr_colored", cascade="delete, delete-orphan"), ) tag = relation( - 'TagColored', foreign_keys=[tag_id], remote_side=[TagColored.id], + "TagColored", foreign_keys=[tag_id], remote_side=[TagColored.id] ) def __repr__(self): - return 'TagPullRequest(PR:%s, tag:%s)' % ( - self.pull_request.id, self.tag) + return "TagPullRequest(PR:%s, tag:%s)" % ( + self.pull_request.id, + self.tag, + ) class PagureGroupType(BASE): @@ -2306,16 +2309,17 @@ class 
PagureGroupType(BASE): # names like "Group", "Order" and "User" are reserved words in SQL # so we set the name to something safe for SQL - __tablename__ = 'pagure_group_type' + __tablename__ = "pagure_group_type" group_type = sa.Column(sa.String(16), primary_key=True) created = sa.Column( - sa.DateTime, nullable=False, default=datetime.datetime.utcnow) + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) def __repr__(self): - ''' Return a string representation of this object. ''' + """ Return a string representation of this object. """ - return 'GroupType: %s' % (self.group_type) + return "GroupType: %s" % (self.group_type) class PagureGroup(BASE): @@ -2325,7 +2329,7 @@ class PagureGroup(BASE): # names like "Group", "Order" and "User" are reserved words in SQL # so we set the name to something safe for SQL - __tablename__ = 'pagure_group' + __tablename__ = "pagure_group" id = sa.Column(sa.Integer, primary_key=True) group_name = sa.Column(sa.String(255), nullable=False, unique=True) @@ -2333,45 +2337,44 @@ class PagureGroup(BASE): description = sa.Column(sa.String(255), nullable=True) group_type = sa.Column( sa.String(16), - sa.ForeignKey( - 'pagure_group_type.group_type', - ), - default='user', - nullable=False) + sa.ForeignKey("pagure_group_type.group_type"), + default="user", + nullable=False, + ) user_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'users.id', onupdate='CASCADE', - ), + sa.ForeignKey("users.id", onupdate="CASCADE"), nullable=False, - index=True) + index=True, + ) created = sa.Column( - sa.DateTime, nullable=False, default=datetime.datetime.utcnow) + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) creator = relation( - 'User', + "User", foreign_keys=[user_id], remote_side=[User.id], - backref=backref('groups_created') + backref=backref("groups_created"), ) def __repr__(self): - ''' Return a string representation of this object. ''' + """ Return a string representation of this object. 
""" - return 'Group: %s - name %s' % (self.id, self.group_name) + return "Group: %s - name %s" % (self.id, self.group_name) def to_json(self, public=False): - ''' Returns a dictionary representation of the pull-request. + """ Returns a dictionary representation of the pull-request. - ''' + """ output = { - 'name': self.group_name, - 'display_name': self.display_name, - 'description': self.description, - 'group_type': self.group_type, - 'creator': self.creator.to_json(public=public), - 'date_created': arrow_ts(self.created), - 'members': [user.username for user in self.users] + "name": self.group_name, + "display_name": self.display_name, + "description": self.description, + "group_type": self.group_type, + "creator": self.creator.to_json(public=public), + "date_created": arrow_ts(self.created), + "members": [user.username for user in self.users], } return output @@ -2383,39 +2386,39 @@ class ProjectGroup(BASE): This allow linking projects to groups. """ - __tablename__ = 'projects_groups' + __tablename__ = "projects_groups" project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', ondelete='CASCADE', - ), - primary_key=True) + sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), + primary_key=True, + ) group_id = sa.Column( - sa.Integer, - sa.ForeignKey( - 'pagure_group.id', - ), - primary_key=True) + sa.Integer, sa.ForeignKey("pagure_group.id"), primary_key=True + ) access = sa.Column( sa.String(255), sa.ForeignKey( - 'access_levels.access', onupdate='CASCADE', ondelete='CASCADE', + "access_levels.access", onupdate="CASCADE", ondelete="CASCADE" ), - nullable=False) + nullable=False, + ) project = relation( - 'Project', foreign_keys=[project_id], remote_side=[Project.id], + "Project", + foreign_keys=[project_id], + remote_side=[Project.id], backref=backref( - 'projects_groups', cascade="delete,delete-orphan", - single_parent=True - ) + "projects_groups", + cascade="delete,delete-orphan", + single_parent=True, + ), ) 
- group = relation('PagureGroup', backref='projects_groups') + group = relation("PagureGroup", backref="projects_groups") # Constraints - __table_args__ = (sa.UniqueConstraint('project_id', 'group_id'),) + __table_args__ = (sa.UniqueConstraint("project_id", "group_id"),) class Star(BASE): @@ -2425,37 +2428,38 @@ class Star(BASE): Table -- star """ - __tablename__ = 'stargazers' + __tablename__ = "stargazers" __table_args__ = ( sa.UniqueConstraint( - 'project_id', - 'user_id', - name='uq_stargazers_project_id_user_id_key'), + "project_id", + "user_id", + name="uq_stargazers_project_id_user_id_key", + ), ) id = sa.Column(sa.Integer, primary_key=True) project_id = sa.Column( sa.Integer, - sa.ForeignKey('projects.id', onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False, index=True, ) user_id = sa.Column( sa.Integer, - sa.ForeignKey('users.id', onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False, ) user = relation( - 'User', foreign_keys=[user_id], remote_side=[User.id], - backref=backref( - 'stars', cascade="delete, delete-orphan" - ), + "User", + foreign_keys=[user_id], + remote_side=[User.id], + backref=backref("stars", cascade="delete, delete-orphan"), ) project = relation( - 'Project', foreign_keys=[project_id], remote_side=[Project.id], - backref=backref( - 'stargazers', cascade="delete, delete-orphan", - ), + "Project", + foreign_keys=[project_id], + remote_side=[Project.id], + backref=backref("stargazers", cascade="delete, delete-orphan"), ) @@ -2465,36 +2469,36 @@ class Watcher(BASE): Table -- watchers """ - __tablename__ = 'watchers' - __table_args__ = ( - sa.UniqueConstraint('project_id', 'user_id'), - ) + __tablename__ = "watchers" + __table_args__ = (sa.UniqueConstraint("project_id", "user_id"),) id = sa.Column(sa.Integer, primary_key=True) project_id = sa.Column( sa.Integer, - sa.ForeignKey('projects.id', 
onupdate='CASCADE'), - nullable=False) + sa.ForeignKey("projects.id", onupdate="CASCADE"), + nullable=False, + ) user_id = sa.Column( sa.Integer, - sa.ForeignKey('users.id', onupdate='CASCADE'), + sa.ForeignKey("users.id", onupdate="CASCADE"), nullable=False, - index=True) + index=True, + ) watch_issues = sa.Column(sa.Boolean, nullable=False, default=False) watch_commits = sa.Column(sa.Boolean, nullable=False, default=False) user = relation( - 'User', foreign_keys=[user_id], remote_side=[User.id], - backref=backref( - 'watchers', cascade="delete, delete-orphan" - ), + "User", + foreign_keys=[user_id], + remote_side=[User.id], + backref=backref("watchers", cascade="delete, delete-orphan"), ) project = relation( - 'Project', foreign_keys=[project_id], remote_side=[Project.id], - backref=backref( - 'watchers', cascade="delete, delete-orphan", - ), + "Project", + foreign_keys=[project_id], + remote_side=[Project.id], + backref=backref("watchers", cascade="delete, delete-orphan"), ) @@ -2503,134 +2507,133 @@ class PagureLog(BASE): """ Log user's actions. 
""" - __tablename__ = 'pagure_logs' + + __tablename__ = "pagure_logs" id = sa.Column(sa.Integer, primary_key=True) user_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'users.id', onupdate='CASCADE', ondelete='CASCADE', - ), - nullable=True, - index=True) - user_email = sa.Column( - sa.String(255), + sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=True, - index=True) + index=True, + ) + user_email = sa.Column(sa.String(255), nullable=True, index=True) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', ondelete='CASCADE', - ), + sa.ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=True, - index=True + index=True, ) issue_uid = sa.Column( sa.String(32), - sa.ForeignKey( - 'issues.uid', ondelete='CASCADE', onupdate='CASCADE', - ), + sa.ForeignKey("issues.uid", ondelete="CASCADE", onupdate="CASCADE"), nullable=True, - index=True + index=True, ) pull_request_uid = sa.Column( sa.String(32), sa.ForeignKey( - 'pull_requests.uid', ondelete='CASCADE', onupdate='CASCADE', + "pull_requests.uid", ondelete="CASCADE", onupdate="CASCADE" ), nullable=True, - index=True + index=True, ) log_type = sa.Column(sa.Text, nullable=False) ref_id = sa.Column(sa.Text, nullable=False) date = sa.Column( - sa.Date, - nullable=False, - default=datetime.datetime.utcnow, - index=True) + sa.Date, nullable=False, default=datetime.datetime.utcnow, index=True + ) date_created = sa.Column( sa.DateTime, nullable=False, default=datetime.datetime.utcnow, - index=True) + index=True, + ) user = relation( - 'User', foreign_keys=[user_id], remote_side=[User.id], - backref=backref('logs', cascade="delete, delete-orphan"), + "User", + foreign_keys=[user_id], + remote_side=[User.id], + backref=backref("logs", cascade="delete, delete-orphan"), ) project = relation( - 'Project', foreign_keys=[project_id], remote_side=[Project.id], - backref=backref('logs', cascade="delete, delete-orphan") + "Project", + 
foreign_keys=[project_id], + remote_side=[Project.id], + backref=backref("logs", cascade="delete, delete-orphan"), ) issue = relation( - 'Issue', foreign_keys=[issue_uid], remote_side=[Issue.uid], + "Issue", foreign_keys=[issue_uid], remote_side=[Issue.uid] ) pull_request = relation( - 'PullRequest', + "PullRequest", foreign_keys=[pull_request_uid], - remote_side=[PullRequest.uid] + remote_side=[PullRequest.uid], ) def to_json(self, public=False): - ''' Returns a dictionary representation of the issue. + """ Returns a dictionary representation of the issue. - ''' + """ output = { - 'id': self.id, - 'type': self.log_type, - 'ref_id': self.ref_id, - 'date': self.date.strftime('%Y-%m-%d'), - 'date_created': arrow_ts(self.date_created), - 'user': self.user.to_json(public=public), + "id": self.id, + "type": self.log_type, + "ref_id": self.ref_id, + "date": self.date.strftime("%Y-%m-%d"), + "date_created": arrow_ts(self.date_created), + "user": self.user.to_json(public=public), } return output def __str__(self): - ''' A string representation of this log entry. ''' - verb = '' - desc = '%(user)s %(verb)s %(project)s#%(obj_id)s' + """ A string representation of this log entry. 
""" + verb = "" + desc = "%(user)s %(verb)s %(project)s#%(obj_id)s" arg = { - 'user': self.user.user if self.user else self.user_email, - 'obj_id': self.ref_id, - 'project': self.project.fullname, + "user": self.user.user if self.user else self.user_email, + "obj_id": self.ref_id, + "project": self.project.fullname, } issue_verb = { - 'created': 'created issue', - 'commented': 'commented on issue', - 'close': 'closed issue', - 'open': 'opened issue', + "created": "created issue", + "commented": "commented on issue", + "close": "closed issue", + "open": "opened issue", } pr_verb = { - 'created': 'created PR', - 'commented': 'commented on PR', - 'closed': 'closed PR', - 'merged': 'merged PR' + "created": "created PR", + "commented": "commented on PR", + "closed": "closed PR", + "merged": "merged PR", } if self.issue and self.log_type in issue_verb: verb = issue_verb[self.log_type] elif self.pull_request and self.log_type in pr_verb: verb = pr_verb[self.log_type] - elif not self.pull_request and not self.issue \ - and self.log_type == 'created': - verb = 'created Project' - desc = '%(user)s %(verb)s %(project)s' - elif self.log_type == 'committed': - verb = 'committed on' - - arg['verb'] = verb + elif ( + not self.pull_request + and not self.issue + and self.log_type == "created" + ): + verb = "created Project" + desc = "%(user)s %(verb)s %(project)s" + elif self.log_type == "committed": + verb = "committed on" + + arg["verb"] = verb return desc % arg - def date_tz(self, tz='UTC'): - '''Returns the date (as a datetime.date()) of this log entry + def date_tz(self, tz="UTC"): + """Returns the date (as a datetime.date()) of this log entry in a specified timezone (Olson name as a string). Assumes that date_created is aware, or UTC. If tz isn't a valid timezone identifier for arrow, just returns the date component of date_created. 
- ''' + """ try: return arrow.get(self.date_created).to(tz).date() except arrow.parser.ParserError: @@ -2643,37 +2646,35 @@ class IssueWatcher(BASE): Table -- issue_watchers """ - __tablename__ = 'issue_watchers' - __table_args__ = ( - sa.UniqueConstraint('issue_uid', 'user_id'), - ) + __tablename__ = "issue_watchers" + __table_args__ = (sa.UniqueConstraint("issue_uid", "user_id"),) id = sa.Column(sa.Integer, primary_key=True) issue_uid = sa.Column( sa.String(32), - sa.ForeignKey('issues.uid', onupdate='CASCADE', ondelete='CASCADE'), - nullable=False) + sa.ForeignKey("issues.uid", onupdate="CASCADE", ondelete="CASCADE"), + nullable=False, + ) user_id = sa.Column( sa.Integer, - sa.ForeignKey('users.id', onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False, - index=True) - watch = sa.Column( - sa.Boolean, - nullable=False) + index=True, + ) + watch = sa.Column(sa.Boolean, nullable=False) user = relation( - 'User', foreign_keys=[user_id], remote_side=[User.id], - backref=backref( - 'issue_watched', cascade="delete, delete-orphan" - ), + "User", + foreign_keys=[user_id], + remote_side=[User.id], + backref=backref("issue_watched", cascade="delete, delete-orphan"), ) issue = relation( - 'Issue', foreign_keys=[issue_uid], remote_side=[Issue.uid], - backref=backref( - 'watchers', cascade="delete, delete-orphan", - ), + "Issue", + foreign_keys=[issue_uid], + remote_side=[Issue.uid], + backref=backref("watchers", cascade="delete, delete-orphan"), ) @@ -2683,40 +2684,37 @@ class PullRequestWatcher(BASE): Table -- pull_request_watchers """ - __tablename__ = 'pull_request_watchers' - __table_args__ = ( - sa.UniqueConstraint('pull_request_uid', 'user_id'), - ) + __tablename__ = "pull_request_watchers" + __table_args__ = (sa.UniqueConstraint("pull_request_uid", "user_id"),) id = sa.Column(sa.Integer, primary_key=True) pull_request_uid = sa.Column( sa.String(32), sa.ForeignKey( - 'pull_requests.uid', 
onupdate='CASCADE', ondelete='CASCADE'), - nullable=False) + "pull_requests.uid", onupdate="CASCADE", ondelete="CASCADE" + ), + nullable=False, + ) user_id = sa.Column( sa.Integer, - sa.ForeignKey('users.id', onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False, - index=True) - watch = sa.Column( - sa.Boolean, - nullable=False) + index=True, + ) + watch = sa.Column(sa.Boolean, nullable=False) user = relation( - 'User', foreign_keys=[user_id], remote_side=[User.id], - backref=backref( - 'pr_watched', cascade="delete, delete-orphan" - ), + "User", + foreign_keys=[user_id], + remote_side=[User.id], + backref=backref("pr_watched", cascade="delete, delete-orphan"), ) pull_request = relation( - 'PullRequest', + "PullRequest", foreign_keys=[pull_request_uid], remote_side=[PullRequest.uid], - backref=backref( - 'watchers', cascade="delete, delete-orphan", - ), + backref=backref("watchers", cascade="delete, delete-orphan"), ) @@ -2730,18 +2728,19 @@ class ACL(BASE): Table listing all the rights a token can be given """ - __tablename__ = 'acls' + __tablename__ = "acls" id = sa.Column(sa.Integer, primary_key=True) name = sa.Column(sa.String(32), unique=True, nullable=False) description = sa.Column(sa.Text(), nullable=False) created = sa.Column( - sa.DateTime, nullable=False, default=datetime.datetime.utcnow) + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) def __repr__(self): - ''' Return a string representation of this object. ''' + """ Return a string representation of this object. 
""" - return 'ACL: %s - name %s' % (self.id, self.name) + return "ACL: %s - name %s" % (self.id, self.name) class Token(BASE): @@ -2749,28 +2748,28 @@ class Token(BASE): Table listing all the tokens per user and per project """ - __tablename__ = 'tokens' + __tablename__ = "tokens" id = sa.Column(sa.String(64), primary_key=True) user_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'users.id', onupdate='CASCADE', - ), + sa.ForeignKey("users.id", onupdate="CASCADE"), nullable=False, - index=True) + index=True, + ) project_id = sa.Column( sa.Integer, - sa.ForeignKey( - 'projects.id', onupdate='CASCADE', - ), + sa.ForeignKey("projects.id", onupdate="CASCADE"), nullable=True, - index=True) + index=True, + ) description = sa.Column(sa.Text(), nullable=True) expiration = sa.Column( - sa.DateTime, nullable=False, default=datetime.datetime.utcnow) + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) created = sa.Column( - sa.DateTime, nullable=False, default=datetime.datetime.utcnow) + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) acls = relation( "ACL", @@ -2780,30 +2779,31 @@ class Token(BASE): ) user = relation( - 'User', + "User", backref=backref( - 'tokens', cascade="delete, delete-orphan", - order_by=str("Token.created") + "tokens", + cascade="delete, delete-orphan", + order_by=str("Token.created"), ), foreign_keys=[user_id], - remote_side=[User.id]) + remote_side=[User.id], + ) project = relation( - 'Project', - backref=backref( - 'tokens', cascade="delete, delete-orphan", - ), + "Project", + backref=backref("tokens", cascade="delete, delete-orphan"), foreign_keys=[project_id], - remote_side=[Project.id]) + remote_side=[Project.id], + ) def __repr__(self): - ''' Return a string representation of this object. ''' + """ Return a string representation of this object. 
""" - return 'Token: %s - name %s' % (self.id, self.expiration) + return "Token: %s - name %s" % (self.id, self.expiration) @property def expired(self): - ''' Returns whether a token has expired or not. ''' + """ Returns whether a token has expired or not. """ if datetime.datetime.utcnow().date() >= self.expiration.date(): return True else: @@ -2811,17 +2811,19 @@ class Token(BASE): @property def acls_list(self): - ''' Return a list containing the name of each ACLs this token has. - ''' + """ Return a list containing the name of each ACLs this token has. + """ return sorted(["%s" % acl.name for acl in self.acls]) @property def acls_list_pretty(self): - ''' + """ Return a list containing the description of each ACLs this token has. - ''' - return [acl.description for acl in sorted( - self.acls, key=operator.attrgetter('name'))] + """ + return [ + acl.description + for acl in sorted(self.acls, key=operator.attrgetter("name")) + ] class TokenAcl(BASE): @@ -2830,24 +2832,15 @@ class TokenAcl(BASE): This allow linking token to acl. """ - __tablename__ = 'tokens_acls' + __tablename__ = "tokens_acls" token_id = sa.Column( - sa.String(64), sa.ForeignKey( - 'tokens.id', - ), - primary_key=True) - acl_id = sa.Column( - sa.Integer, sa.ForeignKey( - 'acls.id', - ), - primary_key=True) + sa.String(64), sa.ForeignKey("tokens.id"), primary_key=True + ) + acl_id = sa.Column(sa.Integer, sa.ForeignKey("acls.id"), primary_key=True) # Constraints - __table_args__ = ( - sa.UniqueConstraint( - 'token_id', 'acl_id'), - ) + __table_args__ = (sa.UniqueConstraint("token_id", "acl_id"),) # ########################################################## @@ -2861,19 +2854,17 @@ class PagureUserVisit(BASE): Table storing the visits of the user. 
""" - __tablename__ = 'pagure_user_visit' + __tablename__ = "pagure_user_visit" id = sa.Column(sa.Integer, primary_key=True) - user_id = sa.Column( - sa.Integer, sa.ForeignKey( - 'users.id', - ), - nullable=False) + user_id = sa.Column(sa.Integer, sa.ForeignKey("users.id"), nullable=False) visit_key = sa.Column( - sa.String(40), nullable=False, unique=True, index=True) + sa.String(40), nullable=False, unique=True, index=True + ) user_ip = sa.Column(sa.String(50), nullable=False) created = sa.Column( - sa.DateTime, nullable=False, default=datetime.datetime.utcnow) + sa.DateTime, nullable=False, default=datetime.datetime.utcnow + ) expiry = sa.Column(sa.DateTime) @@ -2883,21 +2874,14 @@ class PagureUserGroup(BASE): This allow linking users to groups. """ - __tablename__ = 'pagure_user_group' + __tablename__ = "pagure_user_group" user_id = sa.Column( - sa.Integer, sa.ForeignKey( - 'users.id', - ), - primary_key=True) + sa.Integer, sa.ForeignKey("users.id"), primary_key=True + ) group_id = sa.Column( - sa.Integer, sa.ForeignKey( - 'pagure_group.id', - ), - primary_key=True) + sa.Integer, sa.ForeignKey("pagure_group.id"), primary_key=True + ) # Constraints - __table_args__ = ( - sa.UniqueConstraint( - 'user_id', 'group_id'), - ) + __table_args__ = (sa.UniqueConstraint("user_id", "group_id"),) diff --git a/pagure/lib/notify.py b/pagure/lib/notify.py index d6b0945..cec176f 100644 --- a/pagure/lib/notify.py +++ b/pagure/lib/notify.py @@ -36,36 +36,37 @@ from pagure.config import config as pagure_config _log = logging.getLogger(__name__) -REPLY_MSG = 'To reply, visit the link below' -if pagure_config['EVENTSOURCE_SOURCE']: - REPLY_MSG += ' or just reply to this email' +REPLY_MSG = "To reply, visit the link below" +if pagure_config["EVENTSOURCE_SOURCE"]: + REPLY_MSG += " or just reply to this email" def fedmsg_publish(*args, **kwargs): # pragma: no cover - ''' Try to publish a message on the fedmsg bus. 
''' - if not pagure_config.get('FEDMSG_NOTIFICATIONS', True): + """ Try to publish a message on the fedmsg bus. """ + if not pagure_config.get("FEDMSG_NOTIFICATIONS", True): return # We catch Exception if we want :-p # pylint: disable=broad-except # Ignore message about fedmsg import # pylint: disable=import-error - kwargs['modname'] = 'pagure' - kwargs['cert_prefix'] = 'pagure' - kwargs['active'] = True + kwargs["modname"] = "pagure" + kwargs["cert_prefix"] = "pagure" + kwargs["active"] = True try: import fedmsg + fedmsg.publish(*args, **kwargs) except Exception: - _log.exception('Error sending fedmsg') + _log.exception("Error sending fedmsg") stomp_conn = None def stomp_publish(topic, message): - ''' Try to publish a message on a Stomp-compliant message bus. ''' - if not pagure_config.get('STOMP_NOTIFICATIONS', True): + """ Try to publish a message on a Stomp-compliant message bus. """ + if not pagure_config.get("STOMP_NOTIFICATIONS", True): return # We catch Exception if we want :-p # pylint: disable=broad-except @@ -73,42 +74,47 @@ def stomp_publish(topic, message): # pylint: disable=import-error try: import stomp + global stomp_conn if not stomp_conn or not stomp_conn.is_connected(): - stomp_conn = stomp.Connection12(pagure_config['STOMP_BROKERS']) - if pagure_config.get('STOMP_SSL'): + stomp_conn = stomp.Connection12(pagure_config["STOMP_BROKERS"]) + if pagure_config.get("STOMP_SSL"): stomp_conn.set_ssl( - pagure_config['STOMP_BROKERS'], - key_file=pagure_config.get('STOMP_KEY_FILE'), - cert_file=pagure_config.get('STOMP_CERT_FILE'), - password=pagure_config.get('STOMP_CREDS_PASSWORD'), + pagure_config["STOMP_BROKERS"], + key_file=pagure_config.get("STOMP_KEY_FILE"), + cert_file=pagure_config.get("STOMP_CERT_FILE"), + password=pagure_config.get("STOMP_CREDS_PASSWORD"), ) from stomp import PrintingListener - stomp_conn.set_listener('', PrintingListener()) + + stomp_conn.set_listener("", PrintingListener()) stomp_conn.start() stomp_conn.connect(wait=True) - 
hierarchy = pagure_config['STOMP_HIERARCHY'] + hierarchy = pagure_config["STOMP_HIERARCHY"] stomp_conn.send( - destination=hierarchy + topic, - body=json.dumps(message) + destination=hierarchy + topic, body=json.dumps(message) ) except Exception: - _log.exception('Error sending stomp message') + _log.exception("Error sending stomp message") def log(project, topic, msg, redis=None): - ''' This is the place where we send notifications to user about actions + """ This is the place where we send notifications to user about actions occuring in pagure. - ''' + """ # Send fedmsg notification (if fedmsg is there and set-up) - if not project or (project.settings.get('fedmsg_notifications', True) - and not project.private): + if not project or ( + project.settings.get("fedmsg_notifications", True) + and not project.private + ): fedmsg_publish(topic, msg) # Send stomp notification (if stomp is there and set-up) - if not project or (project.settings.get('stomp_notifications', True) - and not project.private): + if not project or ( + project.settings.get("stomp_notifications", True) + and not project.private + ): stomp_publish(topic, msg) if redis and project and not project.private: @@ -122,27 +128,29 @@ def log(project, topic, msg, redis=None): def _add_mentioned_users(emails, comment): - ''' Check the comment to see if an user is mentioned in it and if + """ Check the comment to see if an user is mentioned in it and if so add this user to the list of people to notify. - ''' - mentio_re = r'@(\w+)' + """ + mentio_re = r"@(\w+)" for username in re.findall(mentio_re, comment): - user = pagure.lib.search_user( - flask.g.session, username=username) + user = pagure.lib.search_user(flask.g.session, username=username) if user: emails.add(user.default_email) return emails def _clean_emails(emails, user): - ''' Remove the email of the user doing the action if it is in the list. + """ Remove the email of the user doing the action if it is in the list. 
This avoids receiving emails about action you do. - ''' + """ # Remove the user doing the action from the list of person to email # unless they actively asked for it - if user and user.emails \ - and not user.settings.get('cc_me_to_my_actions', False): + if ( + user + and user.emails + and not user.settings.get("cc_me_to_my_actions", False) + ): for email in user.emails: if email.email in emails: emails.remove(email.email) @@ -150,16 +158,16 @@ def _clean_emails(emails, user): def _get_emails_for_obj(obj): - ''' Return the list of emails to send notification to when notifying + """ Return the list of emails to send notification to when notifying about the specified issue or pull-request. - ''' + """ emails = set() # Add project creator/owner if obj.project.user.default_email: emails.add(obj.project.user.default_email) # Add committers is object is private, otherwise all contributors - if obj.isa in ['issue', 'pull-request'] and obj.private: + if obj.isa in ["issue", "pull-request"] and obj.private: for user in obj.project.committers: if user.default_email: emails.add(user.default_email) @@ -177,7 +185,7 @@ def _get_emails_for_obj(obj): emails.add(user.default_email) # Add people that commented on the issue/PR - if obj.isa in ['issue', 'pull-request']: + if obj.isa in ["issue", "pull-request"]: for comment in obj.comments: if comment.user.default_email: emails.add(comment.user.default_email) @@ -187,21 +195,21 @@ def _get_emails_for_obj(obj): emails.add(obj.user.default_email) # Add the person assigned to the issue/PR - if obj.isa in ['issue', 'pull-request']: + if obj.isa in ["issue", "pull-request"]: if obj.assignee and obj.assignee.default_email: emails.add(obj.assignee.default_email) # Add public notifications to lists/users set project-wide - if obj.isa == 'issue' and not obj.private: - for notifs in obj.project.notifications.get('issues', []): + if obj.isa == "issue" and not obj.private: + for notifs in obj.project.notifications.get("issues", []): 
emails.add(notifs) - elif obj.isa == 'pull-request': - for notifs in obj.project.notifications.get('requests', []): + elif obj.isa == "pull-request": + for notifs in obj.project.notifications.get("requests", []): emails.add(notifs) # Add the person watching this project, if it's a public issue or a # pull-request - if (obj.isa == 'issue' and not obj.private) or obj.isa == 'pull-request': + if (obj.isa == "issue" and not obj.private) or obj.isa == "pull-request": for watcher in obj.project.watchers: if watcher.watch_issues: emails.add(watcher.user.default_email) @@ -212,7 +220,7 @@ def _get_emails_for_obj(obj): emails.remove(watcher.user.default_email) # Add/Remove people who explicitly asked to be added/removed - if obj.isa in ['issue', 'pull-request']: + if obj.isa in ["issue", "pull-request"]: for watcher in obj.watchers: if not watcher.watch and watcher.user.default_email in emails: emails.remove(watcher.user.default_email) @@ -221,8 +229,10 @@ def _get_emails_for_obj(obj): # Drop the email used by pagure when sending emails = _clean_emails( - emails, pagure_config.get(pagure_config.get( - 'FROM_EMAIL', 'pagure@fedoraproject.org')) + emails, + pagure_config.get( + pagure_config.get("FROM_EMAIL", "pagure@fedoraproject.org") + ), ) return emails @@ -236,40 +246,48 @@ def _get_emails_for_commit_notification(project): # Drop the email used by pagure when sending emails = _clean_emails( - emails, pagure_config.get(pagure_config.get( - 'FROM_EMAIL', 'pagure@fedoraproject.org')) + emails, + pagure_config.get( + pagure_config.get("FROM_EMAIL", "pagure@fedoraproject.org") + ), ) return emails def _build_url(*args): - ''' Build a URL from a given list of arguments. ''' + """ Build a URL from a given list of arguments. 
""" items = [] for idx, arg in enumerate(args): arg = "%s" % arg - if arg.startswith('/'): + if arg.startswith("/"): arg = arg[1:] - if arg.endswith('/') and not idx + 1 == len(args): + if arg.endswith("/") and not idx + 1 == len(args): arg = arg[:-1] items.append(arg) - return '/'.join(items) + return "/".join(items) def _fullname_to_url(fullname): - ''' For forked projects, fullname is 'forks/user/...' but URL is + """ For forked projects, fullname is 'forks/user/...' but URL is 'fork/user/...'. This is why we can't have nice things. - ''' - if fullname.startswith('forks/'): - fullname = fullname.replace('forks', 'fork', 1) + """ + if fullname.startswith("forks/"): + fullname = fullname.replace("forks", "fork", 1) return fullname -def send_email(text, subject, to_mail, - mail_id=None, in_reply_to=None, - project_name=None, user_from=None): # pragma: no cover - ''' Send an email with the specified information. +def send_email( + text, + subject, + to_mail, + mail_id=None, + in_reply_to=None, + project_name=None, + user_from=None, +): # pragma: no cover + """ Send an email with the specified information. 
:arg text: the content of the email to send :type text: unicode @@ -281,105 +299,110 @@ def send_email(text, subject, to_mail, this value :kwarg project_name: if defined, the name of the project - ''' + """ if not to_mail: return - from_email = pagure_config.get( - 'FROM_EMAIL', 'pagure@fedoraproject.org') + from_email = pagure_config.get("FROM_EMAIL", "pagure@fedoraproject.org") if isinstance(from_email, bytes): - from_email = from_email.decode('utf-8') + from_email = from_email.decode("utf-8") if user_from: - header = Header(user_from, 'utf-8') - from_email = '%s <%s>' % (header, from_email) + header = Header(user_from, "utf-8") + from_email = "%s <%s>" % (header, from_email) if project_name is not None: subject_tag = project_name else: - subject_tag = 'Pagure' + subject_tag = "Pagure" if mail_id: - mail_id = mail_id + "@%s" %\ - pagure_config['DOMAIN_EMAIL_NOTIFICATIONS'] + mail_id = mail_id + "@%s" % pagure_config["DOMAIN_EMAIL_NOTIFICATIONS"] if in_reply_to: - in_reply_to = in_reply_to + "@%s" %\ - pagure_config['DOMAIN_EMAIL_NOTIFICATIONS'] + in_reply_to = ( + in_reply_to + "@%s" % pagure_config["DOMAIN_EMAIL_NOTIFICATIONS"] + ) smtp = None - for mailto in to_mail.split(','): - msg = MIMEText(text.encode('utf-8'), 'plain', 'utf-8') - msg['Subject'] = Header( - '[%s] %s' % (subject_tag, subject), 'utf-8') - msg['From'] = from_email + for mailto in to_mail.split(","): + msg = MIMEText(text.encode("utf-8"), "plain", "utf-8") + msg["Subject"] = Header("[%s] %s" % (subject_tag, subject), "utf-8") + msg["From"] = from_email if mail_id: - msg['mail-id'] = mail_id - msg['Message-Id'] = '<%s>' % mail_id + msg["mail-id"] = mail_id + msg["Message-Id"] = "<%s>" % mail_id if in_reply_to: - msg['In-Reply-To'] = '<%s>' % in_reply_to + msg["In-Reply-To"] = "<%s>" % in_reply_to - msg['X-Auto-Response-Suppress'] = 'All' - msg['X-pagure'] = pagure_config['APP_URL'] + msg["X-Auto-Response-Suppress"] = "All" + msg["X-pagure"] = pagure_config["APP_URL"] if project_name is not 
None: - msg['X-pagure-project'] = project_name - msg['List-ID'] = project_name - msg['List-Archive'] = _build_url( - pagure_config['APP_URL'], - _fullname_to_url(project_name)) + msg["X-pagure-project"] = project_name + msg["List-ID"] = project_name + msg["List-Archive"] = _build_url( + pagure_config["APP_URL"], _fullname_to_url(project_name) + ) # Send the message via our own SMTP server, but don't include the # envelope header. - msg['To'] = mailto - salt = pagure_config.get('SALT_EMAIL') + msg["To"] = mailto + salt = pagure_config.get("SALT_EMAIL") if salt and not isinstance(salt, bytes): - salt = salt.encode('utf-8') + salt = salt.encode("utf-8") - if mail_id and pagure_config['EVENTSOURCE_SOURCE']: + if mail_id and pagure_config["EVENTSOURCE_SOURCE"]: - key = (b'<' + mail_id.encode("utf-8") + b'>' + salt - + mailto.encode("utf-8")) + key = ( + b"<" + + mail_id.encode("utf-8") + + b">" + + salt + + mailto.encode("utf-8") + ) if isinstance(key, six.text_type): - key = key.encode('utf-8') + key = key.encode("utf-8") mhash = hashlib.sha512(key) - msg['Reply-To'] = 'reply+%s@%s' % ( + msg["Reply-To"] = "reply+%s@%s" % ( mhash.hexdigest(), - pagure_config['DOMAIN_EMAIL_NOTIFICATIONS']) - msg['Mail-Followup-To'] = msg['Reply-To'] - if not pagure_config.get('EMAIL_SEND', True): - _log.debug('******EMAIL******') - _log.debug('From: %s', from_email) - _log.debug('To: %s', to_mail) - _log.debug('Subject: %s', subject) - _log.debug('in_reply_to: %s', in_reply_to) - _log.debug('mail_id: %s', mail_id) - _log.debug('Contents:') - _log.debug('%s' % text) - _log.debug('*****************') + pagure_config["DOMAIN_EMAIL_NOTIFICATIONS"], + ) + msg["Mail-Followup-To"] = msg["Reply-To"] + if not pagure_config.get("EMAIL_SEND", True): + _log.debug("******EMAIL******") + _log.debug("From: %s", from_email) + _log.debug("To: %s", to_mail) + _log.debug("Subject: %s", subject) + _log.debug("in_reply_to: %s", in_reply_to) + _log.debug("mail_id: %s", mail_id) + _log.debug("Contents:") + 
_log.debug("%s" % text) + _log.debug("*****************") _log.debug(msg.as_string()) - _log.debug('*****/EMAIL******') + _log.debug("*****/EMAIL******") continue try: if smtp is None: - if pagure_config['SMTP_SSL']: + if pagure_config["SMTP_SSL"]: smtp = smtplib.SMTP_SSL( - pagure_config['SMTP_SERVER'], - pagure_config['SMTP_PORT']) + pagure_config["SMTP_SERVER"], + pagure_config["SMTP_PORT"], + ) else: smtp = smtplib.SMTP( - pagure_config['SMTP_SERVER'], - pagure_config['SMTP_PORT']) - if pagure_config['SMTP_USERNAME'] \ - and pagure_config['SMTP_PASSWORD']: + pagure_config["SMTP_SERVER"], + pagure_config["SMTP_PORT"], + ) + if ( + pagure_config["SMTP_USERNAME"] + and pagure_config["SMTP_PASSWORD"] + ): smtp.login( - pagure_config['SMTP_USERNAME'], - pagure_config['SMTP_PASSWORD'] + pagure_config["SMTP_USERNAME"], + pagure_config["SMTP_PASSWORD"], ) - smtp.sendmail( - from_email, - [mailto], - msg.as_string()) + smtp.sendmail(from_email, [mailto], msg.as_string()) except smtplib.SMTPException as err: _log.exception(err) if smtp: @@ -388,9 +411,9 @@ def send_email(text, subject, to_mail, def notify_new_comment(comment, user=None): - ''' Notify the people following an issue that a new comment was added + """ Notify the people following an issue that a new comment was added to the issue. 
- ''' + """ text = """ %s added a new comment to an issue you are following: @@ -400,14 +423,17 @@ def notify_new_comment(comment, user=None): %s %s -""" % (comment.user.user, - comment.comment, - REPLY_MSG, - _build_url( - pagure_config['APP_URL'], - _fullname_to_url(comment.issue.project.fullname), - 'issue', - comment.issue.id)) +""" % ( + comment.user.user, + comment.comment, + REPLY_MSG, + _build_url( + pagure_config["APP_URL"], + _fullname_to_url(comment.issue.project.fullname), + "issue", + comment.issue.id, + ), + ) mail_to = _get_emails_for_obj(comment.issue) if comment.user and comment.user.default_email: mail_to.add(comment.user.default_email) @@ -417,8 +443,8 @@ def notify_new_comment(comment, user=None): send_email( text, - 'Issue #%s: %s' % (comment.issue.id, comment.issue.title), - ','.join(mail_to), + "Issue #%s: %s" % (comment.issue.id, comment.issue.title), + ",".join(mail_to), mail_id=comment.mail_id, in_reply_to=comment.issue.mail_id, project_name=comment.issue.project.fullname, @@ -427,9 +453,9 @@ def notify_new_comment(comment, user=None): def notify_new_issue(issue, user=None): - ''' Notify the people following a project that a new issue was added + """ Notify the people following a project that a new issue was added to it. 
- ''' + """ text = """ %s reported a new issue against the project: `%s` that you are following: `` @@ -438,23 +464,26 @@ def notify_new_issue(issue, user=None): %s %s -""" % (issue.user.user, - issue.project.name, - issue.content, - REPLY_MSG, - _build_url( - pagure_config['APP_URL'], - _fullname_to_url(issue.project.fullname), - 'issue', - issue.id)) +""" % ( + issue.user.user, + issue.project.name, + issue.content, + REPLY_MSG, + _build_url( + pagure_config["APP_URL"], + _fullname_to_url(issue.project.fullname), + "issue", + issue.id, + ), + ) mail_to = _get_emails_for_obj(issue) mail_to = _add_mentioned_users(mail_to, issue.content) mail_to = _clean_emails(mail_to, user) send_email( text, - 'Issue #%s: %s' % (issue.id, issue.title), - ','.join(mail_to), + "Issue #%s: %s" % (issue.id, issue.title), + ",".join(mail_to), mail_id=issue.mail_id, project_name=issue.project.fullname, user_from=issue.user.fullname or issue.user.user, @@ -462,24 +491,27 @@ def notify_new_issue(issue, user=None): def notify_assigned_issue(issue, new_assignee, user): - ''' Notify the people following an issue that the assignee changed. - ''' - action = 'reset' + """ Notify the people following an issue that the assignee changed. + """ + action = "reset" if new_assignee: - action = 'assigned to `%s`' % new_assignee.user + action = "assigned to `%s`" % new_assignee.user text = """ The issue: `%s` of project: `%s` has been %s by %s. 
%s -""" % (issue.title, - issue.project.name, - action, - user.username, - _build_url( - pagure_config['APP_URL'], - _fullname_to_url(issue.project.fullname), - 'issue', - issue.id)) +""" % ( + issue.title, + issue.project.name, + action, + user.username, + _build_url( + pagure_config["APP_URL"], + _fullname_to_url(issue.project.fullname), + "issue", + issue.id, + ), + ) mail_to = _get_emails_for_obj(issue) if new_assignee and new_assignee.default_email: mail_to.add(new_assignee.default_email) @@ -489,9 +521,9 @@ The issue: `%s` of project: `%s` has been %s by %s. uid = time.mktime(datetime.datetime.now().timetuple()) send_email( text, - 'Issue #%s: %s' % (issue.id, issue.title), - ','.join(mail_to), - mail_id='%s/assigned/%s' % (issue.mail_id, uid), + "Issue #%s: %s" % (issue.id, issue.title), + ",".join(mail_to), + mail_id="%s/assigned/%s" % (issue.mail_id, uid), in_reply_to=issue.mail_id, project_name=issue.project.fullname, user_from=user.fullname or user.user, @@ -499,32 +531,35 @@ The issue: `%s` of project: `%s` has been %s by %s. def notify_status_change_issue(issue, user): - ''' Notify the people following a project that an issue changed status. - ''' + """ Notify the people following a project that an issue changed status. + """ status = issue.status - if status.lower() != 'open' and issue.close_status: - status = '%s as %s' % (status, issue.close_status) + if status.lower() != "open" and issue.close_status: + status = "%s as %s" % (status, issue.close_status) text = """ The status of the issue: `%s` of project: `%s` has been updated to: %s by %s. 
%s -""" % (issue.title, - issue.project.fullname, - status, - user.username, - _build_url( - pagure_config['APP_URL'], - _fullname_to_url(issue.project.fullname), - 'issue', - issue.id)) +""" % ( + issue.title, + issue.project.fullname, + status, + user.username, + _build_url( + pagure_config["APP_URL"], + _fullname_to_url(issue.project.fullname), + "issue", + issue.id, + ), + ) mail_to = _get_emails_for_obj(issue) uid = time.mktime(datetime.datetime.now().timetuple()) send_email( text, - 'Issue #%s: %s' % (issue.id, issue.title), - ','.join(mail_to), - mail_id='%s/close/%s' % (issue.mail_id, uid), + "Issue #%s: %s" % (issue.id, issue.title), + ",".join(mail_to), + mail_id="%s/close/%s" % (issue.mail_id, uid), in_reply_to=issue.mail_id, project_name=issue.project.fullname, user_from=user.fullname or user.user, @@ -532,28 +567,31 @@ The status of the issue: `%s` of project: `%s` has been updated to: %s by %s. def notify_meta_change_issue(issue, user, msg): - ''' Notify that a custom field changed - ''' + """ Notify that a custom field changed + """ text = """ `%s` updated issue. 
%s %s -""" % (user.username, - msg, - _build_url( - pagure_config['APP_URL'], - _fullname_to_url(issue.project.fullname), - 'issue', - issue.id)) +""" % ( + user.username, + msg, + _build_url( + pagure_config["APP_URL"], + _fullname_to_url(issue.project.fullname), + "issue", + issue.id, + ), + ) mail_to = _get_emails_for_obj(issue) uid = time.mktime(datetime.datetime.now().timetuple()) send_email( text, - 'Issue #%s: %s' % (issue.id, issue.title), - ','.join(mail_to), - mail_id='%s/close/%s' % (issue.mail_id, uid), + "Issue #%s: %s" % (issue.id, issue.title), + ",".join(mail_to), + mail_id="%s/close/%s" % (issue.mail_id, uid), in_reply_to=issue.mail_id, project_name=issue.project.fullname, user_from=user.fullname or user.user, @@ -561,24 +599,27 @@ def notify_meta_change_issue(issue, user, msg): def notify_assigned_request(request, new_assignee, user): - ''' Notify the people following a pull-request that the assignee changed. - ''' - action = 'reset' + """ Notify the people following a pull-request that the assignee changed. + """ + action = "reset" if new_assignee: - action = 'assigned to `%s`' % new_assignee.user + action = "assigned to `%s`" % new_assignee.user text = """ The pull-request: `%s` of project: `%s` has been %s by %s. %s -""" % (request.title, - request.project.name, - action, - user.username, - _build_url( - pagure_config['APP_URL'], - _fullname_to_url(request.project.fullname), - 'pull-request', - request.id)) +""" % ( + request.title, + request.project.name, + action, + user.username, + _build_url( + pagure_config["APP_URL"], + _fullname_to_url(request.project.fullname), + "pull-request", + request.id, + ), + ) mail_to = _get_emails_for_obj(request) if new_assignee and new_assignee.default_email: mail_to.add(new_assignee.default_email) @@ -588,9 +629,9 @@ The pull-request: `%s` of project: `%s` has been %s by %s. 
uid = time.mktime(datetime.datetime.now().timetuple()) send_email( text, - 'PR #%s: %s' % (request.id, request.title), - ','.join(mail_to), - mail_id='%s/assigned/%s' % (request.mail_id, uid), + "PR #%s: %s" % (request.id, request.title), + ",".join(mail_to), + mail_id="%s/assigned/%s" % (request.mail_id, uid), in_reply_to=request.mail_id, project_name=request.project.fullname, user_from=user.fullname or user.user, @@ -598,9 +639,9 @@ The pull-request: `%s` of project: `%s` has been %s by %s. def notify_new_pull_request(request): - ''' Notify the people following a project that a new pull-request was + """ Notify the people following a project that a new pull-request was added to it. - ''' + """ text = """ %s opened a new pull-request against the project: `%s` that you are following: `` @@ -609,21 +650,24 @@ def notify_new_pull_request(request): %s %s -""" % (request.user.user, - request.project.name, - request.title, - REPLY_MSG, - _build_url( - pagure_config['APP_URL'], - _fullname_to_url(request.project.fullname), - 'pull-request', - request.id)) +""" % ( + request.user.user, + request.project.name, + request.title, + REPLY_MSG, + _build_url( + pagure_config["APP_URL"], + _fullname_to_url(request.project.fullname), + "pull-request", + request.id, + ), + ) mail_to = _get_emails_for_obj(request) send_email( text, - 'PR #%s: %s' % (request.id, request.title), - ','.join(mail_to), + "PR #%s: %s" % (request.id, request.title), + ",".join(mail_to), mail_id=request.mail_id, project_name=request.project.fullname, user_from=request.user.fullname or request.user.user, @@ -631,9 +675,9 @@ def notify_new_pull_request(request): def notify_merge_pull_request(request, user): - ''' Notify the people following a project that a pull-request was merged + """ Notify the people following a project that a pull-request was merged in it. - ''' + """ text = """ %s merged a pull-request against the project: `%s` that you are following. 
@@ -644,22 +688,25 @@ Merged pull-request: `` %s -""" % (user.username, - request.project.name, - request.title, - _build_url( - pagure_config['APP_URL'], - _fullname_to_url(request.project.fullname), - 'pull-request', - request.id)) +""" % ( + user.username, + request.project.name, + request.title, + _build_url( + pagure_config["APP_URL"], + _fullname_to_url(request.project.fullname), + "pull-request", + request.id, + ), + ) mail_to = _get_emails_for_obj(request) uid = time.mktime(datetime.datetime.now().timetuple()) send_email( text, - 'PR #%s: %s' % (request.id, request.title), - ','.join(mail_to), - mail_id='%s/close/%s' % (request.mail_id, uid), + "PR #%s: %s" % (request.id, request.title), + ",".join(mail_to), + mail_id="%s/close/%s" % (request.mail_id, uid), in_reply_to=request.mail_id, project_name=request.project.fullname, user_from=user.fullname or user.user, @@ -667,9 +714,9 @@ Merged pull-request: def notify_reopen_pull_request(request, user): - ''' Notify the people following a project that a closed pull-request + """ Notify the people following a project that a closed pull-request has been reopened. - ''' + """ text = """ %s reopened a pull-request against the project: `%s` that you are following. 
@@ -680,22 +727,25 @@ Reopened pull-request: `` %s -""" % (user.username, - request.project.name, - request.title, - _build_url( - pagure_config['APP_URL'], - _fullname_to_url(request.project.fullname), - 'pull-request', - request.id)) +""" % ( + user.username, + request.project.name, + request.title, + _build_url( + pagure_config["APP_URL"], + _fullname_to_url(request.project.fullname), + "pull-request", + request.id, + ), + ) mail_to = _get_emails_for_obj(request) uid = time.mktime(datetime.datetime.now().timetuple()) send_email( text, - 'PR #%s: %s' % (request.id, request.title), - ','.join(mail_to), - mail_id='%s/close/%s' % (request.mail_id, uid), + "PR #%s: %s" % (request.id, request.title), + ",".join(mail_to), + mail_id="%s/close/%s" % (request.mail_id, uid), in_reply_to=request.mail_id, project_name=request.project.fullname, user_from=user.fullname or user.user, @@ -703,9 +753,9 @@ Reopened pull-request: def notify_cancelled_pull_request(request, user): - ''' Notify the people following a project that a pull-request was + """ Notify the people following a project that a pull-request was cancelled in it. - ''' + """ text = """ %s canceled a pull-request against the project: `%s` that you are following. 
@@ -716,22 +766,25 @@ Cancelled pull-request: `` %s -""" % (user.username, - request.project.name, - request.title, - _build_url( - pagure_config['APP_URL'], - _fullname_to_url(request.project.fullname), - 'pull-request', - request.id)) +""" % ( + user.username, + request.project.name, + request.title, + _build_url( + pagure_config["APP_URL"], + _fullname_to_url(request.project.fullname), + "pull-request", + request.id, + ), + ) mail_to = _get_emails_for_obj(request) uid = time.mktime(datetime.datetime.now().timetuple()) send_email( text, - 'PR #%s: %s' % (request.id, request.title), - ','.join(mail_to), - mail_id='%s/close/%s' % (request.mail_id, uid), + "PR #%s: %s" % (request.id, request.title), + ",".join(mail_to), + mail_id="%s/close/%s" % (request.mail_id, uid), in_reply_to=request.mail_id, project_name=request.project.fullname, user_from=user.fullname or user.user, @@ -739,9 +792,9 @@ Cancelled pull-request: def notify_pull_request_comment(comment, user): - ''' Notify the people following a pull-request that a new comment was + """ Notify the people following a pull-request that a new comment was added to it. 
- ''' + """ text = """ %s commented on the pull-request: `%s` that you are following: `` @@ -750,23 +803,26 @@ def notify_pull_request_comment(comment, user): %s %s -""" % (comment.user.user, - comment.pull_request.title, - comment.comment, - REPLY_MSG, - _build_url( - pagure_config['APP_URL'], - _fullname_to_url(comment.pull_request.project.fullname), - 'pull-request', - comment.pull_request.id)) +""" % ( + comment.user.user, + comment.pull_request.title, + comment.comment, + REPLY_MSG, + _build_url( + pagure_config["APP_URL"], + _fullname_to_url(comment.pull_request.project.fullname), + "pull-request", + comment.pull_request.id, + ), + ) mail_to = _get_emails_for_obj(comment.pull_request) mail_to = _add_mentioned_users(mail_to, comment.comment) mail_to = _clean_emails(mail_to, user) send_email( text, - 'PR #%s: %s' % (comment.pull_request.id, comment.pull_request.title), - ','.join(mail_to), + "PR #%s: %s" % (comment.pull_request.id, comment.pull_request.title), + ",".join(mail_to), mail_id=comment.mail_id, in_reply_to=comment.pull_request.mail_id, project_name=comment.pull_request.project.fullname, @@ -775,29 +831,31 @@ def notify_pull_request_comment(comment, user): def notify_pull_request_flag(flag, user): - ''' Notify the people following a pull-request that a new flag was + """ Notify the people following a pull-request that a new flag was added to it. 
- ''' + """ text = """ %s flagged the pull-request `%s` as %s: %s %s -""" % (flag.username, - flag.pull_request.title, - flag.status, - flag.comment, - _build_url( - pagure_config['APP_URL'], - _fullname_to_url(flag.pull_request.project.fullname), - 'pull-request', - flag.pull_request.id)) +""" % ( + flag.username, + flag.pull_request.title, + flag.status, + flag.comment, + _build_url( + pagure_config["APP_URL"], + _fullname_to_url(flag.pull_request.project.fullname), + "pull-request", + flag.pull_request.id, + ), + ) mail_to = _get_emails_for_obj(flag.pull_request) send_email( text, - 'PR #%s - %s: %s' % ( - flag.pull_request.id, flag.username, flag.status), - ','.join(mail_to), + "PR #%s - %s: %s" % (flag.pull_request.id, flag.username, flag.status), + ",".join(mail_to), mail_id=flag.mail_id, in_reply_to=flag.pull_request.mail_id, project_name=flag.pull_request.project.fullname, @@ -806,14 +864,14 @@ def notify_pull_request_flag(flag, user): def notify_new_email(email, user): - ''' Ask the user to confirm to the email belong to them. - ''' + """ Ask the user to confirm to the email belong to them. + """ - root_url = pagure_config.get('APP_URL', flask.request.url_root) + root_url = pagure_config.get("APP_URL", flask.request.url_root) url = urljoin( root_url or flask.request.url_root, - flask.url_for('ui_ns.confirm_email', token=email.token), + flask.url_for("ui_ns.confirm_email", token=email.token), ) text = """Dear %(username)s, @@ -828,41 +886,51 @@ The email will not be activated until you finish this step. Sincerely, Your pagure admin. 
-""" % ({'username': user.username, 'url': url, 'root_url': root_url}) +""" % ( + {"username": user.username, "url": url, "root_url": root_url} + ) send_email( text, - 'Confirm new email', + "Confirm new email", email.email, user_from=user.fullname or user.user, ) def notify_new_commits(abspath, project, branch, commits): - ''' Notify the people following a project's commits that new commits have + """ Notify the people following a project's commits that new commits have been added. - ''' + """ # string note: abspath, project and branch can only contain ASCII # by policy (pagure and/or gitolite) commits_info = [] for commit in commits: - commits_info.append({ - 'commit': commit, - 'author': pagure.lib.git.get_author( - commit, abspath), - 'subject': pagure.lib.git.get_commit_subject( - commit, abspath) - }) + commits_info.append( + { + "commit": commit, + "author": pagure.lib.git.get_author(commit, abspath), + "subject": pagure.lib.git.get_commit_subject(commit, abspath), + } + ) # make sure this is unicode - commits_string = u'\n'.join(u'{0} {1} {2}'.format( - commit_info['commit'], commit_info['author'], commit_info['subject']) - for commit_info in commits_info) + commits_string = "\n".join( + "{0} {1} {2}".format( + commit_info["commit"], + commit_info["author"], + commit_info["subject"], + ) + for commit_info in commits_info + ) commit_url = _build_url( - pagure_config['APP_URL'], _fullname_to_url(project.fullname), - 'commits', branch) + pagure_config["APP_URL"], + _fullname_to_url(project.fullname), + "commits", + branch, + ) - email_body = u''' + email_body = """ The following commits were pushed to the repo %s on branch %s, which you are following: %s @@ -871,44 +939,49 @@ The following commits were pushed to the repo %s on branch To view more about the commits, visit: %s -''' % (project.fullname, - branch, - commits_string, - commit_url) +""" % ( + project.fullname, + branch, + commits_string, + commit_url, + ) mail_to = 
_get_emails_for_commit_notification(project)

     send_email(
         email_body,
         'New Commits To "{0}" ({1})'.format(project.fullname, branch),
-        ','.join(mail_to),
-        project_name=project.fullname
+        ",".join(mail_to),
+        project_name=project.fullname,
     )


 def notify_commit_flag(flag, user):
-    ''' Notify the people following a project that a new flag was added
+    """ Notify the people following a project that a new flag was added
         to one of its commit.
-    '''
+    """
     text = """
 %s flagged the commit `%s` as %s: %s

 %s
-""" % (flag.username,
-       flag.commit_hash,
-       flag.status,
-       flag.comment,
-       _build_url(
-           pagure_config['APP_URL'],
-           _fullname_to_url(flag.project.fullname),
-           'c',
-           flag.commit_hash))
+""" % (
+        flag.username,
+        flag.commit_hash,
+        flag.status,
+        flag.comment,
+        _build_url(
+            pagure_config["APP_URL"],
+            _fullname_to_url(flag.project.fullname),
+            "c",
+            flag.commit_hash,
+        ),
+    )

     mail_to = _get_emails_for_obj(flag)

     send_email(
         text,
-        'Coommit #%s - %s: %s' % (
-            flag.commit_hash, flag.username, flag.status),
-        ','.join(mail_to),
+        "Commit #%s - %s: %s"
+        % (flag.commit_hash, flag.username, flag.status),
+        ",".join(mail_to),
         mail_id=flag.mail_id,
         in_reply_to=flag.project.mail_id,
         project_name=flag.project.fullname,
diff --git a/pagure/lib/plugins.py b/pagure/lib/plugins.py
index a0c108b..e154c2a 100644
--- a/pagure/lib/plugins.py
+++ b/pagure/lib/plugins.py
@@ -16,35 +16,35 @@ from pagure.lib.model import BASE


 def get_plugin_names(blacklist=None):
-    ''' Return the list of plugins names. '''
+    """ Return the list of plugins names. """
     from pagure.hooks import BaseHook
-    plugins = load('pagure.hooks', subclasses=BaseHook)
+
+    plugins = load("pagure.hooks", subclasses=BaseHook)

     if not blacklist:
         blacklist = []
     elif not isinstance(blacklist, list):
         blacklist = [blacklist]

     output = [
-        plugin.name
-        for plugin in plugins
-        if plugin.name not in blacklist
+        plugin.name for plugin in plugins if plugin.name not in blacklist
     ]
     # The default hook is not one we show
-    if 'default' in output:
-        output.remove('default')
+    if "default" in output:
+        output.remove("default")

     return sorted(output)


 def get_plugin_tables():
-    ''' Return the list of all plugins. '''
-    plugins = load('pagure.hooks', subclasses=BASE)
+    """ Return the list of all plugins. """
+    plugins = load("pagure.hooks", subclasses=BASE)
     return plugins


 def get_plugin(plugin_name):
-    ''' Return the list of plugins names. '''
+    """ Return the plugin matching the given name. """
     from pagure.hooks import BaseHook
-    plugins = load('pagure.hooks', subclasses=BaseHook)
+
+    plugins = load("pagure.hooks", subclasses=BaseHook)
     for plugin in plugins:
         if plugin.name == plugin_name:
             return plugin
diff --git a/pagure/lib/repo.py b/pagure/lib/repo.py
index 2c6dbfd..de36405 100644
--- a/pagure/lib/repo.py
+++ b/pagure/lib/repo.py
@@ -23,10 +23,10 @@ _log = logging.getLogger(__name__)


 def get_pygit2_version():
-    ''' Return pygit2 version as a tuple of integers.
+    """ Return pygit2 version as a tuple of integers.

     This is needed for correct version comparison.
-    '''
-    return tuple([int(i) for i in pygit2.__version__.split('.')])
+    """
+    return tuple([int(i) for i in pygit2.__version__.split(".")])


 class PagureRepo(pygit2.Repository):
@@ -44,24 +44,26 @@ class PagureRepo(pygit2.Repository):
         else:
             remote.push(refname)

-    def pull(self, remote_name='origin', branch='master', force=False):
-        ''' pull changes for the specified remote (defaults to origin). 
+    def pull(self, remote_name="origin", branch="master", force=False):
+        """ pull changes for the specified remote (defaults to origin).

         Code from MichaelBoselowitz at:
         https://github.com/MichaelBoselowitz/pygit2-examples/blob/
         68e889e50a592d30ab4105a2e7b9f28fac7324c8/examples.py#L58
         licensed under the MIT license.
-        '''
+        """
         for remote in self.remotes:
             if remote.name == remote_name:
                 remote.fetch()
                 remote_master_id = self.lookup_reference(
-                    'refs/remotes/origin/%s' % branch).target
+                    "refs/remotes/origin/%s" % branch
+                ).target

                 if force:
                     repo_branch = self.lookup_reference(
-                        'refs/heads/%s' % branch)
+                        "refs/heads/%s" % branch
+                    )
                     repo_branch.set_target(remote_master_id)

                 merge_result, _ = self.merge_analysis(remote_master_id)
@@ -72,33 +74,37 @@ class PagureRepo(pygit2.Repository):
             elif merge_result & pygit2.GIT_MERGE_ANALYSIS_FASTFORWARD:
                 self.checkout_tree(self.get(remote_master_id))
                 master_ref = self.lookup_reference(
-                    'refs/heads/%s' % branch)
+                    "refs/heads/%s" % branch
+                )
                 master_ref.set_target(remote_master_id)
                 self.head.set_target(remote_master_id)
             elif merge_result & pygit2.GIT_MERGE_ANALYSIS_NORMAL:
                 raise pagure.exceptions.GitConflictsException(
-                    'Pulling remote changes leads to a conflict')
+                    "Pulling remote changes leads to a conflict"
+                )
             else:
                 _log.debug(
-                    'Unexpected merge result: %s' % (
-                        pygit2.GIT_MERGE_ANALYSIS_NORMAL))
-                raise AssertionError('Unknown merge analysis result')
+                    "Unexpected merge result: %s"
+                    % (pygit2.GIT_MERGE_ANALYSIS_NORMAL)
+                )
+                raise AssertionError("Unknown merge analysis result")

     def run_hook(self, old, new, ref, username):
-        ''' Runs the post-update hook on the repo. '''
-        line = '%s %s %s\n' % (old, new, ref)
-        cmd = ['./hooks/post-receive']
+        """ Runs the post-receive hook on the repo. 
""" + line = "%s %s %s\n" % (old, new, ref) + cmd = ["./hooks/post-receive"] env = os.environ.copy() - env['GIT_DIR'] = self.path - env['GL_USER'] = username + env["GIT_DIR"] = self.path + env["GL_USER"] = username _log.debug( - 'Running post-receive hook in: %s with user: %s and input: \n' - '%s' % (self.path, username, line)) + "Running post-receive hook in: %s with user: %s and input: \n" + "%s" % (self.path, username, line) + ) - hookfile = os.path.join(self.path, 'hooks', 'post-receive') + hookfile = os.path.join(self.path, "hooks", "post-receive") if not os.path.exists(hookfile): - _log.debug('No post-receive hook found, bailing') + _log.debug("No post-receive hook found, bailing") return procs = subprocess.Popen( @@ -112,7 +118,7 @@ class PagureRepo(pygit2.Repository): (out, err) = procs.communicate(line) retcode = procs.wait() if retcode: - print('ERROR: %s =-- %s' % (cmd, retcode)) + print("ERROR: %s =-- %s" % (cmd, retcode)) print(out) print(err) - out = out.rstrip('\n\r') + out = out.rstrip("\n\r") diff --git a/pagure/lib/tasks.py b/pagure/lib/tasks.py index e4f0c44..c64e1c0 100644 --- a/pagure/lib/tasks.py +++ b/pagure/lib/tasks.py @@ -45,15 +45,15 @@ from pagure.utils import get_parent_repo_path _log = get_task_logger(__name__) -if os.environ.get('PAGURE_BROKER_URL'): - broker_url = os.environ['PAGURE_BROKER_URL'] -elif pagure_config.get('BROKER_URL'): - broker_url = pagure_config['BROKER_URL'] +if os.environ.get("PAGURE_BROKER_URL"): + broker_url = os.environ["PAGURE_BROKER_URL"] +elif pagure_config.get("BROKER_URL"): + broker_url = pagure_config["BROKER_URL"] else: - broker_url = 'redis://%s' % pagure_config['REDIS_HOST'] + broker_url = "redis://%s" % pagure_config["REDIS_HOST"] -conn = Celery('tasks', broker=broker_url, backend=broker_url) -conn.conf.update(pagure_config['CELERY_CONFIG']) +conn = Celery("tasks", broker=broker_url, backend=broker_url) +conn.conf.update(pagure_config["CELERY_CONFIG"]) @after_setup_task_logger.connect @@ -72,10 +72,10 
@@ def pagure_task(function): """ Decorated function, actually does the work. """ if self is not None: try: - self.update_state(state='RUNNING') + self.update_state(state="RUNNING") except TypeError: pass - session = pagure.lib.create_session(pagure_config['DB_URL']) + session = pagure.lib.create_session(pagure_config["DB_URL"]) try: return function(self, session, *args, **kwargs) except: # noqa: E722 @@ -86,6 +86,7 @@ def pagure_task(function): finally: session.remove() gc_clean() + return decorated_function @@ -101,7 +102,7 @@ def get_result(uuid): def ret(endpoint, **kwargs): - toret = {'endpoint': endpoint} + toret = {"endpoint": endpoint} toret.update(kwargs) return toret @@ -112,10 +113,11 @@ def gc_clean(): gc.collect() -@conn.task(queue=pagure_config.get('GITOLITE_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("GITOLITE_CELERY_QUEUE", None), bind=True) @pagure_task def generate_gitolite_acls( - self, session, namespace=None, name=None, user=None, group=None): + self, session, namespace=None, name=None, user=None, group=None +): """ Generate the gitolite configuration file either entirely or for a specific project. 
@@ -134,33 +136,37 @@ def generate_gitolite_acls( project = None if name and name != -1: project = pagure.lib._get_project( - session, namespace=namespace, name=name, user=user) + session, namespace=namespace, name=name, user=user + ) elif name == -1: project = name helper = pagure.lib.git_auth.get_git_auth_helper( - pagure_config['GITOLITE_BACKEND']) - _log.debug('Got helper: %s', helper) + pagure_config["GITOLITE_BACKEND"] + ) + _log.debug("Got helper: %s", helper) group_obj = None if group: group_obj = pagure.lib.search_groups(session, group_name=group) _log.debug( - 'Calling helper: %s with arg: project=%s, group=%s', - helper, project, group_obj) + "Calling helper: %s with arg: project=%s, group=%s", + helper, + project, + group_obj, + ) helper.generate_acls(project=project, group=group_obj) pagure.lib.update_read_only_mode(session, project, read_only=False) try: session.commit() - _log.debug('Project %s is no longer in Read Only Mode', project) + _log.debug("Project %s is no longer in Read Only Mode", project) except SQLAlchemyError: session.rollback() - _log.exception( - 'Failed to unmark read_only for: %s project', project) + _log.exception("Failed to unmark read_only for: %s project", project) -@conn.task(queue=pagure_config.get('GITOLITE_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("GITOLITE_CELERY_QUEUE", None), bind=True) @pagure_task def gitolite_post_compile_only(self, session): """ Do gitolite post-processing only. 
Most importantly, this processes SSH @@ -169,18 +175,20 @@ def gitolite_post_compile_only(self, session): touching any other gitolite configuration """ helper = pagure.lib.git_auth.get_git_auth_helper( - pagure_config['GITOLITE_BACKEND']) - _log.debug('Got helper: %s', helper) - if hasattr(helper, 'post_compile_only'): + pagure_config["GITOLITE_BACKEND"] + ) + _log.debug("Got helper: %s", helper) + if hasattr(helper, "post_compile_only"): helper.post_compile_only() else: helper.generate_acls(project=None) -@conn.task(queue=pagure_config.get('GITOLITE_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("GITOLITE_CELERY_QUEUE", None), bind=True) @pagure_task def delete_project( - self, session, namespace=None, name=None, user=None, action_user=None): + self, session, namespace=None, name=None, user=None, action_user=None +): """ Delete a project in pagure. This is achieved in three steps: @@ -201,27 +209,34 @@ def delete_project( """ project = pagure.lib._get_project( - session, namespace=namespace, name=name, user=user) + session, namespace=namespace, name=name, user=user + ) if not project: raise RuntimeError( - 'Project: %s/%s from user: %s not found in the DB' % ( - namespace, name, user)) + "Project: %s/%s from user: %s not found in the DB" + % (namespace, name, user) + ) # Remove the project from gitolite.conf helper = pagure.lib.git_auth.get_git_auth_helper( - pagure_config['GITOLITE_BACKEND']) - _log.debug('Got helper: %s', helper) + pagure_config["GITOLITE_BACKEND"] + ) + _log.debug("Got helper: %s", helper) _log.debug( - 'Calling helper: %s with arg: project=%s', helper, project.fullname) + "Calling helper: %s with arg: project=%s", helper, project.fullname + ) helper.remove_acls(session=session, project=project) # Remove the git repositories on disk paths = [] for key in [ - 'GIT_FOLDER', 'DOCS_FOLDER', - 'TICKETS_FOLDER', 'REQUESTS_FOLDER']: + "GIT_FOLDER", + "DOCS_FOLDER", + "TICKETS_FOLDER", + "REQUESTS_FOLDER", + ]: if 
pagure_config.get(key): path = os.path.join(pagure_config[key], project.path) if os.path.exists(path): @@ -229,15 +244,14 @@ def delete_project( try: for path in paths: - _log.info('Deleting: %s' % path) + _log.info("Deleting: %s" % path) shutil.rmtree(path) except (OSError, IOError) as err: _log.exception(err) - raise RuntimeError( - 'Could not delete all the repos from the system') + raise RuntimeError("Could not delete all the repos from the system") for path in paths: - _log.info('Path: %s - exists: %s' % (path, os.path.exists(path))) + _log.info("Path: %s - exists: %s" % (path, os.path.exists(path))) # Remove the project from the DB username = project.user.user @@ -247,24 +261,23 @@ def delete_project( session.commit() pagure.lib.notify.log( project, - topic='project.deleted', - msg=dict( - project=project_json, - agent=action_user, - ), + topic="project.deleted", + msg=dict(project=project_json, agent=action_user), ) except SQLAlchemyError: session.rollback() _log.exception( - 'Failed to delete project: %s from the DB', project.fullname) + "Failed to delete project: %s from the DB", project.fullname + ) - return ret('ui_ns.view_user', username=username) + return ret("ui_ns.view_user", username=username) -@conn.task(queue=pagure_config.get('FAST_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("FAST_CELERY_QUEUE", None), bind=True) @pagure_task -def create_project(self, session, username, namespace, name, add_readme, - ignore_existing_repo): +def create_project( + self, session, username, namespace, name, add_readme, ignore_existing_repo +): """ Create a project. 
:arg session: SQLAlchemy session object @@ -283,75 +296,75 @@ def create_project(self, session, username, namespace, name, add_readme, :type ignore_existing_repo: bool """ - project = pagure.lib._get_project( - session, namespace=namespace, name=name) + project = pagure.lib._get_project(session, namespace=namespace, name=name) - with project.lock('WORKER'): + with project.lock("WORKER"): userobj = pagure.lib.search_user(session, username=username) - gitrepo = os.path.join(pagure_config['GIT_FOLDER'], project.path) + gitrepo = os.path.join(pagure_config["GIT_FOLDER"], project.path) # Add the readme file if it was asked - _log.debug('Create git repo at: %s', gitrepo) + _log.debug("Create git repo at: %s", gitrepo) templ = None if project.is_fork: - templ = pagure_config.get('FORK_TEMPLATE_PATH') + templ = pagure_config.get("FORK_TEMPLATE_PATH") else: - templ = pagure_config.get('PROJECT_TEMPLATE_PATH') + templ = pagure_config.get("PROJECT_TEMPLATE_PATH") if templ: if not os.path.exists(templ): _log.warning( - 'Invalid git template configured: %s, not found on disk', - templ) + "Invalid git template configured: %s, not found on disk", + templ, + ) templ = None else: - _log.debug(' Using template at: %s', templ) + _log.debug(" Using template at: %s", templ) - pygit2.init_repository( - gitrepo, bare=True, template_path=templ) + pygit2.init_repository(gitrepo, bare=True, template_path=templ) if add_readme: # Clone main project - temp_gitrepo_path = tempfile.mkdtemp(prefix='pagure-') + temp_gitrepo_path = tempfile.mkdtemp(prefix="pagure-") temp_gitrepo = pygit2.clone_repository( - gitrepo, temp_gitrepo_path, bare=False) + gitrepo, temp_gitrepo_path, bare=False + ) # Add README file author = userobj.fullname or userobj.user author_email = userobj.default_email if six.PY2: - author = author.encode('utf-8') - author_email = author_email.encode('utf-8') + author = author.encode("utf-8") + author_email = author_email.encode("utf-8") author = pygit2.Signature(author, 
author_email) content = "# %s\n\n%s" % (name, project.description) readme_file = os.path.join(temp_gitrepo.workdir, "README.md") - with open(readme_file, 'wb') as stream: - stream.write(content.encode('utf-8')) + with open(readme_file, "wb") as stream: + stream.write(content.encode("utf-8")) temp_gitrepo.index.add_all() temp_gitrepo.index.write() tree = temp_gitrepo.index.write_tree() temp_gitrepo.create_commit( - 'HEAD', author, author, 'Added the README', tree, []) + "HEAD", author, author, "Added the README", tree, [] + ) # Push the README back to the main project ori_remote = temp_gitrepo.remotes[0] - master_ref = temp_gitrepo.lookup_reference('HEAD').resolve() - refname = '%s:%s' % (master_ref.name, master_ref.name) + master_ref = temp_gitrepo.lookup_reference("HEAD").resolve() + refname = "%s:%s" % (master_ref.name, master_ref.name) - _log.info('Pushing to %s: %s', ori_remote.name, refname) + _log.info("Pushing to %s: %s", ori_remote.name, refname) pagure.lib.repo.PagureRepo.push(ori_remote, refname) shutil.rmtree(temp_gitrepo_path) if not project.private: # Make the repo exportable via apache - http_clone_file = os.path.join(gitrepo, 'git-daemon-export-ok') + http_clone_file = os.path.join(gitrepo, "git-daemon-export-ok") if not os.path.exists(http_clone_file): - with open(http_clone_file, 'w') as stream: + with open(http_clone_file, "w") as stream: pass docrepo = None - if pagure_config.get('DOCS_FOLDER'): - docrepo = os.path.join( - pagure_config['DOCS_FOLDER'], project.path) + if pagure_config.get("DOCS_FOLDER"): + docrepo = os.path.join(pagure_config["DOCS_FOLDER"], project.path) if os.path.exists(docrepo): if not ignore_existing_repo: shutil.rmtree(gitrepo) @@ -360,13 +373,14 @@ def create_project(self, session, username, namespace, name, add_readme, 'The docs repo "%s" already exists' % project.path ) else: - _log.debug('Create git repo at: %s', docrepo) + _log.debug("Create git repo at: %s", docrepo) pygit2.init_repository(docrepo, bare=True) 
ticketrepo = None - if pagure_config.get('TICKETS_FOLDER'): + if pagure_config.get("TICKETS_FOLDER"): ticketrepo = os.path.join( - pagure_config['TICKETS_FOLDER'], project.path) + pagure_config["TICKETS_FOLDER"], project.path + ) if os.path.exists(ticketrepo): if not ignore_existing_repo: shutil.rmtree(gitrepo) @@ -374,17 +388,19 @@ def create_project(self, session, username, namespace, name, add_readme, shutil.rmtree(docrepo) session.remove() raise pagure.exceptions.RepoExistsException( - 'The tickets repo "%s" already exists' % - project.path + 'The tickets repo "%s" already exists' % project.path ) else: - _log.debug('Create git repo at: %s', ticketrepo) + _log.debug("Create git repo at: %s", ticketrepo) pygit2.init_repository( - ticketrepo, bare=True, - mode=pygit2.C.GIT_REPOSITORY_INIT_SHARED_GROUP) + ticketrepo, + bare=True, + mode=pygit2.C.GIT_REPOSITORY_INIT_SHARED_GROUP, + ) requestrepo = os.path.join( - pagure_config['REQUESTS_FOLDER'], project.path) + pagure_config["REQUESTS_FOLDER"], project.path + ) if os.path.exists(requestrepo): if not ignore_existing_repo: shutil.rmtree(gitrepo) @@ -394,17 +410,18 @@ def create_project(self, session, username, namespace, name, add_readme, shutil.rmtree(ticketrepo) session.remove() raise pagure.exceptions.RepoExistsException( - 'The requests repo "%s" already exists' % - project.path + 'The requests repo "%s" already exists' % project.path ) else: - _log.debug('Create git repo at: %s', requestrepo) + _log.debug("Create git repo at: %s", requestrepo) pygit2.init_repository( - requestrepo, bare=True, - mode=pygit2.C.GIT_REPOSITORY_INIT_SHARED_GROUP) + requestrepo, + bare=True, + mode=pygit2.C.GIT_REPOSITORY_INIT_SHARED_GROUP, + ) # Install the default hook - plugin = pagure.lib.plugins.get_plugin('default') + plugin = pagure.lib.plugins.get_plugin("default") dbobj = plugin.db_object() dbobj.active = True dbobj.project_id = project.id @@ -417,98 +434,128 @@ def create_project(self, session, username, namespace, name, 
add_readme, task = generate_gitolite_acls.delay( namespace=project.namespace, name=project.name, - user=project.user.user if project.is_fork else None) - _log.info('Refreshing gitolite config queued in task: %s', task.id) + user=project.user.user if project.is_fork else None, + ) + _log.info("Refreshing gitolite config queued in task: %s", task.id) - return ret('ui_ns.view_repo', repo=name, namespace=namespace) + return ret("ui_ns.view_repo", repo=name, namespace=namespace) -@conn.task(queue=pagure_config.get('SLOW_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("SLOW_CELERY_QUEUE", None), bind=True) @pagure_task -def update_git(self, session, name, namespace, user, - ticketuid=None, requestuid=None): +def update_git( + self, session, name, namespace, user, ticketuid=None, requestuid=None +): """ Update the JSON representation of either a ticket or a pull-request depending on the argument specified. """ project = pagure.lib._get_project( - session, namespace=namespace, name=name, user=user) + session, namespace=namespace, name=name, user=user + ) - project_lock = 'WORKER' + project_lock = "WORKER" if ticketuid is not None: - project_lock = 'WORKER_TICKET' + project_lock = "WORKER_TICKET" elif requestuid is not None: - project_lock = 'WORKER_REQUEST' + project_lock = "WORKER_REQUEST" with project.lock(project_lock): if ticketuid is not None: obj = pagure.lib.get_issue_by_uid(session, ticketuid) - folder = pagure_config['TICKETS_FOLDER'] + folder = pagure_config["TICKETS_FOLDER"] elif requestuid is not None: obj = pagure.lib.get_request_by_uid(session, requestuid) - folder = pagure_config['REQUESTS_FOLDER'] + folder = pagure_config["REQUESTS_FOLDER"] else: - raise NotImplementedError('No ticket ID or request ID provided') + raise NotImplementedError("No ticket ID or request ID provided") if obj is None: - raise Exception('Unable to find object') + raise Exception("Unable to find object") result = pagure.lib.git._update_git(obj, project, folder) 
return result -@conn.task(queue=pagure_config.get('SLOW_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("SLOW_CELERY_QUEUE", None), bind=True) @pagure_task def clean_git(self, session, name, namespace, user, ticketuid): """ Remove the JSON representation of a ticket on the git repository for tickets. """ project = pagure.lib._get_project( - session, namespace=namespace, name=name, user=user) + session, namespace=namespace, name=name, user=user + ) - with project.lock('WORKER_TICKET'): + with project.lock("WORKER_TICKET"): obj = pagure.lib.get_issue_by_uid(session, ticketuid) - folder = pagure_config['TICKETS_FOLDER'] + folder = pagure_config["TICKETS_FOLDER"] if obj is None: - raise Exception('Unable to find object') + raise Exception("Unable to find object") result = pagure.lib.git._clean_git(obj, project, folder) return result -@conn.task(queue=pagure_config.get('MEDIUM_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("MEDIUM_CELERY_QUEUE", None), bind=True) @pagure_task -def update_file_in_git(self, session, name, namespace, user, branch, branchto, - filename, content, message, username, email, - runhook=False): +def update_file_in_git( + self, + session, + name, + namespace, + user, + branch, + branchto, + filename, + content, + message, + username, + email, + runhook=False, +): """ Update a file in the specified git repo. 
""" userobj = pagure.lib.search_user(session, username=username) project = pagure.lib._get_project( - session, namespace=namespace, name=name, user=user) + session, namespace=namespace, name=name, user=user + ) - with project.lock('WORKER'): + with project.lock("WORKER"): pagure.lib.git._update_file_in_git( - project, branch, branchto, filename, - content, message, userobj, email, runhook=runhook) + project, + branch, + branchto, + filename, + content, + message, + userobj, + email, + runhook=runhook, + ) - return ret('ui_ns.view_commits', repo=project.name, username=user, - namespace=namespace, branchname=branchto) + return ret( + "ui_ns.view_commits", + repo=project.name, + username=user, + namespace=namespace, + branchname=branchto, + ) -@conn.task(queue=pagure_config.get('MEDIUM_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("MEDIUM_CELERY_QUEUE", None), bind=True) @pagure_task def delete_branch(self, session, name, namespace, user, branchname): """ Delete a branch from a git repo. 
""" project = pagure.lib._get_project( - session, namespace=namespace, name=name, user=user) + session, namespace=namespace, name=name, user=user + ) - with project.lock('WORKER'): - repo_obj = pygit2.Repository( - pagure.utils.get_repo_path(project)) + with project.lock("WORKER"): + repo_obj = pygit2.Repository(pagure.utils.get_repo_path(project)) try: branch = repo_obj.lookup_branch(branchname) @@ -517,13 +564,22 @@ def delete_branch(self, session, name, namespace, user, branchname): _log.exception(err) return ret( - 'ui_ns.view_repo', repo=name, namespace=namespace, username=user) + "ui_ns.view_repo", repo=name, namespace=namespace, username=user + ) -@conn.task(queue=pagure_config.get('FAST_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("FAST_CELERY_QUEUE", None), bind=True) @pagure_task -def fork(self, session, name, namespace, user_owner, user_forker, - editbranch, editfile): +def fork( + self, + session, + name, + namespace, + user_owner, + user_forker, + editbranch, + editfile, +): """ Forks the specified project for the specified user. 
:arg namespace: the namespace of the project @@ -543,36 +599,38 @@ def fork(self, session, name, namespace, user_owner, user_forker, """ repo_from = pagure.lib._get_project( - session, namespace=namespace, name=name, user=user_owner) + session, namespace=namespace, name=name, user=user_owner + ) repo_to = pagure.lib._get_project( - session, namespace=namespace, name=name, user=user_forker) + session, namespace=namespace, name=name, user=user_forker + ) - with repo_to.lock('WORKER'): - reponame = os.path.join(pagure_config['GIT_FOLDER'], repo_from.path) - forkreponame = os.path.join(pagure_config['GIT_FOLDER'], repo_to.path) + with repo_to.lock("WORKER"): + reponame = os.path.join(pagure_config["GIT_FOLDER"], repo_from.path) + forkreponame = os.path.join(pagure_config["GIT_FOLDER"], repo_to.path) frepo = pygit2.clone_repository(reponame, forkreponame, bare=True) # Clone all the branches as well for branch in frepo.listall_branches(pygit2.GIT_BRANCH_REMOTE): branch_obj = frepo.lookup_branch(branch, pygit2.GIT_BRANCH_REMOTE) branchname = branch_obj.branch_name.replace( - branch_obj.remote_name, '', 1)[1:] + branch_obj.remote_name, "", 1 + )[1:] if branchname in frepo.listall_branches(pygit2.GIT_BRANCH_LOCAL): continue frepo.create_branch(branchname, frepo.get(branch_obj.target.hex)) # Create the git-daemon-export-ok file on the clone - http_clone_file = os.path.join(forkreponame, 'git-daemon-export-ok') + http_clone_file = os.path.join(forkreponame, "git-daemon-export-ok") if not os.path.exists(http_clone_file): - with open(http_clone_file, 'w'): + with open(http_clone_file, "w"): pass # Only fork the doc folder if the pagure instance supports the doc # service/server. 
- if pagure_config.get('DOCS_FOLDER'): - docrepo = os.path.join( - pagure_config['DOCS_FOLDER'], repo_to.path) + if pagure_config.get("DOCS_FOLDER"): + docrepo = os.path.join(pagure_config["DOCS_FOLDER"], repo_to.path) if os.path.exists(docrepo): shutil.rmtree(forkreponame) raise pagure.exceptions.RepoExistsException( @@ -580,9 +638,10 @@ def fork(self, session, name, namespace, user_owner, user_forker, ) pygit2.init_repository(docrepo, bare=True) - if pagure_config.get('TICKETS_FOLDER'): + if pagure_config.get("TICKETS_FOLDER"): ticketrepo = os.path.join( - pagure_config['TICKETS_FOLDER'], repo_to.path) + pagure_config["TICKETS_FOLDER"], repo_to.path + ) if os.path.exists(ticketrepo): shutil.rmtree(forkreponame) shutil.rmtree(docrepo) @@ -590,11 +649,14 @@ def fork(self, session, name, namespace, user_owner, user_forker, 'The tickets repo "%s" already exists' % repo_to.path ) pygit2.init_repository( - ticketrepo, bare=True, - mode=pygit2.C.GIT_REPOSITORY_INIT_SHARED_GROUP) + ticketrepo, + bare=True, + mode=pygit2.C.GIT_REPOSITORY_INIT_SHARED_GROUP, + ) requestrepo = os.path.join( - pagure_config['REQUESTS_FOLDER'], repo_to.path) + pagure_config["REQUESTS_FOLDER"], repo_to.path + ) if os.path.exists(requestrepo): shutil.rmtree(forkreponame) shutil.rmtree(docrepo) @@ -603,69 +665,85 @@ def fork(self, session, name, namespace, user_owner, user_forker, 'The requests repo "%s" already exists' % repo_to.path ) pygit2.init_repository( - requestrepo, bare=True, - mode=pygit2.C.GIT_REPOSITORY_INIT_SHARED_GROUP) + requestrepo, + bare=True, + mode=pygit2.C.GIT_REPOSITORY_INIT_SHARED_GROUP, + ) pagure.lib.notify.log( repo_to, - topic='project.forked', - msg=dict( - project=repo_to.to_json(public=True), - agent=user_forker, - ), + topic="project.forked", + msg=dict(project=repo_to.to_json(public=True), agent=user_forker), ) del frepo - _log.info('Project created, refreshing auth async') + _log.info("Project created, refreshing auth async") task = generate_gitolite_acls.delay( 
namespace=repo_to.namespace, name=repo_to.name, - user=repo_to.user.user if repo_to.is_fork else None) - _log.info('Refreshing gitolite config queued in task: %s', task.id) + user=repo_to.user.user if repo_to.is_fork else None, + ) + _log.info("Refreshing gitolite config queued in task: %s", task.id) if editfile is None: - return ret('ui_ns.view_repo', repo=name, namespace=namespace, - username=user_forker) + return ret( + "ui_ns.view_repo", + repo=name, + namespace=namespace, + username=user_forker, + ) else: - return ret('ui_ns.edit_file', repo=name, namespace=namespace, - username=user_forker, branchname=editbranch, - filename=editfile) + return ret( + "ui_ns.edit_file", + repo=name, + namespace=namespace, + username=user_forker, + branchname=editbranch, + filename=editfile, + ) -@conn.task(queue=pagure_config.get('FAST_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("FAST_CELERY_QUEUE", None), bind=True) @pagure_task def pull_remote_repo(self, session, remote_git, branch_from): """ Clone a remote git repository locally for remote PRs. """ clonepath = pagure.utils.get_remote_repo_path( - remote_git, branch_from, ignore_non_exist=True) + remote_git, branch_from, ignore_non_exist=True + ) repo = pygit2.clone_repository( - remote_git, clonepath, checkout_branch=branch_from) + remote_git, clonepath, checkout_branch=branch_from + ) del repo return clonepath -@conn.task(queue=pagure_config.get('MEDIUM_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("MEDIUM_CELERY_QUEUE", None), bind=True) @pagure_task def refresh_remote_pr(self, session, name, namespace, user, requestid): """ Refresh the local clone of a git repository used in a remote pull-request. 
""" project = pagure.lib._get_project( - session, namespace=namespace, name=name, user=user) + session, namespace=namespace, name=name, user=user + ) request = pagure.lib.search_pull_requests( - session, project_id=project.id, requestid=requestid) + session, project_id=project.id, requestid=requestid + ) _log.debug( - 'refreshing remote pull-request: %s/#%s', request.project.fullname, - request.id) + "refreshing remote pull-request: %s/#%s", + request.project.fullname, + request.id, + ) clonepath = pagure.utils.get_remote_repo_path( - request.remote_git, request.branch_from) + request.remote_git, request.branch_from + ) repo = pagure.lib.repo.PagureRepo(clonepath) repo.pull(branch=request.branch_from, force=True) @@ -673,81 +751,107 @@ def refresh_remote_pr(self, session, name, namespace, user, requestid): refresh_pr_cache.delay(name, namespace, user) del repo return ret( - 'ui_ns.request_pull', username=user, namespace=namespace, - repo=name, requestid=requestid) + "ui_ns.request_pull", + username=user, + namespace=namespace, + repo=name, + requestid=requestid, + ) -@conn.task(queue=pagure_config.get('FAST_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("FAST_CELERY_QUEUE", None), bind=True) @pagure_task def refresh_pr_cache(self, session, name, namespace, user): """ Refresh the merge status cached of pull-requests. 
""" project = pagure.lib._get_project( - session, namespace=namespace, name=name, user=user) + session, namespace=namespace, name=name, user=user + ) pagure.lib.reset_status_pull_request(session, project) -@conn.task(queue=pagure_config.get('FAST_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("FAST_CELERY_QUEUE", None), bind=True) @pagure_task -def merge_pull_request(self, session, name, namespace, user, requestid, - user_merger, delete_branch_after=False): +def merge_pull_request( + self, + session, + name, + namespace, + user, + requestid, + user_merger, + delete_branch_after=False, +): """ Merge pull-request. """ project = pagure.lib._get_project( - session, namespace=namespace, name=name, user=user) + session, namespace=namespace, name=name, user=user + ) - with project.lock('WORKER'): + with project.lock("WORKER"): request = pagure.lib.search_pull_requests( - session, project_id=project.id, requestid=requestid) + session, project_id=project.id, requestid=requestid + ) _log.debug( - 'Merging pull-request: %s/#%s', request.project.fullname, - request.id) + "Merging pull-request: %s/#%s", + request.project.fullname, + request.id, + ) pagure.lib.git.merge_pull_request( - session, request, user_merger, pagure_config['REQUESTS_FOLDER']) + session, request, user_merger, pagure_config["REQUESTS_FOLDER"] + ) if delete_branch_after: - _log.debug('Will delete source branch of pull-request: %s/#%s', - request.project.fullname, request.id) - owner = (request.project_from.user.username - if request.project_from.parent else None) + _log.debug( + "Will delete source branch of pull-request: %s/#%s", + request.project.fullname, + request.id, + ) + owner = ( + request.project_from.user.username + if request.project_from.parent + else None + ) delete_branch.delay( request.project_from.name, request.project_from.namespace, owner, - request.branch_from) + request.branch_from, + ) refresh_pr_cache.delay(name, namespace, user) return ret( - 'ui_ns.view_repo', 
repo=name, username=user, namespace=namespace) + "ui_ns.view_repo", repo=name, username=user, namespace=namespace + ) -@conn.task(queue=pagure_config.get('FAST_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("FAST_CELERY_QUEUE", None), bind=True) @pagure_task -def add_file_to_git(self, session, name, namespace, user, user_attacher, - issueuid, filename): +def add_file_to_git( + self, session, name, namespace, user, user_attacher, issueuid, filename +): """ Add a file to the specified git repo. """ project = pagure.lib._get_project( - session, namespace=namespace, name=name, user=user) + session, namespace=namespace, name=name, user=user + ) - with project.lock('WORKER'): + with project.lock("WORKER"): issue = pagure.lib.get_issue_by_uid(session, issueuid) user_attacher = pagure.lib.search_user(session, username=user_attacher) - from_folder = pagure_config['ATTACHMENTS_FOLDER'] - to_folder = pagure_config['TICKETS_FOLDER'] + from_folder = pagure_config["ATTACHMENTS_FOLDER"] + to_folder = pagure_config["TICKETS_FOLDER"] _log.info( - 'Adding file %s from %s to %s', filename, from_folder, to_folder) + "Adding file %s from %s to %s", filename, from_folder, to_folder + ) pagure.lib.git._add_file_to_git( - project, issue, - from_folder, - to_folder, - user_attacher, - filename) + project, issue, from_folder, to_folder, user_attacher, filename + ) -@conn.task(queue=pagure_config.get('MEDIUM_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("MEDIUM_CELERY_QUEUE", None), bind=True) @pagure_task def project_dowait(self, session, name, namespace, user): """ This is a task used to test the locking systems. @@ -755,68 +859,70 @@ def project_dowait(self, session, name, namespace, user): It should never be allowed to be called in production instances, since that would allow an attacker to basically DOS a project by calling this repeatedly. 
""" - assert pagure_config.get('ALLOW_PROJECT_DOWAIT', False) + assert pagure_config.get("ALLOW_PROJECT_DOWAIT", False) project = pagure.lib._get_project( - session, namespace=namespace, name=name, user=user) + session, namespace=namespace, name=name, user=user + ) - with project.lock('WORKER'): + with project.lock("WORKER"): time.sleep(10) return ret( - 'ui_ns.view_repo', repo=name, username=user, namespace=namespace) + "ui_ns.view_repo", repo=name, username=user, namespace=namespace + ) -@conn.task(queue=pagure_config.get('MEDIUM_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("MEDIUM_CELERY_QUEUE", None), bind=True) @pagure_task def sync_pull_ref(self, session, name, namespace, user, requestid): """ Synchronize a pull/ reference from the content in the forked repo, allowing local checkout of the pull-request. """ project = pagure.lib._get_project( - session, namespace=namespace, name=name, user=user) + session, namespace=namespace, name=name, user=user + ) - with project.lock('WORKER'): + with project.lock("WORKER"): request = pagure.lib.search_pull_requests( - session, project_id=project.id, requestid=requestid) + session, project_id=project.id, requestid=requestid + ) _log.debug( - 'Update pull refs of: %s#%s', - request.project.fullname, request.id) + "Update pull refs of: %s#%s", request.project.fullname, request.id + ) if request.remote: # Get the fork repopath = pagure.utils.get_remote_repo_path( - request.remote_git, request.branch_from) + request.remote_git, request.branch_from + ) else: # Get the fork repopath = pagure.utils.get_repo_path(request.project_from) - _log.debug(' working on the repo in: %s', repopath) + _log.debug(" working on the repo in: %s", repopath) repo_obj = pygit2.Repository(repopath) pagure.lib.git.update_pull_ref(request, repo_obj) -@conn.task(queue=pagure_config.get('MEDIUM_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("MEDIUM_CELERY_QUEUE", None), bind=True) @pagure_task def 
update_checksums_file(self, session, folder, filenames): """ Update the checksums file in the release folder of the project. """ - sha_file = os.path.join(folder, 'CHECKSUMS') + sha_file = os.path.join(folder, "CHECKSUMS") new_file = not os.path.exists(sha_file) if not new_file: with open(sha_file) as stream: row = stream.readline().strip() - if row != '# Generated and updated by pagure': + if row != "# Generated and updated by pagure": # This wasn't generated by pagure, don't touch it! return for filename in filenames: - algos = { - 'sha256': hashlib.sha256(), - 'sha512': hashlib.sha512(), - } + algos = {"sha256": hashlib.sha256(), "sha512": hashlib.sha512()} # for each files computes the different algorythm supported with open(os.path.join(folder, filename), "rb") as stream: while True: @@ -828,16 +934,18 @@ def update_checksums_file(self, session, folder, filenames): break # Write them out to the output file - with open(sha_file, 'a') as stream: + with open(sha_file, "a") as stream: if new_file: - stream.write('# Generated and updated by pagure\n') + stream.write("# Generated and updated by pagure\n") new_file = False for algo in sorted(algos): - stream.write('%s (%s) = %s\n' % ( - algo.upper(), filename, algos[algo].hexdigest())) + stream.write( + "%s (%s) = %s\n" + % (algo.upper(), filename, algos[algo].hexdigest()) + ) -@conn.task(queue=pagure_config.get('FAST_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("FAST_CELERY_QUEUE", None), bind=True) @pagure_task def commits_author_stats(self, session, repopath): """ Returns some statistics about commits made against the specified @@ -845,7 +953,7 @@ def commits_author_stats(self, session, repopath): """ if not os.path.exists(repopath): - raise ValueError('Git repository not found.') + raise ValueError("Git repository not found.") repo_obj = pygit2.Repository(repopath) @@ -853,7 +961,8 @@ def commits_author_stats(self, session, repopath): number_of_commits = 0 authors_email = set() for commit 
in repo_obj.walk( - repo_obj.head.get_object().oid.hex, pygit2.GIT_SORT_TIME): + repo_obj.head.get_object().oid.hex, pygit2.GIT_SORT_TIME + ): # For each commit record how many times each combination of name and # e-mail appears in the git history. number_of_commits += 1 @@ -884,19 +993,18 @@ def commits_author_stats(self, session, repopath): authors_email.add(authors[1]) out_stats[val].append(authors) out_list = [ - (key, out_stats[key]) - for key in sorted(out_stats, reverse=True) + (key, out_stats[key]) for key in sorted(out_stats, reverse=True) ] return ( number_of_commits, out_list, len(authors_email), - commit.commit_time + commit.commit_time, ) -@conn.task(queue=pagure_config.get('FAST_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("FAST_CELERY_QUEUE", None), bind=True) @pagure_task def commits_history_stats(self, session, repopath): """ Returns the evolution of the commits made against the specified @@ -904,15 +1012,17 @@ def commits_history_stats(self, session, repopath): """ if not os.path.exists(repopath): - raise ValueError('Git repository not found.') + raise ValueError("Git repository not found.") repo_obj = pygit2.Repository(repopath) dates = collections.defaultdict(int) for commit in repo_obj.walk( - repo_obj.head.get_object().oid.hex, pygit2.GIT_SORT_TIME): - delta = datetime.datetime.utcnow() \ - - arrow.get(commit.commit_time).naive + repo_obj.head.get_object().oid.hex, pygit2.GIT_SORT_TIME + ): + delta = ( + datetime.datetime.utcnow() - arrow.get(commit.commit_time).naive + ) if delta.days > 365: break dates[arrow.get(commit.commit_time).date().isoformat()] += 1 @@ -920,24 +1030,24 @@ def commits_history_stats(self, session, repopath): return [(key, dates[key]) for key in sorted(dates)] -@conn.task(queue=pagure_config.get('MEDIUM_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("MEDIUM_CELERY_QUEUE", None), bind=True) @pagure_task def link_pr_to_ticket(self, session, pr_uid): """ Link the specified 
pull-request against the ticket(s) mentioned in the commits of the pull-request """ - _log.info( - 'LINK_PR_TO_TICKET: Linking ticket(s) to PR for: %s' % pr_uid) + _log.info("LINK_PR_TO_TICKET: Linking ticket(s) to PR for: %s" % pr_uid) request = pagure.lib.get_request_by_uid(session, pr_uid) if not request: - _log.info('LINK_PR_TO_TICKET: Not PR found for: %s' % pr_uid) + _log.info("LINK_PR_TO_TICKET: Not PR found for: %s" % pr_uid) return if request.remote: repopath = pagure.utils.get_remote_repo_path( - request.remote_git, request.branch_from) + request.remote_git, request.branch_from + ) parentpath = pagure.utils.get_repo_path(request.project) else: repo_from = request.project_from @@ -948,40 +1058,48 @@ def link_pr_to_ticket(self, session, pr_uid): orig_repo = pygit2.Repository(parentpath) diff_commits = pagure.lib.git.diff_pull_request( - session, request, repo_obj, orig_repo, - requestfolder=pagure_config['REQUESTS_FOLDER'], with_diff=False) + session, + request, + repo_obj, + orig_repo, + requestfolder=pagure_config["REQUESTS_FOLDER"], + with_diff=False, + ) _log.info( - 'LINK_PR_TO_TICKET: Found %s commits in that PR' % len(diff_commits)) + "LINK_PR_TO_TICKET: Found %s commits in that PR" % len(diff_commits) + ) name = request.project.name namespace = request.project.namespace - user = request.project.user.user \ - if request.project.is_fork else None + user = request.project.user.user if request.project.is_fork else None for line in pagure.lib.git.read_git_lines( - ['log', '--no-walk'] - + [c.oid.hex for c in diff_commits] - + ['--'], repopath): + ["log", "--no-walk"] + [c.oid.hex for c in diff_commits] + ["--"], + repopath, + ): line = line.strip() for issue in pagure.lib.link.get_relation( - session, name, user, namespace, line, 'fixes', - include_prs=False): + session, name, user, namespace, line, "fixes", include_prs=False + ): _log.info( - 'LINK_PR_TO_TICKET: Link ticket %s to PRs %s' % ( - issue, request)) + "LINK_PR_TO_TICKET: Link ticket %s to PRs 
%s" + % (issue, request) + ) pagure.lib.link_pr_issue(session, issue, request) for issue in pagure.lib.link.get_relation( - session, name, user, namespace, line, 'relates'): + session, name, user, namespace, line, "relates" + ): _log.info( - 'LINK_PR_TO_TICKET: Link ticket %s to PRs %s' % ( - issue, request)) + "LINK_PR_TO_TICKET: Link ticket %s to PRs %s" + % (issue, request) + ) pagure.lib.link_pr_issue(session, issue, request) try: session.commit() except SQLAlchemyError: - _log.exception('Could not link ticket to PR :(') + _log.exception("Could not link ticket to PR :(") session.rollback() diff --git a/pagure/lib/tasks_mirror.py b/pagure/lib/tasks_mirror.py index f64d423..f9aa457 100644 --- a/pagure/lib/tasks_mirror.py +++ b/pagure/lib/tasks_mirror.py @@ -37,15 +37,15 @@ from pagure.utils import ssh_urlpattern _log = logging.getLogger(__name__) -if os.environ.get('PAGURE_BROKER_URL'): # pragma: no-cover - broker_url = os.environ['PAGURE_BROKER_URL'] -elif pagure_config.get('BROKER_URL'): - broker_url = pagure_config['BROKER_URL'] +if os.environ.get("PAGURE_BROKER_URL"): # pragma: no-cover + broker_url = os.environ["PAGURE_BROKER_URL"] +elif pagure_config.get("BROKER_URL"): + broker_url = pagure_config["BROKER_URL"] else: - broker_url = 'redis://%s' % pagure_config['REDIS_HOST'] + broker_url = "redis://%s" % pagure_config["REDIS_HOST"] -conn = Celery('tasks_mirror', broker=broker_url, backend=broker_url) -conn.conf.update(pagure_config['CELERY_CONFIG']) +conn = Celery("tasks_mirror", broker=broker_url, backend=broker_url) +conn.conf.update(pagure_config["CELERY_CONFIG"]) # Code from: @@ -71,9 +71,9 @@ def _serialize_public_ssh_key(key): if isinstance(key, rsa.RSAPublicKey): public_numbers = key.public_numbers() return b"ssh-rsa " + base64.b64encode( - _ssh_write_string(b"ssh-rsa") + - _ssh_write_mpint(public_numbers.e) + - _ssh_write_mpint(public_numbers.n) + _ssh_write_string(b"ssh-rsa") + + _ssh_write_mpint(public_numbers.e) + + 
_ssh_write_mpint(public_numbers.n) ) else: # Since we only write RSA keys, drop the other serializations @@ -81,102 +81,104 @@ def _serialize_public_ssh_key(key): def _create_ssh_key(keyfile): - ''' Create the public and private ssh keys. + """ Create the public and private ssh keys. The specified file name will be the private key and the public one will be in a similar file name ending with a '.pub'. - ''' + """ private_key = rsa.generate_private_key( - public_exponent=65537, - key_size=4096, - backend=default_backend() + public_exponent=65537, key_size=4096, backend=default_backend() ) private_pem = private_key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, - encryption_algorithm=serialization.NoEncryption() + encryption_algorithm=serialization.NoEncryption(), ) - with os.fdopen(os.open( - keyfile, os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o600), 'wb')\ - as stream: + with os.fdopen( + os.open(keyfile, os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o600), "wb" + ) as stream: stream.write(private_pem) public_key = private_key.public_key() public_pem = _serialize_public_ssh_key(public_key) if public_pem: - with open(keyfile + '.pub', 'wb') as stream: + with open(keyfile + ".pub", "wb") as stream: stream.write(public_pem) -@conn.task(queue=pagure_config['MIRRORING_QUEUE'], bind=True) +@conn.task(queue=pagure_config["MIRRORING_QUEUE"], bind=True) @pagure_task def setup_mirroring(self, session, username, namespace, name): - ''' Setup the specified project for mirroring. - ''' - plugin = pagure.lib.plugins.get_plugin('Mirroring') + """ Setup the specified project for mirroring. 
+ """ + plugin = pagure.lib.plugins.get_plugin("Mirroring") plugin.db_object() project = pagure.lib._get_project( - session, namespace=namespace, name=name, user=username) + session, namespace=namespace, name=name, user=username + ) public_key_name = werkzeug.secure_filename(project.fullname) - ssh_folder = pagure_config['MIRROR_SSHKEYS_FOLDER'] + ssh_folder = pagure_config["MIRROR_SSHKEYS_FOLDER"] if not os.path.exists(ssh_folder): os.makedirs(ssh_folder, mode=0o700) else: if os.path.islink(ssh_folder): - raise pagure.exceptions.PagureException( - 'SSH folder is a link') + raise pagure.exceptions.PagureException("SSH folder is a link") folder_stat = os.stat(ssh_folder) filemode = stat.S_IMODE(folder_stat.st_mode) - if filemode != int('0700', 8): + if filemode != int("0700", 8): raise pagure.exceptions.PagureException( - 'SSH folder had invalid permissions') - if folder_stat.st_uid != os.getuid() \ - or folder_stat.st_gid != os.getgid(): + "SSH folder had invalid permissions" + ) + if ( + folder_stat.st_uid != os.getuid() + or folder_stat.st_gid != os.getgid() + ): raise pagure.exceptions.PagureException( - 'SSH folder does not belong to the user or group running ' - 'this task') + "SSH folder does not belong to the user or group running " + "this task" + ) - public_key_file = os.path.join(ssh_folder, '%s.pub' % public_key_name) - _log.info('Public key of interest: %s', public_key_file) + public_key_file = os.path.join(ssh_folder, "%s.pub" % public_key_name) + _log.info("Public key of interest: %s", public_key_file) if os.path.exists(public_key_file): - raise pagure.exceptions.PagureException('SSH key already exists') + raise pagure.exceptions.PagureException("SSH key already exists") - _log.info('Creating public key') + _log.info("Creating public key") _create_ssh_key(os.path.join(ssh_folder, public_key_name)) with open(public_key_file) as stream: public_key = stream.read() if project.mirror_hook.public_key != public_key: - _log.info('Updating information in the 
DB') + _log.info("Updating information in the DB") project.mirror_hook.public_key = public_key session.add(project.mirror_hook) session.commit() -@conn.task(queue=pagure_config['MIRRORING_QUEUE'], bind=True) +@conn.task(queue=pagure_config["MIRRORING_QUEUE"], bind=True) @pagure_task def teardown_mirroring(self, session, username, namespace, name): - ''' Stop the mirroring of the specified project. - ''' - plugin = pagure.lib.plugins.get_plugin('Mirroring') + """ Stop the mirroring of the specified project. + """ + plugin = pagure.lib.plugins.get_plugin("Mirroring") plugin.db_object() project = pagure.lib._get_project( - session, namespace=namespace, name=name, user=username) + session, namespace=namespace, name=name, user=username + ) - ssh_folder = pagure_config['MIRROR_SSHKEYS_FOLDER'] + ssh_folder = pagure_config["MIRROR_SSHKEYS_FOLDER"] public_key_name = werkzeug.secure_filename(project.fullname) private_key_file = os.path.join(ssh_folder, public_key_name) - public_key_file = os.path.join( - ssh_folder, '%s.pub' % public_key_name) + public_key_file = os.path.join(ssh_folder, "%s.pub" % public_key_name) if os.path.exists(private_key_file): os.unlink(private_key_file) @@ -189,68 +191,78 @@ def teardown_mirroring(self, session, username, namespace, name): session.commit() -@conn.task(queue=pagure_config['MIRRORING_QUEUE'], bind=True) +@conn.task(queue=pagure_config["MIRRORING_QUEUE"], bind=True) @pagure_task def mirror_project(self, session, username, namespace, name): - ''' Does the actual mirroring of the specified project. - ''' - plugin = pagure.lib.plugins.get_plugin('Mirroring') + """ Does the actual mirroring of the specified project. 
+ """ + plugin = pagure.lib.plugins.get_plugin("Mirroring") plugin.db_object() project = pagure.lib._get_project( - session, namespace=namespace, name=name, user=username) + session, namespace=namespace, name=name, user=username + ) - repofolder = pagure_config['GIT_FOLDER'] + repofolder = pagure_config["GIT_FOLDER"] repopath = os.path.join(repofolder, project.path) if not os.path.exists(repopath): - _log.info('Git folder not found at: %s, bailing', repopath) + _log.info("Git folder not found at: %s, bailing", repopath) return - newpath = tempfile.mkdtemp(prefix='pagure-mirror-') + newpath = tempfile.mkdtemp(prefix="pagure-mirror-") pygit2.clone_repository(repopath, newpath) - ssh_folder = pagure_config['MIRROR_SSHKEYS_FOLDER'] + ssh_folder = pagure_config["MIRROR_SSHKEYS_FOLDER"] public_key_name = werkzeug.secure_filename(project.fullname) private_key_file = os.path.join(ssh_folder, public_key_name) # Get the list of remotes remotes = [ remote.strip() - for remote in project.mirror_hook.target.split('\n') - if project.mirror_hook and remote.strip() + for remote in project.mirror_hook.target.split("\n") + if project.mirror_hook + and remote.strip() and ssh_urlpattern.match(remote.strip()) ] # Add the remotes for idx, remote in enumerate(remotes): - remote_name = '%s_%s' % (public_key_name, idx) - _log.info('Adding remote %s as %s', remote, remote_name) + remote_name = "%s_%s" % (public_key_name, idx) + _log.info("Adding remote %s as %s", remote, remote_name) (stdout, stderr) = pagure.lib.git.read_git_lines( - ['remote', 'add', remote_name, remote, '--mirror=push'], - abspath=newpath, error=True) + ["remote", "add", remote_name, remote, "--mirror=push"], + abspath=newpath, + error=True, + ) _log.info( "Output from git remote add:\n stdout: %s\n stderr: %s", - stdout, stderr) + stdout, + stderr, + ) # Push logs = [] for idx, remote in enumerate(remotes): - remote_name = '%s_%s' % (public_key_name, idx) + remote_name = "%s_%s" % (public_key_name, idx) _log.info( - 
'Pushing to remote %s using key: %s', remote_name, - private_key_file) + "Pushing to remote %s using key: %s", remote_name, private_key_file + ) (stdout, stderr) = pagure.lib.git.read_git_lines( - ['push', remote_name], - abspath=newpath, error=True, - env={'GIT_SSH_COMMAND': 'ssh -i %s' % private_key_file}) + ["push", remote_name], + abspath=newpath, + error=True, + env={"GIT_SSH_COMMAND": "ssh -i %s" % private_key_file}, + ) log = "Output from the push:\n stdout: %s\n stderr: %s" % ( - stdout, stderr) + stdout, + stderr, + ) logs.append(log) if logs: - project.mirror_hook.last_log = '\n'.join(logs) + project.mirror_hook.last_log = "\n".join(logs) session.add(project.mirror_hook) session.commit() - _log.info('\n'.join(logs)) + _log.info("\n".join(logs)) # Remove the clone shutil.rmtree(newpath) diff --git a/pagure/lib/tasks_services.py b/pagure/lib/tasks_services.py index b76d073..86ce9da 100644 --- a/pagure/lib/tasks_services.py +++ b/pagure/lib/tasks_services.py @@ -40,15 +40,15 @@ _log = get_task_logger(__name__) _i = 0 -if os.environ.get('PAGURE_BROKER_URL'): # pragma: no cover - broker_url = os.environ['PAGURE_BROKER_URL'] -elif pagure_config.get('BROKER_URL'): - broker_url = pagure_config['BROKER_URL'] +if os.environ.get("PAGURE_BROKER_URL"): # pragma: no cover + broker_url = os.environ["PAGURE_BROKER_URL"] +elif pagure_config.get("BROKER_URL"): + broker_url = pagure_config["BROKER_URL"] else: - broker_url = 'redis://%s' % pagure_config['REDIS_HOST'] + broker_url = "redis://%s" % pagure_config["REDIS_HOST"] -conn = Celery('tasks', broker=broker_url, backend=broker_url) -conn.conf.update(pagure_config['CELERY_CONFIG']) +conn = Celery("tasks", broker=broker_url, backend=broker_url) +conn.conf.update(pagure_config["CELERY_CONFIG"]) @after_setup_task_logger.connect @@ -57,21 +57,20 @@ def augment_celery_log(**kwargs): def call_web_hooks(project, topic, msg, urls): - ''' Sends the web-hook notification. 
''' - _log.info( - "Processing project: %s - topic: %s", project.fullname, topic) - _log.debug('msg: %s', msg) + """ Sends the web-hook notification. """ + _log.info("Processing project: %s - topic: %s", project.fullname, topic) + _log.debug("msg: %s", msg) # Send web-hooks notification global _i _i += 1 year = datetime.datetime.utcnow().year if isinstance(topic, six.text_type): - topic = to_bytes(topic, encoding='utf8', nonstring="passthru") - msg['pagure_instance'] = pagure_config['APP_URL'] - msg['project_fullname'] = project.fullname + topic = to_bytes(topic, encoding="utf8", nonstring="passthru") + msg["pagure_instance"] = pagure_config["APP_URL"] + msg["project_fullname"] = project.fullname msg = dict( - topic=topic.decode('utf-8'), + topic=topic.decode("utf-8"), msg=msg, timestamp=int(time.time()), msg_id="%s-%s" % (year, uuid.uuid4()), @@ -80,45 +79,46 @@ def call_web_hooks(project, topic, msg, urls): content = json.dumps(msg, sort_keys=True) hashhex = hmac.new( - project.hook_token.encode('utf-8'), - content.encode('utf-8'), - hashlib.sha1).hexdigest() + project.hook_token.encode("utf-8"), + content.encode("utf-8"), + hashlib.sha1, + ).hexdigest() hashhex256 = hmac.new( - project.hook_token.encode('utf-8'), - content.encode('utf-8'), - hashlib.sha256).hexdigest() + project.hook_token.encode("utf-8"), + content.encode("utf-8"), + hashlib.sha256, + ).hexdigest() headers = { - 'X-Pagure': pagure_config['APP_URL'], - 'X-Pagure-project': project.fullname, - 'X-Pagure-Signature': hashhex, - 'X-Pagure-Signature-256': hashhex256, - 'X-Pagure-Topic': topic, - 'Content-Type': 'application/json', + "X-Pagure": pagure_config["APP_URL"], + "X-Pagure-project": project.fullname, + "X-Pagure-Signature": hashhex, + "X-Pagure-Signature-256": hashhex256, + "X-Pagure-Topic": topic, + "Content-Type": "application/json", } for url in sorted(urls): url = url.strip() - _log.info('Calling url %s' % url) + _log.info("Calling url %s" % url) try: req = requests.post( - url, - 
headers=headers, - data={'payload': content}, - timeout=60, + url, headers=headers, data={"payload": content}, timeout=60 ) if not req: _log.info( - 'An error occured while querying: %s - ' - 'Error code: %s' % (url, req.status_code)) + "An error occured while querying: %s - " + "Error code: %s" % (url, req.status_code) + ) except (requests.exceptions.RequestException, Exception) as err: _log.info( - 'An error occured while querying: %s - Error: %s' % ( - url, err)) + "An error occured while querying: %s - Error: %s" % (url, err) + ) -@conn.task(queue=pagure_config.get('WEBHOOK_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("WEBHOOK_CELERY_QUEUE", None), bind=True) @pagure_task def webhook_notification( - self, session, topic, msg, namespace=None, name=None, user=None): + self, session, topic, msg, namespace=None, name=None, user=None +): """ Send webhook notifications about an event on that project. :arg session: SQLAlchemy session object @@ -136,29 +136,39 @@ def webhook_notification( """ project = pagure.lib._get_project( - session, namespace=namespace, name=name, user=user) + session, namespace=namespace, name=name, user=user + ) if not project: session.close() raise RuntimeError( - 'Project: %s/%s from user: %s not found in the DB' % ( - namespace, name, user)) + "Project: %s/%s from user: %s not found in the DB" + % (namespace, name, user) + ) - urls = project.settings.get('Web-hooks') + urls = project.settings.get("Web-hooks") if not urls: - _log.info('No URLs set: %s' % urls) + _log.info("No URLs set: %s" % urls) return - urls = urls.split('\n') - _log.info('Got the project and urls, going to the webhooks') + urls = urls.split("\n") + _log.info("Got the project and urls, going to the webhooks") call_web_hooks(project, topic, msg, urls) -@conn.task(queue=pagure_config.get('LOGCOM_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("LOGCOM_CELERY_QUEUE", None), bind=True) @pagure_task def log_commit_send_notifications( - 
self, session, name, commits, abspath, branch, default_branch, - namespace=None, username=None): + self, + session, + name, + commits, + abspath, + branch, + default_branch, + namespace=None, + username=None, +): """ Send webhook notifications about an event on that project. :arg session: SQLAlchemy session object @@ -176,30 +186,30 @@ def log_commit_send_notifications( """ _log.info( - 'Looking for project: %s%s of %s', - '%s/' % namespace if namespace else '', + "Looking for project: %s%s of %s", + "%s/" % namespace if namespace else "", name, - username) + username, + ) project = pagure.lib._get_project( - session, name, user=username, namespace=namespace) + session, name, user=username, namespace=namespace + ) if not project: - _log.info('No project found') + _log.info("No project found") return - _log.info('Found project: %s', project.fullname) + _log.info("Found project: %s", project.fullname) - _log.info('Processing %s commits in %s', len(commits), abspath) + _log.info("Processing %s commits in %s", len(commits), abspath) # Only log commits when the branch is the default branch if branch == default_branch: - pagure.lib.git.log_commits_to_db( - session, project, commits, abspath) + pagure.lib.git.log_commits_to_db(session, project, commits, abspath) # Notify subscribed users that there are new commits - if pagure_config.get('EMAIL_ON_WATCHCOMMITS', True): - pagure.lib.notify.notify_new_commits( - abspath, project, branch, commits) + if pagure_config.get("EMAIL_ON_WATCHCOMMITS", True): + pagure.lib.notify.notify_new_commits(abspath, project, branch, commits) try: session.commit() @@ -210,23 +220,32 @@ def log_commit_send_notifications( def get_files_to_load(title, new_commits_list, abspath): - _log.info('%s: Retrieve the list of files changed' % title) + _log.info("%s: Retrieve the list of files changed" % title) file_list = [] new_commits_list.reverse() n = len(new_commits_list) for idx, commit in enumerate(new_commits_list): if (idx % 100) == 0: _log.info( - 
'Loading files change in commits for %s: %s/%s', - title, idx, n) + "Loading files change in commits for %s: %s/%s", title, idx, n + ) if commit == new_commits_list[0]: filenames = pagure.lib.git.read_git_lines( - ['diff-tree', '--no-commit-id', '--name-only', '-r', '--root', - commit], abspath) + [ + "diff-tree", + "--no-commit-id", + "--name-only", + "-r", + "--root", + commit, + ], + abspath, + ) else: filenames = pagure.lib.git.read_git_lines( - ['diff-tree', '--no-commit-id', '--name-only', '-r', commit], - abspath) + ["diff-tree", "--no-commit-id", "--name-only", "-r", commit], + abspath, + ) for line in filenames: if line.strip(): file_list.append(line.strip()) @@ -234,60 +253,80 @@ def get_files_to_load(title, new_commits_list, abspath): return file_list -@conn.task(queue=pagure_config.get('LOADJSON_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("LOADJSON_CELERY_QUEUE", None), bind=True) @pagure_task def load_json_commits_to_db( - self, session, name, commits, abspath, data_type, agent, - namespace=None, username=None): - ''' Loads into the database the specified commits that have been pushed + self, + session, + name, + commits, + abspath, + data_type, + agent, + namespace=None, + username=None, +): + """ Loads into the database the specified commits that have been pushed to either the tickets or the pull-request repository. 
- ''' + """ - if data_type not in ['ticket', 'pull-request']: - _log.info('LOADJSON: Invalid data_type retrieved: %s', data_type) + if data_type not in ["ticket", "pull-request"]: + _log.info("LOADJSON: Invalid data_type retrieved: %s", data_type) return _log.info( - 'LOADJSON: Looking for project: %s%s of user: %s', - '%s/' % namespace if namespace else '', - name, username) + "LOADJSON: Looking for project: %s%s of user: %s", + "%s/" % namespace if namespace else "", + name, + username, + ) project = pagure.lib._get_project( - session, name, user=username, namespace=namespace) + session, name, user=username, namespace=namespace + ) if not project: - _log.info('LOADJSON: No project found') + _log.info("LOADJSON: No project found") return - _log.info('LOADJSON: Found project: %s', project.fullname) + _log.info("LOADJSON: Found project: %s", project.fullname) _log.info( - 'LOADJSON: %s: Processing %s commits in %s', project.fullname, - len(commits), abspath) + "LOADJSON: %s: Processing %s commits in %s", + project.fullname, + len(commits), + abspath, + ) file_list = set(get_files_to_load(project.fullname, commits, abspath)) n = len(file_list) - _log.info('LOADJSON: %s files to process' % n) + _log.info("LOADJSON: %s files to process" % n) mail_body = [] for idx, filename in enumerate(sorted(file_list)): _log.info( - 'LOADJSON: Loading: %s: %s -- %s/%s', - project.fullname, filename, idx + 1, n) - tmp = 'Loading: %s -- %s/%s' % (filename, idx + 1, n) + "LOADJSON: Loading: %s: %s -- %s/%s", + project.fullname, + filename, + idx + 1, + n, + ) + tmp = "Loading: %s -- %s/%s" % (filename, idx + 1, n) json_data = None - data = ''.join( + data = "".join( pagure.lib.git.read_git_lines( - ['show', 'HEAD:%s' % filename], abspath)) - if data and not filename.startswith('files/'): + ["show", "HEAD:%s" % filename], abspath + ) + ) + if data and not filename.startswith("files/"): try: json_data = json.loads(data) except ValueError: pass if json_data: try: - if data_type == 
'ticket': + if data_type == "ticket": pagure.lib.git.update_ticket_from_git( session, reponame=name, @@ -297,7 +336,7 @@ def load_json_commits_to_db( json_data=json_data, agent=agent, ) - elif data_type == 'pull-request': + elif data_type == "pull-request": pagure.lib.git.update_request_from_git( session, reponame=name, @@ -306,82 +345,95 @@ def load_json_commits_to_db( request_uid=filename, json_data=json_data, ) - tmp += ' ... ... Done' + tmp += " ... ... Done" except Exception as err: - _log.info('data: %s', json_data) + _log.info("data: %s", json_data) session.rollback() _log.exception(err) - tmp += ' ... ... FAILED\n' + tmp += " ... ... FAILED\n" tmp += format_callstack() break finally: mail_body.append(tmp) else: - tmp += ' ... ... SKIPPED - No JSON data' + tmp += " ... ... SKIPPED - No JSON data" mail_body.append(tmp) try: session.commit() _log.info( - 'LOADJSON: Emailing results for %s to %s', - project.fullname, agent) + "LOADJSON: Emailing results for %s to %s", project.fullname, agent + ) try: if not agent: raise pagure.exceptions.PagureException( - 'No agent found: %s' % agent) + "No agent found: %s" % agent + ) user_obj = pagure.lib.get_user(session, agent) pagure.lib.notify.send_email( - '\n'.join(mail_body), - 'Issue import report', - user_obj.default_email) + "\n".join(mail_body), + "Issue import report", + user_obj.default_email, + ) except pagure.exceptions.PagureException as err: - _log.exception('LOADJSON: Could not find user %s' % agent) + _log.exception("LOADJSON: Could not find user %s" % agent) except SQLAlchemyError as err: # pragma: no cover session.rollback() - _log.info('LOADJSON: Ready for another') + _log.info("LOADJSON: Ready for another") -@conn.task(queue=pagure_config.get('CI_CELERY_QUEUE', None), bind=True) +@conn.task(queue=pagure_config.get("CI_CELERY_QUEUE", None), bind=True) @pagure_task def trigger_ci_build(self, session, project_name, cause, branch, ci_type): - ''' Triggers a new run of the CI system on the specified 
pull-request. + """ Triggers a new run of the CI system on the specified pull-request. - ''' - pagure.lib.plugins.get_plugin('Pagure CI') + """ + pagure.lib.plugins.get_plugin("Pagure CI") user, namespace, project_name = split_project_fullname(project_name) - _log.info('Pagure-CI: Looking for project: %s', project_name) - project = pagure.lib.get_authorized_project(session=session, - project_name=project_name, - user=user, - namespace=namespace) + _log.info("Pagure-CI: Looking for project: %s", project_name) + project = pagure.lib.get_authorized_project( + session=session, + project_name=project_name, + user=user, + namespace=namespace, + ) if project is None: - _log.warning('Pagure-CI: No project could be found for the name %s', - project_name) + _log.warning( + "Pagure-CI: No project could be found for the name %s", + project_name, + ) session.close() return if project.is_fork: - if project.parent.ci_hook is None \ - or project.parent.ci_hook.ci_url is None: + if ( + project.parent.ci_hook is None + or project.parent.ci_hook.ci_url is None + ): raise pagure.exceptions.PagureException( - 'Project %s not configured or incorectly configured for ci', - project.parent.fullname) + "Project %s not configured or incorectly configured for ci", + project.parent.fullname, + ) elif project.ci_hook is None or project.ci_hook.ci_url is None: raise pagure.exceptions.PagureException( - 'Project %s not configured or incorectly configured for ci', - project.fullname) + "Project %s not configured or incorectly configured for ci", + project.fullname, + ) - _log.info('Pagure-CI: project retrieved: %s', project.fullname) + _log.info("Pagure-CI: project retrieved: %s", project.fullname) _log.info( "Pagure-CI: Trigger from %s cause (PR# or commit) %s branch: %s", - project.fullname, cause, branch) + project.fullname, + cause, + branch, + ) - if ci_type == 'jenkins': + if ci_type == "jenkins": if project.is_fork: url = project.parent.ci_hook.ci_url @@ -392,14 +444,16 @@ def 
trigger_ci_build(self, session, project_name, cause, branch, ci_type): job = project.ci_hook.ci_job token = project.ci_hook.pagure_ci_token - trigger_jenkins_build(project_path=project.path, - url=url, - job=job, - token=token, - branch=branch, - cause=cause) + trigger_jenkins_build( + project_path=project.path, + url=url, + job=job, + token=token, + branch=branch, + cause=cause, + ) else: - _log.warning('Pagure-CI:Un-supported CI type') + _log.warning("Pagure-CI:Un-supported CI type") - _log.info('Pagure-CI: Ready for another') + _log.info("Pagure-CI: Ready for another") diff --git a/pagure/login_forms.py b/pagure/login_forms.py index 8433ca3..50f5d5c 100644 --- a/pagure/login_forms.py +++ b/pagure/login_forms.py @@ -23,6 +23,7 @@ from __future__ import unicode_literals import wtforms + try: from flask_wtf import FlaskForm as FlaskForm except ImportError: @@ -30,80 +31,83 @@ except ImportError: def same_password(form, field): - ''' Check if the data in the field is the same as in the password field. - ''' + """ Check if the data in the field is the same as in the password field. + """ if field.data != form.password.data: raise wtforms.validators.ValidationError( - 'Both password fields should be equal') + "Both password fields should be equal" + ) class LostPasswordForm(FlaskForm): """ Form to ask for a password change. """ + username = wtforms.TextField( 'username *', - [wtforms.validators.Required()] + [wtforms.validators.Required()], ) class ResetPasswordForm(FlaskForm): """ Form to reset one's password in the local database. """ + password = wtforms.PasswordField( 'Password *', - [wtforms.validators.Required()] + [wtforms.validators.Required()], ) confirm_password = wtforms.PasswordField( 'Confirm password *', - [wtforms.validators.Required(), same_password] + [wtforms.validators.Required(), same_password], ) class LoginForm(FlaskForm): """ Form to login via the local database. 
""" + username = wtforms.TextField( 'username *', - [wtforms.validators.Required()] + [wtforms.validators.Required()], ) password = wtforms.PasswordField( 'Password *', - [wtforms.validators.Required()] + [wtforms.validators.Required()], ) class NewUserForm(FlaskForm): """ Form to add a new user to the local database. """ + user = wtforms.TextField( 'username *', - [wtforms.validators.Required()] - ) - fullname = wtforms.TextField( - 'Full name', - [wtforms.validators.Optional()] + [wtforms.validators.Required()], ) + fullname = wtforms.TextField("Full name", [wtforms.validators.Optional()]) email_address = wtforms.TextField( 'Email address *', - [wtforms.validators.Required(), wtforms.validators.Email()] + [wtforms.validators.Required(), wtforms.validators.Email()], ) password = wtforms.PasswordField( 'Password *', - [wtforms.validators.Required()] + [wtforms.validators.Required()], ) confirm_password = wtforms.PasswordField( 'Confirm password *', - [wtforms.validators.Required(), same_password] + [wtforms.validators.Required(), same_password], ) class ChangePasswordForm(FlaskForm): """ Form to reset one's password in the local database. """ + old_password = wtforms.PasswordField( 'Old Password *', - [wtforms.validators.Required()] + [wtforms.validators.Required()], ) password = wtforms.PasswordField( 'Password *', - [wtforms.validators.Required()] + [wtforms.validators.Required()], ) confirm_password = wtforms.PasswordField( 'Confirm password *', - [wtforms.validators.Required(), same_password] + [wtforms.validators.Required(), same_password], ) diff --git a/pagure/mail_logging.py b/pagure/mail_logging.py index a1bdd15..b85eb3c 100644 --- a/pagure/mail_logging.py +++ b/pagure/mail_logging.py @@ -19,9 +19,9 @@ # of Red Hat, Inc. # -''' +""" Mail handler for logging. -''' +""" from __future__ import unicode_literals @@ -48,9 +48,9 @@ def format_callstack(): """ Format the callstack to find out the stack trace. 
""" ind = 0 for ind, frame in enumerate(f[0] for f in inspect.stack()): - if '__name__' not in frame.f_globals: + if "__name__" not in frame.f_globals: continue - modname = frame.f_globals['__name__'].split('.')[0] + modname = frame.f_globals["__name__"].split(".")[0] if modname != "logging": break @@ -94,7 +94,7 @@ class ContextInjector(logging.Filter): # pragma: no cover record.host = current_hostname record.proc = current_process - record.pid = '-' + record.pid = "-" if not isinstance(current_process, str): record.pid = current_process.pid # Be compatible with python-psutil 1.0 and 2.0, 3.0 @@ -111,14 +111,14 @@ class ContextInjector(logging.Filter): # pragma: no cover record.callstack = format_callstack() try: - record.url = getattr(flask.request, 'url', '-') - record.args = getattr(flask.request, 'args', '-') - record.form = '-' - record.username = '-' + record.url = getattr(flask.request, "url", "-") + record.args = getattr(flask.request, "args", "-") + record.form = "-" + record.username = "-" try: record.form = dict(flask.request.form) - if 'csrf_token' in record.form: - record.form['csrf_token'] = 'Was present, is cleaned up' + if "csrf_token" in record.form: + record.form["csrf_token"] = "Was present, is cleaned up" except RuntimeError: pass try: @@ -127,10 +127,10 @@ class ContextInjector(logging.Filter): # pragma: no cover pass except RuntimeError: # This means we are sending an error email from the worker - record.url = '* Worker *' - record.args = '' - record.form = '-' - record.username = '-' + record.url = "* Worker *" + record.args = "" + record.form = "-" + record.username = "-" return True @@ -179,10 +179,8 @@ def get_mail_handler(smtp_server, mail_admin, from_email): """ mail_handler = logging.handlers.SMTPHandler( - smtp_server, - from_email, - mail_admin, - 'Pagure error') + smtp_server, from_email, mail_admin, "Pagure error" + ) mail_handler.setFormatter(logging.Formatter(MSG_FORMAT)) mail_handler.setLevel(logging.ERROR) 
mail_handler.addFilter(ContextInjector()) diff --git a/pagure/perfrepo.py b/pagure/perfrepo.py index 4db9cb0..2e4d39f 100644 --- a/pagure/perfrepo.py +++ b/pagure/perfrepo.py @@ -24,8 +24,7 @@ import _pygit2 real_pygit2_repository = pygit2.Repository -TOTALS = {'walks': 0, - 'steps': 0} +TOTALS = {"walks": 0, "steps": 0} REQUESTS = [] STATS = {} @@ -33,17 +32,19 @@ STATS = {} class PerfRepoMeta(type): # pragma: no cover def __new__(cls, name, parents, dct): # create a class_id if it's not specified - if 'class_id' not in dct: - dct['class_id'] = name.lower() + if "class_id" not in dct: + dct["class_id"] = name.lower() # we need to call type.__new__ to complete the initialization return super(PerfRepoMeta, cls).__new__(cls, name, parents, dct) def __getattr__(cls, attr): real = getattr(real_pygit2_repository, attr) - if type(real).__name__ in ['function', 'builtin_function_or_method']: + if type(real).__name__ in ["function", "builtin_function_or_method"]: + def fake(*args, **kwargs): return real(*args, **kwargs) + return fake else: return real @@ -52,27 +53,28 @@ class PerfRepoMeta(type): # pragma: no cover class FakeWalker(six.Iterator): # pragma: no cover def __init__(self, parent): self.parent = parent - self.wid = STATS['counters']['walks'] - STATS['counters']['walks'] += 1 + self.wid = STATS["counters"]["walks"] + STATS["counters"]["walks"] += 1 - STATS['walks'][self.wid] = { - 'steps': 0, - 'type': 'walker', - 'init': traceback.extract_stack(limit=3)[0], - 'iter': None} - TOTALS['walks'] += 1 + STATS["walks"][self.wid] = { + "steps": 0, + "type": "walker", + "init": traceback.extract_stack(limit=3)[0], + "iter": None, + } + TOTALS["walks"] += 1 def __getattr__(self, attr): return getattr(self.parent, attr) def __iter__(self): - STATS['walks'][self.wid]['iter'] = traceback.extract_stack(limit=2)[0] + STATS["walks"][self.wid]["iter"] = traceback.extract_stack(limit=2)[0] return self def __next__(self): - STATS['walks'][self.wid]['steps'] += 1 - TOTALS['steps'] 
+= 1 + STATS["walks"][self.wid]["steps"] += 1 + TOTALS["steps"] += 1 resp = next(iter(self.parent)) return resp @@ -82,9 +84,9 @@ class FakeDiffHunk(object): # pragma: no cover self.parent = parent def __getattr__(self, attr): - print('Getting Fake Hunk %s' % attr) + print("Getting Fake Hunk %s" % attr) resp = getattr(self.parent, attr) - print('Response: %s' % resp) + print("Response: %s" % resp) return resp @@ -93,7 +95,7 @@ class FakeDiffPatch(object): # pragma: no cover self.parent = parent def __getattr__(self, attr): - if attr == 'hunks': + if attr == "hunks": return [FakeDiffHunk(h) for h in self.parent.hunks] return getattr(self.parent, attr) @@ -102,13 +104,14 @@ class FakeDiffer(six.Iterator): # pragma: no cover def __init__(self, parent): self.parent = parent self.iter = None - self.did = STATS['counters']['diffs'] - STATS['counters']['diffs'] += 1 + self.did = STATS["counters"]["diffs"] + STATS["counters"]["diffs"] += 1 - STATS['diffs'][self.did] = { - 'init': traceback.extract_stack(limit=3)[0], - 'steps': 0, - 'iter': None} + STATS["diffs"][self.did] = { + "init": traceback.extract_stack(limit=3)[0], + "steps": 0, + "iter": None, + } def __getattr__(self, attr): return getattr(self.parent, attr) @@ -117,18 +120,18 @@ class FakeDiffer(six.Iterator): # pragma: no cover return dir(self.parent) def __iter__(self): - STATS['diffs'][self.did]['iter'] = traceback.extract_stack(limit=2)[0] + STATS["diffs"][self.did]["iter"] = traceback.extract_stack(limit=2)[0] self.iter = iter(self.parent) return self def __next__(self): - STATS['diffs'][self.did]['steps'] += 1 + STATS["diffs"][self.did]["steps"] += 1 resp = next(self.iter) if isinstance(resp, _pygit2.Patch): resp = FakeDiffPatch(resp) else: - raise Exception('Unexpected %s returned from differ' % resp) + raise Exception("Unexpected %s returned from differ" % resp) return resp def __len__(self): @@ -136,24 +139,28 @@ class FakeDiffer(six.Iterator): # pragma: no cover class PerfRepo( - 
six.with_metaclass(PerfRepoMeta, six.Iterator)): # pragma: no cover + six.with_metaclass(PerfRepoMeta, six.Iterator) +): # pragma: no cover """ An utility class allowing to go around pygit2's inability to be stable. """ def __init__(self, path): - STATS['repo_inits'].append((path, traceback.extract_stack(limit=2)[0])) - STATS['counters']['inits'] += 1 + STATS["repo_inits"].append((path, traceback.extract_stack(limit=2)[0])) + STATS["counters"]["inits"] += 1 self.repo = real_pygit2_repository(path) self.iter = None def __getattr__(self, attr): real = getattr(self.repo, attr) - if type(real) in [types.FunctionType, - types.BuiltinFunctionType, - types.BuiltinMethodType]: + if type(real) in [ + types.FunctionType, + types.BuiltinFunctionType, + types.BuiltinMethodType, + ]: + def fake(*args, **kwargs): resp = real(*args, **kwargs) if isinstance(resp, _pygit2.Walker): @@ -161,12 +168,14 @@ class PerfRepo( elif isinstance(resp, _pygit2.Diff): resp = FakeDiffer(resp) return resp + return fake elif isinstance(real, dict): real_getitem = real.__getitem__ def fake_getitem(self, item): return real_getitem(item) + real.__getitem__ = fake_getitem return real else: @@ -179,20 +188,21 @@ class PerfRepo( return self.repo.__contains__(item) def __iter__(self): - self.wid = STATS['counters']['walks'] - STATS['counters']['walks'] += 1 - STATS['walks'][self.wid] = { - 'steps': 0, - 'type': 'iter', - 'iter': traceback.extract_stack(limit=3)[0]} - TOTALS['walks'] += 1 + self.wid = STATS["counters"]["walks"] + STATS["counters"]["walks"] += 1 + STATS["walks"][self.wid] = { + "steps": 0, + "type": "iter", + "iter": traceback.extract_stack(limit=3)[0], + } + TOTALS["walks"] += 1 self.iter = iter(self.repo) return self def __next__(self): - STATS['walks'][self.wid]['steps'] += 1 - TOTALS['steps'] += 1 + STATS["walks"][self.wid]["steps"] += 1 + TOTALS["steps"] += 1 return next(self.iter) @@ -204,12 +214,12 @@ if six.PY2: def reset_stats(): # pragma: no cover """Resets STATS to be clear for 
the next request.""" global STATS - STATS = {'walks': {}, - 'diffs': {}, - 'repo_inits': [], - 'counters': {'walks': 0, - 'diffs': 0, - 'inits': 0}} + STATS = { + "walks": {}, + "diffs": {}, + "repo_inits": [], + "counters": {"walks": 0, "diffs": 0, "inits": 0}, + } # Make sure we start blank @@ -219,10 +229,10 @@ reset_stats() def print_stats(response): # pragma: no cover """Finalizes stats for the current request, and prints them possibly.""" REQUESTS.append(STATS) - if not os.environ.get('PAGURE_PERFREPO_VERBOSE'): + if not os.environ.get("PAGURE_PERFREPO_VERBOSE"): return response - print('Statistics:') + print("Statistics:") pprint.pprint(STATS) return response diff --git a/pagure/pfmarkdown.py b/pagure/pfmarkdown.py index 3619841..11b259c 100644 --- a/pagure/pfmarkdown.py +++ b/pagure/pfmarkdown.py @@ -41,7 +41,7 @@ from pagure.config import config as pagure_config # MENTION_RE regex). Note that it is a zero-length match - it does # not capture or consume any of the string - and it does not appear # as a group for the match object. -MENTION_RE = r'(?` -EXPLICIT_LINK_RE = \ - r'(?[0-9]+)' -COMMIT_LINK_RE = \ - r'(?[\w]{40})' +EXPLICIT_LINK_RE = ( + r"(?[0-9]+)" +) +COMMIT_LINK_RE = ( + r"(?[\w]{40})" +) # PREPROCIMPLLINK is used by ImplicitIssuePreprocessor to replace the # '#' when a line starts with an implicit issue link, to prevent # markdown parsing it as a header; we have to handle it here -IMPLICIT_ISSUE_RE = r'(?`). """ + def handleMatch(self, m): """ When the pattern matches, update the text. 
@@ -314,12 +315,12 @@ class AutolinkPattern2(markdown.inlinepatterns.Pattern): """ url = m.group(2) - if url.startswith('<'): + if url.startswith("<"): url = url[1:] - if url.endswith('>'): + if url.endswith(">"): url = url[:-1] el = markdown.util.etree.Element("a") - el.set('href', self.unescape(url)) + el.set("href", self.unescape(url)) el.text = markdown.util.AtomicString(url) return el @@ -336,10 +337,10 @@ class ImagePatternLazyLoad(markdown.inlinepatterns.ImagePattern): # Modify the origina img tag img = markdown.util.etree.Element("img") - img.set('data-src', el.get('src')) - img.set('src', '') - img.set('alt', el.get('alt')) - img.set('class', 'lazyload') + img.set("data-src", el.get("src")) + img.set("src", "") + img.set("alt", el.get("alt")) + img.set("class", "lazyload") # Create a global span in which we add both the new img tag and the # noscript one @@ -351,42 +352,45 @@ class ImagePatternLazyLoad(markdown.inlinepatterns.ImagePattern): class PagureExtension(markdown.extensions.Extension): - def extendMarkdown(self, md, md_globals): # First, make it so that bare links get automatically linkified. 
- AUTOLINK_RE = '(%s)' % '|'.join([ - r'<((?:[Ff]|[Hh][Tt])[Tt][Pp][Ss]?://[^>]*)>', - r'\b(?:[Ff]|[Hh][Tt])[Tt][Pp][Ss]?://[^)<>\s]+[^.,)<>\s]', - r'<(Ii][Rr][Cc][Ss]?://[^>]*)>', - r'\b[Ii][Rr][Cc][Ss]?://[^)<>\s]+[^.,)<>\s]', - ]) + AUTOLINK_RE = "(%s)" % "|".join( + [ + r"<((?:[Ff]|[Hh][Tt])[Tt][Pp][Ss]?://[^>]*)>", + r"\b(?:[Ff]|[Hh][Tt])[Tt][Pp][Ss]?://[^)<>\s]+[^.,)<>\s]", + r"<(Ii][Rr][Cc][Ss]?://[^>]*)>", + r"\b[Ii][Rr][Cc][Ss]?://[^)<>\s]+[^.,)<>\s]", + ] + ) markdown.inlinepatterns.AUTOLINK_RE = AUTOLINK_RE - md.preprocessors['implicit_issue'] = ImplicitIssuePreprocessor() + md.preprocessors["implicit_issue"] = ImplicitIssuePreprocessor() - md.inlinePatterns['mention'] = MentionPattern(MENTION_RE) + md.inlinePatterns["mention"] = MentionPattern(MENTION_RE) # Customize the image linking to support lazy loading md.inlinePatterns["image_link"] = ImagePatternLazyLoad( - markdown.inlinepatterns.IMAGE_LINK_RE, md) - - md.inlinePatterns['implicit_commit'] = ImplicitCommitPattern( - IMPLICIT_COMMIT_RE) - md.inlinePatterns['commit_links'] = CommitLinkPattern( - COMMIT_LINK_RE) - md.inlinePatterns['autolink'] = AutolinkPattern2( - AUTOLINK_RE, md) - - if pagure_config.get('ENABLE_TICKETS', True): - md.inlinePatterns['implicit_pr'] = \ - ImplicitPRPattern(IMPLICIT_PR_RE) - md.inlinePatterns['explicit_fork_issue'] = \ - ExplicitLinkPattern(EXPLICIT_LINK_RE) - md.inlinePatterns['implicit_issue'] = \ - ImplicitIssuePattern(IMPLICIT_ISSUE_RE) - - md.inlinePatterns['striked'] = StrikeThroughPattern( - STRIKE_THROUGH_RE) + markdown.inlinepatterns.IMAGE_LINK_RE, md + ) + + md.inlinePatterns["implicit_commit"] = ImplicitCommitPattern( + IMPLICIT_COMMIT_RE + ) + md.inlinePatterns["commit_links"] = CommitLinkPattern(COMMIT_LINK_RE) + md.inlinePatterns["autolink"] = AutolinkPattern2(AUTOLINK_RE, md) + + if pagure_config.get("ENABLE_TICKETS", True): + md.inlinePatterns["implicit_pr"] = ImplicitPRPattern( + IMPLICIT_PR_RE + ) + md.inlinePatterns["explicit_fork_issue"] = 
ExplicitLinkPattern( + EXPLICIT_LINK_RE + ) + md.inlinePatterns["implicit_issue"] = ImplicitIssuePattern( + IMPLICIT_ISSUE_RE + ) + + md.inlinePatterns["striked"] = StrikeThroughPattern(STRIKE_THROUGH_RE) md.registerExtension(self) @@ -399,13 +403,15 @@ def _issue_exists(user, namespace, repo, idx): """ Utility method checking if a given issue exists. """ repo_obj = pagure.lib.get_authorized_project( - flask.g.session, project_name=repo, user=user, namespace=namespace) + flask.g.session, project_name=repo, user=user, namespace=namespace + ) if not repo_obj: return False issue_obj = pagure.lib.search_issues( - flask.g.session, repo=repo_obj, issueid=idx) + flask.g.session, repo=repo_obj, issueid=idx + ) if not issue_obj: return False @@ -415,13 +421,15 @@ def _issue_exists(user, namespace, repo, idx): def _pr_exists(user, namespace, repo, idx): """ Utility method checking if a given PR exists. """ repo_obj = pagure.lib.get_authorized_project( - flask.g.session, project_name=repo, user=user, namespace=namespace) + flask.g.session, project_name=repo, user=user, namespace=namespace + ) if not repo_obj: return False pr_obj = pagure.lib.search_pull_requests( - flask.g.session, project_id=repo_obj.id, requestid=idx) + flask.g.session, project_id=repo_obj.id, requestid=idx + ) if not pr_obj: return False @@ -431,7 +439,8 @@ def _pr_exists(user, namespace, repo, idx): def _commit_exists(user, namespace, repo, githash): """ Utility method checking if a given commit exists. 
""" repo_obj = pagure.lib.get_authorized_project( - flask.g.session, project_name=repo, user=user, namespace=namespace) + flask.g.session, project_name=repo, user=user, namespace=namespace + ) if not repo_obj: return False @@ -449,32 +458,44 @@ def _obj_anchor_tag(user, namespace, repo, obj, text): """ if isinstance(obj, six.string_types): url = flask.url_for( - 'ui_ns.view_commit', username=user, namespace=namespace, - repo=repo, commitid=obj) - title = 'Commit %s' % obj - elif obj.isa == 'issue': + "ui_ns.view_commit", + username=user, + namespace=namespace, + repo=repo, + commitid=obj, + ) + title = "Commit %s" % obj + elif obj.isa == "issue": url = flask.url_for( - 'ui_ns.view_issue', username=user, namespace=namespace, - repo=repo, issueid=obj.id) + "ui_ns.view_issue", + username=user, + namespace=namespace, + repo=repo, + issueid=obj.id, + ) if obj.private: - title = 'Private issue' + title = "Private issue" else: if obj.status: - title = '[%s] %s' % (obj.status, obj.title) + title = "[%s] %s" % (obj.status, obj.title) else: title = obj.title else: url = flask.url_for( - 'ui_ns.request_pull', username=user, namespace=namespace, - repo=repo, requestid=obj.id) + "ui_ns.request_pull", + username=user, + namespace=namespace, + repo=repo, + requestid=obj.id, + ) if obj.status: - title = '[%s] %s' % (obj.status, obj.title) + title = "[%s] %s" % (obj.status, obj.title) else: title = obj.title element = markdown.util.etree.Element("a") - element.set('href', url) - element.set('title', title) + element.set("href", url) + element.set("title", title) element.text = text return element @@ -489,19 +510,19 @@ def _get_ns_repo_user(): root = flask.request.url_root url = flask.request.url - user = flask.request.args.get('user') - namespace = flask.request.args.get('namespace') - repo = flask.request.args.get('repo') + user = flask.request.args.get("user") + namespace = flask.request.args.get("namespace") + repo = flask.request.args.get("repo") if not user and not repo: - if 
'fork/' in url: - user, ext = url.split('fork/')[1].split('/', 1) + if "fork/" in url: + user, ext = url.split("fork/")[1].split("/", 1) else: ext = url.split(root)[1] - if ext.count('/') >= 3: - namespace, repo = ext.split('/', 2)[:2] + if ext.count("/") >= 3: + namespace, repo = ext.split("/", 2)[:2] else: - repo = ext.split('/', 1)[0] + repo = ext.split("/", 1)[0] return (namespace, repo, user) diff --git a/pagure/proxy.py b/pagure/proxy.py index 43eebba..3cacdbe 100644 --- a/pagure/proxy.py +++ b/pagure/proxy.py @@ -19,18 +19,18 @@ # of Red Hat, Inc. # -''' +""" Makes pagure an application behind a reverse proxy and thus ensure the redirects are using ``https``. Source: http://flask.pocoo.org/snippets/35/ by Peter Hansen -''' +""" from __future__ import unicode_literals class ReverseProxied(object): # pragma: no cover - '''Wrap the application in this middleware and configure the + """Wrap the application in this middleware and configure the front-end server to add these headers, to let you quietly bind this to a URL other than / and to an HTTP scheme that is different than what is used locally. 
@@ -50,24 +50,25 @@ class ReverseProxied(object): # pragma: no cover RequestHeader set X-Forwarded-Proto https early :param app: the WSGI application - ''' + """ + def __init__(self, app): self.app = app def __call__(self, environ, start_response): - script_name = environ.get('HTTP_X_SCRIPT_NAME', '') + script_name = environ.get("HTTP_X_SCRIPT_NAME", "") if script_name: - environ['SCRIPT_NAME'] = script_name - path_info = environ['PATH_INFO'] + environ["SCRIPT_NAME"] = script_name + path_info = environ["PATH_INFO"] if path_info.startswith(script_name): - environ['PATH_INFO'] = path_info[len(script_name):] + environ["PATH_INFO"] = path_info[len(script_name) :] - server = environ.get('HTTP_X_FORWARDED_HOST', '') + server = environ.get("HTTP_X_FORWARDED_HOST", "") if server: - environ['HTTP_HOST'] = server + environ["HTTP_HOST"] = server - scheme = environ.get('HTTP_X_SCHEME', '') + scheme = environ.get("HTTP_X_SCHEME", "") if scheme: - environ['wsgi.url_scheme'] = scheme + environ["wsgi.url_scheme"] = scheme return self.app(environ, start_response) diff --git a/pagure/ui/__init__.py b/pagure/ui/__init__.py index 479152c..8288099 100644 --- a/pagure/ui/__init__.py +++ b/pagure/ui/__init__.py @@ -12,43 +12,45 @@ from __future__ import unicode_literals import flask -UI_NS = flask.Blueprint('ui_ns', __name__) +UI_NS = flask.Blueprint("ui_ns", __name__) # Import the different controllers in the UI namespace/blueprint import pagure.config # noqa: E402 import pagure.ui.app # noqa: E402 import pagure.ui.fork # noqa: E402 import pagure.ui.groups # noqa: E402 -if pagure.config.config.get('ENABLE_TICKETS', True): + +if pagure.config.config.get("ENABLE_TICKETS", True): import pagure.ui.issues # noqa: E402 import pagure.ui.plugins # noqa: E402 import pagure.ui.repo # noqa: E402 -if pagure.config.config['PAGURE_AUTH'] == 'local': + +if pagure.config.config["PAGURE_AUTH"] == "local": import pagure.ui.login # noqa: E402 @UI_NS.errorhandler(404) def not_found(error): """404 Not Found 
page""" - return flask.render_template('not_found.html', error=error), 404 + return flask.render_template("not_found.html", error=error), 404 @UI_NS.errorhandler(500) def fatal_error(error): # pragma: no cover """500 Fatal Error page""" - return flask.render_template('fatal_error.html', error=error), 500 + return flask.render_template("fatal_error.html", error=error), 500 @UI_NS.errorhandler(401) def unauthorized(error): # pragma: no cover """401 Unauthorized page""" - return flask.render_template('unauthorized.html', error=error), 401 + return flask.render_template("unauthorized.html", error=error), 401 -@UI_NS.route('/api/') -@UI_NS.route('/api') +@UI_NS.route("/api/") +@UI_NS.route("/api") def api_redirect(): - ''' Redirects the user to the API documentation page. + """ Redirects the user to the API documentation page. - ''' - return flask.redirect(flask.url_for('api_ns.api')) + """ + return flask.redirect(flask.url_for("api_ns.api")) diff --git a/pagure/ui/app.py b/pagure/ui/app.py index 1065027..8daf0b0 100644 --- a/pagure/ui/app.py +++ b/pagure/ui/app.py @@ -42,34 +42,27 @@ def _filter_acls(repos, acl, user): """ Filter the given list of repositories to return only the ones where the user has the specified acl. 
""" - if acl.lower() == 'main admin': + if acl.lower() == "main admin": + repos = [repo for repo in repos if user.username == repo.user.username] + elif acl.lower() == "ticket" or "commit" or "admin": repos = [ - repo - for repo in repos - if user.username == repo.user.username - ] - elif acl.lower() == 'ticket' or 'commit' or 'admin': - repos = [ - repo - for repo in repos - if user in repo.contributors[acl.lower()] + repo for repo in repos if user in repo.contributors[acl.lower()] ] return repos -@UI_NS.route('/browse/projects', endpoint='browse_projects') -@UI_NS.route('/browse/projects/', endpoint='browse_projects') -@UI_NS.route('/') +@UI_NS.route("/browse/projects", endpoint="browse_projects") +@UI_NS.route("/browse/projects/", endpoint="browse_projects") +@UI_NS.route("/") def index(): """ Front page of the application. """ - if authenticated() and flask.request.path == '/': - return flask.redirect( - flask.url_for('ui_ns.userdash_projects')) + if authenticated() and flask.request.path == "/": + return flask.redirect(flask.url_for("ui_ns.userdash_projects")) - sorting = flask.request.args.get('sorting') or None - page = flask.request.args.get('page', 1) + sorting = flask.request.args.get("sorting") or None + page = flask.request.args.get("page", 1) try: page = int(page) if page < 1: @@ -77,7 +70,7 @@ def index(): except ValueError: page = 1 - limit = pagure_config['ITEM_PER_PAGE'] + limit = pagure_config["ITEM_PER_PAGE"] start = limit * (page - 1) private = None @@ -94,14 +87,12 @@ def index(): ) num_repos = pagure.lib.search_projects( - flask.g.session, - fork=False, - private=private, - count=True) + flask.g.session, fork=False, private=private, count=True + ) total_page = int(ceil(num_repos / float(limit)) if num_repos > 0 else 1) return flask.render_template( - 'index.html', + "index.html", select="projects", repos=repos, repos_length=num_repos, @@ -114,7 +105,7 @@ def index(): def get_userdash_common(user): userdash_counts = {} - 
userdash_counts['repos_length'] = pagure.lib.list_users_projects( + userdash_counts["repos_length"] = pagure.lib.list_users_projects( flask.g.session, username=flask.g.fas_user.username, exclude_groups=None, @@ -123,7 +114,7 @@ def get_userdash_common(user): count=True, ) - userdash_counts['forks_length'] = pagure.lib.search_projects( + userdash_counts["forks_length"] = pagure.lib.search_projects( flask.g.session, username=flask.g.fas_user.username, fork=True, @@ -131,13 +122,15 @@ def get_userdash_common(user): count=True, ) - userdash_counts['watchlist_length'] = len(pagure.lib.user_watch_list( - flask.g.session, - user=flask.g.fas_user.username, - exclude_groups=pagure_config.get('EXCLUDE_GROUP_INDEX'), - )) + userdash_counts["watchlist_length"] = len( + pagure.lib.user_watch_list( + flask.g.session, + user=flask.g.fas_user.username, + exclude_groups=pagure_config.get("EXCLUDE_GROUP_INDEX"), + ) + ) - userdash_counts['groups_length'] = len(user.groups) + userdash_counts["groups_length"] = len(user.groups) search_data = pagure.lib.list_users_projects( flask.g.session, @@ -148,8 +141,8 @@ def get_userdash_common(user): return userdash_counts, search_data -@UI_NS.route('/dashboard/projects/') -@UI_NS.route('/dashboard/projects') +@UI_NS.route("/dashboard/projects/") +@UI_NS.route("/dashboard/projects") @login_required def userdash_projects(): """ User Dashboard page listing projects for the user @@ -160,18 +153,20 @@ def userdash_projects(): groups = [] for group in user.groups: - groups.append(pagure.lib.search_groups(flask.g.session, - group_name=group, - group_type='user')) + groups.append( + pagure.lib.search_groups( + flask.g.session, group_name=group, group_type="user" + ) + ) - acl = flask.request.args.get('acl', '').strip().lower() or None - search_pattern = flask.request.args.get('search_pattern', None) + acl = flask.request.args.get("acl", "").strip().lower() or None + search_pattern = flask.request.args.get("search_pattern", None) if search_pattern == 
"": search_pattern = None - limit = pagure_config['ITEM_PER_PAGE'] + limit = pagure_config["ITEM_PER_PAGE"] - repopage = flask.request.args.get('repopage', 1) + repopage = flask.request.args.get("repopage", 1) try: repopage = int(repopage) if repopage < 1: @@ -219,24 +214,24 @@ def userdash_projects(): for group in groups: if repo in group.projects: thegroup = {"group_name": "", "access": ""} - thegroup['group_name'] = group.group_name + thegroup["group_name"] = group.group_name for a in repo.contributor_groups: for gr in repo.contributor_groups[a]: if group.group_name == gr.group_name: thegroup["access"] = a grouplist.append(thegroup) - repo_list.append({ - "repo": repo, - "grouplist": grouplist, - "access": access, - }) + repo_list.append( + {"repo": repo, "grouplist": grouplist, "access": access} + ) total_repo_page = int( - ceil(filtered_repos_count / - float(limit)) if filtered_repos_count > 0 else 1) + ceil(filtered_repos_count / float(limit)) + if filtered_repos_count > 0 + else 1 + ) return flask.render_template( - 'userdash_projects.html', + "userdash_projects.html", username=flask.g.fas_user.username, user=user, select="projects", @@ -251,8 +246,8 @@ def userdash_projects(): ) -@UI_NS.route('/dashboard/activity/') -@UI_NS.route('/dashboard/activity') +@UI_NS.route("/dashboard/activity/") +@UI_NS.route("/dashboard/activity") @login_required def userdash_activity(): """ User Dashboard page listing user activity @@ -260,12 +255,12 @@ def userdash_activity(): user = _get_user(username=flask.g.fas_user.username) userdash_counts, search_data = get_userdash_common(user) - messages = pagure.lib.get_watchlist_messages(flask.g.session, - user, - limit=20) + messages = pagure.lib.get_watchlist_messages( + flask.g.session, user, limit=20 + ) return flask.render_template( - 'userdash_activity.html', + "userdash_activity.html", username=flask.g.fas_user.username, user=user, select="activity", @@ -275,8 +270,8 @@ def userdash_activity(): ) 
-@UI_NS.route('/dashboard/groups/') -@UI_NS.route('/dashboard/groups') +@UI_NS.route("/dashboard/groups/") +@UI_NS.route("/dashboard/groups") @login_required def userdash_groups(): """ User Dashboard page listing a user's groups @@ -287,12 +282,14 @@ def userdash_groups(): groups = [] for group in user.groups: - groups.append(pagure.lib.search_groups(flask.g.session, - group_name=group, - group_type='user')) + groups.append( + pagure.lib.search_groups( + flask.g.session, group_name=group, group_type="user" + ) + ) return flask.render_template( - 'userdash_groups.html', + "userdash_groups.html", username=flask.g.fas_user.username, user=user, select="groups", @@ -302,8 +299,8 @@ def userdash_groups(): ) -@UI_NS.route('/dashboard/forks/') -@UI_NS.route('/dashboard/forks') +@UI_NS.route("/dashboard/forks/") +@UI_NS.route("/dashboard/forks") @login_required def userdash_forks(): """ Forks tab of the user dashboard @@ -311,10 +308,10 @@ def userdash_forks(): user = _get_user(username=flask.g.fas_user.username) userdash_counts, search_data = get_userdash_common(user) - limit = pagure_config['ITEM_PER_PAGE'] + limit = pagure_config["ITEM_PER_PAGE"] # FORKS - forkpage = flask.request.args.get('forkpage', 1) + forkpage = flask.request.args.get("forkpage", 1) try: forkpage = int(forkpage) if forkpage < 1: @@ -333,11 +330,13 @@ def userdash_forks(): ) total_fork_page = int( - ceil(userdash_counts['forks_length'] / - float(limit)) if userdash_counts['forks_length'] > 0 else 1) + ceil(userdash_counts["forks_length"] / float(limit)) + if userdash_counts["forks_length"] > 0 + else 1 + ) return flask.render_template( - 'userdash_forks.html', + "userdash_forks.html", username=flask.g.fas_user.username, user=user, select="forks", @@ -349,8 +348,8 @@ def userdash_forks(): ) -@UI_NS.route('/dashboard/watchlist/') -@UI_NS.route('/dashboard/watchlist') +@UI_NS.route("/dashboard/watchlist/") +@UI_NS.route("/dashboard/watchlist") @login_required def userdash_watchlist(): """ User Dashboard 
page for a user's watchlist @@ -359,14 +358,14 @@ def userdash_watchlist(): watch_list = pagure.lib.user_watch_list( flask.g.session, user=flask.g.fas_user.username, - exclude_groups=pagure_config.get('EXCLUDE_GROUP_INDEX'), + exclude_groups=pagure_config.get("EXCLUDE_GROUP_INDEX"), ) user = _get_user(username=flask.g.fas_user.username) userdash_counts, search_data = get_userdash_common(user) return flask.render_template( - 'userdash_watchlist.html', + "userdash_watchlist.html", username=flask.g.fas_user.username, user=user, select="watchlist", @@ -381,9 +380,9 @@ def index_auth(): """ user = _get_user(username=flask.g.fas_user.username) - acl = flask.request.args.get('acl', '').strip().lower() or None + acl = flask.request.args.get("acl", "").strip().lower() or None - repopage = flask.request.args.get('repopage', 1) + repopage = flask.request.args.get("repopage", 1) try: repopage = int(repopage) if repopage < 1: @@ -391,14 +390,14 @@ def index_auth(): except ValueError: repopage = 1 - limit = pagure_config['ITEM_PER_PAGE'] + limit = pagure_config["ITEM_PER_PAGE"] # PROJECTS start = limit * (repopage - 1) repos = pagure.lib.search_projects( flask.g.session, username=flask.g.fas_user.username, - exclude_groups=pagure_config.get('EXCLUDE_GROUP_INDEX'), + exclude_groups=pagure_config.get("EXCLUDE_GROUP_INDEX"), fork=False, private=flask.g.fas_user.username, start=start, @@ -410,16 +409,17 @@ def index_auth(): repos_length = pagure.lib.search_projects( flask.g.session, username=flask.g.fas_user.username, - exclude_groups=pagure_config.get('EXCLUDE_GROUP_INDEX'), + exclude_groups=pagure_config.get("EXCLUDE_GROUP_INDEX"), fork=False, private=flask.g.fas_user.username, count=True, ) total_repo_page = int( - ceil(repos_length / float(limit)) if repos_length > 0 else 1) + ceil(repos_length / float(limit)) if repos_length > 0 else 1 + ) # FORKS - forkpage = flask.request.args.get('forkpage', 1) + forkpage = flask.request.args.get("forkpage", 1) try: forkpage = int(forkpage) 
if forkpage < 1: @@ -447,16 +447,17 @@ def index_auth(): count=True, ) total_fork_page = int( - ceil(forks_length / float(limit)) if forks_length > 0 else 1) + ceil(forks_length / float(limit)) if forks_length > 0 else 1 + ) watch_list = pagure.lib.user_watch_list( flask.g.session, user=flask.g.fas_user.username, - exclude_groups=pagure_config.get('EXCLUDE_GROUP_INDEX'), + exclude_groups=pagure_config.get("EXCLUDE_GROUP_INDEX"), ) return flask.render_template( - 'userdash_projects.html', + "userdash_projects.html", username=flask.g.fas_user.username, user=user, forks=forks, @@ -471,15 +472,15 @@ def index_auth(): ) -@UI_NS.route('/search/') -@UI_NS.route('/search') +@UI_NS.route("/search/") +@UI_NS.route("/search") def search(): """ Search this pagure instance for projects or users. """ - stype = flask.request.args.get('type', 'projects') - term = flask.request.args.get('term') - page = flask.request.args.get('page', 1) - direct = is_true(flask.request.values.get('direct', False)) + stype = flask.request.args.get("type", "projects") + term = flask.request.args.get("term") + page = flask.request.args.get("page", 1) + direct = is_true(flask.request.values.get("direct", False)) try: page = int(page) @@ -489,31 +490,29 @@ def search(): page = 1 if direct: + return flask.redirect(flask.url_for("ui_ns.view_repo", repo="") + term) + + if stype == "projects": return flask.redirect( - flask.url_for('ui_ns.view_repo', repo='') + term + flask.url_for("ui_ns.view_projects", pattern=term) ) - - if stype == 'projects': - return flask.redirect(flask.url_for( - 'ui_ns.view_projects', pattern=term)) - elif stype == 'projects_forks': - return flask.redirect(flask.url_for( - 'view_projects', pattern=term, forks=True)) - elif stype == 'groups': - return flask.redirect(flask.url_for( - 'ui_ns.view_group', group=term)) + elif stype == "projects_forks": + return flask.redirect( + flask.url_for("view_projects", pattern=term, forks=True) + ) + elif stype == "groups": + return 
flask.redirect(flask.url_for("ui_ns.view_group", group=term)) else: - return flask.redirect(flask.url_for( - 'ui_ns.view_users', username=term)) + return flask.redirect(flask.url_for("ui_ns.view_users", username=term)) -@UI_NS.route('/users/') -@UI_NS.route('/users') -@UI_NS.route('/users/') +@UI_NS.route("/users/") +@UI_NS.route("/users") +@UI_NS.route("/users/") def view_users(username=None): """ Present the list of users. """ - page = flask.request.args.get('page', 1) + page = flask.request.args.get("page", 1) try: page = int(page) if page < 1: @@ -529,7 +528,7 @@ def view_users(username=None): if authenticated() and username == flask.g.fas_user.username: private = flask.g.fas_user.username - limit = pagure_config['ITEM_PER_PAGE'] + limit = pagure_config["ITEM_PER_PAGE"] start = limit * (page - 1) end = limit * page users_length = len(users) @@ -543,36 +542,38 @@ def view_users(username=None): username=user.user, fork=False, count=True, - private=private) + private=private, + ) forks_length = pagure.lib.search_projects( flask.g.session, username=user.user, fork=True, count=True, - private=private) + private=private, + ) user.repos_length = repos_length user.forks_length = forks_length return flask.render_template( - 'user_list.html', + "user_list.html", users=users, users_length=users_length, total_page=total_page, page=page, - select='users', + select="users", ) -@UI_NS.route('/projects/') -@UI_NS.route('/projects') -@UI_NS.route('/projects/') -@UI_NS.route('/projects//') +@UI_NS.route("/projects/") +@UI_NS.route("/projects") +@UI_NS.route("/projects/") +@UI_NS.route("/projects//") def view_projects(pattern=None, namespace=None): """ Present the list of projects. 
""" - forks = flask.request.args.get('forks') - page = flask.request.args.get('page', 1) + forks = flask.request.args.get("forks") + page = flask.request.args.get("page", 1) try: page = int(page) @@ -581,11 +582,11 @@ def view_projects(pattern=None, namespace=None): except ValueError: page = 1 - select = 'projects' + select = "projects" # If forks is specified, we want both forks and projects if is_true(forks): forks = None - select = 'projects_forks' + select = "projects_forks" else: forks = False private = False @@ -593,29 +594,45 @@ def view_projects(pattern=None, namespace=None): if authenticated(): private = flask.g.fas_user.username - limit = pagure_config['ITEM_PER_PAGE'] + limit = pagure_config["ITEM_PER_PAGE"] start = limit * (page - 1) projects = pagure.lib.search_projects( - flask.g.session, pattern=pattern, namespace=namespace, - fork=forks, start=start, limit=limit, private=private) + flask.g.session, + pattern=pattern, + namespace=namespace, + fork=forks, + start=start, + limit=limit, + private=private, + ) if len(projects) == 1: - flask.flash('Only one result found, redirecting you to it') - return flask.redirect(flask.url_for( - 'ui_ns.view_repo', repo=projects[0].name, - namespace=projects[0].namespace, - username=projects[0].user.username if projects[0].is_fork else None - )) + flask.flash("Only one result found, redirecting you to it") + return flask.redirect( + flask.url_for( + "ui_ns.view_repo", + repo=projects[0].name, + namespace=projects[0].namespace, + username=projects[0].user.username + if projects[0].is_fork + else None, + ) + ) projects_length = pagure.lib.search_projects( - flask.g.session, pattern=pattern, namespace=namespace, - fork=forks, count=True, private=private) + flask.g.session, + pattern=pattern, + namespace=namespace, + fork=forks, + count=True, + private=private, + ) total_page = int(ceil(projects_length / float(limit))) return flask.render_template( - 'index.html', + "index.html", repos=projects, 
repos_length=projects_length, total_page=total_page, @@ -627,26 +644,28 @@ def view_projects(pattern=None, namespace=None): def get_userprofile_common(user): userprofile_counts = {} - userprofile_counts['repos_length'] = pagure.lib.search_projects( + userprofile_counts["repos_length"] = pagure.lib.search_projects( flask.g.session, username=user.username, fork=False, exclude_groups=None, private=False, - count=True) + count=True, + ) - userprofile_counts['forks_length'] = pagure.lib.search_projects( + userprofile_counts["forks_length"] = pagure.lib.search_projects( flask.g.session, username=user.username, fork=True, private=False, - count=True) + count=True, + ) return userprofile_counts -@UI_NS.route('/user//') -@UI_NS.route('/user/') +@UI_NS.route("/user//") +@UI_NS.route("/user/") def view_user(username): """ Front page of a specific user. """ @@ -669,24 +688,24 @@ def view_user(username): userprofile_common = get_userprofile_common(user) return flask.render_template( - 'userprofile_overview.html', + "userprofile_overview.html", username=username, user=user, owned_repos=owned_repos, - repos_length=userprofile_common['repos_length'], - forks_length=userprofile_common['forks_length'], - select='overview', + repos_length=userprofile_common["repos_length"], + forks_length=userprofile_common["forks_length"], + select="overview", ) -@UI_NS.route('/user//projects/') -@UI_NS.route('/user//projects') +@UI_NS.route("/user//projects/") +@UI_NS.route("/user//projects") def userprofile_projects(username): """ Public Profile view of a user's projects. 
""" user = _get_user(username=username) - repopage = flask.request.args.get('repopage', 1) + repopage = flask.request.args.get("repopage", 1) try: repopage = int(repopage) if repopage < 1: @@ -694,43 +713,45 @@ def userprofile_projects(username): except ValueError: repopage = 1 - limit = pagure_config['ITEM_PER_PAGE'] + limit = pagure_config["ITEM_PER_PAGE"] repo_start = limit * (repopage - 1) repos = pagure.lib.search_projects( flask.g.session, username=username, fork=False, - exclude_groups=pagure_config.get('EXCLUDE_GROUP_INDEX'), + exclude_groups=pagure_config.get("EXCLUDE_GROUP_INDEX"), start=repo_start, limit=limit, - private=False) + private=False, + ) userprofile_common = get_userprofile_common(user) total_page_repos = int( - ceil(userprofile_common['repos_length'] / float(limit))) + ceil(userprofile_common["repos_length"] / float(limit)) + ) return flask.render_template( - 'userprofile_projects.html', + "userprofile_projects.html", username=username, user=user, repos=repos, total_page_repos=total_page_repos, repopage=repopage, - repos_length=userprofile_common['repos_length'], - forks_length=userprofile_common['forks_length'], + repos_length=userprofile_common["repos_length"], + forks_length=userprofile_common["forks_length"], select="projects", ) -@UI_NS.route('/user//forks/') -@UI_NS.route('/user//forks') +@UI_NS.route("/user//forks/") +@UI_NS.route("/user//forks") def userprofile_forks(username): """ Public Profile view of a user's forks. 
""" user = _get_user(username=username) - forkpage = flask.request.args.get('forkpage', 1) + forkpage = flask.request.args.get("forkpage", 1) try: forkpage = int(forkpage) if forkpage < 1: @@ -738,7 +759,7 @@ def userprofile_forks(username): except ValueError: forkpage = 1 - limit = pagure_config['ITEM_PER_PAGE'] + limit = pagure_config["ITEM_PER_PAGE"] fork_start = limit * (forkpage - 1) forks = pagure.lib.search_projects( @@ -747,36 +768,38 @@ def userprofile_forks(username): fork=True, start=fork_start, limit=limit, - private=False) + private=False, + ) userprofile_common = get_userprofile_common(user) total_page_forks = int( - ceil(userprofile_common['forks_length'] / float(limit))) + ceil(userprofile_common["forks_length"] / float(limit)) + ) return flask.render_template( - 'userprofile_forks.html', + "userprofile_forks.html", username=username, user=user, forks=forks, total_page_forks=total_page_forks, forkpage=forkpage, - repos_length=userprofile_common['repos_length'], - forks_length=userprofile_common['forks_length'], + repos_length=userprofile_common["repos_length"], + forks_length=userprofile_common["forks_length"], select="forks", ) # original view_user() -@UI_NS.route('/user2//') -@UI_NS.route('/user2/') +@UI_NS.route("/user2//") +@UI_NS.route("/user2/") def view_user2(username): """ Front page of a specific user. 
""" user = _get_user(username=username) - acl = flask.request.args.get('acl', '').strip().lower() or None + acl = flask.request.args.get("acl", "").strip().lower() or None - repopage = flask.request.args.get('repopage', 1) + repopage = flask.request.args.get("repopage", 1) try: repopage = int(repopage) if repopage < 1: @@ -784,7 +807,7 @@ def view_user2(username): except ValueError: repopage = 1 - forkpage = flask.request.args.get('forkpage', 1) + forkpage = flask.request.args.get("forkpage", 1) try: forkpage = int(forkpage) if forkpage < 1: @@ -792,7 +815,7 @@ def view_user2(username): except ValueError: forkpage = 1 - limit = pagure_config['ITEM_PER_PAGE'] + limit = pagure_config["ITEM_PER_PAGE"] repo_start = limit * (repopage - 1) fork_start = limit * (forkpage - 1) @@ -804,10 +827,11 @@ def view_user2(username): flask.g.session, username=username, fork=False, - exclude_groups=pagure_config.get('EXCLUDE_GROUP_INDEX'), + exclude_groups=pagure_config.get("EXCLUDE_GROUP_INDEX"), start=repo_start, limit=limit, - private=private) + private=private, + ) if repos and acl: repos = _filter_acls(repos, acl, user) @@ -816,9 +840,10 @@ def view_user2(username): flask.g.session, username=username, fork=False, - exclude_groups=pagure_config.get('EXCLUDE_GROUP_INDEX'), + exclude_groups=pagure_config.get("EXCLUDE_GROUP_INDEX"), private=private, - count=True) + count=True, + ) forks = pagure.lib.search_projects( flask.g.session, @@ -826,20 +851,22 @@ def view_user2(username): fork=True, start=fork_start, limit=limit, - private=private) + private=private, + ) forks_length = pagure.lib.search_projects( flask.g.session, username=username, fork=True, private=private, - count=True) + count=True, + ) total_page_repos = int(ceil(repos_length / float(limit))) total_page_forks = int(ceil(forks_length / float(limit))) return flask.render_template( - 'userprofile_overview.html', + "userprofile_overview.html", username=username, user=user, repos=repos, @@ -853,28 +880,24 @@ def 
view_user2(username): ) -@UI_NS.route('/user//requests/') -@UI_NS.route('/user//requests') +@UI_NS.route("/user//requests/") +@UI_NS.route("/user//requests") def view_user_requests(username): """ Shows the pull-requests for the specified user. """ user = _get_user(username=username) requests = pagure.lib.get_pull_request_of_user( - flask.g.session, - username=username + flask.g.session, username=username ) return flask.render_template( - 'user_requests.html', - username=username, - user=user, - requests=requests, + "user_requests.html", username=username, user=user, requests=requests ) -@UI_NS.route('/user//issues/') -@UI_NS.route('/user//issues') +@UI_NS.route("/user//issues/") +@UI_NS.route("/user//issues") def view_user_issues(username): """ Shows the issues created or assigned to the specified user. @@ -883,20 +906,18 @@ def view_user_issues(username): :type username: str """ - if not pagure_config.get('ENABLE_TICKETS', True): - flask.abort(404, 'Tickets have been disabled on this pagure instance') + if not pagure_config.get("ENABLE_TICKETS", True): + flask.abort(404, "Tickets have been disabled on this pagure instance") user = _get_user(username=username) return flask.render_template( - 'user_issues.html', - username=username, - user=user, + "user_issues.html", username=username, user=user ) -@UI_NS.route('/user//stars/') -@UI_NS.route('/user//stars') +@UI_NS.route("/user//stars/") +@UI_NS.route("/user//stars") def userprofile_starred(username): """ Shows the starred projects of the specified user. 
@@ -908,18 +929,18 @@ def userprofile_starred(username): userprofile_common = get_userprofile_common(user) return flask.render_template( - 'userprofile_starred.html', + "userprofile_starred.html", username=username, user=user, repos=[star.project for star in user.stars], - repos_length=userprofile_common['repos_length'], - forks_length=userprofile_common['forks_length'], + repos_length=userprofile_common["repos_length"], + forks_length=userprofile_common["forks_length"], select="starred", ) -@UI_NS.route('/user//groups/') -@UI_NS.route('/user//groups') +@UI_NS.route("/user//groups/") +@UI_NS.route("/user//groups") def userprofile_groups(username): """ Shows the groups of a user @@ -931,37 +952,43 @@ def userprofile_groups(username): groups = [] for groupname in user.groups: groups.append( - pagure.lib.search_groups(flask.g.session, group_name=groupname)) + pagure.lib.search_groups(flask.g.session, group_name=groupname) + ) return flask.render_template( - 'userprofile_groups.html', + "userprofile_groups.html", username=username, user=user, groups=groups, - repos_length=userprofile_common['repos_length'], - forks_length=userprofile_common['forks_length'], + repos_length=userprofile_common["repos_length"], + forks_length=userprofile_common["forks_length"], select="groups", ) -@UI_NS.route('/new/', methods=('GET', 'POST')) -@UI_NS.route('/new', methods=('GET', 'POST')) +@UI_NS.route("/new/", methods=("GET", "POST")) +@UI_NS.route("/new", methods=("GET", "POST")) @login_required def new_project(): """ Form to create a new project. 
""" user = pagure.lib.search_user( - flask.g.session, username=flask.g.fas_user.username) + flask.g.session, username=flask.g.fas_user.username + ) - if not pagure_config.get('ENABLE_NEW_PROJECTS', True) or \ - not pagure_config.get('ENABLE_UI_NEW_PROJECTS', True): - flask.abort(404, 'Creation of new project is not allowed on this \ - pagure instance') + if not pagure_config.get( + "ENABLE_NEW_PROJECTS", True + ) or not pagure_config.get("ENABLE_UI_NEW_PROJECTS", True): + flask.abort( + 404, + "Creation of new project is not allowed on this \ + pagure instance", + ) - namespaces = pagure_config['ALLOWED_PREFIX'][:] + namespaces = pagure_config["ALLOWED_PREFIX"][:] if user: namespaces.extend([grp for grp in user.groups]) - if pagure_config.get('USER_NAMESPACE', False): + if pagure_config.get("USER_NAMESPACE", False): namespaces.insert(0, flask.g.fas_user.username) form = pagure.forms.ProjectForm(namespaces=namespaces) @@ -973,7 +1000,7 @@ def new_project(): avatar_email = form.avatar_email.data create_readme = form.create_readme.data private = False - if pagure_config.get('PRIVATE_PROJECTS', False): + if pagure_config.get("PRIVATE_PROJECTS", False): private = form.private.data namespace = form.namespace.data if namespace: @@ -989,44 +1016,42 @@ def new_project(): url=url, avatar_email=avatar_email, user=flask.g.fas_user.username, - blacklist=pagure_config['BLACKLISTED_PROJECTS'], - allowed_prefix=pagure_config['ALLOWED_PREFIX'], - gitfolder=pagure_config['GIT_FOLDER'], - docfolder=pagure_config.get('DOCS_FOLDER'), - ticketfolder=pagure_config.get('TICKETS_FOLDER'), - requestfolder=pagure_config['REQUESTS_FOLDER'], + blacklist=pagure_config["BLACKLISTED_PROJECTS"], + allowed_prefix=pagure_config["ALLOWED_PREFIX"], + gitfolder=pagure_config["GIT_FOLDER"], + docfolder=pagure_config.get("DOCS_FOLDER"), + ticketfolder=pagure_config.get("TICKETS_FOLDER"), + requestfolder=pagure_config["REQUESTS_FOLDER"], add_readme=create_readme, userobj=user, 
prevent_40_chars=pagure_config.get( - 'OLD_VIEW_COMMIT_ENABLED', False), - user_ns=pagure_config.get('USER_NAMESPACE', False), + "OLD_VIEW_COMMIT_ENABLED", False + ), + user_ns=pagure_config.get("USER_NAMESPACE", False), ) flask.g.session.commit() return pagure.utils.wait_for_task(task) except pagure.exceptions.PagureException as err: - flask.flash(str(err), 'error') + flask.flash(str(err), "error") except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() - flask.flash(str(err), 'error') + flask.flash(str(err), "error") - return flask.render_template( - 'new_project.html', - form=form, - ) + return flask.render_template("new_project.html", form=form) -@UI_NS.route('/wait/') +@UI_NS.route("/wait/") def wait_task(taskid): """ Shows a wait page until the task finishes. """ task = pagure.lib.tasks.get_result(taskid) - is_js = is_true(flask.request.args.get('js')) + is_js = is_true(flask.request.args.get("js")) - prev = flask.request.args.get('prev') + prev = flask.request.args.get("prev") if not is_safe_url(prev): - prev = flask.url_for('index') + prev = flask.url_for("index") - count = flask.request.args.get('count', 0) + count = flask.request.args.get("count", 0) try: count = int(count) if count < 1: @@ -1040,73 +1065,64 @@ def wait_task(taskid): return flask.redirect(get_task_redirect_url(task, prev)) else: if is_js: - return flask.jsonify({ - 'count': count + 1, - 'status': task.status, - }) + return flask.jsonify({"count": count + 1, "status": task.status}) return flask.render_template( - 'waiting.html', - task=task, - count=count, - prev=prev, + "waiting.html", task=task, count=count, prev=prev ) -@UI_NS.route('/settings/', methods=('GET', 'POST')) -@UI_NS.route('/settings', methods=('GET', 'POST')) +@UI_NS.route("/settings/", methods=("GET", "POST")) +@UI_NS.route("/settings", methods=("GET", "POST")) @login_required def user_settings(): """ Update the user settings. 
""" if admin_session_timedout(): return flask.redirect( - flask.url_for('auth_login', next=flask.request.url)) + flask.url_for("auth_login", next=flask.request.url) + ) user = _get_user(username=flask.g.fas_user.username) form = pagure.forms.UserSettingsForm() - if form.validate_on_submit() and pagure_config.get('LOCAL_SSH_KEY', True): + if form.validate_on_submit() and pagure_config.get("LOCAL_SSH_KEY", True): ssh_key = form.ssh_key.data try: - message = 'Nothing to update' + message = "Nothing to update" if user.public_ssh_key != ssh_key: pagure.lib.update_user_ssh( flask.g.session, user=user, ssh_key=ssh_key, - keydir=pagure_config.get('GITOLITE_KEYDIR', None), + keydir=pagure_config.get("GITOLITE_KEYDIR", None), update_only=True, ) flask.g.session.commit() - message = 'Public ssh key updated' + message = "Public ssh key updated" flask.flash(message) - return flask.redirect( - flask.url_for('ui_ns.user_settings')) + return flask.redirect(flask.url_for("ui_ns.user_settings")) except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() - flask.flash(str(err), 'error') - elif flask.request.method == 'GET': + flask.flash(str(err), "error") + elif flask.request.method == "GET": form.ssh_key.data = user.public_ssh_key - return flask.render_template( - 'user_settings.html', - user=user, - form=form, - ) + return flask.render_template("user_settings.html", user=user, form=form) -@UI_NS.route('/settings/usersettings', methods=['POST']) +@UI_NS.route("/settings/usersettings", methods=["POST"]) @login_required def update_user_settings(): """ Update the user's settings set in the settings page. 
""" if admin_session_timedout(): - if flask.request.method == 'POST': - flask.flash('Action canceled, try it again', 'error') + if flask.request.method == "POST": + flask.flash("Action canceled, try it again", "error") return flask.redirect( - flask.url_for('auth_login', next=flask.request.url)) + flask.url_for("auth_login", next=flask.request.url) + ) user = _get_user(username=flask.g.fas_user.username) @@ -1115,29 +1131,27 @@ def update_user_settings(): if form.validate_on_submit(): settings = {} for key in flask.request.form: - if key == 'csrf_token': + if key == "csrf_token": continue settings[key] = flask.request.form[key] try: message = pagure.lib.update_user_settings( - flask.g.session, - settings=settings, - user=user.username, + flask.g.session, settings=settings, user=user.username ) flask.g.session.commit() flask.flash(message) except pagure.exceptions.PagureException as msg: flask.g.session.rollback() - flask.flash(msg, 'error') + flask.flash(msg, "error") except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() - flask.flash(str(err), 'error') + flask.flash(str(err), "error") - return flask.redirect(flask.url_for('ui_ns.user_settings')) + return flask.redirect(flask.url_for("ui_ns.user_settings")) -@UI_NS.route('/markdown/', methods=['POST']) +@UI_NS.route("/markdown/", methods=["POST"]) def markdown_preview(): """ Return the provided markdown text in html. @@ -1145,28 +1159,26 @@ def markdown_preview(): """ form = pagure.forms.ConfirmationForm() if form.validate_on_submit(): - return pagure.ui.filters.markdown_filter(flask.request.form['content']) + return pagure.ui.filters.markdown_filter(flask.request.form["content"]) else: - flask.abort(400, 'Invalid request') + flask.abort(400, "Invalid request") -@UI_NS.route('/settings/email/drop', methods=['POST']) +@UI_NS.route("/settings/email/drop", methods=["POST"]) @login_required def remove_user_email(): """ Remove the specified email from the logged in user. 
""" if admin_session_timedout(): return flask.redirect( - flask.url_for('auth_login', next=flask.request.url)) + flask.url_for("auth_login", next=flask.request.url) + ) user = _get_user(username=flask.g.fas_user.username) if len(user.emails) == 1: - flask.flash( - 'You must always have at least one email', 'error') - return flask.redirect( - flask.url_for('ui_ns.user_settings') - ) + flask.flash("You must always have at least one email", "error") + return flask.redirect(flask.url_for("ui_ns.user_settings")) form = pagure.forms.UserEmailForm() @@ -1176,11 +1188,10 @@ def remove_user_email(): if email not in useremails: flask.flash( - 'You do not have the email: %s, nothing to remove' % email, - 'error') - return flask.redirect( - flask.url_for('ui_ns.user_settings') + "You do not have the email: %s, nothing to remove" % email, + "error", ) + return flask.redirect(flask.url_for("ui_ns.user_settings")) for mail in user.emails: if mail.email == email: @@ -1188,59 +1199,58 @@ def remove_user_email(): break try: flask.g.session.commit() - flask.flash('Email removed') + flask.flash("Email removed") except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() _log.exception(err) - flask.flash('Email could not be removed', 'error') + flask.flash("Email could not be removed", "error") - return flask.redirect(flask.url_for('ui_ns.user_settings')) + return flask.redirect(flask.url_for("ui_ns.user_settings")) -@UI_NS.route('/settings/email/add/', methods=['GET', 'POST']) -@UI_NS.route('/settings/email/add', methods=['GET', 'POST']) +@UI_NS.route("/settings/email/add/", methods=["GET", "POST"]) +@UI_NS.route("/settings/email/add", methods=["GET", "POST"]) @login_required def add_user_email(): """ Add a new email for the logged in user. 
""" if admin_session_timedout(): return flask.redirect( - flask.url_for('auth_login', next=flask.request.url)) + flask.url_for("auth_login", next=flask.request.url) + ) user = _get_user(username=flask.g.fas_user.username) form = pagure.forms.UserEmailForm( - emails=[mail.email for mail in user.emails]) + emails=[mail.email for mail in user.emails] + ) if form.validate_on_submit(): email = form.email.data try: pagure.lib.add_user_pending_email(flask.g.session, user, email) flask.g.session.commit() - flask.flash('Email pending validation') - return flask.redirect(flask.url_for('ui_ns.user_settings')) + flask.flash("Email pending validation") + return flask.redirect(flask.url_for("ui_ns.user_settings")) except pagure.exceptions.PagureException as err: - flask.flash(str(err), 'error') + flask.flash(str(err), "error") except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() _log.exception(err) - flask.flash('Email could not be added', 'error') + flask.flash("Email could not be added", "error") - return flask.render_template( - 'user_emails.html', - user=user, - form=form, - ) + return flask.render_template("user_emails.html", user=user, form=form) -@UI_NS.route('/settings/email/default', methods=['POST']) +@UI_NS.route("/settings/email/default", methods=["POST"]) @login_required def set_default_email(): """ Set the default email address of the user. 
""" if admin_session_timedout(): return flask.redirect( - flask.url_for('auth_login', next=flask.request.url)) + flask.url_for("auth_login", next=flask.request.url) + ) user = _get_user(username=flask.g.fas_user.username) @@ -1251,34 +1261,34 @@ def set_default_email(): if email not in useremails: flask.flash( - 'You do not have the email: %s, nothing to set' % email, - 'error') - - return flask.redirect( - flask.url_for('ui_ns.user_settings') + "You do not have the email: %s, nothing to set" % email, + "error", ) + return flask.redirect(flask.url_for("ui_ns.user_settings")) + user.default_email = email try: flask.g.session.commit() - flask.flash('Default email set to: %s' % email) + flask.flash("Default email set to: %s" % email) except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() _log.exception(err) - flask.flash('Default email could not be set', 'error') + flask.flash("Default email could not be set", "error") - return flask.redirect(flask.url_for('ui_ns.user_settings')) + return flask.redirect(flask.url_for("ui_ns.user_settings")) -@UI_NS.route('/settings/email/resend', methods=['POST']) +@UI_NS.route("/settings/email/resend", methods=["POST"]) @login_required def reconfirm_email(): """ Re-send the email address of the user. 
""" if admin_session_timedout(): return flask.redirect( - flask.url_for('auth_login', next=flask.request.url)) + flask.url_for("auth_login", next=flask.request.url) + ) user = _get_user(username=flask.g.fas_user.username) @@ -1289,74 +1299,78 @@ def reconfirm_email(): try: pagure.lib.resend_pending_email(flask.g.session, user, email) flask.g.session.commit() - flask.flash('Confirmation email re-sent') + flask.flash("Confirmation email re-sent") except pagure.exceptions.PagureException as err: - flask.flash(str(err), 'error') + flask.flash(str(err), "error") except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() _log.exception(err) - flask.flash('Confirmation email could not be re-sent', 'error') + flask.flash("Confirmation email could not be re-sent", "error") - return flask.redirect(flask.url_for('ui_ns.user_settings')) + return flask.redirect(flask.url_for("ui_ns.user_settings")) -@UI_NS.route('/settings/email/confirm//') -@UI_NS.route('/settings/email/confirm/') +@UI_NS.route("/settings/email/confirm//") +@UI_NS.route("/settings/email/confirm/") def confirm_email(token): """ Confirm a new email. 
""" if admin_session_timedout(): return flask.redirect( - flask.url_for('auth_login', next=flask.request.url)) + flask.url_for("auth_login", next=flask.request.url) + ) email = pagure.lib.search_pending_email(flask.g.session, token=token) if not email: - flask.flash('No email associated with this token.', 'error') + flask.flash("No email associated with this token.", "error") else: try: pagure.lib.add_email_to_user( - flask.g.session, email.user, email.email) + flask.g.session, email.user, email.email + ) flask.g.session.delete(email) flask.g.session.commit() - flask.flash('Email validated') + flask.flash("Email validated") except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() flask.flash( - 'Could not set the account as active in the db, ' - 'please report this error to an admin', 'error') + "Could not set the account as active in the db, " + "please report this error to an admin", + "error", + ) _log.exception(err) - return flask.redirect(flask.url_for('ui_ns.user_settings')) + return flask.redirect(flask.url_for("ui_ns.user_settings")) -@UI_NS.route('/ssh_info/') -@UI_NS.route('/ssh_info') +@UI_NS.route("/ssh_info/") +@UI_NS.route("/ssh_info") def ssh_hostkey(): """ Endpoint returning information about the SSH hostkey and fingerprint of the current pagure instance. """ - return flask.render_template( - 'doc_ssh_keys.html', - ) + return flask.render_template("doc_ssh_keys.html") -@UI_NS.route('/settings/token/new/', methods=('GET', 'POST')) -@UI_NS.route('/settings/token/new', methods=('GET', 'POST')) +@UI_NS.route("/settings/token/new/", methods=("GET", "POST")) +@UI_NS.route("/settings/token/new", methods=("GET", "POST")) @login_required def add_api_user_token(): """ Create an user token (not project specific). 
""" if admin_session_timedout(): - if flask.request.method == 'POST': - flask.flash('Action canceled, try it again', 'error') + if flask.request.method == "POST": + flask.flash("Action canceled, try it again", "error") return flask.redirect( - flask.url_for('auth_login', next=flask.request.url)) + flask.url_for("auth_login", next=flask.request.url) + ) # Ensure the user is in the DB at least user = _get_user(username=flask.g.fas_user.username) acls = pagure.lib.get_acls( - flask.g.session, restrict=pagure_config.get('CROSS_PROJECT_ACLS')) + flask.g.session, restrict=pagure_config.get("CROSS_PROJECT_ACLS") + ) form = pagure.forms.NewTokenForm(acls=acls) if form.validate_on_submit(): @@ -1371,41 +1385,37 @@ def add_api_user_token(): flask.g.session.commit() flask.flash(msg) return flask.redirect( - flask.url_for('ui_ns.user_settings') + "#nav-api-tab") + flask.url_for("ui_ns.user_settings") + "#nav-api-tab" + ) except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() _log.exception(err) - flask.flash('API key could not be added', 'error') + flask.flash("API key could not be added", "error") # When form is displayed after an empty submission, show an error. - if form.errors.get('acls'): - flask.flash('You must select at least one permission.', 'error') + if form.errors.get("acls"): + flask.flash("You must select at least one permission.", "error") return flask.render_template( - 'add_token.html', - select='settings', - form=form, - acls=acls, + "add_token.html", select="settings", form=form, acls=acls ) -@UI_NS.route('/settings/token/revoke//', methods=['POST']) -@UI_NS.route('/settings/token/revoke/', methods=['POST']) +@UI_NS.route("/settings/token/revoke//", methods=["POST"]) +@UI_NS.route("/settings/token/revoke/", methods=["POST"]) @login_required def revoke_api_user_token(token_id): """ Revoke a user token (ie: not project specific). 
""" if admin_session_timedout(): - flask.flash('Action canceled, try it again', 'error') - url = flask.url_for('.user_settings') - return flask.redirect( - flask.url_for('auth_login', next=url)) + flask.flash("Action canceled, try it again", "error") + url = flask.url_for(".user_settings") + return flask.redirect(flask.url_for("auth_login", next=url)) token = pagure.lib.get_api_token(flask.g.session, token_id) - if not token \ - or token.user.username != flask.g.fas_user.username: - flask.abort(404, 'Token not found') + if not token or token.user.username != flask.g.fas_user.username: + flask.abort(404, "Token not found") form = pagure.forms.ConfirmationForm() @@ -1415,34 +1425,36 @@ def revoke_api_user_token(token_id): token.expiration = datetime.datetime.utcnow() flask.g.session.add(token) flask.g.session.commit() - flask.flash('Token revoked') + flask.flash("Token revoked") except SQLAlchemyError as err: # pragma: no cover flask.g.session.rollback() _log.exception(err) flask.flash( - 'Token could not be revoked, please contact an admin', - 'error') + "Token could not be revoked, please contact an admin", "error" + ) return flask.redirect( - flask.url_for('ui_ns.user_settings') + "#nav-api-token") + flask.url_for("ui_ns.user_settings") + "#nav-api-token" + ) -@UI_NS.route('/settings/forcelogout/', methods=('POST', )) -@UI_NS.route('/settings/forcelogout', methods=('POST', )) +@UI_NS.route("/settings/forcelogout/", methods=("POST",)) +@UI_NS.route("/settings/forcelogout", methods=("POST",)) @login_required def force_logout(): """ Set refuse_sessions_before, logging the user out everywhere """ if admin_session_timedout(): - if flask.request.method == 'POST': - flask.flash('Action canceled, try it again', 'error') + if flask.request.method == "POST": + flask.flash("Action canceled, try it again", "error") return flask.redirect( - flask.url_for('auth_login', next=flask.request.url)) + flask.url_for("auth_login", next=flask.request.url) + ) # Ensure the user is in the 
DB at least user = _get_user(username=flask.g.fas_user.username) user.refuse_sessions_before = datetime.datetime.utcnow() flask.g.session.commit() - flask.flash('All active sessions logged out') - return flask.redirect(flask.url_for('ui_ns.user_settings')) + flask.flash("All active sessions logged out") + return flask.redirect(flask.url_for("ui_ns.user_settings")) diff --git a/pagure/ui/fas_login.py b/pagure/ui/fas_login.py index 4756969..a11cc16 100644 --- a/pagure/ui/fas_login.py +++ b/pagure/ui/fas_login.py @@ -21,6 +21,7 @@ import pagure.utils from pagure.flask_app import logout from pagure.config import config as pagure_config import flask_fas_openid + FAS = flask_fas_openid.FAS() _log = logging.getLogger(__name__) @@ -28,28 +29,33 @@ _log = logging.getLogger(__name__) @FAS.postlogin def set_user(return_url): - ''' After login method. ''' + """ After login method. """ if flask.g.fas_user.username is None: flask.flash( - 'It looks like your OpenID provider did not provide an ' - 'username we could retrieve, username being needed we cannot ' - 'go further.', 'error') + "It looks like your OpenID provider did not provide an " + "username we could retrieve, username being needed we cannot " + "go further.", + "error", + ) logout() return flask.redirect(return_url) - flask.session['_new_user'] = False + flask.session["_new_user"] = False user = pagure.lib.search_user( - flask.g.session, username=flask.g.fas_user.username) + flask.g.session, username=flask.g.fas_user.username + ) if not user: - flask.session['_new_user'] = True + flask.session["_new_user"] = True else: user_email = pagure.lib.search_user( - flask.g.session, email=flask.g.fas_user.email) + flask.g.session, email=flask.g.fas_user.email + ) if user_email and user_email.user != user.user: flask.flash( - 'This email address seems to already be associated with ' - 'another account and thus can not be associated with yours', - 'error') + "This email address seems to already be associated with " + "another 
account and thus can not be associated with yours", + "error", + ) logout() return flask.redirect(return_url) @@ -59,14 +65,15 @@ def set_user(return_url): username=flask.g.fas_user.username, fullname=flask.g.fas_user.fullname, default_email=flask.g.fas_user.email, - ssh_key=flask.g.fas_user.get('ssh_key'), - keydir=pagure_config.get('GITOLITE_KEYDIR', None), + ssh_key=flask.g.fas_user.get("ssh_key"), + keydir=pagure_config.get("GITOLITE_KEYDIR", None), ) # If groups are managed outside pagure, set up the user at login - if not pagure_config.get('ENABLE_GROUP_MNGT', False): + if not pagure_config.get("ENABLE_GROUP_MNGT", False): user = pagure.lib.search_user( - flask.g.session, username=flask.g.fas_user.username) + flask.g.session, username=flask.g.fas_user.username + ) groups = set(user.groups) fas_groups = set(flask.g.fas_user.groups) # Add the new groups @@ -74,7 +81,8 @@ def set_user(return_url): groupobj = None if group: groupobj = pagure.lib.search_groups( - flask.g.session, group_name=group) + flask.g.session, group_name=group + ) if groupobj: try: pagure.lib.add_user_to_group( @@ -108,8 +116,10 @@ def set_user(return_url): flask.g.session.rollback() _log.exception(err) flask.flash( - 'Could not set up you as a user properly, please contact ' - 'an admin', 'error') + "Could not set up you as a user properly, please contact " + "an admin", + "error", + ) # Ensure the user is logged out if we cannot set them up # correctly logout() diff --git a/pagure/ui/filters.py b/pagure/ui/filters.py index c08e13a..2b1b59b 100644 --- a/pagure/ui/filters.py +++ b/pagure/ui/filters.py @@ -35,7 +35,7 @@ from pagure.utils import authenticated, is_repo_committer, is_true # Jinja filters -@UI_NS.app_template_filter('hasattr') +@UI_NS.app_template_filter("hasattr") def jinja_hasattr(obj, string): """ Template filter checking if the provided object at the provided string as attribute @@ -43,14 +43,14 @@ def jinja_hasattr(obj, string): return hasattr(obj, string) 
-@UI_NS.app_template_filter('render') +@UI_NS.app_template_filter("render") def jinja_render(tmpl, **kwargs): """ Render the given template with the provided arguments """ return flask.render_template_string(tmpl, **kwargs) -@UI_NS.app_template_filter('humanize') +@UI_NS.app_template_filter("humanize") def humanize_date(date): """ Template filter returning the last commit date of the provided repo. """ @@ -58,8 +58,8 @@ def humanize_date(date): return arrow.get(date).humanize() -@UI_NS.app_template_filter('format_ts') -@UI_NS.app_template_filter('format_datetime') +@UI_NS.app_template_filter("format_ts") +@UI_NS.app_template_filter("format_datetime") def format_ts(string): """ Template filter transforming a timestamp, datetime or anything else arrow.get() can handle to a human-readable date @@ -71,66 +71,78 @@ def format_ts(string): # %b or %d because they will be 'localized' for the *server*. # This format should be pretty 'locale-neutral'. arr = arrow.get(string) - return arr.strftime('%Y-%m-%d %H:%M:%S %Z') - - -@UI_NS.app_template_filter('format_loc') -def format_loc(loc, commit=None, filename=None, tree_id=None, prequest=None, - index=None, isprdiff=False): + return arr.strftime("%Y-%m-%d %H:%M:%S %Z") + + +@UI_NS.app_template_filter("format_loc") +def format_loc( + loc, + commit=None, + filename=None, + tree_id=None, + prequest=None, + index=None, + isprdiff=False, +): """ Template filter putting the provided lines of code into a table """ if loc is None: return - output = [ - '
    ', - '' - ] + output = ['
    ', '
    '] comments = {} if prequest and not isinstance(prequest, flask.wrappers.Request): for com in prequest.comments: - if commit and com.commit_id == commit.hex \ - and com.filename == filename: + if ( + commit + and com.commit_id == commit.hex + and com.filename == filename + ): if com.line in comments: comments[com.line].append(com) else: comments[com.line] = [com] for key in comments: - comments[key] = sorted( - comments[key], key=lambda obj: obj.date_created) + comments[key] = sorted(comments[key], key=lambda obj: obj.date_created) if not index: - index = '' + index = "" cnt = 1 - for line in loc.split('\n'): + for line in loc.split("\n"): if filename and commit: if isinstance(filename, str) and six.PY2: - filename = filename.decode('UTF-8') - - if isprdiff and (line.startswith('@@') or - line.startswith('+') or - line.startswith('-')): - if line.startswith('@@'): + filename = filename.decode("UTF-8") + + if isprdiff and ( + line.startswith("@@") + or line.startswith("+") + or line.startswith("-") + ): + if line.startswith("@@"): output.append( '' - % ({'cnt_lbl': cnt, 'commit': commit})) - elif line.startswith('+'): + % ({"cnt_lbl": cnt, "commit": commit}) + ) + elif line.startswith("+"): output.append( '' - % ({'cnt_lbl': cnt, 'commit': commit})) - elif line.startswith('-'): + % ({"cnt_lbl": cnt, "commit": commit}) + ) + elif line.startswith("-"): output.append( '' - % ({'cnt_lbl': cnt, 'commit': commit})) + % ({"cnt_lbl": cnt, "commit": commit}) + ) else: output.append( '' - % ({'cnt_lbl': cnt, 'commit': commit})) + % ({"cnt_lbl": cnt, "commit": commit}) + ) output.append( '' % ( + "

    " + "" + % ( { - 'cnt': '%s_%s' % (index, cnt), - 'cnt_lbl': cnt, - 'filename': filename, - 'commit': commit, - 'tree_id': tree_id, + "cnt": "%s_%s" % (index, cnt), + "cnt_lbl": cnt, + "filename": filename, + "commit": commit, + "tree_id": tree_id, } ) ) @@ -158,111 +171,136 @@ def format_loc(loc, commit=None, filename=None, tree_id=None, prequest=None, '' - % ( - { - 'cnt': '%s_%s' % (index, cnt), - 'cnt_lbl': cnt, - } - ) + % ({"cnt": "%s_%s" % (index, cnt), "cnt_lbl": cnt}) ) cnt += 1 if not line: output.append(line) continue - if line.startswith('@@'): + if line.startswith("@@"): if prequest and prequest.project_from: - rangeline = line.partition('@@ ')[2] \ - if line.partition('@@ ')[1] == '@@ ' else None + rangeline = ( + line.partition("@@ ")[2] + if line.partition("@@ ")[1] == "@@ " + else None + ) if rangeline: - rangeline = rangeline.split(' @@')[0] - linenumber = rangeline.split('+')[1].split(',')[0] + rangeline = rangeline.split(" @@")[0] + linenumber = rangeline.split("+")[1].split(",")[0] line = line + ' ' + \ - '' - - if isprdiff and (line.startswith('@@') or - line.startswith('+') or - line.startswith('-')): - if line.startswith('@@'): + filename=filename, + ), + linenumber, + ) + line = ( + line + + 'class="open_changed_file_icon_wrap">' + + '' + ) + + if isprdiff and ( + line.startswith("@@") + or line.startswith("+") + or line.startswith("-") + ): + if line.startswith("@@"): output.append( '' - % line) - elif line.startswith('+'): + % line + ) + elif line.startswith("+"): output.append( '' - % line) - elif line.startswith('-'): + % line + ) + elif line.startswith("-"): output.append( '' - % line) + % line + ) else: output.append( - '' % line) - - output.append('') - - tpl_edit = '' \ - '' \ - '' - tpl_edited = ' ' \ - 'Edited %(human_edit_date)s by %(user)s ' - - tpl_delete = '' + '' % line + ) + + output.append("") + + tpl_edit = ( + '' + '' + "" + ) + tpl_edited = ( + ' ' + "Edited %(human_edit_date)s by %(user)s " + ) + + tpl_delete = ( + 
'" + ) if cnt - 1 in comments: for comment in comments[cnt - 1]: - templ_delete = '' - templ_edit = '' - templ_edited = '' + templ_delete = "" + templ_edit = "" + templ_edited = "" if authenticated() and ( - ( - is_true(comment.parent.status, ['true', 'open']) - and comment.user.user == flask.g.fas_user.username - ) - or is_repo_committer(comment.parent.project)): - templ_delete = tpl_delete % ({'commentid': comment.id}) - templ_edit = tpl_edit % ({ - 'edit_url': flask.url_for( - 'ui_ns.pull_request_edit_comment', - repo=comment.parent.project.name, - requestid=comment.parent.id, - commentid=comment.id, - username=comment.parent.user.user - if comment.parent.project.is_fork else None - ), - 'requestid': comment.parent.id, - 'commentid': comment.id, - }) + ( + is_true(comment.parent.status, ["true", "open"]) + and comment.user.user == flask.g.fas_user.username + ) + or is_repo_committer(comment.parent.project) + ): + templ_delete = tpl_delete % ({"commentid": comment.id}) + templ_edit = tpl_edit % ( + { + "edit_url": flask.url_for( + "ui_ns.pull_request_edit_comment", + repo=comment.parent.project.name, + requestid=comment.parent.id, + commentid=comment.id, + username=comment.parent.user.user + if comment.parent.project.is_fork + else None, + ), + "requestid": comment.parent.id, + "commentid": comment.id, + } + ) if comment.edited_on: - templ_edited = tpl_edited % ({ - 'edit_date': format_ts(comment.edited_on), - 'human_edit_date': humanize_date(comment.edited_on), - 'user': comment.editor.user, - }) + templ_edited = tpl_edited % ( + { + "edit_date": format_ts(comment.edited_on), + "human_edit_date": humanize_date( + comment.edited_on + ), + "user": comment.editor.user, + } + ) output.append( '' @@ -270,54 +308,57 @@ def format_loc(loc, commit=None, filename=None, tree_id=None, prequest=None, '
    ' '
    ' - '
    ' + "
    " '
    ' '' '' - '%(user)s commented ' + "%(user)s commented " '' '%(human_date)s' - '
    ' - '
    ' + "
    " + "
    " '
    ' - '%(templ_edit)s' - '%(templ_delete)s' - '
    ' - '
    ' + "%(templ_edit)s" + "%(templ_delete)s" + "" + "" '
    ' - '' + "" '
    ' '
    ' - '%(comment)s' - '
    ' - '
    ' - '
    ' - '' % ( + "%(comment)s" + "" + "" + "" + "" + % ( { - 'url': flask.url_for( - 'ui_ns.view_user', username=comment.user.user), - 'templ_delete': templ_delete, - 'templ_edit': templ_edit, - 'templ_edited': templ_edited, - 'user': comment.user.user, - 'user_html': comment.user.html_title, - 'avatar_url': avatar_url( - comment.user.default_email, 16), - 'date': format_ts(comment.date_created), - 'human_date': humanize_date(comment.date_created), - 'comment': markdown_filter(comment.comment), - 'commentid': comment.id, + "url": flask.url_for( + "ui_ns.view_user", username=comment.user.user + ), + "templ_delete": templ_delete, + "templ_edit": templ_edit, + "templ_edited": templ_edited, + "user": comment.user.user, + "user_html": comment.user.html_title, + "avatar_url": avatar_url( + comment.user.default_email, 16 + ), + "date": format_ts(comment.date_created), + "human_date": humanize_date(comment.date_created), + "comment": markdown_filter(comment.comment), + "commentid": comment.id, } ) ) - output.append('
    ' @@ -139,17 +151,18 @@ def format_loc(loc, commit=None, filename=None, tree_id=None, prequest=None, '' - '

    ' + "

    " '' - '

    ' - '
    ' '\
    %s
    \
    %s
    \
    %s
    %s
    %s
    ') + output.append("") - return '\n'.join(output) + return "\n".join(output) -@UI_NS.app_template_filter('blame_loc') +@UI_NS.app_template_filter("blame_loc") def blame_loc(loc, repo, username, blame): """ Template filter putting the provided lines of code into a table @@ -340,16 +381,12 @@ def blame_loc(loc, repo, username, blame): return if not isinstance(loc, six.text_type): - raise ValueError( - '"loc" must be a unicode string, not %s' % type(loc)) + raise ValueError('"loc" must be a unicode string, not %s' % type(loc)) - output = [ - '
    ', - '' - ] + output = ['
    ', '
    '] - for idx, line in enumerate(loc.split('\n')): - if line == '': + for idx, line in enumerate(loc.split("\n")): + if line == "": break try: @@ -364,8 +401,7 @@ def blame_loc(loc, repo, username, blame): output.append( '' - % ({'cnt': idx + 1}) + '"%(cnt)s">' % ({"cnt": idx + 1}) ) committer = None @@ -374,52 +410,58 @@ def blame_loc(loc, repo, username, blame): except ValueError: pass output.append( - '' % (author_to_user( - committer, with_name=False) if committer else ' ') + '' + % ( + author_to_user(committer, with_name=False) + if committer + else " " + ) ) output.append( - '' % ( + '' + % ( flask.url_for( - 'ui_ns.view_commit', + "ui_ns.view_commit", repo=repo.name, username=username, namespace=repo.namespace, - commitid=diff.final_commit_id + commitid=diff.final_commit_id, ), - shorted_commit(diff.final_commit_id) + shorted_commit(diff.final_commit_id), ) ) output.append( - '' % line) - output.append('') + '' % line + ) + output.append("") - output.append('
    ' '%s%s%s%s
    %s
    %s
    ') + output.append("") - return '\n'.join(output) + return "\n".join(output) -@UI_NS.app_template_filter('wraps') +@UI_NS.app_template_filter("wraps") def text_wraps(text, size=10): """ Template filter to wrap text at a specified size """ if text: parts = textwrap.wrap(text, size) if len(parts) > 1: - parts = '%s...' % parts[0] + parts = "%s..." % parts[0] else: parts = parts[0] return parts -@UI_NS.app_template_filter('avatar') +@UI_NS.app_template_filter("avatar") def avatar(packager, size=64, css_class=None): """ Template filter that returns html for avatar of any given Username. """ if not isinstance(packager, six.text_type): - packager = packager.decode('utf-8') + packager = packager.decode("utf-8") - if '@' not in packager: + if "@" not in packager: user = pagure.lib.search_user(flask.g.session, username=packager) if user: packager = user.default_email @@ -430,26 +472,26 @@ def avatar(packager, size=64, css_class=None): output = '' % ( class_string, - avatar_url(packager, size) + avatar_url(packager, size), ) return output -@UI_NS.app_template_filter('avatar_url') +@UI_NS.app_template_filter("avatar_url") def avatar_url(email, size=64): """ Template filter that returns html for avatar of any given Email. """ return pagure.lib.avatar_url_from_email(email, size) -@UI_NS.app_template_filter('short') +@UI_NS.app_template_filter("short") def shorted_commit(cid): """Gets short version of the commit id""" - return ("%s" % cid)[:pagure_config['SHORT_LENGTH']] + return ("%s" % cid)[: pagure_config["SHORT_LENGTH"]] -@UI_NS.app_template_filter('markdown') +@UI_NS.app_template_filter("markdown") def markdown_filter(text): """ Template filter converting a string into html content using the markdown library. 
@@ -457,32 +499,34 @@ def markdown_filter(text): return pagure.lib.text2markdown(text) -@UI_NS.app_template_filter('patch_to_diff') +@UI_NS.app_template_filter("patch_to_diff") def patch_to_diff(patch): """Render a hunk as a diff""" content = [] for hunk in patch.hunks: - content.append("@@ -%i,%i +%i,%i @@\n" % ( - hunk.old_start, hunk.old_lines, hunk.new_start, hunk.new_lines)) + content.append( + "@@ -%i,%i +%i,%i @@\n" + % (hunk.old_start, hunk.old_lines, hunk.new_start, hunk.new_lines) + ) for line in hunk.lines: - if hasattr(line, 'content'): + if hasattr(line, "content"): origin = line.origin - if line.origin in ['<', '>', '=']: - origin = '' - content.append(origin + ' ' + line.content) + if line.origin in ["<", ">", "="]: + origin = "" + content.append(origin + " " + line.content) else: # Avoid situation where at the end of a file we get: # + foo< # \ No newline at end of file - if line[0] in ['<', '>', '=']: - line = ('', line[1]) - content.append(' '.join(line)) + if line[0] in ["<", ">", "="]: + line = ("", line[1]) + content.append(" ".join(line)) - return ''.join(content) + return "".join(content) -@UI_NS.app_template_filter('author2user') +@UI_NS.app_template_filter("author2user") def author_to_user(author, size=16, cssclass=None, with_name=True): """ Template filter transforming a pygit2 Author object into a text either with just the username or linking to the user in pagure. 
@@ -492,38 +536,43 @@ def author_to_user(author, size=16, cssclass=None, with_name=True): return output user = pagure.lib.search_user(flask.g.session, email=author.email) if user: - output = "%(avatar)s %(username)s" + ) if not with_name: - output = "%(avatar)s" + ) output = output % ( { - 'avatar': avatar(user.default_email, size), - 'url': flask.url_for( - 'ui_ns.view_user', username=user.username), - 'cssclass': ('class="%s"' % cssclass) if cssclass else '', - 'username': user.username, - 'name': escape(author.name), + "avatar": avatar(user.default_email, size), + "url": flask.url_for( + "ui_ns.view_user", username=user.username + ), + "cssclass": ('class="%s"' % cssclass) if cssclass else "", + "username": user.username, + "name": escape(author.name), } ) return output -@UI_NS.app_template_filter('author2avatar') +@UI_NS.app_template_filter("author2avatar") def author_to_avatar(author, size=32): """ Template filter transforming a pygit2 Author object into an avatar. """ if not author.email: - return '' + return "" user = pagure.lib.search_user(flask.g.session, email=author.email) output = user.default_email if user else author.email - return avatar(output.encode('utf-8'), size) + return avatar(output.encode("utf-8"), size) -@UI_NS.app_template_filter('author2user_commits') +@UI_NS.app_template_filter("author2user_commits") def author_to_user_commits(author, link, size=16, cssclass=None): """ Template filter transforming a pygit2 Author object into a text either with just the username or linking to the user in pagure. 
@@ -534,24 +583,24 @@ def author_to_user_commits(author, link, size=16, cssclass=None): user = pagure.lib.search_user(flask.g.session, email=author.email) if user: output = "%s %s" % ( - flask.url_for('ui_ns.view_user', username=user.username), + flask.url_for("ui_ns.view_user", username=user.username), avatar(user.default_email, size), link, - ('class="%s"' % cssclass) if cssclass else '', + ('class="%s"' % cssclass) if cssclass else "", author.name, ) return output -@UI_NS.app_template_filter('InsertDiv') +@UI_NS.app_template_filter("InsertDiv") def insert_div(content): """ Template filter inserting an opening
    and closing
    after the first title and then at the end of the content. """ # This is quite a hack but simpler solution using .replace() didn't work # for some reasons... - content = content.split('\n') + content = content.split("\n") output = [] for row in content: if row.startswith('
    ', '

    ' - '  ' + '  ', ) output.append(row) output = "\n".join(output) - output = output.replace('

    ', '\n
    ', 1) - output = output.replace('h1', 'h3') + output = output.replace("", "\n
    ", 1) + output = output.replace("h1", "h3") return output -@UI_NS.app_template_filter('noJS') +@UI_NS.app_template_filter("noJS") def no_js(content, ignore=None): """ Template filter replacing