From bff5ebf43e567fa05a4daa86ee2756be11411763 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Wed, 1 Mar 2023 17:23:43 +0000 Subject: [PATCH 1/8] ci: update workflows --- .github/workflows/comment-bot.yml | 22 +++++++++---------- .github/workflows/test.yml | 35 +++++++++++++------------------ README.rst | 4 ++-- 3 files changed, 26 insertions(+), 35 deletions(-) diff --git a/.github/workflows/comment-bot.yml b/.github/workflows/comment-bot.yml index 4451632..420c49b 100644 --- a/.github/workflows/comment-bot.yml +++ b/.github/workflows/comment-bot.yml @@ -1,25 +1,23 @@ name: Comment Bot on: - issue_comment: - types: [created] - pull_request_review_comment: - types: [created] + issue_comment: {types: [created]} + pull_request_review_comment: {types: [created]} jobs: tag: # /tag if: startsWith(github.event.comment.body, '/tag ') runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: React Seen - uses: actions/github-script@v2 + uses: actions/github-script@v6 with: script: | - const perm = await github.repos.getCollaboratorPermissionLevel({ + const perm = await github.rest.repos.getCollaboratorPermissionLevel({ owner: context.repo.owner, repo: context.repo.repo, username: context.payload.comment.user.login}) post = (context.eventName == "issue_comment" - ? github.reactions.createForIssueComment - : github.reactions.createForPullRequestReviewComment) + ? github.rest.reactions.createForIssueComment + : github.rest.reactions.createForPullRequestReviewComment) if (!["admin", "write"].includes(perm.data.permission)){ post({ owner: context.repo.owner, repo: context.repo.repo, @@ -39,12 +37,12 @@ jobs: BODY: ${{ github.event.comment.body }} GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} - name: React Success - uses: actions/github-script@v2 + uses: actions/github-script@v6 with: script: | post = (context.eventName == "issue_comment" - ? github.reactions.createForIssueComment - : github.reactions.createForPullRequestReviewComment) + ? 
github.rest.reactions.createForIssueComment + : github.rest.reactions.createForPullRequestReviewComment) post({ owner: context.repo.owner, repo: context.repo.repo, comment_id: context.payload.comment.id, content: "rocket"}) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index eb02da1..d1977c7 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -2,11 +2,10 @@ name: Test on: push: pull_request: - schedule: - - cron: '3 2 1 * *' # M H d m w (monthly at 2:03) + schedule: [{cron: '3 2 1 * *'}] # M H d m w (monthly at 2:03) jobs: check: - if: github.event_name != 'pull_request' || github.head_ref != 'devel' + if: github.event_name != 'pull_request' || !contains('OWNER,MEMBER,COLLABORATOR', github.event.pull_request.author_association) name: Check runs-on: ubuntu-latest steps: @@ -16,7 +15,7 @@ jobs: python-version: '3.x' - name: Prepare cache run: echo "PYSHA=$(python -VV | sha256sum | cut -d' ' -f1)" >> $GITHUB_ENV - - uses: actions/cache@v1 + - uses: actions/cache@v3 with: path: ~/.cache/pre-commit key: pre-commit|${{ env.PYSHA }}|${{ hashFiles('.pre-commit-config.yaml') }} @@ -39,17 +38,17 @@ jobs: - name: Lint run: pre-commit run -a --show-diff-on-failure test: - if: github.event_name != 'pull_request' || github.head_ref != 'devel' + if: github.event_name != 'pull_request' || !contains('OWNER,MEMBER,COLLABORATOR', github.event.pull_request.author_association) name: py${{ matrix.python }}-${{ matrix.os }} strategy: matrix: os: [ubuntu] - python: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9, '3.10'] + python: [3.7, 3.8, 3.9, '3.10', 3.11] include: - os: macos - python: 3.9 + python: 3.11 - os: windows - python: 3.9 + python: 3.11 runs-on: ${{ matrix.os }}-latest defaults: run: @@ -75,7 +74,6 @@ jobs: COVERALLS_REPO_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} finish: - if: github.event_name != 'pull_request' || github.head_ref != 'devel' name: Coverage continue-on-error: ${{ github.event_name != 'push' }} needs: test @@ -111,7 +109,7 @@ jobs: with: password: ${{ secrets.TWINE_PASSWORD }} gpg_key: ${{ secrets.GPG_KEY }} - upload: ${{ github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags') }} + upload: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags') }} - id: collect_assets name: Collect assets run: | @@ -125,19 +123,14 @@ jobs: echo ::set-output name=docker_tags::devel echo ::set-output name=snap_channel::edge fi - echo ::set-output name=tag::${GITHUB_REF#refs/tags/} - git log --pretty='format:%d%n- %s%n%b---' $(git tag --sort=v:refname | tail -n2 | head -n1)..HEAD > _CHANGES.md - if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') - uses: softprops/action-gh-release@v1 + name: Release + run: | + changelog=$(git log --pretty='format:%d%n- %s%n%b---' $(git tag --sort=v:refname | tail -n2 | head -n1)..HEAD) + tag="${GITHUB_REF#refs/tags/}" + gh release create --title "git-fame $tag stable" --draft --notes "$changelog" "$tag" dist/${{ steps.dist.outputs.whl }} dist/${{ steps.dist.outputs.whl_asc }} env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} - with: - name: git-fame ${{ steps.collect_assets.outputs.tag }} stable - body_path: _CHANGES.md - draft: true - files: | - dist/${{ steps.dist.outputs.whl }} - dist/${{ steps.dist.outputs.whl_asc }} + GH_TOKEN: ${{ github.token }} - uses: snapcore/action-build@v1 id: snap_build - if: github.event_name == 'push' && steps.collect_assets.outputs.snap_channel diff --git a/README.rst b/README.rst index 3e6cb76..0c0a507 100644 --- a/README.rst 
+++ b/README.rst @@ -327,8 +327,8 @@ We are grateful for all |GitHub-Contributions|. |README-Hits| -.. |Build-Status| image:: https://img.shields.io/github/workflow/status/casperdcl/git-fame/Test/master?logo=GitHub - :target: https://github.com/casperdcl/git-fame/actions?query=workflow%3ATest +.. |Build-Status| image:: https://img.shields.io/github/actions/workflow/status/casperdcl/git-fame/test.yml?branch=master&label=git-fame&logo=GitHub + :target: https://github.com/casperdcl/git-fame/actions/workflows/test.yml .. |Coverage-Status| image:: https://img.shields.io/coveralls/github/casperdcl/git-fame/master?logo=coveralls :target: https://coveralls.io/github/casperdcl/git-fame .. |Branch-Coverage-Status| image:: https://codecov.io/gh/casperdcl/git-fame/branch/master/graph/badge.svg From 38013931f3a8e6d290b89e9972e7dfd9bae4551a Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Wed, 1 Mar 2023 18:14:07 +0000 Subject: [PATCH 2/8] tests: update pre-commit, drop py<=3.6 --- .pre-commit-config.yaml | 15 ++++++++++----- LICENCE | 2 +- setup.cfg | 16 ++++++---------- tox.ini | 2 +- 4 files changed, 18 insertions(+), 17 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 72e196a..9a2654e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ default_language_version: python: python3 repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.3.0 + rev: v4.4.0 hooks: - id: check-added-large-files - id: check-case-conflict @@ -37,8 +37,8 @@ repos: - argopt - tabulate - tqdm -- repo: https://gitlab.com/pycqa/flake8 - rev: 3.9.2 +- repo: https://github.com/PyCQA/flake8 + rev: 5.0.4 hooks: - id: flake8 args: [-j8] @@ -49,8 +49,13 @@ repos: - flake8-debugger - flake8-isort - flake8-string-format - - flake8-type-annotations +- repo: https://github.com/google/yapf + rev: v0.32.0 + hooks: + - id: yapf + args: [-i] + additional_dependencies: [toml] - repo: https://github.com/PyCQA/isort - rev: 5.10.1 + rev: 5.12.0 hooks: - id: isort diff --git a/LICENCE b/LICENCE index 1e30313..7315c38 100644 --- a/LICENCE +++ b/LICENCE @@ -1,5 +1,5 @@ * files: * - MPLv2.0 2016-2022 (c) Casper da Costa-Luis + MPLv2.0 2016-2023 (c) Casper da Costa-Luis [casperdcl](https://github.com/casperdcl). 
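Aside on the new "if:" guard added to .github/workflows/test.yml in PATCH 1/8 above: in GitHub Actions expressions, contains(search, item) with a string search argument is a substring test, so !contains('OWNER,MEMBER,COLLABORATOR', github.event.pull_request.author_association) limits PR-triggered runs to external contributors, since collaborators' pushes already trigger the same workflow via the push event. A minimal Python sketch of that clause follows; the function name and sample association values are illustrative only and not part of the patch.

def run_for_pull_request(author_association: str) -> bool:
    # mirrors !contains('OWNER,MEMBER,COLLABORATOR', author_association):
    # skip PR-triggered jobs for repo insiders, whose pushes already run the workflow
    return author_association not in 'OWNER,MEMBER,COLLABORATOR'

assert not run_for_pull_request('MEMBER')              # insider PR: job skipped
assert run_for_pull_request('FIRST_TIME_CONTRIBUTOR')  # external PR: job runs
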
diff --git a/setup.cfg b/setup.cfg index 530e0eb..c02e836 100644 --- a/setup.cfg +++ b/setup.cfg @@ -41,16 +41,13 @@ classifiers= Operating System :: POSIX :: SunOS/Solaris Operating System :: Unix Programming Language :: Python - Programming Language :: Python :: 2 - Programming Language :: Python :: 2.7 Programming Language :: Python :: 3 - Programming Language :: Python :: 3.5 - Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 + Programming Language :: Python :: 3 :: Only Programming Language :: Python :: Implementation Programming Language :: Python :: Implementation :: IronPython Programming Language :: Python :: Implementation :: PyPy @@ -73,7 +70,7 @@ classifiers= Topic :: Utilities [options] setup_requires=setuptools>=42; setuptools_scm[toml]>=3.4 -python_requires=>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* +python_requires=>=3.7 install_requires=argopt>=0.3.5; setuptools; tabulate; tqdm>=4.44.0 tests_require=tox include_package_data=True @@ -90,8 +87,6 @@ console_scripts= exclude=tests [options.package_data] gitfame=git-fame.1 -[bdist_wheel] -universal=1 [flake8] extend_ignore=E111,E114 @@ -99,17 +94,19 @@ max_line_length=99 exclude=.eggs,.tox,build,dist,.git,__pycache__ [yapf] +spaces_before_comment=15, 20 +arithmetic_precedence_indication=true +allow_split_before_dict_value=false coalesce_brackets=True column_limit=99 each_dict_entry_on_separate_line=False -i18n_comment=NOQA space_between_ending_comma_and_closing_bracket=False split_before_named_assigns=False split_before_closing_bracket=False +blank_line_before_nested_class_or_def=False [isort] line_length=99 -multi_line_output=4 known_first_party=gitfame,tests [tool:pytest] @@ -124,6 +121,5 @@ branch=True omit= tests/* relative_files=True -#disable_warnings=include-ignored [coverage:report] show_missing=True diff --git a/tox.ini b/tox.ini index 787409d..a8573cf 100644 --- a/tox.ini +++ b/tox.ini @@ -35,7 +35,7 @@ commands= codecov -X pycov -e TOXENV [testenv] -passenv=TOXENV CI GITHUB_* CODECOV_* COVERALLS_* HOME +passenv=TOXENV,CI,GITHUB_*,CODECOV_*,COVERALLS_*,HOME deps= {[core]deps} tqdm From f926fc3b5a8a4408b3669fa0b8ce1c006fe460bf Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Wed, 1 Mar 2023 18:19:01 +0000 Subject: [PATCH 3/8] lint --- gitfame/__init__.py | 8 +- gitfame/__main__.py | 2 +- gitfame/_gitfame.py | 753 ++++++++++++++++++++--------------------- gitfame/_utils.py | 190 +++++------ setup.py | 6 +- tests/tests_gitfame.py | 246 +++++++------- tests/tests_utils.py | 42 +-- 7 files changed, 601 insertions(+), 646 deletions(-) diff --git a/gitfame/__init__.py b/gitfame/__init__.py index bf31531..353000d 100644 --- a/gitfame/__init__.py +++ b/gitfame/__init__.py @@ -1,5 +1,5 @@ -from ._gitfame import ( - __author__, __copyright__, __date__, __licence__, __license__, __version__, main) +from ._gitfame import (__author__, __copyright__, __date__, __licence__, __license__, __version__, + main) -__all__ = ['main', '__author__', '__date__', '__licence__', '__copyright__', - '__version__', '__license__'] +__all__ = [ + 'main', '__author__', '__date__', '__licence__', '__copyright__', '__version__', '__license__'] diff --git a/gitfame/__main__.py b/gitfame/__main__.py index 7d70827..be6a628 100644 --- a/gitfame/__main__.py +++ b/gitfame/__main__.py @@ -1,3 +1,3 @@ from ._gitfame import main # pragma: no cover -main() # pragma: no 
cover +main() # pragma: no cover diff --git a/gitfame/_gitfame.py b/gitfame/_gitfame.py index f33c706..f36af37 100755 --- a/gitfame/_gitfame.py +++ b/gitfame/_gitfame.py @@ -63,9 +63,8 @@ from functools import partial from os import path -from ._utils import ( - TERM_WIDTH, Str, TqdmStream, _str, check_output, fext, int_cast_or_len, merge_stats, - print_unicode, string_types, tqdm) +from ._utils import (TERM_WIDTH, Str, TqdmStream, _str, check_output, fext, int_cast_or_len, + merge_stats, print_unicode, string_types, tqdm) # version detector. Precedence: installed dist, git, 'UNKNOWN' try: @@ -81,13 +80,12 @@ __licence__ = "[MPLv2.0](https://mozilla.org/MPL/2.0/)" __all__ = ["main"] __copyright__ = ' '.join(("Copyright (c)", __date__, __author__, __licence__)) -__license__ = __licence__ # weird foreign language +__license__ = __licence__ # weird foreign language log = logging.getLogger(__name__) # processing `blame --line-porcelain` -RE_AUTHS_BLAME = re.compile( - r'^\w+ \d+ \d+ (\d+)\nauthor (.+?)$.*?\ncommitter-time (\d+)', - flags=re.M | re.DOTALL) +RE_AUTHS_BLAME = re.compile(r'^\w+ \d+ \d+ (\d+)\nauthor (.+?)$.*?\ncommitter-time (\d+)', + flags=re.M | re.DOTALL) RE_NCOM_AUTH_EM = re.compile(r'^\s*(\d+)\s+(.*?)\s+<(.*)>\s*$', flags=re.M) RE_BLAME_BOUNDS = re.compile( r'^\w+\s+\d+\s+\d+(\s+\d+)?\s*$[^\t]*?^boundary\s*$[^\t]*?^\t.*?$\r?\n', @@ -103,416 +101,389 @@ COST_MONTHS = {'cocomo', 'month', 'months'} COST_HOURS = {'commit', 'commits', 'hour', 'hours'} CHURN_SLOC = {'surv', 'survive', 'surviving'} -CHURN_INS = {'ins', 'insert', 'insertion', 'insertions', - 'add', 'addition', 'additions', '+'} +CHURN_INS = {'ins', 'insert', 'insertion', 'insertions', 'add', 'addition', 'additions', '+'} CHURN_DEL = {'del', 'deletion', 'deletions', 'delete', '-'} def hours(dates, maxCommitDiffInSec=120 * 60, firstCommitAdditionInMinutes=120): - """ + """ Convert list of commit times (in seconds) to an estimate of hours spent. 
https://github.com/kimmobrunfeldt/git-hours/blob/\ 8aaeee237cb9d9028e7a2592a25ad8468b1f45e4/index.js#L114-L143 """ - dates = sorted(dates) - diffInSec = [i - j for (i, j) in zip(dates[1:], dates[:-1])] - res = sum(filter(lambda i: i < maxCommitDiffInSec, diffInSec)) - return (res / 60.0 + firstCommitAdditionInMinutes) / 60.0 + dates = sorted(dates) + diffInSec = [i - j for (i, j) in zip(dates[1:], dates[:-1])] + res = sum(filter(lambda i: i < maxCommitDiffInSec, diffInSec)) + return (res/60.0 + firstCommitAdditionInMinutes) / 60.0 -def tabulate( - auth_stats, stats_tot, sort='loc', bytype=False, backend='md', - cost=None, row_nums=False): - """ +def tabulate(auth_stats, stats_tot, sort='loc', bytype=False, backend='md', cost=None, + row_nums=False): + """ backends : [default: md]|yaml|json|csv|tsv|tabulate| `in tabulate.tabulate_formats` """ - COL_NAMES = ['Author', 'loc', 'coms', 'fils', ' distribution'] - it_as = getattr(auth_stats, 'iteritems', auth_stats.items) - # get ready - tab = [[auth, - s['loc'], - s.get('commits', 0), - len(s.get('files', [])), - '/'.join(map('{0:4.1f}'.format, ( - 100 * s['loc'] / max(1, stats_tot['loc']), - 100 * s.get('commits', 0) / max(1, stats_tot['commits']), - 100 * len(s.get('files', [])) / max(1, stats_tot['files']) - ))).replace('/100.0/', '/ 100/')] - for (auth, s) in it_as()] - if cost: - stats_tot = dict(stats_tot) - if cost & COST_MONTHS: - COL_NAMES.insert(1, 'mths') - tab = [i[:1] + [3.2 * (i[1] / 1e3)**1.05] + i[1:] for i in tab] - stats_tot.setdefault('months', '%.1f' % sum(i[1] for i in tab)) - if cost & COST_HOURS: - COL_NAMES.insert(1, 'hrs') - tab = [i[:1] + [hours(auth_stats[i[0]]['ctimes'])] + i[1:] for i in tab] - - stats_tot.setdefault('hours', '%.1f' % sum(i[1] for i in tab)) - # log.debug(auth_stats) - - for i, j in [("commits", "coms"), ("files", "fils"), ("hours", "hrs"), - ("months", "mths")]: - sort = sort.replace(i, j) - tab.sort(key=lambda i: i[COL_NAMES.index(sort)], reverse=True) - if row_nums: - tab = [[str(i)] + j for i, j in enumerate(tab, 1)] - COL_NAMES.insert(0, '#') - - totals = 'Total ' + '\nTotal '.join( - "%s: %s" % i for i in sorted(stats_tot.items())) + '\n' - - backend = backend.lower() - if backend in ("tabulate", "md", "markdown"): - backend = "pipe" - - if backend in ['yaml', 'yml', 'json', 'csv', 'tsv']: - tab = [i[:-1] + [float(pc.strip()) for pc in i[-1].split('/')] for i in tab] - tab = {"total": stats_tot, "data": tab, - "columns": COL_NAMES[:-1] + ['%' + i for i in COL_NAMES[-4:-1]]} - if backend in ['yaml', 'yml']: - log.debug("backend:yaml") - from yaml import safe_dump as tabber - return tabber(tab).rstrip() - elif backend == 'json': - log.debug("backend:json") - from json import dumps as tabber - return tabber(tab, ensure_ascii=False) - elif backend in ['csv', 'tsv']: - log.debug("backend:csv") - from csv import writer as tabber - - from ._utils import StringIO - res = StringIO() - t = tabber(res, delimiter=',' if backend == 'csv' else '\t') - t.writerow(tab['columns']) - t.writerows(tab['data']) - t.writerow('') - t.writerow(list(tab['total'].keys())) - t.writerow(list(tab['total'].values())) - return res.getvalue().rstrip() - else: # pragma: nocover - raise RuntimeError("Should be unreachable") - else: - import tabulate as tabber - if backend not in tabber.tabulate_formats: - raise ValueError("Unknown backend:%s" % backend) - log.debug("backend:tabulate:" + backend) - COL_LENS = [max(len(Str(i[j])) for i in [COL_NAMES] + tab) - for j in range(len(COL_NAMES))] - COL_LENS[0] = min( - TERM_WIDTH - 
sum(COL_LENS[1:]) - len(COL_LENS) * 3 - 4, - COL_LENS[0]) - tab = [[i[0][:COL_LENS[0]]] + i[1:] for i in tab] - return totals + tabber.tabulate( - tab, COL_NAMES, tablefmt=backend, floatfmt='.0f') - # from ._utils import tighten - # return totals + tighten(tabber(...), max_width=TERM_WIDTH) - - -def _get_auth_stats( - gitdir, branch="HEAD", since=None, include_files=None, - exclude_files=None, silent_progress=False, ignore_whitespace=False, - M=False, C=False, warn_binary=False, bytype=False, show_email=False, - prefix_gitdir=False, churn=None, ignore_rev="", ignore_revs_file=None): - """Returns dict: {"": {"loc": int, "files": {}, "commits": int, + COL_NAMES = ['Author', 'loc', 'coms', 'fils', ' distribution'] + it_as = getattr(auth_stats, 'iteritems', auth_stats.items) + # get ready + tab = [[ + auth, s['loc'], + s.get('commits', 0), + len(s.get('files', [])), '/'.join( + map('{0:4.1f}'.format, + (100 * s['loc'] / max(1, stats_tot['loc']), + 100 * s.get('commits', 0) / max(1, stats_tot['commits']), + 100 * len(s.get('files', [])) / max(1, stats_tot['files'])))).replace( + '/100.0/', '/ 100/')] for (auth, s) in it_as()] + if cost: + stats_tot = dict(stats_tot) + if cost & COST_MONTHS: + COL_NAMES.insert(1, 'mths') + tab = [i[:1] + [3.2 * (i[1] / 1e3)**1.05] + i[1:] for i in tab] + stats_tot.setdefault('months', '%.1f' % sum(i[1] for i in tab)) + if cost & COST_HOURS: + COL_NAMES.insert(1, 'hrs') + tab = [i[:1] + [hours(auth_stats[i[0]]['ctimes'])] + i[1:] for i in tab] + + stats_tot.setdefault('hours', '%.1f' % sum(i[1] for i in tab)) + # log.debug(auth_stats) + + for i, j in [("commits", "coms"), ("files", "fils"), ("hours", "hrs"), ("months", "mths")]: + sort = sort.replace(i, j) + tab.sort(key=lambda i: i[COL_NAMES.index(sort)], reverse=True) + if row_nums: + tab = [[str(i)] + j for i, j in enumerate(tab, 1)] + COL_NAMES.insert(0, '#') + + totals = 'Total ' + '\nTotal '.join("%s: %s" % i for i in sorted(stats_tot.items())) + '\n' + + backend = backend.lower() + if backend in ("tabulate", "md", "markdown"): + backend = "pipe" + + if backend in ['yaml', 'yml', 'json', 'csv', 'tsv']: + tab = [i[:-1] + [float(pc.strip()) for pc in i[-1].split('/')] for i in tab] + tab = { + "total": stats_tot, "data": tab, + "columns": COL_NAMES[:-1] + ['%' + i for i in COL_NAMES[-4:-1]]} + if backend in ['yaml', 'yml']: + log.debug("backend:yaml") + from yaml import safe_dump as tabber + return tabber(tab).rstrip() + elif backend == 'json': + log.debug("backend:json") + from json import dumps as tabber + return tabber(tab, ensure_ascii=False) + elif backend in ['csv', 'tsv']: + log.debug("backend:csv") + from csv import writer as tabber + + from ._utils import StringIO + res = StringIO() + t = tabber(res, delimiter=',' if backend == 'csv' else '\t') + t.writerow(tab['columns']) + t.writerows(tab['data']) + t.writerow('') + t.writerow(list(tab['total'].keys())) + t.writerow(list(tab['total'].values())) + return res.getvalue().rstrip() + else: # pragma: nocover + raise RuntimeError("Should be unreachable") + else: + import tabulate as tabber + if backend not in tabber.tabulate_formats: + raise ValueError("Unknown backend:%s" % backend) + log.debug("backend:tabulate:" + backend) + COL_LENS = [max(len(Str(i[j])) for i in [COL_NAMES] + tab) for j in range(len(COL_NAMES))] + COL_LENS[0] = min(TERM_WIDTH - sum(COL_LENS[1:]) - len(COL_LENS) * 3 - 4, COL_LENS[0]) + tab = [[i[0][:COL_LENS[0]]] + i[1:] for i in tab] + return totals + tabber.tabulate(tab, COL_NAMES, tablefmt=backend, floatfmt='.0f') + # from ._utils 
import tighten + # return totals + tighten(tabber(...), max_width=TERM_WIDTH) + + +def _get_auth_stats(gitdir, branch="HEAD", since=None, include_files=None, exclude_files=None, + silent_progress=False, ignore_whitespace=False, M=False, C=False, + warn_binary=False, bytype=False, show_email=False, prefix_gitdir=False, + churn=None, ignore_rev="", ignore_revs_file=None): + """Returns dict: {"": {"loc": int, "files": {}, "commits": int, "ctimes": [int]}}""" - since = ["--since", since] if since else [] - git_cmd = ["git", "-C", gitdir] - log.debug("base command:" + ' '.join(git_cmd)) - file_list = check_output( - git_cmd + ["ls-files", "--with-tree", branch]).strip().split('\n') - if not hasattr(include_files, 'search'): - file_list = [i for i in file_list - if (not include_files or (i in include_files)) - if i not in exclude_files] - else: - file_list = [i for i in file_list - if include_files.search(i) - if not (exclude_files and exclude_files.search(i))] - log.log(logging.NOTSET, "files:\n" + '\n'.join(file_list)) - churn = churn or set() - - if churn & CHURN_SLOC: - base_cmd = git_cmd + ["blame", "--line-porcelain"] + since - if ignore_rev: - base_cmd.extend(["--ignore-rev", ignore_rev]) - if ignore_revs_file: - base_cmd.extend(["--ignore-revs-file", ignore_revs_file]) - else: - base_cmd = git_cmd + ["log", "--format=aN%aN ct%ct", "--numstat"] + since - - if ignore_whitespace: - base_cmd.append("-w") - if M: - base_cmd.append("-M") - if C: - base_cmd.extend(["-C", "-C"]) # twice to include file creation - - auth_stats = {} - - def stats_append(fname, auth, loc, tstamp): - auth = _str(auth) - tstamp = int(tstamp) - try: - auth_stats[auth]["loc"] += loc - except KeyError: - auth_stats[auth] = {"loc": loc, "files": {fname}, "ctimes": []} + since = ["--since", since] if since else [] + git_cmd = ["git", "-C", gitdir] + log.debug("base command:" + ' '.join(git_cmd)) + file_list = check_output(git_cmd + ["ls-files", "--with-tree", branch]).strip().split('\n') + if not hasattr(include_files, 'search'): + file_list = [ + i for i in file_list if (not include_files or (i in include_files)) + if i not in exclude_files] else: - auth_stats[auth]["files"].add(fname) - auth_stats[auth]["ctimes"].append(tstamp) - - if bytype: - fext_key = ("." 
+ fext(fname)) if fext(fname) else "._None_ext" - # auth_stats[auth].setdefault(fext_key, 0) - try: - auth_stats[auth][fext_key] += loc - except KeyError: - auth_stats[auth][fext_key] = loc - - if churn & CHURN_SLOC: - for fname in tqdm(file_list, desc=gitdir if prefix_gitdir else "Processing", - disable=silent_progress, unit="file"): - - if prefix_gitdir: - fname = path.join(gitdir, fname) - try: - blame_out = check_output( - base_cmd + [branch, fname], stderr=subprocess.STDOUT) - except Exception as err: - getattr(log, "warn" if warn_binary else "debug")(fname + ':' + str(err)) - continue - log.log(logging.NOTSET, blame_out) - - # Strip boundary messages, - # preventing user with nearest commit to boundary owning the LOC - blame_out = RE_BLAME_BOUNDS.sub('', blame_out) - loc_auth_times = RE_AUTHS_BLAME.findall(blame_out) - - for loc, auth, tstamp in loc_auth_times: # for each chunk - loc = int(loc) - stats_append(fname, auth, loc, tstamp) - else: - with tqdm(total=1, desc=gitdir if prefix_gitdir else "Processing", - disable=silent_progress, unit="repo") as t: - blame_out = check_output(base_cmd + [branch], stderr=subprocess.STDOUT) - t.update() - log.log(logging.NOTSET, blame_out) - - # Strip binary files - for fname in set(RE_STAT_BINARY.findall(blame_out)): - getattr(log, "warn" if warn_binary else "debug")( - "binary:" + fname.strip()) - blame_out = RE_STAT_BINARY.sub('', blame_out) - - blame_out = RE_AUTHS_LOG.split(blame_out) - blame_out = zip(blame_out[1::3], blame_out[2::3], blame_out[3::3]) - for auth, tstamp, fnames in blame_out: - fnames = fnames.split('\naN', 1)[0] - for i in fnames.strip().split('\n'): - try: - inss, dels, fname = i.split('\t') - except ValueError: - log.warning(i) - else: - fname = RE_RENAME.sub(r'\\2', fname) - loc = ( - int(inss) if churn & CHURN_INS and inss else 0) + ( - int(dels) if churn & CHURN_DEL and dels else 0) - stats_append(fname, auth, loc, tstamp) - - # quickly count commits (even if no surviving loc) - log.log(logging.NOTSET, "authors:" + '; '.join(auth_stats.keys())) - auth_commits = check_output( - git_cmd + ["shortlog", "-s", "-e", branch] + since) - for stats in auth_stats.values(): - stats.setdefault("commits", 0) - log.debug(RE_NCOM_AUTH_EM.findall(auth_commits.strip())) - auth2em = {} - for (ncom, auth, em) in RE_NCOM_AUTH_EM.findall(auth_commits.strip()): - auth = _str(auth) - auth2em[auth] = em # TODO: count most used email? 
- try: - auth_stats[auth]["commits"] += int(ncom) - except KeyError: - auth_stats[auth] = {"loc": 0, - "files": set(), - "commits": int(ncom), - "ctimes": []} - if show_email: - # replace author name with email - log.debug(auth2em) - old = auth_stats + file_list = [ + i for i in file_list if include_files.search(i) + if not (exclude_files and exclude_files.search(i))] + log.log(logging.NOTSET, "files:\n" + '\n'.join(file_list)) + churn = churn or set() + + if churn & CHURN_SLOC: + base_cmd = git_cmd + ["blame", "--line-porcelain"] + since + if ignore_rev: + base_cmd.extend(["--ignore-rev", ignore_rev]) + if ignore_revs_file: + base_cmd.extend(["--ignore-revs-file", ignore_revs_file]) + else: + base_cmd = git_cmd + ["log", "--format=aN%aN ct%ct", "--numstat"] + since + + if ignore_whitespace: + base_cmd.append("-w") + if M: + base_cmd.append("-M") + if C: + base_cmd.extend(["-C", "-C"]) # twice to include file creation + auth_stats = {} - for auth, stats in getattr(old, 'iteritems', old.items)(): - i = auth_stats.setdefault(auth2em[auth], {"loc": 0, - "files": set(), - "commits": 0, - "ctimes": []}) - i["loc"] += stats["loc"] - i["files"].update(stats["files"]) - i["commits"] += stats["commits"] - i["ctimes"] += stats["ctimes"] - del old - return auth_stats + def stats_append(fname, auth, loc, tstamp): + auth = _str(auth) + tstamp = int(tstamp) + try: + auth_stats[auth]["loc"] += loc + except KeyError: + auth_stats[auth] = {"loc": loc, "files": {fname}, "ctimes": []} + else: + auth_stats[auth]["files"].add(fname) + auth_stats[auth]["ctimes"].append(tstamp) + + if bytype: + fext_key = ("." + fext(fname)) if fext(fname) else "._None_ext" + # auth_stats[auth].setdefault(fext_key, 0) + try: + auth_stats[auth][fext_key] += loc + except KeyError: + auth_stats[auth][fext_key] = loc + + if churn & CHURN_SLOC: + for fname in tqdm(file_list, desc=gitdir if prefix_gitdir else "Processing", + disable=silent_progress, unit="file"): + + if prefix_gitdir: + fname = path.join(gitdir, fname) + try: + blame_out = check_output(base_cmd + [branch, fname], stderr=subprocess.STDOUT) + except Exception as err: + getattr(log, "warn" if warn_binary else "debug")(fname + ':' + str(err)) + continue + log.log(logging.NOTSET, blame_out) + + # Strip boundary messages, + # preventing user with nearest commit to boundary owning the LOC + blame_out = RE_BLAME_BOUNDS.sub('', blame_out) + loc_auth_times = RE_AUTHS_BLAME.findall(blame_out) + + for loc, auth, tstamp in loc_auth_times: # for each chunk + loc = int(loc) + stats_append(fname, auth, loc, tstamp) + else: + with tqdm(total=1, desc=gitdir if prefix_gitdir else "Processing", disable=silent_progress, + unit="repo") as t: + blame_out = check_output(base_cmd + [branch], stderr=subprocess.STDOUT) + t.update() + log.log(logging.NOTSET, blame_out) + + # Strip binary files + for fname in set(RE_STAT_BINARY.findall(blame_out)): + getattr(log, "warn" if warn_binary else "debug")("binary:" + fname.strip()) + blame_out = RE_STAT_BINARY.sub('', blame_out) + + blame_out = RE_AUTHS_LOG.split(blame_out) + blame_out = zip(blame_out[1::3], blame_out[2::3], blame_out[3::3]) + for auth, tstamp, fnames in blame_out: + fnames = fnames.split('\naN', 1)[0] + for i in fnames.strip().split('\n'): + try: + inss, dels, fname = i.split('\t') + except ValueError: + log.warning(i) + else: + fname = RE_RENAME.sub(r'\\2', fname) + loc = (int(inss) if churn & CHURN_INS and inss else + 0) + (int(dels) if churn & CHURN_DEL and dels else 0) + stats_append(fname, auth, loc, tstamp) + + # quickly count 
commits (even if no surviving loc) + log.log(logging.NOTSET, "authors:" + '; '.join(auth_stats.keys())) + auth_commits = check_output(git_cmd + ["shortlog", "-s", "-e", branch] + since) + for stats in auth_stats.values(): + stats.setdefault("commits", 0) + log.debug(RE_NCOM_AUTH_EM.findall(auth_commits.strip())) + auth2em = {} + for (ncom, auth, em) in RE_NCOM_AUTH_EM.findall(auth_commits.strip()): + auth = _str(auth) + auth2em[auth] = em # TODO: count most used email? + try: + auth_stats[auth]["commits"] += int(ncom) + except KeyError: + auth_stats[auth] = {"loc": 0, "files": set(), "commits": int(ncom), "ctimes": []} + if show_email: + # replace author name with email + log.debug(auth2em) + old = auth_stats + auth_stats = {} + for auth, stats in getattr(old, 'iteritems', old.items)(): + i = auth_stats.setdefault(auth2em[auth], + {"loc": 0, "files": set(), "commits": 0, "ctimes": []}) + i["loc"] += stats["loc"] + i["files"].update(stats["files"]) + i["commits"] += stats["commits"] + i["ctimes"] += stats["ctimes"] + del old + + return auth_stats def run(args): - """args : Namespace (`argopt.DictAttrWrap` or from `argparse`)""" - log.debug("parsing args") - - if args.sort not in "loc commits files hours months".split(): - log.warning("--sort argument (%s) unrecognised\n%s" % ( - args.sort, __doc__)) - raise KeyError(args.sort) - - if not args.excl: - args.excl = "" - - if isinstance(args.gitdir, string_types): - args.gitdir = [args.gitdir] - # strip `/`, `.git` - gitdirs = [i.rstrip(os.sep) for i in args.gitdir] - gitdirs = [path.join(*path.split(i)[:-1]) if path.split(i)[-1] == '.git' else i - for i in args.gitdir] - # remove duplicates - for i, d in reversed(list(enumerate(gitdirs))): - if d in gitdirs[:i]: - gitdirs.pop(i) - # recurse - if args.recurse: - nDirs = len(gitdirs) - i = 0 - while i < nDirs: - if path.isdir(gitdirs[i]): - for root, dirs, fns in tqdm(os.walk(gitdirs[i]), desc="Recursing", unit="dir", - disable=args.silent_progress, leave=False): - if '.git' in fns + dirs: - if root not in gitdirs: - gitdirs.append(root) - if '.git' in dirs: - dirs.remove('.git') - i += 1 - - exclude_files = None - include_files = None - if args.no_regex: - exclude_files = set(RE_CSPILT.split(args.excl)) - include_files = set() - if args.incl == ".*": - args.incl = "" + """args : Namespace (`argopt.DictAttrWrap` or from `argparse`)""" + log.debug("parsing args") + + if args.sort not in "loc commits files hours months".split(): + log.warning("--sort argument (%s) unrecognised\n%s" % (args.sort, __doc__)) + raise KeyError(args.sort) + + if not args.excl: + args.excl = "" + + if isinstance(args.gitdir, string_types): + args.gitdir = [args.gitdir] + # strip `/`, `.git` + gitdirs = [i.rstrip(os.sep) for i in args.gitdir] + gitdirs = [ + path.join(*path.split(i)[:-1]) if path.split(i)[-1] == '.git' else i for i in args.gitdir] + # remove duplicates + for i, d in reversed(list(enumerate(gitdirs))): + if d in gitdirs[:i]: + gitdirs.pop(i) + # recurse + if args.recurse: + nDirs = len(gitdirs) + i = 0 + while i < nDirs: + if path.isdir(gitdirs[i]): + for root, dirs, fns in tqdm(os.walk(gitdirs[i]), desc="Recursing", unit="dir", + disable=args.silent_progress, leave=False): + if '.git' in fns + dirs: + if root not in gitdirs: + gitdirs.append(root) + if '.git' in dirs: + dirs.remove('.git') + i += 1 + + exclude_files = None + include_files = None + if args.no_regex: + exclude_files = set(RE_CSPILT.split(args.excl)) + include_files = set() + if args.incl == ".*": + args.incl = "" + else: + 
include_files.update(RE_CSPILT.split(args.incl)) else: - include_files.update(RE_CSPILT.split(args.incl)) - else: - # cannot use findall in case of grouping: - # for i in include_files: - # for i in [include_files]: - # for j in range(1, len(i)): - # if i[j] == '(' and i[j - 1] != '\\': - # raise ValueError('Parenthesis must be escaped' - # ' in include-files:\n\t' + i) - exclude_files = re.compile(args.excl) if args.excl else None - include_files = re.compile(args.incl) - # include_files = re.compile(args.incl, flags=re.M) - - cost = set(args.cost.lower().split(',')) if args.cost else set() - churn = set(args.loc.lower().split(',')) if args.loc else set() - if not churn: - if cost & COST_HOURS: - churn = CHURN_INS | CHURN_DEL - elif cost & COST_MONTHS: - churn = CHURN_INS + # cannot use findall in case of grouping: + # for i in include_files: + # for i in [include_files]: + # for j in range(1, len(i)): + # if i[j] == '(' and i[j - 1] != '\\': + # raise ValueError('Parenthesis must be escaped' + # ' in include-files:\n\t' + i) + exclude_files = re.compile(args.excl) if args.excl else None + include_files = re.compile(args.incl) + # include_files = re.compile(args.incl, flags=re.M) + + cost = set(args.cost.lower().split(',')) if args.cost else set() + churn = set(args.loc.lower().split(',')) if args.loc else set() + if not churn: + if cost & COST_HOURS: + churn = CHURN_INS | CHURN_DEL + elif cost & COST_MONTHS: + churn = CHURN_INS + else: + churn = CHURN_SLOC + + if churn & (CHURN_INS | CHURN_DEL) and args.excl: + log.warning("--loc=ins,del includes historical files" + " which may need to be added to --excl") + + auth_stats = {} + statter = partial(_get_auth_stats, branch=args.branch, since=args.since, + include_files=include_files, exclude_files=exclude_files, + silent_progress=args.silent_progress, + ignore_whitespace=args.ignore_whitespace, M=args.M, C=args.C, + warn_binary=args.warn_binary, bytype=args.bytype, show_email=args.show_email, + prefix_gitdir=len(gitdirs) > 1, churn=churn, ignore_rev=args.ignore_rev, + ignore_revs_file=args.ignore_revs_file) + + # concurrent multi-repo processing + if len(gitdirs) > 1: + try: + from concurrent.futures import ThreadPoolExecutor # NOQA + + from tqdm.contrib.concurrent import thread_map + mapper = partial(thread_map, desc="Repos", unit="repo", miniters=1, + disable=args.silent_progress or len(gitdirs) <= 1) + except ImportError: + mapper = map else: - churn = CHURN_SLOC - - if churn & (CHURN_INS | CHURN_DEL) and args.excl: - log.warning("--loc=ins,del includes historical files" - " which may need to be added to --excl") - - auth_stats = {} - statter = partial( - _get_auth_stats, - branch=args.branch, since=args.since, - include_files=include_files, exclude_files=exclude_files, - silent_progress=args.silent_progress, - ignore_whitespace=args.ignore_whitespace, M=args.M, C=args.C, - warn_binary=args.warn_binary, bytype=args.bytype, - show_email=args.show_email, prefix_gitdir=len(gitdirs) > 1, - churn=churn, ignore_rev=args.ignore_rev, - ignore_revs_file=args.ignore_revs_file) - - # concurrent multi-repo processing - if len(gitdirs) > 1: - try: - from concurrent.futures import ThreadPoolExecutor # NOQA - - from tqdm.contrib.concurrent import thread_map - mapper = partial(thread_map, desc="Repos", unit="repo", miniters=1, - disable=args.silent_progress or len(gitdirs) <= 1) - except ImportError: - mapper = map - else: - mapper = map - - for res in mapper(statter, gitdirs): - for auth, stats in getattr(res, 'iteritems', res.items)(): - if auth in 
auth_stats: - merge_stats(auth_stats[auth], stats) - else: - auth_stats[auth] = stats - - stats_tot = {k: 0 for stats in auth_stats.values() for k in stats} - log.debug(stats_tot) - for k in stats_tot: - stats_tot[k] = sum(int_cast_or_len(stats.get(k, 0)) - for stats in auth_stats.values()) - log.debug(stats_tot) - - # TODO: - # extns = set() - # if args.bytype: - # for stats in auth_stats.values(): - # extns.update([fext(i) for i in stats["files"]]) - # log.debug(extns) - - print_unicode(tabulate( - auth_stats, stats_tot, - args.sort, args.bytype, args.format, cost, args.enum)) + mapper = map + + for res in mapper(statter, gitdirs): + for auth, stats in getattr(res, 'iteritems', res.items)(): + if auth in auth_stats: + merge_stats(auth_stats[auth], stats) + else: + auth_stats[auth] = stats + + stats_tot = {k: 0 for stats in auth_stats.values() for k in stats} + log.debug(stats_tot) + for k in stats_tot: + stats_tot[k] = sum(int_cast_or_len(stats.get(k, 0)) for stats in auth_stats.values()) + log.debug(stats_tot) + + # TODO: + # extns = set() + # if args.bytype: + # for stats in auth_stats.values(): + # extns.update([fext(i) for i in stats["files"]]) + # log.debug(extns) + + print_unicode( + tabulate(auth_stats, stats_tot, args.sort, args.bytype, args.format, cost, args.enum)) def get_main_parser(): - from argopt import argopt - return argopt(__doc__ + '\n' + __copyright__, version=__version__) + from argopt import argopt + return argopt(__doc__ + '\n' + __copyright__, version=__version__) def main(args=None): - """args : list [default: sys.argv[1:]]""" - parser = get_main_parser() - args = parser.parse_args(args=args) - logging.basicConfig( - level=getattr(logging, args.log, logging.INFO), - stream=TqdmStream, - format="%(levelname)s:gitfame.%(funcName)s:%(lineno)d:%(message)s") - - log.debug(args) - if args.manpath is not None: - import sys - from os import path - from shutil import copyfile - - from pkg_resources import resource_filename - fi = resource_filename(__name__, 'git-fame.1') - fo = path.join(args.manpath, 'git-fame.1') - copyfile(fi, fo) - log.info("written:" + fo) - sys.exit(0) - - run(args) - - -if __name__ == "__main__": # pragma: no cover - main() + """args : list [default: sys.argv[1:]]""" + parser = get_main_parser() + args = parser.parse_args(args=args) + logging.basicConfig(level=getattr(logging, args.log, logging.INFO), stream=TqdmStream, + format="%(levelname)s:gitfame.%(funcName)s:%(lineno)d:%(message)s") + + log.debug(args) + if args.manpath is not None: + import sys + from os import path + from shutil import copyfile + + from pkg_resources import resource_filename + fi = resource_filename(__name__, 'git-fame.1') + fo = path.join(args.manpath, 'git-fame.1') + copyfile(fi, fo) + log.info("written:" + fo) + sys.exit(0) + + run(args) + + +if __name__ == "__main__": # pragma: no cover + main() diff --git a/gitfame/_utils.py b/gitfame/_utils.py index 0dcf92e..0d767a7 100644 --- a/gitfame/_utils.py +++ b/gitfame/_utils.py @@ -9,96 +9,96 @@ from tqdm.utils import _screen_shape_wrapper try: - # python2 - _str = unicode - _range = xrange - from StringIO import StringIO - string_types = (basestring,) + # python2 + _str = unicode + _range = xrange + from StringIO import StringIO + string_types = (basestring,) except NameError: - # python3 - _str = str - _range = range - from io import StringIO - string_types = (str,) + # python3 + _str = str + _range = range + from io import StringIO + string_types = (str,) try: - from threading import RLock + from threading import RLock 
except ImportError: - tqdm = tqdm_std + tqdm = tqdm_std else: - tqdm_std.set_lock(RLock()) - tqdm = partial(tqdm_std, lock_args=(False,)) + tqdm_std.set_lock(RLock()) + tqdm = partial(tqdm_std, lock_args=(False,)) __author__ = "Casper da Costa-Luis " __date__ = "2016-2020" __licence__ = "[MPLv2.0](https://mozilla.org/MPL/2.0/)" -__all__ = ["TERM_WIDTH", "int_cast_or_len", "Max", "fext", "_str", "tqdm", - "tighten", "check_output", "print_unicode", "StringIO", "Str"] +__all__ = [ + "TERM_WIDTH", "int_cast_or_len", "Max", "fext", "_str", "tqdm", "tighten", "check_output", + "print_unicode", "StringIO", "Str"] __copyright__ = ' '.join(("Copyright (c)", __date__, __author__, __licence__)) -__license__ = __licence__ # weird foreign language +__license__ = __licence__ # weird foreign language log = logging.getLogger(__name__) TERM_WIDTH = _screen_shape_wrapper()(sys.stdout)[0] if not TERM_WIDTH: - # non interactive pipe - TERM_WIDTH = 256 + # non interactive pipe + TERM_WIDTH = 256 class TqdmStream(object): - @classmethod - def write(cls, msg): - tqdm_std.write(msg, end='') + @classmethod + def write(cls, msg): + tqdm_std.write(msg, end='') def check_output(*a, **k): - log.debug(' '.join(a[0][3:])) - k.setdefault('stdout', subprocess.PIPE) - return subprocess.Popen(*a, **k).communicate()[0].decode( - 'utf-8', errors='replace') + log.debug(' '.join(a[0][3:])) + k.setdefault('stdout', subprocess.PIPE) + return subprocess.Popen(*a, **k).communicate()[0].decode('utf-8', errors='replace') def blank_col(rows, i, blanks): - return all(r[i] in blanks for r in rows) + return all(r[i] in blanks for r in rows) def tighten(t, max_width=256, blanks=' -=', seps='|+'): - """Tighten (default: grid) table padding""" - rows = t.strip().split('\n') - i = 1 - curr_blank = bool() - prev_blank = blank_col(rows, i - 1, blanks) - len_r = len(rows[0]) - while (i < len_r): - curr_blank = blank_col(rows, i, blanks) - if prev_blank and curr_blank: - rows = [r[:i - 1] + r[i:] for r in rows] - len_r -= 1 - i -= 1 - prev_blank = curr_blank - i += 1 - - if len_r > max_width: - have_first_line = False - for i in _range(len_r): - if blank_col(rows, i, seps): - if have_first_line: - if i > len_r - max_width: - return '\n'.join(r[:i - len_r + max_width] + r[i:] for r in - rows[:3] + rows[3::2] + [rows[-1]]) - break - else: - have_first_line = True - - return '\n'.join(rows[:3] + rows[3::2] + [rows[-1]]) + """Tighten (default: grid) table padding""" + rows = t.strip().split('\n') + i = 1 + curr_blank = bool() + prev_blank = blank_col(rows, i - 1, blanks) + len_r = len(rows[0]) + while (i < len_r): + curr_blank = blank_col(rows, i, blanks) + if prev_blank and curr_blank: + rows = [r[:i - 1] + r[i:] for r in rows] + len_r -= 1 + i -= 1 + prev_blank = curr_blank + i += 1 + + if len_r > max_width: + have_first_line = False + for i in _range(len_r): + if blank_col(rows, i, seps): + if have_first_line: + if i > len_r - max_width: + return '\n'.join(r[:i - len_r + max_width] + r[i:] + for r in rows[:3] + rows[3::2] + [rows[-1]]) + break + else: + have_first_line = True + + return '\n'.join(rows[:3] + rows[3::2] + [rows[-1]]) def fext(fn): - """File extension""" - res = fn.split('.') - return res[-1] if len(res) > 1 else '' + """File extension""" + res = fn.split('.') + return res[-1] if len(res) > 1 else '' def int_cast_or_len(i): - """ + """ >>> int_cast_or_len(range(10)) 10 >>> int_cast_or_len('90 foo') @@ -107,57 +107,57 @@ def int_cast_or_len(i): 90 """ - try: - return int(i) - except ValueError: - return len(i) - except TypeError: - 
return len(i) + try: + return int(i) + except ValueError: + return len(i) + except TypeError: + return len(i) def Max(it, empty_default=0): - """ + """ >>> Max(range(10), -1) 9 >>> Max(range(0), -1) -1 """ - try: - return max(it) - except ValueError as e: - if 'empty sequence' in str(e): - return empty_default - raise # pragma: no cover + try: + return max(it) + except ValueError as e: + if 'empty sequence' in str(e): + return empty_default + raise # pragma: no cover def print_unicode(msg, end='\n', err='?'): - """print `msg`, replacing unicode characters with `err` upon failure""" - for c in msg: - try: - print(c, end='') - except UnicodeEncodeError: - print(err, end='') - print('', end=end) + """print `msg`, replacing unicode characters with `err` upon failure""" + for c in msg: + try: + print(c, end='') + except UnicodeEncodeError: + print(err, end='') + print('', end=end) def Str(i): - """return `'%g' % i` if possible, else `_str(i)`""" - try: - return '%g' % i - except TypeError: - return _str(i) + """return `'%g' % i` if possible, else `_str(i)`""" + try: + return '%g' % i + except TypeError: + return _str(i) def merge_stats(left, right): - """Add `right`'s values to `left` (modifies `left` in-place)""" - for k, val in getattr(right, 'iteritems', right.items)(): - if isinstance(val, int): - left[k] = left.get(k, 0) + val - elif hasattr(val, 'extend'): - left[k].extend(val) - elif hasattr(val, 'update'): - left[k].update(val) - else: - raise TypeError(val) - return left + """Add `right`'s values to `left` (modifies `left` in-place)""" + for k, val in getattr(right, 'iteritems', right.items)(): + if isinstance(val, int): + left[k] = left.get(k, 0) + val + elif hasattr(val, 'extend'): + left[k].extend(val) + elif hasattr(val, 'update'): + left[k].update(val) + else: + raise TypeError(val) + return left diff --git a/setup.py b/setup.py index 02dc430..f0d7302 100755 --- a/setup.py +++ b/setup.py @@ -6,10 +6,11 @@ from setuptools import setup src_dir = path.abspath(path.dirname(__file__)) -if sys.argv[1].lower().strip() == 'make': # exec Makefile commands +if sys.argv[1].lower().strip() == 'make': # exec Makefile commands import pymake fpath = path.join(src_dir, 'Makefile') pymake.main(['-f', fpath] + sys.argv[2:]) + # Stop to avoid setup.py raising non-standard command error sys.exit(0) @@ -18,8 +19,7 @@ sys.argv.remove('--cython') try: from Cython.Build import cythonize - ext_modules = cythonize([ - "gitfame/_gitfame.py", "gitfame/_utils.py"], nthreads=2) + ext_modules = cythonize(["gitfame/_gitfame.py", "gitfame/_utils.py"], nthreads=2) except ImportError: pass diff --git a/tests/tests_gitfame.py b/tests/tests_gitfame.py index 3a541cd..712eb86 100644 --- a/tests/tests_gitfame.py +++ b/tests/tests_gitfame.py @@ -14,29 +14,27 @@ # test data auth_stats = { - u'Not Committed Yet': {'files': { - 'gitfame/_gitfame.py', 'gitfame/_utils.py', 'Makefile', 'MANIFEST.in' - }, + u'Not Committed Yet': { + 'files': {'gitfame/_gitfame.py', 'gitfame/_utils.py', 'Makefile', 'MANIFEST.in'}, 'loc': 75, 'ctimes': [], 'commits': 0}, - u'Casper da Costa-Luis': {'files': { - 'gitfame/_utils.py', 'gitfame/__main__.py', 'setup.cfg', - 'gitfame/_gitfame.py', 'gitfame/__init__.py', - 'git-fame_completion.bash', 'Makefile', 'MANIFEST.in', '.gitignore', - 'setup.py'}, 'loc': 538, 'ctimes': [ - 1510942009, 1517426360, 1532103452, 1543323944, 1548030670, 1459558286, - 1510942009, 1459559144, 1481150373, 1510942009, 1548030670, 1517178199, - 1481150379, 1517426360, 1548030670, 1459625059, 1510942009, 1517426360, - 
1481150373, 1517337751, 1517426360, 1510942009, 1548030670, 1459099074, - 1459598664, 1517337751, 1517176447, 1552697404, 1546630326, 1543326881, - 1459558286, 1481150373, 1510930168, 1459598664, 1517596988], - 'commits': 35} -} + u'Casper da Costa-Luis': { + 'files': { + 'gitfame/_utils.py', 'gitfame/__main__.py', 'setup.cfg', 'gitfame/_gitfame.py', + 'gitfame/__init__.py', 'git-fame_completion.bash', 'Makefile', 'MANIFEST.in', + '.gitignore', 'setup.py'}, 'loc': 538, + 'ctimes': [ + 1510942009, 1517426360, 1532103452, 1543323944, 1548030670, 1459558286, 1510942009, + 1459559144, 1481150373, 1510942009, 1548030670, 1517178199, 1481150379, 1517426360, + 1548030670, 1459625059, 1510942009, 1517426360, 1481150373, 1517337751, 1517426360, + 1510942009, 1548030670, 1459099074, 1459598664, 1517337751, 1517176447, 1552697404, + 1546630326, 1543326881, 1459558286, 1481150373, 1510930168, 1459598664, 1517596988], + 'commits': 35}} stats_tot = {'files': 14, 'loc': 613, 'commits': 35} def test_tabulate(): - """Test builtin tabulate""" - assert (_gitfame.tabulate(auth_stats, stats_tot) == dedent("""\ + """Test builtin tabulate""" + assert (_gitfame.tabulate(auth_stats, stats_tot) == dedent("""\ Total commits: 35 Total files: 14 Total loc: 613 @@ -45,13 +43,12 @@ def test_tabulate(): | Casper da Costa-Luis | 538 | 35 | 10 | 87.8/ 100/71.4 | | Not Committed Yet | 75 | 0 | 4 | 12.2/ 0.0/28.6 |""")) - sys.stderr.write("\rTest builtin tabulate ... ") # `tqdm` may clear info + sys.stderr.write("\rTest builtin tabulate ... ") # `tqdm` may clear info def test_tabulate_cost(): - """Test cost estimates""" - assert (_gitfame.tabulate(auth_stats, stats_tot, cost={"hours", "months"}) == dedent( - """\ + """Test cost estimates""" + assert (_gitfame.tabulate(auth_stats, stats_tot, cost={"hours", "months"}) == dedent("""\ Total commits: 35 Total files: 14 Total hours: 5.5 @@ -68,9 +65,9 @@ def test_tabulate_cost(): def test_tabulate_yaml(): - """Test YAML tabulate""" - res = [ - dedent("""\ + """Test YAML tabulate""" + res = [ + dedent("""\ columns: - Author - loc @@ -98,23 +95,24 @@ def test_tabulate_yaml(): commits: 35 files: 14 loc: 613"""), - # pyyaml<5 - dedent("""\ + # pyyaml<5 + dedent("""\ columns: [Author, loc, coms, fils, '%loc', '%coms', '%fils'] data: - [Casper da Costa-Luis, 538, 35, 10, 87.8, 100.0, 71.4] - [Not Committed Yet, 75, 0, 4, 12.2, 0.0, 28.6] total: {commits: 35, files: 14, loc: 613}""")] - try: - assert (_gitfame.tabulate(auth_stats, stats_tot, backend='yaml') in res) - except ImportError as err: - raise skip(str(err)) + try: + assert (_gitfame.tabulate(auth_stats, stats_tot, backend='yaml') in res) + except ImportError as err: + raise skip(str(err)) def test_tabulate_json(): - """Test JSON tabulate""" - res = loads(_gitfame.tabulate(auth_stats, stats_tot, backend='json')) - assert (res == loads(dedent("""\ + """Test JSON tabulate""" + res = loads(_gitfame.tabulate(auth_stats, stats_tot, backend='json')) + assert (res == loads( + dedent("""\ {"total": {"files": 14, "loc": 613, "commits": 35}, "data": [["Casper da Costa-Luis", 538, 35, 10, 87.8, 100.0, 71.4], ["Not Committed Yet", 75, 0, 4, 12.2, 0.0, 28.6]], @@ -123,17 +121,16 @@ def test_tabulate_json(): def test_tabulate_csv(): - """Test CSV tabulate""" - csv = _gitfame.tabulate(auth_stats, stats_tot, backend='csv') - tsv = _gitfame.tabulate(auth_stats, stats_tot, backend='tsv') - assert (csv.replace(',', '\t') == tsv) + """Test CSV tabulate""" + csv = _gitfame.tabulate(auth_stats, stats_tot, backend='csv') + tsv = 
_gitfame.tabulate(auth_stats, stats_tot, backend='tsv') + assert (csv.replace(',', '\t') == tsv) def test_tabulate_tabulate(): - """Test external tabulate""" - try: - assert (_gitfame.tabulate( - auth_stats, stats_tot, backend='simple') == dedent("""\ + """Test external tabulate""" + try: + assert (_gitfame.tabulate(auth_stats, stats_tot, backend='simple') == dedent("""\ Total commits: 35 Total files: 14 Total loc: 613 @@ -141,107 +138,94 @@ def test_tabulate_tabulate(): -------------------- ----- ------ ------ --------------- Casper da Costa-Luis 538 35 10 87.8/ 100/71.4 Not Committed Yet 75 0 4 12.2/ 0.0/28.6""")) - except ImportError as err: - raise skip(str(err)) + except ImportError as err: + raise skip(str(err)) def test_tabulate_enum(): - """Test --enum tabulate""" - res = loads(_gitfame.tabulate( - auth_stats, stats_tot, backend='json', row_nums=True)) - assert res['columns'][0] == '#' - assert [int(i[0]) for i in res['data']] == [1, 2] + """Test --enum tabulate""" + res = loads(_gitfame.tabulate(auth_stats, stats_tot, backend='json', row_nums=True)) + assert res['columns'][0] == '#' + assert [int(i[0]) for i in res['data']] == [1, 2] def test_tabulate_unknown(): - """Test unknown tabulate format""" - try: - _gitfame.tabulate(auth_stats, stats_tot, backend='1337') - except ValueError as e: - if "unknown" not in str(e).lower(): - raise - else: - raise ValueError("Should not support unknown tabulate format") + """Test unknown tabulate format""" + try: + _gitfame.tabulate(auth_stats, stats_tot, backend='1337') + except ValueError as e: + if "unknown" not in str(e).lower(): + raise + else: + raise ValueError("Should not support unknown tabulate format") # WARNING: this should be the last test as it messes with sys.argv def test_main(): - """Test command line pipes""" - import subprocess - from os.path import dirname as dn + """Test command line pipes""" + import subprocess + from os.path import dirname as dn - res = subprocess.Popen( - (sys.executable, '-c', dedent('''\ + res = subprocess.Popen((sys.executable, '-c', + dedent('''\ import gitfame import sys sys.argv = ["", "--silent-progress", r"''' + dn(dn(__file__)) + '''"] gitfame.main() - ''')), - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT).communicate()[0] - - # actual test: - - assert ('Total commits' in str(res)) - - # semi-fake test which gets coverage: - - _SYS_AOE = sys.argv, sys.stdout, sys.stderr - sys.stdout = StringIO() - sys.stderr = sys.stdout - - # sys.argv = ['', '--silent-progress'] - # import gitfame.__main__ # NOQA - main(['--silent-progress']) - - sys.stdout.seek(0) - try: - main(['--bad', 'arg']) - except SystemExit: - res = ' '.join(sys.stdout.getvalue().strip().split()[:2]) - if res != "usage: gitfame": - raise ValueError(sys.stdout.getvalue()) - else: - raise ValueError("Expected --bad arg to fail") - - sys.stdout.seek(0) - try: - main(['-s', '--sort', 'badSortArg']) - except KeyError as e: - if "badSortArg" not in str(e): - raise ValueError("Expected `--sort=badSortArg` to fail") - - for params in [ - ['--sort', 'commits'], - ['--no-regex'], - ['--no-regex', '--incl', 'setup.py,README.rst'], - ['--excl', r'.*\.py'], - ['--loc', 'ins,del'], - ['--cost', 'hour'], - ['--cost', 'month'], - ['--cost', 'month', '--excl', r'.*\.py'], - ['-e'], - ['-w'], - ['-M'], - ['-C'], - ['-t'] - ]: - main(['-s'] + params) - - # test --manpath - tmp = mkdtemp() - man = path.join(tmp, "git-fame.1") - assert not path.exists(man) - try: - main(['--manpath', tmp]) - except SystemExit: - pass - else: - raise 
SystemExit("Expected system exit") - assert path.exists(man) - rmtree(tmp, True) - - # test multiple gitdirs - main(['.', '.']) - - sys.argv, sys.stdout, sys.stderr = _SYS_AOE + ''')), stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0] + + # actual test: + + assert ('Total commits' in str(res)) + + # semi-fake test which gets coverage: + + _SYS_AOE = sys.argv, sys.stdout, sys.stderr + sys.stdout = StringIO() + sys.stderr = sys.stdout + + # sys.argv = ['', '--silent-progress'] + # import gitfame.__main__ # NOQA + main(['--silent-progress']) + + sys.stdout.seek(0) + try: + main(['--bad', 'arg']) + except SystemExit: + res = ' '.join(sys.stdout.getvalue().strip().split()[:2]) + if res != "usage: gitfame": + raise ValueError(sys.stdout.getvalue()) + else: + raise ValueError("Expected --bad arg to fail") + + sys.stdout.seek(0) + try: + main(['-s', '--sort', 'badSortArg']) + except KeyError as e: + if "badSortArg" not in str(e): + raise ValueError("Expected `--sort=badSortArg` to fail") + + for params in [['--sort', 'commits'], ['--no-regex'], + ['--no-regex', '--incl', 'setup.py,README.rst'], ['--excl', r'.*\.py'], + ['--loc', 'ins,del'], ['--cost', 'hour'], ['--cost', 'month'], + ['--cost', 'month', '--excl', r'.*\.py'], ['-e'], ['-w'], ['-M'], ['-C'], + ['-t']]: + main(['-s'] + params) + + # test --manpath + tmp = mkdtemp() + man = path.join(tmp, "git-fame.1") + assert not path.exists(man) + try: + main(['--manpath', tmp]) + except SystemExit: + pass + else: + raise SystemExit("Expected system exit") + assert path.exists(man) + rmtree(tmp, True) + + # test multiple gitdirs + main(['.', '.']) + + sys.argv, sys.stdout, sys.stderr = _SYS_AOE diff --git a/tests/tests_utils.py b/tests/tests_utils.py index cce5d3d..4d3509c 100644 --- a/tests/tests_utils.py +++ b/tests/tests_utils.py @@ -4,9 +4,9 @@ def test_tighten(): - """Test (grid) table compression""" + """Test (grid) table compression""" - orig_tab = ''' + orig_tab = ''' +------------------------+-----+------+------+----------------------+ | Author | loc | coms | fils | distribution | +========================+=====+======+======+======================+ @@ -16,8 +16,8 @@ def test_tighten(): +------------------------+-----+------+------+----------------------+ ''' - # compress whitespace - assert (_utils.tighten(orig_tab, max_width=80) == '''\ + # compress whitespace + assert (_utils.tighten(orig_tab, max_width=80) == '''\ +----------------------+-----+------+------+----------------+ | Author | loc | coms | fils | distribution | +======================+=====+======+======+================+ @@ -25,8 +25,8 @@ def test_tighten(): | Not Committed Yet | 50 | 0 | 2 | 6.5/ 0.0/15.4 | +----------------------+-----+------+------+----------------+''') - # compress first column - assert (_utils.tighten(orig_tab, max_width=47) == '''\ + # compress first column + assert (_utils.tighten(orig_tab, max_width=47) == '''\ +--------+-----+------+------+----------------+ | Author | loc | coms | fils | distribution | +========+=====+======+======+================+ @@ -34,30 +34,30 @@ def test_tighten(): | Not Com| 50 | 0 | 2 | 6.5/ 0.0/15.4 | +--------+-----+------+------+----------------+''') - # too small width - no first column compression - assert (_utils.tighten(orig_tab, max_width=35) == _utils.tighten(orig_tab)) + # too small width - no first column compression + assert (_utils.tighten(orig_tab, max_width=35) == _utils.tighten(orig_tab)) def test_fext(): - """Test detection of file extensions""" - assert (_utils.fext('foo/bar.baz') == 'baz') 
-  assert (_utils.fext('foo/.baz') == 'baz')
-  assert (_utils.fext('foo/bar') == '')
+    """Test detection of file extensions"""
+    assert (_utils.fext('foo/bar.baz') == 'baz')
+    assert (_utils.fext('foo/.baz') == 'baz')
+    assert (_utils.fext('foo/bar') == '')
 
 
 def test_Max():
-  """Test max with defaults"""
-  assert (_utils.Max(range(10), -1) == 9)
-  assert (_utils.Max(range(0), -1) == -1)
+    """Test max with defaults"""
+    assert (_utils.Max(range(10), -1) == 9)
+    assert (_utils.Max(range(0), -1) == -1)
 
 
 def test_integer_stats():
-  """Test integer representations"""
-  assert (_utils.int_cast_or_len(range(10)) == 10)
-  assert (_utils.int_cast_or_len('90 foo') == 6)
-  assert (_utils.int_cast_or_len('90') == 90)
+    """Test integer representations"""
+    assert (_utils.int_cast_or_len(range(10)) == 10)
+    assert (_utils.int_cast_or_len('90 foo') == 6)
+    assert (_utils.int_cast_or_len('90') == 90)
 
 
 def test_print():
-  """Test printing of unicode"""
-  _utils.print_unicode("\x81")
+    """Test printing of unicode"""
+    _utils.print_unicode("\x81")

From 6928685d57ca65ed134e70eb28c7340444d2f7c2 Mon Sep 17 00:00:00 2001
From: Casper da Costa-Luis
Date: Wed, 1 Mar 2023 18:19:33 +0000
Subject: [PATCH 4/8] drop py2 support

---
 gitfame/_gitfame.py | 10 +++++-----
 gitfame/_utils.py   | 23 ++++++-----------------
 2 files changed, 11 insertions(+), 22 deletions(-)

diff --git a/gitfame/_gitfame.py b/gitfame/_gitfame.py
index f36af37..fab62e7 100755
--- a/gitfame/_gitfame.py
+++ b/gitfame/_gitfame.py
@@ -63,8 +63,8 @@
 from functools import partial
 from os import path
 
-from ._utils import (TERM_WIDTH, Str, TqdmStream, _str, check_output, fext, int_cast_or_len,
-                     merge_stats, print_unicode, string_types, tqdm)
+from ._utils import (TERM_WIDTH, Str, TqdmStream, check_output, fext, int_cast_or_len, merge_stats,
+                     print_unicode, tqdm)
 
 # version detector. Precedence: installed dist, git, 'UNKNOWN'
 try:
@@ -243,7 +243,7 @@ def _get_auth_stats(gitdir, branch="HEAD", since=None, include_files=None, exclu
     auth_stats = {}
 
     def stats_append(fname, auth, loc, tstamp):
-        auth = _str(auth)
+        auth = str(auth)
         tstamp = int(tstamp)
         try:
             auth_stats[auth]["loc"] += loc
@@ -317,7 +317,7 @@ def stats_append(fname, auth, loc, tstamp):
         log.debug(RE_NCOM_AUTH_EM.findall(auth_commits.strip()))
         auth2em = {}
         for (ncom, auth, em) in RE_NCOM_AUTH_EM.findall(auth_commits.strip()):
-            auth = _str(auth)
+            auth = str(auth)
             auth2em[auth] = em  # TODO: count most used email?
             try:
                 auth_stats[auth]["commits"] += int(ncom)
@@ -351,7 +351,7 @@ def run(args):
     if not args.excl:
         args.excl = ""
 
-    if isinstance(args.gitdir, string_types):
+    if isinstance(args.gitdir, str):
         args.gitdir = [args.gitdir]
     # strip `/`, `.git`
     gitdirs = [i.rstrip(os.sep) for i in args.gitdir]
diff --git a/gitfame/_utils.py b/gitfame/_utils.py
index 0d767a7..3970a24 100644
--- a/gitfame/_utils.py
+++ b/gitfame/_utils.py
@@ -4,22 +4,11 @@
 import subprocess
 import sys
 from functools import partial
+from io import StringIO
 
 from tqdm import tqdm as tqdm_std
 from tqdm.utils import _screen_shape_wrapper
 
-try:
-    # python2
-    _str = unicode
-    _range = xrange
-    from StringIO import StringIO
-    string_types = (basestring,)
-except NameError:
-    # python3
-    _str = str
-    _range = range
-    from io import StringIO
-    string_types = (str,)
 try:
     from threading import RLock
 except ImportError:
@@ -29,10 +18,10 @@
 tqdm = partial(tqdm_std, lock_args=(False,))
 
 __author__ = "Casper da Costa-Luis "
-__date__ = "2016-2020"
+__date__ = "2016-2023"
 __licence__ = "[MPLv2.0](https://mozilla.org/MPL/2.0/)"
 __all__ = [
-    "TERM_WIDTH", "int_cast_or_len", "Max", "fext", "_str", "tqdm", "tighten", "check_output",
+    "TERM_WIDTH", "int_cast_or_len", "Max", "fext", "tqdm", "tighten", "check_output",
     "print_unicode", "StringIO", "Str"]
 __copyright__ = ' '.join(("Copyright (c)", __date__, __author__, __licence__))
 __license__ = __licence__  # weird foreign language
@@ -78,7 +67,7 @@ def tighten(t, max_width=256, blanks=' -=', seps='|+'):
 
     if len_r > max_width:
         have_first_line = False
-        for i in _range(len_r):
+        for i in range(len_r):
             if blank_col(rows, i, seps):
                 if have_first_line:
                     if i > len_r - max_width:
@@ -142,11 +131,11 @@ def print_unicode(msg, end='\n', err='?'):
 
 
 def Str(i):
-    """return `'%g' % i` if possible, else `_str(i)`"""
+    """return `'%g' % i` if possible, else `str(i)`"""
     try:
         return '%g' % i
     except TypeError:
-        return _str(i)
+        return str(i)
 
 
 def merge_stats(left, right):

From 8d3649d13cc2702d4268abc58b84788e524cbcd1 Mon Sep 17 00:00:00 2001
From: Casper da Costa-Luis
Date: Wed, 1 Mar 2023 18:28:32 +0000
Subject: [PATCH 5/8] fixup linting

---
 gitfame/__main__.py    |  2 +-
 gitfame/_gitfame.py    | 16 +++++++++-------
 setup.cfg              |  2 +-
 tests/tests_gitfame.py |  3 +--
 4 files changed, 12 insertions(+), 11 deletions(-)

diff --git a/gitfame/__main__.py b/gitfame/__main__.py
index be6a628..0e457cf 100644
--- a/gitfame/__main__.py
+++ b/gitfame/__main__.py
@@ -1,3 +1,3 @@
-from ._gitfame import main  # pragma: no cover
+from ._gitfame import main # pragma: no cover, yapf: disable
 
 main()  # pragma: no cover
diff --git a/gitfame/_gitfame.py b/gitfame/_gitfame.py
index fab62e7..7475c1d 100755
--- a/gitfame/_gitfame.py
+++ b/gitfame/_gitfame.py
@@ -199,8 +199,9 @@ def tabulate(auth_stats, stats_tot, sort='loc', bytype=False, backend='md', cost
     COL_LENS[0] = min(TERM_WIDTH - sum(COL_LENS[1:]) - len(COL_LENS) * 3 - 4, COL_LENS[0])
     tab = [[i[0][:COL_LENS[0]]] + i[1:] for i in tab]
     return totals + tabber.tabulate(tab, COL_NAMES, tablefmt=backend, floatfmt='.0f')
-  # from ._utils import tighten
-  # return totals + tighten(tabber(...), max_width=TERM_WIDTH)
+
+    # from ._utils import tighten
+    # return totals + tighten(tabber(...), max_width=TERM_WIDTH)
 
 
 def _get_auth_stats(gitdir, branch="HEAD", since=None, include_files=None, exclude_files=None,
@@ -279,9 +280,10 @@ def stats_append(fname, auth, loc, tstamp):
 
             blame_out = RE_BLAME_BOUNDS.sub('', blame_out)
             loc_auth_times = RE_AUTHS_BLAME.findall(blame_out)
-            for loc, auth, tstamp in loc_auth_times:  # for each chunk
+            for loc, auth, tstamp in loc_auth_times: # for each chunk
                 loc = int(loc)
                 stats_append(fname, auth, loc, tstamp)
+
     else:
         with tqdm(total=1, desc=gitdir if prefix_gitdir else "Processing", disable=silent_progress,
                   unit="repo") as t:
@@ -318,16 +320,16 @@ def stats_append(fname, auth, loc, tstamp):
         auth2em = {}
         for (ncom, auth, em) in RE_NCOM_AUTH_EM.findall(auth_commits.strip()):
             auth = str(auth)
-            auth2em[auth] = em  # TODO: count most used email?
+            auth2em[auth] = em # TODO: count most used email?
             try:
                 auth_stats[auth]["commits"] += int(ncom)
             except KeyError:
                 auth_stats[auth] = {"loc": 0, "files": set(), "commits": int(ncom), "ctimes": []}
-    if show_email:
-        # replace author name with email
+    if show_email: # replace author name with email
         log.debug(auth2em)
         old = auth_stats
         auth_stats = {}
+
         for auth, stats in getattr(old, 'iteritems', old.items)():
             i = auth_stats.setdefault(auth2em[auth],
                                       {"loc": 0, "files": set(), "commits": 0, "ctimes": []})
@@ -423,7 +425,7 @@ def run(args):
     # concurrent multi-repo processing
     if len(gitdirs) > 1:
         try:
-            from concurrent.futures import ThreadPoolExecutor  # NOQA
+            from concurrent.futures import ThreadPoolExecutor # NOQA, yapf: disable
             from tqdm.contrib.concurrent import thread_map
             mapper = partial(thread_map, desc="Repos", unit="repo", miniters=1,
diff --git a/setup.cfg b/setup.cfg
index c02e836..2e7a8f1 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -89,8 +89,8 @@ exclude=tests
 gitfame=git-fame.1
 
 [flake8]
-extend_ignore=E111,E114
 max_line_length=99
+extend_ignore=E261
 exclude=.eggs,.tox,build,dist,.git,__pycache__
 
 [yapf]
diff --git a/tests/tests_gitfame.py b/tests/tests_gitfame.py
index 712eb86..afcf546 100644
--- a/tests/tests_gitfame.py
+++ b/tests/tests_gitfame.py
@@ -95,7 +95,6 @@ def test_tabulate_yaml():
           commits: 35
           files: 14
           loc: 613"""),
-        # pyyaml<5
         dedent("""\
           columns: [Author, loc, coms, fils, '%loc', '%coms', '%fils']
           data:
@@ -104,7 +103,7 @@ def test_tabulate_yaml():
           total: {commits: 35, files: 14, loc: 613}""")]
     try:
         assert (_gitfame.tabulate(auth_stats, stats_tot, backend='yaml') in res)
-    except ImportError as err:
+    except ImportError as err: # lacking pyyaml<5
         raise skip(str(err))

From 0eec1a204f36ffe78964b45c04122121a72ed12c Mon Sep 17 00:00:00 2001
From: Casper da Costa-Luis
Date: Wed, 1 Mar 2023 18:32:45 +0000
Subject: [PATCH 6/8] bump dockerfile, rerender manpages, sync dates

---
 .meta/.git-fame.1.md | 2 +-
 Dockerfile           | 2 +-
 gitfame/_gitfame.py  | 2 +-
 gitfame/git-fame.1   | 4 ++--
 4 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/.meta/.git-fame.1.md b/.meta/.git-fame.1.md
index 627c2cc..f11ca79 100644
--- a/.meta/.git-fame.1.md
+++ b/.meta/.git-fame.1.md
@@ -1,6 +1,6 @@
 % GIT-FAME(1) git-fame User Manuals
 % Casper da Costa-Luis
-% 2016-2022
+% 2016-2023
 
 # NAME
 
diff --git a/Dockerfile b/Dockerfile
index 241b488..d2f333a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.9-alpine
+FROM python:3.11-alpine
 RUN apk update && apk add --no-cache git
 COPY dist/*.whl .
 RUN pip install -U $(ls *.whl)[full] && rm *.whl
diff --git a/gitfame/_gitfame.py b/gitfame/_gitfame.py
index 7475c1d..eee8cdd 100755
--- a/gitfame/_gitfame.py
+++ b/gitfame/_gitfame.py
@@ -76,7 +76,7 @@ except (ImportError, LookupError):
     __version__ = "UNKNOWN"
 
 __author__ = "Casper da Costa-Luis "
-__date__ = "2016-2020"
+__date__ = "2016-2023"
 __licence__ = "[MPLv2.0](https://mozilla.org/MPL/2.0/)"
 __all__ = ["main"]
 __copyright__ = ' '.join(("Copyright (c)", __date__, __author__, __licence__))
diff --git a/gitfame/git-fame.1 b/gitfame/git-fame.1
index 0872b17..9636124 100644
--- a/gitfame/git-fame.1
+++ b/gitfame/git-fame.1
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pandoc 2.18
+.\" Automatically generated by Pandoc 2.19.2
 .\"
 .\" Define V font for inline verbatim, using C font in formats
 .\" that render this, and otherwise B font.
@@ -14,7 +14,7 @@
 .    ftr VB CB
 .    ftr VBI CBI
 .\}
-.TH "GIT-FAME" "1" "2016-2018" "git-fame User Manuals" ""
+.TH "GIT-FAME" "1" "2016-2023" "git-fame User Manuals" ""
 .hy
 .SH NAME
 .PP

From b760fa8ec98ce20cfde016dba925f2ace29da637 Mon Sep 17 00:00:00 2001
From: Casper da Costa-Luis
Date: Wed, 1 Mar 2023 18:48:40 +0000
Subject: [PATCH 7/8] ci: cleanup GHA syntax

---
 .github/workflows/test.yml | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index d1977c7..ac4b742 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -114,14 +114,14 @@ jobs:
       name: Collect assets
       run: |
         if [[ $GITHUB_REF == refs/tags/v* ]]; then
-          echo ::set-output name=docker_tags::latest,${GITHUB_REF/refs\/tags\/v/}
-          echo ::set-output name=snap_channel::stable,candidate,edge
+          echo docker_tags=latest,${GITHUB_REF/refs\/tags\/v/} >> $GITHUB_OUTPUT
+          echo snap_channel=stable,candidate,edge >> $GITHUB_OUTPUT
         elif [[ $GITHUB_REF == refs/heads/master ]]; then
-          echo ::set-output name=docker_tags::master
-          echo ::set-output name=snap_channel::candidate,edge
+          echo docker_tags=master >> $GITHUB_OUTPUT
+          echo snap_channel=candidate,edge >> $GITHUB_OUTPUT
         elif [[ $GITHUB_REF == refs/heads/devel ]]; then
-          echo ::set-output name=docker_tags::devel
-          echo ::set-output name=snap_channel::edge
+          echo docker_tags=devel >> $GITHUB_OUTPUT
+          echo snap_channel=edge >> $GITHUB_OUTPUT
         fi
     - if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
       name: Release

From 937937a49910b9a5fc7438d2ffb3e10252211bfb Mon Sep 17 00:00:00 2001
From: Casper da Costa-Luis
Date: Wed, 1 Mar 2023 19:03:19 +0000
Subject: [PATCH 8/8] bump snap core18 => core22

---
 snapcraft.yaml | 11 ++++-------
 1 file changed, 4 insertions(+), 7 deletions(-)

diff --git a/snapcraft.yaml b/snapcraft.yaml
index 93f2067..34c8e46 100644
--- a/snapcraft.yaml
+++ b/snapcraft.yaml
@@ -4,7 +4,7 @@
 description: https://github.com/casperdcl/git-fame
 adopt-info: git-fame
 grade: stable
 confinement: strict
-base: core18
+base: core22
 license: MPL-2.0
 parts:
   git-fame:
@@ -16,14 +16,11 @@ parts:
     build-packages: [git]
     stage-packages: [git]
     override-build: |
-      snapcraftctl build
-      # prevent user site packages interfering with this snap - reference:
-      # https://github.com/snapcore/snapcraft/blob/19393ef36cd773a28131cec10cc0bfb3bf9c7e77/tools/snapcraft-override-build.sh#L18
-      sed -ri 's/^(ENABLE_USER_SITE = )None$/\1False/' $SNAPCRAFT_PART_INSTALL/usr/lib/python*/site.py
+      craftctl default
       cp $SNAPCRAFT_PART_BUILD/git-fame_completion.bash $SNAPCRAFT_PART_INSTALL/completion.sh
     override-stage: |
-      snapcraftctl stage
-      snapcraftctl set-version $(usr/bin/python3 -m gitfame --version)
+      craftctl default
+      craftctl set version=$(bin/python3 -m gitfame --version)
 apps:
   git-fame:
     command: bin/git-fame