Showing 10 changed files with 216 additions and 159 deletions.
@@ -16,6 +16,10 @@
    --incl=<f>         Included files [default: .*]. See `--excl` for format.
    --since=<date>     Date from which to check. Can be absoulte (eg: 1970-01-31)
                       or relative to now (eg: 3.weeks).
+   --cost=<method>    Include time cost in person-months (COCOMO) or
+                      person-hours (based on commit times).
+                      Methods: month(s)|cocomo|hour(s)|commit(s).
+                      May be multiple comma-separated values.
    -n, --no-regex     Assume <f> are comma-separated exact matches
                       rather than regular expressions [default: False].
                       NB: if regex is enabled `,` is equivalent to `|`.
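A quick, hypothetical sketch of how the comma-separated `--cost` value is interpreted; it mirrors the case-insensitive substring checks added to `tabulate()` further down in this commit (the `wants` helper does not exist in the code and is for illustration only):

    # Mirrors the substring checks used below:
    # 'cocomo'/'month' enable the 'mths' column, 'commit'/'hour' the 'hrs' column.
    def wants(cost, keywords):  # hypothetical helper, for illustration only
        cost = (cost or '').lower()
        return any(k in cost for k in keywords)

    print(wants('months,hours', ['cocomo', 'month']))  # True  -> 'mths' column
    print(wants('months,hours', ['commit', 'hour']))   # True  -> 'hrs' column
    print(wants('cocomo', ['commit', 'hour']))          # False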
@@ -27,8 +31,9 @@
    -M                 Detect intra-file line moves and copies [default: False].
    -C                 Detect inter-file line moves and copies [default: False].
    --format=<format>  Table format
-                      [default: md]|markdown|yaml|yml|json|csv|tsv|tabulate.
+                      [default: pipe]|md|markdown|yaml|yml|json|csv|tsv|tabulate.
                       May require `git-fame[<format>]`, e.g. `pip install git-fame[yaml]`.
+                      Any `tabulate.tabulate_formats` is also accepted.
    --manpath=<path>   Directory in which to install git-fame man pages.
    --log=<lvl>        FATAL|CRITICAL|ERROR|WARN(ING)|[default: INFO]|DEBUG|NOTSET.
  """
@@ -39,8 +44,8 @@
  import re
  import logging

- from ._utils import TERM_WIDTH, int_cast_or_len, Max, fext, _str, \
-     check_output, tqdm, TqdmStream, print_unicode
+ from ._utils import TERM_WIDTH, int_cast_or_len, fext, _str, \
+     check_output, tqdm, TqdmStream, print_unicode, Str
  from ._version import __version__  # NOQA

  __author__ = "Casper da Costa-Luis <[email protected]>"
@@ -51,21 +56,34 @@
  __license__ = __licence__  # weird foreign language


- RE_AUTHS = re.compile('^\w+ \d+ \d+ (\d+)\nauthor (.+)$', flags=re.M)
+ RE_AUTHS = re.compile(
+     r'^\w+ \d+ \d+ (\d+)\nauthor (.+?)$.*?committer-time (\d+)',
+     flags=re.M | re.DOTALL)
  # finds all non-escaped commas
  # NB: does not support escaping of escaped character
  RE_CSPILT = re.compile(r'(?<!\\),')
  RE_NCOM_AUTH_EM = re.compile(r'^\s*(\d+)\s+(.*?)\s+<(.*)>\s*$', flags=re.M)


- def tr_hline(col_widths, hl='-', x='+'):
-     return x + x.join(hl * i for i in col_widths) + x
+ def hours(dates, maxCommitDiffInSec=120 * 60, firstCommitAdditionInMinutes=120):
+     """
+     Convert list of commit times (in seconds) to an estimate of hours spent.
+     https://github.com/kimmobrunfeldt/git-hours/blob/\
+ 8aaeee237cb9d9028e7a2592a25ad8468b1f45e4/index.js#L114-L143
+     """
+     dates = sorted(dates)
+     diffInSec = [i - j for (i, j) in zip(dates[1:], dates[:-1])]
+     res = sum(filter(lambda i: i < maxCommitDiffInSec, diffInSec))
+     return (res / 60.0 + firstCommitAdditionInMinutes) / 60.0


  def tabulate(
-         auth_stats, stats_tot, sort='loc', bytype=False, backend='md'):
+         auth_stats, stats_tot, sort='loc', bytype=False, backend='md',
+         cost=None):
      """
-     backends  : [default: md]|yaml|json|csv|tsv|tabulate
+     backends  : [default: md]|yaml|json|csv|tsv|tabulate|
+         `in tabulate.tabulate_formats`
      """
      log = logging.getLogger(__name__)
      COL_NAMES = ['Author', 'loc', 'coms', 'fils', ' distribution']
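To make the new `hours()` heuristic concrete, here is a small worked example with made-up commit timestamps (gaps of `maxCommitDiffInSec` or longer are ignored; `firstCommitAdditionInMinutes` is added once):

    # Same estimator as the hours() added above, applied to invented data.
    def hours(dates, maxCommitDiffInSec=120 * 60, firstCommitAdditionInMinutes=120):
        dates = sorted(dates)
        diffInSec = [i - j for (i, j) in zip(dates[1:], dates[:-1])]
        res = sum(filter(lambda i: i < maxCommitDiffInSec, diffInSec))
        return (res / 60.0 + firstCommitAdditionInMinutes) / 60.0

    # three commits 30 min apart, then one a day later (the 24 h gap is dropped)
    ctimes = [0, 1800, 3600, 3600 + 86400]
    print(hours(ctimes))  # (3600 s / 60 + 120 min) / 60 = 3.0 hours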
@@ -80,21 +98,34 @@ def tabulate(
              100 * s.get('commits', 0) / max(1, stats_tot['commits']),
              100 * len(s.get('files', [])) / max(1, stats_tot['files'])
          ))).replace('/100.0/', '/ 100/')]
-         for (auth, s) in sorted(it_as(),
-                                 key=lambda k: int_cast_or_len(k[1].get(sort, 0)),
-                                 reverse=True)]
+         for (auth, s) in sorted(
+             it_as(),
+             key=lambda k: int_cast_or_len(k[1].get(sort, 0)),
+             reverse=True)]
+     if cost is None:
+         cost = ''
+     if cost:
+         cost = cost.lower()
+         stats_tot = dict(stats_tot)
+         if any(i in cost for i in ['cocomo', 'month']):
+             COL_NAMES.insert(1, 'mths')
+             tab = [i[:1] + [3.2 * (i[1] / 1e3)**1.05] + i[1:] for i in tab]
+             stats_tot.setdefault('months', '%.1f' % sum(i[1] for i in tab))
+         if any(i in cost for i in ['commit', 'hour']):
+             COL_NAMES.insert(1, 'hrs')
+             tab = [i[:1] + [hours(auth_stats[i[0]]['ctimes'])] + i[1:] for i in tab]
+             stats_tot.setdefault('hours', '%.1f' % sum(i[1] for i in tab))
      # log.debug(auth_stats)

      totals = 'Total ' + '\nTotal '.join(
-         "%s: %d" % i for i in sorted(stats_tot.items())) + '\n'
+         "%s: %s" % i for i in sorted(stats_tot.items())) + '\n'

      backend = backend.lower()
-     if backend == 'tabulate':
-         from tabulate import tabulate as tabber
-         log.debug("backend:tabulate")
-         return totals + tabber(tab, COL_NAMES, tablefmt='grid', floatfmt='.0f')
-         # from ._utils import tighten
-         # return totals + tighten(tabber(...), max_width=TERM_WIDTH)
-     elif backend in ['yaml', 'yml', 'json', 'csv', 'tsv']:
+     if backend in ("tabulate", "md", "markdown"):
+         backend = "pipe"
+
+     if backend in ['yaml', 'yml', 'json', 'csv', 'tsv']:
          tab = [i[:-1] + [float(pc.strip()) for pc in i[-1].split('/')] for i in tab]
          tab = dict(
              total=stats_tot, data=tab,
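The new 'mths' column comes from the COCOMO-style effort formula in the hunk above, `3.2 * (loc / 1e3) ** 1.05` person-months (the 3.2/1.05 constants correspond to COCOMO's organic project class); a quick sanity check with invented line counts:

    # COCOMO-style person-months, as computed for each author above.
    for loc in (500, 5000, 50000):  # hypothetical per-author line counts
        months = 3.2 * (loc / 1e3) ** 1.05
        print('%6d loc -> %5.1f person-months' % (loc, months))
    # prints roughly 1.5, 17.3 and 194.6 person-months respectively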
@@ -121,64 +152,21 @@ def tabulate(
              return res.getvalue().rstrip()
          else:  # pragma: nocover
              raise RuntimeError("Should be unreachable")
-     elif backend not in ['md', 'markdown']:
-         raise ValueError("Unknown backend:%s" % backend)
-
-     log.debug("backend:md")
-     # TODO: convert below to separate function for testing
-
-     res = ''
-     stats = list(auth_stats.values())
-     # Columns: Author | loc | coms | fils |  distribution
-     COL_LENS = [
-         max(6, Max(len(a) for a in auth_stats)),
-         max(3, Max(len(str(i["loc"])) for i in stats)),
-         max(4, Max(len(str(i.get("commits", 0))) for i in stats)),
-         max(4, Max(len(str(len(i.get("files", [])))) for i in stats)),
-         12
-     ]
-
-     COL_LENS[0] = min(TERM_WIDTH - sum(COL_LENS[1:]) - len(COL_LENS) * 3 - 3,
-                       COL_LENS[0])
-
-     COL_NAMES = [
-         "Author" + ' ' * (COL_LENS[0] - 6),
-         ' ' * (COL_LENS[1] - 3) + "loc",
-         ' ' * (COL_LENS[2] - 4) + "coms",
-         ' ' * (COL_LENS[3] - 4) + "fils",
-         " distribution "
-     ]
-
-     tbl_row_fmt = u"| {0:<%ds}| {1:>%dd} | {2:>%dd} | {3:>%dd} |" \
-                   u" {4:4.1f}/{5:4.1f}/{6:4.1f} |" % (COL_LENS[0] + 1,
-                                                       COL_LENS[1],
-                                                       COL_LENS[2],
-                                                       COL_LENS[3])
-
-     TR_HLINE = tr_hline([len(i) + 2 for i in COL_NAMES])
-     res += TR_HLINE + '\n'
-     res += ("| {0:s} | {1:s} | {2:s} | {3:s} | {4} |").format(*COL_NAMES) + '\n'
-     res += tr_hline([len(i) + 2 for i in COL_NAMES], '=') + '\n'
-
-     for (auth, stats) in tqdm(
-             sorted(
-                 auth_stats.items(),
-                 key=lambda k: int_cast_or_len(k[1].get(sort, 0)),
-                 reverse=True), leave=False):
-         # print (stats)
-         loc = stats["loc"]
-         commits = stats.get("commits", 0)
-         files = len(stats.get("files", []))
-         if bytype:
-             log.debug("TODO:NotImplemented:--bytype")
-             # TODO: print ([stats.get("files", []) ])
-         res += (tbl_row_fmt.format(
-             auth[:len(COL_NAMES[0]) + 1], loc, commits, files,
-             100 * loc / max(1, stats_tot["loc"]),
-             100 * commits / max(1, stats_tot["commits"]),
-             100 * files / max(1, stats_tot["files"])).replace('100.0', ' 100')) \
-             + '\n'
-     return totals + res + TR_HLINE
+     else:
+         import tabulate as tabber
+         if backend not in tabber.tabulate_formats:
+             raise ValueError("Unknown backend:%s" % backend)
+         log.debug("backend:tabulate:" + backend)
+         COL_LENS = [max(len(Str(i[j])) for i in [COL_NAMES] + tab)
+                     for j in range(len(COL_NAMES))]
+         COL_LENS[0] = min(
+             TERM_WIDTH - sum(COL_LENS[1:]) - len(COL_LENS) * 3 - 4,
+             COL_LENS[0])
+         tab = [[i[0][:COL_LENS[0]]] + i[1:] for i in tab]
+         return totals + tabber.tabulate(
+             tab, COL_NAMES, tablefmt=backend, floatfmt='.0f')
+         # from ._utils import tighten
+         # return totals + tighten(tabber(...), max_width=TERM_WIDTH)


  def run(args):
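For reference, the `tabulate` library call that the new `else:` branch delegates to looks roughly like this; the rows and column names below are invented sample data, while `tabulate_formats` and the `tabulate()` signature are part of the real library:

    # Rough illustration of the delegation to the third-party tabulate package.
    import tabulate as tabber

    COL_NAMES = ['Author', 'loc', 'coms', 'fils', ' distribution']
    tab = [['Alice', 120, 3, 2, '80.0/75.0/66.7'],
           ['Bob', 30, 1, 1, '20.0/25.0/33.3']]
    print(tabber.tabulate_formats)  # valid --format/tablefmt names
    print(tabber.tabulate(tab, COL_NAMES, tablefmt='pipe', floatfmt='.0f'))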
@@ -188,8 +176,8 @@ def run(args):
      log.debug("parsing args")

      if args.sort not in ["loc", "commits", "files"]:
-         log.warn("--sort argument (" + args.sort +
-                  ") unrecognised\n" + __doc__)
+         log.warn("--sort argument (%s) unrecognised\n%s" % (
+             args.sort, __doc__))

      if not args.excl:
          args.excl = ""
@@ -252,17 +240,19 @@
              log.warn(fname + ':' + str(e))
              continue
          log.log(logging.NOTSET, blame_out)
-         loc_auths = RE_AUTHS.findall(blame_out)
+         loc_auth_times = RE_AUTHS.findall(blame_out)

-         for loc, auth in loc_auths:  # for each chunk
+         for loc, auth, tstamp in loc_auth_times:  # for each chunk
              loc = int(loc)
              auth = _str(auth)
+             tstamp = int(tstamp)
              try:
                  auth_stats[auth]["loc"] += loc
              except KeyError:
-                 auth_stats[auth] = {"loc": loc, "files": set([fname])}
+                 auth_stats[auth] = {"loc": loc, "files": set([fname]), "ctimes": []}
              else:
                  auth_stats[auth]["files"].add(fname)
+                 auth_stats[auth]["ctimes"].append(tstamp)

          if args.bytype:
              fext_key = ("." + fext(fname)) if fext(fname) else "._None_ext"
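The three-group `RE_AUTHS` now also captures the committer timestamp; the excerpt below is a hand-written stand-in for the porcelain-style `git blame` output it expects (real output carries more header fields):

    # Invented blame excerpt; RE_AUTHS is copied from the hunk above.
    import re

    RE_AUTHS = re.compile(
        r'^\w+ \d+ \d+ (\d+)\nauthor (.+?)$.*?committer-time (\d+)',
        flags=re.M | re.DOTALL)

    blame_out = (
        '8aaee23 1 1 2\n'
        'author Alice Example\n'
        'author-time 1546300800\n'
        'committer Alice Example\n'
        'committer-time 1546300800\n'
        '\tprint("hello")\n'
    )
    print(RE_AUTHS.findall(blame_out))  # [('2', 'Alice Example', '1546300800')]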
@@ -284,7 +274,8 @@
          except KeyError:
              auth_stats[_str(auth)] = {"loc": 0,
                                        "files": set([]),
-                                       "commits": int(ncom)}
+                                       "commits": int(ncom),
+                                       "ctimes": []}

      stats_tot = dict((k, 0) for stats in auth_stats.values() for k in stats)
      log.debug(stats_tot)
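By the end of `run()` each author entry now also carries a `ctimes` list, which is what feeds `hours()` when `--cost` asks for commit-time hours; a hypothetical sketch of the structure handed to `tabulate()` (all values invented):

    # Invented per-author stats of the shape built up in run().
    auth_stats = {
        'Alice Example': {'loc': 120, 'files': {'foo.py', 'bar.py'},
                          'commits': 3, 'ctimes': [1546300800, 1546302600]},
        'Bob Example': {'loc': 30, 'files': {'foo.py'},
                        'commits': 1, 'ctimes': [1546310000]},
    }
    stats_tot = {'loc': 150, 'commits': 4, 'files': 3, 'ctimes': 3}
    # tabulate(auth_stats, stats_tot, backend='pipe', cost='hours,months')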
@@ -301,7 +292,8 @@
      # log.debug(extns)

      print_unicode(tabulate(
-         auth_stats, stats_tot, args.sort, args.bytype, args.format))
+         auth_stats, stats_tot,
+         args.sort, args.bytype, args.format, args.cost))


  def main(args=None):