diff --git a/local/bin/x-backup-folder.sh b/local/bin/x-backup-folder.sh
new file mode 100755
index 0000000..db851ed
--- /dev/null
+++ b/local/bin/x-backup-folder.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+
+DIRECTORY=$1
+FILENAME=$2
+
+if [ -z "${DIRECTORY}" ]; then
+    echo "DIRECTORY (first argument) is missing"
+    echo "Usage: $0 folder_to_backup [archive_name]"
+    exit 1
+fi
+
+if [ -z "${FILENAME}" ]; then
+    FILENAME=$DIRECTORY
+fi
+
+# Slugify the archive name: transliterate to ASCII, collapse runs of
+# non-alphanumeric characters, and trim leading/trailing underscores.
+FILENAME=$(echo "${FILENAME}" | \
+    tr '/' _ | \
+    iconv -t ascii//TRANSLIT | \
+    sed -r 's/[^a-zA-Z0-9]+/_/g' | \
+    sed -r 's/^_+|_+$//g')
+
+TIMESTAMP=$(date "+%Y%m%d_%H%M%S")
+FILENAME_TIMESTAMP="${FILENAME}_${TIMESTAMP}"
+
+tar cvzf "${FILENAME_TIMESTAMP}.tar.gz" "${DIRECTORY}/"
diff --git a/local/bin/x-git-largest-files.py b/local/bin/x-git-largest-files.py
new file mode 100755
index 0000000..1ddb759
--- /dev/null
+++ b/local/bin/x-git-largest-files.py
@@ -0,0 +1,174 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+# Python script to find the largest files in a git repository.
+# The general method is based on the script in this blog post:
+# http://stubbisms.wordpress.com/2009/07/10/git-script-to-show-largest-pack-objects-and-trim-your-waist-line/
+#
+# The above script worked for me, but was very slow on my 11GB repository. This version has a bunch
+# of changes to speed things up to a more reasonable time. It takes less than a minute on repos with 250K objects.
+#
+# The MIT License (MIT)
+# Copyright (c) 2015 Nick Kocharhook
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
+# associated documentation files (the "Software"), to deal in the Software without restriction,
+# including without limitation the rights to use, copy, modify, merge, publish, distribute,
+# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all copies or
+# substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
+# NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
+# OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
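+#
+# Usage sketch (flags as defined in parseArguments() below; run from inside
+# a git repository):
+#   x-git-largest-files.py                        # ten largest objects by pack size
+#   x-git-largest-files.py --files-exceeding 512  # objects whose pack size exceeds 512 kB
+#   x-git-largest-files.py -p -c 20               # twenty largest by uncompressed size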
+
+
+from subprocess import check_output, CalledProcessError, Popen, PIPE
+import argparse
+import signal
+import sys
+
+sortByOnDiskSize = False
+
+def main():
+    global sortByOnDiskSize
+
+    signal.signal(signal.SIGINT, signal_handler)
+
+    args = parseArguments()
+    sortByOnDiskSize = args.sortByOnDiskSize
+    sizeLimit = 1024*args.filesExceeding
+
+    if args.filesExceeding > 0:
+        print("Finding objects larger than {}kB…".format(args.filesExceeding))
+    else:
+        print("Finding the {} largest objects…".format(args.matchCount))
+
+    blobs = getTopBlobs(args.matchCount, sizeLimit)
+
+    populateBlobPaths(blobs)
+    printOutBlobs(blobs)
+
+def getTopBlobs(count, sizeLimit):
+    sortColumn = 4
+
+    if sortByOnDiskSize:
+        sortColumn = 3
+
+    verifyPack = "git verify-pack -v `git rev-parse --git-dir`/objects/pack/pack-*.idx | grep blob | sort -k{}nr".format(sortColumn)
+    output = check_output(verifyPack, shell=True, universal_newlines=True).split("\n")[:-1]
+
+    blobs = dict()
+    compareBlob = Blob("a b {} {} c".format(sizeLimit, sizeLimit))  # use __lt__ to do the appropriate comparison
+
+    for objLine in output:
+        blob = Blob(objLine)
+
+        if sizeLimit > 0:
+            if compareBlob < blob:
+                blobs[blob.sha1] = blob
+            else:
+                break
+        else:
+            blobs[blob.sha1] = blob
+
+            if len(blobs) == count:
+                break
+
+    return blobs
+
+
+def populateBlobPaths(blobs):
+    if len(blobs):
+        print("Finding object paths…")
+
+        # Only include revs which have a path. Other revs aren't blobs.
+        revList = "git rev-list --all --objects | awk '$2 {print}'"
+        allObjectLines = check_output(revList, shell=True, universal_newlines=True).split("\n")[:-1]
+
+        outstandingKeys = set(blobs.keys())
+
+        for line in allObjectLines:
+            cols = line.split()
+            sha1, path = cols[0], " ".join(cols[1:])
+
+            if sha1 in outstandingKeys:
+                outstandingKeys.remove(sha1)
+                blobs[sha1].path = path
+
+                # short-circuit the search if we're done
+                if not len(outstandingKeys):
+                    break
+
+
+def printOutBlobs(blobs):
+    if len(blobs):
+        csvLines = ["size,pack,hash,path"]
+
+        for blob in sorted(blobs.values(), reverse=True):
+            csvLines.append(blob.csvLine())
+
+        p = Popen(["column", "-t", "-s", ","], stdin=PIPE, stdout=PIPE, stderr=PIPE, universal_newlines=True)
+        stdout, stderr = p.communicate("\n".join(csvLines)+"\n")
+
+        print("\nAll sizes in kB. The pack column is the compressed size of the object inside the pack file.\n")
+        print(stdout.rstrip('\n'))
+    else:
+        print("No files found which match those criteria.")
+
+
+def parseArguments():
+    parser = argparse.ArgumentParser(description='List the largest files in a git repository')
+    parser.add_argument('-c', '--match-count', dest='matchCount', type=int, default=10,
+                        help='The number of files to return. Default is 10. Ignored if --files-exceeding is used.')
+    parser.add_argument('--files-exceeding', dest='filesExceeding', type=int, default=0,
+                        help='The cutoff amount, in kB. Files with a pack size (or physical size, with -p) larger than this will be printed.')
+    parser.add_argument('-p', '--physical-sort', dest='sortByOnDiskSize', action='store_true', default=False,
+                        help='Sort by the on-disk size of the files. Default is to sort by the pack size.')
+
+    return parser.parse_args()
+
+
+def signal_handler(signal, frame):
+    print('Caught Ctrl-C. Exiting.')
+    sys.exit(0)
+
+
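+# A blob line from `git verify-pack -v` has the columns
+#   sha1 type size size-in-packfile offset-in-packfile
+# (see git-verify-pack(1)); Blob extracts the sha1 and the two sizes.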
+class Blob(object):
+    sha1 = ''
+    size = 0
+    packedSize = 0
+    path = ''
+
+    def __init__(self, line):
+        cols = line.split()
+        self.sha1, self.size, self.packedSize = cols[0], int(cols[2]), int(cols[3])
+
+    def __repr__(self):
+        return '{} - {} - {} - {}'.format(self.sha1, self.size, self.packedSize, self.path)
+
+    def __lt__(self, other):
+        if sortByOnDiskSize:
+            return self.size < other.size
+        else:
+            return self.packedSize < other.packedSize
+
+    def csvLine(self):
+        return "{},{},{},{}".format(self.size // 1024, self.packedSize // 1024, self.sha1, self.path)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/local/bin/x-quota-usage.php b/local/bin/x-quota-usage.php
new file mode 100755
index 0000000..777c9d4
--- /dev/null
+++ b/local/bin/x-quota-usage.php
@@ -0,0 +1,73 @@
+#!/usr/bin/env php
+<?php
+
+// Assumption: `quota` prints two header lines followed by rows of exactly
+// four columns (filesystem, used, quota, limit) with sizes in 1K blocks;
+// adjust the command if your system's quota output differs.
+exec('quota 2> /dev/null', $lines);
+
+$output = [];
+$fsCharLength = 0;
+
+foreach ($lines as $lineNum => $line) {
+    if ($lineNum < 2) {
+        continue;
+    }
+
+    $values = preg_split('/\s+/', trim($line));
+
+    if (count($values) != 4) {
+        continue;
+    }
+
+    $result = array_combine(['fs', 'used', 'quota', 'limit'], $values);
+
+    if ($result['quota'] == 0) {
+        continue; // no quota set on this filesystem
+    }
+
+    $result['used_percentage'] = round($result['used'] / $result['quota'] * 100, 3);
+    $result['used_gb'] = round($result['used'] / 1024 / 1024, 2);
+    $result['quota_gb'] = round($result['quota'] / 1024 / 1024, 2);
+
+    $char = strlen($result['fs']);
+    if ($char > $fsCharLength) {
+        $fsCharLength = $char;
+    }
+
+    $output[] = $result;
+}
+
+if (!empty($output)) {
+
+    $header = sprintf("%s | %s | %s | %s",
+        str_pad("Mount", $fsCharLength),
+        'Usage%',
+        'Used/Total',
+        'Bar'
+    );
+    $headerWidth = strlen($header);
+
+    echo "\n" . $header . "\n";
+    echo str_repeat('-', $headerWidth + 24) . "\n";
+
+    foreach ($output as $i) {
+        // One '#' per 4% of usage, so a full bar is 25 characters wide.
+        $barUsed = (int) round($i['used_percentage'] / 4);
+        echo sprintf(
+            "%s | %s | %s | [%s]",
+            str_pad($i['fs'], $fsCharLength),
+            str_pad(round($i['used_percentage'], 1) . '%', 6, ' ', STR_PAD_LEFT),
+            str_pad($i['used_gb'] . '/' . $i['quota_gb'], 10, ' ', STR_PAD_LEFT),
+            str_pad(str_repeat('#', $barUsed), 25, '_')
+        ) . "\n";
+    }
+
+    echo "\n\n";
+}
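+
+// Example output (illustrative values):
+//
+// Mount | Usage% | Used/Total | Bar
+// ---------------------------------------------------------
+// /home |  42.3% |  21.5/50.8 | [###########______________]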