import os, sys, re, subprocess

# This python3 library provides a few helpful routines that are
# used by the latest packaging scripts.

default_encoding = 'utf-8'

# Output the msg args to stderr.  Accepts all the args that print() accepts.
def warn(*msg):
    print(*msg, file=sys.stderr)


# Output the msg args to stderr and die with a non-zero return-code.
# Accepts all the args that print() accepts.
def die(*msg):
    warn(*msg)
    sys.exit(1)


                     20: # Set this to an encoding name or set it to None to avoid the default encoding idiom.
                     21: def set_default_encoding(enc):
                     22:     default_encoding = enc
                     23: 
                     24: 
                     25: # Set shell=True if the cmd is a string; sets a default encoding unless raw=True was specified.
def _tweak_opts(cmd, opts, **maybe_set):
    # This sets any maybe_set value that isn't already set AND creates a copy of opts for us.
    opts = {**maybe_set, **opts}

    if isinstance(cmd, str):
        opts = {'shell': True, **opts}

    want_raw = opts.pop('raw', False)
    if default_encoding and not want_raw:
        opts = {'encoding': default_encoding, **opts}

    capture = opts.pop('capture', None)
    if capture:
        if capture == 'stdout':
            opts = {'stdout': subprocess.PIPE, **opts}
        elif capture == 'stderr':
            opts = {'stderr': subprocess.PIPE, **opts}
        elif capture == 'output':
            opts = {'stdout': subprocess.PIPE, 'stderr': subprocess.PIPE, **opts}
        elif capture == 'combined':
            opts = {'stdout': subprocess.PIPE, 'stderr': subprocess.STDOUT, **opts}

    discard = opts.pop('discard', None)
    if discard:
        # We DO want to override any already set stdout|stderr values (unlike above).
        if discard == 'stdout' or discard == 'output':
            opts['stdout'] = subprocess.DEVNULL
        if discard == 'stderr' or discard == 'output':
            opts['stderr'] = subprocess.DEVNULL

    return opts
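
# For illustration only (not called anywhere): given a shell-string cmd and no
# raw/discard opts, the tweaks above roughly amount to:
#   _tweak_opts('ls', {}, capture='stdout')
#   => {'shell': True, 'encoding': 'utf-8', 'stdout': subprocess.PIPE}
# ('ls' is an arbitrary example command; the dict's key order is irrelevant.)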


# This does a normal subprocess.run() with some auto-args added to make life easier.
def cmd_run(cmd, **opts):
    return subprocess.run(cmd, **_tweak_opts(cmd, opts))


# Like cmd_run() with a default check=True specified.
def cmd_chk(cmd, **opts):
    return subprocess.run(cmd, **_tweak_opts(cmd, opts, check=True))


# Capture stdout in a string and return the (output, return_code) tuple.
# Use the capture='combined' opt to get both stdout and stderr together.
def cmd_txt_status(cmd, **opts):
    input = opts.pop('input', None)
    if input is not None:
        opts['stdin'] = subprocess.PIPE
    proc = subprocess.Popen(cmd, **_tweak_opts(cmd, opts, capture='stdout'))
    out = proc.communicate(input=input)[0]
    return (out, proc.returncode)


# Like cmd_txt_status() but just return the output.
def cmd_txt(cmd, **opts):
    return cmd_txt_status(cmd, **opts)[0]


# Capture stdout in a string and return the output if the command has a 0 return code.
# Otherwise it raises an exception that indicates the return code and the output.
def cmd_txt_chk(cmd, **opts):
    out, rc = cmd_txt_status(cmd, **opts)
    if rc != 0:
        cmd_err = f'Command "{cmd}" returned non-zero exit status "{rc}" and output:\n{out}'
        raise Exception(cmd_err)
    return out


# Starts a command with its stdout piped (by default) and leaves it up to you to read
# the output and call communicate() on the returned Popen object.
def cmd_pipe(cmd, **opts):
    return subprocess.Popen(cmd, **_tweak_opts(cmd, opts, capture='stdout'))
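
# A quick usage sketch of the wrappers above (the git commands are just
# illustrative examples, not part of this library):
#   cmd_chk(['git', 'fetch'])                 # list cmd => no shell, check=True
#   txt = cmd_txt_chk('git rev-parse HEAD')   # string cmd => shell=True, text output
#   proc = cmd_pipe(['git', 'branch', '-l'])  # read proc.stdout, then proc.communicate()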


# Runs a "git status" command and dies if the checkout is not clean (the
# arg fatal_unless_clean can be used to make that non-fatal).  Returns a
# tuple of the current branch, the is_clean flag, and the status text.
def check_git_status(fatal_unless_clean=True, subdir='.'):
    status_txt = cmd_txt_chk(f"cd '{subdir}' && git status")
    is_clean = re.search(r'\nnothing to commit.+working (directory|tree) clean', status_txt) is not None

    if not is_clean and fatal_unless_clean:
        if subdir == '.':
            subdir = ''
        else:
            subdir = f" *{subdir}*"
        die(f"The{subdir} checkout is not clean:\n" + status_txt)

    m = re.match(r'^(?:# )?On branch (.+)\n', status_txt)
    cur_branch = m[1] if m else None

    return (cur_branch, is_clean, status_txt)
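
# For example (hypothetical output on a clean master checkout), check_git_status()
# might return something like:
#   ('master', True, 'On branch master\n...\nnothing to commit, working tree clean\n')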


# Calls check_git_status() on the current git checkout and (optionally) a subdir path's
# checkout. Use fatal_unless_clean to indicate if an unclean checkout is fatal or not.
# The master_branch arg indicates what branch we want both checkouts to be using, and
# if the branch is wrong the user is given the option of either switching to the right
# branch or aborting.
def check_git_state(master_branch, fatal_unless_clean=True, check_extra_dir=None):
    cur_branch = check_git_status(fatal_unless_clean)[0]
    branch = re.sub(r'^patch/([^/]+)/[^/]+$', r'\1', cur_branch) # change patch/BRANCH/PATCH_NAME into BRANCH
    if branch != master_branch:
        print(f"The checkout is not on the {master_branch} branch.")
        if master_branch != 'master':
            sys.exit(1)
        ans = input(f"Do you want me to continue with --branch={branch}? [n] ")
        if not ans or not re.match(r'^y', ans, flags=re.I):
            sys.exit(1)
        master_branch = branch

    if check_extra_dir and os.path.isdir(os.path.join(check_extra_dir, '.git')):
        branch = check_git_status(fatal_unless_clean, check_extra_dir)[0]
        if branch != master_branch:
            print(f"The *{check_extra_dir}* checkout is on branch {branch}, not branch {master_branch}.")
            ans = input(f"Do you want to change it to branch {master_branch}? [n] ")
            if not ans or not re.match(r'^y', ans, flags=re.I):
                sys.exit(1)
            subprocess.check_call(f"cd {check_extra_dir} && git checkout '{master_branch}'", shell=True)

    return (cur_branch, master_branch)
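
# Illustrative call (hypothetical): a packaging script might do something like
#   cur_branch, master_branch = check_git_state('master', True, 'patches')
# where 'patches' is an optional extra checkout directory to verify.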


# Return the git hash of the most recent commit on the given branch.
def latest_git_hash(branch):
    out = cmd_txt_chk(['git', 'log', '-1', '--no-color', branch])
    m = re.search(r'^commit (\S+)', out, flags=re.M)
    if not m:
        die(f"Unable to determine commit hash for branch: {branch}")
    return m[1]


# Return a set of all branch names that have the format "patch/BASE_BRANCH/NAME"
# for the given base_branch string.  Just the NAME portion is put into the set.
def get_patch_branches(base_branch):
    branches = set()
    proc = cmd_pipe('git branch -l'.split())
    for line in proc.stdout:
        m = re.search(r' patch/([^/]+)/(.+)', line)
        if m and m[1] == base_branch:
            branches.add(m[2])
    proc.communicate()
    return branches
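
# For example (hypothetical branch names), if the repo had patch/master/foo and
# patch/master/bar, get_patch_branches('master') would return {'foo', 'bar'}.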


# Make sure the .git/hooks/pre-push hook exists and mentions "make gensend"
# (dies if an existing hook lacks it).
def mandate_gensend_hook():
    hook = '.git/hooks/pre-push'
    if not os.path.exists(hook):
        print('Creating hook file:', hook)
        cmd_chk(['./rsync', '-a', 'packaging/pre-push', hook])
    else:
        out, rc = cmd_txt_status(['fgrep', 'make gensend', hook], discard='output')
        if rc:
            die('Please add a "make gensend" into your', hook, 'script.')


# Snag the GENFILES values out of the Makefile.in file and return them as a list.
def get_gen_files(want_dir_plus_list=False):
    cont_re = re.compile(r'\\\n')

    gen_files = [ ]

    auto_dir = os.path.join('auto-build-save', cmd_txt('git rev-parse --abbrev-ref HEAD').strip().replace('/', '%'))

    with open('Makefile.in', 'r', encoding='utf-8') as fh:
        for line in fh:
            if not gen_files:
                chk = re.sub(r'^GENFILES=', '', line)
                if line == chk:
                    continue
                line = chk
            m = re.search(r'\\$', line)
            line = re.sub(r'^\s+|\s*\\\n?$|\s+$', '', line)
            gen_files += line.split()
            if not m:
                break

    if want_dir_plus_list:
        return (auto_dir, gen_files)

    return [ os.path.join(auto_dir, fn) for fn in gen_files ]
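
# Illustrative result (hypothetical branch "master" and file names): get_gen_files()
# would return paths like ['auto-build-save/master/configure.sh', ...], while
# get_gen_files(True) returns the ('auto-build-save/master', [file, ...]) tuple instead.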


# Return the version string (e.g. "3.1.2") from the RSYNC_VERSION define in version.h.
def get_rsync_version():
    with open('version.h', 'r', encoding='utf-8') as fh:
        txt = fh.read()
    m = re.match(r'^#define\s+RSYNC_VERSION\s+"(\d.+?)"', txt)
    if m:
        return m[1]
    die("Unable to find RSYNC_VERSION define in version.h")


# Scan NEWS.md for release info and return the (last_version, last_protocol_version, pdate)
# tuple, where pdate is a dict of version -> date values taken from the NEWS release table.
def get_NEWS_version_info():
    rel_re = re.compile(r'^\| \S{2} \w{3} \d{4}\s+\|\s+(?P<ver>\d+\.\d+\.\d+)\s+\|\s+(?P<pdate>\d{2} \w{3} \d{4})?\s+\|\s+(?P<pver>\d+)\s+\|')
    last_version = last_protocol_version = None
    pdate = { }

    with open('NEWS.md', 'r', encoding='utf-8') as fh:
        for line in fh:
            if not last_version: # Find the first non-dev|pre version with a release date.
                m = re.search(r'rsync (\d+\.\d+\.\d+) .*\d\d\d\d', line)
                if m:
                    last_version = m[1]
            m = rel_re.match(line)
            if m:
                if m['pdate']:
                    pdate[m['ver']] = m['pdate']
                if m['ver'] == last_version:
                    last_protocol_version = m['pver']

    if not last_protocol_version:
        die(f"Unable to determine protocol_version for {last_version}.")

    return last_version, last_protocol_version, pdate
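
# Hypothetical shape of the result (values are illustrative, not current):
#   ('3.2.0', '31', {'3.2.0': '19 Jun 2020', '3.1.3': '28 Jan 2018', ...})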


# Return the (PROTOCOL_VERSION, SUBPROTOCOL_VERSION) values defined in rsync.h (as strings).
def get_protocol_versions():
    protocol_version = subprotocol_version = None

    with open('rsync.h', 'r', encoding='utf-8') as fh:
        for line in fh:
            m = re.match(r'^#define\s+PROTOCOL_VERSION\s+(\d+)', line)
            if m:
                protocol_version = m[1]
                continue
            m = re.match(r'^#define\s+SUBPROTOCOL_VERSION\s+(\d+)', line)
            if m:
                subprotocol_version = m[1]
                break

    if not protocol_version:
        die("Unable to determine the current PROTOCOL_VERSION.")

    if not subprotocol_version:
        die("Unable to determine the current SUBPROTOCOL_VERSION.")

    return protocol_version, subprotocol_version
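
# e.g. (illustrative values only): protocol_version, subprotocol_version = get_protocol_versions()
# might yield ('31', '0') for a release build.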

# vim: sw=4 et
