almost working impl
parent 89aa834bc7
commit ee25e5dd62
TODO.md | 18

@@ -1,15 +1,15 @@
- [ ] implement MVP
- [+] implement MVP
- [+] minimal catalog
- [+] search catalog
- [+] local current versions
- [ ] latest version, github strategy
- [ ] intall, github strategy
- [+] latest version, github strategy
- [+] intall, github strategy

- [ ] code style
- [*] code style => blake, defaults

- [ ] tests
- [ ]
- [ ] cli x (not there, there with old version, there with latest version)

- [ ] choose forge(s), one will be the golden source of the catalog
* github

@@ -27,7 +27,8 @@
- [ ] GH release
- [ ] pypi package

- [ ] open issue in managed tool forge for them to add cliget install method
- [ ] open an issue in managed tool forge asking to add support for `--version` and semver
- [ ] open an issue in managed tool forge asking to add cliget install method

- [ ] communicate on geeks'platform : HN, lobsters, reddit, linuxfr

@@ -44,7 +45,4 @@
- [ ] build tests
- [ ] async loading of versions ; fill output when available
- [ ] cache GH response for a given time

- [ ] explore a way to combine docopt and baker
catalog.yaml

@@ -281,7 +281,7 @@ websocat:
wrk:
  desc: Modern HTTP benchmarking tool
  github: wg/wrk:wq
  github: wg/wrk
ww:
  name: webwormhole
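For orientation, here is a minimal sketch of how an entry like wrk above is consumed once the catalog is parsed; the inline YAML and variable names are illustrative, only the desc/github keys come from the diff.

from yaml import load, SafeLoader

sample = """
wrk:
  desc: Modern HTTP benchmarking tool
  github: wg/wrk
"""

catalog = load(sample, SafeLoader)
entry = catalog["wrk"]
print(entry["desc"])    # Modern HTTP benchmarking tool
print(entry["github"])  # wg/wrk, the owner/repo used for the GitHub release lookup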
cliget.py | 408

@@ -36,211 +36,259 @@ from subprocess import run, CalledProcessError, TimeoutExpired
from fuzzywuzzy import fuzz
import requests


def trace(*mess):
    if "TRACE" in os.environ:
        print("TRACE", mess)
    if "TRACE" in os.environ:
        print("TRACE", mess)


def info(*mess):
    print(*mess)


def warn(*mess):
    print("WARN", mess)
    print("WARNING:", *mess)


class DotDict(dict):
    def __getattr__(self, name):
        return self[name] if name in self else None
    def __getattr__(self, name):
        return self[name] if name in self else None

def _load_catalog(options)->dict:
    catalog=options.get('__catalog', 'catalog.yaml')
    o = load(open(catalog), SafeLoader)
    trace(o)
    return { k:DotDict(v) for k,v in o.items()}

def _find_semver(s:str) -> VersionInfo:
    ver = VersionInfo(0,0,0)
    try:
        ver = VersionInfo.parse(s)
    except ValueError:
def _load_catalog(options) -> dict:
    catalog = options.get(
        "__catalog", "https://codeberg.org/setop/cliget/raw/branch/main/catalog.yaml"
    )
    if catalog.startswith("http"):
        # IMPROVE cache catalog for some time
        r = requests.get(catalog)
        o = load(r.content, SafeLoader)
    else:
        o = load(open(catalog), SafeLoader)
    trace(o)
    return {k: DotDict(v) for k, v in o.items()}


def _find_semver(s: str) -> VersionInfo:
    ver = VersionInfo(0, 0, 0)
    try:
        ver = VersionInfo(*list(i.group(0) for i in re.finditer('\d+', s))[:3])
    except Exception as e:
        trace("parse error", e)
    return ver
        ver = VersionInfo.parse(s)
    except ValueError:
        try:
            ver = VersionInfo(*list(i.group(0) for i in re.finditer("\d+", s))[:3])
        except Exception as e:
            trace("parse error", e)
    return ver
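# Standalone sketch (not part of this commit) of the fallback used in
# _find_semver above: strict VersionInfo.parse() first, then the first three
# digit groups found in the string; the helper name below is illustrative.
import re
from semver import VersionInfo

def best_effort_semver(s: str) -> VersionInfo:
    try:
        return VersionInfo.parse(s)
    except ValueError:
        nums = [int(m.group(0)) for m in re.finditer(r"\d+", s)][:3]
        nums += [0] * (3 - len(nums))  # pad to major.minor.patch
        return VersionInfo(*nums)

print(best_effort_semver("4.2.0"))         # strict parse succeeds -> 4.2.0
print(best_effort_semver("wrk v4.2.0-1"))  # falls back to digit groups -> 4.2.0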
def _local_version(cmd):
    ver = VersionInfo(0)
    try:
        first_line = run([cmd, '--version'], input='', text=True, capture_output=True, check=True, timeout=0.1).stdout.split('\n')[0]
        trace(cmd, '=>', first_line)
        ver = _find_semver(first_line)
    except Exception as e:
        trace("run error", e)
    return ver

def _internal_list(options) -> tuple[str,VersionInfo]:
    """list installed tools and their version"""
    ctl = _load_catalog(options)
    for cli, props in ctl.items():
        # search in path
        ver = VersionInfo(0)
        try:
            vers = _local_version(cli)
            trace(cli, vers)
            yield cli, props, vers
        except CalledProcessError:
            trace(cli, "call error")
        except TimeoutExpired:
            trace(cli, "timeout")
        except FileNotFoundError:
            trace(cli, "not found")
        first_line = run(
            [cmd, "--version"],
            input="",
            text=True,
            capture_output=True,
            check=True,
            timeout=0.1,
        ).stdout.split("\n")[0]
        trace(cmd, "=>", first_line)
        ver = _find_semver(first_line)
    except Exception as e:
        trace("run error", e)
    return ver


def _internal_list(options) -> tuple[str, VersionInfo]:
    """list installed tools and their version"""
    ctl = _load_catalog(options)
    for cli, props in ctl.items():
        # search in path
        try:
            vers = _local_version(cli)
            trace(cli, vers)
            yield cli, props, vers
        except CalledProcessError:
            trace(cli, "call error")
        except TimeoutExpired:
            trace(cli, "timeout")
        except FileNotFoundError:
            trace(cli, "not found")


def dolist(options):
    for (cli, _, ver) in _internal_list(options):
        print(cli, ver)
    for cli, _, ver in _internal_list(options):
        print(cli, ver)


def doinfo(options):
    tool = options.TOOL
    ctl = _load_catalog(options)
    if tool in ctl:
        print(tool)
        for (k,v) in ctl[tool].items():
            print(f' {k}: {v}')
    else:
        warn(f'{tool} not in catalog')
    tool = options.TOOL
    ctl = _load_catalog(options)
    if tool in ctl:
        print(tool)
        for k, v in ctl[tool].items():
            print(f" {k}: {v}")
    else:
        warn(f"{tool} not in catalog")


def dosearch(options):
    pat = options.PAT
    ctl = _load_catalog(options)
    L = []
    for cli, props in ctl.items():
        trace(cli, props)
        rtitle = fuzz.ratio(cli, pat)
        rdesc = fuzz.partial_token_set_ratio(props['desc'], pat)
        score = rtitle + rdesc if rtitle>60 else rdesc
        L.append((cli, props['desc'], score))
    L = sorted(L, key=lambda x: -x[-1])
    # TODO format a as table
    print("\n".join("|".join(map(str,l)) for l in L[:10]))
    pat = options.PAT
    ctl = _load_catalog(options)
    L = []
    for cli, props in ctl.items():
        trace(cli, props)
        rtitle = fuzz.ratio(cli, pat)
        rname = fuzz.ratio(props.name, pat) if 'name' in props else 0
        rdesc = fuzz.partial_token_set_ratio(props.desc, pat)
        score = rdesc + rname
        score = rtitle + score if rtitle > 60 else score
        L.append((cli, props.desc[:50], score))
    L = sorted(L, key=lambda x: -x[-1])
    L = [[ "cli", "desc", "rel" ]] + L[:10]
    from terminaltables import SingleTable
    table = SingleTable(L)
    table.inner_row_border=False
    print(table.table)
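# Standalone sketch of the core scoring in dosearch above (the name bonus of
# the new version is omitted): a title match only counts when it is strong
# (ratio > 60), otherwise only the description similarity matters.  The
# descriptions below are made up for illustration.
from fuzzywuzzy import fuzz

def score(cli: str, desc: str, pat: str) -> int:
    rtitle = fuzz.ratio(cli, pat)
    rdesc = fuzz.partial_token_set_ratio(desc, pat)
    return rtitle + rdesc if rtitle > 60 else rdesc

print(score("wrk", "Modern HTTP benchmarking tool", "benchmark"))
print(score("websocat", "command-line websocket client", "websocket"))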
def dolistupdate(options):
    print("look for updatables")
    for (cli, props, ver) in _internal_list(options):
        # get last version online
        if props.github:
            pass
        pass
    print("look for updatables")
    for cli, props, ver in _internal_list(options):
        # get last version online
        if props.github:
            pass
        pass

def _gh_versions(repo:str) -> [VersionInfo|None]:
    [owner, repo] = repo.split("/")
    url = f'https://api.github.com/repos/{owner}/{repo}/releases'
    response = requests.get(url)
    return [ _find_semver(o.get("tag_name")) for o in response.json()]

def _gh_version(repo:str) -> [VersionInfo|None]:
    [owner, repo] = repo.split("/")
    url = f'https://api.github.com/repos/{owner}/{repo}/releases/latest'
    response = requests.get(url)
    return _find_semver(response.json().get("tag_name"))
def _gh_versions(repo: str) -> [VersionInfo | None]:
    [owner, repo] = repo.split("/")
    url = f"https://api.github.com/repos/{owner}/{repo}/releases"
    response = requests.get(url)
    return [_find_semver(o.get("tag_name")) for o in response.json()]


def _gh_version(repo: str) -> [VersionInfo | None]:
    [owner, repo] = repo.split("/")
    url = f"https://api.github.com/repos/{owner}/{repo}/releases/latest"
    response = requests.get(url)
    return _find_semver(response.json().get("tag_name"))
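# The two helpers above lean on GitHub's REST API: /releases returns a JSON
# array of release objects and /releases/latest a single object, each carrying
# the release tag in its "tag_name" field.  A quick standalone check (the
# function name is illustrative; unauthenticated calls are rate-limited):
import requests

def latest_tag(repo: str) -> str:
    r = requests.get(f"https://api.github.com/repos/{repo}/releases/latest")
    r.raise_for_status()
    return r.json()["tag_name"]

# latest_tag("wg/wrk") would return something like "4.2.0"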
def doversions(options):
    tools = options.TOOLS
    ctl = _load_catalog(options)
    for cli in tools:
        if cli in ctl:
            props = ctl[cli]
            rver = _gh_version(props.github) if props.github else VersionInfo(0)
            lver = _local_version(cli)
            trace(lver)
            trace(rver)
            print(f"{cli} | {lver} | {rver}")
        else:
            warn(f'{tool} not in catalog')
    tools = options.TOOLS
    ctl = _load_catalog(options)
    for cli in tools:
        if cli in ctl:
            props = ctl[cli]
            rver = _gh_version(props.github) if props.github else VersionInfo(0)
            lver = _local_version(cli)
            trace(lver)
            trace(rver)
            print(f"{cli} | {lver} | {rver}")
        else:
            warn(f"{tool} not in catalog")


def doallversions(options):
    tool = options.TOOL
    ctl = _load_catalog(options)
    if tool in ctl:
        props = ctl[tool]
        if props.github:
            vers = _gh_versions(props.github)
            trace(vers)
            print("\n".join(vers))
    else:
        warn(f'{tool} not in catalog')
    tool = options.TOOL
    ctl = _load_catalog(options)
    if tool in ctl:
        props = ctl[tool]
        if props.github:
            vers = _gh_versions(props.github)
            trace(vers)
            print("\n".join(vers))
    else:
        warn(f"{tool} not in catalog")


def doinstall(options):
    tools = options.TOOLS
    ctl = _load_catalog(options)
    for tool in tools:
        if tool in ctl:
            props = ctl[tool]
            if props.github:
                rver = _gh_version(props.github)
                lver = _local_version(tool)
                trace(lver,rver)
                if rver > lver:
                    _perform_install(tool, props.github)
    tools = options.TOOLS
    ctl = _load_catalog(options)
    for tool in tools:
        if tool in ctl:
            props = ctl[tool]
            if props.github:
                rver = _gh_version(props.github)
                lver = _local_version(tool)
                trace(lver, rver)
                if rver > lver:
                    _perform_gh_install(tool, props.github)
                else:
                    info(f"{tool} is already up do date ({lver})")
            else:
                warn(f'{tool} has no known install strategy')
        else:
            warn(f"{tool} not in catalog")


def _match_arch_machine(name: str) -> bool:
    sysname = os.uname().sysname.lower()  # os
    machine = os.uname().machine.lower()  # arch
    return name.lower().find(sysname) > 0 and name.lower().find(machine) > 0
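# Standalone sketch of the host check done by _match_arch_machine above, using
# substring membership instead of find(); note that str.find() returns 0 when
# the platform string sits at the very start of the asset name, which a
# strict "> 0" test would miss.  Asset names below are made up.
import os

def matches_host(name: str) -> bool:
    sysname = os.uname().sysname.lower()  # e.g. "linux"
    machine = os.uname().machine.lower()  # e.g. "x86_64"
    return sysname in name.lower() and machine in name.lower()

for name in ("tool-1.0.0-linux-x86_64.tar.gz", "tool-1.0.0-darwin-arm64.tar.gz"):
    print(name, matches_host(name))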
def _perform_gh_install(cli, repo, version=None):
    # get asset list
    url = f"https://api.github.com/repos/{repo}/releases/latest"
    r = requests.get(url)
    assets = r.json()["assets"]
    trace(assets)
    # select right asset
    asset = DotDict(next(filter(lambda x: _match_arch_machine(x["name"]), assets)))
    trace(asset.name, asset.url)
    # mkdirs
    p = os.path
    home = os.environ["HOME"]
    assetshome = p.join(home, ".cache/cliget/assets")
    os.makedirs(assetshome, exist_ok=True)
    location = p.join(assetshome, asset.name)
    trace(f"will dl {location}")
    # dl asset if not already there
    if not p.exists(location):
        dlurl = requests.get(asset.url).json()["browser_download_url"]
        r = requests.get(dlurl, allow_redirects=True, stream=True)
        with open(location, "wb") as fd:
            shutil.copyfileobj(r.raw, fd)
        trace("downloaded")
    # unpack asset
    if not asset.name.endswith(".tar.gz"):
        raise ValueError("package type not handled")
    progloc = p.join(home, ".local/programs", cli)
    trace("process tgz")
    os.makedirs(progloc, exist_ok=True)
    run(["tar", "xfz", location, "-C", progloc])
    # symlink
    # TODO remove existing symlink
    os.symlink(p.join('../programs', cli, cli), p.join(home, ".local/bin", cli))
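# One possible shape for the "TODO remove existing symlink" above: drop any
# previous link (or stale file) before re-creating it.  This is a sketch, not
# the commit's code; the helper name is illustrative.
import os

def relink(target: str, linkpath: str):
    if os.path.islink(linkpath) or os.path.exists(linkpath):
        os.remove(linkpath)
    os.symlink(target, linkpath)

# e.g. relink("../programs/wrk/wrk", os.path.expanduser("~/.local/bin/wrk"))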
if __name__ == "__main__":
    if "DEBUG" in os.environ:
        logging.basicConfig(level=logging.DEBUG)
        logging.debug("debug is on")
    else:
        warn(f'{tool} not in catalog')

def _match_arch_machine(name:str) -> bool:
    sysname = os.uname().sysname.lower()  # os
    machine = os.uname().machine.lower()  # arch
    return name.lower().find(sysname)>0 and name.lower().find(machine)>0

def _perform_install(cli, repo, version=None):
    # get asset list
    [owner, repo] = repo.split("/")
    url = f'https://api.github.com/repos/{owner}/{repo}/releases/latest'
    r = requests.get(url)
    assets = r.json()['assets']
    trace(assets)
    # select right asset
    asset = DotDict(next(filter(lambda x: _match_arch_machine(x['name']),assets)))
    trace(asset.name, asset.url)
    # mkdirs
    p = os.path
    home = os.environ["HOME"]
    assetshome = p.join(home, '.cache/cliget/assets')
    os.makedirs(assetshome, exist_ok=True)
    location = p.join(assetshome, asset.name)
    trace(f"will dl {location}")
    # dl asset if not already there
    if not p.exists(location):
        dlurl = requests.get(asset.url).json()['browser_download_url']
        r = requests.get(dlurl, allow_redirects=True, stream=True)
        with open(location, 'wb') as fd:
            shutil.copyfileobj(r.raw, fd)
        trace("downloaded")
    # unpack asset
    if not asset.name.endswith('.tar.gz'):
        raise ValueError('package type not handled')
    progloc = p.join(home, '.local/program/'+cli)
    trace("process tgz")
    os.makedirs(progloc, exist_ok=True)
    run(["tar", "xfz", location, "-C", progloc])
    # symlink
    os.symlink(p.join(progloc, cli), p.join(home, '.local/bin', cli))

if __name__ == '__main__':
    if "DEBUG" in os.environ:
        logging.basicConfig(level=logging.DEBUG)
        logging.debug("debug is on")
    else:
        logging.info("not in debug")
    options = docopt(__doc__, version='Cliget 0.1.0')
    options = DotDict({k.replace('-','_'):v for (k,v) in options.items() if v is not None})
    trace(options)
    if options.info:
        doinfo(options)
    elif options.list:
        dolist(options)
    elif options.search:
        dosearch(options)
    elif options.versions:
        doversions(options)
    elif options.allversions:
        doallversions(options)
    elif options.install or options.update:
        if not options.__all and len(options.TOOLS)==0:
            dolistupdate(options)
        elif len(options.TOOLS)>0:
            doinstall(options)
    else:
        print("not implemented")
        logging.info("not in debug")
    options = docopt(__doc__, version="Cliget 0.1.0")
    options = DotDict(
        {k.replace("-", "_"): v for (k, v) in options.items() if v is not None}
    )
    trace(options)
    if options.info:
        doinfo(options)
    elif options.list:
        dolist(options)
    elif options.search:
        dosearch(options)
    elif options.versions:
        doversions(options)
    elif options.allversions:
        doallversions(options)
    elif options.install or options.update:
        if not options.__all and len(options.TOOLS) == 0:
            dolistupdate(options)
        elif len(options.TOOLS) > 0:
            doinstall(options)
    else:
        print("update all not implemented")
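One detail of the __main__ block worth illustrating: docopt returns keys such as "--all" and "TOOLS", and the DotDict comprehension rewrites dashes to underscores, which is why the dispatch reads options.__all. A small standalone sketch (the parsed dict below is hand-written, not produced by the real usage string):

class DotDict(dict):
    def __getattr__(self, name):
        return self[name] if name in self else None

parsed = {"--all": False, "install": True, "TOOLS": ["wrk"], "--catalog": None}
options = DotDict({k.replace("-", "_"): v for k, v in parsed.items() if v is not None})
print(options.__all, options.install, options.TOOLS, options.__catalog)
# -> False True ['wrk'] None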
@@ -5,3 +5,6 @@ semver
thefuzz
#python-Levenshtein
requests
#tabulate
#termtables
terminaltables