From 6afd23ea8f8cdc60ec2162f26b5f024c96a5ef26 Mon Sep 17 00:00:00 2001 From: Kerem Turgutlu Date: Thu, 10 Apr 2025 17:57:14 +0300 Subject: [PATCH 1/3] add utilities to get desired repo files with contents --- 00_core.ipynb | 887 ++++++++++++++++++++++++++++++++++++++++++++- ghapi/_modidx.py | 5 + ghapi/actions.py | 46 +-- ghapi/auth.py | 22 +- ghapi/build_lib.py | 10 +- ghapi/cli.py | 18 +- ghapi/core.py | 105 ++++-- ghapi/event.py | 36 +- ghapi/page.py | 16 +- 9 files changed, 1039 insertions(+), 106 deletions(-) diff --git a/00_core.ipynb b/00_core.ipynb index 1626adb..d29d614 100644 --- a/00_core.ipynb +++ b/00_core.ipynb @@ -39,7 +39,7 @@ "from datetime import datetime, timedelta, timezone\n", "from pprint import pprint\n", "from time import sleep\n", - "import os, shutil, tempfile, subprocess" + "import os, shutil, tempfile, subprocess, fnmatch" ] }, { @@ -1834,6 +1834,887 @@ "api.update_contents('README.md', \"Revert README\", committer=person, author=person, content=readme[:-6]);" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "api = GhApi(token=token)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's implement a function to get all valid files of a repo recursively" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "@patch\n", + "def get_repo_files(self:GhApi, owner, repo, branch=\"main\"):\n", + " \"Get all file items of a repo.\"\n", + " tree = self.git.get_tree(owner=owner, repo=repo, tree_sha=branch, recursive=True)\n", + " res = []\n", + " for item in tree['tree']:\n", + " if item['type'] == 'blob': res.append(item) \n", + " return L(res)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(#3) [{'path': '.devcontainer.json', 'mode': '100644', 'type': 'blob', 'sha': '8bfa0e952eb318c5c74acaa26a0016c12e13418e', 'size': 569, 'url': 'https://api.github.com/repos/AnswerDotAI/fastcore/git/blobs/8bfa0e952eb318c5c74acaa26a0016c12e13418e'},{'path': '.gitattributes', 'mode': '100644', 'type': 'blob', 'sha': '753b249880d57c22306cf155601bff986622b1a0', 'size': 26, 'url': 'https://api.github.com/repos/AnswerDotAI/fastcore/git/blobs/753b249880d57c22306cf155601bff986622b1a0'},{'path': '.github/workflows/docs.yml', 'mode': '100644', 'type': 'blob', 'sha': 'cde13ab17f1a9cbc112928d71ecadee93cf30383', 'size': 296, 'url': 'https://api.github.com/repos/AnswerDotAI/fastcore/git/blobs/cde13ab17f1a9cbc112928d71ecadee93cf30383'}]" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "owner, repo, branch = \"AnswerDotAI\", \"fastcore\", \"main\"\n", + "repo_files = api.get_repo_files(owner,repo); repo_files[:3]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "It would be useful to add filter options to further filter these files. We can use [fnmatch](https://docs.python.org/3/library/fnmatch.html) to add Unix shell-style wildcard based filtering which is simple yet pretty flexible." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def _find_matches(path, pats):\n", + " \"Returns matched patterns\"\n", + " matches = []\n", + " for p in listify(pats):\n", + " if fnmatch.fnmatch(path,p): matches.append(p)\n", + " return matches" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['*.md', 'README.md']" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "_find_matches('README.md', ['*.py', '*test_*', '*/test*/*', '*.md', 'README.md'])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def _include(path, include, exclude):\n", + " \"Prioritize non-star matches, if both include and exclude star expr then pick longer.\"\n", + " include_matches = [\"*\"] if include is None else _find_matches(path, include)\n", + " exclude_matches = [] if exclude is None else _find_matches(path, exclude)\n", + " if include_matches and exclude_matches:\n", + " include_star = [m for m in include_matches if \"*\" in m]\n", + " exclude_star = [m for m in exclude_matches if \"*\" in m]\n", + " if include_star and exclude_star: return len(include_star) > len(exclude_star)\n", + " if include_star: return False\n", + " if exclude_star: return True \n", + " if include_matches: return True\n", + " if exclude_matches: return False" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Exclude all .md files expect for README.md" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "assert _include('README.md', ['README.md'], ['*.md'])\n", + "assert not _include('CONTRIBUTING.md', ['README.md'], ['*.md'])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Include all .py files except for tests" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "assert not _include('examples/test_fastcore2.py', ['*.py'], ['*test_*', '*/test*/*'])\n", + "assert not _include('examples/tests/some_test.py', ['*.py'], ['*test_*', '*/tests/*'])\n", + "assert not _include('examples/test/some_test.py', ['*.py'], ['*test_*', '*/test/*'])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "assert _include('cool/module.py', ['*.py'], ['setup.py'])\n", + "assert not _include('cool/_modidx', ['*.py'], ['*/_modidx'])\n", + "assert not _include('setup.py', ['*.py'], ['setup.py'])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "test_repo_files = ['README.md', 'CONTRIBUTING.md', 'dir/MARKDOWN.md', 'tests/file.py', \n", + " 'module/file.py', 'module/app/file.py', 'nbs/00.ipynb', 'file2.py',\n", + " '.gitignore', 'module/.dotfile', '_hidden.py', 'module/_hidden.py']" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here is an example where we filter to include all python files except for the ones under tests directory, include all notebooks, exclude all md files except for README.md, and all files starting with an underscore. 
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['README.md',\n", + " 'module/file.py',\n", + " 'module/app/file.py',\n", + " 'nbs/00.ipynb',\n", + " 'file2.py']" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "inc,exc = ['README.md', '*.py', '*.ipynb'], ['*.md', 'tests/*.py', '_*', '*/_*']\n", + "[fn for fn in test_repo_files if _include(fn,inc,exc)]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's exclude files starting with `test_` and `setup.py` too." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['*.md', 'tests/*.py', '_*', '*/_*', '*test_*.py', '*/*test*.py', 'setup.py']" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "exc += ['*test_*.py', '*/*test*.py', 'setup.py']; exc" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The list of files that are kept based on the filtering logic:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(39,\n", + " ['README.md',\n", + " 'fastcore/all.py',\n", + " 'fastcore/ansi.py',\n", + " 'fastcore/basics.py',\n", + " 'fastcore/dispatch.py',\n", + " 'fastcore/docments.py',\n", + " 'fastcore/docscrape.py',\n", + " 'fastcore/foundation.py',\n", + " 'fastcore/imghdr.py',\n", + " 'fastcore/imports.py',\n", + " 'fastcore/meta.py',\n", + " 'fastcore/nb_imports.py',\n", + " 'fastcore/net.py',\n", + " 'fastcore/parallel.py',\n", + " 'fastcore/py2pyi.py',\n", + " 'fastcore/script.py',\n", + " 'fastcore/shutil.py',\n", + " 'fastcore/style.py',\n", + " 'fastcore/transform.py',\n", + " 'fastcore/utils.py',\n", + " 'fastcore/xdg.py',\n", + " 'fastcore/xml.py',\n", + " 'fastcore/xtras.py',\n", + " 'nbs/000_tour.ipynb',\n", + " 'nbs/00_test.ipynb',\n", + " 'nbs/01_basics.ipynb',\n", + " 'nbs/02_foundation.ipynb',\n", + " 'nbs/03_xtras.ipynb',\n", + " 'nbs/03a_parallel.ipynb',\n", + " 'nbs/03b_net.ipynb',\n", + " 'nbs/04_docments.ipynb',\n", + " 'nbs/05_meta.ipynb',\n", + " 'nbs/06_script.ipynb',\n", + " 'nbs/07_xdg.ipynb',\n", + " 'nbs/08_style.ipynb',\n", + " 'nbs/09_xml.ipynb',\n", + " 'nbs/10_py2pyi.ipynb',\n", + " 'nbs/11_external.ipynb',\n", + " 'nbs/index.ipynb'])" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "repo_files_filtered = repo_files.filter(lambda o: _include(o.path, inc, exc))\n", + "len(repo_files_filtered), list(repo_files_filtered.map(lambda o: o.path))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Below we can see the files that got filtered out:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['.devcontainer.json',\n", + " '.gitattributes',\n", + " '.github/workflows/docs.yml',\n", + " '.github/workflows/main.yml',\n", + " '.gitignore',\n", + " 'CHANGELOG.md',\n", + " 'CODE_OF_CONDUCT.md',\n", + " 'CONTRIBUTING.md',\n", + " 'LICENSE',\n", + " 'MANIFEST.in',\n", + " 'docker-compose.yml',\n", + " 'examples/ansi.css',\n", + " 'examples/test_fastcore.py',\n", + " 'examples/test_fastcore2.py',\n", + " 'fastcore/__init__.py',\n", + " 'fastcore/_modidx.py',\n", + " 'fastcore/_nbdev.py',\n", + " 'fastcore/test.py',\n", + " 
'images/att_00000.png',\n", + " 'images/att_00001.png',\n", + " 'images/att_00002.png',\n", + " 'nbs/.gitattributes',\n", + " 'nbs/.gitignore',\n", + " 'nbs/.nojekyll',\n", + " 'nbs/CNAME',\n", + " 'nbs/_parallel_win.ipynb',\n", + " 'nbs/_quarto.yml',\n", + " 'nbs/fastcore',\n", + " 'nbs/files/test.txt.bz2',\n", + " 'nbs/images/att_00000.png',\n", + " 'nbs/images/att_00005.png',\n", + " 'nbs/images/att_00006.png',\n", + " 'nbs/images/att_00007.png',\n", + " 'nbs/images/mnist3.png',\n", + " 'nbs/images/puppy.jpg',\n", + " 'nbs/llms-ctx-full.txt',\n", + " 'nbs/llms-ctx.txt',\n", + " 'nbs/llms.txt',\n", + " 'nbs/nbdev.yml',\n", + " 'nbs/parallel_test.py',\n", + " 'nbs/styles.css',\n", + " 'nbs/test_py2pyi.py',\n", + " 'nbs/test_py2pyi.pyi',\n", + " 'pyproject.toml',\n", + " 'settings.ini',\n", + " 'setup.py']" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "list(repo_files.filter(lambda o: o.path not in repo_files_filtered.attrgot('path')).attrgot('path'))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "# Welcome to fastcore\n", + "\n", + "\n", + "\n", + "\n", + "Python is a powerful, dynamic language. Rather than bake everything into\n", + "the language, it lets the programmer customize it to make it work for\n", + "them. `fastcore` uses this flexibility to add to Python features\n", + "inspired by other languages we’ve loved, mixins from Ruby, and currying,\n", + "binding, and more from Haskell. It also adds some “missing features” and\n", + "clean up some rough edges in the Python standard library, such as\n", + "simplifying parallel processing, and bringing ideas from NumPy over to\n", + "Python’s `list` type.\n", + "\n", + "## Getting started\n", + "\n", + "To install fastcore run: `conda install fastcore -c fastai` (if you use\n", + "Anaconda, which we recommend) or `pip install fastcore`. For an\n", + "[editable\n", + "install](https://stackoverflow.com/questions/35064426/when-would-the-e-editable-option-be-useful-with-pip-install),\n", + "clone this repo and run: `pip install -e \".[dev]\"`. fastcore is tested\n", + "to work on Ubuntu, macOS and Windows (versions tested are those shown\n", + "with the `-latest` suffix\n", + "[here](https://docs.github.com/en/actions/reference/specifications-for-github-hosted-runners#supported-runners-and-hardware-resources)).\n", + "\n", + "`fastcore` contains many features, including:\n", + "\n", + "- `fastcore.test`: Simple testing functions\n", + "- `fastcore.foundation`: Mixins, delegation, composition, and more\n", + "- `fastcore.xtras`: Utility functions to help with functional-style\n", + " programming, parallel processing, and more\n", + "\n", + "To get started, we recommend you read through [the fastcore\n", + "tour](https://fastcore.fast.ai/tour.html).\n", + "\n", + "## Contributing\n", + "\n", + "After you clone this repository, please run `nbdev_install_hooks` in\n", + "your terminal. This sets up git hooks, which clean up the notebooks to\n", + "remove the extraneous stuff stored in the notebooks (e.g. 
which cells\n", + "you ran) which causes unnecessary merge conflicts.\n", + "\n", + "To run the tests in parallel, launch `nbdev_test`.\n", + "\n", + "Before submitting a PR, check that the local library and notebooks\n", + "match.\n", + "\n", + "- If you made a change to the notebooks in one of the exported cells,\n", + " you can export it to the library with `nbdev_prepare`.\n", + "- If you made a change to the library, you can export it back to the\n", + " notebooks with `nbdev_update`.\n" + ], + "text/plain": [ + "" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from IPython.display import Markdown\n", + "item = repo_files_filtered[0]\n", + "content = api.repos.get_content(owner, repo, item['path'])\n", + "content['content_decoded'] = base64.b64decode(content.content).decode('utf-8')\n", + "Markdown(content.content_decoded)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's update `get_repo_files` with the filtering mechanism we've implemented above." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "@patch\n", + "def get_repo_files(self:GhApi, owner, repo, branch=\"main\", inc=None, exc=None):\n", + " \"Get all file items of a repo.\"\n", + " tree = self.git.get_tree(owner=owner, repo=repo, tree_sha=branch, recursive=True)\n", + " res = L()\n", + " for item in tree['tree']:\n", + " if item['type'] == 'blob': res.append(item) \n", + " return res.filter(lambda o: _include(o.path,inc,exc))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(#39) ['README.md','fastcore/all.py','fastcore/ansi.py','fastcore/basics.py','fastcore/dispatch.py','fastcore/docments.py','fastcore/docscrape.py','fastcore/foundation.py','fastcore/imghdr.py','fastcore/imports.py','fastcore/meta.py','fastcore/nb_imports.py','fastcore/net.py','fastcore/parallel.py','fastcore/py2pyi.py','fastcore/script.py','fastcore/shutil.py','fastcore/style.py','fastcore/transform.py','fastcore/utils.py'...]" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "repo_files = api.get_repo_files(owner, repo, inc=inc, exc=exc); repo_files.attrgot(\"path\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "@patch\n", + "def get_file_content(self:GhApi, path, owner, repo, branch=\"main\"):\n", + " o = self.repos.get_content(owner, repo, path, ref=branch)\n", + " o['content_decoded'] = base64.b64decode(o.content).decode('utf-8')\n", + " return o" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "https://github.com/AnswerDotAI/fastcore/blob/main/README.md\n", + "# Welcome to fastcore\n", + "\n", + "\n", + "\n", + "\n" + ] + } + ], + "source": [ + "o = api.get_file_content(repo_files[0].path, owner, repo)\n", + "_head = \"\\n\".join(o.content_decoded.split(\"\\n\")[:5])\n", + "print(f\"{o.html_url}\\n{_head}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "https://github.com/AnswerDotAI/fastcore/blob/main/README.md\n", + "# Welcome to fastcore\n", + "\n", + "\n", + "\n", + "\n", + 
"https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/all.py\n", + "from .imports import *\n", + "from .foundation import *\n", + "from .utils import *\n", + "from .parallel import *\n", + "from .net import *\n" + ] + } + ], + "source": [ + "contents = parallel(api.get_file_content, repo_files[:2].attrgot(\"path\"), owner=owner, repo=repo)\n", + "for o in contents:\n", + " _head = \"\\n\".join(o.content_decoded.split(\"\\n\")[:5])\n", + " print(f\"{o.html_url}\\n{_head}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#|export\n", + "@patch\n", + "@delegates(GhApi.get_repo_files)\n", + "def get_repo_contents(self:GhApi, owner, repo, **kwargs):\n", + " repo_files = self.get_repo_files(owner, repo, **kwargs)\n", + " for s in ('inc','exc',): kwargs.pop(s)\n", + " return parallel(self.get_file_content, repo_files.attrgot(\"path\"), owner=owner, repo=repo, **kwargs)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "inc,exc = ['*.md', \"*.py\"],['*/_*.py', '*test*.py', '*/*test*.py', 'setup.py']" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "contents = api.get_repo_contents(owner,repo,branch=\"main\",inc=inc, exc=exc)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "https://github.com/AnswerDotAI/fastcore/blob/main/CHANGELOG.md\n", + "# Release notes\n", + "\n", + "\n", + "\n", + "## 1.8.1\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/CODE_OF_CONDUCT.md\n", + "# Contributor Covenant Code of Conduct\n", + "\n", + "## Our Pledge\n", + "\n", + "In the interest of fostering an open and welcoming environment, we as\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/CONTRIBUTING.md\n", + "# How to contribute\n", + "\n", + "## How to get started\n", + "\n", + "Clone the `fastcore` repository.\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/README.md\n", + "# Welcome to fastcore\n", + "\n", + "\n", + "\n", + "\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/all.py\n", + "from .imports import *\n", + "from .foundation import *\n", + "from .utils import *\n", + "from .parallel import *\n", + "from .net import *\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/ansi.py\n", + "\"Filters for processing ANSI colors.\"\n", + "\n", + "# Copyright (c) IPython Development Team.\n", + "# Modifications by Jeremy Howard.\n", + "\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/basics.py\n", + "\"\"\"Basic functionality used in the fastai library\"\"\"\n", + "\n", + "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/01_basics.ipynb.\n", + "\n", + "# %% auto 0\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/dispatch.py\n", + "def __getattr__(name):\n", + " raise ImportError(\n", + " f\"Could not import '{name}' from fastcore.dispatch - this module has been moved to the fasttransform package.\\n\"\n", + " \"To migrate your code, please see the migration guide at: https://answerdotai.github.io/fasttransform/fastcore_migration_guide.html\"\n", + " )\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/docments.py\n", + "\"\"\"Document parameters using comments.\"\"\"\n", + "\n", + "# AUTOGENERATED! DO NOT EDIT! 
File to edit: ../nbs/04_docments.ipynb.\n", + "\n", + "# %% ../nbs/04_docments.ipynb 2\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/docscrape.py\n", + "\"Parse numpy-style docstrings\"\n", + "\n", + "\"\"\"\n", + "Based on code from numpy, which is:\n", + "Copyright (c) 2005-2022, NumPy Developers.\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/foundation.py\n", + "\"\"\"The `L` class and helpers for it\"\"\"\n", + "\n", + "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/02_foundation.ipynb.\n", + "\n", + "# %% auto 0\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/imghdr.py\n", + "\"\"\"Recognize image file formats based on their first few bytes.\"\"\"\n", + "\n", + "from os import PathLike\n", + "import warnings\n", + "\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/imports.py\n", + "import sys,os,re,typing,itertools,operator,functools,math,warnings,functools,io,enum\n", + "\n", + "from operator import itemgetter,attrgetter\n", + "from warnings import warn\n", + "from typing import Iterable,Generator,Sequence,Iterator,List,Set,Dict,Union,Optional,Tuple\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/meta.py\n", + "\"\"\"Metaclasses\"\"\"\n", + "\n", + "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/05_meta.ipynb.\n", + "\n", + "# %% auto 0\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/nb_imports.py\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "import numbers,tempfile,pickle,random,inspect,shutil\n", + "\n", + "from PIL import Image\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/net.py\n", + "\"\"\"Network, HTTP, and URL functions\"\"\"\n", + "\n", + "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/03b_net.ipynb.\n", + "\n", + "# %% auto 0\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/parallel.py\n", + "\"\"\"Threading and multiprocessing functions\"\"\"\n", + "\n", + "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/03a_parallel.ipynb.\n", + "\n", + "# %% auto 0\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/py2pyi.py\n", + "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/10_py2pyi.ipynb.\n", + "\n", + "# %% auto 0\n", + "__all__ = ['functypes', 'imp_mod', 'has_deco', 'sig2str', 'ast_args', 'create_pyi', 'py2pyi', 'replace_wildcards']\n", + "\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/script.py\n", + "\"\"\"A fast way to turn your python function into a script.\"\"\"\n", + "\n", + "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/06_script.ipynb.\n", + "\n", + "# %% auto 0\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/shutil.py\n", + "from functools import wraps\n", + "import shutil\n", + "\n", + "__all__ = ['copymode', 'copystat', 'copy', 'copy2', 'move', 'copytree', 'rmtree', 'disk_usage', 'chown', 'rmtree']\n", + "\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/style.py\n", + "\"\"\"Fast styling for friendly CLIs.\"\"\"\n", + "\n", + "# AUTOGENERATED! DO NOT EDIT! 
File to edit: ../nbs/08_style.ipynb.\n", + "\n", + "# %% auto 0\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/transform.py\n", + "def __getattr__(name):\n", + " raise ImportError(\n", + " f\"Could not import '{name}' from fastcore.transform - this module has been moved to the fasttransform package.\\n\"\n", + " \"To migrate your code, please see the migration guide at: https://answerdotai.github.io/fasttransform/fastcore_migration_guide.html\"\n", + " )\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/utils.py\n", + "from .imports import *\n", + "from .foundation import *\n", + "from .basics import *\n", + "from .xtras import *\n", + "from .parallel import *\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/xdg.py\n", + "\"\"\"XDG Base Directory Specification helpers.\"\"\"\n", + "\n", + "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/07_xdg.ipynb.\n", + "\n", + "# %% auto 0\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/xml.py\n", + "\"\"\"Concise generation of XML.\"\"\"\n", + "\n", + "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/09_xml.ipynb.\n", + "\n", + "# %% auto 0\n", + "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/xtras.py\n", + "\"\"\"Utility functions used in the fastai library\"\"\"\n", + "\n", + "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/03_xtras.ipynb.\n", + "\n", + "# %% ../nbs/03_xtras.ipynb 1\n" + ] + } + ], + "source": [ + "for o in contents:\n", + " _head = \"\\n\".join(o.content_decoded.split(\"\\n\")[:5])\n", + " print(f\"{o.html_url}\\n{_head}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "contents = api.get_repo_contents(owner,\"ghapi\",branch=\"main\",inc=inc, exc=exc)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "https://github.com/AnswerDotAI/ghapi/blob/main/.github/scripts/build-tweet.py\n", + "import tweetrel\n", + "tweetrel.send_tweet()\n", + "https://github.com/AnswerDotAI/ghapi/blob/main/CHANGELOG.md\n", + "# Release notes\n", + "\n", + "\n", + "\n", + "## 1.0.6\n", + "https://github.com/AnswerDotAI/ghapi/blob/main/CONTRIBUTING.md\n", + "# How to contribute\n", + "\n", + "## How to get started\n", + "\n", + "Before anything else, please install the git hooks that run automatic scripts during each commit and merge to strip the notebooks of superfluous metadata (and avoid merge conflicts). After cloning the repository, run the following command inside it:\n", + "https://github.com/AnswerDotAI/ghapi/blob/main/README.md\n", + "# ghapi\n", + "\n", + "\n", + "\n", + "\n", + "https://github.com/AnswerDotAI/ghapi/blob/main/examples/build.py\n", + "#!/usr/bin/env python\n", + "from ghapi.build_lib import *\n", + "build_funcs()\n", + "\n", + "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/actions.py\n", + "\"\"\"Functionality for helping to create GitHub Actions workflows in Python\"\"\"\n", + "\n", + "# AUTOGENERATED! DO NOT EDIT! File to edit: ../01_actions.ipynb.\n", + "\n", + "# %% auto 0\n", + "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/all.py\n", + "from .core import *\n", + "from .actions import *\n", + "from .auth import *\n", + "from .page import *\n", + "from .event import *\n", + "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/auth.py\n", + "\"\"\"Helpers for creating GitHub API tokens\"\"\"\n", + "\n", + "# AUTOGENERATED! DO NOT EDIT! 
File to edit: ../02_auth.ipynb.\n", + "\n", + "# %% auto 0\n", + "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/build_lib.py\n", + "# AUTOGENERATED! DO NOT EDIT! File to edit: ../90_build_lib.ipynb.\n", + "\n", + "# %% auto 0\n", + "__all__ = ['GH_OPENAPI_URL', 'GhMeta', 'build_funcs']\n", + "\n", + "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/cli.py\n", + "\"\"\"Access to the GitHub API from the command line\"\"\"\n", + "\n", + "# AUTOGENERATED! DO NOT EDIT! File to edit: ../10_cli.ipynb.\n", + "\n", + "# %% auto 0\n", + "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/core.py\n", + "\"\"\"Detailed information on the GhApi API\"\"\"\n", + "\n", + "# AUTOGENERATED! DO NOT EDIT! File to edit: ../00_core.ipynb.\n", + "\n", + "# %% auto 0\n", + "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/event.py\n", + "\"\"\"Helpers for getting GitHub API events\"\"\"\n", + "\n", + "# AUTOGENERATED! DO NOT EDIT! File to edit: ../04_event.ipynb.\n", + "\n", + "# %% auto 0\n", + "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/metadata.py\n", + "funcs = [('/', 'get', 'meta/root', 'GitHub API Root', 'rest/meta/meta#github-api-root', [], [], ''),\n", + " ('/advisories',\n", + " 'get',\n", + " 'security-advisories/list-global-advisories',\n", + " 'List global security advisories',\n", + "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/page.py\n", + "\"\"\"Parallel and serial pagination\"\"\"\n", + "\n", + "# AUTOGENERATED! DO NOT EDIT! File to edit: ../03_page.ipynb.\n", + "\n", + "# %% auto 0\n", + "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/templates.py\n", + "wf_tmpl = \"\"\"name: $NAME\n", + "on:\n", + " workflow_dispatch:\n", + "$EVENT\n", + "defaults:\n" + ] + } + ], + "source": [ + "for o in contents:\n", + " _head = \"\\n\".join(o.content_decoded.split(\"\\n\")[:5])\n", + " print(f\"{o.html_url}\\n{_head}\")" + ] + }, { "cell_type": "markdown", "id": "85670522", @@ -1932,9 +2813,9 @@ ], "metadata": { "kernelspec": { - "display_name": "python3", + "display_name": "python", "language": "python", - "name": "python3" + "name": "python" } }, "nbformat": 4, diff --git a/ghapi/_modidx.py b/ghapi/_modidx.py index 25415b9..35f72b7 100644 --- a/ghapi/_modidx.py +++ b/ghapi/_modidx.py @@ -64,6 +64,9 @@ 'ghapi.core.GhApi.full_docs': ('core.html#ghapi.full_docs', 'ghapi/core.py'), 'ghapi.core.GhApi.get_branch': ('core.html#ghapi.get_branch', 'ghapi/core.py'), 'ghapi.core.GhApi.get_content': ('core.html#ghapi.get_content', 'ghapi/core.py'), + 'ghapi.core.GhApi.get_file_content': ('core.html#ghapi.get_file_content', 'ghapi/core.py'), + 'ghapi.core.GhApi.get_repo_contents': ('core.html#ghapi.get_repo_contents', 'ghapi/core.py'), + 'ghapi.core.GhApi.get_repo_files': ('core.html#ghapi.get_repo_files', 'ghapi/core.py'), 'ghapi.core.GhApi.list_branches': ('core.html#ghapi.list_branches', 'ghapi/core.py'), 'ghapi.core.GhApi.list_files': ('core.html#ghapi.list_files', 'ghapi/core.py'), 'ghapi.core.GhApi.list_tags': ('core.html#ghapi.list_tags', 'ghapi/core.py'), @@ -82,6 +85,8 @@ 'ghapi.core._GhVerbGroup.__str__': ('core.html#_ghverbgroup.__str__', 'ghapi/core.py'), 'ghapi.core._GhVerbGroup._repr_markdown_': ('core.html#_ghverbgroup._repr_markdown_', 'ghapi/core.py'), 'ghapi.core._decode_response': ('core.html#_decode_response', 'ghapi/core.py'), + 'ghapi.core._find_matches': ('core.html#_find_matches', 'ghapi/core.py'), + 'ghapi.core._include': ('core.html#_include', 'ghapi/core.py'), 'ghapi.core._mk_param': ('core.html#_mk_param', 'ghapi/core.py'), 'ghapi.core._mk_sig': 
('core.html#_mk_sig', 'ghapi/core.py'), 'ghapi.core._mk_sig_detls': ('core.html#_mk_sig_detls', 'ghapi/core.py'), diff --git a/ghapi/actions.py b/ghapi/actions.py index 3d0da51..fbf1061 100644 --- a/ghapi/actions.py +++ b/ghapi/actions.py @@ -9,7 +9,7 @@ 'context_github', 'context_env', 'context_job', 'context_steps', 'context_runner', 'context_secrets', 'context_strategy', 'context_matrix', 'context_needs'] -# %% ../01_actions.ipynb #0f28e91f +# %% ../01_actions.ipynb #e4187aeb from fastcore.all import * from .core import * from .templates import * @@ -18,7 +18,7 @@ from contextlib import contextmanager from enum import Enum -# %% ../01_actions.ipynb #f1d8838a +# %% ../01_actions.ipynb #fb69ba90 # So we can run this outside of GitHub actions too, read from file if needed for a,b in (('CONTEXT_GITHUB',context_example), ('CONTEXT_NEEDS',needs_example), ('GITHUB_REPOSITORY','octocat/Hello-World')): if a not in os.environ: os.environ[a] = b @@ -27,18 +27,18 @@ for context in contexts: globals()[f'context_{context}'] = dict2obj(loads(os.getenv(f"CONTEXT_{context.upper()}", "{}"))) -# %% ../01_actions.ipynb #e842a54d +# %% ../01_actions.ipynb #393990e1 _all_ = ['context_github', 'context_env', 'context_job', 'context_steps', 'context_runner', 'context_secrets', 'context_strategy', 'context_matrix', 'context_needs'] -# %% ../01_actions.ipynb #1f119280 +# %% ../01_actions.ipynb #02ca0806 env_github = dict2obj({k[7:].lower():v for k,v in os.environ.items() if k.startswith('GITHUB_')}) -# %% ../01_actions.ipynb #83cc44c3 +# %% ../01_actions.ipynb #3170b688 def user_repo(): "List of `user,repo` from `env_github.repository" return env_github.repository.split('/') -# %% ../01_actions.ipynb #baba5eb9 +# %% ../01_actions.ipynb #deb389d7 Event = str_enum('Event', 'page_build','content_reference','repository_import','create','workflow_run','delete','organization','sponsorship', 'project_column','push','context','milestone','project_card','project','package','pull_request','repository_dispatch', @@ -48,14 +48,14 @@ def user_repo(): 'installation','release','issues','repository','gollum','membership','deployment','deploy_key','issue_comment','ping', 'deployment_status','fork','schedule') -# %% ../01_actions.ipynb #e42ac247 +# %% ../01_actions.ipynb #bf0b5888 def _create_file(path:Path, fname:str, contents): if contents and not (path/fname).exists(): (path/fname).write_text(contents) def _replace(s:str, find, repl, i:int=0, suf:str=''): return s.replace(find, textwrap.indent(repl, ' '*i)+suf) -# %% ../01_actions.ipynb #db6f1964 +# %% ../01_actions.ipynb #26eb64e5 def create_workflow_files(fname:str, workflow:str, build_script:str, prebuild:bool=False): "Create workflow and script files in suitable places in `github` folder" if not os.path.exists('.git'): return print('This does not appear to be the root of a git repo') @@ -67,7 +67,7 @@ def create_workflow_files(fname:str, workflow:str, build_script:str, prebuild:bo _create_file(scr_path, f'build-{fname}.py', build_script) if prebuild: _create_file(scr_path, f'prebuild-{fname}.py', build_script) -# %% ../01_actions.ipynb #f8d6bf37 +# %% ../01_actions.ipynb #3cf450a0 def fill_workflow_templates(name:str, event, run, context, script, opersys='ubuntu', prebuild=False): "Function to create a simple Ubuntu workflow that calls a Python `ghapi` script" c = wf_tmpl @@ -78,23 +78,23 @@ def fill_workflow_templates(name:str, event, run, context, script, opersys='ubun c = _replace(c, f'${find}', str(repl), i) create_workflow_files(name, c, script, prebuild=prebuild) -# 
%% ../01_actions.ipynb #3bfae412 +# %% ../01_actions.ipynb #bc2f80a5 def env_contexts(contexts): "Create a suitable `env:` line for a workflow to make a context available in the environment" contexts = uniqueify(['github'] + listify(contexts)) return "\n".join("CONTEXT_" + o.upper() + ": ${{ toJson(" + o.lower() + ") }}" for o in contexts) -# %% ../01_actions.ipynb #1068e627 +# %% ../01_actions.ipynb #561125fa def_pipinst = 'pip install -Uq ghapi' -# %% ../01_actions.ipynb #dd605316 +# %% ../01_actions.ipynb #39c5d429 def create_workflow(name:str, event:Event, contexts:list=None, opersys='ubuntu', prebuild=False): "Function to create a simple Ubuntu workflow that calls a Python `ghapi` script" script = "from fastcore.all import *\nfrom ghapi import *" fill_workflow_templates(name, f'{event}:', def_pipinst, env_contexts(contexts), script=script, opersys=opersys, prebuild=prebuild) -# %% ../01_actions.ipynb #b5e64efa +# %% ../01_actions.ipynb #e8482286 @call_parse def gh_create_workflow( name:str, # Name of the workflow file @@ -104,40 +104,40 @@ def gh_create_workflow( "Supports `gh-create-workflow`, a CLI wrapper for `create_workflow`." create_workflow(name, Event[event], contexts.split()) -# %% ../01_actions.ipynb #c69fe43a +# %% ../01_actions.ipynb #a85ea1f8 _example_url = 'https://raw.githubusercontent.com/fastai/ghapi/master/examples/{}.json' -# %% ../01_actions.ipynb #ad31da47 +# %% ../01_actions.ipynb #47cfc8b9 def example_payload(event): "Get an example of a JSON payload for `event`" return dict2obj(urljson(_example_url.format(event))) -# %% ../01_actions.ipynb #9ffb1dce +# %% ../01_actions.ipynb #778d7cc7 def github_token(): "Get GitHub token from `GITHUB_TOKEN` env var if available, or from `github` context" return os.getenv('GITHUB_TOKEN', context_github.get('token', None)) -# %% ../01_actions.ipynb #20832f3c +# %% ../01_actions.ipynb #88716812 def actions_output(name, value): "Print the special GitHub Actions `::set-output` line for `name::value`" print(f"::set-output name={name}::{value}") -# %% ../01_actions.ipynb #497020d4 +# %% ../01_actions.ipynb #50cad4c2 def actions_debug(message): "Print the special `::debug` line for `message`" print(f"::debug::{message}") -# %% ../01_actions.ipynb #3e75ffe1 +# %% ../01_actions.ipynb #5b6d15ab def actions_warn(message, details=''): "Print the special `::warning` line for `message`" print(f"::warning {details}::{message}") -# %% ../01_actions.ipynb #886ede87 +# %% ../01_actions.ipynb #e97ba982 def actions_error(message, details=''): "Print the special `::error` line for `message`" print(f"::error {details}::{message}") -# %% ../01_actions.ipynb #da04d233 +# %% ../01_actions.ipynb #a7ff989d @contextmanager def actions_group(title): "Context manager to print the special `::group`/`::endgroup` lines for `title`" @@ -145,12 +145,12 @@ def actions_group(title): yield print(f"::endgroup::") -# %% ../01_actions.ipynb #95fb29a7 +# %% ../01_actions.ipynb #3405cb7f def actions_mask(value): "Print the special `::add-mask` line for `value`" print(f"::add-mask::{value}") -# %% ../01_actions.ipynb #a1adceae +# %% ../01_actions.ipynb #8682ff46 def set_git_user(api=None): "Set git user name/email to authenticated user (if `api`) or GitHub Actions bot (otherwise)" if api: diff --git a/ghapi/auth.py b/ghapi/auth.py index 5109e8b..9b3238f 100644 --- a/ghapi/auth.py +++ b/ghapi/auth.py @@ -5,14 +5,14 @@ # %% auto #0 __all__ = ['Scope', 'scope_str', 'GhDeviceAuth', 'github_auth_device'] -# %% ../02_auth.ipynb #10a8702d +# %% ../02_auth.ipynb #e1ccb974 from 
fastcore.all import * from .core import * import webbrowser,time from urllib.parse import parse_qs,urlsplit -# %% ../02_auth.ipynb #6b060627 +# %% ../02_auth.ipynb #46620407 _scopes =( 'repo','repo:status','repo_deployment','public_repo','repo:invite','security_events','admin:repo_hook','write:repo_hook', 'read:repo_hook','admin:org','write:org','read:org','admin:public_key','write:public_key','read:public_key','admin:org_hook', @@ -20,18 +20,18 @@ 'write:packages','read:packages','delete:packages','admin:gpg_key','write:gpg_key','read:gpg_key','workflow' ) -# %% ../02_auth.ipynb #0870167c +# %% ../02_auth.ipynb #0cbc503b Scope = AttrDict({o.replace(':','_'):o for o in _scopes}) -# %% ../02_auth.ipynb #587e8748 +# %% ../02_auth.ipynb #404b7d4f def scope_str(*scopes)->str: "Convert `scopes` into a comma-separated string" return ','.join(str(o) for o in scopes if o) -# %% ../02_auth.ipynb #f0acfe01 +# %% ../02_auth.ipynb #7b224a0a _def_clientid = '71604a89b882ab8c8634' -# %% ../02_auth.ipynb #42b9eb35 +# %% ../02_auth.ipynb #2a614e72 class GhDeviceAuth(GetAttrBase): "Get an oauth token using the GitHub API device flow" _attr="params" @@ -42,20 +42,20 @@ def __init__(self, client_id=_def_clientid, *scopes): def _getattr(self,v): return v[0] -# %% ../02_auth.ipynb #ee32c6f1 +# %% ../02_auth.ipynb #d3764621 @patch def url_docs(self:GhDeviceAuth)->str: "Default instructions on how to authenticate" return f"""First copy your one-time code: {self.user_code} Then visit {self.verification_uri} in your browser, and paste the code when prompted.""" -# %% ../02_auth.ipynb #dcbc8387 +# %% ../02_auth.ipynb #255b299c @patch def open_browser(self:GhDeviceAuth): "Open a web browser with the verification URL" webbrowser.open(self.verification_uri) -# %% ../02_auth.ipynb #edaddf67 +# %% ../02_auth.ipynb #5091b2c8 @patch def auth(self:GhDeviceAuth)->str: "Return token if authentication complete, or `None` otherwise" @@ -68,7 +68,7 @@ def auth(self:GhDeviceAuth)->str: if err: raise Exception(resp['error_description'][0]) return resp['access_token'][0] -# %% ../02_auth.ipynb #e114ebb3 +# %% ../02_auth.ipynb #a7d55794 @patch def wait(self:GhDeviceAuth, cb:callable=None, n_polls=9999)->str: "Wait up to `n_polls` times for authentication to complete, calling `cb` after each poll, if passed" @@ -80,7 +80,7 @@ def wait(self:GhDeviceAuth, cb:callable=None, n_polls=9999)->str: if cb: cb() time.sleep(interval) -# %% ../02_auth.ipynb #fd873541 +# %% ../02_auth.ipynb #e329a3d2 def github_auth_device(wb='', n_polls=9999): "Authenticate with GitHub, polling up to `n_polls` times to wait for completion" auth = GhDeviceAuth() diff --git a/ghapi/build_lib.py b/ghapi/build_lib.py index 5b06791..e4d5714 100644 --- a/ghapi/build_lib.py +++ b/ghapi/build_lib.py @@ -3,7 +3,7 @@ # %% auto #0 __all__ = ['GH_OPENAPI_URL', 'GhMeta', 'build_funcs'] -# %% ../90_build_lib.ipynb #73ed725e +# %% ../90_build_lib.ipynb #cdbcf258 from fastcore.all import * import pprint @@ -11,11 +11,11 @@ from jsonref import loads from collections import namedtuple -# %% ../90_build_lib.ipynb #c322ea0b +# %% ../90_build_lib.ipynb #c215ed8d GH_OPENAPI_URL = 'https://github.com/github/rest-api-description/raw/main/descriptions/api.github.com/api.github.com.json?raw=true' _DOC_URL = 'https://docs.github.com/' -# %% ../90_build_lib.ipynb #a8fd97f6 +# %% ../90_build_lib.ipynb #ce33036d _lu_type = dict(zip( 'NA string object array boolean number integer'.split(), map(PrettyString,'object str dict list bool int int'.split()) @@ -34,7 +34,7 @@ def _find_data(d): if 
'properties' in o: return o['properties'] return {} -# %% ../90_build_lib.ipynb #7b211864 +# %% ../90_build_lib.ipynb #cf7204d9 def build_funcs(nm='ghapi/metadata.py', url=GH_OPENAPI_URL, docurl=_DOC_URL): "Build module metadata.py from an Open API spec and optionally filter by a path `pre`" def _get_detls(o): @@ -53,5 +53,5 @@ def _get_detls(o): if 'externalDocs' in detls] Path(nm).write_text("funcs = " + pprint.pformat(_funcs, width=360)) -# %% ../90_build_lib.ipynb #495861d0 +# %% ../90_build_lib.ipynb #9d2307fc GhMeta = namedtuple('GhMeta', 'path verb oper_id summary doc_url params data preview'.split()) diff --git a/ghapi/cli.py b/ghapi/cli.py index 13b3563..ea18a8a 100644 --- a/ghapi/cli.py +++ b/ghapi/cli.py @@ -5,13 +5,13 @@ # %% auto #0 __all__ = ['ghapi', 'ghpath', 'ghraw', 'completion_ghapi'] -# %% ../10_cli.ipynb #6adcc120 +# %% ../10_cli.ipynb #e10a0628 from fastcore.all import * import ghapi.core as gh,inspect from .core import * from collections import defaultdict -# %% ../10_cli.ipynb #295d196c +# %% ../10_cli.ipynb #44ebb54f def _parse_args(a): "Extract positional and keyword arguments from `a`=`sys.argv`" pos,kw = [],{} @@ -43,34 +43,34 @@ def _call_api(f): call = f(pos, api) return call if kw.get('help', None) else call(*pos, **kw) -# %% ../10_cli.ipynb #1f1a4f1a +# %% ../10_cli.ipynb #55f9002f def _ghapi(arg, api): for part in arg.pop(0).split('.'): api = getattr(api,part) return api -# %% ../10_cli.ipynb #1f166d73 +# %% ../10_cli.ipynb #7557c309 def ghapi(): "Python backend for the `ghapi` command, which calls an endpoint by operation name" res = _call_api(_ghapi) if isinstance(res, (gh._GhObj,dict,L)): print(res) elif res: print(inspect.signature(res)) -# %% ../10_cli.ipynb #aa44c461 +# %% ../10_cli.ipynb #524623ba def _ghpath(arg, api): return api[arg.pop(0),arg.pop(0)] -# %% ../10_cli.ipynb #e6465efb +# %% ../10_cli.ipynb #723624b0 def ghpath(): "Python backend for the `ghpath` command, which calls an endpoint by path" print(_call_api(_ghpath) or '') -# %% ../10_cli.ipynb #fc26e8f5 +# %% ../10_cli.ipynb #93dbad3e def ghraw(): "Python backend for the `ghraw` command, which calls a fully-specified endpoint" cmd,api,pos,kw = _api() if not pos: return print(f"Usage: `{cmd}` operation ") print(api(*pos, **kw)) -# %% ../10_cli.ipynb #bf4322ee +# %% ../10_cli.ipynb #21033494 _TAB_COMPLETION=""" _do_ghapi_completions() { @@ -81,7 +81,7 @@ def ghraw(): complete -F _do_ghapi_completions ghapi """ -# %% ../10_cli.ipynb #007b595b +# %% ../10_cli.ipynb #9fe362ab def completion_ghapi(): "Python backend for `completion-ghapi` command" if len(sys.argv) == 2 and sys.argv[1] == '--install': diff --git a/ghapi/core.py b/ghapi/core.py index 6c0af50..29511ed 100644 --- a/ghapi/core.py +++ b/ghapi/core.py @@ -5,7 +5,7 @@ # %% auto #0 __all__ = ['GH_HOST', 'img_md_pat', 'EMPTY_TREE_SHA', 'print_summary', 'GhApi', 'date2gh', 'gh2date'] -# %% ../00_core.ipynb #b348803b +# %% ../00_core.ipynb #5b5cba7b from fastcore.all import * from .metadata import funcs @@ -17,13 +17,13 @@ from datetime import datetime, timedelta, timezone from pprint import pprint from time import sleep -import os, shutil, tempfile, subprocess +import os, shutil, tempfile, subprocess, fnmatch -# %% ../00_core.ipynb #a554c9a9 +# %% ../00_core.ipynb #8d2b1a54 GH_HOST = os.getenv('GH_HOST', "https://api.github.com") _DOC_URL = 'https://docs.github.com/' -# %% ../00_core.ipynb #5e259541 +# %% ../00_core.ipynb #ba730c65 def _preview_hdr(preview): return {'Accept': f'application/vnd.github.{preview}-preview+json'} if preview 
else {} def _mk_param(nm, **kwargs): return Parameter(nm, kind=Parameter.POSITIONAL_OR_KEYWORD, **kwargs) @@ -50,7 +50,7 @@ def _decode_response(path: str) -> bool: class _GhObj: pass -# %% ../00_core.ipynb #bdaa0c16 +# %% ../00_core.ipynb #d4a51c1a class _GhVerb(_GhObj): __slots__ = 'path,verb,tag,name,summary,url,route_ps,params,data,preview,client,decode,__doc__'.split(',') def __init__(self, path, verb, oper, summary, url, params, data, preview, client, kwargs): @@ -85,7 +85,7 @@ def _repr_markdown_(self): return f'[{self.tag}.{self.name}]({self.doc_url})({params}): *{self.summary}*' __repr__ = _repr_markdown_ -# %% ../00_core.ipynb #a982aba6 +# %% ../00_core.ipynb #dd7e6b61 class _GhVerbGroup(_GhObj): def __init__(self, name, verbs): self.name,self.verbs = name,verbs @@ -93,15 +93,15 @@ def __init__(self, name, verbs): def __str__(self): return "\n".join(str(v) for v in self.verbs) def _repr_markdown_(self): return "\n".join(f'- {v._repr_markdown_()}' for v in self.verbs) -# %% ../00_core.ipynb #93b6462f +# %% ../00_core.ipynb #531aee7d _docroot = 'https://docs.github.com/rest/reference/' -# %% ../00_core.ipynb #9f82bfae +# %% ../00_core.ipynb #f361159b def print_summary(req:Request): "Print `Request.summary` with the token (if any) removed" pprint(req.summary('Authorization')) -# %% ../00_core.ipynb #fa83108d +# %% ../00_core.ipynb #83e8a9ce class GhApi(_GhObj): def __init__(self, owner=None, repo=None, token=None, jwt_token=None, debug=None, limit_cb=None, gh_host=None, authenticate=True, **kwargs): @@ -152,17 +152,17 @@ def __getitem__(self, k): def full_docs(self): return '\n'.join(f'## {gn}\n\n{group._repr_markdown_()}\n' for gn,group in sorted(self.groups.items())) -# %% ../00_core.ipynb #d29c23e5 +# %% ../00_core.ipynb #05cbdf91 def date2gh(dt:datetime)->str: "Convert `dt` (which is assumed to be in UTC time zone) to a format suitable for GitHub API operations" return f'{dt.replace(microsecond=0).isoformat()}Z' -# %% ../00_core.ipynb #477abb7e +# %% ../00_core.ipynb #3f4c8b27 def gh2date(dtstr:str)->datetime: "Convert date string `dtstr` received from a GitHub API operation to a UTC `datetime`" return datetime.fromisoformat(dtstr.replace('Z', '')) -# %% ../00_core.ipynb #edf7e114 +# %% ../00_core.ipynb #16068542 img_md_pat = re.compile(r'!\[(?P.*?)\]\((?P[^\s]+)\)') def _run_subp(cmd): @@ -187,14 +187,14 @@ def create_gist(self:GhApi, description, content, filename='gist.txt', public=Fa content = img_md_pat.sub(lambda m: f"![{m['alt']}]({img_urls.get(m['url'], m['url'])})", content) return self.gists.update(gist.id, files={filename:{'content':content}}) -# %% ../00_core.ipynb #6c761af7 +# %% ../00_core.ipynb #4b7a278c @patch def delete_release(self:GhApi, release): "Delete a release and its associated tag" self.repos.delete_release(release.id) self.git.delete_ref(f'tags/{release.tag_name}') -# %% ../00_core.ipynb #334dc8bb +# %% ../00_core.ipynb #b2bf7e22 @patch def upload_file(self:GhApi, rel, fn): "Upload `fn` to endpoint for release `rel`" @@ -203,7 +203,7 @@ def upload_file(self:GhApi, rel, fn): mime = mimetypes.guess_type(fn, False)[0] or 'application/octet-stream' return self(url, 'POST', headers={'Content-Type':mime}, query = {'name':fn.name}, data=fn.read_bytes()) -# %% ../00_core.ipynb #bfb8aa77 +# %% ../00_core.ipynb #3cad71a4 @patch def create_release(self:GhApi, tag_name, branch='master', name=None, body='', draft=False, prerelease=False, files=None): @@ -214,23 +214,23 @@ def create_release(self:GhApi, tag_name, branch='master', name=None, body='', for file in 
listify(files): self.upload_file(rel, file) return rel -# %% ../00_core.ipynb #79dcc965 +# %% ../00_core.ipynb #2be73ae0 @patch def list_tags(self:GhApi, prefix:str=''): "List all tags, optionally filtered to those starting with `prefix`" return self.git.list_matching_refs(f'tags/{prefix}') -# %% ../00_core.ipynb #aa90ceca +# %% ../00_core.ipynb #303eeec6 @patch def list_branches(self:GhApi, prefix:str=''): "List all branches, optionally filtered to those starting with `prefix`" return self.git.list_matching_refs(f'heads/{prefix}') -# %% ../00_core.ipynb #1ae843b1 +# %% ../00_core.ipynb #eb85edd7 # See https://stackoverflow.com/questions/9765453 EMPTY_TREE_SHA = '4b825dc642cb6eb9a060e54bf8d69288fbee4904' -# %% ../00_core.ipynb #ee36e239 +# %% ../00_core.ipynb #ba6ab941 @patch def create_branch_empty(self:GhApi, branch): t = self.git.create_tree(base_tree=EMPTY_TREE_SHA, tree = [dict( @@ -238,38 +238,38 @@ def create_branch_empty(self:GhApi, branch): c = self.git.create_commit(f'create {branch}', t.sha) return self.git.create_ref(f'refs/heads/{branch}', c.sha) -# %% ../00_core.ipynb #0e8ff05c +# %% ../00_core.ipynb #68b150fc @patch def delete_tag(self:GhApi, tag:str): "Delete a tag" return self.git.delete_ref(f'tags/{tag}') -# %% ../00_core.ipynb #68d697f9 +# %% ../00_core.ipynb #75c168a1 @patch def delete_branch(self:GhApi, branch:str): "Delete a branch" return self.git.delete_ref(f'heads/{branch}') -# %% ../00_core.ipynb #d9c1e003 +# %% ../00_core.ipynb #96053795 @patch def get_branch(self:GhApi, branch=None): branch = branch or self.repos.get().default_branch return self.list_branches(branch)[0] -# %% ../00_core.ipynb #abbaf9f1 +# %% ../00_core.ipynb #93b24881 @patch def list_files(self:GhApi, branch=None): ref = self.get_branch(branch) res = self.git.get_tree(ref.object.sha).tree return {o.path:o for o in res} -# %% ../00_core.ipynb #76914452 +# %% ../00_core.ipynb #ffc347b2 @patch def get_content(self:GhApi, path): res = self.repos.get_content(path) return base64.b64decode(res.content) -# %% ../00_core.ipynb #a323a872 +# %% ../00_core.ipynb #9582ee1f @patch def create_or_update_file(self:GhApi, path, message, committer, author, content=None, sha=None, branch=''): if not branch: branch = api.repos.get()['default_branch'] @@ -279,13 +279,13 @@ def create_or_update_file(self:GhApi, path, message, committer, author, content= return self.repos.create_or_update_file_contents(path, message, content=content, branch=branch, committer=committer or {}, author=author or {}, **kwargs) -# %% ../00_core.ipynb #43950328 +# %% ../00_core.ipynb #5044445d @patch def create_file(self:GhApi, path, message, committer, author, content=None, branch=None): if not branch: branch = api.repos.get()['default_branch'] return self.create_or_update_file(path, message, branch=branch, committer=committer, content=content, author=author) -# %% ../00_core.ipynb #a31c90ae +# %% ../00_core.ipynb #4e74c337 @patch def delete_file(self:GhApi, path, message, committer, author, sha=None, branch=None): if not branch: branch = api.repos.get()['default_branch'] @@ -293,14 +293,61 @@ def delete_file(self:GhApi, path, message, committer, author, sha=None, branch=N return self.repos.delete_file(path, message=message, sha=sha, branch=branch, committer=committer, author=author) -# %% ../00_core.ipynb #4b42b350 +# %% ../00_core.ipynb #93f6b559 @patch def update_contents(self:GhApi, path, message, committer, author, content, sha=None, branch=None): if not branch: branch = api.repos.get()['default_branch'] if sha is None: sha = 
self.list_files()[path].sha return self.create_or_update_file(path, message, committer=committer, author=author, content=content, sha=sha, branch=branch) -# %% ../00_core.ipynb #e95416b0 +# %% ../00_core.ipynb #1815bdef +def _find_matches(path, pats): + "Returns matched patterns" + matches = [] + for p in listify(pats): + if fnmatch.fnmatch(path,p): matches.append(p) + return matches + +# %% ../00_core.ipynb #b6bbe221 +def _include(path, include, exclude): + "Prioritize non-star matches, if both include and exclude star expr then pick longer." + include_matches = ["*"] if include is None else _find_matches(path, include) + exclude_matches = [] if exclude is None else _find_matches(path, exclude) + if include_matches and exclude_matches: + include_star = [m for m in include_matches if "*" in m] + exclude_star = [m for m in exclude_matches if "*" in m] + if include_star and exclude_star: return len(include_star) > len(exclude_star) + if include_star: return False + if exclude_star: return True + if include_matches: return True + if exclude_matches: return False + +# %% ../00_core.ipynb #7016b664 +@patch +def get_repo_files(self:GhApi, owner, repo, branch="main", inc=None, exc=None): + "Get all file items of a repo." + tree = self.git.get_tree(owner=owner, repo=repo, tree_sha=branch, recursive=True) + res = L() + for item in tree['tree']: + if item['type'] == 'blob': res.append(item) + return res.filter(lambda o: _include(o.path,inc,exc)) + +# %% ../00_core.ipynb #860e5ad8 +@patch +def get_file_content(self:GhApi, path, owner, repo, branch="main"): + o = self.repos.get_content(owner, repo, path, ref=branch) + o['content_decoded'] = base64.b64decode(o.content).decode('utf-8') + return o + +# %% ../00_core.ipynb #1255603a +@patch +@delegates(GhApi.get_repo_files) +def get_repo_contents(self:GhApi, owner, repo, **kwargs): + repo_files = self.get_repo_files(owner, repo, **kwargs) + for s in ('inc','exc',): kwargs.pop(s) + return parallel(self.get_file_content, repo_files.attrgot("path"), owner=owner, repo=repo, **kwargs) + +# %% ../00_core.ipynb #ac4ab4e0 @patch def enable_pages(self:GhApi, branch=None, path="/"): "Enable or update pages for a repo to point to a `branch` and `path`." 
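# --- Illustrative sketch (not from the patch): the include/exclude precedence that the
# new ghapi/core.py helpers above implement, rewritten with only the standard library's
# fnmatch so it can be run without a GitHub token. The names `_matches` and `include_path`
# are placeholders for this sketch; the real helpers are `_find_matches` and `_include`,
# which additionally use fastcore's `listify`.
import fnmatch

def _matches(path, pats):
    "Patterns in `pats` that match `path` using Unix shell-style wildcards."
    return [p for p in (pats or []) if fnmatch.fnmatch(path, p)]

def include_path(path, include=None, exclude=None):
    "Mirror of `_include`: literal matches beat wildcard matches; `include=None` means include everything."
    inc = ["*"] if include is None else _matches(path, include)
    exc = [] if exclude is None else _matches(path, exclude)
    if inc and exc:
        inc_star = [m for m in inc if "*" in m]
        exc_star = [m for m in exc if "*" in m]
        # Both sides matched: a wildcard-only side loses to a literal match on the other side;
        # if both sides matched via wildcards, the side with more matching wildcard patterns wins.
        if inc_star and exc_star: return len(inc_star) > len(exc_star)
        if inc_star: return False
        if exc_star: return True
    if inc: return True
    if exc: return False

# Same behaviour as the notebook's asserts: keep README.md while excluding other *.md files,
# and drop test files even though *.py is included.
assert include_path('README.md', ['README.md'], ['*.md'])
assert not include_path('CONTRIBUTING.md', ['README.md'], ['*.md'])
assert include_path('ghapi/core.py', ['*.py'], ['setup.py'])
assert not include_path('nbs/test_core.py', ['*.py'], ['*test_*'])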
diff --git a/ghapi/event.py b/ghapi/event.py index 1e19639..5938a4d 100644 --- a/ghapi/event.py +++ b/ghapi/event.py @@ -15,7 +15,7 @@ 'ReleaseEvent', 'IssuesEvent', 'RepositoryEvent', 'GollumEvent', 'MembershipEvent', 'DeploymentEvent', 'DeployKeyEvent', 'IssueCommentEvent', 'PingEvent', 'DeploymentStatusEvent', 'ForkEvent', 'ScheduleEvent'] -# %% ../04_event.ipynb #32f0587f +# %% ../04_event.ipynb #637bba9f from fastcore.all import * from .core import * from .page import * @@ -24,7 +24,7 @@ import time,json,gzip from itertools import islice -# %% ../04_event.ipynb #83b9f3f0 +# %% ../04_event.ipynb #a19bf236 def _list_events(g, username=None, org=None, owner=None, repo=None): if (username or org or owner) and \ not (bool(username) ^ bool(org) ^ bool(owner)): raise Exception('Can not pass more than one of username, org, and owner') @@ -34,12 +34,12 @@ def _list_events(g, username=None, org=None, owner=None, repo=None): if username: return g.list_public_events_for_user,{'username':username} return g.list_public_events,{} -# %% ../04_event.ipynb #49b5c97b +# %% ../04_event.ipynb #566f4527 def _id2int(x): x.id = int(x.id) return x -# %% ../04_event.ipynb #2191a4b9 +# %% ../04_event.ipynb #c95e27c2 @patch @delegates(_list_events) def list_events(self:GhApi, per_page=30, page=1, **kwargs): @@ -47,7 +47,7 @@ def list_events(self:GhApi, per_page=30, page=1, **kwargs): oper,kw = _list_events(self.activity, **kwargs) return oper(per_page=per_page, page=page, **kw).map(_id2int) -# %% ../04_event.ipynb #e28af8ef +# %% ../04_event.ipynb #c7f44fc6 @patch @delegates(_list_events) def list_events_parallel(self:GhApi, per_page=30, n_pages=8, **kwargs): @@ -55,21 +55,21 @@ def list_events_parallel(self:GhApi, per_page=30, n_pages=8, **kwargs): oper,kw = _list_events(self.activity, **kwargs) return pages(oper, n_pages, per_page=per_page, **kw).concat().map(_id2int) -# %% ../04_event.ipynb #9b16d30b +# %% ../04_event.ipynb #129d3fa2 _bot_re = re.compile('b[o0]t') def _want_evt(o, types, incl_bot): if not incl_bot and _bot_re.search(nested_attr(o, 'actor.login') or ''): return False if types and o.type not in types: return False return True -# %% ../04_event.ipynb #cf0fb517 +# %% ../04_event.ipynb #e84331e2 class GhEvent(AttrDict): "Class for events returned from `fetch_events" pass for o in Event: exec(f'class {snake2camel(o)}Event(GhEvent):pass') -# %% ../04_event.ipynb #401046d5 +# %% ../04_event.ipynb #8232d504 _all_ = [ 'PageBuildEvent', 'ContentReferenceEvent', 'RepositoryImportEvent', 'CreateEvent', 'WorkflowRunEvent', 'DeleteEvent', 'OrganizationEvent', 'SponsorshipEvent', 'ProjectColumnEvent', 'PushEvent', 'ContextEvent', 'MilestoneEvent', 'ProjectCardEvent', 'ProjectEvent', 'PackageEvent', @@ -80,10 +80,10 @@ class GhEvent(AttrDict): 'InstallationEvent', 'ReleaseEvent', 'IssuesEvent', 'RepositoryEvent', 'GollumEvent', 'MembershipEvent', 'DeploymentEvent', 'DeployKeyEvent', 'IssueCommentEvent', 'PingEvent', 'DeploymentStatusEvent', 'ForkEvent', 'ScheduleEvent'] -# %% ../04_event.ipynb #1b01c937 +# %% ../04_event.ipynb #4de78d50 def _cast_evt(o): return globals()[o.type](o) -# %% ../04_event.ipynb #00e8ab14 +# %% ../04_event.ipynb #e3e61809 @patch @delegates(_list_events) def fetch_events(self:GhApi, n_pages=3, pause=0.4, per_page=30, types=None, incl_bot=False, **kwargs): @@ -98,7 +98,7 @@ def fetch_events(self:GhApi, n_pages=3, pause=0.4, per_page=30, types=None, incl yield from new_evts if pause: time.sleep(pause) -# %% ../04_event.ipynb #fbe3a9c5 +# %% ../04_event.ipynb #83cc4ca9 def 
load_sample_events(): "Load sample events, downloading if needed" name = 'sample_evts.json.gz' @@ -109,23 +109,23 @@ def load_sample_events(): if not path.exists():path.write_bytes(urlread(url, decode=False)) return dict2obj(json.load(open_file(path))).map(_cast_evt) -# %% ../04_event.ipynb #92c0a855 +# %% ../04_event.ipynb #4347a0fd def save_sample_events(n=5000): "Save the most recent `n` events as compressed JSON" evts = list(islice(api.fetch_events(incl_bot=True), n)) with gzip.open('sample_evts.json.gz', 'wt') as f: json.dump(obj2dict(evts), f) -# %% ../04_event.ipynb #05ab047a +# %% ../04_event.ipynb #5b935849 @patch(as_prop=True) def full_type(self:GhEvent): "Concatenation of `type` and `payload.action` (if available)" act = getattr(self.payload, 'action', '') return f'{self.type}_{act}' if act else self.type -# %% ../04_event.ipynb #d6cfce6f +# %% ../04_event.ipynb #b14ba037 _all_ = ['PageBuildEvent', 'ContentReferenceEvent', 'RepositoryImportEvent', 'CreateEvent', 'WorkflowRunEvent', 'DeleteEvent', 'OrganizationEvent', 'SponsorshipEvent', 'ProjectColumnEvent', 'PushEvent', 'ContextEvent', 'MilestoneEvent', 'ProjectCardEvent', 'ProjectEvent', 'PackageEvent', 'PullRequestEvent', 'RepositoryDispatchEvent', 'TeamAddEvent', 'WorkflowDispatchEvent', 'MemberEvent', 'MetaEvent', 'CodeScanningAlertEvent', 'PublicEvent', 'NeedsEvent', 'CheckRunEvent', 'SecurityAdvisoryEvent', 'PullRequestReviewCommentEvent', 'OrgBlockEvent', 'CommitCommentEvent', 'WatchEvent', 'MarketplacePurchaseEvent', 'StarEvent', 'InstallationRepositoriesEvent', 'CheckSuiteEvent', 'GithubAppAuthorizationEvent', 'TeamEvent', 'StatusEvent', 'RepositoryVulnerabilityAlertEvent', 'PullRequestReviewEvent', 'LabelEvent', 'InstallationEvent', 'ReleaseEvent', 'IssuesEvent', 'RepositoryEvent', 'GollumEvent', 'MembershipEvent', 'DeploymentEvent', 'DeployKeyEvent', 'IssueCommentEvent', 'PingEvent', 'DeploymentStatusEvent', 'ForkEvent', 'ScheduleEvent'] -# %% ../04_event.ipynb #f1384ddf +# %% ../04_event.ipynb #83be1955 evt_emojis = dict( PushEvent= '⭐', CreateEvent= '🏭', @@ -148,7 +148,7 @@ def full_type(self:GhEvent): PullRequestEvent_reopened= '🔁' ) -# %% ../04_event.ipynb #b65f5b0c +# %% ../04_event.ipynb #281d36b4 def _ref(pay, pre=''): return f'{pre} "{pay.ref.split("/")[-1]}"' if pay.ref else '' def _ref_detl(pay): return pay.ref_type + _ref(pay) @@ -164,7 +164,7 @@ def _action(self): 'watching' if isinstance(self,WatchEvent) else '') if det: return f'{pay.action} {det}' -# %% ../04_event.ipynb #51aa4c32 +# %% ../04_event.ipynb #e4e71098 @patch(as_prop=True) def description(self:GhEvent): "Description of event" @@ -187,7 +187,7 @@ def emoji(self:GhEvent): "Emoji for event from `evt_emojis`" return evt_emojis.get(self.full_type, '❌') -# %% ../04_event.ipynb #d73ab51f +# %% ../04_event.ipynb #daad2a54 described_evts = (PushEvent,CreateEvent,IssueCommentEvent,WatchEvent,PullRequestEvent,PullRequestReviewEvent,PullRequestReviewCommentEvent, DeleteEvent,ForkEvent,IssuesEvent,ReleaseEvent,MemberEvent,CommitCommentEvent,GollumEvent,PublicEvent) diff --git a/ghapi/page.py b/ghapi/page.py index b2c82f8..f511b54 100644 --- a/ghapi/page.py +++ b/ghapi/page.py @@ -5,19 +5,19 @@ # %% auto #0 __all__ = ['paged', 'parse_link_hdr', 'pages'] -# %% ../03_page.ipynb #a305a71c +# %% ../03_page.ipynb #6a0bf465 from fastcore.all import * from .core import * import re from urllib.parse import parse_qs,urlsplit -# %% ../03_page.ipynb #8ed683c4 +# %% ../03_page.ipynb #cea42302 def paged(oper, *args, per_page=30, max_pages=9999, **kwargs): "Convert 
operation `oper(*args,**kwargs)` into an iterator" yield from itertools.takewhile(noop, (oper(*args, per_page=per_page, page=i, **kwargs) for i in range(1,max_pages+1))) -# %% ../03_page.ipynb #b5353c32 +# %% ../03_page.ipynb #56bd053b class _Scanner: def __init__(self, buf): self.buf,self.match = buf,None def __getitem__(self, key): return self.match.group(key) @@ -31,7 +31,7 @@ def scan(self, pattern): _RE_COMMA_HREF = r' *,? *< *([^>]*) *> *' _RE_ATTR = rf'{_TOKEN} *(?:= *({_TOKEN}|{_QUOTED}))? *' -# %% ../03_page.ipynb #fa57cd86 +# %% ../03_page.ipynb #c8de1f90 def _parse_link_hdr(header): "Parse an RFC 5988 link header, returning a `list` of `tuple`s of URL and attr `dict`" scanner,links = _Scanner(header),[] @@ -47,12 +47,12 @@ def _parse_link_hdr(header): if scanner.buf: raise Exception(f"parse() failed at {scanner.buf!r}") return links -# %% ../03_page.ipynb #02f757cb +# %% ../03_page.ipynb #5af9f0d7 def parse_link_hdr(header): "Parse an RFC 5988 link header, returning a `dict` from rels to a `tuple` of URL and attrs `dict`" return {a.pop('rel'):(u,a) for u,a in _parse_link_hdr(header)} -# %% ../03_page.ipynb #524b8c17 +# %% ../03_page.ipynb #581beb00 @patch def last_page(self:GhApi): "Parse RFC 5988 link header from most recent operation, and extract the last page" @@ -61,11 +61,11 @@ def last_page(self:GhApi): qs = parse_qs(urlsplit(last).query) return int(nested_idx(qs,'page',0) or 0) -# %% ../03_page.ipynb #caa0b86f +# %% ../03_page.ipynb #f34f91bd def _call_page(i, oper, args, kwargs, per_page): return oper(*args, per_page=per_page, page=i, **kwargs) -# %% ../03_page.ipynb #4b571af9 +# %% ../03_page.ipynb #75052160 def pages(oper, n_pages, *args, n_workers=None, per_page=100, **kwargs): "Get `n_pages` pages from `oper(*args,**kwargs)`" return parallel(_call_page, range(1,n_pages+1), oper=oper, per_page=per_page, args=args, kwargs=kwargs, From f238bec95dfd642980f0ec8ec9fe5aa4ca87aba3 Mon Sep 17 00:00:00 2001 From: Kerem Turgutlu Date: Mon, 19 Jan 2026 14:52:43 +0300 Subject: [PATCH 2/3] refactor --- 00_core.ipynb | 882 +++++++++++------------------------------------ ghapi/_modidx.py | 1 + ghapi/core.py | 51 ++- 3 files changed, 217 insertions(+), 717 deletions(-) diff --git a/00_core.ipynb b/00_core.ipynb index d29d614..d5e1715 100644 --- a/00_core.ipynb +++ b/00_core.ipynb @@ -3,7 +3,7 @@ { "cell_type": "code", "execution_count": null, - "id": "46cc3d01", + "id": "8335c5b6", "metadata": {}, "outputs": [], "source": [ @@ -50,7 +50,8 @@ "outputs": [], "source": [ "#| hide\n", - "from nbdev import *" + "from nbdev import *\n", + "from IPython.display import Markdown" ] }, { @@ -292,22 +293,28 @@ "\n", "### GhApi.__call__\n", "\n", - "> GhApi.__call__ (path:str, verb:str=None, headers:dict=None,\n", - "> route:dict=None, query:dict=None, data=None,\n", - "> timeout=None, decode=True)\n", + "```python\n", + "\n", + "def __call__(\n", + " path:str, verb:str=None, headers:dict=None, route:dict=None, query:dict=None, data:NoneType=None,\n", + " timeout:NoneType=None, decode:bool=True\n", + "):\n", + "\n", + "\n", + "```\n", "\n", "*Call a fully specified `path` using HTTP `verb`, passing arguments to `fastcore.core.urlsend`*" ], "text/plain": [ - "---\n", + "```python\n", "\n", - "[source](https://github.com/fastai/ghapi/blob/main/ghapi/core.py#L123){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "def __call__(\n", + " path:str, verb:str=None, headers:dict=None, route:dict=None, query:dict=None, data:NoneType=None,\n", + " timeout:NoneType=None, decode:bool=True\n", 
+ "):\n", "\n", - "### GhApi.__call__\n", "\n", - "> GhApi.__call__ (path:str, verb:str=None, headers:dict=None,\n", - "> route:dict=None, query:dict=None, data=None,\n", - "> timeout=None, decode=True)\n", + "```\n", "\n", "*Call a fully specified `path` using HTTP `verb`, passing arguments to `fastcore.core.urlsend`*" ] @@ -348,11 +355,11 @@ { "data": { "text/markdown": [ - "```json\n", + "```python\n", "{ 'node_id': 'MDM6UmVmMzE1NzEyNTg4OnJlZnMvaGVhZHMvbWFzdGVy',\n", - " 'object': { 'sha': '958659bf10a8c275fb04f2f6832f1a0f4d07fa85',\n", + " 'object': { 'sha': 'b72d6c87a9237ca3c26298a64a6acf06217ace4a',\n", " 'type': 'commit',\n", - " 'url': 'https://api.github.com/repos/fastai/ghapi-test/git/commits/958659bf10a8c275fb04f2f6832f1a0f4d07fa85'},\n", + " 'url': 'https://api.github.com/repos/fastai/ghapi-test/git/commits/b72d6c87a9237ca3c26298a64a6acf06217ace4a'},\n", " 'ref': 'refs/heads/master',\n", " 'url': 'https://api.github.com/repos/fastai/ghapi-test/git/refs/heads/master'}\n", "```" @@ -361,9 +368,9 @@ "{'ref': 'refs/heads/master',\n", " 'node_id': 'MDM6UmVmMzE1NzEyNTg4OnJlZnMvaGVhZHMvbWFzdGVy',\n", " 'url': 'https://api.github.com/repos/fastai/ghapi-test/git/refs/heads/master',\n", - " 'object': {'sha': '958659bf10a8c275fb04f2f6832f1a0f4d07fa85',\n", + " 'object': {'sha': 'b72d6c87a9237ca3c26298a64a6acf06217ace4a',\n", " 'type': 'commit',\n", - " 'url': 'https://api.github.com/repos/fastai/ghapi-test/git/commits/958659bf10a8c275fb04f2f6832f1a0f4d07fa85'}}" + " 'url': 'https://api.github.com/repos/fastai/ghapi-test/git/commits/b72d6c87a9237ca3c26298a64a6acf06217ace4a'}}" ] }, "execution_count": null, @@ -391,18 +398,26 @@ "\n", "### GhApi.__getitem__\n", "\n", - "> GhApi.__getitem__ (k)\n", + "```python\n", + "\n", + "def __getitem__(\n", + " k\n", + "):\n", + "\n", + "\n", + "```\n", "\n", "*Lookup and call an endpoint by path and verb (which defaults to 'GET')*" ], "text/plain": [ - "---\n", + "```python\n", "\n", - "[source](https://github.com/fastai/ghapi/blob/main/ghapi/core.py#L147){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "def __getitem__(\n", + " k\n", + "):\n", "\n", - "### GhApi.__getitem__\n", "\n", - "> GhApi.__getitem__ (k)\n", + "```\n", "\n", "*Lookup and call an endpoint by path and verb (which defaults to 'GET')*" ] @@ -433,11 +448,11 @@ { "data": { "text/markdown": [ - "```json\n", + "```python\n", "{ 'node_id': 'MDM6UmVmMzE1NzEyNTg4OnJlZnMvaGVhZHMvbWFzdGVy',\n", - " 'object': { 'sha': '958659bf10a8c275fb04f2f6832f1a0f4d07fa85',\n", + " 'object': { 'sha': 'b72d6c87a9237ca3c26298a64a6acf06217ace4a',\n", " 'type': 'commit',\n", - " 'url': 'https://api.github.com/repos/fastai/ghapi-test/git/commits/958659bf10a8c275fb04f2f6832f1a0f4d07fa85'},\n", + " 'url': 'https://api.github.com/repos/fastai/ghapi-test/git/commits/b72d6c87a9237ca3c26298a64a6acf06217ace4a'},\n", " 'ref': 'refs/heads/master',\n", " 'url': 'https://api.github.com/repos/fastai/ghapi-test/git/refs/heads/master'}\n", "```" @@ -446,9 +461,9 @@ "{'ref': 'refs/heads/master',\n", " 'node_id': 'MDM6UmVmMzE1NzEyNTg4OnJlZnMvaGVhZHMvbWFzdGVy',\n", " 'url': 'https://api.github.com/repos/fastai/ghapi-test/git/refs/heads/master',\n", - " 'object': {'sha': '958659bf10a8c275fb04f2f6832f1a0f4d07fa85',\n", + " 'object': {'sha': 'b72d6c87a9237ca3c26298a64a6acf06217ace4a',\n", " 'type': 'commit',\n", - " 'url': 'https://api.github.com/repos/fastai/ghapi-test/git/commits/958659bf10a8c275fb04f2f6832f1a0f4d07fa85'}}" + " 'url': 
'https://api.github.com/repos/fastai/ghapi-test/git/commits/b72d6c87a9237ca3c26298a64a6acf06217ace4a'}}" ] }, "execution_count": null, @@ -485,7 +500,7 @@ { "data": { "text/plain": [ - "'958659bf10a8c275fb04f2f6832f1a0f4d07fa85'" + "'b72d6c87a9237ca3c26298a64a6acf06217ace4a'" ] }, "execution_count": null, @@ -525,7 +540,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Quota remaining: 4907 of 5000\n" + "Quota remaining: 4799 of 5000\n" ] }, { @@ -563,7 +578,7 @@ { "data": { "text/plain": [ - "'4907'" + "'4799'" ] }, "execution_count": null, @@ -602,7 +617,7 @@ "metadata": {}, "outputs": [], "source": [ - "api = GhApi(owner='fastai', repo='ghapi-test', token=token)" + "api = GhApi(owner='AnswerDotAI', repo='ghapi-test', token=token)" ] }, { @@ -647,6 +662,9 @@ "- [dependabot](https://docs.github.com/rest/reference/dependabot)\n", "- [dependency_graph](https://docs.github.com/rest/reference/dependency-graph)\n", "- [emojis](https://docs.github.com/rest/reference/emojis)\n", + "- [enterprise_team_memberships](https://docs.github.com/rest/reference/enterprise-team-memberships)\n", + "- [enterprise_team_organizations](https://docs.github.com/rest/reference/enterprise-team-organizations)\n", + "- [enterprise_teams](https://docs.github.com/rest/reference/enterprise-teams)\n", "- [gists](https://docs.github.com/rest/reference/gists)\n", "- [git](https://docs.github.com/rest/reference/git)\n", "- [gitignore](https://docs.github.com/rest/reference/gitignore)\n", @@ -662,7 +680,6 @@ "- [packages](https://docs.github.com/rest/reference/packages)\n", "- [private_registries](https://docs.github.com/rest/reference/private-registries)\n", "- [projects](https://docs.github.com/rest/reference/projects)\n", - "- [projects_classic](https://docs.github.com/rest/reference/projects-classic)\n", "- [pulls](https://docs.github.com/rest/reference/pulls)\n", "- [rate_limit](https://docs.github.com/rest/reference/rate-limit)\n", "- [reactions](https://docs.github.com/rest/reference/reactions)\n", @@ -816,7 +833,7 @@ { "data": { "text/plain": [ - "('/repos/fastai/ghapi-test/git/ref/{ref}', 'get')" + "('/repos/AnswerDotAI/ghapi-test/git/ref/{ref}', 'get')" ] }, "execution_count": null, @@ -961,7 +978,7 @@ { "data": { "text/markdown": [ - "```json\n", + "```python\n", "{}\n", "```" ], @@ -1016,7 +1033,7 @@ { "data": { "text/plain": [ - "0" + "4" ] }, "execution_count": null, @@ -1160,7 +1177,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "https://gist.github.com/KeremTurgutlu/c2150e4efd1177439ff35a4633584144\n" + "https://gist.github.com/KeremTurgutlu/299c624d1175231e3d92e91be81afb41\n" ] } ], @@ -1200,7 +1217,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "https://gist.github.com/KeremTurgutlu/4c60f8b06baf7b57dc225dd86b65028f\n" + "https://gist.github.com/KeremTurgutlu/5e4197bbcaa63d426b990a841ed55d49\n" ] } ], @@ -1218,7 +1235,7 @@ { "data": { "text/plain": [ - "'some image\\n\\n![image](https://gist.githubusercontent.com/KeremTurgutlu/4c60f8b06baf7b57dc225dd86b65028f/raw/c7f420c839f58c6ac0c05f1116317645d31d7e80/puppy.jpg)'" + "'some image\\n\\n![image](https://gist.githubusercontent.com/KeremTurgutlu/5e4197bbcaa63d426b990a841ed55d49/raw/c7f420c839f58c6ac0c05f1116317645d31d7e80/puppy.jpg)'" ] }, "execution_count": null, @@ -1373,22 +1390,30 @@ "text/markdown": [ "---\n", "\n", - "[source](https://github.com/fastai/ghapi/blob/main/ghapi/core.py#L191){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + 
"[source](https://github.com/fastai/ghapi/blob/main/ghapi/core.py#L192){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", "\n", "### GhApi.delete_release\n", "\n", - "> GhApi.delete_release (release)\n", + "```python\n", + "\n", + "def delete_release(\n", + " release\n", + "):\n", + "\n", + "\n", + "```\n", "\n", "*Delete a release and its associated tag*" ], "text/plain": [ - "---\n", + "```python\n", "\n", - "[source](https://github.com/fastai/ghapi/blob/main/ghapi/core.py#L191){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", + "def delete_release(\n", + " release\n", + "):\n", "\n", - "### GhApi.delete_release\n", "\n", - "> GhApi.delete_release (release)\n", + "```\n", "\n", "*Delete a release and its associated tag*" ] @@ -1651,13 +1676,13 @@ { "data": { "text/markdown": [ - "```json\n", + "```python\n", "{ 'mode': '100644',\n", " 'path': 'README.md',\n", " 'sha': 'eaea0f2698e76c75602058bf4e2e9fd7940ac4e3',\n", " 'size': 72,\n", " 'type': 'blob',\n", - " 'url': 'https://api.github.com/repos/fastai/ghapi-test/git/blobs/eaea0f2698e76c75602058bf4e2e9fd7940ac4e3'}\n", + " 'url': 'https://api.github.com/repos/AnswerDotAI/ghapi-test/git/blobs/eaea0f2698e76c75602058bf4e2e9fd7940ac4e3'}\n", "```" ], "text/plain": [ @@ -1666,7 +1691,7 @@ " 'type': 'blob',\n", " 'sha': 'eaea0f2698e76c75602058bf4e2e9fd7940ac4e3',\n", " 'size': 72,\n", - " 'url': 'https://api.github.com/repos/fastai/ghapi-test/git/blobs/eaea0f2698e76c75602058bf4e2e9fd7940ac4e3'}" + " 'url': 'https://api.github.com/repos/AnswerDotAI/ghapi-test/git/blobs/eaea0f2698e76c75602058bf4e2e9fd7940ac4e3'}" ] }, "execution_count": null, @@ -1837,81 +1862,48 @@ { "cell_type": "code", "execution_count": null, + "id": "a8e88cac", "metadata": {}, "outputs": [], "source": [ "api = GhApi(token=token)" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Let's implement a function to get all valid files of a repo recursively" - ] - }, { "cell_type": "code", "execution_count": null, + "id": "c9c4f899", "metadata": {}, "outputs": [], "source": [ - "@patch\n", - "def get_repo_files(self:GhApi, owner, repo, branch=\"main\"):\n", - " \"Get all file items of a repo.\"\n", - " tree = self.git.get_tree(owner=owner, repo=repo, tree_sha=branch, recursive=True)\n", - " res = []\n", - " for item in tree['tree']:\n", - " if item['type'] == 'blob': res.append(item) \n", - " return L(res)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(#3) [{'path': '.devcontainer.json', 'mode': '100644', 'type': 'blob', 'sha': '8bfa0e952eb318c5c74acaa26a0016c12e13418e', 'size': 569, 'url': 'https://api.github.com/repos/AnswerDotAI/fastcore/git/blobs/8bfa0e952eb318c5c74acaa26a0016c12e13418e'},{'path': '.gitattributes', 'mode': '100644', 'type': 'blob', 'sha': '753b249880d57c22306cf155601bff986622b1a0', 'size': 26, 'url': 'https://api.github.com/repos/AnswerDotAI/fastcore/git/blobs/753b249880d57c22306cf155601bff986622b1a0'},{'path': '.github/workflows/docs.yml', 'mode': '100644', 'type': 'blob', 'sha': 'cde13ab17f1a9cbc112928d71ecadee93cf30383', 'size': 296, 'url': 'https://api.github.com/repos/AnswerDotAI/fastcore/git/blobs/cde13ab17f1a9cbc112928d71ecadee93cf30383'}]" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "owner, repo, branch = \"AnswerDotAI\", \"fastcore\", \"main\"\n", - "repo_files = api.get_repo_files(owner,repo); repo_files[:3]" + "owner, repo, branch = 
\"AnswerDotAI\", \"fastcore\", \"main\"" ] }, { "cell_type": "markdown", + "id": "a042e703", "metadata": {}, "source": [ - "It would be useful to add filter options to further filter these files. We can use [fnmatch](https://docs.python.org/3/library/fnmatch.html) to add Unix shell-style wildcard based filtering which is simple yet pretty flexible." + "Repo files can be filtered using [fnmatch](https://docs.python.org/3/library/fnmatch.html) Unix shell-style wildcards." ] }, { "cell_type": "code", "execution_count": null, + "id": "444784ce", "metadata": {}, "outputs": [], "source": [ "#| export\n", "def _find_matches(path, pats):\n", " \"Returns matched patterns\"\n", - " matches = []\n", - " for p in listify(pats):\n", - " if fnmatch.fnmatch(path,p): matches.append(p)\n", - " return matches" + " return L(pats).filter(lambda p: fnmatch.fnmatch(path, p))" ] }, { "cell_type": "code", "execution_count": null, + "id": "9c43628d", "metadata": {}, "outputs": [ { @@ -1929,46 +1921,51 @@ "_find_matches('README.md', ['*.py', '*test_*', '*/test*/*', '*.md', 'README.md'])" ] }, + { + "cell_type": "markdown", + "id": "4d4256b4", + "metadata": {}, + "source": [ + "The include/exclude logic follows the **rsync/grep model**: a file must match at least one `include` pattern (if specified), AND must not match any `exclude` pattern. Exclude always wins—there's no ambiguity. This is simpler and more predictable than gitignore-style ordering rules. Additionally, LLMs are already familiar with this common pattern from tools like `rg` and `rsync`, making it natural to use when this function is provided as an AI tool." + ] + }, { "cell_type": "code", "execution_count": null, + "id": "9ffb3b8f", "metadata": {}, "outputs": [], "source": [ "#| export\n", "def _include(path, include, exclude):\n", - " \"Prioritize non-star matches, if both include and exclude star expr then pick longer.\"\n", - " include_matches = [\"*\"] if include is None else _find_matches(path, include)\n", - " exclude_matches = [] if exclude is None else _find_matches(path, exclude)\n", - " if include_matches and exclude_matches:\n", - " include_star = [m for m in include_matches if \"*\" in m]\n", - " exclude_star = [m for m in exclude_matches if \"*\" in m]\n", - " if include_star and exclude_star: return len(include_star) > len(exclude_star)\n", - " if include_star: return False\n", - " if exclude_star: return True \n", - " if include_matches: return True\n", - " if exclude_matches: return False" + " \"Returns True if path matches include patterns (if any) and doesn't match any exclude pattern.\"\n", + " if include and not any(fnmatch.fnmatch(path, p) for p in listify(include)): return False\n", + " if exclude and any(fnmatch.fnmatch(path, p) for p in listify(exclude)): return False\n", + " return True" ] }, { "cell_type": "markdown", + "id": "d94a7f1f", "metadata": {}, "source": [ - "Exclude all .md files expect for README.md" + "With rsync/grep style, exclude always wins. To get \"all .md except README.md\", you'd include README.md explicitly in your results separately." 
] }, { "cell_type": "code", "execution_count": null, + "id": "0fc16b2a", "metadata": {}, "outputs": [], "source": [ - "assert _include('README.md', ['README.md'], ['*.md'])\n", + "assert not _include('README.md', ['README.md'], ['*.md']) # exclude wins\n", "assert not _include('CONTRIBUTING.md', ['README.md'], ['*.md'])" ] }, { "cell_type": "markdown", + "id": "9d22afb5", "metadata": {}, "source": [ "Include all .py files except for tests" @@ -1977,6 +1974,7 @@ { "cell_type": "code", "execution_count": null, + "id": "68b0ea85", "metadata": {}, "outputs": [], "source": [ @@ -1988,6 +1986,7 @@ { "cell_type": "code", "execution_count": null, + "id": "67b14bec", "metadata": {}, "outputs": [], "source": [ @@ -1999,6 +1998,7 @@ { "cell_type": "code", "execution_count": null, + "id": "8e6485c0", "metadata": {}, "outputs": [], "source": [ @@ -2009,14 +2009,16 @@ }, { "cell_type": "markdown", + "id": "2c624654", "metadata": {}, "source": [ - "Here is an example where we filter to include all python files except for the ones under tests directory, include all notebooks, exclude all md files except for README.md, and all files starting with an underscore. " + "Here is an example where we filter to include the README, all python files except for the ones under tests directory, include all notebooks, and exclude all files starting with an underscore." ] }, { "cell_type": "code", "execution_count": null, + "id": "00984122", "metadata": {}, "outputs": [ { @@ -2035,12 +2037,13 @@ } ], "source": [ - "inc,exc = ['README.md', '*.py', '*.ipynb'], ['*.md', 'tests/*.py', '_*', '*/_*']\n", + "inc,exc = ['README.md', '*.py', '*.ipynb'], ['tests/*.py', '_*', '*/_*']\n", "[fn for fn in test_repo_files if _include(fn,inc,exc)]" ] }, { "cell_type": "markdown", + "id": "04e4a0a8", "metadata": {}, "source": [ "Let's exclude files starting with `test_` and `setup.py` too." 
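
One small convenience implied by the implementation above: `_include` runs its patterns through `listify`, so a single pattern can be passed as a bare string instead of a list. A hypothetical check of that equivalence:

```python
from fastcore.basics import listify
from ghapi.core import _include

assert listify('*.py') == ['*.py']                      # a bare string becomes a one-element list
assert _include('fastcore/net.py', '*.py', 'setup.py')  # so string patterns behave like lists
assert not _include('setup.py', '*.py', 'setup.py')
```
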
@@ -2049,12 +2052,13 @@ { "cell_type": "code", "execution_count": null, + "id": "f2d0222f", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "['*.md', 'tests/*.py', '_*', '*/_*', '*test_*.py', '*/*test*.py', 'setup.py']" + "['tests/*.py', '_*', '*/_*', '*test_*.py', '*/*test*.py', 'setup.py']" ] }, "execution_count": null, @@ -2068,253 +2072,49 @@ }, { "cell_type": "markdown", + "id": "fe54a556", "metadata": {}, "source": [ - "The list of files that are kept based on the filtering logic:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(39,\n", - " ['README.md',\n", - " 'fastcore/all.py',\n", - " 'fastcore/ansi.py',\n", - " 'fastcore/basics.py',\n", - " 'fastcore/dispatch.py',\n", - " 'fastcore/docments.py',\n", - " 'fastcore/docscrape.py',\n", - " 'fastcore/foundation.py',\n", - " 'fastcore/imghdr.py',\n", - " 'fastcore/imports.py',\n", - " 'fastcore/meta.py',\n", - " 'fastcore/nb_imports.py',\n", - " 'fastcore/net.py',\n", - " 'fastcore/parallel.py',\n", - " 'fastcore/py2pyi.py',\n", - " 'fastcore/script.py',\n", - " 'fastcore/shutil.py',\n", - " 'fastcore/style.py',\n", - " 'fastcore/transform.py',\n", - " 'fastcore/utils.py',\n", - " 'fastcore/xdg.py',\n", - " 'fastcore/xml.py',\n", - " 'fastcore/xtras.py',\n", - " 'nbs/000_tour.ipynb',\n", - " 'nbs/00_test.ipynb',\n", - " 'nbs/01_basics.ipynb',\n", - " 'nbs/02_foundation.ipynb',\n", - " 'nbs/03_xtras.ipynb',\n", - " 'nbs/03a_parallel.ipynb',\n", - " 'nbs/03b_net.ipynb',\n", - " 'nbs/04_docments.ipynb',\n", - " 'nbs/05_meta.ipynb',\n", - " 'nbs/06_script.ipynb',\n", - " 'nbs/07_xdg.ipynb',\n", - " 'nbs/08_style.ipynb',\n", - " 'nbs/09_xml.ipynb',\n", - " 'nbs/10_py2pyi.ipynb',\n", - " 'nbs/11_external.ipynb',\n", - " 'nbs/index.ipynb'])" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "repo_files_filtered = repo_files.filter(lambda o: _include(o.path, inc, exc))\n", - "len(repo_files_filtered), list(repo_files_filtered.map(lambda o: o.path))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Below we can see the files that got filtered out:" + "A function to get repo files with optional filtering" ] }, { "cell_type": "code", "execution_count": null, + "id": "802737b1", "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['.devcontainer.json',\n", - " '.gitattributes',\n", - " '.github/workflows/docs.yml',\n", - " '.github/workflows/main.yml',\n", - " '.gitignore',\n", - " 'CHANGELOG.md',\n", - " 'CODE_OF_CONDUCT.md',\n", - " 'CONTRIBUTING.md',\n", - " 'LICENSE',\n", - " 'MANIFEST.in',\n", - " 'docker-compose.yml',\n", - " 'examples/ansi.css',\n", - " 'examples/test_fastcore.py',\n", - " 'examples/test_fastcore2.py',\n", - " 'fastcore/__init__.py',\n", - " 'fastcore/_modidx.py',\n", - " 'fastcore/_nbdev.py',\n", - " 'fastcore/test.py',\n", - " 'images/att_00000.png',\n", - " 'images/att_00001.png',\n", - " 'images/att_00002.png',\n", - " 'nbs/.gitattributes',\n", - " 'nbs/.gitignore',\n", - " 'nbs/.nojekyll',\n", - " 'nbs/CNAME',\n", - " 'nbs/_parallel_win.ipynb',\n", - " 'nbs/_quarto.yml',\n", - " 'nbs/fastcore',\n", - " 'nbs/files/test.txt.bz2',\n", - " 'nbs/images/att_00000.png',\n", - " 'nbs/images/att_00005.png',\n", - " 'nbs/images/att_00006.png',\n", - " 'nbs/images/att_00007.png',\n", - " 'nbs/images/mnist3.png',\n", - " 'nbs/images/puppy.jpg',\n", - " 'nbs/llms-ctx-full.txt',\n", - " 'nbs/llms-ctx.txt',\n", - " 'nbs/llms.txt',\n", 
- " 'nbs/nbdev.yml',\n", - " 'nbs/parallel_test.py',\n", - " 'nbs/styles.css',\n", - " 'nbs/test_py2pyi.py',\n", - " 'nbs/test_py2pyi.pyi',\n", - " 'pyproject.toml',\n", - " 'settings.ini',\n", - " 'setup.py']" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "list(repo_files.filter(lambda o: o.path not in repo_files_filtered.attrgot('path')).attrgot('path'))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "# Welcome to fastcore\n", - "\n", - "\n", - "\n", - "\n", - "Python is a powerful, dynamic language. Rather than bake everything into\n", - "the language, it lets the programmer customize it to make it work for\n", - "them. `fastcore` uses this flexibility to add to Python features\n", - "inspired by other languages we’ve loved, mixins from Ruby, and currying,\n", - "binding, and more from Haskell. It also adds some “missing features” and\n", - "clean up some rough edges in the Python standard library, such as\n", - "simplifying parallel processing, and bringing ideas from NumPy over to\n", - "Python’s `list` type.\n", - "\n", - "## Getting started\n", - "\n", - "To install fastcore run: `conda install fastcore -c fastai` (if you use\n", - "Anaconda, which we recommend) or `pip install fastcore`. For an\n", - "[editable\n", - "install](https://stackoverflow.com/questions/35064426/when-would-the-e-editable-option-be-useful-with-pip-install),\n", - "clone this repo and run: `pip install -e \".[dev]\"`. fastcore is tested\n", - "to work on Ubuntu, macOS and Windows (versions tested are those shown\n", - "with the `-latest` suffix\n", - "[here](https://docs.github.com/en/actions/reference/specifications-for-github-hosted-runners#supported-runners-and-hardware-resources)).\n", - "\n", - "`fastcore` contains many features, including:\n", - "\n", - "- `fastcore.test`: Simple testing functions\n", - "- `fastcore.foundation`: Mixins, delegation, composition, and more\n", - "- `fastcore.xtras`: Utility functions to help with functional-style\n", - " programming, parallel processing, and more\n", - "\n", - "To get started, we recommend you read through [the fastcore\n", - "tour](https://fastcore.fast.ai/tour.html).\n", - "\n", - "## Contributing\n", - "\n", - "After you clone this repository, please run `nbdev_install_hooks` in\n", - "your terminal. This sets up git hooks, which clean up the notebooks to\n", - "remove the extraneous stuff stored in the notebooks (e.g. 
which cells\n", - "you ran) which causes unnecessary merge conflicts.\n", - "\n", - "To run the tests in parallel, launch `nbdev_test`.\n", - "\n", - "Before submitting a PR, check that the local library and notebooks\n", - "match.\n", - "\n", - "- If you made a change to the notebooks in one of the exported cells,\n", - " you can export it to the library with `nbdev_prepare`.\n", - "- If you made a change to the library, you can export it back to the\n", - " notebooks with `nbdev_update`.\n" - ], - "text/plain": [ - "" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ - "from IPython.display import Markdown\n", - "item = repo_files_filtered[0]\n", - "content = api.repos.get_content(owner, repo, item['path'])\n", - "content['content_decoded'] = base64.b64decode(content.content).decode('utf-8')\n", - "Markdown(content.content_decoded)" + "#| export\n", + "@patch\n", + "def _get_repo_files(self:GhApi, owner, repo, branch=\"main\"):\n", + " return self.git.get_tree(owner=owner, repo=repo, tree_sha=branch, recursive=True)\n", + "\n", + "@patch\n", + "def get_repo_files(self:GhApi, owner, repo, branch=\"main\", inc=None, exc=None):\n", + " \"Get all file items of a repo, optionally filtered.\"\n", + " tree = self._get_repo_files(owner, repo, branch)\n", + " return L(tree['tree']).filter(lambda o: o['type'] == 'blob' and _include(o.path, inc, exc))" ] }, { "cell_type": "markdown", + "id": "1cd39eaf", "metadata": {}, "source": [ - "Let's update `get_repo_files` with the filtering mechanism we've implemented above." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "@patch\n", - "def get_repo_files(self:GhApi, owner, repo, branch=\"main\", inc=None, exc=None):\n", - " \"Get all file items of a repo.\"\n", - " tree = self.git.get_tree(owner=owner, repo=repo, tree_sha=branch, recursive=True)\n", - " res = L()\n", - " for item in tree['tree']:\n", - " if item['type'] == 'blob': res.append(item) \n", - " return res.filter(lambda o: _include(o.path,inc,exc))" + "The list of files that are kept based on the filtering logic:" ] }, { "cell_type": "code", "execution_count": null, + "id": "0e386c78", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(#39) ['README.md','fastcore/all.py','fastcore/ansi.py','fastcore/basics.py','fastcore/dispatch.py','fastcore/docments.py','fastcore/docscrape.py','fastcore/foundation.py','fastcore/imghdr.py','fastcore/imports.py','fastcore/meta.py','fastcore/nb_imports.py','fastcore/net.py','fastcore/parallel.py','fastcore/py2pyi.py','fastcore/script.py','fastcore/shutil.py','fastcore/style.py','fastcore/transform.py','fastcore/utils.py'...]" + "['README.md', 'fastcore/all.py', 'fastcore/ansi.py', 'fastcore/basics.py', 'fastcore/dispatch.py', 'fastcore/docments.py', 'fastcore/docscrape.py', 'fastcore/foundation.py', 'fastcore/imghdr.py', 'fastcore/imports.py', 'fastcore/meta.py', 'fastcore/nb_imports.py', 'fastcore/net.py', 'fastcore/parallel.py', 'fastcore/py2pyi.py', 'fastcore/script.py', 'fastcore/shutil.py', 'fastcore/style.py', 'fastcore/tools.py', 'fastcore/transform.py', 'fastcore/utils.py', 'fastcore/xdg.py', 'fastcore/xml.py', 'fastcore/xtras.py', 'nbs/000_tour.ipynb', 'nbs/00_test.ipynb', 'nbs/01_basics.ipynb', 'nbs/02_foundation.ipynb', 'nbs/03_xtras.ipynb', 'nbs/03a_parallel.ipynb', 'nbs/03b_net.ipynb', 'nbs/04_docments.ipynb', 'nbs/05_meta.ipynb', 'nbs/06_script.ipynb', 'nbs/07_xdg.ipynb', 
'nbs/08_style.ipynb', 'nbs/09_xml.ipynb', 'nbs/10_py2pyi.ipynb', 'nbs/11_external.ipynb', 'nbs/12_tools.ipynb', 'nbs/index.ipynb']" ] }, "execution_count": null, @@ -2323,12 +2123,14 @@ } ], "source": [ - "repo_files = api.get_repo_files(owner, repo, inc=inc, exc=exc); repo_files.attrgot(\"path\")" + "repo_files = api.get_repo_files(owner, repo, inc=inc, exc=exc)\n", + "test_eq(len(repo_files), 41); repo_files.attrgot(\"path\")" ] }, { "cell_type": "code", "execution_count": null, + "id": "5ef71bf5", "metadata": {}, "outputs": [], "source": [ @@ -2343,376 +2145,67 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "https://github.com/AnswerDotAI/fastcore/blob/main/README.md\n", - "# Welcome to fastcore\n", - "\n", - "\n", - "\n", - "\n" - ] - } - ], - "source": [ - "o = api.get_file_content(repo_files[0].path, owner, repo)\n", - "_head = \"\\n\".join(o.content_decoded.split(\"\\n\")[:5])\n", - "print(f\"{o.html_url}\\n{_head}\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "https://github.com/AnswerDotAI/fastcore/blob/main/README.md\n", - "# Welcome to fastcore\n", - "\n", - "\n", - "\n", - "\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/all.py\n", - "from .imports import *\n", - "from .foundation import *\n", - "from .utils import *\n", - "from .parallel import *\n", - "from .net import *\n" - ] - } - ], - "source": [ - "contents = parallel(api.get_file_content, repo_files[:2].attrgot(\"path\"), owner=owner, repo=repo)\n", - "for o in contents:\n", - " _head = \"\\n\".join(o.content_decoded.split(\"\\n\")[:5])\n", - " print(f\"{o.html_url}\\n{_head}\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, + "id": "d02d1bab", "metadata": {}, "outputs": [], "source": [ - "#|export\n", + "#| export\n", "@patch\n", "@delegates(GhApi.get_repo_files)\n", - "def get_repo_contents(self:GhApi, owner, repo, **kwargs):\n", + "def get_repo_contents(self:GhApi, owner, repo, branch='main', **kwargs):\n", " repo_files = self.get_repo_files(owner, repo, **kwargs)\n", " for s in ('inc','exc',): kwargs.pop(s)\n", - " return parallel(self.get_file_content, repo_files.attrgot(\"path\"), owner=owner, repo=repo, **kwargs)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "inc,exc = ['*.md', \"*.py\"],['*/_*.py', '*test*.py', '*/*test*.py', 'setup.py']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "contents = api.get_repo_contents(owner,repo,branch=\"main\",inc=inc, exc=exc)" + " return parallel(self.get_file_content, repo_files.attrgot(\"path\"), owner=owner, repo=repo, branch=branch)" ] }, { "cell_type": "code", "execution_count": null, + "id": "d0b0cebb", "metadata": {}, "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "https://github.com/AnswerDotAI/fastcore/blob/main/CHANGELOG.md\n", - "# Release notes\n", - "\n", - "\n", - "\n", - "## 1.8.1\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/CODE_OF_CONDUCT.md\n", - "# Contributor Covenant Code of Conduct\n", - "\n", - "## Our Pledge\n", - "\n", - "In the interest of fostering an open and welcoming environment, we as\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/CONTRIBUTING.md\n", - "# How to contribute\n", - "\n", - "## How to get 
started\n", - "\n", - "Clone the `fastcore` repository.\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/README.md\n", - "# Welcome to fastcore\n", - "\n", - "\n", - "\n", - "\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/all.py\n", - "from .imports import *\n", - "from .foundation import *\n", - "from .utils import *\n", - "from .parallel import *\n", - "from .net import *\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/ansi.py\n", - "\"Filters for processing ANSI colors.\"\n", - "\n", - "# Copyright (c) IPython Development Team.\n", - "# Modifications by Jeremy Howard.\n", - "\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/basics.py\n", - "\"\"\"Basic functionality used in the fastai library\"\"\"\n", - "\n", - "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/01_basics.ipynb.\n", - "\n", - "# %% auto 0\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/dispatch.py\n", - "def __getattr__(name):\n", - " raise ImportError(\n", - " f\"Could not import '{name}' from fastcore.dispatch - this module has been moved to the fasttransform package.\\n\"\n", - " \"To migrate your code, please see the migration guide at: https://answerdotai.github.io/fasttransform/fastcore_migration_guide.html\"\n", - " )\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/docments.py\n", - "\"\"\"Document parameters using comments.\"\"\"\n", - "\n", - "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/04_docments.ipynb.\n", - "\n", - "# %% ../nbs/04_docments.ipynb 2\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/docscrape.py\n", - "\"Parse numpy-style docstrings\"\n", - "\n", - "\"\"\"\n", - "Based on code from numpy, which is:\n", - "Copyright (c) 2005-2022, NumPy Developers.\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/foundation.py\n", - "\"\"\"The `L` class and helpers for it\"\"\"\n", - "\n", - "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/02_foundation.ipynb.\n", - "\n", - "# %% auto 0\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/imghdr.py\n", - "\"\"\"Recognize image file formats based on their first few bytes.\"\"\"\n", - "\n", - "from os import PathLike\n", - "import warnings\n", - "\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/imports.py\n", - "import sys,os,re,typing,itertools,operator,functools,math,warnings,functools,io,enum\n", - "\n", - "from operator import itemgetter,attrgetter\n", - "from warnings import warn\n", - "from typing import Iterable,Generator,Sequence,Iterator,List,Set,Dict,Union,Optional,Tuple\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/meta.py\n", - "\"\"\"Metaclasses\"\"\"\n", - "\n", - "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/05_meta.ipynb.\n", - "\n", - "# %% auto 0\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/nb_imports.py\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "import numbers,tempfile,pickle,random,inspect,shutil\n", - "\n", - "from PIL import Image\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/net.py\n", - "\"\"\"Network, HTTP, and URL functions\"\"\"\n", - "\n", - "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/03b_net.ipynb.\n", - "\n", - "# %% auto 0\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/parallel.py\n", - "\"\"\"Threading and multiprocessing functions\"\"\"\n", - "\n", - "# AUTOGENERATED! DO NOT EDIT! 
File to edit: ../nbs/03a_parallel.ipynb.\n", - "\n", - "# %% auto 0\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/py2pyi.py\n", - "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/10_py2pyi.ipynb.\n", - "\n", - "# %% auto 0\n", - "__all__ = ['functypes', 'imp_mod', 'has_deco', 'sig2str', 'ast_args', 'create_pyi', 'py2pyi', 'replace_wildcards']\n", - "\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/script.py\n", - "\"\"\"A fast way to turn your python function into a script.\"\"\"\n", - "\n", - "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/06_script.ipynb.\n", - "\n", - "# %% auto 0\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/shutil.py\n", - "from functools import wraps\n", - "import shutil\n", - "\n", - "__all__ = ['copymode', 'copystat', 'copy', 'copy2', 'move', 'copytree', 'rmtree', 'disk_usage', 'chown', 'rmtree']\n", - "\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/style.py\n", - "\"\"\"Fast styling for friendly CLIs.\"\"\"\n", - "\n", - "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/08_style.ipynb.\n", - "\n", - "# %% auto 0\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/transform.py\n", - "def __getattr__(name):\n", - " raise ImportError(\n", - " f\"Could not import '{name}' from fastcore.transform - this module has been moved to the fasttransform package.\\n\"\n", - " \"To migrate your code, please see the migration guide at: https://answerdotai.github.io/fasttransform/fastcore_migration_guide.html\"\n", - " )\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/utils.py\n", - "from .imports import *\n", - "from .foundation import *\n", - "from .basics import *\n", - "from .xtras import *\n", - "from .parallel import *\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/xdg.py\n", - "\"\"\"XDG Base Directory Specification helpers.\"\"\"\n", - "\n", - "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/07_xdg.ipynb.\n", - "\n", - "# %% auto 0\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/xml.py\n", - "\"\"\"Concise generation of XML.\"\"\"\n", - "\n", - "# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/09_xml.ipynb.\n", - "\n", - "# %% auto 0\n", - "https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/xtras.py\n", - "\"\"\"Utility functions used in the fastai library\"\"\"\n", - "\n", - "# AUTOGENERATED! DO NOT EDIT! 
File to edit: ../nbs/03_xtras.ipynb.\n", - "\n", - "# %% ../nbs/03_xtras.ipynb 1\n" - ] - } - ], - "source": [ - "for o in contents:\n", - " _head = \"\\n\".join(o.content_decoded.split(\"\\n\")[:5])\n", - " print(f\"{o.html_url}\\n{_head}\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "contents = api.get_repo_contents(owner,\"ghapi\",branch=\"main\",inc=inc, exc=exc)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "https://github.com/AnswerDotAI/ghapi/blob/main/.github/scripts/build-tweet.py\n", - "import tweetrel\n", - "tweetrel.send_tweet()\n", - "https://github.com/AnswerDotAI/ghapi/blob/main/CHANGELOG.md\n", - "# Release notes\n", - "\n", - "\n", - "\n", - "## 1.0.6\n", - "https://github.com/AnswerDotAI/ghapi/blob/main/CONTRIBUTING.md\n", - "# How to contribute\n", - "\n", - "## How to get started\n", - "\n", - "Before anything else, please install the git hooks that run automatic scripts during each commit and merge to strip the notebooks of superfluous metadata (and avoid merge conflicts). After cloning the repository, run the following command inside it:\n", - "https://github.com/AnswerDotAI/ghapi/blob/main/README.md\n", - "# ghapi\n", - "\n", - "\n", - "\n", - "\n", - "https://github.com/AnswerDotAI/ghapi/blob/main/examples/build.py\n", - "#!/usr/bin/env python\n", - "from ghapi.build_lib import *\n", - "build_funcs()\n", - "\n", - "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/actions.py\n", - "\"\"\"Functionality for helping to create GitHub Actions workflows in Python\"\"\"\n", - "\n", - "# AUTOGENERATED! DO NOT EDIT! File to edit: ../01_actions.ipynb.\n", - "\n", - "# %% auto 0\n", - "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/all.py\n", - "from .core import *\n", - "from .actions import *\n", - "from .auth import *\n", - "from .page import *\n", - "from .event import *\n", - "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/auth.py\n", - "\"\"\"Helpers for creating GitHub API tokens\"\"\"\n", - "\n", - "# AUTOGENERATED! DO NOT EDIT! File to edit: ../02_auth.ipynb.\n", - "\n", - "# %% auto 0\n", - "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/build_lib.py\n", - "# AUTOGENERATED! DO NOT EDIT! File to edit: ../90_build_lib.ipynb.\n", - "\n", - "# %% auto 0\n", - "__all__ = ['GH_OPENAPI_URL', 'GhMeta', 'build_funcs']\n", - "\n", - "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/cli.py\n", - "\"\"\"Access to the GitHub API from the command line\"\"\"\n", - "\n", - "# AUTOGENERATED! DO NOT EDIT! File to edit: ../10_cli.ipynb.\n", - "\n", - "# %% auto 0\n", - "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/core.py\n", - "\"\"\"Detailed information on the GhApi API\"\"\"\n", - "\n", - "# AUTOGENERATED! DO NOT EDIT! File to edit: ../00_core.ipynb.\n", - "\n", - "# %% auto 0\n", - "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/event.py\n", - "\"\"\"Helpers for getting GitHub API events\"\"\"\n", - "\n", - "# AUTOGENERATED! DO NOT EDIT! 
File to edit: ../04_event.ipynb.\n", - "\n", - "# %% auto 0\n", - "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/metadata.py\n", - "funcs = [('/', 'get', 'meta/root', 'GitHub API Root', 'rest/meta/meta#github-api-root', [], [], ''),\n", - " ('/advisories',\n", - " 'get',\n", - " 'security-advisories/list-global-advisories',\n", - " 'List global security advisories',\n", - "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/page.py\n", - "\"\"\"Parallel and serial pagination\"\"\"\n", - "\n", - "# AUTOGENERATED! DO NOT EDIT! File to edit: ../03_page.ipynb.\n", - "\n", - "# %% auto 0\n", - "https://github.com/AnswerDotAI/ghapi/blob/main/ghapi/templates.py\n", - "wf_tmpl = \"\"\"name: $NAME\n", - "on:\n", - " workflow_dispatch:\n", - "$EVENT\n", - "defaults:\n" - ] + "data": { + "text/markdown": [ + "**[README.md](https://github.com/AnswerDotAI/fastcore/blob/main/README.md)**\n", + "```md\n", + "# Welcome to fastcore\n", + "\n", + "\n", + "\n", + "\n", + "```\n", + "\n", + "**[fastcore/all.py](https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/all.py)**\n", + "```py\n", + "from .imports import *\n", + "from .foundation import *\n", + "from .utils import *\n", + "from .parallel import *\n", + "from .net import *\n", + "```\n", + "\n", + "**[fastcore/ansi.py](https://github.com/AnswerDotAI/fastcore/blob/main/fastcore/ansi.py)**\n", + "```py\n", + "\"Filters for processing ANSI colors.\"\n", + "\n", + "# Copyright (c) IPython Development Team.\n", + "# Modifications by Jeremy Howard.\n", + "\n", + "```" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" } ], "source": [ - "for o in contents:\n", - " _head = \"\\n\".join(o.content_decoded.split(\"\\n\")[:5])\n", - " print(f\"{o.html_url}\\n{_head}\")" + "contents = api.get_repo_contents(owner, repo, inc=inc, exc=exc)\n", + "md = \"\\n\\n\".join(f\"**[{o.path}]({o.html_url})**\\n```{o.path.split('.')[-1]}\\n{chr(10).join(o.content_decoded.split(chr(10))[:5])}\\n```\" for o in contents[:3])\n", + "display(Markdown(md))" ] }, { @@ -2751,6 +2244,16 @@ "`branch` is set to the default branch if `None`. `path` must be `/docs` or `/`." ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "20156da9", + "metadata": {}, + "outputs": [], + "source": [ + "api = GhApi(owner='AnswerDotAI', repo='ghapi-test', token=token)" + ] + }, { "cell_type": "code", "execution_count": null, @@ -2760,7 +2263,7 @@ { "data": { "text/markdown": [ - "```json\n", + "```python\n", "{}\n", "```" ], @@ -2796,7 +2299,18 @@ "execution_count": null, "id": "af900e71", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/Users/keremturgutlu/aai_git/aai_repos/nbdev/nbdev/export.py:88: UserWarning: Notebook '/Users/keremturgutlu/aai_git/aai_repos/ghapi/00_core.ipynb' uses `#| export` without `#| default_exp` cell.\n", + "Note nbdev2 no longer supports nbdev1 syntax. 
Run `nbdev_migrate` to upgrade.\n", + "See https://nbdev.fast.ai/getting_started.html for more information.\n", + " warn(f\"Notebook '{nbname}' uses `#| export` without `#| default_exp` cell.\\n\"\n" + ] + } + ], "source": [ "#| hide\n", "import nbdev; nbdev.nbdev_export()" @@ -2811,13 +2325,7 @@ "source": [] } ], - "metadata": { - "kernelspec": { - "display_name": "python", - "language": "python", - "name": "python" - } - }, + "metadata": {}, "nbformat": 4, "nbformat_minor": 5 } diff --git a/ghapi/_modidx.py b/ghapi/_modidx.py index 35f72b7..8edb303 100644 --- a/ghapi/_modidx.py +++ b/ghapi/_modidx.py @@ -50,6 +50,7 @@ 'ghapi.core.GhApi.__getattr__': ('core.html#ghapi.__getattr__', 'ghapi/core.py'), 'ghapi.core.GhApi.__getitem__': ('core.html#ghapi.__getitem__', 'ghapi/core.py'), 'ghapi.core.GhApi.__init__': ('core.html#ghapi.__init__', 'ghapi/core.py'), + 'ghapi.core.GhApi._get_repo_files': ('core.html#ghapi._get_repo_files', 'ghapi/core.py'), 'ghapi.core.GhApi._repr_markdown_': ('core.html#ghapi._repr_markdown_', 'ghapi/core.py'), 'ghapi.core.GhApi.create_branch_empty': ('core.html#ghapi.create_branch_empty', 'ghapi/core.py'), 'ghapi.core.GhApi.create_file': ('core.html#ghapi.create_file', 'ghapi/core.py'), diff --git a/ghapi/core.py b/ghapi/core.py index 29511ed..c2d0266 100644 --- a/ghapi/core.py +++ b/ghapi/core.py @@ -300,52 +300,43 @@ def update_contents(self:GhApi, path, message, committer, author, content, sha=N if sha is None: sha = self.list_files()[path].sha return self.create_or_update_file(path, message, committer=committer, author=author, content=content, sha=sha, branch=branch) -# %% ../00_core.ipynb #1815bdef +# %% ../00_core.ipynb #444784ce def _find_matches(path, pats): "Returns matched patterns" - matches = [] - for p in listify(pats): - if fnmatch.fnmatch(path,p): matches.append(p) - return matches + return L(pats).filter(lambda p: fnmatch.fnmatch(path, p)) -# %% ../00_core.ipynb #b6bbe221 +# %% ../00_core.ipynb #9ffb3b8f def _include(path, include, exclude): - "Prioritize non-star matches, if both include and exclude star expr then pick longer." - include_matches = ["*"] if include is None else _find_matches(path, include) - exclude_matches = [] if exclude is None else _find_matches(path, exclude) - if include_matches and exclude_matches: - include_star = [m for m in include_matches if "*" in m] - exclude_star = [m for m in exclude_matches if "*" in m] - if include_star and exclude_star: return len(include_star) > len(exclude_star) - if include_star: return False - if exclude_star: return True - if include_matches: return True - if exclude_matches: return False - -# %% ../00_core.ipynb #7016b664 + "Returns True if path matches include patterns (if any) and doesn't match any exclude pattern." + if include and not any(fnmatch.fnmatch(path, p) for p in listify(include)): return False + if exclude and any(fnmatch.fnmatch(path, p) for p in listify(exclude)): return False + return True + +# %% ../00_core.ipynb #802737b1 +@patch +def _get_repo_files(self:GhApi, owner, repo, branch="main"): + return self.git.get_tree(owner=owner, repo=repo, tree_sha=branch, recursive=True) + @patch def get_repo_files(self:GhApi, owner, repo, branch="main", inc=None, exc=None): - "Get all file items of a repo." 
- tree = self.git.get_tree(owner=owner, repo=repo, tree_sha=branch, recursive=True) - res = L() - for item in tree['tree']: - if item['type'] == 'blob': res.append(item) - return res.filter(lambda o: _include(o.path,inc,exc)) - -# %% ../00_core.ipynb #860e5ad8 + "Get all file items of a repo, optionally filtered." + tree = self._get_repo_files(owner, repo, branch) + return L(tree['tree']).filter(lambda o: o['type'] == 'blob' and _include(o.path, inc, exc)) + +# %% ../00_core.ipynb #5ef71bf5 @patch def get_file_content(self:GhApi, path, owner, repo, branch="main"): o = self.repos.get_content(owner, repo, path, ref=branch) o['content_decoded'] = base64.b64decode(o.content).decode('utf-8') return o -# %% ../00_core.ipynb #1255603a +# %% ../00_core.ipynb #d02d1bab @patch @delegates(GhApi.get_repo_files) -def get_repo_contents(self:GhApi, owner, repo, **kwargs): +def get_repo_contents(self:GhApi, owner, repo, branch='main', **kwargs): repo_files = self.get_repo_files(owner, repo, **kwargs) for s in ('inc','exc',): kwargs.pop(s) - return parallel(self.get_file_content, repo_files.attrgot("path"), owner=owner, repo=repo, **kwargs) + return parallel(self.get_file_content, repo_files.attrgot("path"), owner=owner, repo=repo, branch=branch) # %% ../00_core.ipynb #ac4ab4e0 @patch From f12817d72cccf5a3ea30314c6775667fc8353a33 Mon Sep 17 00:00:00 2001 From: Kerem Turgutlu Date: Mon, 19 Jan 2026 15:03:26 +0300 Subject: [PATCH 3/3] clean --- 00_core.ipynb | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/00_core.ipynb b/00_core.ipynb index d5e1715..fa77fa6 100644 --- a/00_core.ipynb +++ b/00_core.ipynb @@ -2299,18 +2299,7 @@ "execution_count": null, "id": "af900e71", "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/Users/keremturgutlu/aai_git/aai_repos/nbdev/nbdev/export.py:88: UserWarning: Notebook '/Users/keremturgutlu/aai_git/aai_repos/ghapi/00_core.ipynb' uses `#| export` without `#| default_exp` cell.\n", - "Note nbdev2 no longer supports nbdev1 syntax. Run `nbdev_migrate` to upgrade.\n", - "See https://nbdev.fast.ai/getting_started.html for more information.\n", - " warn(f\"Notebook '{nbname}' uses `#| export` without `#| default_exp` cell.\\n\"\n" - ] - } - ], + "outputs": [], "source": [ "#| hide\n", "import nbdev; nbdev.nbdev_export()"