mirror of https://github.com/gradio-app/gradio.git
synced 2025-02-17 11:29:58 +08:00

merge main (#9050)

This commit is contained in:
parent 382a5615b9
commit 7ad19b2944

.changeset/cuddly-loops-write.md (new file, 6 lines)

@@ -0,0 +1,6 @@
+---
+"gradio": patch
+"gradio_client": patch
+---
+
+feat:Improvements to FRP client download and usage

.changeset/ten-lands-change.md (new file, 5 lines)

@@ -0,0 +1,5 @@
+---
+"gradio": patch
+---
+
+feat:Some tweaks to is_in_or_equal

.github/workflows/previews-build.yml (vendored, 9 changes)

@@ -1,11 +1,8 @@
 name: "previews-build"
 
 on:
+  workflow_dispatch:
   pull_request:
-  push:
-    branches:
-      - main
-      - 5.0-dev
 
 env:
   CI: true
@@ -35,7 +32,7 @@ jobs:
     name: "previews-build"
     runs-on: ubuntu-22.04
     needs: changes
-    if: needs.changes.outputs.should_run == 'true'
+    if: needs.changes.outputs.should_run == 'true' || github.event_name == 'workflow_dispatch'
     steps:
       - uses: actions/checkout@v4
       - name: install dependencies
@@ -72,7 +69,7 @@ jobs:
         uses: actions/upload-artifact@v4
         with:
           name: all_demos
-          path: demo/all_demos
+          path: demo
       - name: Create JS client tarball
         id: create_js_tarball
        continue-on-error: true

.github/workflows/previews-deploy.yml (vendored, 12 changes)

@@ -1,7 +1,6 @@
 name: "previews-deploy"
 
 on:
-  workflow_dispatch:
   workflow_run:
     workflows: ["previews-build"]
     types:
@@ -15,6 +14,7 @@ jobs:
   changes:
     name: "changes"
     runs-on: ubuntu-latest
+    if: github.event.workflow_run.conclusion == 'success'
     permissions:
       actions: read
     outputs:
@@ -41,7 +41,7 @@ jobs:
   comment-spaces-start:
     needs: changes
     uses: "./.github/workflows/comment-queue.yml"
-    if: ${{ needs.changes.outputs.should_run == 'true' }}
+    if: ${{github.event.workflow_run.conclusion == 'success' && needs.changes.outputs.should_run == 'true' }}
     secrets:
       gh_token: ${{ secrets.COMMENT_TOKEN }}
     with:
@@ -54,7 +54,7 @@ jobs:
       space_url: ${{ steps.upload-demo.outputs.SPACE_URL }}
       js_tarball_url: ${{ steps.upload_js_tarball.outputs.js_tarball_url }}
     needs: changes
-    if: ${{ needs.changes.outputs.should_run == 'true' }}
+    if: ${{ (github.event.workflow_run.conclusion == 'success' && needs.changes.outputs.should_run == 'true') || github.event.workflow_run.event == 'workflow_dispatch' }}
     runs-on: ubuntu-latest
     permissions:
       actions: read
@@ -104,7 +104,7 @@ jobs:
           curl https://raw.githubusercontent.com/gradio-app/gradio/main/scripts/upload_demo_to_space.py -o scripts/upload_demo_to_space.py
           curl https://raw.githubusercontent.com/gradio-app/gradio/main/scripts/upload_website_demos.py -o scripts/upload_website_demos.py
       - name: make dirs
-        run: mkdir -p demo/all_demos && mv all_demos/* demo/all_demos/
+        run: mkdir -p demo && mv all_demos/* demo/
       - name: Upload demo to spaces
         if: github.event.workflow_run.event == 'pull_request'
         id: upload-demo
@@ -115,7 +115,7 @@ jobs:
             --gradio-version ${{ needs.changes.outputs.gradio_version }} > url.txt
           echo "SPACE_URL=$(cat url.txt)" >> $GITHUB_OUTPUT
       - name: Upload Website Demos
-        if: github.event_name == 'workflow_dispatch'
+        if: github.event.workflow_run.event == 'workflow_dispatch'
         id: upload-website-demos
         run: |
           python scripts/upload_website_demos.py --AUTH_TOKEN ${{ secrets.WEBSITE_SPACES_DEPLOY_TOKEN }} \
@@ -150,7 +150,7 @@ jobs:
   comment-spaces-failure:
     uses: "./.github/workflows/comment-queue.yml"
     needs: [deploy, changes]
-    if: always() && needs.deploy.result == 'failure' && needs.changes.outputs.should_run == 'true'
+    if: always() && github.event.workflow_run.conclusion == 'success' && needs.deploy.result == 'failure' && needs.changes.outputs.should_run == 'true'
     secrets:
       gh_token: ${{ secrets.COMMENT_TOKEN }}
     with:

.github/workflows/publish.yml (vendored, 2 changes)

@@ -76,4 +76,4 @@ jobs:
       - name: trigger spaces deploy workflow
         env:
           GITHUB_TOKEN: ${{ secrets.COMMENT_TOKEN }}
-        run: gh workflow run deploy-spaces.yml
+        run: gh workflow run previews-build.yml

.github/workflows/storybook-deploy.yml (vendored, 9 changes)

@@ -14,6 +14,7 @@ jobs:
   changes:
     name: "changes"
     runs-on: ubuntu-latest
+    if: github.event.workflow_run.conclusion == 'success'
     permissions:
       actions: read
     outputs:
@@ -48,7 +49,7 @@ jobs:
   update-status:
     runs-on: ubuntu-latest
     needs: changes
-    if: ${{ needs.changes.outputs.should_run == 'false' || contains(needs.changes.outputs.labels, 'no-visual-update') }}
+    if: ${{ github.event.workflow_run.conclusion == 'success' && (needs.changes.outputs.should_run == 'false' || contains(needs.changes.outputs.labels, 'no-visual-update')) }}
     steps:
       - name: update status
         uses: gradio-app/github/actions/set-commit-status@main
@@ -61,7 +62,7 @@ jobs:
     environment: storybook
     name: "storybook-deploy"
     needs: changes
-    if: ${{ needs.changes.outputs.should_run == 'true' && github.repository == 'gradio-app/gradio' && !contains(needs.changes.outputs.labels, 'no-visual-update') }}
+    if: ${{ github.event.workflow_run.conclusion == 'success' && needs.changes.outputs.should_run == 'true' && github.repository == 'gradio-app/gradio' && !contains(needs.changes.outputs.labels, 'no-visual-update') }}
     runs-on: ubuntu-latest
     outputs:
       changes: ${{ steps.publish-chromatic.outputs.changeCount }}
@@ -103,14 +104,14 @@ jobs:
     secrets:
       gh_token: ${{ secrets.COMMENT_TOKEN }}
     with:
-      pr_number: ${{ needs.changes.outputs.pr_number }}
+      pr_number: ${{ github.event.workflow_run.conclusion == 'success' && needs.changes.outputs.pr_number }}
       message: |
         storybook~success~${{ needs.deploy.outputs.storybook_url }}
 
   comment-chromatic-fail:
     uses: "./.github/workflows/comment-queue.yml"
     needs: [deploy, changes]
-    if: always() && needs.deploy.result == 'failure'
+    if: always() && github.event.workflow_run.conclusion == 'success' && needs.deploy.result == 'failure'
     secrets:
       gh_token: ${{ secrets.COMMENT_TOKEN }}
     with:

.github/workflows/test-functional-lite.yml (vendored, new file, 54 lines)

@@ -0,0 +1,54 @@
+name: "functional-lite"
+
+on:
+  pull_request:
+  push:
+    branches:
+      - main
+      - 5.0-dev
+
+concurrency:
+  group: "${{ github.event.pull_request.number }}-${{ github.ref_name }}-${{ github.workflow }}"
+  cancel-in-progress: true
+
+jobs:
+  changes:
+    name: "changes"
+    runs-on: ubuntu-latest
+    outputs:
+      should_run: ${{ steps.changes.outputs.should_run }}
+      sha: ${{ steps.changes.outputs.sha }}
+      pr_number: ${{ steps.changes.outputs.pr_number }}
+      source_branch: ${{ steps.changes.outputs.source_branch }}
+      source_repo: ${{ steps.changes.outputs.source_repo }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: "gradio-app/gradio/.github/actions/changes@main"
+        id: changes
+        with:
+          filter: "functional"
+          token: ${{ secrets.GITHUB_TOKEN }}
+  test:
+    name: "functional-test-lite"
+    runs-on: ubuntu-latest
+    needs: changes
+    if: needs.changes.outputs.should_run == 'true'
+    steps:
+      - uses: actions/checkout@v4
+      - name: install dependencies
+        id: install_deps
+        uses: "gradio-app/gradio/.github/actions/install-all-deps@main"
+        with:
+          always_install_pnpm: true
+          build_lite: true
+          skip_build: true
+      - run: pnpm exec playwright install chromium firefox
+      - name: Run Lite E2E tests
+        run: |
+          . venv/bin/activate
+          pnpm --filter @gradio/app test:browser:lite
+      - name: Get the performance result
+        run: |
+          export LITE_APP_LOAD_TIME=$(jq -r '.app_load_time' .lite-perf.json)
+          echo "LITE_APP_LOAD_TIME=$LITE_APP_LOAD_TIME" >> $GITHUB_ENV
+          cat .lite-perf.json # For debugging

.github/workflows/test-functional.yml (vendored, 11 changes)

@@ -41,7 +41,7 @@ jobs:
         with:
           always_install_pnpm: true
           build_lite: true
-      - name: install demo dependencies
+      - name: install outbreak_forecast dependencies
        run: |
          . venv/bin/activate
          python -m pip install -r demo/outbreak_forecast/requirements.txt
@@ -74,12 +74,3 @@ jobs:
           name: reload-mode-playwright-screenshots
           path: |
             ./test-results
-      - name: Run Lite E2E tests
-        run: |
-          . venv/bin/activate
-          pnpm --filter @gradio/app test:browser:lite
-      - name: Get the performance result
-        run: |
-          export LITE_APP_LOAD_TIME=$(jq -r '.app_load_time' .lite-perf.json)
-          echo "LITE_APP_LOAD_TIME=$LITE_APP_LOAD_TIME" >> $GITHUB_ENV
-          cat .lite-perf.json # For debugging

.github/workflows/update-checks.yml (vendored, 5 changes)

@@ -2,7 +2,7 @@ name: "update-checks"
 
 on:
   workflow_run:
-    workflows: ["python", "js", "functional"]
+    workflows: ["python", "js", "functional", "functional-lite"]
     types:
       - completed
 
@@ -50,6 +50,9 @@ jobs:
       - name: set functional check name
         if: github.event.workflow_run.name == 'functional'
         run: echo "CHECK_NAME=functional / functional-test (pull_request)" >> $GITHUB_ENV
+      - name: set functional-lite check name
+        if: github.event.workflow_run.name == 'functional-lite'
+        run: echo "CHECK_NAME=functional-lite / functional-test-lite (pull_request)" >> $GITHUB_ENV
      - name: update status
        uses: gradio-app/github/actions/set-commit-status@main
        with:

.github/workflows/website-build.yml (vendored, 3 changes)

@@ -35,7 +35,7 @@ jobs:
     name: "website-build"
     runs-on: ubuntu-22.04
     needs: changes
-    if: needs.changes.outputs.should_run == 'true'
+    if: needs.changes.outputs.should_run == 'true' || (github.ref_name == 'main' && github.repository == 'gradio-app/gradio')
     steps:
       - uses: actions/checkout@v4
       - name: install dependencies
@@ -43,7 +43,6 @@ jobs:
         with:
           always_install_pnpm: true
           skip_build: true
-      # unsafe - pr could have modified the build script
       - name: build client
         run: pnpm --filter @gradio/client build
 

.github/workflows/website-deploy.yml (vendored, 7 changes)

@@ -38,6 +38,7 @@ jobs:
   comment-deploy-start:
     needs: changes
     uses: "./.github/workflows/comment-queue.yml"
+    if: github.event.workflow_run.conclusion == 'success' && needs.changes.outputs.should_run == 'true'
     secrets:
       gh_token: ${{ secrets.COMMENT_TOKEN }}
     with:
@@ -48,7 +49,7 @@ jobs:
     name: "website-deploy"
     runs-on: ubuntu-latest
     needs: changes
-    if: needs.changes.outputs.should_run == 'true'
+    if: github.event.workflow_run.conclusion == 'success' && (needs.changes.outputs.should_run == 'true' || github.event.workflow_run.event == 'push')
     permissions:
       actions: read
     outputs:
@@ -115,7 +116,7 @@ jobs:
   comment-deploy-success:
     uses: "./.github/workflows/comment-queue.yml"
     needs: [deploy, changes]
-    if: needs.deploy.result == 'success' && needs.changes.outputs.pr_number != 'false'
+    if: github.event.workflow_run.conclusion == 'success' && needs.deploy.result == 'success' && needs.changes.outputs.pr_number != 'false'
     secrets:
       gh_token: ${{ secrets.COMMENT_TOKEN }}
     with:
@@ -124,7 +125,7 @@ jobs:
   comment-deploy-failure:
     uses: "./.github/workflows/comment-queue.yml"
     needs: [deploy, changes]
-    if: always() && needs.deploy.result == 'failure' && needs.changes.outputs.pr_number != 'false'
+    if: always() && github.event.workflow_run.conclusion == 'success' && needs.deploy.result == 'failure' && needs.changes.outputs.pr_number != 'false'
     secrets:
       gh_token: ${{ secrets.COMMENT_TOKEN }}
     with:

.gitignore (vendored, 1 change)

@@ -39,6 +39,7 @@ coverage.xml
 test.txt
 **/snapshots/**/*.png
 playwright-report/
+.hypothesis
 
 # Demos
 demo/tmp.zip

@@ -1365,7 +1365,7 @@ class Endpoint:
         if self.client.output_dir is not None:
             os.makedirs(self.client.output_dir, exist_ok=True)
 
-        sha1 = hashlib.sha1()
+        sha = hashlib.sha256()
         temp_dir = Path(tempfile.gettempdir()) / secrets.token_hex(20)
         temp_dir.mkdir(exist_ok=True, parents=True)
 
@@ -1380,11 +1380,11 @@ class Endpoint:
         ) as response:
             response.raise_for_status()
             with open(temp_dir / Path(url_path).name, "wb") as f:
-                for chunk in response.iter_bytes(chunk_size=128 * sha1.block_size):
-                    sha1.update(chunk)
+                for chunk in response.iter_bytes(chunk_size=128 * sha.block_size):
+                    sha.update(chunk)
                     f.write(chunk)
 
-        directory = Path(self.client.output_dir) / sha1.hexdigest()
+        directory = Path(self.client.output_dir) / sha.hexdigest()
         directory.mkdir(exist_ok=True, parents=True)
         dest = directory / Path(url_path).name
         shutil.move(temp_dir / Path(url_path).name, dest)

@@ -1 +1 @@
-{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: gif_maker"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio opencv-python"]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import cv2\n", "import gradio as gr\n", "\n", "def gif_maker(img_files):\n", "    img_array = []\n", "    size = (1, 1)\n", "    for filename in img_files:\n", "        img = cv2.imread(filename.name)\n", "        height, width, _ = img.shape\n", "        size = (width,height)\n", "        img_array.append(img)\n", "    output_file = \"test.mp4\"\n", "    out = cv2.VideoWriter(output_file,cv2.VideoWriter_fourcc(*'h264'), 15, size)\n", "    for i in range(len(img_array)):\n", "        out.write(img_array[i])\n", "    out.release()\n", "    return output_file\n", "\n", "demo = gr.Interface(gif_maker, inputs=gr.File(file_count=\"multiple\"), outputs=gr.Video())\n", "\n", "if __name__ == \"__main__\":\n", "    demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
+{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: gif_maker"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio opencv-python"]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import cv2\n", "import gradio as gr\n", "\n", "def gif_maker(img_files):\n", "    img_array = []\n", "    size = (1, 1)\n", "    for filename in img_files:\n", "        img = cv2.imread(filename.name)\n", "        height, width, _ = img.shape\n", "        size = (width,height)\n", "        img_array.append(img)\n", "    output_file = \"test.mp4\"\n", "    out = cv2.VideoWriter(output_file,cv2.VideoWriter_fourcc(*'h264'), 15, size) # type: ignore\n", "    for i in range(len(img_array)):\n", "        out.write(img_array[i])\n", "    out.release()\n", "    return output_file\n", "\n", "demo = gr.Interface(gif_maker, inputs=gr.File(file_count=\"multiple\"), outputs=gr.Video())\n", "\n", "if __name__ == \"__main__\":\n", "    demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}

@@ -10,7 +10,7 @@ def gif_maker(img_files):
         size = (width,height)
         img_array.append(img)
     output_file = "test.mp4"
-    out = cv2.VideoWriter(output_file,cv2.VideoWriter_fourcc(*'h264'), 15, size)
+    out = cv2.VideoWriter(output_file,cv2.VideoWriter_fourcc(*'h264'), 15, size) # type: ignore
     for i in range(len(img_array)):
         out.write(img_array[i])
     out.release()

@@ -1 +1 @@
-{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: white_noise_vid_not_playable"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio opencv-python"]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import cv2\n", "import gradio as gr\n", "import numpy as np\n", "\n", "def gif_maker():\n", "    img_array = []\n", "    height, width = 50, 50\n", "    for i in range(30):\n", "        img_array.append(np.random.randint(0, 255, size=(height, width, 3)).astype(np.uint8))\n", "    output_file = \"test.mp4\"\n", "    out = cv2.VideoWriter(output_file, cv2.VideoWriter_fourcc(*'mp4v'), 15, (height, width))\n", "    for i in range(len(img_array)):\n", "        out.write(img_array[i])\n", "    out.release()\n", "    return output_file, output_file\n", "\n", "demo = gr.Interface(gif_maker, inputs=None, outputs=[gr.Video(), gr.File()])\n", "\n", "if __name__ == \"__main__\":\n", "    demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
+{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: white_noise_vid_not_playable"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio opencv-python"]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import cv2\n", "import gradio as gr\n", "import numpy as np\n", "\n", "def gif_maker():\n", "    img_array = []\n", "    height, width = 50, 50\n", "    for i in range(30):\n", "        img_array.append(np.random.randint(0, 255, size=(height, width, 3)).astype(np.uint8))\n", "    output_file = \"test.mp4\"\n", "    out = cv2.VideoWriter(output_file, cv2.VideoWriter_fourcc(*'mp4v'), 15, (height, width)) # type: ignore\n", "    for i in range(len(img_array)):\n", "        out.write(img_array[i])\n", "    out.release()\n", "    return output_file, output_file\n", "\n", "demo = gr.Interface(gif_maker, inputs=None, outputs=[gr.Video(), gr.File()])\n", "\n", "if __name__ == \"__main__\":\n", "    demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}

@@ -8,7 +8,7 @@ def gif_maker():
     for i in range(30):
         img_array.append(np.random.randint(0, 255, size=(height, width, 3)).astype(np.uint8))
     output_file = "test.mp4"
-    out = cv2.VideoWriter(output_file, cv2.VideoWriter_fourcc(*'mp4v'), 15, (height, width))
+    out = cv2.VideoWriter(output_file, cv2.VideoWriter_fourcc(*'mp4v'), 15, (height, width)) # type: ignore
     for i in range(len(img_array)):
         out.write(img_array[i])
     out.release()

@@ -56,7 +56,13 @@ from gradio.context import (
     get_render_context,
     set_render_context,
 )
-from gradio.data_classes import BlocksConfigDict, FileData, GradioModel, GradioRootModel
+from gradio.data_classes import (
+    BlocksConfigDict,
+    DeveloperPath,
+    FileData,
+    GradioModel,
+    GradioRootModel,
+)
 from gradio.events import (
     EventData,
     EventListener,
@@ -410,7 +416,7 @@ class BlockContext(Block):
             render=render,
         )
 
-    TEMPLATE_DIR = "./templates/"
+    TEMPLATE_DIR = DeveloperPath("./templates/")
     FRONTEND_DIR = "../../frontend/"
 
     @property

@@ -21,6 +21,7 @@ from gradio.blocks import Block, BlockContext
 from gradio.component_meta import ComponentMeta
 from gradio.data_classes import (
     BaseModel,
+    DeveloperPath,
     FileDataDict,
     GradioDataModel,
     MediaStreamChunk,
@@ -233,7 +234,7 @@ class Component(ComponentBase, Block):
 
         self.component_class_id = self.__class__.get_component_class_id()
 
-    TEMPLATE_DIR = "./templates/"
+    TEMPLATE_DIR = DeveloperPath("./templates/")
     FRONTEND_DIR = "../../frontend/"
 
     def get_config(self):

@@ -10,7 +10,8 @@ from typing import TYPE_CHECKING, Any, Callable, List, Literal, Sequence
 from gradio_client.documentation import document
 
 from gradio.components.base import Component, server
-from gradio.data_classes import GradioRootModel
+from gradio.data_classes import DeveloperPath, GradioRootModel, UserProvidedPath
+from gradio.utils import safe_join
 
 if TYPE_CHECKING:
     from gradio.components import Timer
@@ -77,7 +78,7 @@ class FileExplorer(Component):
             render: If False, component will not render be rendered in the Blocks context. Should be used if the intention is to assign event listeners now but render the component later.
             key: if assigned, will be used to assume identity across a re-render. Components that have the same key across a re-render will have their value preserved.
         """
-        self.root_dir = os.path.abspath(root_dir)
+        self.root_dir = DeveloperPath(os.path.abspath(root_dir))
         self.glob = glob
         self.ignore_glob = ignore_glob
         valid_file_count = ["single", "multiple"]
@@ -194,11 +195,8 @@ class FileExplorer(Component):
 
         return folders + files
 
-    def _safe_join(self, folders):
-        combined_path = os.path.join(self.root_dir, *folders)
-        absolute_path = os.path.abspath(combined_path)
-        if os.path.commonprefix([self.root_dir, absolute_path]) != os.path.abspath(
-            self.root_dir
-        ):
-            raise ValueError("Attempted to navigate outside of root directory")
-        return absolute_path
+    def _safe_join(self, folders: list[str]):
+        if not folders or len(folders) == 0:
+            return self.root_dir
+        combined_path = UserProvidedPath(os.path.join(*folders))
+        return safe_join(self.root_dir, combined_path)

@@ -8,7 +8,17 @@ import secrets
 import shutil
 from abc import ABC, abstractmethod
 from enum import Enum, auto
-from typing import Any, Iterator, List, Literal, Optional, Tuple, TypedDict, Union
+from typing import (
+    Any,
+    Iterator,
+    List,
+    Literal,
+    NewType,
+    Optional,
+    Tuple,
+    TypedDict,
+    Union,
+)
 
 from fastapi import Request
 from gradio_client.documentation import document
@@ -21,6 +31,9 @@ try:
 except ImportError:
     JsonValue = Any
 
+DeveloperPath = NewType("DeveloperPath", str)
+UserProvidedPath = NewType("UserProvidedPath", str)
+
 
 class CancelBody(BaseModel):
     session_hash: str

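For background, `typing.NewType` (as used for `DeveloperPath` and `UserProvidedPath` above) creates a distinction that exists only for static type checkers and costs nothing at runtime. A minimal sketch of how such a distinction catches accidental argument swaps; the `serve` function here is hypothetical, not part of gradio:

```python
from typing import NewType

DeveloperPath = NewType("DeveloperPath", str)  # paths hard-coded by the library
UserProvidedPath = NewType("UserProvidedPath", str)  # paths arriving from requests

def serve(base: DeveloperPath, requested: UserProvidedPath) -> str:
    # At runtime both values are plain str; the distinction exists only
    # for static checkers such as pyright or mypy.
    return f"{base}/{requested}"

base = DeveloperPath("/app/templates")
path = UserProvidedPath("index.html")
serve(base, path)    # OK
# serve(path, base)  # a type checker flags this: arguments swapped
```
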
@@ -98,3 +98,7 @@ class Error(Exception):
 
 class ComponentDefinitionError(NotImplementedError):
     pass
+
+
+class InvalidPathError(ValueError):
+    pass

@@ -5,6 +5,7 @@ creating tunnels.
 
 from __future__ import annotations
 
+import ipaddress
 import os
 import time
 import warnings
@@ -35,6 +36,12 @@ def setup_tunnel(
             raise RuntimeError(
                 "Could not get share link from Gradio API Server."
             ) from e
+        try:
+            ipaddress.ip_address(remote_host)
+        except ValueError as e:
+            raise ValueError(
+                f"Invalid IP address received from Gradio API Server: {remote_host}"
+            ) from e
     else:
         remote_host, remote_port = share_server_address.split(":")
         remote_port = int(remote_port)

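The `ipaddress.ip_address` call above works as a validator because it raises `ValueError` for anything that is not a literal IPv4/IPv6 address; a quick standalone illustration:

```python
import ipaddress

print(ipaddress.ip_address("203.0.113.7"))  # IPv4Address('203.0.113.7')
print(ipaddress.ip_address("2001:db8::1"))  # IPv6Address('2001:db8::1')

try:
    # Hostnames, URLs, and injected payloads are all rejected.
    ipaddress.ip_address("evil.example.com")
except ValueError as e:
    print(f"rejected: {e}")
```
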
@@ -184,35 +184,35 @@ hash_seed = get_hash_seed().encode("utf-8")
 
 
 def hash_file(file_path: str | Path, chunk_num_blocks: int = 128) -> str:
-    sha1 = hashlib.sha1()
-    sha1.update(hash_seed)
+    sha = hashlib.sha256()
+    sha.update(hash_seed)
     with open(file_path, "rb") as f:
-        for chunk in iter(lambda: f.read(chunk_num_blocks * sha1.block_size), b""):
-            sha1.update(chunk)
-    return sha1.hexdigest()
+        for chunk in iter(lambda: f.read(chunk_num_blocks * sha.block_size), b""):
+            sha.update(chunk)
+    return sha.hexdigest()
 
 
 def hash_url(url: str) -> str:
-    sha1 = hashlib.sha1()
-    sha1.update(hash_seed)
-    sha1.update(url.encode("utf-8"))
-    return sha1.hexdigest()
+    sha = hashlib.sha256()
+    sha.update(hash_seed)
+    sha.update(url.encode("utf-8"))
+    return sha.hexdigest()
 
 
 def hash_bytes(bytes: bytes):
-    sha1 = hashlib.sha1()
-    sha1.update(hash_seed)
-    sha1.update(bytes)
-    return sha1.hexdigest()
+    sha = hashlib.sha256()
+    sha.update(hash_seed)
+    sha.update(bytes)
+    return sha.hexdigest()
 
 
 def hash_base64(base64_encoding: str, chunk_num_blocks: int = 128) -> str:
-    sha1 = hashlib.sha1()
-    sha1.update(hash_seed)
-    for i in range(0, len(base64_encoding), chunk_num_blocks * sha1.block_size):
-        data = base64_encoding[i : i + chunk_num_blocks * sha1.block_size]
-        sha1.update(data.encode("utf-8"))
-    return sha1.hexdigest()
+    sha = hashlib.sha256()
+    sha.update(hash_seed)
+    for i in range(0, len(base64_encoding), chunk_num_blocks * sha.block_size):
+        data = base64_encoding[i : i + chunk_num_blocks * sha.block_size]
+        sha.update(data.encode("utf-8"))
+    return sha.hexdigest()
 
 
 def save_pil_to_cache(

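Two details of the pattern above are worth spelling out: reads happen in multiples of the hash's internal `block_size` (64 bytes for SHA-256, so 128 blocks is an 8 KiB chunk), keeping memory flat for large files, and the SHA-256 `hexdigest()` lengthens cache keys from 40 to 64 hex characters, so entries cached under the old SHA-1 names are simply recomputed. A self-contained sketch of the seeded, chunked hashing; the seed value here is a stand-in:

```python
import hashlib

hash_seed = b"example-seed"  # stand-in for gradio's per-install hash seed

def hash_file(file_path: str, chunk_num_blocks: int = 128) -> str:
    sha = hashlib.sha256()
    sha.update(hash_seed)  # seeding makes digests install-specific
    with open(file_path, "rb") as f:
        # 128 * 64 bytes = 8 KiB per read; memory use stays constant.
        for chunk in iter(lambda: f.read(chunk_num_blocks * sha.block_size), b""):
            sha.update(chunk)
    return sha.hexdigest()  # 64 hex chars (SHA-1 produced 40)
```
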
@@ -383,7 +383,7 @@ class GradioUploadFile(UploadFile):
         headers: Headers | None = None,
     ) -> None:
         super().__init__(file, size=size, filename=filename, headers=headers)
-        self.sha = hashlib.sha1()
+        self.sha = hashlib.sha256()
         self.sha.update(processing_utils.hash_seed)
 
 

@@ -18,7 +18,6 @@ import inspect
 import json
 import mimetypes
 import os
-import posixpath
 import secrets
 import time
 import traceback
@@ -35,6 +34,7 @@ from typing import (
     Optional,
     Type,
     Union,
+    cast,
 )
 
 import fastapi
@@ -74,10 +74,13 @@ from gradio.data_classes import (
     ComponentServerBlobBody,
     ComponentServerJSONBody,
     DataWithFiles,
+    DeveloperPath,
     PredictBody,
     ResetBody,
     SimplePredictBody,
+    UserProvidedPath,
 )
+from gradio.exceptions import InvalidPathError
 from gradio.oauth import attach_oauth
 from gradio.route_utils import (  # noqa: F401
     CustomCORSMiddleware,
@@ -110,9 +113,18 @@ if TYPE_CHECKING:
 
 mimetypes.init()
 
-STATIC_TEMPLATE_LIB = files("gradio").joinpath("templates").as_posix()  # type: ignore
-STATIC_PATH_LIB = files("gradio").joinpath("templates", "frontend", "static").as_posix()  # type: ignore
-BUILD_PATH_LIB = files("gradio").joinpath("templates", "frontend", "assets").as_posix()  # type: ignore
+STATIC_TEMPLATE_LIB = cast(
+    DeveloperPath,
+    files("gradio").joinpath("templates").as_posix(),  # type: ignore
+)
+STATIC_PATH_LIB = cast(
+    DeveloperPath,
+    files("gradio").joinpath("templates", "frontend", "static").as_posix(),  # type: ignore
+)
+BUILD_PATH_LIB = cast(
+    DeveloperPath,
+    files("gradio").joinpath("templates", "frontend", "assets").as_posix(),  # type: ignore
+)
 VERSION = get_package_version()
 
 
@@ -448,7 +460,7 @@ class App(FastAPI):
 
         @app.get("/static/{path:path}")
         def static_resource(path: str):
-            static_file = safe_join(STATIC_PATH_LIB, path)
+            static_file = routes_safe_join(STATIC_PATH_LIB, UserProvidedPath(path))
             return FileResponse(static_file)
 
         @app.get("/custom_component/{id}/{type}/{file_name}")
@@ -460,7 +472,6 @@ class App(FastAPI):
             location = next(
                 (item for item in components if item["component_class_id"] == id), None
             )
-
             if location is None:
                 raise HTTPException(status_code=404, detail="Component not found.")
 
@@ -472,9 +483,14 @@ class App(FastAPI):
             if module_path is None or component_instance is None:
                 raise HTTPException(status_code=404, detail="Component not found.")
 
-            path = safe_join(
-                str(Path(module_path).parent),
-                f"{component_instance.__class__.TEMPLATE_DIR}/{type}/{file_name}",
+            requested_path = utils.safe_join(
+                component_instance.__class__.TEMPLATE_DIR,
+                UserProvidedPath(f"{type}/{file_name}"),
+            )
+
+            path = routes_safe_join(
+                DeveloperPath(str(Path(module_path).parent)),
+                UserProvidedPath(requested_path),
             )
 
             key = f"{id}-{type}-{file_name}"
@@ -496,7 +512,7 @@ class App(FastAPI):
 
         @app.get("/assets/{path:path}")
         def build_resource(path: str):
-            build_file = safe_join(BUILD_PATH_LIB, path)
+            build_file = routes_safe_join(BUILD_PATH_LIB, UserProvidedPath(path))
             return FileResponse(build_file)
 
         @app.get("/favicon.ico")
@@ -545,7 +561,7 @@ class App(FastAPI):
 
             is_dir = abs_path.is_dir()
 
-            if in_blocklist or is_dir:
+            if is_dir or in_blocklist:
                 raise HTTPException(403, f"File not allowed: {path_or_url}.")
 
             created_by_app = False
@@ -1179,7 +1195,14 @@ class App(FastAPI):
                 name = f"tmp{secrets.token_hex(5)}"
                 directory = Path(app.uploaded_file_dir) / temp_file.sha.hexdigest()
                 directory.mkdir(exist_ok=True, parents=True)
-                dest = (directory / name).resolve()
+                try:
+                    dest = utils.safe_join(
+                        DeveloperPath(str(directory)), UserProvidedPath(name)
+                    )
+                except InvalidPathError as err:
+                    raise HTTPException(
+                        status_code=400, detail=f"Invalid file name: {name}"
+                    ) from err
                 temp_file.file.close()
                 # we need to move the temp file to the cache directory
                 # but that's possibly blocking and we're in an async function
@@ -1190,9 +1213,9 @@ class App(FastAPI):
                     os.rename(temp_file.file.name, dest)
                 except OSError:
                     files_to_copy.append(temp_file.file.name)
-                    locations.append(str(dest))
+                    locations.append(dest)
                 output_files.append(dest)
-                blocks.upload_file_set.add(str(dest))
+                blocks.upload_file_set.add(dest)
             if files_to_copy:
                 bg_tasks.add_task(
                     move_uploaded_files_to_cache, files_to_copy, locations
@@ -1255,32 +1278,22 @@ class App(FastAPI):
 ########
 
 
-def safe_join(directory: str, path: str) -> str:
-    """Safely path to a base directory to avoid escaping the base directory.
-    Borrowed from: werkzeug.security.safe_join"""
-    _os_alt_seps: List[str] = [
-        sep for sep in [os.path.sep, os.path.altsep] if sep is not None and sep != "/"
-    ]
-
+def routes_safe_join(directory: DeveloperPath, path: UserProvidedPath) -> str:
+    """Safely join the user path to the directory while performing some additional http-related checks,
+    e.g. ensuring that the full path exists on the local file system and is not a directory"""
     if path == "":
-        raise HTTPException(400)
+        raise fastapi.HTTPException(400)
     if route_utils.starts_with_protocol(path):
-        raise HTTPException(403)
-    filename = posixpath.normpath(path)
-    fullpath = os.path.join(directory, filename)
-    if (
-        any(sep in filename for sep in _os_alt_seps)
-        or os.path.isabs(filename)
-        or filename == ".."
-        or filename.startswith("../")
-        or os.path.isdir(fullpath)
-    ):
-        raise HTTPException(403)
-
-    if not os.path.exists(fullpath):
-        raise HTTPException(404, "File not found")
-
-    return fullpath
+        raise fastapi.HTTPException(403)
+    try:
+        fullpath = Path(utils.safe_join(directory, path))
+    except InvalidPathError as e:
+        raise fastapi.HTTPException(403) from e
+    if fullpath.is_dir():
+        raise fastapi.HTTPException(403)
+    if not fullpath.exists():
+        raise fastapi.HTTPException(404)
+    return str(fullpath)
 
 
 def get_types(cls_set: List[Type]):

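To summarize the control flow of `routes_safe_join` above: an empty path is a client error (400), traversal attempts and directories are forbidden (403), and a well-formed path that simply does not exist is a 404. A standalone sketch of the same decision tree, with plain exceptions standing in for `fastapi.HTTPException` and a simplified traversal check in place of `utils.safe_join`:

```python
import os
import posixpath
from pathlib import Path

def safe_join_sketch(directory: str, path: str) -> str:
    """Simplified stand-in for utils.safe_join: reject escaping paths."""
    filename = posixpath.normpath(path)
    if os.path.isabs(filename) or filename == ".." or filename.startswith("../"):
        raise ValueError("path escapes base directory")
    return os.path.join(directory, filename)

def serve_status(directory: str, path: str) -> int:
    if path == "":
        return 400  # empty request path
    try:
        fullpath = Path(safe_join_sketch(directory, path))
    except ValueError:
        return 403  # traversal attempt
    if fullpath.is_dir():
        return 403  # directories are never served
    if not fullpath.exists():
        return 404  # well-formed but missing
    return 200

print(serve_status("/srv/static", "../etc/passwd"))  # 403
```
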
@@ -1,4 +1,5 @@
 import atexit
+import hashlib
 import os
 import platform
 import re
@@ -6,6 +7,7 @@ import stat
 import subprocess
 import sys
 import time
+import warnings
 from pathlib import Path
 from typing import List
 
@@ -22,6 +24,15 @@ BINARY_REMOTE_NAME = f"frpc_{platform.system().lower()}_{machine.lower()}"
 EXTENSION = ".exe" if os.name == "nt" else ""
 BINARY_URL = f"https://cdn-media.huggingface.co/frpc-gradio-{VERSION}/{BINARY_REMOTE_NAME}{EXTENSION}"
 
+CHECKSUMS = {
+    "https://cdn-media.huggingface.co/frpc-gradio-0.2/frpc_windows_amd64.exe": "cdd756e16622e0e60b697022d8da827a11fefe689325861c58c1003f2f8aa519",
+    "https://cdn-media.huggingface.co/frpc-gradio-0.2/frpc_linux_amd64": "fb74b665633589410540c49dfcef5b6f0fd4a9bd7c9558bcdee2f0e43da0774d",
+    "https://cdn-media.huggingface.co/frpc-gradio-0.2/frpc_linux_arm64": "af13b93897512079ead398224bd58bbaa136fcc5679af023780ee6c0538b3d82",
+    "https://cdn-media.huggingface.co/frpc-gradio-0.2/frpc_darwin_amd64": "6d3bd9f7e92e82fe557ba1d223bdd25317fbc296173a829601926526263c6092",
+    "https://cdn-media.huggingface.co/frpc-gradio-0.2/frpc_darwin_arm64": "0227ae6dafbe59d4e2c4a827d983ecc463eaa61f152216a3ec809c429c08eb31",
+}
+CHUNK_SIZE = 128
+
 BINARY_FILENAME = f"{BINARY_REMOTE_NAME}_v{VERSION}"
 BINARY_FOLDER = Path(__file__).parent
 BINARY_PATH = f"{BINARY_FOLDER / BINARY_FILENAME}"
@@ -62,6 +73,18 @@ class Tunnel:
         st = os.stat(BINARY_PATH)
         os.chmod(BINARY_PATH, st.st_mode | stat.S_IEXEC)
 
+        if BINARY_URL in CHECKSUMS:
+            sha = hashlib.sha256()
+            with open(BINARY_PATH, "rb") as f:
+                for chunk in iter(lambda: f.read(CHUNK_SIZE * sha.block_size), b""):
+                    sha.update(chunk)
+            calculated_hash = sha.hexdigest()
+
+            if calculated_hash != CHECKSUMS[BINARY_URL]:
+                warnings.warn(
+                    f"Checksum of downloaded binary for creating share links does not match expected value. Please verify the integrity of the downloaded binary located at {BINARY_PATH}."
+                )
+
     def start_tunnel(self) -> str:
         self.download_binary()
         self.url = self._start_tunnel(BINARY_PATH)

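The same integrity check can be reproduced by hand; this sketch verifies one of the binaries against the published digest, with the URL and expected value taken from the `CHECKSUMS` table above:

```python
import hashlib
import urllib.request

URL = "https://cdn-media.huggingface.co/frpc-gradio-0.2/frpc_linux_amd64"
EXPECTED = "fb74b665633589410540c49dfcef5b6f0fd4a9bd7c9558bcdee2f0e43da0774d"

sha = hashlib.sha256()
with urllib.request.urlopen(URL) as resp:
    # Stream in 8 KiB chunks so the whole binary is never held in memory.
    for chunk in iter(lambda: resp.read(8192), b""):
        sha.update(chunk)

print("OK" if sha.hexdigest() == EXPECTED else "checksum mismatch")
```
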
@@ -14,6 +14,7 @@ import json
 import json.decoder
 import os
 import pkgutil
+import posixpath
 import re
 import sys
 import tempfile
@@ -39,6 +40,7 @@ from typing import (
     Generic,
     Iterable,
     Iterator,
+    List,
     Literal,
     Optional,
     Sequence,
@@ -54,8 +56,13 @@ from typing_extensions import ParamSpec
 
 import gradio
 from gradio.context import get_blocks_context
-from gradio.data_classes import BlocksConfigDict, FileData
-from gradio.exceptions import Error
+from gradio.data_classes import (
+    BlocksConfigDict,
+    DeveloperPath,
+    FileData,
+    UserProvidedPath,
+)
+from gradio.exceptions import Error, InvalidPathError
 
 if TYPE_CHECKING:  # Only import for type checking (is False at runtime).
     from gradio.blocks import BlockContext, Blocks
@@ -1053,24 +1060,10 @@ def tex2svg(formula, *_args):
 
 
 def abspath(path: str | Path) -> Path:
     """Returns absolute path of a str or Path path, but does not resolve symlinks."""
-    path = Path(path)
-
-    if path.is_absolute():
-        return path
-
-    # recursively check if there is a symlink within the path
-    is_symlink = path.is_symlink() or any(
-        parent.is_symlink() for parent in path.parents
-    )
-
-    if is_symlink or path == path.resolve():  # in case path couldn't be resolved
-        return Path.cwd() / path
-    else:
-        return path.resolve()
+    return Path(os.path.abspath(str(path)))
 
 
-def is_in_or_equal(path_1: str | Path, path_2: str | Path):
+def is_in_or_equal(path_1: str | Path, path_2: str | Path) -> bool:
     """
     True if path_1 is a descendant (i.e. located within) path_2 or if the paths are the
     same, returns False otherwise.
@@ -1087,7 +1080,6 @@ def is_in_or_equal(path_1: str | Path, path_2: str | Path):
         return ".." not in str(relative_path)
     except ValueError:
         return False
-    return True
 
 
 @document()
@@ -1463,3 +1455,23 @@ class UnhashableKeyDict(MutableMapping):
 
     def as_list(self):
         return [v for _, v in self.data]
+
+
+def safe_join(directory: DeveloperPath, path: UserProvidedPath) -> str:
+    """Safely path to a base directory to avoid escaping the base directory.
+    Borrowed from: werkzeug.security.safe_join"""
+    _os_alt_seps: List[str] = [
+        sep for sep in [os.path.sep, os.path.altsep] if sep is not None and sep != "/"
+    ]
+
+    filename = posixpath.normpath(path)
+    fullpath = os.path.join(directory, filename)
+    if (
+        any(sep in filename for sep in _os_alt_seps)
+        or os.path.isabs(filename)
+        or filename == ".."
+        or filename.startswith("../")
+    ):
+        raise InvalidPathError()
+
+    return fullpath

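A short demonstration of the semantics introduced above, assuming a gradio checkout that contains this commit (the base path is hypothetical); the `is_in_or_equal` examples mirror the assertions in the test suite further down:

```python
from gradio.data_classes import DeveloperPath, UserProvidedPath
from gradio.exceptions import InvalidPathError
from gradio.utils import is_in_or_equal, safe_join

base = DeveloperPath("/srv/app/templates")

print(safe_join(base, UserProvidedPath("css/site.css")))
# -> /srv/app/templates/css/site.css

try:
    safe_join(base, UserProvidedPath("../../etc/passwd"))
except InvalidPathError:
    print("rejected: traversal outside the base directory")

print(is_in_or_equal("/safe_dir/subdir/file.txt", "/safe_dir/"))  # True
print(is_in_or_equal("/safe_dir/subdir/../../unsafe_file.txt", "/safe_dir/"))  # False
```
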
@@ -1,7 +1,10 @@
 from pathlib import Path
 
+import pytest
+
 import gradio as gr
 from gradio.components.file_explorer import FileExplorerData
+from gradio.exceptions import InvalidPathError
 
 
 class TestFileExplorer:
@@ -61,3 +64,9 @@ class TestFileExplorer:
             {"name": "file2.txt", "type": "file", "valid": True},
         ]
         assert tree == answer
+
+    def test_file_explorer_prevents_path_traversal(self, tmpdir):
+        file_explorer = gr.FileExplorer(glob="*.txt", root_dir=Path(tmpdir))
+
+        with pytest.raises(InvalidPathError):
+            file_explorer.preprocess(FileExplorerData(root=[["../file.txt"]]))

@@ -20,7 +20,7 @@ appnope==0.1.4
     # via ipython
 asyncio==3.4.3
     # via -r requirements.in
-attrs==21.4.0
+attrs==23.1.0
     # via
     #   jsonschema
     #   pytest
@@ -106,6 +106,7 @@ huggingface-hub==0.21.4
     #   gradio-client
     #   tokenizers
     #   transformers
+hypothesis==6.108.9
 idna==3.3
     # via
     #   anyio

@@ -5,6 +5,7 @@ import os
 import tempfile
 import time
 from contextlib import asynccontextmanager, closing
+from pathlib import Path
 from typing import Dict
 from unittest.mock import patch
 
@@ -1364,3 +1365,45 @@ def test_docs_url():
         assert r.status_code == 200
     finally:
         demo.close()
+
+
+def test_file_access():
+    with gr.Blocks() as demo:
+        gr.Markdown("Test")
+
+    allowed_dir = (Path(tempfile.gettempdir()) / "test_file_access_dir").resolve()
+    allowed_dir.mkdir(parents=True, exist_ok=True)
+    allowed_file = Path(allowed_dir / "allowed.txt")
+    allowed_file.touch()
+
+    not_allowed_file = Path(tempfile.gettempdir()) / "not_allowed.txt"
+    not_allowed_file.touch()
+
+    app, _, _ = demo.launch(
+        prevent_thread_lock=True,
+        blocked_paths=["test/test_files"],
+        allowed_paths=[str(allowed_dir)],
+    )
+    test_client = TestClient(app)
+    try:
+        with test_client:
+            r = test_client.get(f"/file={allowed_dir}/allowed.txt")
+            assert r.status_code == 200
+            r = test_client.get(f"/file={allowed_dir}/../not_allowed.txt")
+            assert r.status_code == 403
+            r = test_client.get("/file=//test/test_files/cheetah1.jpg")
+            assert r.status_code == 403
+            r = test_client.get("/file=test/test_files/cheetah1.jpg")
+            assert r.status_code == 403
+            r = test_client.get("/file=//test/test_files/cheetah1.jpg")
+            assert r.status_code == 403
+            tmp = Path(tempfile.gettempdir()) / "upload_test.txt"
+            tmp.write_text("Hello")
+            with open(str(tmp), "rb") as f:
+                files = {"files": ("..", f)}
+                response = test_client.post("/upload", files=files)
+                assert response.status_code == 400
+    finally:
+        demo.close()
+        not_allowed_file.unlink()
+        allowed_file.unlink()

@@ -9,6 +9,8 @@ from unittest.mock import MagicMock, patch
 
 import numpy as np
 import pytest
+from hypothesis import given, settings
+from hypothesis import strategies as st
 from typing_extensions import Literal
 
 from gradio import EventData, Request
@@ -369,6 +371,69 @@ def test_is_in_or_equal():
     assert not is_in_or_equal("/safe_dir/subdir/../../unsafe_file.txt", "/safe_dir/")
 
 
+def create_path_string():
+    return st.lists(
+        st.one_of(
+            st.text(
+                alphabet="abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-",
+                min_size=1,
+            ),
+            st.just(".."),
+            st.just("."),
+        ),
+        min_size=1,
+        max_size=10,  # Limit depth to avoid excessively long paths
+    ).map(lambda x: os.path.join(*x))
+
+
+def my_check(path_1, path_2):
+    try:
+        path_1 = Path(path_1).resolve()
+        path_2 = Path(path_2).resolve()
+        _ = path_1.relative_to(path_2)
+        return True
+    except ValueError:
+        return False
+
+
+@settings(derandomize=os.getenv("CI") is not None)
+@given(
+    path_1=create_path_string(),
+    path_2=create_path_string(),
+)
+def test_is_in_or_equal_fuzzer(path_1, path_2):
+    try:
+        # Convert to absolute paths
+        abs_path_1 = abspath(path_1)
+        abs_path_2 = abspath(path_2)
+        result = is_in_or_equal(abs_path_1, abs_path_2)
+        assert result == my_check(abs_path_1, abs_path_2)
+
+    except Exception as e:
+        pytest.fail(f"Exception raised: {e}")
+
+
+# Additional test for known edge cases
+@pytest.mark.parametrize(
+    "path_1,path_2,expected",
+    [
+        ("/AAA/a/../a", "/AAA", True),
+        ("//AA/a", "/tmp", False),
+        ("/AAA/..", "/AAA", False),
+        ("/a/b/c", "/d/e/f", False),
+        (".", "..", True),
+        ("..", ".", False),
+        ("/a/b/./c", "/a/b", True),
+        ("/a/b/../c", "/a", True),
+        ("/a/b/c", "/a/b/c/../d", False),
+        ("/", "/a", False),
+        ("/a", "/", True),
+    ],
+)
+def test_is_in_or_equal_edge_cases(path_1, path_2, expected):
+    assert is_in_or_equal(path_1, path_2) == expected
+
+
 @pytest.mark.parametrize(
     "path_or_url, extension",
     [

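On the fuzzer above: `derandomize=True` makes Hypothesis replay a deterministic example sequence instead of drawing fresh random ones, which is why it is keyed off the `CI` environment variable. To get a feel for what inputs the strategy produces, it can be sampled interactively (exploratory use only; `.example()` is not meant for use inside tests):

```python
import os
from hypothesis import strategies as st

segments = st.one_of(
    st.text(alphabet="abcdefghijklmnopqrstuvwxyz0123456789_-", min_size=1),
    st.just(".."),
    st.just("."),
)
paths = st.lists(segments, min_size=1, max_size=10).map(lambda x: os.path.join(*x))

# Draw a few concrete examples to see the shape of generated inputs,
# e.g. '../a/_b/..' or 'x/./y'.
for _ in range(5):
    print(paths.example())
```
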
@@ -25,7 +25,7 @@ We check to see which source files have changed and run the necessary checks. A
 
 - **Python checks** - whenever Python source, dependencies or config change.
 - **Javascript checks** - whenever JavaScript source, dependencies or config change.
-- **functional and visual checks** - whenever any sopurce of config changes (most of the time).
+- **functional and visual checks** - whenever any source of config changes (most of the time).
 - **repo hygiene checks** - always.
 
 Checks almost always run when the CI config has changed.
@@ -40,21 +40,22 @@ All tests have a name of something like `test-<type>-<os>-<stability-level>`. `o
 
 This is a simple breakdown of our current quality checks:
 
-| Language   | Check           | operating system | Workflow file            | Notes                                        |
-| ---------- | --------------- | ---------------- | ------------------------ | -------------------------------------------- |
-| Python     | Linting         | linux            | `test-python.yml`        |                                              |
-| Python     | Formatting      | linux            | `test-python.yml`        |                                              |
-| Python     | Type-checking   | linux            | `test-python.yml`        |                                              |
-| Python     | Unit tests      | linux            | `test-python.yml`        |                                              |
-| Python     | Unit tests      | windows          | `test-python.yml`        |                                              |
-| JavaScript | Linting         | linux            | `test-js.yml`            |                                              |
-| JavaScript | Formatting      | linux            | `test-js.yml`            |                                              |
-| JavaScript | Type-checking   | linux            | `test-js.yml`            |                                              |
-| JavaScript | Unit tests      | linux            | `test-js.yml`            |                                              |
-| n/a        | Functional      | linux            | `test-functional/yml`    |                                              |
-| n/a        | Visual          | linux            | `deploy+test-visual/yml` |                                              |
-| n/a        | Large files     | linux            | `test-hygiene.yml`       | Checks that all files are below 5 MB         |
-| n/a        | Notebooks match | linux            | `test-hygiene.yml`       | Ensures that notebooks and demos are in sync |
+| Language   | Check           | operating system | Workflow file              | Notes                                        |
+| ---------- | --------------- | ---------------- | -------------------------- | -------------------------------------------- |
+| Python     | Linting         | linux            | `test-python.yml`          |                                              |
+| Python     | Formatting      | linux            | `test-python.yml`          |                                              |
+| Python     | Type-checking   | linux            | `test-python.yml`          |                                              |
+| Python     | Unit tests      | linux            | `test-python.yml`          |                                              |
+| Python     | Unit tests      | windows          | `test-python.yml`          |                                              |
+| JavaScript | Linting         | linux            | `test-js.yml`              |                                              |
+| JavaScript | Formatting      | linux            | `test-js.yml`              |                                              |
+| JavaScript | Type-checking   | linux            | `test-js.yml`              |                                              |
+| JavaScript | Unit tests      | linux            | `test-js.yml`              |                                              |
+| n/a        | Functional      | linux            | `test-functional.yml`      |                                              |
+| n/a        | Functional Lite | linux            | `test-functional-lite.yml` |                                              |
+| n/a        | Visual          | linux            | `deploy+test-visual/yml`   |                                              |
+| n/a        | Large files     | linux            | `test-hygiene.yml`         | Checks that all files are below 5 MB         |
+| n/a        | Notebooks match | linux            | `test-hygiene.yml`         | Ensures that notebooks and demos are in sync |
 
 One important thing to note is that we split 'flaky' and 'non-flaky' Python unit/integration tests out.
 These tests are flaky because of network requests that they make. They are typically fine, but anything that can cause a red check in PRs makes us less trustworthy of our CI and confidence is the goal!
@@ -369,7 +370,11 @@ For the reasons described above, we chose to use `workflow_run` _heavily_ for th
 - This event runs in the context of main, it doesn't offer any of the conveniences that `push` and `pull_request` events give you, and it knows very little about the workflow run event that triggered it. It _does not_ inherit the triggering workflow's context. This is a huge problem.
 - This workflow kind of runs in the void. It is run in the context of the default branch and so maintains references to that branch, however, it isn't really 'attached' to a commit or ref in any meaningful way and the status of the run (the 'check') is not added to any commits anywhere.
 
-Both of these problems were eventually solve by using the GitHub API in combination with the information we get from the workflow event's context. Getting the commit reference of the pull request that triggered the workflow is the main challenge, when we have that, creating statuses on commits is trivial.
+Both of these problems were eventually solved by using the GitHub API in combination with the information we get from the workflow event's context. Getting the commit reference of the pull request that triggered the workflow is the main challenge; when we have that, creating statuses on commits is trivial.
+
+In addition to this we actually have a fresh security problem when we start running workflows in the context of the default branch. These kinds of runs are 'privileged' and have full access to all secrets; while we have never intentionally exposed any secrets to user code, it is possible, using some rather esoteric approaches, to get at them. With this in mind we have to be careful that we do not run user code in these privileged workflows.
+
+Examples of user code can obviously be scripts that live in the contributed branch that we directly call, but also anything that can run some kind of hook or executes code indirectly. For example, the vite config that is used to build the frontend will execute any code in the `vite.config.js` file upon importing it. Python builds can execute various build hooks or plugins, package install can run pre or postinstall hooks, and so on. There are many examples of this.
 
 ##### What branch am I even in?
 
@@ -430,3 +435,9 @@ To solve this particular problem we _always_ trigger our workflows but don't alw
 - If it does run, the workflow does its thing and then updates the commit status to `success` or `failure` depending on the outcome.
 
 We use a composite action to colocate the change detection logic and reuse that across workflows. We use a custom JavaScript action to create the commit statuses, again for easier reuse.
+
+##### A note on security
+
+We have a few security concerns as mentioned above. The solution to this, for the most part, is to only check out and run user code in unprivileged workflows. Practically speaking, this means that we should only run user code in workflows that are triggered by a `pull_request` event. For certain tasks we actually need to build a user's code in order to do something privileged, so we build in `pull_request` and save the artifacts, which are later reused in the `workflow_run` workflow. In these workflows we do not check out any code at all in most cases; we only check out the artifacts we saved in the `pull_request` workflow. The one exception to this is the visual tests, which require the git history in order to correctly figure out what has changed.
+
+As a further hardening step, all repository secrets are created inside a GitHub environment and the default `GITHUB_TOKEN` is set to read-only permissions. This means that any workflow that requires secrets to run has to opt into them by setting the correct environment. This achieves two things: making a job privileged becomes an intentional step rather than a default, and workflows only have access to secrets that they need to run, minimising damage if one workflow becomes vulnerable.

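On the "creating statuses on commits is trivial" point above: once the commit SHA is recovered, it is a single call to the public GitHub REST API (`POST /repos/{owner}/{repo}/statuses/{sha}`). A minimal sketch; the SHA and token values are placeholders, and the context string reuses a check name from this commit:

```python
import json
import urllib.request

OWNER, REPO = "gradio-app", "gradio"
SHA = "<commit sha recovered from the triggering PR>"  # placeholder
TOKEN = "<token with repo:status scope>"               # placeholder

payload = {
    "state": "success",  # one of: error, failure, pending, success
    "context": "functional-lite / functional-test-lite (pull_request)",
    "description": "All tests passed",
}
req = urllib.request.Request(
    f"https://api.github.com/repos/{OWNER}/{REPO}/statuses/{SHA}",
    data=json.dumps(payload).encode(),
    headers={
        "Authorization": f"Bearer {TOKEN}",
        "Accept": "application/vnd.github+json",
    },
    method="POST",
)
# urllib.request.urlopen(req)  # requires a real SHA and token to execute
```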