Compare commits

83 Commits
| SHA1 |
|---|
| 59dd6814f8 |
| f7abf3db1b |
| cf1d2148ac |
| b5f2bd9b0e |
| ba2670e99c |
| 6ffc4d9756 |
| 595d2c76ac |
| d9796e9b1e |
| 404c5f9f9e |
| a937e08ef0 |
| ef4f058a6c |
| 69c5dde9bf |
| 945885d501 |
| 9d0b54c90d |
| 2e5c288fea |
| f32ef20b74 |
| e2eefaac55 |
| e1cfbf0fd9 |
| 08c5689441 |
| 8dbda247d6 |
| 71a631237d |
| e22ff3828b |
| b1b12e004e |
| 0ba57d4701 |
| 54ca6a6178 |
| 7dd4a78cf2 |
| 52ff49512a |
| 5a48b94089 |
| ba1c73d947 |
| 4732b6bdfa |
| a6e78b70ab |
| bb1174afc5 |
| df8abe9cfd |
| c3bca97ee1 |
| c3b6fa1bba |
| 94d496afe1 |
| 7b7a572f9b |
| 1b8cb742f9 |
| 3492d180a8 |
| 021da38373 |
| ac784759d5 |
| 36eda2cd62 |
| 08a4b3013f |
| 1dd0332e8b |
| a90877ac31 |
| 8b7ea27a48 |
| 8df80e276b |
| 30572c972d |
| 53da4dd091 |
| 108a4a99c7 |
| 7c180376d6 |
| f39b8b32f7 |
| c543d19f8a |
| 80fca9aef7 |
| 5bb9aa0c2c |
| 83c746ee57 |
| aff6604636 |
| 2c80571a8a |
| d964b552af |
| 48f8b37b74 |
| 141be0028d |
| a140c47195 |
| 0c3a8392f2 |
| 16875b1f41 |
| b1f31f2eeb |
| d16b9e5a02 |
| 680484bdc8 |
| 05cd44b5dd |
| ba374139f4 |
| 72a745bfd5 |
| 3a6fac7d59 |
| 28ba8e53df |
| 9b26358e63 |
| e21521f45c |
| 30479765cb |
| 53a571ec6c |
| ad97cac313 |
| 1a352ddf55 |
| 5ba43decf2 |
| 8f06d035cb |
| b716f48c84 |
| 42b1e7143e |
| eba7821a6d |
							
								
								
									
.codeclimate.yml (24 changed lines, new file)
							| @@ -0,0 +1,24 @@ | ||||
| exclude_patterns: | ||||
|   - "sanic/__main__.py" | ||||
|   - "sanic/reloader_helpers.py" | ||||
|   - "sanic/simple.py" | ||||
|   - "sanic/utils.py" | ||||
|   - ".github/" | ||||
|   - "changelogs/" | ||||
|   - "docker/" | ||||
|   - "docs/" | ||||
|   - "examples/" | ||||
|   - "hack/" | ||||
|   - "scripts/" | ||||
|   - "tests/" | ||||
| checks: | ||||
|   argument-count: | ||||
|     enabled: false | ||||
|   file-lines: | ||||
|     config: | ||||
|       threshold: 1000 | ||||
|   method-count: | ||||
|     config: | ||||
|       threshold: 40 | ||||
|   complex-logic: | ||||
|     enabled: false | ||||
| @@ -1,7 +1,12 @@ | ||||
| [run] | ||||
| branch = True | ||||
| source = sanic | ||||
| omit = site-packages, sanic/utils.py, sanic/__main__.py | ||||
| omit = | ||||
|     site-packages | ||||
|     sanic/__main__.py | ||||
|     sanic/reloader_helpers.py | ||||
|     sanic/simple.py | ||||
|     sanic/utils.py | ||||
|  | ||||
| [html] | ||||
| directory = coverage | ||||
|   | ||||
							
								
								
									
.github/workflows/codeql-analysis.yml (37 changed lines)
							| @@ -1,22 +1,10 @@ | ||||
| # For most projects, this workflow file will not need changing; you simply need | ||||
| # to commit it to your repository. | ||||
| # | ||||
| # You may wish to alter this file to override the set of languages analyzed, | ||||
| # or to provide custom queries or build logic. | ||||
| # | ||||
| # ******** NOTE ******** | ||||
| # We have attempted to detect the languages in your repository. Please check | ||||
| # the `language` matrix defined below to confirm you have the correct set of | ||||
| # supported CodeQL languages. | ||||
| # | ||||
| name: "CodeQL" | ||||
|  | ||||
| on: | ||||
|   push: | ||||
|     branches: [ master ] | ||||
|     branches: [ main ] | ||||
|   pull_request: | ||||
|     # The branches below must be a subset of the branches above | ||||
|     branches: [ master ] | ||||
|     branches: [ main ] | ||||
|   schedule: | ||||
|     - cron: '25 16 * * 0' | ||||
|  | ||||
| @@ -29,39 +17,18 @@ jobs: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         language: [ 'python' ] | ||||
|         # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] | ||||
|         # Learn more: | ||||
|         # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed | ||||
|  | ||||
|     steps: | ||||
|     - name: Checkout repository | ||||
|       uses: actions/checkout@v2 | ||||
|  | ||||
|     # Initializes the CodeQL tools for scanning. | ||||
|     - name: Initialize CodeQL | ||||
|       uses: github/codeql-action/init@v1 | ||||
|       with: | ||||
|         languages: ${{ matrix.language }} | ||||
|         # If you wish to specify custom queries, you can do so here or in a config file. | ||||
|         # By default, queries listed here will override any specified in a config file. | ||||
|         # Prefix the list here with "+" to use these queries and those in the config file. | ||||
|         # queries: ./path/to/local/query, your-org/your-repo/queries@main | ||||
|  | ||||
|     # Autobuild attempts to build any compiled languages  (C/C++, C#, or Java). | ||||
|     # If this step fails, then you should remove it and run the build manually (see below) | ||||
|     - name: Autobuild | ||||
|       uses: github/codeql-action/autobuild@v1 | ||||
|  | ||||
|     # ℹ️ Command-line programs to run using the OS shell. | ||||
|     # 📚 https://git.io/JvXDl | ||||
|  | ||||
|     # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines | ||||
|     #    and modify them (or add more) to build your code if your project | ||||
|     #    uses a compiled language | ||||
|  | ||||
|     #- run: | | ||||
|     #   make bootstrap | ||||
|     #   make release | ||||
|  | ||||
|     - name: Perform CodeQL Analysis | ||||
|       uses: github/codeql-action/analyze@v1 | ||||
|   | ||||
							
								
								
									
.github/workflows/coverage.yml (40 changed lines, new file)
							| @@ -0,0 +1,40 @@ | ||||
| name: Coverage check | ||||
| # on: | ||||
| #   push: | ||||
| #     branches: | ||||
| #       - main | ||||
| #     tags: | ||||
| #       - "!*" # Do not execute on tags | ||||
| #     paths: | ||||
| #       - sanic/* | ||||
| #       - tests/* | ||||
| #   pull_request: | ||||
| #     paths: | ||||
| #       - "!*.MD" | ||||
| on: [push, pull_request] | ||||
| jobs: | ||||
|   test: | ||||
|     runs-on: ${{ matrix.os }} | ||||
|     strategy: | ||||
|       matrix: | ||||
|         python-version: [3.9] | ||||
|         os: [ubuntu-latest] | ||||
|       fail-fast: false | ||||
|  | ||||
|     steps: | ||||
|       - uses: actions/checkout@v2 | ||||
|  | ||||
|       - uses: actions/setup-python@v1 | ||||
|         with: | ||||
|           python-version: ${{ matrix.python-version }} | ||||
|  | ||||
|       - name: Install dependencies 🔨 | ||||
|         run: | | ||||
|           python -m pip install --upgrade pip | ||||
|           pip install tox | ||||
|       - uses: paambaati/codeclimate-action@v2.5.3 | ||||
|         if: always() | ||||
|         env: | ||||
|           CC_TEST_REPORTER_ID: ${{ secrets.CODECLIMATE }} | ||||
|         with: | ||||
|           coverageCommand: tox -e coverage | ||||
							
								
								
									
.github/workflows/on-demand.yml (39 changed lines, new file)
							| @@ -0,0 +1,39 @@ | ||||
| name: On Demand Task | ||||
| on: | ||||
|   workflow_dispatch: | ||||
|     inputs: | ||||
|       python-version: | ||||
|         description: 'Version of Python to use for running Test' | ||||
|         required: false | ||||
|         default: "3.8" | ||||
|       tox-env: | ||||
|         description: 'Test Environment to Run' | ||||
|         required: true | ||||
|         default: '' | ||||
|       os: | ||||
|         description: 'Operating System to Run Test on' | ||||
|         required: false | ||||
|         default: ubuntu-latest | ||||
| jobs: | ||||
|   onDemand: | ||||
|     name: tox-${{ matrix.config.tox-env }}-on-${{ matrix.os }} | ||||
|     runs-on: ${{ matrix.os }} | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         os: ["${{ github.event.inputs.os}}"] | ||||
|         config: | ||||
|           - { tox-env: "${{ github.event.inputs.tox-env }}", py-version: "${{ github.event.inputs.python-version }}"} | ||||
|     steps: | ||||
|       - name: Checkout Repository | ||||
|         uses: actions/checkout@v2 | ||||
|  | ||||
|       - name: Run tests | ||||
|         uses: harshanarayana/custom-actions@main | ||||
|         with: | ||||
|           python-version: ${{ matrix.config.py-version }} | ||||
|           test-infra-tool: tox | ||||
|           test-infra-version: latest | ||||
|           action: tests | ||||
|           test-additional-args: "-e=${{ matrix.config.tox-env }}" | ||||
|           experimental-ignore-error: "yes" | ||||
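The on-demand workflow above only runs when dispatched manually with the inputs it defines. As a rough, hypothetical illustration (not part of this change), it could be triggered from a script through GitHub's workflow-dispatch REST endpoint; the repository path, branch, token handling, and the third-party `requests` dependency below are assumptions for the sketch.

```python
# Hypothetical helper: dispatch the on-demand workflow defined above via the
# GitHub REST API. Assumes a personal access token with "workflow" scope in
# the GITHUB_TOKEN environment variable; owner/repo and values are examples.
import os

import requests

API = "https://api.github.com/repos/sanic-org/sanic/actions/workflows/on-demand.yml/dispatches"


def run_on_demand(tox_env: str, python_version: str = "3.8", runner: str = "ubuntu-latest") -> None:
    resp = requests.post(
        API,
        headers={
            "Accept": "application/vnd.github+json",
            "Authorization": f"token {os.environ['GITHUB_TOKEN']}",
        },
        json={
            "ref": "main",  # branch that contains the workflow file
            "inputs": {
                "tox-env": tox_env,                # required input
                "python-version": python_version,  # optional, defaults to "3.8"
                "os": runner,                      # optional, defaults to ubuntu-latest
            },
        },
        timeout=30,
    )
    resp.raise_for_status()  # GitHub returns 204 No Content on success


if __name__ == "__main__":
    run_on_demand("py39")
```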
							
								
								
									
.github/workflows/pr-bandit.yml (32 changed lines, new file)
							| @@ -0,0 +1,32 @@ | ||||
| name: Security Analysis | ||||
| on: | ||||
|   pull_request: | ||||
|     branches: | ||||
|       - main | ||||
|  | ||||
| jobs: | ||||
|   bandit: | ||||
|     name: type-check-${{ matrix.config.python-version }} | ||||
|     runs-on: ${{ matrix.os }} | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         os: [ubuntu-latest] | ||||
|         config: | ||||
|           - { python-version: 3.7, tox-env: security} | ||||
|           - { python-version: 3.8, tox-env: security} | ||||
|           - { python-version: 3.9, tox-env: security} | ||||
|     steps: | ||||
|       - name: Checkout the repository | ||||
|         uses: actions/checkout@v2 | ||||
|         id: checkout-branch | ||||
|  | ||||
|       - name: Run Linter Checks | ||||
|         id: linter-check | ||||
|         uses: harshanarayana/custom-actions@main | ||||
|         with: | ||||
|           python-version: ${{ matrix.config.python-version }} | ||||
|           test-infra-tool: tox | ||||
|           test-infra-version: latest | ||||
|           action: tests | ||||
|           test-additional-args: "-e=${{ matrix.config.tox-env }}" | ||||
							
								
								
									
.github/workflows/pr-docs.yml (29 changed lines, new file)
							| @@ -0,0 +1,29 @@ | ||||
| name: Document Linter | ||||
| on: | ||||
|   pull_request: | ||||
|     branches: | ||||
|       - main | ||||
|  | ||||
| jobs: | ||||
|   docsLinter: | ||||
|     name: Lint Documentation | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       matrix: | ||||
|         config: | ||||
|           - {python-version: "3.8", tox-env: "docs"} | ||||
|       fail-fast: false | ||||
|  | ||||
|  | ||||
|     steps: | ||||
|       - name: Checkout repository | ||||
|         uses: actions/checkout@v2 | ||||
|  | ||||
|       - name: Run Document Linter | ||||
|         uses: harshanarayana/custom-actions@main | ||||
|         with: | ||||
|           python-version: ${{ matrix.config.python-version }} | ||||
|           test-infra-tool: tox | ||||
|           test-infra-version: latest | ||||
|           action: tests | ||||
|           test-additional-args: "-e=${{ matrix.config.tox-env }}" | ||||
							
								
								
									
.github/workflows/pr-linter.yml (30 changed lines, new file)
							| @@ -0,0 +1,30 @@ | ||||
| name: Linter Checks | ||||
| on: | ||||
|   pull_request: | ||||
|     branches: | ||||
|       - main | ||||
|  | ||||
| jobs: | ||||
|   linter: | ||||
|     name: lint | ||||
|     runs-on: ${{ matrix.os }} | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         os: [ubuntu-latest] | ||||
|         config: | ||||
|           - { python-version: 3.8, tox-env: lint} | ||||
|     steps: | ||||
|       - name: Checkout the repository | ||||
|         uses: actions/checkout@v2 | ||||
|         id: checkout-branch | ||||
|  | ||||
|       - name: Run Linter Checks | ||||
|         id: linter-check | ||||
|         uses: harshanarayana/custom-actions@main | ||||
|         with: | ||||
|           python-version: ${{ matrix.config.python-version }} | ||||
|           test-infra-tool: tox | ||||
|           test-infra-version: latest | ||||
|           action: tests | ||||
|           test-additional-args: "-e=${{ matrix.config.tox-env }}" | ||||
							
								
								
									
.github/workflows/pr-python-pypy.yml (41 changed lines, new file)
							| @@ -0,0 +1,41 @@ | ||||
| name: Python PyPy Tests | ||||
| on: | ||||
|   workflow_dispatch: | ||||
|     inputs: | ||||
|       tox-env: | ||||
|         description: "Tox Env to run on the PyPy Infra" | ||||
|         required: false | ||||
|         default: "pypy37" | ||||
|       pypy-version: | ||||
|         description: "Version of PyPy to use" | ||||
|         required: false | ||||
|         default: "pypy-3.7" | ||||
| jobs: | ||||
|   testPyPy: | ||||
|     name: ut-${{ matrix.config.tox-env }}-${{ matrix.os }} | ||||
|     runs-on: ${{ matrix.os }} | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         # os: [ubuntu-latest, macos-latest] | ||||
|         os: [ubuntu-latest] | ||||
|         config: | ||||
|           - { | ||||
|               python-version: "${{ github.event.inputs.pypy-version }}", | ||||
|               tox-env: "${{ github.event.inputs.tox-env }}", | ||||
|             } | ||||
|     steps: | ||||
|       - name: Checkout the Repository | ||||
|         uses: actions/checkout@v2 | ||||
|         id: checkout-branch | ||||
|  | ||||
|       - name: Run Unit Tests | ||||
|         uses: harshanarayana/custom-actions@main | ||||
|         with: | ||||
|           python-version: ${{ matrix.config.python-version }} | ||||
|           test-infra-tool: tox | ||||
|           test-infra-version: latest | ||||
|           action: tests | ||||
|           test-additional-args: "-e=${{ matrix.config.tox-env }}" | ||||
|           experimental-ignore-error: "true" | ||||
|           command-timeout: "600000" | ||||
							
								
								
									
.github/workflows/pr-python37.yml (38 changed lines, new file)
							| @@ -0,0 +1,38 @@ | ||||
| name: Python 3.7 Tests | ||||
| on: | ||||
|   pull_request: | ||||
|     branches: | ||||
|       - main | ||||
|   push: | ||||
|     branches: | ||||
|       - main | ||||
|     paths: | ||||
|       - sanic/* | ||||
|       - tests/* | ||||
|  | ||||
| jobs: | ||||
|   testPy37: | ||||
|     name: ut-${{ matrix.config.tox-env }}-${{ matrix.os }} | ||||
|     runs-on: ${{ matrix.os }} | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         #         os: [ubuntu-latest, macos-latest] | ||||
|         os: [ubuntu-latest] | ||||
|         config: | ||||
|           - { python-version: 3.7, tox-env: py37 } | ||||
|           - { python-version: 3.7, tox-env: py37-no-ext } | ||||
|     steps: | ||||
|       - name: Checkout the Repository | ||||
|         uses: actions/checkout@v2 | ||||
|         id: checkout-branch | ||||
|  | ||||
|       - name: Run Unit Tests | ||||
|         uses: harshanarayana/custom-actions@main | ||||
|         with: | ||||
|           python-version: ${{ matrix.config.python-version }} | ||||
|           test-infra-tool: tox | ||||
|           test-infra-version: latest | ||||
|           action: tests | ||||
|           test-additional-args: "-e=${{ matrix.config.tox-env }}" | ||||
|           test-failure-retry: "3" | ||||
							
								
								
									
.github/workflows/pr-python38.yml (38 changed lines, new file)
							| @@ -0,0 +1,38 @@ | ||||
| name: Python 3.8 Tests | ||||
| on: | ||||
|   pull_request: | ||||
|     branches: | ||||
|       - main | ||||
|   push: | ||||
|     branches: | ||||
|       - main | ||||
|     paths: | ||||
|       - sanic/* | ||||
|       - tests/* | ||||
|  | ||||
| jobs: | ||||
|   testPy38: | ||||
|     name: ut-${{ matrix.config.tox-env }}-${{ matrix.os }} | ||||
|     runs-on: ${{ matrix.os }} | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         # os: [ubuntu-latest, macos-latest] | ||||
|         os: [ubuntu-latest] | ||||
|         config: | ||||
|           - { python-version: 3.8, tox-env: py38 } | ||||
|           - { python-version: 3.8, tox-env: py38-no-ext } | ||||
|     steps: | ||||
|       - name: Checkout the Repository | ||||
|         uses: actions/checkout@v2 | ||||
|         id: checkout-branch | ||||
|  | ||||
|       - name: Run Unit Tests | ||||
|         uses: harshanarayana/custom-actions@main | ||||
|         with: | ||||
|           python-version: ${{ matrix.config.python-version }} | ||||
|           test-infra-tool: tox | ||||
|           test-infra-version: latest | ||||
|           action: tests | ||||
|           test-additional-args: "-e=${{ matrix.config.tox-env }}" | ||||
|           test-failure-retry: "3" | ||||
							
								
								
									
.github/workflows/pr-python39.yml (50 changed lines, new file)
							| @@ -0,0 +1,50 @@ | ||||
| name: Python 3.9 Tests | ||||
| on: | ||||
|   pull_request: | ||||
|     branches: | ||||
|       - main | ||||
|   push: | ||||
|     branches: | ||||
|       - main | ||||
|     paths: | ||||
|       - sanic/* | ||||
|       - tests/* | ||||
|  | ||||
| jobs: | ||||
|   testPy39: | ||||
|     name: ut-${{ matrix.config.tox-env }}-${{ matrix.os }} | ||||
|     runs-on: ${{ matrix.os }} | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         # os: [ubuntu-latest, macos-latest] | ||||
|         os: [ubuntu-latest] | ||||
|         config: | ||||
|           - { | ||||
|               python-version: 3.9, | ||||
|               tox-env: py39, | ||||
|               ignore-error-flake: "false", | ||||
|               command-timeout: "0", | ||||
|             } | ||||
|           - { | ||||
|               python-version: 3.9, | ||||
|               tox-env: py39-no-ext, | ||||
|               ignore-error-flake: "true", | ||||
|               command-timeout: "600000", | ||||
|             } | ||||
|     steps: | ||||
|       - name: Checkout the Repository | ||||
|         uses: actions/checkout@v2 | ||||
|         id: checkout-branch | ||||
|  | ||||
|       - name: Run Unit Tests | ||||
|         uses: harshanarayana/custom-actions@main | ||||
|         with: | ||||
|           python-version: ${{ matrix.config.python-version }} | ||||
|           test-infra-tool: tox | ||||
|           test-infra-version: latest | ||||
|           action: tests | ||||
|           test-additional-args: "-e=${{ matrix.config.tox-env }},-vv=''" | ||||
|           experimental-ignore-error: "${{ matrix.config.ignore-error-flake }}" | ||||
|           command-timeout: "${{ matrix.config.command-timeout }}" | ||||
|           test-failure-retry: "3" | ||||
							
								
								
									
.github/workflows/pr-type-check.yml (32 changed lines, new file)
							| @@ -0,0 +1,32 @@ | ||||
| name: Typing Checks | ||||
| on: | ||||
|   pull_request: | ||||
|     branches: | ||||
|       - main | ||||
|  | ||||
| jobs: | ||||
|   typeChecking: | ||||
|     name: type-check-${{ matrix.config.python-version }} | ||||
|     runs-on: ${{ matrix.os }} | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         os: [ubuntu-latest] | ||||
|         config: | ||||
|           - { python-version: 3.7, tox-env: type-checking} | ||||
|           - { python-version: 3.8, tox-env: type-checking} | ||||
|           - { python-version: 3.9, tox-env: type-checking} | ||||
|     steps: | ||||
|       - name: Checkout the repository | ||||
|         uses: actions/checkout@v2 | ||||
|         id: checkout-branch | ||||
|  | ||||
|       - name: Run Linter Checks | ||||
|         id: linter-check | ||||
|         uses: harshanarayana/custom-actions@main | ||||
|         with: | ||||
|           python-version: ${{ matrix.config.python-version }} | ||||
|           test-infra-tool: tox | ||||
|           test-infra-version: latest | ||||
|           action: tests | ||||
|           test-additional-args: "-e=${{ matrix.config.tox-env }}" | ||||
							
								
								
									
.github/workflows/pr-windows.yml (34 changed lines, new file)
							| @@ -0,0 +1,34 @@ | ||||
| name: Run Unit Tests on Windows | ||||
| on: | ||||
|   pull_request: | ||||
|     branches: | ||||
|       - main | ||||
|  | ||||
| jobs: | ||||
|   testsOnWindows: | ||||
|     name: ut-${{ matrix.config.tox-env }} | ||||
|     runs-on: windows-latest | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         config: | ||||
|           - { python-version: 3.7, tox-env: py37-no-ext } | ||||
|           - { python-version: 3.8, tox-env: py38-no-ext } | ||||
|           - { python-version: 3.9, tox-env: py39-no-ext } | ||||
|           - { python-version: pypy-3.7, tox-env: pypy37-no-ext } | ||||
|  | ||||
|     steps: | ||||
|       - name: Checkout Repository | ||||
|         uses: actions/checkout@v2 | ||||
|  | ||||
|       - name: Run Unit Tests | ||||
|         uses: ahopkins/custom-actions@pip-extra-args | ||||
|         with: | ||||
|           python-version: ${{ matrix.config.python-version }} | ||||
|           test-infra-tool: tox | ||||
|           test-infra-version: latest | ||||
|           action: tests | ||||
|           test-additional-args: "-e=${{ matrix.config.tox-env }}" | ||||
|           experimental-ignore-error: "true" | ||||
|           command-timeout: "600000" | ||||
|           pip-extra-args: "--user" | ||||
							
								
								
									
.github/workflows/publish-images.yml (48 changed lines, new file)
							| @@ -0,0 +1,48 @@ | ||||
| name: Publish Docker Images | ||||
| on: | ||||
|   workflow_run: | ||||
|     workflows: | ||||
|       - 'Publish Artifacts' | ||||
|     types: | ||||
|       - completed | ||||
|  | ||||
| jobs: | ||||
|   publishDockerImages: | ||||
|     name: Docker Image Build [${{ matrix.python-version }}] | ||||
|     runs-on: ubuntu-latest | ||||
|  | ||||
|     strategy: | ||||
|       fail-fast: true | ||||
|       matrix: | ||||
|         python-version: ["3.7", "3.8", "3.9"] | ||||
|  | ||||
|     steps: | ||||
|       - name: Checkout repository | ||||
|         uses: actions/checkout@v2 | ||||
|  | ||||
|       - name: Build Latest Base images for ${{ matrix.python-version }} | ||||
|         uses: harshanarayana/custom-actions@main | ||||
|         with: | ||||
|           docker-image-base-name: sanicframework/sanic-build | ||||
|           ignore-python-setup: 'true' | ||||
|           dockerfile-base-dir: './docker' | ||||
|           action: 'image-publish' | ||||
|           docker-image-tag: "${{ matrix.python-version }}" | ||||
|           docker-file-suffix: "base" | ||||
|           docker-build-args: "PYTHON_VERSION=${{ matrix.python-version }}" | ||||
|           registry-auth-user: ${{ secrets.DOCKER_ACCESS_USER }} | ||||
|           registry-auth-password: ${{ secrets.DOCKER_ACCESS_TOKEN }} | ||||
|           push-images: 'true' | ||||
|  | ||||
|       - name: Publish Sanic Docker Image for ${{ matrix.python-version }} | ||||
|         uses: harshanarayana/custom-actions@main | ||||
|         with: | ||||
|           docker-image-base-name: sanicframework/sanic | ||||
|           ignore-python-setup: 'true' | ||||
|           dockerfile-base-dir: './docker' | ||||
|           action: 'image-publish' | ||||
|           docker-build-args: "BASE_IMAGE_TAG=${{ matrix.python-version }}" | ||||
|           docker-image-prefix: "${{ matrix.python-version }}" | ||||
|           registry-auth-user: ${{ secrets.DOCKER_ACCESS_USER }} | ||||
|           registry-auth-password: ${{ secrets.DOCKER_ACCESS_TOKEN }} | ||||
|           push-images: 'true' | ||||
							
								
								
									
.github/workflows/publish-package.yml (28 changed lines, new file)
							| @@ -0,0 +1,28 @@ | ||||
| name: Publish Artifacts | ||||
| on: | ||||
|   release: | ||||
|     types: [created] | ||||
|  | ||||
| jobs: | ||||
|   publishPythonPackage: | ||||
|     name: Publishing Sanic Release Artifacts | ||||
|     runs-on: ubuntu-latest | ||||
|  | ||||
|     strategy: | ||||
|       fail-fast: true | ||||
|       matrix: | ||||
|         python-version: ["3.8"] | ||||
|  | ||||
|     steps: | ||||
|       - name: Checkout Repository | ||||
|         uses: actions/checkout@v2 | ||||
|  | ||||
|       - name: Publish Python Package | ||||
|         uses: harshanarayana/custom-actions@main | ||||
|         with: | ||||
|           python-version: ${{ matrix.python-version }} | ||||
|           package-infra-name: "twine" | ||||
|           pypi-user: __token__ | ||||
|           pypi-access-token: ${{ secrets.PYPI_ACCESS_TOKEN }} | ||||
|           action: "package-publish" | ||||
|           pypi-verify-metadata: "true" | ||||
							
								
								
									
.gitignore (4 changed lines)
							| @@ -6,6 +6,7 @@ | ||||
| .coverage | ||||
| .coverage.* | ||||
| coverage | ||||
| coverage.xml | ||||
| .tox | ||||
| settings.py | ||||
| .idea/* | ||||
| @@ -18,3 +19,6 @@ build/* | ||||
| .DS_Store | ||||
| dist/* | ||||
| pip-wheel-metadata/ | ||||
| .pytest_cache/* | ||||
| .venv/* | ||||
| .vscode/* | ||||
|   | ||||
							
								
								
									
.travis.yml (94 changed lines, deleted)
							| @@ -1,94 +0,0 @@ | ||||
| sudo: false | ||||
| language: python | ||||
| cache: | ||||
|   directories: | ||||
|     - $HOME/.cache/pip | ||||
| matrix: | ||||
|   include: | ||||
|     - env: TOX_ENV=py37 | ||||
|       python: 3.7 | ||||
|       dist: xenial | ||||
|       sudo: true | ||||
|       name: "Python 3.7 with Extensions" | ||||
|     - env: TOX_ENV=py37-no-ext | ||||
|       python: 3.7 | ||||
|       dist: xenial | ||||
|       sudo: true | ||||
|       name: "Python 3.7 without Extensions" | ||||
|     - env: TOX_ENV=py38 | ||||
|       python: 3.8 | ||||
|       dist: xenial | ||||
|       sudo: true | ||||
|       name: "Python 3.8 with Extensions" | ||||
|     - env: TOX_ENV=py38-no-ext | ||||
|       python: 3.8 | ||||
|       dist: xenial | ||||
|       sudo: true | ||||
|       name: "Python 3.8 without Extensions" | ||||
|     - env: TOX_ENV=py39 | ||||
|       python: 3.9 | ||||
|       dist: bionic | ||||
|       sudo: true | ||||
|       name: "Python 3.9 with Extensions" | ||||
|     - env: TOX_ENV=py39-no-ext | ||||
|       python: 3.9 | ||||
|       dist: bionic | ||||
|       sudo: true | ||||
|       name: "Python 3.9 without Extensions" | ||||
|     - env: TOX_ENV=type-checking | ||||
|       python: 3.7 | ||||
|       name: "Python 3.7 Type checks" | ||||
|     - env: TOX_ENV=type-checking | ||||
|       python: 3.8 | ||||
|       name: "Python 3.8 Type checks" | ||||
|     - env: TOX_ENV=type-checking | ||||
|       python: 3.9 | ||||
|       dist: bionic | ||||
|       name: "Python 3.9 Type checks" | ||||
|     - env: TOX_ENV=security | ||||
|       python: 3.7 | ||||
|       dist: xenial | ||||
|       sudo: true | ||||
|       name: "Python 3.7 Bandit security scan" | ||||
|     - env: TOX_ENV=security | ||||
|       python: 3.8 | ||||
|       dist: xenial | ||||
|       sudo: true | ||||
|       name: "Python 3.8 Bandit security scan" | ||||
|     - env: TOX_ENV=security | ||||
|       python: 3.9 | ||||
|       dist: bionic | ||||
|       sudo: true | ||||
|       name: "Python 3.9 Bandit security scan" | ||||
|     - env: TOX_ENV=docs | ||||
|       python: 3.7 | ||||
|       dist: xenial | ||||
|       sudo: true | ||||
|       name: "Python 3.7 Documentation tests" | ||||
|     - env: TOX_ENV=pyNightly | ||||
|       python: "nightly" | ||||
|       name: "Python nightly with Extensions" | ||||
|     - env: TOX_ENV=pyNightly-no-ext | ||||
|       python: "nightly" | ||||
|       name: "Python nightly without Extensions" | ||||
|   allow_failures: | ||||
|     - env: TOX_ENV=pyNightly | ||||
|       python: "nightly" | ||||
|       name: "Python nightly with Extensions" | ||||
|     - env: TOX_ENV=pyNightly-no-ext | ||||
|       python: "nightly" | ||||
|       name: "Python nightly without Extensions" | ||||
| install: | ||||
|   - pip install -U tox | ||||
|   - pip install codecov | ||||
| script: travis_retry tox -e $TOX_ENV | ||||
| after_success: | ||||
|   - codecov | ||||
| deploy: | ||||
|   provider: pypi | ||||
|   user: brewmaster | ||||
|   password: | ||||
|     secure: "GoawLwmbtJOgKB6AJ0ZSYUUnNwIoonseHBxaAUH3zu79TS/Afrq+yB3lsVaMSG0CbyDgN4FrfD1phT1NzbvZ1VcLIOTDtCrmpQ1kLDw+zwgF40ab8sp8fPkKVHHHfCCs1mjltHIpxQa5lZTJcAs6Bpi/lbUWWwYxFzSV8pHw4W4hY09EHUd2o+evLTSVxaploetSt725DJUYKICUr2eAtCC11IDnIW4CzBJEx6krVV3uhzfTJW0Ls17x0c6sdZ9icMnV/G9xO/eQH6RIHe4xcrWJ6cmLDNKoGAkJp+BKr1CeVVg7Jw/MzPjvZKL2/ki6Beue1y6GUIy7lOS7jPVaOEhJ23b0zQwFcLMZw+Tt+E3v6QfHk+B/WBBBnM3zUZed9UI+QyW8+lqLLt39sQX0FO0P3eaDh8qTXtUuon2jTyFMMAMTFRTNpJmpAzuBH9yeMmDeALPTh0HphI+BkoUl5q1QbWFYjjnZMH2CatApxpLybt9A7rwm//PbOG0TSI93GEKNQ4w5DYryKTfwHzRBptNSephJSuxZYEfJsmUtas5es1D7Fe0PkyjxNNSU+eO+8wsTlitLUsJO4k0jAgy+cEKdU7YJ3J0GZVXocSkrNnUfd2hQPcJ3UtEJx3hLqqr8EM7EZBAasc1yGHh36NFetclzFY24YPih0G1+XurhTys=" | ||||
|   on: | ||||
|     tags: true | ||||
|   distributions: "sdist bdist_wheel" | ||||
							
								
								
									
CHANGELOG.rst (111 changed lines)
							| @@ -1,3 +1,114 @@ | ||||
| .. note:: | ||||
|  | ||||
|   From v21.9, CHANGELOG files are maintained in ``./docs/sanic/releases`` | ||||
|  | ||||
| Version 21.6.1 | ||||
| -------------- | ||||
|  | ||||
| Bugfixes | ||||
| ******** | ||||
|  | ||||
|   * `#2178 <https://github.com/sanic-org/sanic/pull/2178>`_ | ||||
|     Update sanic-routing to allow for better splitting of complex URI templates | ||||
|   * `#2183 <https://github.com/sanic-org/sanic/pull/2183>`_ | ||||
|     Proper handling of chunked request bodies to resolve phantom 503 in logs | ||||
|   * `#2181 <https://github.com/sanic-org/sanic/pull/2181>`_ | ||||
|     Resolve regression in exception logging | ||||
|   * `#2201 <https://github.com/sanic-org/sanic/pull/2201>`_ | ||||
|     Cleanup request info in pipelined requests | ||||
|  | ||||
| Version 21.6.0 | ||||
| -------------- | ||||
|  | ||||
| Features | ||||
| ******** | ||||
|  | ||||
|   * `#2094 <https://github.com/sanic-org/sanic/pull/2094>`_ | ||||
|     Add ``response.eof()`` method for closing a stream in a handler | ||||
|   * `#2097 <https://github.com/sanic-org/sanic/pull/2097>`_ | ||||
|     Allow case-insensitive HTTP Upgrade header | ||||
|   * `#2104 <https://github.com/sanic-org/sanic/pull/2104>`_ | ||||
|     Explicit usage of CIMultiDict getters | ||||
|   * `#2109 <https://github.com/sanic-org/sanic/pull/2109>`_ | ||||
|     Consistent use of error loggers | ||||
|   * `#2114 <https://github.com/sanic-org/sanic/pull/2114>`_ | ||||
|     New ``client_ip`` access of connection info instance | ||||
|   * `#2119 <https://github.com/sanic-org/sanic/pull/2119>`_ | ||||
|     Alternatate classes on instantiation for ``Config`` and ``Sanic.ctx`` | ||||
|   * `#2133 <https://github.com/sanic-org/sanic/pull/2133>`_ | ||||
|     Implement new version of AST router | ||||
|  | ||||
|       * Proper differentiation between ``alpha`` and ``string`` param types | ||||
|       * Adds a ``slug`` param type, example: ``<foo:slug>`` | ||||
|       * Deprecates ``<foo:string>`` in favor of ``<foo:str>`` | ||||
|       * Deprecates ``<foo:number>`` in favor of ``<foo:float>`` | ||||
|       * Adds a ``route.uri`` accessor | ||||
|   * `#2136 <https://github.com/sanic-org/sanic/pull/2136>`_ | ||||
|     CLI improvements with new optional params | ||||
|   * `#2137 <https://github.com/sanic-org/sanic/pull/2137>`_ | ||||
|     Add ``version_prefix`` to URL builders | ||||
|   * `#2140 <https://github.com/sanic-org/sanic/pull/2140>`_ | ||||
|     Event autoregistration with ``EVENT_AUTOREGISTER`` | ||||
|   * `#2146 <https://github.com/sanic-org/sanic/pull/2146>`_, `#2147 <https://github.com/sanic-org/sanic/pull/2147>`_ | ||||
|     Require stricter names on  ``Sanic()`` and ``Blueprint()`` | ||||
|   * `#2150 <https://github.com/sanic-org/sanic/pull/2150>`_ | ||||
|     Infinitely reusable and nestable ``Blueprint`` and ``BlueprintGroup`` | ||||
|   * `#2154 <https://github.com/sanic-org/sanic/pull/2154>`_ | ||||
|     Upgrade ``websockets`` dependency to min version | ||||
|   * `#2155 <https://github.com/sanic-org/sanic/pull/2155>`_ | ||||
|     Allow for maximum header sizes to be increased: ``REQUEST_MAX_HEADER_SIZE`` | ||||
|   * `#2157 <https://github.com/sanic-org/sanic/pull/2157>`_ | ||||
|     Allow app factory pattern in CLI | ||||
|   * `#2165 <https://github.com/sanic-org/sanic/pull/2165>`_ | ||||
|     Change HTTP methods to enums | ||||
|   * `#2167 <https://github.com/sanic-org/sanic/pull/2167>`_ | ||||
|     Allow auto-reloading on additional directories | ||||
|   * `#2168 <https://github.com/sanic-org/sanic/pull/2168>`_ | ||||
|     Add simple HTTP server to CLI | ||||
|   * `#2170 <https://github.com/sanic-org/sanic/pull/2170>`_ | ||||
|     Additional methods for attaching ``HTTPMethodView`` | ||||
|  | ||||
| Bugfixes | ||||
| ******** | ||||
|  | ||||
|   * `#2091 <https://github.com/sanic-org/sanic/pull/2091>`_ | ||||
|     Fix ``UserWarning`` in ASGI mode for missing ``__slots__`` | ||||
|   * `#2099 <https://github.com/sanic-org/sanic/pull/2099>`_ | ||||
|     Fix static request handler logging exception on 404 | ||||
|   * `#2110 <https://github.com/sanic-org/sanic/pull/2110>`_ | ||||
|     Fix request.args.pop removes parameters inconsistently | ||||
|   * `#2107 <https://github.com/sanic-org/sanic/pull/2107>`_ | ||||
|     Fix type hinting for load_env | ||||
|   * `#2127 <https://github.com/sanic-org/sanic/pull/2127>`_ | ||||
|     Make sure ASGI ws subprotocols is a list | ||||
|   * `#2128 <https://github.com/sanic-org/sanic/pull/2128>`_ | ||||
|     Fix issue where Blueprint exception handlers do not consistently route to proper handler | ||||
|  | ||||
|  | ||||
| Deprecations and Removals | ||||
| ************************* | ||||
|  | ||||
|   * `#2156 <https://github.com/sanic-org/sanic/pull/2156>`_ | ||||
|     Remove config value ``REQUEST_BUFFER_QUEUE_SIZE`` | ||||
|   * `#2170 <https://github.com/sanic-org/sanic/pull/2170>`_ | ||||
|     ``CompositionView`` deprecated and marked for removal in 21.12 | ||||
|   * `#2172 <https://github.com/sanic-org/sanic/pull/2170>`_ | ||||
|     Deprecate StreamingHTTPResponse | ||||
|  | ||||
| Developer infrastructure | ||||
| ************************ | ||||
|  | ||||
|   * `#2149 <https://github.com/sanic-org/sanic/pull/2149>`_ | ||||
|     Remove Travis CI in favor of GitHub Actions | ||||
|  | ||||
| Improved Documentation | ||||
| ********************** | ||||
|  | ||||
|   * `#2164 <https://github.com/sanic-org/sanic/pull/2164>`_ | ||||
|     Fix typo in documentation | ||||
|   * `#2100 <https://github.com/sanic-org/sanic/pull/2100>`_ | ||||
|     Remove documentation for non-existent arguments | ||||
|  | ||||
| Version 21.3.2 | ||||
| -------------- | ||||
|  | ||||
|   | ||||
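Two of the 21.6 changelog entries above are easier to read with a concrete example: the new `slug` path parameter type (#2133) and `response.eof()` for closing a stream (#2094). This is a minimal sketch assuming Sanic 21.6 or newer, not code from this diff; the app name, routes, and payloads are invented.

```python
# Minimal sketch of two 21.6 features listed above (assumes sanic>=21.6).
from sanic import Sanic
from sanic.response import text

app = Sanic("ChangelogDemo")  # stricter instance naming applies since #2146/#2147


@app.get("/posts/<slug:slug>")  # <foo:slug> is the new slug param type
async def post_handler(request, slug: str):
    return text(f"post: {slug}")


@app.get("/stream")
async def stream_handler(request):
    # Stream a response and close it explicitly with the new eof() method.
    response = await request.respond(content_type="text/plain")
    await response.send("chunk 1\n")
    await response.send("chunk 2\n")
    await response.eof()  # new in 21.6 (#2094): close the stream in the handler
```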
| @@ -87,7 +87,7 @@ Permform ``flake8``\ , ``black`` and ``isort`` checks. | ||||
|    tox -e lint | ||||
|  | ||||
| Run type annotation checks | ||||
| --------------- | ||||
| -------------------------- | ||||
|  | ||||
| ``tox`` environment -> ``[testenv:type-checking]`` | ||||
|  | ||||
|   | ||||
							
								
								
									
Makefile (6 changed lines)
							| @@ -49,6 +49,9 @@ test: clean | ||||
| test-coverage: clean | ||||
| 	python setup.py test --pytest-args="--cov sanic --cov-report term --cov-append " | ||||
|  | ||||
| view-coverage: | ||||
| 	sanic ./coverage --simple | ||||
|  | ||||
| install: | ||||
| 	python setup.py install | ||||
|  | ||||
| @@ -85,8 +88,7 @@ docs-test: docs-clean | ||||
| 	cd docs && make dummy | ||||
|  | ||||
| docs-serve: | ||||
| 	# python -m http.server --directory=./docs/_build/html 9999 | ||||
| 	sphinx-autobuild docs docs/_build/html --port 9999 --watch ./sanic | ||||
| 	sphinx-autobuild docs docs/_build/html --port 9999 --watch ./ | ||||
|  | ||||
| changelog: | ||||
| 	python scripts/changelog.py | ||||
|   | ||||
							
								
								
									
README.rst (24 changed lines)
							| @@ -11,7 +11,7 @@ Sanic | Build fast. Run fast. | ||||
|     :stub-columns: 1 | ||||
|  | ||||
|     * - Build | ||||
|       - | |Build Status| |AppVeyor Build Status| |Codecov| | ||||
|       - | |Py39Test| |Py38Test| |Py37Test| |Codecov| | ||||
|     * - Docs | ||||
|       - | |UserGuide| |Documentation| | ||||
|     * - Package | ||||
| @@ -29,10 +29,12 @@ Sanic | Build fast. Run fast. | ||||
|    :target: https://discord.gg/FARQzAEMAA | ||||
| .. |Codecov| image:: https://codecov.io/gh/sanic-org/sanic/branch/master/graph/badge.svg | ||||
|     :target: https://codecov.io/gh/sanic-org/sanic | ||||
| .. |Build Status| image:: https://travis-ci.com/sanic-org/sanic.svg?branch=master | ||||
|    :target: https://travis-ci.com/sanic-org/sanic | ||||
| .. |AppVeyor Build Status| image:: https://ci.appveyor.com/api/projects/status/d8pt3ids0ynexi8c/branch/master?svg=true | ||||
|    :target: https://ci.appveyor.com/project/sanic-org/sanic | ||||
| .. |Py39Test| image:: https://github.com/sanic-org/sanic/actions/workflows/pr-python39.yml/badge.svg?branch=main | ||||
|    :target: https://github.com/sanic-org/sanic/actions/workflows/pr-python39.yml | ||||
| .. |Py38Test| image:: https://github.com/sanic-org/sanic/actions/workflows/pr-python38.yml/badge.svg?branch=main | ||||
|    :target: https://github.com/sanic-org/sanic/actions/workflows/pr-python38.yml | ||||
| .. |Py37Test| image:: https://github.com/sanic-org/sanic/actions/workflows/pr-python37.yml/badge.svg?branch=main | ||||
|    :target: https://github.com/sanic-org/sanic/actions/workflows/pr-python37.yml | ||||
| .. |Documentation| image:: https://readthedocs.org/projects/sanic/badge/?version=latest | ||||
|    :target: http://sanic.readthedocs.io/en/latest/?badge=latest | ||||
| .. |PyPI| image:: https://img.shields.io/pypi/v/sanic.svg | ||||
| @@ -75,17 +77,7 @@ The goal of the project is to provide a simple way to get up and running a highl | ||||
| Sponsor | ||||
| ------- | ||||
|  | ||||
| |Try CodeStream| | ||||
|  | ||||
| .. |Try CodeStream| image:: https://alt-images.codestream.com/codestream_logo_sanicorg.png | ||||
|    :target: https://codestream.com/?utm_source=github&utm_campaign=sanicorg&utm_medium=banner | ||||
|    :alt: Try CodeStream | ||||
|  | ||||
| Manage pull requests and conduct code reviews in your IDE with full source-tree context. Comment on any line, not just the diffs. Use jump-to-definition, your favorite keybindings, and code intelligence with more of your workflow. | ||||
|  | ||||
| `Learn More <https://codestream.com/?utm_source=github&utm_campaign=sanicorg&utm_medium=banner>`_ | ||||
|  | ||||
| Thank you to our sponsor. Check out `open collective <https://opencollective.com/sanic-org>`_ to learn more about helping to fund Sanic. | ||||
| Check out `open collective <https://opencollective.com/sanic-org>`_ to learn more about helping to fund Sanic. | ||||
|  | ||||
| Installation | ||||
| ------------ | ||||
|   | ||||
							
								
								
									
codecov.yml (14 changed lines, deleted)
							| @@ -1,14 +0,0 @@ | ||||
| codecov: | ||||
|   require_ci_to_pass: no | ||||
| coverage: | ||||
|   precision: 3 | ||||
|   round: nearest | ||||
|   status: | ||||
|     project: | ||||
|       default: | ||||
|         target: auto | ||||
|         threshold: 0.5% | ||||
|     patch: | ||||
|       default: | ||||
|         target: auto | ||||
|         threshold: 0.75% | ||||
| @@ -1,28 +1,9 @@ | ||||
| FROM alpine:3.7 | ||||
| ARG BASE_IMAGE_TAG | ||||
|  | ||||
| RUN apk add --no-cache --update \ | ||||
|         curl \ | ||||
|         bash \ | ||||
|         build-base \ | ||||
|         ca-certificates \ | ||||
|         git \ | ||||
|         bzip2-dev \ | ||||
|         linux-headers \ | ||||
|         ncurses-dev \ | ||||
|         openssl \ | ||||
|         openssl-dev \ | ||||
|         readline-dev \ | ||||
|         sqlite-dev | ||||
| FROM sanicframework/sanic-build:${BASE_IMAGE_TAG} | ||||
|  | ||||
| RUN apk update | ||||
| RUN update-ca-certificates | ||||
| RUN rm -rf /var/cache/apk/* | ||||
|  | ||||
| ENV PYENV_ROOT="/root/.pyenv" | ||||
| ENV PATH="$PYENV_ROOT/bin:$PATH" | ||||
|  | ||||
| ADD . /app | ||||
| WORKDIR /app | ||||
|  | ||||
| RUN /app/docker/bin/install_python.sh 3.5.4 3.6.4 | ||||
|  | ||||
| ENTRYPOINT ["./docker/bin/entrypoint.sh"] | ||||
| RUN pip install sanic | ||||
| RUN apk del build-base | ||||
|   | ||||
							
								
								
									
docker/Dockerfile-base (9 changed lines, new file)
							| @@ -0,0 +1,9 @@ | ||||
| ARG PYTHON_VERSION | ||||
|  | ||||
| FROM python:${PYTHON_VERSION}-alpine | ||||
| RUN apk update | ||||
| RUN apk add --no-cache --update build-base \ | ||||
|         ca-certificates \ | ||||
|         openssl | ||||
| RUN update-ca-certificates | ||||
| RUN rm -rf /var/cache/apk/* | ||||
| @@ -1,11 +0,0 @@ | ||||
| #!/bin/bash | ||||
| set -e | ||||
|  | ||||
| eval "$(pyenv init -)" | ||||
| eval "$(pyenv virtualenv-init -)" | ||||
| source /root/.pyenv/completions/pyenv.bash | ||||
|  | ||||
| pip install tox | ||||
|  | ||||
| exec $@ | ||||
|  | ||||
| @@ -1,17 +0,0 @@ | ||||
| #!/bin/bash | ||||
| set -e | ||||
|  | ||||
| export CFLAGS='-O2' | ||||
| export EXTRA_CFLAGS="-DTHREAD_STACK_SIZE=0x100000" | ||||
|  | ||||
| curl -L https://raw.githubusercontent.com/pyenv/pyenv-installer/master/bin/pyenv-installer | bash | ||||
| eval "$(pyenv init -)" | ||||
|  | ||||
| for ver in $@ | ||||
| do | ||||
|     pyenv install $ver | ||||
| done | ||||
|  | ||||
| pyenv global $@ | ||||
| pip install --upgrade pip | ||||
| pyenv rehash | ||||
							
								
								
									
docs/conf.py (20 changed lines)
							| @@ -10,10 +10,8 @@ | ||||
| import os | ||||
| import sys | ||||
|  | ||||
| # Add support for auto-doc | ||||
| import recommonmark | ||||
|  | ||||
| from recommonmark.transform import AutoStructify | ||||
| # Add support for auto-doc | ||||
|  | ||||
|  | ||||
| # Ensure that sanic is present in the path, to allow sphinx-apidoc to | ||||
| @@ -26,7 +24,7 @@ import sanic | ||||
|  | ||||
| # -- General configuration ------------------------------------------------ | ||||
|  | ||||
| extensions = ["sphinx.ext.autodoc", "recommonmark"] | ||||
| extensions = ["sphinx.ext.autodoc", "m2r2"] | ||||
|  | ||||
| templates_path = ["_templates"] | ||||
|  | ||||
| @@ -162,20 +160,6 @@ autodoc_default_options = { | ||||
|     "member-order": "groupwise", | ||||
| } | ||||
|  | ||||
|  | ||||
| # app setup hook | ||||
| def setup(app): | ||||
|     app.add_config_value( | ||||
|         "recommonmark_config", | ||||
|         { | ||||
|             "enable_eval_rst": True, | ||||
|             "enable_auto_doc_ref": False, | ||||
|         }, | ||||
|         True, | ||||
|     ) | ||||
|     app.add_transform(AutoStructify) | ||||
|  | ||||
|  | ||||
| html_theme_options = { | ||||
|     "style_external_links": False, | ||||
| } | ||||
|   | ||||
							
								
								
									
docs/sanic/api/app.rst (17 changed lines, new file)
							| @@ -0,0 +1,17 @@ | ||||
| Application | ||||
| =========== | ||||
|  | ||||
| sanic.app | ||||
| --------- | ||||
|  | ||||
| .. automodule:: sanic.app | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|     :inherited-members: | ||||
|  | ||||
| sanic.config | ||||
| ------------ | ||||
|  | ||||
| .. automodule:: sanic.config | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
							
								
								
									
docs/sanic/api/blueprints.rst (17 changed lines, new file)
							| @@ -0,0 +1,17 @@ | ||||
| Blueprints | ||||
| ========== | ||||
|  | ||||
| sanic.blueprints | ||||
| ---------------- | ||||
|  | ||||
| .. automodule:: sanic.blueprints | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|     :inherited-members: | ||||
|  | ||||
| sanic.blueprint_group | ||||
| --------------------- | ||||
|  | ||||
| .. automodule:: sanic.blueprint_group | ||||
|     :members: | ||||
|     :special-members: | ||||
							
								
								
									
docs/sanic/api/core.rst (47 changed lines, new file)
							| @@ -0,0 +1,47 @@ | ||||
| Core | ||||
| ==== | ||||
|  | ||||
| sanic.cookies | ||||
| ------------- | ||||
|  | ||||
| .. automodule:: sanic.cookies | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
|  | ||||
| sanic.handlers | ||||
| -------------- | ||||
|  | ||||
| .. automodule:: sanic.handlers | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
|  | ||||
| sanic.request | ||||
| ------------- | ||||
|  | ||||
| .. automodule:: sanic.request | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
| sanic.response | ||||
| -------------- | ||||
|  | ||||
| .. automodule:: sanic.response | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
|  | ||||
| sanic.views | ||||
| ----------- | ||||
|  | ||||
| .. automodule:: sanic.views | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
| sanic.websocket | ||||
| --------------- | ||||
|  | ||||
| .. automodule:: sanic.websocket | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
							
								
								
									
docs/sanic/api/exceptions.rst (16 changed lines, new file)
							| @@ -0,0 +1,16 @@ | ||||
| Exceptions | ||||
| ========== | ||||
|  | ||||
| sanic.errorpages | ||||
| ---------------- | ||||
|  | ||||
| .. automodule:: sanic.errorpages | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
| sanic.exceptions | ||||
| ---------------- | ||||
|  | ||||
| .. automodule:: sanic.exceptions | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
							
								
								
									
docs/sanic/api/router.rst (18 changed lines, new file)
							| @@ -0,0 +1,18 @@ | ||||
| Routing | ||||
| ======= | ||||
|  | ||||
| sanic_routing models | ||||
| -------------------- | ||||
|  | ||||
| .. autoclass:: sanic_routing.route::Route | ||||
|     :members: | ||||
|  | ||||
| .. autoclass:: sanic_routing.group::RouteGroup | ||||
|     :members: | ||||
|  | ||||
| sanic.router | ||||
| ------------ | ||||
|  | ||||
| .. automodule:: sanic.router | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
							
								
								
									
docs/sanic/api/server.rst (25 changed lines, new file)
							| @@ -0,0 +1,25 @@ | ||||
| Sanic Server | ||||
| ============ | ||||
|  | ||||
| sanic.http | ||||
| ---------- | ||||
|  | ||||
| .. automodule:: sanic.http | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
|  | ||||
| sanic.server | ||||
| ------------ | ||||
|  | ||||
| .. automodule:: sanic.server | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
|  | ||||
| sanic.worker | ||||
| ------------ | ||||
|  | ||||
| .. automodule:: sanic.worker | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
							
								
								
									
docs/sanic/api/utility.rst (16 changed lines, new file)
							| @@ -0,0 +1,16 @@ | ||||
| Utility | ||||
| ======= | ||||
|  | ||||
| sanic.compat | ||||
| ------------ | ||||
|  | ||||
| .. automodule:: sanic.compat | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
| sanic.log | ||||
| --------- | ||||
|  | ||||
| .. automodule:: sanic.log | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
| @@ -1,132 +1,13 @@ | ||||
| 📑 API Reference | ||||
| ================ | ||||
|  | ||||
| sanic.app | ||||
| --------- | ||||
| .. toctree:: | ||||
|    :maxdepth: 2 | ||||
|  | ||||
| .. automodule:: sanic.app | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|     :inherited-members: | ||||
|  | ||||
| sanic.blueprints | ||||
| ---------------- | ||||
|  | ||||
| .. automodule:: sanic.blueprints | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|     :inherited-members: | ||||
|  | ||||
| sanic.blueprint_group | ||||
| --------------------- | ||||
|  | ||||
| .. automodule:: sanic.blueprint_group | ||||
|     :members: | ||||
|     :special-members: | ||||
|  | ||||
|  | ||||
| sanic.compat | ||||
| ------------ | ||||
|  | ||||
| .. automodule:: sanic.compat | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
| sanic.config | ||||
| ------------ | ||||
|  | ||||
| .. automodule:: sanic.config | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
| sanic.cookies | ||||
| ------------- | ||||
|  | ||||
| .. automodule:: sanic.cookies | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
| sanic.errorpages | ||||
| ---------------- | ||||
|  | ||||
| .. automodule:: sanic.errorpages | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
| sanic.exceptions | ||||
| ---------------- | ||||
|  | ||||
| .. automodule:: sanic.exceptions | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
| sanic.handlers | ||||
| -------------- | ||||
|  | ||||
| .. automodule:: sanic.handlers | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
| sanic.http | ||||
| ---------- | ||||
|  | ||||
| .. automodule:: sanic.http | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
| sanic.log | ||||
| --------- | ||||
|  | ||||
| .. automodule:: sanic.log | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
| sanic.request | ||||
| ------------- | ||||
|  | ||||
| .. automodule:: sanic.request | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
| sanic.response | ||||
| -------------- | ||||
|  | ||||
| .. automodule:: sanic.response | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
| sanic.router | ||||
| ------------ | ||||
|  | ||||
| .. automodule:: sanic.router | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
| sanic.server | ||||
| ------------ | ||||
|  | ||||
| .. automodule:: sanic.server | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
|  | ||||
| sanic.views | ||||
| ----------- | ||||
|  | ||||
| .. automodule:: sanic.views | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
| sanic.websocket | ||||
| --------------- | ||||
|  | ||||
| .. automodule:: sanic.websocket | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|  | ||||
| sanic.worker | ||||
| ------------ | ||||
|  | ||||
| .. automodule:: sanic.worker | ||||
|     :members: | ||||
|     :show-inheritance: | ||||
|    api/app | ||||
|    api/blueprints | ||||
|    api/core | ||||
|    api/exceptions | ||||
|    api/router | ||||
|    api/server | ||||
|    api/utility | ||||
|   | ||||
| @@ -1,4 +1,6 @@ | ||||
| 📜 Changelog | ||||
| ============ | ||||
|  | ||||
| .. mdinclude:: ./releases/21.9.md | ||||
|  | ||||
| .. include:: ../../CHANGELOG.rst | ||||
|   | ||||
							
								
								
									
docs/sanic/releases/21.9.md (new file)
							| @@ -0,0 +1,40 @@ | ||||
| ## Version 21.9 | ||||
|  | ||||
| ### Features | ||||
| - [#2158](https://github.com/sanic-org/sanic/pull/2158), [#2248](https://github.com/sanic-org/sanic/pull/2248) Complete overhaul of I/O to websockets | ||||
| - [#2160](https://github.com/sanic-org/sanic/pull/2160) Add 17 new signals to the server and request lifecycles | ||||
| - [#2162](https://github.com/sanic-org/sanic/pull/2162) Smarter `auto` fallback formatting upon exception | ||||
| - [#2184](https://github.com/sanic-org/sanic/pull/2184) Introduce implementation for copying a Blueprint | ||||
| - [#2200](https://github.com/sanic-org/sanic/pull/2200) Accept header parsing | ||||
| - [#2207](https://github.com/sanic-org/sanic/pull/2207) Log remote address if available | ||||
| - [#2209](https://github.com/sanic-org/sanic/pull/2209) Add convenience methods to BP groups | ||||
| - [#2216](https://github.com/sanic-org/sanic/pull/2216) Add default messages to SanicExceptions | ||||
| - [#2225](https://github.com/sanic-org/sanic/pull/2225) Type annotation convenience for annotated handlers with path parameters | ||||
| - [#2236](https://github.com/sanic-org/sanic/pull/2236) Allow falsey (but not None) responses from route handlers | ||||
| - [#2238](https://github.com/sanic-org/sanic/pull/2238) Add `exception` decorator to Blueprint Groups (see the sketch after these notes) | ||||
| - [#2244](https://github.com/sanic-org/sanic/pull/2244) Explicit static directive for serving a file or directory (e.g. `static(..., resource_type="file")`) | ||||
| - [#2245](https://github.com/sanic-org/sanic/pull/2245) Close HTTP loop when connection task cancelled | ||||
|  | ||||
| ### Bugfixes | ||||
| - [#2188](https://github.com/sanic-org/sanic/pull/2188) Fix the handling of the end of a chunked request | ||||
| - [#2195](https://github.com/sanic-org/sanic/pull/2195) Resolve unexpected error handling on static requests | ||||
| - [#2208](https://github.com/sanic-org/sanic/pull/2208) Make blueprint-based exceptions attach and trigger in a more intuitive manner | ||||
| - [#2211](https://github.com/sanic-org/sanic/pull/2211) Fix exception handling in the ASGI app call | ||||
| - [#2213](https://github.com/sanic-org/sanic/pull/2213) Fix bug where websocket exceptions were not being logged | ||||
| - [#2231](https://github.com/sanic-org/sanic/pull/2231) Cleaner closing of tasks by using `abort()` in strategic places to avoid dangling sockets | ||||
| - [#2247](https://github.com/sanic-org/sanic/pull/2247) Fix logging of auto-reload status in debug mode | ||||
| - [#2246](https://github.com/sanic-org/sanic/pull/2246) Account for BP with exception handler but no routes | ||||
|  | ||||
| ### Developer infrastructure   | ||||
| - [#2194](https://github.com/sanic-org/sanic/pull/2194) HTTP unit tests with raw client | ||||
| - [#2199](https://github.com/sanic-org/sanic/pull/2199) Switch to codeclimate | ||||
| - [#2214](https://github.com/sanic-org/sanic/pull/2214) Try Reopening Windows Tests | ||||
| - [#2229](https://github.com/sanic-org/sanic/pull/2229) Refactor `HttpProtocol` into a base class | ||||
| - [#2230](https://github.com/sanic-org/sanic/pull/2230) Refactor `server.py` into multi-file module | ||||
|  | ||||
| ### Miscellaneous | ||||
| - [#2173](https://github.com/sanic-org/sanic/pull/2173) Remove Duplicated Dependencies and PEP 517 Support  | ||||
| - [#2193](https://github.com/sanic-org/sanic/pull/2193), [#2196](https://github.com/sanic-org/sanic/pull/2196), [#2217](https://github.com/sanic-org/sanic/pull/2217) Type annotation changes | ||||
|  | ||||
|  | ||||
|  | ||||
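The feature list above names several new 21.9 APIs. Below is a minimal sketch (not part of this changeset) combining two of them, the Blueprint Group `exception` decorator (#2238) and the explicit `resource_type` static directive (#2244); the blueprint, route, and file names are invented for illustration.

```python
from sanic import Blueprint, Sanic, text
from sanic.exceptions import NotFound

app = Sanic("Sketch21_9")

bp1 = Blueprint("bp1", url_prefix="/one")
bp2 = Blueprint("bp2", url_prefix="/two")
group = Blueprint.group(bp1, bp2, url_prefix="/api")


@bp1.get("/ping")
async def ping(request):
    return text("pong")


# New in 21.9: exception handlers can be attached at the group level
@group.exception(NotFound)
async def group_not_found(request, exception):
    return text("nothing under /api here", status=404)


# New in 21.9: force single-file serving instead of directory resolution
app.static("/report", "./static/report.html", resource_type="file")

app.blueprint(group)
```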
| @@ -1,29 +1,44 @@ | ||||
| from sanic import Sanic | ||||
| from sanic import response | ||||
| from signal import signal, SIGINT | ||||
| import asyncio | ||||
|  | ||||
| from signal import SIGINT, signal | ||||
|  | ||||
| import uvloop | ||||
|  | ||||
| from sanic import Sanic, response | ||||
| from sanic.server import AsyncioServer | ||||
|  | ||||
|  | ||||
| app = Sanic(__name__) | ||||
|  | ||||
| @app.listener('after_server_start') | ||||
|  | ||||
| @app.listener("after_server_start") | ||||
| async def after_start_test(app, loop): | ||||
|     print("Async Server Started!") | ||||
|  | ||||
|  | ||||
| @app.route("/") | ||||
| async def test(request): | ||||
|     return response.json({"answer": "42"}) | ||||
|  | ||||
|  | ||||
| asyncio.set_event_loop(uvloop.new_event_loop()) | ||||
| serv_coro = app.create_server(host="0.0.0.0", port=8000, return_asyncio_server=True) | ||||
| serv_coro = app.create_server( | ||||
|     host="0.0.0.0", port=8000, return_asyncio_server=True | ||||
| ) | ||||
| loop = asyncio.get_event_loop() | ||||
| serv_task = asyncio.ensure_future(serv_coro, loop=loop) | ||||
| signal(SIGINT, lambda s, f: loop.stop()) | ||||
| server = loop.run_until_complete(serv_task) | ||||
| server: AsyncioServer = loop.run_until_complete(serv_task)  # type: ignore | ||||
| server.startup() | ||||
|  | ||||
| # When using app.run(), this actually triggers before the serv_coro. | ||||
| # But, in this example, we are using the convenience method, even if it is | ||||
| # out of order. | ||||
| server.before_start() | ||||
| server.after_start() | ||||
| try: | ||||
|     loop.run_forever() | ||||
| except KeyboardInterrupt as e: | ||||
| except KeyboardInterrupt: | ||||
|     loop.stop() | ||||
| finally: | ||||
|     server.before_stop() | ||||
|   | ||||
| @@ -1,13 +1,14 @@ | ||||
| from sanic import Sanic | ||||
| from sanic.response import file | ||||
| from sanic.response import redirect | ||||
|  | ||||
| app = Sanic(__name__) | ||||
|  | ||||
|  | ||||
| @app.route('/') | ||||
| async def index(request): | ||||
|     return await file('websocket.html') | ||||
| app.static('index.html', "websocket.html") | ||||
|  | ||||
| @app.route('/') | ||||
| def index(request): | ||||
|     return redirect("index.html") | ||||
|  | ||||
| @app.websocket('/feed') | ||||
| async def feed(request, ws): | ||||
|   | ||||
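Separate from the truncated example file above, here is a hedged sketch of a `feed` handler written against the reworked websocket I/O in this release; the echo behaviour and the assumption that `recv()` returns `None` once the peer closes are illustrative, not shown in this diff.

```python
from sanic import Sanic

app = Sanic("WsSketch")


@app.websocket("/feed")
async def feed(request, ws):
    # Naive echo loop: read a frame from the client and send it straight back.
    while True:
        data = await ws.recv()
        if data is None:  # assumed close semantics, see lead-in above
            break
        await ws.send(data)
```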
							
								
								
									
hack/Dockerfile (new file)
							| @@ -0,0 +1,6 @@ | ||||
| FROM catthehacker/ubuntu:act-latest | ||||
| SHELL [ "/bin/bash", "-c" ] | ||||
| ENTRYPOINT [] | ||||
| RUN apt-get update | ||||
| RUN apt-get install gcc -y | ||||
| RUN apt-get install -y --no-install-recommends g++ | ||||
							
								
								
									
pyproject.toml (new file)
							| @@ -0,0 +1,3 @@ | ||||
| [build-system] | ||||
| requires = ["setuptools", "wheel"] | ||||
| build-backend = "setuptools.build_meta" | ||||
| @@ -1,6 +1,7 @@ | ||||
| from sanic.__version__ import __version__ | ||||
| from sanic.app import Sanic | ||||
| from sanic.blueprints import Blueprint | ||||
| from sanic.constants import HTTPMethod | ||||
| from sanic.request import Request | ||||
| from sanic.response import HTTPResponse, html, json, text | ||||
|  | ||||
| @@ -9,6 +10,7 @@ __all__ = ( | ||||
|     "__version__", | ||||
|     "Sanic", | ||||
|     "Blueprint", | ||||
|     "HTTPMethod", | ||||
|     "HTTPResponse", | ||||
|     "Request", | ||||
|     "html", | ||||
|   | ||||
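With `HTTPMethod` now exported from the package root, a hedged sketch of how it might be used follows; it assumes the enum members are string-valued (`GET`, `POST`, …), which this diff does not show.

```python
from sanic import HTTPMethod, Request, Sanic, text

app = Sanic("MethodsSketch")


@app.route("/resource", methods=[HTTPMethod.GET.value, HTTPMethod.POST.value])
async def resource(request: Request):
    # Assumes a str-valued enum, so members compare equal to "GET"/"POST".
    if request.method == HTTPMethod.GET:
        return text("read")
    return text("write")
```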
| @@ -1,21 +1,25 @@ | ||||
| import os | ||||
| import sys | ||||
|  | ||||
| from argparse import ArgumentParser, RawDescriptionHelpFormatter | ||||
| from argparse import ArgumentParser, RawTextHelpFormatter | ||||
| from importlib import import_module | ||||
| from pathlib import Path | ||||
| from typing import Any, Dict, Optional | ||||
|  | ||||
| from sanic_routing import __version__ as __routing_version__  # type: ignore | ||||
|  | ||||
| from sanic import __version__ | ||||
| from sanic.app import Sanic | ||||
| from sanic.config import BASE_LOGO | ||||
| from sanic.log import logger | ||||
| from sanic.log import error_logger | ||||
| from sanic.simple import create_simple_server | ||||
|  | ||||
|  | ||||
| class SanicArgumentParser(ArgumentParser): | ||||
|     def add_bool_arguments(self, *args, **kwargs): | ||||
|         group = self.add_mutually_exclusive_group() | ||||
|         group.add_argument(*args, action="store_true", **kwargs) | ||||
|         kwargs["help"] = "no " + kwargs["help"] | ||||
|         kwargs["help"] = f"no {kwargs['help']}\n " | ||||
|         group.add_argument( | ||||
|             "--no-" + args[0][2:], *args[1:], action="store_false", **kwargs | ||||
|         ) | ||||
| @@ -25,7 +29,30 @@ def main(): | ||||
|     parser = SanicArgumentParser( | ||||
|         prog="sanic", | ||||
|         description=BASE_LOGO, | ||||
|         formatter_class=RawDescriptionHelpFormatter, | ||||
|         formatter_class=lambda prog: RawTextHelpFormatter( | ||||
|             prog, max_help_position=33 | ||||
|         ), | ||||
|     ) | ||||
|     parser.add_argument( | ||||
|         "-v", | ||||
|         "--version", | ||||
|         action="version", | ||||
|         version=f"Sanic {__version__}; Routing {__routing_version__}", | ||||
|     ) | ||||
|     parser.add_argument( | ||||
|         "--factory", | ||||
|         action="store_true", | ||||
|         help=( | ||||
|             "Treat app as an application factory, " | ||||
|             "i.e. a () -> <Sanic app> callable" | ||||
|         ), | ||||
|     ) | ||||
|     parser.add_argument( | ||||
|         "-s", | ||||
|         "--simple", | ||||
|         dest="simple", | ||||
|         action="store_true", | ||||
|         help="Run Sanic as a Simple Server (module arg should be a path)\n ", | ||||
|     ) | ||||
|     parser.add_argument( | ||||
|         "-H", | ||||
| @@ -33,7 +60,7 @@ def main(): | ||||
|         dest="host", | ||||
|         type=str, | ||||
|         default="127.0.0.1", | ||||
|         help="host address [default 127.0.0.1]", | ||||
|         help="Host address [default 127.0.0.1]", | ||||
|     ) | ||||
|     parser.add_argument( | ||||
|         "-p", | ||||
| @@ -41,7 +68,7 @@ def main(): | ||||
|         dest="port", | ||||
|         type=int, | ||||
|         default=8000, | ||||
|         help="port to serve on [default 8000]", | ||||
|         help="Port to serve on [default 8000]", | ||||
|     ) | ||||
|     parser.add_argument( | ||||
|         "-u", | ||||
| @@ -49,13 +76,16 @@ def main(): | ||||
|         dest="unix", | ||||
|         type=str, | ||||
|         default="", | ||||
|         help="location of unix socket", | ||||
|         help="location of unix socket\n ", | ||||
|     ) | ||||
|     parser.add_argument( | ||||
|         "--cert", dest="cert", type=str, help="location of certificate for SSL" | ||||
|         "--cert", dest="cert", type=str, help="Location of certificate for SSL" | ||||
|     ) | ||||
|     parser.add_argument( | ||||
|         "--key", dest="key", type=str, help="location of keyfile for SSL." | ||||
|         "--key", dest="key", type=str, help="location of keyfile for SSL\n " | ||||
|     ) | ||||
|     parser.add_bool_arguments( | ||||
|         "--access-logs", dest="access_log", help="display access logs" | ||||
|     ) | ||||
|     parser.add_argument( | ||||
|         "-w", | ||||
| @@ -63,20 +93,31 @@ def main(): | ||||
|         dest="workers", | ||||
|         type=int, | ||||
|         default=1, | ||||
|         help="number of worker processes [default 1]", | ||||
|         help="number of worker processes [default 1]\n ", | ||||
|     ) | ||||
|     parser.add_argument("--debug", dest="debug", action="store_true") | ||||
|     parser.add_bool_arguments( | ||||
|         "--access-logs", dest="access_log", help="display access logs" | ||||
|     parser.add_argument("-d", "--debug", dest="debug", action="store_true") | ||||
|     parser.add_argument( | ||||
|         "-r", | ||||
|         "--reload", | ||||
|         "--auto-reload", | ||||
|         dest="auto_reload", | ||||
|         action="store_true", | ||||
|         help="Watch source directory for file changes and reload on changes", | ||||
|     ) | ||||
|     parser.add_argument( | ||||
|         "-v", | ||||
|         "--version", | ||||
|         action="version", | ||||
|         version=f"Sanic {__version__}", | ||||
|         "-R", | ||||
|         "--reload-dir", | ||||
|         dest="path", | ||||
|         action="append", | ||||
|         help="Extra directories to watch and reload on changes\n ", | ||||
|     ) | ||||
|     parser.add_argument( | ||||
|         "module", help="path to your Sanic app. Example: path.to.server:app" | ||||
|         "module", | ||||
|         help=( | ||||
|             "Path to your Sanic app. Example: path.to.server:app\n" | ||||
|             "If running a Simple Server, path to directory to serve. " | ||||
|             "Example: ./\n" | ||||
|         ), | ||||
|     ) | ||||
|     args = parser.parse_args() | ||||
|  | ||||
| @@ -85,47 +126,71 @@ def main(): | ||||
|         if module_path not in sys.path: | ||||
|             sys.path.append(module_path) | ||||
|  | ||||
|         if ":" in args.module: | ||||
|             module_name, app_name = args.module.rsplit(":", 1) | ||||
|         if args.simple: | ||||
|             path = Path(args.module) | ||||
|             app = create_simple_server(path) | ||||
|         else: | ||||
|             module_parts = args.module.split(".") | ||||
|             module_name = ".".join(module_parts[:-1]) | ||||
|             app_name = module_parts[-1] | ||||
|             delimiter = ":" if ":" in args.module else "." | ||||
|             module_name, app_name = args.module.rsplit(delimiter, 1) | ||||
|  | ||||
|             if app_name.endswith("()"): | ||||
|                 args.factory = True | ||||
|                 app_name = app_name[:-2] | ||||
|  | ||||
|             module = import_module(module_name) | ||||
|             app = getattr(module, app_name, None) | ||||
|         app_name = type(app).__name__ | ||||
|             if args.factory: | ||||
|                 app = app() | ||||
|  | ||||
|             app_type_name = type(app).__name__ | ||||
|  | ||||
|             if not isinstance(app, Sanic): | ||||
|                 raise ValueError( | ||||
|                 f"Module is not a Sanic app, it is a {app_name}.  " | ||||
|                     f"Module is not a Sanic app, it is a {app_type_name}.  " | ||||
|                     f"Perhaps you meant {args.module}.app?" | ||||
|                 ) | ||||
|         if args.cert is not None or args.key is not None: | ||||
|             ssl = { | ||||
|             ssl: Optional[Dict[str, Any]] = { | ||||
|                 "cert": args.cert, | ||||
|                 "key": args.key, | ||||
|             }  # type: Optional[Dict[str, Any]] | ||||
|             } | ||||
|         else: | ||||
|             ssl = None | ||||
|  | ||||
|         app.run( | ||||
|             host=args.host, | ||||
|             port=args.port, | ||||
|             unix=args.unix, | ||||
|             workers=args.workers, | ||||
|             debug=args.debug, | ||||
|             access_log=args.access_log, | ||||
|             ssl=ssl, | ||||
|         kwargs = { | ||||
|             "host": args.host, | ||||
|             "port": args.port, | ||||
|             "unix": args.unix, | ||||
|             "workers": args.workers, | ||||
|             "debug": args.debug, | ||||
|             "access_log": args.access_log, | ||||
|             "ssl": ssl, | ||||
|         } | ||||
|         if args.auto_reload: | ||||
|             kwargs["auto_reload"] = True | ||||
|  | ||||
|         if args.path: | ||||
|             if args.auto_reload or args.debug: | ||||
|                 kwargs["reload_dir"] = args.path | ||||
|             else: | ||||
|                 error_logger.warning( | ||||
|                     "Ignoring '--reload-dir' since auto reloading was not " | ||||
|                     "enabled. If you would like to watch directories for " | ||||
|                     "changes, consider using --debug or --auto-reload." | ||||
|                 ) | ||||
|  | ||||
|         app.run(**kwargs) | ||||
|     except ImportError as e: | ||||
|         logger.error( | ||||
|         if module_name.startswith(e.name): | ||||
|             error_logger.error( | ||||
|                 f"No module named {e.name} found.\n" | ||||
|             f"  Example File: project/sanic_server.py -> app\n" | ||||
|             f"  Example Module: project.sanic_server.app" | ||||
|                 "  Example File: project/sanic_server.py -> app\n" | ||||
|                 "  Example Module: project.sanic_server.app" | ||||
|             ) | ||||
|         else: | ||||
|             raise e | ||||
|     except ValueError: | ||||
|         logger.exception("Failed to run app") | ||||
|         error_logger.exception("Failed to run app") | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|   | ||||
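The new `--factory` flag (and the `module:callable()` shorthand parsed above) expects a zero-argument callable that returns a `Sanic` instance. A sketch of such a module; the file name `server.py` and the CLI invocations in the comments are illustrative.

```python
# server.py -- could be launched with something like:
#   sanic server:create_app --factory
# or, using the trailing-parentheses shorthand handled above:
#   sanic server:create_app()
from sanic import Sanic, text


def create_app() -> Sanic:
    app = Sanic("FactorySketch")

    @app.get("/")
    async def index(request):
        return text("ok")

    return app
```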

| @@ -1 +1 @@ | ||||
| __version__ = "21.3.4" | ||||
| __version__ = "21.9.0" | ||||
|   | ||||
							
								
								
									
sanic/app.py (435 changed lines)
							| @@ -1,9 +1,12 @@ | ||||
| from __future__ import annotations | ||||
|  | ||||
| import logging | ||||
| import logging.config | ||||
| import os | ||||
| import re | ||||
|  | ||||
| from asyncio import ( | ||||
|     AbstractEventLoop, | ||||
|     CancelledError, | ||||
|     Protocol, | ||||
|     ensure_future, | ||||
| @@ -14,12 +17,14 @@ from asyncio.futures import Future | ||||
| from collections import defaultdict, deque | ||||
| from functools import partial | ||||
| from inspect import isawaitable | ||||
| from pathlib import Path | ||||
| from socket import socket | ||||
| from ssl import Purpose, SSLContext, create_default_context | ||||
| from traceback import format_exc | ||||
| from types import SimpleNamespace | ||||
| from typing import ( | ||||
|     Any, | ||||
|     AnyStr, | ||||
|     Awaitable, | ||||
|     Callable, | ||||
|     Coroutine, | ||||
| @@ -29,6 +34,7 @@ from typing import ( | ||||
|     List, | ||||
|     Optional, | ||||
|     Set, | ||||
|     Tuple, | ||||
|     Type, | ||||
|     Union, | ||||
| ) | ||||
| @@ -43,7 +49,7 @@ from sanic.asgi import ASGIApp | ||||
| from sanic.base import BaseSanic | ||||
| from sanic.blueprint_group import BlueprintGroup | ||||
| from sanic.blueprints import Blueprint | ||||
| from sanic.config import BASE_LOGO, Config | ||||
| from sanic.config import BASE_LOGO, SANIC_PREFIX, Config | ||||
| from sanic.exceptions import ( | ||||
|     InvalidUsage, | ||||
|     SanicException, | ||||
| @@ -68,19 +74,29 @@ from sanic.router import Router | ||||
| from sanic.server import AsyncioServer, HttpProtocol | ||||
| from sanic.server import Signal as ServerSignal | ||||
| from sanic.server import serve, serve_multiple, serve_single | ||||
| from sanic.server.protocols.websocket_protocol import WebSocketProtocol | ||||
| from sanic.server.websockets.impl import ConnectionClosed | ||||
| from sanic.signals import Signal, SignalRouter | ||||
| from sanic.websocket import ConnectionClosed, WebSocketProtocol | ||||
| from sanic.touchup import TouchUp, TouchUpMeta | ||||
|  | ||||
|  | ||||
| class Sanic(BaseSanic): | ||||
| class Sanic(BaseSanic, metaclass=TouchUpMeta): | ||||
|     """ | ||||
|     The main application instance | ||||
|     """ | ||||
|  | ||||
|     __touchup__ = ( | ||||
|         "handle_request", | ||||
|         "handle_exception", | ||||
|         "_run_response_middleware", | ||||
|         "_run_request_middleware", | ||||
|     ) | ||||
|     __fake_slots__ = ( | ||||
|         "_asgi_app", | ||||
|         "_app_registry", | ||||
|         "_asgi_client", | ||||
|         "_blueprint_order", | ||||
|         "_delayed_tasks", | ||||
|         "_future_routes", | ||||
|         "_future_statics", | ||||
|         "_future_middleware", | ||||
| @@ -89,6 +105,7 @@ class Sanic(BaseSanic): | ||||
|         "_future_signals", | ||||
|         "_test_client", | ||||
|         "_test_manager", | ||||
|         "auto_reload", | ||||
|         "asgi", | ||||
|         "blueprints", | ||||
|         "config", | ||||
| @@ -103,6 +120,7 @@ class Sanic(BaseSanic): | ||||
|         "name", | ||||
|         "named_request_middleware", | ||||
|         "named_response_middleware", | ||||
|         "reload_dirs", | ||||
|         "request_class", | ||||
|         "request_middleware", | ||||
|         "response_middleware", | ||||
| @@ -121,45 +139,55 @@ class Sanic(BaseSanic): | ||||
|     def __init__( | ||||
|         self, | ||||
|         name: str = None, | ||||
|         config: Optional[Config] = None, | ||||
|         ctx: Optional[Any] = None, | ||||
|         router: Optional[Router] = None, | ||||
|         signal_router: Optional[SignalRouter] = None, | ||||
|         error_handler: Optional[ErrorHandler] = None, | ||||
|         load_env: bool = True, | ||||
|         load_env: Union[bool, str] = True, | ||||
|         env_prefix: Optional[str] = SANIC_PREFIX, | ||||
|         request_class: Optional[Type[Request]] = None, | ||||
|         strict_slashes: bool = False, | ||||
|         log_config: Optional[Dict[str, Any]] = None, | ||||
|         configure_logging: bool = True, | ||||
|         register: Optional[bool] = None, | ||||
|         dumps: Optional[Callable[..., str]] = None, | ||||
|         dumps: Optional[Callable[..., AnyStr]] = None, | ||||
|     ) -> None: | ||||
|         super().__init__() | ||||
|         super().__init__(name=name) | ||||
|  | ||||
|         if name is None: | ||||
|             raise SanicException( | ||||
|                 "Sanic instance cannot be unnamed. " | ||||
|                 "Please use Sanic(name='your_application_name') instead.", | ||||
|             ) | ||||
|         # logging | ||||
|         if configure_logging: | ||||
|             logging.config.dictConfig(log_config or LOGGING_CONFIG_DEFAULTS) | ||||
|  | ||||
|         if config and (load_env is not True or env_prefix != SANIC_PREFIX): | ||||
|             raise SanicException( | ||||
|                 "When instantiating Sanic with config, you cannot also pass " | ||||
|                 "load_env or env_prefix" | ||||
|             ) | ||||
|  | ||||
|         self._asgi_client = None | ||||
|         self._blueprint_order: List[Blueprint] = [] | ||||
|         self._delayed_tasks: List[str] = [] | ||||
|         self._test_client = None | ||||
|         self._test_manager = None | ||||
|         self.asgi = False | ||||
|         self.auto_reload = False | ||||
|         self.blueprints: Dict[str, Blueprint] = {} | ||||
|         self.config = Config(load_env=load_env) | ||||
|         self.config = config or Config( | ||||
|             load_env=load_env, env_prefix=env_prefix | ||||
|         ) | ||||
|         self.configure_logging = configure_logging | ||||
|         self.ctx = SimpleNamespace() | ||||
|         self.ctx = ctx or SimpleNamespace() | ||||
|         self.debug = None | ||||
|         self.error_handler = error_handler or ErrorHandler() | ||||
|         self.error_handler = error_handler or ErrorHandler( | ||||
|             fallback=self.config.FALLBACK_ERROR_FORMAT, | ||||
|         ) | ||||
|         self.is_running = False | ||||
|         self.is_stopping = False | ||||
|         self.listeners: Dict[str, List[ListenerType]] = defaultdict(list) | ||||
|         self.name = name | ||||
|         self.named_request_middleware: Dict[str, Deque[MiddlewareType]] = {} | ||||
|         self.named_response_middleware: Dict[str, Deque[MiddlewareType]] = {} | ||||
|         self.reload_dirs: Set[Path] = set() | ||||
|         self.request_class = request_class | ||||
|         self.request_middleware: Deque[MiddlewareType] = deque() | ||||
|         self.response_middleware: Deque[MiddlewareType] = deque() | ||||
| @@ -175,14 +203,14 @@ class Sanic(BaseSanic): | ||||
|  | ||||
|         if register is not None: | ||||
|             self.config.REGISTER = register | ||||
|  | ||||
|         if self.config.REGISTER: | ||||
|             self.__class__.register_app(self) | ||||
|  | ||||
|         self.router.ctx.app = self | ||||
|         self.signal_router.ctx.app = self | ||||
|  | ||||
|         if dumps: | ||||
|             BaseHTTPResponse._dumps = dumps | ||||
|             BaseHTTPResponse._dumps = dumps  # type: ignore | ||||
|  | ||||
|     @property | ||||
|     def loop(self): | ||||
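A hedged sketch of the constructor arguments introduced in this hunk (`config=`, `ctx=`, and the `dumps=` serializer hook); `orjson` and the `MYAPP_` env prefix are stand-ins, not requirements.

```python
from types import SimpleNamespace

from sanic import Sanic
from sanic.config import Config

try:
    # Any callable returning str/bytes works for the dumps= hook; orjson is
    # just an illustration.
    from orjson import dumps as json_dumps
except ImportError:
    from json import dumps as json_dumps

app = Sanic(
    "ConfiguredApp",
    # Passing a ready-made Config replaces load_env/env_prefix on Sanic itself;
    # combining them now raises a SanicException (see above).
    config=Config(env_prefix="MYAPP_"),
    ctx=SimpleNamespace(db=None),  # user-defined context object
    dumps=json_dumps,              # global default JSON serializer
)
```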
| @@ -220,9 +248,12 @@ class Sanic(BaseSanic): | ||||
|             loop = self.loop  # Will raise SanicError if loop is not started | ||||
|             self._loop_add_task(task, self, loop) | ||||
|         except SanicException: | ||||
|             self.listener("before_server_start")( | ||||
|                 partial(self._loop_add_task, task) | ||||
|             ) | ||||
|             task_name = f"sanic.delayed_task.{hash(task)}" | ||||
|             if not self._delayed_tasks: | ||||
|                 self.after_server_start(partial(self.dispatch_delayed_tasks)) | ||||
|  | ||||
|             self.signal(task_name)(partial(self.run_delayed_task, task=task)) | ||||
|             self._delayed_tasks.append(task_name) | ||||
|  | ||||
|     def register_listener(self, listener: Callable, event: str) -> Any: | ||||
|         """ | ||||
| @@ -234,12 +265,20 @@ class Sanic(BaseSanic): | ||||
|         """ | ||||
|  | ||||
|         try: | ||||
|             _event = ListenerEvent(event) | ||||
|         except ValueError: | ||||
|             valid = ", ".join(ListenerEvent.__members__.values()) | ||||
|             _event = ListenerEvent[event.upper()] | ||||
|         except (ValueError, AttributeError): | ||||
|             valid = ", ".join( | ||||
|                 map(lambda x: x.lower(), ListenerEvent.__members__.keys()) | ||||
|             ) | ||||
|             raise InvalidUsage(f"Invalid event: {event}. Use one of: {valid}") | ||||
|  | ||||
|         self.listeners[_event].append(listener) | ||||
|         if "." in _event: | ||||
|             self.signal(_event.value)( | ||||
|                 partial(self._listener, listener=listener) | ||||
|             ) | ||||
|         else: | ||||
|             self.listeners[_event.value].append(listener) | ||||
|  | ||||
|         return listener | ||||
|  | ||||
|     def register_middleware(self, middleware, attach_to: str = "request"): | ||||
| @@ -298,7 +337,11 @@ class Sanic(BaseSanic): | ||||
|                     self.named_response_middleware[_rn].appendleft(middleware) | ||||
|         return middleware | ||||
|  | ||||
|     def _apply_exception_handler(self, handler: FutureException): | ||||
|     def _apply_exception_handler( | ||||
|         self, | ||||
|         handler: FutureException, | ||||
|         route_names: Optional[List[str]] = None, | ||||
|     ): | ||||
|         """Decorate a function to be registered as a handler for exceptions | ||||
|  | ||||
|         :param exceptions: exceptions | ||||
| @@ -308,9 +351,9 @@ class Sanic(BaseSanic): | ||||
|         for exception in handler.exceptions: | ||||
|             if isinstance(exception, (tuple, list)): | ||||
|                 for e in exception: | ||||
|                     self.error_handler.add(e, handler.handler) | ||||
|                     self.error_handler.add(e, handler.handler, route_names) | ||||
|             else: | ||||
|                 self.error_handler.add(exception, handler.handler) | ||||
|                 self.error_handler.add(exception, handler.handler, route_names) | ||||
|         return handler.handler | ||||
|  | ||||
|     def _apply_listener(self, listener: FutureListener): | ||||
| @@ -367,18 +410,32 @@ class Sanic(BaseSanic): | ||||
|         *, | ||||
|         condition: Optional[Dict[str, str]] = None, | ||||
|         context: Optional[Dict[str, Any]] = None, | ||||
|         fail_not_found: bool = True, | ||||
|         inline: bool = False, | ||||
|         reverse: bool = False, | ||||
|     ) -> Coroutine[Any, Any, Awaitable[Any]]: | ||||
|         return self.signal_router.dispatch( | ||||
|             event, | ||||
|             context=context, | ||||
|             condition=condition, | ||||
|             inline=inline, | ||||
|             reverse=reverse, | ||||
|             fail_not_found=fail_not_found, | ||||
|         ) | ||||
|  | ||||
|     def event(self, event: str, timeout: Optional[Union[int, float]] = None): | ||||
|     async def event( | ||||
|         self, event: str, timeout: Optional[Union[int, float]] = None | ||||
|     ): | ||||
|         signal = self.signal_router.name_index.get(event) | ||||
|         if not signal: | ||||
|             if self.config.EVENT_AUTOREGISTER: | ||||
|                 self.signal_router.reset() | ||||
|                 self.add_signal(None, event) | ||||
|                 signal = self.signal_router.name_index[event] | ||||
|                 self.signal_router.finalize() | ||||
|             else: | ||||
|                 raise NotFound("Could not find signal %s" % event) | ||||
|         return wait_for(signal.ctx.event.wait(), timeout=timeout) | ||||
|         return await wait_for(signal.ctx.event.wait(), timeout=timeout) | ||||
|  | ||||
|     def enable_websocket(self, enable=True): | ||||
|         """Enable or disable the support for websocket. | ||||
| @@ -393,7 +450,13 @@ class Sanic(BaseSanic): | ||||
|  | ||||
|         self.websocket_enabled = enable | ||||
|  | ||||
|     def blueprint(self, blueprint, **options): | ||||
|     def blueprint( | ||||
|         self, | ||||
|         blueprint: Union[ | ||||
|             Blueprint, List[Blueprint], Tuple[Blueprint], BlueprintGroup | ||||
|         ], | ||||
|         **options: Any, | ||||
|     ): | ||||
|         """Register a blueprint on the application. | ||||
|  | ||||
|         :param blueprint: Blueprint object or (list, tuple) thereof | ||||
| @@ -402,7 +465,33 @@ class Sanic(BaseSanic): | ||||
|         """ | ||||
|         if isinstance(blueprint, (list, tuple, BlueprintGroup)): | ||||
|             for item in blueprint: | ||||
|                 self.blueprint(item, **options) | ||||
|                 params = {**options} | ||||
|                 if isinstance(blueprint, BlueprintGroup): | ||||
|                     if blueprint.url_prefix: | ||||
|                         merge_from = [ | ||||
|                             options.get("url_prefix", ""), | ||||
|                             blueprint.url_prefix, | ||||
|                         ] | ||||
|                         if not isinstance(item, BlueprintGroup): | ||||
|                             merge_from.append(item.url_prefix or "") | ||||
|                         merged_prefix = "/".join( | ||||
|                             u.strip("/") for u in merge_from | ||||
|                         ).rstrip("/") | ||||
|                         params["url_prefix"] = f"/{merged_prefix}" | ||||
|  | ||||
|                     for _attr in ["version", "strict_slashes"]: | ||||
|                         if getattr(item, _attr) is None: | ||||
|                             params[_attr] = getattr( | ||||
|                                 blueprint, _attr | ||||
|                             ) or options.get(_attr) | ||||
|                     if item.version_prefix == "/v": | ||||
|                         if blueprint.version_prefix == "/v": | ||||
|                             params["version_prefix"] = options.get( | ||||
|                                 "version_prefix" | ||||
|                             ) | ||||
|                         else: | ||||
|                             params["version_prefix"] = blueprint.version_prefix | ||||
|                 self.blueprint(item, **params) | ||||
|             return | ||||
|         if blueprint.name in self.blueprints: | ||||
|             assert self.blueprints[blueprint.name] is blueprint, ( | ||||
| @@ -567,7 +656,12 @@ class Sanic(BaseSanic): | ||||
|             # determine if the parameter supplied by the caller | ||||
|             # passes the test in the URL | ||||
|             if param_info.pattern: | ||||
|                 passes_pattern = param_info.pattern.match(supplied_param) | ||||
|                 pattern = ( | ||||
|                     param_info.pattern[1] | ||||
|                     if isinstance(param_info.pattern, tuple) | ||||
|                     else param_info.pattern | ||||
|                 ) | ||||
|                 passes_pattern = pattern.match(supplied_param) | ||||
|                 if not passes_pattern: | ||||
|                     if param_info.cast != str: | ||||
|                         msg = ( | ||||
| @@ -575,13 +669,13 @@ class Sanic(BaseSanic): | ||||
|                             f"for parameter `{param_info.name}` does " | ||||
|                             "not match pattern for type " | ||||
|                             f"`{param_info.cast.__name__}`: " | ||||
|                             f"{param_info.pattern.pattern}" | ||||
|                             f"{pattern.pattern}" | ||||
|                         ) | ||||
|                     else: | ||||
|                         msg = ( | ||||
|                             f'Value "{supplied_param}" for parameter ' | ||||
|                             f"`{param_info.name}` does not satisfy " | ||||
|                             f"pattern {param_info.pattern.pattern}" | ||||
|                             f"pattern {pattern.pattern}" | ||||
|                         ) | ||||
|                     raise URLBuildError(msg) | ||||
|  | ||||
| @@ -602,7 +696,7 @@ class Sanic(BaseSanic): | ||||
|  | ||||
|     async def handle_exception( | ||||
|         self, request: Request, exception: BaseException | ||||
|     ): | ||||
|     ):  # no cov | ||||
|         """ | ||||
|         A handler that catches specific exceptions and outputs a response. | ||||
|  | ||||
| @@ -612,6 +706,12 @@ class Sanic(BaseSanic): | ||||
|         :type exception: BaseException | ||||
|         :raises ServerError: response 500 | ||||
|         """ | ||||
|         await self.dispatch( | ||||
|             "http.lifecycle.exception", | ||||
|             inline=True, | ||||
|             context={"request": request, "exception": exception}, | ||||
|         ) | ||||
|  | ||||
|         # -------------------------------------------- # | ||||
|         # Request Middleware | ||||
|         # -------------------------------------------- # | ||||
| @@ -658,41 +758,60 @@ class Sanic(BaseSanic): | ||||
|                 f"Invalid response type {response!r} (need HTTPResponse)" | ||||
|             ) | ||||
|  | ||||
|     async def handle_request(self, request: Request): | ||||
|     async def handle_request(self, request: Request):  # no cov | ||||
|         """Take a request from the HTTP Server and return a response object | ||||
|         to be sent back The HTTP Server only expects a response object, so | ||||
|         exception handling must be done here | ||||
|  | ||||
|         :param request: HTTP Request object | ||||
|         :param write_callback: Synchronous response function to be | ||||
|             called with the response as the only argument | ||||
|         :param stream_callback: Coroutine that handles streaming a | ||||
|             StreamingHTTPResponse if produced by the handler. | ||||
|  | ||||
|         :return: Nothing | ||||
|         """ | ||||
|         await self.dispatch( | ||||
|             "http.lifecycle.handle", | ||||
|             inline=True, | ||||
|             context={"request": request}, | ||||
|         ) | ||||
|  | ||||
|         # Define `response` var here to remove warnings about | ||||
|         # allocation before assignment below. | ||||
|         response = None | ||||
|         try: | ||||
|  | ||||
|             await self.dispatch( | ||||
|                 "http.routing.before", | ||||
|                 inline=True, | ||||
|                 context={"request": request}, | ||||
|             ) | ||||
|             # Fetch handler from router | ||||
|             route, handler, kwargs = self.router.get( | ||||
|                 request.path, request.method, request.headers.get("host") | ||||
|                 request.path, | ||||
|                 request.method, | ||||
|                 request.headers.getone("host", None), | ||||
|             ) | ||||
|  | ||||
|             request._match_info = kwargs | ||||
|             request._match_info = {**kwargs} | ||||
|             request.route = route | ||||
|  | ||||
|             await self.dispatch( | ||||
|                 "http.routing.after", | ||||
|                 inline=True, | ||||
|                 context={ | ||||
|                     "request": request, | ||||
|                     "route": route, | ||||
|                     "kwargs": kwargs, | ||||
|                     "handler": handler, | ||||
|                 }, | ||||
|             ) | ||||
|  | ||||
|             if ( | ||||
|                 request.stream.request_body  # type: ignore | ||||
|                 request.stream | ||||
|                 and request.stream.request_body | ||||
|                 and not route.ctx.ignore_body | ||||
|             ): | ||||
|  | ||||
|                 if hasattr(handler, "is_stream"): | ||||
|                     # Streaming handler: lift the size limit | ||||
|                     request.stream.request_max_size = float(  # type: ignore | ||||
|                         "inf" | ||||
|                     ) | ||||
|                     request.stream.request_max_size = float("inf") | ||||
|                 else: | ||||
|                     # Non-streaming handler: preload body | ||||
|                     await request.receive_body() | ||||
| @@ -719,23 +838,28 @@ class Sanic(BaseSanic): | ||||
|                     ) | ||||
|  | ||||
|                 # Run response handler | ||||
|                 response = handler(request, **kwargs) | ||||
|                 response = handler(request, **request.match_info) | ||||
|                 if isawaitable(response): | ||||
|                     response = await response | ||||
|  | ||||
|             if response: | ||||
|             if response is not None: | ||||
|                 response = await request.respond(response) | ||||
|             else: | ||||
|             elif not hasattr(handler, "is_websocket"): | ||||
|                 response = request.stream.response  # type: ignore | ||||
|             # Make sure that response is finished / run StreamingHTTP callback | ||||
|  | ||||
|             # Make sure that response is finished / run StreamingHTTP callback | ||||
|             if isinstance(response, BaseHTTPResponse): | ||||
|                 await self.dispatch( | ||||
|                     "http.lifecycle.response", | ||||
|                     inline=True, | ||||
|                     context={ | ||||
|                         "request": request, | ||||
|                         "response": response, | ||||
|                     }, | ||||
|                 ) | ||||
|                 await response.send(end_stream=True) | ||||
|             else: | ||||
|                 try: | ||||
|                     # Fastest method for checking if the property exists | ||||
|                     handler.is_websocket  # type: ignore | ||||
|                 except AttributeError: | ||||
|                 if not hasattr(handler, "is_websocket"): | ||||
|                     raise ServerError( | ||||
|                         f"Invalid response type {response!r} " | ||||
|                         "(need HTTPResponse)" | ||||
| @@ -750,22 +874,11 @@ class Sanic(BaseSanic): | ||||
|     async def _websocket_handler( | ||||
|         self, handler, request, *args, subprotocols=None, **kwargs | ||||
|     ): | ||||
|         request.app = self | ||||
|         if not getattr(handler, "__blueprintname__", False): | ||||
|             request._name = handler.__name__ | ||||
|         else: | ||||
|             request._name = ( | ||||
|                 getattr(handler, "__blueprintname__", "") + handler.__name__ | ||||
|             ) | ||||
|  | ||||
|             pass | ||||
|  | ||||
|         if self.asgi: | ||||
|             ws = request.transport.get_websocket_connection() | ||||
|             await ws.accept(subprotocols) | ||||
|         else: | ||||
|             protocol = request.transport.get_protocol() | ||||
|             protocol.app = self | ||||
|  | ||||
|             ws = await protocol.websocket_handshake(request, subprotocols) | ||||
|  | ||||
|         # schedule the application handler | ||||
| @@ -773,12 +886,18 @@ class Sanic(BaseSanic): | ||||
|         # needs to be cancelled due to the server being stopped | ||||
|         fut = ensure_future(handler(request, ws, *args, **kwargs)) | ||||
|         self.websocket_tasks.add(fut) | ||||
|         cancelled = False | ||||
|         try: | ||||
|             await fut | ||||
|         except Exception as e: | ||||
|             self.error_handler.log(request, e) | ||||
|         except (CancelledError, ConnectionClosed): | ||||
|             pass | ||||
|             cancelled = True | ||||
|         finally: | ||||
|             self.websocket_tasks.remove(fut) | ||||
|             if cancelled: | ||||
|                 ws.end_connection(1000) | ||||
|             else: | ||||
|                 await ws.close() | ||||
|  | ||||
|     # -------------------------------------------------------------------- # | ||||
| @@ -825,7 +944,7 @@ class Sanic(BaseSanic): | ||||
|         *, | ||||
|         debug: bool = False, | ||||
|         auto_reload: Optional[bool] = None, | ||||
|         ssl: Union[dict, SSLContext, None] = None, | ||||
|         ssl: Union[Dict[str, str], SSLContext, None] = None, | ||||
|         sock: Optional[socket] = None, | ||||
|         workers: int = 1, | ||||
|         protocol: Optional[Type[Protocol]] = None, | ||||
| @@ -834,6 +953,7 @@ class Sanic(BaseSanic): | ||||
|         access_log: Optional[bool] = None, | ||||
|         unix: Optional[str] = None, | ||||
|         loop: None = None, | ||||
|         reload_dir: Optional[Union[List[str], str]] = None, | ||||
|     ) -> None: | ||||
|         """ | ||||
|         Run the HTTP Server and listen until keyboard interrupt or term | ||||
| @@ -868,6 +988,18 @@ class Sanic(BaseSanic): | ||||
|         :type unix: str | ||||
|         :return: Nothing | ||||
|         """ | ||||
|         if reload_dir: | ||||
|             if isinstance(reload_dir, str): | ||||
|                 reload_dir = [reload_dir] | ||||
|  | ||||
|             for directory in reload_dir: | ||||
|                 direc = Path(directory) | ||||
|                 if not direc.is_dir(): | ||||
|                     logger.warning( | ||||
|                         f"Directory {directory} could not be located" | ||||
|                     ) | ||||
|                 self.reload_dirs.add(Path(directory)) | ||||
|  | ||||
|         if loop is not None: | ||||
|             raise TypeError( | ||||
|                 "loop is not a valid argument. To use an existing loop, " | ||||
| @@ -877,8 +1009,9 @@ class Sanic(BaseSanic): | ||||
|             ) | ||||
|  | ||||
|         if auto_reload or auto_reload is None and debug: | ||||
|             self.auto_reload = True | ||||
|             if os.environ.get("SANIC_SERVER_RUNNING") != "true": | ||||
|                 return reloader_helpers.watchdog(1.0) | ||||
|                 return reloader_helpers.watchdog(1.0, self) | ||||
|  | ||||
|         if sock is None: | ||||
|             host, port = host or "127.0.0.1", port or 8000 | ||||
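The `reload_dir` handling above pairs with the new `-R`/`--reload-dir` CLI flag. A short, self-contained sketch; the watched directories are illustrative.

```python
from sanic import Sanic, text

app = Sanic("ReloadSketch")


@app.get("/")
async def index(request):
    return text("hello")


if __name__ == "__main__":
    # reload_dir is only meaningful while auto-reload is active (debug=True
    # or auto_reload=True); the CLI warns and ignores --reload-dir otherwise.
    app.run(
        host="0.0.0.0",
        port=8000,
        debug=True,
        reload_dir=["./templates", "./static"],
    )
```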
| @@ -941,7 +1074,7 @@ class Sanic(BaseSanic): | ||||
|         port: Optional[int] = None, | ||||
|         *, | ||||
|         debug: bool = False, | ||||
|         ssl: Union[dict, SSLContext, None] = None, | ||||
|         ssl: Union[Dict[str, str], SSLContext, None] = None, | ||||
|         sock: Optional[socket] = None, | ||||
|         protocol: Type[Protocol] = None, | ||||
|         backlog: int = 100, | ||||
| @@ -1013,11 +1146,6 @@ class Sanic(BaseSanic): | ||||
|             run_async=return_asyncio_server, | ||||
|         ) | ||||
|  | ||||
|         # Trigger before_start events | ||||
|         await self.trigger_events( | ||||
|             server_settings.get("before_start", []), | ||||
|             server_settings.get("loop"), | ||||
|         ) | ||||
|         main_start = server_settings.pop("main_start", None) | ||||
|         main_stop = server_settings.pop("main_stop", None) | ||||
|         if main_start or main_stop: | ||||
| @@ -1030,17 +1158,9 @@ class Sanic(BaseSanic): | ||||
|             asyncio_server_kwargs=asyncio_server_kwargs, **server_settings | ||||
|         ) | ||||
|  | ||||
|     async def trigger_events(self, events, loop): | ||||
|         """Trigger events (functions or async) | ||||
|         :param events: one or more sync or async functions to execute | ||||
|         :param loop: event loop | ||||
|         """ | ||||
|         for event in events: | ||||
|             result = event(loop) | ||||
|             if isawaitable(result): | ||||
|                 await result | ||||
|  | ||||
|     async def _run_request_middleware(self, request, request_name=None): | ||||
|     async def _run_request_middleware( | ||||
|         self, request, request_name=None | ||||
|     ):  # no cov | ||||
|         # The if improves speed.  I don't know why | ||||
|         named_middleware = self.named_request_middleware.get( | ||||
|             request_name, deque() | ||||
| @@ -1053,25 +1173,67 @@ class Sanic(BaseSanic): | ||||
|             request.request_middleware_started = True | ||||
|  | ||||
|             for middleware in applicable_middleware: | ||||
|                 await self.dispatch( | ||||
|                     "http.middleware.before", | ||||
|                     inline=True, | ||||
|                     context={ | ||||
|                         "request": request, | ||||
|                         "response": None, | ||||
|                     }, | ||||
|                     condition={"attach_to": "request"}, | ||||
|                 ) | ||||
|  | ||||
|                 response = middleware(request) | ||||
|                 if isawaitable(response): | ||||
|                     response = await response | ||||
|  | ||||
|                 await self.dispatch( | ||||
|                     "http.middleware.after", | ||||
|                     inline=True, | ||||
|                     context={ | ||||
|                         "request": request, | ||||
|                         "response": None, | ||||
|                     }, | ||||
|                     condition={"attach_to": "request"}, | ||||
|                 ) | ||||
|  | ||||
|                 if response: | ||||
|                     return response | ||||
|         return None | ||||
|  | ||||
|     async def _run_response_middleware( | ||||
|         self, request, response, request_name=None | ||||
|     ): | ||||
|     ):  # no cov | ||||
|         named_middleware = self.named_response_middleware.get( | ||||
|             request_name, deque() | ||||
|         ) | ||||
|         applicable_middleware = self.response_middleware + named_middleware | ||||
|         if applicable_middleware: | ||||
|             for middleware in applicable_middleware: | ||||
|                 await self.dispatch( | ||||
|                     "http.middleware.before", | ||||
|                     inline=True, | ||||
|                     context={ | ||||
|                         "request": request, | ||||
|                         "response": response, | ||||
|                     }, | ||||
|                     condition={"attach_to": "response"}, | ||||
|                 ) | ||||
|  | ||||
|                 _response = middleware(request, response) | ||||
|                 if isawaitable(_response): | ||||
|                     _response = await _response | ||||
|  | ||||
|                 await self.dispatch( | ||||
|                     "http.middleware.after", | ||||
|                     inline=True, | ||||
|                     context={ | ||||
|                         "request": request, | ||||
|                         "response": _response if _response else response, | ||||
|                     }, | ||||
|                     condition={"attach_to": "response"}, | ||||
|                 ) | ||||
|  | ||||
|                 if _response: | ||||
|                     response = _response | ||||
|                     if isinstance(response, BaseHTTPResponse): | ||||
| @@ -1097,10 +1259,6 @@ class Sanic(BaseSanic): | ||||
|     ): | ||||
|         """Helper function used by `run` and `create_server`.""" | ||||
|  | ||||
|         self.listeners["before_server_start"] = [ | ||||
|             self.finalize | ||||
|         ] + self.listeners["before_server_start"] | ||||
|  | ||||
|         if isinstance(ssl, dict): | ||||
|             # try common aliaseses | ||||
|             cert = ssl.get("cert") or ssl.get("certificate") | ||||
| @@ -1137,10 +1295,6 @@ class Sanic(BaseSanic): | ||||
|         # Register start/stop events | ||||
|  | ||||
|         for event_name, settings_name, reverse in ( | ||||
|             ("before_server_start", "before_start", False), | ||||
|             ("after_server_start", "after_start", False), | ||||
|             ("before_server_stop", "before_stop", True), | ||||
|             ("after_server_stop", "after_stop", True), | ||||
|             ("main_process_start", "main_start", False), | ||||
|             ("main_process_stop", "main_stop", True), | ||||
|         ): | ||||
| @@ -1177,6 +1331,11 @@ class Sanic(BaseSanic): | ||||
|             else: | ||||
|                 logger.info(f"Goin' Fast @ {proto}://{host}:{port}") | ||||
|  | ||||
|         debug_mode = "enabled" if self.debug else "disabled" | ||||
|         reload_mode = "enabled" if auto_reload else "disabled" | ||||
|         logger.debug(f"Sanic auto-reload: {reload_mode}") | ||||
|         logger.debug(f"Sanic debug mode: {debug_mode}") | ||||
|  | ||||
|         return server_settings | ||||
|  | ||||
|     def _build_endpoint_name(self, *parts): | ||||
| @@ -1184,20 +1343,44 @@ class Sanic(BaseSanic): | ||||
|         return ".".join(parts) | ||||
|  | ||||
|     @classmethod | ||||
|     def _loop_add_task(cls, task, app, loop): | ||||
|     def _prep_task(cls, task, app, loop): | ||||
|         if callable(task): | ||||
|             try: | ||||
|                 loop.create_task(task(app)) | ||||
|                 task = task(app) | ||||
|             except TypeError: | ||||
|                 loop.create_task(task()) | ||||
|         else: | ||||
|             loop.create_task(task) | ||||
|                 task = task() | ||||
|  | ||||
|         return task | ||||
|  | ||||
|     @classmethod | ||||
|     def _loop_add_task(cls, task, app, loop): | ||||
|         prepped = cls._prep_task(task, app, loop) | ||||
|         loop.create_task(prepped) | ||||
|  | ||||
|     @classmethod | ||||
|     def _cancel_websocket_tasks(cls, app, loop): | ||||
|         for task in app.websocket_tasks: | ||||
|             task.cancel() | ||||
|  | ||||
|     @staticmethod | ||||
|     async def dispatch_delayed_tasks(app, loop): | ||||
|         for name in app._delayed_tasks: | ||||
|             await app.dispatch(name, context={"app": app, "loop": loop}) | ||||
|         app._delayed_tasks.clear() | ||||
|  | ||||
|     @staticmethod | ||||
|     async def run_delayed_task(app, loop, task): | ||||
|         prepped = app._prep_task(task, app, loop) | ||||
|         await prepped | ||||
|  | ||||
|     @staticmethod | ||||
|     async def _listener( | ||||
|         app: Sanic, loop: AbstractEventLoop, listener: ListenerType | ||||
|     ): | ||||
|         maybe_coro = listener(app, loop) | ||||
|         if maybe_coro and isawaitable(maybe_coro): | ||||
|             await maybe_coro | ||||
|  | ||||
|     # -------------------------------------------------------------------- # | ||||
|     # ASGI | ||||
|     # -------------------------------------------------------------------- # | ||||
| @@ -1271,15 +1454,51 @@ class Sanic(BaseSanic): | ||||
|             raise SanicException(f'Sanic app name "{name}" not found.') | ||||
|  | ||||
|     # -------------------------------------------------------------------- # | ||||
|     # Static methods | ||||
|     # Lifecycle | ||||
|     # -------------------------------------------------------------------- # | ||||
|  | ||||
|     @staticmethod | ||||
|     async def finalize(app, _): | ||||
|     def finalize(self): | ||||
|         try: | ||||
|             app.router.finalize() | ||||
|             if app.signal_router.routes: | ||||
|                 app.signal_router.finalize()  # noqa | ||||
|             self.router.finalize() | ||||
|         except FinalizationError as e: | ||||
|             if not Sanic.test_mode: | ||||
|                 raise e  # noqa | ||||
|                 raise e | ||||
|  | ||||
|     def signalize(self): | ||||
|         try: | ||||
|             self.signal_router.finalize() | ||||
|         except FinalizationError as e: | ||||
|             if not Sanic.test_mode: | ||||
|                 raise e | ||||
|  | ||||
|     async def _startup(self): | ||||
|         self.signalize() | ||||
|         self.finalize() | ||||
|         TouchUp.run(self) | ||||
|  | ||||
|     async def _server_event( | ||||
|         self, | ||||
|         concern: str, | ||||
|         action: str, | ||||
|         loop: Optional[AbstractEventLoop] = None, | ||||
|     ) -> None: | ||||
|         event = f"server.{concern}.{action}" | ||||
|         if action not in ("before", "after") or concern not in ( | ||||
|             "init", | ||||
|             "shutdown", | ||||
|         ): | ||||
|             raise SanicException(f"Invalid server event: {event}") | ||||
|         logger.debug(f"Triggering server events: {event}") | ||||
|         reverse = concern == "shutdown" | ||||
|         if loop is None: | ||||
|             loop = self.loop | ||||
|         await self.dispatch( | ||||
|             event, | ||||
|             fail_not_found=False, | ||||
|             reverse=reverse, | ||||
|             inline=True, | ||||
|             context={ | ||||
|                 "app": self, | ||||
|                 "loop": loop, | ||||
|             }, | ||||
|         ) | ||||
|   | ||||
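The new `_server_event` helper only accepts the four `server.<concern>.<action>` combinations. A minimal standalone sketch of that naming and validation (the function name here is hypothetical; only the event strings and the check mirror the code above):

    def build_server_event(concern: str, action: str) -> str:
        # Mirrors the validation in Sanic._server_event: anything other than the
        # four supported server lifecycle events is rejected.
        event = f"server.{concern}.{action}"
        if action not in ("before", "after") or concern not in ("init", "shutdown"):
            raise ValueError(f"Invalid server event: {event}")
        return event

    assert build_server_event("init", "before") == "server.init.before"
    assert build_server_event("shutdown", "after") == "server.shutdown.after"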
| @@ -1,6 +1,5 @@ | ||||
| import warnings | ||||
|  | ||||
| from inspect import isawaitable | ||||
| from typing import Optional | ||||
| from urllib.parse import quote | ||||
|  | ||||
| @@ -11,21 +10,27 @@ from sanic.exceptions import ServerError | ||||
| from sanic.models.asgi import ASGIReceive, ASGIScope, ASGISend, MockTransport | ||||
| from sanic.request import Request | ||||
| from sanic.server import ConnInfo | ||||
| from sanic.websocket import WebSocketConnection | ||||
| from sanic.server.websockets.connection import WebSocketConnection | ||||
|  | ||||
|  | ||||
| class Lifespan: | ||||
|     def __init__(self, asgi_app: "ASGIApp") -> None: | ||||
|         self.asgi_app = asgi_app | ||||
|  | ||||
|         if "before_server_start" in self.asgi_app.sanic_app.listeners: | ||||
|         if ( | ||||
|             "server.init.before" | ||||
|             in self.asgi_app.sanic_app.signal_router.name_index | ||||
|         ): | ||||
|             warnings.warn( | ||||
|                 'You have set a listener for "before_server_start" ' | ||||
|                 "in ASGI mode. " | ||||
|                 "It will be executed as early as possible, but not before " | ||||
|                 "the ASGI server is started." | ||||
|             ) | ||||
|         if "after_server_stop" in self.asgi_app.sanic_app.listeners: | ||||
|         if ( | ||||
|             "server.shutdown.after" | ||||
|             in self.asgi_app.sanic_app.signal_router.name_index | ||||
|         ): | ||||
|             warnings.warn( | ||||
|                 'You have set a listener for "after_server_stop" ' | ||||
|                 "in ASGI mode. " | ||||
| @@ -42,19 +47,9 @@ class Lifespan: | ||||
|         in sequence since the ASGI lifespan protocol only supports a single | ||||
|         startup event. | ||||
|         """ | ||||
|         self.asgi_app.sanic_app.router.finalize() | ||||
|         if self.asgi_app.sanic_app.signal_router.routes: | ||||
|             self.asgi_app.sanic_app.signal_router.finalize() | ||||
|         listeners = self.asgi_app.sanic_app.listeners.get( | ||||
|             "before_server_start", [] | ||||
|         ) + self.asgi_app.sanic_app.listeners.get("after_server_start", []) | ||||
|  | ||||
|         for handler in listeners: | ||||
|             response = handler( | ||||
|                 self.asgi_app.sanic_app, self.asgi_app.sanic_app.loop | ||||
|             ) | ||||
|             if response and isawaitable(response): | ||||
|                 await response | ||||
|         await self.asgi_app.sanic_app._startup() | ||||
|         await self.asgi_app.sanic_app._server_event("init", "before") | ||||
|         await self.asgi_app.sanic_app._server_event("init", "after") | ||||
|  | ||||
|     async def shutdown(self) -> None: | ||||
|         """ | ||||
| @@ -65,16 +60,8 @@ class Lifespan: | ||||
|         in sequence since the ASGI lifespan protocol only supports a single | ||||
|         shutdown event. | ||||
|         """ | ||||
|         listeners = self.asgi_app.sanic_app.listeners.get( | ||||
|             "before_server_stop", [] | ||||
|         ) + self.asgi_app.sanic_app.listeners.get("after_server_stop", []) | ||||
|  | ||||
|         for handler in listeners: | ||||
|             response = handler( | ||||
|                 self.asgi_app.sanic_app, self.asgi_app.sanic_app.loop | ||||
|             ) | ||||
|             if response and isawaitable(response): | ||||
|                 await response | ||||
|         await self.asgi_app.sanic_app._server_event("shutdown", "before") | ||||
|         await self.asgi_app.sanic_app._server_event("shutdown", "after") | ||||
|  | ||||
|     async def __call__( | ||||
|         self, scope: ASGIScope, receive: ASGIReceive, send: ASGISend | ||||
| @@ -140,7 +127,6 @@ class ASGIApp: | ||||
|                 instance.ws = instance.transport.create_websocket_connection( | ||||
|                     send, receive | ||||
|                 ) | ||||
|                 await instance.ws.accept() | ||||
|             else: | ||||
|                 raise ServerError("Received unknown ASGI scope") | ||||
|  | ||||
| @@ -208,4 +194,7 @@ class ASGIApp: | ||||
|         """ | ||||
|         Handle the incoming request. | ||||
|         """ | ||||
|         try: | ||||
|             await self.sanic_app.handle_request(self.request) | ||||
|         except Exception as e: | ||||
|             await self.sanic_app.handle_exception(self.request, e) | ||||
|   | ||||
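For context, `Lifespan.startup` and `Lifespan.shutdown` are driven by the ASGI lifespan protocol, which only exposes a single startup and a single shutdown event; a generic (non-Sanic) lifespan callable looks roughly like this sketch:

    async def lifespan(scope, receive, send):
        # Generic ASGI lifespan loop; Sanic maps these two protocol events onto
        # the server.init.before/after and server.shutdown.before/after signals.
        assert scope["type"] == "lifespan"
        while True:
            message = await receive()
            if message["type"] == "lifespan.startup":
                await send({"type": "lifespan.startup.complete"})
            elif message["type"] == "lifespan.shutdown":
                await send({"type": "lifespan.shutdown.complete"})
                return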
| @@ -1,6 +1,9 @@ | ||||
| import re | ||||
|  | ||||
| from typing import Any, Tuple | ||||
| from warnings import warn | ||||
|  | ||||
| from sanic.exceptions import SanicException | ||||
| from sanic.mixins.exceptions import ExceptionMixin | ||||
| from sanic.mixins.listeners import ListenerMixin | ||||
| from sanic.mixins.middleware import MiddlewareMixin | ||||
| @@ -8,6 +11,9 @@ from sanic.mixins.routes import RouteMixin | ||||
| from sanic.mixins.signals import SignalMixin | ||||
|  | ||||
|  | ||||
| VALID_NAME = re.compile(r"^[a-zA-Z][a-zA-Z0-9_\-]*$") | ||||
|  | ||||
|  | ||||
| class BaseSanic( | ||||
|     RouteMixin, | ||||
|     MiddlewareMixin, | ||||
| @@ -17,7 +23,25 @@ class BaseSanic( | ||||
| ): | ||||
|     __fake_slots__: Tuple[str, ...] | ||||
|  | ||||
|     def __init__(self, *args, **kwargs) -> None: | ||||
|     def __init__(self, name: str = None, *args, **kwargs) -> None: | ||||
|         class_name = self.__class__.__name__ | ||||
|  | ||||
|         if name is None: | ||||
|             raise SanicException( | ||||
|                 f"{class_name} instance cannot be unnamed. " | ||||
|                 "Please use Sanic(name='your_application_name') instead.", | ||||
|             ) | ||||
|  | ||||
|         if not VALID_NAME.match(name): | ||||
|             warn( | ||||
|                 f"{class_name} instance named '{name}' uses a format that is" | ||||
|                 f"deprecated. Starting in version 21.12, {class_name} objects " | ||||
|                 "must be named only using alphanumeric characters, _, or -.", | ||||
|                 DeprecationWarning, | ||||
|             ) | ||||
|  | ||||
|         self.name = name | ||||
|  | ||||
|         for base in BaseSanic.__bases__: | ||||
|             base.__init__(self, *args, **kwargs)  # type: ignore | ||||
|  | ||||
| @@ -34,8 +58,9 @@ class BaseSanic( | ||||
|         if name not in self.__fake_slots__: | ||||
|             warn( | ||||
|                 f"Setting variables on {self.__class__.__name__} instances is " | ||||
|                 "deprecated and will be removed in version 21.9. You should " | ||||
|                 "deprecated and will be removed in version 21.12. You should " | ||||
|                 f"change your {self.__class__.__name__} instance to use " | ||||
|                 f"instance.ctx.{name} instead." | ||||
|                 f"instance.ctx.{name} instead.", | ||||
|                 DeprecationWarning, | ||||
|             ) | ||||
|         super().__setattr__(name, value) | ||||
|   | ||||
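The `VALID_NAME` pattern only triggers a deprecation warning for now; a quick sketch of which names it accepts:

    import re

    VALID_NAME = re.compile(r"^[a-zA-Z][a-zA-Z0-9_\-]*$")

    assert VALID_NAME.match("MyApp")            # fine
    assert VALID_NAME.match("my-app_v2")        # dashes and underscores allowed
    assert VALID_NAME.match("my app") is None   # spaces are now deprecated
    assert VALID_NAME.match("2ndapp") is None   # must start with a letter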
| @@ -1,7 +1,8 @@ | ||||
| from collections.abc import MutableSequence | ||||
| from typing import TYPE_CHECKING, List, Optional, Union | ||||
| from __future__ import annotations | ||||
|  | ||||
| import sanic | ||||
| from collections.abc import MutableSequence | ||||
| from functools import partial | ||||
| from typing import TYPE_CHECKING, List, Optional, Union | ||||
|  | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
| @@ -58,13 +59,20 @@ class BlueprintGroup(MutableSequence): | ||||
|         app.blueprint(bpg) | ||||
|     """ | ||||
|  | ||||
|     __slots__ = ("_blueprints", "_url_prefix", "_version", "_strict_slashes") | ||||
|     __slots__ = ( | ||||
|         "_blueprints", | ||||
|         "_url_prefix", | ||||
|         "_version", | ||||
|         "_strict_slashes", | ||||
|         "_version_prefix", | ||||
|     ) | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         url_prefix: Optional[str] = None, | ||||
|         version: Optional[Union[int, str, float]] = None, | ||||
|         strict_slashes: Optional[bool] = None, | ||||
|         version_prefix: str = "/v", | ||||
|     ): | ||||
|         """ | ||||
|         Create a new Blueprint Group | ||||
| @@ -77,6 +85,7 @@ class BlueprintGroup(MutableSequence): | ||||
|         self._blueprints: List[Blueprint] = [] | ||||
|         self._url_prefix = url_prefix | ||||
|         self._version = version | ||||
|         self._version_prefix = version_prefix | ||||
|         self._strict_slashes = strict_slashes | ||||
|  | ||||
|     @property | ||||
| @@ -89,7 +98,7 @@ class BlueprintGroup(MutableSequence): | ||||
|         return self._url_prefix | ||||
|  | ||||
|     @property | ||||
|     def blueprints(self) -> List["sanic.Blueprint"]: | ||||
|     def blueprints(self) -> List[Blueprint]: | ||||
|         """ | ||||
|         Retrieve a list of all the available blueprints under this group. | ||||
|  | ||||
| @@ -116,6 +125,15 @@ class BlueprintGroup(MutableSequence): | ||||
|         """ | ||||
|         return self._strict_slashes | ||||
|  | ||||
|     @property | ||||
|     def version_prefix(self) -> str: | ||||
|         """ | ||||
|         Version prefix; defaults to ``/v`` | ||||
|  | ||||
|         :return: str | ||||
|         """ | ||||
|         return self._version_prefix | ||||
|  | ||||
|     def __iter__(self): | ||||
|         """ | ||||
|         Turn the class Blueprint Group into an Iterable item | ||||
| @@ -170,34 +188,37 @@ class BlueprintGroup(MutableSequence): | ||||
|         """ | ||||
|         return len(self._blueprints) | ||||
|  | ||||
|     def _sanitize_blueprint(self, bp: "sanic.Blueprint") -> "sanic.Blueprint": | ||||
|         """ | ||||
|         Sanitize the Blueprint Entity to override the Version and strict slash | ||||
|         behaviors as required. | ||||
|  | ||||
|         :param bp: Sanic Blueprint entity Object | ||||
|         :return: Modified Blueprint | ||||
|         """ | ||||
|         if self._url_prefix: | ||||
|             merged_prefix = "/".join( | ||||
|                 u.strip("/") for u in [self._url_prefix, bp.url_prefix or ""] | ||||
|             ).rstrip("/") | ||||
|             bp.url_prefix = f"/{merged_prefix}" | ||||
|         for _attr in ["version", "strict_slashes"]: | ||||
|             if getattr(bp, _attr) is None: | ||||
|                 setattr(bp, _attr, getattr(self, _attr)) | ||||
|         return bp | ||||
|  | ||||
|     def append(self, value: "sanic.Blueprint") -> None: | ||||
|     def append(self, value: Blueprint) -> None: | ||||
|         """ | ||||
|         The Abstract class `MutableSequence` leverages this append method to | ||||
|         perform the `BlueprintGroup.append` operation. | ||||
|         :param value: New `Blueprint` object. | ||||
|         :return: None | ||||
|         """ | ||||
|         self._blueprints.append(self._sanitize_blueprint(bp=value)) | ||||
|         self._blueprints.append(value) | ||||
|  | ||||
|     def insert(self, index: int, item: "sanic.Blueprint") -> None: | ||||
|     def exception(self, *exceptions, **kwargs): | ||||
|         """ | ||||
|         A decorator that can be used to implement a global exception handler | ||||
|         for all the Blueprints that belong to this Blueprint Group. | ||||
|  | ||||
|         In case of nested Blueprint Groups, the same handler is applied | ||||
|         across each of the Blueprints recursively. | ||||
|  | ||||
|         :param exceptions: List of Python exceptions to be caught by the handler | ||||
|         :param kwargs: Additional optional arguments to be passed to the | ||||
|             exception handler | ||||
|         :return: a decorated method to handle global exceptions for any | ||||
|             blueprint registered under this group. | ||||
|         """ | ||||
|  | ||||
|         def register_exception_handler_for_blueprints(fn): | ||||
|             for blueprint in self.blueprints: | ||||
|                 blueprint.exception(*exceptions, **kwargs)(fn) | ||||
|  | ||||
|         return register_exception_handler_for_blueprints | ||||
|  | ||||
|     def insert(self, index: int, item: Blueprint) -> None: | ||||
|         """ | ||||
|         The Abstract class `MutableSequence` leverages this insert method to | ||||
|         perform the `BlueprintGroup.insert` operation. | ||||
| @@ -206,7 +227,7 @@ class BlueprintGroup(MutableSequence): | ||||
|         :param item: New `Blueprint` object. | ||||
|         :return: None | ||||
|         """ | ||||
|         self._blueprints.insert(index, self._sanitize_blueprint(item)) | ||||
|         self._blueprints.insert(index, item) | ||||
|  | ||||
|     def middleware(self, *args, **kwargs): | ||||
|         """ | ||||
| @@ -230,3 +251,15 @@ class BlueprintGroup(MutableSequence): | ||||
|             args = list(args)[1:] | ||||
|             return register_middleware_for_blueprints(fn) | ||||
|         return register_middleware_for_blueprints | ||||
|  | ||||
|     def on_request(self, middleware=None): | ||||
|         if callable(middleware): | ||||
|             return self.middleware(middleware, "request") | ||||
|         else: | ||||
|             return partial(self.middleware, attach_to="request") | ||||
|  | ||||
|     def on_response(self, middleware=None): | ||||
|         if callable(middleware): | ||||
|             return self.middleware(middleware, "response") | ||||
|         else: | ||||
|             return partial(self.middleware, attach_to="response") | ||||
|   | ||||
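A hedged usage sketch of the new group-level hooks (blueprint and handler names are illustrative, and it assumes the blueprints are already in the group, since the decorators iterate `self.blueprints`):

    from sanic import Blueprint
    from sanic.response import text

    bp1 = Blueprint("bp1", url_prefix="/one")
    bp2 = Blueprint("bp2", url_prefix="/two")
    group = Blueprint.group(bp1, bp2, version=1, version_prefix="/api/v")

    @group.on_request
    async def check_token(request):
        ...  # registered as request middleware on bp1 and bp2

    @group.exception(ValueError)
    async def handle_value_error(request, exception):
        # attached to each blueprint in the group
        return text("bad value", status=400)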
| @@ -3,6 +3,7 @@ from __future__ import annotations | ||||
| import asyncio | ||||
|  | ||||
| from collections import defaultdict | ||||
| from copy import deepcopy | ||||
| from types import SimpleNamespace | ||||
| from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Set, Union | ||||
|  | ||||
| @@ -12,6 +13,7 @@ from sanic_routing.route import Route  # type: ignore | ||||
| from sanic.base import BaseSanic | ||||
| from sanic.blueprint_group import BlueprintGroup | ||||
| from sanic.exceptions import SanicException | ||||
| from sanic.helpers import Default, _default | ||||
| from sanic.models.futures import FutureRoute, FutureStatic | ||||
| from sanic.models.handler_types import ( | ||||
|     ListenerType, | ||||
| @@ -40,7 +42,7 @@ class Blueprint(BaseSanic): | ||||
|     :param host: IP Address of FQDN for the sanic server to use. | ||||
|     :param version: Blueprint Version | ||||
|     :param strict_slashes: Enforce the API urls are requested with a | ||||
|         training */* | ||||
|         trailing */* | ||||
|     """ | ||||
|  | ||||
|     __fake_slots__ = ( | ||||
| @@ -62,28 +64,23 @@ class Blueprint(BaseSanic): | ||||
|         "strict_slashes", | ||||
|         "url_prefix", | ||||
|         "version", | ||||
|         "version_prefix", | ||||
|         "websocket_routes", | ||||
|     ) | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         name: str, | ||||
|         name: str = None, | ||||
|         url_prefix: Optional[str] = None, | ||||
|         host: Optional[str] = None, | ||||
|         version: Optional[Union[int, str, float]] = None, | ||||
|         strict_slashes: Optional[bool] = None, | ||||
|         version_prefix: str = "/v", | ||||
|     ): | ||||
|         super().__init__() | ||||
|  | ||||
|         self._apps: Set[Sanic] = set() | ||||
|         super().__init__(name=name) | ||||
|         self.reset() | ||||
|         self.ctx = SimpleNamespace() | ||||
|         self.exceptions: List[RouteHandler] = [] | ||||
|         self.host = host | ||||
|         self.listeners: Dict[str, List[ListenerType]] = {} | ||||
|         self.middlewares: List[MiddlewareType] = [] | ||||
|         self.name = name | ||||
|         self.routes: List[Route] = [] | ||||
|         self.statics: List[RouteHandler] = [] | ||||
|         self.strict_slashes = strict_slashes | ||||
|         self.url_prefix = ( | ||||
|             url_prefix[:-1] | ||||
| @@ -91,7 +88,7 @@ class Blueprint(BaseSanic): | ||||
|             else url_prefix | ||||
|         ) | ||||
|         self.version = version | ||||
|         self.websocket_routes: List[Route] = [] | ||||
|         self.version_prefix = version_prefix | ||||
|  | ||||
|     def __repr__(self) -> str: | ||||
|         args = ", ".join( | ||||
| @@ -142,8 +139,89 @@ class Blueprint(BaseSanic): | ||||
|         kwargs["apply"] = False | ||||
|         return super().signal(event, *args, **kwargs) | ||||
|  | ||||
|     def reset(self): | ||||
|         self._apps: Set[Sanic] = set() | ||||
|         self.exceptions: List[RouteHandler] = [] | ||||
|         self.listeners: Dict[str, List[ListenerType]] = {} | ||||
|         self.middlewares: List[MiddlewareType] = [] | ||||
|         self.routes: List[Route] = [] | ||||
|         self.statics: List[RouteHandler] = [] | ||||
|         self.websocket_routes: List[Route] = [] | ||||
|  | ||||
|     def copy( | ||||
|         self, | ||||
|         name: str, | ||||
|         url_prefix: Optional[Union[str, Default]] = _default, | ||||
|         version: Optional[Union[int, str, float, Default]] = _default, | ||||
|         version_prefix: Union[str, Default] = _default, | ||||
|         strict_slashes: Optional[Union[bool, Default]] = _default, | ||||
|         with_registration: bool = True, | ||||
|         with_ctx: bool = False, | ||||
|     ): | ||||
|         """ | ||||
|         Copy a blueprint instance with some optional parameters to | ||||
|         override the values of attributes in the old instance. | ||||
|  | ||||
|         :param name: unique name of the blueprint | ||||
|         :param url_prefix: URL to be prefixed before all route URLs | ||||
|         :param version: Blueprint Version | ||||
|         :param version_prefix: the prefix of the version number shown in the | ||||
|             URL. | ||||
|         :param strict_slashes: Enforce the API urls are requested with a | ||||
|             trailing */* | ||||
|         :param with_registration: whether to register the new blueprint | ||||
|             instance with the sanic apps the old instance was registered on | ||||
|         :param with_ctx: whether ``ctx`` will be copied or not. | ||||
|         """ | ||||
|  | ||||
|         attrs_backup = { | ||||
|             "_apps": self._apps, | ||||
|             "routes": self.routes, | ||||
|             "websocket_routes": self.websocket_routes, | ||||
|             "middlewares": self.middlewares, | ||||
|             "exceptions": self.exceptions, | ||||
|             "listeners": self.listeners, | ||||
|             "statics": self.statics, | ||||
|         } | ||||
|  | ||||
|         self.reset() | ||||
|         new_bp = deepcopy(self) | ||||
|         new_bp.name = name | ||||
|  | ||||
|         if not isinstance(url_prefix, Default): | ||||
|             new_bp.url_prefix = url_prefix | ||||
|         if not isinstance(version, Default): | ||||
|             new_bp.version = version | ||||
|         if not isinstance(strict_slashes, Default): | ||||
|             new_bp.strict_slashes = strict_slashes | ||||
|         if not isinstance(version_prefix, Default): | ||||
|             new_bp.version_prefix = version_prefix | ||||
|  | ||||
|         for key, value in attrs_backup.items(): | ||||
|             setattr(self, key, value) | ||||
|  | ||||
|         if with_registration and self._apps: | ||||
|             if new_bp._future_statics: | ||||
|                 raise SanicException( | ||||
|                     "Static routes registered with the old blueprint instance," | ||||
|                     " cannot be registered again." | ||||
|                 ) | ||||
|             for app in self._apps: | ||||
|                 app.blueprint(new_bp) | ||||
|  | ||||
|         if not with_ctx: | ||||
|             new_bp.ctx = SimpleNamespace() | ||||
|  | ||||
|         return new_bp | ||||
|  | ||||
|     @staticmethod | ||||
|     def group(*blueprints, url_prefix="", version=None, strict_slashes=None): | ||||
|     def group( | ||||
|         *blueprints: Union[Blueprint, BlueprintGroup], | ||||
|         url_prefix: Optional[str] = None, | ||||
|         version: Optional[Union[int, str, float]] = None, | ||||
|         strict_slashes: Optional[bool] = None, | ||||
|         version_prefix: str = "/v", | ||||
|     ): | ||||
|         """ | ||||
|         Create a list of blueprints, optionally grouping them under a | ||||
|         general URL prefix. | ||||
| @@ -160,8 +238,6 @@ class Blueprint(BaseSanic): | ||||
|             for i in nested: | ||||
|                 if isinstance(i, (list, tuple)): | ||||
|                     yield from chain(i) | ||||
|                 elif isinstance(i, BlueprintGroup): | ||||
|                     yield from i.blueprints | ||||
|                 else: | ||||
|                     yield i | ||||
|  | ||||
| @@ -169,6 +245,7 @@ class Blueprint(BaseSanic): | ||||
|             url_prefix=url_prefix, | ||||
|             version=version, | ||||
|             strict_slashes=strict_slashes, | ||||
|             version_prefix=version_prefix, | ||||
|         ) | ||||
|         for bp in chain(blueprints): | ||||
|             bps.append(bp) | ||||
| @@ -186,6 +263,12 @@ class Blueprint(BaseSanic): | ||||
|  | ||||
|         self._apps.add(app) | ||||
|         url_prefix = options.get("url_prefix", self.url_prefix) | ||||
|         opt_version = options.get("version", None) | ||||
|         opt_strict_slashes = options.get("strict_slashes", None) | ||||
|         opt_version_prefix = options.get("version_prefix", self.version_prefix) | ||||
|         error_format = options.get( | ||||
|             "error_format", app.config.FALLBACK_ERROR_FORMAT | ||||
|         ) | ||||
|  | ||||
|         routes = [] | ||||
|         middleware = [] | ||||
| @@ -200,12 +283,22 @@ class Blueprint(BaseSanic): | ||||
|             # Prepend the blueprint URI prefix if available | ||||
|             uri = url_prefix + future.uri if url_prefix else future.uri | ||||
|  | ||||
|             strict_slashes = ( | ||||
|                 self.strict_slashes | ||||
|                 if future.strict_slashes is None | ||||
|                 and self.strict_slashes is not None | ||||
|                 else future.strict_slashes | ||||
|             version_prefix = self.version_prefix | ||||
|             for prefix in ( | ||||
|                 future.version_prefix, | ||||
|                 opt_version_prefix, | ||||
|             ): | ||||
|                 if prefix and prefix != "/v": | ||||
|                     version_prefix = prefix | ||||
|                     break | ||||
|  | ||||
|             version = self._extract_value( | ||||
|                 future.version, opt_version, self.version | ||||
|             ) | ||||
|             strict_slashes = self._extract_value( | ||||
|                 future.strict_slashes, opt_strict_slashes, self.strict_slashes | ||||
|             ) | ||||
|  | ||||
|             name = app._generate_name(future.name) | ||||
|  | ||||
|             apply_route = FutureRoute( | ||||
| @@ -215,13 +308,15 @@ class Blueprint(BaseSanic): | ||||
|                 future.host or self.host, | ||||
|                 strict_slashes, | ||||
|                 future.stream, | ||||
|                 future.version or self.version, | ||||
|                 version, | ||||
|                 name, | ||||
|                 future.ignore_body, | ||||
|                 future.websocket, | ||||
|                 future.subprotocols, | ||||
|                 future.unquote, | ||||
|                 future.static, | ||||
|                 version_prefix, | ||||
|                 error_format, | ||||
|             ) | ||||
|  | ||||
|             route = app._apply_route(apply_route) | ||||
| @@ -240,26 +335,27 @@ class Blueprint(BaseSanic): | ||||
|  | ||||
|         route_names = [route.name for route in routes if route] | ||||
|  | ||||
|         # Middleware | ||||
|         if route_names: | ||||
|             # Middleware | ||||
|             for future in self._future_middleware: | ||||
|                 middleware.append(app._apply_middleware(future, route_names)) | ||||
|  | ||||
|             # Exceptions | ||||
|             for future in self._future_exceptions: | ||||
|             exception_handlers.append(app._apply_exception_handler(future)) | ||||
|                 exception_handlers.append( | ||||
|                     app._apply_exception_handler(future, route_names) | ||||
|                 ) | ||||
|  | ||||
|         # Event listeners | ||||
|         for listener in self._future_listeners: | ||||
|             listeners[listener.event].append(app._apply_listener(listener)) | ||||
|  | ||||
|         # Signals | ||||
|         for signal in self._future_signals: | ||||
|             signal.condition.update({"blueprint": self.name}) | ||||
|             app._apply_signal(signal) | ||||
|  | ||||
|         self.routes = [route for route in routes if isinstance(route, Route)] | ||||
|  | ||||
|         # Deprecate these in 21.6 | ||||
|         self.websocket_routes = [ | ||||
|             route for route in self.routes if route.ctx.websocket | ||||
|         ] | ||||
| @@ -288,3 +384,12 @@ class Blueprint(BaseSanic): | ||||
|             return_when=asyncio.FIRST_COMPLETED, | ||||
|             timeout=timeout, | ||||
|         ) | ||||
|  | ||||
|     @staticmethod | ||||
|     def _extract_value(*values): | ||||
|         value = values[-1] | ||||
|         for v in values: | ||||
|             if v is not None: | ||||
|                 value = v | ||||
|                 break | ||||
|         return value | ||||
|   | ||||
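A hedged sketch of the new `Blueprint.copy()` API (app, blueprint, and route names are illustrative):

    from sanic import Blueprint, Sanic
    from sanic.response import json

    app = Sanic("CopyDemo")
    v1 = Blueprint("api_v1", url_prefix="/api", version=1)

    @v1.get("/ping")
    async def ping(request):
        return json({"ok": True})

    app.blueprint(v1)

    # Reuse the same routes under version 2; with_registration=True (the default)
    # attaches the copy to every app the original was registered on.
    v2 = v1.copy("api_v2", version=2)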
104  sanic/config.py
							| @@ -1,7 +1,11 @@ | ||||
| from inspect import isclass | ||||
| from os import environ | ||||
| from pathlib import Path | ||||
| from typing import Any, Union | ||||
| from typing import Any, Dict, Optional, Union | ||||
| from warnings import warn | ||||
|  | ||||
| from sanic.errorpages import check_error_format | ||||
| from sanic.http import Http | ||||
|  | ||||
| from .utils import load_module_from_file_location, str_to_bool | ||||
|  | ||||
| @@ -15,33 +19,59 @@ BASE_LOGO = """ | ||||
| """ | ||||
|  | ||||
| DEFAULT_CONFIG = { | ||||
|     "REQUEST_MAX_SIZE": 100000000,  # 100 megabytes | ||||
|     "REQUEST_BUFFER_QUEUE_SIZE": 100, | ||||
|     "ACCESS_LOG": True, | ||||
|     "EVENT_AUTOREGISTER": False, | ||||
|     "FALLBACK_ERROR_FORMAT": "auto", | ||||
|     "FORWARDED_FOR_HEADER": "X-Forwarded-For", | ||||
|     "FORWARDED_SECRET": None, | ||||
|     "GRACEFUL_SHUTDOWN_TIMEOUT": 15.0,  # 15 sec | ||||
|     "KEEP_ALIVE_TIMEOUT": 5,  # 5 seconds | ||||
|     "KEEP_ALIVE": True, | ||||
|     "PROXIES_COUNT": None, | ||||
|     "REAL_IP_HEADER": None, | ||||
|     "REGISTER": True, | ||||
|     "REQUEST_BUFFER_SIZE": 65536,  # 64 KiB | ||||
|     "REQUEST_MAX_HEADER_SIZE": 8192,  # 8 KiB, but cannot exceed 16384 | ||||
|     "REQUEST_ID_HEADER": "X-Request-ID", | ||||
|     "REQUEST_MAX_SIZE": 100000000,  # 100 megabytes | ||||
|     "REQUEST_TIMEOUT": 60,  # 60 seconds | ||||
|     "RESPONSE_TIMEOUT": 60,  # 60 seconds | ||||
|     "KEEP_ALIVE": True, | ||||
|     "KEEP_ALIVE_TIMEOUT": 5,  # 5 seconds | ||||
|     "WEBSOCKET_MAX_SIZE": 2 ** 20,  # 1 megabyte | ||||
|     "WEBSOCKET_MAX_QUEUE": 32, | ||||
|     "WEBSOCKET_READ_LIMIT": 2 ** 16, | ||||
|     "WEBSOCKET_WRITE_LIMIT": 2 ** 16, | ||||
|     "WEBSOCKET_PING_TIMEOUT": 20, | ||||
|     "WEBSOCKET_PING_INTERVAL": 20, | ||||
|     "GRACEFUL_SHUTDOWN_TIMEOUT": 15.0,  # 15 sec | ||||
|     "ACCESS_LOG": True, | ||||
|     "FORWARDED_SECRET": None, | ||||
|     "REAL_IP_HEADER": None, | ||||
|     "PROXIES_COUNT": None, | ||||
|     "FORWARDED_FOR_HEADER": "X-Forwarded-For", | ||||
|     "REQUEST_ID_HEADER": "X-Request-ID", | ||||
|     "FALLBACK_ERROR_FORMAT": "html", | ||||
|     "REGISTER": True, | ||||
|     "WEBSOCKET_PING_TIMEOUT": 20, | ||||
| } | ||||
|  | ||||
|  | ||||
| class Config(dict): | ||||
|     def __init__(self, defaults=None, load_env=True, keep_alive=None): | ||||
|     ACCESS_LOG: bool | ||||
|     EVENT_AUTOREGISTER: bool | ||||
|     FALLBACK_ERROR_FORMAT: str | ||||
|     FORWARDED_FOR_HEADER: str | ||||
|     FORWARDED_SECRET: Optional[str] | ||||
|     GRACEFUL_SHUTDOWN_TIMEOUT: float | ||||
|     KEEP_ALIVE_TIMEOUT: int | ||||
|     KEEP_ALIVE: bool | ||||
|     PROXIES_COUNT: Optional[int] | ||||
|     REAL_IP_HEADER: Optional[str] | ||||
|     REGISTER: bool | ||||
|     REQUEST_BUFFER_SIZE: int | ||||
|     REQUEST_MAX_HEADER_SIZE: int | ||||
|     REQUEST_ID_HEADER: str | ||||
|     REQUEST_MAX_SIZE: int | ||||
|     REQUEST_TIMEOUT: int | ||||
|     RESPONSE_TIMEOUT: int | ||||
|     SERVER_NAME: str | ||||
|     WEBSOCKET_MAX_SIZE: int | ||||
|     WEBSOCKET_PING_INTERVAL: int | ||||
|     WEBSOCKET_PING_TIMEOUT: int | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         defaults: Dict[str, Union[str, bool, int, float, None]] = None, | ||||
|         load_env: Optional[Union[bool, str]] = True, | ||||
|         env_prefix: Optional[str] = SANIC_PREFIX, | ||||
|         keep_alive: Optional[bool] = None, | ||||
|     ): | ||||
|         defaults = defaults or {} | ||||
|         super().__init__({**DEFAULT_CONFIG, **defaults}) | ||||
|  | ||||
| @@ -50,9 +80,23 @@ class Config(dict): | ||||
|         if keep_alive is not None: | ||||
|             self.KEEP_ALIVE = keep_alive | ||||
|  | ||||
|         if env_prefix != SANIC_PREFIX: | ||||
|             if env_prefix: | ||||
|                 self.load_environment_vars(env_prefix) | ||||
|         elif load_env is not True: | ||||
|             if load_env: | ||||
|             prefix = SANIC_PREFIX if load_env is True else load_env | ||||
|             self.load_environment_vars(prefix=prefix) | ||||
|                 self.load_environment_vars(prefix=load_env) | ||||
|             warn( | ||||
|                 "Use of load_env is deprecated and will be removed in " | ||||
|                 "21.12. Modify the configuration prefix by passing " | ||||
|                 "env_prefix instead.", | ||||
|                 DeprecationWarning, | ||||
|             ) | ||||
|         else: | ||||
|             self.load_environment_vars(SANIC_PREFIX) | ||||
|  | ||||
|         self._configure_header_size() | ||||
|         self._check_error_format() | ||||
|  | ||||
|     def __getattr__(self, attr): | ||||
|         try: | ||||
| @@ -62,6 +106,24 @@ class Config(dict): | ||||
|  | ||||
|     def __setattr__(self, attr, value): | ||||
|         self[attr] = value | ||||
|         if attr in ( | ||||
|             "REQUEST_MAX_HEADER_SIZE", | ||||
|             "REQUEST_BUFFER_SIZE", | ||||
|             "REQUEST_MAX_SIZE", | ||||
|         ): | ||||
|             self._configure_header_size() | ||||
|         elif attr == "FALLBACK_ERROR_FORMAT": | ||||
|             self._check_error_format() | ||||
|  | ||||
|     def _configure_header_size(self): | ||||
|         Http.set_header_max_size( | ||||
|             self.REQUEST_MAX_HEADER_SIZE, | ||||
|             self.REQUEST_BUFFER_SIZE - 4096, | ||||
|             self.REQUEST_MAX_SIZE, | ||||
|         ) | ||||
|  | ||||
|     def _check_error_format(self): | ||||
|         check_error_format(self.FALLBACK_ERROR_FORMAT) | ||||
|  | ||||
|     def load_environment_vars(self, prefix=SANIC_PREFIX): | ||||
|         """ | ||||
|   | ||||
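A hedged sketch of the new `env_prefix` argument, which replaces the deprecated string form of `load_env` (the prefix and variable below are illustrative; environment values are type-coerced on load):

    import os

    from sanic.config import Config

    os.environ["MYAPP_REQUEST_TIMEOUT"] = "30"

    # Old, now deprecated:  Config(load_env="MYAPP_")
    config = Config(env_prefix="MYAPP_")
    assert config.REQUEST_TIMEOUT == 30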
| @@ -1,2 +1,28 @@ | ||||
| HTTP_METHODS = ("GET", "POST", "PUT", "HEAD", "OPTIONS", "PATCH", "DELETE") | ||||
| from enum import Enum, auto | ||||
|  | ||||
|  | ||||
| class HTTPMethod(str, Enum): | ||||
|     def _generate_next_value_(name, start, count, last_values): | ||||
|         return name.upper() | ||||
|  | ||||
|     def __eq__(self, value: object) -> bool: | ||||
|         value = str(value).upper() | ||||
|         return super().__eq__(value) | ||||
|  | ||||
|     def __hash__(self) -> int: | ||||
|         return hash(self.value) | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return self.value | ||||
|  | ||||
|     GET = auto() | ||||
|     POST = auto() | ||||
|     PUT = auto() | ||||
|     HEAD = auto() | ||||
|     OPTIONS = auto() | ||||
|     PATCH = auto() | ||||
|     DELETE = auto() | ||||
|  | ||||
|  | ||||
| HTTP_METHODS = tuple(HTTPMethod.__members__.values()) | ||||
| DEFAULT_HTTP_CONTENT_TYPE = "application/octet-stream" | ||||
|   | ||||
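Because `HTTPMethod` mixes in `str` and uppercases the operand in `__eq__`, comparisons against plain strings keep working:

    from sanic.constants import HTTP_METHODS, HTTPMethod

    assert HTTPMethod.GET == "get"              # case-insensitive comparison
    assert HTTPMethod.GET == "GET"
    assert "POST" in HTTP_METHODS               # the tuple now holds enum members
    assert str(HTTPMethod.DELETE) == "DELETE"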
| @@ -340,41 +340,138 @@ RENDERERS_BY_CONFIG = { | ||||
| } | ||||
|  | ||||
| RENDERERS_BY_CONTENT_TYPE = { | ||||
|     "multipart/form-data": HTMLRenderer, | ||||
|     "application/json": JSONRenderer, | ||||
|     "text/plain": TextRenderer, | ||||
|     "application/json": JSONRenderer, | ||||
|     "multipart/form-data": HTMLRenderer, | ||||
|     "text/html": HTMLRenderer, | ||||
| } | ||||
| CONTENT_TYPE_BY_RENDERERS = { | ||||
|     v: k for k, v in RENDERERS_BY_CONTENT_TYPE.items() | ||||
| } | ||||
|  | ||||
| RESPONSE_MAPPING = { | ||||
|     "empty": "html", | ||||
|     "json": "json", | ||||
|     "text": "text", | ||||
|     "raw": "text", | ||||
|     "html": "html", | ||||
|     "file": "html", | ||||
|     "file_stream": "text", | ||||
|     "stream": "text", | ||||
|     "redirect": "html", | ||||
|     "text/plain": "text", | ||||
|     "text/html": "html", | ||||
|     "application/json": "json", | ||||
| } | ||||
|  | ||||
|  | ||||
| def check_error_format(format): | ||||
|     if format not in RENDERERS_BY_CONFIG and format != "auto": | ||||
|         raise SanicException(f"Unknown format: {format}") | ||||
|  | ||||
|  | ||||
| def exception_response( | ||||
|     request: Request, | ||||
|     exception: Exception, | ||||
|     debug: bool, | ||||
|     fallback: str, | ||||
|     base: t.Type[BaseRenderer], | ||||
|     renderer: t.Type[t.Optional[BaseRenderer]] = None, | ||||
| ) -> HTTPResponse: | ||||
|     """ | ||||
|     Render a response for the default FALLBACK exception handler. | ||||
|     """ | ||||
|     content_type = None | ||||
|  | ||||
|     if not renderer: | ||||
|         renderer = HTMLRenderer | ||||
|         # Make sure we have something set | ||||
|         renderer = base | ||||
|         render_format = fallback | ||||
|  | ||||
|         if request: | ||||
|             if request.app.config.FALLBACK_ERROR_FORMAT == "auto": | ||||
|             # If there is a request, try and get the format | ||||
|             # from the route | ||||
|             if request.route: | ||||
|                 try: | ||||
|                     renderer = JSONRenderer if request.json else HTMLRenderer | ||||
|                 except InvalidUsage: | ||||
|                     render_format = request.route.ctx.error_format | ||||
|                 except AttributeError: | ||||
|                     ... | ||||
|  | ||||
|             content_type = request.headers.getone("content-type", "").split( | ||||
|                 ";" | ||||
|             )[0] | ||||
|  | ||||
|             acceptable = request.accept | ||||
|  | ||||
|             # If the format is auto still, make a guess | ||||
|             if render_format == "auto": | ||||
|                 # First, if there is an Accept header, check if text/html | ||||
|                 # is the first option | ||||
|                 # According to MDN Web Docs, all major browsers use text/html | ||||
|                 # as the primary value in Accept (with the exception of IE 8, | ||||
|                 # and, well, if you are supporting IE 8, then you have bigger | ||||
|                 # problems to concern yourself with than what default exception | ||||
|                 # renderer is used) | ||||
|                 # Source: | ||||
|                 # https://developer.mozilla.org/en-US/docs/Web/HTTP/Content_negotiation/List_of_default_Accept_values | ||||
|  | ||||
|                 if acceptable and acceptable[0].match( | ||||
|                     "text/html", | ||||
|                     allow_type_wildcard=False, | ||||
|                     allow_subtype_wildcard=False, | ||||
|                 ): | ||||
|                     renderer = HTMLRenderer | ||||
|  | ||||
|                 content_type, *_ = request.headers.get( | ||||
|                     "content-type", "" | ||||
|                 ).split(";") | ||||
|                 renderer = RENDERERS_BY_CONTENT_TYPE.get( | ||||
|                     content_type, renderer | ||||
|                 # Second, if there is an Accept header, check if | ||||
|                 # application/json is an option, or if the content-type | ||||
|                 # is application/json | ||||
|                 elif ( | ||||
|                     acceptable | ||||
|                     and acceptable.match( | ||||
|                         "application/json", | ||||
|                         allow_type_wildcard=False, | ||||
|                         allow_subtype_wildcard=False, | ||||
|                     ) | ||||
|                     or content_type == "application/json" | ||||
|                 ): | ||||
|                     renderer = JSONRenderer | ||||
|  | ||||
|                 # Third, if there is no Accept header, assume we want text. | ||||
|                 # The likely use case here is a raw socket. | ||||
|                 elif not acceptable: | ||||
|                     renderer = TextRenderer | ||||
|                 else: | ||||
|                     # Fourth, look to see if there was a JSON body | ||||
|                     # When in this situation, the request is probably coming | ||||
|                     # from curl, an API client like Postman or Insomnia, or a | ||||
|                     # package like requests or httpx | ||||
|                     try: | ||||
|                         # Give them the benefit of the doubt if they did: | ||||
|                         # $ curl localhost:8000 -d '{"foo": "bar"}' | ||||
|                         # And provide them with JSONRenderer | ||||
|                         renderer = JSONRenderer if request.json else base | ||||
|                     except InvalidUsage: | ||||
|                         renderer = base | ||||
|             else: | ||||
|                 render_format = request.app.config.FALLBACK_ERROR_FORMAT | ||||
|                 renderer = RENDERERS_BY_CONFIG.get(render_format, renderer) | ||||
|  | ||||
|             # Lastly, if there is an Accept header, make sure | ||||
|             # our choice is okay | ||||
|             if acceptable: | ||||
|                 type_ = CONTENT_TYPE_BY_RENDERERS.get(renderer)  # type: ignore | ||||
|                 if type_ and type_ not in acceptable: | ||||
|                     # If the renderer selected is not in the Accept header | ||||
|                     # look through what is in the Accept header, and select | ||||
|                     # the first option that matches. Otherwise, just drop back | ||||
|                     # to the original default | ||||
|                     for accept in acceptable: | ||||
|                         mtype = f"{accept.type_}/{accept.subtype}" | ||||
|                         maybe = RENDERERS_BY_CONTENT_TYPE.get(mtype) | ||||
|                         if maybe: | ||||
|                             renderer = maybe | ||||
|                             break | ||||
|                     else: | ||||
|                         renderer = base | ||||
|  | ||||
|     renderer = t.cast(t.Type[BaseRenderer], renderer) | ||||
|     return renderer(request, exception, debug).render() | ||||
|   | ||||
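A hedged sketch of the new "auto" fallback from the client's point of view (app and route names are illustrative; the curl examples follow the selection order described in the comments above):

    from sanic import Sanic
    from sanic.exceptions import ServerError

    app = Sanic("ErrorFormatDemo")
    app.config.FALLBACK_ERROR_FORMAT = "auto"  # the new default, shown explicitly

    @app.route("/boom", methods=["GET", "POST"])
    async def boom(request):
        raise ServerError("something went wrong")

    # $ curl -H "Accept: text/html"        localhost:8000/boom  -> HTML error page
    # $ curl -H "Accept: application/json" localhost:8000/boom  -> JSON error page
    # $ curl -d '{"foo": "bar"}'            localhost:8000/boom  -> JSON (body sniffed)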
| @@ -3,26 +3,22 @@ from typing import Optional, Union | ||||
| from sanic.helpers import STATUS_CODES | ||||
|  | ||||
|  | ||||
| _sanic_exceptions = {} | ||||
|  | ||||
|  | ||||
| def add_status_code(code, quiet=None): | ||||
|     """ | ||||
|     Decorator used for adding exceptions to :class:`SanicException`. | ||||
|     """ | ||||
|  | ||||
|     def class_decorator(cls): | ||||
|         cls.status_code = code | ||||
|         if quiet or quiet is None and code != 500: | ||||
|             cls.quiet = True | ||||
|         _sanic_exceptions[code] = cls | ||||
|         return cls | ||||
|  | ||||
|     return class_decorator | ||||
|  | ||||
|  | ||||
| class SanicException(Exception): | ||||
|     def __init__(self, message, status_code=None, quiet=None): | ||||
|     message: str = "" | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         message: Optional[Union[str, bytes]] = None, | ||||
|         status_code: Optional[int] = None, | ||||
|         quiet: Optional[bool] = None, | ||||
|     ) -> None: | ||||
|         if message is None: | ||||
|             if self.message: | ||||
|                 message = self.message | ||||
|             elif status_code is not None: | ||||
|                 msg: bytes = STATUS_CODES.get(status_code, b"") | ||||
|                 message = msg.decode("utf8") | ||||
|  | ||||
|         super().__init__(message) | ||||
|  | ||||
|         if status_code is not None: | ||||
| @@ -33,45 +29,45 @@ class SanicException(Exception): | ||||
|             self.quiet = True | ||||
|  | ||||
|  | ||||
| @add_status_code(404) | ||||
| class NotFound(SanicException): | ||||
|     """ | ||||
|     **Status**: 404 Not Found | ||||
|     """ | ||||
|  | ||||
|     pass | ||||
|     status_code = 404 | ||||
|     quiet = True | ||||
|  | ||||
|  | ||||
| @add_status_code(400) | ||||
| class InvalidUsage(SanicException): | ||||
|     """ | ||||
|     **Status**: 400 Bad Request | ||||
|     """ | ||||
|  | ||||
|     pass | ||||
|     status_code = 400 | ||||
|     quiet = True | ||||
|  | ||||
|  | ||||
| @add_status_code(405) | ||||
| class MethodNotSupported(SanicException): | ||||
|     """ | ||||
|     **Status**: 405 Method Not Allowed | ||||
|     """ | ||||
|  | ||||
|     status_code = 405 | ||||
|     quiet = True | ||||
|  | ||||
|     def __init__(self, message, method, allowed_methods): | ||||
|         super().__init__(message) | ||||
|         self.headers = {"Allow": ", ".join(allowed_methods)} | ||||
|  | ||||
|  | ||||
| @add_status_code(500) | ||||
| class ServerError(SanicException): | ||||
|     """ | ||||
|     **Status**: 500 Internal Server Error | ||||
|     """ | ||||
|  | ||||
|     pass | ||||
|     status_code = 500 | ||||
|  | ||||
|  | ||||
| @add_status_code(503) | ||||
| class ServiceUnavailable(SanicException): | ||||
|     """ | ||||
|     **Status**: 503 Service Unavailable | ||||
| @@ -80,7 +76,8 @@ class ServiceUnavailable(SanicException): | ||||
|     down for maintenance). Generally, this is a temporary state. | ||||
|     """ | ||||
|  | ||||
|     pass | ||||
|     status_code = 503 | ||||
|     quiet = True | ||||
|  | ||||
|  | ||||
| class URLBuildError(ServerError): | ||||
| @@ -88,7 +85,7 @@ class URLBuildError(ServerError): | ||||
|     **Status**: 500 Internal Server Error | ||||
|     """ | ||||
|  | ||||
|     pass | ||||
|     status_code = 500 | ||||
|  | ||||
|  | ||||
| class FileNotFound(NotFound): | ||||
| @@ -102,7 +99,6 @@ class FileNotFound(NotFound): | ||||
|         self.relative_url = relative_url | ||||
|  | ||||
|  | ||||
| @add_status_code(408) | ||||
| class RequestTimeout(SanicException): | ||||
|     """The Web server (running the Web site) thinks that there has been too | ||||
|     long an interval of time between 1) the establishment of an IP | ||||
| @@ -112,16 +108,17 @@ class RequestTimeout(SanicException): | ||||
|     server has 'timed out' on that particular socket connection. | ||||
|     """ | ||||
|  | ||||
|     pass | ||||
|     status_code = 408 | ||||
|     quiet = True | ||||
|  | ||||
|  | ||||
| @add_status_code(413) | ||||
| class PayloadTooLarge(SanicException): | ||||
|     """ | ||||
|     **Status**: 413 Payload Too Large | ||||
|     """ | ||||
|  | ||||
|     pass | ||||
|     status_code = 413 | ||||
|     quiet = True | ||||
|  | ||||
|  | ||||
| class HeaderNotFound(InvalidUsage): | ||||
| @@ -129,36 +126,42 @@ class HeaderNotFound(InvalidUsage): | ||||
|     **Status**: 400 Bad Request | ||||
|     """ | ||||
|  | ||||
|     pass | ||||
|  | ||||
| class InvalidHeader(InvalidUsage): | ||||
|     """ | ||||
|     **Status**: 400 Bad Request | ||||
|     """ | ||||
|  | ||||
|  | ||||
| @add_status_code(416) | ||||
| class ContentRangeError(SanicException): | ||||
|     """ | ||||
|     **Status**: 416 Range Not Satisfiable | ||||
|     """ | ||||
|  | ||||
|     status_code = 416 | ||||
|     quiet = True | ||||
|  | ||||
|     def __init__(self, message, content_range): | ||||
|         super().__init__(message) | ||||
|         self.headers = {"Content-Range": f"bytes */{content_range.total}"} | ||||
|  | ||||
|  | ||||
| @add_status_code(417) | ||||
| class HeaderExpectationFailed(SanicException): | ||||
|     """ | ||||
|     **Status**: 417 Expectation Failed | ||||
|     """ | ||||
|  | ||||
|     pass | ||||
|     status_code = 417 | ||||
|     quiet = True | ||||
|  | ||||
|  | ||||
| @add_status_code(403) | ||||
| class Forbidden(SanicException): | ||||
|     """ | ||||
|     **Status**: 403 Forbidden | ||||
|     """ | ||||
|  | ||||
|     pass | ||||
|     status_code = 403 | ||||
|     quiet = True | ||||
|  | ||||
|  | ||||
| class InvalidRangeType(ContentRangeError): | ||||
| @@ -166,7 +169,8 @@ class InvalidRangeType(ContentRangeError): | ||||
|     **Status**: 416 Range Not Satisfiable | ||||
|     """ | ||||
|  | ||||
|     pass | ||||
|     status_code = 416 | ||||
|     quiet = True | ||||
|  | ||||
|  | ||||
| class PyFileError(Exception): | ||||
| @@ -174,7 +178,6 @@ class PyFileError(Exception): | ||||
|         super().__init__("could not execute config file %s", file) | ||||
|  | ||||
|  | ||||
| @add_status_code(401) | ||||
| class Unauthorized(SanicException): | ||||
|     """ | ||||
|     **Status**: 401 Unauthorized | ||||
| @@ -210,6 +213,9 @@ class Unauthorized(SanicException): | ||||
|                            realm="Restricted Area") | ||||
|     """ | ||||
|  | ||||
|     status_code = 401 | ||||
|     quiet = True | ||||
|  | ||||
|     def __init__(self, message, status_code=None, scheme=None, **kwargs): | ||||
|         super().__init__(message, status_code) | ||||
|  | ||||
| @@ -231,6 +237,11 @@ class InvalidSignal(SanicException): | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class WebsocketClosed(SanicException): | ||||
|     quiet = True | ||||
|     message = "Client has closed the websocket connection" | ||||
|  | ||||
|  | ||||
| def abort(status_code: int, message: Optional[Union[str, bytes]] = None): | ||||
|     """ | ||||
|     Raise an exception based on SanicException. Returns the HTTP response | ||||
| @@ -241,9 +252,13 @@ def abort(status_code: int, message: Optional[Union[str, bytes]] = None): | ||||
|     :param status_code: The HTTP status code to return. | ||||
|     :param message: The HTTP response body. Defaults to the messages in | ||||
|     """ | ||||
|     if message is None: | ||||
|         msg: bytes = STATUS_CODES[status_code] | ||||
|         # These are stored as bytes in the STATUS_CODES dict | ||||
|         message = msg.decode("utf8") | ||||
|     sanic_exception = _sanic_exceptions.get(status_code, SanicException) | ||||
|     raise sanic_exception(message=message, status_code=status_code) | ||||
|     import warnings | ||||
|  | ||||
|     warnings.warn( | ||||
|         "sanic.exceptions.abort has been marked as deprecated, and will be " | ||||
|         "removed in release 21.12.\n To migrate your code, simply replace " | ||||
|         "abort(status_code, msg) with raise SanicException(msg, status_code), " | ||||
|         "or even better, raise an appropriate SanicException subclass." | ||||
|     ) | ||||
|  | ||||
|     raise SanicException(message=message, status_code=status_code) | ||||
|   | ||||
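A migration sketch matching the deprecation notice above, now that each exception carries its status code as a class attribute:

    from sanic.exceptions import Forbidden

    async def guarded_handler(request):
        # Before (deprecated above): abort(403, "No entry")
        # After: raise the exception directly; the status code is a class
        # attribute (Forbidden.status_code == 403, quiet=True), or use
        # SanicException("No entry", status_code=403) for a generic error.
        raise Forbidden("No entry")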
| @@ -1,12 +1,13 @@ | ||||
| from traceback import format_exc | ||||
| from typing import Dict, List, Optional, Tuple, Type | ||||
|  | ||||
| from sanic.errorpages import exception_response | ||||
| from sanic.errorpages import BaseRenderer, HTMLRenderer, exception_response | ||||
| from sanic.exceptions import ( | ||||
|     ContentRangeError, | ||||
|     HeaderNotFound, | ||||
|     InvalidRangeType, | ||||
| ) | ||||
| from sanic.log import logger | ||||
| from sanic.log import error_logger | ||||
| from sanic.models.handler_types import RouteHandler | ||||
| from sanic.response import text | ||||
|  | ||||
|  | ||||
| @@ -23,16 +24,17 @@ class ErrorHandler: | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     handlers = None | ||||
|     cached_handlers = None | ||||
|     _missing = object() | ||||
|  | ||||
|     def __init__(self): | ||||
|         self.handlers = [] | ||||
|         self.cached_handlers = {} | ||||
|     # Beginning in v22.3, the base renderer will be TextRenderer | ||||
|     def __init__(self, fallback: str, base: Type[BaseRenderer] = HTMLRenderer): | ||||
|         self.handlers: List[Tuple[Type[BaseException], RouteHandler]] = [] | ||||
|         self.cached_handlers: Dict[ | ||||
|             Tuple[Type[BaseException], Optional[str]], Optional[RouteHandler] | ||||
|         ] = {} | ||||
|         self.debug = False | ||||
|         self.fallback = fallback | ||||
|         self.base = base | ||||
|  | ||||
|     def add(self, exception, handler): | ||||
|     def add(self, exception, handler, route_names: Optional[List[str]] = None): | ||||
|         """ | ||||
|         Add a new exception handler to an already existing handler object. | ||||
|  | ||||
| @@ -45,9 +47,16 @@ class ErrorHandler: | ||||
|  | ||||
|         :return: None | ||||
|         """ | ||||
|         # self.handlers is deprecated and will be removed in version 22.3 | ||||
|         self.handlers.append((exception, handler)) | ||||
|  | ||||
|     def lookup(self, exception): | ||||
|         if route_names: | ||||
|             for route in route_names: | ||||
|                 self.cached_handlers[(exception, route)] = handler | ||||
|         else: | ||||
|             self.cached_handlers[(exception, None)] = handler | ||||
|  | ||||
|     def lookup(self, exception, route_name: Optional[str]): | ||||
|         """ | ||||
|         Lookup the existing instance of :class:`ErrorHandler` and fetch the | ||||
|         registered handler for a specific type of exception. | ||||
| @@ -61,13 +70,27 @@ class ErrorHandler: | ||||
|  | ||||
|         :return: Registered function if found ``None`` otherwise | ||||
|         """ | ||||
|         handler = self.cached_handlers.get(type(exception), self._missing) | ||||
|         if handler is self._missing: | ||||
|             for exception_class, handler in self.handlers: | ||||
|                 if isinstance(exception, exception_class): | ||||
|                     self.cached_handlers[type(exception)] = handler | ||||
|         exception_class = type(exception) | ||||
|  | ||||
|         for name in (route_name, None): | ||||
|             exception_key = (exception_class, name) | ||||
|             handler = self.cached_handlers.get(exception_key) | ||||
|             if handler: | ||||
|                 return handler | ||||
|             self.cached_handlers[type(exception)] = None | ||||
|  | ||||
|         for name in (route_name, None): | ||||
|             for ancestor in type.mro(exception_class): | ||||
|                 exception_key = (ancestor, name) | ||||
|                 if exception_key in self.cached_handlers: | ||||
|                     handler = self.cached_handlers[exception_key] | ||||
|                     self.cached_handlers[ | ||||
|                         (exception_class, route_name) | ||||
|                     ] = handler | ||||
|                     return handler | ||||
|  | ||||
|                 if ancestor is BaseException: | ||||
|                     break | ||||
|         self.cached_handlers[(exception_class, route_name)] = None | ||||
|         handler = None | ||||
|         return handler | ||||
|  | ||||
| @@ -85,7 +108,8 @@ class ErrorHandler: | ||||
|         :return: Wrap the return value obtained from :func:`default` | ||||
|             or registered handler for that type of exception. | ||||
|         """ | ||||
|         handler = self.lookup(exception) | ||||
|         route_name = request.name if request else None | ||||
|         handler = self.lookup(exception, route_name) | ||||
|         response = None | ||||
|         try: | ||||
|             if handler: | ||||
| @@ -93,7 +117,6 @@ class ErrorHandler: | ||||
|             if response is None: | ||||
|                 response = self.default(request, exception) | ||||
|         except Exception: | ||||
|             self.log(format_exc()) | ||||
|             try: | ||||
|                 url = repr(request.url) | ||||
|             except AttributeError: | ||||
| @@ -101,7 +124,7 @@ class ErrorHandler: | ||||
|             response_message = ( | ||||
|                 "Exception raised in exception handler " '"%s" for uri: %s' | ||||
|             ) | ||||
|             logger.exception(response_message, handler.__name__, url) | ||||
|             error_logger.exception(response_message, handler.__name__, url) | ||||
|  | ||||
|             if self.debug: | ||||
|                 return text(response_message % (handler.__name__, url), 500) | ||||
| @@ -109,11 +132,6 @@ class ErrorHandler: | ||||
|                 return text("An error occurred while handling an error", 500) | ||||
|         return response | ||||
|  | ||||
|     def log(self, message, level="error"): | ||||
|         """ | ||||
|         Deprecated, do not use. | ||||
|         """ | ||||
|  | ||||
|     def default(self, request, exception): | ||||
|         """ | ||||
|         Provide a default behavior for the objects of :class:`ErrorHandler`. | ||||
| @@ -129,6 +147,17 @@ class ErrorHandler: | ||||
|             :class:`Exception` | ||||
|         :return: | ||||
|         """ | ||||
|         self.log(request, exception) | ||||
|         return exception_response( | ||||
|             request, | ||||
|             exception, | ||||
|             debug=self.debug, | ||||
|             base=self.base, | ||||
|             fallback=self.fallback, | ||||
|         ) | ||||
|  | ||||
|     @staticmethod | ||||
|     def log(request, exception): | ||||
|         quiet = getattr(exception, "quiet", False) | ||||
|         if quiet is False: | ||||
|             try: | ||||
| @@ -136,10 +165,9 @@ class ErrorHandler: | ||||
|             except AttributeError: | ||||
|                 url = "unknown" | ||||
|  | ||||
|             self.log(format_exc()) | ||||
|             logger.exception("Exception occurred while handling uri: %s", url) | ||||
|  | ||||
|         return exception_response(request, exception, self.debug) | ||||
|             error_logger.exception( | ||||
|                 "Exception occurred while handling uri: %s", url | ||||
|             ) | ||||
|  | ||||
|  | ||||
| class ContentRangeHandler: | ||||
| @@ -165,7 +193,7 @@ class ContentRangeHandler: | ||||
|  | ||||
|     def __init__(self, request, stats): | ||||
|         self.total = stats.st_size | ||||
|         _range = request.headers.get("Range") | ||||
|         _range = request.headers.getone("range", None) | ||||
|         if _range is None: | ||||
|             raise HeaderNotFound("Range Header Not Found") | ||||
|         unit, _, value = tuple(map(str.strip, _range.partition("="))) | ||||
|   | ||||
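Several hunks in this changeset replace `headers.get(...)` with `headers.getone(header, None)`. A small illustration of the semantics, assuming the header container behaves like the `multidict.CIMultiDict` that Sanic's `Header` builds on; the header values are made up.

```python
from multidict import CIMultiDict

headers = CIMultiDict(
    [
        ("Range", "bytes=0-99"),
        ("X-Forwarded-For", "10.0.0.1"),
        ("X-Forwarded-For", "10.0.0.2"),
    ]
)

# getone() returns a single value (the first one for duplicated keys) and
# takes an explicit default; lookups are case-insensitive.
assert headers.getone("range", None) == "bytes=0-99"
assert headers.getone("x-forwarded-for", None) == "10.0.0.1"
assert headers.getone("missing", None) is None
```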
							
								
								
									
204  sanic/headers.py
							| @@ -1,8 +1,11 @@ | ||||
| from __future__ import annotations | ||||
|  | ||||
| import re | ||||
|  | ||||
| from typing import Any, Dict, Iterable, List, Optional, Tuple, Union | ||||
| from urllib.parse import unquote | ||||
|  | ||||
| from sanic.exceptions import InvalidHeader | ||||
| from sanic.helpers import STATUS_CODES | ||||
|  | ||||
|  | ||||
| @@ -30,6 +33,175 @@ _host_re = re.compile( | ||||
| # For more information, consult ../tests/test_requests.py | ||||
|  | ||||
|  | ||||
| def parse_arg_as_accept(f): | ||||
|     def func(self, other, *args, **kwargs): | ||||
|         if not isinstance(other, Accept) and other: | ||||
|             other = Accept.parse(other) | ||||
|         return f(self, other, *args, **kwargs) | ||||
|  | ||||
|     return func | ||||
|  | ||||
|  | ||||
| class MediaType(str): | ||||
|     def __new__(cls, value: str): | ||||
|         return str.__new__(cls, value) | ||||
|  | ||||
|     def __init__(self, value: str) -> None: | ||||
|         self.value = value | ||||
|         self.is_wildcard = self.check_if_wildcard(value) | ||||
|  | ||||
|     def __eq__(self, other): | ||||
|         if self.is_wildcard: | ||||
|             return True | ||||
|  | ||||
|         if self.match(other): | ||||
|             return True | ||||
|  | ||||
|         other_is_wildcard = ( | ||||
|             other.is_wildcard | ||||
|             if isinstance(other, MediaType) | ||||
|             else self.check_if_wildcard(other) | ||||
|         ) | ||||
|  | ||||
|         return other_is_wildcard | ||||
|  | ||||
|     def match(self, other): | ||||
|         other_value = other.value if isinstance(other, MediaType) else other | ||||
|         return self.value == other_value | ||||
|  | ||||
|     @staticmethod | ||||
|     def check_if_wildcard(value): | ||||
|         return value == "*" | ||||
|  | ||||
|  | ||||
| class Accept(str): | ||||
|     def __new__(cls, value: str, *args, **kwargs): | ||||
|         return str.__new__(cls, value) | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         value: str, | ||||
|         type_: MediaType, | ||||
|         subtype: MediaType, | ||||
|         *, | ||||
|         q: str = "1.0", | ||||
|         **kwargs: str, | ||||
|     ): | ||||
|         qvalue = float(q) | ||||
|         if qvalue > 1 or qvalue < 0: | ||||
|             raise InvalidHeader( | ||||
|                 f"Accept header qvalue must be between 0 and 1, not: {qvalue}" | ||||
|             ) | ||||
|         self.value = value | ||||
|         self.type_ = type_ | ||||
|         self.subtype = subtype | ||||
|         self.qvalue = qvalue | ||||
|         self.params = kwargs | ||||
|  | ||||
|     def _compare(self, other, method): | ||||
|         try: | ||||
|             return method(self.qvalue, other.qvalue) | ||||
|         except (AttributeError, TypeError): | ||||
|             return NotImplemented | ||||
|  | ||||
|     @parse_arg_as_accept | ||||
|     def __lt__(self, other: Union[str, Accept]): | ||||
|         return self._compare(other, lambda s, o: s < o) | ||||
|  | ||||
|     @parse_arg_as_accept | ||||
|     def __le__(self, other: Union[str, Accept]): | ||||
|         return self._compare(other, lambda s, o: s <= o) | ||||
|  | ||||
|     @parse_arg_as_accept | ||||
|     def __eq__(self, other: Union[str, Accept]):  # type: ignore | ||||
|         return self._compare(other, lambda s, o: s == o) | ||||
|  | ||||
|     @parse_arg_as_accept | ||||
|     def __ge__(self, other: Union[str, Accept]): | ||||
|         return self._compare(other, lambda s, o: s >= o) | ||||
|  | ||||
|     @parse_arg_as_accept | ||||
|     def __gt__(self, other: Union[str, Accept]): | ||||
|         return self._compare(other, lambda s, o: s > o) | ||||
|  | ||||
|     @parse_arg_as_accept | ||||
|     def __ne__(self, other: Union[str, Accept]):  # type: ignore | ||||
|         return self._compare(other, lambda s, o: s != o) | ||||
|  | ||||
|     @parse_arg_as_accept | ||||
|     def match( | ||||
|         self, | ||||
|         other, | ||||
|         *, | ||||
|         allow_type_wildcard: bool = True, | ||||
|         allow_subtype_wildcard: bool = True, | ||||
|     ) -> bool: | ||||
|         type_match = ( | ||||
|             self.type_ == other.type_ | ||||
|             if allow_type_wildcard | ||||
|             else ( | ||||
|                 self.type_.match(other.type_) | ||||
|                 and not self.type_.is_wildcard | ||||
|                 and not other.type_.is_wildcard | ||||
|             ) | ||||
|         ) | ||||
|         subtype_match = ( | ||||
|             self.subtype == other.subtype | ||||
|             if allow_subtype_wildcard | ||||
|             else ( | ||||
|                 self.subtype.match(other.subtype) | ||||
|                 and not self.subtype.is_wildcard | ||||
|                 and not other.subtype.is_wildcard | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
|         return type_match and subtype_match | ||||
|  | ||||
|     @classmethod | ||||
|     def parse(cls, raw: str) -> Accept: | ||||
|         invalid = False | ||||
|         mtype = raw.strip() | ||||
|  | ||||
|         try: | ||||
|             media, *raw_params = mtype.split(";") | ||||
|             type_, subtype = media.split("/") | ||||
|         except ValueError: | ||||
|             invalid = True | ||||
|  | ||||
|         if invalid or not type_ or not subtype: | ||||
|             raise InvalidHeader(f"Header contains invalid Accept value: {raw}") | ||||
|  | ||||
|         params = dict( | ||||
|             [ | ||||
|                 (key.strip(), value.strip()) | ||||
|                 for key, value in (param.split("=", 1) for param in raw_params) | ||||
|             ] | ||||
|         ) | ||||
|  | ||||
|         return cls(mtype, MediaType(type_), MediaType(subtype), **params) | ||||
|  | ||||
|  | ||||
| class AcceptContainer(list): | ||||
|     def __contains__(self, o: object) -> bool: | ||||
|         return any(item.match(o) for item in self) | ||||
|  | ||||
|     def match( | ||||
|         self, | ||||
|         o: object, | ||||
|         *, | ||||
|         allow_type_wildcard: bool = True, | ||||
|         allow_subtype_wildcard: bool = True, | ||||
|     ) -> bool: | ||||
|         return any( | ||||
|             item.match( | ||||
|                 o, | ||||
|                 allow_type_wildcard=allow_type_wildcard, | ||||
|                 allow_subtype_wildcard=allow_subtype_wildcard, | ||||
|             ) | ||||
|             for item in self | ||||
|         ) | ||||
|  | ||||
|  | ||||
| def parse_content_header(value: str) -> Tuple[str, Options]: | ||||
|     """Parse content-type and content-disposition header values. | ||||
|  | ||||
| @@ -102,7 +274,7 @@ def parse_xforwarded(headers, config) -> Optional[Options]: | ||||
|     """Parse traditional proxy headers.""" | ||||
|     real_ip_header = config.REAL_IP_HEADER | ||||
|     proxies_count = config.PROXIES_COUNT | ||||
|     addr = real_ip_header and headers.get(real_ip_header) | ||||
|     addr = real_ip_header and headers.getone(real_ip_header, None) | ||||
|     if not addr and proxies_count: | ||||
|         assert proxies_count > 0 | ||||
|         try: | ||||
| @@ -131,7 +303,7 @@ def parse_xforwarded(headers, config) -> Optional[Options]: | ||||
|             ("port", "x-forwarded-port"), | ||||
|             ("path", "x-forwarded-path"), | ||||
|         ): | ||||
|             yield key, headers.get(header) | ||||
|             yield key, headers.getone(header, None) | ||||
|  | ||||
|     return fwd_normalize(options()) | ||||
|  | ||||
| @@ -194,3 +366,31 @@ def format_http1_response(status: int, headers: HeaderBytesIterable) -> bytes: | ||||
|         ret += b"%b: %b\r\n" % h | ||||
|     ret += b"\r\n" | ||||
|     return ret | ||||
|  | ||||
|  | ||||
| def _sort_accept_value(accept: Accept): | ||||
|     return ( | ||||
|         accept.qvalue, | ||||
|         len(accept.params), | ||||
|         accept.subtype != "*", | ||||
|         accept.type_ != "*", | ||||
|     ) | ||||
|  | ||||
|  | ||||
| def parse_accept(accept: str) -> AcceptContainer: | ||||
|     """Parse an Accept header and order the acceptable media types | ||||
|     according to RFC 7231, s. 5.3.2 | ||||
|     https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.2 | ||||
|     """ | ||||
|     media_types = accept.split(",") | ||||
|     accept_list: List[Accept] = [] | ||||
|  | ||||
|     for mtype in media_types: | ||||
|         if not mtype: | ||||
|             continue | ||||
|  | ||||
|         accept_list.append(Accept.parse(mtype)) | ||||
|  | ||||
|     return AcceptContainer( | ||||
|         sorted(accept_list, key=_sort_accept_value, reverse=True) | ||||
|     ) | ||||
|   | ||||
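Usage sketch for the new Accept machinery, assuming this branch's `sanic.headers` is importable; the header value is invented.

```python
from sanic.headers import parse_accept

accept = parse_accept("text/html, application/json;q=0.9, */*;q=0.1")

# Entries are ordered by q-value (then specificity), so text/html comes first.
assert str(accept[0]) == "text/html"

# Membership honours wildcards by default...
assert "application/json" in accept
assert "image/png" in accept  # only matched by */*;q=0.1

# ...but wildcards can be excluded when an exact media type is required.
assert not accept.match(
    "image/png",
    allow_type_wildcard=False,
    allow_subtype_wildcard=False,
)
```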
| @@ -155,3 +155,17 @@ def import_string(module_name, package=None): | ||||
|     if ismodule(obj): | ||||
|         return obj | ||||
|     return obj() | ||||
|  | ||||
|  | ||||
| class Default: | ||||
|     """ | ||||
|     A sentinel used in place of `None` or `object()` to mark a default | ||||
|     value. Sometimes `None` is itself a meaningful value, so it cannot | ||||
|     double as the default marker, and a bare `object()` is hard to type | ||||
|     properly. | ||||
|     """ | ||||
|  | ||||
|     pass | ||||
|  | ||||
|  | ||||
| _default = Default() | ||||
|   | ||||
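A self-contained illustration of why a dedicated sentinel type is easier to type-check than a bare `object()`; the `configure` function and its argument are invented for this example.

```python
from typing import Optional, Union


class Default:
    """Sentinel type distinguishing "argument not passed" from an explicit None."""


_default = Default()


def configure(timeout: Union[Optional[float], Default] = _default) -> str:
    if isinstance(timeout, Default):   # nothing passed: keep the app default
        return "use application default"
    if timeout is None:                # None passed on purpose: disable
        return "explicitly disabled"
    return f"timeout set to {timeout}s"


assert configure() == "use application default"
assert configure(None) == "explicitly disabled"
assert configure(2.5) == "timeout set to 2.5s"
```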
							
								
								
									
112  sanic/http.py
							| @@ -20,7 +20,8 @@ from sanic.exceptions import ( | ||||
| ) | ||||
| from sanic.headers import format_http1_response | ||||
| from sanic.helpers import has_message_body | ||||
| from sanic.log import access_logger, logger | ||||
| from sanic.log import access_logger, error_logger, logger | ||||
| from sanic.touchup import TouchUpMeta | ||||
|  | ||||
|  | ||||
| class Stage(Enum): | ||||
| @@ -45,7 +46,7 @@ class Stage(Enum): | ||||
| HTTP_CONTINUE = b"HTTP/1.1 100 Continue\r\n\r\n" | ||||
|  | ||||
|  | ||||
| class Http: | ||||
| class Http(metaclass=TouchUpMeta): | ||||
|     """ | ||||
|     Internal helper for managing the HTTP request/response cycle | ||||
|  | ||||
| @@ -64,9 +65,18 @@ class Http: | ||||
|     :raises RuntimeError: | ||||
|     """ | ||||
|  | ||||
|     HEADER_CEILING = 16_384 | ||||
|     HEADER_MAX_SIZE = 0 | ||||
|  | ||||
|     __touchup__ = ( | ||||
|         "http1_request_header", | ||||
|         "http1_response_header", | ||||
|         "read", | ||||
|     ) | ||||
|     __slots__ = [ | ||||
|         "_send", | ||||
|         "_receive_more", | ||||
|         "dispatch", | ||||
|         "recv_buffer", | ||||
|         "protocol", | ||||
|         "expecting_continue", | ||||
| @@ -92,19 +102,24 @@ class Http: | ||||
|         self._receive_more = protocol.receive_more | ||||
|         self.recv_buffer = protocol.recv_buffer | ||||
|         self.protocol = protocol | ||||
|         self.expecting_continue: bool = False | ||||
|         self.keep_alive = True | ||||
|         self.stage: Stage = Stage.IDLE | ||||
|         self.dispatch = self.protocol.app.dispatch | ||||
|         self.init_for_request() | ||||
|  | ||||
|     def init_for_request(self): | ||||
|         """Init/reset all per-request variables.""" | ||||
|         self.exception = None | ||||
|         self.expecting_continue: bool = False | ||||
|         self.head_only = None | ||||
|         self.request_body = None | ||||
|         self.request_bytes = None | ||||
|         self.request_bytes_left = None | ||||
|         self.request_max_size = protocol.request_max_size | ||||
|         self.keep_alive = True | ||||
|         self.head_only = None | ||||
|         self.request_max_size = self.protocol.request_max_size | ||||
|         self.request: Request = None | ||||
|         self.response: BaseHTTPResponse = None | ||||
|         self.exception = None | ||||
|         self.url = None | ||||
|         self.upgrade_websocket = False | ||||
|         self.url = None | ||||
|  | ||||
|     def __bool__(self): | ||||
|         """Test if request handling is in progress""" | ||||
| @@ -133,6 +148,12 @@ class Http: | ||||
|                     await self.response.send(end_stream=True) | ||||
|             except CancelledError: | ||||
|                 # Write an appropriate response before exiting | ||||
|                 if not self.protocol.transport: | ||||
|                     logger.info( | ||||
|                         f"Request: {self.request.method} {self.request.url} " | ||||
|                         "stopped. Transport is closed." | ||||
|                     ) | ||||
|                     return | ||||
|                 e = self.exception or ServiceUnavailable("Cancelled") | ||||
|                 self.exception = None | ||||
|                 self.keep_alive = False | ||||
| @@ -144,8 +165,11 @@ class Http: | ||||
|             # Try to consume any remaining request body | ||||
|             if self.request_body: | ||||
|                 if self.response and 200 <= self.response.status < 300: | ||||
|                     logger.error(f"{self.request} body not consumed.") | ||||
|  | ||||
|                     error_logger.error(f"{self.request} body not consumed.") | ||||
|                 # Limit the size because the handler may have set it infinite | ||||
|                 self.request_max_size = min( | ||||
|                     self.request_max_size, self.protocol.request_max_size | ||||
|                 ) | ||||
|                 try: | ||||
|                     async for _ in self: | ||||
|                         pass | ||||
| @@ -157,19 +181,26 @@ class Http: | ||||
|                     await sleep(0.001) | ||||
|                     self.keep_alive = False | ||||
|  | ||||
|             # Clean up to free memory and for the next request | ||||
|             if self.request: | ||||
|                 self.request.stream = None | ||||
|                 if self.response: | ||||
|                     self.response.stream = None | ||||
|  | ||||
|             # Exit and disconnect if no more requests can be taken | ||||
|             if self.stage is not Stage.IDLE or not self.keep_alive: | ||||
|                 break | ||||
|  | ||||
|             # Wait for next request | ||||
|             self.init_for_request() | ||||
|  | ||||
|             # Wait for the next request | ||||
|             if not self.recv_buffer: | ||||
|                 await self._receive_more() | ||||
|  | ||||
|     async def http1_request_header(self): | ||||
|     async def http1_request_header(self):  # no cov | ||||
|         """ | ||||
|         Receive and parse request header into self.request. | ||||
|         """ | ||||
|         HEADER_MAX_SIZE = min(8192, self.request_max_size) | ||||
|         # Receive until full header is in buffer | ||||
|         buf = self.recv_buffer | ||||
|         pos = 0 | ||||
| @@ -180,12 +211,12 @@ class Http: | ||||
|                 break | ||||
|  | ||||
|             pos = max(0, len(buf) - 3) | ||||
|             if pos >= HEADER_MAX_SIZE: | ||||
|             if pos >= self.HEADER_MAX_SIZE: | ||||
|                 break | ||||
|  | ||||
|             await self._receive_more() | ||||
|  | ||||
|         if pos >= HEADER_MAX_SIZE: | ||||
|         if pos >= self.HEADER_MAX_SIZE: | ||||
|             raise PayloadTooLarge("Request header exceeds the size limit") | ||||
|  | ||||
|         # Parse header content | ||||
| @@ -195,6 +226,12 @@ class Http: | ||||
|             reqline, *split_headers = raw_headers.split("\r\n") | ||||
|             method, self.url, protocol = reqline.split(" ") | ||||
|  | ||||
|             await self.dispatch( | ||||
|                 "http.lifecycle.read_head", | ||||
|                 inline=True, | ||||
|                 context={"head": bytes(head)}, | ||||
|             ) | ||||
|  | ||||
|             if protocol == "HTTP/1.1": | ||||
|                 self.keep_alive = True | ||||
|             elif protocol == "HTTP/1.0": | ||||
| @@ -219,7 +256,9 @@ class Http: | ||||
|             raise InvalidUsage("Bad Request") | ||||
|  | ||||
|         headers_instance = Header(headers) | ||||
|         self.upgrade_websocket = headers_instance.get("upgrade") == "websocket" | ||||
|         self.upgrade_websocket = ( | ||||
|             headers_instance.getone("upgrade", "").lower() == "websocket" | ||||
|         ) | ||||
|  | ||||
|         # Prepare a Request object | ||||
|         request = self.protocol.request_class( | ||||
| @@ -231,12 +270,17 @@ class Http: | ||||
|             transport=self.protocol.transport, | ||||
|             app=self.protocol.app, | ||||
|         ) | ||||
|         await self.dispatch( | ||||
|             "http.lifecycle.request", | ||||
|             inline=True, | ||||
|             context={"request": request}, | ||||
|         ) | ||||
|  | ||||
|         # Prepare for request body | ||||
|         self.request_bytes_left = self.request_bytes = 0 | ||||
|         if request_body: | ||||
|             headers = request.headers | ||||
|             expect = headers.get("expect") | ||||
|             expect = headers.getone("expect", None) | ||||
|  | ||||
|             if expect is not None: | ||||
|                 if expect.lower() == "100-continue": | ||||
| @@ -244,7 +288,7 @@ class Http: | ||||
|                 else: | ||||
|                     raise HeaderExpectationFailed(f"Unknown Expect: {expect}") | ||||
|  | ||||
|             if headers.get("transfer-encoding") == "chunked": | ||||
|             if headers.getone("transfer-encoding", None) == "chunked": | ||||
|                 self.request_body = "chunked" | ||||
|                 pos -= 2  # One CRLF stays in buffer | ||||
|             else: | ||||
| @@ -261,7 +305,7 @@ class Http: | ||||
|  | ||||
|     async def http1_response_header( | ||||
|         self, data: bytes, end_stream: bool | ||||
|     ) -> None: | ||||
|     ) -> None:  # no cov | ||||
|         res = self.response | ||||
|  | ||||
|         # Compatibility with simple response body | ||||
| @@ -433,8 +477,8 @@ class Http: | ||||
|             "request": "nil", | ||||
|         } | ||||
|         if req is not None: | ||||
|             if req.ip: | ||||
|                 extra["host"] = f"{req.ip}:{req.port}" | ||||
|             if req.remote_addr or req.ip: | ||||
|                 extra["host"] = f"{req.remote_addr or req.ip}:{req.port}" | ||||
|             extra["request"] = f"{req.method} {req.url}" | ||||
|         access_logger.info("", extra=extra) | ||||
|  | ||||
| @@ -450,7 +494,7 @@ class Http: | ||||
|             if data: | ||||
|                 yield data | ||||
|  | ||||
|     async def read(self) -> Optional[bytes]: | ||||
|     async def read(self) -> Optional[bytes]:  # no cov | ||||
|         """ | ||||
|         Read some bytes of request body. | ||||
|         """ | ||||
| @@ -482,8 +526,6 @@ class Http: | ||||
|                 self.keep_alive = False | ||||
|                 raise InvalidUsage("Bad chunked encoding") | ||||
|  | ||||
|             del buf[: pos + 2] | ||||
|  | ||||
|             if size <= 0: | ||||
|                 self.request_body = None | ||||
|  | ||||
| @@ -491,8 +533,17 @@ class Http: | ||||
|                     self.keep_alive = False | ||||
|                     raise InvalidUsage("Bad chunked encoding") | ||||
|  | ||||
|                 # Consume CRLF, chunk size 0 and the two CRLF that follow | ||||
|                 pos += 4 | ||||
|                 # Might need to wait for the final CRLF | ||||
|                 while len(buf) < pos: | ||||
|                     await self._receive_more() | ||||
|                 del buf[:pos] | ||||
|                 return None | ||||
|  | ||||
|             # Remove CRLF, chunk size and the CRLF that follows | ||||
|             del buf[: pos + 2] | ||||
|  | ||||
|             self.request_bytes_left = size | ||||
|             self.request_bytes += size | ||||
|  | ||||
| @@ -517,6 +568,12 @@ class Http: | ||||
|  | ||||
|         self.request_bytes_left -= size | ||||
|  | ||||
|         await self.dispatch( | ||||
|             "http.lifecycle.read_body", | ||||
|             inline=True, | ||||
|             context={"body": data}, | ||||
|         ) | ||||
|  | ||||
|         return data | ||||
|  | ||||
|     # Response methods | ||||
| @@ -539,3 +596,10 @@ class Http: | ||||
|     @property | ||||
|     def send(self): | ||||
|         return self.response_func | ||||
|  | ||||
|     @classmethod | ||||
|     def set_header_max_size(cls, *sizes: int): | ||||
|         cls.HEADER_MAX_SIZE = min( | ||||
|             *sizes, | ||||
|             cls.HEADER_CEILING, | ||||
|         ) | ||||
|   | ||||
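The header size limit is no longer hard-coded to 8 KiB inside `http1_request_header`; it is clamped per class via `set_header_max_size`. A tiny sketch of the clamping rule (the sizes passed in are arbitrary):

```python
HEADER_CEILING = 16_384  # same ceiling as the class attribute above


def effective_header_max_size(*sizes: int) -> int:
    # The smallest configured size wins, but never more than the ceiling.
    return min(*sizes, HEADER_CEILING)


assert effective_header_max_size(8_192, 100_000) == 8_192
assert effective_header_max_size(1_000_000) == 16_384
```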
| @@ -1,18 +1,19 @@ | ||||
| from enum import Enum, auto | ||||
| from functools import partial | ||||
| from typing import Any, Callable, Coroutine, List, Optional, Union | ||||
| from typing import List, Optional, Union | ||||
|  | ||||
| from sanic.models.futures import FutureListener | ||||
| from sanic.models.handler_types import ListenerType | ||||
|  | ||||
|  | ||||
| class ListenerEvent(str, Enum): | ||||
|     def _generate_next_value_(name: str, *args) -> str:  # type: ignore | ||||
|         return name.lower() | ||||
|  | ||||
|     BEFORE_SERVER_START = auto() | ||||
|     AFTER_SERVER_START = auto() | ||||
|     BEFORE_SERVER_STOP = auto() | ||||
|     AFTER_SERVER_STOP = auto() | ||||
|     BEFORE_SERVER_START = "server.init.before" | ||||
|     AFTER_SERVER_START = "server.init.after" | ||||
|     BEFORE_SERVER_STOP = "server.shutdown.before" | ||||
|     AFTER_SERVER_STOP = "server.shutdown.after" | ||||
|     MAIN_PROCESS_START = auto() | ||||
|     MAIN_PROCESS_STOP = auto() | ||||
|  | ||||
| @@ -26,16 +27,14 @@ class ListenerMixin: | ||||
|  | ||||
|     def listener( | ||||
|         self, | ||||
|         listener_or_event: Union[ | ||||
|             Callable[..., Coroutine[Any, Any, None]], str | ||||
|         ], | ||||
|         listener_or_event: Union[ListenerType, str], | ||||
|         event_or_none: Optional[str] = None, | ||||
|         apply: bool = True, | ||||
|     ): | ||||
|         """ | ||||
|         Create a listener from a decorated function. | ||||
|  | ||||
|         To be used as a deocrator: | ||||
|         To be used as a decorator: | ||||
|  | ||||
|         .. code-block:: python | ||||
|  | ||||
| @@ -63,20 +62,20 @@ class ListenerMixin: | ||||
|         else: | ||||
|             return partial(register_listener, event=listener_or_event) | ||||
|  | ||||
|     def main_process_start(self, listener): | ||||
|     def main_process_start(self, listener: ListenerType) -> ListenerType: | ||||
|         return self.listener(listener, "main_process_start") | ||||
|  | ||||
|     def main_process_stop(self, listener): | ||||
|     def main_process_stop(self, listener: ListenerType) -> ListenerType: | ||||
|         return self.listener(listener, "main_process_stop") | ||||
|  | ||||
|     def before_server_start(self, listener): | ||||
|     def before_server_start(self, listener: ListenerType) -> ListenerType: | ||||
|         return self.listener(listener, "before_server_start") | ||||
|  | ||||
|     def after_server_start(self, listener): | ||||
|     def after_server_start(self, listener: ListenerType) -> ListenerType: | ||||
|         return self.listener(listener, "after_server_start") | ||||
|  | ||||
|     def before_server_stop(self, listener): | ||||
|     def before_server_stop(self, listener: ListenerType) -> ListenerType: | ||||
|         return self.listener(listener, "before_server_stop") | ||||
|  | ||||
|     def after_server_stop(self, listener): | ||||
|     def after_server_stop(self, listener: ListenerType) -> ListenerType: | ||||
|         return self.listener(listener, "after_server_stop") | ||||
|   | ||||
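The listener events now carry explicit string values that double as signal names. Because the enum mixes in `str`, members compare equal to those strings, which is what allows them to be dispatched by name. A stand-alone sketch mirroring the enum above:

```python
from enum import Enum, auto


class ListenerEvent(str, Enum):
    def _generate_next_value_(name, *args):
        return name.lower()

    BEFORE_SERVER_START = "server.init.before"
    AFTER_SERVER_START = "server.init.after"
    BEFORE_SERVER_STOP = "server.shutdown.before"
    AFTER_SERVER_STOP = "server.shutdown.after"
    MAIN_PROCESS_START = auto()
    MAIN_PROCESS_STOP = auto()


assert ListenerEvent.BEFORE_SERVER_START == "server.init.before"
assert ListenerEvent.MAIN_PROCESS_START == "main_process_start"  # from auto()
```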
| @@ -1,17 +1,20 @@ | ||||
| from ast import NodeVisitor, Return, parse | ||||
| from functools import partial, wraps | ||||
| from inspect import signature | ||||
| from inspect import getsource, signature | ||||
| from mimetypes import guess_type | ||||
| from os import path | ||||
| from pathlib import PurePath | ||||
| from re import sub | ||||
| from textwrap import dedent | ||||
| from time import gmtime, strftime | ||||
| from typing import Iterable, List, Optional, Set, Union | ||||
| from typing import Any, Callable, Iterable, List, Optional, Set, Tuple, Union | ||||
| from urllib.parse import unquote | ||||
|  | ||||
| from sanic_routing.route import Route  # type: ignore | ||||
|  | ||||
| from sanic.compat import stat_async | ||||
| from sanic.constants import DEFAULT_HTTP_CONTENT_TYPE, HTTP_METHODS | ||||
| from sanic.errorpages import RESPONSE_MAPPING | ||||
| from sanic.exceptions import ( | ||||
|     ContentRangeError, | ||||
|     FileNotFound, | ||||
| @@ -21,15 +24,22 @@ from sanic.exceptions import ( | ||||
| from sanic.handlers import ContentRangeHandler | ||||
| from sanic.log import error_logger | ||||
| from sanic.models.futures import FutureRoute, FutureStatic | ||||
| from sanic.models.handler_types import RouteHandler | ||||
| from sanic.response import HTTPResponse, file, file_stream | ||||
| from sanic.views import CompositionView | ||||
|  | ||||
|  | ||||
| RouteWrapper = Callable[ | ||||
|     [RouteHandler], Union[RouteHandler, Tuple[Route, RouteHandler]] | ||||
| ] | ||||
|  | ||||
|  | ||||
| class RouteMixin: | ||||
|     name: str | ||||
|  | ||||
|     def __init__(self, *args, **kwargs) -> None: | ||||
|         self._future_routes: Set[FutureRoute] = set() | ||||
|         self._future_statics: Set[FutureStatic] = set() | ||||
|         self.name = "" | ||||
|         self.strict_slashes: Optional[bool] = False | ||||
|  | ||||
|     def _apply_route(self, route: FutureRoute) -> List[Route]: | ||||
| @@ -53,7 +63,9 @@ class RouteMixin: | ||||
|         websocket: bool = False, | ||||
|         unquote: bool = False, | ||||
|         static: bool = False, | ||||
|     ): | ||||
|         version_prefix: str = "/v", | ||||
|         error_format: Optional[str] = None, | ||||
|     ) -> RouteWrapper: | ||||
|         """ | ||||
|         Decorate a function to be registered as a route | ||||
|  | ||||
| @@ -66,6 +78,8 @@ class RouteMixin: | ||||
|         :param name: user defined route name for url_for | ||||
|         :param ignore_body: whether the handler should ignore request | ||||
|             body (eg. GET requests) | ||||
|         :param version_prefix: URL path that should be before the version | ||||
|             value; default: ``/v`` | ||||
|         :return: tuple of routes, decorated function | ||||
|         """ | ||||
|  | ||||
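Hedged usage sketch for the new `version_prefix` parameter; the app and handler below are invented. With `version=2` and `version_prefix="/api/v"` the route should be mounted at `/api/v2/users` (the default prefix `/v` would give `/v2/users`):

```python
from sanic import Sanic
from sanic.response import json

app = Sanic("Example")


@app.get("/users", version=2, version_prefix="/api/v")
async def list_users(request):
    return json({"users": []})
```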
| @@ -92,6 +106,8 @@ class RouteMixin: | ||||
|             nonlocal subprotocols | ||||
|             nonlocal websocket | ||||
|             nonlocal static | ||||
|             nonlocal version_prefix | ||||
|             nonlocal error_format | ||||
|  | ||||
|             if isinstance(handler, tuple): | ||||
|                 # if a handler fn is already wrapped in a route, the handler | ||||
| @@ -110,10 +126,16 @@ class RouteMixin: | ||||
|                         "Expected either string or Iterable of host strings, " | ||||
|                         "not %s" % host | ||||
|                     ) | ||||
|  | ||||
|             if isinstance(subprotocols, (list, tuple, set)): | ||||
|             if isinstance(subprotocols, list): | ||||
|                 # Ordered subprotocols, maintain order | ||||
|                 subprotocols = tuple(subprotocols) | ||||
|             elif isinstance(subprotocols, set): | ||||
|                 # subprotocol is unordered, keep it unordered | ||||
|                 subprotocols = frozenset(subprotocols) | ||||
|  | ||||
|             if not error_format or error_format == "auto": | ||||
|                 error_format = self._determine_error_format(handler) | ||||
|  | ||||
|             route = FutureRoute( | ||||
|                 handler, | ||||
|                 uri, | ||||
| @@ -128,6 +150,8 @@ class RouteMixin: | ||||
|                 subprotocols, | ||||
|                 unquote, | ||||
|                 static, | ||||
|                 version_prefix, | ||||
|                 error_format, | ||||
|             ) | ||||
|  | ||||
|             self._future_routes.add(route) | ||||
| @@ -154,13 +178,15 @@ class RouteMixin: | ||||
|             if apply: | ||||
|                 self._apply_route(route) | ||||
|  | ||||
|             if static: | ||||
|                 return route, handler | ||||
|             return handler | ||||
|  | ||||
|         return decorator | ||||
|  | ||||
|     def add_route( | ||||
|         self, | ||||
|         handler, | ||||
|         handler: RouteHandler, | ||||
|         uri: str, | ||||
|         methods: Iterable[str] = frozenset({"GET"}), | ||||
|         host: Optional[str] = None, | ||||
| @@ -168,7 +194,9 @@ class RouteMixin: | ||||
|         version: Optional[int] = None, | ||||
|         name: Optional[str] = None, | ||||
|         stream: bool = False, | ||||
|     ): | ||||
|         version_prefix: str = "/v", | ||||
|         error_format: Optional[str] = None, | ||||
|     ) -> RouteHandler: | ||||
|         """A helper method to register class instance or | ||||
|         functions as a handler to the application url | ||||
|         routes. | ||||
| @@ -182,6 +210,8 @@ class RouteMixin: | ||||
|         :param version: | ||||
|         :param name: user defined route name for url_for | ||||
|         :param stream: boolean specifying if the handler is a stream handler | ||||
|         :param version_prefix: URL path that should be before the version | ||||
|             value; default: ``/v`` | ||||
|         :return: function or class instance | ||||
|         """ | ||||
|         # Handle HTTPMethodView differently | ||||
| @@ -189,7 +219,8 @@ class RouteMixin: | ||||
|             methods = set() | ||||
|  | ||||
|             for method in HTTP_METHODS: | ||||
|                 _handler = getattr(handler.view_class, method.lower(), None) | ||||
|                 view_class = getattr(handler, "view_class") | ||||
|                 _handler = getattr(view_class, method.lower(), None) | ||||
|                 if _handler: | ||||
|                     methods.add(method) | ||||
|                     if hasattr(_handler, "is_stream"): | ||||
| @@ -214,6 +245,8 @@ class RouteMixin: | ||||
|             stream=stream, | ||||
|             version=version, | ||||
|             name=name, | ||||
|             version_prefix=version_prefix, | ||||
|             error_format=error_format, | ||||
|         )(handler) | ||||
|         return handler | ||||
|  | ||||
| @@ -226,7 +259,9 @@ class RouteMixin: | ||||
|         version: Optional[int] = None, | ||||
|         name: Optional[str] = None, | ||||
|         ignore_body: bool = True, | ||||
|     ): | ||||
|         version_prefix: str = "/v", | ||||
|         error_format: Optional[str] = None, | ||||
|     ) -> RouteWrapper: | ||||
|         """ | ||||
|         Add an API URL under the **GET** *HTTP* method | ||||
|  | ||||
| @@ -236,6 +271,8 @@ class RouteMixin: | ||||
|             URLs need to terminate with a */* | ||||
|         :param version: API Version | ||||
|         :param name: Unique name that can be used to identify the Route | ||||
|         :param version_prefix: URL path that should be before the version | ||||
|             value; default: ``/v`` | ||||
|         :return: Object decorated with :func:`route` method | ||||
|         """ | ||||
|         return self.route( | ||||
| @@ -246,6 +283,8 @@ class RouteMixin: | ||||
|             version=version, | ||||
|             name=name, | ||||
|             ignore_body=ignore_body, | ||||
|             version_prefix=version_prefix, | ||||
|             error_format=error_format, | ||||
|         ) | ||||
|  | ||||
|     def post( | ||||
| @@ -256,7 +295,9 @@ class RouteMixin: | ||||
|         stream: bool = False, | ||||
|         version: Optional[int] = None, | ||||
|         name: Optional[str] = None, | ||||
|     ): | ||||
|         version_prefix: str = "/v", | ||||
|         error_format: Optional[str] = None, | ||||
|     ) -> RouteWrapper: | ||||
|         """ | ||||
|         Add an API URL under the **POST** *HTTP* method | ||||
|  | ||||
| @@ -266,6 +307,8 @@ class RouteMixin: | ||||
|             URLs need to terminate with a */* | ||||
|         :param version: API Version | ||||
|         :param name: Unique name that can be used to identify the Route | ||||
|         :param version_prefix: URL path that should be before the version | ||||
|             value; default: ``/v`` | ||||
|         :return: Object decorated with :func:`route` method | ||||
|         """ | ||||
|         return self.route( | ||||
| @@ -276,6 +319,8 @@ class RouteMixin: | ||||
|             stream=stream, | ||||
|             version=version, | ||||
|             name=name, | ||||
|             version_prefix=version_prefix, | ||||
|             error_format=error_format, | ||||
|         ) | ||||
|  | ||||
|     def put( | ||||
| @@ -286,7 +331,9 @@ class RouteMixin: | ||||
|         stream: bool = False, | ||||
|         version: Optional[int] = None, | ||||
|         name: Optional[str] = None, | ||||
|     ): | ||||
|         version_prefix: str = "/v", | ||||
|         error_format: Optional[str] = None, | ||||
|     ) -> RouteWrapper: | ||||
|         """ | ||||
|         Add an API URL under the **PUT** *HTTP* method | ||||
|  | ||||
| @@ -296,6 +343,8 @@ class RouteMixin: | ||||
|             URLs need to terminate with a */* | ||||
|         :param version: API Version | ||||
|         :param name: Unique name that can be used to identify the Route | ||||
|         :param version_prefix: URL path that should be before the version | ||||
|             value; default: ``/v`` | ||||
|         :return: Object decorated with :func:`route` method | ||||
|         """ | ||||
|         return self.route( | ||||
| @@ -306,6 +355,8 @@ class RouteMixin: | ||||
|             stream=stream, | ||||
|             version=version, | ||||
|             name=name, | ||||
|             version_prefix=version_prefix, | ||||
|             error_format=error_format, | ||||
|         ) | ||||
|  | ||||
|     def head( | ||||
| @@ -316,7 +367,9 @@ class RouteMixin: | ||||
|         version: Optional[int] = None, | ||||
|         name: Optional[str] = None, | ||||
|         ignore_body: bool = True, | ||||
|     ): | ||||
|         version_prefix: str = "/v", | ||||
|         error_format: Optional[str] = None, | ||||
|     ) -> RouteWrapper: | ||||
|         """ | ||||
|         Add an API URL under the **HEAD** *HTTP* method | ||||
|  | ||||
| @@ -334,6 +387,8 @@ class RouteMixin: | ||||
|         :param ignore_body: whether the handler should ignore request | ||||
|             body (eg. GET requests), defaults to True | ||||
|         :type ignore_body: bool, optional | ||||
|         :param version_prefix: URL path that should be before the version | ||||
|             value; default: ``/v`` | ||||
|         :return: Object decorated with :func:`route` method | ||||
|         """ | ||||
|         return self.route( | ||||
| @@ -344,6 +399,8 @@ class RouteMixin: | ||||
|             version=version, | ||||
|             name=name, | ||||
|             ignore_body=ignore_body, | ||||
|             version_prefix=version_prefix, | ||||
|             error_format=error_format, | ||||
|         ) | ||||
|  | ||||
|     def options( | ||||
| @@ -354,7 +411,9 @@ class RouteMixin: | ||||
|         version: Optional[int] = None, | ||||
|         name: Optional[str] = None, | ||||
|         ignore_body: bool = True, | ||||
|     ): | ||||
|         version_prefix: str = "/v", | ||||
|         error_format: Optional[str] = None, | ||||
|     ) -> RouteWrapper: | ||||
|         """ | ||||
|         Add an API URL under the **OPTIONS** *HTTP* method | ||||
|  | ||||
| @@ -372,6 +431,8 @@ class RouteMixin: | ||||
|         :param ignore_body: whether the handler should ignore request | ||||
|             body (eg. GET requests), defaults to True | ||||
|         :type ignore_body: bool, optional | ||||
|         :param version_prefix: URL path that should be before the version | ||||
|             value; default: ``/v`` | ||||
|         :return: Object decorated with :func:`route` method | ||||
|         """ | ||||
|         return self.route( | ||||
| @@ -382,6 +443,8 @@ class RouteMixin: | ||||
|             version=version, | ||||
|             name=name, | ||||
|             ignore_body=ignore_body, | ||||
|             version_prefix=version_prefix, | ||||
|             error_format=error_format, | ||||
|         ) | ||||
|  | ||||
|     def patch( | ||||
| @@ -392,7 +455,9 @@ class RouteMixin: | ||||
|         stream=False, | ||||
|         version: Optional[int] = None, | ||||
|         name: Optional[str] = None, | ||||
|     ): | ||||
|         version_prefix: str = "/v", | ||||
|         error_format: Optional[str] = None, | ||||
|     ) -> RouteWrapper: | ||||
|         """ | ||||
|         Add an API URL under the **PATCH** *HTTP* method | ||||
|  | ||||
| @@ -412,6 +477,8 @@ class RouteMixin: | ||||
|         :param ignore_body: whether the handler should ignore request | ||||
|             body (eg. GET requests), defaults to True | ||||
|         :type ignore_body: bool, optional | ||||
|         :param version_prefix: URL path that should be before the version | ||||
|             value; default: ``/v`` | ||||
|         :return: Object decorated with :func:`route` method | ||||
|         """ | ||||
|         return self.route( | ||||
| @@ -422,6 +489,8 @@ class RouteMixin: | ||||
|             stream=stream, | ||||
|             version=version, | ||||
|             name=name, | ||||
|             version_prefix=version_prefix, | ||||
|             error_format=error_format, | ||||
|         ) | ||||
|  | ||||
|     def delete( | ||||
| @@ -432,7 +501,9 @@ class RouteMixin: | ||||
|         version: Optional[int] = None, | ||||
|         name: Optional[str] = None, | ||||
|         ignore_body: bool = True, | ||||
|     ): | ||||
|         version_prefix: str = "/v", | ||||
|         error_format: Optional[str] = None, | ||||
|     ) -> RouteWrapper: | ||||
|         """ | ||||
|         Add an API URL under the **DELETE** *HTTP* method | ||||
|  | ||||
| @@ -442,6 +513,8 @@ class RouteMixin: | ||||
|             URLs need to terminate with a */* | ||||
|         :param version: API Version | ||||
|         :param name: Unique name that can be used to identify the Route | ||||
|         :param version_prefix: URL path that should be before the version | ||||
|             value; default: ``/v`` | ||||
|         :return: Object decorated with :func:`route` method | ||||
|         """ | ||||
|         return self.route( | ||||
| @@ -452,6 +525,8 @@ class RouteMixin: | ||||
|             version=version, | ||||
|             name=name, | ||||
|             ignore_body=ignore_body, | ||||
|             version_prefix=version_prefix, | ||||
|             error_format=error_format, | ||||
|         ) | ||||
|  | ||||
|     def websocket( | ||||
| @@ -463,6 +538,8 @@ class RouteMixin: | ||||
|         version: Optional[int] = None, | ||||
|         name: Optional[str] = None, | ||||
|         apply: bool = True, | ||||
|         version_prefix: str = "/v", | ||||
|         error_format: Optional[str] = None, | ||||
|     ): | ||||
|         """ | ||||
|         Decorate a function to be registered as a websocket route | ||||
| @@ -474,6 +551,8 @@ class RouteMixin: | ||||
|         :param subprotocols: optional list of str with supported subprotocols | ||||
|         :param name: A unique name assigned to the URL so that it can | ||||
|                      be used with :func:`url_for` | ||||
|         :param version_prefix: URL path that should be before the version | ||||
|             value; default: ``/v`` | ||||
|         :return: tuple of routes, decorated function | ||||
|         """ | ||||
|         return self.route( | ||||
| @@ -486,6 +565,8 @@ class RouteMixin: | ||||
|             apply=apply, | ||||
|             subprotocols=subprotocols, | ||||
|             websocket=True, | ||||
|             version_prefix=version_prefix, | ||||
|             error_format=error_format, | ||||
|         ) | ||||
|  | ||||
|     def add_websocket_route( | ||||
| @@ -497,6 +578,8 @@ class RouteMixin: | ||||
|         subprotocols=None, | ||||
|         version: Optional[int] = None, | ||||
|         name: Optional[str] = None, | ||||
|         version_prefix: str = "/v", | ||||
|         error_format: Optional[str] = None, | ||||
|     ): | ||||
|         """ | ||||
|         A helper method to register a function as a websocket route. | ||||
| @@ -513,6 +596,8 @@ class RouteMixin: | ||||
|                 handshake | ||||
|         :param name: A unique name assigned to the URL so that it can | ||||
|                 be used with :func:`url_for` | ||||
|         :param version_prefix: URL path that should be before the version | ||||
|             value; default: ``/v`` | ||||
|         :return: Objected decorated by :func:`websocket` | ||||
|         """ | ||||
|         return self.websocket( | ||||
| @@ -522,6 +607,8 @@ class RouteMixin: | ||||
|             subprotocols=subprotocols, | ||||
|             version=version, | ||||
|             name=name, | ||||
|             version_prefix=version_prefix, | ||||
|             error_format=error_format, | ||||
|         )(handler) | ||||
|  | ||||
|     def static( | ||||
| @@ -537,6 +624,7 @@ class RouteMixin: | ||||
|         strict_slashes=None, | ||||
|         content_type=None, | ||||
|         apply=True, | ||||
|         resource_type=None, | ||||
|     ): | ||||
|         """ | ||||
|         Register a root to serve files from. The input can either be a | ||||
| @@ -586,6 +674,7 @@ class RouteMixin: | ||||
|             host, | ||||
|             strict_slashes, | ||||
|             content_type, | ||||
|             resource_type, | ||||
|         ) | ||||
|         self._future_statics.add(static) | ||||
|  | ||||
| @@ -665,7 +754,10 @@ class RouteMixin: | ||||
|                 modified_since = strftime( | ||||
|                     "%a, %d %b %Y %H:%M:%S GMT", gmtime(stats.st_mtime) | ||||
|                 ) | ||||
|                 if request.headers.get("If-Modified-Since") == modified_since: | ||||
|                 if ( | ||||
|                     request.headers.getone("if-modified-since", None) | ||||
|                     == modified_since | ||||
|                 ): | ||||
|                     return HTTPResponse(status=304) | ||||
|                 headers["Last-Modified"] = modified_since | ||||
|             _range = None | ||||
| @@ -718,16 +810,19 @@ class RouteMixin: | ||||
|                 return await file(file_path, headers=headers, _range=_range) | ||||
|         except ContentRangeError: | ||||
|             raise | ||||
|         except Exception: | ||||
|             error_logger.exception( | ||||
|                 f"File not found: path={file_or_directory}, " | ||||
|                 f"relative_url={__file_uri__}" | ||||
|             ) | ||||
|         except FileNotFoundError: | ||||
|             raise FileNotFound( | ||||
|                 "File not found", | ||||
|                 path=file_or_directory, | ||||
|                 relative_url=__file_uri__, | ||||
|             ) | ||||
|         except Exception: | ||||
|             error_logger.exception( | ||||
|                 f"Exception in static request handler: " | ||||
|                 f"path={file_or_directory}, " | ||||
|                 f"relative_url={__file_uri__}" | ||||
|             ) | ||||
|             raise | ||||
|  | ||||
|     def _register_static( | ||||
|         self, | ||||
| @@ -775,8 +870,27 @@ class RouteMixin: | ||||
|         name = static.name | ||||
|         # If we're not trying to match a file directly, | ||||
|         # serve from the folder | ||||
|         if not static.resource_type: | ||||
|             if not path.isfile(file_or_directory): | ||||
|                 uri += "/<__file_uri__:path>" | ||||
|         elif static.resource_type == "dir": | ||||
|             if path.isfile(file_or_directory): | ||||
|                 raise TypeError( | ||||
|                     "Resource type improperly identified as directory. " | ||||
|                     f"'{file_or_directory}'" | ||||
|                 ) | ||||
|             uri += "/<__file_uri__:path>" | ||||
|         elif static.resource_type == "file" and not path.isfile( | ||||
|             file_or_directory | ||||
|         ): | ||||
|             raise TypeError( | ||||
|                 "Resource type improperly identified as file. " | ||||
|                 f"'{file_or_directory}'" | ||||
|             ) | ||||
|         elif static.resource_type != "file": | ||||
|             raise ValueError( | ||||
|                 "The resource_type should be set to 'file' or 'dir'" | ||||
|             ) | ||||
|  | ||||
|         # special prefix for static files | ||||
|         # if not static.name.startswith("_static_"): | ||||
| @@ -793,7 +907,7 @@ class RouteMixin: | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
|         route, _ = self.route( | ||||
|         route, _ = self.route(  # type: ignore | ||||
|             uri=uri, | ||||
|             methods=["GET", "HEAD"], | ||||
|             name=name, | ||||
| @@ -803,3 +917,43 @@ class RouteMixin: | ||||
|         )(_handler) | ||||
|  | ||||
|         return route | ||||
|  | ||||
|     def _determine_error_format(self, handler) -> str: | ||||
|         if not isinstance(handler, CompositionView): | ||||
|             try: | ||||
|                 src = dedent(getsource(handler)) | ||||
|                 tree = parse(src) | ||||
|                 http_response_types = self._get_response_types(tree) | ||||
|  | ||||
|                 if len(http_response_types) == 1: | ||||
|                     return next(iter(http_response_types)) | ||||
|             except (OSError, TypeError): | ||||
|                 ... | ||||
|  | ||||
|         return "auto" | ||||
|  | ||||
|     def _get_response_types(self, node): | ||||
|         types = set() | ||||
|  | ||||
|         class HttpResponseVisitor(NodeVisitor): | ||||
|             def visit_Return(self, node: Return) -> Any: | ||||
|                 nonlocal types | ||||
|  | ||||
|                 try: | ||||
|                     checks = [node.value.func.id]  # type: ignore | ||||
|                     if node.value.keywords:  # type: ignore | ||||
|                         checks += [ | ||||
|                             k.value | ||||
|                             for k in node.value.keywords  # type: ignore | ||||
|                             if k.arg == "content_type" | ||||
|                         ] | ||||
|  | ||||
|                     for check in checks: | ||||
|                         if check in RESPONSE_MAPPING: | ||||
|                             types.add(RESPONSE_MAPPING[check]) | ||||
|                 except AttributeError: | ||||
|                     ... | ||||
|  | ||||
|         HttpResponseVisitor().visit(node) | ||||
|  | ||||
|         return types | ||||
|   | ||||
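`_determine_error_format` inspects a handler's source with the `ast` module and, if every `return` uses a single known response helper, picks that as the route's error format. Below is a rough, self-contained approximation; `RESPONSE_MAPPING` is a stand-in for `sanic.errorpages.RESPONSE_MAPPING`, and the sample handler is invented.

```python
from ast import NodeVisitor, Return, parse
from inspect import getsource
from textwrap import dedent

RESPONSE_MAPPING = {"json": "json", "text": "text", "html": "html"}  # stand-in


def guess_error_format(handler) -> str:
    try:
        tree = parse(dedent(getsource(handler)))
    except (OSError, TypeError):
        return "auto"

    found = set()

    class Visitor(NodeVisitor):
        def visit_Return(self, node: Return) -> None:
            try:
                name = node.value.func.id  # e.g. `return json(...)`
                if name in RESPONSE_MAPPING:
                    found.add(RESPONSE_MAPPING[name])
            except AttributeError:
                pass  # bare return, or not a simple call

    Visitor().visit(tree)
    return next(iter(found)) if len(found) == 1 else "auto"


def handler(request):
    return json({"ok": True})  # only parsed here, never executed


# Prints "json" when run from a file (getsource needs the source on disk).
print(guess_error_format(handler))
```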
| @@ -1,4 +1,4 @@ | ||||
| from typing import Any, Callable, Dict, Set | ||||
| from typing import Any, Callable, Dict, Optional, Set | ||||
|  | ||||
| from sanic.models.futures import FutureSignal | ||||
| from sanic.models.handler_types import SignalHandler | ||||
| @@ -23,7 +23,7 @@ class SignalMixin: | ||||
|         *, | ||||
|         apply: bool = True, | ||||
|         condition: Dict[str, Any] = None, | ||||
|     ) -> Callable[[SignalHandler], FutureSignal]: | ||||
|     ) -> Callable[[SignalHandler], SignalHandler]: | ||||
|         """ | ||||
|         For creating a signal handler, used similar to a route handler: | ||||
|  | ||||
| @@ -54,16 +54,22 @@ class SignalMixin: | ||||
|             if apply: | ||||
|                 self._apply_signal(future_signal) | ||||
|  | ||||
|             return future_signal | ||||
|             return handler | ||||
|  | ||||
|         return decorator | ||||
|  | ||||
|     def add_signal( | ||||
|         self, | ||||
|         handler, | ||||
|         handler: Optional[Callable[..., Any]], | ||||
|         event: str, | ||||
|         condition: Dict[str, Any] = None, | ||||
|     ): | ||||
|         if not handler: | ||||
|  | ||||
|             async def noop(): | ||||
|                 ... | ||||
|  | ||||
|             handler = noop | ||||
|         self.signal(event=event, condition=condition)(handler) | ||||
|         return handler | ||||
|  | ||||
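With this change the `signal` decorator hands back the original handler (so it can still be imported and awaited directly), and `add_signal(None, event)` registers a no-op just to reserve the event. A hedged usage sketch; the app and event names are invented.

```python
from sanic import Sanic

app = Sanic("Example")


@app.signal("user.registration.created")
async def on_created(**context):
    print("created:", context)


# The decorator now returns the handler itself, so it stays awaitable in
# tests or other code paths:
#     await on_created(user_id=1)

# Reserve an event with no handler attached yet:
app.add_signal(None, "user.registration.verified")
```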
|   | ||||
| @@ -3,7 +3,7 @@ import asyncio | ||||
| from typing import Any, Awaitable, Callable, MutableMapping, Optional, Union | ||||
|  | ||||
| from sanic.exceptions import InvalidUsage | ||||
| from sanic.websocket import WebSocketConnection | ||||
| from sanic.server.websockets.connection import WebSocketConnection | ||||
|  | ||||
|  | ||||
| ASGIScope = MutableMapping[str, Any] | ||||
|   | ||||
| @@ -23,6 +23,8 @@ class FutureRoute(NamedTuple): | ||||
|     subprotocols: Optional[List[str]] | ||||
|     unquote: bool | ||||
|     static: bool | ||||
|     version_prefix: str | ||||
|     error_format: Optional[str] | ||||
|  | ||||
|  | ||||
| class FutureListener(NamedTuple): | ||||
| @@ -51,6 +53,7 @@ class FutureStatic(NamedTuple): | ||||
|     host: Optional[str] | ||||
|     strict_slashes: Optional[bool] | ||||
|     content_type: Optional[bool] | ||||
|     resource_type: Optional[str] | ||||
|  | ||||
|  | ||||
| class FutureSignal(NamedTuple): | ||||
|   | ||||
| @@ -21,5 +21,5 @@ MiddlewareType = Union[RequestMiddlewareType, ResponseMiddlewareType] | ||||
| ListenerType = Callable[ | ||||
|     [Sanic, AbstractEventLoop], Optional[Coroutine[Any, Any, None]] | ||||
| ] | ||||
| RouteHandler = Callable[..., Coroutine[Any, Any, HTTPResponse]] | ||||
| RouteHandler = Callable[..., Coroutine[Any, Any, Optional[HTTPResponse]]] | ||||
| SignalHandler = Callable[..., Coroutine[Any, Any, None]] | ||||
|   | ||||
							
								
								
									
52  sanic/models/server_types.py (new file)
							| @@ -0,0 +1,52 @@ | ||||
| from types import SimpleNamespace | ||||
|  | ||||
| from sanic.models.protocol_types import TransportProtocol | ||||
|  | ||||
|  | ||||
| class Signal: | ||||
|     stopped = False | ||||
|  | ||||
|  | ||||
| class ConnInfo: | ||||
|     """ | ||||
|     Local and remote addresses and SSL status info. | ||||
|     """ | ||||
|  | ||||
|     __slots__ = ( | ||||
|         "client_port", | ||||
|         "client", | ||||
|         "client_ip", | ||||
|         "ctx", | ||||
|         "peername", | ||||
|         "server_port", | ||||
|         "server", | ||||
|         "sockname", | ||||
|         "ssl", | ||||
|     ) | ||||
|  | ||||
|     def __init__(self, transport: TransportProtocol, unix=None): | ||||
|         self.ctx = SimpleNamespace() | ||||
|         self.peername = None | ||||
|         self.server = self.client = "" | ||||
|         self.server_port = self.client_port = 0 | ||||
|         self.client_ip = "" | ||||
|         self.sockname = addr = transport.get_extra_info("sockname") | ||||
|         self.ssl: bool = bool(transport.get_extra_info("sslcontext")) | ||||
|  | ||||
|         if isinstance(addr, str):  # UNIX socket | ||||
|             self.server = unix or addr | ||||
|             return | ||||
|  | ||||
|         # IPv4 (ip, port) or IPv6 (ip, port, flowinfo, scopeid) | ||||
|         if isinstance(addr, tuple): | ||||
|             self.server = addr[0] if len(addr) == 2 else f"[{addr[0]}]" | ||||
|             self.server_port = addr[1] | ||||
|             # self.server gets non-standard port appended | ||||
|             if addr[1] != (443 if self.ssl else 80): | ||||
|                 self.server = f"{self.server}:{addr[1]}" | ||||
|         self.peername = addr = transport.get_extra_info("peername") | ||||
|  | ||||
|         if isinstance(addr, tuple): | ||||
|             self.client = addr[0] if len(addr) == 2 else f"[{addr[0]}]" | ||||
|             self.client_ip = addr[0] | ||||
|             self.client_port = addr[1] | ||||
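A self-contained sketch of the address formatting `ConnInfo` performs above: UNIX socket paths pass through unchanged, IPv6 hosts get brackets, and only a non-default port is appended.

```python
def format_server(addr, ssl: bool) -> str:
    if isinstance(addr, str):  # UNIX socket path
        return addr
    host = addr[0] if len(addr) == 2 else f"[{addr[0]}]"  # IPv4 vs IPv6 tuple
    if addr[1] != (443 if ssl else 80):  # append non-standard port only
        host = f"{host}:{addr[1]}"
    return host


assert format_server(("127.0.0.1", 80), ssl=False) == "127.0.0.1"
assert format_server(("127.0.0.1", 8000), ssl=False) == "127.0.0.1:8000"
assert format_server(("::1", 8443, 0, 0), ssl=True) == "[::1]:8443"
assert format_server("/tmp/app.sock", ssl=False) == "/tmp/app.sock"
```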
| @@ -1,3 +1,4 @@ | ||||
| import itertools | ||||
| import os | ||||
| import signal | ||||
| import subprocess | ||||
| @@ -5,6 +6,9 @@ import sys | ||||
|  | ||||
| from time import sleep | ||||
|  | ||||
| from sanic.config import BASE_LOGO | ||||
| from sanic.log import logger | ||||
|  | ||||
|  | ||||
| def _iter_module_files(): | ||||
|     """This iterates over all relevant Python files. | ||||
| @@ -56,7 +60,21 @@ def restart_with_reloader(): | ||||
|     ) | ||||
|  | ||||
|  | ||||
| def watchdog(sleep_interval): | ||||
| def _check_file(filename, mtimes): | ||||
|     need_reload = False | ||||
|  | ||||
|     mtime = os.stat(filename).st_mtime | ||||
|     old_time = mtimes.get(filename) | ||||
|     if old_time is None: | ||||
|         mtimes[filename] = mtime | ||||
|     elif mtime > old_time: | ||||
|         mtimes[filename] = mtime | ||||
|         need_reload = True | ||||
|  | ||||
|     return need_reload | ||||
|  | ||||
|  | ||||
| def watchdog(sleep_interval, app): | ||||
|     """Watch project files, restart worker process if a change happened. | ||||
|  | ||||
|     :param sleep_interval: interval in seconds. | ||||
| @@ -73,21 +91,25 @@ def watchdog(sleep_interval): | ||||
|  | ||||
|     worker_process = restart_with_reloader() | ||||
|  | ||||
|     if app.config.LOGO: | ||||
|         logger.debug( | ||||
|             app.config.LOGO if isinstance(app.config.LOGO, str) else BASE_LOGO | ||||
|         ) | ||||
|  | ||||
|     try: | ||||
|         while True: | ||||
|             need_reload = False | ||||
|  | ||||
|             for filename in _iter_module_files(): | ||||
|             for filename in itertools.chain( | ||||
|                 _iter_module_files(), | ||||
|                 *(d.glob("**/*") for d in app.reload_dirs), | ||||
|             ): | ||||
|                 try: | ||||
|                     mtime = os.stat(filename).st_mtime | ||||
|                     check = _check_file(filename, mtimes) | ||||
|                 except OSError: | ||||
|                     continue | ||||
|  | ||||
|                 old_time = mtimes.get(filename) | ||||
|                 if old_time is None: | ||||
|                     mtimes[filename] = mtime | ||||
|                 elif mtime > old_time: | ||||
|                     mtimes[filename] = mtime | ||||
|                 if check: | ||||
|                     need_reload = True | ||||
|  | ||||
|             if need_reload: | ||||
|   | ||||
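For reference, the extracted _check_file() helper keeps the same mtime bookkeeping the loop previously did inline: the first sighting of a file only records its mtime, and a later, newer mtime reports that a reload is needed. A standalone sketch, not part of the diff, assuming the helper remains importable from sanic.reloader_helpers:

    import os
    import tempfile
    import time

    from sanic.reloader_helpers import _check_file

    mtimes = {}
    with tempfile.NamedTemporaryFile(delete=False) as handle:
        path = handle.name

    print(_check_file(path, mtimes))  # False: first sighting, mtime recorded
    time.sleep(1)
    os.utime(path)                    # bump the modification time
    print(_check_file(path, mtimes))  # True: newer mtime means reload
    os.unlink(path)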
| @@ -34,7 +34,9 @@ from sanic.compat import CancelledErrors, Header | ||||
| from sanic.constants import DEFAULT_HTTP_CONTENT_TYPE | ||||
| from sanic.exceptions import InvalidUsage | ||||
| from sanic.headers import ( | ||||
|     AcceptContainer, | ||||
|     Options, | ||||
|     parse_accept, | ||||
|     parse_content_header, | ||||
|     parse_forwarded, | ||||
|     parse_host, | ||||
| @@ -94,6 +96,7 @@ class Request: | ||||
|         "head", | ||||
|         "headers", | ||||
|         "method", | ||||
|         "parsed_accept", | ||||
|         "parsed_args", | ||||
|         "parsed_not_grouped_args", | ||||
|         "parsed_files", | ||||
| @@ -125,7 +128,7 @@ class Request: | ||||
|         self._name: Optional[str] = None | ||||
|         self.app = app | ||||
|  | ||||
|         self.headers = headers | ||||
|         self.headers = Header(headers) | ||||
|         self.version = version | ||||
|         self.method = method | ||||
|         self.transport = transport | ||||
| @@ -136,6 +139,7 @@ class Request: | ||||
|         self.conn_info: Optional[ConnInfo] = None | ||||
|         self.ctx = SimpleNamespace() | ||||
|         self.parsed_forwarded: Optional[Options] = None | ||||
|         self.parsed_accept: Optional[AcceptContainer] = None | ||||
|         self.parsed_json = None | ||||
|         self.parsed_form = None | ||||
|         self.parsed_files = None | ||||
| @@ -262,7 +266,7 @@ class Request: | ||||
|             app = Sanic("MyApp", request_class=IntRequest) | ||||
|         """ | ||||
|         if not self._id: | ||||
|             self._id = self.headers.get( | ||||
|             self._id = self.headers.getone( | ||||
|                 self.app.config.REQUEST_ID_HEADER, | ||||
|                 self.__class__.generate_id(self),  # type: ignore | ||||
|             ) | ||||
| @@ -296,6 +300,13 @@ class Request: | ||||
|  | ||||
|         return self.parsed_json | ||||
|  | ||||
|     @property | ||||
|     def accept(self) -> AcceptContainer: | ||||
|         if self.parsed_accept is None: | ||||
|             accept_header = self.headers.getone("accept", "") | ||||
|             self.parsed_accept = parse_accept(accept_header) | ||||
|         return self.parsed_accept | ||||
|  | ||||
|     @property | ||||
|     def token(self): | ||||
|         """Attempt to return the auth header token. | ||||
| @@ -303,7 +314,7 @@ class Request: | ||||
|         :return: token related to request | ||||
|         """ | ||||
|         prefixes = ("Bearer", "Token") | ||||
|         auth_header = self.headers.get("Authorization") | ||||
|         auth_header = self.headers.getone("authorization", None) | ||||
|  | ||||
|         if auth_header is not None: | ||||
|             for prefix in prefixes: | ||||
| @@ -317,8 +328,8 @@ class Request: | ||||
|         if self.parsed_form is None: | ||||
|             self.parsed_form = RequestParameters() | ||||
|             self.parsed_files = RequestParameters() | ||||
|             content_type = self.headers.get( | ||||
|                 "Content-Type", DEFAULT_HTTP_CONTENT_TYPE | ||||
|             content_type = self.headers.getone( | ||||
|                 "content-type", DEFAULT_HTTP_CONTENT_TYPE | ||||
|             ) | ||||
|             content_type, parameters = parse_content_header(content_type) | ||||
|             try: | ||||
| @@ -378,9 +389,12 @@ class Request: | ||||
|         :type errors: str | ||||
|         :return: RequestParameters | ||||
|         """ | ||||
|         if not self.parsed_args[ | ||||
|             (keep_blank_values, strict_parsing, encoding, errors) | ||||
|         ]: | ||||
|         if ( | ||||
|             keep_blank_values, | ||||
|             strict_parsing, | ||||
|             encoding, | ||||
|             errors, | ||||
|         ) not in self.parsed_args: | ||||
|             if self.query_string: | ||||
|                 self.parsed_args[ | ||||
|                     (keep_blank_values, strict_parsing, encoding, errors) | ||||
| @@ -434,9 +448,12 @@ class Request: | ||||
|         :type errors: str | ||||
|         :return: list | ||||
|         """ | ||||
|         if not self.parsed_not_grouped_args[ | ||||
|             (keep_blank_values, strict_parsing, encoding, errors) | ||||
|         ]: | ||||
|         if ( | ||||
|             keep_blank_values, | ||||
|             strict_parsing, | ||||
|             encoding, | ||||
|             errors, | ||||
|         ) not in self.parsed_not_grouped_args: | ||||
|             if self.query_string: | ||||
|                 self.parsed_not_grouped_args[ | ||||
|                     (keep_blank_values, strict_parsing, encoding, errors) | ||||
| @@ -465,7 +482,7 @@ class Request: | ||||
|         """ | ||||
|  | ||||
|         if self._cookies is None: | ||||
|             cookie = self.headers.get("Cookie") | ||||
|             cookie = self.headers.getone("cookie", None) | ||||
|             if cookie is not None: | ||||
|                 cookies: SimpleCookie = SimpleCookie() | ||||
|                 cookies.load(cookie) | ||||
| @@ -482,7 +499,7 @@ class Request: | ||||
|         :return: Content-Type header from the request | ||||
|         :rtype: str | ||||
|         """ | ||||
|         return self.headers.get("Content-Type", DEFAULT_HTTP_CONTENT_TYPE) | ||||
|         return self.headers.getone("content-type", DEFAULT_HTTP_CONTENT_TYPE) | ||||
|  | ||||
|     @property | ||||
|     def match_info(self): | ||||
| @@ -491,6 +508,10 @@ class Request: | ||||
|         """ | ||||
|         return self._match_info | ||||
|  | ||||
|     @match_info.setter | ||||
|     def match_info(self, value): | ||||
|         self._match_info = value | ||||
|  | ||||
|     # Transport properties (obtained from local interface only) | ||||
|  | ||||
|     @property | ||||
| @@ -499,7 +520,7 @@ class Request: | ||||
|         :return: peer ip of the socket | ||||
|         :rtype: str | ||||
|         """ | ||||
|         return self.conn_info.client if self.conn_info else "" | ||||
|         return self.conn_info.client_ip if self.conn_info else "" | ||||
|  | ||||
|     @property | ||||
|     def port(self) -> int: | ||||
| @@ -581,7 +602,7 @@ class Request: | ||||
|  | ||||
|         if ( | ||||
|             self.app.websocket_enabled | ||||
|             and self.headers.get("upgrade") == "websocket" | ||||
|             and self.headers.getone("upgrade", "").lower() == "websocket" | ||||
|         ): | ||||
|             scheme = "ws" | ||||
|         else: | ||||
| @@ -608,7 +629,9 @@ class Request: | ||||
|         server_name = self.app.config.get("SERVER_NAME") | ||||
|         if server_name: | ||||
|             return server_name.split("//", 1)[-1].split("/", 1)[0] | ||||
|         return str(self.forwarded.get("host") or self.headers.get("host", "")) | ||||
|         return str( | ||||
|             self.forwarded.get("host") or self.headers.getone("host", "") | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
|     def server_name(self) -> str: | ||||
|   | ||||
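The new accept property parses the Accept header once via parse_accept() and caches the result on parsed_accept. A rough usage sketch; the app name and route are illustrative, not from this changeset:

    from sanic import Sanic
    from sanic.response import json

    app = Sanic("AcceptDemo")

    @app.get("/negotiate")
    async def negotiate(request):
        parsed = request.accept  # lazily built AcceptContainer, cached on the request
        return json({"accept": [str(entry) for entry in parsed]})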
| @@ -143,7 +143,7 @@ class StreamingHTTPResponse(BaseHTTPResponse): | ||||
|  | ||||
|     .. warning:: | ||||
|  | ||||
|         **Deprecated** and set for removal in v21.6. You can now achieve the | ||||
|         **Deprecated** and set for removal in v21.12. You can now achieve the | ||||
|         same functionality without a callback. | ||||
|  | ||||
|         .. code-block:: python | ||||
| @@ -174,12 +174,16 @@ class StreamingHTTPResponse(BaseHTTPResponse): | ||||
|         status: int = 200, | ||||
|         headers: Optional[Union[Header, Dict[str, str]]] = None, | ||||
|         content_type: str = "text/plain; charset=utf-8", | ||||
|         chunked="deprecated", | ||||
|         ignore_deprecation_notice: bool = False, | ||||
|     ): | ||||
|         if chunked != "deprecated": | ||||
|         if not ignore_deprecation_notice: | ||||
|             warn( | ||||
|                 "The chunked argument has been deprecated and will be " | ||||
|                 "removed in v21.6" | ||||
|                 "Use of the StreamingHTTPResponse is deprecated in v21.6, and " | ||||
|                 "will be removed in v21.12. Please upgrade your streaming " | ||||
|                 "response implementation. You can learn more here: " | ||||
|                 "https://sanicframework.org/en/guide/advanced/streaming.html" | ||||
|                 "#response-streaming. If you use the builtin stream() or " | ||||
|                 "file_stream() methods, this upgrade will be be done for you." | ||||
|             ) | ||||
|  | ||||
|         super().__init__() | ||||
| @@ -203,6 +207,9 @@ class StreamingHTTPResponse(BaseHTTPResponse): | ||||
|             self.streaming_fn = None | ||||
|         await super().send(*args, **kwargs) | ||||
|  | ||||
|     async def eof(self): | ||||
|         raise NotImplementedError | ||||
|  | ||||
|  | ||||
| class HTTPResponse(BaseHTTPResponse): | ||||
|     """ | ||||
| @@ -235,6 +242,15 @@ class HTTPResponse(BaseHTTPResponse): | ||||
|         self.headers = Header(headers or {}) | ||||
|         self._cookies = None | ||||
|  | ||||
|     async def eof(self): | ||||
|         await self.send("", True) | ||||
|  | ||||
|     async def __aenter__(self): | ||||
|         return self.send | ||||
|  | ||||
|     async def __aexit__(self, *_): | ||||
|         await self.eof() | ||||
|  | ||||
|  | ||||
| def empty( | ||||
|     status=204, headers: Optional[Dict[str, str]] = None | ||||
| @@ -396,7 +412,6 @@ async def file_stream( | ||||
|     mime_type: Optional[str] = None, | ||||
|     headers: Optional[Dict[str, str]] = None, | ||||
|     filename: Optional[str] = None, | ||||
|     chunked="deprecated", | ||||
|     _range: Optional[Range] = None, | ||||
| ) -> StreamingHTTPResponse: | ||||
|     """Return a streaming response object with file data. | ||||
| @@ -409,12 +424,6 @@ async def file_stream( | ||||
|     :param chunked: Deprecated | ||||
|     :param _range: | ||||
|     """ | ||||
|     if chunked != "deprecated": | ||||
|         warn( | ||||
|             "The chunked argument has been deprecated and will be " | ||||
|             "removed in v21.6" | ||||
|         ) | ||||
|  | ||||
|     headers = headers or {} | ||||
|     if filename: | ||||
|         headers.setdefault( | ||||
| @@ -453,6 +462,7 @@ async def file_stream( | ||||
|         status=status, | ||||
|         headers=headers, | ||||
|         content_type=mime_type, | ||||
|         ignore_deprecation_notice=True, | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @@ -461,7 +471,6 @@ def stream( | ||||
|     status: int = 200, | ||||
|     headers: Optional[Dict[str, str]] = None, | ||||
|     content_type: str = "text/plain; charset=utf-8", | ||||
|     chunked="deprecated", | ||||
| ): | ||||
|     """Accepts an coroutine `streaming_fn` which can be used to | ||||
|     write chunks to a streaming response. Returns a `StreamingHTTPResponse`. | ||||
| @@ -482,17 +491,12 @@ def stream( | ||||
|     :param headers: Custom Headers. | ||||
|     :param chunked: Deprecated | ||||
|     """ | ||||
|     if chunked != "deprecated": | ||||
|         warn( | ||||
|             "The chunked argument has been deprecated and will be " | ||||
|             "removed in v21.6" | ||||
|         ) | ||||
|  | ||||
|     return StreamingHTTPResponse( | ||||
|         streaming_fn, | ||||
|         headers=headers, | ||||
|         content_type=content_type, | ||||
|         status=status, | ||||
|         ignore_deprecation_notice=True, | ||||
|     ) | ||||
|  | ||||
|  | ||||
|   | ||||
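With StreamingHTTPResponse deprecated, the eof(), __aenter__, and __aexit__ additions above support the callback-free streaming style that the warning points to. A sketch of that pattern; the route and payload are invented, and it assumes the request.respond() API referenced in the deprecation notice:

    from sanic import Sanic

    app = Sanic("StreamDemo")

    @app.get("/csv")
    async def csv_handler(request):
        response = await request.respond(content_type="text/csv")
        await response.send("foo,")
        await response.send("bar")
        await response.eof()  # added above: send("", True) finishes the stream

    # Equivalently, the new context-manager support (send on enter, eof on exit):
    #     async with await request.respond() as send:
    #         await send("foo,")
    #         await send("bar")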
| @@ -1,5 +1,9 @@ | ||||
| from __future__ import annotations | ||||
|  | ||||
| from functools import lru_cache | ||||
| from inspect import signature | ||||
| from typing import Any, Dict, Iterable, List, Optional, Tuple, Union | ||||
| from uuid import UUID | ||||
|  | ||||
| from sanic_routing import BaseRouter  # type: ignore | ||||
| from sanic_routing.exceptions import NoMethod  # type: ignore | ||||
| @@ -9,6 +13,7 @@ from sanic_routing.exceptions import ( | ||||
| from sanic_routing.route import Route  # type: ignore | ||||
|  | ||||
| from sanic.constants import HTTP_METHODS | ||||
| from sanic.errorpages import check_error_format | ||||
| from sanic.exceptions import MethodNotSupported, NotFound, SanicException | ||||
| from sanic.models.handler_types import RouteHandler | ||||
|  | ||||
| @@ -73,6 +78,8 @@ class Router(BaseRouter): | ||||
|         name: Optional[str] = None, | ||||
|         unquote: bool = False, | ||||
|         static: bool = False, | ||||
|         version_prefix: str = "/v", | ||||
|         error_format: Optional[str] = None, | ||||
|     ) -> Union[Route, List[Route]]: | ||||
|         """ | ||||
|         Add a handler to the router | ||||
| @@ -103,12 +110,14 @@ class Router(BaseRouter): | ||||
|         """ | ||||
|         if version is not None: | ||||
|             version = str(version).strip("/").lstrip("v") | ||||
|             uri = "/".join([f"/v{version}", uri.lstrip("/")]) | ||||
|             uri = "/".join([f"{version_prefix}{version}", uri.lstrip("/")]) | ||||
|  | ||||
|         uri = self._normalize(uri, handler) | ||||
|  | ||||
|         params = dict( | ||||
|             path=uri, | ||||
|             handler=handler, | ||||
|             methods=methods, | ||||
|             methods=frozenset(map(str, methods)) if methods else None, | ||||
|             name=name, | ||||
|             strict=strict_slashes, | ||||
|             unquote=unquote, | ||||
| @@ -130,6 +139,11 @@ class Router(BaseRouter): | ||||
|             route.ctx.stream = stream | ||||
|             route.ctx.hosts = hosts | ||||
|             route.ctx.static = static | ||||
|             route.ctx.error_format = ( | ||||
|                 error_format or self.ctx.app.config.FALLBACK_ERROR_FORMAT | ||||
|             ) | ||||
|  | ||||
|             check_error_format(route.ctx.error_format) | ||||
|  | ||||
|             routes.append(route) | ||||
|  | ||||
| @@ -186,3 +200,24 @@ class Router(BaseRouter): | ||||
|                 raise SanicException( | ||||
|                     f"Invalid route: {route}. Parameter names cannot use '__'." | ||||
|                 ) | ||||
|  | ||||
|     def _normalize(self, uri: str, handler: RouteHandler) -> str: | ||||
|         if "<" not in uri: | ||||
|             return uri | ||||
|  | ||||
|         sig = signature(handler) | ||||
|         mapping = { | ||||
|             param.name: param.annotation.__name__.lower() | ||||
|             for param in sig.parameters.values() | ||||
|             if param.annotation in (str, int, float, UUID) | ||||
|         } | ||||
|  | ||||
|         reconstruction = [] | ||||
|         for part in uri.split("/"): | ||||
|             if part.startswith("<") and ":" not in part: | ||||
|                 name = part[1:-1] | ||||
|                 annotation = mapping.get(name) | ||||
|                 if annotation: | ||||
|                     part = f"<{name}:{annotation}>" | ||||
|             reconstruction.append(part) | ||||
|         return "/".join(reconstruction) | ||||
|   | ||||
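The new _normalize() step means a bare path parameter can pick up its type from the handler's annotation, and version_prefix makes the versioned mount point configurable. Illustrative only; the app and routes are invented, and it assumes the route decorators forward version_prefix to Router.add as the new signature suggests:

    from uuid import UUID

    from sanic import Sanic
    from sanic.response import text

    app = Sanic("RouterDemo")

    @app.get("/item/<pk>")              # no type in the path...
    async def item(request, pk: UUID):  # ...so the annotation yields <pk:uuid>
        return text(str(pk))

    @app.get("/health", version=1, version_prefix="/api/v")  # served at /api/v1/health
    async def health(request):
        return text("ok")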
sanic/server.py (794 lines removed)
| @@ -1,794 +0,0 @@ | ||||
| from __future__ import annotations | ||||
|  | ||||
| from ssl import SSLContext | ||||
| from types import SimpleNamespace | ||||
| from typing import ( | ||||
|     TYPE_CHECKING, | ||||
|     Any, | ||||
|     Callable, | ||||
|     Dict, | ||||
|     Iterable, | ||||
|     Optional, | ||||
|     Type, | ||||
|     Union, | ||||
| ) | ||||
|  | ||||
| from sanic.models.handler_types import ListenerType | ||||
|  | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from sanic.app import Sanic | ||||
|  | ||||
| import asyncio | ||||
| import multiprocessing | ||||
| import os | ||||
| import secrets | ||||
| import socket | ||||
| import stat | ||||
|  | ||||
| from asyncio import CancelledError | ||||
| from asyncio.transports import Transport | ||||
| from functools import partial | ||||
| from inspect import isawaitable | ||||
| from ipaddress import ip_address | ||||
| from signal import SIG_IGN, SIGINT, SIGTERM, Signals | ||||
| from signal import signal as signal_func | ||||
| from time import monotonic as current_time | ||||
|  | ||||
| from sanic.compat import OS_IS_WINDOWS, ctrlc_workaround_for_windows | ||||
| from sanic.config import Config | ||||
| from sanic.exceptions import RequestTimeout, ServiceUnavailable | ||||
| from sanic.http import Http, Stage | ||||
| from sanic.log import logger | ||||
| from sanic.models.protocol_types import TransportProtocol | ||||
| from sanic.request import Request | ||||
|  | ||||
|  | ||||
| try: | ||||
|     import uvloop  # type: ignore | ||||
|  | ||||
|     if not isinstance(asyncio.get_event_loop_policy(), uvloop.EventLoopPolicy): | ||||
|         asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) | ||||
| except ImportError: | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class Signal: | ||||
|     stopped = False | ||||
|  | ||||
|  | ||||
| class ConnInfo: | ||||
|     """ | ||||
|     Local and remote addresses and SSL status info. | ||||
|     """ | ||||
|  | ||||
|     __slots__ = ( | ||||
|         "client_port", | ||||
|         "client", | ||||
|         "ctx", | ||||
|         "peername", | ||||
|         "server_port", | ||||
|         "server", | ||||
|         "sockname", | ||||
|         "ssl", | ||||
|     ) | ||||
|  | ||||
|     def __init__(self, transport: TransportProtocol, unix=None): | ||||
|         self.ctx = SimpleNamespace() | ||||
|         self.peername = None | ||||
|         self.server = self.client = "" | ||||
|         self.server_port = self.client_port = 0 | ||||
|         self.sockname = addr = transport.get_extra_info("sockname") | ||||
|         self.ssl: bool = bool(transport.get_extra_info("sslcontext")) | ||||
|  | ||||
|         if isinstance(addr, str):  # UNIX socket | ||||
|             self.server = unix or addr | ||||
|             return | ||||
|  | ||||
|         # IPv4 (ip, port) or IPv6 (ip, port, flowinfo, scopeid) | ||||
|         if isinstance(addr, tuple): | ||||
|             self.server = addr[0] if len(addr) == 2 else f"[{addr[0]}]" | ||||
|             self.server_port = addr[1] | ||||
|             # self.server gets non-standard port appended | ||||
|             if addr[1] != (443 if self.ssl else 80): | ||||
|                 self.server = f"{self.server}:{addr[1]}" | ||||
|         self.peername = addr = transport.get_extra_info("peername") | ||||
|  | ||||
|         if isinstance(addr, tuple): | ||||
|             self.client = addr[0] if len(addr) == 2 else f"[{addr[0]}]" | ||||
|             self.client_port = addr[1] | ||||
|  | ||||
|  | ||||
| class HttpProtocol(asyncio.Protocol): | ||||
|     """ | ||||
|     This class provides a basic HTTP implementation of the sanic framework. | ||||
|     """ | ||||
|  | ||||
|     __slots__ = ( | ||||
|         # app | ||||
|         "app", | ||||
|         # event loop, connection | ||||
|         "loop", | ||||
|         "transport", | ||||
|         "connections", | ||||
|         "signal", | ||||
|         "conn_info", | ||||
|         "ctx", | ||||
|         # request params | ||||
|         "request", | ||||
|         # request config | ||||
|         "request_handler", | ||||
|         "request_timeout", | ||||
|         "response_timeout", | ||||
|         "keep_alive_timeout", | ||||
|         "request_max_size", | ||||
|         "request_buffer_queue_size", | ||||
|         "request_class", | ||||
|         "error_handler", | ||||
|         # enable or disable access log | ||||
|         "access_log", | ||||
|         # connection management | ||||
|         "state", | ||||
|         "url", | ||||
|         "_handler_task", | ||||
|         "_can_write", | ||||
|         "_data_received", | ||||
|         "_time", | ||||
|         "_task", | ||||
|         "_http", | ||||
|         "_exception", | ||||
|         "recv_buffer", | ||||
|         "_unix", | ||||
|     ) | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         *, | ||||
|         loop, | ||||
|         app: Sanic, | ||||
|         signal=None, | ||||
|         connections=None, | ||||
|         state=None, | ||||
|         unix=None, | ||||
|         **kwargs, | ||||
|     ): | ||||
|         asyncio.set_event_loop(loop) | ||||
|         self.loop = loop | ||||
|         self.app: Sanic = app | ||||
|         self.url = None | ||||
|         self.transport: Optional[Transport] = None | ||||
|         self.conn_info: Optional[ConnInfo] = None | ||||
|         self.request: Optional[Request] = None | ||||
|         self.signal = signal or Signal() | ||||
|         self.access_log = self.app.config.ACCESS_LOG | ||||
|         self.connections = connections if connections is not None else set() | ||||
|         self.request_handler = self.app.handle_request | ||||
|         self.error_handler = self.app.error_handler | ||||
|         self.request_timeout = self.app.config.REQUEST_TIMEOUT | ||||
|         self.request_buffer_queue_size = ( | ||||
|             self.app.config.REQUEST_BUFFER_QUEUE_SIZE | ||||
|         ) | ||||
|         self.response_timeout = self.app.config.RESPONSE_TIMEOUT | ||||
|         self.keep_alive_timeout = self.app.config.KEEP_ALIVE_TIMEOUT | ||||
|         self.request_max_size = self.app.config.REQUEST_MAX_SIZE | ||||
|         self.request_class = self.app.request_class or Request | ||||
|         self.state = state if state else {} | ||||
|         if "requests_count" not in self.state: | ||||
|             self.state["requests_count"] = 0 | ||||
|         self._data_received = asyncio.Event() | ||||
|         self._can_write = asyncio.Event() | ||||
|         self._can_write.set() | ||||
|         self._exception = None | ||||
|         self._unix = unix | ||||
|  | ||||
|     def _setup_connection(self): | ||||
|         self._http = Http(self) | ||||
|         self._time = current_time() | ||||
|         self.check_timeouts() | ||||
|  | ||||
|     async def connection_task(self): | ||||
|         """ | ||||
|         Run an HTTP connection. | ||||
|  | ||||
|         Timeouts and some additional error handling occur here, while most | ||||
|         of the work happens in the Http class or in code called from there. | ||||
|         """ | ||||
|         try: | ||||
|             self._setup_connection() | ||||
|             await self._http.http1() | ||||
|         except CancelledError: | ||||
|             pass | ||||
|         except Exception: | ||||
|             logger.exception("protocol.connection_task uncaught") | ||||
|         finally: | ||||
|             if self.app.debug and self._http: | ||||
|                 ip = self.transport.get_extra_info("peername") | ||||
|                 logger.error( | ||||
|                     "Connection lost before response written" | ||||
|                     f" @ {ip} {self._http.request}" | ||||
|                 ) | ||||
|             self._http = None | ||||
|             self._task = None | ||||
|             try: | ||||
|                 self.close() | ||||
|             except BaseException: | ||||
|                 logger.exception("Closing failed") | ||||
|  | ||||
|     async def receive_more(self): | ||||
|         """ | ||||
|         Wait until more data is received into the Server protocol's buffer | ||||
|         """ | ||||
|         self.transport.resume_reading() | ||||
|         self._data_received.clear() | ||||
|         await self._data_received.wait() | ||||
|  | ||||
|     def check_timeouts(self): | ||||
|         """ | ||||
|         Runs itself periodically to enforce any expired timeouts. | ||||
|         """ | ||||
|         try: | ||||
|             if not self._task: | ||||
|                 return | ||||
|             duration = current_time() - self._time | ||||
|             stage = self._http.stage | ||||
|             if stage is Stage.IDLE and duration > self.keep_alive_timeout: | ||||
|                 logger.debug("KeepAlive Timeout. Closing connection.") | ||||
|             elif stage is Stage.REQUEST and duration > self.request_timeout: | ||||
|                 logger.debug("Request Timeout. Closing connection.") | ||||
|                 self._http.exception = RequestTimeout("Request Timeout") | ||||
|             elif stage is Stage.HANDLER and self._http.upgrade_websocket: | ||||
|                 logger.debug("Handling websocket. Timeouts disabled.") | ||||
|                 return | ||||
|             elif ( | ||||
|                 stage in (Stage.HANDLER, Stage.RESPONSE, Stage.FAILED) | ||||
|                 and duration > self.response_timeout | ||||
|             ): | ||||
|                 logger.debug("Response Timeout. Closing connection.") | ||||
|                 self._http.exception = ServiceUnavailable("Response Timeout") | ||||
|             else: | ||||
|                 interval = ( | ||||
|                     min( | ||||
|                         self.keep_alive_timeout, | ||||
|                         self.request_timeout, | ||||
|                         self.response_timeout, | ||||
|                     ) | ||||
|                     / 2 | ||||
|                 ) | ||||
|                 self.loop.call_later(max(0.1, interval), self.check_timeouts) | ||||
|                 return | ||||
|             self._task.cancel() | ||||
|         except Exception: | ||||
|             logger.exception("protocol.check_timeouts") | ||||
|  | ||||
|     async def send(self, data): | ||||
|         """ | ||||
|         Writes data with backpressure control. | ||||
|         """ | ||||
|         await self._can_write.wait() | ||||
|         if self.transport.is_closing(): | ||||
|             raise CancelledError | ||||
|         self.transport.write(data) | ||||
|         self._time = current_time() | ||||
|  | ||||
|     def close_if_idle(self) -> bool: | ||||
|         """ | ||||
|         Close the connection if a request is not being sent or received | ||||
|  | ||||
|         :return: boolean - True if closed, false if staying open | ||||
|         """ | ||||
|         if self._http is None or self._http.stage is Stage.IDLE: | ||||
|             self.close() | ||||
|             return True | ||||
|         return False | ||||
|  | ||||
|     def close(self): | ||||
|         """ | ||||
|         Force close the connection. | ||||
|         """ | ||||
|         # Cause a call to connection_lost where further cleanup occurs | ||||
|         if self.transport: | ||||
|             self.transport.close() | ||||
|             self.transport = None | ||||
|  | ||||
|     # -------------------------------------------- # | ||||
|     # Only asyncio.Protocol callbacks below this | ||||
|     # -------------------------------------------- # | ||||
|  | ||||
|     def connection_made(self, transport): | ||||
|         try: | ||||
|             # TODO: Benchmark to find suitable write buffer limits | ||||
|             transport.set_write_buffer_limits(low=16384, high=65536) | ||||
|             self.connections.add(self) | ||||
|             self.transport = transport | ||||
|             self._task = self.loop.create_task(self.connection_task()) | ||||
|             self.recv_buffer = bytearray() | ||||
|             self.conn_info = ConnInfo(self.transport, unix=self._unix) | ||||
|         except Exception: | ||||
|             logger.exception("protocol.connect_made") | ||||
|  | ||||
|     def connection_lost(self, exc): | ||||
|         try: | ||||
|             self.connections.discard(self) | ||||
|             self.resume_writing() | ||||
|             if self._task: | ||||
|                 self._task.cancel() | ||||
|         except Exception: | ||||
|             logger.exception("protocol.connection_lost") | ||||
|  | ||||
|     def pause_writing(self): | ||||
|         self._can_write.clear() | ||||
|  | ||||
|     def resume_writing(self): | ||||
|         self._can_write.set() | ||||
|  | ||||
|     def data_received(self, data: bytes): | ||||
|         try: | ||||
|             self._time = current_time() | ||||
|             if not data: | ||||
|                 return self.close() | ||||
|             self.recv_buffer += data | ||||
|  | ||||
|             if ( | ||||
|                 len(self.recv_buffer) > self.app.config.REQUEST_BUFFER_SIZE | ||||
|                 and self.transport | ||||
|             ): | ||||
|                 self.transport.pause_reading() | ||||
|  | ||||
|             if self._data_received: | ||||
|                 self._data_received.set() | ||||
|         except Exception: | ||||
|             logger.exception("protocol.data_received") | ||||
|  | ||||
|  | ||||
| def trigger_events(events: Optional[Iterable[Callable[..., Any]]], loop): | ||||
|     """ | ||||
|     Trigger event callbacks (functions or async) | ||||
|  | ||||
|     :param events: one or more sync or async functions to execute | ||||
|     :param loop: event loop | ||||
|     """ | ||||
|     if events: | ||||
|         for event in events: | ||||
|             result = event(loop) | ||||
|             if isawaitable(result): | ||||
|                 loop.run_until_complete(result) | ||||
|  | ||||
|  | ||||
| class AsyncioServer: | ||||
|     """ | ||||
|     Wraps an asyncio server with functionality that might be useful to | ||||
|     a user who needs to manage the server lifecycle manually. | ||||
|     """ | ||||
|  | ||||
|     __slots__ = ( | ||||
|         "loop", | ||||
|         "serve_coro", | ||||
|         "_after_start", | ||||
|         "_before_stop", | ||||
|         "_after_stop", | ||||
|         "server", | ||||
|         "connections", | ||||
|     ) | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         loop, | ||||
|         serve_coro, | ||||
|         connections, | ||||
|         after_start: Optional[Iterable[ListenerType]], | ||||
|         before_stop: Optional[Iterable[ListenerType]], | ||||
|         after_stop: Optional[Iterable[ListenerType]], | ||||
|     ): | ||||
|         # Note, Sanic already called "before_server_start" events | ||||
|         # before this helper was even created. So we don't need it here. | ||||
|         self.loop = loop | ||||
|         self.serve_coro = serve_coro | ||||
|         self._after_start = after_start | ||||
|         self._before_stop = before_stop | ||||
|         self._after_stop = after_stop | ||||
|         self.server = None | ||||
|         self.connections = connections | ||||
|  | ||||
|     def after_start(self): | ||||
|         """ | ||||
|         Trigger "after_server_start" events | ||||
|         """ | ||||
|         trigger_events(self._after_start, self.loop) | ||||
|  | ||||
|     def before_stop(self): | ||||
|         """ | ||||
|         Trigger "before_server_stop" events | ||||
|         """ | ||||
|         trigger_events(self._before_stop, self.loop) | ||||
|  | ||||
|     def after_stop(self): | ||||
|         """ | ||||
|         Trigger "after_server_stop" events | ||||
|         """ | ||||
|         trigger_events(self._after_stop, self.loop) | ||||
|  | ||||
|     def is_serving(self) -> bool: | ||||
|         if self.server: | ||||
|             return self.server.is_serving() | ||||
|         return False | ||||
|  | ||||
|     def wait_closed(self): | ||||
|         if self.server: | ||||
|             return self.server.wait_closed() | ||||
|  | ||||
|     def close(self): | ||||
|         if self.server: | ||||
|             self.server.close() | ||||
|             coro = self.wait_closed() | ||||
|             task = asyncio.ensure_future(coro, loop=self.loop) | ||||
|             return task | ||||
|  | ||||
|     def start_serving(self): | ||||
|         if self.server: | ||||
|             try: | ||||
|                 return self.server.start_serving() | ||||
|             except AttributeError: | ||||
|                 raise NotImplementedError( | ||||
|                     "server.start_serving not available in this version " | ||||
|                     "of asyncio or uvloop." | ||||
|                 ) | ||||
|  | ||||
|     def serve_forever(self): | ||||
|         if self.server: | ||||
|             try: | ||||
|                 return self.server.serve_forever() | ||||
|             except AttributeError: | ||||
|                 raise NotImplementedError( | ||||
|                     "server.serve_forever not available in this version " | ||||
|                     "of asyncio or uvloop." | ||||
|                 ) | ||||
|  | ||||
|     def __await__(self): | ||||
|         """ | ||||
|         Start the asyncio server; return this AsyncioServer once it is running | ||||
|         """ | ||||
|         task = asyncio.ensure_future(self.serve_coro) | ||||
|         while not task.done(): | ||||
|             yield | ||||
|         self.server = task.result() | ||||
|         return self | ||||
|  | ||||
|  | ||||
| def serve( | ||||
|     host, | ||||
|     port, | ||||
|     app, | ||||
|     before_start: Optional[Iterable[ListenerType]] = None, | ||||
|     after_start: Optional[Iterable[ListenerType]] = None, | ||||
|     before_stop: Optional[Iterable[ListenerType]] = None, | ||||
|     after_stop: Optional[Iterable[ListenerType]] = None, | ||||
|     ssl: Optional[SSLContext] = None, | ||||
|     sock: Optional[socket.socket] = None, | ||||
|     unix: Optional[str] = None, | ||||
|     reuse_port: bool = False, | ||||
|     loop=None, | ||||
|     protocol: Type[asyncio.Protocol] = HttpProtocol, | ||||
|     backlog: int = 100, | ||||
|     register_sys_signals: bool = True, | ||||
|     run_multiple: bool = False, | ||||
|     run_async: bool = False, | ||||
|     connections=None, | ||||
|     signal=Signal(), | ||||
|     state=None, | ||||
|     asyncio_server_kwargs=None, | ||||
| ): | ||||
|     """Start asynchronous HTTP Server on an individual process. | ||||
|  | ||||
|     :param host: Address to host on | ||||
|     :param port: Port to host on | ||||
|     :param before_start: function to be executed before the server starts | ||||
|                          listening. Takes arguments `app` instance and `loop` | ||||
|     :param after_start: function to be executed after the server starts | ||||
|                         listening. Takes arguments `app` instance and `loop` | ||||
|     :param before_stop: function to be executed when a stop signal is | ||||
|                         received before it is respected. Takes arguments | ||||
|                         `app` instance and `loop` | ||||
|     :param after_stop: function to be executed when a stop signal is | ||||
|                        received after it is respected. Takes arguments | ||||
|                        `app` instance and `loop` | ||||
|     :param ssl: SSLContext | ||||
|     :param sock: Socket for the server to accept connections from | ||||
|     :param unix: Unix socket to listen on instead of TCP port | ||||
|     :param reuse_port: `True` for multiple workers | ||||
|     :param loop: asyncio compatible event loop | ||||
|     :param run_async: bool: Do not create a new event loop for the server, | ||||
|                       and return an AsyncServer object rather than running it | ||||
|     :param asyncio_server_kwargs: key-value args for asyncio/uvloop | ||||
|                                   create_server method | ||||
|     :return: Nothing | ||||
|     """ | ||||
|     if not run_async and not loop: | ||||
|         # create new event_loop after fork | ||||
|         loop = asyncio.new_event_loop() | ||||
|         asyncio.set_event_loop(loop) | ||||
|  | ||||
|     if app.debug: | ||||
|         loop.set_debug(app.debug) | ||||
|  | ||||
|     app.asgi = False | ||||
|  | ||||
|     connections = connections if connections is not None else set() | ||||
|     protocol_kwargs = _build_protocol_kwargs(protocol, app.config) | ||||
|     server = partial( | ||||
|         protocol, | ||||
|         loop=loop, | ||||
|         connections=connections, | ||||
|         signal=signal, | ||||
|         app=app, | ||||
|         state=state, | ||||
|         unix=unix, | ||||
|         **protocol_kwargs, | ||||
|     ) | ||||
|     asyncio_server_kwargs = ( | ||||
|         asyncio_server_kwargs if asyncio_server_kwargs else {} | ||||
|     ) | ||||
|     # UNIX sockets are always bound by us (to preserve semantics between modes) | ||||
|     if unix: | ||||
|         sock = bind_unix_socket(unix, backlog=backlog) | ||||
|     server_coroutine = loop.create_server( | ||||
|         server, | ||||
|         None if sock else host, | ||||
|         None if sock else port, | ||||
|         ssl=ssl, | ||||
|         reuse_port=reuse_port, | ||||
|         sock=sock, | ||||
|         backlog=backlog, | ||||
|         **asyncio_server_kwargs, | ||||
|     ) | ||||
|  | ||||
|     if run_async: | ||||
|         return AsyncioServer( | ||||
|             loop=loop, | ||||
|             serve_coro=server_coroutine, | ||||
|             connections=connections, | ||||
|             after_start=after_start, | ||||
|             before_stop=before_stop, | ||||
|             after_stop=after_stop, | ||||
|         ) | ||||
|  | ||||
|     trigger_events(before_start, loop) | ||||
|  | ||||
|     try: | ||||
|         http_server = loop.run_until_complete(server_coroutine) | ||||
|     except BaseException: | ||||
|         logger.exception("Unable to start server") | ||||
|         return | ||||
|  | ||||
|     trigger_events(after_start, loop) | ||||
|  | ||||
|     # Ignore SIGINT when run_multiple | ||||
|     if run_multiple: | ||||
|         signal_func(SIGINT, SIG_IGN) | ||||
|  | ||||
|     # Register signals for graceful termination | ||||
|     if register_sys_signals: | ||||
|         if OS_IS_WINDOWS: | ||||
|             ctrlc_workaround_for_windows(app) | ||||
|         else: | ||||
|             for _signal in [SIGTERM] if run_multiple else [SIGINT, SIGTERM]: | ||||
|                 loop.add_signal_handler(_signal, app.stop) | ||||
|     pid = os.getpid() | ||||
|     try: | ||||
|         logger.info("Starting worker [%s]", pid) | ||||
|         loop.run_forever() | ||||
|     finally: | ||||
|         logger.info("Stopping worker [%s]", pid) | ||||
|  | ||||
|         # Run the on_stop function if provided | ||||
|         trigger_events(before_stop, loop) | ||||
|  | ||||
|         # Wait for event loop to finish and all connections to drain | ||||
|         http_server.close() | ||||
|         loop.run_until_complete(http_server.wait_closed()) | ||||
|  | ||||
|         # Complete all tasks on the loop | ||||
|         signal.stopped = True | ||||
|         for connection in connections: | ||||
|             connection.close_if_idle() | ||||
|  | ||||
|         # Graceful shutdown timeout. | ||||
|         # We honor graceful_shutdown_timeout | ||||
|         # instead of letting connections hang forever, | ||||
|         # and roughly track the elapsed drain time. | ||||
|         graceful = app.config.GRACEFUL_SHUTDOWN_TIMEOUT | ||||
|         start_shutdown: float = 0 | ||||
|         while connections and (start_shutdown < graceful): | ||||
|             loop.run_until_complete(asyncio.sleep(0.1)) | ||||
|             start_shutdown = start_shutdown + 0.1 | ||||
|  | ||||
|         # Force close non-idle connection after waiting for | ||||
|         # graceful_shutdown_timeout | ||||
|         coros = [] | ||||
|         for conn in connections: | ||||
|             if hasattr(conn, "websocket") and conn.websocket: | ||||
|                 coros.append(conn.websocket.close_connection()) | ||||
|             else: | ||||
|                 conn.close() | ||||
|  | ||||
|         _shutdown = asyncio.gather(*coros) | ||||
|         loop.run_until_complete(_shutdown) | ||||
|  | ||||
|         trigger_events(after_stop, loop) | ||||
|  | ||||
|         remove_unix_socket(unix) | ||||
|  | ||||
|  | ||||
| def _build_protocol_kwargs( | ||||
|     protocol: Type[asyncio.Protocol], config: Config | ||||
| ) -> Dict[str, Union[int, float]]: | ||||
|     if hasattr(protocol, "websocket_handshake"): | ||||
|         return { | ||||
|             "websocket_max_size": config.WEBSOCKET_MAX_SIZE, | ||||
|             "websocket_max_queue": config.WEBSOCKET_MAX_QUEUE, | ||||
|             "websocket_read_limit": config.WEBSOCKET_READ_LIMIT, | ||||
|             "websocket_write_limit": config.WEBSOCKET_WRITE_LIMIT, | ||||
|             "websocket_ping_timeout": config.WEBSOCKET_PING_TIMEOUT, | ||||
|             "websocket_ping_interval": config.WEBSOCKET_PING_INTERVAL, | ||||
|         } | ||||
|     return {} | ||||
|  | ||||
|  | ||||
| def bind_socket(host: str, port: int, *, backlog=100) -> socket.socket: | ||||
|     """Create TCP server socket. | ||||
|     :param host: IPv4, IPv6 or hostname may be specified | ||||
|     :param port: TCP port number | ||||
|     :param backlog: Maximum number of connections to queue | ||||
|     :return: socket.socket object | ||||
|     """ | ||||
|     try:  # IP address: family must be specified for IPv6 at least | ||||
|         ip = ip_address(host) | ||||
|         host = str(ip) | ||||
|         sock = socket.socket( | ||||
|             socket.AF_INET6 if ip.version == 6 else socket.AF_INET | ||||
|         ) | ||||
|     except ValueError:  # Hostname, may become AF_INET or AF_INET6 | ||||
|         sock = socket.socket() | ||||
|     sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) | ||||
|     sock.bind((host, port)) | ||||
|     sock.listen(backlog) | ||||
|     return sock | ||||
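    # ---------------------------------------------------------------------
    # Illustrative sketch (not part of this module): serve_multiple() below
    # uses bind_socket() to build the single shared, inheritable listener
    # that every forked worker reuses, roughly:
    #
    #     sock = bind_socket("127.0.0.1", 8000, backlog=100)
    #     sock.set_inheritable(True)
    #     server_settings["sock"] = sock   # each worker then passes sock= to
    #                                      # loop.create_server(reuse_port=True)
    # ---------------------------------------------------------------------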
|  | ||||
|  | ||||
| def bind_unix_socket(path: str, *, mode=0o666, backlog=100) -> socket.socket: | ||||
|     """Create unix socket. | ||||
|     :param path: filesystem path | ||||
|     :param backlog: Maximum number of connections to queue | ||||
|     :return: socket.socket object | ||||
|     """ | ||||
|     """Open or atomically replace existing socket with zero downtime.""" | ||||
|     # Sanitise and pre-verify socket path | ||||
|     path = os.path.abspath(path) | ||||
|     folder = os.path.dirname(path) | ||||
|     if not os.path.isdir(folder): | ||||
|         raise FileNotFoundError(f"Socket folder does not exist: {folder}") | ||||
|     try: | ||||
|         if not stat.S_ISSOCK(os.stat(path, follow_symlinks=False).st_mode): | ||||
|             raise FileExistsError(f"Existing file is not a socket: {path}") | ||||
|     except FileNotFoundError: | ||||
|         pass | ||||
|     # Create new socket with a random temporary name | ||||
|     tmp_path = f"{path}.{secrets.token_urlsafe()}" | ||||
|     sock = socket.socket(socket.AF_UNIX) | ||||
|     try: | ||||
|         # Critical section begins (filename races) | ||||
|         sock.bind(tmp_path) | ||||
|         try: | ||||
|             os.chmod(tmp_path, mode) | ||||
|             # Start listening before rename to avoid connection failures | ||||
|             sock.listen(backlog) | ||||
|             os.rename(tmp_path, path) | ||||
|         except:  # noqa: E722 | ||||
|             try: | ||||
|                 os.unlink(tmp_path) | ||||
|             finally: | ||||
|                 raise | ||||
|     except:  # noqa: E722 | ||||
|         try: | ||||
|             sock.close() | ||||
|         finally: | ||||
|             raise | ||||
|     return sock | ||||
|  | ||||
|  | ||||
| def remove_unix_socket(path: Optional[str]) -> None: | ||||
|     """Remove dead unix socket during server exit.""" | ||||
|     if not path: | ||||
|         return | ||||
|     try: | ||||
|         if stat.S_ISSOCK(os.stat(path, follow_symlinks=False).st_mode): | ||||
|             # Is it actually dead (doesn't belong to a new server instance)? | ||||
|             with socket.socket(socket.AF_UNIX) as testsock: | ||||
|                 try: | ||||
|                     testsock.connect(path) | ||||
|                 except ConnectionRefusedError: | ||||
|                     os.unlink(path) | ||||
|     except FileNotFoundError: | ||||
|         pass | ||||
|  | ||||
|  | ||||
| def serve_single(server_settings): | ||||
|     main_start = server_settings.pop("main_start", None) | ||||
|     main_stop = server_settings.pop("main_stop", None) | ||||
|  | ||||
|     if not server_settings.get("run_async"): | ||||
|         # create new event_loop after fork | ||||
|         loop = asyncio.new_event_loop() | ||||
|         asyncio.set_event_loop(loop) | ||||
|         server_settings["loop"] = loop | ||||
|  | ||||
|     trigger_events(main_start, server_settings["loop"]) | ||||
|     serve(**server_settings) | ||||
|     trigger_events(main_stop, server_settings["loop"]) | ||||
|  | ||||
|     server_settings["loop"].close() | ||||
|  | ||||
|  | ||||
| def serve_multiple(server_settings, workers): | ||||
|     """Start multiple server processes simultaneously.  Stop on interrupt | ||||
|     and terminate signals, and drain connections when complete. | ||||
|  | ||||
|     :param server_settings: kw arguments to be passed to the serve function | ||||
|     :param workers: number of workers to launch | ||||
|     :param stop_event: if provided, is used as a stop signal | ||||
|     :return: | ||||
|     """ | ||||
|     server_settings["reuse_port"] = True | ||||
|     server_settings["run_multiple"] = True | ||||
|  | ||||
|     main_start = server_settings.pop("main_start", None) | ||||
|     main_stop = server_settings.pop("main_stop", None) | ||||
|     loop = asyncio.new_event_loop() | ||||
|     asyncio.set_event_loop(loop) | ||||
|  | ||||
|     trigger_events(main_start, loop) | ||||
|  | ||||
|     # Create a listening socket or use the one in settings | ||||
|     sock = server_settings.get("sock") | ||||
|     unix = server_settings["unix"] | ||||
|     backlog = server_settings["backlog"] | ||||
|     if unix: | ||||
|         sock = bind_unix_socket(unix, backlog=backlog) | ||||
|         server_settings["unix"] = unix | ||||
|     if sock is None: | ||||
|         sock = bind_socket( | ||||
|             server_settings["host"], server_settings["port"], backlog=backlog | ||||
|         ) | ||||
|         sock.set_inheritable(True) | ||||
|         server_settings["sock"] = sock | ||||
|         server_settings["host"] = None | ||||
|         server_settings["port"] = None | ||||
|  | ||||
|     processes = [] | ||||
|  | ||||
|     def sig_handler(signal, frame): | ||||
|         logger.info("Received signal %s. Shutting down.", Signals(signal).name) | ||||
|         for process in processes: | ||||
|             os.kill(process.pid, SIGTERM) | ||||
|  | ||||
|     signal_func(SIGINT, lambda s, f: sig_handler(s, f)) | ||||
|     signal_func(SIGTERM, lambda s, f: sig_handler(s, f)) | ||||
|     mp = multiprocessing.get_context("fork") | ||||
|  | ||||
|     for _ in range(workers): | ||||
|         process = mp.Process(target=serve, kwargs=server_settings) | ||||
|         process.daemon = True | ||||
|         process.start() | ||||
|         processes.append(process) | ||||
|  | ||||
|     for process in processes: | ||||
|         process.join() | ||||
|  | ||||
|     # the above processes will block this until they're stopped | ||||
|     for process in processes: | ||||
|         process.terminate() | ||||
|  | ||||
|     trigger_events(main_stop, loop) | ||||
|  | ||||
|     sock.close() | ||||
|     loop.close() | ||||
|     remove_unix_socket(unix) | ||||
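Everything in the deleted module reappears under the new sanic/server/ package below; end-user code normally keeps reaching it through app.run(). A small sketch, with invented names, of the multi-worker path that serve_multiple() handles:

    from sanic import Sanic
    from sanic.response import text

    app = Sanic("WorkersDemo")

    @app.get("/")
    async def index(request):
        return text("hello")

    if __name__ == "__main__":
        # workers > 1 goes through serve_multiple(): one shared, inheritable
        # listening socket plus SIGTERM fan-out to the forked worker processes.
        app.run(host="127.0.0.1", port=8000, workers=2)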
sanic/server/__init__.py (new file, 26 lines)
| @@ -0,0 +1,26 @@ | ||||
| import asyncio | ||||
|  | ||||
| from sanic.models.server_types import ConnInfo, Signal | ||||
| from sanic.server.async_server import AsyncioServer | ||||
| from sanic.server.protocols.http_protocol import HttpProtocol | ||||
| from sanic.server.runners import serve, serve_multiple, serve_single | ||||
|  | ||||
|  | ||||
| try: | ||||
|     import uvloop  # type: ignore | ||||
|  | ||||
|     if not isinstance(asyncio.get_event_loop_policy(), uvloop.EventLoopPolicy): | ||||
|         asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) | ||||
| except ImportError: | ||||
|     pass | ||||
|  | ||||
|  | ||||
| __all__ = ( | ||||
|     "AsyncioServer", | ||||
|     "ConnInfo", | ||||
|     "HttpProtocol", | ||||
|     "Signal", | ||||
|     "serve", | ||||
|     "serve_multiple", | ||||
|     "serve_single", | ||||
| ) | ||||
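Because the package re-exports the public names (and still installs the uvloop policy on import), existing `from sanic.server import ...` statements keep working after the split. A quick check, assuming the module layout shown in this changeset:

    from sanic.server import AsyncioServer, ConnInfo, HttpProtocol, Signal, serve

    print(serve.__module__)         # sanic.server.runners
    print(HttpProtocol.__module__)  # sanic.server.protocols.http_protocol
    print(ConnInfo.__module__)      # sanic.models.server_types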
sanic/server/async_server.py (new file, 115 lines)
| @@ -0,0 +1,115 @@ | ||||
| from __future__ import annotations | ||||
|  | ||||
| import asyncio | ||||
|  | ||||
| from sanic.exceptions import SanicException | ||||
|  | ||||
|  | ||||
| class AsyncioServer: | ||||
|     """ | ||||
|     Wraps an asyncio server with functionality that might be useful to | ||||
|     a user who needs to manage the server lifecycle manually. | ||||
|     """ | ||||
|  | ||||
|     __slots__ = ("app", "connections", "loop", "serve_coro", "server", "init") | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         app, | ||||
|         loop, | ||||
|         serve_coro, | ||||
|         connections, | ||||
|     ): | ||||
|         # Note, Sanic already called "before_server_start" events | ||||
|         # before this helper was even created. So we don't need it here. | ||||
|         self.app = app | ||||
|         self.connections = connections | ||||
|         self.loop = loop | ||||
|         self.serve_coro = serve_coro | ||||
|         self.server = None | ||||
|         self.init = False | ||||
|  | ||||
|     def startup(self): | ||||
|         """ | ||||
|         Trigger "before_server_start" events | ||||
|         """ | ||||
|         self.init = True | ||||
|         return self.app._startup() | ||||
|  | ||||
|     def before_start(self): | ||||
|         """ | ||||
|         Trigger "before_server_start" events | ||||
|         """ | ||||
|         return self._server_event("init", "before") | ||||
|  | ||||
|     def after_start(self): | ||||
|         """ | ||||
|         Trigger "after_server_start" events | ||||
|         """ | ||||
|         return self._server_event("init", "after") | ||||
|  | ||||
|     def before_stop(self): | ||||
|         """ | ||||
|         Trigger "before_server_stop" events | ||||
|         """ | ||||
|         return self._server_event("shutdown", "before") | ||||
|  | ||||
|     def after_stop(self): | ||||
|         """ | ||||
|         Trigger "after_server_stop" events | ||||
|         """ | ||||
|         return self._server_event("shutdown", "after") | ||||
|  | ||||
|     def is_serving(self) -> bool: | ||||
|         if self.server: | ||||
|             return self.server.is_serving() | ||||
|         return False | ||||
|  | ||||
|     def wait_closed(self): | ||||
|         if self.server: | ||||
|             return self.server.wait_closed() | ||||
|  | ||||
|     def close(self): | ||||
|         if self.server: | ||||
|             self.server.close() | ||||
|             coro = self.wait_closed() | ||||
|             task = asyncio.ensure_future(coro, loop=self.loop) | ||||
|             return task | ||||
|  | ||||
|     def start_serving(self): | ||||
|         if self.server: | ||||
|             try: | ||||
|                 return self.server.start_serving() | ||||
|             except AttributeError: | ||||
|                 raise NotImplementedError( | ||||
|                     "server.start_serving not available in this version " | ||||
|                     "of asyncio or uvloop." | ||||
|                 ) | ||||
|  | ||||
|     def serve_forever(self): | ||||
|         if self.server: | ||||
|             try: | ||||
|                 return self.server.serve_forever() | ||||
|             except AttributeError: | ||||
|                 raise NotImplementedError( | ||||
|                     "server.serve_forever not available in this version " | ||||
|                     "of asyncio or uvloop." | ||||
|                 ) | ||||
|  | ||||
|     def _server_event(self, concern: str, action: str): | ||||
|         if not self.init: | ||||
|             raise SanicException( | ||||
|                 "Cannot dispatch server event without " | ||||
|                 "first running server.startup()" | ||||
|             ) | ||||
|         return self.app._server_event(concern, action, loop=self.loop) | ||||
|  | ||||
|     def __await__(self): | ||||
|         """ | ||||
|         Start the asyncio server; return this AsyncioServer once it is running | ||||
|         """ | ||||
|         task = asyncio.ensure_future(self.serve_coro) | ||||
|         while not task.done(): | ||||
|             yield | ||||
|         self.server = task.result() | ||||
|         return self | ||||
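The new init flag and _server_event() guard mean that low-level callers must run startup() before dispatching listener events. A rough sketch of manual lifecycle management; the host/port values and the return_asyncio_server flag follow the existing create_server() API and should be treated as an assumption rather than part of this diff:

    import asyncio

    from sanic import Sanic
    from sanic.response import text

    app = Sanic("ManualServer")

    @app.get("/")
    async def index(request):
        return text("ok")

    async def main():
        server = await app.create_server(
            host="127.0.0.1", port=8000, return_asyncio_server=True
        )
        await server.startup()      # required: _server_event() raises without it
        await server.after_start()  # dispatch "after_server_start" listeners manually
        await server.serve_forever()

    asyncio.run(main())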
sanic/server/events.py (new file, 16 lines)
| @@ -0,0 +1,16 @@ | ||||
| from inspect import isawaitable | ||||
| from typing import Any, Callable, Iterable, Optional | ||||
|  | ||||
|  | ||||
| def trigger_events(events: Optional[Iterable[Callable[..., Any]]], loop): | ||||
|     """ | ||||
|     Trigger event callbacks (functions or async) | ||||
|  | ||||
|     :param events: one or more sync or async functions to execute | ||||
|     :param loop: event loop | ||||
|     """ | ||||
|     if events: | ||||
|         for event in events: | ||||
|             result = event(loop) | ||||
|             if isawaitable(result): | ||||
|                 loop.run_until_complete(result) | ||||
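trigger_events() is unchanged in behaviour, just relocated: plain callables run inline, and awaitable results are driven to completion on the supplied loop. A standalone illustration with invented listener names:

    import asyncio

    from sanic.server.events import trigger_events

    def sync_listener(loop):
        print("sync listener ran")

    async def async_listener(loop):
        print("async listener ran")

    loop = asyncio.new_event_loop()
    trigger_events([sync_listener, async_listener], loop)
    loop.close()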
sanic/server/protocols/__init__.py (new file, empty)
sanic/server/protocols/base_protocol.py (new file, 143 lines)
| @@ -0,0 +1,143 @@ | ||||
| from __future__ import annotations | ||||
|  | ||||
| from typing import TYPE_CHECKING, Optional | ||||
|  | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from sanic.app import Sanic | ||||
|  | ||||
| import asyncio | ||||
|  | ||||
| from asyncio import CancelledError | ||||
| from asyncio.transports import Transport | ||||
| from time import monotonic as current_time | ||||
|  | ||||
| from sanic.log import error_logger | ||||
| from sanic.models.server_types import ConnInfo, Signal | ||||
|  | ||||
|  | ||||
| class SanicProtocol(asyncio.Protocol): | ||||
|     __slots__ = ( | ||||
|         "app", | ||||
|         # event loop, connection | ||||
|         "loop", | ||||
|         "transport", | ||||
|         "connections", | ||||
|         "conn_info", | ||||
|         "signal", | ||||
|         "_can_write", | ||||
|         "_time", | ||||
|         "_task", | ||||
|         "_unix", | ||||
|         "_data_received", | ||||
|     ) | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         *, | ||||
|         loop, | ||||
|         app: Sanic, | ||||
|         signal=None, | ||||
|         connections=None, | ||||
|         unix=None, | ||||
|         **kwargs, | ||||
|     ): | ||||
|         asyncio.set_event_loop(loop) | ||||
|         self.loop = loop | ||||
|         self.app: Sanic = app | ||||
|         self.signal = signal or Signal() | ||||
|         self.transport: Optional[Transport] = None | ||||
|         self.connections = connections if connections is not None else set() | ||||
|         self.conn_info: Optional[ConnInfo] = None | ||||
|         self._can_write = asyncio.Event() | ||||
|         self._can_write.set() | ||||
|         self._unix = unix | ||||
|         self._time = 0.0  # type: float | ||||
|         self._task = None  # type: Optional[asyncio.Task] | ||||
|         self._data_received = asyncio.Event() | ||||
|  | ||||
|     @property | ||||
|     def ctx(self): | ||||
|         if self.conn_info is not None: | ||||
|             return self.conn_info.ctx | ||||
|         else: | ||||
|             return None | ||||
|  | ||||
|     async def send(self, data): | ||||
|         """ | ||||
|         Generic data write implementation with backpressure control. | ||||
|         """ | ||||
|         await self._can_write.wait() | ||||
|         if self.transport.is_closing(): | ||||
|             raise CancelledError | ||||
|         self.transport.write(data) | ||||
|         self._time = current_time() | ||||
|  | ||||
|     async def receive_more(self): | ||||
|         """ | ||||
|         Wait until more data is received into the Server protocol's buffer | ||||
|         """ | ||||
|         self.transport.resume_reading() | ||||
|         self._data_received.clear() | ||||
|         await self._data_received.wait() | ||||
|  | ||||
|     def close(self, timeout: Optional[float] = None): | ||||
|         """ | ||||
|         Attempt to close the connection. | ||||
|         """ | ||||
|         # Cause a call to connection_lost where further cleanup occurs | ||||
|         if self.transport: | ||||
|             self.transport.close() | ||||
|             if timeout is None: | ||||
|                 timeout = self.app.config.GRACEFUL_SHUTDOWN_TIMEOUT | ||||
|             self.loop.call_later(timeout, self.abort) | ||||
|  | ||||
|     def abort(self): | ||||
|         """ | ||||
|         Force close the connection. | ||||
|         """ | ||||
|         # Cause a call to connection_lost where further cleanup occurs | ||||
|         if self.transport: | ||||
|             self.transport.abort() | ||||
|             self.transport = None | ||||
|  | ||||
|     # asyncio.Protocol API Callbacks # | ||||
|     # ------------------------------ # | ||||
|     def connection_made(self, transport): | ||||
|         """ | ||||
|         Generic connection-made, with no connection_task, and no recv_buffer. | ||||
|         Override this for protocol-specific connection implementations. | ||||
|         """ | ||||
|         try: | ||||
|             transport.set_write_buffer_limits(low=16384, high=65536) | ||||
|             self.connections.add(self) | ||||
|             self.transport = transport | ||||
|             self.conn_info = ConnInfo(self.transport, unix=self._unix) | ||||
|         except Exception: | ||||
|             error_logger.exception("protocol.connect_made") | ||||
|  | ||||
|     def connection_lost(self, exc): | ||||
|         try: | ||||
|             self.connections.discard(self) | ||||
|             self.resume_writing() | ||||
|             if self._task: | ||||
|                 self._task.cancel() | ||||
|         except BaseException: | ||||
|             error_logger.exception("protocol.connection_lost") | ||||
|  | ||||
|     def pause_writing(self): | ||||
|         self._can_write.clear() | ||||
|  | ||||
|     def resume_writing(self): | ||||
|         self._can_write.set() | ||||
|  | ||||
|     def data_received(self, data: bytes): | ||||
|         try: | ||||
|             self._time = current_time() | ||||
|             if not data: | ||||
|                 return self.close() | ||||
|  | ||||
|             if self._data_received: | ||||
|                 self._data_received.set() | ||||
|         except BaseException: | ||||
|             error_logger.exception("protocol.data_received") | ||||
sanic/server/protocols/http_protocol.py (new file, 238 lines)
| @@ -0,0 +1,238 @@ | ||||
| from __future__ import annotations | ||||
|  | ||||
| from typing import TYPE_CHECKING, Optional | ||||
|  | ||||
| from sanic.touchup.meta import TouchUpMeta | ||||
|  | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from sanic.app import Sanic | ||||
|  | ||||
| from asyncio import CancelledError | ||||
| from time import monotonic as current_time | ||||
|  | ||||
| from sanic.exceptions import RequestTimeout, ServiceUnavailable | ||||
| from sanic.http import Http, Stage | ||||
| from sanic.log import error_logger, logger | ||||
| from sanic.models.server_types import ConnInfo | ||||
| from sanic.request import Request | ||||
| from sanic.server.protocols.base_protocol import SanicProtocol | ||||
|  | ||||
|  | ||||
| class HttpProtocol(SanicProtocol, metaclass=TouchUpMeta): | ||||
|     """ | ||||
|     This class implements the HTTP/1.1 protocol on top of the | ||||
|     Sanic Server transport. | ||||
|     """ | ||||
|  | ||||
|     __touchup__ = ( | ||||
|         "send", | ||||
|         "connection_task", | ||||
|     ) | ||||
|     __slots__ = ( | ||||
|         # request params | ||||
|         "request", | ||||
|         # request config | ||||
|         "request_handler", | ||||
|         "request_timeout", | ||||
|         "response_timeout", | ||||
|         "keep_alive_timeout", | ||||
|         "request_max_size", | ||||
|         "request_class", | ||||
|         "error_handler", | ||||
|         # enable or disable the access log | ||||
|         "access_log", | ||||
|         # connection management | ||||
|         "state", | ||||
|         "url", | ||||
|         "_handler_task", | ||||
|         "_http", | ||||
|         "_exception", | ||||
|         "recv_buffer", | ||||
|     ) | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         *, | ||||
|         loop, | ||||
|         app: Sanic, | ||||
|         signal=None, | ||||
|         connections=None, | ||||
|         state=None, | ||||
|         unix=None, | ||||
|         **kwargs, | ||||
|     ): | ||||
|         super().__init__( | ||||
|             loop=loop, | ||||
|             app=app, | ||||
|             signal=signal, | ||||
|             connections=connections, | ||||
|             unix=unix, | ||||
|         ) | ||||
|         self.url = None | ||||
|         self.request: Optional[Request] = None | ||||
|         self.access_log = self.app.config.ACCESS_LOG | ||||
|         self.request_handler = self.app.handle_request | ||||
|         self.error_handler = self.app.error_handler | ||||
|         self.request_timeout = self.app.config.REQUEST_TIMEOUT | ||||
|         self.response_timeout = self.app.config.RESPONSE_TIMEOUT | ||||
|         self.keep_alive_timeout = self.app.config.KEEP_ALIVE_TIMEOUT | ||||
|         self.request_max_size = self.app.config.REQUEST_MAX_SIZE | ||||
|         self.request_class = self.app.request_class or Request | ||||
|         self.state = state if state else {} | ||||
|         if "requests_count" not in self.state: | ||||
|             self.state["requests_count"] = 0 | ||||
|         self._exception = None | ||||
|  | ||||
|     def _setup_connection(self): | ||||
|         self._http = Http(self) | ||||
|         self._time = current_time() | ||||
|         self.check_timeouts() | ||||
|  | ||||
|     async def connection_task(self):  # no cov | ||||
|         """ | ||||
|         Run an HTTP connection. | ||||
|  | ||||
|         Timeouts and some additional error handling occur here, while most | ||||
|         of the request/response cycle is handled by the Http class or by | ||||
|         code called from there. | ||||
|         """ | ||||
|         try: | ||||
|             self._setup_connection() | ||||
|             await self.app.dispatch( | ||||
|                 "http.lifecycle.begin", | ||||
|                 inline=True, | ||||
|                 context={"conn_info": self.conn_info}, | ||||
|             ) | ||||
|             await self._http.http1() | ||||
|         except CancelledError: | ||||
|             pass | ||||
|         except Exception: | ||||
|             error_logger.exception("protocol.connection_task uncaught") | ||||
|         finally: | ||||
|             if ( | ||||
|                 self.app.debug | ||||
|                 and self._http | ||||
|                 and self.transport | ||||
|                 and not self._http.upgrade_websocket | ||||
|             ): | ||||
|                 ip = self.transport.get_extra_info("peername") | ||||
|                 error_logger.error( | ||||
|                     "Connection lost before response written" | ||||
|                     f" @ {ip} {self._http.request}" | ||||
|                 ) | ||||
|             self._http = None | ||||
|             self._task = None | ||||
|             try: | ||||
|                 self.close() | ||||
|             except BaseException: | ||||
|                 error_logger.exception("Closing failed") | ||||
|             finally: | ||||
|                 await self.app.dispatch( | ||||
|                     "http.lifecycle.complete", | ||||
|                     inline=True, | ||||
|                     context={"conn_info": self.conn_info}, | ||||
|                 ) | ||||
|                 # Important to keep this Ellipsis here for the TouchUp module | ||||
|                 ... | ||||
|  | ||||
|     def check_timeouts(self): | ||||
|         """ | ||||
|         Runs itself periodically to enforce any expired timeouts. | ||||
|         """ | ||||
|         try: | ||||
|             if not self._task: | ||||
|                 return | ||||
|             duration = current_time() - self._time | ||||
|             stage = self._http.stage | ||||
|             if stage is Stage.IDLE and duration > self.keep_alive_timeout: | ||||
|                 logger.debug("KeepAlive Timeout. Closing connection.") | ||||
|             elif stage is Stage.REQUEST and duration > self.request_timeout: | ||||
|                 logger.debug("Request Timeout. Closing connection.") | ||||
|                 self._http.exception = RequestTimeout("Request Timeout") | ||||
|             elif stage is Stage.HANDLER and self._http.upgrade_websocket: | ||||
|                 logger.debug("Handling websocket. Timeouts disabled.") | ||||
|                 return | ||||
|             elif ( | ||||
|                 stage in (Stage.HANDLER, Stage.RESPONSE, Stage.FAILED) | ||||
|                 and duration > self.response_timeout | ||||
|             ): | ||||
|                 logger.debug("Response Timeout. Closing connection.") | ||||
|                 self._http.exception = ServiceUnavailable("Response Timeout") | ||||
|             else: | ||||
|                 interval = ( | ||||
|                     min( | ||||
|                         self.keep_alive_timeout, | ||||
|                         self.request_timeout, | ||||
|                         self.response_timeout, | ||||
|                     ) | ||||
|                     / 2 | ||||
|                 ) | ||||
|                 self.loop.call_later(max(0.1, interval), self.check_timeouts) | ||||
|                 return | ||||
|             self._task.cancel() | ||||
|         except Exception: | ||||
|             error_logger.exception("protocol.check_timeouts") | ||||
|  | ||||
|     async def send(self, data):  # no cov | ||||
|         """ | ||||
|         Writes HTTP data with backpressure control. | ||||
|         """ | ||||
|         await self._can_write.wait() | ||||
|         if self.transport.is_closing(): | ||||
|             raise CancelledError | ||||
|         await self.app.dispatch( | ||||
|             "http.lifecycle.send", | ||||
|             inline=True, | ||||
|             context={"data": data}, | ||||
|         ) | ||||
|         self.transport.write(data) | ||||
|         self._time = current_time() | ||||
|  | ||||
|     def close_if_idle(self) -> bool: | ||||
|         """ | ||||
|         Close the connection if a request is not being sent or received | ||||
|  | ||||
|         :return: boolean - True if closed, False if staying open | ||||
|         """ | ||||
|         if self._http is None or self._http.stage is Stage.IDLE: | ||||
|             self.close() | ||||
|             return True | ||||
|         return False | ||||
|  | ||||
|     # -------------------------------------------- # | ||||
|     # Only asyncio.Protocol callbacks below this | ||||
|     # -------------------------------------------- # | ||||
|  | ||||
|     def connection_made(self, transport): | ||||
|         """ | ||||
|         HTTP-protocol-specific new connection handler | ||||
|         """ | ||||
|         try: | ||||
|             # TODO: Benchmark to find suitable write buffer limits | ||||
|             transport.set_write_buffer_limits(low=16384, high=65536) | ||||
|             self.connections.add(self) | ||||
|             self.transport = transport | ||||
|             self._task = self.loop.create_task(self.connection_task()) | ||||
|             self.recv_buffer = bytearray() | ||||
|             self.conn_info = ConnInfo(self.transport, unix=self._unix) | ||||
|         except Exception: | ||||
|             error_logger.exception("protocol.connection_made") | ||||
|  | ||||
|     def data_received(self, data: bytes): | ||||
|  | ||||
|         try: | ||||
|             self._time = current_time() | ||||
|             if not data: | ||||
|                 return self.close() | ||||
|             self.recv_buffer += data | ||||
|  | ||||
|             if ( | ||||
|                 len(self.recv_buffer) >= self.app.config.REQUEST_BUFFER_SIZE | ||||
|                 and self.transport | ||||
|             ): | ||||
|                 self.transport.pause_reading() | ||||
|  | ||||
|             if self._data_received: | ||||
|                 self._data_received.set() | ||||
|         except Exception: | ||||
|             error_logger.exception("protocol.data_received") | ||||
sanic/server/protocols/websocket_protocol.py (new file, 164 lines)
| @@ -0,0 +1,164 @@ | ||||
| from typing import TYPE_CHECKING, Optional, Sequence | ||||
|  | ||||
| from websockets.connection import CLOSED, CLOSING, OPEN | ||||
| from websockets.server import ServerConnection | ||||
|  | ||||
| from sanic.exceptions import ServerError | ||||
| from sanic.log import error_logger | ||||
| from sanic.server import HttpProtocol | ||||
|  | ||||
| from ..websockets.impl import WebsocketImplProtocol | ||||
|  | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from websockets import http11 | ||||
|  | ||||
|  | ||||
| class WebSocketProtocol(HttpProtocol): | ||||
|  | ||||
|     websocket: Optional[WebsocketImplProtocol] | ||||
|     websocket_timeout: float | ||||
|     websocket_max_size: Optional[int] | ||||
|     websocket_ping_interval: Optional[float] | ||||
|     websocket_ping_timeout: Optional[float] | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         *args, | ||||
|         websocket_timeout: float = 10.0, | ||||
|         websocket_max_size: Optional[int] = None, | ||||
|         websocket_max_queue: Optional[int] = None,  # max_queue is deprecated | ||||
|         websocket_read_limit: Optional[int] = None,  # read_limit is deprecated | ||||
|         websocket_write_limit: Optional[int] = None,  # write_limit deprecated | ||||
|         websocket_ping_interval: Optional[float] = 20.0, | ||||
|         websocket_ping_timeout: Optional[float] = 20.0, | ||||
|         **kwargs, | ||||
|     ): | ||||
|         super().__init__(*args, **kwargs) | ||||
|         self.websocket = None | ||||
|         self.websocket_timeout = websocket_timeout | ||||
|         self.websocket_max_size = websocket_max_size | ||||
|         if websocket_max_queue is not None and websocket_max_queue > 0: | ||||
|             # TODO: Reminder remove this warning in v22.3 | ||||
|             error_logger.warning( | ||||
|                 DeprecationWarning( | ||||
|                     "Websocket no longer uses queueing, so websocket_max_queue" | ||||
|                     " is no longer required." | ||||
|                 ) | ||||
|             ) | ||||
|         if websocket_read_limit is not None and websocket_read_limit > 0: | ||||
|             # TODO: Reminder remove this warning in v22.3 | ||||
|             error_logger.warning( | ||||
|                 DeprecationWarning( | ||||
|                     "Websocket no longer uses read buffers, so " | ||||
|                     "websocket_read_limit is not required." | ||||
|                 ) | ||||
|             ) | ||||
|         if websocket_write_limit is not None and websocket_write_limit > 0: | ||||
|             # TODO: Reminder remove this warning in v22.3 | ||||
|             error_logger.warning( | ||||
|                 DeprecationWarning( | ||||
|                     "Websocket no longer uses write buffers, so " | ||||
|                     "websocket_write_limit is not required." | ||||
|                 ) | ||||
|             ) | ||||
|         self.websocket_ping_interval = websocket_ping_interval | ||||
|         self.websocket_ping_timeout = websocket_ping_timeout | ||||
|  | ||||
|     def connection_lost(self, exc): | ||||
|         if self.websocket is not None: | ||||
|             self.websocket.connection_lost(exc) | ||||
|         super().connection_lost(exc) | ||||
|  | ||||
|     def data_received(self, data): | ||||
|         if self.websocket is not None: | ||||
|             self.websocket.data_received(data) | ||||
|         else: | ||||
|             # Pass it to HttpProtocol handler first | ||||
|             # That will (hopefully) upgrade it to a websocket. | ||||
|             super().data_received(data) | ||||
|  | ||||
|     def eof_received(self) -> Optional[bool]: | ||||
|         if self.websocket is not None: | ||||
|             return self.websocket.eof_received() | ||||
|         else: | ||||
|             return False | ||||
|  | ||||
|     def close(self, timeout: Optional[float] = None): | ||||
|         # Called by HttpProtocol at the end of connection_task | ||||
|         # If we've upgraded to websocket, we do our own closing | ||||
|         if self.websocket is not None: | ||||
|             # Note, we don't want to use websocket.close() | ||||
|             # That is used for user's application code to send a | ||||
|             # websocket close packet. This is different. | ||||
|             self.websocket.end_connection(1001) | ||||
|         else: | ||||
|             super().close() | ||||
|  | ||||
|     def close_if_idle(self): | ||||
|         # Called by Sanic Server when shutting down | ||||
|         # If we've upgraded to websocket, shut it down | ||||
|         if self.websocket is not None: | ||||
|             if self.websocket.connection.state in (CLOSING, CLOSED): | ||||
|                 return True | ||||
|             elif self.websocket.loop is not None: | ||||
|                 self.websocket.loop.create_task(self.websocket.close(1001)) | ||||
|             else: | ||||
|                 self.websocket.end_connection(1001) | ||||
|         else: | ||||
|             return super().close_if_idle() | ||||
|  | ||||
|     async def websocket_handshake( | ||||
|         self, request, subprotocols: Optional[Sequence[str]] = None | ||||
|     ): | ||||
|         # let the websockets package do the handshake with the client | ||||
|         try: | ||||
|             if subprotocols is not None: | ||||
|                 # subprotocols can be a set or frozenset, | ||||
|                 # but ServerConnection needs a list | ||||
|                 subprotocols = list(subprotocols) | ||||
|             ws_conn = ServerConnection( | ||||
|                 max_size=self.websocket_max_size, | ||||
|                 subprotocols=subprotocols, | ||||
|                 state=OPEN, | ||||
|                 logger=error_logger, | ||||
|             ) | ||||
|             resp: "http11.Response" = ws_conn.accept(request) | ||||
|         except Exception: | ||||
|             msg = ( | ||||
|                 "Failed to open a WebSocket connection.\n" | ||||
|                 "See server log for more information.\n" | ||||
|             ) | ||||
|             raise ServerError(msg, status_code=500) | ||||
|         if 100 <= resp.status_code <= 299: | ||||
|             rbody = "".join( | ||||
|                 [ | ||||
|                     "HTTP/1.1 ", | ||||
|                     str(resp.status_code), | ||||
|                     " ", | ||||
|                     resp.reason_phrase, | ||||
|                     "\r\n", | ||||
|                 ] | ||||
|             ) | ||||
|             rbody += "".join(f"{k}: {v}\r\n" for k, v in resp.headers.items()) | ||||
|             if resp.body is not None: | ||||
|                 rbody += f"\r\n{resp.body}\r\n\r\n" | ||||
|             else: | ||||
|                 rbody += "\r\n" | ||||
|             await super().send(rbody.encode()) | ||||
|         else: | ||||
|             raise ServerError(resp.body, resp.status_code) | ||||
|         self.websocket = WebsocketImplProtocol( | ||||
|             ws_conn, | ||||
|             ping_interval=self.websocket_ping_interval, | ||||
|             ping_timeout=self.websocket_ping_timeout, | ||||
|             close_timeout=self.websocket_timeout, | ||||
|         ) | ||||
|         loop = ( | ||||
|             request.transport.loop | ||||
|             if hasattr(request, "transport") | ||||
|             and hasattr(request.transport, "loop") | ||||
|             else None | ||||
|         ) | ||||
|         await self.websocket.connection_made(self, loop=loop) | ||||
|         return self.websocket | ||||
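For clarity, the handshake reply assembled in `websocket_handshake` is a plain HTTP/1.1 response. A standalone sketch of the same formatting (the helper below is hypothetical, not part of Sanic):

    def format_handshake_response(status, reason, headers, body=None) -> bytes:
        """Serialize a response the same way websocket_handshake does."""
        text = f"HTTP/1.1 {status} {reason}\r\n"
        text += "".join(f"{k}: {v}\r\n" for k, v in headers.items())
        text += f"\r\n{body}\r\n\r\n" if body is not None else "\r\n"
        return text.encode()

    # e.g. format_handshake_response(
    #     101, "Switching Protocols",
    #     {"Upgrade": "websocket", "Connection": "Upgrade"},
    # )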
sanic/server/runners.py (new file, 280 lines)
| @@ -0,0 +1,280 @@ | ||||
| from __future__ import annotations | ||||
|  | ||||
| from ssl import SSLContext | ||||
| from typing import TYPE_CHECKING, Dict, Optional, Type, Union | ||||
|  | ||||
| from sanic.config import Config | ||||
| from sanic.server.events import trigger_events | ||||
|  | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from sanic.app import Sanic | ||||
|  | ||||
| import asyncio | ||||
| import multiprocessing | ||||
| import os | ||||
| import socket | ||||
|  | ||||
| from functools import partial | ||||
| from signal import SIG_IGN, SIGINT, SIGTERM, Signals | ||||
| from signal import signal as signal_func | ||||
|  | ||||
| from sanic.compat import OS_IS_WINDOWS, ctrlc_workaround_for_windows | ||||
| from sanic.log import error_logger, logger | ||||
| from sanic.models.server_types import Signal | ||||
| from sanic.server.async_server import AsyncioServer | ||||
| from sanic.server.protocols.http_protocol import HttpProtocol | ||||
| from sanic.server.socket import ( | ||||
|     bind_socket, | ||||
|     bind_unix_socket, | ||||
|     remove_unix_socket, | ||||
| ) | ||||
|  | ||||
|  | ||||
| def serve( | ||||
|     host, | ||||
|     port, | ||||
|     app: Sanic, | ||||
|     ssl: Optional[SSLContext] = None, | ||||
|     sock: Optional[socket.socket] = None, | ||||
|     unix: Optional[str] = None, | ||||
|     reuse_port: bool = False, | ||||
|     loop=None, | ||||
|     protocol: Type[asyncio.Protocol] = HttpProtocol, | ||||
|     backlog: int = 100, | ||||
|     register_sys_signals: bool = True, | ||||
|     run_multiple: bool = False, | ||||
|     run_async: bool = False, | ||||
|     connections=None, | ||||
|     signal=Signal(), | ||||
|     state=None, | ||||
|     asyncio_server_kwargs=None, | ||||
| ): | ||||
|     """Start asynchronous HTTP Server on an individual process. | ||||
|  | ||||
|     :param host: Address to host on | ||||
|     :param port: Port to host on | ||||
|     :param ssl: SSLContext | ||||
|     :param sock: Socket for the server to accept connections from | ||||
|     :param unix: Unix socket to listen on instead of TCP port | ||||
|     :param reuse_port: `True` for multiple workers | ||||
|     :param loop: asyncio compatible event loop | ||||
|     :param run_async: bool: Do not create a new event loop for the server, | ||||
|                       and return an AsyncServer object rather than running it | ||||
|     :param asyncio_server_kwargs: key-value args for asyncio/uvloop | ||||
|                                   create_server method | ||||
|     :return: Nothing | ||||
|     """ | ||||
|     if not run_async and not loop: | ||||
|         # create new event_loop after fork | ||||
|         loop = asyncio.new_event_loop() | ||||
|         asyncio.set_event_loop(loop) | ||||
|  | ||||
|     if app.debug: | ||||
|         loop.set_debug(app.debug) | ||||
|  | ||||
|     app.asgi = False | ||||
|  | ||||
|     connections = connections if connections is not None else set() | ||||
|     protocol_kwargs = _build_protocol_kwargs(protocol, app.config) | ||||
|     server = partial( | ||||
|         protocol, | ||||
|         loop=loop, | ||||
|         connections=connections, | ||||
|         signal=signal, | ||||
|         app=app, | ||||
|         state=state, | ||||
|         unix=unix, | ||||
|         **protocol_kwargs, | ||||
|     ) | ||||
|     asyncio_server_kwargs = ( | ||||
|         asyncio_server_kwargs if asyncio_server_kwargs else {} | ||||
|     ) | ||||
|     # UNIX sockets are always bound by us (to preserve semantics between modes) | ||||
|     if unix: | ||||
|         sock = bind_unix_socket(unix, backlog=backlog) | ||||
|     server_coroutine = loop.create_server( | ||||
|         server, | ||||
|         None if sock else host, | ||||
|         None if sock else port, | ||||
|         ssl=ssl, | ||||
|         reuse_port=reuse_port, | ||||
|         sock=sock, | ||||
|         backlog=backlog, | ||||
|         **asyncio_server_kwargs, | ||||
|     ) | ||||
|  | ||||
|     if run_async: | ||||
|         return AsyncioServer( | ||||
|             app=app, | ||||
|             loop=loop, | ||||
|             serve_coro=server_coroutine, | ||||
|             connections=connections, | ||||
|         ) | ||||
|  | ||||
|     loop.run_until_complete(app._startup()) | ||||
|     loop.run_until_complete(app._server_event("init", "before")) | ||||
|  | ||||
|     try: | ||||
|         http_server = loop.run_until_complete(server_coroutine) | ||||
|     except BaseException: | ||||
|         error_logger.exception("Unable to start server") | ||||
|         return | ||||
|  | ||||
|     # Ignore SIGINT when run_multiple | ||||
|     if run_multiple: | ||||
|         signal_func(SIGINT, SIG_IGN) | ||||
|  | ||||
|     # Register signals for graceful termination | ||||
|     if register_sys_signals: | ||||
|         if OS_IS_WINDOWS: | ||||
|             ctrlc_workaround_for_windows(app) | ||||
|         else: | ||||
|             for _signal in [SIGTERM] if run_multiple else [SIGINT, SIGTERM]: | ||||
|                 loop.add_signal_handler(_signal, app.stop) | ||||
|  | ||||
|     loop.run_until_complete(app._server_event("init", "after")) | ||||
|     pid = os.getpid() | ||||
|     try: | ||||
|         logger.info("Starting worker [%s]", pid) | ||||
|         loop.run_forever() | ||||
|     finally: | ||||
|         logger.info("Stopping worker [%s]", pid) | ||||
|  | ||||
|         # Run the on_stop function if provided | ||||
|         loop.run_until_complete(app._server_event("shutdown", "before")) | ||||
|  | ||||
|         # Wait for event loop to finish and all connections to drain | ||||
|         http_server.close() | ||||
|         loop.run_until_complete(http_server.wait_closed()) | ||||
|  | ||||
|         # Complete all tasks on the loop | ||||
|         signal.stopped = True | ||||
|         for connection in connections: | ||||
|             connection.close_if_idle() | ||||
|  | ||||
|         # Graceful shutdown timeout: honor GRACEFUL_SHUTDOWN_TIMEOUT | ||||
|         # instead of letting connections hang forever, and roughly | ||||
|         # track the elapsed time. | ||||
|         graceful = app.config.GRACEFUL_SHUTDOWN_TIMEOUT | ||||
|         start_shutdown: float = 0 | ||||
|         while connections and (start_shutdown < graceful): | ||||
|             loop.run_until_complete(asyncio.sleep(0.1)) | ||||
|             start_shutdown = start_shutdown + 0.1 | ||||
|  | ||||
|         # Force close non-idle connection after waiting for | ||||
|         # graceful_shutdown_timeout | ||||
|         for conn in connections: | ||||
|             if hasattr(conn, "websocket") and conn.websocket: | ||||
|                 conn.websocket.fail_connection(code=1001) | ||||
|             else: | ||||
|                 conn.abort() | ||||
|         loop.run_until_complete(app._server_event("shutdown", "after")) | ||||
|  | ||||
|         remove_unix_socket(unix) | ||||
|  | ||||
|  | ||||
| def serve_single(server_settings): | ||||
|     main_start = server_settings.pop("main_start", None) | ||||
|     main_stop = server_settings.pop("main_stop", None) | ||||
|  | ||||
|     if not server_settings.get("run_async"): | ||||
|         # create new event_loop after fork | ||||
|         loop = asyncio.new_event_loop() | ||||
|         asyncio.set_event_loop(loop) | ||||
|         server_settings["loop"] = loop | ||||
|  | ||||
|     trigger_events(main_start, server_settings["loop"]) | ||||
|     serve(**server_settings) | ||||
|     trigger_events(main_stop, server_settings["loop"]) | ||||
|  | ||||
|     server_settings["loop"].close() | ||||
|  | ||||
|  | ||||
| def serve_multiple(server_settings, workers): | ||||
|     """Start multiple server processes simultaneously.  Stop on interrupt | ||||
|     and terminate signals, and drain connections when complete. | ||||
|  | ||||
|     :param server_settings: kw arguments to be passed to the serve function | ||||
|     :param workers: number of workers to launch | ||||
|     :return: | ||||
|     """ | ||||
|     server_settings["reuse_port"] = True | ||||
|     server_settings["run_multiple"] = True | ||||
|  | ||||
|     main_start = server_settings.pop("main_start", None) | ||||
|     main_stop = server_settings.pop("main_stop", None) | ||||
|     loop = asyncio.new_event_loop() | ||||
|     asyncio.set_event_loop(loop) | ||||
|  | ||||
|     trigger_events(main_start, loop) | ||||
|  | ||||
|     # Create a listening socket or use the one in settings | ||||
|     sock = server_settings.get("sock") | ||||
|     unix = server_settings["unix"] | ||||
|     backlog = server_settings["backlog"] | ||||
|     if unix: | ||||
|         sock = bind_unix_socket(unix, backlog=backlog) | ||||
|         server_settings["unix"] = unix | ||||
|     if sock is None: | ||||
|         sock = bind_socket( | ||||
|             server_settings["host"], server_settings["port"], backlog=backlog | ||||
|         ) | ||||
|         sock.set_inheritable(True) | ||||
|         server_settings["sock"] = sock | ||||
|         server_settings["host"] = None | ||||
|         server_settings["port"] = None | ||||
|  | ||||
|     processes = [] | ||||
|  | ||||
|     def sig_handler(signal, frame): | ||||
|         logger.info("Received signal %s. Shutting down.", Signals(signal).name) | ||||
|         for process in processes: | ||||
|             os.kill(process.pid, SIGTERM) | ||||
|  | ||||
|     signal_func(SIGINT, lambda s, f: sig_handler(s, f)) | ||||
|     signal_func(SIGTERM, lambda s, f: sig_handler(s, f)) | ||||
|     mp = multiprocessing.get_context("fork") | ||||
|  | ||||
|     for _ in range(workers): | ||||
|         process = mp.Process(target=serve, kwargs=server_settings) | ||||
|         process.daemon = True | ||||
|         process.start() | ||||
|         processes.append(process) | ||||
|  | ||||
|     for process in processes: | ||||
|         process.join() | ||||
|  | ||||
|     # the above processes will block this until they're stopped | ||||
|     for process in processes: | ||||
|         process.terminate() | ||||
|  | ||||
|     trigger_events(main_stop, loop) | ||||
|  | ||||
|     sock.close() | ||||
|     loop.close() | ||||
|     remove_unix_socket(unix) | ||||
|  | ||||
|  | ||||
| def _build_protocol_kwargs( | ||||
|     protocol: Type[asyncio.Protocol], config: Config | ||||
| ) -> Dict[str, Union[int, float]]: | ||||
|     if hasattr(protocol, "websocket_handshake"): | ||||
|         return { | ||||
|             "websocket_max_size": config.WEBSOCKET_MAX_SIZE, | ||||
|             "websocket_ping_timeout": config.WEBSOCKET_PING_TIMEOUT, | ||||
|             "websocket_ping_interval": config.WEBSOCKET_PING_INTERVAL, | ||||
|         } | ||||
|     return {} | ||||
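`_build_protocol_kwargs` keys off nothing more than a `websocket_handshake` attribute, so any protocol exposing one receives the websocket config values. A quick illustrative check, assuming `Config()` can be built standalone with Sanic's defaults:

    from sanic.config import Config
    from sanic.server.protocols.http_protocol import HttpProtocol
    from sanic.server.protocols.websocket_protocol import WebSocketProtocol
    from sanic.server.runners import _build_protocol_kwargs

    config = Config()
    assert _build_protocol_kwargs(HttpProtocol, config) == {}
    # WebSocketProtocol defines websocket_handshake, so it receives the
    # websocket_max_size / ping_timeout / ping_interval settings.
    print(_build_protocol_kwargs(WebSocketProtocol, config))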
sanic/server/socket.py (new file, 87 lines)
| @@ -0,0 +1,87 @@ | ||||
| from __future__ import annotations | ||||
|  | ||||
| import os | ||||
| import secrets | ||||
| import socket | ||||
| import stat | ||||
|  | ||||
| from ipaddress import ip_address | ||||
| from typing import Optional | ||||
|  | ||||
|  | ||||
| def bind_socket(host: str, port: int, *, backlog=100) -> socket.socket: | ||||
|     """Create TCP server socket. | ||||
|     :param host: IPv4, IPv6 or hostname may be specified | ||||
|     :param port: TCP port number | ||||
|     :param backlog: Maximum number of connections to queue | ||||
|     :return: socket.socket object | ||||
|     """ | ||||
|     try:  # IP address: family must be specified for IPv6 at least | ||||
|         ip = ip_address(host) | ||||
|         host = str(ip) | ||||
|         sock = socket.socket( | ||||
|             socket.AF_INET6 if ip.version == 6 else socket.AF_INET | ||||
|         ) | ||||
|     except ValueError:  # Hostname, may become AF_INET or AF_INET6 | ||||
|         sock = socket.socket() | ||||
|     sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) | ||||
|     sock.bind((host, port)) | ||||
|     sock.listen(backlog) | ||||
|     return sock | ||||
|  | ||||
|  | ||||
| def bind_unix_socket(path: str, *, mode=0o666, backlog=100) -> socket.socket: | ||||
|     """Create unix socket. | ||||
|     :param path: filesystem path | ||||
|     :param backlog: Maximum number of connections to queue | ||||
|     :return: socket.socket object | ||||
|     """ | ||||
|     """Open or atomically replace existing socket with zero downtime.""" | ||||
|     # Sanitise and pre-verify socket path | ||||
|     path = os.path.abspath(path) | ||||
|     folder = os.path.dirname(path) | ||||
|     if not os.path.isdir(folder): | ||||
|         raise FileNotFoundError(f"Socket folder does not exist: {folder}") | ||||
|     try: | ||||
|         if not stat.S_ISSOCK(os.stat(path, follow_symlinks=False).st_mode): | ||||
|             raise FileExistsError(f"Existing file is not a socket: {path}") | ||||
|     except FileNotFoundError: | ||||
|         pass | ||||
|     # Create new socket with a random temporary name | ||||
|     tmp_path = f"{path}.{secrets.token_urlsafe()}" | ||||
|     sock = socket.socket(socket.AF_UNIX) | ||||
|     try: | ||||
|         # Critical section begins (filename races) | ||||
|         sock.bind(tmp_path) | ||||
|         try: | ||||
|             os.chmod(tmp_path, mode) | ||||
|             # Start listening before rename to avoid connection failures | ||||
|             sock.listen(backlog) | ||||
|             os.rename(tmp_path, path) | ||||
|         except:  # noqa: E722 | ||||
|             try: | ||||
|                 os.unlink(tmp_path) | ||||
|             finally: | ||||
|                 raise | ||||
|     except:  # noqa: E722 | ||||
|         try: | ||||
|             sock.close() | ||||
|         finally: | ||||
|             raise | ||||
|     return sock | ||||
|  | ||||
|  | ||||
| def remove_unix_socket(path: Optional[str]) -> None: | ||||
|     """Remove dead unix socket during server exit.""" | ||||
|     if not path: | ||||
|         return | ||||
|     try: | ||||
|         if stat.S_ISSOCK(os.stat(path, follow_symlinks=False).st_mode): | ||||
|             # Is it actually dead (doesn't belong to a new server instance)? | ||||
|             with socket.socket(socket.AF_UNIX) as testsock: | ||||
|                 try: | ||||
|                     testsock.connect(path) | ||||
|                 except ConnectionRefusedError: | ||||
|                     os.unlink(path) | ||||
|     except FileNotFoundError: | ||||
|         pass | ||||
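A short usage sketch of the socket helpers (host, port, and path below are illustrative): the returned sockets are already bound and listening, so they can be handed to `serve(sock=...)` or `loop.create_server(sock=...)`:

    from sanic.server.socket import (
        bind_socket,
        bind_unix_socket,
        remove_unix_socket,
    )

    UNIX_PATH = "/tmp/sanic-example.sock"  # hypothetical path

    tcp_sock = bind_socket("127.0.0.1", 8000, backlog=100)
    unix_sock = bind_unix_socket(UNIX_PATH)
    try:
        pass  # hand the sockets to serve(sock=...) / loop.create_server(...)
    finally:
        tcp_sock.close()
        unix_sock.close()
        remove_unix_socket(UNIX_PATH)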
sanic/server/websockets/__init__.py (new file, 0 lines)

sanic/server/websockets/connection.py (new file, 82 lines)
| @@ -0,0 +1,82 @@ | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Awaitable, | ||||
|     Callable, | ||||
|     Dict, | ||||
|     List, | ||||
|     MutableMapping, | ||||
|     Optional, | ||||
|     Union, | ||||
| ) | ||||
|  | ||||
|  | ||||
| ASIMessage = MutableMapping[str, Any] | ||||
|  | ||||
|  | ||||
| class WebSocketConnection: | ||||
|     """ | ||||
|     This is for ASGI Connections. | ||||
|     It provides an interface similar to WebsocketProtocol, but | ||||
|     sends/receives over an ASGI connection. | ||||
|     """ | ||||
|  | ||||
|     # TODO | ||||
|     # - Implement ping/pong | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         send: Callable[[ASIMessage], Awaitable[None]], | ||||
|         receive: Callable[[], Awaitable[ASIMessage]], | ||||
|         subprotocols: Optional[List[str]] = None, | ||||
|     ) -> None: | ||||
|         self._send = send | ||||
|         self._receive = receive | ||||
|         self._subprotocols = subprotocols or [] | ||||
|  | ||||
|     async def send(self, data: Union[str, bytes], *args, **kwargs) -> None: | ||||
|         message: Dict[str, Union[str, bytes]] = {"type": "websocket.send"} | ||||
|  | ||||
|         if isinstance(data, bytes): | ||||
|             message.update({"bytes": data}) | ||||
|         else: | ||||
|             message.update({"text": str(data)}) | ||||
|  | ||||
|         await self._send(message) | ||||
|  | ||||
|     async def recv(self, *args, **kwargs) -> Optional[str]: | ||||
|         message = await self._receive() | ||||
|  | ||||
|         if message["type"] == "websocket.receive": | ||||
|             return message["text"] | ||||
|         elif message["type"] == "websocket.disconnect": | ||||
|             pass | ||||
|  | ||||
|         return None | ||||
|  | ||||
|     receive = recv | ||||
|  | ||||
|     async def accept(self, subprotocols: Optional[List[str]] = None) -> None: | ||||
|         subprotocol = None | ||||
|         if subprotocols: | ||||
|             for subp in subprotocols: | ||||
|                 if subp in self.subprotocols: | ||||
|                     subprotocol = subp | ||||
|                     break | ||||
|  | ||||
|         await self._send( | ||||
|             { | ||||
|                 "type": "websocket.accept", | ||||
|                 "subprotocol": subprotocol, | ||||
|             } | ||||
|         ) | ||||
|  | ||||
|     async def close(self, code: int = 1000, reason: str = "") -> None: | ||||
|         pass | ||||
|  | ||||
|     @property | ||||
|     def subprotocols(self): | ||||
|         return self._subprotocols | ||||
|  | ||||
|     @subprotocols.setter | ||||
|     def subprotocols(self, subprotocols: Optional[List[str]] = None): | ||||
|         self._subprotocols = subprotocols or [] | ||||
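A minimal sketch of exercising `WebSocketConnection` outside a real ASGI server, using stub `send`/`receive` callables (everything here is illustrative):

    import asyncio

    from sanic.server.websockets.connection import WebSocketConnection

    async def main():
        sent = []

        async def fake_send(message):  # collects outgoing ASGI messages
            sent.append(message)

        async def fake_receive():  # returns one incoming text frame
            return {"type": "websocket.receive", "text": "hello"}

        ws = WebSocketConnection(fake_send, fake_receive, subprotocols=["json"])
        await ws.accept(subprotocols=["json"])  # negotiates "json"
        assert await ws.recv() == "hello"
        await ws.send(b"binary payload")
        print(sent)  # the accept message followed by the websocket.send message

    asyncio.run(main())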
sanic/server/websockets/frame.py (new file, 294 lines)
| @@ -0,0 +1,294 @@ | ||||
| import asyncio | ||||
| import codecs | ||||
|  | ||||
| from typing import TYPE_CHECKING, AsyncIterator, List, Optional | ||||
|  | ||||
| from websockets.frames import Frame, Opcode | ||||
| from websockets.typing import Data | ||||
|  | ||||
| from sanic.exceptions import ServerError | ||||
|  | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .impl import WebsocketImplProtocol | ||||
|  | ||||
| UTF8Decoder = codecs.getincrementaldecoder("utf-8") | ||||
|  | ||||
|  | ||||
| class WebsocketFrameAssembler: | ||||
|     """ | ||||
|     Assemble a message from frames. | ||||
|     Code borrowed from aaugustin/websockets project: | ||||
|     https://github.com/aaugustin/websockets/blob/6eb98dd8fa5b2c896b9f6be7e8d117708da82a39/src/websockets/sync/messages.py | ||||
|     """ | ||||
|  | ||||
|     __slots__ = ( | ||||
|         "protocol", | ||||
|         "read_mutex", | ||||
|         "write_mutex", | ||||
|         "message_complete", | ||||
|         "message_fetched", | ||||
|         "get_in_progress", | ||||
|         "decoder", | ||||
|         "completed_queue", | ||||
|         "chunks", | ||||
|         "chunks_queue", | ||||
|         "paused", | ||||
|         "get_id", | ||||
|         "put_id", | ||||
|     ) | ||||
|     if TYPE_CHECKING: | ||||
|         protocol: "WebsocketImplProtocol" | ||||
|         read_mutex: asyncio.Lock | ||||
|         write_mutex: asyncio.Lock | ||||
|         message_complete: asyncio.Event | ||||
|         message_fetched: asyncio.Event | ||||
|         completed_queue: asyncio.Queue | ||||
|         get_in_progress: bool | ||||
|         decoder: Optional[codecs.IncrementalDecoder] | ||||
|         # For streaming chunks rather than messages: | ||||
|         chunks: List[Data] | ||||
|         chunks_queue: Optional[asyncio.Queue[Optional[Data]]] | ||||
|         paused: bool | ||||
|  | ||||
|     def __init__(self, protocol) -> None: | ||||
|  | ||||
|         self.protocol = protocol | ||||
|  | ||||
|         self.read_mutex = asyncio.Lock() | ||||
|         self.write_mutex = asyncio.Lock() | ||||
|  | ||||
|         self.completed_queue = asyncio.Queue( | ||||
|             maxsize=1 | ||||
|         )  # type: asyncio.Queue[Data] | ||||
|  | ||||
|         # put() sets this event to tell get() that a message can be fetched. | ||||
|         self.message_complete = asyncio.Event() | ||||
|         # get() sets this event to let put() | ||||
|         self.message_fetched = asyncio.Event() | ||||
|  | ||||
|         # This flag prevents concurrent calls to get() by user code. | ||||
|         self.get_in_progress = False | ||||
|  | ||||
|         # Decoder for text frames, None for binary frames. | ||||
|         self.decoder = None | ||||
|  | ||||
|         # Buffer data from frames belonging to the same message. | ||||
|         self.chunks = [] | ||||
|  | ||||
|         # When switching from "buffering" to "streaming", we use an asyncio | ||||
|         # queue for transferring frames from the writing task (library code) | ||||
|         # to the reading task (user code). We're buffering when chunks_queue | ||||
|         # is None and streaming when it's a Queue. None is a sentinel | ||||
|         # value marking the end of the stream, superseding message_complete. | ||||
|  | ||||
|         # Stream data from frames belonging to the same message. | ||||
|         self.chunks_queue = None | ||||
|  | ||||
|         # Flag to indicate we've paused the protocol | ||||
|         self.paused = False | ||||
|  | ||||
|     async def get(self, timeout: Optional[float] = None) -> Optional[Data]: | ||||
|         """ | ||||
|         Read the next message. | ||||
|         :meth:`get` returns a single :class:`str` or :class:`bytes`. | ||||
|         If the message was fragmented, :meth:`get` waits until the last frame | ||||
|         is received, then it reassembles the message. | ||||
|         If ``timeout`` is set and elapses before a complete message is | ||||
|         received, :meth:`get` returns ``None``. | ||||
|         """ | ||||
|         async with self.read_mutex: | ||||
|             if timeout is not None and timeout <= 0: | ||||
|                 if not self.message_complete.is_set(): | ||||
|                     return None | ||||
|             if self.get_in_progress: | ||||
|                 # This should be guarded against with the read_mutex, | ||||
|                 # exception is only here as a failsafe | ||||
|                 raise ServerError( | ||||
|                     "Called get() on Websocket frame assembler " | ||||
|                     "while asynchronous get is already in progress." | ||||
|                 ) | ||||
|             self.get_in_progress = True | ||||
|  | ||||
|             # If the message_complete event isn't set yet, release the lock to | ||||
|             # allow put() to run and eventually set it. | ||||
|             # Locking with get_in_progress ensures only one task can get here. | ||||
|             if timeout is None: | ||||
|                 completed = await self.message_complete.wait() | ||||
|             elif timeout <= 0: | ||||
|                 completed = self.message_complete.is_set() | ||||
|             else: | ||||
|                 try: | ||||
|                     await asyncio.wait_for( | ||||
|                         self.message_complete.wait(), timeout=timeout | ||||
|                     ) | ||||
|                 except asyncio.TimeoutError: | ||||
|                     ... | ||||
|                 finally: | ||||
|                     completed = self.message_complete.is_set() | ||||
|  | ||||
|             # Unpause the transport, if it is paused | ||||
|             if self.paused: | ||||
|                 self.protocol.resume_frames() | ||||
|                 self.paused = False | ||||
|             if not self.get_in_progress: | ||||
|                 # This should be guarded against with the read_mutex, | ||||
|                 # exception is here as a failsafe | ||||
|                 raise ServerError( | ||||
|                     "State of Websocket frame assembler was modified while an " | ||||
|                     "asynchronous get was in progress." | ||||
|                 ) | ||||
|             self.get_in_progress = False | ||||
|  | ||||
|             # Waiting for a complete message timed out. | ||||
|             if not completed: | ||||
|                 return None | ||||
|             if not self.message_complete.is_set(): | ||||
|                 return None | ||||
|  | ||||
|             self.message_complete.clear() | ||||
|  | ||||
|             joiner: Data = b"" if self.decoder is None else "" | ||||
|             # mypy cannot figure out that chunks have the proper type. | ||||
|             message: Data = joiner.join(self.chunks)  # type: ignore | ||||
|             if self.message_fetched.is_set(): | ||||
|                 # This should be guarded against with the read_mutex, | ||||
|                 # and get_in_progress check, this exception is here | ||||
|                 # as a failsafe | ||||
|                 raise ServerError( | ||||
|                     "Websocket get() found a message when " | ||||
|                     "state was already fetched." | ||||
|                 ) | ||||
|             self.message_fetched.set() | ||||
|             self.chunks = [] | ||||
|             # this should already be None, but set it here for safety | ||||
|             self.chunks_queue = None | ||||
|             return message | ||||
|  | ||||
|     async def get_iter(self) -> AsyncIterator[Data]: | ||||
|         """ | ||||
|         Stream the next message. | ||||
|         Iterating the return value of :meth:`get_iter` yields a :class:`str` | ||||
|         or :class:`bytes` for each frame in the message. | ||||
|         """ | ||||
|         async with self.read_mutex: | ||||
|             if self.get_in_progress: | ||||
|                 # This should be guarded against with the read_mutex, | ||||
|                 # exception is only here as a failsafe | ||||
|                 raise ServerError( | ||||
|                     "Called get_iter on Websocket frame assembler " | ||||
|                     "while asynchronous get is already in progress." | ||||
|                 ) | ||||
|             self.get_in_progress = True | ||||
|  | ||||
|             chunks = self.chunks | ||||
|             self.chunks = [] | ||||
|             self.chunks_queue = asyncio.Queue() | ||||
|  | ||||
|             # Sending None in chunk_queue supersedes setting message_complete | ||||
|             # when switching to "streaming". If message is already complete | ||||
|             # when the switch happens, put() didn't send None, so we have to. | ||||
|             if self.message_complete.is_set(): | ||||
|                 await self.chunks_queue.put(None) | ||||
|  | ||||
|             # Locking with get_in_progress ensures only one task can get here | ||||
|             for c in chunks: | ||||
|                 yield c | ||||
|             while True: | ||||
|                 chunk = await self.chunks_queue.get() | ||||
|                 if chunk is None: | ||||
|                     break | ||||
|                 yield chunk | ||||
|  | ||||
|             # Unpause the transport, if it is paused | ||||
|             if self.paused: | ||||
|                 self.protocol.resume_frames() | ||||
|                 self.paused = False | ||||
|             if not self.get_in_progress: | ||||
|                 # This should be guarded against with the read_mutex, | ||||
|                 # exception is here as a failsafe | ||||
|                 raise ServerError( | ||||
|                     "State of Websocket frame assembler was modified while an " | ||||
|                     "asynchronous get was in progress." | ||||
|                 ) | ||||
|             self.get_in_progress = False | ||||
|             if not self.message_complete.is_set(): | ||||
|                 # This should be guarded against with the read_mutex, | ||||
|                 # exception is here as a failsafe | ||||
|                 raise ServerError( | ||||
|                     "Websocket frame assembler chunks queue ended before " | ||||
|                     "message was complete." | ||||
|                 ) | ||||
|             self.message_complete.clear() | ||||
|             if self.message_fetched.is_set(): | ||||
|                 # This should be guarded against with the read_mutex, | ||||
|                 # and get_in_progress check, this exception is | ||||
|                 # here as a failsafe | ||||
|                 raise ServerError( | ||||
|                     "Websocket get_iter() found a message when state was " | ||||
|                     "already fetched." | ||||
|                 ) | ||||
|  | ||||
|             self.message_fetched.set() | ||||
|             # this should already be empty, but set it here for safety | ||||
|             self.chunks = [] | ||||
|             self.chunks_queue = None | ||||
|  | ||||
|     async def put(self, frame: Frame) -> None: | ||||
|         """ | ||||
|         Add ``frame`` to the next message. | ||||
|         When ``frame`` is the final frame in a message, :meth:`put` waits | ||||
|         until the message is fetched, either by calling :meth:`get` or by | ||||
|         iterating the return value of :meth:`get_iter`. | ||||
|         :meth:`put` assumes that the stream of frames respects the protocol. | ||||
|         If it doesn't, the behavior is undefined. | ||||
|         """ | ||||
|  | ||||
|         async with self.write_mutex: | ||||
|             if frame.opcode is Opcode.TEXT: | ||||
|                 self.decoder = UTF8Decoder(errors="strict") | ||||
|             elif frame.opcode is Opcode.BINARY: | ||||
|                 self.decoder = None | ||||
|             elif frame.opcode is Opcode.CONT: | ||||
|                 pass | ||||
|             else: | ||||
|                 # Ignore control frames. | ||||
|                 return | ||||
|             data: Data | ||||
|             if self.decoder is not None: | ||||
|                 data = self.decoder.decode(frame.data, frame.fin) | ||||
|             else: | ||||
|                 data = frame.data | ||||
|             if self.chunks_queue is None: | ||||
|                 self.chunks.append(data) | ||||
|             else: | ||||
|                 await self.chunks_queue.put(data) | ||||
|  | ||||
|             if not frame.fin: | ||||
|                 return | ||||
|             if not self.get_in_progress: | ||||
|                 # nobody is waiting for this frame, so try to pause subsequent | ||||
|                 # frames at the protocol level | ||||
|                 self.paused = self.protocol.pause_frames() | ||||
|             # Message is complete. Wait until it's fetched to return. | ||||
|  | ||||
|             if self.chunks_queue is not None: | ||||
|                 await self.chunks_queue.put(None) | ||||
|             if self.message_complete.is_set(): | ||||
|                 # This should be guarded against with the write_mutex | ||||
|                 raise ServerError( | ||||
|                     "Websocket put() got a new message when a message was " | ||||
|                     "already in its chamber." | ||||
|                 ) | ||||
|             # Signal to get() that it can serve the message | ||||
|             self.message_complete.set() | ||||
|             if self.message_fetched.is_set(): | ||||
|                 # This should be guarded against with the write_mutex | ||||
|                 raise ServerError( | ||||
|                     "Websocket put() got a new message when the previous " | ||||
|                     "message was not yet fetched." | ||||
|                 ) | ||||
|  | ||||
|             # Allow get() to run and eventually set the event. | ||||
|             await self.message_fetched.wait() | ||||
|             self.message_fetched.clear() | ||||
|             self.decoder = None | ||||
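To make the handshake between ``put()`` and ``get()`` above easier to follow, here is a minimal, self-contained sketch of the same two-event pattern. ``TinyAssembler`` and its byte payloads are hypothetical illustrations, not the Sanic class; the real assembler adds the mutexes, timeouts, and streaming queue shown in this diff.

```python
import asyncio


class TinyAssembler:
    """Sketch of the two-event handshake used by the frame assembler:
    put() signals message_complete and then waits for message_fetched;
    get() waits for message_complete and then signals message_fetched."""

    def __init__(self):
        self.chunks = []
        self.message_complete = asyncio.Event()
        self.message_fetched = asyncio.Event()

    async def put(self, data: bytes, fin: bool) -> None:
        self.chunks.append(data)
        if not fin:
            return
        self.message_complete.set()        # message is ready for get()
        await self.message_fetched.wait()  # block until the reader took it
        self.message_fetched.clear()

    async def get(self) -> bytes:
        await self.message_complete.wait()
        message = b"".join(self.chunks)
        self.chunks = []
        self.message_complete.clear()
        self.message_fetched.set()         # unblock the writer in put()
        return message


async def main():
    asm = TinyAssembler()
    await asm.put(b"hello ", fin=False)            # not final, returns at once
    writer = asyncio.create_task(asm.put(b"world", fin=True))
    print(await asm.get())                         # b'hello world'
    await writer                                   # put() unblocks once fetched


asyncio.run(main())
```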
							
								
								
									
										834
									
								
								sanic/server/websockets/impl.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										834
									
								
								sanic/server/websockets/impl.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,834 @@ | ||||
| import asyncio | ||||
| import random | ||||
| import struct | ||||
|  | ||||
| from typing import ( | ||||
|     AsyncIterator, | ||||
|     Dict, | ||||
|     Iterable, | ||||
|     Mapping, | ||||
|     Optional, | ||||
|     Sequence, | ||||
|     Union, | ||||
| ) | ||||
|  | ||||
| from websockets.connection import CLOSED, CLOSING, OPEN, Event | ||||
| from websockets.exceptions import ConnectionClosed, ConnectionClosedError | ||||
| from websockets.frames import Frame, Opcode | ||||
| from websockets.server import ServerConnection | ||||
| from websockets.typing import Data | ||||
|  | ||||
| from sanic.log import error_logger, logger | ||||
| from sanic.server.protocols.base_protocol import SanicProtocol | ||||
|  | ||||
| from ...exceptions import ServerError, WebsocketClosed | ||||
| from .frame import WebsocketFrameAssembler | ||||
|  | ||||
|  | ||||
| class WebsocketImplProtocol: | ||||
|     connection: ServerConnection | ||||
|     io_proto: Optional[SanicProtocol] | ||||
|     loop: Optional[asyncio.AbstractEventLoop] | ||||
|     max_queue: int | ||||
|     close_timeout: float | ||||
|     ping_interval: Optional[float] | ||||
|     ping_timeout: Optional[float] | ||||
|     assembler: WebsocketFrameAssembler | ||||
|     # Dict[bytes, asyncio.Future[None]] | ||||
|     pings: Dict[bytes, asyncio.Future] | ||||
|     conn_mutex: asyncio.Lock | ||||
|     recv_lock: asyncio.Lock | ||||
|     recv_cancel: Optional[asyncio.Future] | ||||
|     process_event_mutex: asyncio.Lock | ||||
|     can_pause: bool | ||||
|     # Optional[asyncio.Future[None]] | ||||
|     data_finished_fut: Optional[asyncio.Future] | ||||
|     # Optional[asyncio.Future[None]] | ||||
|     pause_frame_fut: Optional[asyncio.Future] | ||||
|     # Optional[asyncio.Future[None]] | ||||
|     connection_lost_waiter: Optional[asyncio.Future] | ||||
|     keepalive_ping_task: Optional[asyncio.Task] | ||||
|     auto_closer_task: Optional[asyncio.Task] | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         connection, | ||||
|         max_queue=None, | ||||
|         ping_interval: Optional[float] = 20, | ||||
|         ping_timeout: Optional[float] = 20, | ||||
|         close_timeout: float = 10, | ||||
|         loop=None, | ||||
|     ): | ||||
|         self.connection = connection | ||||
|         self.io_proto = None | ||||
|         self.loop = None | ||||
|         self.max_queue = max_queue | ||||
|         self.close_timeout = close_timeout | ||||
|         self.ping_interval = ping_interval | ||||
|         self.ping_timeout = ping_timeout | ||||
|         self.assembler = WebsocketFrameAssembler(self) | ||||
|         self.pings = {} | ||||
|         self.conn_mutex = asyncio.Lock() | ||||
|         self.recv_lock = asyncio.Lock() | ||||
|         self.recv_cancel = None | ||||
|         self.process_event_mutex = asyncio.Lock() | ||||
|         self.data_finished_fut = None | ||||
|         self.can_pause = True | ||||
|         self.pause_frame_fut = None | ||||
|         self.keepalive_ping_task = None | ||||
|         self.auto_closer_task = None | ||||
|         self.connection_lost_waiter = None | ||||
|  | ||||
|     @property | ||||
|     def subprotocol(self): | ||||
|         return self.connection.subprotocol | ||||
|  | ||||
|     def pause_frames(self): | ||||
|         if not self.can_pause: | ||||
|             return False | ||||
|         if self.pause_frame_fut: | ||||
|             logger.debug("Websocket connection already paused.") | ||||
|             return False | ||||
|         if (not self.loop) or (not self.io_proto): | ||||
|             return False | ||||
|         if self.io_proto.transport: | ||||
|             self.io_proto.transport.pause_reading() | ||||
|         self.pause_frame_fut = self.loop.create_future() | ||||
|         logger.debug("Websocket connection paused.") | ||||
|         return True | ||||
|  | ||||
|     def resume_frames(self): | ||||
|         if not self.pause_frame_fut: | ||||
|             logger.debug("Websocket connection not paused.") | ||||
|             return False | ||||
|         if (not self.loop) or (not self.io_proto): | ||||
|             logger.debug( | ||||
|                 "Websocket attempting to resume reading frames, " | ||||
|                 "but connection is gone." | ||||
|             ) | ||||
|             return False | ||||
|         if self.io_proto.transport: | ||||
|             self.io_proto.transport.resume_reading() | ||||
|         self.pause_frame_fut.set_result(None) | ||||
|         self.pause_frame_fut = None | ||||
|         logger.debug("Websocket connection unpaused.") | ||||
|         return True | ||||
|  | ||||
|     async def connection_made( | ||||
|         self, | ||||
|         io_proto: SanicProtocol, | ||||
|         loop: Optional[asyncio.AbstractEventLoop] = None, | ||||
|     ): | ||||
|         if not loop: | ||||
|             try: | ||||
|                 loop = getattr(io_proto, "loop") | ||||
|             except AttributeError: | ||||
|                 loop = asyncio.get_event_loop() | ||||
|         if not loop: | ||||
|             # This catch is for mypy type checker | ||||
|             # to assert loop is not None here. | ||||
|             raise ServerError("Connection received with no asyncio loop.") | ||||
|         if self.auto_closer_task: | ||||
|             raise ServerError( | ||||
|                 "Cannot call connection_made more than once " | ||||
|                 "on a websocket connection." | ||||
|             ) | ||||
|         self.loop = loop | ||||
|         self.io_proto = io_proto | ||||
|         self.connection_lost_waiter = self.loop.create_future() | ||||
|         self.data_finished_fut = asyncio.shield(self.loop.create_future()) | ||||
|  | ||||
|         if self.ping_interval: | ||||
|             self.keepalive_ping_task = asyncio.create_task( | ||||
|                 self.keepalive_ping() | ||||
|             ) | ||||
|         self.auto_closer_task = asyncio.create_task( | ||||
|             self.auto_close_connection() | ||||
|         ) | ||||
|  | ||||
|     async def wait_for_connection_lost(self, timeout=None) -> bool: | ||||
|         """ | ||||
|         Wait until the TCP connection is closed or ``timeout`` elapses. | ||||
|         If timeout is None, wait forever. | ||||
|         It is recommended to pass ``self.close_timeout`` as the timeout. | ||||
|  | ||||
|         Return ``True`` if the connection is closed and ``False`` otherwise. | ||||
|  | ||||
|         """ | ||||
|         if not self.connection_lost_waiter: | ||||
|             return False | ||||
|         if self.connection_lost_waiter.done(): | ||||
|             return True | ||||
|         else: | ||||
|             try: | ||||
|                 await asyncio.wait_for( | ||||
|                     asyncio.shield(self.connection_lost_waiter), timeout | ||||
|                 ) | ||||
|                 return True | ||||
|             except asyncio.TimeoutError: | ||||
|                 # Re-check self.connection_lost_waiter.done() synchronously | ||||
|                 # because connection_lost() could run between the moment the | ||||
|                 # timeout occurs and the moment this coroutine resumes running | ||||
|                 return self.connection_lost_waiter.done() | ||||
|  | ||||
|     async def process_events(self, events: Sequence[Event]) -> None: | ||||
|         """ | ||||
|         Process a list of incoming events. | ||||
|         """ | ||||
|         # Wrapped in a mutex lock, to prevent other incoming events | ||||
|         # from processing at the same time | ||||
|         async with self.process_event_mutex: | ||||
|             for event in events: | ||||
|                 if not isinstance(event, Frame): | ||||
|                     # Event is not a frame. Ignore it. | ||||
|                     continue | ||||
|                 if event.opcode == Opcode.PONG: | ||||
|                     await self.process_pong(event) | ||||
|                 elif event.opcode == Opcode.CLOSE: | ||||
|                     if self.recv_cancel: | ||||
|                         self.recv_cancel.cancel() | ||||
|                 else: | ||||
|                     await self.assembler.put(event) | ||||
|  | ||||
|     async def process_pong(self, frame: Frame) -> None: | ||||
|         if frame.data in self.pings: | ||||
|             # Acknowledge all pings up to the one matching this pong. | ||||
|             ping_ids = [] | ||||
|             for ping_id, ping in self.pings.items(): | ||||
|                 ping_ids.append(ping_id) | ||||
|                 if not ping.done(): | ||||
|                     ping.set_result(None) | ||||
|                 if ping_id == frame.data: | ||||
|                     break | ||||
|             else:  # noqa | ||||
|                 raise ServerError("ping_id is not in self.pings") | ||||
|             # Remove acknowledged pings from self.pings. | ||||
|             for ping_id in ping_ids: | ||||
|                 del self.pings[ping_id] | ||||
|  | ||||
|     async def keepalive_ping(self) -> None: | ||||
|         """ | ||||
|         Send a Ping frame and wait for a Pong frame at regular intervals. | ||||
|         This coroutine exits when the connection terminates and one of the | ||||
|         following happens: | ||||
|         - :meth:`ping` raises :exc:`ConnectionClosed`, or | ||||
|         - :meth:`auto_close_connection` cancels :attr:`keepalive_ping_task`. | ||||
|         """ | ||||
|         if self.ping_interval is None: | ||||
|             return | ||||
|  | ||||
|         try: | ||||
|             while True: | ||||
|                 await asyncio.sleep(self.ping_interval) | ||||
|  | ||||
|                 # ping() raises CancelledError if the connection is closed, | ||||
|                 # when auto_close_connection() cancels keepalive_ping_task. | ||||
|  | ||||
|                 # ping() raises ConnectionClosed if the connection is lost, | ||||
|                 # when connection_lost() calls abort_pings(). | ||||
|  | ||||
|                 ping_waiter = await self.ping() | ||||
|  | ||||
|                 if self.ping_timeout is not None: | ||||
|                     try: | ||||
|                         await asyncio.wait_for(ping_waiter, self.ping_timeout) | ||||
|                     except asyncio.TimeoutError: | ||||
|                         error_logger.warning( | ||||
|                             "Websocket timed out waiting for pong" | ||||
|                         ) | ||||
|                         self.fail_connection(1011) | ||||
|                         break | ||||
|         except asyncio.CancelledError: | ||||
|             # It is expected for this task to be cancelled during | ||||
|             # normal operation, when the connection is closed. | ||||
|             logger.debug("Websocket keepalive ping task was cancelled.") | ||||
|         except (ConnectionClosed, WebsocketClosed): | ||||
|             logger.debug("Websocket closed. Keepalive ping task exiting.") | ||||
|         except Exception as e: | ||||
|             error_logger.warning( | ||||
|                 "Unexpected exception in websocket keepalive ping task." | ||||
|             ) | ||||
|             logger.debug(str(e)) | ||||
|  | ||||
|     def _force_disconnect(self) -> bool: | ||||
|         """ | ||||
|         Internal method used by end_connection and fail_connection | ||||
|         only when the graceful auto-closer cannot be used | ||||
|         """ | ||||
|         if self.auto_closer_task and not self.auto_closer_task.done(): | ||||
|             self.auto_closer_task.cancel() | ||||
|         if self.data_finished_fut and not self.data_finished_fut.done(): | ||||
|             self.data_finished_fut.cancel() | ||||
|             self.data_finished_fut = None | ||||
|         if self.keepalive_ping_task and not self.keepalive_ping_task.done(): | ||||
|             self.keepalive_ping_task.cancel() | ||||
|             self.keepalive_ping_task = None | ||||
|         if self.loop and self.io_proto and self.io_proto.transport: | ||||
|             self.io_proto.transport.close() | ||||
|             self.loop.call_later( | ||||
|                 self.close_timeout, self.io_proto.transport.abort | ||||
|             ) | ||||
|         # We were never open, or already closed | ||||
|         return True | ||||
|  | ||||
|     def fail_connection(self, code: int = 1006, reason: str = "") -> bool: | ||||
|         """ | ||||
|         Fail the WebSocket Connection | ||||
|         This requires: | ||||
|         1. Stopping all processing of incoming data, which means pausing | ||||
|            the underlying io protocol. The close code will be 1006 | ||||
|            unless a close frame was received earlier. | ||||
|         2. Sending a close frame with an appropriate code if the opening | ||||
|            handshake succeeded and the other side is likely to process it. | ||||
|         3. Closing the connection. :meth:`auto_close_connection` takes care | ||||
|            of this. | ||||
|         (The specification describes these steps in the opposite order.) | ||||
|         """ | ||||
|         if self.io_proto and self.io_proto.transport: | ||||
|             # Stop new data coming in | ||||
|             # In Python Version 3.7: pause_reading is idempotent | ||||
|             # i.e. it can be called when the transport is already paused | ||||
|             # or closed | ||||
|             self.io_proto.transport.pause_reading() | ||||
|  | ||||
|             # Keeping fail_connection() synchronous guarantees it can't | ||||
|             # get stuck and simplifies the implementation of the callers. | ||||
|             # Not draining the write buffer is acceptable in this context. | ||||
|  | ||||
|             # clear the send buffer | ||||
|             _ = self.connection.data_to_send() | ||||
|             # If we're not already CLOSED or CLOSING, then send the close. | ||||
|             if self.connection.state is OPEN: | ||||
|                 if code in (1000, 1001): | ||||
|                     self.connection.send_close(code, reason) | ||||
|                 else: | ||||
|                     self.connection.fail(code, reason) | ||||
|                 try: | ||||
|                     data_to_send = self.connection.data_to_send() | ||||
|                     while ( | ||||
|                         len(data_to_send) | ||||
|                         and self.io_proto | ||||
|                         and self.io_proto.transport | ||||
|                     ): | ||||
|                         frame_data = data_to_send.pop(0) | ||||
|                         self.io_proto.transport.write(frame_data) | ||||
|                 except Exception: | ||||
|                     # sending close frames may fail if the | ||||
|                     # transport closes during this period | ||||
|                     ... | ||||
|         if code == 1006: | ||||
|             # Special case: with code 1006, consider the transport already closed | ||||
|             self.connection.state = CLOSED | ||||
|         if self.data_finished_fut and not self.data_finished_fut.done(): | ||||
|             # We have a graceful auto-closer. Use it to close the connection. | ||||
|             self.data_finished_fut.cancel() | ||||
|             self.data_finished_fut = None | ||||
|         if (not self.auto_closer_task) or self.auto_closer_task.done(): | ||||
|             return self._force_disconnect() | ||||
|         return False | ||||
|  | ||||
|     def end_connection(self, code=1000, reason=""): | ||||
|         # This is a slightly more graceful form of fail_connection. | ||||
|         # Use this instead of close() when you need an immediate | ||||
|         # close and cannot await websocket.close() handshake. | ||||
|  | ||||
|         if code == 1006 or not self.io_proto or not self.io_proto.transport: | ||||
|             return self.fail_connection(code, reason) | ||||
|  | ||||
|         # Stop new data coming in | ||||
|         # In Python Version 3.7: pause_reading is idempotent | ||||
|         # i.e. it can be called when the transport is already paused or closed. | ||||
|         self.io_proto.transport.pause_reading() | ||||
|         if self.connection.state == OPEN: | ||||
|             data_to_send = self.connection.data_to_send() | ||||
|             self.connection.send_close(code, reason) | ||||
|             data_to_send.extend(self.connection.data_to_send()) | ||||
|             try: | ||||
|                 while ( | ||||
|                     len(data_to_send) | ||||
|                     and self.io_proto | ||||
|                     and self.io_proto.transport | ||||
|                 ): | ||||
|                     frame_data = data_to_send.pop(0) | ||||
|                     self.io_proto.transport.write(frame_data) | ||||
|             except Exception: | ||||
|                 # sending close frames may fail if the | ||||
|                 # transport closes during this period | ||||
|                 # But that doesn't matter at this point | ||||
|                 ... | ||||
|         if self.data_finished_fut and not self.data_finished_fut.done(): | ||||
|             # We have the ability to signal the auto-closer | ||||
|             # try to trigger it to auto-close the connection | ||||
|             self.data_finished_fut.cancel() | ||||
|             self.data_finished_fut = None | ||||
|         if (not self.auto_closer_task) or self.auto_closer_task.done(): | ||||
|             # Auto-closer is not running, do force disconnect | ||||
|             return self._force_disconnect() | ||||
|         return False | ||||
|  | ||||
|     async def auto_close_connection(self) -> None: | ||||
|         """ | ||||
|         Close the WebSocket Connection | ||||
|         When the opening handshake succeeds, :meth:`connection_made` starts | ||||
|         this coroutine in a task. It waits for the data transfer phase to | ||||
|         complete then it closes the TCP connection cleanly. | ||||
|         When the opening handshake fails, :meth:`fail_connection` does the | ||||
|         same. There's no data transfer phase in that case. | ||||
|         """ | ||||
|         try: | ||||
|             # Wait for the data transfer phase to complete. | ||||
|             if self.data_finished_fut: | ||||
|                 try: | ||||
|                     await self.data_finished_fut | ||||
|                     logger.debug( | ||||
|                         "Websocket task finished. Closing the connection." | ||||
|                     ) | ||||
|                 except asyncio.CancelledError: | ||||
|                     # Cancelled error is called when data phase is cancelled | ||||
|                     # if an error occurred or the client closed the connection | ||||
|                     logger.debug( | ||||
|                         "Websocket handler cancelled. Closing the connection." | ||||
|                     ) | ||||
|  | ||||
|             # Cancel the keepalive ping task. | ||||
|             if self.keepalive_ping_task: | ||||
|                 self.keepalive_ping_task.cancel() | ||||
|                 self.keepalive_ping_task = None | ||||
|  | ||||
|             # Half-close the TCP connection if possible (when there's no TLS). | ||||
|             if ( | ||||
|                 self.io_proto | ||||
|                 and self.io_proto.transport | ||||
|                 and self.io_proto.transport.can_write_eof() | ||||
|             ): | ||||
|                 logger.debug("Websocket half-closing TCP connection") | ||||
|                 self.io_proto.transport.write_eof() | ||||
|                 if self.connection_lost_waiter: | ||||
|                     if await self.wait_for_connection_lost(timeout=0): | ||||
|                         return | ||||
|         except asyncio.CancelledError: | ||||
|             ... | ||||
|         finally: | ||||
|             # The try/finally ensures that the transport never remains open, | ||||
|             # even if this coroutine is cancelled (for example). | ||||
|             if (not self.io_proto) or (not self.io_proto.transport): | ||||
|                 # we were never open, or done. Can't do any finalization. | ||||
|                 return | ||||
|             elif ( | ||||
|                 self.connection_lost_waiter | ||||
|                 and self.connection_lost_waiter.done() | ||||
|             ): | ||||
|                 # connection confirmed closed already, proceed to abort waiter | ||||
|                 ... | ||||
|             elif self.io_proto.transport.is_closing(): | ||||
|                 # Connection is already closing (due to half-close above) | ||||
|                 # proceed to abort waiter | ||||
|                 ... | ||||
|             else: | ||||
|                 self.io_proto.transport.close() | ||||
|             if not self.connection_lost_waiter: | ||||
|                 # Our connection monitor task isn't running. | ||||
|                 try: | ||||
|                     await asyncio.sleep(self.close_timeout) | ||||
|                 except asyncio.CancelledError: | ||||
|                     ... | ||||
|                 if self.io_proto and self.io_proto.transport: | ||||
|                     self.io_proto.transport.abort() | ||||
|             else: | ||||
|                 if await self.wait_for_connection_lost( | ||||
|                     timeout=self.close_timeout | ||||
|                 ): | ||||
|                     # Connection aborted before the timeout expired. | ||||
|                     return | ||||
|                 error_logger.warning( | ||||
|                     "Timeout waiting for TCP connection to close. Aborting" | ||||
|                 ) | ||||
|                 if self.io_proto and self.io_proto.transport: | ||||
|                     self.io_proto.transport.abort() | ||||
|  | ||||
|     def abort_pings(self) -> None: | ||||
|         """ | ||||
|         Raise ConnectionClosed in pending keepalive pings. | ||||
|         They'll never receive a pong once the connection is closed. | ||||
|         """ | ||||
|         if self.connection.state is not CLOSED: | ||||
|             raise ServerError( | ||||
|                 "Websocket abort_pings should only be called " | ||||
|                 "after connection state is changed to CLOSED" | ||||
|             ) | ||||
|  | ||||
|         for ping in self.pings.values(): | ||||
|             ping.set_exception(ConnectionClosedError(None, None)) | ||||
|             # If the exception is never retrieved, it will be logged when ping | ||||
|             # is garbage-collected. This is confusing for users. | ||||
|             # Given that ping is done (with an exception), canceling it does | ||||
|             # nothing, but it prevents logging the exception. | ||||
|             ping.cancel() | ||||
|  | ||||
|     async def close(self, code: int = 1000, reason: str = "") -> None: | ||||
|         """ | ||||
|         Perform the closing handshake. | ||||
|         This is a websocket-protocol level close. | ||||
|         :meth:`close` waits for the other end to complete the handshake and | ||||
|         for the TCP connection to terminate. | ||||
|         :meth:`close` is idempotent: it doesn't do anything once the | ||||
|         connection is closed. | ||||
|         :param code: WebSocket close code | ||||
|         :param reason: WebSocket close reason | ||||
|         """ | ||||
|         if code == 1006: | ||||
|             self.fail_connection(code, reason) | ||||
|             return | ||||
|         async with self.conn_mutex: | ||||
|             if self.connection.state is OPEN: | ||||
|                 self.connection.send_close(code, reason) | ||||
|                 data_to_send = self.connection.data_to_send() | ||||
|                 await self.send_data(data_to_send) | ||||
|  | ||||
|     async def recv(self, timeout: Optional[float] = None) -> Optional[Data]: | ||||
|         """ | ||||
|         Receive the next message. | ||||
|         Return a :class:`str` for a text frame and :class:`bytes` for a binary | ||||
|         frame. | ||||
|         When the end of the message stream is reached, :meth:`recv` raises | ||||
|         :exc:`~websockets.exceptions.ConnectionClosed`. Specifically, it | ||||
|         raises :exc:`~websockets.exceptions.ConnectionClosedOK` after a normal | ||||
|         connection closure and | ||||
|         :exc:`~websockets.exceptions.ConnectionClosedError` after a protocol | ||||
|         error or a network failure. | ||||
|         If ``timeout`` is ``None``, block until a message is received. Else, | ||||
|         if no message is received within ``timeout`` seconds, return ``None``. | ||||
|         Set ``timeout`` to ``0`` to check if a message was already received. | ||||
|         :raises ~websockets.exceptions.ConnectionClosed: when the | ||||
|             connection is closed | ||||
|         :raises asyncio.CancelledError: if the websocket closes while waiting | ||||
|         :raises ServerError: if two tasks call :meth:`recv` or | ||||
|             :meth:`recv_streaming` concurrently | ||||
|         """ | ||||
|  | ||||
|         if self.recv_lock.locked(): | ||||
|             raise ServerError( | ||||
|                 "cannot call recv while another task is " | ||||
|                 "already waiting for the next message" | ||||
|             ) | ||||
|         await self.recv_lock.acquire() | ||||
|         if self.connection.state is CLOSED: | ||||
|             self.recv_lock.release() | ||||
|             raise WebsocketClosed( | ||||
|                 "Cannot receive from websocket interface after it is closed." | ||||
|             ) | ||||
|         try: | ||||
|             self.recv_cancel = asyncio.Future() | ||||
|             done, pending = await asyncio.wait( | ||||
|                 (self.recv_cancel, self.assembler.get(timeout)), | ||||
|                 return_when=asyncio.FIRST_COMPLETED, | ||||
|             ) | ||||
|             done_task = next(iter(done)) | ||||
|             if done_task is self.recv_cancel: | ||||
|                 # recv was cancelled | ||||
|                 for p in pending: | ||||
|                     p.cancel() | ||||
|                 raise asyncio.CancelledError() | ||||
|             else: | ||||
|                 self.recv_cancel.cancel() | ||||
|                 return done_task.result() | ||||
|         finally: | ||||
|             self.recv_cancel = None | ||||
|             self.recv_lock.release() | ||||
|  | ||||
|     async def recv_burst(self, max_recv=256) -> Sequence[Data]: | ||||
|         """ | ||||
|         Receive the messages which have arrived since last checking. | ||||
|         Return a :class:`list` containing :class:`str` for a text frame | ||||
|         and :class:`bytes` for a binary frame. | ||||
|         When the end of the message stream is reached, :meth:`recv_burst` | ||||
|         raises :exc:`~websockets.exceptions.ConnectionClosed`. Specifically, | ||||
|         it raises :exc:`~websockets.exceptions.ConnectionClosedOK` after a | ||||
|         normal connection closure and | ||||
|         :exc:`~websockets.exceptions.ConnectionClosedError` after a protocol | ||||
|         error or a network failure. | ||||
|         :raises ~websockets.exceptions.ConnectionClosed: when the | ||||
|             connection is closed | ||||
|         :raises ServerError: if two tasks call :meth:`recv_burst` or | ||||
|             :meth:`recv_streaming` concurrently | ||||
|         """ | ||||
|  | ||||
|         if self.recv_lock.locked(): | ||||
|             raise ServerError( | ||||
|                 "cannot call recv_burst while another task is already waiting " | ||||
|                 "for the next message" | ||||
|             ) | ||||
|         await self.recv_lock.acquire() | ||||
|         if self.connection.state is CLOSED: | ||||
|             self.recv_lock.release() | ||||
|             raise WebsocketClosed( | ||||
|                 "Cannot receive from websocket interface after it is closed." | ||||
|             ) | ||||
|         messages = [] | ||||
|         try: | ||||
|             # Prevent pausing the transport when we're | ||||
|             # receiving a burst of messages | ||||
|             self.can_pause = False | ||||
|             self.recv_cancel = asyncio.Future() | ||||
|             while True: | ||||
|                 done, pending = await asyncio.wait( | ||||
|                     (self.recv_cancel, self.assembler.get(timeout=0)), | ||||
|                     return_when=asyncio.FIRST_COMPLETED, | ||||
|                 ) | ||||
|                 done_task = next(iter(done)) | ||||
|                 if done_task is self.recv_cancel: | ||||
|                     # recv_burst was cancelled | ||||
|                     for p in pending: | ||||
|                         p.cancel() | ||||
|                     raise asyncio.CancelledError() | ||||
|                 m = done_task.result() | ||||
|                 if m is None: | ||||
|                     # None left in the burst. This is good! | ||||
|                     break | ||||
|                 messages.append(m) | ||||
|                 if len(messages) >= max_recv: | ||||
|                     # Too much data in the pipe. Hit our burst limit. | ||||
|                     break | ||||
|                 # Allow an eventloop iteration for the | ||||
|                 # next message to pass into the Assembler | ||||
|                 await asyncio.sleep(0) | ||||
|             self.recv_cancel.cancel() | ||||
|         finally: | ||||
|             self.recv_cancel = None | ||||
|             self.can_pause = True | ||||
|             self.recv_lock.release() | ||||
|         return messages | ||||
|  | ||||
|     async def recv_streaming(self) -> AsyncIterator[Data]: | ||||
|         """ | ||||
|         Receive the next message frame by frame. | ||||
|         Return an iterator of :class:`str` for a text frame and :class:`bytes` | ||||
|         for a binary frame. The iterator should be exhausted, or else the | ||||
|         connection will become unusable. | ||||
|         With the exception of the return value, :meth:`recv_streaming` behaves | ||||
|         like :meth:`recv`. | ||||
|         """ | ||||
|         if self.recv_lock.locked(): | ||||
|             raise ServerError( | ||||
|                 "Cannot call recv_streaming while another task " | ||||
|                 "is already waiting for the next message" | ||||
|             ) | ||||
|         await self.recv_lock.acquire() | ||||
|         if self.connection.state is CLOSED: | ||||
|             self.recv_lock.release() | ||||
|             raise WebsocketClosed( | ||||
|                 "Cannot receive from websocket interface after it is closed." | ||||
|             ) | ||||
|         try: | ||||
|             cancelled = False | ||||
|             self.recv_cancel = asyncio.Future() | ||||
|             self.can_pause = False | ||||
|             async for m in self.assembler.get_iter(): | ||||
|                 if self.recv_cancel.done(): | ||||
|                     cancelled = True | ||||
|                     break | ||||
|                 yield m | ||||
|             if cancelled: | ||||
|                 raise asyncio.CancelledError() | ||||
|         finally: | ||||
|             self.can_pause = True | ||||
|             self.recv_cancel = None | ||||
|             self.recv_lock.release() | ||||
|  | ||||
|     async def send(self, message: Union[Data, Iterable[Data]]) -> None: | ||||
|         """ | ||||
|         Send a message. | ||||
|         A string (:class:`str`) is sent as a `Text frame`_. A bytestring or | ||||
|         bytes-like object (:class:`bytes`, :class:`bytearray`, or | ||||
|         :class:`memoryview`) is sent as a `Binary frame`_. | ||||
|         .. _Text frame: https://tools.ietf.org/html/rfc6455#section-5.6 | ||||
|         .. _Binary frame: https://tools.ietf.org/html/rfc6455#section-5.6 | ||||
|         :meth:`send` also accepts an iterable of strings, bytestrings, or | ||||
|         bytes-like objects. In that case the message is fragmented. Each item | ||||
|         is treated as a message fragment and sent in its own frame. All items | ||||
|         must be of the same type, or else :meth:`send` will raise a | ||||
|         :exc:`TypeError` and the connection will be closed. | ||||
|         :meth:`send` rejects dict-like objects because this is often an error. | ||||
|         If you wish to send the keys of a dict-like object as fragments, call | ||||
|         its :meth:`~dict.keys` method and pass the result to :meth:`send`. | ||||
|         :raises TypeError: for unsupported inputs | ||||
|         """ | ||||
|         async with self.conn_mutex: | ||||
|  | ||||
|             if self.connection.state in (CLOSED, CLOSING): | ||||
|                 raise WebsocketClosed( | ||||
|                     "Cannot write to websocket interface after it is closed." | ||||
|                 ) | ||||
|             if (not self.data_finished_fut) or self.data_finished_fut.done(): | ||||
|                 raise ServerError( | ||||
|                     "Cannot write to websocket interface after it is finished." | ||||
|                 ) | ||||
|  | ||||
|             # Unfragmented message -- this case must be handled first because | ||||
|             # strings and bytes-like objects are iterable. | ||||
|  | ||||
|             if isinstance(message, str): | ||||
|                 self.connection.send_text(message.encode("utf-8")) | ||||
|                 await self.send_data(self.connection.data_to_send()) | ||||
|  | ||||
|             elif isinstance(message, (bytes, bytearray, memoryview)): | ||||
|                 self.connection.send_binary(message) | ||||
|                 await self.send_data(self.connection.data_to_send()) | ||||
|  | ||||
|             elif isinstance(message, Mapping): | ||||
|                 # Catch a common mistake -- passing a dict to send(). | ||||
|                 raise TypeError("data is a dict-like object") | ||||
|  | ||||
|             elif isinstance(message, Iterable): | ||||
|                 # Fragmented message -- regular iterator. | ||||
|                 raise NotImplementedError( | ||||
|                     "Fragmented websocket messages are not supported." | ||||
|                 ) | ||||
|             else: | ||||
|                 raise TypeError("Websocket data must be bytes or str.") | ||||
|  | ||||
|     async def ping(self, data: Optional[Data] = None) -> asyncio.Future: | ||||
|         """ | ||||
|         Send a ping. | ||||
|         Return an :class:`~asyncio.Future` that will be resolved when the | ||||
|         corresponding pong is received. You can ignore it if you don't intend | ||||
|         to wait. | ||||
|         A ping may serve as a keepalive or as a check that the remote endpoint | ||||
|         received all messages up to this point:: | ||||
|             pong_event = await ws.ping() | ||||
|             await pong_event # only if you want to wait for the pong | ||||
|         By default, the ping contains four random bytes. This payload may be | ||||
|         overridden with the optional ``data`` argument which must be a string | ||||
|         (which will be encoded to UTF-8) or a bytes-like object. | ||||
|         """ | ||||
|         async with self.conn_mutex: | ||||
|             if self.connection.state in (CLOSED, CLOSING): | ||||
|                 raise WebsocketClosed( | ||||
|                     "Cannot send a ping when the websocket interface " | ||||
|                     "is closed." | ||||
|                 ) | ||||
|             if (not self.io_proto) or (not self.io_proto.loop): | ||||
|                 raise ServerError( | ||||
|                     "Cannot send a ping when the websocket has no I/O " | ||||
|                     "protocol attached." | ||||
|                 ) | ||||
|             if data is not None: | ||||
|                 if isinstance(data, str): | ||||
|                     data = data.encode("utf-8") | ||||
|                 elif isinstance(data, (bytearray, memoryview)): | ||||
|                     data = bytes(data) | ||||
|  | ||||
|             # Protect against duplicates if a payload is explicitly set. | ||||
|             if data in self.pings: | ||||
|                 raise ValueError( | ||||
|                     "already waiting for a pong with the same data" | ||||
|                 ) | ||||
|  | ||||
|             # Generate a unique random payload otherwise. | ||||
|             while data is None or data in self.pings: | ||||
|                 data = struct.pack("!I", random.getrandbits(32)) | ||||
|  | ||||
|             self.pings[data] = self.io_proto.loop.create_future() | ||||
|  | ||||
|             self.connection.send_ping(data) | ||||
|             await self.send_data(self.connection.data_to_send()) | ||||
|  | ||||
|             return asyncio.shield(self.pings[data]) | ||||
|  | ||||
|     async def pong(self, data: Data = b"") -> None: | ||||
|         """ | ||||
|         Send a pong. | ||||
|         An unsolicited pong may serve as a unidirectional heartbeat. | ||||
|         The payload may be set with the optional ``data`` argument which must | ||||
|         be a string (which will be encoded to UTF-8) or a bytes-like object. | ||||
|         """ | ||||
|         async with self.conn_mutex: | ||||
|             if self.connection.state in (CLOSED, CLOSING): | ||||
|                 # Cannot send pong after transport is shutting down | ||||
|                 return | ||||
|             if isinstance(data, str): | ||||
|                 data = data.encode("utf-8") | ||||
|             elif isinstance(data, (bytearray, memoryview)): | ||||
|                 data = bytes(data) | ||||
|             self.connection.send_pong(data) | ||||
|             await self.send_data(self.connection.data_to_send()) | ||||
|  | ||||
|     async def send_data(self, data_to_send): | ||||
|         for data in data_to_send: | ||||
|             if data: | ||||
|                 await self.io_proto.send(data) | ||||
|             else: | ||||
|                 # Send an EOF - we don't actually send it, | ||||
|                 # we just trigger the auto-close of the connection | ||||
|                 if ( | ||||
|                     self.auto_closer_task | ||||
|                     and not self.auto_closer_task.done() | ||||
|                     and self.data_finished_fut | ||||
|                     and not self.data_finished_fut.done() | ||||
|                 ): | ||||
|                     # Auto-close the connection | ||||
|                     self.data_finished_fut.set_result(None) | ||||
|                 else: | ||||
|                     # This will fail the connection appropriately | ||||
|                     SanicProtocol.close(self.io_proto, timeout=1.0) | ||||
|  | ||||
|     async def async_data_received(self, data_to_send, events_to_process): | ||||
|         if self.connection.state in (OPEN, CLOSING) and len(data_to_send) > 0: | ||||
|             # receiving data can generate data to send (eg, pong for a ping) | ||||
|             # send connection.data_to_send() | ||||
|             await self.send_data(data_to_send) | ||||
|         if len(events_to_process) > 0: | ||||
|             await self.process_events(events_to_process) | ||||
|  | ||||
|     def data_received(self, data): | ||||
|         self.connection.receive_data(data) | ||||
|         data_to_send = self.connection.data_to_send() | ||||
|         events_to_process = self.connection.events_received() | ||||
|         if len(data_to_send) > 0 or len(events_to_process) > 0: | ||||
|             asyncio.create_task( | ||||
|                 self.async_data_received(data_to_send, events_to_process) | ||||
|             ) | ||||
|  | ||||
|     async def async_eof_received(self, data_to_send, events_to_process): | ||||
|         # receiving EOF can generate data to send | ||||
|         # send connection.data_to_send() | ||||
|         if self.connection.state in (OPEN, CLOSING) and len(data_to_send) > 0: | ||||
|             await self.send_data(data_to_send) | ||||
|         if len(events_to_process) > 0: | ||||
|             await self.process_events(events_to_process) | ||||
|         if self.recv_cancel: | ||||
|             self.recv_cancel.cancel() | ||||
|         if ( | ||||
|             self.auto_closer_task | ||||
|             and not self.auto_closer_task.done() | ||||
|             and self.data_finished_fut | ||||
|             and not self.data_finished_fut.done() | ||||
|         ): | ||||
|             # Auto-close the connection | ||||
|             self.data_finished_fut.set_result(None) | ||||
|             # Cancel the running handler if it's waiting | ||||
|         else: | ||||
|             # This will fail the connection appropriately | ||||
|             SanicProtocol.close(self.io_proto, timeout=1.0) | ||||
|  | ||||
|     def eof_received(self) -> Optional[bool]: | ||||
|         self.connection.receive_eof() | ||||
|         data_to_send = self.connection.data_to_send() | ||||
|         events_to_process = self.connection.events_received() | ||||
|         asyncio.create_task( | ||||
|             self.async_eof_received(data_to_send, events_to_process) | ||||
|         ) | ||||
|         return False | ||||
|  | ||||
|     def connection_lost(self, exc): | ||||
|         """ | ||||
|         The WebSocket Connection is Closed. | ||||
|         """ | ||||
|         if not self.connection.state == CLOSED: | ||||
|             # signal to the websocket connection handler | ||||
|             # we've lost the connection | ||||
|             self.connection.fail(code=1006) | ||||
|             self.connection.state = CLOSED | ||||
|  | ||||
|         self.abort_pings() | ||||
|         if self.connection_lost_waiter: | ||||
|             self.connection_lost_waiter.set_result(None) | ||||
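As a usage sketch, the protocol above is what a Sanic websocket handler talks to. The route path, port, and echo logic below are invented for illustration, and it assumes the familiar `app.websocket` decorator hands the handler a connection exposing the `recv()`, `send()`, and `ping()` coroutines defined in this file.

```python
from sanic import Sanic

app = Sanic("WsEcho")


@app.websocket("/echo")  # hypothetical route for illustration
async def echo(request, ws):
    # Application-level ping; awaiting the returned (shielded) future waits
    # for the matching pong, mirroring the docstring of ping() above.
    pong_waiter = await ws.ping()
    await pong_waiter

    # recv(timeout=...) returns None when nothing arrived in time; a closed
    # connection raises instead, which ends the handler.
    while True:
        message = await ws.recv(timeout=10)
        if message is None:
            await ws.send("still there?")
            continue
        await ws.send(message)


if __name__ == "__main__":
    app.run(port=8000)
```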
| @@ -10,13 +10,39 @@ from sanic_routing.exceptions import NotFound  # type: ignore | ||||
| from sanic_routing.utils import path_to_parts  # type: ignore | ||||
|  | ||||
| from sanic.exceptions import InvalidSignal | ||||
| from sanic.log import error_logger, logger | ||||
| from sanic.models.handler_types import SignalHandler | ||||
|  | ||||
|  | ||||
| RESERVED_NAMESPACES = ( | ||||
|     "server", | ||||
|     "http", | ||||
| ) | ||||
| RESERVED_NAMESPACES = { | ||||
|     "server": ( | ||||
|         # "server.main.start", | ||||
|         # "server.main.stop", | ||||
|         "server.init.before", | ||||
|         "server.init.after", | ||||
|         "server.shutdown.before", | ||||
|         "server.shutdown.after", | ||||
|     ), | ||||
|     "http": ( | ||||
|         "http.lifecycle.begin", | ||||
|         "http.lifecycle.complete", | ||||
|         "http.lifecycle.exception", | ||||
|         "http.lifecycle.handle", | ||||
|         "http.lifecycle.read_body", | ||||
|         "http.lifecycle.read_head", | ||||
|         "http.lifecycle.request", | ||||
|         "http.lifecycle.response", | ||||
|         "http.routing.after", | ||||
|         "http.routing.before", | ||||
|         "http.lifecycle.send", | ||||
|         "http.middleware.after", | ||||
|         "http.middleware.before", | ||||
|     ), | ||||
| } | ||||
|  | ||||
|  | ||||
| def _blank(): | ||||
|     ... | ||||
|  | ||||
|  | ||||
| class Signal(Route): | ||||
| @@ -48,7 +74,7 @@ class SignalRouter(BaseRouter): | ||||
|                 f".{event}", | ||||
|                 self.DEFAULT_METHOD, | ||||
|                 self, | ||||
|                 {"__params__": {}}, | ||||
|                 {"__params__": {}, "__matches__": {}}, | ||||
|                 extra=extra, | ||||
|             ) | ||||
|         except NotFound: | ||||
| @@ -59,7 +85,18 @@ class SignalRouter(BaseRouter): | ||||
|                 terms.append(extra) | ||||
|             raise NotFound(message % tuple(terms)) | ||||
|  | ||||
|         params = param_basket.pop("__params__") | ||||
|         # Regex routes evaluate and can extract params directly. They are set | ||||
|         # on param_basket["__params__"] | ||||
|         params = param_basket["__params__"] | ||||
|         if not params: | ||||
|             # If param_basket["__params__"] is empty, we might have | ||||
|             # param_basket["__matches__"], which are index-based matches | ||||
|             # on path segments. They should already be cast to their types. | ||||
|             params = { | ||||
|                 param.name: param_basket["__matches__"][idx] | ||||
|                 for idx, param in group.params.items() | ||||
|             } | ||||
|  | ||||
|         return group, [route.handler for route in group], params | ||||
|  | ||||
|     async def _dispatch( | ||||
| @@ -67,8 +104,18 @@ class SignalRouter(BaseRouter): | ||||
|         event: str, | ||||
|         context: Optional[Dict[str, Any]] = None, | ||||
|         condition: Optional[Dict[str, str]] = None, | ||||
|     ) -> None: | ||||
|         fail_not_found: bool = True, | ||||
|         reverse: bool = False, | ||||
|     ) -> Any: | ||||
|         try: | ||||
|             group, handlers, params = self.get(event, condition=condition) | ||||
|         except NotFound as e: | ||||
|             if fail_not_found: | ||||
|                 raise e | ||||
|             else: | ||||
|                 if self.ctx.app.debug: | ||||
|                     error_logger.warning(str(e)) | ||||
|                 return None | ||||
|  | ||||
|         events = [signal.ctx.event for signal in group] | ||||
|         for signal_event in events: | ||||
| @@ -76,12 +123,19 @@ class SignalRouter(BaseRouter): | ||||
|         if context: | ||||
|             params.update(context) | ||||
|  | ||||
|         if not reverse: | ||||
|             handlers = handlers[::-1] | ||||
|         try: | ||||
|             for handler in handlers: | ||||
|                 if condition is None or condition == handler.__requirements__: | ||||
|                     maybe_coroutine = handler(**params) | ||||
|                     if isawaitable(maybe_coroutine): | ||||
|                         await maybe_coroutine | ||||
|                         retval = await maybe_coroutine | ||||
|                         if retval: | ||||
|                             return retval | ||||
|                     elif maybe_coroutine: | ||||
|                         return maybe_coroutine | ||||
|             return None | ||||
|         finally: | ||||
|             for signal_event in events: | ||||
|                 signal_event.clear() | ||||
| @@ -92,14 +146,23 @@ class SignalRouter(BaseRouter): | ||||
|         *, | ||||
|         context: Optional[Dict[str, Any]] = None, | ||||
|         condition: Optional[Dict[str, str]] = None, | ||||
|     ) -> asyncio.Task: | ||||
|         task = self.ctx.loop.create_task( | ||||
|             self._dispatch( | ||||
|         fail_not_found: bool = True, | ||||
|         inline: bool = False, | ||||
|         reverse: bool = False, | ||||
|     ) -> Union[asyncio.Task, Any]: | ||||
|         dispatch = self._dispatch( | ||||
|             event, | ||||
|             context=context, | ||||
|             condition=condition, | ||||
|             fail_not_found=fail_not_found and inline, | ||||
|             reverse=reverse, | ||||
|         ) | ||||
|         ) | ||||
|         logger.debug(f"Dispatching signal: {event}") | ||||
|  | ||||
|         if inline: | ||||
|             return await dispatch | ||||
|  | ||||
|         task = asyncio.get_running_loop().create_task(dispatch) | ||||
|         await asyncio.sleep(0) | ||||
|         return task | ||||
|  | ||||
| @@ -125,7 +188,9 @@ class SignalRouter(BaseRouter): | ||||
|             append=True, | ||||
|         )  # type: ignore | ||||
|  | ||||
|     def finalize(self, do_compile: bool = True): | ||||
|     def finalize(self, do_compile: bool = True, do_optimize: bool = False): | ||||
|         self.add(_blank, "sanic.__signal__.__init__") | ||||
|  | ||||
|         try: | ||||
|             self.ctx.loop = asyncio.get_running_loop() | ||||
|         except RuntimeError: | ||||
| @@ -134,7 +199,7 @@ class SignalRouter(BaseRouter): | ||||
|         for signal in self.routes: | ||||
|             signal.ctx.event = asyncio.Event() | ||||
|  | ||||
|         return super().finalize(do_compile=do_compile) | ||||
|         return super().finalize(do_compile=do_compile, do_optimize=do_optimize) | ||||
|  | ||||
|     def _build_event_parts(self, event: str) -> Tuple[str, str, str]: | ||||
|         parts = path_to_parts(event, self.delimiter) | ||||
| @@ -145,7 +210,11 @@ class SignalRouter(BaseRouter): | ||||
|         ): | ||||
|             raise InvalidSignal("Invalid signal event: %s" % event) | ||||
|  | ||||
|         if parts[0] in RESERVED_NAMESPACES: | ||||
|         if ( | ||||
|             parts[0] in RESERVED_NAMESPACES | ||||
|             and event not in RESERVED_NAMESPACES[parts[0]] | ||||
|             and not (parts[2].startswith("<") and parts[2].endswith(">")) | ||||
|         ): | ||||
|             raise InvalidSignal( | ||||
|                 "Cannot declare reserved signal event: %s" % event | ||||
|             ) | ||||
|   | ||||
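The signal changes above add dynamic path segments (resolved through `__matches__`), an `inline`/`reverse`/`fail_not_found`-aware dispatch, and a whitelist of reserved `server.*`/`http.*` events. Below is a hedged sketch of how application code might use a custom, non-reserved signal; the event name, handler, and context values are invented for illustration, and it assumes `app.signal`/`app.dispatch` forward to the router shown here.

```python
from sanic import Sanic
from sanic.response import text

app = Sanic("SignalsDemo")


# Custom namespace, so the reserved-namespace check does not apply. The third
# segment is dynamic; get() resolves it via the new __matches__ handling.
@app.signal("registration.user.<action>")
async def on_user_event(action, **context):
    print(f"user event: {action}, context: {context}")


@app.route("/register")
async def register(request):
    # By default dispatch schedules the handlers as a task; the new
    # inline=True path (used internally) awaits them and returns any value.
    await app.dispatch(
        "registration.user.created",
        context={"username": "alice"},
    )
    return text("ok")
```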
							
								
								
									
21  sanic/simple.py  Normal file
							| @@ -0,0 +1,21 @@ | ||||
| from pathlib import Path | ||||
|  | ||||
| from sanic import Sanic | ||||
| from sanic.exceptions import SanicException | ||||
| from sanic.response import redirect | ||||
|  | ||||
|  | ||||
| def create_simple_server(directory: Path): | ||||
|     if not directory.is_dir(): | ||||
|         raise SanicException( | ||||
|             "Cannot setup Sanic Simple Server without a path to a directory" | ||||
|         ) | ||||
|  | ||||
|     app = Sanic("SimpleServer") | ||||
|     app.static("/", directory, name="main") | ||||
|  | ||||
|     @app.get("/") | ||||
|     def index(_): | ||||
|         return redirect(app.url_for("main", filename="index.html")) | ||||
|  | ||||
|     return app | ||||
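A short usage sketch for the new `sanic.simple` module follows; the `./public` directory and the port are placeholders, and the directory is expected to contain an `index.html` for the redirect to resolve.

```python
from pathlib import Path

from sanic.simple import create_simple_server

# Serve the contents of ./public; "/" redirects to the static route named
# "main", i.e. to /index.html.
app = create_simple_server(Path("./public"))

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=8000)
```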
							
								
								
									
8  sanic/touchup/__init__.py  Normal file
							| @@ -0,0 +1,8 @@ | ||||
| from .meta import TouchUpMeta | ||||
| from .service import TouchUp | ||||
|  | ||||
|  | ||||
| __all__ = ( | ||||
|     "TouchUp", | ||||
|     "TouchUpMeta", | ||||
| ) | ||||
							
								
								
									
22  sanic/touchup/meta.py  Normal file
							| @@ -0,0 +1,22 @@ | ||||
| from sanic.exceptions import SanicException | ||||
|  | ||||
| from .service import TouchUp | ||||
|  | ||||
|  | ||||
| class TouchUpMeta(type): | ||||
|     def __new__(cls, name, bases, attrs, **kwargs): | ||||
|         gen_class = super().__new__(cls, name, bases, attrs, **kwargs) | ||||
|  | ||||
|         methods = attrs.get("__touchup__") | ||||
|         attrs["__touched__"] = False | ||||
|         if methods: | ||||
|  | ||||
|             for method in methods: | ||||
|                 if method not in attrs: | ||||
|                     raise SanicException( | ||||
|                         "Cannot perform touchup on non-existent method: " | ||||
|                         f"{name}.{method}" | ||||
|                     ) | ||||
|                 TouchUp.register(gen_class, method) | ||||
|  | ||||
|         return gen_class | ||||
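A sketch of how a class opts in to touch-up via the metaclass; `Handler` and its `handle` method are hypothetical. Every name listed in `__touchup__` must exist on the class, otherwise `SanicException` is raised at class-creation time, and each (class, method) pair is handed to `TouchUp.register` for later rewriting.

```python
from sanic.touchup import TouchUpMeta


class Handler(metaclass=TouchUpMeta):
    __touchup__ = ("handle",)

    async def handle(self, request):
        # Illustration only (self.dispatch is not defined here): this is the
        # kind of expression the ODE scheme can strip out when no matching
        # signal is registered.
        await self.dispatch("http.lifecycle.handle")
        return request
```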
							
								
								
									
5  sanic/touchup/schemes/__init__.py  Normal file
							| @@ -0,0 +1,5 @@ | ||||
| from .base import BaseScheme | ||||
| from .ode import OptionalDispatchEvent  # noqa | ||||
|  | ||||
|  | ||||
| __all__ = ("BaseScheme",) | ||||
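The `# noqa` import above exists purely for its side effect: `BaseScheme.__init_subclass__` (next file) registers every subclass, so importing the package is enough to make the ODE scheme discoverable. A small check, assuming a normal install of this branch:

```python
from sanic.touchup.schemes import BaseScheme
from sanic.touchup.schemes.ode import OptionalDispatchEvent

# Subclasses self-register when their module is imported.
assert OptionalDispatchEvent in BaseScheme._registry
print(sorted(s.ident for s in BaseScheme._registry))  # expected: ['ODE']
```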
							
								
								
									
20  sanic/touchup/schemes/base.py  Normal file
							| @@ -0,0 +1,20 @@ | ||||
| from abc import ABC, abstractmethod | ||||
| from typing import Set, Type | ||||
|  | ||||
|  | ||||
| class BaseScheme(ABC): | ||||
|     ident: str | ||||
|     _registry: Set[Type] = set() | ||||
|  | ||||
|     def __init__(self, app) -> None: | ||||
|         self.app = app | ||||
|  | ||||
|     @abstractmethod | ||||
|     def run(self, method, module_globals) -> None: | ||||
|         ... | ||||
|  | ||||
|     def __init_subclass__(cls): | ||||
|         BaseScheme._registry.add(cls) | ||||
|  | ||||
|     def __call__(self, method, module_globals): | ||||
|         return self.run(method, module_globals) | ||||
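Writing a new scheme is a matter of subclassing; `NoOpScheme` below is a hypothetical example. Although the abstract `run` is annotated `-> None`, the `TouchUp` service later in this diff assigns whatever `run` returns back onto the class, so a scheme should return the (possibly rewritten) callable. Subclassing alone registers the scheme, so defining one in a real project means it will execute during touch-up.

```python
from sanic.touchup.schemes import BaseScheme


class NoOpScheme(BaseScheme):
    ident = "NOOP"

    def run(self, method, module_globals):
        # Inspect or rewrite `method` here; returning it unchanged is valid.
        return method
```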
							
								
								
									
67  sanic/touchup/schemes/ode.py  Normal file
							| @@ -0,0 +1,67 @@ | ||||
| from ast import Attribute, Await, Expr, NodeTransformer, parse | ||||
| from inspect import getsource | ||||
| from textwrap import dedent | ||||
| from typing import Any, Dict | ||||
|  | ||||
| from sanic.log import logger | ||||
|  | ||||
| from .base import BaseScheme | ||||
|  | ||||
|  | ||||
| class OptionalDispatchEvent(BaseScheme): | ||||
|     ident = "ODE" | ||||
|  | ||||
|     def __init__(self, app) -> None: | ||||
|         super().__init__(app) | ||||
|  | ||||
|         self._registered_events = [ | ||||
|             signal.path for signal in app.signal_router.routes | ||||
|         ] | ||||
|  | ||||
|     def run(self, method, module_globals): | ||||
|         raw_source = getsource(method) | ||||
|         src = dedent(raw_source) | ||||
|         tree = parse(src) | ||||
|         node = RemoveDispatch(self._registered_events).visit(tree) | ||||
|         compiled_src = compile(node, method.__name__, "exec") | ||||
|         exec_locals: Dict[str, Any] = {} | ||||
|         exec(compiled_src, module_globals, exec_locals)  # nosec | ||||
|  | ||||
|         return exec_locals[method.__name__] | ||||
|  | ||||
|  | ||||
| class RemoveDispatch(NodeTransformer): | ||||
|     def __init__(self, registered_events) -> None: | ||||
|         self._registered_events = registered_events | ||||
|  | ||||
|     def visit_Expr(self, node: Expr) -> Any: | ||||
|         call = node.value | ||||
|         if isinstance(call, Await): | ||||
|             call = call.value | ||||
|  | ||||
|         func = getattr(call, "func", None) | ||||
|         args = getattr(call, "args", None) | ||||
|         if not func or not args: | ||||
|             return node | ||||
|  | ||||
|         if isinstance(func, Attribute) and func.attr == "dispatch": | ||||
|             event = args[0] | ||||
|             if hasattr(event, "s"): | ||||
|                 event_name = getattr(event, "value", event.s) | ||||
|                 if self._not_registered(event_name): | ||||
|                     logger.debug(f"Disabling event: {event_name}") | ||||
|                     return None | ||||
|         return node | ||||
|  | ||||
|     def _not_registered(self, event_name): | ||||
|         dynamic = [] | ||||
|         for event in self._registered_events: | ||||
|             if event.endswith(">"): | ||||
|                 namespace_concern, _ = event.rsplit(".", 1) | ||||
|                 dynamic.append(namespace_concern) | ||||
|  | ||||
|         namespace_concern, _ = event_name.rsplit(".", 1) | ||||
|         return ( | ||||
|             event_name not in self._registered_events | ||||
|             and namespace_concern not in dynamic | ||||
|         ) | ||||
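The parse, transform, compile, exec pipeline above can be demonstrated in isolation. A standalone sketch that mirrors what RemoveDispatch does to a handler body, without depending on a Sanic app (requires Python 3.9+ for ast.unparse; the handler source is illustrative):

    from ast import Attribute, Await, Expr, NodeTransformer, parse, unparse
    from textwrap import dedent

    SRC = dedent(
        '''
        async def handle(self, request):
            await self.dispatch("http.lifecycle.request")
            return await self.respond(request)
        '''
    )

    class DropDispatch(NodeTransformer):
        def visit_Expr(self, node: Expr):
            # Unwrap `await ...` exactly like the scheme above.
            call = node.value.value if isinstance(node.value, Await) else node.value
            func = getattr(call, "func", None)
            if isinstance(func, Attribute) and func.attr == "dispatch":
                return None  # drop the whole statement
            return node

    tree = DropDispatch().visit(parse(SRC))
    print(unparse(tree))  # the dispatch() call is gone from the output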
							
								
								
									
33 sanic/touchup/service.py Normal file
							| @@ -0,0 +1,33 @@ | ||||
| from inspect import getmembers, getmodule | ||||
| from typing import Set, Tuple, Type | ||||
|  | ||||
| from .schemes import BaseScheme | ||||
|  | ||||
|  | ||||
| class TouchUp: | ||||
|     _registry: Set[Tuple[Type, str]] = set() | ||||
|  | ||||
|     @classmethod | ||||
|     def run(cls, app): | ||||
|         for target, method_name in cls._registry: | ||||
|             method = getattr(target, method_name) | ||||
|  | ||||
|             if app.test_mode: | ||||
|                 placeholder = f"_{method_name}" | ||||
|                 if hasattr(target, placeholder): | ||||
|                     method = getattr(target, placeholder) | ||||
|                 else: | ||||
|                     setattr(target, placeholder, method) | ||||
|  | ||||
|             module = getmodule(target) | ||||
|             module_globals = dict(getmembers(module)) | ||||
|  | ||||
|             for scheme in BaseScheme._registry: | ||||
|                 modified = scheme(app)(method, module_globals) | ||||
|                 setattr(target, method_name, modified) | ||||
|  | ||||
|             target.__touched__ = True | ||||
|  | ||||
|     @classmethod | ||||
|     def register(cls, target, method_name): | ||||
|         cls._registry.add((target, method_name)) | ||||
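Registration and execution stay decoupled: `register` records a (class, method name) pair at import time, and `run` later replaces the method with whatever the schemes return, stashing the pristine original under a `_`-prefixed name in test mode so repeated runs start from it (the conftest change later in this diff iterates the same registry to restore originals between tests). A rough sketch of the flow, with a hypothetical target class:

    from sanic.touchup.service import TouchUp

    class Lifecycle:
        async def handle(self, request):
            ...

    # Normally done from a metaclass / __init_subclass__ hook, as in the
    # first hunk of this diff:
    TouchUp.register(Lifecycle, "handle")

    # At startup Sanic would call TouchUp.run(app), re-compiling
    # Lifecycle.handle through every scheme in BaseScheme._registry.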
| @@ -105,6 +105,7 @@ def load_module_from_file_location( | ||||
|             _mod_spec = spec_from_file_location( | ||||
|                 name, location, *args, **kwargs | ||||
|             ) | ||||
|             assert _mod_spec is not None  # type assertion for mypy | ||||
|             module = module_from_spec(_mod_spec) | ||||
|             _mod_spec.loader.exec_module(module)  # type: ignore | ||||
|  | ||||
|   | ||||
| @@ -1,7 +1,24 @@ | ||||
| from typing import Any, Callable, List | ||||
| from __future__ import annotations | ||||
|  | ||||
| from typing import ( | ||||
|     TYPE_CHECKING, | ||||
|     Any, | ||||
|     Callable, | ||||
|     Iterable, | ||||
|     List, | ||||
|     Optional, | ||||
|     Union, | ||||
| ) | ||||
| from warnings import warn | ||||
|  | ||||
| from sanic.constants import HTTP_METHODS | ||||
| from sanic.exceptions import InvalidUsage | ||||
| from sanic.models.handler_types import RouteHandler | ||||
|  | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from sanic import Sanic | ||||
|     from sanic.blueprints import Blueprint | ||||
|  | ||||
|  | ||||
| class HTTPMethodView: | ||||
| @@ -40,12 +57,37 @@ class HTTPMethodView: | ||||
|  | ||||
|     decorators: List[Callable[[Callable[..., Any]], Callable[..., Any]]] = [] | ||||
|  | ||||
|     def __init_subclass__( | ||||
|         cls, | ||||
|         attach: Optional[Union[Sanic, Blueprint]] = None, | ||||
|         uri: str = "", | ||||
|         methods: Iterable[str] = frozenset({"GET"}), | ||||
|         host: Optional[str] = None, | ||||
|         strict_slashes: Optional[bool] = None, | ||||
|         version: Optional[int] = None, | ||||
|         name: Optional[str] = None, | ||||
|         stream: bool = False, | ||||
|         version_prefix: str = "/v", | ||||
|     ) -> None: | ||||
|         if attach: | ||||
|             cls.attach( | ||||
|                 attach, | ||||
|                 uri=uri, | ||||
|                 methods=methods, | ||||
|                 host=host, | ||||
|                 strict_slashes=strict_slashes, | ||||
|                 version=version, | ||||
|                 name=name, | ||||
|                 stream=stream, | ||||
|                 version_prefix=version_prefix, | ||||
|             ) | ||||
|  | ||||
|     def dispatch_request(self, request, *args, **kwargs): | ||||
|         handler = getattr(self, request.method.lower(), None) | ||||
|         return handler(request, *args, **kwargs) | ||||
|  | ||||
|     @classmethod | ||||
|     def as_view(cls, *class_args, **class_kwargs): | ||||
|     def as_view(cls, *class_args: Any, **class_kwargs: Any) -> RouteHandler: | ||||
|         """Return view function for use with the routing system, that | ||||
|         dispatches request to appropriate handler method. | ||||
|         """ | ||||
| @@ -59,12 +101,37 @@ class HTTPMethodView: | ||||
|             for decorator in cls.decorators: | ||||
|                 view = decorator(view) | ||||
|  | ||||
|         view.view_class = cls | ||||
|         view.view_class = cls  # type: ignore | ||||
|         view.__doc__ = cls.__doc__ | ||||
|         view.__module__ = cls.__module__ | ||||
|         view.__name__ = cls.__name__ | ||||
|         return view | ||||
|  | ||||
|     @classmethod | ||||
|     def attach( | ||||
|         cls, | ||||
|         to: Union[Sanic, Blueprint], | ||||
|         uri: str, | ||||
|         methods: Iterable[str] = frozenset({"GET"}), | ||||
|         host: Optional[str] = None, | ||||
|         strict_slashes: Optional[bool] = None, | ||||
|         version: Optional[int] = None, | ||||
|         name: Optional[str] = None, | ||||
|         stream: bool = False, | ||||
|         version_prefix: str = "/v", | ||||
|     ) -> None: | ||||
|         to.add_route( | ||||
|             cls.as_view(), | ||||
|             uri=uri, | ||||
|             methods=methods, | ||||
|             host=host, | ||||
|             strict_slashes=strict_slashes, | ||||
|             version=version, | ||||
|             name=name, | ||||
|             stream=stream, | ||||
|             version_prefix=version_prefix, | ||||
|         ) | ||||
|  | ||||
|  | ||||
| def stream(func): | ||||
|     func.is_stream = True | ||||
| @@ -91,6 +158,11 @@ class CompositionView: | ||||
|     def __init__(self): | ||||
|         self.handlers = {} | ||||
|         self.name = self.__class__.__name__ | ||||
|         warn( | ||||
|             "CompositionView has been deprecated and will be removed in " | ||||
|             "v21.12. Please update your view to HTTPMethodView.", | ||||
|             DeprecationWarning, | ||||
|         ) | ||||
|  | ||||
|     def __name__(self): | ||||
|         return self.name | ||||
|   | ||||
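Taken together, the two additions above give HTTPMethodView two equivalent registration paths: calling the `attach` classmethod explicitly, or passing the same arguments as class keywords so the view registers itself at definition time. A minimal sketch using the new keyword form (app and route names are illustrative):

    from sanic import Sanic, text
    from sanic.views import HTTPMethodView

    app = Sanic("ViewsExample")

    # Equivalent to calling ItemView.attach(app, uri="/item") after the
    # class body; methods defaults to {"GET"}.
    class ItemView(HTTPMethodView, attach=app, uri="/item"):
        async def get(self, request):
            return text("ok")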
| @@ -1,184 +0,0 @@ | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Awaitable, | ||||
|     Callable, | ||||
|     Dict, | ||||
|     List, | ||||
|     MutableMapping, | ||||
|     Optional, | ||||
|     Union, | ||||
| ) | ||||
|  | ||||
| from httptools import HttpParserUpgrade  # type: ignore | ||||
| from websockets import (  # type: ignore | ||||
|     ConnectionClosed, | ||||
|     InvalidHandshake, | ||||
|     WebSocketCommonProtocol, | ||||
|     handshake, | ||||
| ) | ||||
|  | ||||
| from sanic.exceptions import InvalidUsage | ||||
| from sanic.server import HttpProtocol | ||||
|  | ||||
|  | ||||
| __all__ = ["ConnectionClosed", "WebSocketProtocol", "WebSocketConnection"] | ||||
|  | ||||
| ASIMessage = MutableMapping[str, Any] | ||||
|  | ||||
|  | ||||
| class WebSocketProtocol(HttpProtocol): | ||||
|     def __init__( | ||||
|         self, | ||||
|         *args, | ||||
|         websocket_timeout=10, | ||||
|         websocket_max_size=None, | ||||
|         websocket_max_queue=None, | ||||
|         websocket_read_limit=2 ** 16, | ||||
|         websocket_write_limit=2 ** 16, | ||||
|         websocket_ping_interval=20, | ||||
|         websocket_ping_timeout=20, | ||||
|         **kwargs | ||||
|     ): | ||||
|         super().__init__(*args, **kwargs) | ||||
|         self.websocket = None | ||||
|         # self.app = None | ||||
|         self.websocket_timeout = websocket_timeout | ||||
|         self.websocket_max_size = websocket_max_size | ||||
|         self.websocket_max_queue = websocket_max_queue | ||||
|         self.websocket_read_limit = websocket_read_limit | ||||
|         self.websocket_write_limit = websocket_write_limit | ||||
|         self.websocket_ping_interval = websocket_ping_interval | ||||
|         self.websocket_ping_timeout = websocket_ping_timeout | ||||
|  | ||||
|     # timeouts make no sense for websocket routes | ||||
|     def request_timeout_callback(self): | ||||
|         if self.websocket is None: | ||||
|             super().request_timeout_callback() | ||||
|  | ||||
|     def response_timeout_callback(self): | ||||
|         if self.websocket is None: | ||||
|             super().response_timeout_callback() | ||||
|  | ||||
|     def keep_alive_timeout_callback(self): | ||||
|         if self.websocket is None: | ||||
|             super().keep_alive_timeout_callback() | ||||
|  | ||||
|     def connection_lost(self, exc): | ||||
|         if self.websocket is not None: | ||||
|             self.websocket.connection_lost(exc) | ||||
|         super().connection_lost(exc) | ||||
|  | ||||
|     def data_received(self, data): | ||||
|         if self.websocket is not None: | ||||
|             # pass the data to the websocket protocol | ||||
|             self.websocket.data_received(data) | ||||
|         else: | ||||
|             try: | ||||
|                 super().data_received(data) | ||||
|             except HttpParserUpgrade: | ||||
|                 # this is okay, it just indicates we've got an upgrade request | ||||
|                 pass | ||||
|  | ||||
|     def write_response(self, response): | ||||
|         if self.websocket is not None: | ||||
|             # websocket requests do not write a response | ||||
|             self.transport.close() | ||||
|         else: | ||||
|             super().write_response(response) | ||||
|  | ||||
|     async def websocket_handshake(self, request, subprotocols=None): | ||||
|         # let the websockets package do the handshake with the client | ||||
|         headers = {} | ||||
|  | ||||
|         try: | ||||
|             key = handshake.check_request(request.headers) | ||||
|             handshake.build_response(headers, key) | ||||
|         except InvalidHandshake: | ||||
|             raise InvalidUsage("Invalid websocket request") | ||||
|  | ||||
|         subprotocol = None | ||||
|         if subprotocols and "Sec-Websocket-Protocol" in request.headers: | ||||
|             # select a subprotocol | ||||
|             client_subprotocols = [ | ||||
|                 p.strip() | ||||
|                 for p in request.headers["Sec-Websocket-Protocol"].split(",") | ||||
|             ] | ||||
|             for p in client_subprotocols: | ||||
|                 if p in subprotocols: | ||||
|                     subprotocol = p | ||||
|                     headers["Sec-Websocket-Protocol"] = subprotocol | ||||
|                     break | ||||
|  | ||||
|         # write the 101 response back to the client | ||||
|         rv = b"HTTP/1.1 101 Switching Protocols\r\n" | ||||
|         for k, v in headers.items(): | ||||
|             rv += k.encode("utf-8") + b": " + v.encode("utf-8") + b"\r\n" | ||||
|         rv += b"\r\n" | ||||
|         request.transport.write(rv) | ||||
|  | ||||
|         # hook up the websocket protocol | ||||
|         self.websocket = WebSocketCommonProtocol( | ||||
|             close_timeout=self.websocket_timeout, | ||||
|             max_size=self.websocket_max_size, | ||||
|             max_queue=self.websocket_max_queue, | ||||
|             read_limit=self.websocket_read_limit, | ||||
|             write_limit=self.websocket_write_limit, | ||||
|             ping_interval=self.websocket_ping_interval, | ||||
|             ping_timeout=self.websocket_ping_timeout, | ||||
|         ) | ||||
|         # Following two lines are required for websockets 8.x | ||||
|         self.websocket.is_client = False | ||||
|         self.websocket.side = "server" | ||||
|         self.websocket.subprotocol = subprotocol | ||||
|         self.websocket.connection_made(request.transport) | ||||
|         self.websocket.connection_open() | ||||
|         return self.websocket | ||||
|  | ||||
|  | ||||
| class WebSocketConnection: | ||||
|  | ||||
|     # TODO | ||||
|     # - Implement ping/pong | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         send: Callable[[ASIMessage], Awaitable[None]], | ||||
|         receive: Callable[[], Awaitable[ASIMessage]], | ||||
|         subprotocols: Optional[List[str]] = None, | ||||
|     ) -> None: | ||||
|         self._send = send | ||||
|         self._receive = receive | ||||
|         self.subprotocols = subprotocols or [] | ||||
|  | ||||
|     async def send(self, data: Union[str, bytes], *args, **kwargs) -> None: | ||||
|         message: Dict[str, Union[str, bytes]] = {"type": "websocket.send"} | ||||
|  | ||||
|         if isinstance(data, bytes): | ||||
|             message.update({"bytes": data}) | ||||
|         else: | ||||
|             message.update({"text": str(data)}) | ||||
|  | ||||
|         await self._send(message) | ||||
|  | ||||
|     async def recv(self, *args, **kwargs) -> Optional[str]: | ||||
|         message = await self._receive() | ||||
|  | ||||
|         if message["type"] == "websocket.receive": | ||||
|             return message["text"] | ||||
|         elif message["type"] == "websocket.disconnect": | ||||
|             pass | ||||
|  | ||||
|         return None | ||||
|  | ||||
|     receive = recv | ||||
|  | ||||
|     async def accept(self) -> None: | ||||
|         await self._send( | ||||
|             { | ||||
|                 "type": "websocket.accept", | ||||
|                 "subprotocol": ",".join(list(self.subprotocols)), | ||||
|             } | ||||
|         ) | ||||
|  | ||||
|     async def close(self) -> None: | ||||
|         pass | ||||
| @@ -8,8 +8,8 @@ import traceback | ||||
| from gunicorn.workers import base  # type: ignore | ||||
|  | ||||
| from sanic.log import logger | ||||
| from sanic.server import HttpProtocol, Signal, serve, trigger_events | ||||
| from sanic.websocket import WebSocketProtocol | ||||
| from sanic.server import HttpProtocol, Signal, serve | ||||
| from sanic.server.protocols.websocket_protocol import WebSocketProtocol | ||||
|  | ||||
|  | ||||
| try: | ||||
| @@ -68,10 +68,10 @@ class GunicornWorker(base.Worker): | ||||
|         ) | ||||
|         self._server_settings["signal"] = self.signal | ||||
|         self._server_settings.pop("sock") | ||||
|         trigger_events( | ||||
|             self._server_settings.get("before_start", []), self.loop | ||||
|         self._await(self.app.callable._startup()) | ||||
|         self._await( | ||||
|             self.app.callable._server_event("init", "before", loop=self.loop) | ||||
|         ) | ||||
|         self._server_settings["before_start"] = () | ||||
|  | ||||
|         main_start = self._server_settings.pop("main_start", None) | ||||
|         main_stop = self._server_settings.pop("main_stop", None) | ||||
| @@ -82,24 +82,29 @@ class GunicornWorker(base.Worker): | ||||
|                 "with GunicornWorker" | ||||
|             ) | ||||
|  | ||||
|         self._runner = asyncio.ensure_future(self._run(), loop=self.loop) | ||||
|         try: | ||||
|             self.loop.run_until_complete(self._runner) | ||||
|             self._await(self._run()) | ||||
|             self.app.callable.is_running = True | ||||
|             trigger_events( | ||||
|                 self._server_settings.get("after_start", []), self.loop | ||||
|             self._await( | ||||
|                 self.app.callable._server_event( | ||||
|                     "init", "after", loop=self.loop | ||||
|                 ) | ||||
|             ) | ||||
|             self.loop.run_until_complete(self._check_alive()) | ||||
|             trigger_events( | ||||
|                 self._server_settings.get("before_stop", []), self.loop | ||||
|             self._await( | ||||
|                 self.app.callable._server_event( | ||||
|                     "shutdown", "before", loop=self.loop | ||||
|                 ) | ||||
|             ) | ||||
|             self.loop.run_until_complete(self.close()) | ||||
|         except BaseException: | ||||
|             traceback.print_exc() | ||||
|         finally: | ||||
|             try: | ||||
|                 trigger_events( | ||||
|                     self._server_settings.get("after_stop", []), self.loop | ||||
|                 self._await( | ||||
|                     self.app.callable._server_event( | ||||
|                         "shutdown", "after", loop=self.loop | ||||
|                     ) | ||||
|                 ) | ||||
|             except BaseException: | ||||
|                 traceback.print_exc() | ||||
| @@ -137,14 +142,11 @@ class GunicornWorker(base.Worker): | ||||
|  | ||||
|             # Force close non-idle connection after waiting for | ||||
|             # graceful_shutdown_timeout | ||||
|             coros = [] | ||||
|             for conn in self.connections: | ||||
|                 if hasattr(conn, "websocket") and conn.websocket: | ||||
|                     coros.append(conn.websocket.close_connection()) | ||||
|                     conn.websocket.fail_connection(code=1001) | ||||
|                 else: | ||||
|                     conn.close() | ||||
|             _shutdown = asyncio.gather(*coros, loop=self.loop) | ||||
|             await _shutdown | ||||
|                     conn.abort() | ||||
|  | ||||
|     async def _run(self): | ||||
|         for sock in self.sockets: | ||||
| @@ -238,3 +240,7 @@ class GunicornWorker(base.Worker): | ||||
|         self.exit_code = 1 | ||||
|         self.cfg.worker_abort(self) | ||||
|         sys.exit(1) | ||||
|  | ||||
|     def _await(self, coro): | ||||
|         fut = asyncio.ensure_future(coro, loop=self.loop) | ||||
|         self.loop.run_until_complete(fut) | ||||
|   | ||||
							
								
								
									
35 setup.py
							| @@ -81,60 +81,63 @@ env_dependency = ( | ||||
| ) | ||||
| ujson = "ujson>=1.35" + env_dependency | ||||
| uvloop = "uvloop>=0.5.3" + env_dependency | ||||
|  | ||||
| types_ujson = "types-ujson" + env_dependency | ||||
| requirements = [ | ||||
|     "sanic-routing>=0.6.0", | ||||
|     "sanic-routing~=0.7", | ||||
|     "httptools>=0.0.10", | ||||
|     uvloop, | ||||
|     ujson, | ||||
|     "aiofiles>=0.6.0", | ||||
|     "websockets>=8.1,<9.0", | ||||
|     "websockets>=10.0", | ||||
|     "multidict>=5.0,<6.0", | ||||
| ] | ||||
|  | ||||
| tests_require = [ | ||||
|     "sanic-testing", | ||||
|     "sanic-testing>=0.7.0", | ||||
|     "pytest==5.2.1", | ||||
|     "multidict>=5.0,<6.0", | ||||
|     "coverage==5.3", | ||||
|     "gunicorn==20.0.4", | ||||
|     "pytest-cov", | ||||
|     "beautifulsoup4", | ||||
|     uvloop, | ||||
|     ujson, | ||||
|     "pytest-sanic", | ||||
|     "pytest-sugar", | ||||
|     "pytest-benchmark", | ||||
|     "chardet==3.*", | ||||
|     "flake8", | ||||
|     "black", | ||||
|     "isort>=5.0.0", | ||||
|     "bandit", | ||||
|     "mypy>=0.901", | ||||
|     "docutils", | ||||
|     "pygments", | ||||
|     "uvicorn<0.15.0", | ||||
|     types_ujson, | ||||
| ] | ||||
|  | ||||
| docs_require = [ | ||||
|     "sphinx>=2.1.2", | ||||
|     "sphinx_rtd_theme", | ||||
|     "recommonmark>=0.5.0", | ||||
|     "sphinx_rtd_theme>=0.4.3", | ||||
|     "docutils", | ||||
|     "pygments", | ||||
|     "m2r2", | ||||
| ] | ||||
|  | ||||
| dev_require = tests_require + [ | ||||
|     "aiofiles", | ||||
|     "tox", | ||||
|     "black", | ||||
|     "flake8", | ||||
|     "bandit", | ||||
|     "towncrier", | ||||
| ] | ||||
|  | ||||
| all_require = dev_require + docs_require | ||||
| all_require = list(set(dev_require + docs_require)) | ||||
|  | ||||
| if strtobool(os.environ.get("SANIC_NO_UJSON", "no")): | ||||
|     print("Installing without uJSON") | ||||
|     requirements.remove(ujson) | ||||
|     tests_require.remove(ujson) | ||||
|     tests_require.remove(types_ujson) | ||||
|  | ||||
| # 'nt' means windows OS | ||||
| if strtobool(os.environ.get("SANIC_NO_UVLOOP", "no")): | ||||
|     print("Installing without uvLoop") | ||||
|     requirements.remove(uvloop) | ||||
|     tests_require.remove(uvloop) | ||||
|  | ||||
| extras_require = { | ||||
|     "test": tests_require, | ||||
|   | ||||
| @@ -1,3 +1,5 @@ | ||||
| import asyncio | ||||
| import logging | ||||
| import random | ||||
| import re | ||||
| import string | ||||
| @@ -9,12 +11,15 @@ from typing import Tuple | ||||
| import pytest | ||||
|  | ||||
| from sanic_routing.exceptions import RouteExists | ||||
| from sanic_testing.testing import PORT | ||||
|  | ||||
| from sanic import Sanic | ||||
| from sanic.constants import HTTP_METHODS | ||||
| from sanic.router import Router | ||||
| from sanic.touchup.service import TouchUp | ||||
|  | ||||
|  | ||||
| slugify = re.compile(r"[^a-zA-Z0-9_\-]") | ||||
| random.seed("Pack my box with five dozen liquor jugs.") | ||||
| Sanic.test_mode = True | ||||
|  | ||||
| @@ -22,11 +27,6 @@ if sys.platform in ["win32", "cygwin"]: | ||||
|     collect_ignore = ["test_worker.py"] | ||||
|  | ||||
|  | ||||
| @pytest.fixture | ||||
| def caplog(caplog): | ||||
|     yield caplog | ||||
|  | ||||
|  | ||||
| async def _handler(request): | ||||
|     """ | ||||
|     Dummy placeholder method used for route resolver when creating a new | ||||
| @@ -40,33 +40,32 @@ async def _handler(request): | ||||
|  | ||||
|  | ||||
| TYPE_TO_GENERATOR_MAP = { | ||||
|     "string": lambda: "".join( | ||||
|     "str": lambda: "".join( | ||||
|         [random.choice(string.ascii_lowercase) for _ in range(4)] | ||||
|     ), | ||||
|     "int": lambda: random.choice(range(1000000)), | ||||
|     "number": lambda: random.random(), | ||||
|     "float": lambda: random.random(), | ||||
|     "alpha": lambda: "".join( | ||||
|         [random.choice(string.ascii_lowercase) for _ in range(4)] | ||||
|     ), | ||||
|     "uuid": lambda: str(uuid.uuid1()), | ||||
| } | ||||
|  | ||||
| CACHE = {} | ||||
|  | ||||
|  | ||||
| class RouteStringGenerator: | ||||
|  | ||||
|     ROUTE_COUNT_PER_DEPTH = 100 | ||||
|     HTTP_METHODS = HTTP_METHODS | ||||
|     ROUTE_PARAM_TYPES = ["string", "int", "number", "alpha", "uuid"] | ||||
|     ROUTE_PARAM_TYPES = ["str", "int", "float", "alpha", "uuid"] | ||||
|  | ||||
|     def generate_random_direct_route(self, max_route_depth=4): | ||||
|         routes = [] | ||||
|         for depth in range(1, max_route_depth + 1): | ||||
|             for _ in range(self.ROUTE_COUNT_PER_DEPTH): | ||||
|                 route = "/".join( | ||||
|                     [ | ||||
|                         TYPE_TO_GENERATOR_MAP.get("string")() | ||||
|                         for _ in range(depth) | ||||
|                     ] | ||||
|                     [TYPE_TO_GENERATOR_MAP.get("str")() for _ in range(depth)] | ||||
|                 ) | ||||
|                 route = route.replace(".", "", -1) | ||||
|                 route_detail = (random.choice(self.HTTP_METHODS), route) | ||||
| @@ -82,7 +81,7 @@ class RouteStringGenerator: | ||||
|             new_route_part = "/".join( | ||||
|                 [ | ||||
|                     "<{}:{}>".format( | ||||
|                         TYPE_TO_GENERATOR_MAP.get("string")(), | ||||
|                         TYPE_TO_GENERATOR_MAP.get("str")(), | ||||
|                         random.choice(self.ROUTE_PARAM_TYPES), | ||||
|                     ) | ||||
|                     for _ in range(max_route_depth - current_length) | ||||
| @@ -97,7 +96,7 @@ class RouteStringGenerator: | ||||
|     def generate_url_for_template(template): | ||||
|         url = template | ||||
|         for pattern, param_type in re.findall( | ||||
|             re.compile(r"((?:<\w+:(string|int|number|alpha|uuid)>)+)"), | ||||
|             re.compile(r"((?:<\w+:(str|int|float|alpha|uuid)>)+)"), | ||||
|             template, | ||||
|         ): | ||||
|             value = TYPE_TO_GENERATOR_MAP.get(param_type)() | ||||
| @@ -110,6 +109,7 @@ def sanic_router(app): | ||||
|     # noinspection PyProtectedMember | ||||
|     def _setup(route_details: tuple) -> Tuple[Router, tuple]: | ||||
|         router = Router() | ||||
|         router.ctx.app = app | ||||
|         added_router = [] | ||||
|         for method, route in route_details: | ||||
|             try: | ||||
| @@ -140,5 +140,33 @@ def url_param_generator(): | ||||
|  | ||||
| @pytest.fixture(scope="function") | ||||
| def app(request): | ||||
|     app = Sanic(request.node.name) | ||||
|     return app | ||||
|     if not CACHE: | ||||
|         for target, method_name in TouchUp._registry: | ||||
|             CACHE[method_name] = getattr(target, method_name) | ||||
|     app = Sanic(slugify.sub("-", request.node.name)) | ||||
|     yield app | ||||
|     for target, method_name in TouchUp._registry: | ||||
|         setattr(target, method_name, CACHE[method_name]) | ||||
|  | ||||
|  | ||||
| @pytest.fixture(scope="function") | ||||
| def run_startup(caplog): | ||||
|     def run(app): | ||||
|         nonlocal caplog | ||||
|         loop = asyncio.new_event_loop() | ||||
|         asyncio.set_event_loop(loop) | ||||
|         with caplog.at_level(logging.DEBUG): | ||||
|             server = app.create_server( | ||||
|                 debug=True, return_asyncio_server=True, port=PORT | ||||
|             ) | ||||
|             loop._stopping = False | ||||
|  | ||||
|             _server = loop.run_until_complete(server) | ||||
|  | ||||
|             _server.close() | ||||
|             loop.run_until_complete(_server.wait_closed()) | ||||
|             app.stop() | ||||
|  | ||||
|         return caplog.record_tuples | ||||
|  | ||||
|     return run | ||||
|   | ||||
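A hedged example of how the new run_startup fixture above could be used, checking for the "Disabling event" DEBUG line that OptionalDispatchEvent emits during startup (test name and assertion string are illustrative; record_tuples are (logger, level, message) triples):

    def test_ode_logs_disabled_events(app, run_startup):
        @app.get("/")
        async def handler(request):
            ...

        records = run_startup(app)
        assert any("Disabling event" in message for _, _, message in records)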
							
								
								
									
36 tests/fake/server.py Normal file
							| @@ -0,0 +1,36 @@ | ||||
| import json | ||||
| import logging | ||||
|  | ||||
| from sanic import Sanic, text | ||||
| from sanic.log import LOGGING_CONFIG_DEFAULTS, logger | ||||
|  | ||||
|  | ||||
| LOGGING_CONFIG = {**LOGGING_CONFIG_DEFAULTS} | ||||
| LOGGING_CONFIG["formatters"]["generic"]["format"] = "%(message)s" | ||||
| LOGGING_CONFIG["loggers"]["sanic.root"]["level"] = "DEBUG" | ||||
|  | ||||
| app = Sanic(__name__, log_config=LOGGING_CONFIG) | ||||
|  | ||||
|  | ||||
| @app.get("/") | ||||
| async def handler(request): | ||||
|     return text(request.ip) | ||||
|  | ||||
|  | ||||
| @app.before_server_start | ||||
| async def app_info_dump(app: Sanic, _): | ||||
|     app_data = { | ||||
|         "access_log": app.config.ACCESS_LOG, | ||||
|         "auto_reload": app.auto_reload, | ||||
|         "debug": app.debug, | ||||
|     } | ||||
|     logger.info(json.dumps(app_data)) | ||||
|  | ||||
|  | ||||
| @app.after_server_start | ||||
| async def shutdown(app: Sanic, _): | ||||
|     app.stop() | ||||
|  | ||||
|  | ||||
| def create_app(): | ||||
|     return app | ||||
| @@ -9,6 +9,7 @@ from unittest.mock import Mock, patch | ||||
| import pytest | ||||
|  | ||||
| from sanic import Sanic | ||||
| from sanic.config import Config | ||||
| from sanic.exceptions import SanicException | ||||
| from sanic.response import text | ||||
|  | ||||
| @@ -177,9 +178,6 @@ def test_app_enable_websocket(app, websocket_enabled, enable): | ||||
| @patch("sanic.app.WebSocketProtocol") | ||||
| def test_app_websocket_parameters(websocket_protocol_mock, app): | ||||
|     app.config.WEBSOCKET_MAX_SIZE = 44 | ||||
|     app.config.WEBSOCKET_MAX_QUEUE = 45 | ||||
|     app.config.WEBSOCKET_READ_LIMIT = 46 | ||||
|     app.config.WEBSOCKET_WRITE_LIMIT = 47 | ||||
|     app.config.WEBSOCKET_PING_TIMEOUT = 48 | ||||
|     app.config.WEBSOCKET_PING_INTERVAL = 50 | ||||
|  | ||||
| @@ -196,11 +194,6 @@ def test_app_websocket_parameters(websocket_protocol_mock, app): | ||||
|     websocket_protocol_call_args = websocket_protocol_mock.call_args | ||||
|     ws_kwargs = websocket_protocol_call_args[1] | ||||
|     assert ws_kwargs["websocket_max_size"] == app.config.WEBSOCKET_MAX_SIZE | ||||
|     assert ws_kwargs["websocket_max_queue"] == app.config.WEBSOCKET_MAX_QUEUE | ||||
|     assert ws_kwargs["websocket_read_limit"] == app.config.WEBSOCKET_READ_LIMIT | ||||
|     assert ( | ||||
|         ws_kwargs["websocket_write_limit"] == app.config.WEBSOCKET_WRITE_LIMIT | ||||
|     ) | ||||
|     assert ( | ||||
|         ws_kwargs["websocket_ping_timeout"] | ||||
|         == app.config.WEBSOCKET_PING_TIMEOUT | ||||
| @@ -276,7 +269,7 @@ def test_handle_request_with_nested_sanic_exception(app, monkeypatch, caplog): | ||||
|     assert response.status == 500 | ||||
|     assert "Mock SanicException" in response.text | ||||
|     assert ( | ||||
|         "sanic.root", | ||||
|         "sanic.error", | ||||
|         logging.ERROR, | ||||
|         f"Exception occurred while handling uri: 'http://127.0.0.1:{port}/'", | ||||
|     ) in caplog.record_tuples | ||||
| @@ -389,13 +382,13 @@ def test_app_no_registry_env(): | ||||
|  | ||||
|  | ||||
| def test_app_set_attribute_warning(app): | ||||
|     with pytest.warns(UserWarning) as record: | ||||
|     with pytest.warns(DeprecationWarning) as record: | ||||
|         app.foo = 1 | ||||
|  | ||||
|     assert len(record) == 1 | ||||
|     assert record[0].message.args[0] == ( | ||||
|         "Setting variables on Sanic instances is deprecated " | ||||
|         "and will be removed in version 21.9. You should change your " | ||||
|         "and will be removed in version 21.12. You should change your " | ||||
|         "Sanic instance to use instance.ctx.foo instead." | ||||
|     ) | ||||
|  | ||||
| @@ -412,3 +405,42 @@ def test_subclass_initialisation(): | ||||
|         pass | ||||
|  | ||||
|     CustomSanic("test_subclass_initialisation") | ||||
|  | ||||
|  | ||||
| def test_bad_custom_config(): | ||||
|     with pytest.raises( | ||||
|         SanicException, | ||||
|         match=( | ||||
|             "When instantiating Sanic with config, you cannot also pass " | ||||
|             "load_env or env_prefix" | ||||
|         ), | ||||
|     ): | ||||
|         Sanic("test", config=1, load_env=1) | ||||
|     with pytest.raises( | ||||
|         SanicException, | ||||
|         match=( | ||||
|             "When instantiating Sanic with config, you cannot also pass " | ||||
|             "load_env or env_prefix" | ||||
|         ), | ||||
|     ): | ||||
|         Sanic("test", config=1, env_prefix=1) | ||||
|  | ||||
|  | ||||
| def test_custom_config(): | ||||
|     class CustomConfig(Config): | ||||
|         ... | ||||
|  | ||||
|     config = CustomConfig() | ||||
|     app = Sanic("custom", config=config) | ||||
|  | ||||
|     assert app.config == config | ||||
|  | ||||
|  | ||||
| def test_custom_context(): | ||||
|     class CustomContext: | ||||
|         ... | ||||
|  | ||||
|     ctx = CustomContext() | ||||
|     app = Sanic("custom", ctx=ctx) | ||||
|  | ||||
|     assert app.ctx == ctx | ||||
|   | ||||
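For completeness, the two constructor options exercised above can presumably be combined in application code; a small sketch mirroring the tests (names illustrative):

    from types import SimpleNamespace

    from sanic import Sanic
    from sanic.config import Config

    class AppConfig(Config):
        ...

    app = Sanic("CustomApp", config=AppConfig(), ctx=SimpleNamespace(db=None))

    assert isinstance(app.config, AppConfig)
    assert app.ctx.db is None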
| @@ -1,5 +1,4 @@ | ||||
| import asyncio | ||||
| import sys | ||||
|  | ||||
| from collections import deque, namedtuple | ||||
|  | ||||
| @@ -8,10 +7,10 @@ import uvicorn | ||||
|  | ||||
| from sanic import Sanic | ||||
| from sanic.asgi import MockTransport | ||||
| from sanic.exceptions import InvalidUsage | ||||
| from sanic.exceptions import Forbidden, InvalidUsage, ServiceUnavailable | ||||
| from sanic.request import Request | ||||
| from sanic.response import json, text | ||||
| from sanic.websocket import WebSocketConnection | ||||
| from sanic.server.websockets.connection import WebSocketConnection | ||||
|  | ||||
|  | ||||
| @pytest.fixture | ||||
| @@ -219,7 +218,7 @@ async def test_websocket_accept_with_no_subprotocols( | ||||
|  | ||||
|     message = message_stack.popleft() | ||||
|     assert message["type"] == "websocket.accept" | ||||
|     assert message["subprotocol"] == "" | ||||
|     assert message["subprotocol"] is None | ||||
|     assert "bytes" not in message | ||||
|  | ||||
|  | ||||
| @@ -228,7 +227,7 @@ async def test_websocket_accept_with_subprotocol(send, receive, message_stack): | ||||
|     subprotocols = ["graphql-ws"] | ||||
|  | ||||
|     ws = WebSocketConnection(send, receive, subprotocols) | ||||
|     await ws.accept() | ||||
|     await ws.accept(subprotocols) | ||||
|  | ||||
|     assert len(message_stack) == 1 | ||||
|  | ||||
| @@ -245,13 +244,13 @@ async def test_websocket_accept_with_multiple_subprotocols( | ||||
|     subprotocols = ["graphql-ws", "hello", "world"] | ||||
|  | ||||
|     ws = WebSocketConnection(send, receive, subprotocols) | ||||
|     await ws.accept() | ||||
|     await ws.accept(["hello", "world"]) | ||||
|  | ||||
|     assert len(message_stack) == 1 | ||||
|  | ||||
|     message = message_stack.popleft() | ||||
|     assert message["type"] == "websocket.accept" | ||||
|     assert message["subprotocol"] == "graphql-ws,hello,world" | ||||
|     assert message["subprotocol"] == "hello" | ||||
|     assert "bytes" not in message | ||||
|  | ||||
|  | ||||
| @@ -347,3 +346,33 @@ async def test_content_type(app): | ||||
|  | ||||
|     _, response = await app.asgi_client.get("/custom") | ||||
|     assert response.headers.get("content-type") == "somethingelse" | ||||
|  | ||||
|  | ||||
| @pytest.mark.asyncio | ||||
| async def test_request_handle_exception(app): | ||||
|     @app.get("/error-prone") | ||||
|     def _request(request): | ||||
|         raise ServiceUnavailable(message="Service unavailable") | ||||
|  | ||||
|     _, response = await app.asgi_client.get("/wrong-path") | ||||
|     assert response.status_code == 404 | ||||
|  | ||||
|     _, response = await app.asgi_client.get("/error-prone") | ||||
|     assert response.status_code == 503 | ||||
|  | ||||
|  | ||||
| @pytest.mark.asyncio | ||||
| async def test_request_exception_suppressed_by_middleware(app): | ||||
|     @app.get("/error-prone") | ||||
|     def _request(request): | ||||
|         raise ServiceUnavailable(message="Service unavailable") | ||||
|  | ||||
|     @app.on_request | ||||
|     def forbidden(request): | ||||
|         raise Forbidden(message="forbidden") | ||||
|  | ||||
|     _, response = await app.asgi_client.get("/wrong-path") | ||||
|     assert response.status_code == 403 | ||||
|  | ||||
|     _, response = await app.asgi_client.get("/error-prone") | ||||
|     assert response.status_code == 403 | ||||
|   | ||||
Some files were not shown because too many files have changed in this diff.