Mirror of https://github.com/google/brotli.git (synced 2026-02-04 13:44:57 +00:00)

Compare commits: dev/null...test_83089 (29 commits)
| SHA1 |
|---|
| 01c3f685f3 |
| 61af0e5b94 |
| a0d2679607 |
| 52ad34cea4 |
| 7c77ca0b18 |
| f81d6bc7f0 |
| fa925d0c15 |
| 8e4d912826 |
| 2138ac6153 |
| ea5b5c10dd |
| 8a9ab54e2e |
| c83197f8eb |
| 48152367b3 |
| fa141a189a |
| 464fe15603 |
| e4bc10a000 |
| cedd986cf2 |
| 7f0d259e54 |
| 0e8a06c0bd |
| 595a634fd7 |
| 808e2b99e6 |
| b54d27c0f1 |
| d00c29a783 |
| 781c2698ba |
| 1ed8c49aca |
| e230f474b8 |
| 028fb5a236 |
| 390de5b472 |
| 3499acbb7a |
68 .github/workflows/build_test.yml (vendored)
@@ -100,6 +100,16 @@ jobs:
cxx_compiler: g++
os: macos-latest

- name: cmake-ios:clang
build_system: cmake
c_compiler: clang
cxx_compiler: clang++
os: macos-latest
skip_tests: true # TODO(eustas): run tests in a simulator
cmake_args: >-
-DCMAKE_SYSTEM_NAME=iOS
-DCMAKE_OSX_ARCHITECTURES=arm64

- name: cmake-win64:msvc-rel
build_system: cmake
cmake_generator: Visual Studio 17 2022
@@ -123,12 +133,22 @@ jobs:
c_compiler: clang
cxx_compiler: clang++

- name: python3.10-win
- name: python3.14:clang
build_system: python
python_version: "3.10"
# TODO: investigate why win-builds can't run tests
py_setuptools_cmd: build_ext
os: windows-2022
python_version: "3.14"
c_compiler: clang
cxx_compiler: clang++

- name: python3.14t:clang
build_system: python
python_version: "3.14t"
c_compiler: clang
cxx_compiler: clang++

- name: python3.14-win
build_system: python
python_version: "3.14"
os: windows-latest

- name: maven
build_system: maven
@@ -198,7 +218,7 @@ jobs:
steps:

- name: Harden Runner
uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2
with:
egress-policy: audit

@@ -233,7 +253,9 @@ jobs:

cmake -B out . ${CMAKE_OPTIONS[*]} -DCMAKE_C_FLAGS='${{ matrix.c_flags || '' }}'
cmake --build out ${CMAKE_BUILD_OPTIONS[*]}
cd out; ctest ${CMAKE_TEST_OPTIONS[*]}; cd ..
cd out
[ ! -z '${{ matrix.skip_tests || '' }}' ] || ctest ${CMAKE_TEST_OPTIONS[*]}
cd ..

- name: Quick Fuzz
if: ${{ matrix.build_system == 'fuzz' }}
@@ -320,30 +342,15 @@ jobs:
if: ${{ matrix.build_system == 'python' }}
run: |
python -VV
python -c "import sys; sys.exit('Invalid python version') if '.'.join(map(str,sys.version_info[0:2])) != '${{ matrix.python_version }}' else True"
pip install setuptools==51.3.3
python setup.py ${{ matrix.py_setuptools_cmd || 'test'}}
pip install "setuptools>=70.0.0" pytest
python setup.py build_ext --inplace
pytest ./python/tests

build_test_py27:
name: Build and test with Python 2.7
build_test_dotnet:
name: Build and test with .NET
runs-on: ubuntu-latest
container:
image: ubuntu:22.04
steps:

- name: Harden Runner
uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
with:
egress-policy: audit

- name: Install deps
run: |
apt update
apt install -y curl gcc python2.7 python2.7-dev
curl https://bootstrap.pypa.io/pip/2.7/get-pip.py --output get-pip.py
python2.7 get-pip.py
python2.7 -m pip install distutils-pytest==0.1

- name: Checkout the source
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
@@ -352,6 +359,7 @@ jobs:

- name: Build / Test
run: |
python2.7 -VV
python2.7 -c "import sys; sys.exit('Invalid python version') if '.'.join(map(str,sys.version_info[0:2])) != '2.7' else True"
python2.7 setup.py test
cd csharp
dotnet build brotlidec.csproj --configuration Release
dotnet test brotlidec.Tests.csproj

4 .github/workflows/build_test_wasm.yml (vendored)
@@ -35,7 +35,7 @@ jobs:
steps:

- name: Harden Runner
uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2
with:
egress-policy: audit

@@ -45,7 +45,7 @@ jobs:
fetch-depth: 1

- name: Install node
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: ${{env.NODE_VERSION}}

12 .github/workflows/codeql.yml (vendored)
@@ -35,7 +35,7 @@ jobs:
steps:

- name: Harden Runner
uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2
with:
egress-policy: audit

@@ -44,7 +44,7 @@ jobs:

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v3.29.5
uses: github/codeql-action/init@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v3.29.5
with:
languages: ${{ matrix.language }}
# CodeQL is currently crashing on files with large lists:
@@ -56,7 +56,7 @@ jobs:

- if: matrix.language == 'cpp'
name: Build CPP
uses: github/codeql-action/autobuild@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v3.29.5
uses: github/codeql-action/autobuild@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v3.29.5

- if: matrix.language == 'cpp' || matrix.language == 'java'
name: Build Java
@@ -66,15 +66,17 @@ jobs:

- if: matrix.language == 'javascript'
name: Build JS
uses: github/codeql-action/autobuild@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v3.29.5
uses: github/codeql-action/autobuild@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v3.29.5

- if: matrix.language == 'cpp' || matrix.language == 'python'
name: Build Python
run: |
python -VV
pip install "setuptools>=70.0.0"
python setup.py build_ext

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v3.29.5
uses: github/codeql-action/analyze@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v3.29.5
with:
category: "/language:${{matrix.language}}"
ref: "${{ github.ref != 'master' && github.ref || '/refs/heads/master' }}"

2 .github/workflows/fuzz.yml (vendored)
@@ -22,7 +22,7 @@ jobs:
steps:

- name: Harden Runner
uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2
with:
egress-policy: audit

9 .github/workflows/lint.yml (vendored)
@@ -30,7 +30,7 @@ jobs:
steps:

- name: Harden Runner
uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2
with:
egress-policy: audit

@@ -40,11 +40,16 @@ jobs:
- name: Install tools
run: |
eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"
brew install buildifier typos-cli
brew install buildifier ruff typos-cli

- name: Check typos
run: |
eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"
./scripts/check_typos.sh

- name: Lint Python code
run: |
eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"
ruff check

# TODO(eustas): run buildifier

8 .github/workflows/release.yaml (vendored)
@@ -64,7 +64,7 @@ jobs:
steps:

- name: Harden Runner
uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2
with:
egress-policy: audit

@@ -145,7 +145,7 @@ jobs:
steps:

- name: Harden Runner
uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2
with:
egress-policy: audit

@@ -182,7 +182,7 @@ jobs:
fetch-depth: 1

- name: Download all artifacts
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
with:
path: release_assets
merge-multiple: true
@@ -203,7 +203,7 @@ jobs:
steps:

- name: Harden Runner
uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2
with:
egress-policy: audit

4 .github/workflows/scorecard.yml (vendored)
@@ -37,7 +37,7 @@ jobs:

steps:
- name: Harden Runner
uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2
with:
egress-policy: audit

@@ -77,6 +77,6 @@ jobs:

# Upload the results to GitHub's code scanning dashboard.
- name: "Upload to code-scanning"
uses: github/codeql-action/upload-sarif@17783bfb99b07f70fae080b654aed0c514057477 # v2.23.3
uses: github/codeql-action/upload-sarif@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v2.23.3
with:
sarif_file: results.sarif

@@ -7,7 +7,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## Unreleased

## [1.2.0] - 2025-10-xx
## [1.2.0] - 2025-10-27

### SECURITY
- python: added `Decompressor::can_accept_more_data` method and optional

@@ -180,6 +180,8 @@ endif()
if (BROTLI_BUILD_TOOLS)
add_executable(brotli c/tools/brotli.c)
target_link_libraries(brotli ${BROTLI_LIBRARIES})
# brotli is a CLI tool
set_target_properties(brotli PROPERTIES MACOSX_BUNDLE OFF)
endif()

# Installation
@@ -384,11 +386,11 @@ endif() # BROTLI_BUNDLED_MODE

if (BROTLI_BUILD_TOOLS)
install(FILES "docs/brotli.1"
DESTINATION "${CMAKE_INSTALL_FULL_MANDIR}/man1")
DESTINATION "${CMAKE_INSTALL_MANDIR}/man1")
endif()

install(FILES docs/constants.h.3 docs/decode.h.3 docs/encode.h.3 docs/types.h.3
DESTINATION "${CMAKE_INSTALL_FULL_MANDIR}/man3")
DESTINATION "${CMAKE_INSTALL_MANDIR}/man3")

if (ENABLE_COVERAGE STREQUAL "yes")
setup_target_for_coverage(coverage test coverage)

@@ -9,7 +9,6 @@ include c/include/brotli/*.h
include LICENSE
include MANIFEST.in
include python/_brotli.cc
include python/bro.py
include python/brotli.py
include python/README.md
include python/tests/*

@@ -213,6 +213,10 @@ OR:
#define BROTLI_TARGET_MIPS64
#endif

#if defined(__ia64__) || defined(_M_IA64)
#define BROTLI_TARGET_IA64
#endif

#if defined(BROTLI_TARGET_X64) || defined(BROTLI_TARGET_ARMV8_64) || \
defined(BROTLI_TARGET_POWERPC64) || defined(BROTLI_TARGET_RISCV64) || \
defined(BROTLI_TARGET_LOONGARCH64) || defined(BROTLI_TARGET_MIPS64)
@@ -665,13 +669,14 @@ BROTLI_UNUSED_FUNCTION void BrotliSuppressUnusedFunctions(void) {
#undef BROTLI_TEST
#endif

#if BROTLI_GNUC_HAS_ATTRIBUTE(model, 3, 0, 3)
#if !defined(BROTLI_MODEL) && BROTLI_GNUC_HAS_ATTRIBUTE(model, 3, 0, 3) && \
!defined(BROTLI_TARGET_IA64) && !defined(BROTLI_TARGET_LOONGARCH64)
#define BROTLI_MODEL(M) __attribute__((model(M)))
#else
#define BROTLI_MODEL(M) /* M */
#endif

#if BROTLI_GNUC_HAS_ATTRIBUTE(cold, 4, 3, 0)
#if !defined(BROTLI_COLD) && BROTLI_GNUC_HAS_ATTRIBUTE(cold, 4, 3, 0)
#define BROTLI_COLD __attribute__((cold))
#else
#define BROTLI_COLD /* cold */

20 csharp/brotlidec.Tests.csproj (Normal file)
@@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
<EnableDefaultCompileItems>false</EnableDefaultCompileItems>
</PropertyGroup>

<ItemGroup>
<!-- Both regular sources and test sources -->
<Compile Include="org\brotli\dec\*.cs" />
</ItemGroup>

<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="18.0.0" />
<!-- Stick to NUnit3 until tests are regenerated -->
<PackageReference Include="NUnit" Version="3.14.0" />
<PackageReference Include="NUnit3TestAdapter" Version="5.2.0" />
</ItemGroup>

</Project>

13 csharp/brotlidec.csproj (Normal file)
@@ -0,0 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
<EnableDefaultCompileItems>false</EnableDefaultCompileItems>
</PropertyGroup>

<ItemGroup>
<Compile Include="org\brotli\dec\*.cs" />
<Compile Remove="**\*Test.cs" />
</ItemGroup>

</Project>

@@ -1879,8 +1879,8 @@ func copyRawBytes(s *_State, data []int8, offset int32, length int32) int32 {
}
for len > 0 {
var chunkLen int32 = readInput(s, data, pos, len)
if len < -1 {
return len
if chunkLen < -1 {
return chunkLen
}
if chunkLen <= 0 {
return makeError(s, -16)

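The same one-line fix appears in the generated Go decoder above and in the Kotlin, JS, and TypeScript decoders further down: the loop must test the value returned by `readInput` (`chunkLen`), not the outer `len` counter, otherwise a negative error code from the reader is silently ignored. A minimal sketch of the corrected pattern, in Python with illustrative names (not part of the generated sources):

```python
def copy_raw_bytes(read_input, remaining: int) -> int:
    """Sketch: propagate the reader's error code instead of the stale counter."""
    while remaining > 0:
        chunk_len = read_input(remaining)  # values below -1 are decoder error codes
        if chunk_len < -1:
            return chunk_len  # fixed: previously the unchanged `remaining` was tested here
        if chunk_len <= 0:
            return -16        # truncated input
        remaining -= chunk_len
    return 0
```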
@@ -6,42 +6,73 @@

package org.brotli.dec;

/**
 * Possible errors from decoder.
 */
public class BrotliError {
/** Possible errors from decoder. */
public final class BrotliError {
/** Success; anything greater is also success. */
public static final int BROTLI_OK = 0;
/** Success; decoder has finished decompressing the input. */
public static final int BROTLI_OK_DONE = BROTLI_OK + 1;
/** Success; decoder has more output to produce. */
public static final int BROTLI_OK_NEED_MORE_OUTPUT = BROTLI_OK + 2;

// It is important that actual error codes are LESS than -1!
/** Error code threshold; actual error codes are LESS than -1! */
public static final int BROTLI_ERROR = -1;
/** Stream error: corrupted code length table. */
public static final int BROTLI_ERROR_CORRUPTED_CODE_LENGTH_TABLE = BROTLI_ERROR - 1;
/** Stream error: corrupted context map. */
public static final int BROTLI_ERROR_CORRUPTED_CONTEXT_MAP = BROTLI_ERROR - 2;
/** Stream error: corrupted Huffman code histogram. */
public static final int BROTLI_ERROR_CORRUPTED_HUFFMAN_CODE_HISTOGRAM = BROTLI_ERROR - 3;
/** Stream error: corrupted padding bits. */
public static final int BROTLI_ERROR_CORRUPTED_PADDING_BITS = BROTLI_ERROR - 4;
/** Stream error: corrupted reserved bit. */
public static final int BROTLI_ERROR_CORRUPTED_RESERVED_BIT = BROTLI_ERROR - 5;
/** Stream error: duplicate simple Huffman symbol. */
public static final int BROTLI_ERROR_DUPLICATE_SIMPLE_HUFFMAN_SYMBOL = BROTLI_ERROR - 6;
/** Stream error: exuberant nibble. */
public static final int BROTLI_ERROR_EXUBERANT_NIBBLE = BROTLI_ERROR - 7;
/** Stream error: invalid backward reference. */
public static final int BROTLI_ERROR_INVALID_BACKWARD_REFERENCE = BROTLI_ERROR - 8;
/** Stream error: invalid metablock length. */
public static final int BROTLI_ERROR_INVALID_METABLOCK_LENGTH = BROTLI_ERROR - 9;
/** Stream error: invalid window bits. */
public static final int BROTLI_ERROR_INVALID_WINDOW_BITS = BROTLI_ERROR - 10;
/** Stream error: negative distance. */
public static final int BROTLI_ERROR_NEGATIVE_DISTANCE = BROTLI_ERROR - 11;
/** Stream error: read after end of input buffer. */
public static final int BROTLI_ERROR_READ_AFTER_END = BROTLI_ERROR - 12;
/** IO error: read failed. */
public static final int BROTLI_ERROR_READ_FAILED = BROTLI_ERROR - 13;
/** IO error: symbol out of range. */
public static final int BROTLI_ERROR_SYMBOL_OUT_OF_RANGE = BROTLI_ERROR - 14;
/** Stream error: truncated input. */
public static final int BROTLI_ERROR_TRUNCATED_INPUT = BROTLI_ERROR - 15;
/** Stream error: unused bytes after end of stream. */
public static final int BROTLI_ERROR_UNUSED_BYTES_AFTER_END = BROTLI_ERROR - 16;
/** Stream error: unused Huffman space. */
public static final int BROTLI_ERROR_UNUSED_HUFFMAN_SPACE = BROTLI_ERROR - 17;

/** Exception code threshold. */
public static final int BROTLI_PANIC = -21;
/** Exception: stream is already closed. */
public static final int BROTLI_PANIC_ALREADY_CLOSED = BROTLI_PANIC - 1;
/** Exception: max distance is too small. */
public static final int BROTLI_PANIC_MAX_DISTANCE_TOO_SMALL = BROTLI_PANIC - 2;
/** Exception: state is not fresh. */
public static final int BROTLI_PANIC_STATE_NOT_FRESH = BROTLI_PANIC - 3;
/** Exception: state is not initialized. */
public static final int BROTLI_PANIC_STATE_NOT_INITIALIZED = BROTLI_PANIC - 4;
/** Exception: state is not uninitialized. */
public static final int BROTLI_PANIC_STATE_NOT_UNINITIALIZED = BROTLI_PANIC - 5;
/** Exception: too many dictionary chunks. */
public static final int BROTLI_PANIC_TOO_MANY_DICTIONARY_CHUNKS = BROTLI_PANIC - 6;
/** Exception: unexpected state. */
public static final int BROTLI_PANIC_UNEXPECTED_STATE = BROTLI_PANIC - 7;
/** Exception: unreachable code. */
public static final int BROTLI_PANIC_UNREACHABLE = BROTLI_PANIC - 8;
/** Exception: unaligned copy bytes. */
public static final int BROTLI_PANIC_UNALIGNED_COPY_BYTES = BROTLI_PANIC - 9;

/** Non-instantiable. */
private BrotliError() {}
}

@@ -16,6 +16,7 @@ import java.io.InputStream;
 */
public class BrotliInputStream extends InputStream {

/** Default size of internal buffer (used for faster byte-by-byte reading). */
public static final int DEFAULT_INTERNAL_BUFFER_SIZE = 256;

/**
@@ -93,14 +94,17 @@ public class BrotliInputStream extends InputStream {
}
}

/** Attach "RAW" dictionary (chunk) to decoder. */
public void attachDictionaryChunk(byte[] data) {
Decode.attachDictionaryChunk(state, data);
}

/** Request decoder to produce output as soon as it is available. */
public void enableEagerOutput() {
Decode.enableEagerOutput(state);
}

/** Enable "large window" stream feature. */
public void enableLargeWindow() {
Decode.enableLargeWindow(state);
}

@@ -144,7 +144,6 @@ public class DecodeTest {
public void testUtils() {
new Context();
new Decode();
new Dictionary();
new Huffman();
}
}

@@ -6,6 +6,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

/** Toy decoder CLI; mostly used for simple benchmarking. */
public class Decoder {
private static long decodeBytes(InputStream input, OutputStream output, byte[] buffer)
throws IOException {
@@ -53,6 +54,7 @@ public class Decoder {
System.out.println(mbDecoded / timeDelta + " MiB/s");
}

/** CLI entry point. */
public static void main(String... args) throws IOException {
if (args.length != 2 && args.length != 3) {
System.out.println("Usage: decoder <compressed_in> <decompressed_out> [repeat]");
@@ -69,4 +71,7 @@ public class Decoder {
decompress(args[0], args[1], buffer);
}
}

/** Non-instantiable. */
private Decoder() {}
}

@@ -41,6 +41,7 @@ public final class Dictionary {

private static final int DICTIONARY_DEBUG = Utils.isDebugMode();

/** Initialize static dictionary. */
public static void setData(ByteBuffer newData, int[] newSizeBits) {
if (DICTIONARY_DEBUG != 0) {
if ((Utils.isDirect(newData) == 0) || (Utils.isReadOnly(newData) == 0)) {
@@ -90,6 +91,7 @@ public final class Dictionary {
Dictionary.data = newData;
}

/** Access static dictionary. */
public static ByteBuffer getData() {
if (data.capacity() != 0) {
return data;
@@ -100,4 +102,7 @@ public final class Dictionary {
/* Might have been set when {@link DictionaryData} was loaded.*/
return data;
}

/** Non-instantiable. */
private Dictionary() {}
}

@@ -1706,8 +1706,8 @@ internal fun copyRawBytes(s: State, data: ByteArray, offset: Int, length: Int):
}
while (len > 0) {
val chunkLen: Int = readInput(s, data, pos, len);
if (len < -1) {
return len;
if (chunkLen < -1) {
return chunkLen;
}
if (chunkLen <= 0) {
return makeError(s, -16);

@@ -2026,8 +2026,8 @@ let makeBrotliDecode = () => {
}
while (len > 0) {
const /** @type {number} */ chunkLen = readInput(s, data, pos, len);
if (len < -1) {
return len;
if (chunkLen < -1) {
return chunkLen;
}
if (chunkLen <= 0) {
return makeError(s, -16);

2 js/decode.min.js (vendored)
File diff suppressed because one or more lines are too long
@@ -1697,8 +1697,8 @@ function copyRawBytes(s: State, data: Int8Array, offset: number, length: number)
}
while (len > 0) {
const chunkLen: number = readInput(s, data, pos, len);
if (len < -1) {
return len;
if (chunkLen < -1) {
return chunkLen;
}
if (chunkLen <= 0) {
return makeError(s, -16);

@@ -1,7 +1,7 @@
This directory contains the code for the Python `brotli` module,
`bro.py` tool, and roundtrip tests.
and roundtrip tests.

Only Python 2.7+ is supported.
Only Python 3.10+ is supported.

We provide a `Makefile` to simplify common development commands.

@@ -17,13 +17,17 @@ following command from this directory:

$ make install

If you already have native Brotli installed on your system and want to use this one instead of the vendored sources, you
should set the `USE_SYSTEM_BROTLI=1` environment variable when building the wheel, like this:
If you already have native Brotli installed on your system and want to use
this one instead of the vendored sources, you should set
the `USE_SYSTEM_BROTLI=1` environment variable when building the wheel,
like this:

$ USE_SYSTEM_BROTLI=1 pip install brotli --no-binary brotli

Brotli is found via the `pkg-config` utility. Moreover, you must build all 3 `brotlicommon`, `brotlienc`, and `brotlidec`
components. If you're installing brotli from the package manager, you need the development package, like this on Fedora:
Brotli is found via the `pkg-config` utility. Moreover, you must build
all 3 `brotlicommon`, `brotlienc`, and `brotlidec` components. If you're
installing brotli from the package manager, you need the development package,
like this on Fedora:

$ dnf install brotli brotli-devel

@@ -45,8 +49,8 @@ able to edit the source files, you can use the `setuptools`

### Code Style

Brotli's code follows the [Google Python Style Guide][]. To
automatically format your code, first install [YAPF][]:
Brotli code follows the [Google Python Style Guide][].
To automatically format your code, first install [YAPF][]:

$ pip install yapf

@@ -56,7 +60,6 @@ Then, to format all files in the project, you can run:

See the [YAPF usage][] documentation for more information.

[PyPI]: https://pypi.org/project/Brotli/
[development mode]: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode
[Google Python Style Guide]: https://google.github.io/styleguide/pyguide.html

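Not part of the README diff, but a quick way to verify an install built as described above is a one-shot roundtrip with the module-level API that the removed `bro.py` tool relied on (`brotli.compress`, `brotli.decompress`, `brotli.version`); a minimal sketch:

```python
import brotli

payload = b"hello, brotli " * 100
packed = brotli.compress(payload, quality=11)   # one-shot compression
assert brotli.decompress(packed) == payload     # lossless roundtrip
print(brotli.version, len(payload), "->", len(packed))
```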
443 python/_brotli.c
@@ -12,18 +12,40 @@
#include <brotli/decode.h>
#include <brotli/encode.h>

#if PY_MAJOR_VERSION >= 3
#define PY_GET_TYPE(Obj) (Py_TYPE(Obj))
// 3.9 end-of-life is 2025-10-31.
// 3.10 end-of-life is 2026-10.
// 3.11 end-of-life is 2027-10.
// 3.12 end-of-life is 2028-10.
// 3.13 end-of-life is 2029-10.
// 3.14 end-of-life is 2030-10.
#if PY_MAJOR_VERSION < 3 || (PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION < 10)
#error "Only Python 3.10+ is supported"
#endif

/*
Decoder / encoder nature does not support concurrent access. Attempt to enter
concurrently will result in an exception.

"Critical" parts used in prologues to ensure that only one thread enters.
For consistency, we use them in epilogues as well. "Critical" is essential for
free-threaded. In GIL environment those rendered as a scope (i.e. `{` and `}`).

NB: `Py_BEGIN_ALLOW_THREADS` / `Py_END_ALLOW_THREADS` are still required to
unblock the stop-the-world GC.
*/
#ifdef Py_GIL_DISABLED
#if PY_MAJOR_VERSION < 3 || (PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION < 13)
#error "Critical sections are only available in Python 3.13+"
#endif
#define BROTLI_CRITICAL_START Py_BEGIN_CRITICAL_SECTION(self)
#define BROTLI_CRITICAL_END Py_END_CRITICAL_SECTION()
#else
#define PY_GET_TYPE(Obj) ((Obj)->ob_type)
#define BROTLI_CRITICAL_START {
#define BROTLI_CRITICAL_END }
#endif

static const char kErrorAttr[] = "error";
#if PY_MAJOR_VERSION >= 3
static const char kModuleAttr[] = "_module";
#else
static PyObject* BrotliError;
#endif

static const char kInvalidBufferError[] =
"brotli: data must be a C-contiguous buffer";

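The comment added above describes the locking discipline inside the extension rather than a new API: a single Compressor or Decompressor instance refuses concurrent entry and raises the module's exception instead. For callers on a free-threaded build the practical consequence is that sharing one instance across threads requires their own serialization; a hedged sketch (the wrapper and lock are the caller's, not part of the module):

```python
import threading
import brotli

class SafeCompressor:
    """Serializes access to one shared brotli.Compressor.

    Concurrent calls into the same instance raise brotli.error,
    so a caller-side lock keeps multi-threaded use well-defined.
    """

    def __init__(self) -> None:
        self._c = brotli.Compressor()
        self._lock = threading.Lock()

    def process(self, chunk: bytes) -> bytes:
        with self._lock:
            return self._c.process(chunk)

    def finish(self) -> bytes:
        with self._lock:
            return self._c.finish()
```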
@@ -170,7 +192,7 @@ PyDoc_STRVAR(brotli_Decompressor_is_finished_doc,
" False otherwise\n");
PyDoc_STRVAR(brotli_Decompressor_can_accept_more_data_doc,
"Checks if the decoder instance can accept more compressed data.\n"
"If the 'decompress()' method on this instance of decompressor was never\n"
"If the 'process()' method on this instance of decompressor was never\n"
"called with 'max_length', this method will always return True.\n"
"\n"
"Signature:"
@@ -178,8 +200,8 @@ PyDoc_STRVAR(brotli_Decompressor_can_accept_more_data_doc,
"\n"
"Returns:\n"
" True if the decoder is ready to accept more compressed data via\n"
" 'decompress()'\n"
" False if the decoder needs to output some data via 'decompress(b'')'\n"
" 'process()'\n"
" False if the decoder needs to output some data via 'process(b'')'\n"
" before being provided any more compressed data\n");
PyDoc_STRVAR(brotli_decompress__doc__,
"Decompress a compressed byte string.\n"

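The docstring above describes a small state machine: once `process()` has been called with an output limit, the decoder may hold unconsumed input, and `can_accept_more_data()` tells the caller whether to feed more bytes or to drain pending output first with `process(b'')`. A sketch of that loop against the Python-level names registered by this module (the chunk source, limit value, and keyword spelling follow this diff; treat them as illustrative):

```python
import brotli

def stream_decompress(chunks, limit=1 << 16):
    """Yield decompressed pieces while bounding each process() call's output."""
    d = brotli.Decompressor()
    for chunk in chunks:
        while not d.can_accept_more_data():
            # Decoder still holds buffered input/output: drain before feeding more.
            yield d.process(b"", output_buffer_limit=limit)
        yield d.process(chunk, output_buffer_limit=limit)
    while not d.can_accept_more_data():
        yield d.process(b"", output_buffer_limit=limit)
    assert d.is_finished()
```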
@@ -201,7 +223,6 @@ PyDoc_STRVAR(brotli_doc,
/* clang-format on */

static void set_brotli_exception(PyObject* t, const char* msg) {
#if PY_MAJOR_VERSION >= 3
PyObject* error = NULL;
PyObject* module = NULL;
assert(t != NULL);
@@ -213,13 +234,9 @@ static void set_brotli_exception(PyObject* t, const char* msg) {
if (error == NULL) return; /* AttributeError raised. */
PyErr_SetString(error, msg);
Py_DECREF(error);
#else
PyErr_SetString(BrotliError, msg);
#endif
}

static void set_brotli_exception_from_module(PyObject* m, const char* msg) {
#if PY_MAJOR_VERSION >= 3
PyObject* error = NULL;
assert(m != NULL);
assert(PyModule_Check(m));
@@ -227,9 +244,6 @@ static void set_brotli_exception_from_module(PyObject* m, const char* msg) {
if (error == NULL) return; /* AttributeError raised. */
PyErr_SetString(error, msg);
Py_DECREF(error);
#else
PyErr_SetString(BrotliError, msg);
#endif
}

/*
@@ -354,7 +368,7 @@ static PyObject* Buffer_Finish(Buffer* buffer) {
}
if (len == 0) return result;

out = PyBytes_AS_STRING(result);
out = (uint8_t*)PyBytes_AS_STRING(result);
block = buffer->head;
while (block != buffer->tail) {
memcpy(out + pos, block->payload, block->size);
@@ -389,7 +403,7 @@ static PyObject* brotli_Compressor_new(PyTypeObject* type, PyObject* args,
self->enc = BrotliEncoderCreateInstance(0, 0, 0);
if (self->enc == NULL) {
set_brotli_exception(self_type, kCompressCreateError);
PY_GET_TYPE(self)->tp_free((PyObject*)self);
Py_TYPE(self)->tp_free((PyObject*)self);
return NULL;
}
self->healthy = 1;
@@ -401,7 +415,7 @@ static int brotli_Compressor_init(PyBrotli_Compressor* self, PyObject* args,
PyObject* keywds) {
static const char* kwlist[] = {"mode", "quality", "lgwin", "lgblock", NULL};

PyObject* self_type = (PyObject*)PY_GET_TYPE((PyObject*)self);
PyObject* self_type = (PyObject*)Py_TYPE((PyObject*)self);
unsigned char mode = BROTLI_DEFAULT_MODE;
unsigned char quality = BROTLI_DEFAULT_QUALITY;
unsigned char lgwin = BROTLI_DEFAULT_WINDOW;
@@ -454,7 +468,34 @@ static int brotli_Compressor_init(PyBrotli_Compressor* self, PyObject* args,

static void brotli_Compressor_dealloc(PyBrotli_Compressor* self) {
if (self->enc) BrotliEncoderDestroyInstance(self->enc);
PY_GET_TYPE(self)->tp_free((PyObject*)self);
Py_TYPE(self)->tp_free((PyObject*)self);
}

static int brotli_compressor_enter(PyBrotli_Compressor* self) {
PyObject* self_type = (PyObject*)Py_TYPE((PyObject*)self);
int ok = 1;

BROTLI_CRITICAL_START;
if (self->healthy == 0) {
set_brotli_exception(self_type, kCompressUnhealthyError);
ok = 0;
}
if (ok && self->processing != 0) {
set_brotli_exception(self_type, kCompressConcurrentError);
ok = 0;
}
if (ok) {
self->processing = 1;
}
BROTLI_CRITICAL_END;
return ok;
}

static void brotli_compressor_leave(PyBrotli_Compressor* self) {
BROTLI_CRITICAL_START;
assert(self->processing == 1);
self->processing = 0;
BROTLI_CRITICAL_END;
}

/*
@@ -466,7 +507,7 @@ static void brotli_Compressor_dealloc(PyBrotli_Compressor* self) {
static PyObject* compress_stream(PyBrotli_Compressor* self,
BrotliEncoderOperation op, uint8_t* input,
size_t input_length) {
PyObject* self_type = (PyObject*)PY_GET_TYPE((PyObject*)self);
PyObject* self_type = (PyObject*)Py_TYPE((PyObject*)self);
size_t available_in = input_length;
const uint8_t* next_in = input;
Buffer buffer;

@@ -526,70 +567,53 @@ error:

static PyObject* brotli_Compressor_process(PyBrotli_Compressor* self,
PyObject* args) {
PyObject* self_type = (PyObject*)PY_GET_TYPE((PyObject*)self);
PyObject* self_type = (PyObject*)Py_TYPE((PyObject*)self);
PyObject* ret = NULL;
PyObject* input_object = NULL;
Py_buffer input;
int ok = 1;

if (self->healthy == 0) {
set_brotli_exception(self_type, kCompressUnhealthyError);
return NULL;
}
if (self->processing != 0) {
set_brotli_exception(self_type, kCompressConcurrentError);
return NULL;
}
if (!brotli_compressor_enter(self)) return NULL;

if (!PyArg_ParseTuple(args, "O:process", &input_object)) {
return NULL;
ok = 0;
}
if (!get_data_view(input_object, &input)) {
if (ok && !get_data_view(input_object, &input)) {
ok = 0;
}
if (!ok) {
self->healthy = 0;
brotli_compressor_leave(self);
return NULL;
}

self->processing = 1;
ret = compress_stream(self, BROTLI_OPERATION_PROCESS, (uint8_t*)input.buf,
input.len);
PyBuffer_Release(&input);
self->processing = 0;
brotli_compressor_leave(self);

return ret;
}

static PyObject* brotli_Compressor_flush(PyBrotli_Compressor* self) {
PyObject* self_type = (PyObject*)PY_GET_TYPE((PyObject*)self);
PyObject* self_type = (PyObject*)Py_TYPE((PyObject*)self);
PyObject* ret = NULL;

if (self->healthy == 0) {
set_brotli_exception(self_type, kCompressUnhealthyError);
return NULL;
}
if (self->processing != 0) {
set_brotli_exception(self_type, kCompressConcurrentError);
return NULL;
}

self->processing = 1;
if (!brotli_compressor_enter(self)) return NULL;
ret = compress_stream(self, BROTLI_OPERATION_FLUSH, NULL, 0);
self->processing = 0;
brotli_compressor_leave(self);

return ret;
}

static PyObject* brotli_Compressor_finish(PyBrotli_Compressor* self) {
PyObject* self_type = (PyObject*)PY_GET_TYPE((PyObject*)self);
PyObject* self_type = (PyObject*)Py_TYPE((PyObject*)self);
PyObject* ret = NULL;

if (self->healthy == 0) {
set_brotli_exception(self_type, kCompressUnhealthyError);
return NULL;
}
if (self->processing != 0) {
set_brotli_exception(self_type, kCompressConcurrentError);
return NULL;
}

self->processing = 1;
if (!brotli_compressor_enter(self)) return NULL;
ret = compress_stream(self, BROTLI_OPERATION_FINISH, NULL, 0);
self->processing = 0;
brotli_compressor_leave(self);

if (ret != NULL) {
assert(BrotliEncoderIsFinished(self->enc));
}
@@ -619,7 +643,7 @@ static PyObject* brotli_Decompressor_new(PyTypeObject* type, PyObject* args,
self->dec = BrotliDecoderCreateInstance(0, 0, 0);
if (self->dec == NULL) {
set_brotli_exception(self_type, kDecompressCreateError);
PY_GET_TYPE(self)->tp_free((PyObject*)self);
Py_TYPE(self)->tp_free((PyObject*)self);
return NULL;
}

@@ -647,20 +671,47 @@ static int brotli_Decompressor_init(PyBrotli_Decompressor* self, PyObject* args,
return 0;
}

static int brotli_decompressor_enter(PyBrotli_Decompressor* self) {
PyObject* self_type = (PyObject*)Py_TYPE((PyObject*)self);
int ok = 1;

BROTLI_CRITICAL_START;
if (self->healthy == 0) {
set_brotli_exception(self_type, kDecompressUnhealthyError);
ok = 0;
}
if (ok && self->processing != 0) {
set_brotli_exception(self_type, kDecompressConcurrentError);
ok = 0;
}
if (ok) {
self->processing = 1;
}
BROTLI_CRITICAL_END;
return ok;
}

static void brotli_decompressor_leave(PyBrotli_Decompressor* self) {
BROTLI_CRITICAL_START;
assert(self->processing == 1);
self->processing = 0;
BROTLI_CRITICAL_END;
}

static void brotli_Decompressor_dealloc(PyBrotli_Decompressor* self) {
if (self->dec) BrotliDecoderDestroyInstance(self->dec);
if (self->unconsumed_data) {
free(self->unconsumed_data);
self->unconsumed_data = NULL;
}
PY_GET_TYPE(self)->tp_free((PyObject*)self);
Py_TYPE(self)->tp_free((PyObject*)self);
}

static PyObject* brotli_Decompressor_process(PyBrotli_Decompressor* self,
PyObject* args, PyObject* keywds) {
static const char* kwlist[] = {"", "output_buffer_limit", NULL};

PyObject* self_type = (PyObject*)PY_GET_TYPE((PyObject*)self);
PyObject* self_type = (PyObject*)Py_TYPE((PyObject*)self);
PyObject* ret = NULL;
PyObject* input_object = NULL;
Py_buffer input;

@@ -672,26 +723,24 @@ static PyObject* brotli_Decompressor_process(PyBrotli_Decompressor* self,
uint8_t* new_tail = NULL;
size_t new_tail_length = 0;
int oom = 0;
int ok = 1;

if (self->healthy == 0) {
set_brotli_exception(self_type, kDecompressUnhealthyError);
return NULL;
}
if (self->processing != 0) {
set_brotli_exception(self_type, kDecompressConcurrentError);
return NULL;
}
if (!brotli_decompressor_enter(self)) return NULL;

if (!PyArg_ParseTupleAndKeywords(args, keywds, "O|n:process", (char**)kwlist,
&input_object, &output_buffer_limit)) {
return NULL;
ok = 0;
}
if (!get_data_view(input_object, &input)) {
if (ok && !get_data_view(input_object, &input)) {
ok = 0;
}
if (!ok) {
self->healthy = 0;
brotli_decompressor_leave(self);
return NULL;
}

Buffer_Init(&buffer);
self->processing = 1;

if (self->unconsumed_data_length > 0) {
if (input.len > 0) {
@@ -720,7 +769,7 @@ static PyObject* brotli_Decompressor_process(PyBrotli_Decompressor* self,
if (result == BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT) {
assert(buffer.avail_out == 0);
/* All allocated is used -> reached the output length limit. */
if (buffer.total_allocated >= output_buffer_limit) break;
if (buffer.total_allocated >= (uint64_t)output_buffer_limit) break;
if (Buffer_Grow(&buffer) < 0) {
oom = 1;
break;
@@ -777,21 +826,17 @@ finally:
assert(ret == NULL);
self->healthy = 0;
}
self->processing = 0;
brotli_decompressor_leave(self);

return ret;
}

static PyObject* brotli_Decompressor_is_finished(PyBrotli_Decompressor* self) {
PyObject* self_type = (PyObject*)PY_GET_TYPE((PyObject*)self);
if (self->healthy == 0) {
set_brotli_exception(self_type, kDecompressUnhealthyError);
return NULL;
}
if (self->processing != 0) {
set_brotli_exception(self_type, kDecompressConcurrentError);
return NULL;
}
if (BrotliDecoderIsFinished(self->dec)) {
int result;
if (!brotli_decompressor_enter(self)) return NULL;
result = BrotliDecoderIsFinished(self->dec);
brotli_decompressor_leave(self);
if (result) {
Py_RETURN_TRUE;
} else {
Py_RETURN_FALSE;
@@ -800,16 +845,11 @@ static PyObject* brotli_Decompressor_is_finished(PyBrotli_Decompressor* self) {

static PyObject* brotli_Decompressor_can_accept_more_data(
PyBrotli_Decompressor* self) {
PyObject* self_type = (PyObject*)PY_GET_TYPE((PyObject*)self);
if (self->healthy == 0) {
set_brotli_exception(self_type, kDecompressUnhealthyError);
return NULL;
}
if (self->processing != 0) {
set_brotli_exception(self_type, kDecompressConcurrentError);
return NULL;
}
if (self->unconsumed_data_length > 0) {
int result;
if (!brotli_decompressor_enter(self)) return NULL;
result = (self->unconsumed_data_length > 0);
brotli_decompressor_leave(self);
if (result) {
Py_RETURN_FALSE;
} else {
Py_RETURN_TRUE;
@@ -892,13 +932,6 @@ finally:

/* Module definition */

static int init_brotli_mod(PyObject* m);

static PyMethodDef brotli_methods[] = {
{"decompress", (PyCFunction)brotli_decompress, METH_VARARGS | METH_KEYWORDS,
brotli_decompress__doc__},
{NULL, NULL, 0, NULL}};

static PyMethodDef brotli_Compressor_methods[] = {
{"process", (PyCFunction)brotli_Compressor_process, METH_VARARGS,
brotli_Compressor_process_doc},

@@ -909,44 +942,6 @@ static PyMethodDef brotli_Compressor_methods[] = {
{NULL} /* Sentinel */
};

static PyMethodDef brotli_Decompressor_methods[] = {
{"process", (PyCFunction)brotli_Decompressor_process,
METH_VARARGS | METH_KEYWORDS, brotli_Decompressor_process_doc},
{"is_finished", (PyCFunction)brotli_Decompressor_is_finished, METH_NOARGS,
brotli_Decompressor_is_finished_doc},
{"can_accept_more_data",
(PyCFunction)brotli_Decompressor_can_accept_more_data, METH_NOARGS,
brotli_Decompressor_can_accept_more_data_doc},
{NULL} /* Sentinel */
};

#if PY_MAJOR_VERSION >= 3

#if PY_MINOR_VERSION >= 5
static PyModuleDef_Slot brotli_mod_slots[] = {
{Py_mod_exec, init_brotli_mod},
#if PY_MINOR_VERSION >= 12
{Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED},
#endif
{0, NULL}};
#endif

static struct PyModuleDef brotli_module = {
PyModuleDef_HEAD_INIT,
"_brotli", /* m_name */
brotli_doc, /* m_doc */
0, /* m_size */
brotli_methods, /* m_methods */
#if PY_MINOR_VERSION >= 5
brotli_mod_slots, /* m_slots */
#else
NULL, /* m_reload */
#endif
NULL, /* m_traverse */
NULL, /* m_clear */
NULL /* m_free */
};

static PyType_Slot brotli_Compressor_slots[] = {
{Py_tp_dealloc, (destructor)brotli_Compressor_dealloc},
{Py_tp_doc, (void*)brotli_Compressor_doc},
@@ -960,6 +955,17 @@ static PyType_Spec brotli_Compressor_spec = {
"brotli.Compressor", sizeof(PyBrotli_Compressor), 0,
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, brotli_Compressor_slots};

static PyMethodDef brotli_Decompressor_methods[] = {
{"process", (PyCFunction)brotli_Decompressor_process,
METH_VARARGS | METH_KEYWORDS, brotli_Decompressor_process_doc},
{"is_finished", (PyCFunction)brotli_Decompressor_is_finished, METH_NOARGS,
brotli_Decompressor_is_finished_doc},
{"can_accept_more_data",
(PyCFunction)brotli_Decompressor_can_accept_more_data, METH_NOARGS,
brotli_Decompressor_can_accept_more_data_doc},
{NULL} /* Sentinel */
};

static PyType_Slot brotli_Decompressor_slots[] = {
{Py_tp_dealloc, (destructor)brotli_Decompressor_dealloc},
{Py_tp_doc, (void*)brotli_Decompressor_doc},
@@ -973,127 +979,17 @@ static PyType_Spec brotli_Decompressor_spec = {
"brotli.Decompressor", sizeof(PyBrotli_Decompressor), 0,
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, brotli_Decompressor_slots};

PyMODINIT_FUNC PyInit__brotli(void) {
#if PY_MINOR_VERSION < 5
PyObject* m = PyModule_Create(&brotli_module);
if (m == NULL) return NULL;
if (init_brotli_mod(m) < 0) {
Py_DECREF(m);
m = NULL;
}
return m;
#else
return PyModuleDef_Init(&brotli_module);
#endif
}

#else

static PyTypeObject brotli_CompressorType = {
PyObject_HEAD_INIT(NULL) 0, /* ob_size */
"brotli.Compressor", /* tp_name */
sizeof(PyBrotli_Compressor), /* tp_basicsize */
0, /* tp_itemsize */
(destructor)brotli_Compressor_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
0, /* tp_compare */
0, /* tp_repr */
0, /* tp_as_number */
0, /* tp_as_sequence */
0, /* tp_as_mapping */
0, /* tp_hash */
0, /* tp_call */
0, /* tp_str */
0, /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT, /* tp_flags */
brotli_Compressor_doc, /* tp_doc */
0, /* tp_traverse */
0, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
0, /* tp_iter */
0, /* tp_iternext */
brotli_Compressor_methods, /* tp_methods */
0, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
(initproc)brotli_Compressor_init, /* tp_init */
0, /* tp_alloc */
brotli_Compressor_new, /* tp_new */
};

static PyTypeObject brotli_DecompressorType = {
PyObject_HEAD_INIT(NULL) 0, /* ob_size */
"brotli.Decompressor", /* tp_name */
sizeof(PyBrotli_Decompressor), /* tp_basicsize */
0, /* tp_itemsize */
(destructor)brotli_Decompressor_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
0, /* tp_compare */
0, /* tp_repr */
0, /* tp_as_number */
0, /* tp_as_sequence */
0, /* tp_as_mapping */
0, /* tp_hash */
0, /* tp_call */
0, /* tp_str */
0, /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT, /* tp_flags */
brotli_Decompressor_doc, /* tp_doc */
0, /* tp_traverse */
0, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
0, /* tp_iter */
0, /* tp_iternext */
brotli_Decompressor_methods, /* tp_methods */
0, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
(initproc)brotli_Decompressor_init, /* tp_init */
0, /* tp_alloc */
brotli_Decompressor_new, /* tp_new */
};

PyMODINIT_FUNC init_brotli(void) {
PyObject* m = Py_InitModule3("_brotli", brotli_methods, brotli_doc);
if (m == NULL) return;
init_brotli_mod(m);
}

#endif

/* Emulates PyModule_AddObject */
static int RegisterObject(PyObject* mod, const char* name, PyObject* value) {
assert(value != NULL);
#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 10
int ret = PyModule_AddObjectRef(mod, name, value);
/* Emulates PyModule_AddObject, i.e. decrements the reference count on
success. */
if (ret == 0) Py_DECREF(value);
return ret;
#else
return PyModule_AddObject(mod, name, value);
#endif
}

static int init_brotli_mod(PyObject* m) {
static int brotli_init_mod(PyObject* m) {
PyObject* error_type = NULL;
PyObject* compressor_type = NULL;
PyObject* decompressor_type = NULL;
@@ -1104,34 +1000,19 @@ static int init_brotli_mod(PyObject* m) {
if (error_type == NULL) goto error;

if (RegisterObject(m, kErrorAttr, error_type) < 0) goto error;
#if PY_MAJOR_VERSION < 3
/* Assumption: pointer is used only while module is alive and well. */
BrotliError = error_type;
#endif
error_type = NULL;

#if PY_MAJOR_VERSION >= 3
compressor_type = PyType_FromSpec(&brotli_Compressor_spec);
decompressor_type = PyType_FromSpec(&brotli_Decompressor_spec);
#else
compressor_type = (PyObject*)&brotli_CompressorType;
Py_INCREF(compressor_type);
decompressor_type = (PyObject*)&brotli_DecompressorType;
Py_INCREF(decompressor_type);
#endif
if (compressor_type == NULL) goto error;
if (PyType_Ready((PyTypeObject*)compressor_type) < 0) goto error;
#if PY_MAJOR_VERSION >= 3
if (PyObject_SetAttrString(compressor_type, kModuleAttr, m) < 0) goto error;
#endif
if (RegisterObject(m, "Compressor", compressor_type) < 0) goto error;
compressor_type = NULL;

if (decompressor_type == NULL) goto error;
if (PyType_Ready((PyTypeObject*)decompressor_type) < 0) goto error;
#if PY_MAJOR_VERSION >= 3
if (PyObject_SetAttrString(decompressor_type, kModuleAttr, m) < 0) goto error;
#endif
if (RegisterObject(m, "Decompressor", decompressor_type) < 0) goto error;
decompressor_type = NULL;

@@ -1162,3 +1043,31 @@ error:
}
return -1;
}

static PyMethodDef brotli_methods[] = {
{"decompress", (PyCFunction)brotli_decompress, METH_VARARGS | METH_KEYWORDS,
brotli_decompress__doc__},
{NULL, NULL, 0, NULL}};

static PyModuleDef_Slot brotli_mod_slots[] = {
{Py_mod_exec, brotli_init_mod},
#ifdef Py_GIL_DISABLED
{Py_mod_gil, Py_MOD_GIL_NOT_USED},
#elif (PY_MAJOR_VERSION > 3) || (PY_MINOR_VERSION >= 12)
{Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED},
#endif
{0, NULL}};

static struct PyModuleDef brotli_module = {
PyModuleDef_HEAD_INIT,
"_brotli", /* m_name */
brotli_doc, /* m_doc */
0, /* m_size */
brotli_methods, /* m_methods */
brotli_mod_slots, /* m_slots */
NULL, /* m_traverse */
NULL, /* m_clear */
NULL /* m_free */
};

PyMODINIT_FUNC PyInit__brotli(void) { return PyModuleDef_Init(&brotli_module); }

194 python/bro.py
@@ -1,194 +0,0 @@
|
||||
#! /usr/bin/env python
|
||||
"""Compression/decompression utility using the Brotli algorithm."""
|
||||
|
||||
# Note: Python2 has been deprecated long ago, but some projects out in
|
||||
# the wide world may still use it nevertheless. This should not
|
||||
# deprive them from being able to run Brotli.
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
|
||||
import brotli
|
||||
|
||||
|
||||
# default values of encoder parameters
|
||||
_DEFAULT_PARAMS = {
|
||||
'mode': brotli.MODE_GENERIC,
|
||||
'quality': 11,
|
||||
'lgwin': 22,
|
||||
'lgblock': 0,
|
||||
}
|
||||
|
||||
|
||||
def get_binary_stdio(stream):
|
||||
"""Return the specified stdin/stdout/stderr stream.
|
||||
|
||||
If the stdio stream requested (i.e. sys.(stdin|stdout|stderr))
|
||||
has been replaced with a stream object that does not have a `.buffer`
|
||||
attribute, this will return the original stdio stream's buffer, i.e.
|
||||
`sys.__(stdin|stdout|stderr)__.buffer`.
|
||||
|
||||
Args:
|
||||
stream: One of 'stdin', 'stdout', 'stderr'.
|
||||
|
||||
Returns:
|
||||
The stream, as a 'raw' buffer object (i.e. io.BufferedIOBase subclass
|
||||
instance such as io.Bufferedreader/io.BufferedWriter), suitable for
|
||||
reading/writing binary data from/to it.
|
||||
"""
|
||||
if stream == 'stdin': stdio = sys.stdin
|
||||
elif stream == 'stdout': stdio = sys.stdout
|
||||
elif stream == 'stderr': stdio = sys.stderr
|
||||
else:
|
||||
raise ValueError('invalid stream name: %s' % (stream,))
|
||||
if sys.version_info[0] < 3:
|
||||
if sys.platform == 'win32':
|
||||
# set I/O stream binary flag on python2.x (Windows)
|
||||
runtime = platform.python_implementation()
|
||||
if runtime == 'PyPy':
|
||||
# the msvcrt trick doesn't work in pypy, so use fdopen().
|
||||
mode = 'rb' if stream == 'stdin' else 'wb'
|
||||
stdio = os.fdopen(stdio.fileno(), mode, 0)
|
||||
else:
|
||||
# this works with CPython -- untested on other implementations
|
||||
import msvcrt
|
||||
msvcrt.setmode(stdio.fileno(), os.O_BINARY)
|
||||
return stdio
|
||||
else:
|
||||
try:
|
||||
return stdio.buffer
|
||||
except AttributeError:
|
||||
# The Python reference explains
|
||||
# (-> https://docs.python.org/3/library/sys.html#sys.stdin)
|
||||
# that the `.buffer` attribute might not exist, since
|
||||
# the standard streams might have been replaced by something else
|
||||
# (such as an `io.StringIO()` - perhaps via
|
||||
# `contextlib.redirect_stdout()`).
|
||||
# We fall back to the original stdio in these cases.
|
||||
if stream == 'stdin': return sys.__stdin__.buffer
|
||||
if stream == 'stdout': return sys.__stdout__.buffer
|
||||
if stream == 'stderr': return sys.__stderr__.buffer
|
||||
assert False, 'Impossible Situation.'
|
||||
|
||||
|
||||
def main(args=None):
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
prog=os.path.basename(__file__), description=__doc__)
|
||||
parser.add_argument(
|
||||
'--version', action='version', version=brotli.version)
|
||||
parser.add_argument(
|
||||
'-i',
|
||||
'--input',
|
||||
metavar='FILE',
|
||||
type=str,
|
||||
dest='infile',
|
||||
help='Input file',
|
||||
default=None)
|
||||
parser.add_argument(
|
||||
'-o',
|
||||
'--output',
|
||||
metavar='FILE',
|
||||
type=str,
|
||||
dest='outfile',
|
||||
help='Output file',
|
||||
default=None)
|
||||
parser.add_argument(
|
||||
'-f',
|
||||
'--force',
|
||||
action='store_true',
|
||||
help='Overwrite existing output file',
|
||||
default=False)
|
||||
parser.add_argument(
|
||||
'-d',
|
||||
'--decompress',
|
||||
action='store_true',
|
||||
help='Decompress input file',
|
||||
default=False)
|
||||
params = parser.add_argument_group('optional encoder parameters')
|
||||
params.add_argument(
|
||||
'-m',
|
||||
'--mode',
|
||||
metavar='MODE',
|
||||
type=int,
|
||||
choices=[0, 1, 2],
|
||||
help='The compression mode can be 0 for generic input, '
|
||||
'1 for UTF-8 encoded text, or 2 for WOFF 2.0 font data. '
|
||||
'Defaults to 0.')
|
||||
params.add_argument(
|
||||
'-q',
|
||||
'--quality',
|
||||
metavar='QUALITY',
|
||||
type=int,
|
||||
choices=list(range(0, 12)),
|
||||
help='Controls the compression-speed vs compression-density '
|
||||
'tradeoff. The higher the quality, the slower the '
|
||||
'compression. Range is 0 to 11. Defaults to 11.')
|
||||
params.add_argument(
|
||||
'--lgwin',
|
||||
metavar='LGWIN',
|
||||
type=int,
|
||||
choices=list(range(10, 25)),
|
||||
help='Base 2 logarithm of the sliding window size. Range is '
|
||||
'10 to 24. Defaults to 22.')
|
||||
params.add_argument(
|
||||
'--lgblock',
|
||||
metavar='LGBLOCK',
|
||||
type=int,
|
||||
choices=[0] + list(range(16, 25)),
|
||||
help='Base 2 logarithm of the maximum input block size. '
|
||||
'Range is 16 to 24. If set to 0, the value will be set based '
|
||||
'on the quality. Defaults to 0.')
|
||||
# set default values using global _DEFAULT_PARAMS dictionary
|
||||
parser.set_defaults(**_DEFAULT_PARAMS)
|
||||
|
||||
options = parser.parse_args(args=args)
|
||||
|
||||
if options.infile:
|
||||
try:
|
||||
with open(options.infile, 'rb') as infile:
|
||||
data = infile.read()
|
||||
except OSError:
|
||||
parser.error('Could not read --infile: %s' % (infile,))
|
||||
else:
|
||||
if sys.stdin.isatty():
|
||||
# interactive console, just quit
|
||||
parser.error('No input (called from interactive terminal).')
|
||||
infile = get_binary_stdio('stdin')
|
||||
data = infile.read()
|
||||
|
||||
if options.outfile:
|
||||
# Caution! If `options.outfile` is a broken symlink, the write
# will follow the symlink target.
|
||||
if os.path.exists(options.outfile) and not options.force:
|
||||
parser.error(('Target --outfile=%s already exists, '
|
||||
'but --force was not requested.') % (options.outfile,))
|
||||
outfile = open(options.outfile, 'wb')
|
||||
did_open_outfile = True
|
||||
else:
|
||||
outfile = get_binary_stdio('stdout')
|
||||
did_open_outfile = False
|
||||
try:
|
||||
try:
|
||||
if options.decompress:
|
||||
data = brotli.decompress(data)
|
||||
else:
|
||||
data = brotli.compress(
|
||||
data,
|
||||
mode=options.mode,
|
||||
quality=options.quality,
|
||||
lgwin=options.lgwin,
|
||||
lgblock=options.lgblock)
|
||||
outfile.write(data)
|
||||
finally:
|
||||
if did_open_outfile: outfile.close()
|
||||
except brotli.error as e:
|
||||
parser.exit(1,
|
||||
'bro: error: %s: %s' % (e, options.infile or '{stdin}'))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
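# Minimal sketch (not part of bro.py): the encoder flags above map one-to-one
# onto brotli.compress keyword arguments; the values below mirror the
# documented defaults.
import brotli
compressed = brotli.compress(b'example data', mode=0, quality=11, lgwin=22, lgblock=0)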
|
||||
@@ -1,27 +1,21 @@
|
||||
"""Common utilities for Brotli tests."""
|
||||
|
||||
from __future__ import print_function
|
||||
import filecmp
|
||||
import glob
|
||||
import itertools
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
import sysconfig
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
# TODO(eustas): use str(pathlib.PurePath(__file__).parent.parent) for Python 3.4+
|
||||
project_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
|
||||
project_dir = str(pathlib.PurePath(__file__).parent.parent.parent)
|
||||
runtime_dir = os.getenv('TEST_SRCDIR')
|
||||
test_dir = os.getenv('BROTLI_TESTS_PATH')
|
||||
BRO_ARGS = [os.getenv('BROTLI_WRAPPER')]
|
||||
|
||||
# Fallbacks
|
||||
if test_dir is None:
|
||||
if test_dir and runtime_dir:
|
||||
test_dir = os.path.join(runtime_dir, test_dir)
|
||||
elif test_dir is None:
|
||||
test_dir = os.path.join(project_dir, 'tests')
|
||||
if BRO_ARGS[0] is None:
|
||||
python_exe = sys.executable or 'python'
|
||||
bro_path = os.path.join(project_dir, 'python', 'bro.py')
|
||||
BRO_ARGS = [python_exe, bro_path]
|
||||
|
||||
# Get the platform/version-specific build folder.
|
||||
# By default, the distutils build base is in the same location as setup.py.
|
||||
@@ -41,113 +35,49 @@ else:
|
||||
|
||||
TESTDATA_DIR = os.path.join(test_dir, 'testdata')
|
||||
|
||||
TESTDATA_FILES = [
|
||||
'empty', # Empty file
|
||||
'10x10y', # Small text
|
||||
'alice29.txt', # Large text
|
||||
'random_org_10k.bin', # Small data
|
||||
'mapsdatazrh', # Large data
|
||||
'ukkonooa', # Poem
|
||||
'cp1251-utf16le', # Codepage 1251 table saved in UTF16-LE encoding
|
||||
'cp852-utf8', # Codepage 852 table saved in UTF8 encoding
|
||||
# TODO(eustas): add test on already compressed content
|
||||
]
|
||||
|
||||
# Some files might be missing in a lightweight sources pack.
|
||||
TESTDATA_PATH_CANDIDATES = [
|
||||
os.path.join(TESTDATA_DIR, f) for f in TESTDATA_FILES
|
||||
]
|
||||
|
||||
TESTDATA_PATHS = [
|
||||
path for path in TESTDATA_PATH_CANDIDATES if os.path.isfile(path)
|
||||
]
|
||||
|
||||
TESTDATA_PATHS_FOR_DECOMPRESSION = glob.glob(
|
||||
os.path.join(TESTDATA_DIR, '*.compressed')
|
||||
)
|
||||
|
||||
TEMP_DIR = tempfile.mkdtemp()
|
||||
def gather_text_inputs():
|
||||
"""Discover inputs for decompression tests."""
|
||||
all_inputs = [
|
||||
'empty', # Empty file
|
||||
'10x10y', # Small text
|
||||
'alice29.txt', # Large text
|
||||
'random_org_10k.bin', # Small data
|
||||
'mapsdatazrh', # Large data
|
||||
'ukkonooa', # Poem
|
||||
'cp1251-utf16le', # Codepage 1251 table saved in UTF16-LE encoding
|
||||
'cp852-utf8', # Codepage 852 table saved in UTF8 encoding
|
||||
# TODO(eustas): add test on already compressed content
|
||||
]
|
||||
# Filter out non-existing files; e.g. in lightweight sources pack.
|
||||
return [
|
||||
f for f in all_inputs if os.path.isfile(os.path.join(TESTDATA_DIR, f))
|
||||
]
|
||||
|
||||
|
||||
def get_temp_compressed_name(filename):
|
||||
return os.path.join(TEMP_DIR, os.path.basename(filename + '.bro'))
|
||||
def gather_compressed_inputs():
|
||||
"""Discover inputs for compression tests."""
|
||||
candidates = glob.glob(os.path.join(TESTDATA_DIR, '*.compressed'))
|
||||
pairs = [(f, f.split('.compressed')[0]) for f in candidates]
|
||||
existing = [
|
||||
pair
|
||||
for pair in pairs
|
||||
if os.path.isfile(pair[0]) and os.path.isfile(pair[1])
|
||||
]
|
||||
return [
|
||||
(os.path.basename(pair[0]), (os.path.basename(pair[1])))
|
||||
for pair in existing
|
||||
]
|
||||
|
||||
|
||||
def get_temp_uncompressed_name(filename):
|
||||
return os.path.join(TEMP_DIR, os.path.basename(filename + '.unbro'))
|
||||
def take_input(input_name):
|
||||
with open(os.path.join(TESTDATA_DIR, input_name), 'rb') as f:
|
||||
return f.read()
|
||||
|
||||
|
||||
def bind_method_args(method, *args, **kwargs):
|
||||
return lambda self: method(self, *args, **kwargs)
|
||||
def has_input(input_name):
|
||||
return os.path.isfile(os.path.join(TESTDATA_DIR, input_name))
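# Sketch of how the helpers above are combined by the pytest-based tests
# (the test name and body are illustrative):
#
#   @pytest.mark.parametrize('name', gather_text_inputs())
#   def test_roundtrip(name):
#       data = take_input(name)
#       assert brotli.decompress(brotli.compress(data)) == data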
|
||||
|
||||
|
||||
# TODO(eustas): migrate to absl.testing.parameterized.
|
||||
def generate_test_methods(
|
||||
test_case_class, for_decompression=False, variants=None
|
||||
):
|
||||
"""Adds test methods for each test data file and each variant.
|
||||
|
||||
This makes identifying problems with specific compression scenarios easier.
|
||||
|
||||
Args:
|
||||
test_case_class: The test class to add methods to.
|
||||
for_decompression: If True, uses compressed test data files.
|
||||
variants: A dictionary where keys are option names and values are lists of
|
||||
possible values for that option. Each combination of variants will
|
||||
generate a separate test method.
|
||||
"""
|
||||
if for_decompression:
|
||||
paths = [
|
||||
path for path in TESTDATA_PATHS_FOR_DECOMPRESSION
|
||||
if os.path.exists(path.replace('.compressed', ''))
|
||||
]
|
||||
else:
|
||||
paths = TESTDATA_PATHS
|
||||
opts = []
|
||||
if variants:
|
||||
opts_list = []
|
||||
for k, v in variants.items():
|
||||
opts_list.append([r for r in itertools.product([k], v)])
|
||||
for o in itertools.product(*opts_list):
|
||||
opts_name = '_'.join([str(i) for i in itertools.chain(*o)])
|
||||
opts_dict = dict(o)
|
||||
opts.append([opts_name, opts_dict])
|
||||
else:
|
||||
opts.append(['', {}])
|
||||
for method in [m for m in dir(test_case_class) if m.startswith('_test')]:
|
||||
for testdata in paths:
|
||||
for opts_name, opts_dict in opts:
|
||||
f = os.path.splitext(os.path.basename(testdata))[0]
|
||||
name = 'test_{method}_{options}_{file}'.format(
|
||||
method=method, options=opts_name, file=f
|
||||
)
|
||||
func = bind_method_args(
|
||||
getattr(test_case_class, method), testdata, **opts_dict
|
||||
)
|
||||
setattr(test_case_class, name, func)
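# For example (illustrative), with variants {'quality': (1,)} and the test data
# file 'alice29.txt', the generated method is named
# 'test__test_compress_file_quality_1_alice29'.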
|
||||
|
||||
|
||||
class TestCase(unittest.TestCase):
|
||||
"""Base class for Brotli test cases.
|
||||
|
||||
Provides common setup and teardown logic, including cleaning up temporary
|
||||
files and a utility for comparing file contents.
|
||||
"""
|
||||
|
||||
def tearDown(self):
|
||||
for f in TESTDATA_PATHS:
|
||||
try:
|
||||
os.unlink(get_temp_compressed_name(f))
|
||||
except OSError:
|
||||
pass
|
||||
try:
|
||||
os.unlink(get_temp_uncompressed_name(f))
|
||||
except OSError:
|
||||
pass
|
||||
# super().tearDown() # Requires Py3+
|
||||
|
||||
def assert_files_match(self, first, second):
|
||||
self.assertTrue(
|
||||
filecmp.cmp(first, second, shallow=False),
|
||||
'File {} differs from {}'.format(first, second),
|
||||
)
|
||||
def chunk_input(data, chunk_size):
|
||||
return [data[i:i + chunk_size] for i in range(0, len(data), chunk_size)]
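# For example: chunk_input(b'abcdef', 4) == [b'abcd', b'ef'].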
|
||||
|
||||
@@ -1,104 +0,0 @@
|
||||
# Copyright 2016 The Brotli Authors. All rights reserved.
|
||||
#
|
||||
# Distributed under MIT license.
|
||||
# See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
|
||||
|
||||
import subprocess
|
||||
import unittest
|
||||
|
||||
from . import _test_utils
|
||||
|
||||
BRO_ARGS = _test_utils.BRO_ARGS
|
||||
TEST_ENV = _test_utils.TEST_ENV
|
||||
|
||||
|
||||
def _get_original_name(test_data):
|
||||
return test_data.split('.compressed')[0]
|
||||
|
||||
|
||||
class TestBroDecompress(_test_utils.TestCase):
|
||||
|
||||
def _check_decompression(self, test_data):
|
||||
# Verify decompression matches the original.
|
||||
temp_uncompressed = _test_utils.get_temp_uncompressed_name(test_data)
|
||||
original = _get_original_name(test_data)
|
||||
self.assert_files_match(temp_uncompressed, original)
|
||||
|
||||
def _decompress_file(self, test_data):
|
||||
temp_uncompressed = _test_utils.get_temp_uncompressed_name(test_data)
|
||||
args = BRO_ARGS + ['-f', '-d', '-i', test_data, '-o', temp_uncompressed]
|
||||
subprocess.check_call(args, env=TEST_ENV)
|
||||
|
||||
def _decompress_pipe(self, test_data):
|
||||
temp_uncompressed = _test_utils.get_temp_uncompressed_name(test_data)
|
||||
args = BRO_ARGS + ['-d']
|
||||
with open(temp_uncompressed, 'wb') as out_file:
|
||||
with open(test_data, 'rb') as in_file:
|
||||
subprocess.check_call(
|
||||
args, stdin=in_file, stdout=out_file, env=TEST_ENV
|
||||
)
|
||||
|
||||
def _test_decompress_file(self, test_data):
|
||||
self._decompress_file(test_data)
|
||||
self._check_decompression(test_data)
|
||||
|
||||
def _test_decompress_pipe(self, test_data):
|
||||
self._decompress_pipe(test_data)
|
||||
self._check_decompression(test_data)
|
||||
|
||||
|
||||
_test_utils.generate_test_methods(TestBroDecompress, for_decompression=True)
|
||||
|
||||
|
||||
class TestBroCompress(_test_utils.TestCase):
|
||||
|
||||
VARIANTS = {'quality': (1, 6, 9, 11), 'lgwin': (10, 15, 20, 24)}
|
||||
|
||||
def _check_decompression(self, test_data):
|
||||
# Write decompression to temp file and verify it matches the original.
|
||||
temp_uncompressed = _test_utils.get_temp_uncompressed_name(test_data)
|
||||
temp_compressed = _test_utils.get_temp_compressed_name(test_data)
|
||||
original = test_data
|
||||
args = BRO_ARGS + ['-f', '-d']
|
||||
args.extend(['-i', temp_compressed, '-o', temp_uncompressed])
|
||||
subprocess.check_call(args, env=TEST_ENV)
|
||||
self.assert_files_match(temp_uncompressed, original)
|
||||
|
||||
def _compress_file(self, test_data, **kwargs):
|
||||
temp_compressed = _test_utils.get_temp_compressed_name(test_data)
|
||||
args = BRO_ARGS + ['-f']
|
||||
if 'quality' in kwargs:
|
||||
args.extend(['-q', str(kwargs['quality'])])
|
||||
if 'lgwin' in kwargs:
|
||||
args.extend(['--lgwin', str(kwargs['lgwin'])])
|
||||
args.extend(['-i', test_data, '-o', temp_compressed])
|
||||
subprocess.check_call(args, env=TEST_ENV)
|
||||
|
||||
def _compress_pipe(self, test_data, **kwargs):
|
||||
temp_compressed = _test_utils.get_temp_compressed_name(test_data)
|
||||
args = BRO_ARGS
|
||||
if 'quality' in kwargs:
|
||||
args.extend(['-q', str(kwargs['quality'])])
|
||||
if 'lgwin' in kwargs:
|
||||
args.extend(['--lgwin', str(kwargs['lgwin'])])
|
||||
with open(temp_compressed, 'wb') as out_file:
|
||||
with open(test_data, 'rb') as in_file:
|
||||
subprocess.check_call(
|
||||
args, stdin=in_file, stdout=out_file, env=TEST_ENV
|
||||
)
|
||||
|
||||
def _test_compress_file(self, test_data, **kwargs):
|
||||
self._compress_file(test_data, **kwargs)
|
||||
self._check_decompression(test_data)
|
||||
|
||||
def _test_compress_pipe(self, test_data, **kwargs):
|
||||
self._compress_pipe(test_data, **kwargs)
|
||||
self._check_decompression(test_data)
|
||||
|
||||
|
||||
_test_utils.generate_test_methods(
|
||||
TestBroCompress, variants=TestBroCompress.VARIANTS
|
||||
)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
@@ -3,39 +3,17 @@
|
||||
# Distributed under MIT license.
|
||||
# See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
|
||||
|
||||
import unittest
|
||||
|
||||
import brotli
|
||||
import pytest
|
||||
|
||||
from . import _test_utils
|
||||
|
||||
|
||||
class TestCompress(_test_utils.TestCase):
|
||||
|
||||
VARIANTS = {'quality': (1, 6, 9, 11), 'lgwin': (10, 15, 20, 24)}
|
||||
|
||||
def _check_decompression(self, test_data):
|
||||
# Write decompression to temp file and verify it matches the original.
|
||||
temp_uncompressed = _test_utils.get_temp_uncompressed_name(test_data)
|
||||
temp_compressed = _test_utils.get_temp_compressed_name(test_data)
|
||||
original = test_data
|
||||
with open(temp_uncompressed, 'wb') as out_file:
|
||||
with open(temp_compressed, 'rb') as in_file:
|
||||
out_file.write(brotli.decompress(in_file.read()))
|
||||
self.assert_files_match(temp_uncompressed, original)
|
||||
|
||||
def _compress(self, test_data, **kwargs):
|
||||
temp_compressed = _test_utils.get_temp_compressed_name(test_data)
|
||||
with open(temp_compressed, 'wb') as out_file:
|
||||
with open(test_data, 'rb') as in_file:
|
||||
out_file.write(brotli.compress(in_file.read(), **kwargs))
|
||||
|
||||
def _test_compress(self, test_data, **kwargs):
|
||||
self._compress(test_data, **kwargs)
|
||||
self._check_decompression(test_data)
|
||||
|
||||
|
||||
_test_utils.generate_test_methods(TestCompress, variants=TestCompress.VARIANTS)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
@pytest.mark.parametrize("quality", [1, 6, 9, 11])
|
||||
@pytest.mark.parametrize("lgwin", [10, 15, 20, 24])
|
||||
@pytest.mark.parametrize("text_name", _test_utils.gather_text_inputs())
|
||||
def test_compress(quality, lgwin, text_name):
|
||||
original = _test_utils.take_input(text_name)
|
||||
compressed = brotli.compress(original, quality=quality, lgwin=lgwin)
|
||||
decompressed = brotli.decompress(compressed)
|
||||
assert original == decompressed
|
||||
|
||||
@@ -3,98 +3,148 @@
|
||||
# Distributed under MIT license.
|
||||
# See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
|
||||
|
||||
import functools
|
||||
import unittest
|
||||
import queue
|
||||
import random
|
||||
import threading
|
||||
import time
|
||||
|
||||
import brotli
|
||||
import pytest
|
||||
|
||||
from . import _test_utils
|
||||
|
||||
|
||||
# Do not inherit from TestCase here to ensure that test methods
|
||||
# are not run automatically and instead are run as part of a specific
|
||||
# configuration below.
|
||||
class _TestCompressor(object):
|
||||
|
||||
CHUNK_SIZE = 2048
|
||||
|
||||
def tearDown(self):
|
||||
self.compressor = None
|
||||
# super().tearDown() # Requires Py3+
|
||||
|
||||
def _check_decompression(self, test_data):
|
||||
# Write decompression to temp file and verify it matches the original.
|
||||
temp_uncompressed = _test_utils.get_temp_uncompressed_name(test_data)
|
||||
temp_compressed = _test_utils.get_temp_compressed_name(test_data)
|
||||
original = test_data
|
||||
with open(temp_uncompressed, 'wb') as out_file:
|
||||
with open(temp_compressed, 'rb') as in_file:
|
||||
out_file.write(brotli.decompress(in_file.read()))
|
||||
self.assert_files_match(temp_uncompressed, original)
|
||||
|
||||
def _test_single_process(self, test_data):
|
||||
# Write single-shot compression to temp file.
|
||||
temp_compressed = _test_utils.get_temp_compressed_name(test_data)
|
||||
with open(temp_compressed, 'wb') as out_file:
|
||||
with open(test_data, 'rb') as in_file:
|
||||
out_file.write(self.compressor.process(in_file.read()))
|
||||
out_file.write(self.compressor.finish())
|
||||
self._check_decompression(test_data)
|
||||
|
||||
def _test_multiple_process(self, test_data):
|
||||
# Write chunked compression to temp file.
|
||||
temp_compressed = _test_utils.get_temp_compressed_name(test_data)
|
||||
with open(temp_compressed, 'wb') as out_file:
|
||||
with open(test_data, 'rb') as in_file:
|
||||
read_chunk = functools.partial(in_file.read, self.CHUNK_SIZE)
|
||||
for data in iter(read_chunk, b''):
|
||||
out_file.write(self.compressor.process(data))
|
||||
out_file.write(self.compressor.finish())
|
||||
self._check_decompression(test_data)
|
||||
|
||||
def _test_multiple_process_and_flush(self, test_data):
|
||||
# Write chunked and flushed compression to temp file.
|
||||
temp_compressed = _test_utils.get_temp_compressed_name(test_data)
|
||||
with open(temp_compressed, 'wb') as out_file:
|
||||
with open(test_data, 'rb') as in_file:
|
||||
read_chunk = functools.partial(in_file.read, self.CHUNK_SIZE)
|
||||
for data in iter(read_chunk, b''):
|
||||
out_file.write(self.compressor.process(data))
|
||||
out_file.write(self.compressor.flush())
|
||||
out_file.write(self.compressor.finish())
|
||||
self._check_decompression(test_data)
|
||||
@pytest.mark.parametrize("quality", [1, 6, 9, 11])
|
||||
@pytest.mark.parametrize("text_name", _test_utils.gather_text_inputs())
|
||||
def test_single_process(quality, text_name):
|
||||
original = _test_utils.take_input(text_name)
|
||||
compressor = brotli.Compressor(quality=quality)
|
||||
compressed = compressor.process(original)
|
||||
compressed += compressor.finish()
|
||||
decompressed = brotli.decompress(compressed)
|
||||
assert original == decompressed
|
||||
|
||||
|
||||
_test_utils.generate_test_methods(_TestCompressor)
|
||||
@pytest.mark.parametrize("quality", [1, 6, 9, 11])
|
||||
@pytest.mark.parametrize("text_name", _test_utils.gather_text_inputs())
|
||||
def test_multiple_process(quality, text_name):
|
||||
original = _test_utils.take_input(text_name)
|
||||
chunk_size = 2048
|
||||
chunks = _test_utils.chunk_input(original, chunk_size)
|
||||
compressor = brotli.Compressor(quality=quality)
|
||||
compressed = b""
|
||||
for chunk in chunks:
|
||||
compressed += compressor.process(chunk)
|
||||
compressed += compressor.finish()
|
||||
decompressed = brotli.decompress(compressed)
|
||||
assert original == decompressed
|
||||
|
||||
|
||||
class TestCompressorQuality1(_TestCompressor, _test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
# super().setUp() # Requires Py3+
|
||||
self.compressor = brotli.Compressor(quality=1)
|
||||
@pytest.mark.parametrize("quality", [1, 6, 9, 11])
|
||||
@pytest.mark.parametrize("text_name", _test_utils.gather_text_inputs())
|
||||
def test_multiple_process_and_flush(quality, text_name):
|
||||
original = _test_utils.take_input(text_name)
|
||||
chunk_size = 2048
|
||||
chunks = _test_utils.chunk_input(original, chunk_size)
|
||||
compressor = brotli.Compressor(quality=quality)
|
||||
compressed = b""
|
||||
for chunk in chunks:
|
||||
compressed += compressor.process(chunk)
|
||||
compressed += compressor.flush()
|
||||
compressed += compressor.finish()
|
||||
decompressed = brotli.decompress(compressed)
|
||||
assert original == decompressed
|
||||
|
||||
|
||||
class TestCompressorQuality6(_TestCompressor, _test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
# super().setUp() # Requires Py3+
|
||||
self.compressor = brotli.Compressor(quality=6)
|
||||
def make_input(size):
|
||||
abc = [bytes([b]) for b in b"abcdefghijklmnopqrstuvwxyz"]
|
||||
abc_cap = [bytes([b]) for b in b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"]
|
||||
num_words_by_len = [0, 25, 100, 175, 1700, 1000, 1000, 1000]
|
||||
word_set = set()
|
||||
rng = random.Random()
|
||||
rng.seed(2025)
|
||||
words_by_len = [[]]
|
||||
for word_len in range(1, len(num_words_by_len)):
|
||||
num_words = num_words_by_len[word_len]
|
||||
words = []
|
||||
for _ in range(num_words):
|
||||
while True:
|
||||
word = b"".join(
|
||||
[rng.choice(abc_cap)]
|
||||
+ [rng.choice(abc) for _ in range(word_len - 1)]
|
||||
)
|
||||
if word not in word_set:
|
||||
word_set.add(word)
|
||||
words.append(word)
|
||||
break
|
||||
words_by_len.append(words)
|
||||
total_size = 0
|
||||
out = []
|
||||
while total_size < size:
|
||||
word_len = rng.choice(range(1, len(num_words_by_len)))
|
||||
word = rng.choice(words_by_len[word_len])
|
||||
total_size += len(word)
|
||||
out.append(word)
|
||||
return b"".join(out)
|
||||
|
||||
|
||||
class TestCompressorQuality9(_TestCompressor, _test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
# super().setUp() # Requires Py3+
|
||||
self.compressor = brotli.Compressor(quality=9)
|
||||
def _thread_compress(original, compressor, results):
|
||||
compressed = compressor.process(original)
|
||||
compressed += compressor.finish()
|
||||
results.put(1)
|
||||
|
||||
|
||||
class TestCompressorQuality11(_TestCompressor, _test_utils.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
# super().setUp() # Requires Py3+
|
||||
self.compressor = brotli.Compressor(quality=11)
|
||||
def _thread_concurrent_process(compressor, results):
|
||||
time.sleep(0.01)
|
||||
try:
|
||||
_ = compressor.process(b"whatever")
|
||||
except brotli.error:
|
||||
results.put(2)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
def _thread_concurrent_flush(compressor, results):
|
||||
time.sleep(0.02)
|
||||
try:
|
||||
_ = compressor.flush()
|
||||
except brotli.error:
|
||||
results.put(3)
|
||||
|
||||
|
||||
def _thread_concurrent_finish(compressor, results):
|
||||
time.sleep(0.03)
|
||||
try:
|
||||
_ = compressor.finish()
|
||||
except brotli.error:
|
||||
results.put(4)
|
||||
|
||||
|
||||
def test_concurrency():
|
||||
original = make_input(2 * 1024 * 1024)
|
||||
compressor = brotli.Compressor(quality=9)
|
||||
results = queue.Queue()
|
||||
threads = []
|
||||
threads.append(
|
||||
threading.Thread(
|
||||
target=_thread_compress, args=(original, compressor, results)
|
||||
)
|
||||
)
|
||||
threads.append(
|
||||
threading.Thread(
|
||||
target=_thread_concurrent_process, args=(compressor, results)
|
||||
)
|
||||
)
|
||||
threads.append(
|
||||
threading.Thread(
|
||||
target=_thread_concurrent_flush, args=(compressor, results)
|
||||
)
|
||||
)
|
||||
threads.append(
|
||||
threading.Thread(
|
||||
target=_thread_concurrent_finish, args=(compressor, results)
|
||||
)
|
||||
)
|
||||
for thread in threads:
|
||||
thread.start()
|
||||
for thread in threads:
|
||||
thread.join()
|
||||
assert sorted(list(results.queue)) == [1, 2, 3, 4]
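# Descriptive note: the expected queue [1, 2, 3, 4] means the long-running
# compression thread completes normally (1), while the overlapping process,
# flush and finish calls from the other threads raise brotli.error (2, 3, 4),
# i.e. a single Compressor is not meant to be driven from several threads.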
|
||||
|
||||
@@ -3,41 +3,22 @@
|
||||
# Distributed under MIT license.
|
||||
# See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
|
||||
|
||||
import unittest
|
||||
|
||||
import brotli
|
||||
import pytest
|
||||
|
||||
from . import _test_utils
|
||||
|
||||
|
||||
def _get_original_name(test_data):
|
||||
return test_data.split('.compressed')[0]
|
||||
@pytest.mark.parametrize(
|
||||
'compressed_name, original_name', _test_utils.gather_compressed_inputs()
|
||||
)
|
||||
def test_decompress(compressed_name, original_name):
|
||||
compressed = _test_utils.take_input(compressed_name)
|
||||
original = _test_utils.take_input(original_name)
|
||||
decompressed = brotli.decompress(compressed)
|
||||
assert decompressed == original
|
||||
|
||||
|
||||
class TestDecompress(_test_utils.TestCase):
|
||||
|
||||
def _check_decompression(self, test_data):
|
||||
# Verify decompression matches the original.
|
||||
temp_uncompressed = _test_utils.get_temp_uncompressed_name(test_data)
|
||||
original = _get_original_name(test_data)
|
||||
self.assert_files_match(temp_uncompressed, original)
|
||||
|
||||
def _decompress(self, test_data):
|
||||
temp_uncompressed = _test_utils.get_temp_uncompressed_name(test_data)
|
||||
with open(temp_uncompressed, 'wb') as out_file:
|
||||
with open(test_data, 'rb') as in_file:
|
||||
out_file.write(brotli.decompress(in_file.read()))
|
||||
|
||||
def _test_decompress(self, test_data):
|
||||
self._decompress(test_data)
|
||||
self._check_decompression(test_data)
|
||||
|
||||
def test_garbage_appended(self):
|
||||
with self.assertRaises(brotli.error):
|
||||
brotli.decompress(brotli.compress(b'a') + b'a')
|
||||
|
||||
|
||||
_test_utils.generate_test_methods(TestDecompress, for_decompression=True)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
def test_garbage_appended():
|
||||
with pytest.raises(brotli.error):
|
||||
brotli.decompress(brotli.compress(b'a') + b'a')
|
||||
|
||||
@@ -3,116 +3,166 @@
|
||||
# Distributed under MIT license.
|
||||
# See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
|
||||
|
||||
import functools
|
||||
import os
|
||||
import unittest
|
||||
import queue
|
||||
import threading
|
||||
import time
|
||||
|
||||
import brotli
|
||||
import pytest
|
||||
|
||||
from . import _test_utils
|
||||
|
||||
|
||||
def _get_original_name(test_data):
|
||||
return test_data.split('.compressed')[0]
|
||||
MIN_OUTPUT_BUFFER_SIZE = 32768 # Actually, several bytes less.
|
||||
|
||||
|
||||
class TestDecompressor(_test_utils.TestCase):
|
||||
@pytest.mark.parametrize(
|
||||
'compressed_name, original_name', _test_utils.gather_compressed_inputs()
|
||||
)
|
||||
def test_decompress(compressed_name, original_name):
|
||||
decompressor = brotli.Decompressor()
|
||||
compressed = _test_utils.take_input(compressed_name)
|
||||
original = _test_utils.take_input(original_name)
|
||||
chunk_size = 1
|
||||
chunks = _test_utils.chunk_input(compressed, chunk_size)
|
||||
decompressed = b''
|
||||
for chunk in chunks:
|
||||
decompressed += decompressor.process(chunk)
|
||||
assert decompressor.is_finished()
|
||||
assert original == decompressed
|
||||
|
||||
CHUNK_SIZE = 1
|
||||
MIN_OUTPUT_BUFFER_SIZE = 32768 # Actually, several bytes less.
|
||||
|
||||
def setUp(self):
|
||||
# super().setUp() # Requires Py3+
|
||||
self.decompressor = brotli.Decompressor()
|
||||
|
||||
def tearDown(self):
|
||||
self.decompressor = None
|
||||
# super().tearDown() # Requires Py3+
|
||||
|
||||
def _check_decompression(self, test_data):
|
||||
# Verify decompression matches the original.
|
||||
temp_uncompressed = _test_utils.get_temp_uncompressed_name(test_data)
|
||||
original = _get_original_name(test_data)
|
||||
self.assert_files_match(temp_uncompressed, original)
|
||||
|
||||
def _decompress(self, test_data):
|
||||
temp_uncompressed = _test_utils.get_temp_uncompressed_name(test_data)
|
||||
with open(temp_uncompressed, 'wb') as out_file:
|
||||
with open(test_data, 'rb') as in_file:
|
||||
read_chunk = functools.partial(in_file.read, self.CHUNK_SIZE)
|
||||
for data in iter(read_chunk, b''):
|
||||
out_file.write(self.decompressor.process(data))
|
||||
self.assertTrue(self.decompressor.is_finished())
|
||||
|
||||
def _decompress_with_limit(self, test_data):
|
||||
output_buffer_limit = 10922
|
||||
temp_uncompressed = _test_utils.get_temp_uncompressed_name(test_data)
|
||||
with open(temp_uncompressed, 'wb') as out_file:
|
||||
with open(test_data, 'rb') as in_file:
|
||||
chunk_iter = iter(functools.partial(in_file.read, 10 * 1024), b'')
|
||||
while not self.decompressor.is_finished():
|
||||
data = b''
|
||||
if self.decompressor.can_accept_more_data():
|
||||
data = next(chunk_iter, b'')
|
||||
decompressed_data = self.decompressor.process(
|
||||
data, output_buffer_limit=output_buffer_limit
|
||||
)
|
||||
self.assertLessEqual(
|
||||
len(decompressed_data), self.MIN_OUTPUT_BUFFER_SIZE
|
||||
)
|
||||
out_file.write(decompressed_data)
|
||||
self.assertIsNone(next(chunk_iter, None))
|
||||
|
||||
def _test_decompress(self, test_data):
|
||||
self._decompress(test_data)
|
||||
self._check_decompression(test_data)
|
||||
|
||||
def _test_decompress_with_limit(self, test_data):
|
||||
self._decompress_with_limit(test_data)
|
||||
self._check_decompression(test_data)
|
||||
|
||||
def test_too_much_input(self):
|
||||
with open(
|
||||
os.path.join(_test_utils.TESTDATA_DIR, 'zerosukkanooa.compressed'), 'rb'
|
||||
) as in_file:
|
||||
compressed = in_file.read()
|
||||
self.decompressor.process(compressed[:-1], output_buffer_limit=10240)
|
||||
# the following assertion checks whether the test setup is correct
|
||||
self.assertFalse(self.decompressor.can_accept_more_data())
|
||||
with self.assertRaises(brotli.error):
|
||||
self.decompressor.process(compressed[-1:])
|
||||
|
||||
def test_changing_limit(self):
|
||||
test_data = os.path.join(
|
||||
_test_utils.TESTDATA_DIR, 'zerosukkanooa.compressed'
|
||||
@pytest.mark.parametrize(
|
||||
'compressed_name, original_name', _test_utils.gather_compressed_inputs()
|
||||
)
|
||||
def test_decompress_with_limit(compressed_name, original_name):
|
||||
decompressor = brotli.Decompressor()
|
||||
compressed = _test_utils.take_input(compressed_name)
|
||||
original = _test_utils.take_input(original_name)
|
||||
chunk_size = 10 * 1024
|
||||
output_buffer_limit = 10922
|
||||
chunks = _test_utils.chunk_input(compressed, chunk_size)
|
||||
decompressed = b''
|
||||
while not decompressor.is_finished():
|
||||
data = b''
|
||||
if decompressor.can_accept_more_data() and chunks:
|
||||
data = chunks.pop(0)
|
||||
decompressed_chunk = decompressor.process(
|
||||
data, output_buffer_limit=output_buffer_limit
|
||||
)
|
||||
check_output = os.path.exists(test_data.replace('.compressed', ''))
|
||||
temp_uncompressed = _test_utils.get_temp_uncompressed_name(test_data)
|
||||
with open(temp_uncompressed, 'wb') as out_file:
|
||||
with open(test_data, 'rb') as in_file:
|
||||
compressed = in_file.read()
|
||||
uncompressed = self.decompressor.process(
|
||||
compressed[:-1], output_buffer_limit=10240
|
||||
)
|
||||
self.assertLessEqual(len(uncompressed), self.MIN_OUTPUT_BUFFER_SIZE)
|
||||
out_file.write(uncompressed)
|
||||
while not self.decompressor.can_accept_more_data():
|
||||
out_file.write(self.decompressor.process(b''))
|
||||
out_file.write(self.decompressor.process(compressed[-1:]))
|
||||
if check_output:
|
||||
self._check_decompression(test_data)
|
||||
|
||||
def test_garbage_appended(self):
|
||||
with self.assertRaises(brotli.error):
|
||||
self.decompressor.process(brotli.compress(b'a') + b'a')
|
||||
|
||||
def test_already_finished(self):
|
||||
self.decompressor.process(brotli.compress(b'a'))
|
||||
with self.assertRaises(brotli.error):
|
||||
self.decompressor.process(b'a')
|
||||
assert len(decompressed_chunk) <= MIN_OUTPUT_BUFFER_SIZE
|
||||
decompressed += decompressed_chunk
|
||||
assert not chunks
|
||||
assert original == decompressed
|
||||
|
||||
|
||||
_test_utils.generate_test_methods(TestDecompressor, for_decompression=True)
|
||||
def test_too_much_input():
|
||||
decompressor = brotli.Decompressor()
|
||||
compressed = _test_utils.take_input('zerosukkanooa.compressed')
|
||||
decompressor.process(compressed[:-1], output_buffer_limit=10240)
|
||||
# The following assertion checks whether the test setup is correct.
|
||||
assert not decompressor.can_accept_more_data()
|
||||
with pytest.raises(brotli.error):
|
||||
decompressor.process(compressed[-1:])
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
||||
def test_changing_limit():
|
||||
decompressor = brotli.Decompressor()
|
||||
input_name = 'zerosukkanooa'
|
||||
compressed = _test_utils.take_input(input_name + '.compressed')
|
||||
check_output = _test_utils.has_input(input_name)
|
||||
decompressed = decompressor.process(
|
||||
compressed[:-1], output_buffer_limit=10240
|
||||
)
|
||||
assert len(decompressed) <= MIN_OUTPUT_BUFFER_SIZE
|
||||
while not decompressor.can_accept_more_data():
|
||||
decompressed += decompressor.process(b'')
|
||||
decompressed += decompressor.process(compressed[-1:])
|
||||
if check_output:
|
||||
original = _test_utils.take_input(input_name)
|
||||
assert original == decompressed
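# Descriptive note: when output_buffer_limit truncates the produced output,
# can_accept_more_data() stays False and the pending bytes are drained with
# process(b'') calls until the decompressor is ready for more input.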
|
||||
|
||||
|
||||
def test_garbage_appended():
|
||||
decompressor = brotli.Decompressor()
|
||||
with pytest.raises(brotli.error):
|
||||
decompressor.process(brotli.compress(b'a') + b'a')
|
||||
|
||||
|
||||
def test_already_finished():
|
||||
decompressor = brotli.Decompressor()
|
||||
decompressor.process(brotli.compress(b'a'))
|
||||
with pytest.raises(brotli.error):
|
||||
decompressor.process(b'a')
|
||||
|
||||
|
||||
def make_input(size):
|
||||
compressor = brotli.Compressor(quality=1)
|
||||
prologue = compressor.process(b'b')
|
||||
prologue += compressor.flush()
|
||||
filler = compressor.process(b'c')
|
||||
filler += compressor.flush()
|
||||
epilogue = compressor.finish()
|
||||
return b''.join([prologue] + [filler] * (size // len(filler)) + [epilogue])
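# Descriptive note: make_input stitches a large brotli stream from one flushed
# (byte-aligned) meta-block repeated size // len(filler) times plus a final
# finish(); presumably this keeps decompression busy long enough for the
# concurrent calls in test_concurrency below to overlap.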
|
||||
|
||||
|
||||
def _thread_decompress(compressed, decompressor, results):
|
||||
_ = decompressor.process(compressed)
|
||||
if decompressor.is_finished():
|
||||
results.put(1)
|
||||
|
||||
|
||||
def _thread_concurrent_process(decompressor, results):
|
||||
time.sleep(0.01)
|
||||
try:
|
||||
_ = decompressor.process(b'')
|
||||
except brotli.error:
|
||||
results.put(2)
|
||||
|
||||
|
||||
def _thread_concurrent_can_accept_more_data(decompressor, results):
|
||||
time.sleep(0.02)
|
||||
try:
|
||||
_ = decompressor.can_accept_more_data()
|
||||
except brotli.error:
|
||||
results.put(3)
|
||||
|
||||
|
||||
def _thread_concurrent_is_finished(decompressor, results):
|
||||
time.sleep(0.03)
|
||||
try:
|
||||
_ = decompressor.is_finished()
|
||||
except brotli.error:
|
||||
results.put(4)
|
||||
|
||||
|
||||
def test_concurrency():
|
||||
compressed = make_input(16 * 1024 * 1024)
|
||||
decompressor = brotli.Decompressor()
|
||||
results = queue.Queue()
|
||||
threads = []
|
||||
threads.append(
|
||||
threading.Thread(
|
||||
target=_thread_decompress, args=(compressed, decompressor, results)
|
||||
)
|
||||
)
|
||||
threads.append(
|
||||
threading.Thread(
|
||||
target=_thread_concurrent_process, args=(decompressor, results)
|
||||
)
|
||||
)
|
||||
threads.append(
|
||||
threading.Thread(
|
||||
target=_thread_concurrent_can_accept_more_data,
|
||||
args=(decompressor, results),
|
||||
)
|
||||
)
|
||||
threads.append(
|
||||
threading.Thread(
|
||||
target=_thread_concurrent_is_finished, args=(decompressor, results)
|
||||
)
|
||||
)
|
||||
for thread in threads:
|
||||
thread.start()
|
||||
for thread in threads:
|
||||
thread.join()
|
||||
assert sorted(list(results.queue)) == [1, 2, 3, 4]
|
||||
|
||||
@@ -8,6 +8,7 @@ I found the following issues with the Brotli format:
|
||||
- The block type code is useless if NBLTYPES==2, you would only need 1 symbol
|
||||
anyway, so why don't you just switch to "the other" type?
|
||||
"""
|
||||
# ruff: noqa
|
||||
import struct
|
||||
from operator import itemgetter, methodcaller
|
||||
from itertools import accumulate, repeat
|
||||
|
||||
setup.py
@@ -3,144 +3,129 @@
|
||||
# Distributed under MIT license.
|
||||
# See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
|
||||
|
||||
"""This script is used for building and packaging the Brotli extension."""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import unittest
|
||||
|
||||
try:
|
||||
from setuptools import Extension
|
||||
from setuptools import setup
|
||||
except:
|
||||
from distutils.core import Extension
|
||||
from distutils.core import setup
|
||||
from distutils.command.build_ext import build_ext
|
||||
from distutils import errors
|
||||
from distutils import dep_util
|
||||
from distutils import log
|
||||
|
||||
import setuptools
|
||||
import setuptools.command.build_ext as build_ext
|
||||
import setuptools.errors as errors
|
||||
import setuptools.modified as modified
|
||||
|
||||
|
||||
CURR_DIR = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
|
||||
LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def bool_from_environ(key):
|
||||
value = os.environ.get(key)
|
||||
if not value:
|
||||
return False
|
||||
if value == "1":
|
||||
return True
|
||||
if value == "0":
|
||||
return False
|
||||
raise ValueError("Environment variable {} has invalid value {}. Please set it to 1, 0 or an empty string".format(key, value))
|
||||
|
||||
|
||||
def read_define(path, macro):
|
||||
"""Return macro value from the given file."""
|
||||
with open(path, "r") as f:
|
||||
for line in f:
|
||||
m = re.match(r"#define\s{}\s+(.+)".format(macro), line)
|
||||
if m:
|
||||
return m.group(1)
|
||||
return ""
|
||||
value = os.environ.get(key)
|
||||
if not value:
|
||||
return False
|
||||
if value == "1":
|
||||
return True
|
||||
if value == "0":
|
||||
return False
|
||||
raise ValueError(
|
||||
"Environment variable {} has invalid value {}. Please set it to 1, 0 or"
|
||||
" an empty string".format(key, value)
|
||||
)
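# Usage sketch: bool_from_environ gates optional build behaviour via
# environment variables, e.g. USE_SYSTEM_BROTLI=1 selects the system-library
# branch further below.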
|
||||
|
||||
|
||||
def get_version():
|
||||
"""Return library version string from 'common/version.h' file."""
|
||||
version_file_path = os.path.join(CURR_DIR, "c", "common", "version.h")
|
||||
major = read_define(version_file_path, "BROTLI_VERSION_MAJOR")
|
||||
minor = read_define(version_file_path, "BROTLI_VERSION_MINOR")
|
||||
patch = read_define(version_file_path, "BROTLI_VERSION_PATCH")
|
||||
if not major or not minor or not patch:
|
||||
return ""
|
||||
return "{}.{}.{}".format(major, minor, patch)
|
||||
"""Return library version string from 'common/version.h' file."""
|
||||
version_file_path = os.path.join(CURR_DIR, "c", "common", "version.h")
|
||||
defs = {}
|
||||
with open(version_file_path, "r") as file:
|
||||
for line in file:
|
||||
m = re.match(r"#define\s+(\w+)\s+(\d+)", line)
|
||||
if m:
|
||||
defs[m.group(1)] = m.group(2)
|
||||
parts = ["MAJOR", "MINOR", "PATCH"]
|
||||
major, minor, patch = [defs.get("BROTLI_VERSION_" + key) for key in parts]
|
||||
if not major or not minor or not patch:
|
||||
return ""
|
||||
return "{}.{}.{}".format(major, minor, patch)
|
||||
|
||||
|
||||
def get_test_suite():
|
||||
test_loader = unittest.TestLoader()
|
||||
test_suite = test_loader.discover("python", pattern="*_test.py")
|
||||
return test_suite
|
||||
class BuildExt(build_ext.build_ext):
|
||||
"""Customized build_ext command to handle Brotli extension building."""
|
||||
|
||||
def get_source_files(self):
|
||||
filenames = super().get_source_files()
|
||||
for ext in self.extensions:
|
||||
filenames.extend(ext.depends)
|
||||
return filenames
|
||||
|
||||
def build_extension(self, ext):
|
||||
if ext.sources is None or not isinstance(ext.sources, (list, tuple)):
|
||||
raise errors.DistutilsSetupError(
|
||||
"in 'ext_modules' option (extension '%s'), "
|
||||
"'sources' must be present and must be "
|
||||
"a list of source filenames"
|
||||
% ext.name
|
||||
)
|
||||
|
||||
ext_path = self.get_ext_fullpath(ext.name)
|
||||
depends = ext.sources + ext.depends
|
||||
is_outdated = modified.newer_group(depends, ext_path, "newer")
|
||||
if self.force or is_outdated:
|
||||
LOGGER.info("building '%s' extension", ext.name)
|
||||
else:
|
||||
LOGGER.debug("skipping '%s' extension (up-to-date)", ext.name)
|
||||
return
|
||||
|
||||
c_sources = []
|
||||
for source in ext.sources:
|
||||
if source.endswith(".c"):
|
||||
c_sources.append(source)
|
||||
extra_args = ext.extra_compile_args or []
|
||||
|
||||
objects = []
|
||||
|
||||
macros = ext.define_macros[:]
|
||||
for undef in ext.undef_macros:
|
||||
macros.append((undef,))
|
||||
|
||||
objs = self.compiler.compile(
|
||||
c_sources,
|
||||
output_dir=self.build_temp,
|
||||
macros=macros,
|
||||
include_dirs=ext.include_dirs,
|
||||
debug=self.debug,
|
||||
extra_postargs=extra_args,
|
||||
depends=ext.depends,
|
||||
)
|
||||
objects.extend(objs)
|
||||
|
||||
self._built_objects = objects[:]
|
||||
if ext.extra_objects:
|
||||
objects.extend(ext.extra_objects)
|
||||
extra_args = ext.extra_link_args or []
|
||||
# When using GCC on Windows, we statically link libgcc and libstdc++,
|
||||
# so that we don't need to package extra DLLs.
|
||||
if self.compiler.compiler_type == "mingw32":
|
||||
extra_args.extend(["-static-libgcc", "-static-libstdc++"])
|
||||
|
||||
ext_path = self.get_ext_fullpath(ext.name)
|
||||
# Detect target language, if not provided.
|
||||
language = ext.language or self.compiler.detect_language(c_sources)
|
||||
|
||||
self.compiler.link_shared_object(
|
||||
objects,
|
||||
ext_path,
|
||||
libraries=self.get_libraries(ext),
|
||||
library_dirs=ext.library_dirs,
|
||||
runtime_library_dirs=ext.runtime_library_dirs,
|
||||
extra_postargs=extra_args,
|
||||
export_symbols=self.get_export_symbols(ext),
|
||||
debug=self.debug,
|
||||
build_temp=self.build_temp,
|
||||
target_lang=language,
|
||||
)
|
||||
|
||||
|
||||
class BuildExt(build_ext):
|
||||
def get_source_files(self):
|
||||
filenames = build_ext.get_source_files(self)
|
||||
for ext in self.extensions:
|
||||
filenames.extend(ext.depends)
|
||||
return filenames
|
||||
|
||||
def build_extension(self, ext):
|
||||
if ext.sources is None or not isinstance(ext.sources, (list, tuple)):
|
||||
raise errors.DistutilsSetupError(
|
||||
"in 'ext_modules' option (extension '%s'), "
|
||||
"'sources' must be present and must be "
|
||||
"a list of source filenames" % ext.name
|
||||
)
|
||||
|
||||
ext_path = self.get_ext_fullpath(ext.name)
|
||||
depends = ext.sources + ext.depends
|
||||
if not (self.force or dep_util.newer_group(depends, ext_path, "newer")):
|
||||
log.debug("skipping '%s' extension (up-to-date)", ext.name)
|
||||
return
|
||||
else:
|
||||
log.info("building '%s' extension", ext.name)
|
||||
|
||||
c_sources = []
|
||||
for source in ext.sources:
|
||||
if source.endswith(".c"):
|
||||
c_sources.append(source)
|
||||
extra_args = ext.extra_compile_args or []
|
||||
|
||||
objects = []
|
||||
|
||||
macros = ext.define_macros[:]
|
||||
if self.compiler.compiler_type == "mingw32":
|
||||
# On Windows Python 2.7, pyconfig.h defines "hypot" as "_hypot",
|
||||
# This clashes with GCC's cmath, and causes compilation errors when
|
||||
# building under MinGW: http://bugs.python.org/issue11566
|
||||
macros.append(("_hypot", "hypot"))
|
||||
for undef in ext.undef_macros:
|
||||
macros.append((undef,))
|
||||
|
||||
objs = self.compiler.compile(
|
||||
c_sources,
|
||||
output_dir=self.build_temp,
|
||||
macros=macros,
|
||||
include_dirs=ext.include_dirs,
|
||||
debug=self.debug,
|
||||
extra_postargs=extra_args,
|
||||
depends=ext.depends,
|
||||
)
|
||||
objects.extend(objs)
|
||||
|
||||
self._built_objects = objects[:]
|
||||
if ext.extra_objects:
|
||||
objects.extend(ext.extra_objects)
|
||||
extra_args = ext.extra_link_args or []
|
||||
# when using GCC on Windows, we statically link libgcc and libstdc++,
|
||||
# so that we don't need to package extra DLLs
|
||||
if self.compiler.compiler_type == "mingw32":
|
||||
extra_args.extend(["-static-libgcc", "-static-libstdc++"])
|
||||
|
||||
ext_path = self.get_ext_fullpath(ext.name)
|
||||
# Detect target language, if not provided
|
||||
language = ext.language or self.compiler.detect_language(c_sources)
|
||||
|
||||
self.compiler.link_shared_object(
|
||||
objects,
|
||||
ext_path,
|
||||
libraries=self.get_libraries(ext),
|
||||
library_dirs=ext.library_dirs,
|
||||
runtime_library_dirs=ext.runtime_library_dirs,
|
||||
extra_postargs=extra_args,
|
||||
export_symbols=self.get_export_symbols(ext),
|
||||
debug=self.debug,
|
||||
build_temp=self.build_temp,
|
||||
target_lang=language,
|
||||
)
|
||||
|
||||
|
||||
NAME = "Brotli"
|
||||
NAME = "brotli"
|
||||
|
||||
VERSION = get_version()
|
||||
|
||||
@@ -158,20 +143,18 @@ CLASSIFIERS = [
|
||||
"Development Status :: 4 - Beta",
|
||||
"Environment :: Console",
|
||||
"Intended Audience :: Developers",
|
||||
# Deprecated, see https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#license for details.
|
||||
# "License :: OSI Approved :: MIT License",
|
||||
"Operating System :: MacOS :: MacOS X",
|
||||
"Operating System :: Microsoft :: Windows",
|
||||
"Operating System :: POSIX :: Linux",
|
||||
"Programming Language :: C",
|
||||
"Programming Language :: C++",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 2",
|
||||
"Programming Language :: Python :: 2.7",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.3",
|
||||
"Programming Language :: Python :: 3.4",
|
||||
"Programming Language :: Python :: 3.5",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Programming Language :: Python :: 3.13",
|
||||
"Programming Language :: Python :: 3.14",
|
||||
"Programming Language :: Unix Shell",
|
||||
"Topic :: Software Development :: Libraries",
|
||||
"Topic :: Software Development :: Libraries :: Python Modules",
|
||||
@@ -188,154 +171,153 @@ PY_MODULES = ["brotli"]
|
||||
USE_SYSTEM_BROTLI = bool_from_environ("USE_SYSTEM_BROTLI")
|
||||
|
||||
if USE_SYSTEM_BROTLI:
|
||||
import pkgconfig
|
||||
|
||||
REQUIRED_BROTLI_SYSTEM_LIBRARIES = ["libbrotlicommon", "libbrotlienc", "libbrotlidec"]
|
||||
import pkgconfig
|
||||
|
||||
define_macros = []
|
||||
include_dirs = []
|
||||
libraries = []
|
||||
library_dirs = []
|
||||
REQUIRED_BROTLI_SYSTEM_LIBRARIES = [
|
||||
"libbrotlicommon",
|
||||
"libbrotlienc",
|
||||
"libbrotlidec",
|
||||
]
|
||||
|
||||
for required_system_library in REQUIRED_BROTLI_SYSTEM_LIBRARIES:
|
||||
package_configuration = pkgconfig.parse(required_system_library)
|
||||
define_macros = []
|
||||
include_dirs = []
|
||||
libraries = []
|
||||
library_dirs = []
|
||||
|
||||
define_macros += package_configuration["define_macros"]
|
||||
include_dirs += package_configuration["include_dirs"]
|
||||
libraries += package_configuration["libraries"]
|
||||
library_dirs += package_configuration["library_dirs"]
|
||||
for required_system_library in REQUIRED_BROTLI_SYSTEM_LIBRARIES:
|
||||
package_configuration = pkgconfig.parse(required_system_library)
|
||||
|
||||
brotli_extension = Extension(
|
||||
'_brotli',
|
||||
sources=[
|
||||
'python/_brotli.c'
|
||||
],
|
||||
include_dirs=include_dirs,
|
||||
define_macros=define_macros,
|
||||
libraries=libraries,
|
||||
library_dirs=library_dirs
|
||||
)
|
||||
define_macros += package_configuration["define_macros"]
|
||||
include_dirs += package_configuration["include_dirs"]
|
||||
libraries += package_configuration["libraries"]
|
||||
library_dirs += package_configuration["library_dirs"]
|
||||
|
||||
brotli_extension = setuptools.Extension(
|
||||
"_brotli",
|
||||
sources=["python/_brotli.c"],
|
||||
include_dirs=include_dirs,
|
||||
define_macros=define_macros,
|
||||
libraries=libraries,
|
||||
library_dirs=library_dirs,
|
||||
)
|
||||
|
||||
EXT_MODULES = [brotli_extension]
|
||||
EXT_MODULES = [brotli_extension]
|
||||
else:
|
||||
EXT_MODULES = [
|
||||
Extension(
|
||||
"_brotli",
|
||||
sources=[
|
||||
"python/_brotli.c",
|
||||
"c/common/constants.c",
|
||||
"c/common/context.c",
|
||||
"c/common/dictionary.c",
|
||||
"c/common/platform.c",
|
||||
"c/common/shared_dictionary.c",
|
||||
"c/common/transform.c",
|
||||
"c/dec/bit_reader.c",
|
||||
"c/dec/decode.c",
|
||||
"c/dec/huffman.c",
|
||||
"c/dec/prefix.c",
|
||||
"c/dec/state.c",
|
||||
"c/dec/static_init.c",
|
||||
"c/enc/backward_references.c",
|
||||
"c/enc/backward_references_hq.c",
|
||||
"c/enc/bit_cost.c",
|
||||
"c/enc/block_splitter.c",
|
||||
"c/enc/brotli_bit_stream.c",
|
||||
"c/enc/cluster.c",
|
||||
"c/enc/command.c",
|
||||
"c/enc/compound_dictionary.c",
|
||||
"c/enc/compress_fragment.c",
|
||||
"c/enc/compress_fragment_two_pass.c",
|
||||
"c/enc/dictionary_hash.c",
|
||||
"c/enc/encode.c",
|
||||
"c/enc/encoder_dict.c",
|
||||
"c/enc/entropy_encode.c",
|
||||
"c/enc/fast_log.c",
|
||||
"c/enc/histogram.c",
|
||||
"c/enc/literal_cost.c",
|
||||
"c/enc/memory.c",
|
||||
"c/enc/metablock.c",
|
||||
"c/enc/static_dict.c",
|
||||
"c/enc/static_dict_lut.c",
|
||||
"c/enc/static_init.c",
|
||||
"c/enc/utf8_util.c",
|
||||
],
|
||||
depends=[
|
||||
"c/common/constants.h",
|
||||
"c/common/context.h",
|
||||
"c/common/dictionary.h",
|
||||
"c/common/platform.h",
|
||||
"c/common/shared_dictionary_internal.h",
|
||||
"c/common/static_init.h",
|
||||
"c/common/transform.h",
|
||||
"c/common/version.h",
|
||||
"c/dec/bit_reader.h",
|
||||
"c/dec/huffman.h",
|
||||
"c/dec/prefix.h",
|
||||
"c/dec/prefix_inc.h",
|
||||
"c/dec/state.h",
|
||||
"c/dec/static_init.h",
|
||||
"c/enc/backward_references.h",
|
||||
"c/enc/backward_references_hq.h",
|
||||
"c/enc/backward_references_inc.h",
|
||||
"c/enc/bit_cost.h",
|
||||
"c/enc/bit_cost_inc.h",
|
||||
"c/enc/block_encoder_inc.h",
|
||||
"c/enc/block_splitter.h",
|
||||
"c/enc/block_splitter_inc.h",
|
||||
"c/enc/brotli_bit_stream.h",
|
||||
"c/enc/cluster.h",
|
||||
"c/enc/cluster_inc.h",
|
||||
"c/enc/command.h",
|
||||
"c/enc/compound_dictionary.h",
|
||||
"c/enc/compress_fragment.h",
|
||||
"c/enc/compress_fragment_two_pass.h",
|
||||
"c/enc/dictionary_hash.h",
|
||||
"c/enc/dictionary_hash_inc.h",
|
||||
"c/enc/encoder_dict.h",
|
||||
"c/enc/entropy_encode.h",
|
||||
"c/enc/entropy_encode_static.h",
|
||||
"c/enc/fast_log.h",
|
||||
"c/enc/find_match_length.h",
|
||||
"c/enc/hash.h",
|
||||
"c/enc/hash_composite_inc.h",
|
||||
"c/enc/hash_forgetful_chain_inc.h",
|
||||
"c/enc/hash_longest_match64_inc.h",
|
||||
"c/enc/hash_longest_match_inc.h",
|
||||
"c/enc/hash_longest_match_quickly_inc.h",
|
||||
"c/enc/hash_rolling_inc.h",
|
||||
"c/enc/hash_to_binary_tree_inc.h",
|
||||
"c/enc/histogram.h",
|
||||
"c/enc/histogram_inc.h",
|
||||
"c/enc/literal_cost.h",
|
||||
"c/enc/memory.h",
|
||||
"c/enc/metablock.h",
|
||||
"c/enc/metablock_inc.h",
|
||||
"c/enc/params.h",
|
||||
"c/enc/prefix.h",
|
||||
"c/enc/quality.h",
|
||||
"c/enc/ringbuffer.h",
|
||||
"c/enc/static_dict.h",
|
||||
"c/enc/static_dict_lut.h",
|
||||
"c/enc/static_init.h",
|
||||
"c/enc/utf8_util.h",
|
||||
"c/enc/write_bits.h",
|
||||
],
|
||||
include_dirs=[
|
||||
"c/include",
|
||||
]),
|
||||
]
|
||||
|
||||
TEST_SUITE = "setup.get_test_suite"
|
||||
sources = [
|
||||
"python/_brotli.c",
|
||||
"c/common/constants.c",
|
||||
"c/common/context.c",
|
||||
"c/common/dictionary.c",
|
||||
"c/common/platform.c",
|
||||
"c/common/shared_dictionary.c",
|
||||
"c/common/transform.c",
|
||||
"c/dec/bit_reader.c",
|
||||
"c/dec/decode.c",
|
||||
"c/dec/huffman.c",
|
||||
"c/dec/prefix.c",
|
||||
"c/dec/state.c",
|
||||
"c/dec/static_init.c",
|
||||
"c/enc/backward_references.c",
|
||||
"c/enc/backward_references_hq.c",
|
||||
"c/enc/bit_cost.c",
|
||||
"c/enc/block_splitter.c",
|
||||
"c/enc/brotli_bit_stream.c",
|
||||
"c/enc/cluster.c",
|
||||
"c/enc/command.c",
|
||||
"c/enc/compound_dictionary.c",
|
||||
"c/enc/compress_fragment.c",
|
||||
"c/enc/compress_fragment_two_pass.c",
|
||||
"c/enc/dictionary_hash.c",
|
||||
"c/enc/encode.c",
|
||||
"c/enc/encoder_dict.c",
|
||||
"c/enc/entropy_encode.c",
|
||||
"c/enc/fast_log.c",
|
||||
"c/enc/histogram.c",
|
||||
"c/enc/literal_cost.c",
|
||||
"c/enc/memory.c",
|
||||
"c/enc/metablock.c",
|
||||
"c/enc/static_dict.c",
|
||||
"c/enc/static_dict_lut.c",
|
||||
"c/enc/static_init.c",
|
||||
"c/enc/utf8_util.c",
|
||||
]
|
||||
headers = [
|
||||
"c/common/constants.h",
|
||||
"c/common/context.h",
|
||||
"c/common/dictionary.h",
|
||||
"c/common/platform.h",
|
||||
"c/common/shared_dictionary_internal.h",
|
||||
"c/common/static_init.h",
|
||||
"c/common/transform.h",
|
||||
"c/common/version.h",
|
||||
"c/dec/bit_reader.h",
|
||||
"c/dec/huffman.h",
|
||||
"c/dec/prefix.h",
|
||||
"c/dec/prefix_inc.h",
|
||||
"c/dec/state.h",
|
||||
"c/dec/static_init.h",
|
||||
"c/enc/backward_references.h",
|
||||
"c/enc/backward_references_hq.h",
|
||||
"c/enc/backward_references_inc.h",
|
||||
"c/enc/bit_cost.h",
|
||||
"c/enc/bit_cost_inc.h",
|
||||
"c/enc/block_encoder_inc.h",
|
||||
"c/enc/block_splitter.h",
|
||||
"c/enc/block_splitter_inc.h",
|
||||
"c/enc/brotli_bit_stream.h",
|
||||
"c/enc/cluster.h",
|
||||
"c/enc/cluster_inc.h",
|
||||
"c/enc/command.h",
|
||||
"c/enc/compound_dictionary.h",
|
||||
"c/enc/compress_fragment.h",
|
||||
"c/enc/compress_fragment_two_pass.h",
|
||||
"c/enc/dictionary_hash.h",
|
||||
"c/enc/dictionary_hash_inc.h",
|
||||
"c/enc/encoder_dict.h",
|
||||
"c/enc/entropy_encode.h",
|
||||
"c/enc/entropy_encode_static.h",
|
||||
"c/enc/fast_log.h",
|
||||
"c/enc/find_match_length.h",
|
||||
"c/enc/hash.h",
|
||||
"c/enc/hash_composite_inc.h",
|
||||
"c/enc/hash_forgetful_chain_inc.h",
|
||||
"c/enc/hash_longest_match64_inc.h",
|
||||
"c/enc/hash_longest_match_inc.h",
|
||||
"c/enc/hash_longest_match_quickly_inc.h",
|
||||
"c/enc/hash_rolling_inc.h",
|
||||
"c/enc/hash_to_binary_tree_inc.h",
|
||||
"c/enc/histogram.h",
|
||||
"c/enc/histogram_inc.h",
|
||||
"c/enc/literal_cost.h",
|
||||
"c/enc/memory.h",
|
||||
"c/enc/metablock.h",
|
||||
"c/enc/metablock_inc.h",
|
||||
"c/enc/params.h",
|
||||
"c/enc/prefix.h",
|
||||
"c/enc/quality.h",
|
||||
"c/enc/ringbuffer.h",
|
||||
"c/enc/static_dict.h",
|
||||
"c/enc/static_dict_lut.h",
|
||||
"c/enc/static_init.h",
|
||||
"c/enc/utf8_util.h",
|
||||
"c/enc/write_bits.h",
|
||||
]
|
||||
brotli_extension = setuptools.Extension(
|
||||
"_brotli",
|
||||
sources=sources,
|
||||
depends=headers,
|
||||
include_dirs=["c/include"],
|
||||
)
|
||||
EXT_MODULES = [brotli_extension]
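# Descriptive note: with USE_SYSTEM_BROTLI=1 the extension links against the
# pkg-config-discovered system libraries above; otherwise it compiles the
# vendored C sources under c/ listed in `sources` and `headers`.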
|
||||
|
||||
CMD_CLASS = {
|
||||
"build_ext": BuildExt,
|
||||
}
|
||||
|
||||
with open("README.md", "r") as f:
|
||||
README = f.read()
|
||||
README = f.read()
|
||||
|
||||
setup(
|
||||
setuptools.setup(
|
||||
name=NAME,
|
||||
description=DESCRIPTION,
|
||||
long_description=README,
|
||||
@@ -349,6 +331,5 @@ setup(
|
||||
package_dir=PACKAGE_DIR,
|
||||
py_modules=PY_MODULES,
|
||||
ext_modules=EXT_MODULES,
|
||||
test_suite=TEST_SUITE,
|
||||
cmdclass=CMD_CLASS,
|
||||
)
|
||||
|
||||