diff --git a/.github/workflows/build_maixcam.yml b/.github/workflows/build_maixcam.yml new file mode 100755 index 00000000..51295bdf --- /dev/null +++ b/.github/workflows/build_maixcam.yml @@ -0,0 +1,107 @@ +# This is a basic workflow to help you get started with Actions + +name: Build MaixCAM + +# Controls when the action will run. +on: + # Triggers the workflow on push or pull request events but only for the main branch + push: + branches: [ main ] + # pull_request: + # branches: [ main ] + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +permissions: write-all + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + # This workflow contains a single job called "build" + build: + # Only run job for specific repository + if: github.repository == 'sipeed/MaixPy' + # The type of runner that the job will run on + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.11"] # MaixCAM use 3.11 + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Build MaixPy + run: | + echo "-- Check python version must python 3.11 --" + python3 -c 'import sys;print(sys.version);assert sys.version_info >= (3, 11);assert sys.version_info < (3, 12)' + python -c 'import sys;print(sys.version);assert sys.version_info >= (3, 11);assert sys.version_info < (3, 12)' + whereis python + whereis python3 + # export PATH=~/.local/bin/:$PATH + # pull sipeed/MaixCDK repo here first + pwd_path=$(pwd) + cd ~ + git clone https://github.com/sipeed/MaixCDK --depth=1 + export MAIXCDK_PATH=`pwd`/MaixCDK + cd $pwd_path + python -m pip install -U pip setuptools wheel + python -m pip install -r $MAIXCDK_PATH/requirements.txt + python -m pip install pybind11-stubgen + echo "--------------------------------" + echo "-- Build MaixPy for Linux now --" + echo "--------------------------------" + sudo apt update -y + sudo apt install -y libopencv-dev libopencv-contrib-dev libsdl2-dev cmake + cmake --version + python setup.py bdist_wheel linux + echo "--------------------------------" + echo "-- Test MaixPy basic for Linux now --" + echo "--------------------------------" + chmod +x ./run.sh && ./run.sh test/test_basic.py + mkdir -p artifact + mv dist/* artifact/ + echo "----------------------------------" + echo "-- Build MaixPy for MaixCAM now --" + echo "----------------------------------" + python setup.py bdist_wheel maixcam + mv dist/* artifact/ + + - name: Upload MaixPy Linux firmware as artifact + uses: actions/upload-artifact@v4 + with: + name: maixpy_firmware + path: artifact/*.whl + + # Runs a set of commands using the runners shell + - name: Push doc to github pages + run: | + pip3 install teedoc + cd docs + echo "== install plugins ==" + teedoc install + echo "== start build ==" + teedoc build + echo "== build complete ==" + remote_addr=`git remote get-url --push origin` + remote_addr=`echo $remote_addr| awk -F'://' '{print $2}'` + user_name=`git log -1 --pretty=format:'%an'` + user_email=`git log -1 --pretty=format:'%ae'` + echo "== checkout gh-pages branch ==" + cd out + cp -r ../../.github . 
+ git config --global init.defaultBranch gh-pages + git init + git config user.name "${user_name}" + git config user.email ${user_email} + remote_addr="https://Neutree:${{ secrets.DISPATCH_PAT }}@${remote_addr}" + echo "-- user ${user_name}" + echo "-- remote addr: ${remote_addr}" + git remote add origin "${remote_addr}" + echo "== add web files ==" + git add -A + git commit -m "Rebuild MaixPy doc by commit $GITHUB_REF" + git push origin HEAD:gh-pages --force + echo "== push complete ==" diff --git a/.github/workflows/build_maixcam_dev.yml b/.github/workflows/build_maixcam_dev.yml new file mode 100755 index 00000000..364f567b --- /dev/null +++ b/.github/workflows/build_maixcam_dev.yml @@ -0,0 +1,88 @@ +# This is a basic workflow to help you get started with Actions + +name: Build MaixCAM Dev branch + +# Controls when the action will run. +on: + # Triggers the workflow on push or pull request events but only for the main branch + push: + branches: [ dev ] + # pull_request: + # branches: [ main ] + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +permissions: write-all + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + # This workflow contains a single job called "build" + build: + # Only run job for specific repository + if: github.repository == 'sipeed/MaixPy' + # The type of runner that the job will run on + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.11"] # MaixCAM use 3.11 + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Build MaixPy + run: | + echo "-- Check python version must python 3.11 --" + python3 -c 'import sys;print(sys.version);assert sys.version_info >= (3, 11);assert sys.version_info < (3, 12)' + python -c 'import sys;print(sys.version);assert sys.version_info >= (3, 11);assert sys.version_info < (3, 12)' + whereis python + whereis python3 + # export PATH=~/.local/bin/:$PATH + # pull sipeed/MaixCDK repo here first + pwd_path=$(pwd) + cd ~ + git clone https://github.com/sipeed/MaixCDK -b dev --depth=1 + export MAIXCDK_PATH=`pwd`/MaixCDK + cd $pwd_path + python -m pip install -U pip setuptools wheel + python -m pip install -r $MAIXCDK_PATH/requirements.txt + python -m pip install pybind11-stubgen + echo "--------------------------------" + echo "-- Build MaixPy for Linux now --" + echo "--------------------------------" + sudo apt update -y + sudo apt install -y libopencv-dev libopencv-contrib-dev libsdl2-dev cmake + cmake --version + python setup.py bdist_wheel linux + echo "--------------------------------" + echo "-- Test MaixPy basic for Linux now --" + echo "--------------------------------" + chmod +x ./run.sh && ./run.sh test/test_basic.py + mkdir -p artifact + mv dist/* artifact/ + echo "----------------------------------" + echo "-- Build MaixPy for MaixCAM now --" + echo "----------------------------------" + python setup.py bdist_wheel maixcam + mv dist/* artifact/ + + - name: Upload MaixPy Linux firmware as artifact + uses: actions/upload-artifact@v4 + with: + name: maixpy_firmware + path: artifact/*.whl + + # Runs a set of commands using the runners shell + - name: build doc + run: | + pip3 install teedoc + cd docs + echo "== install plugins ==" + teedoc install + echo "== start build ==" + teedoc build + 
echo "== build complete ==" + diff --git a/.github/workflows/release_maixcam.yml b/.github/workflows/release_maixcam.yml new file mode 100644 index 00000000..8ce0cf72 --- /dev/null +++ b/.github/workflows/release_maixcam.yml @@ -0,0 +1,205 @@ +name: Relase MaixPy for MaixCAM + +on: + release: + types: [published] + workflow_dispatch: + +permissions: write-all + +jobs: + build: + name: release and upload assets task + strategy: + matrix: + python-version: ["3.11"] # must use str, not int, or 3.10 will be recognized as 3.1 + os: ["ubuntu-latest"] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Build MaixPy + id: build_maixpy + run: | + echo "-- Check python version must python 3.11 --" + python3 -c 'import sys;print(sys.version);assert sys.version_info >= (3, 11);assert sys.version_info < (3, 12)' + python -c 'import sys;print(sys.version);assert sys.version_info >= (3, 11);assert sys.version_info < (3, 12)' + whereis python + whereis python3 + # export PATH=~/.local/bin/:$PATH + # pull sipeed/MaixCDK repo here first + maixpy_path=$(pwd) + maixpy_version=`git describe --tag` + cd ~ + git clone https://github.com/sipeed/MaixCDK --depth=1 + export MAIXCDK_PATH=`pwd`/MaixCDK + cd $maixpy_path + python -m pip install -U pip setuptools wheel twine + python -m pip install -r $MAIXCDK_PATH/requirements.txt + python -m pip install pybind11-stubgen + echo "--------------------------------" + echo "-- Build MaixPy for Linux now --" + echo "--------------------------------" + sudo apt update -y + sudo apt install -y libopencv-dev libopencv-contrib-dev libsdl2-dev cmake libfuse-dev mtools + cmake --version + python setup.py bdist_wheel linux + echo "--------------------------------" + echo "-- Test MaixPy basic for Linux now --" + echo "--------------------------------" + chmod +x ./run.sh && ./run.sh test/test_basic.py + mkdir -p artifact + cp dist/* artifact/ + release_name=`ls artifact|awk '{print $1}'` + release_path=artifact/$release_name + echo "release_linux_path=$release_path" >> $GITHUB_OUTPUT + echo "release_linux_name=$release_name" >> $GITHUB_OUTPUT + echo "----------------------------------" + echo "-- Build MaixPy for MaixCAM now --" + echo "----------------------------------" + python setup.py bdist_wheel maixcam + cp dist/* artifact/ + release_name=`ls dist|awk '{print $1}'` + release_path=`realpath dist/$release_name` + echo "release_path=$release_path" >> $GITHUB_OUTPUT + echo "release_name=$release_name" >> $GITHUB_OUTPUT + echo "--------------------------------" + echo "-- Generate MaixCDK version file --" + echo "--------------------------------" + cd $MAIXCDK_PATH + maixcdk_rev=`git rev-parse HEAD` + maixcdk_version_name="maixcdk_version_${maixcdk_rev}.txt" + echo "MaixPy ${maixpy_version} use MaixCDK commit ${maixcdk_rev}" > $maixcdk_version_name + maixcdk_version_path=`realpath "${MAIXCDK_PATH}/${maixcdk_version_name}"` + echo "maixcdk_version_path=$maixcdk_version_path" >> $GITHUB_OUTPUT + echo "maixcdk_version_name=$maixcdk_version_name" >> $GITHUB_OUTPUT + cd $maixpy_path + echo "--------------------------------" + echo "-- Generate system --" + echo "--------------------------------" + cd tools/os + chmod +x gen_os.sh + date_now=`date +"%Y-%m-%d"` + os_version_name="maixcam-${date_now}-maixpy-${maixpy_version}" + base_os_path=tmp/base_os.img.xz + python download_base_os.py -o ${base_os_path} + python download_builtin_files.py --unzip tmp/dl_builtin_files + 
builtin_files_dir=tmp/dl_builtin_files/sys_builtin_files + ./gen_os.sh $base_os_path $release_path $builtin_files_dir $os_version_name + os_filename=${os_version_name}.img.xz + os_filepath=`pwd`/tmp/$os_filename + sha256sum $release_path >> ${maixpy_path}/sha256sum_files.txt + sha256sum $os_filepath > ${maixpy_path}/sha256sum_files.txt + echo "os_path=$os_filepath" >> $GITHUB_OUTPUT + echo "os_name=$os_filename" >> $GITHUB_OUTPUT + echo "-------------------------------------" + echo "-- Generate system for MaixCAM-Pro --" + echo "-------------------------------------" + os_version_name_pro="maixcam-pro-${date_now}-maixpy-${maixpy_version}" + ./gen_os.sh $base_os_path $release_path $builtin_files_dir $os_version_name_pro 1 maixcam-pro + os_filename=${os_version_name_pro}.img.xz + os_filepath=`pwd`/tmp/$os_filename + sha256sum $os_filepath >> ${maixpy_path}/sha256sum_files.txt + echo "os_pro_path=$os_filepath" >> $GITHUB_OUTPUT + echo "os_pro_name=$os_filename" >> $GITHUB_OUTPUT + + - name: Build doc + id: build_doc + run: | + maixpy_path=$(pwd) + pip3 install teedoc + cd docs + echo "== install plugins ==" + teedoc install + echo "== start build ==" + teedoc build + echo "== build complete ==" + remote_addr=`git remote get-url --push origin` + remote_addr=`echo $remote_addr| awk -F'://' '{print $2}'` + user_name=`git log -1 --pretty=format:'%an'` + user_email=`git log -1 --pretty=format:'%ae'` + echo "== checkout gh-pages branch ==" + doc_dirname=maixpy_${{ github.ref_name }}_doc + doc_dir=${maixpy_path}/$doc_dirname + echo "#!/bin/bash" > out/view_doc.sh + echo "python -m http.server" >> out/view_doc.sh + echo "python -m http.server" > out/view_doc.bat + mkdir -p ${doc_dir}/html + mv ./out/* ${doc_dir}/html/ + mkdir -p ${doc_dir}/source + mv ./* ${doc_dir}/source/ + cd ${maixpy_path} + zip ${doc_dirname}.zip -r $doc_dirname + release_name=${doc_dirname}.zip + release_path=${maixpy_path}/$release_name + sha256sum $release_path >> ${maixpy_path}/sha256sum_files.txt + echo "release_doc_path=$release_path" >> $GITHUB_OUTPUT + echo "release_doc_name=$release_name" >> $GITHUB_OUTPUT + echo "sha256sum_path=${maixpy_path}/sha256sum_files.txt" >> $GITHUB_OUTPUT + echo "sha256sum_name=sha256sum_files.txt" >> $GITHUB_OUTPUT + + - name: Upload MaixPy Doc to release assets + uses: svenstaro/upload-release-action@v2 + with: + file: ${{ steps.build_doc.outputs.release_doc_path }} + asset_name: ${{ steps.build_doc.outputs.release_doc_name }} + tag: ${{ github.ref }} + repo_token: ${{ secrets.GITHUB_TOKEN }} + + - name: Upload sha256sum file to release assets + uses: svenstaro/upload-release-action@v2 + with: + file: ${{ steps.build_doc.outputs.sha256sum_path }} + asset_name: ${{ steps.build_doc.outputs.sha256sum_name }} + tag: ${{ github.ref }} + repo_token: ${{ secrets.GITHUB_TOKEN }} + + - name: Upload MaixCDK version txt to release assets + uses: svenstaro/upload-release-action@v2 + with: + file: ${{ steps.build_maixpy.outputs.maixcdk_version_path }} + asset_name: ${{ steps.build_maixpy.outputs.maixcdk_version_name }} + tag: ${{ github.ref }} + repo_token: ${{ secrets.GITHUB_TOKEN }} + + - name: Upload MaixPy MaixCAM to release assets + uses: svenstaro/upload-release-action@v2 + with: + file: ${{ steps.build_maixpy.outputs.release_path }} + asset_name: ${{ steps.build_maixpy.outputs.release_name }} + tag: ${{ github.ref }} + repo_token: ${{ secrets.GITHUB_TOKEN }} + + - name: Upload MaixPy Linux to release assets + uses: svenstaro/upload-release-action@v2 + with: + file: ${{ 
steps.build_maixpy.outputs.release_linux_path }} + asset_name: ${{ steps.build_maixpy.outputs.release_linux_name }} + tag: ${{ github.ref }} + repo_token: ${{ secrets.GITHUB_TOKEN }} + + - name: Upload MaixCAM OS to release assets + uses: svenstaro/upload-release-action@v2 + with: + file: ${{ steps.build_maixpy.outputs.os_path }} + asset_name: ${{ steps.build_maixpy.outputs.os_name }} + tag: ${{ github.ref }} + repo_token: ${{ secrets.GITHUB_TOKEN }} + + - name: Upload MaixCAM-Pro OS to release assets + uses: svenstaro/upload-release-action@v2 + with: + file: ${{ steps.build_maixpy.outputs.os_pro_path }} + asset_name: ${{ steps.build_maixpy.outputs.os_pro_name }} + tag: ${{ github.ref }} + repo_token: ${{ secrets.GITHUB_TOKEN }} + + - name: Publish MaixPy to pypi.org + run: | + echo "[pypi]" > ~/.pypirc + echo " username = __token__" >> ~/.pypirc + echo " password = ${{ secrets.PYPI_TOKEN }}" >> ~/.pypirc + twine upload artifact/*.whl + diff --git a/.github/workflows/release_other_linux.yml b/.github/workflows/release_other_linux.yml new file mode 100644 index 00000000..1aa74cbc --- /dev/null +++ b/.github/workflows/release_other_linux.yml @@ -0,0 +1,67 @@ +name: Relase MaixPy for Linux + +on: + release: + types: [published] + workflow_dispatch: + +permissions: write-all + +jobs: + build: + name: release and upload assets task + strategy: + matrix: + python-version: ["3.8", "3.9", "3.10", "3.12"] # 3.11 released by release_maixcam + os: ["ubuntu-latest"] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Build MaixPy + id: build_maixpy + run: | + whereis python + whereis python3 + # export PATH=~/.local/bin/:$PATH + # pull sipeed/MaixCDK repo here first + pwd_path=$(pwd) + cd ~ + git clone https://github.com/sipeed/MaixCDK --depth=1 + export MAIXCDK_PATH=`pwd`/MaixCDK + cd $pwd_path + python -m pip install -U pip setuptools wheel twine + python -m pip install -r $MAIXCDK_PATH/requirements.txt + python -m pip install pybind11-stubgen + echo "--------------------------------" + echo "-- Build MaixPy for Linux now --" + echo "--------------------------------" + sudo apt update -y + sudo apt install -y libopencv-dev libopencv-contrib-dev libsdl2-dev cmake + cmake --version + python setup.py bdist_wheel linux + echo "--------------------------------" + echo "-- Test MaixPy basic for Linux now --" + echo "--------------------------------" + release_name=`ls dist|awk '{print $1}'` + release_path=dist/$release_name + echo "release_path=$release_path" >> $GITHUB_OUTPUT + echo "release_name=$release_name" >> $GITHUB_OUTPUT + + - name: Publish MaixPy to pypi.org + run: | + echo "[pypi]" > ~/.pypirc + echo " username = __token__" >> ~/.pypirc + echo " password = ${{ secrets.PYPI_TOKEN }}" >> ~/.pypirc + twine upload dist/*.whl + + - name: Upload to release assets + uses: svenstaro/upload-release-action@v2 + with: + file: ${{ steps.build_maixpy.outputs.release_path }} + asset_name: ${{ steps.build_maixpy.outputs.release_name }} + tag: ${{ github.ref }} + repo_token: ${{ secrets.GITHUB_TOKEN }} + diff --git a/.github/workflows/trigger_wiki.yml b/.github/workflows/trigger_wiki.yml new file mode 100755 index 00000000..35e99749 --- /dev/null +++ b/.github/workflows/trigger_wiki.yml @@ -0,0 +1,45 @@ +# This is a basic workflow to help you get started with Actions + +name: trigger wiki + +# Controls when the action will run. 
+on: + push: + branches: + - gh-pages + pull_request: + branches: + - gh-pages + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +permissions: write-all + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + # This workflow contains a single job called "build" + trigger_wiki: + name: trigger sipeed_wiki + # Only run job for specific repository + # if: github.repository == 'sipeed/MaixPy' + # The type of runner that the job will run on + runs-on: ubuntu-latest + # strategy: + # matrix: + # python-version: [3.8] + + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + - name: trigger sipeed wiki request + uses: actions/github-script@v6 + with: + github-token: ${{ secrets.DISPATCH_PAT }} + script: | + const result = await github.rest.repos.createDispatchEvent({ + owner: 'sipeed', + repo: 'sipeed_wiki', + event_type: 'update_maixpy_doc', + client_payload: {"key": "value"} + }) + console.log(result); diff --git a/maixpy/404.html b/maixpy/404.html new file mode 100644 index 00000000..004faaf5 --- /dev/null +++ b/maixpy/404.html @@ -0,0 +1,221 @@ + + + + + + + + + + + + + + + + + + + MaixPy + + + + + + + + + + + +
+ [maixpy/404.html body: generated "MaixPy" 404 page showing "404 页面未找到" (404 Page Not Found) with links "返回上一页" (Back) and "首页" (Home); the surrounding HTML/CSS/JS markup is omitted here]
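The next hunk adds maixpy/api/api.json, a generated dump of the MaixPy API documentation (maix.err, maix.tensor, maix.image and more) produced during the build. As a quick orientation, the sketch below exercises a few of the py_def signatures listed in that JSON. It is an illustrative sketch only, not part of the diff, and it assumes a MaixPy environment where the maix package and numpy are installed.

# Illustrative sketch only: calls follow the py_def signatures in api.json below.
# Assumes a MaixPy environment with the `maix` package and numpy installed.
import numpy as np
from maix import err, tensor

# maix.err: turn an error code into a string and raise only on failure.
e = err.Err.ERR_NONE
print(err.to_str(e))                       # human-readable error string
err.check_raise(e, "unexpected error")     # raises err.Exception only if e != ERR_NONE

# maix.tensor: wrap a float32 numpy array as a Tensor (tensor_from_numpy_float32).
arr = np.zeros((2, 3), dtype=np.float32)
t = tensor.tensor_from_numpy_float32(arr)  # copy=True by default
print(t.shape())                           # [2, 3]
print(t.dtype() == tensor.DType.FLOAT32)   # expected True for float32 input
t.flatten()                                # flatten to 1D in place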
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/api.json b/maixpy/api/api.json new file mode 100644 index 00000000..dde4dbf7 --- /dev/null +++ b/maixpy/api/api.json @@ -0,0 +1,30263 @@ +{ + "type": "top_module", + "members": { + "maix": { + "type": "module", + "doc": { + "breif": "MaixPy C/C++ API from MaixCDK" + }, + "members": { + "err": { + "type": "module", + "doc": { + "brief": "maix.err module" + }, + "members": { + "Err": { + "type": "enum", + "name": "Err", + "doc": { + "brief": "Maix Error code", + "maixpy": "maix.err.Err", + "py_doc": "Maix Error code" + }, + "values": [ + [ + "ERR_NONE", + "0", + "No error" + ], + [ + "ERR_ARGS", + "", + "Invalid arguments" + ], + [ + "ERR_NO_MEM", + "", + "No memory" + ], + [ + "ERR_NOT_IMPL", + "", + "Not implemented" + ], + [ + "ERR_NOT_READY", + "", + "Not ready" + ], + [ + "ERR_NOT_INIT", + "", + "Not initialized" + ], + [ + "ERR_NOT_OPEN", + "", + "Not opened" + ], + [ + "ERR_NOT_PERMIT", + "", + "Not permitted" + ], + [ + "ERR_REOPEN", + "", + "Re-open" + ], + [ + "ERR_BUSY", + "", + "Busy" + ], + [ + "ERR_READ", + "", + "Read error" + ], + [ + "ERR_WRITE", + "", + "Write error" + ], + [ + "ERR_TIMEOUT", + "", + "Timeout" + ], + [ + "ERR_RUNTIME", + "", + "Runtime error" + ], + [ + "ERR_IO", + "", + "IO error" + ], + [ + "ERR_NOT_FOUND", + "", + "Not found" + ], + [ + "ERR_ALREAY_EXIST", + "", + "Already exist" + ], + [ + "ERR_BUFF_FULL", + "", + "Buffer full" + ], + [ + "ERR_BUFF_EMPTY", + "", + "Buffer empty" + ], + [ + "ERR_CANCEL", + "", + "Cancel" + ], + [ + "ERR_OVERFLOW", + "", + "Overflow" + ], + [ + "ERR_MAX", + "", + "" + ] + ], + "def": "enum Err\n {\n // !!! fixed error code, DO NOT change number already defined, only append new error code\n ERR_NONE = 0, // No error\n ERR_ARGS , // Invalid arguments\n ERR_NO_MEM , // No memory\n ERR_NOT_IMPL , // Not implemented\n ERR_NOT_READY , // Not ready\n ERR_NOT_INIT , // Not initialized\n ERR_NOT_OPEN , // Not opened\n ERR_NOT_PERMIT , // Not permitted\n ERR_REOPEN , // Re-open\n ERR_BUSY , // Busy\n ERR_READ , // Read error\n ERR_WRITE , // Write error\n ERR_TIMEOUT , // Timeout\n ERR_RUNTIME , // Runtime error\n ERR_IO , // IO error\n ERR_NOT_FOUND , // Not found\n ERR_ALREAY_EXIST , // Already exist\n ERR_BUFF_FULL , // Buffer full\n ERR_BUFF_EMPTY , // Buffer empty\n ERR_CANCEL , // Cancel\n ERR_OVERFLOW , // Overflow\n ERR_MAX,\n }" + }, + "Exception": { + "type": "class", + "name": "Exception", + "doc": { + "brief": "Maix Exception", + "maixpy": "maix.err.Exception", + "py_doc": "Maix Exception" + }, + "members": {}, + "def": "class Exception : public std::exception" + }, + "to_str": { + "type": "func", + "name": "to_str", + "doc": { + "brief": "Error code to string", + "param": { + "e": "direction [in], error code, err::Err type" + }, + "return": "error string", + "maixpy": "maix.err.to_str", + "py_doc": "Error code to string\n\nArgs:\n - e: direction [in], error code, err::Err type\n\n\nReturns: error string\n" + }, + "args": [ + [ + "err::Err", + "e", + null + ] + ], + "ret_type": "std::string", + "static": false, + "def": "std::string to_str(err::Err e)", + "py_def": "def to_str(e: Err) -> str" + }, + "get_error": { + "type": "func", + "name": "get_error", + "doc": { + "brief": "get last error string", + "return": "error string", + "maixpy": "maix.err.get_error", + "py_doc": "get last error string\n\nReturns: error string\n" + }, + "args": [], + "ret_type": "std::string&", + "static": false, + "def": "std::string& 
get_error()", + "py_def": "def get_error() -> str" + }, + "set_error": { + "type": "func", + "name": "set_error", + "doc": { + "brief": "set last error string", + "param": { + "str": "direction [in], error string" + }, + "maixpy": "maix.err.set_error", + "py_doc": "set last error string\n\nArgs:\n - str: direction [in], error string\n" + }, + "args": [ + [ + "const std::string &", + "str", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void set_error(const std::string &str)", + "py_def": "def set_error(str: str) -> None" + }, + "check_raise": { + "type": "func", + "name": "check_raise", + "doc": { + "brief": "Check error code, if not ERR_NONE, raise err.Exception", + "param": { + "e": "direction [in], error code, err::Err type", + "msg": "direction [in], error message" + }, + "maixpy": "maix.err.check_raise", + "py_doc": "Check error code, if not ERR_NONE, raise err.Exception\n\nArgs:\n - e: direction [in], error code, err::Err type\n - msg: direction [in], error message\n" + }, + "args": [ + [ + "err::Err", + "e", + null + ], + [ + "const std::string &", + "msg", + "\"\"" + ] + ], + "ret_type": "void", + "static": false, + "def": "void check_raise(err::Err e, const std::string &msg = \"\")", + "py_def": "def check_raise(e: Err, msg: str = '') -> None" + }, + "check_bool_raise": { + "type": "func", + "name": "check_bool_raise", + "doc": { + "brief": "Check condition, if false, raise err.Exception", + "param": { + "ok": "direction [in], condition, if true, do nothing, if false, raise err.Exception", + "msg": "direction [in], error message" + }, + "maixpy": "maix.err.check_bool_raise", + "py_doc": "Check condition, if false, raise err.Exception\n\nArgs:\n - ok: direction [in], condition, if true, do nothing, if false, raise err.Exception\n - msg: direction [in], error message\n" + }, + "args": [ + [ + "bool", + "ok", + null + ], + [ + "const std::string &", + "msg", + "\"\"" + ] + ], + "ret_type": "void", + "static": false, + "def": "void check_bool_raise(bool ok, const std::string &msg = \"\")", + "py_def": "def check_bool_raise(ok: bool, msg: str = '') -> None" + }, + "check_null_raise": { + "type": "func", + "name": "check_null_raise", + "doc": { + "brief": "Check NULL pointer, if NULL, raise exception", + "param": { + "ptr": "direction [in], pointer", + "msg": "direction [in], error message" + }, + "maixpy": "maix.err.check_null_raise", + "py_doc": "Check NULL pointer, if NULL, raise exception\n\nArgs:\n - ptr: direction [in], pointer\n - msg: direction [in], error message\n" + }, + "args": [ + [ + "void *", + "ptr", + null + ], + [ + "const std::string &", + "msg", + "\"\"" + ] + ], + "ret_type": "void", + "static": false, + "def": "void check_null_raise(void *ptr, const std::string &msg = \"\")", + "py_def": "def check_null_raise(ptr: capsule, msg: str = '') -> None" + } + }, + "auto_add": true + }, + "tensor": { + "type": "module", + "doc": { + "brief": "maix.tensor module" + }, + "members": { + "DType": { + "type": "enum", + "name": "DType", + "doc": { + "brief": "Tensor data types", + "maixpy": "maix.tensor.DType", + "py_doc": "Tensor data types" + }, + "values": [ + [ + "UINT8", + "0", + "" + ], + [ + "INT8", + "", + "" + ], + [ + "UINT16", + "", + "" + ], + [ + "INT16", + "", + "" + ], + [ + "UINT32", + "", + "" + ], + [ + "INT32", + "", + "" + ], + [ + "FLOAT16", + "", + "" + ], + [ + "FLOAT32", + "", + "" + ], + [ + "FLOAT64", + "", + "" + ], + [ + "BOOL", + "", + "" + ], + [ + "DTYPE_MAX", + "", + "" + ] + ], + "def": "enum DType\n {\n UINT8 = 0,\n INT8,\n 
UINT16,\n INT16,\n UINT32,\n INT32,\n FLOAT16,\n FLOAT32,\n FLOAT64,\n BOOL,\n // STRING,\n // OBJECT,\n DTYPE_MAX\n }" + }, + "dtype_size": { + "type": "var", + "name": "", + "doc": { + "brief": "Tensor data type size in bytes", + "attention": "It's a copy of this variable in MaixPy,\nso change it in C++ (e.g. update var in hello function) will not take effect the var inMaixPy.\nSo we add const for this var to avoid this mistake.", + "maixpy": "maix.tensor.dtype_size", + "py_doc": "Tensor data type size in bytes" + }, + "value": "{\n 1, // UINT8\n 1, // INT8\n 2, // UINT16\n 2, // INT16\n 4, // UINT32\n 4, // INT32\n 2, // FLOAT16\n 4, // FLOAT32\n 8, // FLOAT64\n 1, // BOOL\n // 1, // STRING\n // 1, // OBJECT\n 0\n }", + "static": false, + "readonly": true, + "def": "const std::vector dtype_size = {\n 1, // UINT8\n 1, // INT8\n 2, // UINT16\n 2, // INT16\n 4, // UINT32\n 4, // INT32\n 2, // FLOAT16\n 4, // FLOAT32\n 8, // FLOAT64\n 1, // BOOL\n // 1, // STRING\n // 1, // OBJECT\n 0\n }" + }, + "dtype_name": { + "type": "var", + "name": "", + "doc": { + "brief": "Tensor data type name", + "maixpy": "maix.tensor.dtype_name", + "py_doc": "Tensor data type name" + }, + "value": "{\n \"uint8\",\n \"int8\",\n \"uint16\",\n \"int16\",\n \"uint32\",\n \"int32\",\n \"float16\",\n \"float32\",\n \"float64\",\n \"bool\",\n // \"string\",\n // \"object\",\n \"invalid\"\n }", + "static": false, + "readonly": true, + "def": "const std::vector dtype_name = {\n \"uint8\",\n \"int8\",\n \"uint16\",\n \"int16\",\n \"uint32\",\n \"int32\",\n \"float16\",\n \"float32\",\n \"float64\",\n \"bool\",\n // \"string\",\n // \"object\",\n \"invalid\"\n }" + }, + "Tensor": { + "type": "class", + "name": "Tensor", + "doc": { + "brief": "Tensor class", + "maixpy": "maix.tensor.Tensor", + "py_doc": "Tensor class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Tensor", + "doc": { + "brief": "Tensor constructor", + "param": { + "shape": "tensor shape, a int list", + "dtype": "tensor element data type, see DType of this module" + }, + "maixpy": "maix.tensor.Tensor.__init__", + "py_doc": "Tensor constructor\n\nArgs:\n - shape: tensor shape, a int list\n - dtype: tensor element data type, see DType of this module\n" + }, + "args": [ + [ + "std::vector", + "shape", + null + ], + [ + "tensor::DType", + "dtype", + null + ] + ], + "ret_type": null, + "static": false, + "def": "Tensor(std::vector shape, tensor::DType dtype)", + "py_def": "def __init__(self, shape: list[int], dtype: DType) -> None" + }, + "to_str": { + "type": "func", + "name": "to_str", + "doc": { + "brief": "To string", + "maixpy": "maix.tensor.Tensor.to_str", + "py_doc": "To string" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string to_str()", + "py_def": "def to_str(self) -> str" + }, + "__str__": { + "type": "func", + "name": "__str__", + "doc": { + "brief": "To string", + "maixpy": "maix.tensor.Tensor.__str__", + "py_doc": "To string" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string __str__()", + "py_def": "def __str__(self) -> str" + }, + "shape": { + "type": "func", + "name": "shape", + "doc": { + "brief": "get tensor shape", + "return": "tensor shape, a int list", + "maixpy": "maix.tensor.Tensor.shape", + "py_doc": "get tensor shape\n\nReturns: tensor shape, a int list\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector shape()", + "py_def": "def shape(self) -> list[int]" + }, + "expand_dims": { + "type": "func", + 
"name": "expand_dims", + "doc": { + "brief": "expand tensor shape", + "param": { + "axis": "axis to expand" + }, + "maixpy": "maix.tensor.Tensor.expand_dims", + "py_doc": "expand tensor shape\n\nArgs:\n - axis: axis to expand\n" + }, + "args": [ + [ + "int", + "axis", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void expand_dims(int axis)", + "py_def": "def expand_dims(self, axis: int) -> None" + }, + "reshape": { + "type": "func", + "name": "reshape", + "doc": { + "brief": "reshape tensor shape, if size not match, it will throw an err::Exception", + "param": { + "shape": "new shape" + }, + "maixpy": "maix.tensor.Tensor.reshape", + "py_doc": "reshape tensor shape, if size not match, it will throw an err::Exception\n\nArgs:\n - shape: new shape\n" + }, + "args": [ + [ + "std::vector", + "shape", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void reshape(std::vector shape)", + "py_def": "def reshape(self, shape: list[int]) -> None" + }, + "flatten": { + "type": "func", + "name": "flatten", + "doc": { + "brief": "Flatten tensor shape to 1D", + "maixpy": "maix.tensor.Tensor.flatten", + "py_doc": "Flatten tensor shape to 1D" + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void flatten()", + "py_def": "def flatten(self) -> None" + }, + "dtype": { + "type": "func", + "name": "dtype", + "doc": { + "brief": "get tensor data type", + "return": "tensor data type, see DType of this module", + "maixpy": "maix.tensor.Tensor.dtype", + "py_doc": "get tensor data type\n\nReturns: tensor data type, see DType of this module\n" + }, + "args": [], + "ret_type": "tensor::DType", + "static": false, + "def": "tensor::DType dtype()", + "py_def": "def dtype(self) -> DType" + }, + "to_float_list": { + "type": "func", + "name": "to_float_list", + "doc": { + "brief": "get tensor data and return a list", + "return": "list type data", + "maixpy": "maix.tensor.Tensor.to_float_list", + "py_doc": "get tensor data and return a list\n\nReturns: list type data\n" + }, + "args": [], + "ret_type": "std::valarray*", + "static": false, + "def": "std::valarray* to_float_list()", + "py_def": "def to_float_list(self) -> list[float]" + }, + "argmax": { + "type": "func", + "name": "argmax", + "doc": { + "brief": "argmax of tensor", + "param": { + "axis": "By default, the index is into the flattened array, otherwise along the specified axis., wrong axis will throw an err::Exception" + }, + "return": "argmax result, you need to delete it after use in C++.", + "maixpy": "maix.tensor.Tensor.argmax", + "py_doc": "argmax of tensor\n\nArgs:\n - axis: By default, the index is into the flattened array, otherwise along the specified axis., wrong axis will throw an err::Exception\n\n\nReturns: argmax result, you need to delete it after use in C++.\n" + }, + "args": [ + [ + "int", + "axis", + "0xffff" + ] + ], + "ret_type": "tensor::Tensor*", + "static": false, + "def": "tensor::Tensor *argmax(int axis = 0xffff)", + "py_def": "def argmax(self, axis: int = 65535) -> Tensor" + }, + "argmax1": { + "type": "func", + "name": "argmax1", + "doc": { + "brief": "argmax1, flattened data max index", + "return": "argmax result, int type", + "maixpy": "maix.tensor.Tensor.argmax1", + "py_doc": "argmax1, flattened data max index\n\nReturns: argmax result, int type\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int argmax1()", + "py_def": "def argmax1(self) -> int" + } + }, + "def": "class Tensor" + }, + "Tensors": { + "type": "class", + "name": "Tensors", + "doc": { + 
"brief": "Tensors", + "maixpy": "maix.tensor.Tensors", + "py_doc": "Tensors" + }, + "members": { + "__init__": { + "type": "func", + "name": "Tensors", + "doc": { + "brief": "Constructor of Tensors", + "maixpy": "maix.tensor.Tensors.__init__", + "maixcdk": "maix.tensor.Tensors.Tensors", + "py_doc": "Constructor of Tensors" + }, + "args": [], + "ret_type": null, + "static": false, + "def": "Tensors()", + "py_def": "def __init__(self) -> None" + }, + "add_tensor": { + "type": "func", + "name": "add_tensor", + "doc": { + "brief": "Add tensor", + "maixpy": "maix.tensor.Tensors.add_tensor", + "py_doc": "Add tensor" + }, + "args": [ + [ + "const std::string &", + "key", + null + ], + [ + "tensor::Tensor *", + "tensor", + null + ], + [ + "bool", + "copy", + null + ], + [ + "bool", + "auto_delete", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void add_tensor(const std::string &key, tensor::Tensor *tensor, bool copy, bool auto_delete)", + "py_def": "def add_tensor(self, key: str, tensor: Tensor, copy: bool, auto_delete: bool) -> None" + }, + "rm_tensor": { + "type": "func", + "name": "rm_tensor", + "doc": { + "brief": "Remove tensor", + "maixpy": "maix.tensor.Tensors.rm_tensor", + "py_doc": "Remove tensor" + }, + "args": [ + [ + "const std::string &", + "key", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void rm_tensor(const std::string &key)", + "py_def": "def rm_tensor(self, key: str) -> None" + }, + "clear": { + "type": "func", + "name": "clear", + "doc": { + "brief": "Clear tensors", + "maixpy": "maix.tensor.Tensors.clear", + "py_doc": "Clear tensors" + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void clear()", + "py_def": "def clear(self) -> None" + }, + "get_tensor": { + "type": "func", + "name": "get_tensor", + "doc": { + "brief": "Get tensor by key", + "maixpy": "maix.tensor.Tensors.get_tensor", + "maixcdk": "maix.tensor.Tensors.get_tensor", + "py_doc": "Get tensor by key" + }, + "args": [ + [ + "const std::string &", + "key", + null + ] + ], + "ret_type": "tensor::Tensor&", + "static": false, + "def": "tensor::Tensor &get_tensor(const std::string &key)", + "py_def": "def get_tensor(self, key: str) -> Tensor" + }, + "__getitem__": { + "type": "func", + "name": "operator[]", + "doc": { + "brief": "Operator []", + "maixpy": "maix.tensor.Tensors.__getitem__", + "maixcdk": "maix.tensor.Tensors.[]", + "py_doc": "Operator []" + }, + "args": [ + [ + "const std::string &", + "key", + null + ] + ], + "ret_type": "tensor::Tensor&", + "static": false, + "def": "tensor::Tensor &operator[](const std::string &key)", + "py_def": "def __getitem__(self, key: str) -> Tensor" + }, + "__len__": { + "type": "func", + "name": "size", + "doc": { + "brief": "Size", + "maixpy": "maix.tensor.Tensors.__len__", + "maixcdk": "maix.tensor.Tensors.size", + "py_doc": "Size" + }, + "args": [], + "ret_type": "size_t", + "static": false, + "def": "size_t size()", + "py_def": "def __len__(self) -> int" + }, + "keys": { + "type": "func", + "name": "keys", + "doc": { + "brief": "Get names", + "maixpy": "maix.tensor.Tensors.keys", + "py_doc": "Get names" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector keys()", + "py_def": "def keys(self) -> list[str]" + }, + "tensors": { + "type": "var", + "name": "tensors", + "doc": { + "brief": "Tensors data, dict type", + "maixpy": "maix.tensor.Tensors.tensors", + "py_doc": "Tensors data, dict type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::map 
tensors" + } + }, + "def": "class Tensors" + }, + "tensor_from_numpy_float32": { + "type": "func", + "name": "tensor_from_numpy_float32", + "doc": { + "brief": "float32 type numpy ndarray object to tensor.Tensor object.", + "param": { + "array": "numpy array object.", + "copy": "if true, will alloc new buffer and copy data, else will directly use array's data buffer, default true.\nUse this arg carefully, when set to false, ther array MUST keep alive until we don't use the return tensor of this func, or will cause program crash." + }, + "return": "tensor.Tensor object.", + "maixpy": "maix.tensor.tensor_from_numpy_float32", + "py_doc": "float32 type numpy ndarray object to tensor.Tensor object.\n\nArgs:\n - array: numpy array object.\n - copy: if true, will alloc new buffer and copy data, else will directly use array's data buffer, default true.\nUse this arg carefully, when set to false, ther array MUST keep alive until we don't use the return tensor of this func, or will cause program crash.\n\n\nReturns: tensor.Tensor object.\n" + }, + "args": [ + [ + "py::array_t", + "array", + null + ], + [ + "bool", + "copy", + "true" + ] + ], + "ret_type": "tensor::Tensor*", + "static": false, + "def": "tensor::Tensor *tensor_from_numpy_float32(py::array_t array, bool copy = true)", + "py_def": "def tensor_from_numpy_float32(array: numpy.ndarray[numpy.float32], copy: bool = True) -> Tensor" + }, + "tensor_from_numpy_uint8": { + "type": "func", + "name": "tensor_from_numpy_uint8", + "doc": { + "brief": "uint8 type numpy ndarray object to tensor.Tensor object.", + "param": { + "array": "numpy array object.", + "copy": "if true, will alloc new buffer and copy data, else will directly use array's data buffer, default true.\nUse this arg carefully, when set to false, ther array MUST keep alive until we don't use the return tensor of this func, or will cause program crash." + }, + "return": "tensor.Tensor object.", + "maixpy": "maix.tensor.tensor_from_numpy_uint8", + "py_doc": "uint8 type numpy ndarray object to tensor.Tensor object.\n\nArgs:\n - array: numpy array object.\n - copy: if true, will alloc new buffer and copy data, else will directly use array's data buffer, default true.\nUse this arg carefully, when set to false, ther array MUST keep alive until we don't use the return tensor of this func, or will cause program crash.\n\n\nReturns: tensor.Tensor object.\n" + }, + "args": [ + [ + "py::array_t", + "array", + null + ], + [ + "bool", + "copy", + "true" + ] + ], + "ret_type": "tensor::Tensor*", + "static": false, + "def": "tensor::Tensor *tensor_from_numpy_uint8(py::array_t array, bool copy = true)", + "py_def": "def tensor_from_numpy_uint8(array: numpy.ndarray[numpy.uint8], copy: bool = True) -> Tensor" + }, + "tensor_from_numpy_int8": { + "type": "func", + "name": "tensor_from_numpy_int8", + "doc": { + "brief": "int8 type numpy ndarray object to tensor.Tensor object.", + "param": { + "array": "numpy array object.", + "copy": "if true, will alloc new buffer and copy data, else will directly use array's data buffer, default true.\nUse this arg carefully, when set to false, ther array MUST keep alive until we don't use the return tensor of this func, or will cause program crash." 
+ }, + "return": "tensor.Tensor object.", + "maixpy": "maix.tensor.tensor_from_numpy_int8", + "py_doc": "int8 type numpy ndarray object to tensor.Tensor object.\n\nArgs:\n - array: numpy array object.\n - copy: if true, will alloc new buffer and copy data, else will directly use array's data buffer, default true.\nUse this arg carefully, when set to false, ther array MUST keep alive until we don't use the return tensor of this func, or will cause program crash.\n\n\nReturns: tensor.Tensor object.\n" + }, + "args": [ + [ + "py::array_t", + "array", + null + ], + [ + "bool", + "copy", + "true" + ] + ], + "ret_type": "tensor::Tensor*", + "static": false, + "def": "tensor::Tensor *tensor_from_numpy_int8(py::array_t array, bool copy = true)", + "py_def": "def tensor_from_numpy_int8(array: numpy.ndarray[numpy.int8], copy: bool = True) -> Tensor" + }, + "tensor_to_numpy_float32": { + "type": "func", + "name": "tensor_to_numpy_float32", + "doc": { + "brief": "tensor.Tensor object to float32 type numpy ndarray object.", + "param": { + "t": "tensor.Tensor object.", + "copy": "Whether alloc new Tensor and copy data or not,\nif not copy, array object will directly use arg's data buffer, will faster but change array will affect arg's data, default true." + }, + "return": "numpy array object", + "maixpy": "maix.tensor.tensor_to_numpy_float32", + "py_doc": "tensor.Tensor object to float32 type numpy ndarray object.\n\nArgs:\n - t: tensor.Tensor object.\n - copy: Whether alloc new Tensor and copy data or not,\nif not copy, array object will directly use arg's data buffer, will faster but change array will affect arg's data, default true.\n\n\nReturns: numpy array object\n" + }, + "args": [ + [ + "tensor::Tensor *", + "t", + null + ], + [ + "bool", + "copy", + "true" + ] + ], + "ret_type": "py::array_t", + "static": false, + "def": "py::array_t tensor_to_numpy_float32(tensor::Tensor *t, bool copy = true)", + "py_def": "def tensor_to_numpy_float32(t: Tensor, copy: bool = True) -> numpy.ndarray[numpy.float32]" + }, + "tensor_to_numpy_uint8": { + "type": "func", + "name": "tensor_to_numpy_uint8", + "doc": { + "brief": "tensor.Tensor object to int8 type numpy ndarray object.", + "param": { + "t": "tensor.Tensor object.", + "copy": "Whether alloc new Tensor and copy data or not,\nif not copy, array object will directly use arg's data buffer, will faster but change array will affect arg's data, default true." 
+ }, + "return": "numpy array object", + "maixpy": "maix.tensor.tensor_to_numpy_uint8", + "py_doc": "tensor.Tensor object to int8 type numpy ndarray object.\n\nArgs:\n - t: tensor.Tensor object.\n - copy: Whether alloc new Tensor and copy data or not,\nif not copy, array object will directly use arg's data buffer, will faster but change array will affect arg's data, default true.\n\n\nReturns: numpy array object\n" + }, + "args": [ + [ + "tensor::Tensor *", + "t", + null + ], + [ + "bool", + "copy", + "true" + ] + ], + "ret_type": "py::array_t", + "static": false, + "def": "py::array_t tensor_to_numpy_uint8(tensor::Tensor *t, bool copy = true)", + "py_def": "def tensor_to_numpy_uint8(t: Tensor, copy: bool = True) -> numpy.ndarray[numpy.uint8]" + }, + "tensor_to_numpy_int8": { + "type": "func", + "name": "tensor_to_numpy_int8", + "doc": { + "brief": "tensor.Tensor object to int8 type numpy ndarray object.", + "param": { + "t": "tensor.Tensor object.", + "copy": "Whether alloc new Tensor and copy data or not,\nif not copy, array object will directly use arg's data buffer, will faster but change array will affect arg's data, default true." + }, + "return": "numpy array object", + "maixpy": "maix.tensor.tensor_to_numpy_int8", + "py_doc": "tensor.Tensor object to int8 type numpy ndarray object.\n\nArgs:\n - t: tensor.Tensor object.\n - copy: Whether alloc new Tensor and copy data or not,\nif not copy, array object will directly use arg's data buffer, will faster but change array will affect arg's data, default true.\n\n\nReturns: numpy array object\n" + }, + "args": [ + [ + "tensor::Tensor *", + "t", + null + ], + [ + "bool", + "copy", + "true" + ] + ], + "ret_type": "py::array_t", + "static": false, + "def": "py::array_t tensor_to_numpy_int8(tensor::Tensor *t, bool copy = true)", + "py_def": "def tensor_to_numpy_int8(t: Tensor, copy: bool = True) -> numpy.ndarray[numpy.int8]" + } + }, + "auto_add": true + }, + "image": { + "type": "module", + "doc": { + "brief": "maix.image module, image related definition and functions", + "maixpy": "maix.image", + "py_doc": "maix.image module, image related definition and functions" + }, + "members": { + "Format": { + "type": "enum", + "name": "Format", + "doc": { + "brief": "Image formats", + "attention": "for MaixPy firmware developers, update this enum will also need to update the fmt_size and fmt_names too !!!", + "maixpy": "maix.image.Format", + "py_doc": "Image formats" + }, + "values": [ + [ + "FMT_RGB888", + "0", + "RGBRGB...RGB, R at the lowest address" + ], + [ + "FMT_BGR888", + "", + "BGRBGR...BGR, B at the lowest address" + ], + [ + "FMT_RGBA8888", + "", + "RGBARGBA...RGBA, R at the lowest address" + ], + [ + "FMT_BGRA8888", + "", + "BGRABGRA...BGRA, B at the lowest address" + ], + [ + "FMT_RGB565", + "", + "" + ], + [ + "FMT_BGR565", + "", + "" + ], + [ + "FMT_YUV422SP", + "", + "YYY...UVUVUV...UVUV" + ], + [ + "FMT_YUV422P", + "", + "YYY...UUU...VVV" + ], + [ + "FMT_YVU420SP", + "", + "YYY...VUVUVU...VUVU, NV21" + ], + [ + "FMT_YUV420SP", + "", + "YYY...UVUVUV...UVUV, NV12" + ], + [ + "FMT_YVU420P", + "", + "YYY...VVV...UUU" + ], + [ + "FMT_YUV420P", + "", + "YYY...UUU...VVV" + ], + [ + "FMT_GRAYSCALE", + "", + "" + ], + [ + "FMT_BGGR6", + "", + "6-bit Bayer format with a BGGR pattern." + ], + [ + "FMT_GBRG6", + "", + "6-bit Bayer format with a GBRG pattern." + ], + [ + "FMT_GRBG6", + "", + "6-bit Bayer format with a GRBG pattern." + ], + [ + "FMT_RGGB6", + "", + "6-bit Bayer format with a RGGB pattern." 
+ ], + [ + "FMT_BGGR8", + "", + "8-bit Bayer format with a BGGR pattern." + ], + [ + "FMT_GBRG8", + "", + "8-bit Bayer format with a GBRG pattern." + ], + [ + "FMT_GRBG8", + "", + "8-bit Bayer format with a GRBG pattern." + ], + [ + "FMT_RGGB8", + "", + "8-bit Bayer format with a RGGB pattern." + ], + [ + "FMT_BGGR10", + "", + "10-bit Bayer format with a BGGR pattern." + ], + [ + "FMT_GBRG10", + "", + "10-bit Bayer format with a GBRG pattern." + ], + [ + "FMT_GRBG10", + "", + "10-bit Bayer format with a GRBG pattern." + ], + [ + "FMT_RGGB10", + "", + "10-bit Bayer format with a RGGB pattern." + ], + [ + "FMT_BGGR12", + "", + "12-bit Bayer format with a BGGR pattern." + ], + [ + "FMT_GBRG12", + "", + "12-bit Bayer format with a GBRG pattern." + ], + [ + "FMT_GRBG12", + "", + "12-bit Bayer format with a GRBG pattern." + ], + [ + "FMT_RGGB12", + "", + "12-bit Bayer format with a RGGB pattern." + ], + [ + "FMT_UNCOMPRESSED_MAX", + "", + "" + ], + [ + "FMT_COMPRESSED_MIN", + "", + "" + ], + [ + "FMT_JPEG", + "", + "" + ], + [ + "FMT_PNG", + "", + "" + ], + [ + "FMT_COMPRESSED_MAX", + "", + "" + ], + [ + "FMT_INVALID", + "0xFF", + "format not valid" + ] + ], + "def": "enum Format\n {\n FMT_RGB888 = 0, // RGBRGB...RGB, R at the lowest address\n FMT_BGR888, // BGRBGR...BGR, B at the lowest address\n FMT_RGBA8888, // RGBARGBA...RGBA, R at the lowest address\n FMT_BGRA8888, // BGRABGRA...BGRA, B at the lowest address\n FMT_RGB565,\n FMT_BGR565,\n FMT_YUV422SP, // YYY...UVUVUV...UVUV\n FMT_YUV422P, // YYY...UUU...VVV\n FMT_YVU420SP, // YYY...VUVUVU...VUVU, NV21\n FMT_YUV420SP, // YYY...UVUVUV...UVUV, NV12\n FMT_YVU420P, // YYY...VVV...UUU\n FMT_YUV420P, // YYY...UUU...VVV\n FMT_GRAYSCALE,\n FMT_BGGR6, // 6-bit Bayer format with a BGGR pattern.\n FMT_GBRG6, // 6-bit Bayer format with a GBRG pattern.\n FMT_GRBG6, // 6-bit Bayer format with a GRBG pattern.\n FMT_RGGB6, // 6-bit Bayer format with a RGGB pattern.\n FMT_BGGR8, // 8-bit Bayer format with a BGGR pattern.\n FMT_GBRG8, // 8-bit Bayer format with a GBRG pattern.\n FMT_GRBG8, // 8-bit Bayer format with a GRBG pattern.\n FMT_RGGB8, // 8-bit Bayer format with a RGGB pattern.\n FMT_BGGR10, // 10-bit Bayer format with a BGGR pattern.\n FMT_GBRG10, // 10-bit Bayer format with a GBRG pattern.\n FMT_GRBG10, // 10-bit Bayer format with a GRBG pattern.\n FMT_RGGB10, // 10-bit Bayer format with a RGGB pattern.\n FMT_BGGR12, // 12-bit Bayer format with a BGGR pattern.\n FMT_GBRG12, // 12-bit Bayer format with a GBRG pattern.\n FMT_GRBG12, // 12-bit Bayer format with a GRBG pattern.\n FMT_RGGB12, // 12-bit Bayer format with a RGGB pattern.\n FMT_UNCOMPRESSED_MAX,\n\n // compressed format below, not compressed should define upper\n FMT_COMPRESSED_MIN,\n FMT_JPEG,\n FMT_PNG,\n FMT_COMPRESSED_MAX,\n\n FMT_INVALID = 0xFF // format not valid\n }" + }, + "fmt_size": { + "type": "var", + "name": "", + "doc": { + "brief": "Image format size in bytes", + "attention": "It's a copy of this variable in MaixPy,\nso change it in C++ (e.g. 
update var in hello function) will not take effect the var inMaixPy.\nSo we add const for this var to avoid this mistake.", + "maixpy": "maix.image.fmt_size", + "py_doc": "Image format size in bytes" + }, + "value": "{\n 3,\n 3,\n 4,\n 4,\n 2,\n 2,\n 2,\n 2,\n 1.5,\n 1.5,\n 1.5,\n 1.5,\n 1, // grayscale\n 0.75, // 6-bit Bayer format\n 0.75, // 6-bit Bayer format\n 0.75, // 6-bit Bayer format\n 0.75, // 6-bit Bayer format\n 1, // 8-bit Bayer format\n 1, // 8-bit Bayer format\n 1, // 8-bit Bayer format\n 1, // 8-bit Bayer format\n 1.25, // 10-bit Bayer format\n 1.25, // 10-bit Bayer format\n 1.25, // 10-bit Bayer format\n 1.25, // 10-bit Bayer format\n 1.5, // 12-bit Bayer format\n 1.5, // 12-bit Bayer format\n 1.5, // 12-bit Bayer format\n 1.5, // 12-bit Bayer format\n 0, // uncompereed_max\n 0, // compressed_min\n 1, // jpeg\n 1, // png\n 0, // compressed_max\n 0 // invalid\n }", + "static": false, + "readonly": true, + "def": "const std::vector fmt_size = {\n 3,\n 3,\n 4,\n 4,\n 2,\n 2,\n 2,\n 2,\n 1.5,\n 1.5,\n 1.5,\n 1.5,\n 1, // grayscale\n 0.75, // 6-bit Bayer format\n 0.75, // 6-bit Bayer format\n 0.75, // 6-bit Bayer format\n 0.75, // 6-bit Bayer format\n 1, // 8-bit Bayer format\n 1, // 8-bit Bayer format\n 1, // 8-bit Bayer format\n 1, // 8-bit Bayer format\n 1.25, // 10-bit Bayer format\n 1.25, // 10-bit Bayer format\n 1.25, // 10-bit Bayer format\n 1.25, // 10-bit Bayer format\n 1.5, // 12-bit Bayer format\n 1.5, // 12-bit Bayer format\n 1.5, // 12-bit Bayer format\n 1.5, // 12-bit Bayer format\n 0, // uncompereed_max\n 0, // compressed_min\n 1, // jpeg\n 1, // png\n 0, // compressed_max\n 0 // invalid\n }" + }, + "fmt_names": { + "type": "var", + "name": "", + "doc": { + "brief": "Image format string", + "maixpy": "maix.image.fmt_names", + "py_doc": "Image format string" + }, + "value": "{\n \"RGB888\",\n \"BGR888\",\n \"RGBA8888\",\n \"BGRA8888\",\n \"RGB565\",\n \"BGR565\",\n \"YUV422SP\",\n \"YUV422P\",\n \"YVU420SP\",\n \"YUV420SP\",\n \"YVU420P\",\n \"YUV420P\",\n \"GRAYSCALE\",\n \"BGGR6\",\n \"GBRG6\",\n \"GRBG6\",\n \"RG6B6\",\n \"BGGR8\",\n \"GBRG8\",\n \"GRBG8\",\n \"RG6B8\",\n \"BGGR10\",\n \"GBRG10\",\n \"GRBG10\",\n \"RG6B10\",\n \"BGGR12\",\n \"GBRG12\",\n \"GRBG12\",\n \"RG6B12\",\n \"UNCOMPRESSED_MAX\",\n \"COMPRESSED_MIN\",\n \"JPEG\",\n \"PNG\",\n \"COMPRESSED_MAX\",\n \"INVALID\"\n }", + "static": false, + "readonly": true, + "def": "const std::vector fmt_names = {\n \"RGB888\",\n \"BGR888\",\n \"RGBA8888\",\n \"BGRA8888\",\n \"RGB565\",\n \"BGR565\",\n \"YUV422SP\",\n \"YUV422P\",\n \"YVU420SP\",\n \"YUV420SP\",\n \"YVU420P\",\n \"YUV420P\",\n \"GRAYSCALE\",\n \"BGGR6\",\n \"GBRG6\",\n \"GRBG6\",\n \"RG6B6\",\n \"BGGR8\",\n \"GBRG8\",\n \"GRBG8\",\n \"RG6B8\",\n \"BGGR10\",\n \"GBRG10\",\n \"GRBG10\",\n \"RG6B10\",\n \"BGGR12\",\n \"GBRG12\",\n \"GRBG12\",\n \"RG6B12\",\n \"UNCOMPRESSED_MAX\",\n \"COMPRESSED_MIN\",\n \"JPEG\",\n \"PNG\",\n \"COMPRESSED_MAX\",\n \"INVALID\"\n }" + }, + "Size": { + "type": "class", + "name": "Size", + "doc": { + "brief": "Image size type", + "maixpy": "maix.image.Size", + "py_doc": "Image size type" + }, + "members": { + "__init__": { + "type": "func", + "name": "Size", + "doc": { + "brief": "Construct a new Size object", + "param": { + "width": "image width", + "height": "image height" + }, + "maixpy": "maix.image.Size.__init__", + "py_doc": "Construct a new Size object\n\nArgs:\n - width: image width\n - height: image height\n" + }, + "args": [ + [ + "int", + "width", + "0" + ], + [ + "int", + "height", + "0" + ] + ], + 
"ret_type": null, + "static": false, + "def": "Size(int width = 0, int height = 0)", + "py_def": "def __init__(self, width: int = 0, height: int = 0) -> None" + }, + "width": { + "type": "func", + "name": "width", + "doc": { + "brief": "width of size", + "param": { + "width": "set new width, if not set, only return current width" + }, + "maixpy": "maix.image.Size.width", + "py_doc": "width of size\n\nArgs:\n - width: set new width, if not set, only return current width\n" + }, + "args": [ + [ + "int", + "width", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int width(int width = -1)", + "py_def": "def width(self, width: int = -1) -> int" + }, + "height": { + "type": "func", + "name": "height", + "doc": { + "brief": "height of size", + "param": { + "height": "set new height, if not set, only return current height" + }, + "maixpy": "maix.image.Size.height", + "py_doc": "height of size\n\nArgs:\n - height: set new height, if not set, only return current height\n" + }, + "args": [ + [ + "int", + "height", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int height(int height = -1)", + "py_def": "def height(self, height: int = -1) -> int" + }, + "__getitem__": { + "type": "func", + "name": "operator[]", + "doc": { + "brief": "Subscript operator", + "param": { + "index": "0 for width, 1 for height" + }, + "return": "int& width or height", + "maixpy": "maix.image.Size.__getitem__", + "maixcdk": "maix.image.Size.[]", + "py_doc": "Subscript operator\n\nArgs:\n - index: 0 for width, 1 for height\n\n\nReturns: int& width or height\n" + }, + "args": [ + [ + "int", + "index", + null + ] + ], + "ret_type": "int&", + "static": false, + "def": "int &operator[](int index)", + "py_def": "def __getitem__(self, index: int) -> int" + }, + "__str__": { + "type": "func", + "name": "__str__", + "doc": { + "brief": "to string", + "maixpy": "maix.image.Size.__str__", + "py_doc": "to string" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string __str__()", + "py_def": "def __str__(self) -> str" + } + }, + "def": "class Size" + }, + "Fit": { + "type": "enum", + "name": "Fit", + "doc": { + "brief": "Object fit method", + "maixpy": "maix.image.Fit", + "py_doc": "Object fit method" + }, + "values": [ + [ + "FIT_NONE", + "-1", + "no object fit, keep original" + ], + [ + "FIT_FILL", + "0", + "width to new width, height to new height, may be stretch" + ], + [ + "FIT_CONTAIN", + "", + "keep aspect ratio, fill blank area with black color" + ], + [ + "FIT_COVER", + "", + "keep aspect ratio, crop image to fit new size" + ], + [ + "FIT_MAX", + "", + "" + ] + ], + "def": "enum Fit\n {\n FIT_NONE = -1, // no object fit, keep original\n FIT_FILL = 0, // width to new width, height to new height, may be stretch\n FIT_CONTAIN, // keep aspect ratio, fill blank area with black color\n FIT_COVER, // keep aspect ratio, crop image to fit new size\n FIT_MAX\n }" + }, + "ResizeMethod": { + "type": "enum", + "name": "ResizeMethod", + "doc": { + "brief": "Resize method", + "maixpy": "maix.image.ResizeMethod", + "py_doc": "Resize method" + }, + "values": [ + [ + "NEAREST", + "0", + "" + ], + [ + "BILINEAR", + "", + "" + ], + [ + "BICUBIC", + "", + "" + ], + [ + "AREA", + "", + "" + ], + [ + "LANCZOS", + "", + "" + ], + [ + "HAMMING", + "", + "" + ], + [ + "RESIZE_METHOD_MAX", + "", + "" + ] + ], + "def": "enum ResizeMethod\n {\n NEAREST = 0,\n BILINEAR,\n BICUBIC,\n AREA,\n LANCZOS,\n HAMMING,\n RESIZE_METHOD_MAX\n }" + }, + "ApriltagFamilies": { + "type": "enum", + 
"name": "ApriltagFamilies", + "doc": { + "brief": "Family of apriltag", + "maixpy": "maix.image.ApriltagFamilies", + "py_doc": "Family of apriltag" + }, + "values": [ + [ + "TAG16H5", + "1", + "" + ], + [ + "TAG25H7", + "2", + "" + ], + [ + "TAG25H9", + "4", + "" + ], + [ + "TAG36H10", + "8", + "" + ], + [ + "TAG36H11", + "16", + "" + ], + [ + "ARTOOLKIT", + "32", + "" + ] + ], + "def": "enum ApriltagFamilies\n {\n TAG16H5 = 1,\n TAG25H7 = 2,\n TAG25H9 = 4,\n TAG36H10 = 8,\n TAG36H11 = 16,\n ARTOOLKIT = 32\n }" + }, + "TemplateMatch": { + "type": "enum", + "name": "TemplateMatch", + "doc": { + "brief": "Template match method", + "maixpy": "maix.image.TemplateMatch", + "py_doc": "Template match method" + }, + "values": [ + [ + "SEARCH_EX", + "", + "Exhaustive search" + ], + [ + "SEARCH_DS", + "", + "Diamond search" + ] + ], + "def": "enum TemplateMatch\n {\n SEARCH_EX, // Exhaustive search\n SEARCH_DS, // Diamond search\n }" + }, + "CornerDetector": { + "type": "enum", + "name": "CornerDetector", + "doc": { + "brief": "CornerDetector class", + "maixpy": "maix.image.CornerDetector", + "py_doc": "CornerDetector class" + }, + "values": [ + [ + "CORNER_FAST", + "", + "" + ], + [ + "CORNER_AGAST", + "", + "" + ] + ], + "def": "enum CornerDetector\n {\n CORNER_FAST,\n CORNER_AGAST\n }" + }, + "EdgeDetector": { + "type": "enum", + "name": "EdgeDetector", + "doc": { + "brief": "EdgeDetector class", + "maixpy": "maix.image.EdgeDetector", + "py_doc": "EdgeDetector class" + }, + "values": [ + [ + "EDGE_CANNY", + "", + "" + ], + [ + "EDGE_SIMPLE", + "", + "" + ] + ], + "def": "enum EdgeDetector\n {\n EDGE_CANNY,\n EDGE_SIMPLE,\n }" + }, + "Line": { + "type": "class", + "name": "Line", + "doc": { + "brief": "Line class", + "maixpy": "maix.image.Line", + "py_doc": "Line class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Line", + "doc": { + "brief": "Line constructor", + "param": { + "x1": "coordinate x1 of the straight line", + "y1": "coordinate y1 of the straight line", + "x2": "coordinate x2 of the straight line", + "y2": "coordinate y2 of the straight line", + "magnitude": "magnitude of the straight line after Hough transformation", + "theta": "angle of the straight line after Hough transformation", + "rho": "p-value of the straight line after Hough transformation" + }, + "maixpy": "maix.image.Line.__init__", + "py_doc": "Line constructor\n\nArgs:\n - x1: coordinate x1 of the straight line\n - y1: coordinate y1 of the straight line\n - x2: coordinate x2 of the straight line\n - y2: coordinate y2 of the straight line\n - magnitude: magnitude of the straight line after Hough transformation\n - theta: angle of the straight line after Hough transformation\n - rho: p-value of the straight line after Hough transformation\n" + }, + "args": [ + [ + "int", + "x1", + null + ], + [ + "int", + "y1", + null + ], + [ + "int", + "x2", + null + ], + [ + "int", + "y2", + null + ], + [ + "int", + "magnitude", + "0" + ], + [ + "int", + "theta", + "0" + ], + [ + "int", + "rho", + "0" + ] + ], + "ret_type": null, + "static": false, + "def": "Line(int x1, int y1, int x2, int y2, int magnitude = 0, int theta = 0, int rho = 0)", + "py_def": "def __init__(self, x1: int, y1: int, x2: int, y2: int, magnitude: int = 0, theta: int = 0, rho: int = 0) -> None" + }, + "__getitem__": { + "type": "func", + "name": "__getitem__", + "doc": { + "brief": "Subscript operator", + "param": { + "index": "[0] get x1 of line\n[1] get y1 of line\n[2] get x2 of line\n[3] get y2 of line\n[4] get length of line\n[5] get 
magnitude of the straight line after Hough transformation\n[6] get angle of the straight line after Hough transformation (0-179 degrees)\n[7] get p-value of the straight line after Hough transformation" + }, + "return": "int&", + "maixpy": "maix.image.Line.__getitem__", + "py_doc": "Subscript operator\n\nArgs:\n - index: [0] get x1 of line\n[1] get y1 of line\n[2] get x2 of line\n[3] get y2 of line\n[4] get length of line\n[5] get magnitude of the straight line after Hough transformation\n[6] get angle of the straight line after Hough transformation (0-179 degrees)\n[7] get p-value of the straight line after Hough transformation\n\n\nReturns: int&\n" + }, + "args": [ + [ + "int", + "index", + null + ] + ], + "ret_type": "int&", + "static": false, + "def": "int &__getitem__(int index)", + "py_def": "def __getitem__(self, index: int) -> int" + }, + "x1": { + "type": "func", + "name": "x1", + "doc": { + "brief": "get x1 of line", + "return": "return x1 of the line, type is int", + "maixpy": "maix.image.Line.x1", + "py_doc": "get x1 of line\n\nReturns: return x1 of the line, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int x1()", + "py_def": "def x1(self) -> int" + }, + "y1": { + "type": "func", + "name": "y1", + "doc": { + "brief": "get y1 of line", + "return": "return y1 of the line, type is int", + "maixpy": "maix.image.Line.y1", + "py_doc": "get y1 of line\n\nReturns: return y1 of the line, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int y1()", + "py_def": "def y1(self) -> int" + }, + "x2": { + "type": "func", + "name": "x2", + "doc": { + "brief": "get x2 of line", + "return": "return x2 of the line, type is int", + "maixpy": "maix.image.Line.x2", + "py_doc": "get x2 of line\n\nReturns: return x2 of the line, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int x2()", + "py_def": "def x2(self) -> int" + }, + "y2": { + "type": "func", + "name": "y2", + "doc": { + "brief": "get y2 of line", + "return": "return y2 of the line, type is int", + "maixpy": "maix.image.Line.y2", + "py_doc": "get y2 of line\n\nReturns: return y2 of the line, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int y2()", + "py_def": "def y2(self) -> int" + }, + "length": { + "type": "func", + "name": "length", + "doc": { + "brief": "get length of line", + "return": "return length of the line, type is int", + "maixpy": "maix.image.Line.length", + "py_doc": "get length of line\n\nReturns: return length of the line, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int length()", + "py_def": "def length(self) -> int" + }, + "magnitude": { + "type": "func", + "name": "magnitude", + "doc": { + "brief": "get magnitude of the straight line after Hough transformation", + "return": "return magnitude, type is int", + "maixpy": "maix.image.Line.magnitude", + "py_doc": "get magnitude of the straight line after Hough transformation\n\nReturns: return magnitude, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int magnitude()", + "py_def": "def magnitude(self) -> int" + }, + "theta": { + "type": "func", + "name": "theta", + "doc": { + "brief": "get angle of the straight line after Hough transformation (0-179 degrees)", + "return": "return angle, type is int", + "maixpy": "maix.image.Line.theta", + "py_doc": "get angle of the straight line after Hough transformation (0-179 degrees)\n\nReturns: return angle, type is 
int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int theta()", + "py_def": "def theta(self) -> int" + }, + "rho": { + "type": "func", + "name": "rho", + "doc": { + "brief": "get p-value of the straight line after Hough transformation", + "return": "return p-value, type is int", + "maixpy": "maix.image.Line.rho", + "py_doc": "get p-value of the straight line after Hough transformation\n\nReturns: return p-value, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int rho()", + "py_def": "def rho(self) -> int" + } + }, + "def": "class Line" + }, + "Rect": { + "type": "class", + "name": "Rect", + "doc": { + "brief": "Rect class", + "maixpy": "maix.image.Rect", + "py_doc": "Rect class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Rect", + "doc": { + "brief": "Rect constructor", + "param": { + "corners": "corners of rect", + "x": "coordinate x of the straight line", + "y": "coordinate y of the straight line", + "w": "coordinate w of the straight line", + "h": "coordinate h of the straight line", + "magnitude": "magnitude of the straight line after Hough transformation" + }, + "maixpy": "maix.image.Rect.__init__", + "py_doc": "Rect constructor\n\nArgs:\n - corners: corners of rect\n - x: coordinate x of the straight line\n - y: coordinate y of the straight line\n - w: coordinate w of the straight line\n - h: coordinate h of the straight line\n - magnitude: magnitude of the straight line after Hough transformation\n" + }, + "args": [ + [ + "std::vector> &", + "corners", + null + ], + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "int", + "w", + null + ], + [ + "int", + "h", + null + ], + [ + "int", + "magnitude", + "0" + ] + ], + "ret_type": null, + "static": false, + "def": "Rect(std::vector> &corners, int x, int y, int w, int h, int magnitude = 0)", + "py_def": "def __init__(self, corners: list[list[int]], x: int, y: int, w: int, h: int, magnitude: int = 0) -> None" + }, + "__getitem__": { + "type": "func", + "name": "__getitem__", + "doc": { + "brief": "Subscript operator", + "param": { + "index": "[0] get x of rect\n[1] get y of rect\n[2] get w of rect\n[3] get h of rect\n[4] get magnitude of the straight line after Hough transformation" + }, + "return": "int&", + "maixpy": "maix.image.Rect.__getitem__", + "py_doc": "Subscript operator\n\nArgs:\n - index: [0] get x of rect\n[1] get y of rect\n[2] get w of rect\n[3] get h of rect\n[4] get magnitude of the straight line after Hough transformation\n\n\nReturns: int&\n" + }, + "args": [ + [ + "int", + "index", + null + ] + ], + "ret_type": "int&", + "static": false, + "def": "int &__getitem__(int index)", + "py_def": "def __getitem__(self, index: int) -> int" + }, + "corners": { + "type": "func", + "name": "corners", + "doc": { + "brief": "get corners of rect", + "return": "return the coordinate of the rect.", + "maixpy": "maix.image.Rect.corners", + "py_doc": "get corners of rect\n\nReturns: return the coordinate of the rect.\n" + }, + "args": [], + "ret_type": "std::vector>", + "static": false, + "def": "std::vector> corners()", + "py_def": "def corners(self) -> list[list[int]]" + }, + "rect": { + "type": "func", + "name": "rect", + "doc": { + "brief": "get rectangle of rect", + "return": "return the rectangle of the rect. format is {x, y, w, h}, type is std::vector", + "maixpy": "maix.image.Rect.rect", + "py_doc": "get rectangle of rect\n\nReturns: return the rectangle of the rect. 
format is {x, y, w, h}, type is std::vector\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector rect()", + "py_def": "def rect(self) -> list[int]" + }, + "x": { + "type": "func", + "name": "x", + "doc": { + "brief": "get x of rect", + "return": "return x of the rect, type is int", + "maixpy": "maix.image.Rect.x", + "py_doc": "get x of rect\n\nReturns: return x of the rect, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int x()", + "py_def": "def x(self) -> int" + }, + "y": { + "type": "func", + "name": "y", + "doc": { + "brief": "get y of rect", + "return": "return y of the rect, type is int", + "maixpy": "maix.image.Rect.y", + "py_doc": "get y of rect\n\nReturns: return y of the rect, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int y()", + "py_def": "def y(self) -> int" + }, + "w": { + "type": "func", + "name": "w", + "doc": { + "brief": "get w of rect", + "return": "return w of the rect, type is int", + "maixpy": "maix.image.Rect.w", + "py_doc": "get w of rect\n\nReturns: return w of the rect, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int w()", + "py_def": "def w(self) -> int" + }, + "h": { + "type": "func", + "name": "h", + "doc": { + "brief": "get h of rect", + "return": "return h of the rect, type is int", + "maixpy": "maix.image.Rect.h", + "py_doc": "get h of rect\n\nReturns: return h of the rect, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int h()", + "py_def": "def h(self) -> int" + }, + "magnitude": { + "type": "func", + "name": "magnitude", + "doc": { + "brief": "get magnitude of the straight line after Hough transformation", + "return": "return magnitude, type is int", + "maixpy": "maix.image.Rect.magnitude", + "py_doc": "get magnitude of the straight line after Hough transformation\n\nReturns: return magnitude, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int magnitude()", + "py_def": "def magnitude(self) -> int" + } + }, + "def": "class Rect" + }, + "Circle": { + "type": "class", + "name": "Circle", + "doc": { + "brief": "circle class", + "maixpy": "maix.image.Circle", + "py_doc": "circle class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Circle", + "doc": { + "brief": "Circle constructor", + "param": { + "x": "coordinate x of the circle", + "y": "coordinate y of the circle", + "r": "coordinate r of the circle", + "magnitude": "coordinate y2 of the straight line" + }, + "maixpy": "maix.image.Circle.__init__", + "py_doc": "Circle constructor\n\nArgs:\n - x: coordinate x of the circle\n - y: coordinate y of the circle\n - r: coordinate r of the circle\n - magnitude: coordinate y2 of the straight line\n" + }, + "args": [ + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "int", + "r", + null + ], + [ + "int", + "magnitude", + null + ] + ], + "ret_type": null, + "static": false, + "def": "Circle(int x, int y, int r, int magnitude)", + "py_def": "def __init__(self, x: int, y: int, r: int, magnitude: int) -> None" + }, + "__getitem__": { + "type": "func", + "name": "__getitem__", + "doc": { + "brief": "Subscript operator", + "param": { + "index": "[0] get x of circle\n[1] get y of circle\n[2] get r of circle\n[3] get magnitude of the circle after Hough transformation" + }, + "return": "int&", + "maixpy": "maix.image.Circle.__getitem__", + "py_doc": "Subscript operator\n\nArgs:\n - index: [0] get x of 
circle\n[1] get y of circle\n[2] get r of circle\n[3] get magnitude of the circle after Hough transformation\n\n\nReturns: int&\n" + }, + "args": [ + [ + "int", + "index", + null + ] + ], + "ret_type": "int&", + "static": false, + "def": "int &__getitem__(int index)", + "py_def": "def __getitem__(self, index: int) -> int" + }, + "x": { + "type": "func", + "name": "x", + "doc": { + "brief": "get x of circle", + "return": "return x of the circle, type is int", + "maixpy": "maix.image.Circle.x", + "py_doc": "get x of circle\n\nReturns: return x of the circle, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int x()", + "py_def": "def x(self) -> int" + }, + "y": { + "type": "func", + "name": "y", + "doc": { + "brief": "get y of circle", + "return": "return y of the circle, type is int", + "maixpy": "maix.image.Circle.y", + "py_doc": "get y of circle\n\nReturns: return y of the circle, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int y()", + "py_def": "def y(self) -> int" + }, + "r": { + "type": "func", + "name": "r", + "doc": { + "brief": "get r of circle", + "return": "return r of the circle, type is int", + "maixpy": "maix.image.Circle.r", + "py_doc": "get r of circle\n\nReturns: return r of the circle, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int r()", + "py_def": "def r(self) -> int" + }, + "magnitude": { + "type": "func", + "name": "magnitude", + "doc": { + "brief": "get magnitude of the circle after Hough transformation", + "return": "return magnitude, type is int", + "maixpy": "maix.image.Circle.magnitude", + "py_doc": "get magnitude of the circle after Hough transformation\n\nReturns: return magnitude, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int magnitude()", + "py_def": "def magnitude(self) -> int" + } + }, + "def": "class Circle" + }, + "Blob": { + "type": "class", + "name": "Blob", + "doc": { + "brief": "Blob class", + "maixpy": "maix.image.Blob", + "py_doc": "Blob class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Blob", + "doc": { + "brief": "Blob constructor", + "param": { + "rect": "blob rect, type is std::vector", + "corners": "blob corners, type is std::vector>", + "mini_corners": "blob mini_corners, type is std::vector>", + "cx": "blob center x, type is float", + "cy": "blob center y, type is float", + "pixels": "blob pixels, type is int", + "rotation": "blob rotation, type is float", + "code": "blob code, type is int", + "count": "blob count, type is int", + "perimeter": "blob perimeter, type is int", + "roundness": "blob roundness, type is float", + "x_hist_bins": "blob x_hist_bins, type is std::vector", + "y_hist_bins": "blob y_hist_bins, type is std::vector" + }, + "maixpy": "maix.image.Blob.__init__", + "py_doc": "Blob constructor\n\nArgs:\n - rect: blob rect, type is std::vector\n - corners: blob corners, type is std::vector>\n - mini_corners: blob mini_corners, type is std::vector>\n - cx: blob center x, type is float\n - cy: blob center y, type is float\n - pixels: blob pixels, type is int\n - rotation: blob rotation, type is float\n - code: blob code, type is int\n - count: blob count, type is int\n - perimeter: blob perimeter, type is int\n - roundness: blob roundness, type is float\n - x_hist_bins: blob x_hist_bins, type is std::vector\n - y_hist_bins: blob y_hist_bins, type is std::vector\n" + }, + "args": [ + [ + "std::vector &", + "rect", + null + ], + [ + "std::vector> &", + 
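
Rect and Circle, both fully documented above, follow the same result-object pattern. A brief sketch (illustrative only; corner lists use the `list[list[int]]` form given in the py_def signatures):

```python
# Illustrative sketch: maix.image.Rect and maix.image.Circle via their
# documented constructors and accessors.
from maix import image

corners = [[10, 10], [60, 10], [60, 40], [10, 40]]
r = image.Rect(corners, 10, 10, 50, 30)    # corners, x, y, w, h (magnitude defaults to 0)
print(r.rect())                            # {x, y, w, h} -> [10, 10, 50, 30]
print(r.corners())                         # the four corner points
print(r.x(), r.y(), r.w(), r.h(), r.magnitude())

c = image.Circle(32, 24, 12, 0)            # x, y, r, magnitude
print(c.x(), c.y(), c.r(), c.magnitude())
print(c[2])                                # __getitem__: index 2 = radius
```
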
"corners", + null + ], + [ + "std::vector> &", + "mini_corners", + null + ], + [ + "float", + "cx", + null + ], + [ + "float", + "cy", + null + ], + [ + "int", + "pixels", + null + ], + [ + "float", + "rotation", + null + ], + [ + "int", + "code", + null + ], + [ + "int", + "count", + null + ], + [ + "int", + "perimeter", + null + ], + [ + "float", + "roundness", + null + ], + [ + "std::vector &", + "x_hist_bins", + null + ], + [ + "std::vector &", + "y_hist_bins", + null + ] + ], + "ret_type": null, + "static": false, + "def": "Blob(std::vector &rect, std::vector> &corners, std::vector> &mini_corners,float cx, float cy, int pixels, float rotation, int code, int count, int perimeter, float roundness, std::vector &x_hist_bins, std::vector &y_hist_bins)", + "py_def": "def __init__(self, rect: list[int], corners: list[list[int]], mini_corners: list[list[int]], cx: float, cy: float, pixels: int, rotation: float, code: int, count: int, perimeter: int, roundness: float, x_hist_bins: list[int], y_hist_bins: list[int]) -> None" + }, + "__getitem__": { + "type": "func", + "name": "__getitem__", + "doc": { + "brief": "Subscript operator", + "param": { + "index": "[0] Returns the blob\u2019s bounding box x coordinate\n[1] Returns the blob\u2019s bounding box y coordinate\n[2] Returns the blob\u2019s bounding box w coordinate\n[3] Returns the blob\u2019s bounding box h coordinate\n[4] Returns the number of pixels that are part of this blob\n[5] Returns the centroid x position of the blob\n[6] Returns the centroid y position of the blob" + }, + "return": "int& width or height", + "maixpy": "maix.image.Blob.__getitem__", + "py_doc": "Subscript operator\n\nArgs:\n - index: [0] Returns the blob\u2019s bounding box x coordinate\n[1] Returns the blob\u2019s bounding box y coordinate\n[2] Returns the blob\u2019s bounding box w coordinate\n[3] Returns the blob\u2019s bounding box h coordinate\n[4] Returns the number of pixels that are part of this blob\n[5] Returns the centroid x position of the blob\n[6] Returns the centroid y position of the blob\n\n\nReturns: int& width or height\n" + }, + "args": [ + [ + "int", + "index", + null + ] + ], + "ret_type": "int&", + "static": false, + "def": "int &__getitem__(int index)", + "py_def": "def __getitem__(self, index: int) -> int" + }, + "corners": { + "type": "func", + "name": "corners", + "doc": { + "brief": "get blob corners", + "return": "Returns a list of 4 (x,y) tuples of the 4 corners of the object.\n(x0, y0)___________(x1, y1)\n| |\n| |\n| |\n|___________|\n(x3, y3) (x2, y2)\nnote: the order of corners may change", + "maixpy": "maix.image.Blob.corners", + "py_doc": "get blob corners\n\nReturns: Returns a list of 4 (x,y) tuples of the 4 corners of the object.\n(x0, y0)___________(x1, y1)\n| |\n| |\n| |\n|___________|\n(x3, y3) (x2, y2)\nnote: the order of corners may change\n" + }, + "args": [], + "ret_type": "std::vector>", + "static": false, + "def": "std::vector> corners()", + "py_def": "def corners(self) -> list[list[int]]" + }, + "mini_corners": { + "type": "func", + "name": "mini_corners", + "doc": { + "brief": "get blob mini corners", + "return": "Returns a list of 4 (x,y) tuples of the 4 corners than bound the min area rectangle of the blob.\n(x0, y0)___________(x1, y1)\n| |\n| |\n| |\n|___________|\n(x3, y3) (x2, y2)\nnote: the order of corners may change", + "maixpy": "maix.image.Blob.mini_corners", + "py_doc": "get blob mini corners\n\nReturns: Returns a list of 4 (x,y) tuples of the 4 corners than bound the min area rectangle of the blob.\n(x0, 
y0)___________(x1, y1)\n| |\n| |\n| |\n|___________|\n(x3, y3) (x2, y2)\nnote: the order of corners may change\n" + }, + "args": [], + "ret_type": "std::vector>", + "static": false, + "def": "std::vector> mini_corners()", + "py_def": "def mini_corners(self) -> list[list[int]]" + }, + "rect": { + "type": "func", + "name": "rect", + "doc": { + "brief": "get blob rect", + "return": "Returns the center coordinates and width and height of the rectangle. format is (x, y, w, h)\nw\n(x, y) ___________\n| |\n| | h\n| |\n|___________|", + "maixpy": "maix.image.Blob.rect", + "py_doc": "get blob rect\n\nReturns: Returns the center coordinates and width and height of the rectangle. format is (x, y, w, h)\nw\n(x, y) ___________\n| |\n| | h\n| |\n|___________|\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector rect()", + "py_def": "def rect(self) -> list[int]" + }, + "x": { + "type": "func", + "name": "x", + "doc": { + "brief": "get blob x of the upper left coordinate", + "return": "Returns the x coordinate of the upper left corner of the rectangle.", + "maixpy": "maix.image.Blob.x", + "py_doc": "get blob x of the upper left coordinate\n\nReturns: Returns the x coordinate of the upper left corner of the rectangle.\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int x()", + "py_def": "def x(self) -> int" + }, + "y": { + "type": "func", + "name": "y", + "doc": { + "brief": "get blob y of the upper left coordinate", + "return": "Returns the y coordinate of the upper left corner of the rectangle.", + "maixpy": "maix.image.Blob.y", + "py_doc": "get blob y of the upper left coordinate\n\nReturns: Returns the y coordinate of the upper left corner of the rectangle.\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int y()", + "py_def": "def y(self) -> int" + }, + "w": { + "type": "func", + "name": "w", + "doc": { + "brief": "get blob width", + "return": "Returns the blob\u2019s bounding box w coordinate", + "maixpy": "maix.image.Blob.w", + "py_doc": "get blob width\n\nReturns: Returns the blob\u2019s bounding box w coordinate\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int w()", + "py_def": "def w(self) -> int" + }, + "h": { + "type": "func", + "name": "h", + "doc": { + "brief": "get blob height", + "return": "Returns the blob\u2019s bounding box h coordinate", + "maixpy": "maix.image.Blob.h", + "py_doc": "get blob height\n\nReturns: Returns the blob\u2019s bounding box h coordinate\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int h()", + "py_def": "def h(self) -> int" + }, + "pixels": { + "type": "func", + "name": "pixels", + "doc": { + "brief": "get blob pixels", + "return": "Returns the number of pixels that are part of this blob.", + "maixpy": "maix.image.Blob.pixels", + "py_doc": "get blob pixels\n\nReturns: Returns the number of pixels that are part of this blob.\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int pixels()", + "py_def": "def pixels(self) -> int" + }, + "cx": { + "type": "func", + "name": "cx", + "doc": { + "brief": "get blob center x", + "return": "Returns the centroid x position of the blob", + "maixpy": "maix.image.Blob.cx", + "py_doc": "get blob center x\n\nReturns: Returns the centroid x position of the blob\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int cx()", + "py_def": "def cx(self) -> int" + }, + "cy": { + "type": "func", + "name": "cy", + "doc": { + "brief": "get blob center y", + 
"return": "Returns the centroid y position of the blob", + "maixpy": "maix.image.Blob.cy", + "py_doc": "get blob center y\n\nReturns: Returns the centroid y position of the blob\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int cy()", + "py_def": "def cy(self) -> int" + }, + "cxf": { + "type": "func", + "name": "cxf", + "doc": { + "brief": "get blob center x", + "return": "Returns the centroid x position of the blob", + "maixpy": "maix.image.Blob.cxf", + "py_doc": "get blob center x\n\nReturns: Returns the centroid x position of the blob\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float cxf()", + "py_def": "def cxf(self) -> float" + }, + "cyf": { + "type": "func", + "name": "cyf", + "doc": { + "brief": "get blob center y", + "return": "Returns the centroid y position of the blob", + "maixpy": "maix.image.Blob.cyf", + "py_doc": "get blob center y\n\nReturns: Returns the centroid y position of the blob\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float cyf()", + "py_def": "def cyf(self) -> float" + }, + "rotation": { + "type": "func", + "name": "rotation", + "doc": { + "brief": "get blob rotation", + "return": "Returns the rotation of the blob in radians (float). If the blob is like a pencil or pen this value will be unique for 0-180 degrees.", + "maixpy": "maix.image.Blob.rotation", + "py_doc": "get blob rotation\n\nReturns: Returns the rotation of the blob in radians (float). If the blob is like a pencil or pen this value will be unique for 0-180 degrees.\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float rotation()", + "py_def": "def rotation(self) -> float" + }, + "rotation_rad": { + "type": "func", + "name": "rotation_rad", + "doc": { + "brief": "get blob rotation_rad", + "return": "Returns the rotation of the blob in radians", + "maixpy": "maix.image.Blob.rotation_rad", + "py_doc": "get blob rotation_rad\n\nReturns: Returns the rotation of the blob in radians\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float rotation_rad()", + "py_def": "def rotation_rad(self) -> float" + }, + "rotation_deg": { + "type": "func", + "name": "rotation_deg", + "doc": { + "brief": "get blob rotation_deg", + "return": "Returns the rotation of the blob in degrees.", + "maixpy": "maix.image.Blob.rotation_deg", + "py_doc": "get blob rotation_deg\n\nReturns: Returns the rotation of the blob in degrees.\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int rotation_deg()", + "py_def": "def rotation_deg(self) -> int" + }, + "code": { + "type": "func", + "name": "code", + "doc": { + "brief": "get blob code", + "return": "Returns a 32-bit binary number with a bit set in it for each color threshold that\u2019s part of this blob", + "maixpy": "maix.image.Blob.code", + "py_doc": "get blob code\n\nReturns: Returns a 32-bit binary number with a bit set in it for each color threshold that\u2019s part of this blob\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int code()", + "py_def": "def code(self) -> int" + }, + "count": { + "type": "func", + "name": "count", + "doc": { + "brief": "get blob count", + "return": "Returns the number of blobs merged into this blob.", + "maixpy": "maix.image.Blob.count", + "py_doc": "get blob count\n\nReturns: Returns the number of blobs merged into this blob.\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int count()", + "py_def": "def count(self) -> int" + }, + "perimeter": { + 
"type": "func", + "name": "perimeter", + "doc": { + "brief": "get blob merge_cnt", + "return": "Returns the number of pixels on this blob\u2019s perimeter.", + "maixpy": "maix.image.Blob.perimeter", + "py_doc": "get blob merge_cnt\n\nReturns: Returns the number of pixels on this blob\u2019s perimeter.\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int perimeter()", + "py_def": "def perimeter(self) -> int" + }, + "roundness": { + "type": "func", + "name": "roundness", + "doc": { + "brief": "get blob roundness", + "return": "Returns a value between 0 and 1 representing how round the object is", + "maixpy": "maix.image.Blob.roundness", + "py_doc": "get blob roundness\n\nReturns: Returns a value between 0 and 1 representing how round the object is\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float roundness()", + "py_def": "def roundness(self) -> float" + }, + "elongation": { + "type": "func", + "name": "elongation", + "doc": { + "brief": "get blob elongation", + "returnReturns": "a value between 0 and 1 representing how long (not round) the object is", + "maixpy": "maix.image.Blob.elongation", + "py_doc": "get blob elongation" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float elongation()", + "py_def": "def elongation(self) -> float" + }, + "area": { + "type": "func", + "name": "area", + "doc": { + "brief": "get blob area", + "return": "Returns the area of the bounding box around the blob", + "maixpy": "maix.image.Blob.area", + "py_doc": "get blob area\n\nReturns: Returns the area of the bounding box around the blob\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int area()", + "py_def": "def area(self) -> int" + }, + "density": { + "type": "func", + "name": "density", + "doc": { + "brief": "get blob density", + "return": "Returns the density ratio of the blob", + "maixpy": "maix.image.Blob.density", + "py_doc": "get blob density\n\nReturns: Returns the density ratio of the blob\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float density()", + "py_def": "def density(self) -> float" + }, + "extent": { + "type": "func", + "name": "extent", + "doc": { + "brief": "Alias for blob.density()", + "return": "Returns the density ratio of the blob", + "maixpy": "maix.image.Blob.extent", + "py_doc": "Alias for blob.density()\n\nReturns: Returns the density ratio of the blob\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float extent()", + "py_def": "def extent(self) -> float" + }, + "compactness": { + "type": "func", + "name": "compactness", + "doc": { + "brief": "get blob compactness", + "return": "Returns the compactness ratio of the blob", + "maixpy": "maix.image.Blob.compactness", + "py_doc": "get blob compactness\n\nReturns: Returns the compactness ratio of the blob\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float compactness()", + "py_def": "def compactness(self) -> float" + }, + "solidity": { + "type": "func", + "name": "solidity", + "doc": { + "brief": "get blob solidity", + "return": "Returns the solidity ratio of the blob", + "maixpy": "maix.image.Blob.solidity", + "py_doc": "get blob solidity\n\nReturns: Returns the solidity ratio of the blob\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float solidity()", + "py_def": "def solidity(self) -> float" + }, + "convexity": { + "type": "func", + "name": "convexity", + "doc": { + "brief": "get blob convexity", + "return": "Returns a 
value between 0 and 1 representing how convex the object is", + "maixpy": "maix.image.Blob.convexity", + "py_doc": "get blob convexity\n\nReturns: Returns a value between 0 and 1 representing how convex the object is\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float convexity()", + "py_def": "def convexity(self) -> float" + }, + "x_hist_bins": { + "type": "func", + "name": "x_hist_bins", + "doc": { + "brief": "get blob x_hist_bins", + "return": "Returns the x_hist_bins of the blob", + "maixpy": "maix.image.Blob.x_hist_bins", + "py_doc": "get blob x_hist_bins\n\nReturns: Returns the x_hist_bins of the blob\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector x_hist_bins()", + "py_def": "def x_hist_bins(self) -> list[int]" + }, + "y_hist_bins": { + "type": "func", + "name": "y_hist_bins", + "doc": { + "brief": "get blob y_hist_bins", + "return": "Returns the y_hist_bins of the blob", + "maixpy": "maix.image.Blob.y_hist_bins", + "py_doc": "get blob y_hist_bins\n\nReturns: Returns the y_hist_bins of the blob\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector y_hist_bins()", + "py_def": "def y_hist_bins(self) -> list[int]" + }, + "major_axis_line": { + "type": "func", + "name": "major_axis_line", + "doc": { + "brief": "get blob major_axis_line", + "return": "Returns a line tuple (x1, y1, x2, y2) of the minor axis of the blob.", + "maixpy": "maix.image.Blob.major_axis_line", + "py_doc": "get blob major_axis_line\n\nReturns: Returns a line tuple (x1, y1, x2, y2) of the minor axis of the blob.\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector major_axis_line()", + "py_def": "def major_axis_line(self) -> list[int]" + }, + "minor_axis_line": { + "type": "func", + "name": "minor_axis_line", + "doc": { + "brief": "get blob minor_axis_line", + "return": "Returns a line tuple (x1, y1, x2, y2) of the minor axis of the blob.", + "maixpy": "maix.image.Blob.minor_axis_line", + "py_doc": "get blob minor_axis_line\n\nReturns: Returns a line tuple (x1, y1, x2, y2) of the minor axis of the blob.\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector minor_axis_line()", + "py_def": "def minor_axis_line(self) -> list[int]" + }, + "enclosing_circle": { + "type": "func", + "name": "enclosing_circle", + "doc": { + "brief": "get blob enclosing_circle", + "return": "Returns a circle tuple (x, y, r) of the circle that encloses the min area rectangle of a blob.", + "maixpy": "maix.image.Blob.enclosing_circle", + "py_doc": "get blob enclosing_circle\n\nReturns: Returns a circle tuple (x, y, r) of the circle that encloses the min area rectangle of a blob.\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector enclosing_circle()", + "py_def": "def enclosing_circle(self) -> list[int]" + }, + "enclosed_ellipse": { + "type": "func", + "name": "enclosed_ellipse", + "doc": { + "brief": "get blob enclosed_ellipse", + "return": "Returns an ellipse tuple (x, y, rx, ry, rotation) of the ellipse that fits inside of the min area rectangle of a blob.", + "maixpy": "maix.image.Blob.enclosed_ellipse", + "py_doc": "get blob enclosed_ellipse\n\nReturns: Returns an ellipse tuple (x, y, rx, ry, rotation) of the ellipse that fits inside of the min area rectangle of a blob.\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector enclosed_ellipse()", + "py_def": "def 
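
Blob carries by far the most accessors. In practice blobs are produced by the library's blob-finding routine (an assumption; that routine is not shown in this excerpt), but the documented constructor can be used directly to demonstrate the getters with made-up values:

```python
# Illustrative sketch: building a maix.image.Blob with the documented
# constructor (all values fabricated) and reading the accessors listed above.
from maix import image

quad = [[10, 10], [50, 10], [50, 30], [10, 30]]
b = image.Blob([10, 10, 40, 20],   # rect: x, y, w, h
               quad,               # corners
               quad,               # mini_corners (reused for brevity)
               30.0, 20.0,         # cx, cy
               600,                # pixels
               0.0,                # rotation (radians)
               1, 1,               # code, count
               120,                # perimeter
               0.8,                # roundness
               [0] * 40,           # x_hist_bins
               [0] * 20)           # y_hist_bins

print(b.rect(), b.x(), b.y(), b.w(), b.h())
print(b.pixels(), b.cx(), b.cy(), b.area())      # area = bounding-box area
print(b.rotation_deg(), b.roundness(), b.code())
print(b[0], b[4])                                # __getitem__: 0 = x, 4 = pixels
```
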
enclosed_ellipse(self) -> list[int]" + } + }, + "def": "class Blob" + }, + "QRCode": { + "type": "class", + "name": "QRCode", + "doc": { + "brief": "QRCode class", + "maixpy": "maix.image.QRCode", + "py_doc": "QRCode class" + }, + "members": { + "__init__": { + "type": "func", + "name": "QRCode", + "doc": { + "brief": "QRCode constructor", + "param": { + "rect": "rect of corners, type is std::vector", + "corners": "corners of QRCode", + "payload": "payload of the QRCode", + "version": "version of the QRCode", + "ecc_level": "ecc_level of the QRCode", + "mask": "mask of the QRCode", + "data_type": "data_type of the QRCode", + "eci": "eci of the QRCode" + }, + "maixpy": "maix.image.QRCode.__init__", + "py_doc": "QRCode constructor\n\nArgs:\n - rect: rect of corners, type is std::vector\n - corners: corners of QRCode\n - payload: payload of the QRCode\n - version: version of the QRCode\n - ecc_level: ecc_level of the QRCode\n - mask: mask of the QRCode\n - data_type: data_type of the QRCode\n - eci: eci of the QRCode\n" + }, + "args": [ + [ + "std::vector &", + "rect", + null + ], + [ + "std::vector> &", + "corners", + null + ], + [ + "std::string &", + "payload", + null + ], + [ + "int", + "version", + null + ], + [ + "int", + "ecc_level", + null + ], + [ + "int", + "mask", + null + ], + [ + "int", + "data_type", + null + ], + [ + "int", + "eci", + null + ] + ], + "ret_type": null, + "static": false, + "def": "QRCode(std::vector &rect, std::vector> &corners, std::string &payload, int version, int ecc_level, int mask, int data_type, int eci)", + "py_def": "def __init__(self, rect: list[int], corners: list[list[int]], payload: str, version: int, ecc_level: int, mask: int, data_type: int, eci: int) -> None" + }, + "__getitem__": { + "type": "func", + "name": "__getitem__", + "doc": { + "brief": "Subscript operator", + "param": { + "index": "[0] Returns the qrcode\u2019s bounding box x coordinate\n[1] Returns the qrcode\u2019s bounding box y coordinate\n[2] Returns the qrcode\u2019s bounding box w coordinate\n[3] Returns the qrcode\u2019s bounding box h coordinate\n[4] Not support this index, try to use payload() method\n[5] Returns the version of qrcode\n[6] Returns the error correction level of qrcode\n[7] Returns the mask of qrcode\n[8] Returns the datatype of qrcode\n[9] Returns the eci of qrcode" + }, + "return": "int&", + "maixpy": "maix.image.QRCode.__getitem__", + "py_doc": "Subscript operator\n\nArgs:\n - index: [0] Returns the qrcode\u2019s bounding box x coordinate\n[1] Returns the qrcode\u2019s bounding box y coordinate\n[2] Returns the qrcode\u2019s bounding box w coordinate\n[3] Returns the qrcode\u2019s bounding box h coordinate\n[4] Not support this index, try to use payload() method\n[5] Returns the version of qrcode\n[6] Returns the error correction level of qrcode\n[7] Returns the mask of qrcode\n[8] Returns the datatype of qrcode\n[9] Returns the eci of qrcode\n\n\nReturns: int&\n" + }, + "args": [ + [ + "int", + "index", + null + ] + ], + "ret_type": "int&", + "static": false, + "def": "int &__getitem__(int index)", + "py_def": "def __getitem__(self, index: int) -> int" + }, + "corners": { + "type": "func", + "name": "corners", + "doc": { + "brief": "get coordinate of QRCode", + "return": "return the coordinate of the QRCode.", + "maixpy": "maix.image.QRCode.corners", + "py_doc": "get coordinate of QRCode\n\nReturns: return the coordinate of the QRCode.\n" + }, + "args": [], + "ret_type": "std::vector>", + "static": false, + "def": "std::vector> corners()", + "py_def": "def 
corners(self) -> list[list[int]]" + }, + "rect": { + "type": "func", + "name": "rect", + "doc": { + "brief": "get rectangle of QRCode", + "return": "return the rectangle of the QRCode. format is {x, y, w, h}, type is std::vector", + "maixpy": "maix.image.QRCode.rect", + "py_doc": "get rectangle of QRCode\n\nReturns: return the rectangle of the QRCode. format is {x, y, w, h}, type is std::vector\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector rect()", + "py_def": "def rect(self) -> list[int]" + }, + "x": { + "type": "func", + "name": "x", + "doc": { + "brief": "get x of QRCode", + "return": "return x of the QRCode, type is int", + "maixpy": "maix.image.QRCode.x", + "py_doc": "get x of QRCode\n\nReturns: return x of the QRCode, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int x()", + "py_def": "def x(self) -> int" + }, + "y": { + "type": "func", + "name": "y", + "doc": { + "brief": "get y of QRCode", + "return": "return y of the QRCode, type is int", + "maixpy": "maix.image.QRCode.y", + "py_doc": "get y of QRCode\n\nReturns: return y of the QRCode, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int y()", + "py_def": "def y(self) -> int" + }, + "w": { + "type": "func", + "name": "w", + "doc": { + "brief": "get w of QRCode", + "return": "return w of the QRCode, type is int", + "maixpy": "maix.image.QRCode.w", + "py_doc": "get w of QRCode\n\nReturns: return w of the QRCode, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int w()", + "py_def": "def w(self) -> int" + }, + "h": { + "type": "func", + "name": "h", + "doc": { + "brief": "get h of QRCode", + "return": "return h of the QRCode, type is int", + "maixpy": "maix.image.QRCode.h", + "py_doc": "get h of QRCode\n\nReturns: return h of the QRCode, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int h()", + "py_def": "def h(self) -> int" + }, + "payload": { + "type": "func", + "name": "payload", + "doc": { + "brief": "get QRCode payload", + "return": "return area of the QRCode", + "maixpy": "maix.image.QRCode.payload", + "py_doc": "get QRCode payload\n\nReturns: return area of the QRCode\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string payload()", + "py_def": "def payload(self) -> str" + }, + "version": { + "type": "func", + "name": "version", + "doc": { + "brief": "get QRCode version", + "return": "return version of the QRCode", + "maixpy": "maix.image.QRCode.version", + "py_doc": "get QRCode version\n\nReturns: return version of the QRCode\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int version()", + "py_def": "def version(self) -> int" + }, + "ecc_level": { + "type": "func", + "name": "ecc_level", + "doc": { + "brief": "get QRCode error correction level", + "return": "return error correction level of the QRCode", + "maixpy": "maix.image.QRCode.ecc_level", + "py_doc": "get QRCode error correction level\n\nReturns: return error correction level of the QRCode\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int ecc_level()", + "py_def": "def ecc_level(self) -> int" + }, + "mask": { + "type": "func", + "name": "mask", + "doc": { + "brief": "get QRCode mask", + "return": "return mask of the QRCode", + "maixpy": "maix.image.QRCode.mask", + "py_doc": "get QRCode mask\n\nReturns: return mask of the QRCode\n" + }, + "args": [], + "ret_type": "int", + "static": false, + 
"def": "int mask()", + "py_def": "def mask(self) -> int" + }, + "data_type": { + "type": "func", + "name": "data_type", + "doc": { + "brief": "get QRCode dataType", + "return": "return mask of the QRCode", + "maixpy": "maix.image.QRCode.data_type", + "py_doc": "get QRCode dataType\n\nReturns: return mask of the QRCode\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int data_type()", + "py_def": "def data_type(self) -> int" + }, + "eci": { + "type": "func", + "name": "eci", + "doc": { + "brief": "get QRCode eci", + "return": "return data of the QRCode", + "maixpy": "maix.image.QRCode.eci", + "py_doc": "get QRCode eci\n\nReturns: return data of the QRCode\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int eci()", + "py_def": "def eci(self) -> int" + }, + "is_numeric": { + "type": "func", + "name": "is_numeric", + "doc": { + "brief": "check QRCode is numeric", + "return": "return true if the result type of the QRCode is numeric", + "maixpy": "maix.image.QRCode.is_numeric", + "py_doc": "check QRCode is numeric\n\nReturns: return true if the result type of the QRCode is numeric\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_numeric()", + "py_def": "def is_numeric(self) -> bool" + }, + "is_alphanumeric": { + "type": "func", + "name": "is_alphanumeric", + "doc": { + "brief": "check QRCode is alphanumeric", + "return": "return true if the result type of the QRCode is alphanumeric", + "maixpy": "maix.image.QRCode.is_alphanumeric", + "py_doc": "check QRCode is alphanumeric\n\nReturns: return true if the result type of the QRCode is alphanumeric\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_alphanumeric()", + "py_def": "def is_alphanumeric(self) -> bool" + }, + "is_binary": { + "type": "func", + "name": "is_binary", + "doc": { + "brief": "check QRCode is binary", + "return": "return true if the result type of the QRCode is binary", + "maixpy": "maix.image.QRCode.is_binary", + "py_doc": "check QRCode is binary\n\nReturns: return true if the result type of the QRCode is binary\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_binary()", + "py_def": "def is_binary(self) -> bool" + }, + "is_kanji": { + "type": "func", + "name": "is_kanji", + "doc": { + "brief": "check QRCode is kanji", + "return": "return true if the result type of the QRCode is kanji", + "maixpy": "maix.image.QRCode.is_kanji", + "py_doc": "check QRCode is kanji\n\nReturns: return true if the result type of the QRCode is kanji\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_kanji()", + "py_def": "def is_kanji(self) -> bool" + } + }, + "def": "class QRCode" + }, + "AprilTag": { + "type": "class", + "name": "AprilTag", + "doc": { + "brief": "AprilTag class", + "maixpy": "maix.image.AprilTag", + "py_doc": "AprilTag class" + }, + "members": { + "__init__": { + "type": "func", + "name": "AprilTag", + "doc": { + "brief": "AprilTag constructor", + "param": { + "rect": "Inlucdes the top-left corner and the width and height of the rectangle. format is {x, y, w, h}, type is std::vector", + "corners": "Includes the four corners of the rectangle. 
format is {{x0, y0}, {x1, y1}, {x2, y2}, {x3, y3}}, type is std::vector>", + "id": "The id of the AprilTag", + "famliy": "The family of the AprilTag", + "centroid_x": "The x coordinate of the center of the AprilTag", + "centroid_y": "The y coordinate of the center of the AprilTag", + "rotation": "The rotation of the AprilTag", + "decision_margin": "The decision_margin of the AprilTag", + "hamming": "The hamming of the AprilTag", + "goodness": "The goodness of the AprilTag", + "x_translation": "The x_translation of the AprilTag", + "y_translation": "The y_translation of the AprilTag", + "z_translation": "The z_translation of the AprilTag", + "x_rotation": "The x_rotation of the AprilTag", + "y_rotation": "The y_rotation of the AprilTag", + "z_rotation": "The z_rotation of the AprilTag" + }, + "maixpy": "maix.image.AprilTag.__init__", + "py_doc": "AprilTag constructor\n\nArgs:\n - rect: Inlucdes the top-left corner and the width and height of the rectangle. format is {x, y, w, h}, type is std::vector\n - corners: Includes the four corners of the rectangle. format is {{x0, y0}, {x1, y1}, {x2, y2}, {x3, y3}}, type is std::vector>\n - id: The id of the AprilTag\n - famliy: The family of the AprilTag\n - centroid_x: The x coordinate of the center of the AprilTag\n - centroid_y: The y coordinate of the center of the AprilTag\n - rotation: The rotation of the AprilTag\n - decision_margin: The decision_margin of the AprilTag\n - hamming: The hamming of the AprilTag\n - goodness: The goodness of the AprilTag\n - x_translation: The x_translation of the AprilTag\n - y_translation: The y_translation of the AprilTag\n - z_translation: The z_translation of the AprilTag\n - x_rotation: The x_rotation of the AprilTag\n - y_rotation: The y_rotation of the AprilTag\n - z_rotation: The z_rotation of the AprilTag\n" + }, + "args": [ + [ + "std::vector &", + "rect", + null + ], + [ + "std::vector> &", + "corners", + null + ], + [ + "int", + "id", + null + ], + [ + "int", + "famliy", + null + ], + [ + "float", + "centroid_x", + null + ], + [ + "float", + "centroid_y", + null + ], + [ + "float", + "rotation", + null + ], + [ + "float", + "decision_margin", + null + ], + [ + "int", + "hamming", + null + ], + [ + "float", + "goodness", + null + ], + [ + "float", + "x_translation", + null + ], + [ + "float", + "y_translation", + null + ], + [ + "float", + "z_translation", + null + ], + [ + "float", + "x_rotation", + null + ], + [ + "float", + "y_rotation", + null + ], + [ + "float", + "z_rotation", + null + ] + ], + "ret_type": null, + "static": false, + "def": "AprilTag(std::vector &rect, std::vector> &corners, int id, int famliy, float centroid_x, float centroid_y, float rotation, float decision_margin, int hamming, float goodness, float x_translation, float y_translation, float z_translation, float x_rotation, float y_rotation, float z_rotation)", + "py_def": "def __init__(self, rect: list[int], corners: list[list[int]], id: int, famliy: int, centroid_x: float, centroid_y: float, rotation: float, decision_margin: float, hamming: int, goodness: float, x_translation: float, y_translation: float, z_translation: float, x_rotation: float, y_rotation: float, z_rotation: float) -> None" + }, + "__getitem__": { + "type": "func", + "name": "__getitem__", + "doc": { + "brief": "Subscript operator", + "param": { + "index": "[0] Returns the apriltag\u2019s bounding box x coordinate\n[1] Returns the apriltag\u2019s bounding box y coordinate\n[2] Returns the apriltag\u2019s bounding box w coordinate\n[3] Returns the 
apriltag\u2019s bounding box h coordinate\n[4] Returns the apriltag\u2019s id\n[5] Returns the apriltag\u2019s family\n[6] Not support\n[7] Not support\n[8] Not support\n[9] Not support\n[10] Returns the apriltag\u2019s hamming\n[11] Not support\n[12] Not support\n[13] Not support\n[14] Not support\n[15] Not support\n[16] Not support\n[17] Not support" + }, + "return": "int&", + "maixpy": "maix.image.AprilTag.__getitem__", + "py_doc": "Subscript operator\n\nArgs:\n - index: [0] Returns the apriltag\u2019s bounding box x coordinate\n[1] Returns the apriltag\u2019s bounding box y coordinate\n[2] Returns the apriltag\u2019s bounding box w coordinate\n[3] Returns the apriltag\u2019s bounding box h coordinate\n[4] Returns the apriltag\u2019s id\n[5] Returns the apriltag\u2019s family\n[6] Not support\n[7] Not support\n[8] Not support\n[9] Not support\n[10] Returns the apriltag\u2019s hamming\n[11] Not support\n[12] Not support\n[13] Not support\n[14] Not support\n[15] Not support\n[16] Not support\n[17] Not support\n\n\nReturns: int&\n" + }, + "args": [ + [ + "int", + "index", + null + ] + ], + "ret_type": "int&", + "static": false, + "def": "int &__getitem__(int index)", + "py_def": "def __getitem__(self, index: int) -> int" + }, + "corners": { + "type": "func", + "name": "corners", + "doc": { + "brief": "get coordinate of AprilTag", + "return": "return the coordinate of the AprilTag.", + "maixpy": "maix.image.AprilTag.corners", + "py_doc": "get coordinate of AprilTag\n\nReturns: return the coordinate of the AprilTag.\n" + }, + "args": [], + "ret_type": "std::vector>", + "static": false, + "def": "std::vector> corners()", + "py_def": "def corners(self) -> list[list[int]]" + }, + "rect": { + "type": "func", + "name": "rect", + "doc": { + "brief": "get rectangle of AprilTag", + "return": "return the rectangle of the AprilTag. format is {x, y, w, h}, type is std::vector", + "maixpy": "maix.image.AprilTag.rect", + "py_doc": "get rectangle of AprilTag\n\nReturns: return the rectangle of the AprilTag. 
format is {x, y, w, h}, type is std::vector\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector rect()", + "py_def": "def rect(self) -> list[int]" + }, + "x": { + "type": "func", + "name": "x", + "doc": { + "brief": "get x of AprilTag", + "return": "return x of the AprilTag, type is int", + "maixpy": "maix.image.AprilTag.x", + "py_doc": "get x of AprilTag\n\nReturns: return x of the AprilTag, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int x()", + "py_def": "def x(self) -> int" + }, + "y": { + "type": "func", + "name": "y", + "doc": { + "brief": "get y of AprilTag", + "return": "return y of the AprilTag, type is int", + "maixpy": "maix.image.AprilTag.y", + "py_doc": "get y of AprilTag\n\nReturns: return y of the AprilTag, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int y()", + "py_def": "def y(self) -> int" + }, + "w": { + "type": "func", + "name": "w", + "doc": { + "brief": "get w of AprilTag", + "return": "return w of the AprilTag, type is int", + "maixpy": "maix.image.AprilTag.w", + "py_doc": "get w of AprilTag\n\nReturns: return w of the AprilTag, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int w()", + "py_def": "def w(self) -> int" + }, + "h": { + "type": "func", + "name": "h", + "doc": { + "brief": "get h of AprilTag", + "return": "return h of the AprilTag, type is int", + "maixpy": "maix.image.AprilTag.h", + "py_doc": "get h of AprilTag\n\nReturns: return h of the AprilTag, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int h()", + "py_def": "def h(self) -> int" + }, + "id": { + "type": "func", + "name": "id", + "doc": { + "brief": "get id of AprilTag", + "return": "return id of the AprilTag, type is int", + "maixpy": "maix.image.AprilTag.id", + "py_doc": "get id of AprilTag\n\nReturns: return id of the AprilTag, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int id()", + "py_def": "def id(self) -> int" + }, + "family": { + "type": "func", + "name": "family", + "doc": { + "brief": "get family of AprilTag", + "return": "return family of the AprilTag, type is int", + "maixpy": "maix.image.AprilTag.family", + "py_doc": "get family of AprilTag\n\nReturns: return family of the AprilTag, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int family()", + "py_def": "def family(self) -> int" + }, + "cx": { + "type": "func", + "name": "cx", + "doc": { + "brief": "get cx of AprilTag", + "return": "return cx of the AprilTag, type is int", + "maixpy": "maix.image.AprilTag.cx", + "py_doc": "get cx of AprilTag\n\nReturns: return cx of the AprilTag, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int cx()", + "py_def": "def cx(self) -> int" + }, + "cxf": { + "type": "func", + "name": "cxf", + "doc": { + "brief": "get cxf of AprilTag", + "return": "return cxf of the AprilTag, type is float", + "maixpy": "maix.image.AprilTag.cxf", + "py_doc": "get cxf of AprilTag\n\nReturns: return cxf of the AprilTag, type is float\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float cxf()", + "py_def": "def cxf(self) -> float" + }, + "cy": { + "type": "func", + "name": "cy", + "doc": { + "brief": "get cy of AprilTag", + "return": "return cy of the AprilTag, type is int", + "maixpy": "maix.image.AprilTag.cy", + "py_doc": "get cy of AprilTag\n\nReturns: return cy of the AprilTag, type is 
int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int cy()", + "py_def": "def cy(self) -> int" + }, + "cyf": { + "type": "func", + "name": "cyf", + "doc": { + "brief": "get cyf of AprilTag", + "return": "return cyf of the AprilTag, type is float", + "maixpy": "maix.image.AprilTag.cyf", + "py_doc": "get cyf of AprilTag\n\nReturns: return cyf of the AprilTag, type is float\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float cyf()", + "py_def": "def cyf(self) -> float" + }, + "rotation": { + "type": "func", + "name": "rotation", + "doc": { + "brief": "get rotation of AprilTag", + "return": "return rotation of the AprilTag, type is float", + "maixpy": "maix.image.AprilTag.rotation", + "py_doc": "get rotation of AprilTag\n\nReturns: return rotation of the AprilTag, type is float\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float rotation()", + "py_def": "def rotation(self) -> float" + }, + "decision_margin": { + "type": "func", + "name": "decision_margin", + "doc": { + "brief": "Get decision_margin of AprilTag", + "return": "Returns the quality of the apriltag match (0.0 - 1.0) where 1.0 is the best.", + "maixpy": "maix.image.AprilTag.decision_margin", + "py_doc": "Get decision_margin of AprilTag\n\nReturns: Returns the quality of the apriltag match (0.0 - 1.0) where 1.0 is the best.\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float decision_margin()", + "py_def": "def decision_margin(self) -> float" + }, + "hamming": { + "type": "func", + "name": "hamming", + "doc": { + "brief": "get hamming of AprilTag", + "return": "Returns the number of accepted bit errors for this tag.\nreturn 0, means 0 bit errors will be accepted.\n1 is TAG25H7, means up to 1 bit error may be accepted\n2 is TAG25H9, means up to 3 bit errors may be accepted\n3 is TAG36H10, means up to 3 bit errors may be accepted\n4 is TAG36H11, means up to 4 bit errors may be accepted\n5 is ARTOOLKIT, means 0 bit errors will be accepted", + "maixpy": "maix.image.AprilTag.hamming", + "py_doc": "get hamming of AprilTag\n\nReturns: Returns the number of accepted bit errors for this tag.\nreturn 0, means 0 bit errors will be accepted.\n1 is TAG25H7, means up to 1 bit error may be accepted\n2 is TAG25H9, means up to 3 bit errors may be accepted\n3 is TAG36H10, means up to 3 bit errors may be accepted\n4 is TAG36H11, means up to 4 bit errors may be accepted\n5 is ARTOOLKIT, means 0 bit errors will be accepted\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int hamming()", + "py_def": "def hamming(self) -> int" + }, + "goodness": { + "type": "func", + "name": "goodness", + "doc": { + "brief": "get goodness of AprilTag", + "return": "return goodness of the AprilTag, type is float\nNote: This value is always 0.0 for now.", + "maixpy": "maix.image.AprilTag.goodness", + "py_doc": "get goodness of AprilTag\n\nReturns: return goodness of the AprilTag, type is float\nNote: This value is always 0.0 for now.\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float goodness()", + "py_def": "def goodness(self) -> float" + }, + "x_translation": { + "type": "func", + "name": "x_translation", + "doc": { + "brief": "get x_translation of AprilTag", + "return": "return x_translation of the AprilTag, type is float", + "maixpy": "maix.image.AprilTag.x_translation", + "py_doc": "get x_translation of AprilTag\n\nReturns: return x_translation of the AprilTag, type is float\n" + }, + "args": [], + "ret_type": 
"float", + "static": false, + "def": "float x_translation()", + "py_def": "def x_translation(self) -> float" + }, + "y_translation": { + "type": "func", + "name": "y_translation", + "doc": { + "brief": "get y_translation of AprilTag", + "return": "return y_translation of the AprilTag, type is float", + "maixpy": "maix.image.AprilTag.y_translation", + "py_doc": "get y_translation of AprilTag\n\nReturns: return y_translation of the AprilTag, type is float\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float y_translation()", + "py_def": "def y_translation(self) -> float" + }, + "z_translation": { + "type": "func", + "name": "z_translation", + "doc": { + "brief": "get z_translation of AprilTag", + "return": "return z_translation of the AprilTag, type is float", + "maixpy": "maix.image.AprilTag.z_translation", + "py_doc": "get z_translation of AprilTag\n\nReturns: return z_translation of the AprilTag, type is float\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float z_translation()", + "py_def": "def z_translation(self) -> float" + }, + "x_rotation": { + "type": "func", + "name": "x_rotation", + "doc": { + "brief": "get x_rotation of AprilTag", + "return": "return x_rotation of the AprilTag, type is float", + "maixpy": "maix.image.AprilTag.x_rotation", + "py_doc": "get x_rotation of AprilTag\n\nReturns: return x_rotation of the AprilTag, type is float\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float x_rotation()", + "py_def": "def x_rotation(self) -> float" + }, + "y_rotation": { + "type": "func", + "name": "y_rotation", + "doc": { + "brief": "get y_rotation of AprilTag", + "return": "return y_rotation of the AprilTag, type is float", + "maixpy": "maix.image.AprilTag.y_rotation", + "py_doc": "get y_rotation of AprilTag\n\nReturns: return y_rotation of the AprilTag, type is float\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float y_rotation()", + "py_def": "def y_rotation(self) -> float" + }, + "z_rotation": { + "type": "func", + "name": "z_rotation", + "doc": { + "brief": "get z_rotation of AprilTag", + "return": "return z_rotation of the AprilTag, type is float", + "maixpy": "maix.image.AprilTag.z_rotation", + "py_doc": "get z_rotation of AprilTag\n\nReturns: return z_rotation of the AprilTag, type is float\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float z_rotation()", + "py_def": "def z_rotation(self) -> float" + } + }, + "def": "class AprilTag" + }, + "DataMatrix": { + "type": "class", + "name": "DataMatrix", + "doc": { + "brief": "DataMatrix class", + "maixpy": "maix.image.DataMatrix", + "py_doc": "DataMatrix class" + }, + "members": { + "__init__": { + "type": "func", + "name": "DataMatrix", + "doc": { + "brief": "DataMatrix constructor", + "param": { + "rect": "Inlucdes the top-left corner and the width and height of the rectangle. format is {x, y, w, h}, type is std::vector", + "corners": "Includes the four corners of the rectangle. 
format is {{x0, y0}, {x1, y1}, {x2, y2}, {x3, y3}}, type is std::vector>", + "payload": "The payload of the DataMatrix", + "rotation": "The rotation of the DataMatrix", + "rows": "The rows of the DataMatrix", + "columns": "The columns of the DataMatrix", + "capacity": "The capacity of the DataMatrix", + "padding": "The padding of the DataMatrix" + }, + "maixpy": "maix.image.DataMatrix.__init__", + "py_doc": "DataMatrix constructor\n\nArgs:\n - rect: Includes the top-left corner and the width and height of the rectangle. format is {x, y, w, h}, type is std::vector\n - corners: Includes the four corners of the rectangle. format is {{x0, y0}, {x1, y1}, {x2, y2}, {x3, y3}}, type is std::vector>\n - payload: The payload of the DataMatrix\n - rotation: The rotation of the DataMatrix\n - rows: The rows of the DataMatrix\n - columns: The columns of the DataMatrix\n - capacity: The capacity of the DataMatrix\n - padding: The padding of the DataMatrix\n" + }, + "args": [ + [ + "std::vector &", + "rect", + null + ], + [ + "std::vector> &", + "corners", + null + ], + [ + "std::string &", + "payload", + null + ], + [ + "float", + "rotation", + null + ], + [ + "int", + "rows", + null + ], + [ + "int", + "columns", + null + ], + [ + "int", + "capacity", + null + ], + [ + "int", + "padding", + null + ] + ], + "ret_type": null, + "static": false, + "def": "DataMatrix(std::vector &rect, std::vector> &corners, std::string &payload, float rotation, int rows, int columns, int capacity, int padding)", + "py_def": "def __init__(self, rect: list[int], corners: list[list[int]], payload: str, rotation: float, rows: int, columns: int, capacity: int, padding: int) -> None" + }, + "__getitem__": { + "type": "func", + "name": "__getitem__", + "doc": { + "brief": "Subscript operator", + "param": { + "index": "[0] get x of DataMatrix\n[1] get y of DataMatrix\n[2] get w of DataMatrix\n[3] get h of DataMatrix\n[4] Not support this index, try to use payload() method\n[5] Not support this index, try to use rotation() method\n[6] get rows of DataMatrix\n[7] get columns of DataMatrix\n[8] get capacity of DataMatrix\n[9] get padding of DataMatrix" + }, + "return": "int&", + "maixpy": "maix.image.DataMatrix.__getitem__", + "py_doc": "Subscript operator\n\nArgs:\n - index: [0] get x of DataMatrix\n[1] get y of DataMatrix\n[2] get w of DataMatrix\n[3] get h of DataMatrix\n[4] Not support this index, try to use payload() method\n[5] Not support this index, try to use rotation() method\n[6] get rows of DataMatrix\n[7] get columns of DataMatrix\n[8] get capacity of DataMatrix\n[9] get padding of DataMatrix\n\n\nReturns: int&\n" + }, + "args": [ + [ + "int", + "index", + null + ] + ], + "ret_type": "int&", + "static": false, + "def": "int &__getitem__(int index)", + "py_def": "def __getitem__(self, index: int) -> int" + }, + "corners": { + "type": "func", + "name": "corners", + "doc": { + "brief": "get coordinate of DataMatrix", + "return": "return the coordinate of the DataMatrix.", + "maixpy": "maix.image.DataMatrix.corners", + "py_doc": "get coordinate of DataMatrix\n\nReturns: return the coordinate of the DataMatrix.\n" + }, + "args": [], + "ret_type": "std::vector>", + "static": false, + "def": "std::vector> corners()", + "py_def": "def corners(self) -> list[list[int]]" + }, + "rect": { + "type": "func", + "name": "rect", + "doc": { + "brief": "get rectangle of DataMatrix", + "return": "return the rectangle of the DataMatrix. 
format is {x, y, w, h}, type is std::vector", + "maixpy": "maix.image.DataMatrix.rect", + "py_doc": "get rectangle of DataMatrix\n\nReturns: return the rectangle of the DataMatrix. format is {x, y, w, h}, type is std::vector\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector rect()", + "py_def": "def rect(self) -> list[int]" + }, + "x": { + "type": "func", + "name": "x", + "doc": { + "brief": "get x of DataMatrix", + "return": "return x of the DataMatrix, type is int", + "maixpy": "maix.image.DataMatrix.x", + "py_doc": "get x of DataMatrix\n\nReturns: return x of the DataMatrix, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int x()", + "py_def": "def x(self) -> int" + }, + "y": { + "type": "func", + "name": "y", + "doc": { + "brief": "get y of DataMatrix", + "return": "return y of the DataMatrix, type is int", + "maixpy": "maix.image.DataMatrix.y", + "py_doc": "get y of DataMatrix\n\nReturns: return y of the DataMatrix, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int y()", + "py_def": "def y(self) -> int" + }, + "w": { + "type": "func", + "name": "w", + "doc": { + "brief": "get w of DataMatrix", + "return": "return w of the DataMatrix, type is int", + "maixpy": "maix.image.DataMatrix.w", + "py_doc": "get w of DataMatrix\n\nReturns: return w of the DataMatrix, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int w()", + "py_def": "def w(self) -> int" + }, + "h": { + "type": "func", + "name": "h", + "doc": { + "brief": "get h of DataMatrix", + "return": "return h of the DataMatrix, type is int", + "maixpy": "maix.image.DataMatrix.h", + "py_doc": "get h of DataMatrix\n\nReturns: return h of the DataMatrix, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int h()", + "py_def": "def h(self) -> int" + }, + "payload": { + "type": "func", + "name": "payload", + "doc": { + "brief": "get payload of DataMatrix", + "return": "return payload of the DataMatrix, type is std::string", + "maixpy": "maix.image.DataMatrix.payload", + "py_doc": "get payload of DataMatrix\n\nReturns: return payload of the DataMatrix, type is std::string\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string payload()", + "py_def": "def payload(self) -> str" + }, + "rotation": { + "type": "func", + "name": "rotation", + "doc": { + "brief": "get rotation of DataMatrix", + "return": "return rotation of the DataMatrix, type is float", + "maixpy": "maix.image.DataMatrix.rotation", + "py_doc": "get rotation of DataMatrix\n\nReturns: return rotation of the DataMatrix, type is float\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float rotation()", + "py_def": "def rotation(self) -> float" + }, + "rows": { + "type": "func", + "name": "rows", + "doc": { + "brief": "get rows of DataMatrix", + "return": "return rows of the DataMatrix, type is int", + "maixpy": "maix.image.DataMatrix.rows", + "py_doc": "get rows of DataMatrix\n\nReturns: return rows of the DataMatrix, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int rows()", + "py_def": "def rows(self) -> int" + }, + "columns": { + "type": "func", + "name": "columns", + "doc": { + "brief": "get columns of DataMatrix", + "return": "return columns of the DataMatrix, type is int", + "maixpy": "maix.image.DataMatrix.columns", + "py_doc": "get columns of DataMatrix\n\nReturns: return columns of the 
DataMatrix, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int columns()", + "py_def": "def columns(self) -> int" + }, + "capacity": { + "type": "func", + "name": "capacity", + "doc": { + "brief": "get capacity of DataMatrix", + "return": "returns how many characters could fit in this data matrix, type is int", + "maixpy": "maix.image.DataMatrix.capacity", + "py_doc": "get capacity of DataMatrix\n\nReturns: returns how many characters could fit in this data matrix, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int capacity()", + "py_def": "def capacity(self) -> int" + }, + "padding": { + "type": "func", + "name": "padding", + "doc": { + "brief": "get padding of DataMatrix", + "return": "returns how many unused characters are in this data matrix, type is int", + "maixpy": "maix.image.DataMatrix.padding", + "py_doc": "get padding of DataMatrix\n\nReturns: returns how many unused characters are in this data matrix, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int padding()", + "py_def": "def padding(self) -> int" + } + }, + "def": "class DataMatrix" + }, + "BarCode": { + "type": "class", + "name": "BarCode", + "doc": { + "brief": "BarCode class", + "maixpy": "maix.image.BarCode", + "py_doc": "BarCode class" + }, + "members": { + "__init__": { + "type": "func", + "name": "BarCode", + "doc": { + "brief": "BarCode constructor", + "param": { + "rect": "Includes the top-left corner and the width and height of the rectangle. format is {x, y, w, h}, type is std::vector", + "corners": "Includes the four corners of the rectangle. format is {{x0, y0}, {x1, y1}, {x2, y2}, {x3, y3}}, type is std::vector>", + "payload": "The payload of the BarCode", + "type": "The type of the BarCode", + "rotation": "The rotation of the BarCode", + "quality": "The quality of the BarCode" + }, + "maixpy": "maix.image.BarCode.__init__", + "py_doc": "BarCode constructor\n\nArgs:\n - rect: Includes the top-left corner and the width and height of the rectangle. format is {x, y, w, h}, type is std::vector\n - corners: Includes the four corners of the rectangle. 
format is {{x0, y0}, {x1, y1}, {x2, y2}, {x3, y3}}, type is std::vector>\n - payload: The payload of the BarCode\n - type: The type of the BarCode\n - rotation: The rotation of the BarCode\n - quality: The quality of the BarCode\n" + }, + "args": [ + [ + "std::vector &", + "rect", + null + ], + [ + "std::vector> &", + "corners", + null + ], + [ + "std::string &", + "payload", + null + ], + [ + "int", + "type", + null + ], + [ + "float", + "rotation", + null + ], + [ + "int", + "quality", + null + ] + ], + "ret_type": null, + "static": false, + "def": "BarCode(std::vector &rect, std::vector> &corners, std::string &payload, int type, float rotation, int quality)", + "py_def": "def __init__(self, rect: list[int], corners: list[list[int]], payload: str, type: int, rotation: float, quality: int) -> None" + }, + "__getitem__": { + "type": "func", + "name": "__getitem__", + "doc": { + "brief": "Subscript operator", + "param": { + "index": "[0] get x of BarCode\n[1] get y of BarCode\n[2] get w of BarCode\n[3] get h of BarCode\n[4] Not support this index, try to use payload() method\n[5] get type of BarCode\n[6] Not support this index, try to use rotation() method\n[7] get quality of BarCode" + }, + "return": "int&", + "maixpy": "maix.image.BarCode.__getitem__", + "py_doc": "Subscript operator\n\nArgs:\n - index: [0] get x of BarCode\n[1] get y of BarCode\n[2] get w of BarCode\n[3] get h of BarCode\n[4] Not support this index, try to use payload() method\n[5] get type of BarCode\n[6] Not support this index, try to use rotation() method\n[7] get quality of BarCode\n\n\nReturns: int&\n" + }, + "args": [ + [ + "int", + "index", + null + ] + ], + "ret_type": "int&", + "static": false, + "def": "int &__getitem__(int index)", + "py_def": "def __getitem__(self, index: int) -> int" + }, + "corners": { + "type": "func", + "name": "corners", + "doc": { + "brief": "get coordinate of BarCode", + "return": "return the coordinate of the BarCode.", + "maixpy": "maix.image.BarCode.corners", + "py_doc": "get coordinate of BarCode\n\nReturns: return the coordinate of the BarCode.\n" + }, + "args": [], + "ret_type": "std::vector>", + "static": false, + "def": "std::vector> corners()", + "py_def": "def corners(self) -> list[list[int]]" + }, + "rect": { + "type": "func", + "name": "rect", + "doc": { + "brief": "get rectangle of BarCode", + "return": "return the rectangle of the BarCode. format is {x, y, w, h}, type is std::vector", + "maixpy": "maix.image.BarCode.rect", + "py_doc": "get rectangle of BarCode\n\nReturns: return the rectangle of the BarCode. 
format is {x, y, w, h}, type is std::vector\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector rect()", + "py_def": "def rect(self) -> list[int]" + }, + "x": { + "type": "func", + "name": "x", + "doc": { + "brief": "get x of BarCode", + "return": "return x of the BarCode, type is int", + "maixpy": "maix.image.BarCode.x", + "py_doc": "get x of BarCode\n\nReturns: return x of the BarCode, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int x()", + "py_def": "def x(self) -> int" + }, + "y": { + "type": "func", + "name": "y", + "doc": { + "brief": "get y of BarCode", + "return": "return y of the BarCode, type is int", + "maixpy": "maix.image.BarCode.y", + "py_doc": "get y of BarCode\n\nReturns: return y of the BarCode, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int y()", + "py_def": "def y(self) -> int" + }, + "w": { + "type": "func", + "name": "w", + "doc": { + "brief": "get w of BarCode", + "return": "return w of the BarCode, type is int", + "maixpy": "maix.image.BarCode.w", + "py_doc": "get w of BarCode\n\nReturns: return w of the BarCode, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int w()", + "py_def": "def w(self) -> int" + }, + "h": { + "type": "func", + "name": "h", + "doc": { + "brief": "get h of BarCode", + "return": "return h of the BarCode, type is int", + "maixpy": "maix.image.BarCode.h", + "py_doc": "get h of BarCode\n\nReturns: return h of the BarCode, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int h()", + "py_def": "def h(self) -> int" + }, + "payload": { + "type": "func", + "name": "payload", + "doc": { + "brief": "get payload of BarCode", + "return": "return payload of the BarCode, type is std::string", + "maixpy": "maix.image.BarCode.payload", + "py_doc": "get payload of BarCode\n\nReturns: return payload of the BarCode, type is std::string\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string payload()", + "py_def": "def payload(self) -> str" + }, + "type": { + "type": "func", + "name": "type", + "doc": { + "brief": "get type of BarCode", + "return": "return type of the BarCode, type is int", + "maixpy": "maix.image.BarCode.type", + "py_doc": "get type of BarCode\n\nReturns: return type of the BarCode, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int type()", + "py_def": "def type(self) -> int" + }, + "rotation": { + "type": "func", + "name": "rotation", + "doc": { + "brief": "get rotation of BarCode", + "return": "return rotation of the BarCode, type is float. FIXME: always return 0.0", + "maixpy": "maix.image.BarCode.rotation", + "py_doc": "get rotation of BarCode\n\nReturns: return rotation of the BarCode, type is float. 
FIXME: always return 0.0\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float rotation()", + "py_def": "def rotation(self) -> float" + }, + "quality": { + "type": "func", + "name": "quality", + "doc": { + "brief": "get quality of BarCode", + "return": "return quality of the BarCode, type is int", + "maixpy": "maix.image.BarCode.quality", + "py_doc": "get quality of BarCode\n\nReturns: return quality of the BarCode, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int quality()", + "py_def": "def quality(self) -> int" + } + }, + "def": "class BarCode" + }, + "Statistics": { + "type": "class", + "name": "Statistics", + "doc": { + "brief": "Statistics class", + "maixpy": "maix.image.Statistics", + "py_doc": "Statistics class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Statistics", + "doc": { + "brief": "Statistics constructor", + "param": { + "format": "The statistics source image format", + "l_statistics": "The statistics of the L channel. format is {mean, median, mode, std_dev, min, max, lq, uq}, type is std::vector", + "a_statistics": "The statistics of the A channel. format is {mean, median, mode, std_dev, min, max, lq, uq}, type is std::vector", + "b_statistics": "The statistics of the B channel. format is {mean, median, mode, std_dev, min, max, lq, uq}, type is std::vector" + }, + "maixpy": "maix.image.Statistics.__init__", + "py_doc": "Statistics constructor\n\nArgs:\n - format: The statistics source image format\n - l_statistics: The statistics of the L channel. format is {mean, median, mode, std_dev, min, max, lq, uq}, type is std::vector\n - a_statistics: The statistics of the A channel. format is {mean, median, mode, std_dev, min, max, lq, uq}, type is std::vector\n - b_statistics: The statistics of the B channel. 
format is {mean, median, mode, std_dev, min, max, lq, uq}, type is std::vector\n" + }, + "args": [ + [ + "image::Format", + "format", + null + ], + [ + "std::vector &", + "l_statistics", + null + ], + [ + "std::vector &", + "a_statistics", + null + ], + [ + "std::vector &", + "b_statistics", + null + ] + ], + "ret_type": null, + "static": false, + "def": "Statistics(image::Format format, std::vector &l_statistics, std::vector &a_statistics, std::vector &b_statistics)", + "py_def": "def __init__(self, format: Format, l_statistics: list[int], a_statistics: list[int], b_statistics: list[int]) -> None" + }, + "__getitem__": { + "type": "func", + "name": "__getitem__", + "doc": { + "brief": "Subscript operator", + "param": { + "index": "array index" + }, + "return": "int&", + "maixpy": "maix.image.Statistics.__getitem__", + "py_doc": "Subscript operator\n\nArgs:\n - index: array index\n\n\nReturns: int&\n" + }, + "args": [ + [ + "int", + "index", + null + ] + ], + "ret_type": "int&", + "static": false, + "def": "int &__getitem__(int index)", + "py_def": "def __getitem__(self, index: int) -> int" + }, + "format": { + "type": "func", + "name": "format", + "doc": { + "brief": "get format of Statistics source image", + "return": "return format of the Statistics source image, type is image::Format", + "maixpy": "maix.image.Statistics.format", + "py_doc": "get format of Statistics source image\n\nReturns: return format of the Statistics source image, type is image::Format\n" + }, + "args": [], + "ret_type": "image::Format", + "static": false, + "def": "image::Format format()", + "py_def": "def format(self) -> Format" + }, + "l_mean": { + "type": "func", + "name": "l_mean", + "doc": { + "brief": "get L channel mean", + "return": "return L channel mean, type is int", + "maixpy": "maix.image.Statistics.l_mean", + "py_doc": "get L channel mean\n\nReturns: return L channel mean, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int l_mean()", + "py_def": "def l_mean(self) -> int" + }, + "l_median": { + "type": "func", + "name": "l_median", + "doc": { + "brief": "get L channel median", + "return": "return L channel median, type is int", + "maixpy": "maix.image.Statistics.l_median", + "py_doc": "get L channel median\n\nReturns: return L channel median, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int l_median()", + "py_def": "def l_median(self) -> int" + }, + "l_mode": { + "type": "func", + "name": "l_mode", + "doc": { + "brief": "get L channel mode", + "return": "return L channel mode, type is int", + "maixpy": "maix.image.Statistics.l_mode", + "py_doc": "get L channel mode\n\nReturns: return L channel mode, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int l_mode()", + "py_def": "def l_mode(self) -> int" + }, + "l_std_dev": { + "type": "func", + "name": "l_std_dev", + "doc": { + "brief": "get L channel std_dev", + "return": "return L channel std_dev, type is int", + "maixpy": "maix.image.Statistics.l_std_dev", + "py_doc": "get L channel std_dev\n\nReturns: return L channel std_dev, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int l_std_dev()", + "py_def": "def l_std_dev(self) -> int" + }, + "l_min": { + "type": "func", + "name": "l_min", + "doc": { + "brief": "get L channel min", + "return": "return L channel min, type is int", + "maixpy": "maix.image.Statistics.l_min", + "py_doc": "get L channel min\n\nReturns: return L channel min, type is int\n" + }, + 
"args": [], + "ret_type": "int", + "static": false, + "def": "int l_min()", + "py_def": "def l_min(self) -> int" + }, + "l_max": { + "type": "func", + "name": "l_max", + "doc": { + "brief": "get L channel max", + "return": "return L channel max, type is int", + "maixpy": "maix.image.Statistics.l_max", + "py_doc": "get L channel max\n\nReturns: return L channel max, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int l_max()", + "py_def": "def l_max(self) -> int" + }, + "l_lq": { + "type": "func", + "name": "l_lq", + "doc": { + "brief": "get L channel lq", + "return": "return L channel lq, type is int", + "maixpy": "maix.image.Statistics.l_lq", + "py_doc": "get L channel lq\n\nReturns: return L channel lq, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int l_lq()", + "py_def": "def l_lq(self) -> int" + }, + "l_uq": { + "type": "func", + "name": "l_uq", + "doc": { + "brief": "get L channel uq", + "return": "return L channel uq, type is int", + "maixpy": "maix.image.Statistics.l_uq", + "py_doc": "get L channel uq\n\nReturns: return L channel uq, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int l_uq()", + "py_def": "def l_uq(self) -> int" + }, + "a_mean": { + "type": "func", + "name": "a_mean", + "doc": { + "brief": "get A channel mean", + "return": "return A channel mean, type is int", + "maixpy": "maix.image.Statistics.a_mean", + "py_doc": "get A channel mean\n\nReturns: return A channel mean, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int a_mean()", + "py_def": "def a_mean(self) -> int" + }, + "a_median": { + "type": "func", + "name": "a_median", + "doc": { + "brief": "get A channea median", + "return": "return A channel median, type is int", + "maixpy": "maix.image.Statistics.a_median", + "py_doc": "get A channea median\n\nReturns: return A channel median, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int a_median()", + "py_def": "def a_median(self) -> int" + }, + "a_mode": { + "type": "func", + "name": "a_mode", + "doc": { + "brief": "get A channel mode", + "return": "return A channel mode, type is int", + "maixpy": "maix.image.Statistics.a_mode", + "py_doc": "get A channel mode\n\nReturns: return A channel mode, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int a_mode()", + "py_def": "def a_mode(self) -> int" + }, + "a_std_dev": { + "type": "func", + "name": "a_std_dev", + "doc": { + "brief": "get A channel std_dev", + "return": "return A channel std_dev, type is int", + "maixpy": "maix.image.Statistics.a_std_dev", + "py_doc": "get A channel std_dev\n\nReturns: return A channel std_dev, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int a_std_dev()", + "py_def": "def a_std_dev(self) -> int" + }, + "a_min": { + "type": "func", + "name": "a_min", + "doc": { + "brief": "get A channel min", + "return": "return A channel min, type is int", + "maixpy": "maix.image.Statistics.a_min", + "py_doc": "get A channel min\n\nReturns: return A channel min, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int a_min()", + "py_def": "def a_min(self) -> int" + }, + "a_max": { + "type": "func", + "name": "a_max", + "doc": { + "brief": "get A channel max", + "return": "return A channel max, type is int", + "maixpy": "maix.image.Statistics.a_max", + "py_doc": "get A channel max\n\nReturns: return A channel max, 
type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int a_max()", + "py_def": "def a_max(self) -> int" + }, + "a_lq": { + "type": "func", + "name": "a_lq", + "doc": { + "brief": "get A channel lq", + "return": "return A channel lq, type is int", + "maixpy": "maix.image.Statistics.a_lq", + "py_doc": "get A channel lq\n\nReturns: return A channel lq, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int a_lq()", + "py_def": "def a_lq(self) -> int" + }, + "a_uq": { + "type": "func", + "name": "a_uq", + "doc": { + "brief": "get A channel uq", + "return": "return A channel uq, type is int", + "maixpy": "maix.image.Statistics.a_uq", + "py_doc": "get A channel uq\n\nReturns: return A channel uq, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int a_uq()", + "py_def": "def a_uq(self) -> int" + }, + "b_mean": { + "type": "func", + "name": "b_mean", + "doc": { + "brief": "get B channel mean", + "return": "return B channel mean, type is int", + "maixpy": "maix.image.Statistics.b_mean", + "py_doc": "get B channel mean\n\nReturns: return B channel mean, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int b_mean()", + "py_def": "def b_mean(self) -> int" + }, + "b_median": { + "type": "func", + "name": "b_median", + "doc": { + "brief": "get B channel median", + "return": "return B channel median, type is int", + "maixpy": "maix.image.Statistics.b_median", + "py_doc": "get B channel median\n\nReturns: return B channel median, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int b_median()", + "py_def": "def b_median(self) -> int" + }, + "b_mode": { + "type": "func", + "name": "b_mode", + "doc": { + "brief": "get B channel mode", + "return": "return B channel mode, type is int", + "maixpy": "maix.image.Statistics.b_mode", + "py_doc": "get B channel mode\n\nReturns: return B channel mode, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int b_mode()", + "py_def": "def b_mode(self) -> int" + }, + "b_std_dev": { + "type": "func", + "name": "b_std_dev", + "doc": { + "brief": "get B channel std_dev", + "return": "return B channel std_dev, type is int", + "maixpy": "maix.image.Statistics.b_std_dev", + "py_doc": "get B channel std_dev\n\nReturns: return B channel std_dev, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int b_std_dev()", + "py_def": "def b_std_dev(self) -> int" + }, + "b_min": { + "type": "func", + "name": "b_min", + "doc": { + "brief": "get B channel min", + "return": "return B channel min, type is int", + "maixpy": "maix.image.Statistics.b_min", + "py_doc": "get B channel min\n\nReturns: return B channel min, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int b_min()", + "py_def": "def b_min(self) -> int" + }, + "b_max": { + "type": "func", + "name": "b_max", + "doc": { + "brief": "get B channel max", + "return": "return B channel max, type is int", + "maixpy": "maix.image.Statistics.b_max", + "py_doc": "get B channel max\n\nReturns: return B channel max, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int b_max()", + "py_def": "def b_max(self) -> int" + }, + "b_lq": { + "type": "func", + "name": "b_lq", + "doc": { + "brief": "get B channel lq", + "return": "return B channel lq, type is int", + "maixpy": "maix.image.Statistics.b_lq", + "py_doc": "get B channel lq\n\nReturns: return B 
channel lq, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int b_lq()", + "py_def": "def b_lq(self) -> int" + }, + "b_uq": { + "type": "func", + "name": "b_uq", + "doc": { + "brief": "get B channel uq", + "return": "return B channel uq, type is int", + "maixpy": "maix.image.Statistics.b_uq", + "py_doc": "get B channel uq\n\nReturns: return B channel uq, type is int\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int b_uq()", + "py_def": "def b_uq(self) -> int" + } + }, + "def": "class Statistics" + }, + "Displacement": { + "type": "class", + "name": "Displacement", + "doc": { + "brief": "Displacement class", + "maixpy": "maix.image.Displacement", + "py_doc": "Displacement class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Displacement", + "doc": { + "brief": "Displacement constructor", + "param": { + "x_translation": "The x_translation of the Displacement", + "y_translation": "The y_translation of the Displacement", + "rotation": "The rotation of the Displacement", + "scale": "The scale of the Displacement", + "response": "The response of the Displacement" + }, + "maixpy": "maix.image.Displacement.__init__", + "py_doc": "Displacement constructor\n\nArgs:\n - x_translation: The x_translation of the Displacement\n - y_translation: The y_translation of the Displacement\n - rotation: The rotation of the Displacement\n - scale: The scale of the Displacement\n - response: The response of the Displacement\n" + }, + "args": [ + [ + "float", + "x_translation", + null + ], + [ + "float", + "y_translation", + null + ], + [ + "float", + "rotation", + null + ], + [ + "float", + "scale", + null + ], + [ + "float", + "response", + null + ] + ], + "ret_type": null, + "static": false, + "def": "Displacement(float x_translation, float y_translation, float rotation, float scale, float response)", + "py_def": "def __init__(self, x_translation: float, y_translation: float, rotation: float, scale: float, response: float) -> None" + }, + "__getitem__": { + "type": "func", + "name": "__getitem__", + "doc": { + "brief": "Subscript operator", + "param": { + "index": "array index" + }, + "return": "int&", + "maixpy": "maix.image.Displacement.__getitem__", + "py_doc": "Subscript operator\n\nArgs:\n - index: array index\n\n\nReturns: int&\n" + }, + "args": [ + [ + "int", + "index", + null + ] + ], + "ret_type": "int&", + "static": false, + "def": "int &__getitem__(int index)", + "py_def": "def __getitem__(self, index: int) -> int" + }, + "x_translation": { + "type": "func", + "name": "x_translation", + "doc": { + "brief": "get x_translation of Displacement", + "return": "return x_translation of the Displacement, type is float", + "maixpy": "maix.image.Displacement.x_translation", + "py_doc": "get x_translation of Displacement\n\nReturns: return x_translation of the Displacement, type is float\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float x_translation()", + "py_def": "def x_translation(self) -> float" + }, + "y_translation": { + "type": "func", + "name": "y_translation", + "doc": { + "brief": "get y_translation of Displacement", + "return": "return y_translation of the Displacement, type is float", + "maixpy": "maix.image.Displacement.y_translation", + "py_doc": "get y_translation of Displacement\n\nReturns: return y_translation of the Displacement, type is float\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float y_translation()", + "py_def": "def y_translation(self) -> 
float" + }, + "rotation": { + "type": "func", + "name": "rotation", + "doc": { + "brief": "get rotation of Displacement", + "return": "return rotation of the Displacement, type is float", + "maixpy": "maix.image.Displacement.rotation", + "py_doc": "get rotation of Displacement\n\nReturns: return rotation of the Displacement, type is float\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float rotation()", + "py_def": "def rotation(self) -> float" + }, + "scale": { + "type": "func", + "name": "scale", + "doc": { + "brief": "get scale of Displacement", + "return": "return scale of the Displacement, type is float", + "maixpy": "maix.image.Displacement.scale", + "py_doc": "get scale of Displacement\n\nReturns: return scale of the Displacement, type is float\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float scale()", + "py_def": "def scale(self) -> float" + }, + "response": { + "type": "func", + "name": "response", + "doc": { + "brief": "get response of Displacement", + "return": "return response of the Displacement, type is float", + "maixpy": "maix.image.Displacement.response", + "py_doc": "get response of Displacement\n\nReturns: return response of the Displacement, type is float\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float response()", + "py_def": "def response(self) -> float" + } + }, + "def": "class Displacement" + }, + "Percentile": { + "type": "class", + "name": "Percentile", + "doc": { + "brief": "Percentile class", + "maixpy": "maix.image.Percentile", + "py_doc": "Percentile class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Percentile", + "doc": { + "brief": "Percentile constructor", + "param": { + "l_value": "for grayscale image, it is grayscale percentile value (between 0 and 255).\nfor rgb888 image, it is l channel percentile value of lab (between 0 and 100).", + "a_value": "for rgb888 image, it is a channel percentile value of lab format(between -128 and 127).", + "b_value": "for rgb888 image, it is b channel percentile value of lab format(between -128 and 127)." 
+ }, + "maixpy": "maix.image.Percentile.__init__", + "py_doc": "Percentile constructor\n\nArgs:\n - l_value: for grayscale image, it is grayscale percentile value (between 0 and 255).\nfor rgb888 image, it is l channel percentile value of lab (between 0 and 100).\n - a_value: for rgb888 image, it is a channel percentile value of lab format(between -128 and 127).\n - b_value: for rgb888 image, it is b channel percentile value of lab format(between -128 and 127).\n" + }, + "args": [ + [ + "int", + "l_value", + null + ], + [ + "int", + "a_value", + "0" + ], + [ + "int", + "b_value", + "0" + ] + ], + "ret_type": null, + "static": false, + "def": "Percentile(int l_value, int a_value = 0, int b_value = 0)", + "py_def": "def __init__(self, l_value: int, a_value: int = 0, b_value: int = 0) -> None" + }, + "__getitem__": { + "type": "func", + "name": "__getitem__", + "doc": { + "brief": "Subscript operator", + "maixpy": "maix.image.Percentile.__getitem__", + "py_doc": "Subscript operator" + }, + "args": [ + [ + "int", + "index", + null + ] + ], + "ret_type": "int&", + "static": false, + "def": "int &__getitem__(int index)", + "py_def": "def __getitem__(self, index: int) -> int" + }, + "value": { + "type": "func", + "name": "value", + "doc": { + "brief": "Return the grayscale percentile value (between 0 and 255).", + "return": "returns grayscale percentile value", + "maixpy": "maix.image.Percentile.value", + "py_doc": "Return the grayscale percentile value (between 0 and 255).\n\nReturns: returns grayscale percentile value\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int value()", + "py_def": "def value(self) -> int" + }, + "l_value": { + "type": "func", + "name": "l_value", + "doc": { + "brief": "Return the l channel percentile value of lab format (between 0 and 100).", + "return": "returns l channel percentile value", + "maixpy": "maix.image.Percentile.l_value", + "py_doc": "Return the l channel percentile value of lab format (between 0 and 100).\n\nReturns: returns l channel percentile value\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int l_value()", + "py_def": "def l_value(self) -> int" + }, + "a_value": { + "type": "func", + "name": "a_value", + "doc": { + "brief": "Return the a channel percentile value of lab format (between -128 and 127).", + "return": "returns a channel percentile value", + "maixpy": "maix.image.Percentile.a_value", + "py_doc": "Return the a channel percentile value of lab format (between -128 and 127).\n\nReturns: returns a channel percentile value\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int a_value()", + "py_def": "def a_value(self) -> int" + }, + "b_value": { + "type": "func", + "name": "b_value", + "doc": { + "brief": "Return the b channel percentile value of lab format (between -128 and 127).", + "return": "returns b channel percentile value", + "maixpy": "maix.image.Percentile.b_value", + "py_doc": "Return the b channel percentile value of lab format (between -128 and 127).\n\nReturns: returns b channel percentile value\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int b_value()", + "py_def": "def b_value(self) -> int" + } + }, + "def": "class Percentile" + }, + "Threshold": { + "type": "class", + "name": "Threshold", + "doc": { + "brief": "Threshold class", + "maixpy": "maix.image.Threshold", + "py_doc": "Threshold class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Threshold", + "doc": { + "brief": "Threshold constructor", + "param": { 
+ "l_value": "for grayscale image, it is grayscale threshold value (between 0 and 255).\nfor rgb888 image, it is l channel threshold value of lab (between 0 and 100).", + "a_value": "for rgb888 image, it is a channel threshold value of lab format(between -128 and 127).", + "b_value": "for rgb888 image, it is b channel threshold value of lab format(between -128 and 127)." + }, + "maixpy": "maix.image.Threshold.__init__", + "py_doc": "Threshold constructor\n\nArgs:\n - l_value: for grayscale image, it is grayscale threshold value (between 0 and 255).\nfor rgb888 image, it is l channel threshold value of lab (between 0 and 100).\n - a_value: for rgb888 image, it is a channel threshold value of lab format(between -128 and 127).\n - b_value: for rgb888 image, it is b channel threshold value of lab format(between -128 and 127).\n" + }, + "args": [ + [ + "int", + "l_value", + null + ], + [ + "int", + "a_value", + "0" + ], + [ + "int", + "b_value", + "0" + ] + ], + "ret_type": null, + "static": false, + "def": "Threshold(int l_value, int a_value = 0, int b_value = 0)", + "py_def": "def __init__(self, l_value: int, a_value: int = 0, b_value: int = 0) -> None" + }, + "__getitem__": { + "type": "func", + "name": "__getitem__", + "doc": { + "brief": "Subscript operator", + "maixpy": "maix.image.Threshold.__getitem__", + "py_doc": "Subscript operator" + }, + "args": [ + [ + "int", + "index", + null + ] + ], + "ret_type": "int&", + "static": false, + "def": "int &__getitem__(int index)", + "py_def": "def __getitem__(self, index: int) -> int" + }, + "value": { + "type": "func", + "name": "value", + "doc": { + "brief": "Return the grayscale threshold value (between 0 and 255).", + "return": "returns grayscale threshold value", + "maixpy": "maix.image.Threshold.value", + "py_doc": "Return the grayscale threshold value (between 0 and 255).\n\nReturns: returns grayscale threshold value\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int value()", + "py_def": "def value(self) -> int" + }, + "l_value": { + "type": "func", + "name": "l_value", + "doc": { + "brief": "Return the l channel threshold value of lab format (between 0 and 100).", + "return": "returns l channel percentile value", + "maixpy": "maix.image.Threshold.l_value", + "py_doc": "Return the l channel threshold value of lab format (between 0 and 100).\n\nReturns: returns l channel percentile value\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int l_value()", + "py_def": "def l_value(self) -> int" + }, + "a_value": { + "type": "func", + "name": "a_value", + "doc": { + "brief": "Return the a channel threshold value of lab format (between -128 and 127).", + "return": "returns a channel percentile value", + "maixpy": "maix.image.Threshold.a_value", + "py_doc": "Return the a channel threshold value of lab format (between -128 and 127).\n\nReturns: returns a channel percentile value\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int a_value()", + "py_def": "def a_value(self) -> int" + }, + "b_value": { + "type": "func", + "name": "b_value", + "doc": { + "brief": "Return the b channel threshold value of lab format (between -128 and 127).", + "return": "returns b channel percentile value", + "maixpy": "maix.image.Threshold.b_value", + "py_doc": "Return the b channel threshold value of lab format (between -128 and 127).\n\nReturns: returns b channel percentile value\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int b_value()", + "py_def": "def b_value(self) 
-> int" + } + }, + "def": "class Threshold" + }, + "Histogram": { + "type": "class", + "name": "Histogram", + "doc": { + "brief": "Histogram class", + "maixpy": "maix.image.Histogram", + "py_doc": "Histogram class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Histogram", + "doc": { + "brief": "Histogram constructor", + "param": { + "l_value": "for grayscale image, it is grayscale threshold value list (the range of element values in the list is 0 and 255).\nfor rgb888 image, it is l channel threshold value list of lab (the range of element values in the list is 0 and 100).", + "a_value": "for rgb888 image, it is a channel threshold value list of lab format(the range of element values in the list is -128 and 127).", + "b_value": "for rgb888 image, it is b channel threshold value list of lab format(the range of element values in the list is -128 and 127).", + "format": "format of the source image" + }, + "maixpy": "maix.image.Histogram.__init__", + "py_doc": "Histogram constructor\n\nArgs:\n - l_value: for grayscale image, it is grayscale threshold value list (the range of element values in the list is 0 and 255).\nfor rgb888 image, it is l channel threshold value list of lab (the range of element values in the list is 0 and 100).\n - a_value: for rgb888 image, it is a channel threshold value list of lab format(the range of element values in the list is -128 and 127).\n - b_value: for rgb888 image, it is b channel threshold value list of lab format(the range of element values in the list is -128 and 127).\n - format: format of the source image\n" + }, + "args": [ + [ + "std::vector", + "l_bin", + null + ], + [ + "std::vector", + "a_bin", + null + ], + [ + "std::vector", + "b_bin", + null + ], + [ + "image::Format", + "format", + "image::Format::FMT_RGB888" + ] + ], + "ret_type": null, + "static": false, + "def": "Histogram(std::vector l_bin, std::vector a_bin, std::vector b_bin, image::Format format = image::Format::FMT_RGB888)", + "py_def": "def __init__(self, l_bin: list[float], a_bin: list[float], b_bin: list[float], format: Format = ...) -> None" + }, + "__getitem__": { + "type": "func", + "name": "__getitem__", + "doc": { + "brief": "Subscript operator", + "maixpy": "maix.image.Histogram.__getitem__", + "py_doc": "Subscript operator" + }, + "args": [ + [ + "int", + "index", + null + ] + ], + "ret_type": "int&", + "static": false, + "def": "int &__getitem__(int index)", + "py_def": "def __getitem__(self, index: int) -> int" + }, + "bins": { + "type": "func", + "name": "bins", + "doc": { + "brief": "Returns a list of floats for the grayscale histogram.", + "maixpy": "maix.image.Histogram.bins", + "py_doc": "Returns a list of floats for the grayscale histogram." + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector bins()", + "py_def": "def bins(self) -> list[float]" + }, + "l_bins": { + "type": "func", + "name": "l_bins", + "doc": { + "brief": "Returns a list of floats for the RGB565 histogram LAB L channel.", + "maixpy": "maix.image.Histogram.l_bins", + "py_doc": "Returns a list of floats for the RGB565 histogram LAB L channel." 
+ }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector l_bins()", + "py_def": "def l_bins(self) -> list[float]" + }, + "a_bins": { + "type": "func", + "name": "a_bins", + "doc": { + "brief": "Returns a list of floats for the RGB565 histogram LAB A channel.", + "maixpy": "maix.image.Histogram.a_bins", + "py_doc": "Returns a list of floats for the RGB565 histogram LAB A channel." + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector a_bins()", + "py_def": "def a_bins(self) -> list[float]" + }, + "b_bins": { + "type": "func", + "name": "b_bins", + "doc": { + "brief": "Returns a list of floats for the RGB565 histogram LAB B channel.", + "maixpy": "maix.image.Histogram.b_bins", + "py_doc": "Returns a list of floats for the RGB565 histogram LAB B channel." + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector b_bins()", + "py_def": "def b_bins(self) -> list[float]" + }, + "get_percentile": { + "type": "func", + "name": "get_percentile", + "doc": { + "brief": "Computes the CDF of the histogram channels and returns a image::Percentile object", + "param": { + "percentile": "the values of the histogram at the passed in percentile (0.0 - 1.0) (float).\nSo, if you pass in 0.1 this method will tell you (going from left-to-right in the histogram)\nwhat bin when summed into an accumulator caused the accumulator to cross 0.1. This is useful\nto determine min (with 0.1) and max (with 0.9) of a color distribution without outlier effects\nruining your results for adaptive color tracking." + }, + "return": "image::Percentile object", + "maixpy": "maix.image.Histogram.get_percentile", + "py_doc": "Computes the CDF of the histogram channels and returns a image::Percentile object\n\nArgs:\n - percentile: the values of the histogram at the passed in percentile (0.0 - 1.0) (float).\nSo, if you pass in 0.1 this method will tell you (going from left-to-right in the histogram)\nwhat bin when summed into an accumulator caused the accumulator to cross 0.1. 
This is useful\nto determine min (with 0.1) and max (with 0.9) of a color distribution without outlier effects\nruining your results for adaptive color tracking.\n\n\nReturns: image::Percentile object\n" + }, + "args": [ + [ + "float", + "percentile", + null + ] + ], + "ret_type": "image::Percentile", + "static": false, + "def": "image::Percentile get_percentile(float percentile)", + "py_def": "def get_percentile(self, percentile: float) -> Percentile" + }, + "get_threshold": { + "type": "func", + "name": "get_threshold", + "doc": { + "brief": "Uses Otsu\u2019s Method to compute the optimal threshold values that split the histogram into two halves for each channel of the histogram and returns a image::Threshold object.", + "return": "image::Threshold object", + "maixpy": "maix.image.Histogram.get_threshold", + "py_doc": "Uses Otsu\u2019s Method to compute the optimal threshold values that split the histogram into two halves for each channel of the histogram and returns a image::Threshold object.\n\nReturns: image::Threshold object\n" + }, + "args": [], + "ret_type": "image::Threshold", + "static": false, + "def": "image::Threshold get_threshold()", + "py_def": "def get_threshold(self) -> Threshold" + }, + "get_statistics": { + "type": "func", + "name": "get_statistics", + "doc": { + "brief": "Computes the mean, median, mode, standard deviation, min, max, lower quartile, and upper quartile of each color channel in the histogram and returns a image::Statistics object.", + "return": "image::Statistics object", + "maixpy": "maix.image.Histogram.get_statistics", + "py_doc": "Computes the mean, median, mode, standard deviation, min, max, lower quartile, and upper quartile of each color channel in the histogram and returns a image::Statistics object.\n\nReturns: image::Statistics object\n" + }, + "args": [], + "ret_type": "image::Statistics", + "static": false, + "def": "image::Statistics get_statistics()", + "py_def": "def get_statistics(self) -> Statistics" + } + }, + "def": "class Histogram" + }, + "LBPKeyPoint": { + "type": "class", + "name": "LBPKeyPoint", + "doc": { + "brief": "LBPKeyPoint class", + "maixpy": "maix.image.LBPKeyPoint", + "py_doc": "LBPKeyPoint class" + }, + "members": { + "__init__": { + "type": "func", + "name": "LBPKeyPoint", + "doc": { + "brief": "LBPKeyPoint constructor", + "param": { + "data": "The data of the LBPKeyPoint" + }, + "maixpy": "maix.image.LBPKeyPoint.__init__", + "py_doc": "LBPKeyPoint constructor\n\nArgs:\n - data: The data of the LBPKeyPoint\n" + }, + "args": [ + [ + "std::valarray &", + "data", + null + ] + ], + "ret_type": null, + "static": false, + "def": "LBPKeyPoint(std::valarray &data)", + "py_def": "def __init__(self, data: list[int]) -> None" + } + }, + "def": "class LBPKeyPoint" + }, + "KeyPoint": { + "type": "class", + "name": "KeyPoint", + "doc": { + "brief": "KeyPoint class", + "maixpy": "maix.image.KeyPoint", + "py_doc": "KeyPoint class" + }, + "members": { + "__init__": { + "type": "func", + "name": "KeyPoint", + "doc": { + "brief": "KeyPoint constructor", + "param": { + "x": "The x of the KeyPoint", + "y": "The y of the KeyPoint", + "score": "The score of the KeyPoint", + "octave": "The octave of the KeyPoint", + "angle": "The angle of the KeyPoint", + "matched": "The matched of the KeyPoint", + "desc": "The desc of the KeyPoint" + }, + "maixpy": "maix.image.KeyPoint.__init__", + "py_doc": "KeyPoint constructor\n\nArgs:\n - x: The x of the KeyPoint\n - y: The y of the KeyPoint\n - score: The score of the KeyPoint\n - octave: The octave of 
the KeyPoint\n - angle: The angle of the KeyPoint\n - matched: The matched of the KeyPoint\n - desc: The desc of the KeyPoint\n" + }, + "args": [ + [ + "uint16_t", + "x", + null + ], + [ + "uint16_t", + "y", + null + ], + [ + "uint16_t", + "score", + null + ], + [ + "uint16_t", + "octave", + null + ], + [ + "uint16_t", + "angle", + null + ], + [ + "uint16_t", + "matched", + null + ], + [ + "std::vector &", + "desc", + null + ] + ], + "ret_type": null, + "static": false, + "def": "KeyPoint(uint16_t x, uint16_t y, uint16_t score, uint16_t octave, uint16_t angle, uint16_t matched, std::vector &desc)", + "py_def": "def __init__(self, x: int, y: int, score: int, octave: int, angle: int, matched: int, desc: list[int]) -> None" + } + }, + "def": "class KeyPoint" + }, + "KPTMatch": { + "type": "class", + "name": "KPTMatch", + "doc": { + "brief": "KPTMatch class", + "maixpy": "maix.image.KPTMatch", + "py_doc": "KPTMatch class" + }, + "members": { + "__init__": { + "type": "func", + "name": "KPTMatch", + "doc": { + "brief": "KPTMatch constructor", + "param": { + "cx": "The cx of the KPTMatch", + "cy": "The cy of the KPTMatch", + "x": "The x of the KPTMatch", + "y": "The y of the KPTMatch", + "w": "The w of the KPTMatch", + "h": "The h of the KPTMatch", + "score": "The score of the KPTMatch", + "theta": "The theta of the KPTMatch", + "match": "The match of the KPTMatch" + }, + "maixpy": "maix.image.KPTMatch.__init__", + "py_doc": "KPTMatch constructor\n\nArgs:\n - cx: The cx of the KPTMatch\n - cy: The cy of the KPTMatch\n - x: The x of the KPTMatch\n - y: The y of the KPTMatch\n - w: The w of the KPTMatch\n - h: The h of the KPTMatch\n - score: The score of the KPTMatch\n - theta: The theta of the KPTMatch\n - match: The match of the KPTMatch\n" + }, + "args": [ + [ + "int", + "cx", + null + ], + [ + "int", + "cy", + null + ], + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "int", + "w", + null + ], + [ + "int", + "h", + null + ], + [ + "int", + "score", + null + ], + [ + "int", + "theta", + null + ], + [ + "int", + "match", + null + ] + ], + "ret_type": null, + "static": false, + "def": "KPTMatch(int cx, int cy, int x, int y, int w, int h, int score, int theta, int match)", + "py_def": "def __init__(self, cx: int, cy: int, x: int, y: int, w: int, h: int, score: int, theta: int, match: int) -> None" + } + }, + "def": "class KPTMatch" + }, + "ORBKeyPoint": { + "type": "class", + "name": "ORBKeyPoint", + "doc": { + "brief": "ORBKeyPoint class", + "maixpy": "maix.image.ORBKeyPoint", + "py_doc": "ORBKeyPoint class" + }, + "members": { + "__init__": { + "type": "func", + "name": "ORBKeyPoint", + "doc": { + "brief": "ORBKeyPoint constructor", + "param": { + "data": "The data of the ORBKeyPoint", + "threshold": "The threshold of the ORBKeyPoint", + "normalized": "The normalized of the ORBKeyPoint" + }, + "maixpy": "maix.image.ORBKeyPoint.__init__", + "py_doc": "ORBKeyPoint constructor\n\nArgs:\n - data: The data of the ORBKeyPoint\n - threshold: The threshold of the ORBKeyPoint\n - normalized: The normalized of the ORBKeyPoint\n" + }, + "args": [ + [ + "std::vector &", + "data", + null + ], + [ + "int", + "threshold", + null + ], + [ + "bool", + "normalized", + null + ] + ], + "ret_type": null, + "static": false, + "def": "ORBKeyPoint(std::vector &data, int threshold, bool normalized)", + "py_def": "def __init__(self, data: list[KeyPoint], threshold: int, normalized: bool) -> None" + }, + "get_data": { + "type": "func", + "name": "get_data", + "doc": { + "brief": "get data of 
ORBKeyPoint", + "return": "return data of the ORBKeyPoint, type is std::vector", + "maixpy": "maix.image.ORBKeyPoint.get_data", + "py_doc": "get data of ORBKeyPoint\n\nReturns: return data of the ORBKeyPoint, type is std::vector\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector get_data()", + "py_def": "def get_data(self) -> list[KeyPoint]" + } + }, + "def": "class ORBKeyPoint" + }, + "HaarCascade": { + "type": "class", + "name": "HaarCascade", + "doc": { + "brief": "HaarCascade class", + "maixpy": "maix.image.HaarCascade", + "py_doc": "HaarCascade class" + }, + "members": { + "__init__": { + "type": "func", + "name": "HaarCascade", + "doc": { + "brief": "HaarCascade constructor", + "param": { + "data": "The data of the HaarCascade", + "threshold": "The threshold of the HaarCascade", + "normalized": "The normalized of the HaarCascade" + }, + "maixpy": "maix.image.HaarCascade.__init__", + "py_doc": "HaarCascade constructor\n\nArgs:\n - data: The data of the HaarCascade\n - threshold: The threshold of the HaarCascade\n - normalized: The normalized of the HaarCascade\n" + }, + "args": [], + "ret_type": null, + "static": false, + "def": "HaarCascade()", + "py_def": "def __init__(self) -> None" + } + }, + "def": "class HaarCascade" + }, + "Color": { + "type": "class", + "name": "Color", + "doc": { + "brief": "Color class", + "maixpy": "maix.image.Color", + "py_doc": "Color class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Color", + "doc": { + "brief": "Color constructor", + "param": { + "alpha": "alpha channel, value range: 0 ~ 1" + }, + "maixpy": "maix.image.Color.__init__", + "py_doc": "Color constructor\n\nArgs:\n - alpha: alpha channel, value range: 0 ~ 1\n" + }, + "args": [ + [ + "uint8_t", + "ch1", + null + ], + [ + "uint8_t", + "ch2", + "0" + ], + [ + "uint8_t", + "ch3", + "0" + ], + [ + "float", + "alpha", + "0" + ], + [ + "image::Format", + "format", + "image::FMT_GRAYSCALE" + ] + ], + "ret_type": null, + "static": false, + "def": "Color(uint8_t ch1, uint8_t ch2 = 0, uint8_t ch3 = 0, float alpha = 0, image::Format format = image::FMT_GRAYSCALE)", + "py_def": "def __init__(self, ch1: int, ch2: int = 0, ch3: int = 0, alpha: float = 0, format: Format = ...) 
-> None" + }, + "r": { + "type": "var", + "name": "r", + "doc": { + "brief": "Color red channel", + "maixpy": "maix.image.Color.r", + "py_doc": "Color red channel" + }, + "value": null, + "static": false, + "readonly": false, + "def": "uint8_t r" + }, + "g": { + "type": "var", + "name": "g", + "doc": { + "brief": "Color green channel", + "maixpy": "maix.image.Color.g", + "py_doc": "Color green channel" + }, + "value": null, + "static": false, + "readonly": false, + "def": "uint8_t g" + }, + "b": { + "type": "var", + "name": "b", + "doc": { + "brief": "Color blue channel", + "maixpy": "maix.image.Color.b", + "py_doc": "Color blue channel" + }, + "value": null, + "static": false, + "readonly": false, + "def": "uint8_t b" + }, + "alpha": { + "type": "var", + "name": "alpha", + "doc": { + "brief": "Color alpha channel, value from 0.0 to 1.0, float value", + "maixpy": "maix.image.Color.alpha", + "py_doc": "Color alpha channel, value from 0.0 to 1.0, float value" + }, + "value": null, + "static": false, + "readonly": false, + "def": "float alpha" + }, + "gray": { + "type": "var", + "name": "gray", + "doc": { + "brief": "Color gray channel", + "maixpy": "maix.image.Color.gray", + "py_doc": "Color gray channel" + }, + "value": null, + "static": false, + "readonly": false, + "def": "uint8_t gray" + }, + "format": { + "type": "var", + "name": "format", + "doc": { + "brief": "Color format", + "maixpy": "maix.image.Color.format", + "py_doc": "Color format" + }, + "value": null, + "static": false, + "readonly": false, + "def": "image::Format format" + }, + "hex": { + "type": "func", + "name": "hex", + "doc": { + "brief": "Get color's hex value", + "maixpy": "maix.image.Color.hex", + "py_doc": "Get color's hex value" + }, + "args": [], + "ret_type": "uint32_t", + "static": false, + "def": "uint32_t hex()", + "py_def": "def hex(self) -> int" + }, + "from_rgb": { + "type": "func", + "name": "from_rgb", + "doc": { + "brief": "Create Color object from RGB channels", + "maixpy": "maix.image.Color.from_rgb", + "py_doc": "Create Color object from RGB channels" + }, + "args": [ + [ + "uint8_t", + "r", + null + ], + [ + "uint8_t", + "g", + null + ], + [ + "uint8_t", + "b", + null + ] + ], + "ret_type": "image::Color", + "static": true, + "def": "static image::Color from_rgb(uint8_t r, uint8_t g, uint8_t b)", + "py_def": "def from_rgb(r: int, g: int, b: int) -> Color" + }, + "from_bgr": { + "type": "func", + "name": "from_bgr", + "doc": { + "brief": "Create Color object from BGR channels", + "maixpy": "maix.image.Color.from_bgr", + "py_doc": "Create Color object from BGR channels" + }, + "args": [ + [ + "uint8_t", + "b", + null + ], + [ + "uint8_t", + "g", + null + ], + [ + "uint8_t", + "r", + null + ] + ], + "ret_type": "image::Color", + "static": true, + "def": "static image::Color from_bgr(uint8_t b, uint8_t g, uint8_t r)", + "py_def": "def from_bgr(b: int, g: int, r: int) -> Color" + }, + "from_gray": { + "type": "func", + "name": "from_gray", + "doc": { + "brief": "Create Color object from gray channel", + "maixpy": "maix.image.Color.from_gray", + "py_doc": "Create Color object from gray channel" + }, + "args": [ + [ + "uint8_t", + "gray", + null + ] + ], + "ret_type": "image::Color", + "static": true, + "def": "static image::Color from_gray(uint8_t gray)", + "py_def": "def from_gray(gray: int) -> Color" + }, + "from_rgba": { + "type": "func", + "name": "from_rgba", + "doc": { + "brief": "Create Color object from RGBA channels", + "param": { + "alpha": "alpha channel, float value, value range: 0 ~ 1" + }, + 
"maixpy": "maix.image.Color.from_rgba", + "py_doc": "Create Color object from RGBA channels\n\nArgs:\n - alpha: alpha channel, float value, value range: 0 ~ 1\n" + }, + "args": [ + [ + "uint8_t", + "r", + null + ], + [ + "uint8_t", + "g", + null + ], + [ + "uint8_t", + "b", + null + ], + [ + "float", + "alpha", + null + ] + ], + "ret_type": "image::Color", + "static": true, + "def": "static image::Color from_rgba(uint8_t r, uint8_t g, uint8_t b, float alpha)", + "py_def": "def from_rgba(r: int, g: int, b: int, alpha: float) -> Color" + }, + "from_bgra": { + "type": "func", + "name": "from_bgra", + "doc": { + "brief": "Create Color object from BGRA channels", + "param": { + "alpha": "alpha channel, float value, value range: 0 ~ 1" + }, + "maixpy": "maix.image.Color.from_bgra", + "py_doc": "Create Color object from BGRA channels\n\nArgs:\n - alpha: alpha channel, float value, value range: 0 ~ 1\n" + }, + "args": [ + [ + "uint8_t", + "b", + null + ], + [ + "uint8_t", + "g", + null + ], + [ + "uint8_t", + "r", + null + ], + [ + "float", + "alpha", + null + ] + ], + "ret_type": "image::Color", + "static": true, + "def": "static image::Color from_bgra(uint8_t b, uint8_t g, uint8_t r, float alpha)", + "py_def": "def from_bgra(b: int, g: int, r: int, alpha: float) -> Color" + }, + "from_hex": { + "type": "func", + "name": "from_hex", + "doc": { + "brief": "Create Color object from hex value", + "param": { + "hex": "hex value, e.g. 0x0000FF00, lower address if first channel", + "format": "color format, @see image::Format" + }, + "maixpy": "maix.image.Color.from_hex", + "py_doc": "Create Color object from hex value\n\nArgs:\n - hex: hex value, e.g. 0x0000FF00, lower address if first channel\n - format: color format, @see image::Format\n" + }, + "args": [ + [ + "uint32_t", + "hex", + null + ], + [ + "image::Format &", + "format", + null + ] + ], + "ret_type": "image::Color", + "static": true, + "def": "static image::Color from_hex(uint32_t hex, image::Format &format)", + "py_def": "def from_hex(hex: int, format: Format) -> Color" + }, + "to_format": { + "type": "func", + "name": "to_format", + "doc": { + "brief": "Convert Color format", + "param": { + "format": "format want to convert to, @see image::Format, only support RGB888, BGR888, RGBA8888, BGRA8888, GRAYSCALE." + }, + "maixpy": "maix.image.Color.to_format", + "py_doc": "Convert Color format\n\nArgs:\n - format: format want to convert to, @see image::Format, only support RGB888, BGR888, RGBA8888, BGRA8888, GRAYSCALE.\n" + }, + "args": [ + [ + "const image::Format &", + "format", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void to_format(const image::Format &format)", + "py_def": "def to_format(self, format: Format) -> None" + }, + "to_format2": { + "type": "func", + "name": "to_format2", + "doc": { + "brief": "Convert color format and return a new Color object", + "param": { + "format": "format want to convert to, @see image::Format, only support RGB888, BGR888, RGBA8888, BGRA8888, GRAYSCALE." 
+ }, + "return": "new Color object, you need to delete it manually in C++.", + "maixpy": "maix.image.Color.to_format2", + "py_doc": "Convert color format and return a new Color object\n\nArgs:\n - format: format want to convert to, @see image::Format, only support RGB888, BGR888, RGBA8888, BGRA8888, GRAYSCALE.\n\n\nReturns: new Color object, you need to delete it manually in C++.\n" + }, + "args": [ + [ + "const image::Format &", + "format", + null + ] + ], + "ret_type": "image::Color*", + "static": false, + "def": "image::Color *to_format2(const image::Format &format)", + "py_def": "def to_format2(self, format: Format) -> Color" + } + }, + "def": "class Color" + }, + "COLOR_WHITE": { + "type": "var", + "name": "", + "doc": { + "brief": "Predefined color white", + "maixpy": "maix.image.COLOR_WHITE", + "py_doc": "Predefined color white" + }, + "value": "image::Color::from_rgb(255, 255, 255)", + "static": false, + "readonly": true, + "def": "const image::Color COLOR_WHITE = image::Color::from_rgb(255, 255, 255)" + }, + "COLOR_BLACK": { + "type": "var", + "name": "", + "doc": { + "brief": "Predefined color black", + "maixpy": "maix.image.COLOR_BLACK", + "py_doc": "Predefined color black" + }, + "value": "image::Color::from_rgb(0, 0, 0)", + "static": false, + "readonly": true, + "def": "const image::Color COLOR_BLACK = image::Color::from_rgb(0, 0, 0)" + }, + "COLOR_RED": { + "type": "var", + "name": "", + "doc": { + "brief": "Predefined color red", + "maixpy": "maix.image.COLOR_RED", + "py_doc": "Predefined color red" + }, + "value": "image::Color::from_rgb(255, 0, 0)", + "static": false, + "readonly": true, + "def": "const image::Color COLOR_RED = image::Color::from_rgb(255, 0, 0)" + }, + "COLOR_GREEN": { + "type": "var", + "name": "", + "doc": { + "brief": "Predefined color green", + "maixpy": "maix.image.COLOR_GREEN", + "py_doc": "Predefined color green" + }, + "value": "image::Color::from_rgb(0, 255, 0)", + "static": false, + "readonly": true, + "def": "const image::Color COLOR_GREEN = image::Color::from_rgb(0, 255, 0)" + }, + "COLOR_BLUE": { + "type": "var", + "name": "", + "doc": { + "brief": "Predefined color blue", + "maixpy": "maix.image.COLOR_BLUE", + "py_doc": "Predefined color blue" + }, + "value": "image::Color::from_rgb(0, 0, 255)", + "static": false, + "readonly": true, + "def": "const image::Color COLOR_BLUE = image::Color::from_rgb(0, 0, 255)" + }, + "COLOR_YELLOW": { + "type": "var", + "name": "", + "doc": { + "brief": "Predefined color yellow", + "maixpy": "maix.image.COLOR_YELLOW", + "py_doc": "Predefined color yellow" + }, + "value": "image::Color::from_rgb(255, 255, 0)", + "static": false, + "readonly": true, + "def": "const image::Color COLOR_YELLOW = image::Color::from_rgb(255, 255, 0)" + }, + "COLOR_PURPLE": { + "type": "var", + "name": "", + "doc": { + "brief": "Predefined color purple", + "maixpy": "maix.image.COLOR_PURPLE", + "py_doc": "Predefined color purple" + }, + "value": "image::Color::from_rgb(143, 0, 255)", + "static": false, + "readonly": true, + "def": "const image::Color COLOR_PURPLE = image::Color::from_rgb(143, 0, 255)" + }, + "COLOR_ORANGE": { + "type": "var", + "name": "", + "doc": { + "brief": "Predefined color orange", + "maixpy": "maix.image.COLOR_ORANGE", + "py_doc": "Predefined color orange" + }, + "value": "image::Color::from_rgb(255, 127, 0)", + "static": false, + "readonly": true, + "def": "const image::Color COLOR_ORANGE = image::Color::from_rgb(255, 127, 0)" + }, + "COLOR_GRAY": { + "type": "var", + "name": "", + "doc": { + "brief": 
"Predefined color gray", + "maixpy": "maix.image.COLOR_GRAY", + "py_doc": "Predefined color gray" + }, + "value": "image::Color::from_rgb(127, 127, 127)", + "static": false, + "readonly": true, + "def": "const image::Color COLOR_GRAY = image::Color::from_rgb(127, 127, 127)" + }, + "resize_map_pos": { + "type": "func", + "name": "resize_map_pos", + "doc": { + "brief": "map point position or rectangle position from one image size to another image size(resize)", + "param": { + "int": "h_out target image height", + "fit": "resize method, see maix.image.Fit", + "x": "original point x, or rectagle left-top point's x", + "y": "original point y, or rectagle left-top point's y", + "w": "original rectagle width, can be -1 if not use this arg, default -1.", + "h": "original rectagle height, can be -1 if not use this arg, default -1." + }, + "return": "list type, [x, y] if map point, [x, y, w, h] if resize rectangle.", + "maixpy": "maix.image.resize_map_pos", + "py_doc": "map point position or rectangle position from one image size to another image size(resize)\n\nArgs:\n - int: h_out target image height\n - fit: resize method, see maix.image.Fit\n - x: original point x, or rectagle left-top point's x\n - y: original point y, or rectagle left-top point's y\n - w: original rectagle width, can be -1 if not use this arg, default -1.\n - h: original rectagle height, can be -1 if not use this arg, default -1.\n\n\nReturns: list type, [x, y] if map point, [x, y, w, h] if resize rectangle.\n" + }, + "args": [ + [ + "int", + "w_in", + null + ], + [ + "int", + "h_in", + null + ], + [ + "int", + "w_out", + null + ], + [ + "int", + "h_out", + null + ], + [ + "image::Fit", + "fit", + null + ], + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "int", + "w", + "-1" + ], + [ + "int", + "h", + "-1" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector resize_map_pos(int w_in, int h_in, int w_out, int h_out, image::Fit fit, int x, int y, int w = -1, int h = -1)", + "overload": [ + { + "type": "func", + "name": "resize_map_pos", + "doc": { + "brief": "map point position or rectangle position from this image size to another image size(resize)", + "param": { + "int": "h_out target image height", + "fit": "resize method, see maix.image.Fit", + "x": "original point x, or rectagle left-top point's x", + "y": "original point y, or rectagle left-top point's y", + "w": "original rectagle width, can be -1 if not use this arg, default -1.", + "h": "original rectagle height, can be -1 if not use this arg, default -1." 
+ }, + "return": "list type, [x, y] if map point, [x, y, w, h] if resize rectangle.", + "maixpy": "maix.image.resize_map_pos", + "py_doc": "map point position or rectangle position from this image size to another image size(resize)\n\nArgs:\n - int: h_out target image height\n - fit: resize method, see maix.image.Fit\n - x: original point x, or rectagle left-top point's x\n - y: original point y, or rectagle left-top point's y\n - w: original rectagle width, can be -1 if not use this arg, default -1.\n - h: original rectagle height, can be -1 if not use this arg, default -1.\n\n\nReturns: list type, [x, y] if map point, [x, y, w, h] if resize rectangle.\n" + }, + "args": [ + [ + "int", + "w_out", + null + ], + [ + "int", + "h_out", + null + ], + [ + "image::Fit", + "fit", + null + ], + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "int", + "w", + "-1" + ], + [ + "int", + "h", + "-1" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector resize_map_pos(int w_out, int h_out, image::Fit fit, int x, int y, int w = -1, int h = -1)" + } + ], + "py_def": "def resize_map_pos(w_in: int, h_in: int, w_out: int, h_out: int, fit: Fit, x: int, y: int, w: int = -1, h: int = -1) -> list[int]" + }, + "resize_map_pos_reverse": { + "type": "func", + "name": "resize_map_pos_reverse", + "doc": { + "brief": "reverse resize_map_pos method, when we call image.resize method resiz image 'a' to image 'b', we want to known the original position on 'a' whith a knew point on 'b'", + "param": { + "int": "h_out image height after resized", + "fit": "resize method, see maix.image.Fit", + "x": "point on resized image x, or rectagle left-top point's x", + "y": "original point y, or rectagle left-top point's y", + "w": "original rectagle width, can be -1 if not use this arg, default -1.", + "h": "original rectagle height, can be -1 if not use this arg, default -1." 
+ }, + "return": "list type, [x, y] if map point, [x, y, w, h] if resize rectangle.", + "maixpy": "maix.image.resize_map_pos_reverse", + "py_doc": "reverse resize_map_pos method, when we call image.resize method resiz image 'a' to image 'b', we want to known the original position on 'a' whith a knew point on 'b'\n\nArgs:\n - int: h_out image height after resized\n - fit: resize method, see maix.image.Fit\n - x: point on resized image x, or rectagle left-top point's x\n - y: original point y, or rectagle left-top point's y\n - w: original rectagle width, can be -1 if not use this arg, default -1.\n - h: original rectagle height, can be -1 if not use this arg, default -1.\n\n\nReturns: list type, [x, y] if map point, [x, y, w, h] if resize rectangle.\n" + }, + "args": [ + [ + "int", + "w_in", + null + ], + [ + "int", + "h_in", + null + ], + [ + "int", + "w_out", + null + ], + [ + "int", + "h_out", + null + ], + [ + "image::Fit", + "fit", + null + ], + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "int", + "w", + "-1" + ], + [ + "int", + "h", + "-1" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector resize_map_pos_reverse(int w_in, int h_in, int w_out, int h_out, image::Fit fit, int x, int y, int w = -1, int h = -1)", + "py_def": "def resize_map_pos_reverse(w_in: int, h_in: int, w_out: int, h_out: int, fit: Fit, x: int, y: int, w: int = -1, h: int = -1) -> list[int]" + }, + "Image": { + "type": "class", + "name": "Image", + "doc": { + "brief": "Image class", + "maixpy": "maix.image.Image", + "py_doc": "Image class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Image", + "doc": { + "brief": "Image constructor", + "param": { + "width": "image width, should > 0", + "height": "image height, should > 0", + "format": "image format @see image::Format" + }, + "maixpy": "maix.image.Image.__init__", + "maixcdk": "maix.image.Image.Image", + "py_doc": "Image constructor\n\nArgs:\n - width: image width, should > 0\n - height: image height, should > 0\n - format: image format @see image::Format\n" + }, + "args": [ + [ + "int", + "width", + null + ], + [ + "int", + "height", + null + ], + [ + "image::Format", + "format", + "image::Format::FMT_RGB888" + ] + ], + "ret_type": null, + "static": false, + "def": "Image(int width, int height, image::Format format = image::Format::FMT_RGB888)", + "py_def": "def __init__(self, width: int, height: int, format: Format = ...) 
-> None" + }, + "format": { + "type": "func", + "name": "format", + "doc": { + "brief": "Get image's format", + "see": "image.Format", + "maixpy": "maix.image.Image.format", + "py_doc": "Get image's format" + }, + "args": [], + "ret_type": "image::Format", + "static": false, + "def": "image::Format format()", + "py_def": "def format(self) -> Format" + }, + "size": { + "type": "func", + "name": "size", + "doc": { + "brief": "Get image's size, [width, height]", + "maixpy": "maix.image.Image.size", + "py_doc": "Get image's size, [width, height]" + }, + "args": [], + "ret_type": "image::Size", + "static": false, + "def": "image::Size size()", + "py_def": "def size(self) -> Size" + }, + "data_size": { + "type": "func", + "name": "data_size", + "doc": { + "brief": "Get image's data size", + "maixpy": "maix.image.Image.data_size", + "py_doc": "Get image's data size" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int data_size()", + "py_def": "def data_size(self) -> int" + }, + "width": { + "type": "func", + "name": "width", + "doc": { + "brief": "Get image's width", + "maixpy": "maix.image.Image.width", + "py_doc": "Get image's width" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int width()", + "py_def": "def width(self) -> int" + }, + "height": { + "type": "func", + "name": "height", + "doc": { + "brief": "Get image's height", + "maixpy": "maix.image.Image.height", + "py_doc": "Get image's height" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int height()", + "py_def": "def height(self) -> int" + }, + "data": { + "type": "func", + "name": "data", + "doc": { + "brief": "Get image's data pointer.\\nIn MaixPy is capsule object.", + "maixpy": "maix.image.Image.data", + "py_doc": "Get image's data pointer.\nIn MaixPy is capsule object." + }, + "args": [], + "ret_type": "void*", + "static": false, + "def": "void *data()", + "py_def": "def data(self) -> capsule" + }, + "__str__": { + "type": "func", + "name": "__str__", + "doc": { + "brief": "To string method", + "maixpy": "maix.image.Image.__str__", + "py_doc": "To string method" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string __str__()", + "py_def": "def __str__(self) -> str" + }, + "to_str": { + "type": "func", + "name": "to_str", + "doc": { + "brief": "To string method", + "maixpy": "maix.image.Image.to_str", + "py_doc": "To string method" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string to_str()", + "py_def": "def to_str(self) -> str" + }, + "get_pixel": { + "type": "func", + "name": "get_pixel", + "doc": { + "brief": "Get pixel of image", + "param": { + "x": "pixel's coordinate x. x must less than image's width", + "y": "pixel's coordinate y. y must less than image's height", + "rgbtuple": "switch return value method. rgbtuple decides whether to split the return or not. default is false." + }, + "return": "pixel value,\nAccording to image format and rgbtuple, return different value:\nformat is FMT_RGB888, rgbtuple is true, return [R, G, B]; rgbtuple is false, return [RGB]\nforamt is FMT_BGR888, rgbtuple is true, return [B, G, R]; rgbtuple is false, return [BGR]\nformat is FMT_GRAYSCALE, return [GRAY];", + "maixpy": "maix.image.Image.get_pixel", + "py_doc": "Get pixel of image\n\nArgs:\n - x: pixel's coordinate x. x must less than image's width\n - y: pixel's coordinate y. y must less than image's height\n - rgbtuple: switch return value method. rgbtuple decides whether to split the return or not. 
default is false.\n\n\nReturns: pixel value,\nAccording to image format and rgbtuple, return different value:\nformat is FMT_RGB888, rgbtuple is true, return [R, G, B]; rgbtuple is false, return [RGB]\nforamt is FMT_BGR888, rgbtuple is true, return [B, G, R]; rgbtuple is false, return [BGR]\nformat is FMT_GRAYSCALE, return [GRAY];\n" + }, + "args": [ + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "bool", + "rgbtuple", + "false" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector get_pixel(int x, int y, bool rgbtuple = false)", + "py_def": "def get_pixel(self, x: int, y: int, rgbtuple: bool = False) -> list[int]" + }, + "set_pixel": { + "type": "func", + "name": "set_pixel", + "doc": { + "brief": "Set pixel of image", + "param": { + "x": "pixel's coordinate x. x must less than image's width", + "y": "pixel's coordinate y. y must less than image's height", + "pixel": "pixel value, according to image format and size of pixel, has different operation:\nformat is FMT_RGB888, pixel size must be 1 or 3, if size is 1, will split pixel[0] to [R, G, B]; if size is 3, will use pixel directly\nformat is FMT_BGR888, pixel size must be 1 or 3, if size is 1, will split pixel[0] to [B, G, R]; if size is 3, will use pixel directly\nformat is FMT_GRAYSCALE, pixel size must be 1, will use pixel directly" + }, + "return": "error code, Err::ERR_NONE is ok, other is error", + "maixpy": "maix.image.Image.set_pixel", + "py_doc": "Set pixel of image\n\nArgs:\n - x: pixel's coordinate x. x must less than image's width\n - y: pixel's coordinate y. y must less than image's height\n - pixel: pixel value, according to image format and size of pixel, has different operation:\nformat is FMT_RGB888, pixel size must be 1 or 3, if size is 1, will split pixel[0] to [R, G, B]; if size is 3, will use pixel directly\nformat is FMT_BGR888, pixel size must be 1 or 3, if size is 1, will split pixel[0] to [B, G, R]; if size is 3, will use pixel directly\nformat is FMT_GRAYSCALE, pixel size must be 1, will use pixel directly\n\n\nReturns: error code, Err::ERR_NONE is ok, other is error\n" + }, + "args": [ + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "std::vector", + "pixel", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err set_pixel(int x, int y, std::vector pixel)", + "py_def": "def set_pixel(self, x: int, y: int, pixel: list[int]) -> maix.err.Err" + }, + "to_tensor": { + "type": "func", + "name": "to_tensor", + "doc": { + "brief": "Convert Image object to tensor::Tensor object", + "param": { + "chw": "if true, the shape of tensor is [C, H, W], else [H, W, C]", + "copy": "if true, will alloc memory for tensor data, else will use the memory of Image object" + }, + "return": "tensor::Tensor object pointer, an allocated tensor object", + "maixpy": "maix.image.Image.to_tensor", + "py_doc": "Convert Image object to tensor::Tensor object\n\nArgs:\n - chw: if true, the shape of tensor is [C, H, W], else [H, W, C]\n - copy: if true, will alloc memory for tensor data, else will use the memory of Image object\n\n\nReturns: tensor::Tensor object pointer, an allocated tensor object\n" + }, + "args": [ + [ + "bool", + "chw", + "false" + ], + [ + "bool", + "copy", + "true" + ] + ], + "ret_type": "tensor::Tensor*", + "static": false, + "def": "tensor::Tensor *to_tensor(bool chw = false, bool copy = true)", + "py_def": "def to_tensor(self, chw: bool = False, copy: bool = True) -> maix.tensor.Tensor" + }, + "to_bytes": { + "type": "func", + 
"name": "to_bytes", + "doc": { + "brief": "Get image's data and convert to array bytes", + "param": { + "copy": "if true, will alloc memory and copy data to new buffer,\nelse will use the memory of Image object, delete bytes object will not affect Image object\uff0c\nbut delete Image object will make bytes object invalid, it may cause program crash !!!!\nSo use this param carefully." + }, + "return": "image's data bytes, need be delete by caller in C++.", + "maixpy": "maix.image.Image.to_bytes", + "py_doc": "Get image's data and convert to array bytes\n\nArgs:\n - copy: if true, will alloc memory and copy data to new buffer,\nelse will use the memory of Image object, delete bytes object will not affect Image object\uff0c\nbut delete Image object will make bytes object invalid, it may cause program crash !!!!\nSo use this param carefully.\n\n\nReturns: image's data bytes, need be delete by caller in C++.\n" + }, + "args": [ + [ + "bool", + "copy", + "true" + ] + ], + "ret_type": "Bytes*", + "static": false, + "def": "Bytes *to_bytes(bool copy = true)", + "py_def": "def to_bytes(*args, **kwargs)" + }, + "to_format": { + "type": "func", + "name": "to_format", + "doc": { + "brief": "Convert image to specific format", + "param": { + "format": "format want to convert to, @see image::Format, only support RGB888, BGR888, RGBA8888, BGRA8888, GRAYSCALE, JPEG." + }, + "return": "new image object. Need be delete by caller in C++.", + "throw": "err.Exception, if two images' format not support, **or already the format**, will raise exception", + "maixpy": "maix.image.Image.to_format", + "py_doc": "Convert image to specific format\n\nArgs:\n - format: format want to convert to, @see image::Format, only support RGB888, BGR888, RGBA8888, BGRA8888, GRAYSCALE, JPEG.\n\n\nReturns: new image object. Need be delete by caller in C++.\n" + }, + "args": [ + [ + "const image::Format &", + "format", + null + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *to_format(const image::Format &format)", + "py_def": "def to_format(self, format: Format) -> Image" + }, + "to_jpeg": { + "type": "func", + "name": "to_jpeg", + "doc": { + "brief": "Convert image to jpeg", + "param": { + "quality": "the quality of jpg, default is 95. For MaixCAM supported range is (50, 100], if <= 50 will be fixed to 51." + }, + "return": "new image object. Need be delete by caller in C++.", + "throw": "err.Exception, if two images' format not support, **or already the format**, will raise exception", + "maixpy": "maix.image.Image.to_jpeg", + "py_doc": "Convert image to jpeg\n\nArgs:\n - quality: the quality of jpg, default is 95. For MaixCAM supported range is (50, 100], if <= 50 will be fixed to 51.\n\n\nReturns: new image object. Need be delete by caller in C++.\n" + }, + "args": [ + [ + "int", + "quality", + "95" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *to_jpeg(int quality = 95)", + "py_def": "def to_jpeg(self, quality: int = 95) -> Image" + }, + "draw_image": { + "type": "func", + "name": "draw_image", + "doc": { + "brief": "Draw image on this image", + "param": { + "x": "left top corner of image point's coordinate x", + "y": "left top corner of image point's coordinate y", + "img": "image object to draw, the caller's channel must <= the args' channel,\ne.g. 
caller is RGB888, args is RGBA8888, will throw exception, but caller is RGBA8888, args is RGB888 or RGBA8888 is ok" + }, + "return": "this image object self", + "maixpy": "maix.image.Image.draw_image", + "py_doc": "Draw image on this image\n\nArgs:\n - x: left top corner of image point's coordinate x\n - y: left top corner of image point's coordinate y\n - img: image object to draw, the caller's channel must <= the args' channel,\ne.g. caller is RGB888, args is RGBA8888, will throw exception, but caller is RGBA8888, args is RGB888 or RGBA8888 is ok\n\n\nReturns: this image object self\n" + }, + "args": [ + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "image::Image &", + "img", + null + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *draw_image(int x, int y, image::Image &img)", + "py_def": "def draw_image(self, x: int, y: int, img: Image) -> Image" + }, + "draw_rect": { + "type": "func", + "name": "draw_rect", + "doc": { + "brief": "Fill rectangle color to image", + "param": { + "x": "left top corner of rectangle point's coordinate x", + "y": "left top corner of rectangle point's coordinate y", + "w": "rectangle width", + "h": "rectangle height", + "color": "rectangle color", + "thickness": "rectangle thickness(line width), by default(value is 1), -1 means fill rectangle" + }, + "return": "this image object self", + "maixpy": "maix.image.Image.draw_rect", + "py_doc": "Fill rectangle color to image\n\nArgs:\n - x: left top corner of rectangle point's coordinate x\n - y: left top corner of rectangle point's coordinate y\n - w: rectangle width\n - h: rectangle height\n - color: rectangle color\n - thickness: rectangle thickness(line width), by default(value is 1), -1 means fill rectangle\n\n\nReturns: this image object self\n" + }, + "args": [ + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "int", + "w", + null + ], + [ + "int", + "h", + null + ], + [ + "const image::Color &", + "color", + null + ], + [ + "int", + "thickness", + "1" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *draw_rect(int x, int y, int w, int h, const image::Color &color, int thickness = 1)", + "py_def": "def draw_rect(self, x: int, y: int, w: int, h: int, color: Color, thickness: int = 1) -> Image" + }, + "draw_line": { + "type": "func", + "name": "draw_line", + "doc": { + "brief": "Draw line on image", + "param": { + "x1": "start point's coordinate x", + "y1": "start point's coordinate y", + "x2": "end point's coordinate x", + "y2": "end point's coordinate y", + "color": "line color @see image::Color", + "thickness": "line thickness(line width), by default(value is 1)" + }, + "return": "this image object self", + "maixpy": "maix.image.Image.draw_line", + "py_doc": "Draw line on image\n\nArgs:\n - x1: start point's coordinate x\n - y1: start point's coordinate y\n - x2: end point's coordinate x\n - y2: end point's coordinate y\n - color: line color @see image::Color\n - thickness: line thickness(line width), by default(value is 1)\n\n\nReturns: this image object self\n" + }, + "args": [ + [ + "int", + "x1", + null + ], + [ + "int", + "y1", + null + ], + [ + "int", + "x2", + null + ], + [ + "int", + "y2", + null + ], + [ + "const image::Color &", + "color", + null + ], + [ + "int", + "thickness", + "1" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *draw_line(int x1, int y1, int x2, int y2, const image::Color &color, int thickness = 1)", + "py_def": "def draw_line(self, x1: 
int, y1: int, x2: int, y2: int, color: Color, thickness: int = 1) -> Image" + }, + "draw_circle": { + "type": "func", + "name": "draw_circle", + "doc": { + "brief": "Draw circle on image", + "param": { + "x": "circle center point's coordinate x", + "y": "circle center point's coordinate y", + "radius": "circle radius", + "color": "circle color @see image::Color", + "thickness": "circle thickness(line width), default -1 means fill circle" + }, + "return": "this image object self", + "maixpy": "maix.image.Image.draw_circle", + "py_doc": "Draw circle on image\n\nArgs:\n - x: circle center point's coordinate x\n - y: circle center point's coordinate y\n - radius: circle radius\n - color: circle color @see image::Color\n - thickness: circle thickness(line width), default -1 means fill circle\n\n\nReturns: this image object self\n" + }, + "args": [ + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "int", + "radius", + null + ], + [ + "const image::Color &", + "color", + null + ], + [ + "int", + "thickness", + "1" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *draw_circle(int x, int y, int radius, const image::Color &color, int thickness = 1)", + "py_def": "def draw_circle(self, x: int, y: int, radius: int, color: Color, thickness: int = 1) -> Image" + }, + "draw_ellipse": { + "type": "func", + "name": "draw_ellipse", + "doc": { + "brief": "Draw ellipse on image", + "param": { + "x": "ellipse center point's coordinate x", + "y": "ellipse center point's coordinate y", + "a": "ellipse major axis length", + "b": "ellipse minor axis length", + "angle": "ellipse rotation angle", + "start_angle": "ellipse start angle", + "end_angle": "ellipse end angle", + "color": "ellipse color @see image::Color", + "thickness": "ellipse thickness(line width), by default(value is 1), -1 means fill ellipse" + }, + "return": "this image object self", + "maixpy": "maix.image.Image.draw_ellipse", + "py_doc": "Draw ellipse on image\n\nArgs:\n - x: ellipse center point's coordinate x\n - y: ellipse center point's coordinate y\n - a: ellipse major axis length\n - b: ellipse minor axis length\n - angle: ellipse rotation angle\n - start_angle: ellipse start angle\n - end_angle: ellipse end angle\n - color: ellipse color @see image::Color\n - thickness: ellipse thickness(line width), by default(value is 1), -1 means fill ellipse\n\n\nReturns: this image object self\n" + }, + "args": [ + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "int", + "a", + null + ], + [ + "int", + "b", + null + ], + [ + "float", + "angle", + null + ], + [ + "float", + "start_angle", + null + ], + [ + "float", + "end_angle", + null + ], + [ + "const image::Color &", + "color", + null + ], + [ + "int", + "thickness", + "1" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *draw_ellipse(int x, int y, int a, int b, float angle, float start_angle, float end_angle, const image::Color &color, int thickness = 1)", + "py_def": "def draw_ellipse(self, x: int, y: int, a: int, b: int, angle: float, start_angle: float, end_angle: float, color: Color, thickness: int = 1) -> Image" + }, + "draw_string": { + "type": "func", + "name": "draw_string", + "doc": { + "brief": "Draw text on image", + "param": { + "x": "text left top point's coordinate x", + "y": "text left top point's coordinate y", + "string": "text content", + "color": "text color @see image::Color, default is white", + "scale": "font scale, by default(value is 1)", + "thickness": "text 
thickness(line width), if negative, the glyph is filled, by default(value is -1)", + "wrap": "if true, will auto wrap text to next line if text width > image width, by default(value is true)" + }, + "return": "this image object self", + "maixpy": "maix.image.Image.draw_string", + "py_doc": "Draw text on image\n\nArgs:\n - x: text left top point's coordinate x\n - y: text left top point's coordinate y\n - string: text content\n - color: text color @see image::Color, default is white\n - scale: font scale, by default(value is 1)\n - thickness: text thickness(line width), if negative, the glyph is filled, by default(value is -1)\n - wrap: if true, will auto wrap text to next line if text width > image width, by default(value is true)\n\n\nReturns: this image object self\n" + }, + "args": [ + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "const std::string &", + "textstring", + null + ], + [ + "const image::Color &", + "color", + "image::COLOR_WHITE" + ], + [ + "float", + "scale", + "1" + ], + [ + "int", + "thickness", + "-1" + ], + [ + "bool", + "wrap", + "true" + ], + [ + "int", + "wrap_space", + "4" + ], + [ + "const std::string &", + "font", + "\"\"" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *draw_string(int x, int y, const std::string &textstring, const image::Color &color = image::COLOR_WHITE, float scale = 1, int thickness = -1,\n bool wrap = true, int wrap_space = 4, const std::string &font = \"\")", + "py_def": "def draw_string(self, x: int, y: int, textstring: str, color: Color = ..., scale: float = 1, thickness: int = -1, wrap: bool = True, wrap_space: int = 4, font: str = '') -> Image" + }, + "draw_cross": { + "type": "func", + "name": "draw_cross", + "doc": { + "brief": "Draw cross on image", + "param": { + "x": "cross center point's coordinate x", + "y": "cross center point's coordinate y", + "color": "cross color @see image::Color", + "size": "how long the lines of the cross extend, by default(value is 5). So the line length is `2 * size + thickness`", + "thickness": "cross thickness(line width), by default(value is 1)" + }, + "maixpy": "maix.image.Image.draw_cross", + "py_doc": "Draw cross on image\n\nArgs:\n - x: cross center point's coordinate x\n - y: cross center point's coordinate y\n - color: cross color @see image::Color\n - size: how long the lines of the cross extend, by default(value is 5). 
So the line length is `2 * size + thickness`\n - thickness: cross thickness(line width), by default(value is 1)\n" + }, + "args": [ + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "const image::Color &", + "color", + null + ], + [ + "int", + "size", + "5" + ], + [ + "int", + "thickness", + "1" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *draw_cross(int x, int y, const image::Color &color, int size = 5, int thickness = 1)", + "py_def": "def draw_cross(self, x: int, y: int, color: Color, size: int = 5, thickness: int = 1) -> Image" + }, + "draw_arrow": { + "type": "func", + "name": "draw_arrow", + "doc": { + "brief": "Draw arrow on image", + "param": { + "x0": "start coordinate of the arrow x0", + "y0": "start coordinate of the arrow y0", + "x1": "end coordinate of the arrow x1", + "y1": "end coordinate of the arrow y1", + "color": "cross color @see image::Color", + "thickness": "cross thickness(line width), by default(value is 1)" + }, + "return": "this image object self", + "maixpy": "maix.image.Image.draw_arrow", + "py_doc": "Draw arrow on image\n\nArgs:\n - x0: start coordinate of the arrow x0\n - y0: start coordinate of the arrow y0\n - x1: end coordinate of the arrow x1\n - y1: end coordinate of the arrow y1\n - color: cross color @see image::Color\n - thickness: cross thickness(line width), by default(value is 1)\n\n\nReturns: this image object self\n" + }, + "args": [ + [ + "int", + "x0", + null + ], + [ + "int", + "y0", + null + ], + [ + "int", + "x1", + null + ], + [ + "int", + "y1", + null + ], + [ + "const image::Color &", + "color", + null + ], + [ + "int", + "thickness", + "1" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *draw_arrow(int x0, int y0, int x1, int y1, const image::Color &color, int thickness = 1)", + "py_def": "def draw_arrow(self, x0: int, y0: int, x1: int, y1: int, color: Color, thickness: int = 1) -> Image" + }, + "draw_edges": { + "type": "func", + "name": "draw_edges", + "doc": { + "brief": "Draw edges on image", + "param": { + "corners": "edges, [[x0, y0], [x1, y1], [x2, y2], [x3, y3]]", + "color": "edges color @see image::Color", + "size": "the circle of radius size. TODO: support in the feature", + "thickness": "edges thickness(line width), by default(value is 1)", + "fill": "if true, will fill edges, by default(value is false)" + }, + "return": "this image object self", + "maixpy": "maix.image.Image.draw_edges", + "py_doc": "Draw edges on image\n\nArgs:\n - corners: edges, [[x0, y0], [x1, y1], [x2, y2], [x3, y3]]\n - color: edges color @see image::Color\n - size: the circle of radius size. 
TODO: support in the feature\n - thickness: edges thickness(line width), by default(value is 1)\n - fill: if true, will fill edges, by default(value is false)\n\n\nReturns: this image object self\n" + }, + "args": [ + [ + "std::vector>", + "corners", + null + ], + [ + "const image::Color &", + "color", + null + ], + [ + "int", + "size", + "0" + ], + [ + "int", + "thickness", + "1" + ], + [ + "bool", + "fill", + "false" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *draw_edges(std::vector> corners, const image::Color &color, int size = 0, int thickness = 1, bool fill = false)", + "py_def": "def draw_edges(self, corners: list[list[int]], color: Color, size: int = 0, thickness: int = 1, fill: bool = False) -> Image" + }, + "draw_keypoints": { + "type": "func", + "name": "draw_keypoints", + "doc": { + "brief": "Draw keypoints on image", + "param": { + "keypoints": "keypoints, [x1, y1, x2, y2...] or [x, y, rotation_andle_in_degrees, x2, y2, rotation_andle_in_degrees2](TODO: rotation_andle_in_degrees support in the feature)", + "color": "keypoints color @see image::Color", + "size": "size of keypoints(radius)", + "thickness": "keypoints thickness(line width), by default(value is -1 means fill circle)", + "line_thickness": "line thickness, default 0 means not draw lines, > 0 will draw lines connect points." + }, + "return": "this image object self", + "maixpy": "maix.image.Image.draw_keypoints", + "py_doc": "Draw keypoints on image\n\nArgs:\n - keypoints: keypoints, [x1, y1, x2, y2...] or [x, y, rotation_andle_in_degrees, x2, y2, rotation_andle_in_degrees2](TODO: rotation_andle_in_degrees support in the feature)\n - color: keypoints color @see image::Color\n - size: size of keypoints(radius)\n - thickness: keypoints thickness(line width), by default(value is -1 means fill circle)\n - line_thickness: line thickness, default 0 means not draw lines, > 0 will draw lines connect points.\n\n\nReturns: this image object self\n" + }, + "args": [ + [ + "const std::vector &", + "keypoints", + null + ], + [ + "const image::Color &", + "color", + null + ], + [ + "int", + "size", + "4" + ], + [ + "int", + "thickness", + "-1" + ], + [ + "int", + "line_thickness", + "0" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *draw_keypoints(const std::vector &keypoints, const image::Color &color, int size = 4, int thickness = -1, int line_thickness = 0)", + "py_def": "def draw_keypoints(self, keypoints: list[int], color: Color, size: int = 4, thickness: int = -1, line_thickness: int = 0) -> Image" + }, + "resize": { + "type": "func", + "name": "resize", + "doc": { + "brief": "Resize image, will create a new resized image object", + "param": { + "width": "new width, if value is -1, will use height to calculate aspect ratio", + "height": "new height, if value is -1, will use width to calculate aspect ratio", + "object_fit": "fill, contain, cover, by default is fill", + "method": "resize method, by default is bilinear" + }, + "return": "Always return a new resized image object even size not change, So in C++ you should take care of the return value to avoid memory leak.\nAnd it's better to judge whether the size has changed before calling this function to make the program more efficient.\ne.g.\nif img->width() != width || img->height() != height:\nimg = img->resize(width, height);", + "maixpy": "maix.image.Image.resize", + "py_doc": "Resize image, will create a new resized image object\n\nArgs:\n - width: new width, if value is -1, will use height to 
calculate aspect ratio\n - height: new height, if value is -1, will use width to calculate aspect ratio\n - object_fit: fill, contain, cover, by default is fill\n - method: resize method, by default is bilinear\n\n\nReturns: Always return a new resized image object even size not change, So in C++ you should take care of the return value to avoid memory leak.\nAnd it's better to judge whether the size has changed before calling this function to make the program more efficient.\ne.g.\nif img->width() != width || img->height() != height:\nimg = img->resize(width, height);\n" + }, + "args": [ + [ + "int", + "width", + null + ], + [ + "int", + "height", + null + ], + [ + "image::Fit", + "object_fit", + "image::Fit::FIT_FILL" + ], + [ + "image::ResizeMethod", + "method", + "image::ResizeMethod::NEAREST" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *resize(int width, int height, image::Fit object_fit = image::Fit::FIT_FILL, image::ResizeMethod method = image::ResizeMethod::NEAREST)", + "py_def": "def resize(self, width: int, height: int, object_fit: Fit = ..., method: ResizeMethod = ...) -> Image" + }, + "affine": { + "type": "func", + "name": "affine", + "doc": { + "brief": "Affine transform image, will create a new transformed image object", + "param": { + "src_points": "three source points, [x1, y1, x2, y2, x3, y3]", + "dst_points": "three destination points, [x1, y1, x2, y2, x3, y3]", + "width": "new width, if value is -1, will use height to calculate aspect ratio", + "height": "new height, if value is -1, will use width to calculate aspect ratio", + "method": "resize method, by default is bilinear" + }, + "return": "new transformed image object", + "maixpy": "maix.image.Image.affine", + "py_doc": "Affine transform image, will create a new transformed image object\n\nArgs:\n - src_points: three source points, [x1, y1, x2, y2, x3, y3]\n - dst_points: three destination points, [x1, y1, x2, y2, x3, y3]\n - width: new width, if value is -1, will use height to calculate aspect ratio\n - height: new height, if value is -1, will use width to calculate aspect ratio\n - method: resize method, by default is bilinear\n\n\nReturns: new transformed image object\n" + }, + "args": [ + [ + "std::vector", + "src_points", + null + ], + [ + "std::vector", + "dst_points", + null + ], + [ + "int", + "width", + "-1" + ], + [ + "int", + "height", + "-1" + ], + [ + "image::ResizeMethod", + "method", + "image::ResizeMethod::BILINEAR" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *affine(std::vector src_points, std::vector dst_points, int width = -1, int height = -1, image::ResizeMethod method = image::ResizeMethod::BILINEAR)", + "py_def": "def affine(self, src_points: list[int], dst_points: list[int], width: int = -1, height: int = -1, method: ResizeMethod = ...) 
-> Image" + }, + "copy": { + "type": "func", + "name": "copy", + "doc": { + "brief": "Copy image, will create a new copied image object", + "return": "new copied image object", + "maixpy": "maix.image.Image.copy", + "py_doc": "Copy image, will create a new copied image object\n\nReturns: new copied image object\n" + }, + "args": [], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *copy()", + "py_def": "def copy(self) -> Image" + }, + "crop": { + "type": "func", + "name": "crop", + "doc": { + "brief": "Crop image, will create a new cropped image object", + "param": { + "x": "left top corner of crop rectangle point's coordinate x", + "y": "left top corner of crop rectangle point's coordinate y", + "w": "crop rectangle width", + "h": "crop rectangle height" + }, + "return": "new cropped image object", + "maixpy": "maix.image.Image.crop", + "py_doc": "Crop image, will create a new cropped image object\n\nArgs:\n - x: left top corner of crop rectangle point's coordinate x\n - y: left top corner of crop rectangle point's coordinate y\n - w: crop rectangle width\n - h: crop rectangle height\n\n\nReturns: new cropped image object\n" + }, + "args": [ + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "int", + "w", + null + ], + [ + "int", + "h", + null + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *crop(int x, int y, int w, int h)", + "py_def": "def crop(self, x: int, y: int, w: int, h: int) -> Image" + }, + "rotate": { + "type": "func", + "name": "rotate", + "doc": { + "brief": "Rotate image, will create a new rotated image object", + "param": { + "angle": "anti-clock wise rotate angle, if angle is 90 or 270, and width or height is -1, will swap width and height, or will throw exception", + "width": "new width, if value is -1, will use height to calculate aspect ratio", + "height": "new height, if value is -1, will use width to calculate aspect ratio", + "method": "resize method, by default is bilinear" + }, + "return": "new rotated image object", + "maixpy": "maix.image.Image.rotate", + "py_doc": "Rotate image, will create a new rotated image object\n\nArgs:\n - angle: anti-clock wise rotate angle, if angle is 90 or 270, and width or height is -1, will swap width and height, or will throw exception\n - width: new width, if value is -1, will use height to calculate aspect ratio\n - height: new height, if value is -1, will use width to calculate aspect ratio\n - method: resize method, by default is bilinear\n\n\nReturns: new rotated image object\n" + }, + "args": [ + [ + "float", + "angle", + null + ], + [ + "int", + "width", + "-1" + ], + [ + "int", + "height", + "-1" + ], + [ + "image::ResizeMethod", + "method", + "image::ResizeMethod::BILINEAR" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *rotate(float angle, int width = -1, int height = -1, image::ResizeMethod method = image::ResizeMethod::BILINEAR)", + "py_def": "def rotate(self, angle: float, width: int = -1, height: int = -1, method: ResizeMethod = ...) -> Image" + }, + "mean_pool": { + "type": "func", + "name": "mean_pool", + "doc": { + "brief": "Finds the mean of x_div * y_div squares in the image and returns the modified image composed of the mean of each square.", + "param": { + "x_div": "The width of the squares.", + "y_div": "The height of the squares.", + "copy": "Select whether to return a new image or modify the original image. 
default is false.\nIf true, returns a new image composed of the mean of each square; If false, returns the modified image composed of the mean of each square." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.mean_pool", + "py_doc": "Finds the mean of x_div * y_div squares in the image and returns the modified image composed of the mean of each square.\n\nArgs:\n - x_div: The width of the squares.\n - y_div: The height of the squares.\n - copy: Select whether to return a new image or modify the original image. default is false.\nIf true, returns a new image composed of the mean of each square; If false, returns the modified image composed of the mean of each square.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "x_div", + null + ], + [ + "int", + "y_div", + null + ], + [ + "bool", + "copy", + "false" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *mean_pool(int x_div, int y_div, bool copy = false)", + "py_def": "def mean_pool(self, x_div: int, y_div: int, copy: bool = False) -> Image" + }, + "midpoint_pool": { + "type": "func", + "name": "midpoint_pool", + "doc": { + "brief": "Finds the midpoint of x_div * y_div squares in the image and returns the modified image composed of the mean of each square.", + "param": { + "x_div": "The width of the squares.", + "y_div": "The height of the squares.", + "bias": "The bias of the midpoint. default is 0.5.\nmidpoint value is equal to (max * bias + min * (1 - bias))", + "copy": "Select whether to return a new image or modify the original image. default is false.\nIf true, returns a new image composed of the midpoint of each square; If false, returns the modified image composed of the midpoint of each square." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.midpoint_pool", + "py_doc": "Finds the midpoint of x_div * y_div squares in the image and returns the modified image composed of the mean of each square.\n\nArgs:\n - x_div: The width of the squares.\n - y_div: The height of the squares.\n - bias: The bias of the midpoint. default is 0.5.\nmidpoint value is equal to (max * bias + min * (1 - bias))\n - copy: Select whether to return a new image or modify the original image. default is false.\nIf true, returns a new image composed of the midpoint of each square; If false, returns the modified image composed of the midpoint of each square.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "x_div", + null + ], + [ + "int", + "y_div", + null + ], + [ + "double", + "bias", + "0.5" + ], + [ + "bool", + "copy", + "false" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *midpoint_pool(int x_div, int y_div, double bias = 0.5, bool copy = false)", + "py_def": "def midpoint_pool(self, x_div: int, y_div: int, bias: float = 0.5, copy: bool = False) -> Image" + }, + "compress": { + "type": "func", + "name": "compress", + "doc": { + "brief": "JPEG compresses the image in place, the same as to_jpeg functioin, it's recommend to use to_jpeg instead.", + "param": { + "quality": "The quality of the compressed image. default is 95." 
+ }, + "return": "Returns the compressed JPEG image", + "maixpy": "maix.image.Image.compress", + "py_doc": "JPEG compresses the image in place, the same as to_jpeg functioin, it's recommend to use to_jpeg instead.\n\nArgs:\n - quality: The quality of the compressed image. default is 95.\n\n\nReturns: Returns the compressed JPEG image\n" + }, + "args": [ + [ + "int", + "quality", + "95" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *compress(int quality = 95)", + "py_def": "def compress(self, quality: int = 95) -> Image" + }, + "clear": { + "type": "func", + "name": "clear", + "doc": { + "brief": "Sets all pixels in the image to zero", + "param": { + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.clear", + "py_doc": "Sets all pixels in the image to zero\n\nArgs:\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *clear(image::Image *mask = nullptr)", + "py_def": "def clear(self, mask: Image = None) -> Image" + }, + "mask_rectange": { + "type": "func", + "name": "mask_rectange", + "doc": { + "brief": "Zeros a rectangular part of the image. If no arguments are supplied this method zeros the center of the image.", + "param": { + "x": "The x coordinate of the top left corner of the rectangle.", + "y": "The y coordinate of the top left corner of the rectangle.", + "w": "The width of the rectangle.", + "h": "The height of the rectangle." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.mask_rectange", + "py_doc": "Zeros a rectangular part of the image. If no arguments are supplied this method zeros the center of the image.\n\nArgs:\n - x: The x coordinate of the top left corner of the rectangle.\n - y: The y coordinate of the top left corner of the rectangle.\n - w: The width of the rectangle.\n - h: The height of the rectangle.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "x", + "-1" + ], + [ + "int", + "y", + "-1" + ], + [ + "int", + "w", + "-1" + ], + [ + "int", + "h", + "-1" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *mask_rectange(int x = -1, int y = -1, int w = -1, int h = -1)", + "py_def": "def mask_rectange(self, x: int = -1, y: int = -1, w: int = -1, h: int = -1) -> Image" + }, + "mask_circle": { + "type": "func", + "name": "mask_circle", + "doc": { + "brief": "Zeros a circular part of the image. If no arguments are supplied this method zeros the center of the image.", + "param": { + "x": "The x coordinate of the center of the circle.", + "y": "The y coordinate of the center of the circle.", + "radius": "The radius of the circle." 
+ }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.mask_circle", + "py_doc": "Zeros a circular part of the image. If no arguments are supplied this method zeros the center of the image.\n\nArgs:\n - x: The x coordinate of the center of the circle.\n - y: The y coordinate of the center of the circle.\n - radius: The radius of the circle.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "x", + "-1" + ], + [ + "int", + "y", + "-1" + ], + [ + "int", + "radius", + "-1" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *mask_circle(int x = -1, int y = -1, int radius = -1)", + "py_def": "def mask_circle(self, x: int = -1, y: int = -1, radius: int = -1) -> Image" + }, + "mask_ellipse": { + "type": "func", + "name": "mask_ellipse", + "doc": { + "brief": "Zeros a ellipse part of the image. If no arguments are supplied this method zeros the center of the image.", + "param": { + "x": "The x coordinate of the center of the ellipse.", + "y": "The y coordinate of the center of the ellipse.", + "radius_x": "The radius of the ellipse in the x direction.", + "radius_y": "The radius of the ellipse in the y direction.", + "rotation_angle_in_degrees": "The rotation angle of the ellipse in degrees." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.mask_ellipse", + "py_doc": "Zeros a ellipse part of the image. If no arguments are supplied this method zeros the center of the image.\n\nArgs:\n - x: The x coordinate of the center of the ellipse.\n - y: The y coordinate of the center of the ellipse.\n - radius_x: The radius of the ellipse in the x direction.\n - radius_y: The radius of the ellipse in the y direction.\n - rotation_angle_in_degrees: The rotation angle of the ellipse in degrees.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "x", + "-1" + ], + [ + "int", + "y", + "-1" + ], + [ + "int", + "radius_x", + "-1" + ], + [ + "int", + "radius_y", + "-1" + ], + [ + "float", + "rotation_angle_in_degrees", + "0" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *mask_ellipse(int x = -1, int y = -1, int radius_x = -1, int radius_y = -1, float rotation_angle_in_degrees = 0)", + "py_def": "def mask_ellipse(self, x: int = -1, y: int = -1, radius_x: int = -1, radius_y: int = -1, rotation_angle_in_degrees: float = 0) -> Image" + }, + "binary": { + "type": "func", + "name": "binary", + "doc": { + "brief": "Sets all pixels in the image to black or white depending on if the pixel is inside of a threshold in the threshold list thresholds or not.", + "note": "For GRAYSCALE format, Lmin and Lmax range is [0, 255]. For RGB888 format, Lmin and Lmax range is [0, 100].", + "param": { + "thresholds": "You can define multiple thresholds.\nFor GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.\nFor RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.\nWhere the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.", + "invert": "If true, the thresholds will be inverted before the operation. default is false.", + "zero": "If zero is true, the image will be set the pixels within the threshold to 0, other pixels remain unchanged. 
If zero is false, the image will be set to black or white. default is false.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.", + "to_bitmap": "If true, the image will be converted to a bitmap image before thresholding. default is false. TODO: support in the feature", + "copy": "Select whether to return a new image or modify the original image. default is false." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.binary", + "py_doc": "Sets all pixels in the image to black or white depending on if the pixel is inside of a threshold in the threshold list thresholds or not.\n\nArgs:\n - thresholds: You can define multiple thresholds.\nFor GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.\nFor RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.\nWhere the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.\n - invert: If true, the thresholds will be inverted before the operation. default is false.\n - zero: If zero is true, the image will be set the pixels within the threshold to 0, other pixels remain unchanged. If zero is false, the image will be set to black or white. default is false.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n - to_bitmap: If true, the image will be converted to a bitmap image before thresholding. default is false. TODO: support in the feature\n - copy: Select whether to return a new image or modify the original image. 
default is false.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "std::vector>", + "thresholds", + "std::vector>()" + ], + [ + "bool", + "invert", + "false" + ], + [ + "bool", + "zero", + "false" + ], + [ + "image::Image *", + "mask", + "nullptr" + ], + [ + "bool", + "to_bitmap", + "false" + ], + [ + "bool", + "copy", + "false" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *binary(std::vector> thresholds = std::vector>(), bool invert = false, bool zero = false, image::Image *mask = nullptr, bool to_bitmap = false, bool copy = false)", + "py_def": "def binary(self, thresholds: list[list[int]] = [], invert: bool = False, zero: bool = False, mask: Image = None, to_bitmap: bool = False, copy: bool = False) -> Image" + }, + "invert": { + "type": "func", + "name": "invert", + "doc": { + "brief": "Inverts the image in place.", + "return": "Returns the image after the operation is completed", + "maixpy": "maix.image.Image.invert", + "py_doc": "Inverts the image in place.\n\nReturns: Returns the image after the operation is completed\n" + }, + "args": [], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *invert()", + "py_def": "def invert(self) -> Image" + }, + "b_and": { + "type": "func", + "name": "b_and", + "doc": { + "brief": "Performs a bitwise and operation between the image and the other image.", + "param": { + "other": "The other image should be an image and should be the same size as the image being operated on. TODO: support path?", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.b_and", + "py_doc": "Performs a bitwise and operation between the image and the other image.\n\nArgs:\n - other: The other image should be an image and should be the same size as the image being operated on. TODO: support path?\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "image::Image *", + "other", + null + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *b_and(image::Image *other, image::Image *mask = nullptr)", + "py_def": "def b_and(self, other: Image, mask: Image = None) -> Image" + }, + "b_nand": { + "type": "func", + "name": "b_nand", + "doc": { + "brief": "Performs a bitwise nand operation between the image and the other image.", + "param": { + "other": "The other image should be an image and should be the same size as the image being operated on. TODO: support path?", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." 
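A hedged sketch of `binary()` (and `invert()`) for both pixel formats mentioned in the note above; `image.load()`, `to_format()` and `Format.FMT_GRAYSCALE` are assumed from the broader `maix.image` module rather than this diff.

```python
from maix import image

img = image.load("/root/test.jpg")  # assumed helper and path

# RGB888 thresholds are LAB tuples {Lmin, Lmax, Amin, Amax, Bmin, Bmax}, L in [0, 100].
# The values below are only illustrative.
red_like = [[0, 80, 20, 80, 10, 80]]
bin_img = img.binary(red_like, copy=True)   # copy=True returns a new image

# GRAYSCALE thresholds only need {Lmin, Lmax}, L in [0, 255].
gray = img.to_format(image.Format.FMT_GRAYSCALE)   # to_format() assumed from the wider API
gray.binary([[0, 64]], invert=True)                # modifies gray in place (copy defaults to False)
gray.invert()                                      # flips the result again, in place
```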
+ }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.b_nand", + "py_doc": "Performs a bitwise nand operation between the image and the other image.\n\nArgs:\n - other: The other image should be an image and should be the same size as the image being operated on. TODO: support path?\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "image::Image *", + "other", + null + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *b_nand(image::Image *other, image::Image *mask = nullptr)", + "py_def": "def b_nand(self, other: Image, mask: Image = None) -> Image" + }, + "b_or": { + "type": "func", + "name": "b_or", + "doc": { + "brief": "Performs a bitwise or operation between the image and the other image.", + "param": { + "other": "The other image should be an image and should be the same size as the image being operated on. TODO: support path?", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.b_or", + "py_doc": "Performs a bitwise or operation between the image and the other image.\n\nArgs:\n - other: The other image should be an image and should be the same size as the image being operated on. TODO: support path?\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "image::Image *", + "other", + null + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *b_or(image::Image *other, image::Image *mask = nullptr)", + "py_def": "def b_or(self, other: Image, mask: Image = None) -> Image" + }, + "b_nor": { + "type": "func", + "name": "b_nor", + "doc": { + "brief": "Performs a bitwise nor operation between the image and the other image.", + "param": { + "other": "The other image should be an image and should be the same size as the image being operated on. TODO: support path?", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.b_nor", + "py_doc": "Performs a bitwise nor operation between the image and the other image.\n\nArgs:\n - other: The other image should be an image and should be the same size as the image being operated on. TODO: support path?\n - mask: Mask is another image to use as a pixel level mask for the operation. 
The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "image::Image *", + "other", + null + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *b_nor(image::Image *other, image::Image *mask = nullptr)", + "py_def": "def b_nor(self, other: Image, mask: Image = None) -> Image" + }, + "b_xor": { + "type": "func", + "name": "b_xor", + "doc": { + "brief": "Performs a bitwise xor operation between the image and the other image.", + "param": { + "other": "The other image should be an image and should be the same size as the image being operated on. TODO: support path?", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.b_xor", + "py_doc": "Performs a bitwise xor operation between the image and the other image.\n\nArgs:\n - other: The other image should be an image and should be the same size as the image being operated on. TODO: support path?\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "image::Image *", + "other", + null + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *b_xor(image::Image *other, image::Image *mask = nullptr)", + "py_def": "def b_xor(self, other: Image, mask: Image = None) -> Image" + }, + "b_xnor": { + "type": "func", + "name": "b_xnor", + "doc": { + "brief": "Performs a bitwise xnor operation between the image and the other image.", + "param": { + "other": "The other image should be an image and should be the same size as the image being operated on. TODO: support path?", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.b_xnor", + "py_doc": "Performs a bitwise xnor operation between the image and the other image.\n\nArgs:\n - other: The other image should be an image and should be the same size as the image being operated on. TODO: support path?\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. 
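The bitwise family (`b_and`, `b_nand`, `b_or`, `b_nor`, `b_xor`, and `b_xnor` below) all take the same shape of call: a same-size `other` image plus an optional black/white `mask`. A sketch, where the mask-building helpers (`Image()`, `draw_rect()`, `image.COLOR_WHITE`, `width()`/`height()`) are assumptions from the wider API:

```python
from maix import image

a = image.load("/root/a.jpg")   # assumed helper/paths; b must be the same size as a
b = image.load("/root/b.jpg")

a.b_and(b)   # a = a & b, pixel-wise
a.b_xor(b)   # a = a ^ b, pixel-wise

# Limit the operation to the left half via a black/white mask of the same size.
mask = image.Image(a.width(), a.height(), image.Format.FMT_GRAYSCALE)
mask.draw_rect(0, 0, a.width() // 2, a.height(), image.COLOR_WHITE, thickness=-1)
a.b_or(b, mask=mask)   # only pixels set (white) in the mask are modified
```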
default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "image::Image *", + "other", + null + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *b_xnor(image::Image *other, image::Image *mask = nullptr)", + "py_def": "def b_xnor(self, other: Image, mask: Image = None) -> Image" + }, + "awb": { + "type": "func", + "name": "awb", + "doc": { + "brief": "Performs an auto white balance operation on the image. TODO: support in the feature", + "param": { + "max": "if True uses the white-patch algorithm instead. default is false." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.awb", + "py_doc": "Performs an auto white balance operation on the image. TODO: support in the feature\n\nArgs:\n - max: if True uses the white-patch algorithm instead. default is false.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "bool", + "max", + "false" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *awb(bool max = false)", + "py_def": "def awb(self, max: bool = False) -> Image" + }, + "ccm": { + "type": "func", + "name": "ccm", + "doc": { + "brief": "Multiples the passed (3x3) or (4x3) floating-point color-correction-matrix with the image.\\nnote: Grayscale format is not support.", + "param": { + "matrix": "The color correction matrix to use. 3x3 or 4x3 matrix.\nWeights may either be positive or negative, and the sum of each column in the 3x3 matrix should generally be 1.\nexample:\n{\n1, 0, 0,\n0, 1, 0,\n0, 0, 1,\n}\nWhere the last row of the 4x3 matrix is an offset per color channel. If you add an offset you may wish to make the\nweights sum to less than 1 to account for the offset.\nexample:\n{\n1, 0, 0,\n0, 1, 0,\n0, 0, 1,\n0, 0, 0,\n}" + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.ccm", + "py_doc": "Multiples the passed (3x3) or (4x3) floating-point color-correction-matrix with the image.\nnote: Grayscale format is not support.\n\nArgs:\n - matrix: The color correction matrix to use. 3x3 or 4x3 matrix.\nWeights may either be positive or negative, and the sum of each column in the 3x3 matrix should generally be 1.\nexample:\n{\n1, 0, 0,\n0, 1, 0,\n0, 0, 1,\n}\nWhere the last row of the 4x3 matrix is an offset per color channel. If you add an offset you may wish to make the\nweights sum to less than 1 to account for the offset.\nexample:\n{\n1, 0, 0,\n0, 1, 0,\n0, 0, 1,\n0, 0, 0,\n}\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "std::vector &", + "matrix", + null + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *ccm(std::vector &matrix)", + "py_def": "def ccm(self, matrix: list[float]) -> Image" + }, + "gamma": { + "type": "func", + "name": "gamma", + "doc": { + "brief": "Quickly changes the image gamma, contrast, and brightness. 
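A sketch of `ccm()` using the flat matrix layout shown in its doc string; the 4x3 values here are purely illustrative, and `image.load()` is again an assumption.

```python
from maix import image

img = image.load("/root/test.jpg")  # assumed helper/path

# 3x3 identity matrix as a row-major flat list: leaves colors unchanged.
img.ccm([1.0, 0.0, 0.0,
         0.0, 1.0, 0.0,
         0.0, 0.0, 1.0])

# 4x3 form: the last row is a per-channel offset applied after the multiply.
img.ccm([1.1, 0.0, 0.0,
         0.0, 1.0, 0.0,
         0.0, 0.0, 0.9,
         5.0, 0.0, -5.0])
```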
Create a array whose size is usually 255,\\nand use the parameters gamma, contrast, and brightness to calculate the value of the array, and then map the\\nimage pixel value through the value of the array.\\nThe calculation method for array is: array[array_idx] = (powf((array_idx / 255.0), (1 / gamma)) * contrast + brightness) * scale,\\n`powf` is a function used to calculate floating point power.\\n`array` is the array used for mapping.\\n`array_idx` is the index of the array, the maximum value is determined according to the image format, usually 255.\\n`scale` is a constant, the value is determined by the image format, usually 255.\\nMapping method:\\nAssume that a pixel value in the image is 128, then map the pixel value to the value of array[128]\\nUsers can adjust the value of the array through the gamma, contrast, and brightness parameters.", + "param": { + "gamma": "The contrast gamma greater than 1.0 makes the image darker in a non-linear manner while less than 1.0 makes the image brighter. default is 1.0.", + "contrast": "The contrast value greater than 1.0 makes the image brighter in a linear manner while less than 1.0 makes the image darker. default is 1.0.", + "brightness": "The brightness value greater than 0.0 makes the image brighter in a constant manner while less than 0.0 makes the image darker. default is 0.0." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.gamma", + "py_doc": "Quickly changes the image gamma, contrast, and brightness. Create a array whose size is usually 255,\nand use the parameters gamma, contrast, and brightness to calculate the value of the array, and then map the\nimage pixel value through the value of the array.\nThe calculation method for array is: array[array_idx] = (powf((array_idx / 255.0), (1 / gamma)) * contrast + brightness) * scale,\n`powf` is a function used to calculate floating point power.\n`array` is the array used for mapping.\n`array_idx` is the index of the array, the maximum value is determined according to the image format, usually 255.\n`scale` is a constant, the value is determined by the image format, usually 255.\nMapping method:\nAssume that a pixel value in the image is 128, then map the pixel value to the value of array[128]\nUsers can adjust the value of the array through the gamma, contrast, and brightness parameters.\n\nArgs:\n - gamma: The contrast gamma greater than 1.0 makes the image darker in a non-linear manner while less than 1.0 makes the image brighter. default is 1.0.\n - contrast: The contrast value greater than 1.0 makes the image brighter in a linear manner while less than 1.0 makes the image darker. default is 1.0.\n - brightness: The brightness value greater than 0.0 makes the image brighter in a constant manner while less than 0.0 makes the image darker. 
default is 0.0.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "double", + "gamma", + "1.0" + ], + [ + "double", + "contrast", + "1.0" + ], + [ + "double", + "brightness", + "0.0" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *gamma(double gamma = 1.0, double contrast = 1.0, double brightness = 0.0)", + "py_def": "def gamma(self, gamma: float = 1.0, contrast: float = 1.0, brightness: float = 0.0) -> Image" + }, + "gamma_corr": { + "type": "func", + "name": "gamma_corr", + "doc": { + "brief": "Alias for Image.gamma.", + "param": { + "gamma": "The contrast gamma greater than 1.0 makes the image darker in a non-linear manner while less than 1.0 makes the image brighter. default is 1.0.", + "contrast": "The contrast value greater than 1.0 makes the image brighter in a linear manner while less than 1.0 makes the image darker. default is 1.0.", + "brightness": "The brightness value greater than 0.0 makes the image brighter in a constant manner while less than 0.0 makes the image darker. default is 0.0." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.gamma_corr", + "py_doc": "Alias for Image.gamma.\n\nArgs:\n - gamma: The contrast gamma greater than 1.0 makes the image darker in a non-linear manner while less than 1.0 makes the image brighter. default is 1.0.\n - contrast: The contrast value greater than 1.0 makes the image brighter in a linear manner while less than 1.0 makes the image darker. default is 1.0.\n - brightness: The brightness value greater than 0.0 makes the image brighter in a constant manner while less than 0.0 makes the image darker. default is 0.0.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "double", + "gamma", + null + ], + [ + "double", + "contrast", + "1.0" + ], + [ + "double", + "brightness", + "0.0" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *gamma_corr(double gamma, double contrast = 1.0, double brightness = 0.0)", + "py_def": "def gamma_corr(self, gamma: float, contrast: float = 1.0, brightness: float = 0.0) -> Image" + }, + "negate": { + "type": "func", + "name": "negate", + "doc": { + "brief": "Flips (numerically inverts) all pixels values in an image", + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.negate", + "py_doc": "Flips (numerically inverts) all pixels values in an image\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *negate()", + "py_def": "def negate(self) -> Image" + }, + "replace": { + "type": "func", + "name": "replace", + "doc": { + "brief": "Replaces all pixels in the image with the corresponding pixels in the other image.", + "param": { + "other": "The other image should be an image and should be the same size as the image being operated on.", + "hmirror": "If true, the image will be horizontally mirrored before the operation. default is false.", + "vflip": "If true, the image will be vertically flipped before the operation. 
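A short sketch for `gamma()` (and its `gamma_corr` alias) plus `negate()`, following the mapping-table description above; the load helper is assumed.

```python
from maix import image

img = image.load("/root/test.jpg")  # assumed helper/path

# gamma < 1.0 brightens non-linearly, contrast > 1.0 brightens linearly,
# brightness adds a constant offset (see the array-mapping description above).
img.gamma(gamma=0.7, contrast=1.2, brightness=0.05)
img.gamma_corr(2.2)   # alias; gamma has no default here and must be given

img.negate()          # numerically invert every pixel value
```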
default is false.", + "transpose": "If true, the image can be used to rotate 90 degrees or 270 degrees.\nhmirror = false, vflip = false, transpose = false, the image will not be rotated.\nhmirror = false, vflip = true, transpose = true, the image will be rotated 90 degrees.\nhmirror = true, vflip = true, transpose = false, the image will be rotated 180 degrees.\nhmirror = true, vflip = false, transpose = true, the image will be rotated 270 degrees.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.replace", + "py_doc": "Replaces all pixels in the image with the corresponding pixels in the other image.\n\nArgs:\n - other: The other image should be an image and should be the same size as the image being operated on.\n - hmirror: If true, the image will be horizontally mirrored before the operation. default is false.\n - vflip: If true, the image will be vertically flipped before the operation. default is false.\n - transpose: If true, the image can be used to rotate 90 degrees or 270 degrees.\nhmirror = false, vflip = false, transpose = false, the image will not be rotated.\nhmirror = false, vflip = true, transpose = true, the image will be rotated 90 degrees.\nhmirror = true, vflip = true, transpose = false, the image will be rotated 180 degrees.\nhmirror = true, vflip = false, transpose = true, the image will be rotated 270 degrees.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "image::Image *", + "other", + "nullptr" + ], + [ + "bool", + "hmirror", + "false" + ], + [ + "bool", + "vflip", + "false" + ], + [ + "bool", + "transpose", + "false" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *replace(image::Image *other = nullptr, bool hmirror = false, bool vflip = false, bool transpose = false, image::Image *mask = nullptr)", + "py_def": "def replace(self, other: Image = None, hmirror: bool = False, vflip: bool = False, transpose: bool = False, mask: Image = None) -> Image" + }, + "set": { + "type": "func", + "name": "set", + "doc": { + "brief": "Alias for Image::replace.", + "param": { + "other": "The other image should be an image and should be the same size as the image being operated on.", + "hmirror": "If true, the image will be horizontally mirrored before the operation. default is false.", + "vflip": "If true, the image will be vertically flipped before the operation. default is false.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." 
+ }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.set", + "py_doc": "Alias for Image::replace.\n\nArgs:\n - other: The other image should be an image and should be the same size as the image being operated on.\n - hmirror: If true, the image will be horizontally mirrored before the operation. default is false.\n - vflip: If true, the image will be vertically flipped before the operation. default is false.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "image::Image *", + "other", + null + ], + [ + "bool", + "hmirror", + "false" + ], + [ + "bool", + "vflip", + "false" + ], + [ + "bool", + "transpose", + "false" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *set(image::Image *other, bool hmirror = false, bool vflip = false, bool transpose = false, image::Image *mask = nullptr)", + "py_def": "def set(self, other: Image, hmirror: bool = False, vflip: bool = False, transpose: bool = False, mask: Image = None) -> Image" + }, + "add": { + "type": "func", + "name": "add", + "doc": { + "brief": "Adds the other image to the image.", + "param": { + "other": "The other image should be an image and should be the same size as the image being operated on. TODO: support path?", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.add", + "py_doc": "Adds the other image to the image.\n\nArgs:\n - other: The other image should be an image and should be the same size as the image being operated on. TODO: support path?\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "image::Image *", + "other", + null + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *add(image::Image *other, image::Image *mask = nullptr)", + "py_def": "def add(self, other: Image, mask: Image = None) -> Image" + }, + "sub": { + "type": "func", + "name": "sub", + "doc": { + "brief": "Subtracts the other image from the image.", + "param": { + "other": "The other image should be an image and should be the same size as the image being operated on. TODO: support path?", + "reverse": "If true, the image will be reversed before the operation. default is false.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." 
+ }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.sub", + "py_doc": "Subtracts the other image from the image.\n\nArgs:\n - other: The other image should be an image and should be the same size as the image being operated on. TODO: support path?\n - reverse: If true, the image will be reversed before the operation. default is false.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "image::Image *", + "other", + null + ], + [ + "bool", + "reverse", + "false" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *sub(image::Image *other, bool reverse = false, image::Image *mask = nullptr)", + "py_def": "def sub(self, other: Image, reverse: bool = False, mask: Image = None) -> Image" + }, + "mul": { + "type": "func", + "name": "mul", + "doc": { + "brief": "Multiplies the image by the other image.\\nNote: This method is meant for image blending and cannot multiply the pixels in the image by a scalar like 2.", + "param": { + "other": "The other image should be an image and should be the same size as the image being operated on. TODO: support path?", + "invert": "If true, the image will be change the multiplication operation from a*b to 1/((1/a)*(1/b)).\nIn particular, this lightens the image instead of darkening it (e.g. multiply versus burn operations). default is false.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.mul", + "py_doc": "Multiplies the image by the other image.\nNote: This method is meant for image blending and cannot multiply the pixels in the image by a scalar like 2.\n\nArgs:\n - other: The other image should be an image and should be the same size as the image being operated on. TODO: support path?\n - invert: If true, the image will be change the multiplication operation from a*b to 1/((1/a)*(1/b)).\nIn particular, this lightens the image instead of darkening it (e.g. multiply versus burn operations). default is false.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. 
default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "image::Image *", + "other", + null + ], + [ + "bool", + "invert", + "false" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *mul(image::Image *other, bool invert = false, image::Image *mask = nullptr)", + "py_def": "def mul(self, other: Image, invert: bool = False, mask: Image = None) -> Image" + }, + "div": { + "type": "func", + "name": "div", + "doc": { + "brief": "Divides the image by the other image.\\nThis method is meant for image blending and cannot divide the pixels in the image by a scalar like 2.", + "param": { + "other": "The other image should be an image and should be the same size as the image being operated on. TODO: support path?", + "invert": "If true, the image will be change the division direction from a/b to b/a. default is false.", + "mod": "If true, the image will be change the division operation to the modulus operation. default is false.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.div", + "py_doc": "Divides the image by the other image.\nThis method is meant for image blending and cannot divide the pixels in the image by a scalar like 2.\n\nArgs:\n - other: The other image should be an image and should be the same size as the image being operated on. TODO: support path?\n - invert: If true, the image will be change the division direction from a/b to b/a. default is false.\n - mod: If true, the image will be change the division operation to the modulus operation. default is false.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "image::Image *", + "other", + null + ], + [ + "bool", + "invert", + "false" + ], + [ + "bool", + "mod", + "false" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *div(image::Image *other, bool invert = false, bool mod = false, image::Image *mask = nullptr)", + "py_def": "def div(self, other: Image, invert: bool = False, mod: bool = False, mask: Image = None) -> Image" + }, + "min": { + "type": "func", + "name": "min", + "doc": { + "brief": "Caculate the minimum of each pixel in the image and the other image.", + "param": { + "other": "The other image should be an image and should be the same size as the image being operated on.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." 
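The arithmetic blend operations (`add`, `sub`, `mul`, `div`) follow one pattern; a sketch with two same-size images, where the load helpers are assumed and the comments mirror the parameter docs above.

```python
from maix import image

a = image.load("/root/a.jpg")   # assumed helper/paths; b must be the same size as a
b = image.load("/root/b.jpg")

a.add(b)                          # per-pixel addition
a.sub(b)                          # per-pixel subtraction (reverse=True swaps the direction, assumed reading)
a.mul(b, invert=True)             # 1/((1/a)*(1/b)) per the doc: lightens rather than darkens
a.div(b, invert=False, mod=True)  # mod=True turns the division into a modulus operation
```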
+ }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.min", + "py_doc": "Caculate the minimum of each pixel in the image and the other image.\n\nArgs:\n - other: The other image should be an image and should be the same size as the image being operated on.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "image::Image *", + "other", + null + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *min(image::Image *other, image::Image *mask = nullptr)", + "py_def": "def min(self, other: Image, mask: Image = None) -> Image" + }, + "max": { + "type": "func", + "name": "max", + "doc": { + "brief": "Caculate the maximum of each pixel in the image and the other image.", + "param": { + "other": "The other image should be an image and should be the same size as the image being operated on.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.max", + "py_doc": "Caculate the maximum of each pixel in the image and the other image.\n\nArgs:\n - other: The other image should be an image and should be the same size as the image being operated on.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "image::Image *", + "other", + null + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *max(image::Image *other, image::Image *mask = nullptr)", + "py_def": "def max(self, other: Image, mask: Image = None) -> Image" + }, + "difference": { + "type": "func", + "name": "difference", + "doc": { + "brief": "Caculate the absolute value of the difference between each pixel in the image and the other image.", + "param": { + "other": "The other image should be an image and should be the same size as the image being operated on.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.difference", + "py_doc": "Caculate the absolute value of the difference between each pixel in the image and the other image.\n\nArgs:\n - other: The other image should be an image and should be the same size as the image being operated on.\n - mask: Mask is another image to use as a pixel level mask for the operation. 
The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "image::Image *", + "other", + null + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *difference(image::Image *other, image::Image *mask = nullptr)", + "py_def": "def difference(self, other: Image, mask: Image = None) -> Image" + }, + "blend": { + "type": "func", + "name": "blend", + "doc": { + "brief": "Blends the image with the other image.\\nres = alpha * this_img / 256 + (256 - alpha) * other_img / 256", + "param": { + "other": "The other image should be an image and should be the same size as the image being operated on.", + "alpha": "The alpha value of the blend, the value range is [0, 256],default is 128.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.blend", + "py_doc": "Blends the image with the other image.\nres = alpha * this_img / 256 + (256 - alpha) * other_img / 256\n\nArgs:\n - other: The other image should be an image and should be the same size as the image being operated on.\n - alpha: The alpha value of the blend, the value range is [0, 256],default is 128.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "image::Image *", + "other", + null + ], + [ + "int", + "alpha", + "128" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *blend(image::Image *other, int alpha = 128, image::Image *mask = nullptr)", + "py_def": "def blend(self, other: Image, alpha: int = 128, mask: Image = None) -> Image" + }, + "histeq": { + "type": "func", + "name": "histeq", + "doc": { + "brief": "Runs the histogram equalization algorithm on the image.", + "param": { + "adaptive": "If true, an adaptive histogram equalization method will be run on the image instead which as generally better results than non-adaptive histogram qualization but a longer run time. default is false.", + "clip_limit": "Provides a way to limit the contrast of the adaptive histogram qualization. Use a small value for this, like 10, to produce good histogram equalized contrast limited images. default is -1.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." 
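`difference()` plus `blend()` cover a common motion-detection pattern. In this sketch `camera.Camera`, `read()`, `copy()` and `time.sleep_ms()` are assumptions from the wider MaixPy API; only the two Image methods come from the entries above.

```python
from maix import camera, time

cam = camera.Camera(320, 240)   # assumed camera helper
ref = cam.read()                # reference frame
time.sleep_ms(200)
cur = cam.read()                # later frame

motion = cur.copy()             # keep the live frame intact (copy() assumed)
motion.difference(ref)          # absolute per-pixel difference highlights what changed

# blend(): res = alpha * this_img / 256 + (256 - alpha) * other_img / 256
overlay = cur.copy()
overlay.blend(ref, alpha=128)   # roughly a 50/50 mix of the two frames
```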
+ }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.histeq", + "py_doc": "Runs the histogram equalization algorithm on the image.\n\nArgs:\n - adaptive: If true, an adaptive histogram equalization method will be run on the image instead which as generally better results than non-adaptive histogram qualization but a longer run time. default is false.\n - clip_limit: Provides a way to limit the contrast of the adaptive histogram qualization. Use a small value for this, like 10, to produce good histogram equalized contrast limited images. default is -1.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "bool", + "adaptive", + "false" + ], + [ + "int", + "clip_limit", + "-1" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *histeq(bool adaptive = false, int clip_limit = -1, image::Image *mask = nullptr)", + "py_def": "def histeq(self, adaptive: bool = False, clip_limit: int = -1, mask: Image = None) -> Image" + }, + "mean": { + "type": "func", + "name": "mean", + "doc": { + "brief": "Standard mean blurring filter using a box filter.\\nThe parameters offset and invert are valid when threshold is True.", + "param": { + "size": "Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).", + "threshold": "If true, which will enable adaptive thresholding of the image which sets pixels to white or black based on a pixel\u2019s brightness in relation to the brightness of the kernel of pixels around them.\ndefault is false.", + "offset": "The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.", + "invert": "If true, the image will be inverted before the operation. default is false.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.mean", + "py_doc": "Standard mean blurring filter using a box filter.\nThe parameters offset and invert are valid when threshold is True.\n\nArgs:\n - size: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).\n - threshold: If true, which will enable adaptive thresholding of the image which sets pixels to white or black based on a pixel\u2019s brightness in relation to the brightness of the kernel of pixels around them.\ndefault is false.\n - offset: The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.\n - invert: If true, the image will be inverted before the operation. default is false.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. 
default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "size", + null + ], + [ + "bool", + "threshold", + "false" + ], + [ + "int", + "offset", + "0" + ], + [ + "bool", + "invert", + "false" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *mean(int size, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr)", + "py_def": "def mean(self, size: int, threshold: bool = False, offset: int = 0, invert: bool = False, mask: Image = None) -> Image" + }, + "median": { + "type": "func", + "name": "median", + "doc": { + "brief": "Runs the median filter on the image. The median filter is the best filter for smoothing surfaces while preserving edges but it is very slow.", + "param": { + "size": "Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).", + "percentile": "This parameter controls the percentile of the value used in the kernel. You can set this to 0 for a min filter, 0.25 for a lower quartile filter, 0.75 for an upper quartile filter, and 1.0 for a max filter. default is 0.5.", + "threshold": "If true, which will enable adaptive thresholding of the image which sets pixels to white or black based on a pixel\u2019s brightness in relation to the brightness of the kernel of pixels around them.\ndefault is false.", + "offset": "The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.", + "invert": "If true, the image will be inverted before the operation. default is false.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.median", + "py_doc": "Runs the median filter on the image. The median filter is the best filter for smoothing surfaces while preserving edges but it is very slow.\n\nArgs:\n - size: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).\n - percentile: This parameter controls the percentile of the value used in the kernel. You can set this to 0 for a min filter, 0.25 for a lower quartile filter, 0.75 for an upper quartile filter, and 1.0 for a max filter. default is 0.5.\n - threshold: If true, which will enable adaptive thresholding of the image which sets pixels to white or black based on a pixel\u2019s brightness in relation to the brightness of the kernel of pixels around them.\ndefault is false.\n - offset: The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.\n - invert: If true, the image will be inverted before the operation. default is false.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. 
default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "size", + null + ], + [ + "double", + "percentile", + "0.5" + ], + [ + "bool", + "threshold", + "false" + ], + [ + "int", + "offset", + "0" + ], + [ + "bool", + "invert", + "false" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *median(int size, double percentile = 0.5, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr)", + "py_def": "def median(self, size: int, percentile: float = 0.5, threshold: bool = False, offset: int = 0, invert: bool = False, mask: Image = None) -> Image" + }, + "mode": { + "type": "func", + "name": "mode", + "doc": { + "brief": "Runs the mode filter on the image by replacing each pixel with the mode of their neighbors.", + "param": { + "size": "Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).", + "threshold": "If true, which will enable adaptive thresholding of the image which sets pixels to white or black based on a pixel\u2019s brightness in relation to the brightness of the kernel of pixels around them.\ndefault is false.", + "offset": "The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.", + "invert": "If true, the image will be inverted before the operation. default is false.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.mode", + "py_doc": "Runs the mode filter on the image by replacing each pixel with the mode of their neighbors.\n\nArgs:\n - size: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).\n - threshold: If true, which will enable adaptive thresholding of the image which sets pixels to white or black based on a pixel\u2019s brightness in relation to the brightness of the kernel of pixels around them.\ndefault is false.\n - offset: The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.\n - invert: If true, the image will be inverted before the operation. default is false.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. 
default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "size", + null + ], + [ + "bool", + "threshold", + "false" + ], + [ + "int", + "offset", + "0" + ], + [ + "bool", + "invert", + "false" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *mode(int size, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr)", + "py_def": "def mode(self, size: int, threshold: bool = False, offset: int = 0, invert: bool = False, mask: Image = None) -> Image" + }, + "midpoint": { + "type": "func", + "name": "midpoint", + "doc": { + "brief": "Runs the midpoint filter on the image.This filter finds the midpoint (max * bias + min * (1 - bias)) of each pixel neighborhood in the image.", + "param": { + "size": "Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).", + "bias": "The bias of the midpoint. default is 0.5.", + "threshold": "If true, which will enable adaptive thresholding of the image which sets pixels to white or black based on a pixel\u2019s brightness in relation to the brightness of the kernel of pixels around them.\ndefault is false.", + "offset": "The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.", + "invert": "If true, the image will be inverted before the operation. default is false.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.midpoint", + "py_doc": "Runs the midpoint filter on the image.This filter finds the midpoint (max * bias + min * (1 - bias)) of each pixel neighborhood in the image.\n\nArgs:\n - size: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).\n - bias: The bias of the midpoint. default is 0.5.\n - threshold: If true, which will enable adaptive thresholding of the image which sets pixels to white or black based on a pixel\u2019s brightness in relation to the brightness of the kernel of pixels around them.\ndefault is false.\n - offset: The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.\n - invert: If true, the image will be inverted before the operation. default is false.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. 
default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "size", + null + ], + [ + "double", + "bias", + "0.5" + ], + [ + "bool", + "threshold", + "false" + ], + [ + "int", + "offset", + "0" + ], + [ + "bool", + "invert", + "false" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *midpoint(int size, double bias = 0.5, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr)", + "py_def": "def midpoint(self, size: int, bias: float = 0.5, threshold: bool = False, offset: int = 0, invert: bool = False, mask: Image = None) -> Image" + }, + "morph": { + "type": "func", + "name": "morph", + "doc": { + "brief": "Convolves the image by a filter kernel. This allows you to do general purpose convolutions on an image.", + "param": { + "size": "Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).", + "kernel": "The kernel used for convolution. The kernel should be a list of lists of numbers. The kernel should be the same size as the actual kernel size.", + "mul": "This parameter is used to multiply the convolved pixel results. default is auto.", + "add": "This parameter is the value to be added to each convolution pixel result. default is 0.0.", + "threshold": "If true, which will enable adaptive thresholding of the image which sets pixels to white or black based on a pixel\u2019s brightness in relation to the brightness of the kernel of pixels around them.\ndefault is false.", + "offset": "The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.", + "invert": "If true, the image will be inverted before the operation. default is false.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.morph", + "py_doc": "Convolves the image by a filter kernel. This allows you to do general purpose convolutions on an image.\n\nArgs:\n - size: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).\n - kernel: The kernel used for convolution. The kernel should be a list of lists of numbers. The kernel should be the same size as the actual kernel size.\n - mul: This parameter is used to multiply the convolved pixel results. default is auto.\n - add: This parameter is the value to be added to each convolution pixel result. default is 0.0.\n - threshold: If true, which will enable adaptive thresholding of the image which sets pixels to white or black based on a pixel\u2019s brightness in relation to the brightness of the kernel of pixels around them.\ndefault is false.\n - offset: The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.\n - invert: If true, the image will be inverted before the operation. default is false.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. 
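The four neighborhood filters above (`mean`, `median`, `mode`, `midpoint`) share the size/threshold/offset/invert/mask parameters; a sketch applying each in turn, with the load helper assumed.

```python
from maix import image

img = image.load("/root/noisy.jpg")   # assumed helper/path

# size=1 -> 3x3 kernel, size=2 -> 5x5 kernel (see the size parameter docs above).
img.mean(1)                     # box blur
img.median(1, percentile=0.5)   # median: edge-preserving denoise, but slow
img.mode(1)                     # each pixel becomes the mode of its neighborhood
img.midpoint(1, bias=0.5)       # midpoint = max*bias + min*(1-bias) of the neighborhood

# Any of them can also act as an adaptive thresholder:
img.mean(2, threshold=True, offset=5, invert=False)
```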
default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "size", + null + ], + [ + "std::vector", + "kernel", + null + ], + [ + "float", + "mul", + "-1" + ], + [ + "float", + "add", + "0.0" + ], + [ + "bool", + "threshold", + "false" + ], + [ + "int", + "offset", + "0" + ], + [ + "bool", + "invert", + "false" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *morph(int size, std::vector kernel, float mul = -1, float add = 0.0, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr)", + "py_def": "def morph(self, size: int, kernel: list[int], mul: float = -1, add: float = 0.0, threshold: bool = False, offset: int = 0, invert: bool = False, mask: Image = None) -> Image" + }, + "gaussian": { + "type": "func", + "name": "gaussian", + "doc": { + "brief": "Convolves the image by a smoothing guassian kernel.", + "param": { + "size": "Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).", + "unsharp": "If true, this method will perform an unsharp mask operation instead of gaussian filtering operation, this improves the clarity of image edges. default is false.", + "mul": "This parameter is used to multiply the convolved pixel results. default is auto.", + "add": "This parameter is the value to be added to each convolution pixel result. default is 0.0.", + "threshold": "If true, which will enable adaptive thresholding of the image which sets pixels to white or black based on a pixel\u2019s brightness in relation to the brightness of the kernel of pixels around them.\ndefault is false.", + "offset": "The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.", + "invert": "If true, the image will be inverted before the operation. default is false.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.gaussian", + "py_doc": "Convolves the image by a smoothing guassian kernel.\n\nArgs:\n - size: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).\n - unsharp: If true, this method will perform an unsharp mask operation instead of gaussian filtering operation, this improves the clarity of image edges. default is false.\n - mul: This parameter is used to multiply the convolved pixel results. default is auto.\n - add: This parameter is the value to be added to each convolution pixel result. default is 0.0.\n - threshold: If true, which will enable adaptive thresholding of the image which sets pixels to white or black based on a pixel\u2019s brightness in relation to the brightness of the kernel of pixels around them.\ndefault is false.\n - offset: The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.\n - invert: If true, the image will be inverted before the operation. default is false.\n - mask: Mask is another image to use as a pixel level mask for the operation. 
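`morph()` takes its kernel as a flat list matching the ((size*2)+1) x ((size*2)+1) footprint; a 3x3 sharpening kernel as a sketch, with the load helper assumed.

```python
from maix import image

img = image.load("/root/test.jpg")  # assumed helper/path

# size=1 -> 3x3 kernel; the flat list below is a common sharpen kernel.
sharpen = [-1, -1, -1,
           -1,  9, -1,
           -1, -1, -1]
img.morph(1, sharpen)   # mul=-1 keeps the automatic scaling, add defaults to 0.0
```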
The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "size", + null + ], + [ + "bool", + "unsharp", + "false" + ], + [ + "float", + "mul", + "-1" + ], + [ + "float", + "add", + "0.0" + ], + [ + "bool", + "threshold", + "false" + ], + [ + "int", + "offset", + "0" + ], + [ + "bool", + "invert", + "false" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *gaussian(int size, bool unsharp = false, float mul = -1, float add = 0.0, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr)", + "py_def": "def gaussian(self, size: int, unsharp: bool = False, mul: float = -1, add: float = 0.0, threshold: bool = False, offset: int = 0, invert: bool = False, mask: Image = None) -> Image" + }, + "laplacian": { + "type": "func", + "name": "laplacian", + "doc": { + "brief": "Convolves the image by a edge detecting laplacian kernel.", + "param": { + "size": "Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).", + "sharpen": "If True, this method will sharpen the image instead of an unthresholded edge detection image. Then increase the kernel size to improve image clarity. default is false.", + "mul": "This parameter is used to multiply the convolved pixel results. default is auto.", + "add": "This parameter is the value to be added to each convolution pixel result. default is 0.0.", + "threshold": "If true, which will enable adaptive thresholding of the image which sets pixels to white or black based on a pixel\u2019s brightness in relation to the brightness of the kernel of pixels around them.\ndefault is false.", + "offset": "The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.", + "invert": "If true, the image will be inverted before the operation. default is false.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.laplacian", + "py_doc": "Convolves the image by a edge detecting laplacian kernel.\n\nArgs:\n - size: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).\n - sharpen: If True, this method will sharpen the image instead of an unthresholded edge detection image. Then increase the kernel size to improve image clarity. default is false.\n - mul: This parameter is used to multiply the convolved pixel results. default is auto.\n - add: This parameter is the value to be added to each convolution pixel result. default is 0.0.\n - threshold: If true, which will enable adaptive thresholding of the image which sets pixels to white or black based on a pixel\u2019s brightness in relation to the brightness of the kernel of pixels around them.\ndefault is false.\n - offset: The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.\n - invert: If true, the image will be inverted before the operation. 
default is false.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "size", + null + ], + [ + "bool", + "sharpen", + "false" + ], + [ + "float", + "mul", + "-1" + ], + [ + "float", + "add", + "0.0" + ], + [ + "bool", + "threshold", + "false" + ], + [ + "int", + "offset", + "0" + ], + [ + "bool", + "invert", + "false" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *laplacian(int size, bool sharpen = false, float mul = -1, float add = 0.0, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr)", + "py_def": "def laplacian(self, size: int, sharpen: bool = False, mul: float = -1, add: float = 0.0, threshold: bool = False, offset: int = 0, invert: bool = False, mask: Image = None) -> Image" + }, + "bilateral": { + "type": "func", + "name": "bilateral", + "doc": { + "brief": "Convolves the image by a bilateral filter.", + "param": { + "size": "Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).", + "color_sigma": "Controls how closely colors are matched using the bilateral filter. default is 0.1.", + "space_sigma": "Controls how closely pixels space-wise are blurred with each other. default is 1.", + "threshold": "If true, which will enable adaptive thresholding of the image which sets pixels to white or black based on a pixel\u2019s brightness in relation to the brightness of the kernel of pixels around them.\ndefault is false.", + "offset": "The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.", + "invert": "If true, the image will be inverted before the operation. default is false.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.bilateral", + "py_doc": "Convolves the image by a bilateral filter.\n\nArgs:\n - size: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).\n - color_sigma: Controls how closely colors are matched using the bilateral filter. default is 0.1.\n - space_sigma: Controls how closely pixels space-wise are blurred with each other. default is 1.\n - threshold: If true, which will enable adaptive thresholding of the image which sets pixels to white or black based on a pixel\u2019s brightness in relation to the brightness of the kernel of pixels around them.\ndefault is false.\n - offset: The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.\n - invert: If true, the image will be inverted before the operation. default is false.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. 
default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "size", + null + ], + [ + "double", + "color_sigma", + "0.1" + ], + [ + "double", + "space_sigma", + "1" + ], + [ + "bool", + "threshold", + "false" + ], + [ + "int", + "offset", + "0" + ], + [ + "bool", + "invert", + "false" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *bilateral(int size, double color_sigma = 0.1, double space_sigma = 1, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr)", + "py_def": "def bilateral(self, size: int, color_sigma: float = 0.1, space_sigma: float = 1, threshold: bool = False, offset: int = 0, invert: bool = False, mask: Image = None) -> Image" + }, + "linpolar": { + "type": "func", + "name": "linpolar", + "doc": { + "brief": "Re-project\u2019s and image from cartessian coordinates to linear polar coordinates.", + "param": { + "reverse": "If true, the image will be reverse polar transformed. default is false." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.linpolar", + "py_doc": "Re-project\u2019s and image from cartessian coordinates to linear polar coordinates.\n\nArgs:\n - reverse: If true, the image will be reverse polar transformed. default is false.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "bool", + "reverse", + "false" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *linpolar(bool reverse = false)", + "py_def": "def linpolar(self, reverse: bool = False) -> Image" + }, + "logpolar": { + "type": "func", + "name": "logpolar", + "doc": { + "brief": "Re-project\u2019s and image from cartessian coordinates to log polar coordinates.", + "param": { + "reverse": "If true, the image will be reverse polar transformed. default is false." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.logpolar", + "py_doc": "Re-project\u2019s and image from cartessian coordinates to log polar coordinates.\n\nArgs:\n - reverse: If true, the image will be reverse polar transformed. default is false.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "bool", + "reverse", + "false" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *logpolar(bool reverse = false)", + "py_def": "def logpolar(self, reverse: bool = False) -> Image" + }, + "lens_corr": { + "type": "func", + "name": "lens_corr", + "doc": { + "brief": "Performs a lens correction operation on the image. TODO: support in the feature", + "param": { + "strength": "The strength of the lens correction. default is 1.8.", + "zoom": "The zoom of the lens correction. default is 1.0.", + "x_corr": "The x correction of the lens correction. default is 0.0.", + "y_corr": "The y correction of the lens correction. default is 0.0." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.lens_corr", + "py_doc": "Performs a lens correction operation on the image. TODO: support in the feature\n\nArgs:\n - strength: The strength of the lens correction. default is 1.8.\n - zoom: The zoom of the lens correction. default is 1.0.\n - x_corr: The x correction of the lens correction. default is 0.0.\n - y_corr: The y correction of the lens correction. 
default is 0.0.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "double", + "strength", + "1.8" + ], + [ + "double", + "zoom", + "1.0" + ], + [ + "double", + "x_corr", + "0.0" + ], + [ + "double", + "y_corr", + "0.0" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *lens_corr(double strength = 1.8, double zoom = 1.0, double x_corr = 0.0, double y_corr = 0.0)", + "py_def": "def lens_corr(self, strength: float = 1.8, zoom: float = 1.0, x_corr: float = 0.0, y_corr: float = 0.0) -> Image" + }, + "rotation_corr": { + "type": "func", + "name": "rotation_corr", + "doc": { + "brief": "Performs a rotation correction operation on the image. TODO: support in the feature", + "param": { + "x_rotation": "The x rotation of the rotation correction. default is 0.0.", + "y_rotation": "The y rotation of the rotation correction. default is 0.0.", + "z_rotation": "The z rotation of the rotation correction. default is 0.0.", + "x_translation": "The x translation of the rotation correction. default is 0.0.", + "y_translation": "The y translation of the rotation correction. default is 0.0.", + "zoom": "The zoom of the rotation correction. default is 1.0.", + "fov": "The fov of the rotation correction. default is 60.0.", + "corners": "The corners of the rotation correction. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.rotation_corr", + "py_doc": "Performs a rotation correction operation on the image. TODO: support in the feature\n\nArgs:\n - x_rotation: The x rotation of the rotation correction. default is 0.0.\n - y_rotation: The y rotation of the rotation correction. default is 0.0.\n - z_rotation: The z rotation of the rotation correction. default is 0.0.\n - x_translation: The x translation of the rotation correction. default is 0.0.\n - y_translation: The y translation of the rotation correction. default is 0.0.\n - zoom: The zoom of the rotation correction. default is 1.0.\n - fov: The fov of the rotation correction. default is 60.0.\n - corners: The corners of the rotation correction. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "double", + "x_rotation", + "0.0" + ], + [ + "double", + "y_rotation", + "0.0" + ], + [ + "double", + "z_rotation", + "0.0" + ], + [ + "double", + "x_translation", + "0.0" + ], + [ + "double", + "y_translation", + "0.0" + ], + [ + "double", + "zoom", + "1.0" + ], + [ + "double", + "fov", + "60.0" + ], + [ + "std::vector", + "corners", + "std::vector()" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *rotation_corr(double x_rotation = 0.0, double y_rotation = 0.0, double z_rotation = 0.0, double x_translation = 0.0, double y_translation = 0.0, double zoom = 1.0, double fov = 60.0, std::vector corners = std::vector())", + "py_def": "def rotation_corr(self, x_rotation: float = 0.0, y_rotation: float = 0.0, z_rotation: float = 0.0, x_translation: float = 0.0, y_translation: float = 0.0, zoom: float = 1.0, fov: float = 60.0, corners: list[float] = []) -> Image" + }, + "get_histogram": { + "type": "func", + "name": "get_histogram", + "doc": { + "brief": "Computes the normalized histogram on all color channels and returns a image::Histogram object.", + "note": "For GRAYSCALE format, Lmin and Lmax range is [0, 255]. 
For RGB888 format, Lmin and Lmax range is [0, 100].", + "param": { + "thresholds": "You can define multiple thresholds.\nFor GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.\nFor RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.\nWhere the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.", + "invert": "If true, the thresholds will be inverted before the operation. default is false.", + "roi": "The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.", + "bins": "The number of bins to use for the histogram.\nIn GRAYSCALE format, setting range is [2, 256], default is 100.\nIn RGB888 format, setting range is [2, 100], default is 100.", + "l_bins": "The number of bins to use for the l channel of the histogram. Only valid in RGB888 format.\nIf an invalid value is set, bins will be used instead. The setting range is [2, 100], default is 100.", + "a_bins": "The number of bins to use for the a channel of the histogram.\nOnly valid in RGB888 format.The setting range is [2, 256], default is 256.", + "b_bins": "The number of bins to use for the b channel of the histogram.\nOnly valid in RGB888 format. The setting range is [2, 256], default is 256.", + "difference": "difference may be set to an image object to cause this method to operate on the difference image between the current image and the difference image object.\ndefault is None." + }, + "return": "Returns image::Histogram object", + "maixpy": "maix.image.Image.get_histogram", + "py_doc": "Computes the normalized histogram on all color channels and returns a image::Histogram object.\n\nArgs:\n - thresholds: You can define multiple thresholds.\nFor GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.\nFor RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.\nWhere the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.\n - invert: If true, the thresholds will be inverted before the operation. default is false.\n - roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.\n - bins: The number of bins to use for the histogram.\nIn GRAYSCALE format, setting range is [2, 256], default is 100.\nIn RGB888 format, setting range is [2, 100], default is 100.\n - l_bins: The number of bins to use for the l channel of the histogram. Only valid in RGB888 format.\nIf an invalid value is set, bins will be used instead. The setting range is [2, 100], default is 100.\n - a_bins: The number of bins to use for the a channel of the histogram.\nOnly valid in RGB888 format.The setting range is [2, 256], default is 256.\n - b_bins: The number of bins to use for the b channel of the histogram.\nOnly valid in RGB888 format. 
The setting range is [2, 256], default is 256.\n - difference: difference may be set to an image object to cause this method to operate on the difference image between the current image and the difference image object.\ndefault is None.\n\n\nReturns: Returns image::Histogram object\n" + }, + "args": [ + [ + "std::vector>", + "thresholds", + "std::vector>()" + ], + [ + "bool", + "invert", + "false" + ], + [ + "std::vector", + "roi", + "std::vector()" + ], + [ + "int", + "bins", + "-1" + ], + [ + "int", + "l_bins", + "100" + ], + [ + "int", + "a_bins", + "256" + ], + [ + "int", + "b_bins", + "256" + ], + [ + "image::Image *", + "difference", + "nullptr" + ] + ], + "ret_type": "image::Histogram", + "static": false, + "def": "image::Histogram get_histogram(std::vector> thresholds = std::vector>(), bool invert = false, std::vector roi = std::vector(), int bins = -1, int l_bins = 100, int a_bins = 256, int b_bins = 256, image::Image *difference = nullptr)", + "py_def": "def get_histogram(self, thresholds: list[list[int]] = [], invert: bool = False, roi: list[int] = [], bins: int = -1, l_bins: int = 100, a_bins: int = 256, b_bins: int = 256, difference: Image = None) -> Histogram" + }, + "get_statistics": { + "type": "func", + "name": "get_statistics", + "doc": { + "brief": "Gets the statistics of the image. TODO: support in the feature", + "note": "For GRAYSCALE format, Lmin and Lmax range is [0, 255]. For RGB888 format, Lmin and Lmax range is [0, 100].", + "param": { + "thresholds": "You can define multiple thresholds.\nFor GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.\nFor RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.\nWhere the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.", + "invert": "If true, the image will be inverted before the operation. default is false.", + "roi": "The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.", + "bins": "The number of bins to use for the statistics. default is -1.", + "l_bins": "The number of bins to use for the l channel of the statistics. default is -1.", + "a_bins": "The number of bins to use for the a channel of the statistics. default is -1.", + "b_bins": "The number of bins to use for the b channel of the statistics. default is -1.", + "difference": "The difference image to use for the statistics. default is None." + }, + "return": "Returns the statistics of the image", + "maixpy": "maix.image.Image.get_statistics", + "py_doc": "Gets the statistics of the image. TODO: support in the feature\n\nArgs:\n - thresholds: You can define multiple thresholds.\nFor GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.\nFor RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.\nWhere the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.\n - invert: If true, the image will be inverted before the operation. 
default is false.\n - roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.\n - bins: The number of bins to use for the statistics. default is -1.\n - l_bins: The number of bins to use for the l channel of the statistics. default is -1.\n - a_bins: The number of bins to use for the a channel of the statistics. default is -1.\n - b_bins: The number of bins to use for the b channel of the statistics. default is -1.\n - difference: The difference image to use for the statistics. default is None.\n\n\nReturns: Returns the statistics of the image\n" + }, + "args": [ + [ + "std::vector>", + "thresholds", + "std::vector>()" + ], + [ + "bool", + "invert", + "false" + ], + [ + "std::vector", + "roi", + "std::vector()" + ], + [ + "int", + "bins", + "-1" + ], + [ + "int", + "l_bins", + "-1" + ], + [ + "int", + "a_bins", + "-1" + ], + [ + "int", + "b_bins", + "-1" + ], + [ + "image::Image *", + "difference", + "nullptr" + ] + ], + "ret_type": "image::Statistics", + "static": false, + "def": "image::Statistics get_statistics(std::vector> thresholds = std::vector>(), bool invert = false, std::vector roi = std::vector(), int bins = -1, int l_bins = -1, int a_bins = -1, int b_bins = -1, image::Image *difference = nullptr)", + "py_def": "def get_statistics(self, thresholds: list[list[int]] = [], invert: bool = False, roi: list[int] = [], bins: int = -1, l_bins: int = -1, a_bins: int = -1, b_bins: int = -1, difference: Image = None) -> Statistics" + }, + "get_regression": { + "type": "func", + "name": "get_regression", + "doc": { + "brief": "Gets the regression of the image.", + "note": "For GRAYSCALE format, Lmin and Lmax range is [0, 255]. For RGB888 format, Lmin and Lmax range is [0, 100].", + "param": { + "thresholds": "You can define multiple thresholds.\nFor GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.\nFor RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.\nWhere the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.", + "invert": "If true, the image will be inverted before the operation. default is false.", + "roi": "The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.", + "x_stride": "The x stride to use for the regression. default is 2.", + "y_stride": "The y stride to use for the regression. default is 1.", + "area_threshold": "The area threshold to use for the regression. default is 10.", + "pixels_threshold": "The pixels threshold to use for the regression. default is 10.", + "robust": "If true, the regression will be robust. default is false." 
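[Editor's usage sketch, not part of the generated JSON: a minimal MaixPy example of get_regression() based only on the py_def documented above. image.load() and the LAB threshold values are assumptions for illustration.]

    from maix import image

    img = image.load("/root/test.jpg")        # assumed loader; any maix.image.Image works
    # One LAB threshold (Lmin, Lmax, Amin, Amax, Bmin, Bmax) for an RGB888 image;
    # values here are illustrative only, L range is [0, 100] per the note above.
    thresholds = [[0, 80, -120, -10, 0, 30]]
    lines = img.get_regression(thresholds, x_stride=2, y_stride=1,
                               area_threshold=10, pixels_threshold=10, robust=False)
    for line in lines:
        print(line)                            # image.Line objects; see the Line class docs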
+ }, + "return": "Returns the regression of the image", + "maixpy": "maix.image.Image.get_regression", + "py_doc": "Gets the regression of the image.\n\nArgs:\n - thresholds: You can define multiple thresholds.\nFor GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.\nFor RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.\nWhere the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.\n - invert: If true, the image will be inverted before the operation. default is false.\n - roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.\n - x_stride: The x stride to use for the regression. default is 2.\n - y_stride: The y stride to use for the regression. default is 1.\n - area_threshold: The area threshold to use for the regression. default is 10.\n - pixels_threshold: The pixels threshold to use for the regression. default is 10.\n - robust: If true, the regression will be robust. default is false.\n\n\nReturns: Returns the regression of the image\n" + }, + "args": [ + [ + "std::vector>", + "thresholds", + "std::vector>()" + ], + [ + "bool", + "invert", + "false" + ], + [ + "std::vector", + "roi", + "std::vector()" + ], + [ + "int", + "x_stride", + "2" + ], + [ + "int", + "y_stride", + "1" + ], + [ + "int", + "area_threshold", + "10" + ], + [ + "int", + "pixels_threshold", + "10" + ], + [ + "bool", + "robust", + "false" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector get_regression(std::vector> thresholds = std::vector>(), bool invert = false, std::vector roi = std::vector(), int x_stride = 2, int y_stride = 1, int area_threshold = 10, int pixels_threshold = 10, bool robust = false)", + "py_def": "def get_regression(self, thresholds: list[list[int]] = [], invert: bool = False, roi: list[int] = [], x_stride: int = 2, y_stride: int = 1, area_threshold: int = 10, pixels_threshold: int = 10, robust: bool = False) -> list[Line]" + }, + "save": { + "type": "func", + "name": "save", + "doc": { + "brief": "Save image to file", + "param": { + "path": "file path", + "quality": "image quality, by default(value is 95), support jpeg and png format" + }, + "return": "error code, err::ERR_NONE is ok, other is error", + "maixpy": "maix.image.Image.save", + "py_doc": "Save image to file\n\nArgs:\n - path: file path\n - quality: image quality, by default(value is 95), support jpeg and png format\n\n\nReturns: error code, err::ERR_NONE is ok, other is error\n" + }, + "args": [ + [ + "const char *", + "path", + null + ], + [ + "int", + "quality", + "95" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err save(const char *path, int quality = 95)", + "py_def": "def save(self, path: str, quality: int = 95) -> maix.err.Err" + }, + "flood_fill": { + "type": "func", + "name": "flood_fill", + "doc": { + "brief": "Flood fills a region of the image starting from location x, y.", + "param": { + "x": "The x coordinate of the seed point.", + "y": "The y coordinate of the seed point.", + "seed_threshold": "The seed_threshold value controls how different any pixel in the fill area may be from the original starting pixel. 
default is 0.05.", + "floating_threshold": "The floating_threshold value controls how different any pixel in the fill area may be from any neighbor pixels. default is 0.05.", + "color": "The color to fill the region with. default is white.", + "invert": "If true, the image will be inverted before the operation. default is false.", + "clear_background": "If true, the background will be cleared before the operation. default is false.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None. FIXME: the mask image works abnormally" + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.flood_fill", + "py_doc": "Flood fills a region of the image starting from location x, y.\n\nArgs:\n - x: The x coordinate of the seed point.\n - y: The y coordinate of the seed point.\n - seed_threshold: The seed_threshold value controls how different any pixel in the fill area may be from the original starting pixel. default is 0.05.\n - floating_threshold: The floating_threshold value controls how different any pixel in the fill area may be from any neighbor pixels. default is 0.05.\n - color: The color to fill the region with. default is white.\n - invert: If true, the image will be inverted before the operation. default is false.\n - clear_background: If true, the background will be cleared before the operation. default is false.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None. FIXME: the mask image works abnormally\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "float", + "seed_threshold", + "0.05" + ], + [ + "float", + "floating_threshold", + "0.05" + ], + [ + "image::Color", + "color", + "image::COLOR_WHITE" + ], + [ + "bool", + "invert", + "false" + ], + [ + "bool", + "clear_background", + "false" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *flood_fill(int x, int y, float seed_threshold = 0.05, float floating_threshold = 0.05, image::Color color = image::COLOR_WHITE, bool invert = false, bool clear_background = false, image::Image *mask = nullptr)", + "py_def": "def flood_fill(self, x: int, y: int, seed_threshold: float = 0.05, floating_threshold: float = 0.05, color: Color = ..., invert: bool = False, clear_background: bool = False, mask: Image = None) -> Image" + }, + "erode": { + "type": "func", + "name": "erode", + "doc": { + "brief": "Erodes the image in place.", + "param": { + "size": "Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).", + "threshold": "The number of pixels in the kernel that are not 0. If it is less than or equal to the threshold, set the center pixel to black. default is (kernel_size - 1).", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." 
+ }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.erode", + "py_doc": "Erodes the image in place.\n\nArgs:\n - size: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).\n - threshold: The number of pixels in the kernel that are not 0. If it is less than or equal to the threshold, set the center pixel to black. default is (kernel_size - 1).\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "size", + null + ], + [ + "int", + "threshold", + "-1" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *erode(int size, int threshold = -1, image::Image *mask = nullptr)", + "py_def": "def erode(self, size: int, threshold: int = -1, mask: Image = None) -> Image" + }, + "dilate": { + "type": "func", + "name": "dilate", + "doc": { + "brief": "Dilates the image in place.", + "param": { + "size": "Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).", + "threshold": "The number of pixels in the kernel that are not 0. If it is greater than or equal to the threshold, set the center pixel to white. default is 0.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.dilate", + "py_doc": "Dilates the image in place.\n\nArgs:\n - size: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).\n - threshold: The number of pixels in the kernel that are not 0. If it is greater than or equal to the threshold, set the center pixel to white. default is 0.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "size", + null + ], + [ + "int", + "threshold", + "0" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *dilate(int size, int threshold = 0, image::Image *mask = nullptr)", + "py_def": "def dilate(self, size: int, threshold: int = 0, mask: Image = None) -> Image" + }, + "open": { + "type": "func", + "name": "open", + "doc": { + "brief": "Performs erosion and dilation on an image in order.", + "param": { + "size": "Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).", + "threshold": "As the threshold for erosion and dilation, the actual threshold for erosion is (kernel_size - 1 - threshold), the actual threshold for dialation is threshold. default is 0.", + "mask": "Mask is another image to use as a pixel level mask for the operation. 
The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.open", + "py_doc": "Performs erosion and dilation on an image in order.\n\nArgs:\n - size: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).\n - threshold: As the threshold for erosion and dilation, the actual threshold for erosion is (kernel_size - 1 - threshold), the actual threshold for dialation is threshold. default is 0.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "size", + null + ], + [ + "int", + "threshold", + "0" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *open(int size, int threshold = 0, image::Image *mask = nullptr)", + "py_def": "def open(self, size: int, threshold: int = 0, mask: Image = None) -> Image" + }, + "close": { + "type": "func", + "name": "close", + "doc": { + "brief": "Performs dilation and erosion on an image in order.", + "param": { + "size": "Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).", + "threshold": "As the threshold for erosion and dilation, the actual threshold for erosion is (kernel_size - 1 - threshold), the actual threshold for dialation is threshold. default is 0.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.close", + "py_doc": "Performs dilation and erosion on an image in order.\n\nArgs:\n - size: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).\n - threshold: As the threshold for erosion and dilation, the actual threshold for erosion is (kernel_size - 1 - threshold), the actual threshold for dialation is threshold. default is 0.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "size", + null + ], + [ + "int", + "threshold", + "0" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *close(int size, int threshold = 0, image::Image *mask = nullptr)", + "py_def": "def close(self, size: int, threshold: int = 0, mask: Image = None) -> Image" + }, + "top_hat": { + "type": "func", + "name": "top_hat", + "doc": { + "brief": "Returns the image difference of the image and Image.open()\u2019ed image.", + "param": { + "size": "Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). 
Use 1(3x3 kernel), 2(5x5 kernel).", + "threshold": "As the threshold for open method. default is 0.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.top_hat", + "py_doc": "Returns the image difference of the image and Image.open()\u2019ed image.\n\nArgs:\n - size: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).\n - threshold: As the threshold for open method. default is 0.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "size", + null + ], + [ + "int", + "threshold", + "0" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *top_hat(int size, int threshold = 0, image::Image *mask = nullptr)", + "py_def": "def top_hat(self, size: int, threshold: int = 0, mask: Image = None) -> Image" + }, + "black_hat": { + "type": "func", + "name": "black_hat", + "doc": { + "brief": "Returns the image difference of the image and Image.close()\u2019ed image.", + "param": { + "size": "Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).", + "threshold": "As the threshold for close method. default is 0.", + "mask": "Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None." + }, + "return": "Returns the image after the operation is completed.", + "maixpy": "maix.image.Image.black_hat", + "py_doc": "Returns the image difference of the image and Image.close()\u2019ed image.\n\nArgs:\n - size: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).\n - threshold: As the threshold for close method. default is 0.\n - mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.\nOnly pixels set in the mask are modified. default is None.\n\n\nReturns: Returns the image after the operation is completed.\n" + }, + "args": [ + [ + "int", + "size", + null + ], + [ + "int", + "threshold", + "0" + ], + [ + "image::Image *", + "mask", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *black_hat(int size, int threshold = 0, image::Image *mask = nullptr)", + "py_def": "def black_hat(self, size: int, threshold: int = 0, mask: Image = None) -> Image" + }, + "find_blobs": { + "type": "func", + "name": "find_blobs", + "doc": { + "brief": "Finds all blobs in the image and returns a list of image.Blob class which describe each Blob.\\nPlease see the image.Blob object more more information.", + "note": "For GRAYSCALE format, Lmin and Lmax range is [0, 255]. 
For RGB888 format, Lmin and Lmax range is [0, 100].", + "param": { + "thresholds": "You can define multiple thresholds.\nFor GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.\nFor RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.\nWhere the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.", + "invert": "if true, will invert thresholds before find blobs, default is false", + "roi": "The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.", + "x_stride": "x stride is the number of x pixels to skip when doing the hough transform. default is 2", + "y_stride": "y_stride is the number of y pixels to skip when doing the hough transform. default is 1", + "area_threshold": "area threshold, if the blob area is smaller than area_threshold, the blob is not returned, default is 10", + "pixels_threshold": "pixels threshold, if the blob pixels is smaller than area_threshold, the blob is not returned,, default is 10.\nwhen x_stride and y_stride is equal to 1, pixels_threshold is equivalent to area_threshold", + "merge": "if True merges all not filtered out blobs whos bounding rectangles intersect each other. default is false", + "margin": "margin can be used to increase or decrease the size of the bounding rectangles for blobs during the intersection test.\nFor example, with a margin of 1 blobs whos bounding rectangles are 1 pixel away from each other will be merged. default is 0", + "x_hist_bins_max": "if set to non-zero populates a histogram buffer in each blob object with an x_histogram projection of all columns in the object. This value then sets the number of bins for that projection.", + "y_hist_bins_max": "if set to non-zero populates a histogram buffer in each blob object with an y_histogram projection of all rows in the object. This value then sets the number of bins for that projection." + }, + "return": "Return the blob when found blobs, format is (blob1, blob2, ...), you can use blob class methods to do more operations.", + "maixpy": "maix.image.Image.find_blobs", + "py_doc": "Finds all blobs in the image and returns a list of image.Blob class which describe each Blob.\nPlease see the image.Blob object more more information.\n\nArgs:\n - thresholds: You can define multiple thresholds.\nFor GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.\nFor RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.\nWhere the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.\n - invert: if true, will invert thresholds before find blobs, default is false\n - roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.\n - x_stride: x stride is the number of x pixels to skip when doing the hough transform. default is 2\n - y_stride: y_stride is the number of y pixels to skip when doing the hough transform. 
default is 1\n - area_threshold: area threshold, if the blob area is smaller than area_threshold, the blob is not returned, default is 10\n - pixels_threshold: pixels threshold, if the blob pixels is smaller than area_threshold, the blob is not returned,, default is 10.\nwhen x_stride and y_stride is equal to 1, pixels_threshold is equivalent to area_threshold\n - merge: if True merges all not filtered out blobs whos bounding rectangles intersect each other. default is false\n - margin: margin can be used to increase or decrease the size of the bounding rectangles for blobs during the intersection test.\nFor example, with a margin of 1 blobs whos bounding rectangles are 1 pixel away from each other will be merged. default is 0\n - x_hist_bins_max: if set to non-zero populates a histogram buffer in each blob object with an x_histogram projection of all columns in the object. This value then sets the number of bins for that projection.\n - y_hist_bins_max: if set to non-zero populates a histogram buffer in each blob object with an y_histogram projection of all rows in the object. This value then sets the number of bins for that projection.\n\n\nReturns: Return the blob when found blobs, format is (blob1, blob2, ...), you can use blob class methods to do more operations.\n" + }, + "args": [ + [ + "std::vector>", + "thresholds", + "std::vector>()" + ], + [ + "bool", + "invert", + "false" + ], + [ + "std::vector", + "roi", + "std::vector()" + ], + [ + "int", + "x_stride", + "2" + ], + [ + "int", + "y_stride", + "1" + ], + [ + "int", + "area_threshold", + "10" + ], + [ + "int", + "pixels_threshold", + "10" + ], + [ + "bool", + "merge", + "false" + ], + [ + "int", + "margin", + "0" + ], + [ + "int", + "x_hist_bins_max", + "0" + ], + [ + "int", + "y_hist_bins_max", + "0" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector find_blobs(std::vector> thresholds = std::vector>(), bool invert = false, std::vector roi = std::vector(), int x_stride = 2, int y_stride = 1, int area_threshold = 10, int pixels_threshold = 10, bool merge = false, int margin = 0, int x_hist_bins_max = 0, int y_hist_bins_max = 0)", + "py_def": "def find_blobs(self, thresholds: list[list[int]] = [], invert: bool = False, roi: list[int] = [], x_stride: int = 2, y_stride: int = 1, area_threshold: int = 10, pixels_threshold: int = 10, merge: bool = False, margin: int = 0, x_hist_bins_max: int = 0, y_hist_bins_max: int = 0) -> list[Blob]" + }, + "find_lines": { + "type": "func", + "name": "find_lines", + "doc": { + "brief": "Find lines in image", + "param": { + "roi": "The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.", + "x_stride": "x stride is the number of x pixels to skip when doing the hough transform. default is 2", + "y_stride": "y_stride is the number of y pixels to skip when doing the hough transform. default is 1", + "threshold": "threshold threshold controls what lines are detected from the hough transform. Only lines with a magnitude greater than or equal to threshold are returned.\nThe right value of threshold for your application is image dependent. default is 1000.", + "theta_margin": "theta_margin controls the merging of detected lines. default is 25.", + "rho_margin": "rho_margin controls the merging of detected lines. default is 25." 
+ }, + "return": "Return the line when found lines, format is (line1, line2, ...), you can use line class methods to do more operations", + "maixpy": "maix.image.Image.find_lines", + "py_doc": "Find lines in image\n\nArgs:\n - roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.\n - x_stride: x stride is the number of x pixels to skip when doing the hough transform. default is 2\n - y_stride: y_stride is the number of y pixels to skip when doing the hough transform. default is 1\n - threshold: threshold threshold controls what lines are detected from the hough transform. Only lines with a magnitude greater than or equal to threshold are returned.\nThe right value of threshold for your application is image dependent. default is 1000.\n - theta_margin: theta_margin controls the merging of detected lines. default is 25.\n - rho_margin: rho_margin controls the merging of detected lines. default is 25.\n\n\nReturns: Return the line when found lines, format is (line1, line2, ...), you can use line class methods to do more operations\n" + }, + "args": [ + [ + "std::vector", + "roi", + "std::vector()" + ], + [ + "int", + "x_stride", + "2" + ], + [ + "int", + "y_stride", + "1" + ], + [ + "double", + "threshold", + "1000" + ], + [ + "double", + "theta_margin", + "25" + ], + [ + "double", + "rho_margin", + "25" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector find_lines(std::vector roi = std::vector(), int x_stride = 2, int y_stride = 1, double threshold = 1000, double theta_margin = 25, double rho_margin = 25)", + "py_def": "def find_lines(self, roi: list[int] = [], x_stride: int = 2, y_stride: int = 1, threshold: float = 1000, theta_margin: float = 25, rho_margin: float = 25) -> list[Line]" + }, + "find_line_segments": { + "type": "func", + "name": "find_line_segments", + "doc": { + "brief": "Finds all line segments in the image.", + "param": { + "roi": "The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.", + "merge_distance": "The maximum distance between two lines to merge them. default is 0.", + "max_theta_difference": "The maximum difference between two lines to merge them. default is 15." + }, + "return": "Return the line when found lines, format is (line1, line2, ...), you can use line class methods to do more operations", + "maixpy": "maix.image.Image.find_line_segments", + "py_doc": "Finds all line segments in the image.\n\nArgs:\n - roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.\n - merge_distance: The maximum distance between two lines to merge them. default is 0.\n - max_theta_difference: The maximum difference between two lines to merge them. 
default is 15.\n\n\nReturns: Return the line when found lines, format is (line1, line2, ...), you can use line class methods to do more operations\n" + }, + "args": [ + [ + "std::vector", + "roi", + "std::vector()" + ], + [ + "int", + "merge_distance", + "0" + ], + [ + "int", + "max_theta_difference", + "15" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector find_line_segments(std::vector roi = std::vector(), int merge_distance = 0, int max_theta_difference = 15)", + "py_def": "def find_line_segments(self, roi: list[int] = [], merge_distance: int = 0, max_theta_difference: int = 15) -> list[Line]" + }, + "find_circles": { + "type": "func", + "name": "find_circles", + "doc": { + "brief": "Find circles in image", + "param": { + "roi": "The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.", + "x_stride": "x stride is the number of x pixels to skip when doing the hough transform. default is 2", + "y_stride": "y_stride is the number of y pixels to skip when doing the hough transform. default is 1", + "threshold": "threshold controls what circles are detected from the hough transform. Only circles with a magnitude greater than or equal to threshold are returned.\nThe right value of threshold for your application is image dependent.", + "x_margin": "x_margin controls the merging of detected circles. Circles which are x_margin, y_margin, and r_margin pixels apart are merged. default is 10", + "y_margin": "y_margin controls the merging of detected circles. Circles which are x_margin, y_margin, and r_margin pixels apart are merged. default is 10", + "r_margin": "r_margin controls the merging of detected circles. Circles which are x_margin, y_margin, and r_margin pixels apart are merged. default is 10", + "r_min": "r_min controls the minimum circle radius detected. Increase this to speed up the algorithm. default is 2", + "r_max": "r_max controls the maximum circle radius detected. Decrease this to speed up the algorithm. default is min(roi.w / 2, roi.h / 2)", + "r_step": "r_step controls how to step the radius detection by. default is 2." + }, + "return": "Return the circle when found circles, format is (circle1, circle2, ...), you can use circle class methods to do more operations", + "maixpy": "maix.image.Image.find_circles", + "py_doc": "Find circles in image\n\nArgs:\n - roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.\n - x_stride: x stride is the number of x pixels to skip when doing the hough transform. default is 2\n - y_stride: y_stride is the number of y pixels to skip when doing the hough transform. default is 1\n - threshold: threshold controls what circles are detected from the hough transform. Only circles with a magnitude greater than or equal to threshold are returned.\nThe right value of threshold for your application is image dependent.\n - x_margin: x_margin controls the merging of detected circles. Circles which are x_margin, y_margin, and r_margin pixels apart are merged. default is 10\n - y_margin: y_margin controls the merging of detected circles. Circles which are x_margin, y_margin, and r_margin pixels apart are merged. default is 10\n - r_margin: r_margin controls the merging of detected circles. 
Circles which are x_margin, y_margin, and r_margin pixels apart are merged. default is 10\n - r_min: r_min controls the minimum circle radius detected. Increase this to speed up the algorithm. default is 2\n - r_max: r_max controls the maximum circle radius detected. Decrease this to speed up the algorithm. default is min(roi.w / 2, roi.h / 2)\n - r_step: r_step controls how to step the radius detection by. default is 2.\n\n\nReturns: Return the circle when found circles, format is (circle1, circle2, ...), you can use circle class methods to do more operations\n" + }, + "args": [ + [ + "std::vector", + "roi", + "std::vector()" + ], + [ + "int", + "x_stride", + "2" + ], + [ + "int", + "y_stride", + "1" + ], + [ + "int", + "threshold", + "2000" + ], + [ + "int", + "x_margin", + "10" + ], + [ + "int", + "y_margin", + "10" + ], + [ + "int", + "r_margin", + "10" + ], + [ + "int", + "r_min", + "2" + ], + [ + "int", + "r_max", + "-1" + ], + [ + "int", + "r_step", + "2" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector find_circles(std::vector roi = std::vector(), int x_stride = 2, int y_stride = 1, int threshold = 2000, int x_margin = 10, int y_margin = 10, int r_margin = 10, int r_min = 2, int r_max = -1, int r_step = 2)", + "py_def": "def find_circles(self, roi: list[int] = [], x_stride: int = 2, y_stride: int = 1, threshold: int = 2000, x_margin: int = 10, y_margin: int = 10, r_margin: int = 10, r_min: int = 2, r_max: int = -1, r_step: int = 2) -> list[Circle]" + }, + "find_rects": { + "type": "func", + "name": "find_rects", + "doc": { + "brief": "Finds all rects in the image.", + "param": { + "roi": "The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.", + "threshold": "The threshold to use for the rects. default is 10000." + }, + "return": "Returns the rects of the image", + "maixpy": "maix.image.Image.find_rects", + "py_doc": "Finds all rects in the image.\n\nArgs:\n - roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.\n - threshold: The threshold to use for the rects. default is 10000.\n\n\nReturns: Returns the rects of the image\n" + }, + "args": [ + [ + "std::vector", + "roi", + "std::vector()" + ], + [ + "int", + "threshold", + "10000" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector find_rects(std::vector roi = std::vector(), int threshold = 10000)", + "py_def": "def find_rects(self, roi: list[int] = [], threshold: int = 10000) -> list[Rect]" + }, + "find_qrcodes": { + "type": "func", + "name": "find_qrcodes", + "doc": { + "brief": "Finds all qrcodes in the image.", + "param": { + "roi": "The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image." 
+ }, + "return": "Returns the qrcodes of the image", + "maixpy": "maix.image.Image.find_qrcodes", + "py_doc": "Finds all qrcodes in the image.\n\nArgs:\n - roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.\n\n\nReturns: Returns the qrcodes of the image\n" + }, + "args": [ + [ + "std::vector", + "roi", + "std::vector()" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector find_qrcodes(std::vector roi = std::vector())", + "py_def": "def find_qrcodes(self, roi: list[int] = []) -> list[QRCode]" + }, + "find_apriltags": { + "type": "func", + "name": "find_apriltags", + "doc": { + "brief": "Finds all apriltags in the image.", + "param": { + "roi": "The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.", + "families": "The families to use for the apriltags. default is TAG36H11.", + "fx": "The camera X focal length in pixels, default is -1.", + "fy": "The camera Y focal length in pixels, default is -1.", + "cx": "The camera X center in pixels, default is image.width / 2.", + "cy": "The camera Y center in pixels, default is image.height / 2." + }, + "return": "Returns the apriltags of the image", + "maixpy": "maix.image.Image.find_apriltags", + "py_doc": "Finds all apriltags in the image.\n\nArgs:\n - roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.\n - families: The families to use for the apriltags. default is TAG36H11.\n - fx: The camera X focal length in pixels, default is -1.\n - fy: The camera Y focal length in pixels, default is -1.\n - cx: The camera X center in pixels, default is image.width / 2.\n - cy: The camera Y center in pixels, default is image.height / 2.\n\n\nReturns: Returns the apriltags of the image\n" + }, + "args": [ + [ + "std::vector", + "roi", + "std::vector()" + ], + [ + "image::ApriltagFamilies", + "families", + "image::ApriltagFamilies::TAG36H11" + ], + [ + "float", + "fx", + "-1" + ], + [ + "float", + "fy", + "-1" + ], + [ + "int", + "cx", + "-1" + ], + [ + "int", + "cy", + "-1" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector find_apriltags(std::vector roi = std::vector(), image::ApriltagFamilies families = image::ApriltagFamilies::TAG36H11, float fx = -1, float fy = -1, int cx = -1, int cy = -1)", + "py_def": "def find_apriltags(self, roi: list[int] = [], families: ApriltagFamilies = ..., fx: float = -1, fy: float = -1, cx: int = -1, cy: int = -1) -> list[AprilTag]" + }, + "find_datamatrices": { + "type": "func", + "name": "find_datamatrices", + "doc": { + "brief": "Finds all datamatrices in the image.", + "param": { + "roi": "The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.", + "effort": "Controls how much time to spend trying to find data matrix matches. default is 200." 
+ }, + "return": "Returns the datamatrices of the image", + "maixpy": "maix.image.Image.find_datamatrices", + "py_doc": "Finds all datamatrices in the image.\n\nArgs:\n - roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.\n - effort: Controls how much time to spend trying to find data matrix matches. default is 200.\n\n\nReturns: Returns the datamatrices of the image\n" + }, + "args": [ + [ + "std::vector", + "roi", + "std::vector()" + ], + [ + "int", + "effort", + "200" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector find_datamatrices(std::vector roi = std::vector(), int effort = 200)", + "py_def": "def find_datamatrices(self, roi: list[int] = [], effort: int = 200) -> list[DataMatrix]" + }, + "find_barcodes": { + "type": "func", + "name": "find_barcodes", + "doc": { + "brief": "Finds all barcodes in the image.", + "param": { + "roi": "The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image." + }, + "return": "Returns the barcodes of the image", + "maixpy": "maix.image.Image.find_barcodes", + "py_doc": "Finds all barcodes in the image.\n\nArgs:\n - roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.\n\n\nReturns: Returns the barcodes of the image\n" + }, + "args": [ + [ + "std::vector", + "roi", + "std::vector()" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector find_barcodes(std::vector roi = std::vector())", + "py_def": "def find_barcodes(self, roi: list[int] = []) -> list[BarCode]" + }, + "find_displacement": { + "type": "func", + "name": "find_displacement", + "doc": { + "brief": "Finds the displacement between the image and the template. TODO: support in the feature\\nnote: this method must be used on power-of-2 image sizes", + "param": { + "template_image": "The template image.", + "roi": "The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.", + "template_roi": "The region-of-interest rectangle (x, y, w, h) to work in. If not specified, it is equal to the image rectangle.", + "logpolar": "If true, it will instead find rotation and scale changes between the two images. default is false." + }, + "return": "Returns the displacement of the image", + "maixpy": "maix.image.Image.find_displacement", + "py_doc": "Finds the displacement between the image and the template. TODO: support in the feature\nnote: this method must be used on power-of-2 image sizes\n\nArgs:\n - template_image: The template image.\n - roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.\n - template_roi: The region-of-interest rectangle (x, y, w, h) to work in. If not specified, it is equal to the image rectangle.\n - logpolar: If true, it will instead find rotation and scale changes between the two images. 
default is false.\n\n\nReturns: Returns the displacement of the image\n" + }, + "args": [ + [ + "image::Image &", + "template_image", + null + ], + [ + "std::vector", + "roi", + "std::vector()" + ], + [ + "std::vector", + "template_roi", + "std::vector()" + ], + [ + "bool", + "logpolar", + "false" + ] + ], + "ret_type": "image::Displacement", + "static": false, + "def": "image::Displacement find_displacement(image::Image &template_image, std::vector roi = std::vector(), std::vector template_roi = std::vector(), bool logpolar = false)", + "py_def": "def find_displacement(self, template_image: Image, roi: list[int] = [], template_roi: list[int] = [], logpolar: bool = False) -> Displacement" + }, + "find_template": { + "type": "func", + "name": "find_template", + "doc": { + "brief": "Finds the template in the image.", + "param": { + "template_image": "The template image.", + "threshold": "Threshold is floating point number (0.0-1.0) where a higher threshold prevents false positives while lowering the detection rate while a lower threshold does the opposite.", + "roi": "The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image. Only valid in SEARCH_EX mode.", + "step": "The step size to use for the template. default is 2. Only valid in SEARCH_EX mode", + "search": "The search method to use for the template. default is SEARCH_EX." + }, + "return": "Returns a bounding box tuple (x, y, w, h) for the matching location otherwise None.", + "maixpy": "maix.image.Image.find_template", + "py_doc": "Finds the template in the image.\n\nArgs:\n - template_image: The template image.\n - threshold: Threshold is floating point number (0.0-1.0) where a higher threshold prevents false positives while lowering the detection rate while a lower threshold does the opposite.\n - roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image. Only valid in SEARCH_EX mode.\n - step: The step size to use for the template. default is 2. Only valid in SEARCH_EX mode\n - search: The search method to use for the template. default is SEARCH_EX.\n\n\nReturns: Returns a bounding box tuple (x, y, w, h) for the matching location otherwise None.\n" + }, + "args": [ + [ + "image::Image &", + "template_image", + null + ], + [ + "float", + "threshold", + null + ], + [ + "std::vector", + "roi", + "std::vector()" + ], + [ + "int", + "step", + "2" + ], + [ + "image::TemplateMatch", + "search", + "image::TemplateMatch::SEARCH_EX" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector find_template(image::Image &template_image, float threshold, std::vector roi = std::vector(), int step = 2, image::TemplateMatch search = image::TemplateMatch::SEARCH_EX)", + "py_def": "def find_template(self, template_image: Image, threshold: float, roi: list[int] = [], step: int = 2, search: TemplateMatch = ...) -> list[int]" + }, + "find_features": { + "type": "func", + "name": "find_features", + "doc": { + "brief": "Finds the features in the image. TODO: support in the feature", + "param": { + "cascade": "The cascade to use for the features. default is CASCADE_FRONTALFACE_ALT.", + "threshold": "The threshold to use for the features. default is 0.5.", + "scale": "The scale to use for the features. 
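A sketch of template matching with the find_template() signature above. It uses only parameters documented here and assumes the returned list is empty when no match is found (the prose says None, while the Python signature returns list[int]).

    from maix import image

    scene = image.load("/root/scene.jpg")
    template = image.load("/root/template.jpg")
    if scene and template:
        # threshold is 0.0-1.0; SEARCH_EX is the documented default search mode
        box = scene.find_template(template, 0.7, step=2,
                                  search=image.TemplateMatch.SEARCH_EX)
        if box:
            x, y, w, h = box
            print("match at", x, y, w, h)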
default is 1.5.", + "roi": "The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image." + }, + "return": "Returns the features of the image", + "maixpy": "maix.image.Image.find_features", + "py_doc": "Finds the features in the image. TODO: support in the feature\n\nArgs:\n - cascade: The cascade to use for the features. default is CASCADE_FRONTALFACE_ALT.\n - threshold: The threshold to use for the features. default is 0.5.\n - scale: The scale to use for the features. default is 1.5.\n - roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.\n\n\nReturns: Returns the features of the image\n" + }, + "args": [ + [ + "int", + "cascade", + null + ], + [ + "float", + "threshold", + "0.5" + ], + [ + "float", + "scale", + "1.5" + ], + [ + "std::vector", + "roi", + "std::vector()" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector find_features(int cascade, float threshold = 0.5, float scale = 1.5, std::vector roi = std::vector())", + "py_def": "def find_features(self, cascade: int, threshold: float = 0.5, scale: float = 1.5, roi: list[int] = []) -> list[int]" + }, + "find_lbp": { + "type": "func", + "name": "find_lbp", + "doc": { + "brief": "Finds the lbp in the image. TODO: support in the feature.", + "param": { + "roi": "The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image." + }, + "return": "Returns the lbp of the image", + "maixpy": "maix.image.Image.find_lbp", + "py_doc": "Finds the lbp in the image. TODO: support in the feature.\n\nArgs:\n - roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.\n\n\nReturns: Returns the lbp of the image\n" + }, + "args": [ + [ + "std::vector", + "roi", + "std::vector()" + ] + ], + "ret_type": "image::LBPKeyPoint", + "static": false, + "def": "image::LBPKeyPoint find_lbp(std::vector roi = std::vector())", + "py_def": "def find_lbp(self, roi: list[int] = []) -> LBPKeyPoint" + }, + "find_keypoints": { + "type": "func", + "name": "find_keypoints", + "doc": { + "brief": "Finds the keypoints in the image. TODO: support in the feature.", + "param": { + "roi": "The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.", + "threshold": "The threshold to use for the keypoints. default is 20.", + "normalized": "If true, the image will be normalized before the operation. default is false.", + "scale_factor": "The scale factor to use for the keypoints. default is 1.5.", + "max_keypoints": "The maximum number of keypoints to use for the keypoints. default is 100.", + "corner_detector": "The corner detector to use for the keypoints. default is CORNER_AGAST." + }, + "return": "Returns the keypoints of the image", + "maixpy": "maix.image.Image.find_keypoints", + "py_doc": "Finds the keypoints in the image. 
TODO: support in the feature.\n\nArgs:\n - roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.\n - threshold: The threshold to use for the keypoints. default is 20.\n - normalized: If true, the image will be normalized before the operation. default is false.\n - scale_factor: The scale factor to use for the keypoints. default is 1.5.\n - max_keypoints: The maximum number of keypoints to use for the keypoints. default is 100.\n - corner_detector: The corner detector to use for the keypoints. default is CORNER_AGAST.\n\n\nReturns: Returns the keypoints of the image\n" + }, + "args": [ + [ + "std::vector", + "roi", + "std::vector()" + ], + [ + "int", + "threshold", + "20" + ], + [ + "bool", + "normalized", + "false" + ], + [ + "float", + "scale_factor", + "1.5" + ], + [ + "int", + "max_keypoints", + "100" + ], + [ + "image::CornerDetector", + "corner_detector", + "image::CornerDetector::CORNER_AGAST" + ] + ], + "ret_type": "image::ORBKeyPoint", + "static": false, + "def": "image::ORBKeyPoint find_keypoints(std::vector roi = std::vector(), int threshold = 20, bool normalized = false, float scale_factor = 1.5, int max_keypoints = 100, image::CornerDetector corner_detector = image::CornerDetector::CORNER_AGAST)", + "py_def": "def find_keypoints(self, roi: list[int] = [], threshold: int = 20, normalized: bool = False, scale_factor: float = 1.5, max_keypoints: int = 100, corner_detector: CornerDetector = ...) -> ORBKeyPoint" + }, + "find_edges": { + "type": "func", + "name": "find_edges", + "doc": { + "brief": "Finds the edges in the image.", + "param": { + "edge_type": "The edge type to use for the edges. default is EDGE_CANNY.", + "roi": "The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.", + "threshold": "The threshold to use for the edges. default is 20." + }, + "return": "Returns the edges of the image", + "maixpy": "maix.image.Image.find_edges", + "py_doc": "Finds the edges in the image.\n\nArgs:\n - edge_type: The edge type to use for the edges. default is EDGE_CANNY.\n - roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.\n - threshold: The threshold to use for the edges. default is 20.\n\n\nReturns: Returns the edges of the image\n" + }, + "args": [ + [ + "image::EdgeDetector", + "edge_type", + null + ], + [ + "std::vector", + "roi", + "std::vector()" + ], + [ + "std::vector", + "threshold", + "std::vector({100, 200})" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image* find_edges(image::EdgeDetector edge_type, std::vector roi = std::vector(), std::vector threshold = std::vector({100, 200}))", + "py_def": "def find_edges(self, edge_type: EdgeDetector, roi: list[int] = [], threshold: list[int] = [100, 200]) -> Image" + }, + "find_hog": { + "type": "func", + "name": "find_hog", + "doc": { + "brief": "Finds the hog in the image. TODO: support in the feature", + "param": { + "roi": "The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.", + "size": "The size to use for the hog. default is 8." 
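A short sketch for find_edges() as defined above. The Python enum member image.EdgeDetector.EDGE_CANNY is assumed to mirror the C++ default image::EdgeDetector::EDGE_CANNY shown in the signature.

    from maix import image

    img = image.load("/root/test.jpg")
    if img:
        # returns a new Image containing the detected edges;
        # threshold is the Canny low/high pair, [100, 200] by default
        edges = img.find_edges(image.EdgeDetector.EDGE_CANNY, threshold=[100, 200])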
+ }, + "return": "Returns the hog of the image", + "maixpy": "maix.image.Image.find_hog", + "py_doc": "Finds the hog in the image. TODO: support in the feature\n\nArgs:\n - roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.\ndefault is None, means whole image.\n - size: The size to use for the hog. default is 8.\n\n\nReturns: Returns the hog of the image\n" + }, + "args": [ + [ + "std::vector", + "roi", + "std::vector()" + ], + [ + "int", + "size", + "8" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image* find_hog(std::vector roi = std::vector(), int size = 8)", + "py_def": "def find_hog(self, roi: list[int] = [], size: int = 8) -> Image" + }, + "match_lbp_descriptor": { + "type": "func", + "name": "match_lbp_descriptor", + "doc": { + "brief": "Matches the lbp descriptor of the image. TODO: support in the feature", + "param": { + "desc1": "The descriptor to use for the match.", + "desc2": "The descriptor to use for the match." + }, + "return": "Returns the match of the image", + "maixpy": "maix.image.Image.match_lbp_descriptor", + "py_doc": "Matches the lbp descriptor of the image. TODO: support in the feature\n\nArgs:\n - desc1: The descriptor to use for the match.\n - desc2: The descriptor to use for the match.\n\n\nReturns: Returns the match of the image\n" + }, + "args": [ + [ + "image::LBPKeyPoint &", + "desc1", + null + ], + [ + "image::LBPKeyPoint &", + "desc2", + null + ] + ], + "ret_type": "int", + "static": false, + "def": "int match_lbp_descriptor(image::LBPKeyPoint &desc1, image::LBPKeyPoint &desc2)", + "py_def": "def match_lbp_descriptor(self, desc1: LBPKeyPoint, desc2: LBPKeyPoint) -> int" + }, + "match_orb_descriptor": { + "type": "func", + "name": "match_orb_descriptor", + "doc": { + "brief": "Matches the orb descriptor of the image. TODO: support in the feature", + "param": { + "desc1": "The descriptor to use for the match.", + "desc2": "The descriptor to use for the match.", + "threshold": "The threshold to use for the match. default is 95.", + "filter_outliers": "If true, the image will be filter_outliers before the operation. default is false." + }, + "return": "Returns the match of the image", + "maixpy": "maix.image.Image.match_orb_descriptor", + "py_doc": "Matches the orb descriptor of the image. TODO: support in the feature\n\nArgs:\n - desc1: The descriptor to use for the match.\n - desc2: The descriptor to use for the match.\n - threshold: The threshold to use for the match. default is 95.\n - filter_outliers: If true, the image will be filter_outliers before the operation. 
default is false.\n\n\nReturns: Returns the match of the image\n" + }, + "args": [ + [ + "image::ORBKeyPoint &", + "desc1", + null + ], + [ + "image::ORBKeyPoint &", + "desc2", + null + ], + [ + "int", + "threshold", + "95" + ], + [ + "bool", + "filter_outliers", + "false" + ] + ], + "ret_type": "image::KPTMatch", + "static": false, + "def": "image::KPTMatch match_orb_descriptor(image::ORBKeyPoint &desc1, image::ORBKeyPoint &desc2, int threshold = 95, bool filter_outliers = false)", + "py_def": "def match_orb_descriptor(self, desc1: ORBKeyPoint, desc2: ORBKeyPoint, threshold: int = 95, filter_outliers: bool = False) -> KPTMatch" + } + }, + "def": "class Image" + }, + "load": { + "type": "func", + "name": "load", + "doc": { + "brief": "Load image from file, and convert to Image object", + "param": { + "path": "image file path", + "format": "read as this format, if not match, will convert to this format, by default is RGB888" + }, + "return": "Image object, if load failed, will return None(nullptr in C++), so you should care about it.", + "maixpy": "maix.image.load", + "py_doc": "Load image from file, and convert to Image object\n\nArgs:\n - path: image file path\n - format: read as this format, if not match, will convert to this format, by default is RGB888\n\n\nReturns: Image object, if load failed, will return None(nullptr in C++), so you should care about it.\n" + }, + "args": [ + [ + "const char *", + "path", + null + ], + [ + "image::Format", + "format", + "image::Format::FMT_RGB888" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *load(const char *path, image::Format format = image::Format::FMT_RGB888)", + "py_def": "def load(path: str, format: Format = ...) -> Image" + }, + "from_bytes": { + "type": "func", + "name": "from_bytes", + "doc": { + "brief": "Create image from bytes", + "param": { + "width": "image width", + "height": "image height", + "format": "image format", + "data": "image data, if data is None, will malloc memory for image data\nIf the image is in jpeg format, data must be filled in.", + "copy": "if true and data is not None, will copy data to new buffer, else will use data directly. default is true to avoid memory leak.\nUse it carefully!!!" + }, + "return": "Image object", + "maixpy": "maix.image.from_bytes", + "py_doc": "Create image from bytes\n\nArgs:\n - width: image width\n - height: image height\n - format: image format\n - data: image data, if data is None, will malloc memory for image data\nIf the image is in jpeg format, data must be filled in.\n - copy: if true and data is not None, will copy data to new buffer, else will use data directly. 
default is true to avoid memory leak.\nUse it carefully!!!\n\n\nReturns: Image object\n" + }, + "args": [ + [ + "int", + "width", + null + ], + [ + "int", + "height", + null + ], + [ + "image::Format", + "format", + null + ], + [ + "Bytes *", + "data", + null + ], + [ + "bool", + "copy", + "true" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *from_bytes(int width, int height, image::Format format, Bytes *data, bool copy = true)", + "py_def": "def from_bytes(width: int, height: int, format: Format, data: maix.Bytes(bytes), copy: bool = True) -> Image" + }, + "load_font": { + "type": "func", + "name": "load_font", + "doc": { + "brief": "Load font from file", + "param": { + "name": "font name, used to identify font", + "path": "font file path, support ttf, ttc, otf", + "size": "font size, font height, by default is 16" + }, + "return": "error code, err::ERR_NONE is ok, other is error", + "maixpy": "maix.image.load_font", + "py_doc": "Load font from file\n\nArgs:\n - name: font name, used to identify font\n - path: font file path, support ttf, ttc, otf\n - size: font size, font height, by default is 16\n\n\nReturns: error code, err::ERR_NONE is ok, other is error\n" + }, + "args": [ + [ + "const std::string &", + "name", + null + ], + [ + "const char *", + "path", + null + ], + [ + "int", + "size", + "16" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err load_font(const std::string &name, const char *path, int size = 16)", + "py_def": "def load_font(name: str, path: str, size: int = 16) -> maix.err.Err" + }, + "set_default_font": { + "type": "func", + "name": "set_default_font", + "doc": { + "brief": "Set default font, if not call this method, default is hershey_plain", + "param": { + "name": "font name, supported names can be get by fonts()" + }, + "return": "error code, err::ERR_NONE is ok, other is error", + "maixpy": "maix.image.set_default_font", + "py_doc": "Set default font, if not call this method, default is hershey_plain\n\nArgs:\n - name: font name, supported names can be get by fonts()\n\n\nReturns: error code, err::ERR_NONE is ok, other is error\n" + }, + "args": [ + [ + "const std::string &", + "name", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err set_default_font(const std::string &name)" + }, + "fonts": { + "type": "func", + "name": "fonts", + "doc": { + "brief": "Get all loaded fonts", + "return": "all loaded fonts, string list type", + "maixpy": "maix.image.fonts", + "py_doc": "Get all loaded fonts\n\nReturns: all loaded fonts, string list type\n" + }, + "args": [], + "ret_type": "std::vector*", + "static": false, + "def": "std::vector *fonts()", + "py_def": "def fonts() -> list[str]" + }, + "string_size": { + "type": "func", + "name": "string_size", + "doc": { + "brief": "Get text rendered width and height", + "param": { + "string": "text content", + "scale": "font scale, by default(value is 1)", + "thickness": "text thickness(line width), by default(value is 1)" + }, + "return": "text rendered width and height, [width, height]", + "maixpy": "maix.image.string_size", + "py_doc": "Get text rendered width and height\n\nArgs:\n - string: text content\n - scale: font scale, by default(value is 1)\n - thickness: text thickness(line width), by default(value is 1)\n\n\nReturns: text rendered width and height, [width, height]\n" + }, + "args": [ + [ + "std::string", + "string", + null + ], + [ + "float", + "scale", + "1" + ], + [ + "int", + "thickness", + "1" + ], + [ + "const std::string 
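A sketch of the font helpers documented in this block (load_font, set_default_font, fonts, string_size). The font path is a placeholder, and the return code is compared against maix.err as the declared return type indicates.

    from maix import image, err

    # "/root/my_font.ttf" is a placeholder path; ttf/ttc/otf files are supported
    if image.load_font("myfont", "/root/my_font.ttf", size=24) == err.Err.ERR_NONE:
        image.set_default_font("myfont")
    print("loaded fonts:", image.fonts())
    print("rendered size:", image.string_size("Hello MaixPy", scale=1.0, thickness=1))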
&", + "font", + "\"\"" + ] + ], + "ret_type": "image::Size", + "static": false, + "def": "image::Size string_size(std::string string, float scale = 1, int thickness = 1, const std::string &font = \"\")", + "py_def": "def string_size(string: str, scale: float = 1, thickness: int = 1, font: str = '') -> Size" + }, + "cv2image": { + "type": "func", + "name": "cv2image", + "doc": { + "brief": "OpenCV Mat(numpy array object) to Image object", + "param": { + "array": "numpy array object, must be a 3-dim or 2-dim continuous array with shape hwc or hw", + "bgr": "if set bgr, the return image will be marked as BGR888 or BGRA8888 format, grayscale will ignore this arg.", + "copy": "if true, will alloc new buffer and copy data, else will directly use array's data buffer, default true.\nUse this arg carefully, when set to false, ther array MUST keep alive until we don't use the return img of this func, or will cause program crash." + }, + "return": "Image object", + "maixpy": "maix.image.cv2image", + "py_doc": "OpenCV Mat(numpy array object) to Image object\n\nArgs:\n - array: numpy array object, must be a 3-dim or 2-dim continuous array with shape hwc or hw\n - bgr: if set bgr, the return image will be marked as BGR888 or BGRA8888 format, grayscale will ignore this arg.\n - copy: if true, will alloc new buffer and copy data, else will directly use array's data buffer, default true.\nUse this arg carefully, when set to false, ther array MUST keep alive until we don't use the return img of this func, or will cause program crash.\n\n\nReturns: Image object\n" + }, + "args": [ + [ + "py::array_t", + "array", + null + ], + [ + "bool", + "bgr", + "true" + ], + [ + "bool", + "copy", + "true" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *cv2image(py::array_t array, bool bgr = true, bool copy = true)", + "py_def": "def cv2image(array: numpy.ndarray[numpy.uint8], bgr: bool = True, copy: bool = True) -> Image" + }, + "image2cv": { + "type": "func", + "name": "image2cv", + "doc": { + "brief": "Image object to OpenCV Mat(numpy array object)", + "param": { + "img": "Image object, maix.image.Image type.", + "ensure_bgr": "auto convert to BGR888 or BGRA8888 if img format is not BGR or BGRA, if set to false, will not auto convert and directly use img's data, default true.", + "copy": "Whether alloc new image and copy data or not, if ensure_bgr and img is not bgr or bgra format, always copy,\nif not copy, array object will directly use img's data buffer, will faster but change array will affect img's data, default true." 
+ }, + "attention": "take care of ensure_bgr and copy param.", + "return": "numpy array object", + "maixpy": "maix.image.image2cv", + "py_doc": "Image object to OpenCV Mat(numpy array object)\n\nArgs:\n - img: Image object, maix.image.Image type.\n - ensure_bgr: auto convert to BGR888 or BGRA8888 if img format is not BGR or BGRA, if set to false, will not auto convert and directly use img's data, default true.\n - copy: Whether alloc new image and copy data or not, if ensure_bgr and img is not bgr or bgra format, always copy,\nif not copy, array object will directly use img's data buffer, will faster but change array will affect img's data, default true.\n\n\nReturns: numpy array object\n" + }, + "args": [ + [ + "image::Image *", + "img", + null + ], + [ + "bool", + "ensure_bgr", + "true" + ], + [ + "bool", + "copy", + "true" + ] + ], + "ret_type": "py::array_t", + "static": false, + "def": "py::array_t image2cv(image::Image *img, bool ensure_bgr = true, bool copy = true)", + "py_def": "def image2cv(img: Image, ensure_bgr: bool = True, copy: bool = True) -> numpy.ndarray[numpy.uint8]" + } + }, + "auto_add": true + }, + "camera": { + "type": "module", + "doc": { + "brief": "maix.camera module, access camera device and get image from it", + "maixpy": "maix.camera", + "py_doc": "maix.camera module, access camera device and get image from it" + }, + "members": { + "list_devices": { + "type": "func", + "name": "list_devices", + "doc": { + "brief": "List all supported camera devices.", + "return": "Returns the path to the camera device.", + "maixpy": "maix.camera.list_devices", + "py_doc": "List all supported camera devices.\n\nReturns: Returns the path to the camera device.\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector list_devices()", + "py_def": "def list_devices() -> list[str]" + }, + "set_regs_enable": { + "type": "func", + "name": "set_regs_enable", + "doc": { + "brief": "Enable set camera registers, default is false, if set to true, will not set camera registers, you can manually set registers by write_reg API.", + "param": { + "enable": "enable/disable set camera registers" + }, + "maixpy": "maix.camera.set_regs_enable", + "py_doc": "Enable set camera registers, default is false, if set to true, will not set camera registers, you can manually set registers by write_reg API.\n\nArgs:\n - enable: enable/disable set camera registers\n" + }, + "args": [ + [ + "bool", + "enable", + "true" + ] + ], + "ret_type": "void", + "static": false, + "def": "void set_regs_enable(bool enable = true)", + "py_def": "def set_regs_enable(enable: bool = True) -> None" + }, + "Camera": { + "type": "class", + "name": "Camera", + "doc": { + "brief": "Camera class", + "maixpy": "maix.camera.Camera", + "py_doc": "Camera class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Camera", + "doc": { + "brief": "Construct a new Camera object.\\nMaximum resolution support 2560x1440.", + "param": { + "width": "camera width, default is -1, means auto, mostly means max width of camera support", + "height": "camera height, default is -1, means auto, mostly means max height of camera support", + "format": "camera output format, default is image.Format.FMT_RGB888", + "device": "camera device path, you can get devices by list_devices method, by default(value is NULL(None in MaixPy)) means the first device", + "fps": "camera fps, default is -1, means auto, mostly means max fps of camera support", + "buff_num": "camera buffer number, default is 3, means 3 buffer, one 
used by user, one used for cache the next frame,\nmore than one buffer will accelerate image read speed, but will cost more memory.", + "open": "If true, camera will automatically call open() after creation. default is true.", + "raw": "If true, you can use read_raw() to capture the raw image output from the sensor." + }, + "maixpy": "maix.camera.Camera.__init__", + "maixcdk": "maix.camera.Camera.Camera", + "py_doc": "Construct a new Camera object.\nMaximum resolution support 2560x1440.\n\nArgs:\n - width: camera width, default is -1, means auto, mostly means max width of camera support\n - height: camera height, default is -1, means auto, mostly means max height of camera support\n - format: camera output format, default is image.Format.FMT_RGB888\n - device: camera device path, you can get devices by list_devices method, by default(value is NULL(None in MaixPy)) means the first device\n - fps: camera fps, default is -1, means auto, mostly means max fps of camera support\n - buff_num: camera buffer number, default is 3, means 3 buffer, one used by user, one used for cache the next frame,\nmore than one buffer will accelerate image read speed, but will cost more memory.\n - open: If true, camera will automatically call open() after creation. default is true.\n - raw: If true, you can use read_raw() to capture the raw image output from the sensor.\n" + }, + "args": [ + [ + "int", + "width", + "-1" + ], + [ + "int", + "height", + "-1" + ], + [ + "image::Format", + "format", + "image::FMT_RGB888" + ], + [ + "const char *", + "device", + "nullptr" + ], + [ + "double", + "fps", + "-1" + ], + [ + "int", + "buff_num", + "3" + ], + [ + "bool", + "open", + "true" + ], + [ + "bool", + "raw", + "false" + ] + ], + "ret_type": null, + "static": false, + "def": "Camera(int width = -1, int height = -1, image::Format format = image::FMT_RGB888, const char *device = nullptr, double fps = -1, int buff_num = 3, bool open = true, bool raw = false)", + "py_def": "def __init__(self, width: int = -1, height: int = -1, format: maix.image.Format = ..., device: str = None, fps: float = -1, buff_num: int = 3, open: bool = True, raw: bool = False) -> None" + }, + "get_ch_nums": { + "type": "func", + "name": "get_ch_nums", + "doc": { + "brief": "Get the number of channels supported by the camera.", + "return": "Returns the maximum number of channels.", + "maixpy": "maix.camera.Camera.get_ch_nums", + "py_doc": "Get the number of channels supported by the camera.\n\nReturns: Returns the maximum number of channels.\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int get_ch_nums()", + "py_def": "def get_ch_nums(self) -> int" + }, + "open": { + "type": "func", + "name": "open", + "doc": { + "brief": "Open camera and run", + "param": { + "width": "camera width, default is -1, means auto, mostly means max width of camera support", + "height": "camera height, default is -1, means auto, mostly means max height of camera support", + "format": "camera output format, default same as the constructor's format argument", + "fps": "camera fps, default is -1, means auto, mostly means max fps of camera support", + "buff_num": "camera buffer number, default is 3, means 3 buffer, one used by user, one used for cache the next frame,\nmore than one buffer will accelerate image read speed, but will cost more memory." 
+ }, + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.camera.Camera.open", + "py_doc": "Open camera and run\n\nArgs:\n - width: camera width, default is -1, means auto, mostly means max width of camera support\n - height: camera height, default is -1, means auto, mostly means max height of camera support\n - format: camera output format, default same as the constructor's format argument\n - fps: camera fps, default is -1, means auto, mostly means max fps of camera support\n - buff_num: camera buffer number, default is 3, means 3 buffer, one used by user, one used for cache the next frame,\nmore than one buffer will accelerate image read speed, but will cost more memory.\n\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [ + [ + "int", + "width", + "-1" + ], + [ + "int", + "height", + "-1" + ], + [ + "image::Format", + "format", + "image::FMT_INVALID" + ], + [ + "double", + "fps", + "-1" + ], + [ + "int", + "buff_num", + "-1" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err open(int width = -1, int height = -1, image::Format format = image::FMT_INVALID, double fps = -1, int buff_num = -1)", + "py_def": "def open(self, width: int = -1, height: int = -1, format: maix.image.Format = ..., fps: float = -1, buff_num: int = -1) -> maix.err.Err" + }, + "read": { + "type": "func", + "name": "read", + "doc": { + "brief": "Get one frame image from camera buffer, must call open method before read.\\nIf open method not called, will call it automatically, if open failed, will throw exception!\\nSo call open method before read is recommended.", + "param": { + "buff": "buffer to store image data, if buff is nullptr, will alloc memory automatically.\nIn MaixPy, default to None, you can create a image.Image object, then pass img.data() to buff.", + "block": "block read, default is true, means block util read image successfully,\nif set to false, will return nullptr if no image in buffer", + "block_ms": "block read timeout" + }, + "return": "image::Image object, if failed, return nullptr, you should delete if manually in C++", + "maixpy": "maix.camera.Camera.read", + "py_doc": "Get one frame image from camera buffer, must call open method before read.\nIf open method not called, will call it automatically, if open failed, will throw exception!\nSo call open method before read is recommended.\n\nArgs:\n - buff: buffer to store image data, if buff is nullptr, will alloc memory automatically.\nIn MaixPy, default to None, you can create a image.Image object, then pass img.data() to buff.\n - block: block read, default is true, means block util read image successfully,\nif set to false, will return nullptr if no image in buffer\n - block_ms: block read timeout\n\n\nReturns: image::Image object, if failed, return nullptr, you should delete if manually in C++\n" + }, + "args": [ + [ + "void *", + "buff", + "nullptr" + ], + [ + "size_t", + "buff_size", + "0" + ], + [ + "bool", + "block", + "true" + ], + [ + "int", + "block_ms", + "-1" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *read(void *buff = nullptr, size_t buff_size = 0, bool block = true, int block_ms = -1)", + "py_def": "def read(self, buff: capsule = None, buff_size: int = 0, block: bool = True, block_ms: int = -1) -> maix.image.Image" + }, + "read_raw": { + "type": "func", + "name": "read_raw", + "doc": { + "brief": "Read the raw image and obtain the width, height, and format of the raw image through the 
returned Image object.", + "note": "The raw image is in a Bayer format, and its width and height are affected by the driver. Modifying the size and format is generally not allowed.", + "return": "image::Image object, if failed, return nullptr, you should delete if manually in C++", + "maixpy": "maix.camera.Camera.read_raw", + "py_doc": "Read the raw image and obtain the width, height, and format of the raw image through the returned Image object.\n\nReturns: image::Image object, if failed, return nullptr, you should delete if manually in C++\n" + }, + "args": [], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *read_raw()", + "py_def": "def read_raw(self) -> maix.image.Image" + }, + "clear_buff": { + "type": "func", + "name": "clear_buff", + "doc": { + "brief": "Clear buff to ensure the next read image is the latest image", + "maixpy": "maix.camera.Camera.clear_buff", + "py_doc": "Clear buff to ensure the next read image is the latest image" + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void clear_buff()", + "py_def": "def clear_buff(self) -> None" + }, + "skip_frames": { + "type": "func", + "name": "skip_frames", + "doc": { + "brief": "Read some frames and drop, this is usually used avoid read not stable image when camera just opened.", + "param": { + "num": "number of frames to read and drop" + }, + "maixpy": "maix.camera.Camera.skip_frames", + "py_doc": "Read some frames and drop, this is usually used avoid read not stable image when camera just opened.\n\nArgs:\n - num: number of frames to read and drop\n" + }, + "args": [ + [ + "int", + "num", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void skip_frames(int num)", + "py_def": "def skip_frames(self, num: int) -> None" + }, + "close": { + "type": "func", + "name": "close", + "doc": { + "brief": "Close camera", + "maixpy": "maix.camera.Camera.close", + "py_doc": "Close camera" + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void close()", + "py_def": "def close(self) -> None" + }, + "add_channel": { + "type": "func", + "name": "add_channel", + "doc": { + "brief": "Add a new channel and return a new Camera object, you can use close() to close this channel.", + "param": { + "width": "camera width, default is -1, means auto, mostly means max width of camera support", + "height": "camera height, default is -1, means auto, mostly means max height of camera support", + "format": "camera output format, default is RGB888", + "fps": "camera fps, default is -1, means auto, mostly means max fps of camera support", + "buff_num": "camera buffer number, default is 3, means 3 buffer, one used by user, one used for cache the next frame,\nmore than one buffer will accelerate image read speed, but will cost more memory.", + "open": "If true, camera will automatically call open() after creation. default is true." 
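A sketch of the Camera read lifecycle using only methods documented in this block (constructor, skip_frames, read, clear_buff, close); the frame count and resolution are arbitrary.

    from maix import camera, image

    cam = camera.Camera(640, 480, image.Format.FMT_RGB888)  # open=True by default
    cam.skip_frames(30)        # drop the first, possibly unstable, frames
    for _ in range(100):
        img = cam.read()       # blocks until a frame is available by default
        # ... process img here ...
    cam.clear_buff()           # ensure the next read returns the latest frame
    cam.close()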
+ }, + "return": "new Camera object", + "maixpy": "maix.camera.Camera.add_channel", + "py_doc": "Add a new channel and return a new Camera object, you can use close() to close this channel.\n\nArgs:\n - width: camera width, default is -1, means auto, mostly means max width of camera support\n - height: camera height, default is -1, means auto, mostly means max height of camera support\n - format: camera output format, default is RGB888\n - fps: camera fps, default is -1, means auto, mostly means max fps of camera support\n - buff_num: camera buffer number, default is 3, means 3 buffer, one used by user, one used for cache the next frame,\nmore than one buffer will accelerate image read speed, but will cost more memory.\n - open: If true, camera will automatically call open() after creation. default is true.\n\n\nReturns: new Camera object\n" + }, + "args": [ + [ + "int", + "width", + "-1" + ], + [ + "int", + "height", + "-1" + ], + [ + "image::Format", + "format", + "image::FMT_RGB888" + ], + [ + "double", + "fps", + "-1" + ], + [ + "int", + "buff_num", + "3" + ], + [ + "bool", + "open", + "true" + ] + ], + "ret_type": "camera::Camera*", + "static": false, + "def": "camera::Camera *add_channel(int width = -1, int height = -1, image::Format format = image::FMT_RGB888, double fps = -1, int buff_num = 3, bool open = true)", + "py_def": "def add_channel(self, width: int = -1, height: int = -1, format: maix.image.Format = ..., fps: float = -1, buff_num: int = 3, open: bool = True) -> Camera" + }, + "is_opened": { + "type": "func", + "name": "is_opened", + "doc": { + "brief": "Check if camera is opened", + "return": "true if camera is opened, false if not", + "maixpy": "maix.camera.Camera.is_opened", + "py_doc": "Check if camera is opened\n\nReturns: true if camera is opened, false if not\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_opened()", + "py_def": "def is_opened(self) -> bool" + }, + "is_closed": { + "type": "func", + "name": "is_closed", + "doc": { + "brief": "check camera device is closed or not", + "return": "closed or not, bool type", + "maixpy": "maix.camera.Camera.is_closed", + "py_doc": "check camera device is closed or not\n\nReturns: closed or not, bool type\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_closed()", + "py_def": "def is_closed(self) -> bool" + }, + "width": { + "type": "func", + "name": "width", + "doc": { + "brief": "Get camera width", + "return": "camera width", + "maixpy": "maix.camera.Camera.width", + "py_doc": "Get camera width\n\nReturns: camera width\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int width()", + "py_def": "def width(self) -> int" + }, + "height": { + "type": "func", + "name": "height", + "doc": { + "brief": "Get camera height", + "return": "camera height", + "maixpy": "maix.camera.Camera.height", + "py_doc": "Get camera height\n\nReturns: camera height\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int height()", + "py_def": "def height(self) -> int" + }, + "fps": { + "type": "func", + "name": "fps", + "doc": { + "brief": "Get camera fps", + "return": "camera fps", + "maixpy": "maix.camera.Camera.fps", + "py_doc": "Get camera fps\n\nReturns: camera fps\n" + }, + "args": [], + "ret_type": "double", + "static": false, + "def": "double fps()", + "py_def": "def fps(self) -> float" + }, + "format": { + "type": "func", + "name": "format", + "doc": { + "brief": "Get camera output format", + "return": "camera output format, 
image::Format object", + "maixpy": "maix.camera.Camera.format", + "py_doc": "Get camera output format\n\nReturns: camera output format, image::Format object\n" + }, + "args": [], + "ret_type": "image::Format", + "static": false, + "def": "image::Format format()", + "py_def": "def format(self) -> maix.image.Format" + }, + "buff_num": { + "type": "func", + "name": "buff_num", + "doc": { + "brief": "Get camera buffer number", + "return": "camera buffer number", + "maixpy": "maix.camera.Camera.buff_num", + "py_doc": "Get camera buffer number\n\nReturns: camera buffer number\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int buff_num()", + "py_def": "def buff_num(self) -> int" + }, + "hmirror": { + "type": "func", + "name": "hmirror", + "doc": { + "brief": "Set/Get camera horizontal mirror", + "return": "camera horizontal mirror", + "maixpy": "maix.camera.Camera.hmirror", + "py_doc": "Set/Get camera horizontal mirror\n\nReturns: camera horizontal mirror\n" + }, + "args": [ + [ + "int", + "value", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int hmirror(int value = -1)", + "py_def": "def hmirror(self, value: int = -1) -> int" + }, + "vflip": { + "type": "func", + "name": "vflip", + "doc": { + "brief": "Set/Get camera vertical flip", + "return": "camera vertical flip", + "maixpy": "maix.camera.Camera.vflip", + "py_doc": "Set/Get camera vertical flip\n\nReturns: camera vertical flip\n" + }, + "args": [ + [ + "int", + "value", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int vflip(int value = -1)", + "py_def": "def vflip(self, value: int = -1) -> int" + }, + "device": { + "type": "func", + "name": "device", + "doc": { + "brief": "Get camera device path", + "return": "camera device path", + "maixpy": "maix.camera.Camera.device", + "py_doc": "Get camera device path\n\nReturns: camera device path\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string device()", + "py_def": "def device(self) -> str" + }, + "write_reg": { + "type": "func", + "name": "write_reg", + "doc": { + "brief": "Write camera register", + "param": { + "addr": "register address", + "data": "register data", + "bit_width": "register data bit width, default is 8" + }, + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.camera.Camera.write_reg", + "py_doc": "Write camera register\n\nArgs:\n - addr: register address\n - data: register data\n - bit_width: register data bit width, default is 8\n\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [ + [ + "int", + "addr", + null + ], + [ + "int", + "data", + null + ], + [ + "int", + "bit_width", + "8" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err write_reg(int addr, int data, int bit_width = 8)", + "py_def": "def write_reg(self, addr: int, data: int, bit_width: int = 8) -> maix.err.Err" + }, + "read_reg": { + "type": "func", + "name": "read_reg", + "doc": { + "brief": "Read camera register", + "param": { + "addr": "register address", + "bit_width": "register data bit width, default is 8" + }, + "return": "register data, -1 means failed", + "maixpy": "maix.camera.Camera.read_reg", + "py_doc": "Read camera register\n\nArgs:\n - addr: register address\n - bit_width: register data bit width, default is 8\n\n\nReturns: register data, -1 means failed\n" + }, + "args": [ + [ + "int", + "addr", + null + ], + [ + "int", + "bit_width", + "8" + ] + ], + "ret_type": "int", + "static": false, + 
"def": "int read_reg(int addr, int bit_width = 8)", + "py_def": "def read_reg(self, addr: int, bit_width: int = 8) -> int" + }, + "show_colorbar": { + "type": "func", + "name": "show_colorbar", + "doc": { + "brief": "Camera output color bar image for test", + "param": { + "enable": "enable/disable color bar" + }, + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.camera.Camera.show_colorbar", + "py_doc": "Camera output color bar image for test\n\nArgs:\n - enable: enable/disable color bar\n\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [ + [ + "bool", + "enable", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err show_colorbar(bool enable)", + "py_def": "def show_colorbar(self, enable: bool) -> maix.err.Err" + }, + "get_channel": { + "type": "func", + "name": "get_channel", + "doc": { + "brief": "Get channel of camera", + "return": "channel number", + "maixpy": "maix.camera.Camera.get_channel", + "py_doc": "Get channel of camera\n\nReturns: channel number\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int get_channel()", + "py_def": "def get_channel(self) -> int" + }, + "set_resolution": { + "type": "func", + "name": "set_resolution", + "doc": { + "brief": "Set camera resolution", + "param": { + "width": "new width", + "height": "new height" + }, + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.camera.Camera.set_resolution", + "py_doc": "Set camera resolution\n\nArgs:\n - width: new width\n - height: new height\n\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [ + [ + "int", + "width", + null + ], + [ + "int", + "height", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err set_resolution(int width, int height)", + "py_def": "def set_resolution(self, width: int, height: int) -> maix.err.Err" + }, + "set_fps": { + "type": "func", + "name": "set_fps", + "doc": { + "brief": "Set camera fps", + "param": { + "fps": "new fps" + }, + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.camera.Camera.set_fps", + "py_doc": "Set camera fps\n\nArgs:\n - fps: new fps\n\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [ + [ + "double", + "fps", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err set_fps(double fps)", + "py_def": "def set_fps(self, fps: float) -> maix.err.Err" + }, + "exposure": { + "type": "func", + "name": "exposure", + "doc": { + "brief": "Set/Get camera exposure", + "attention": "This method will affect the isp and thus the image, so please be careful with it.", + "param": { + "value": "exposure time. unit: us\nIf value == -1, return exposure time.\nIf value != 0, set and return exposure time." + }, + "return": "camera exposure time", + "maixpy": "maix.camera.Camera.exposure", + "py_doc": "Set/Get camera exposure\n\nArgs:\n - value: exposure time. 
unit: us\nIf value == -1, return exposure time.\nIf value != 0, set and return exposure time.\n\n\nReturns: camera exposure time\n" + }, + "args": [ + [ + "int", + "value", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int exposure(int value = -1)", + "py_def": "def exposure(self, value: int = -1) -> int" + }, + "gain": { + "type": "func", + "name": "gain", + "doc": { + "brief": "Set/Get camera gain", + "attention": "This method will affect the isp and thus the image, so please be careful with it.", + "param": { + "value": "camera gain.\nIf value == -1, returns camera gain.\nIf value != 0, set and return camera gain." + }, + "return": "camera gain", + "maixpy": "maix.camera.Camera.gain", + "py_doc": "Set/Get camera gain\n\nArgs:\n - value: camera gain.\nIf value == -1, returns camera gain.\nIf value != 0, set and return camera gain.\n\n\nReturns: camera gain\n" + }, + "args": [ + [ + "int", + "value", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int gain(int value = -1)", + "py_def": "def gain(self, value: int = -1) -> int" + }, + "luma": { + "type": "func", + "name": "luma", + "doc": { + "brief": "Set/Get camera luma", + "attention": "This method will affect the isp and thus the image, so please be careful with it.", + "param": { + "value": "luma value, range is [0, 100]\nIf value == -1, returns luma value.\nIf value != 0, set and return luma value." + }, + "return": "returns luma value", + "maixpy": "maix.camera.Camera.luma", + "py_doc": "Set/Get camera luma\n\nArgs:\n - value: luma value, range is [0, 100]\nIf value == -1, returns luma value.\nIf value != 0, set and return luma value.\n\n\nReturns: returns luma value\n" + }, + "args": [ + [ + "int", + "value", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int luma(int value = -1)", + "py_def": "def luma(self, value: int = -1) -> int" + }, + "constrast": { + "type": "func", + "name": "constrast", + "doc": { + "brief": "Set/Get camera constrast", + "attention": "This method will affect the isp and thus the image, so please be careful with it.", + "param": { + "value": "constrast value, range is [0, 100]\nIf value == -1, returns constrast value.\nIf value != 0, set and return constrast value." + }, + "return": "returns constrast value", + "maixpy": "maix.camera.Camera.constrast", + "py_doc": "Set/Get camera constrast\n\nArgs:\n - value: constrast value, range is [0, 100]\nIf value == -1, returns constrast value.\nIf value != 0, set and return constrast value.\n\n\nReturns: returns constrast value\n" + }, + "args": [ + [ + "int", + "value", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int constrast(int value = -1)", + "py_def": "def constrast(self, value: int = -1) -> int" + }, + "saturation": { + "type": "func", + "name": "saturation", + "doc": { + "brief": "Set/Get camera saturation", + "attention": "This method will affect the isp and thus the image, so please be careful with it.", + "param": { + "value": "saturation value, range is [0, 100]\nIf value == -1, returns saturation value.\nIf value != 0, set and return saturation value." 
+ }, + "return": "returns saturation value", + "maixpy": "maix.camera.Camera.saturation", + "py_doc": "Set/Get camera saturation\n\nArgs:\n - value: saturation value, range is [0, 100]\nIf value == -1, returns saturation value.\nIf value != 0, set and return saturation value.\n\n\nReturns: returns saturation value\n" + }, + "args": [ + [ + "int", + "value", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int saturation(int value = -1)", + "py_def": "def saturation(self, value: int = -1) -> int" + }, + "awb_mode": { + "type": "func", + "name": "awb_mode", + "doc": { + "brief": "Set/Get white balance mode (deprecated interface)", + "attention": "This method will affect the isp and thus the image, so please be careful with it.\nThis interface may be deprecated in the future, and there may be incompatibilities in the definition of the parameters of the new interface", + "param": { + "value": "value = 0, means set white balance to auto mode, value = 1, means set white balance to manual mode, default is auto mode." + }, + "return": "returns awb mode", + "maixpy": "maix.camera.Camera.awb_mode", + "py_doc": "Set/Get white balance mode (deprecated interface)\n\nArgs:\n - value: value = 0, means set white balance to auto mode, value = 1, means set white balance to manual mode, default is auto mode.\n\n\nReturns: returns awb mode\n" + }, + "args": [ + [ + "int", + "value", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int awb_mode(int value = -1)", + "py_def": "def awb_mode(self, value: int = -1) -> int" + }, + "set_awb": { + "type": "func", + "name": "set_awb", + "doc": { + "brief": "Set/Get white balance mode", + "attention": "This method will affect the isp and thus the image, so please be careful with it.", + "param": { + "value": "value = 0, means set white balance to manual mode, value = 1, means set white balance to auto mode, default is auto mode." + }, + "return": "returns awb mode", + "maixpy": "maix.camera.Camera.set_awb", + "py_doc": "Set/Get white balance mode\n\nArgs:\n - value: value = 0, means set white balance to manual mode, value = 1, means set white balance to auto mode, default is auto mode.\n\n\nReturns: returns awb mode\n" + }, + "args": [ + [ + "int", + "mode", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int set_awb(int mode = -1)", + "py_def": "def set_awb(self, mode: int = -1) -> int" + }, + "exp_mode": { + "type": "func", + "name": "exp_mode", + "doc": { + "brief": "Set/Get exposure mode (deprecated interface)", + "attention": "This method will affect the isp and thus the image, so please be careful with it.\nThis interface may be deprecated in the future, and there may be incompatibilities in the definition of the parameters of the new interface", + "param": { + "value": "value = 0, means set exposure to auto mode, value = 1, means set exposure to manual mode, default is auto mode." 
+ }, + "return": "returns exposure mode", + "maixpy": "maix.camera.Camera.exp_mode", + "py_doc": "Set/Get exposure mode (deprecated interface)\n\nArgs:\n - value: value = 0, means set exposure to auto mode, value = 1, means set exposure to manual mode, default is auto mode.\n\n\nReturns: returns exposure mode\n" + }, + "args": [ + [ + "int", + "value", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int exp_mode(int value = -1)", + "py_def": "def exp_mode(self, value: int = -1) -> int" + }, + "set_windowing": { + "type": "func", + "name": "set_windowing", + "doc": { + "brief": "Set window size of camera", + "param": { + "roi": "Support two input formats, [x,y,w,h] set the coordinates and size of the window;\n[w,h] set the size of the window, when the window is centred." + }, + "return": "error code", + "maixpy": "maix.camera.Camera.set_windowing", + "py_doc": "Set window size of camera\n\nArgs:\n - roi: Support two input formats, [x,y,w,h] set the coordinates and size of the window;\n[w,h] set the size of the window, when the window is centred.\n\n\nReturns: error code\n" + }, + "args": [ + [ + "std::vector", + "roi", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err set_windowing(std::vector roi)", + "py_def": "def set_windowing(self, roi: list[int]) -> maix.err.Err" + } + }, + "def": "class Camera" + } + }, + "auto_add": false + }, + "display": { + "type": "module", + "doc": { + "brief": "maix.display module, control display device and show image on it", + "maixpy": "maix.display", + "py_doc": "maix.display module, control display device and show image on it" + }, + "members": { + "Display": { + "type": "class", + "name": "Display", + "doc": { + "brief": "Display class", + "maixpy": "maix.display.Display", + "py_doc": "Display class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Display", + "doc": { + "brief": "Construct a new Display object", + "param": { + "width": "display width, by default(value is -1) means auto detect,\nif width > max device supported width, will auto set to max device supported width", + "height": "display height, by default(value is -1) means auto detect,\nif height > max device supported height, will auto set to max device supported height", + "device": "display device name, you can get devices by list_devices method, by default(value is NULL(None in MaixPy)) means the first device", + "open": "If true, display will automatically call open() after creation. default is true." + }, + "maixpy": "maix.display.Display.__init__", + "maixcdk": "maix.display.Display.Display", + "py_doc": "Construct a new Display object\n\nArgs:\n - width: display width, by default(value is -1) means auto detect,\nif width > max device supported width, will auto set to max device supported width\n - height: display height, by default(value is -1) means auto detect,\nif height > max device supported height, will auto set to max device supported height\n - device: display device name, you can get devices by list_devices method, by default(value is NULL(None in MaixPy)) means the first device\n - open: If true, display will automatically call open() after creation. 
default is true.\n" + }, + "args": [ + [ + "int", + "width", + "-1" + ], + [ + "int", + "height", + "-1" + ], + [ + "image::Format", + "format", + "image::FMT_RGB888" + ], + [ + "const char *", + "device", + "nullptr" + ], + [ + "bool", + "open", + "true" + ] + ], + "ret_type": null, + "static": false, + "def": "Display(int width = -1, int height = -1, image::Format format = image::FMT_RGB888, const char *device = nullptr, bool open = true)", + "py_def": "def __init__(self, width: int = -1, height: int = -1, format: maix.image.Format = ..., device: str = None, open: bool = True) -> None" + }, + "width": { + "type": "func", + "name": "width", + "doc": { + "brief": "Get display width", + "return": "width", + "maixpy": "maix.display.Display.width", + "py_doc": "Get display width\n\nReturns: width\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int width()", + "py_def": "def width(self) -> int" + }, + "height": { + "type": "func", + "name": "height", + "doc": { + "brief": "Get display height", + "param": { + "ch": "channel to get, by default(value is 0) means the first channel" + }, + "return": "height", + "maixpy": "maix.display.Display.height", + "py_doc": "Get display height\n\nArgs:\n - ch: channel to get, by default(value is 0) means the first channel\n\n\nReturns: height\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int height()", + "py_def": "def height(self) -> int" + }, + "size": { + "type": "func", + "name": "size", + "doc": { + "brief": "Get display size", + "param": { + "ch": "channel to get, by default(value is 0) means the first channel" + }, + "return": "size A list type in MaixPy, [width, height]", + "maixpy": "maix.display.Display.size", + "py_doc": "Get display size\n\nArgs:\n - ch: channel to get, by default(value is 0) means the first channel\n\n\nReturns: size A list type in MaixPy, [width, height]\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector size()", + "py_def": "def size(self) -> list[int]" + }, + "format": { + "type": "func", + "name": "format", + "doc": { + "brief": "Get display format", + "return": "format", + "maixpy": "maix.display.Display.format", + "py_doc": "Get display format\n\nReturns: format\n" + }, + "args": [], + "ret_type": "image::Format", + "static": false, + "def": "image::Format format()", + "py_def": "def format(self) -> maix.image.Format" + }, + "open": { + "type": "func", + "name": "open", + "doc": { + "brief": "open display device, if already opened, will return err.ERR_NONE.", + "param": { + "width": "display width, default is -1, means auto, mostly means max width of display support", + "height": "display height, default is -1, means auto, mostly means max height of display support", + "format": "display output format, default is RGB888" + }, + "return": "error code", + "maixpy": "maix.display.Display.open", + "py_doc": "open display device, if already opened, will return err.ERR_NONE.\n\nArgs:\n - width: display width, default is -1, means auto, mostly means max width of display support\n - height: display height, default is -1, means auto, mostly means max height of display support\n - format: display output format, default is RGB888\n\n\nReturns: error code\n" + }, + "args": [ + [ + "int", + "width", + "-1" + ], + [ + "int", + "height", + "-1" + ], + [ + "image::Format", + "format", + "image::FMT_INVALID" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err open(int width = -1, int height = -1, image::Format format = 
image::FMT_INVALID)", + "py_def": "def open(self, width: int = -1, height: int = -1, format: maix.image.Format = ...) -> maix.err.Err" + }, + "close": { + "type": "func", + "name": "close", + "doc": { + "brief": "close display device", + "return": "error code", + "maixpy": "maix.display.Display.close", + "py_doc": "close display device\n\nReturns: error code\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err close()", + "py_def": "def close(self) -> maix.err.Err" + }, + "add_channel": { + "type": "func", + "name": "add_channel", + "doc": { + "brief": "Add a new channel and return a new Display object, you can use close() to close this channel.", + "attention": "If a new disp channel is created, it is recommended to set fit=image::FIT_COVER or fit=image::FIT_FILL when running show for the main channel,\notherwise the display of the new disp channel may be abnormal.", + "param": { + "width": "display width, default is -1, means auto, mostly means max width of display support. Maximum width must not exceed the main channel.", + "height": "display height, default is -1, means auto, mostly means max height of display support. Maximum height must not exceed the main channel.", + "format": "display output format, default is FMT_BGRA8888", + "open": "If true, display will automatically call open() after creation. default is true." + }, + "return": "new Display object", + "maixpy": "maix.display.Display.add_channel", + "py_doc": "Add a new channel and return a new Display object, you can use close() to close this channel.\n\nArgs:\n - width: display width, default is -1, means auto, mostly means max width of display support. Maximum width must not exceed the main channel.\n - height: display height, default is -1, means auto, mostly means max height of display support. Maximum height must not exceed the main channel.\n - format: display output format, default is FMT_BGRA8888\n - open: If true, display will automatically call open() after creation. 
default is true.\n\n\nReturns: new Display object\n" + }, + "args": [ + [ + "int", + "width", + "-1" + ], + [ + "int", + "height", + "-1" + ], + [ + "image::Format", + "format", + "image::FMT_BGRA8888" + ], + [ + "bool", + "open", + "true" + ] + ], + "ret_type": "display::Display*", + "static": false, + "def": "display::Display *add_channel(int width = -1, int height = -1, image::Format format = image::FMT_BGRA8888, bool open = true)", + "py_def": "def add_channel(self, width: int = -1, height: int = -1, format: maix.image.Format = ..., open: bool = True) -> Display" + }, + "is_opened": { + "type": "func", + "name": "is_opened", + "doc": { + "brief": "check display device is opened or not", + "return": "opened or not, bool type", + "maixpy": "maix.display.Display.is_opened", + "py_doc": "check display device is opened or not\n\nReturns: opened or not, bool type\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_opened()", + "py_def": "def is_opened(self) -> bool" + }, + "is_closed": { + "type": "func", + "name": "is_closed", + "doc": { + "brief": "check display device is closed or not", + "return": "closed or not, bool type", + "maixpy": "maix.display.Display.is_closed", + "py_doc": "check display device is closed or not\n\nReturns: closed or not, bool type\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_closed()", + "py_def": "def is_closed(self) -> bool" + }, + "show": { + "type": "func", + "name": "show", + "doc": { + "brief": "show image on display device, and will also send to MaixVision work station if connected.", + "param": { + "img": "image to show, image.Image object,\nif the size of image smaller than display size, will show in the center of display;\nif the size of image bigger than display size, will auto resize to display size and keep ratio, fill blank with black color.", + "fit": "image in screen fit mode, by default(value is image.FIT_CONTAIN), @see image.Fit for more details\ne.g. image.FIT_CONTAIN means resize image to fit display size and keep ratio, fill blank with black color." + }, + "return": "error code", + "maixpy": "maix.display.Display.show", + "py_doc": "show image on display device, and will also send to MaixVision work station if connected.\n\nArgs:\n - img: image to show, image.Image object,\nif the size of image smaller than display size, will show in the center of display;\nif the size of image bigger than display size, will auto resize to display size and keep ratio, fill blank with black color.\n - fit: image in screen fit mode, by default(value is image.FIT_CONTAIN), @see image.Fit for more details\ne.g. image.FIT_CONTAIN means resize image to fit display size and keep ratio, fill blank with black color.\n\n\nReturns: error code\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "image::Fit", + "fit", + "image::FIT_CONTAIN" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err show(image::Image &img, image::Fit fit = image::FIT_CONTAIN)", + "py_def": "def show(self, img: maix.image.Image, fit: maix.image.Fit = ...) 
-> maix.err.Err" + }, + "device": { + "type": "func", + "name": "device", + "doc": { + "brief": "Get display device path", + "return": "display device path", + "maixpy": "maix.display.Display.device", + "py_doc": "Get display device path\n\nReturns: display device path\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string device()", + "py_def": "def device(self) -> str" + }, + "set_backlight": { + "type": "func", + "name": "set_backlight", + "doc": { + "brief": "Set display backlight", + "param": { + "value": "backlight value, float type, range is [0, 100]" + }, + "maixpy": "maix.display.Display.set_backlight", + "py_doc": "Set display backlight\n\nArgs:\n - value: backlight value, float type, range is [0, 100]\n" + }, + "args": [ + [ + "float", + "value", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void set_backlight(float value)", + "py_def": "def set_backlight(self, value: float) -> None" + }, + "get_backlight": { + "type": "func", + "name": "get_backlight", + "doc": { + "brief": "Get display backlight", + "return": "value backlight value, float type, range is [0, 100]", + "maixpy": "maix.display.Display.get_backlight", + "py_doc": "Get display backlight\n\nReturns: value backlight value, float type, range is [0, 100]\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float get_backlight()", + "py_def": "def get_backlight(self) -> float" + }, + "set_hmirror": { + "type": "func", + "name": "set_hmirror", + "doc": { + "brief": "Set display mirror", + "param": { + "en": "enable/disable mirror" + }, + "maixpy": "maix.display.Display.set_hmirror", + "py_doc": "Set display mirror\n\nArgs:\n - en: enable/disable mirror\n" + }, + "args": [ + [ + "bool", + "en", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err set_hmirror(bool en)", + "py_def": "def set_hmirror(self, en: bool) -> maix.err.Err" + }, + "set_vflip": { + "type": "func", + "name": "set_vflip", + "doc": { + "brief": "Set display flip", + "param": { + "en": "enable/disable flip" + }, + "maixpy": "maix.display.Display.set_vflip", + "py_doc": "Set display flip\n\nArgs:\n - en: enable/disable flip\n" + }, + "args": [ + [ + "bool", + "en", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err set_vflip(bool en)", + "py_def": "def set_vflip(self, en: bool) -> maix.err.Err" + } + }, + "def": "class Display" + }, + "send_to_maixvision": { + "type": "func", + "name": "send_to_maixvision", + "doc": { + "brief": "Send image to MaixVision work station if connected.\\nIf you want to debug your program an don't want to initialize display, use this method.", + "param": { + "img": "image to send, image.Image object" + }, + "maixpy": "maix.display.send_to_maixvision", + "py_doc": "Send image to MaixVision work station if connected.\nIf you want to debug your program an don't want to initialize display, use this method.\n\nArgs:\n - img: image to send, image.Image object\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void send_to_maixvision(image::Image &img)", + "py_def": "def send_to_maixvision(img: maix.image.Image) -> None" + } + }, + "auto_add": false + }, + "ext_dev": { + "type": "module", + "doc": { + "brief": "maix.ext_dev module" + }, + "members": { + "imu": { + "type": "module", + "doc": { + "brief": "maix.ext_dev.imu module" + }, + "members": { + "Mode": { + "type": "enum", + "name": "class", + "doc": { + "brief": "imu mode", + 
"maixpy": "maix.ext_dev.imu.Mode", + "py_doc": "imu mode" + }, + "values": [ + [ + "ACC_ONLY", + "0", + "" + ], + [ + "GYRO_ONLY", + "", + "" + ], + [ + "DUAL", + "", + "" + ] + ], + "def": "enum class Mode {\n ACC_ONLY = 0,\n GYRO_ONLY,\n DUAL\n}" + }, + "AccScale": { + "type": "enum", + "name": "class", + "doc": { + "brief": "imu acc scale", + "maixpy": "maix.ext_dev.imu.AccScale", + "py_doc": "imu acc scale" + }, + "values": [ + [ + "ACC_SCALE_2G", + "0", + "" + ], + [ + "ACC_SCALE_4G", + "", + "" + ], + [ + "ACC_SCALE_8G", + "", + "" + ], + [ + "ACC_SCALE_16G", + "", + "" + ] + ], + "def": "enum class AccScale {\n ACC_SCALE_2G = 0,\n ACC_SCALE_4G,\n ACC_SCALE_8G,\n ACC_SCALE_16G\n}" + }, + "AccOdr": { + "type": "enum", + "name": "class", + "doc": { + "brief": "imu acc output data rate", + "maixpy": "maix.ext_dev.imu.AccOdr", + "py_doc": "imu acc output data rate" + }, + "values": [ + [ + "ACC_ODR_8000", + "", + "Accelerometer ODR set to 8000 Hz." + ], + [ + "ACC_ODR_4000", + "", + "Accelerometer ODR set to 4000 Hz." + ], + [ + "ACC_ODR_2000", + "", + "Accelerometer ODR set to 2000 Hz." + ], + [ + "ACC_ODR_1000", + "", + "Accelerometer ODR set to 1000 Hz." + ], + [ + "ACC_ODR_500", + "", + "Accelerometer ODR set to 500 Hz." + ], + [ + "ACC_ODR_250", + "", + "Accelerometer ODR set to 250 Hz." + ], + [ + "ACC_ODR_125", + "", + "Accelerometer ODR set to 125 Hz." + ], + [ + "ACC_ODR_62_5", + "", + "Accelerometer ODR set to 62.5 Hz." + ], + [ + "ACC_ODR_31_25", + "", + "Accelerometer ODR set to 31.25 Hz." + ], + [ + "ACC_ODR_128", + "12", + "Accelerometer ODR set to 128 Hz." + ], + [ + "ACC_ODR_21", + "", + "Accelerometer ODR set to 21 Hz." + ], + [ + "ACC_ODR_11", + "", + "Accelerometer ODR set to 11 Hz." + ], + [ + "ACC_ODR_3", + "", + "Accelerometer ODR set to 3 Hz." + ] + ], + "def": "enum class AccOdr {\n ACC_ODR_8000, // Accelerometer ODR set to 8000 Hz.\n ACC_ODR_4000, // Accelerometer ODR set to 4000 Hz.\n ACC_ODR_2000, // Accelerometer ODR set to 2000 Hz.\n ACC_ODR_1000, // Accelerometer ODR set to 1000 Hz.\n ACC_ODR_500, // Accelerometer ODR set to 500 Hz.\n ACC_ODR_250, // Accelerometer ODR set to 250 Hz.\n ACC_ODR_125, // Accelerometer ODR set to 125 Hz.\n ACC_ODR_62_5, // Accelerometer ODR set to 62.5 Hz.\n ACC_ODR_31_25, // Accelerometer ODR set to 31.25 Hz.\n ACC_ODR_128 = 12, // Accelerometer ODR set to 128 Hz.\n ACC_ODR_21, // Accelerometer ODR set to 21 Hz.\n ACC_ODR_11, // Accelerometer ODR set to 11 Hz.\n ACC_ODR_3, // Accelerometer ODR set to 3 Hz.\n}" + }, + "GyroScale": { + "type": "enum", + "name": "class", + "doc": { + "brief": "imu gyro scale", + "maixpy": "maix.ext_dev.imu.GyroScale", + "py_doc": "imu gyro scale" + }, + "values": [ + [ + "GYRO_SCALE_16DPS", + "0", + "Gyroscope scale set to \u00b116 degrees per second." + ], + [ + "GYRO_SCALE_32DPS", + "", + "Gyroscope scale set to \u00b132 degrees per second." + ], + [ + "GYRO_SCALE_64DPS", + "", + "Gyroscope scale set to \u00b164 degrees per second." + ], + [ + "GYRO_SCALE_128DPS", + "", + "Gyroscope scale set to \u00b1128 degrees per second." + ], + [ + "GYRO_SCALE_256DPS", + "", + "Gyroscope scale set to \u00b1256 degrees per second." + ], + [ + "GYRO_SCALE_512DPS", + "", + "Gyroscope scale set to \u00b1512 degrees per second." + ], + [ + "GYRO_SCALE_1024DPS", + "", + "Gyroscope scale set to \u00b11024 degrees per second." + ], + [ + "GYRO_SCALE_2048DPS", + "", + "Gyroscope scale set to \u00b12048 degrees per second." 
+ ] + ], + "def": "enum class GyroScale {\n GYRO_SCALE_16DPS = 0, // Gyroscope scale set to \u00b116 degrees per second.\n GYRO_SCALE_32DPS, // Gyroscope scale set to \u00b132 degrees per second.\n GYRO_SCALE_64DPS, // Gyroscope scale set to \u00b164 degrees per second.\n GYRO_SCALE_128DPS, // Gyroscope scale set to \u00b1128 degrees per second.\n GYRO_SCALE_256DPS, // Gyroscope scale set to \u00b1256 degrees per second.\n GYRO_SCALE_512DPS, // Gyroscope scale set to \u00b1512 degrees per second.\n GYRO_SCALE_1024DPS, // Gyroscope scale set to \u00b11024 degrees per second.\n GYRO_SCALE_2048DPS, // Gyroscope scale set to \u00b12048 degrees per second.\n}" + }, + "GyroOdr": { + "type": "enum", + "name": "class", + "doc": { + "brief": "imu gyro output data rate", + "maixpy": "maix.ext_dev.imu.GyroOdr", + "py_doc": "imu gyro output data rate" + }, + "values": [ + [ + "GYRO_ODR_8000", + "", + "Gyroscope ODR set to 8000 Hz." + ], + [ + "GYRO_ODR_4000", + "", + "Gyroscope ODR set to 4000 Hz." + ], + [ + "GYRO_ODR_2000", + "", + "Gyroscope ODR set to 2000 Hz." + ], + [ + "GYRO_ODR_1000", + "", + "Gyroscope ODR set to 1000 Hz." + ], + [ + "GYRO_ODR_500", + "", + "Gyroscope ODR set to 500 Hz." + ], + [ + "GYRO_ODR_250", + "", + "Gyroscope ODR set to 250 Hz." + ], + [ + "GYRO_ODR_125", + "", + "Gyroscope ODR set to 125 Hz." + ], + [ + "GYRO_ODR_62_5", + "", + "Gyroscope ODR set to 62.5 Hz." + ], + [ + "GYRO_ODR_31_25", + "", + "Gyroscope ODR set to 31.25 Hz." + ] + ], + "def": "enum class GyroOdr {\n GYRO_ODR_8000, // Gyroscope ODR set to 8000 Hz.\n GYRO_ODR_4000, // Gyroscope ODR set to 4000 Hz.\n GYRO_ODR_2000, // Gyroscope ODR set to 2000 Hz.\n GYRO_ODR_1000, // Gyroscope ODR set to 1000 Hz.\n GYRO_ODR_500, // Gyroscope ODR set to 500 Hz.\n GYRO_ODR_250, // Gyroscope ODR set to 250 Hz.\n GYRO_ODR_125, // Gyroscope ODR set to 125 Hz.\n GYRO_ODR_62_5, // Gyroscope ODR set to 62.5 Hz.\n GYRO_ODR_31_25, // Gyroscope ODR set to 31.25 Hz.\n}" + }, + "IMU": { + "type": "class", + "name": "IMU", + "doc": { + "brief": "QMI8656 driver class", + "maixpy": "maix.ext_dev.imu.IMU", + "py_doc": "QMI8656 driver class" + }, + "members": { + "__init__": { + "type": "func", + "name": "IMU", + "doc": { + "brief": "Construct a new IMU object, will open IMU", + "param": { + "driver": "driver name, only support \"qmi8656\"", + "i2c_bus": "i2c bus number. Automatically selects the on-board imu when -1 is passed in.", + "addr": "IMU i2c addr.", + "freq": "IMU freq", + "mode": "IMU Mode: ACC_ONLY/GYRO_ONLY/DUAL", + "acc_scale": "acc scale, see @imu::AccScale", + "acc_odr": "acc output data rate, see @imu::AccOdr", + "gyro_scale": "gyro scale, see @imu::GyroScale", + "gyro_odr": "gyro output data rate, see @imu::GyroOdr", + "block": "block or non-block, defalut is true" + }, + "maixpy": "maix.ext_dev.imu.IMU.__init__", + "py_doc": "Construct a new IMU object, will open IMU\n\nArgs:\n - driver: driver name, only support \"qmi8656\"\n - i2c_bus: i2c bus number. 
Automatically selects the on-board imu when -1 is passed in.\n - addr: IMU i2c addr.\n - freq: IMU freq\n - mode: IMU Mode: ACC_ONLY/GYRO_ONLY/DUAL\n - acc_scale: acc scale, see @imu::AccScale\n - acc_odr: acc output data rate, see @imu::AccOdr\n - gyro_scale: gyro scale, see @imu::GyroScale\n - gyro_odr: gyro output data rate, see @imu::GyroOdr\n - block: block or non-block, defalut is true\n" + }, + "args": [ + [ + "std::string", + "driver", + null + ], + [ + "int", + "i2c_bus", + "-1" + ], + [ + "int", + "addr", + "0x6B" + ], + [ + "int", + "freq", + "400000" + ], + [ + "maix::ext_dev::imu::Mode", + "mode", + "maix::ext_dev::imu::Mode::DUAL" + ], + [ + "maix::ext_dev::imu::AccScale", + "acc_scale", + "maix::ext_dev::imu::AccScale::ACC_SCALE_2G" + ], + [ + "maix::ext_dev::imu::AccOdr", + "acc_odr", + "maix::ext_dev::imu::AccOdr::ACC_ODR_8000" + ], + [ + "maix::ext_dev::imu::GyroScale", + "gyro_scale", + "maix::ext_dev::imu::GyroScale::GYRO_SCALE_16DPS" + ], + [ + "maix::ext_dev::imu::GyroOdr", + "gyro_odr", + "maix::ext_dev::imu::GyroOdr::GYRO_ODR_8000" + ], + [ + "bool", + "block", + "true" + ] + ], + "ret_type": null, + "static": false, + "def": "IMU(std::string driver, int i2c_bus=-1, int addr=0x6B, int freq=400000,\n maix::ext_dev::imu::Mode mode=maix::ext_dev::imu::Mode::DUAL,\n maix::ext_dev::imu::AccScale acc_scale=maix::ext_dev::imu::AccScale::ACC_SCALE_2G,\n maix::ext_dev::imu::AccOdr acc_odr=maix::ext_dev::imu::AccOdr::ACC_ODR_8000,\n maix::ext_dev::imu::GyroScale gyro_scale=maix::ext_dev::imu::GyroScale::GYRO_SCALE_16DPS,\n maix::ext_dev::imu::GyroOdr gyro_odr=maix::ext_dev::imu::GyroOdr::GYRO_ODR_8000,\n bool block=true)", + "py_def": "def __init__(self, driver: str, i2c_bus: int = -1, addr: int = 107, freq: int = 400000, mode: Mode = ..., acc_scale: AccScale = ..., acc_odr: AccOdr = ..., gyro_scale: GyroScale = ..., gyro_odr: GyroOdr = ..., block: bool = True) -> None" + }, + "read": { + "type": "func", + "name": "read", + "doc": { + "brief": "Read data from IMU.", + "return": "list type. If only one of the outputs is initialized, only [x,y,z] of that output will be returned.\nIf all outputs are initialized, [acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z] is returned.", + "maixpy": "maix.ext_dev.imu.IMU.read", + "py_doc": "Read data from IMU.\n\nReturns: list type. 
If only one of the outputs is initialized, only [x,y,z] of that output will be returned.\nIf all outputs are initialized, [acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z] is returned.\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector read()", + "py_def": "def read(self) -> list[float]" + }, + "calculate_calibration": { + "type": "func", + "name": "calculate_calibration", + "doc": { + "brief": "Caculate calibration, save calibration data to /maixapp/shart/imu_calibration", + "param": { + "time_ms": "caculate max time, unit:ms" + }, + "return": "err::Err", + "maixpy": "maix.ext_dev.imu.IMU.calculate_calibration", + "py_doc": "Caculate calibration, save calibration data to /maixapp/shart/imu_calibration\n\nArgs:\n - time_ms: caculate max time, unit:ms\n\n\nReturns: err::Err\n" + }, + "args": [ + [ + "uint64_t", + "time_ms", + "30 * 1000" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err calculate_calibration(uint64_t time_ms = 30 * 1000)", + "py_def": "def calculate_calibration(self, time_ms: int = 30000) -> maix.err.Err" + }, + "get_calibration": { + "type": "func", + "name": "get_calibration", + "doc": { + "brief": "Get calibration data", + "return": "return an array, format is [acc_x_bias, acc_y_bias, acc_z_bias, gyro_x_bias, gyro_y_bias, gyro_z_bias]\nIf the calibration file cannot be found, an empty array will be returned.", + "maixpy": "maix.ext_dev.imu.IMU.get_calibration", + "py_doc": "Get calibration data\n\nReturns: return an array, format is [acc_x_bias, acc_y_bias, acc_z_bias, gyro_x_bias, gyro_y_bias, gyro_z_bias]\nIf the calibration file cannot be found, an empty array will be returned.\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector get_calibration()", + "py_def": "def get_calibration(self) -> list[float]" + } + }, + "def": "class IMU" + }, + "Gcsv": { + "type": "class", + "name": "Gcsv", + "doc": { + "brief": "Gcsv class", + "maixpy": "maix.ext_dev.imu.Gcsv", + "py_doc": "Gcsv class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Gcsv", + "doc": { + "brief": "Construct a new IMU object", + "maixpy": "maix.ext_dev.imu.Gcsv.__init__", + "py_doc": "Construct a new IMU object" + }, + "args": [], + "ret_type": null, + "static": false, + "def": "Gcsv()", + "py_def": "def __init__(self) -> None" + }, + "open": { + "type": "func", + "name": "open", + "doc": { + "brief": "Open a file", + "param": { + "path": "the path where data will be saved", + "tscale": "time scale, default is 0.001", + "gscale": "gyroscope scale factor, default is 1, unit:g", + "ascale": "accelerometer scale factor, default is 1, unit:radians/second", + "mscale": "magnetometer scale factor, default is 1(unused)", + "version": "version number, default is \"1.3\"", + "id": "identifier for the IMU, default is \"imu\"", + "orientation": "sensor orientation, default is \"YxZ\"" + }, + "return": "error code", + "maixpy": "maix.ext_dev.imu.Gcsv.open", + "py_doc": "Open a file\n\nArgs:\n - path: the path where data will be saved\n - tscale: time scale, default is 0.001\n - gscale: gyroscope scale factor, default is 1, unit:g\n - ascale: accelerometer scale factor, default is 1, unit:radians/second\n - mscale: magnetometer scale factor, default is 1(unused)\n - version: version number, default is \"1.3\"\n - id: identifier for the IMU, default is \"imu\"\n - orientation: sensor orientation, default is \"YxZ\"\n\n\nReturns: error code\n" + }, + "args": [ + [ + "std::string", + "path", + null + ], + [ + 
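A minimal sketch of the IMU class above, assuming the usual maix.ext_dev import path. The driver name "qmi8656", the on-board selection via i2c_bus=-1 and the read()/calibration calls all come from the documentation above; the 10-sample loop is only illustrative.

```python
from maix.ext_dev import imu

sensor = imu.IMU("qmi8656")        # i2c_bus=-1 -> on-board IMU, DUAL mode by default
for _ in range(10):
    data = sensor.read()           # [acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z] in DUAL mode
    print(data)

# Optional one-off bias estimation (keep the device still, up to 30 s by default):
# sensor.calculate_calibration()
# print(sensor.get_calibration())  # empty list if no calibration file was found
```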
"double", + "tscale", + "0.001" + ], + [ + "double", + "gscale", + "1" + ], + [ + "double", + "ascale", + "1" + ], + [ + "double", + "mscale", + "1" + ], + [ + "std::string", + "version", + "\"1.3\"" + ], + [ + "std::string", + "id", + "\"imu\"" + ], + [ + "std::string", + "orientation", + "\"YxZ\"" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err open(std::string path, double tscale = 0.001, double gscale = 1, double ascale = 1, double mscale = 1, std::string version = \"1.3\", std::string id = \"imu\", std::string orientation = \"YxZ\")", + "py_def": "def open(self, path: str, tscale: float = 0.001, gscale: float = 1, ascale: float = 1, mscale: float = 1, version: str = '1.3', id: str = 'imu', orientation: str = 'YxZ') -> maix.err.Err" + }, + "close": { + "type": "func", + "name": "close", + "doc": { + "brief": "Close file", + "return": "error code", + "maixpy": "maix.ext_dev.imu.Gcsv.close", + "py_doc": "Close file\n\nReturns: error code\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err close()", + "py_def": "def close(self) -> maix.err.Err" + }, + "is_opened": { + "type": "func", + "name": "is_opened", + "doc": { + "brief": "Check if the object is already open", + "return": "true, opened; false, not opened", + "maixpy": "maix.ext_dev.imu.Gcsv.is_opened", + "py_doc": "Check if the object is already open\n\nReturns: true, opened; false, not opened\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_opened()", + "py_def": "def is_opened(self) -> bool" + }, + "write": { + "type": "func", + "name": "write", + "doc": { + "brief": "Write imu data to gcsv file", + "param": { + "t": "Timestamp of the current data. The actual value is equal to t * tscale. unit:s", + "gyro": "Gyroscope data must be an array consisting of x, y, and z-axis data. The actual value is equal to gyro * gscale. unit:g", + "acc": "Acceleration data must be an array consisting of x, y, and z-axis data. The actual value is equal to acc * ascale.unit:radians/second", + "mag": "Magnetic data must be an array consisting of x, y, and z-axis data. Currently not supported." + }, + "maixpy": "maix.ext_dev.imu.Gcsv.write", + "py_doc": "Write imu data to gcsv file\n\nArgs:\n - t: Timestamp of the current data. The actual value is equal to t * tscale. unit:s\n - gyro: Gyroscope data must be an array consisting of x, y, and z-axis data. The actual value is equal to gyro * gscale. unit:g\n - acc: Acceleration data must be an array consisting of x, y, and z-axis data. The actual value is equal to acc * ascale.unit:radians/second\n - mag: Magnetic data must be an array consisting of x, y, and z-axis data. 
Currently not supported.\n" + }, + "args": [ + [ + "double", + "timestamp", + null + ], + [ + "std::vector", + "gyro", + null + ], + [ + "std::vector", + "acc", + null + ], + [ + "std::vector", + "mag", + "std::vector()" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err write(double timestamp, std::vector gyro, std::vector acc, std::vector mag = std::vector())", + "py_def": "def write(self, timestamp: float, gyro: list[float], acc: list[float], mag: list[float] = []) -> maix.err.Err" + } + }, + "def": "class Gcsv" + } + }, + "auto_add": true + }, + "qmi8658": { + "type": "module", + "doc": { + "brief": "maix.ext_dev.qmi8658 module" + }, + "members": { + "QMI8658": { + "type": "class", + "name": "QMI8658", + "doc": { + "brief": "QMI8656 driver class", + "maixpy": "maix.ext_dev.qmi8658.QMI8658", + "py_doc": "QMI8656 driver class" + }, + "members": { + "__init__": { + "type": "func", + "name": "QMI8658", + "doc": { + "brief": "Construct a new QMI8658 object, will open QMI8658", + "param": { + "i2c_bus": "i2c bus number. Automatically selects the on-board qmi8658 when -1 is passed in.", + "addr": "QMI8658 i2c addr.", + "freq": "QMI8658 freq", + "mode": "QMI8658 Mode: ACC_ONLY/GYRO_ONLY/DUAL", + "acc_scale": "acc scale, see @qmi8658::AccScale", + "acc_odr": "acc output data rate, see @qmi8658::AccOdr", + "gyro_scale": "gyro scale, see @qmi8658::GyroScale", + "gyro_odr": "gyro output data rate, see @qmi8658::GyroOdr", + "block": "block or non-block, defalut is true" + }, + "maixpy": "maix.ext_dev.qmi8658.QMI8658.__init__", + "py_doc": "Construct a new QMI8658 object, will open QMI8658\n\nArgs:\n - i2c_bus: i2c bus number. Automatically selects the on-board qmi8658 when -1 is passed in.\n - addr: QMI8658 i2c addr.\n - freq: QMI8658 freq\n - mode: QMI8658 Mode: ACC_ONLY/GYRO_ONLY/DUAL\n - acc_scale: acc scale, see @qmi8658::AccScale\n - acc_odr: acc output data rate, see @qmi8658::AccOdr\n - gyro_scale: gyro scale, see @qmi8658::GyroScale\n - gyro_odr: gyro output data rate, see @qmi8658::GyroOdr\n - block: block or non-block, defalut is true\n" + }, + "args": [ + [ + "int", + "i2c_bus", + "-1" + ], + [ + "int", + "addr", + "0x6B" + ], + [ + "int", + "freq", + "400000" + ], + [ + "maix::ext_dev::imu::Mode", + "mode", + "maix::ext_dev::imu::Mode::DUAL" + ], + [ + "maix::ext_dev::imu::AccScale", + "acc_scale", + "maix::ext_dev::imu::AccScale::ACC_SCALE_2G" + ], + [ + "maix::ext_dev::imu::AccOdr", + "acc_odr", + "maix::ext_dev::imu::AccOdr::ACC_ODR_8000" + ], + [ + "maix::ext_dev::imu::GyroScale", + "gyro_scale", + "maix::ext_dev::imu::GyroScale::GYRO_SCALE_16DPS" + ], + [ + "maix::ext_dev::imu::GyroOdr", + "gyro_odr", + "maix::ext_dev::imu::GyroOdr::GYRO_ODR_8000" + ], + [ + "bool", + "block", + "true" + ] + ], + "ret_type": null, + "static": false, + "def": "QMI8658(int i2c_bus=-1, int addr=0x6B, int freq=400000,\n maix::ext_dev::imu::Mode mode=maix::ext_dev::imu::Mode::DUAL,\n maix::ext_dev::imu::AccScale acc_scale=maix::ext_dev::imu::AccScale::ACC_SCALE_2G,\n maix::ext_dev::imu::AccOdr acc_odr=maix::ext_dev::imu::AccOdr::ACC_ODR_8000,\n maix::ext_dev::imu::GyroScale gyro_scale=maix::ext_dev::imu::GyroScale::GYRO_SCALE_16DPS,\n maix::ext_dev::imu::GyroOdr gyro_odr=maix::ext_dev::imu::GyroOdr::GYRO_ODR_8000,\n bool block=true)", + "py_def": "def __init__(self, i2c_bus: int = -1, addr: int = 107, freq: int = 400000, mode: maix.ext_dev.imu.Mode = ..., acc_scale: maix.ext_dev.imu.AccScale = ..., acc_odr: maix.ext_dev.imu.AccOdr = ..., gyro_scale: maix.ext_dev.imu.GyroScale = 
..., gyro_odr: maix.ext_dev.imu.GyroOdr = ..., block: bool = True) -> None" + }, + "read": { + "type": "func", + "name": "read", + "doc": { + "brief": "Read data from QMI8658.", + "return": "list type. If only one of the outputs is initialized, only [x,y,z] of that output will be returned.\nIf all outputs are initialized, [acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z] is returned.", + "maixpy": "maix.ext_dev.qmi8658.QMI8658.read", + "py_doc": "Read data from QMI8658.\n\nReturns: list type. If only one of the outputs is initialized, only [x,y,z] of that output will be returned.\nIf all outputs are initialized, [acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z] is returned.\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector read()", + "py_def": "def read(self) -> list[float]" + } + }, + "def": "class QMI8658" + } + }, + "auto_add": true + }, + "tmc2209": { + "type": "module", + "doc": { + "brief": "maix.ext_dev.tmc2209 module" + }, + "members": { + "slide_scan": { + "type": "func", + "name": "slide_scan", + "doc": { + "brief": "Scan and initialize the slide with the given parameters", + "param": { + "port": "UART port, string type.", + "addr": "TMC2209 UART address, range 0x00~0x03, integer type.", + "baud": "UART baud rate, integer type.", + "step_angle": "Motor step angle, float type.", + "micro_step": "Motor micro step, options: 1/2/4/8/16/32/64/128/256, integer type.", + "round_mm": "Round distance in mm, float type.", + "speed_mm_s": "Speed of the slide in mm/s, float type.", + "dir": "Direction of movement, boolean type. Default is true.", + "use_internal_sense_resistors": "Enable internal sense resistors if true, disable if false, boolean type. Default is true.", + "run_current_per": "Motor run current percentage, range 0~100(%), integer type. Default is 100%.", + "hold_current_per": "Motor hold current percentage, range 0~100(%), integer type. Default is 100%.", + "conf_save_path": "Configuration save path, string type. Default is \"./slide_conf.bin\".", + "force_update": "Force update the configuration if true, boolean type. Default is true." + }, + "maixpy": "maix.ext_dev.tmc2209.slide_scan", + "py_doc": "Scan and initialize the slide with the given parameters\n\nArgs:\n - port: UART port, string type.\n - addr: TMC2209 UART address, range 0x00~0x03, integer type.\n - baud: UART baud rate, integer type.\n - step_angle: Motor step angle, float type.\n - micro_step: Motor micro step, options: 1/2/4/8/16/32/64/128/256, integer type.\n - round_mm: Round distance in mm, float type.\n - speed_mm_s: Speed of the slide in mm/s, float type.\n - dir: Direction of movement, boolean type. Default is true.\n - use_internal_sense_resistors: Enable internal sense resistors if true, disable if false, boolean type. Default is true.\n - run_current_per: Motor run current percentage, range 0~100(%), integer type. Default is 100%.\n - hold_current_per: Motor hold current percentage, range 0~100(%), integer type. Default is 100%.\n - conf_save_path: Configuration save path, string type. Default is \"./slide_conf.bin\".\n - force_update: Force update the configuration if true, boolean type. 
Default is true.\n" + }, + "args": [ + [ + "const char*", + "port", + null + ], + [ + "uint8_t", + "addr", + null + ], + [ + "long", + "baud", + null + ], + [ + "/* Uart init param */ float", + "step_angle", + null + ], + [ + "uint16_t", + "micro_step", + null + ], + [ + "float", + "round_mm", + null + ], + [ + "/* Motor init param */ float", + "speed_mm_s", + null + ], + [ + "bool", + "dir", + "true" + ], + [ + "bool", + "use_internal_sense_resistors", + "true" + ], + [ + "uint8_t", + "run_current_per", + "100" + ], + [ + "uint8_t", + "hold_current_per", + "100" + ], + [ + "const std::string", + "conf_save_path", + "\"./slide_conf.bin\"" + ], + [ + "bool", + "force_update", + "true /* Driver init param */" + ] + ], + "ret_type": "void", + "static": false, + "def": "void slide_scan(const char* port, uint8_t addr, long baud, /* Uart init param */\n float step_angle, uint16_t micro_step, float round_mm, /* Motor init param */\n float speed_mm_s, bool dir=true, bool use_internal_sense_resistors=true, uint8_t run_current_per=100,\n uint8_t hold_current_per=100, const std::string conf_save_path=\"./slide_conf.bin\",\n bool force_update=true /* Driver init param */)", + "py_def": "def slide_scan(port: str, addr: int, baud: int, step_angle: float, micro_step: int, round_mm: float, speed_mm_s: float, dir: bool = True, use_internal_sense_resistors: bool = True, run_current_per: int = 100, hold_current_per: int = 100, conf_save_path: str = './slide_conf.bin', force_update: bool = True) -> None" + }, + "slide_test": { + "type": "func", + "name": "slide_test", + "doc": { + "brief": "Test the slide with the given parameters\\nThis function tests the slide by moving it in the specified direction until a stall condition is detected, as defined in the configuration file.", + "param": { + "port": "UART port, string type.", + "addr": "TMC2209 UART address, range 0x00~0x03, integer type.", + "baud": "UART baud rate, integer type.", + "step_angle": "Motor step angle, float type.", + "micro_step": "Motor micro step, options: 1/2/4/8/16/32/64/128/256, integer type.", + "round_mm": "Round distance in mm, float type.", + "speed_mm_s": "Speed of the slide in mm/s, float type.", + "dir": "Direction of movement, boolean type. Default is true.", + "use_internal_sense_resistors": "Enable internal sense resistors if true, disable if false, boolean type. Default is true.", + "run_current_per": "Motor run current percentage, range 0~100(%), integer type. Default is 100%.", + "hold_current_per": "Motor hold current percentage, range 0~100(%), integer type. Default is 100%.", + "conf_save_path": "Configuration save path, string type. Default is \"./slide_conf.bin\"." + }, + "maixpy": "maix.ext_dev.tmc2209.slide_test", + "py_doc": "Test the slide with the given parameters\nThis function tests the slide by moving it in the specified direction until a stall condition is detected, as defined in the configuration file.\n\nArgs:\n - port: UART port, string type.\n - addr: TMC2209 UART address, range 0x00~0x03, integer type.\n - baud: UART baud rate, integer type.\n - step_angle: Motor step angle, float type.\n - micro_step: Motor micro step, options: 1/2/4/8/16/32/64/128/256, integer type.\n - round_mm: Round distance in mm, float type.\n - speed_mm_s: Speed of the slide in mm/s, float type.\n - dir: Direction of movement, boolean type. Default is true.\n - use_internal_sense_resistors: Enable internal sense resistors if true, disable if false, boolean type. 
Default is true.\n - run_current_per: Motor run current percentage, range 0~100(%), integer type. Default is 100%.\n - hold_current_per: Motor hold current percentage, range 0~100(%), integer type. Default is 100%.\n - conf_save_path: Configuration save path, string type. Default is \"./slide_conf.bin\".\n" + }, + "args": [ + [ + "const char*", + "port", + null + ], + [ + "uint8_t", + "addr", + null + ], + [ + "long", + "baud", + null + ], + [ + "/* Uart init param */ float", + "step_angle", + null + ], + [ + "uint16_t", + "micro_step", + null + ], + [ + "float", + "round_mm", + null + ], + [ + "/* Motor init param */ float", + "speed_mm_s", + null + ], + [ + "bool", + "dir", + "true" + ], + [ + "bool", + "use_internal_sense_resistors", + "true" + ], + [ + "uint8_t", + "run_current_per", + "100" + ], + [ + "uint8_t", + "hold_current_per", + "100" + ], + [ + "const std::string", + "conf_save_path", + "\"./slide_conf.bin\"/* Driver init param */" + ] + ], + "ret_type": "void", + "static": false, + "def": "void slide_test(const char* port, uint8_t addr, long baud, /* Uart init param */\n float step_angle, uint16_t micro_step, float round_mm, /* Motor init param */\n float speed_mm_s, bool dir=true, bool use_internal_sense_resistors=true, uint8_t run_current_per=100,\n uint8_t hold_current_per=100, const std::string conf_save_path=\"./slide_conf.bin\"/* Driver init param */)", + "py_def": "def slide_test(port: str, addr: int, baud: int, step_angle: float, micro_step: int, round_mm: float, speed_mm_s: float, dir: bool = True, use_internal_sense_resistors: bool = True, run_current_per: int = 100, hold_current_per: int = 100, conf_save_path: str = './slide_conf.bin') -> None" + }, + "Slide": { + "type": "class", + "name": "Slide", + "doc": { + "brief": "Slide Class", + "maixpy": "maix.ext_dev.tmc2209.Slide", + "py_doc": "Slide Class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Slide", + "doc": { + "brief": "Constructor for Slide\\nInitializes the Slide object with the specified parameters.", + "param": { + "port": "UART port, string type.", + "addr": "TMC2209 UART address, range 0x00~0x03, integer type.", + "baud": "UART baud rate, integer type.", + "step_angle": "Motor step angle, float type.", + "micro_step": "Motor micro step, options: 1/2/4/8/16/32/64/128/256, integer type.", + "round_mm": "Round distance in mm, float type.", + "speed_mm_s": "Speed of the slide in mm/s, float type. Default is -1, indicating the use of a default speed factor.", + "use_internal_sense_resistors": "Enable internal sense resistors if TRUE, disable if FALSE, boolean type. Default is TRUE.", + "run_current_per": "Motor run current percentage, range 0~100(%), integer type. Default is 100%.", + "hold_current_per": "Motor hold current percentage, range 0~100(%), integer type. Default is 100%.", + "cfg_file_path": "Configuration file path, string type. Default is an empty string, indicating no configuration file." + }, + "maixpy": "maix.ext_dev.tmc2209.Slide.__init__", + "py_doc": "Constructor for Slide\nInitializes the Slide object with the specified parameters.\n\nArgs:\n - port: UART port, string type.\n - addr: TMC2209 UART address, range 0x00~0x03, integer type.\n - baud: UART baud rate, integer type.\n - step_angle: Motor step angle, float type.\n - micro_step: Motor micro step, options: 1/2/4/8/16/32/64/128/256, integer type.\n - round_mm: Round distance in mm, float type.\n - speed_mm_s: Speed of the slide in mm/s, float type. 
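For the two module-level helpers above, a hedged sketch: the UART device name and every motor parameter are placeholders for whatever hardware is attached, and only the argument order follows the signatures documented here. slide_scan() records the stall thresholds into the configuration file, and slide_test() then moves until that stall condition triggers.

```python
from maix.ext_dev import tmc2209

PORT = "/dev/ttyS0"                # placeholder UART device, adjust to your wiring

# 1.8 deg step angle, 256 micro steps, 60 mm per revolution, 40 mm/s (all placeholders)
tmc2209.slide_scan(PORT, 0x00, 115200, 1.8, 256, 60.0, 40.0,
                   conf_save_path="./slide_conf.bin")
tmc2209.slide_test(PORT, 0x00, 115200, 1.8, 256, 60.0, 40.0,
                   conf_save_path="./slide_conf.bin")
```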
Default is -1, indicating the use of a default speed factor.\n - use_internal_sense_resistors: Enable internal sense resistors if TRUE, disable if FALSE, boolean type. Default is TRUE.\n - run_current_per: Motor run current percentage, range 0~100(%), integer type. Default is 100%.\n - hold_current_per: Motor hold current percentage, range 0~100(%), integer type. Default is 100%.\n - cfg_file_path: Configuration file path, string type. Default is an empty string, indicating no configuration file.\n" + }, + "args": [ + [ + "const char*", + "port", + null + ], + [ + "uint8_t", + "addr", + null + ], + [ + "long", + "baud", + null + ], + [ + "/* Uart init param */ float", + "step_angle", + null + ], + [ + "uint16_t", + "micro_step", + null + ], + [ + "float", + "round_mm", + null + ], + [ + "/* Motor init param */ float", + "speed_mm_s", + "-1" + ], + [ + "bool", + "use_internal_sense_resistors", + "true" + ], + [ + "uint8_t", + "run_current_per", + "100" + ], + [ + "uint8_t", + "hold_current_per", + "100" + ], + [ + "std::string", + "cfg_file_path", + "\"\" /* Driver init param */" + ] + ], + "ret_type": null, + "static": false, + "def": "Slide(const char* port, uint8_t addr, long baud, /* Uart init param */\n float step_angle, uint16_t micro_step, float round_mm, /* Motor init param */\n float speed_mm_s=-1, bool use_internal_sense_resistors=true, uint8_t run_current_per=100,\n uint8_t hold_current_per=100, std::string cfg_file_path=\"\" /* Driver init param */)", + "py_def": "def __init__(self, port: str, addr: int, baud: int, step_angle: float, micro_step: int, round_mm: float, speed_mm_s: float = -1, use_internal_sense_resistors: bool = True, run_current_per: int = 100, hold_current_per: int = 100, cfg_file_path: str = '') -> None" + }, + "load_conf": { + "type": "func", + "name": "load_conf", + "doc": { + "brief": "Load configuration from a file\\nLoads the configuration settings for the slide from the specified file path.", + "param": { + "path": "Path to the configuration file, string type." + }, + "maixpy": "maix.ext_dev.tmc2209.Slide.load_conf", + "py_doc": "Load configuration from a file\nLoads the configuration settings for the slide from the specified file path.\n\nArgs:\n - path: Path to the configuration file, string type.\n" + }, + "args": [ + [ + "std::string", + "path", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void load_conf(std::string path)", + "py_def": "def load_conf(self, path: str) -> None" + }, + "move": { + "type": "func", + "name": "move", + "doc": { + "brief": "Move the slide by a specified length\\nMoves the slide by the specified length at the given speed. Optionally checks for stall conditions.", + "param": { + "oft": "Length to move, float type.", + "speed_mm_s": "Speed in mm/s. Default is -1, indicating the use of the default speed set during initialization.", + "check": "Enable movement check if true, boolean type. Default is true." + }, + "maixpy": "maix.ext_dev.tmc2209.Slide.move", + "py_doc": "Move the slide by a specified length\nMoves the slide by the specified length at the given speed. Optionally checks for stall conditions.\n\nArgs:\n - oft: Length to move, float type.\n - speed_mm_s: Speed in mm/s. Default is -1, indicating the use of the default speed set during initialization.\n - check: Enable movement check if true, boolean type. 
Default is true.\n" + }, + "args": [ + [ + "float", + "oft", + null + ], + [ + "int", + "speed_mm_s", + "-1" + ], + [ + "bool", + "check", + "true" + ] + ], + "ret_type": "void", + "static": false, + "def": "void move(float oft, int speed_mm_s=-1, bool check=true)", + "py_def": "def move(self, oft: float, speed_mm_s: int = -1, check: bool = True) -> None" + }, + "reset": { + "type": "func", + "name": "reset", + "doc": { + "brief": "Reset the slide position\\nResets the slide position in the specified direction at the given speed.", + "param": { + "dir": "Direction of reset, boolean type. Default is false.", + "speed_mm_s": "Speed in mm/s. Default is -1, indicating the use of the speed set during initialization." + }, + "maixpy": "maix.ext_dev.tmc2209.Slide.reset", + "py_doc": "Reset the slide position\nResets the slide position in the specified direction at the given speed.\n\nArgs:\n - dir: Direction of reset, boolean type. Default is false.\n - speed_mm_s: Speed in mm/s. Default is -1, indicating the use of the speed set during initialization.\n" + }, + "args": [ + [ + "bool", + "dir", + "false" + ], + [ + "int", + "speed_mm_s", + "-1" + ] + ], + "ret_type": "void", + "static": false, + "def": "void reset(bool dir=false, int speed_mm_s=-1)", + "py_def": "def reset(self, dir: bool = False, speed_mm_s: int = -1) -> None" + }, + "stop_default_per": { + "type": "func", + "name": "stop_default_per", + "doc": { + "brief": "Get or set the stop default percentage\\nRetrieves or sets the stop default percentage. If the parameter is -1, it returns the current setting.", + "param": { + "per": "Stop default percentage, range 0~100(%), integer type. Default is -1, indicating no change." + }, + "return": "int Current stop default percentage if per is -1, otherwise the new set percentage.", + "maixpy": "maix.ext_dev.tmc2209.Slide.stop_default_per", + "py_doc": "Get or set the stop default percentage\nRetrieves or sets the stop default percentage. If the parameter is -1, it returns the current setting.\n\nArgs:\n - per: Stop default percentage, range 0~100(%), integer type. Default is -1, indicating no change.\n\n\nReturns: int Current stop default percentage if per is -1, otherwise the new set percentage.\n" + }, + "args": [ + [ + "int", + "per", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int stop_default_per(int per=-1)" + }, + "run_current_per": { + "type": "func", + "name": "run_current_per", + "doc": { + "brief": "Get or set the run current percentage\\nRetrieves or sets the run current percentage. If the parameter is -1, it returns the current setting.", + "param": { + "per": "Run current percentage, range 0~100(%), integer type. Default is -1, indicating no change." + }, + "return": "int Current run current percentage if per is -1, otherwise the new set percentage.", + "maixpy": "maix.ext_dev.tmc2209.Slide.run_current_per", + "py_doc": "Get or set the run current percentage\nRetrieves or sets the run current percentage. If the parameter is -1, it returns the current setting.\n\nArgs:\n - per: Run current percentage, range 0~100(%), integer type. 
Default is -1, indicating no change.\n\n\nReturns: int Current run current percentage if per is -1, otherwise the new set percentage.\n" + }, + "args": [ + [ + "int", + "per", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int run_current_per(int per=-1)", + "py_def": "def run_current_per(self, per: int = -1) -> int" + }, + "hold_current_per": { + "type": "func", + "name": "hold_current_per", + "doc": { + "brief": "Get or set the hold current percentage\\nRetrieves or sets the hold current percentage. If the parameter is -1, it returns the current setting.", + "param": { + "per": "Hold current percentage, range 0~100(%), integer type. Default is -1, indicating no change." + }, + "return": "int Current hold current percentage if per is -1, otherwise the new set percentage.", + "maixpy": "maix.ext_dev.tmc2209.Slide.hold_current_per", + "py_doc": "Get or set the hold current percentage\nRetrieves or sets the hold current percentage. If the parameter is -1, it returns the current setting.\n\nArgs:\n - per: Hold current percentage, range 0~100(%), integer type. Default is -1, indicating no change.\n\n\nReturns: int Current hold current percentage if per is -1, otherwise the new set percentage.\n" + }, + "args": [ + [ + "int", + "per", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int hold_current_per(int per=-1)", + "py_def": "def hold_current_per(self, per: int = -1) -> int" + }, + "use_internal_sense_resistors": { + "type": "func", + "name": "use_internal_sense_resistors", + "doc": { + "brief": "Enable or disable internal sense resistors\\nEnables or disables the internal sense resistors based on the provided boolean value.", + "param": { + "b": "Boolean value to enable (true) or disable (false) internal sense resistors. Default is true." + }, + "maixpy": "maix.ext_dev.tmc2209.Slide.use_internal_sense_resistors", + "py_doc": "Enable or disable internal sense resistors\nEnables or disables the internal sense resistors based on the provided boolean value.\n\nArgs:\n - b: Boolean value to enable (true) or disable (false) internal sense resistors. Default is true.\n" + }, + "args": [ + [ + "bool", + "b", + "true" + ] + ], + "ret_type": "void", + "static": false, + "def": "void use_internal_sense_resistors(bool b=true)", + "py_def": "def use_internal_sense_resistors(self, b: bool = True) -> None" + } + }, + "def": "class Slide" + }, + "ScrewSlide": { + "type": "class", + "name": "ScrewSlide", + "doc": { + "brief": "ScrewSlide Class", + "maixpy": "maix.ext_dev.tmc2209.ScrewSlide", + "py_doc": "ScrewSlide Class" + }, + "members": { + "__init__": { + "type": "func", + "name": "ScrewSlide", + "doc": { + "brief": "Constructor for ScrewSlide", + "param": { + "port": "UART port, string type.", + "addr": "TMC2209 UART address, range 0x00~0x03, integer type.", + "baud": "UART baud rate, integer type.", + "step_angle": "Motor step angle, float type.", + "micro_step": "Motor micro step, options: 1/2/4/8/16/32/64/128/256, integer type.", + "screw_pitch": "Screw pitch of the slide, integer type.", + "speed_mm_s": "Speed of the slide in mm/s, 10 means 10mm/s, float type.\nDefault is -1, indicating the use of a default speed factor.", + "use_internal_sense_resistors": "Enable internal sense resistors if TRUE,\ndisable if FALSE, boolean type. Default is TRUE.", + "run_current_per": "Motor run current percentage, range 0~100(%), integer type. Default is 100%.", + "hold_current_per": "Motor hold current percentage, range 0~100(%), integer type. Default is 100%." 
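The Slide class wraps the same driver for day-to-day moves; a short sketch reusing the placeholder port and motor values and the configuration file produced by slide_scan above.

```python
from maix.ext_dev import tmc2209

slide = tmc2209.Slide("/dev/ttyS0", 0x00, 115200, 1.8, 256, 60.0,
                      speed_mm_s=40, cfg_file_path="./slide_conf.bin")
slide.reset()                      # home towards dir=False until a stall is detected
slide.move(10)                     # move +10 mm at the default speed, with stall checking
slide.move(-10, check=False)       # move back 10 mm without the movement check
```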
+ }, + "maixpy": "maix.ext_dev.tmc2209.ScrewSlide.__init__", + "py_doc": "Constructor for ScrewSlide\n\nArgs:\n - port: UART port, string type.\n - addr: TMC2209 UART address, range 0x00~0x03, integer type.\n - baud: UART baud rate, integer type.\n - step_angle: Motor step angle, float type.\n - micro_step: Motor micro step, options: 1/2/4/8/16/32/64/128/256, integer type.\n - screw_pitch: Screw pitch of the slide, integer type.\n - speed_mm_s: Speed of the slide in mm/s, 10 means 10mm/s, float type.\nDefault is -1, indicating the use of a default speed factor.\n - use_internal_sense_resistors: Enable internal sense resistors if TRUE,\ndisable if FALSE, boolean type. Default is TRUE.\n - run_current_per: Motor run current percentage, range 0~100(%), integer type. Default is 100%.\n - hold_current_per: Motor hold current percentage, range 0~100(%), integer type. Default is 100%.\n" + }, + "args": [ + [ + "const char*", + "port", + null + ], + [ + "uint8_t", + "addr", + null + ], + [ + "long", + "baud", + null + ], + [ + "/* Uart init param */ float", + "step_angle", + null + ], + [ + "uint16_t", + "micro_step", + null + ], + [ + "float", + "screw_pitch", + null + ], + [ + "/* Motor init param */ float", + "speed_mm_s", + "-1" + ], + [ + "bool", + "use_internal_sense_resistors", + "true" + ], + [ + "uint8_t", + "run_current_per", + "100" + ], + [ + "uint8_t", + "hold_current_per", + "100" + ] + ], + "ret_type": null, + "static": false, + "def": "ScrewSlide(const char* port, uint8_t addr, long baud, /* Uart init param */\n float step_angle, uint16_t micro_step, float screw_pitch, /* Motor init param */\n float speed_mm_s=-1, bool use_internal_sense_resistors=true, uint8_t run_current_per=100,\n uint8_t hold_current_per=100)", + "py_def": "def __init__(self, port: str, addr: int, baud: int, step_angle: float, micro_step: int, screw_pitch: float, speed_mm_s: float = -1, use_internal_sense_resistors: bool = True, run_current_per: int = 100, hold_current_per: int = 100) -> None" + }, + "move": { + "type": "func", + "name": "move", + "doc": { + "brief": "Move the slide by a specified length", + "param": { + "oft": "Length to move, 10 means 10mm, float type.\nPositive values move the slide in the positive direction, negative values move it in the opposite direction.", + "speed_mm_s": "Speed in mm/s. Default is -1, indicating the use of the default speed set during initialization.", + "callback": "Callback function to be called during movement.\nThe callback function receives the current progress percentage (0~100%) of the movement.\nIf the callback returns true, the move operation will be terminated immediately. Default is nullptr." + }, + "maixpy": "maix.ext_dev.tmc2209.ScrewSlide.move", + "py_doc": "Move the slide by a specified length\n\nArgs:\n - oft: Length to move, 10 means 10mm, float type.\nPositive values move the slide in the positive direction, negative values move it in the opposite direction.\n - speed_mm_s: Speed in mm/s. Default is -1, indicating the use of the default speed set during initialization.\n - callback: Callback function to be called during movement.\nThe callback function receives the current progress percentage (0~100%) of the movement.\nIf the callback returns true, the move operation will be terminated immediately. 
Default is nullptr.\n" + }, + "args": [ + [ + "float", + "oft", + null + ], + [ + "int", + "speed_mm_s", + "-1" + ], + [ + "std::function", + "callback", + "nullptr" + ] + ], + "ret_type": "void", + "static": false, + "def": "void move(float oft, int speed_mm_s=-1, std::function callback=nullptr)", + "py_def": "def move(self, oft: float, speed_mm_s: int = -1, callback: typing.Callable[[float], bool] = None) -> None" + }, + "reset": { + "type": "func", + "name": "reset", + "doc": { + "brief": "Reset the slide position", + "param": { + "callback": "Callback function to be called during the reset loop.\nThe reset operation will only terminate if the callback returns true.", + "dir": "Direction of reset. Default is false.", + "speed_mm_s": "Speed in mm/s. Default is -1, indicating the use of the speed set during initialization." + }, + "maixpy": "maix.ext_dev.tmc2209.ScrewSlide.reset", + "py_doc": "Reset the slide position\n\nArgs:\n - callback: Callback function to be called during the reset loop.\nThe reset operation will only terminate if the callback returns true.\n - dir: Direction of reset. Default is false.\n - speed_mm_s: Speed in mm/s. Default is -1, indicating the use of the speed set during initialization.\n" + }, + "args": [ + [ + "std::function", + "callback", + null + ], + [ + "bool", + "dir", + "false" + ], + [ + "int", + "speed_mm_s", + "-1" + ] + ], + "ret_type": "void", + "static": false, + "def": "void reset(std::function callback, bool dir=false, int speed_mm_s=-1)", + "py_def": "def reset(self, callback: typing.Callable[[], bool], dir: bool = False, speed_mm_s: int = -1) -> None" + }, + "run_current_per": { + "type": "func", + "name": "run_current_per", + "doc": { + "brief": "Get or set the run current percentage", + "param": { + "per": "Run current percentage, range 0~100(%).\nDefault is -1, indicating no change and returning the current run current percentage." + }, + "return": "int Current run current percentage if per is -1, otherwise the new set percentage.", + "maixpy": "maix.ext_dev.tmc2209.ScrewSlide.run_current_per", + "py_doc": "Get or set the run current percentage\n\nArgs:\n - per: Run current percentage, range 0~100(%).\nDefault is -1, indicating no change and returning the current run current percentage.\n\n\nReturns: int Current run current percentage if per is -1, otherwise the new set percentage.\n" + }, + "args": [ + [ + "int", + "per", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int run_current_per(int per=-1)", + "py_def": "def run_current_per(self, per: int = -1) -> int" + }, + "hold_current_per": { + "type": "func", + "name": "hold_current_per", + "doc": { + "brief": "Get or set the hold current percentage", + "param": { + "per": "Hold current percentage, range 0~100(%). Default is -1, indicating no change and returning the current hold current percentage." + }, + "return": "int Current hold current percentage if per is -1, otherwise the new set percentage.", + "maixpy": "maix.ext_dev.tmc2209.ScrewSlide.hold_current_per", + "py_doc": "Get or set the hold current percentage\n\nArgs:\n - per: Hold current percentage, range 0~100(%). 
Default is -1, indicating no change and returning the current hold current percentage.\n\n\nReturns: int Current hold current percentage if per is -1, otherwise the new set percentage.\n" + }, + "args": [ + [ + "int", + "per", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int hold_current_per(int per=-1)", + "py_def": "def hold_current_per(self, per: int = -1) -> int" + }, + "use_internal_sense_resistors": { + "type": "func", + "name": "use_internal_sense_resistors", + "doc": { + "brief": "Enable or disable internal sense resistors", + "param": { + "b": "Boolean value to enable (true) or disable (false) internal sense resistors. Default is true." + }, + "maixpy": "maix.ext_dev.tmc2209.ScrewSlide.use_internal_sense_resistors", + "py_doc": "Enable or disable internal sense resistors\n\nArgs:\n - b: Boolean value to enable (true) or disable (false) internal sense resistors. Default is true.\n" + }, + "args": [ + [ + "bool", + "b", + "true" + ] + ], + "ret_type": "void", + "static": false, + "def": "void use_internal_sense_resistors(bool b=true)", + "py_def": "def use_internal_sense_resistors(self, b: bool = True) -> None" + } + }, + "def": "class ScrewSlide" + } + }, + "auto_add": true + }, + "bm8563": { + "type": "module", + "doc": { + "brief": "maix.ext_dev.bm8563 module" + }, + "members": { + "BM8563": { + "type": "class", + "name": "BM8563", + "doc": { + "brief": "Peripheral BM8563 class", + "maixpy": "maix.ext_dev.bm8563.BM8563", + "py_doc": "Peripheral BM8563 class" + }, + "members": { + "__init__": { + "type": "func", + "name": "BM8563", + "doc": { + "brief": "BM8563 constructor", + "param": { + "i2c_bus": "i2c bus number." + }, + "maixpy": "maix.ext_dev.bm8563.BM8563.__init__", + "py_doc": "BM8563 constructor\n\nArgs:\n - i2c_bus: i2c bus number.\n" + }, + "args": [ + [ + "int", + "i2c_bus", + "-1" + ] + ], + "ret_type": null, + "static": false, + "def": "BM8563(int i2c_bus=-1)", + "py_def": "def __init__(self, i2c_bus: int = -1) -> None" + }, + "datetime": { + "type": "func", + "name": "datetime", + "doc": { + "brief": "Get or set the date and time of the BM8563.", + "param": { + "timetuple": "time tuple, like (year, month, day[, hour[, minute[, second]]])" + }, + "return": "time tuple, like (year, month, day[, hour[, minute[, second]]])", + "maixpy": "maix.ext_dev.bm8563.BM8563.datetime", + "py_doc": "Get or set the date and time of the BM8563.\n\nArgs:\n - timetuple: time tuple, like (year, month, day[, hour[, minute[, second]]])\n\n\nReturns: time tuple, like (year, month, day[, hour[, minute[, second]]])\n" + }, + "args": [ + [ + "std::vector", + "timetuple", + "std::vector()" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector datetime(std::vector timetuple=std::vector())", + "py_def": "def datetime(self, timetuple: list[int] = []) -> list[int]" + }, + "init": { + "type": "func", + "name": "init", + "doc": { + "brief": "Initialise the BM8563.", + "param": { + "timetuple": "time tuple, like (year, month, day[, hour[, minute[, second]]])" + }, + "return": "err::Err type, if init success, return err::ERR_NONE", + "maixpy": "maix.ext_dev.bm8563.BM8563.init", + "py_doc": "Initialise the BM8563.\n\nArgs:\n - timetuple: time tuple, like (year, month, day[, hour[, minute[, second]]])\n\n\nReturns: err::Err type, if init success, return err::ERR_NONE\n" + }, + "args": [ + [ + "std::vector", + "timetuple", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err init(std::vector timetuple)", + "py_def": "def init(self, 
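The ScrewSlide entries above (constructor, move, reset and the current/sense-resistor setters) together describe a small motion API. A minimal MaixPy sketch follows; the UART port, driver address, baud rate and mechanical parameters are illustrative placeholders, not values taken from this file.

# Hedged sketch of maix.ext_dev.tmc2209.ScrewSlide usage; the UART settings and
# mechanical parameters below are assumptions for illustration only.
from maix.ext_dev import tmc2209

slide = tmc2209.ScrewSlide("/dev/ttyS1", 0x00, 115200,   # placeholder port, addr, baud
                           1.8, 256, 3.0,                # step angle, micro step, screw pitch
                           10)                           # default speed in mm/s

def on_progress(percent):
    print("moved: %.1f%%" % percent)
    return False            # returning True would abort the move early

slide.move(10, -1, on_progress)   # +10 mm at the default speed, with progress callback
slide.move(-10)                   # move back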
timetuple: list[int]) -> maix.err.Err" + }, + "now": { + "type": "func", + "name": "now", + "doc": { + "brief": "Get get the current datetime.", + "return": "time tuple, like (year, month, day[, hour[, minute[, second]]])", + "maixpy": "maix.ext_dev.bm8563.BM8563.now", + "py_doc": "Get get the current datetime.\n\nReturns: time tuple, like (year, month, day[, hour[, minute[, second]]])\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector now()", + "py_def": "def now(self) -> list[int]" + }, + "deinit": { + "type": "func", + "name": "deinit", + "doc": { + "brief": "Deinit the BM8563.", + "return": "err::Err err::Err type, if deinit success, return err::ERR_NONE", + "maixpy": "maix.ext_dev.bm8563.BM8563.deinit", + "py_doc": "Deinit the BM8563.\n\nReturns: err::Err err::Err type, if deinit success, return err::ERR_NONE\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err deinit()", + "py_def": "def deinit(self) -> maix.err.Err" + }, + "hctosys": { + "type": "func", + "name": "hctosys", + "doc": { + "brief": "Set the system time from the BM8563", + "return": "err::Err type", + "maixpy": "maix.ext_dev.bm8563.BM8563.hctosys", + "py_doc": "Set the system time from the BM8563\n\nReturns: err::Err type\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err hctosys()", + "py_def": "def hctosys(self) -> maix.err.Err" + }, + "systohc": { + "type": "func", + "name": "systohc", + "doc": { + "brief": "Set the BM8563 from the system time", + "return": "err::Err type", + "maixpy": "maix.ext_dev.bm8563.BM8563.systohc", + "py_doc": "Set the BM8563 from the system time\n\nReturns: err::Err type\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err systohc()", + "py_def": "def systohc(self) -> maix.err.Err" + } + }, + "def": "class BM8563" + } + }, + "auto_add": true + } + }, + "auto_add": true + }, + "audio": { + "type": "module", + "doc": { + "brief": "maix.audio module", + "maixpy": "maix.audio", + "py_doc": "maix.audio module" + }, + "members": { + "Format": { + "type": "enum", + "name": "Format", + "doc": { + "brief": "Audio type", + "maixpy": "maix.audio.Format", + "py_doc": "Audio type" + }, + "values": [ + [ + "FMT_NONE", + "0", + "format invalid" + ], + [ + "FMT_S8", + "", + "unsigned 8 bits" + ], + [ + "FMT_S16_LE", + "", + "signed 16 bits, little endian" + ], + [ + "FMT_S32_LE", + "", + "signed 32 bits, little endian" + ], + [ + "FMT_S16_BE", + "", + "signed 16 bits, big endian" + ], + [ + "FMT_S32_BE", + "", + "signed 32 bits, big endian" + ], + [ + "FMT_U8", + "", + "unsigned 8 bits" + ], + [ + "FMT_U16_LE", + "", + "unsigned 16 bits, little endian" + ], + [ + "FMT_U32_LE", + "", + "unsigned 32 bits, little endian" + ], + [ + "FMT_U16_BE", + "", + "unsigned 16 bits, big endian" + ], + [ + "FMT_U32_BE", + "", + "unsigned 32 bits, big endian" + ] + ], + "def": "enum Format\n {\n FMT_NONE = 0, // format invalid\n FMT_S8, // unsigned 8 bits\n FMT_S16_LE, // signed 16 bits, little endian\n FMT_S32_LE, // signed 32 bits, little endian\n FMT_S16_BE, // signed 16 bits, big endian\n FMT_S32_BE, // signed 32 bits, big endian\n FMT_U8, // unsigned 8 bits\n FMT_U16_LE, // unsigned 16 bits, little endian\n FMT_U32_LE, // unsigned 32 bits, little endian\n FMT_U16_BE, // unsigned 16 bits, big endian\n FMT_U32_BE, // unsigned 32 bits, big endian\n }" + }, + "Recorder": { + "type": "class", + "name": "Recorder", + "doc": { + "brief": "Recorder class", + "maixpy": 
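As a usage note for the BM8563 RTC class documented above, a short MaixPy sketch (the I2C bus is left at its default and the date values are arbitrary):

# Hedged sketch of maix.ext_dev.bm8563.BM8563; bus number and date are examples only.
from maix.ext_dev import bm8563

rtc = bm8563.BM8563()                       # default i2c bus (-1)
rtc.datetime([2024, 1, 1, 12, 0, 0])        # set (year, month, day, hour, minute, second)
print(rtc.datetime())                       # read back the current time tuple
rtc.hctosys()                               # copy RTC time into the system clock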
"maix.audio.Recorder", + "py_doc": "Recorder class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Recorder", + "doc": { + "brief": "Construct a new Recorder object. currectly only pcm and wav formats supported.", + "param": { + "path": "record path. the path determines the location where you save the file, if path is none, the audio module will not save file.", + "sample_rate": "record sample rate, default is 48000(48KHz), means 48000 samples per second.", + "format": "record sample format, default is audio::Format::FMT_S16_LE, means sampling 16 bits at a time and save as signed 16 bits, little endian. see @audio::Format", + "channel": "record sample channel, default is 1, means 1 channel sampling at the same time" + }, + "maixpy": "maix.audio.Recorder.__init__", + "maixcdk": "maix.audio.Recorder.Recorder", + "py_doc": "Construct a new Recorder object. currectly only pcm and wav formats supported.\n\nArgs:\n - path: record path. the path determines the location where you save the file, if path is none, the audio module will not save file.\n - sample_rate: record sample rate, default is 48000(48KHz), means 48000 samples per second.\n - format: record sample format, default is audio::Format::FMT_S16_LE, means sampling 16 bits at a time and save as signed 16 bits, little endian. see @audio::Format\n - channel: record sample channel, default is 1, means 1 channel sampling at the same time\n" + }, + "args": [ + [ + "std::string", + "path", + "std::string()" + ], + [ + "int", + "sample_rate", + "48000" + ], + [ + "audio::Format", + "format", + "audio::Format::FMT_S16_LE" + ], + [ + "int", + "channel", + "1" + ] + ], + "ret_type": null, + "static": false, + "def": "Recorder(std::string path = std::string(), int sample_rate = 48000, audio::Format format = audio::Format::FMT_S16_LE, int channel = 1)", + "py_def": "def __init__(self, path: str = '', sample_rate: int = 48000, format: Format = ..., channel: int = 1) -> None" + }, + "volume": { + "type": "func", + "name": "volume", + "doc": { + "brief": "Set/Get record volume", + "param": { + "value": "volume value, If you use this parameter, audio will set the value to volume,\nif you don't, it will return the current volume. range is [0, 100]." + }, + "return": "the current volume", + "maixpy": "maix.audio.Recorder.volume", + "py_doc": "Set/Get record volume\n\nArgs:\n - value: volume value, If you use this parameter, audio will set the value to volume,\nif you don't, it will return the current volume. range is [0, 100].\n\n\nReturns: the current volume\n" + }, + "args": [ + [ + "int", + "value", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int volume(int value = -1)", + "py_def": "def volume(self, value: int = -1) -> int" + }, + "mute": { + "type": "func", + "name": "mute", + "doc": { + "brief": "Mute", + "param": { + "data": "mute data, If you set this parameter to true, audio will set the value to mute,\nif you don't, it will return the current mute status." 
+ }, + "return": "Returns whether mute is currently enabled.", + "maixpy": "maix.audio.Recorder.mute", + "py_doc": "Mute\n\nArgs:\n - data: mute data, If you set this parameter to true, audio will set the value to mute,\nif you don't, it will return the current mute status.\n\n\nReturns: Returns whether mute is currently enabled.\n" + }, + "args": [ + [ + "int", + "data", + "-1" + ] + ], + "ret_type": "bool", + "static": false, + "def": "bool mute(int data = -1)", + "py_def": "def mute(self, data: int = -1) -> bool" + }, + "record": { + "type": "func", + "name": "record", + "doc": { + "brief": "Record, Read all cached data in buffer and return. If there is no audio data in the buffer, may return empty data.", + "param": { + "record_ms": "Block and record audio data lasting `record_ms` milliseconds and save it to a file, the return value does not return audio data. Only valid if the initialisation `path` is set." + }, + "return": "pcm data. datatype @see Bytes. If you pass in record_ms parameter, the return value is an empty Bytes object.", + "maixpy": "maix.audio.Recorder.record", + "py_doc": "Record, Read all cached data in buffer and return. If there is no audio data in the buffer, may return empty data.\n\nArgs:\n - record_ms: Block and record audio data lasting `record_ms` milliseconds and save it to a file, the return value does not return audio data. Only valid if the initialisation `path` is set.\n\n\nReturns: pcm data. datatype @see Bytes. If you pass in record_ms parameter, the return value is an empty Bytes object.\n" + }, + "args": [ + [ + "int", + "record_ms", + "-1" + ] + ], + "ret_type": "maix::Bytes*", + "static": false, + "def": "maix::Bytes *record(int record_ms = -1)", + "py_def": "def record(*args, **kwargs)" + }, + "finish": { + "type": "func", + "name": "finish", + "doc": { + "brief": "Finish the record, if you have passed in the path, this api will save the audio data to file.", + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.audio.Recorder.finish", + "py_doc": "Finish the record, if you have passed in the path, this api will save the audio data to file.\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err finish()", + "py_def": "def finish(self) -> maix.err.Err" + }, + "sample_rate": { + "type": "func", + "name": "sample_rate", + "doc": { + "brief": "Get sample rate", + "return": "returns sample rate", + "maixpy": "maix.audio.Recorder.sample_rate", + "py_doc": "Get sample rate\n\nReturns: returns sample rate\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int sample_rate()", + "py_def": "def sample_rate(self) -> int" + }, + "format": { + "type": "func", + "name": "format", + "doc": { + "brief": "Get sample format", + "return": "returns sample format", + "maixpy": "maix.audio.Recorder.format", + "py_doc": "Get sample format\n\nReturns: returns sample format\n" + }, + "args": [], + "ret_type": "audio::Format", + "static": false, + "def": "audio::Format format()", + "py_def": "def format(self) -> Format" + }, + "channel": { + "type": "func", + "name": "channel", + "doc": { + "brief": "Get sample channel", + "return": "returns sample channel", + "maixpy": "maix.audio.Recorder.channel", + "py_doc": "Get sample channel\n\nReturns: returns sample channel\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int channel()", + "py_def": "def channel(self) -> int" + } + }, + "def": 
"class Recorder" + }, + "Player": { + "type": "class", + "name": "Player", + "doc": { + "brief": "Player class", + "maixpy": "maix.audio.Player", + "py_doc": "Player class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Player", + "doc": { + "brief": "Construct a new Player object", + "param": { + "path": "player path. the path determines the location where you save the file, if path is none, the audio module will not save file.", + "sample_rate": "player sample rate, default is 48000(48KHz), means 48000 samples per second.", + "format": "player sample format, default is audio::Format::FMT_S16_LE, means sampling 16 bits at a time and save as signed 16 bits, little endian. see @audio::Format", + "channel": "player sample channel, default is 1, means 1 channel sampling at the same time" + }, + "maixpy": "maix.audio.Player.__init__", + "maixcdk": "maix.audio.Player.Player", + "py_doc": "Construct a new Player object\n\nArgs:\n - path: player path. the path determines the location where you save the file, if path is none, the audio module will not save file.\n - sample_rate: player sample rate, default is 48000(48KHz), means 48000 samples per second.\n - format: player sample format, default is audio::Format::FMT_S16_LE, means sampling 16 bits at a time and save as signed 16 bits, little endian. see @audio::Format\n - channel: player sample channel, default is 1, means 1 channel sampling at the same time\n" + }, + "args": [ + [ + "std::string", + "path", + "std::string()" + ], + [ + "int", + "sample_rate", + "48000" + ], + [ + "audio::Format", + "format", + "audio::Format::FMT_S16_LE" + ], + [ + "int", + "channel", + "1" + ] + ], + "ret_type": null, + "static": false, + "def": "Player(std::string path = std::string(), int sample_rate = 48000, audio::Format format = audio::Format::FMT_S16_LE, int channel = 1)", + "py_def": "def __init__(self, path: str = '', sample_rate: int = 48000, format: Format = ..., channel: int = 1) -> None" + }, + "volume": { + "type": "func", + "name": "volume", + "doc": { + "brief": "Set/Get player volume(Not support now)", + "param": { + "value": "volume value, If you use this parameter, audio will set the value to volume,\nif you don't, it will return the current volume." 
+ }, + "return": "the current volume", + "maixpy": "maix.audio.Player.volume", + "py_doc": "Set/Get player volume(Not support now)\n\nArgs:\n - value: volume value, If you use this parameter, audio will set the value to volume,\nif you don't, it will return the current volume.\n\n\nReturns: the current volume\n" + }, + "args": [ + [ + "int", + "value", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int volume(int value = -1)", + "py_def": "def volume(self, value: int = -1) -> int" + }, + "play": { + "type": "func", + "name": "play", + "doc": { + "brief": "Play", + "param": { + "data": "audio data, must be raw data" + }, + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.audio.Player.play", + "py_doc": "Play\n\nArgs:\n - data: audio data, must be raw data\n\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [ + [ + "maix::Bytes *", + "data", + "maix::audio::Player::NoneBytes" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err play(maix::Bytes *data = maix::audio::Player::NoneBytes)", + "py_def": "def play(self, data: maix.Bytes(bytes) = b'') -> maix.err.Err" + }, + "sample_rate": { + "type": "func", + "name": "sample_rate", + "doc": { + "brief": "Get sample rate", + "return": "returns sample rate", + "maixpy": "maix.audio.Player.sample_rate", + "py_doc": "Get sample rate\n\nReturns: returns sample rate\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int sample_rate()", + "py_def": "def sample_rate(self) -> int" + }, + "format": { + "type": "func", + "name": "format", + "doc": { + "brief": "Get sample format", + "return": "returns sample format", + "maixpy": "maix.audio.Player.format", + "py_doc": "Get sample format\n\nReturns: returns sample format\n" + }, + "args": [], + "ret_type": "audio::Format", + "static": false, + "def": "audio::Format format()", + "py_def": "def format(self) -> Format" + }, + "channel": { + "type": "func", + "name": "channel", + "doc": { + "brief": "Get sample channel", + "return": "returns sample channel", + "maixpy": "maix.audio.Player.channel", + "py_doc": "Get sample channel\n\nReturns: returns sample channel\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int channel()", + "py_def": "def channel(self) -> int" + } + }, + "def": "class Player" + } + }, + "auto_add": false + }, + "tracker": { + "type": "module", + "doc": { + "brief": "maix.tracker module" + }, + "members": { + "Object": { + "type": "class", + "name": "Object", + "doc": { + "brief": "tracker.Object class", + "maixpy": "maix.tracker.Object", + "py_doc": "tracker.Object class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Object", + "doc": { + "brief": "tracker.Object class constructor", + "maixpy": "maix.tracker.Object.__init__", + "maixcdk": "maix.tracker.Object.Object", + "py_doc": "tracker.Object class constructor" + }, + "args": [ + [ + "const int &", + "x", + null + ], + [ + "const int &", + "y", + null + ], + [ + "const int &", + "w", + null + ], + [ + "const int &", + "h", + null + ], + [ + "const int &", + "class_id", + null + ], + [ + "const float &", + "score", + null + ] + ], + "ret_type": null, + "static": false, + "def": "Object(const int &x, const int &y, const int &w, const int &h, const int &class_id, const float &score)", + "py_def": "def __init__(self, x: int, y: int, w: int, h: int, class_id: int, score: float) -> None" + }, + "x": { + "type": "var", + "name": "x", + "doc": { + "brief": 
"position x attribute.", + "maixpy": "maix.tracker.Object.x", + "py_doc": "position x attribute." + }, + "value": null, + "static": false, + "readonly": false, + "def": "int x" + }, + "y": { + "type": "var", + "name": "y", + "doc": { + "brief": "position y attribute.", + "maixpy": "maix.tracker.Object.y", + "py_doc": "position y attribute." + }, + "value": null, + "static": false, + "readonly": false, + "def": "int y" + }, + "w": { + "type": "var", + "name": "w", + "doc": { + "brief": "position rectangle width.", + "maixpy": "maix.tracker.Object.w", + "py_doc": "position rectangle width." + }, + "value": null, + "static": false, + "readonly": false, + "def": "int w" + }, + "h": { + "type": "var", + "name": "h", + "doc": { + "brief": "position rectangle height.", + "maixpy": "maix.tracker.Object.h", + "py_doc": "position rectangle height." + }, + "value": null, + "static": false, + "readonly": false, + "def": "int h" + }, + "class_id": { + "type": "var", + "name": "class_id", + "doc": { + "brief": "object class id, int type.", + "maixpy": "maix.tracker.Object.class_id", + "py_doc": "object class id, int type." + }, + "value": null, + "static": false, + "readonly": false, + "def": "int class_id" + }, + "score": { + "type": "var", + "name": "score", + "doc": { + "brief": "object score(prob).", + "maixpy": "maix.tracker.Object.score", + "py_doc": "object score(prob)." + }, + "value": null, + "static": false, + "readonly": false, + "def": "float score" + } + }, + "def": "class Object" + }, + "Track": { + "type": "class", + "name": "Track", + "doc": { + "brief": "tracker.Track class", + "maixpy": "maix.tracker.Track", + "py_doc": "tracker.Track class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Track", + "doc": { + "brief": "tracker.Track class constructor", + "maixpy": "maix.tracker.Track.__init__", + "maixcdk": "maix.tracker.Track.Track", + "py_doc": "tracker.Track class constructor" + }, + "args": [ + [ + "const size_t &", + "id", + null + ], + [ + "const float &", + "score", + null + ], + [ + "const bool &", + "lost", + null + ], + [ + "const size_t &", + "start_frame_id", + null + ], + [ + "const size_t &", + "frame_id", + null + ] + ], + "ret_type": null, + "static": false, + "def": "Track(const size_t &id, const float &score, const bool &lost, const size_t &start_frame_id, const size_t &frame_id)", + "py_def": "def __init__(self, id: int, score: float, lost: bool, start_frame_id: int, frame_id: int) -> None" + }, + "id": { + "type": "var", + "name": "id", + "doc": { + "brief": "track id.", + "maixpy": "maix.tracker.Track.id", + "py_doc": "track id." + }, + "value": null, + "static": false, + "readonly": false, + "def": "size_t id" + }, + "score": { + "type": "var", + "name": "score", + "doc": { + "brief": "track score(prob).", + "maixpy": "maix.tracker.Track.score", + "py_doc": "track score(prob)." + }, + "value": null, + "static": false, + "readonly": false, + "def": "float score" + }, + "lost": { + "type": "var", + "name": "lost", + "doc": { + "brief": "whether this track lost.", + "maixpy": "maix.tracker.Track.lost", + "py_doc": "whether this track lost." + }, + "value": null, + "static": false, + "readonly": false, + "def": "bool lost" + }, + "start_frame_id": { + "type": "var", + "name": "start_frame_id", + "doc": { + "brief": "track start frame id.", + "maixpy": "maix.tracker.Track.start_frame_id", + "py_doc": "track start frame id." 
+ }, + "value": null, + "static": false, + "readonly": false, + "def": "size_t start_frame_id" + }, + "frame_id": { + "type": "var", + "name": "frame_id", + "doc": { + "brief": "track current frame id.", + "maixpy": "maix.tracker.Track.frame_id", + "py_doc": "track current frame id." + }, + "value": null, + "static": false, + "readonly": false, + "def": "size_t frame_id" + }, + "history": { + "type": "var", + "name": "history", + "doc": { + "brief": "track position history, the last one is latest position.", + "maixpy": "maix.tracker.Track.history", + "py_doc": "track position history, the last one is latest position." + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::deque history" + } + }, + "def": "class Track" + }, + "ByteTracker": { + "type": "class", + "name": "ByteTracker", + "doc": { + "brief": "tracker.ByteTracker class", + "maixpy": "maix.tracker.ByteTracker", + "py_doc": "tracker.ByteTracker class" + }, + "members": { + "__init__": { + "type": "func", + "name": "ByteTracker", + "doc": { + "brief": "tracker.ByteTracker class constructor", + "param": { + "max_lost_buff_num": "the frames for keep lost tracks.", + "track_thresh": "tracking confidence threshold.", + "high_thresh": "threshold to add to new track.", + "match_thresh": "matching threshold for tracking, e.g. one object in two frame iou < match_thresh we think they are the same obj.", + "max_history": "max tack's position history length." + }, + "maixpy": "maix.tracker.ByteTracker.__init__", + "maixcdk": "maix.tracker.ByteTracker.ByteTracker", + "py_doc": "tracker.ByteTracker class constructor\n\nArgs:\n - max_lost_buff_num: the frames for keep lost tracks.\n - track_thresh: tracking confidence threshold.\n - high_thresh: threshold to add to new track.\n - match_thresh: matching threshold for tracking, e.g. one object in two frame iou < match_thresh we think they are the same obj.\n - max_history: max tack's position history length.\n" + }, + "args": [ + [ + "const int &", + "max_lost_buff_num", + "60" + ], + [ + "const float &", + "track_thresh", + "0.5" + ], + [ + "const float &", + "high_thresh", + "0.6" + ], + [ + "const float &", + "match_thresh", + "0.8" + ], + [ + "const int &", + "max_history", + "20" + ] + ], + "ret_type": null, + "static": false, + "def": "ByteTracker(const int &max_lost_buff_num = 60,\n const float &track_thresh = 0.5,\n const float &high_thresh = 0.6,\n const float &match_thresh = 0.8,\n const int &max_history = 20)", + "py_def": "def __init__(self, max_lost_buff_num: int = 60, track_thresh: float = 0.5, high_thresh: float = 0.6, match_thresh: float = 0.8, max_history: int = 20) -> None" + }, + "update": { + "type": "func", + "name": "update", + "doc": { + "brief": "update tracks according to current detected objects.", + "maixpy": "maix.tracker.ByteTracker.update", + "py_doc": "update tracks according to current detected objects." 
+ }, + "args": [ + [ + "const std::vector &", + "objs", + null + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector update(const std::vector &objs)", + "py_def": "def update(self, objs: list[Object]) -> list[Track]" + } + }, + "def": "class ByteTracker" + } + }, + "auto_add": true + }, + "http": { + "type": "module", + "doc": { + "brief": "maix.http module" + }, + "members": { + "JpegStreamer": { + "type": "class", + "name": "JpegStreamer", + "doc": { + "brief": "JpegStreamer class", + "maixpy": "maix.http.JpegStreamer", + "py_doc": "JpegStreamer class" + }, + "members": { + "__init__": { + "type": "func", + "name": "JpegStreamer", + "doc": { + "brief": "Construct a new jpeg streamer object", + "note": "You can get the picture stream through http://host:port/stream, you can also get it through http://ip:port, and you can add personal style through set_html() at this time", + "param": { + "host": "http host", + "port": "http port, default is 8000", + "client_number": "the max number of client" + }, + "maixpy": "maix.http.JpegStreamer.__init__", + "maixcdk": "maix.http.JpegStreamer.JpegStreamer", + "py_doc": "Construct a new jpeg streamer object\n\nArgs:\n - host: http host\n - port: http port, default is 8000\n - client_number: the max number of client\n" + }, + "args": [ + [ + "std::string", + "host", + "std::string()" + ], + [ + "int", + "port", + "8000" + ], + [ + "int", + "client_number", + "16" + ] + ], + "ret_type": null, + "static": false, + "def": "JpegStreamer(std::string host = std::string(), int port = 8000, int client_number = 16)", + "py_def": "def __init__(self, host: str = '', port: int = 8000, client_number: int = 16) -> None" + }, + "start": { + "type": "func", + "name": "start", + "doc": { + "brief": "start jpeg streame", + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.http.JpegStreamer.start", + "py_doc": "start jpeg streame\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err start()", + "overload": [ + { + "type": "func", + "name": "stop", + "doc": { + "brief": "stop http", + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.http.JpegStreamer.start", + "py_doc": "stop http\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err stop()" + } + ], + "py_def": "def start(self) -> maix.err.Err" + }, + "write": { + "type": "func", + "name": "write", + "doc": { + "brief": "Write data to http", + "param": { + "img": "image object" + }, + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.http.JpegStreamer.write", + "py_doc": "Write data to http\n\nArgs:\n - img: image object\n\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [ + [ + "image::Image *", + "img", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err write(image::Image *img)", + "py_def": "def write(self, img: maix.image.Image) -> maix.err.Err" + }, + "set_html": { + "type": "func", + "name": "set_html", + "doc": { + "brief": "add your style in this api\\ndefault is:\\n\\n\\n

JPG Stream
\\n\\n\\n", + "param": { + "data": "html code" + }, + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.http.JpegStreamer.set_html", + "py_doc": "add your style in this api\ndefault is:\n\n\nJPG Stream
\n\n\n\n\nArgs:\n - data: html code\n\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [ + [ + "std::string", + "data", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err set_html(std::string data)", + "py_def": "def set_html(self, data: str) -> maix.err.Err" + }, + "host": { + "type": "func", + "name": "host", + "doc": { + "brief": "Get host", + "return": "host name", + "maixpy": "maix.http.JpegStreamer.host", + "py_doc": "Get host\n\nReturns: host name\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string host()", + "py_def": "def host(self) -> str" + }, + "port": { + "type": "func", + "name": "port", + "doc": { + "brief": "Get port", + "return": "port", + "maixpy": "maix.http.JpegStreamer.port", + "py_doc": "Get port\n\nReturns: port\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int port()", + "py_def": "def port(self) -> int" + } + }, + "def": "class JpegStreamer" + } + }, + "auto_add": true + }, + "rtsp": { + "type": "module", + "doc": { + "brief": "maix.rtsp module" + }, + "members": { + "RtspStreamType": { + "type": "enum", + "name": "RtspStreamType", + "doc": { + "brief": "The stream type of rtsp", + "maixpy": "maix.rtsp.RtspStreamType", + "py_doc": "The stream type of rtsp" + }, + "values": [ + [ + "RTSP_STREAM_NONE", + "0", + "format invalid" + ], + [ + "RTSP_STREAM_H265", + "", + "" + ] + ], + "def": "enum RtspStreamType\n {\n RTSP_STREAM_NONE = 0, // format invalid\n RTSP_STREAM_H265,\n }" + }, + "Region": { + "type": "class", + "name": "Region", + "doc": { + "brief": "Region class", + "maixpy": "maix.rtsp.Region", + "py_doc": "Region class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Region", + "doc": { + "brief": "Construct a new Region object", + "param": { + "x": "region coordinate x", + "y": "region coordinate y", + "width": "region width", + "height": "region height", + "format": "region format", + "camera": "bind region to camera" + }, + "maixpy": "maix.rtsp.Region.__init__", + "maixcdk": "maix.rtsp.Region.Region", + "py_doc": "Construct a new Region object\n\nArgs:\n - x: region coordinate x\n - y: region coordinate y\n - width: region width\n - height: region height\n - format: region format\n - camera: bind region to camera\n" + }, + "args": [ + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "int", + "width", + null + ], + [ + "int", + "height", + null + ], + [ + "image::Format", + "format", + null + ], + [ + "camera::Camera *", + "camera", + null + ] + ], + "ret_type": null, + "static": false, + "def": "Region(int x, int y, int width, int height, image::Format format, camera::Camera *camera)", + "py_def": "def __init__(self, x: int, y: int, width: int, height: int, format: maix.image.Format, camera: maix.camera.Camera) -> None" + }, + "get_canvas": { + "type": "func", + "name": "get_canvas", + "doc": { + "brief": "Return an image object from region", + "return": "image object", + "maixpy": "maix.rtsp.Region.get_canvas", + "py_doc": "Return an image object from region\n\nReturns: image object\n" + }, + "args": [], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *get_canvas()", + "py_def": "def get_canvas(self) -> maix.image.Image" + }, + "update_canvas": { + "type": "func", + "name": "update_canvas", + "doc": { + "brief": "Update canvas", + "return": "error code", + "maixpy": "maix.rtsp.Region.update_canvas", + "py_doc": "Update canvas\n\nReturns: error code\n" + }, + 
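As a usage note for the JpegStreamer class above: construct, start(), then push frames with write(). The camera calls and resolution are assumptions (maix.camera is documented elsewhere in this file).

# Hedged sketch of maix.http.JpegStreamer; camera setup and resolution are assumptions.
from maix import camera, http

streamer = http.JpegStreamer()       # default port 8000; view at http://<ip>:8000/stream
streamer.start()

cam = camera.Camera(640, 480)        # placeholder resolution
while True:
    img = cam.read()
    streamer.write(img)              # push the frame to all connected clients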
"args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err update_canvas()", + "py_def": "def update_canvas(self) -> maix.err.Err" + } + }, + "def": "class Region" + }, + "Rtsp": { + "type": "class", + "name": "Rtsp", + "doc": { + "brief": "Rtsp class", + "maixpy": "maix.rtsp.Rtsp", + "py_doc": "Rtsp class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Rtsp", + "doc": { + "brief": "Construct a new Video object", + "param": { + "ip": "rtsp ip", + "port": "rtsp port", + "fps": "rtsp fps", + "stream_type": "rtsp stream type" + }, + "maixpy": "maix.rtsp.Rtsp.__init__", + "maixcdk": "maix.rtsp.Rtsp.Rtsp", + "py_doc": "Construct a new Video object\n\nArgs:\n - ip: rtsp ip\n - port: rtsp port\n - fps: rtsp fps\n - stream_type: rtsp stream type\n" + }, + "args": [ + [ + "std::string", + "ip", + "std::string()" + ], + [ + "int", + "port", + "8554" + ], + [ + "int", + "fps", + "30" + ], + [ + "rtsp::RtspStreamType", + "stream_type", + "rtsp::RtspStreamType::RTSP_STREAM_H265" + ] + ], + "ret_type": null, + "static": false, + "def": "Rtsp(std::string ip = std::string(), int port = 8554, int fps = 30, rtsp::RtspStreamType stream_type = rtsp::RtspStreamType::RTSP_STREAM_H265)", + "py_def": "def __init__(self, ip: str = '', port: int = 8554, fps: int = 30, stream_type: RtspStreamType = ...) -> None" + }, + "start": { + "type": "func", + "name": "start", + "doc": { + "brief": "start rtsp", + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.rtsp.Rtsp.start", + "py_doc": "start rtsp\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err start()", + "overload": [ + { + "type": "func", + "name": "stop", + "doc": { + "brief": "stop rtsp", + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.rtsp.Rtsp.start", + "py_doc": "stop rtsp\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err stop()" + } + ], + "py_def": "def start(self) -> maix.err.Err" + }, + "bind_camera": { + "type": "func", + "name": "bind_camera", + "doc": { + "brief": "Bind camera", + "param": { + "camera": "camera object" + }, + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.rtsp.Rtsp.bind_camera", + "py_doc": "Bind camera\n\nArgs:\n - camera: camera object\n\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [ + [ + "camera::Camera *", + "camera", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err bind_camera(camera::Camera *camera)", + "py_def": "def bind_camera(self, camera: maix.camera.Camera) -> maix.err.Err" + }, + "write": { + "type": "func", + "name": "write", + "doc": { + "brief": "Write data to rtsp", + "param": { + "frame": "video frame data" + }, + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.rtsp.Rtsp.write", + "py_doc": "Write data to rtsp\n\nArgs:\n - frame: video frame data\n\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [ + [ + "video::Frame &", + "frame", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err write(video::Frame &frame)", + "py_def": "def write(self, frame: ...) 
-> maix.err.Err" + }, + "get_url": { + "type": "func", + "name": "get_url", + "doc": { + "brief": "Get url of rtsp", + "return": "url of rtsp", + "maixpy": "maix.rtsp.Rtsp.get_url", + "py_doc": "Get url of rtsp\n\nReturns: url of rtsp\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string get_url()", + "py_def": "def get_url(self) -> str" + }, + "get_urls": { + "type": "func", + "name": "get_urls", + "doc": { + "brief": "Get url list of rtsp", + "return": "url list of rtsp", + "maixpy": "maix.rtsp.Rtsp.get_urls", + "py_doc": "Get url list of rtsp\n\nReturns: url list of rtsp\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector get_urls()", + "py_def": "def get_urls(self) -> list[str]" + }, + "to_camera": { + "type": "func", + "name": "to_camera", + "doc": { + "brief": "Get camera object from rtsp", + "return": "camera object", + "maixpy": "maix.rtsp.Rtsp.to_camera", + "py_doc": "Get camera object from rtsp\n\nReturns: camera object\n" + }, + "args": [], + "ret_type": "camera::Camera*", + "static": false, + "def": "camera::Camera *to_camera()", + "py_def": "def to_camera(self) -> maix.camera.Camera" + }, + "rtsp_is_start": { + "type": "func", + "name": "rtsp_is_start", + "doc": { + "brief": "return rtsp start status", + "return": "true means rtsp is start, false means rtsp is stop.", + "maixpy": "maix.rtsp.Rtsp.rtsp_is_start", + "py_doc": "return rtsp start status\n\nReturns: true means rtsp is start, false means rtsp is stop.\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool rtsp_is_start()", + "py_def": "def rtsp_is_start(self) -> bool" + }, + "add_region": { + "type": "func", + "name": "add_region", + "doc": { + "brief": "return a region object, you can draw image on the region.", + "param": { + "x": "region coordinate x", + "y": "region coordinate y", + "width": "region width", + "height": "region height", + "format": "region format, support Format::FMT_BGRA8888 only" + }, + "return": "the reigon object", + "maixpy": "maix.rtsp.Rtsp.add_region", + "py_doc": "return a region object, you can draw image on the region.\n\nArgs:\n - x: region coordinate x\n - y: region coordinate y\n - width: region width\n - height: region height\n - format: region format, support Format::FMT_BGRA8888 only\n\n\nReturns: the reigon object\n" + }, + "args": [ + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "int", + "width", + null + ], + [ + "int", + "height", + null + ], + [ + "image::Format", + "format", + "image::Format::FMT_BGRA8888" + ] + ], + "ret_type": "rtsp::Region*", + "static": false, + "def": "rtsp::Region *add_region(int x, int y, int width, int height, image::Format format = image::Format::FMT_BGRA8888)", + "py_def": "def add_region(self, x: int, y: int, width: int, height: int, format: maix.image.Format = ...) 
-> Region" + }, + "update_region": { + "type": "func", + "name": "update_region", + "doc": { + "brief": "update and show region", + "return": "error code", + "maixpy": "maix.rtsp.Rtsp.update_region", + "py_doc": "update and show region\n\nReturns: error code\n" + }, + "args": [ + [ + "rtsp::Region &", + "region", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err update_region(rtsp::Region ®ion)", + "py_def": "def update_region(self, region: Region) -> maix.err.Err" + }, + "del_region": { + "type": "func", + "name": "del_region", + "doc": { + "brief": "del region", + "return": "error code", + "maixpy": "maix.rtsp.Rtsp.del_region", + "py_doc": "del region\n\nReturns: error code\n" + }, + "args": [ + [ + "rtsp::Region *", + "region", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err del_region(rtsp::Region *region)", + "py_def": "def del_region(self, region: Region) -> maix.err.Err" + }, + "draw_rect": { + "type": "func", + "name": "draw_rect", + "doc": { + "brief": "Draw a rectangle on the canvas", + "param": { + "id": "region id", + "x": "rectangle coordinate x", + "y": "rectangle coordinate y", + "width": "rectangle width", + "height": "rectangle height", + "color": "rectangle color", + "thickness": "rectangle thickness. If you set it to -1, the rectangle will be filled." + }, + "return": "error code", + "maixpy": "maix.rtsp.Rtsp.draw_rect", + "py_doc": "Draw a rectangle on the canvas\n\nArgs:\n - id: region id\n - x: rectangle coordinate x\n - y: rectangle coordinate y\n - width: rectangle width\n - height: rectangle height\n - color: rectangle color\n - thickness: rectangle thickness. If you set it to -1, the rectangle will be filled.\n\n\nReturns: error code\n" + }, + "args": [ + [ + "int", + "id", + null + ], + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "int", + "width", + null + ], + [ + "int", + "height", + null + ], + [ + "image::Color", + "color", + null + ], + [ + "int", + "thickness", + "1" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err draw_rect(int id, int x, int y, int width, int height, image::Color color, int thickness = 1)", + "py_def": "def draw_rect(self, id: int, x: int, y: int, width: int, height: int, color: maix.image.Color, thickness: int = 1) -> maix.err.Err" + }, + "draw_string": { + "type": "func", + "name": "draw_string", + "doc": { + "brief": "Draw a string on the canvas", + "param": { + "id": "region id", + "x": "string coordinate x", + "y": "string coordinate y", + "str": "string", + "color": "string color", + "size": "string size", + "thickness": "string thickness" + }, + "return": "error code", + "maixpy": "maix.rtsp.Rtsp.draw_string", + "py_doc": "Draw a string on the canvas\n\nArgs:\n - id: region id\n - x: string coordinate x\n - y: string coordinate y\n - str: string\n - color: string color\n - size: string size\n - thickness: string thickness\n\n\nReturns: error code\n" + }, + "args": [ + [ + "int", + "id", + null + ], + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "const char *", + "str", + null + ], + [ + "image::Color", + "color", + null + ], + [ + "int", + "size", + "16" + ], + [ + "int", + "thickness", + "1" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err draw_string(int id, int x, int y, const char *str, image::Color color, int size = 16, int thickness = 1)", + "py_def": "def draw_string(self, id: int, x: int, y: int, str: str, color: maix.image.Color, size: int = 16, thickness: int = 1) -> 
maix.err.Err" + } + }, + "def": "class Rtsp" + } + }, + "auto_add": true + }, + "rtmp": { + "type": "module", + "doc": { + "brief": "maix.rtmp module", + "maixpy": "maix.rtmp", + "py_doc": "maix.rtmp module" + }, + "members": { + "TagType": { + "type": "enum", + "name": "TagType", + "doc": { + "brief": "Video type", + "maixpy": "maix.rtmp.TagType", + "py_doc": "Video type" + }, + "values": [ + [ + "TAG_NONE", + "", + "" + ], + [ + "TAG_VIDEO", + "", + "" + ], + [ + "TAG_AUDIO", + "", + "" + ], + [ + "TAG_SCRIPT", + "", + "" + ] + ], + "def": "enum TagType\n {\n TAG_NONE,\n TAG_VIDEO,\n TAG_AUDIO,\n TAG_SCRIPT,\n }" + }, + "Rtmp": { + "type": "class", + "name": "Rtmp", + "doc": { + "brief": "Rtmp class", + "maixpy": "maix.rtmp.Rtmp", + "py_doc": "Rtmp class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Rtmp", + "doc": { + "brief": "Construct a new Video object", + "note": "Rtmp url : rtmp://host:prot/app/stream\nexample:\nr = Rtmp(\"localhost\", 1935, \"live\", \"stream\")\nmeans rtmp url is rtmp://localhost:1935/live/stream", + "param": { + "host": "rtmp ip", + "port": "rtmp port, default is 1935.", + "app": "rtmp app name", + "stream": "rtmp stream name", + "bitrate": "rtmp bitrate, default is 1000 * 1000" + }, + "maixpy": "maix.rtmp.Rtmp.__init__", + "maixcdk": "maix.rtmp.Rtmp.Rtmp", + "py_doc": "Construct a new Video object\n\nArgs:\n - host: rtmp ip\n - port: rtmp port, default is 1935.\n - app: rtmp app name\n - stream: rtmp stream name\n - bitrate: rtmp bitrate, default is 1000 * 1000\n" + }, + "args": [ + [ + "std::string", + "host", + "\"localhost\"" + ], + [ + "int", + "port", + "1935" + ], + [ + "std::string", + "app", + "std::string()" + ], + [ + "std::string", + "stream", + "std::string()" + ], + [ + "int", + "bitrate", + "1000 * 1000" + ] + ], + "ret_type": null, + "static": false, + "def": "Rtmp(std::string host = \"localhost\", int port = 1935, std::string app = std::string(), std::string stream = std::string(), int bitrate = 1000 * 1000)", + "py_def": "def __init__(self, host: str = 'localhost', port: int = 1935, app: str = '', stream: str = '', bitrate: int = 1000000) -> None" + }, + "push_video": { + "type": "func", + "name": "bitrate", + "doc": { + "brief": "Get bitrate", + "return": "bitrate", + "maixpy": "maix.rtmp.Rtmp.push_video", + "py_doc": "Get bitrate\n\nReturns: bitrate\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int bitrate()", + "py_def": "def push_video(self) -> int" + }, + "bind_camera": { + "type": "func", + "name": "bind_camera", + "doc": { + "brief": "Bind camera", + "note": "If the cam object is bound, the cam object cannot be used elsewhere.", + "param": { + "cam": "camera object" + }, + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.rtmp.Rtmp.bind_camera", + "py_doc": "Bind camera\n\nArgs:\n - cam: camera object\n\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [ + [ + "camera::Camera *", + "cam", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err bind_camera(camera::Camera *cam)", + "py_def": "def bind_camera(self, cam: maix.camera.Camera) -> maix.err.Err" + }, + "get_camera": { + "type": "func", + "name": "get_camera", + "doc": { + "brief": "If you bind a camera, return the camera object.", + "return": "Camera object", + "maixpy": "maix.rtmp.Rtmp.get_camera", + "py_doc": "If you bind a camera, return the camera object.\n\nReturns: Camera object\n" + }, + "args": [], + "ret_type": 
"camera::Camera*", + "static": false, + "def": "camera::Camera *get_camera()", + "py_def": "def get_camera(self) -> maix.camera.Camera" + }, + "start": { + "type": "func", + "name": "start", + "doc": { + "brief": "Start push stream", + "note": "only support flv file now", + "param": { + "path": "File path, if you passed file path, cyclic push the file, else if you bound camera, push the camera image." + }, + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.rtmp.Rtmp.start", + "py_doc": "Start push stream\n\nArgs:\n - path: File path, if you passed file path, cyclic push the file, else if you bound camera, push the camera image.\n\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [ + [ + "std::string", + "path", + "std::string()" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err start(std::string path = std::string())", + "py_def": "def start(self, path: str = '') -> maix.err.Err" + }, + "stop": { + "type": "func", + "name": "stop", + "doc": { + "brief": "Stop push stream", + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.rtmp.Rtmp.stop", + "py_doc": "Stop push stream\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err stop()", + "py_def": "def stop(self) -> maix.err.Err" + }, + "get_path": { + "type": "func", + "name": "get_path", + "doc": { + "brief": "Get the file path of the push stream", + "return": "file path", + "maixpy": "maix.rtmp.Rtmp.get_path", + "py_doc": "Get the file path of the push stream\n\nReturns: file path\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string get_path()", + "overload": [ + { + "type": "func", + "name": "is_started", + "doc": { + "brief": "Check whether push streaming has started", + "return": "If rtmp thread is running, returns true", + "maixpy": "maix.rtmp.Rtmp.get_path", + "py_doc": "Check whether push streaming has started\n\nReturns: If rtmp thread is running, returns true\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_started()" + } + ], + "py_def": "def get_path(self) -> str" + } + }, + "def": "class Rtmp" + } + }, + "auto_add": false + }, + "touchscreen": { + "type": "module", + "doc": { + "brief": "maix.touchscreen module" + }, + "members": { + "TouchScreen": { + "type": "class", + "name": "TouchScreen", + "doc": { + "brief": "TouchScreen class", + "maixpy": "maix.touchscreen.TouchScreen", + "py_doc": "TouchScreen class" + }, + "members": { + "__init__": { + "type": "func", + "name": "TouchScreen", + "doc": { + "brief": "Construct a new TouchScreen object", + "param": { + "device": "touchscreen device path, you can get devices by list_devices method, by default(value is NULL(None in MaixPy)) means the first device", + "open": "If true, touchscreen will automatically call open() after creation. default is true." + }, + "maixpy": "maix.touchscreen.TouchScreen.__init__", + "maixcdk": "maix.touchscreen.TouchScreen.TouchScreen", + "py_doc": "Construct a new TouchScreen object\n\nArgs:\n - device: touchscreen device path, you can get devices by list_devices method, by default(value is NULL(None in MaixPy)) means the first device\n - open: If true, touchscreen will automatically call open() after creation. 
default is true.\n" + }, + "args": [ + [ + "const std::string &", + "device", + "\"\"" + ], + [ + "bool", + "open", + "true" + ] + ], + "ret_type": null, + "static": false, + "def": "TouchScreen(const std::string &device = \"\", bool open = true)", + "py_def": "def __init__(self, device: str = '', open: bool = True) -> None" + }, + "open": { + "type": "func", + "name": "open", + "doc": { + "brief": "open touchscreen device", + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.touchscreen.TouchScreen.open", + "py_doc": "open touchscreen device\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err open()", + "py_def": "def open(self) -> maix.err.Err" + }, + "close": { + "type": "func", + "name": "close", + "doc": { + "brief": "close touchscreen device", + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.touchscreen.TouchScreen.close", + "py_doc": "close touchscreen device\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err close()", + "py_def": "def close(self) -> maix.err.Err" + }, + "read": { + "type": "func", + "name": "read", + "doc": { + "brief": "read touchscreen device", + "attention": "This method will discard same event in buffer, that is:\nif too many move event in buffer when call this method, it will only return the last one,\nand if read pressed or released event, it will return immediately.", + "return": "Returns a list include x, y, pressed state", + "maixpy": "maix.touchscreen.TouchScreen.read", + "py_doc": "read touchscreen device\n\nReturns: Returns a list include x, y, pressed state\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector read()", + "overload": [ + { + "type": "func", + "name": "read0", + "doc": { + "brief": "read touchscreen device", + "attention": "This method will return immediately if have event, so it's better to use available() to check if have more event in buffer,\nor too much event in buffer when your program call this read() interval is too long will make your program slow.", + "return": "Returns a list include x, y, pressed state", + "maixpy": "maix.touchscreen.TouchScreen.read", + "py_doc": "read touchscreen device\n\nReturns: Returns a list include x, y, pressed state\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector read0()" + } + ], + "py_def": "def read(self) -> list[int]" + }, + "available": { + "type": "func", + "name": "available", + "doc": { + "brief": "If we need to read from touchscreen, for event driven touchscreen means have event or not", + "param": { + "timeout": "-1 means block, 0 means no block, >0 means timeout, default is 0, unit is ms." 
+ }, + "return": "true if need to read(have event), false if not", + "maixpy": "maix.touchscreen.TouchScreen.available", + "py_doc": "If we need to read from touchscreen, for event driven touchscreen means have event or not\n\nArgs:\n - timeout: -1 means block, 0 means no block, >0 means timeout, default is 0, unit is ms.\n\n\nReturns: true if need to read(have event), false if not\n" + }, + "args": [ + [ + "int", + "timeout", + "0" + ] + ], + "ret_type": "bool", + "static": false, + "def": "bool available(int timeout = 0)", + "py_def": "def available(self, timeout: int = 0) -> bool" + }, + "is_opened": { + "type": "func", + "name": "is_opened", + "doc": { + "brief": "Check if touchscreen is opened", + "return": "true if touchscreen is opened, false if not", + "maixpy": "maix.touchscreen.TouchScreen.is_opened", + "py_doc": "Check if touchscreen is opened\n\nReturns: true if touchscreen is opened, false if not\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_opened()", + "py_def": "def is_opened(self) -> bool" + } + }, + "def": "class TouchScreen" + } + }, + "auto_add": true + }, + "video": { + "type": "module", + "doc": { + "brief": "maix.video module", + "maixpy": "maix.video", + "py_doc": "maix.video module" + }, + "members": { + "VideoType": { + "type": "enum", + "name": "VideoType", + "doc": { + "brief": "Video type", + "maixpy": "maix.video.VideoType", + "py_doc": "Video type" + }, + "values": [ + [ + "VIDEO_NONE", + "0", + "format invalid" + ], + [ + "VIDEO_ENC_H265_CBR", + "", + "Deprecated" + ], + [ + "VIDEO_ENC_MP4_CBR", + "", + "Deprecated" + ], + [ + "VIDEO_DEC_H265_CBR", + "", + "Deprecated" + ], + [ + "VIDEO_DEC_MP4_CBR", + "", + "Deprecated" + ], + [ + "VIDEO_H264_CBR", + "", + "Deprecated" + ], + [ + "VIDEO_H265_CBR", + "", + "Deprecated" + ], + [ + "VIDEO_H264_CBR_MP4", + "", + "Deprecated" + ], + [ + "VIDEO_H265_CBR_MP4", + "", + "Deprecated" + ], + [ + "VIDEO_H264", + "", + "" + ], + [ + "VIDEO_H264_MP4", + "", + "" + ], + [ + "VIDEO_H264_FLV", + "", + "" + ], + [ + "VIDEO_H265", + "", + "" + ], + [ + "VIDEO_H265_MP4", + "", + "" + ] + ], + "def": "enum VideoType\n {\n VIDEO_NONE = 0, // format invalid\n VIDEO_ENC_H265_CBR, // Deprecated\n VIDEO_ENC_MP4_CBR, // Deprecated\n VIDEO_DEC_H265_CBR, // Deprecated\n VIDEO_DEC_MP4_CBR, // Deprecated\n VIDEO_H264_CBR, // Deprecated\n VIDEO_H265_CBR, // Deprecated\n VIDEO_H264_CBR_MP4, // Deprecated\n VIDEO_H265_CBR_MP4, // Deprecated\n\n VIDEO_H264,\n VIDEO_H264_MP4,\n VIDEO_H264_FLV,\n VIDEO_H265,\n VIDEO_H265_MP4,\n }" + }, + "MediaType": { + "type": "enum", + "name": "MediaType", + "doc": { + "brief": "Video type", + "maixpy": "maix.video.MediaType", + "py_doc": "Video type" + }, + "values": [ + [ + "MEDIA_TYPE_UNKNOWN", + "-1", + "Represents an unknown media type, which is usually treated as AVMEDIA_TYPE_DATA." + ], + [ + "MEDIA_TYPE_VIDEO", + "", + "Represents a video stream, such as video content encoded in H.264, MPEG-4, etc." + ], + [ + "MEDIA_TYPE_AUDIO", + "", + "Represents an audio stream, such as audio content encoded in AAC, MP3, etc." + ], + [ + "MEDIA_TYPE_DATA", + "", + "Represents opaque data streams that are usually continuous. This type of stream is not necessarily audio or video and may be used for other data purposes." + ], + [ + "MEDIA_TYPE_SUBTITLE", + "", + "Represents a subtitle stream used for displaying text or subtitle information, such as SRT, ASS, etc." + ], + [ + "MEDIA_TYPE_ATTACHMENT", + "", + "Represents attachment streams that are usually sparse. 
Attachment streams can include images, fonts, or other files that need to be bundled with the media." + ], + [ + "MEDIA_TYPE_NB", + "", + "Represents the number of media types (count) and indicates the total number of media types defined in this enumeration. It is not a media type itself but is used for counting enumeration items." + ] + ], + "def": "enum MediaType\n {\n MEDIA_TYPE_UNKNOWN = -1, // Represents an unknown media type, which is usually treated as AVMEDIA_TYPE_DATA.\n MEDIA_TYPE_VIDEO, // Represents a video stream, such as video content encoded in H.264, MPEG-4, etc.\n MEDIA_TYPE_AUDIO, // Represents an audio stream, such as audio content encoded in AAC, MP3, etc.\n MEDIA_TYPE_DATA, // Represents opaque data streams that are usually continuous. This type of stream is not necessarily audio or video and may be used for other data purposes.\n MEDIA_TYPE_SUBTITLE, // Represents a subtitle stream used for displaying text or subtitle information, such as SRT, ASS, etc.\n MEDIA_TYPE_ATTACHMENT, // Represents attachment streams that are usually sparse. Attachment streams can include images, fonts, or other files that need to be bundled with the media.\n MEDIA_TYPE_NB // Represents the number of media types (count) and indicates the total number of media types defined in this enumeration. It is not a media type itself but is used for counting enumeration items.\n }" + }, + "timebase_to_us": { + "type": "func", + "name": "timebase_to_us", + "doc": { + "brief": "Convert a value in timebase units to microseconds. value * 1000000 / (timebase[1] / timebase[0])", + "param": { + "timebse": "Time base, used as the unit for calculating playback time. It must be an array containing two parameters,\nin the format [num, den], where the first parameter is the numerator of the time base, and the second parameter is the denominator of the time base.", + "value": "Input value" + }, + "return": "Return the result in microseconds.", + "maixpy": "maix.video.timebase_to_us", + "py_doc": "Convert a value in timebase units to microseconds. value * 1000000 / (timebase[1] / timebase[0])\n\nArgs:\n - timebse: Time base, used as the unit for calculating playback time. It must be an array containing two parameters,\nin the format [num, den], where the first parameter is the numerator of the time base, and the second parameter is the denominator of the time base.\n - value: Input value\n\n\nReturns: Return the result in microseconds.\n" + }, + "args": [ + [ + "std::vector", + "timebase", + null + ], + [ + "uint64_t", + "value", + null + ] + ], + "ret_type": "double", + "static": false, + "def": "double timebase_to_us(std::vector timebase, uint64_t value)", + "py_def": "def timebase_to_us(timebase: list[int], value: int) -> float" + }, + "timebase_to_ms": { + "type": "func", + "name": "timebase_to_ms", + "doc": { + "brief": "Convert a value in timebase units to milliseconds.", + "param": { + "timebse": "Time base, used as the unit for calculating playback time. It must be an array containing two parameters,\nin the format [num, den], where the first parameter is the numerator of the time base, and the second parameter is the denominator of the time base.", + "value": "Input value" + }, + "return": "Return the result in milliseconds.", + "maixpy": "maix.video.timebase_to_ms", + "py_doc": "Convert a value in timebase units to milliseconds.\n\nArgs:\n - timebse: Time base, used as the unit for calculating playback time. 
It must be an array containing two parameters,\nin the format [num, den], where the first parameter is the numerator of the time base, and the second parameter is the denominator of the time base.\n - value: Input value\n\n\nReturns: Return the result in milliseconds.\n" + }, + "args": [ + [ + "std::vector", + "timebase", + null + ], + [ + "uint64_t", + "value", + null + ] + ], + "ret_type": "double", + "static": false, + "def": "double timebase_to_ms(std::vector timebase, uint64_t value)", + "py_def": "def timebase_to_ms(timebase: list[int], value: int) -> float" + }, + "Context": { + "type": "class", + "name": "Context", + "doc": { + "brief": "Context class", + "maixpy": "maix.video.Context", + "py_doc": "Context class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Context", + "doc": { + "brief": "Construct a new Context object", + "param": { + "media_type": "enable capture, if true, you can use capture() function to get an image object", + "timebase": "Time base, used as the unit for calculating playback time. It must be an array containing two parameters,\nin the format [num, den], where the first parameter is the numerator of the time base, and the second parameter is the denominator of the time base." + }, + "maixpy": "maix.video.Context.__init__", + "maixcdk": "maix.video.Context.Context", + "py_doc": "Construct a new Context object\n\nArgs:\n - media_type: enable capture, if true, you can use capture() function to get an image object\n - timebase: Time base, used as the unit for calculating playback time. It must be an array containing two parameters,\nin the format [num, den], where the first parameter is the numerator of the time base, and the second parameter is the denominator of the time base.\n" + }, + "args": [ + [ + "video::MediaType", + "media_type", + null + ], + [ + "std::vector", + "timebase", + null + ] + ], + "ret_type": null, + "static": false, + "def": "Context(video::MediaType media_type, std::vector timebase)", + "py_def": "def __init__(self, media_type: MediaType, timebase: list[int]) -> None" + }, + "audio_sample_rate": { + "type": "func", + "name": "audio_sample_rate", + "doc": { + "brief": "Get sample rate of audio (only valid in the context of audio)", + "return": "sample rate", + "maixpy": "maix.video.Context.audio_sample_rate", + "py_doc": "Get sample rate of audio (only valid in the context of audio)\n\nReturns: sample rate\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int audio_sample_rate()", + "overload": [ + { + "type": "func", + "name": "audio_sample_rate", + "doc": { + "brief": "Get sample rate of audio (only valid in the context of audio)", + "return": "sample rate", + "maixpy": "maix.video.Context.audio_sample_rate", + "py_doc": "Get sample rate of audio (only valid in the context of audio)\n\nReturns: sample rate\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int audio_sample_rate()" + } + ], + "py_def": "def audio_sample_rate(self) -> int" + }, + "audio_channels": { + "type": "func", + "name": "audio_channels", + "doc": { + "brief": "Get channels of audio (only valid in the context of audio)", + "return": "channels", + "maixpy": "maix.video.Context.audio_channels", + "py_doc": "Get channels of audio (only valid in the context of audio)\n\nReturns: channels\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int audio_channels()", + "overload": [ + { + "type": "func", + "name": "audio_channels", + "doc": { + "brief": "Get channels of audio (only valid in the 
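Editor's note: timebase_to_us()/timebase_to_ms() above scale a tick count by the [num, den] time base (value * 1000000 / (den / num)). A quick sanity check; the [1, 30000] time base is only an illustrative value:

    from maix import video

    tb = [1, 30000]                              # hypothetical time base: one tick = 1/30000 s
    ticks = 30000                                # 30000 ticks of 1/30000 s each = 1 second
    print(video.timebase_to_us(tb, ticks))       # expect 1000000.0 (microseconds)
    print(video.timebase_to_ms(tb, ticks))       # expect 1000.0 (milliseconds)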
context of audio)", + "return": "channels", + "maixpy": "maix.video.Context.audio_channels", + "py_doc": "Get channels of audio (only valid in the context of audio)\n\nReturns: channels\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int audio_channels()" + } + ], + "py_def": "def audio_channels(self) -> int" + }, + "audio_format": { + "type": "func", + "name": "audio_format", + "doc": { + "brief": "Get format of audio (only valid in the context of audio)", + "return": "audio format. @see audio::Format", + "maixpy": "maix.video.Context.audio_format", + "py_doc": "Get format of audio (only valid in the context of audio)\n\nReturns: audio format. @see audio::Format\n" + }, + "args": [], + "ret_type": "audio::Format", + "static": false, + "def": "audio::Format audio_format()", + "overload": [ + { + "type": "func", + "name": "audio_format", + "doc": { + "brief": "Get format of audio (only valid in the context of audio)", + "return": "audio format. @see audio::Format", + "maixpy": "maix.video.Context.audio_format", + "py_doc": "Get format of audio (only valid in the context of audio)\n\nReturns: audio format. @see audio::Format\n" + }, + "args": [], + "ret_type": "audio::Format", + "static": false, + "def": "audio::Format audio_format()" + } + ], + "py_def": "def audio_format(self) -> maix.audio.Format" + }, + "set_pcm": { + "type": "func", + "name": "set_pcm", + "doc": { + "brief": "Set pcm data (only valid in the context of audio)", + "param": { + "duration": "Duration of the current pcm. unit: timebase", + "pts": "The start time of this pcm playback. If it is 0, it means this parameter is not supported. unit: timebase" + }, + "return": "err::Err", + "maixpy": "maix.video.Context.set_pcm", + "py_doc": "Set pcm data (only valid in the context of audio)\n\nArgs:\n - duration: Duration of the current pcm. unit: timebase\n - pts: The start time of this pcm playback. If it is 0, it means this parameter is not supported. unit: timebase\n\n\nReturns: err::Err\n" + }, + "args": [ + [ + "maix::Bytes *", + "data", + null + ], + [ + "int", + "duration", + "0" + ], + [ + "uint64_t", + "pts", + "0" + ], + [ + "bool", + "copy", + "true" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err set_pcm(maix::Bytes *data, int duration = 0, uint64_t pts = 0, bool copy = true)", + "py_def": "def set_pcm(self, data: maix.Bytes(bytes), duration: int = 0, pts: int = 0, copy: bool = True) -> maix.err.Err" + }, + "get_pcm": { + "type": "func", + "name": "get_pcm", + "doc": { + "brief": "Get pcm data (only valid in the context of audio)", + "attention": "Note that if you call this interface, you are responsible for releasing the memory of the data, and this interface cannot be called again.", + "return": "Bytes", + "maixpy": "maix.video.Context.get_pcm", + "py_doc": "Get pcm data (only valid in the context of audio)\n\nReturns: Bytes\n" + }, + "args": [], + "ret_type": "Bytes*", + "static": false, + "def": "Bytes *get_pcm()", + "py_def": "def get_pcm(*args, **kwargs)" + }, + "image": { + "type": "func", + "name": "image", + "doc": { + "brief": "Retrieve the image data to be played.", + "attention": "Note that if you call this interface, you are responsible for releasing the memory of the image, and this interface cannot be called again.", + "maixpy": "maix.video.Context.image", + "py_doc": "Retrieve the image data to be played." 
+ }, + "args": [], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *image()", + "py_def": "def image(self) -> maix.image.Image" + }, + "media_type": { + "type": "func", + "name": "media_type", + "doc": { + "brief": "Get the media type to determine whether it is video, audio, or another media type.", + "maixpy": "maix.video.Context.media_type", + "py_doc": "Get the media type to determine whether it is video, audio, or another media type." + }, + "args": [], + "ret_type": "video::MediaType", + "static": false, + "def": "video::MediaType media_type()", + "py_def": "def media_type(self) -> MediaType" + }, + "pts": { + "type": "func", + "name": "pts", + "doc": { + "brief": "Get the start time of the current playback., in units of time base.", + "maixpy": "maix.video.Context.pts", + "py_doc": "Get the start time of the current playback., in units of time base." + }, + "args": [], + "ret_type": "uint64_t", + "static": false, + "def": "uint64_t pts()", + "py_def": "def pts(self) -> int" + }, + "last_pts": { + "type": "func", + "name": "last_pts", + "doc": { + "brief": "Get the start time of the previous playback, in units of time base.", + "maixpy": "maix.video.Context.last_pts", + "py_doc": "Get the start time of the previous playback, in units of time base." + }, + "args": [], + "ret_type": "uint64_t", + "static": false, + "def": "uint64_t last_pts()", + "py_def": "def last_pts(self) -> int" + }, + "timebase": { + "type": "func", + "name": "timebase", + "doc": { + "brief": "Get the time base.", + "maixpy": "maix.video.Context.timebase", + "py_doc": "Get the time base." + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector timebase()", + "py_def": "def timebase(self) -> list[int]" + }, + "duration": { + "type": "func", + "name": "duration", + "doc": { + "brief": "Duration of the current frame. unit: timebase", + "maixpy": "maix.video.Context.duration", + "py_doc": "Duration of the current frame. unit: timebase" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int duration()", + "py_def": "def duration(self) -> int" + }, + "duration_us": { + "type": "func", + "name": "duration_us", + "doc": { + "brief": "Duration of the current frame. unit: us", + "maixpy": "maix.video.Context.duration_us", + "py_doc": "Duration of the current frame. 
unit: us" + }, + "args": [], + "ret_type": "uint64_t", + "static": false, + "def": "uint64_t duration_us()", + "py_def": "def duration_us(self) -> int" + } + }, + "def": "class Context" + }, + "Frame": { + "type": "class", + "name": "Frame", + "doc": { + "brief": "Frame class", + "maixpy": "maix.video.Frame", + "py_doc": "Frame class" + }, + "members": { + "to_bytes": { + "type": "func", + "name": "to_bytes", + "doc": { + "brief": "Get raw data of packet", + "param": { + "copy": "if true, will alloc memory and copy data to new buffer" + }, + "return": "raw data", + "maixpy": "maix.video.Frame.to_bytes", + "py_doc": "Get raw data of packet\n\nArgs:\n - copy: if true, will alloc memory and copy data to new buffer\n\n\nReturns: raw data\n" + }, + "args": [ + [ + "bool", + "copy", + "false" + ] + ], + "ret_type": "Bytes*", + "static": false, + "def": "Bytes *to_bytes(bool copy = false)", + "py_def": "def to_bytes(*args, **kwargs)" + }, + "size": { + "type": "func", + "name": "size", + "doc": { + "brief": "Get raw data size of packet", + "return": "size of raw data", + "maixpy": "maix.video.Frame.size", + "py_doc": "Get raw data size of packet\n\nReturns: size of raw data\n" + }, + "args": [], + "ret_type": "size_t", + "static": false, + "def": "size_t size()", + "py_def": "def size(self) -> int" + }, + "is_valid": { + "type": "func", + "name": "is_valid", + "doc": { + "brief": "Check packet is valid", + "return": "true, packet is valid; false, packet is invalid", + "maixpy": "maix.video.Frame.is_valid", + "py_doc": "Check packet is valid\n\nReturns: true, packet is valid; false, packet is invalid\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_valid()", + "py_def": "def is_valid(self) -> bool" + }, + "set_pts": { + "type": "func", + "name": "set_pts", + "doc": { + "brief": "Set pts", + "param": { + "pts": "presentation time stamp. unit: time_base" + }, + "maixpy": "maix.video.Frame.set_pts", + "py_doc": "Set pts\n\nArgs:\n - pts: presentation time stamp. unit: time_base\n" + }, + "args": [ + [ + "uint64_t", + "pts", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void set_pts(uint64_t pts)", + "py_def": "def set_pts(self, pts: int) -> None" + }, + "set_dts": { + "type": "func", + "name": "set_dts", + "doc": { + "brief": "Set dts", + "param": { + "dts": "decoding time stamp. unit: time_base" + }, + "maixpy": "maix.video.Frame.set_dts", + "py_doc": "Set dts\n\nArgs:\n - dts: decoding time stamp. unit: time_base\n" + }, + "args": [ + [ + "uint64_t", + "dts", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void set_dts(uint64_t dts)", + "py_def": "def set_dts(self, dts: int) -> None" + }, + "set_duration": { + "type": "func", + "name": "set_duration", + "doc": { + "brief": "Set duration", + "param": { + "duration": "packet display time. unit: time_base" + }, + "maixpy": "maix.video.Frame.set_duration", + "py_doc": "Set duration\n\nArgs:\n - duration: packet display time. unit: time_base\n" + }, + "args": [ + [ + "uint64_t", + "duration", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void set_duration(uint64_t duration)", + "py_def": "def set_duration(self, duration: int) -> None" + }, + "get_pts": { + "type": "func", + "name": "get_pts", + "doc": { + "brief": "Set pts", + "param": { + "pts": "presentation time stamp. unit: time_base" + }, + "return": "pts value", + "maixpy": "maix.video.Frame.get_pts", + "py_doc": "Set pts\n\nArgs:\n - pts: presentation time stamp. 
unit: time_base\n\n\nReturns: pts value\n" + }, + "args": [], + "ret_type": "uint64_t", + "static": false, + "def": "uint64_t get_pts()", + "py_def": "def get_pts(self) -> int" + }, + "get_dts": { + "type": "func", + "name": "get_dts", + "doc": { + "brief": "Get dts", + "param": { + "dts": "decoding time stamp. unit: time_base" + }, + "return": "dts value", + "maixpy": "maix.video.Frame.get_dts", + "py_doc": "Get dts\n\nArgs:\n - dts: decoding time stamp. unit: time_base\n\n\nReturns: dts value\n" + }, + "args": [], + "ret_type": "uint64_t", + "static": false, + "def": "uint64_t get_dts()", + "py_def": "def get_dts(self) -> int" + }, + "get_duration": { + "type": "func", + "name": "get_duration", + "doc": { + "brief": "Get duration", + "return": "duration value", + "maixpy": "maix.video.Frame.get_duration", + "py_doc": "Get duration\n\nReturns: duration value\n" + }, + "args": [], + "ret_type": "uint64_t", + "static": false, + "def": "uint64_t get_duration()", + "py_def": "def get_duration(self) -> int" + }, + "type": { + "type": "func", + "name": "type", + "doc": { + "brief": "Get frame type", + "return": "video type. @see video::VideoType", + "maixpy": "maix.video.Frame.type", + "py_doc": "Get frame type\n\nReturns: video type. @see video::VideoType\n" + }, + "args": [], + "ret_type": "video::VideoType", + "static": false, + "def": "video::VideoType type()", + "py_def": "def type(self) -> VideoType" + } + }, + "def": "class Frame" + }, + "Packet": { + "type": "class", + "name": "Packet", + "doc": { + "brief": "Packet class", + "maixpy": "maix.video.Packet", + "py_doc": "Packet class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Packet", + "doc": { + "brief": "Construct a new Packet object.", + "param": { + "data": "src data pointer, use pointers directly without copying.\nNote: this object will try to free this memory", + "len": "data len", + "pts": "presentation time stamp. unit: time_base", + "dts": "decoding time stamp. unit: time_base", + "duration": "packet display time. unit: time_base" + }, + "maixpy": "maix.video.Packet.__init__", + "maixcdk": "maix.video.Packet.Packet", + "py_doc": "Construct a new Packet object.\n\nArgs:\n - data: src data pointer, use pointers directly without copying.\nNote: this object will try to free this memory\n - len: data len\n - pts: presentation time stamp. unit: time_base\n - dts: decoding time stamp. unit: time_base\n - duration: packet display time. 
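Editor's note: Frame's get_pts()/get_duration() are expressed in time-base units, so they pair naturally with timebase_to_ms() above. A small sketch using only the Frame and conversion functions documented here; the [1, 1000] time base in the usage comment is an assumption:

    from maix import video

    def frame_info(frame: video.Frame, timebase: list[int]):
        # print timing and size of an encoded frame, converting time-base ticks to milliseconds
        if not frame.is_valid():
            return
        print("type:", frame.type(),
              "pts:", video.timebase_to_ms(timebase, frame.get_pts()), "ms,",
              "duration:", video.timebase_to_ms(timebase, frame.get_duration()), "ms,",
              "size:", frame.size(), "bytes")

    # usage (assumed time base): frame_info(frame, [1, 1000])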
unit: time_base\n" + }, + "args": [ + [ + "uint8_t *", + "data", + null + ], + [ + "int", + "len", + null + ], + [ + "uint64_t", + "pts", + "-1" + ], + [ + "uint64_t", + "dts", + "-1" + ], + [ + "int64_t", + "duration", + "0" + ] + ], + "ret_type": null, + "static": false, + "def": "Packet(uint8_t *data, int len, uint64_t pts = -1, uint64_t dts = -1, int64_t duration = 0)", + "py_def": "def __init__(self, data: int, len: int, pts: int = -1, dts: int = -1, duration: int = 0) -> None" + }, + "get": { + "type": "func", + "name": "get", + "doc": { + "brief": "Get raw data of packet", + "return": "raw data", + "maixpy": "maix.video.Packet.get", + "py_doc": "Get raw data of packet\n\nReturns: raw data\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector get()", + "py_def": "def get(self) -> list[int]" + }, + "data": { + "type": "func", + "name": "data", + "doc": { + "brief": "Get raw data of packet", + "return": "raw data", + "maixpy": "maix.video.Packet.data", + "py_doc": "Get raw data of packet\n\nReturns: raw data\n" + }, + "args": [], + "ret_type": "uint8_t*", + "static": false, + "def": "uint8_t *data()", + "py_def": "def data(self) -> int" + }, + "data_size": { + "type": "func", + "name": "data_size", + "doc": { + "brief": "Get raw data size of packet", + "return": "size of raw data", + "maixpy": "maix.video.Packet.data_size", + "py_doc": "Get raw data size of packet\n\nReturns: size of raw data\n" + }, + "args": [], + "ret_type": "size_t", + "static": false, + "def": "size_t data_size()", + "py_def": "def data_size(self) -> int" + }, + "is_valid": { + "type": "func", + "name": "is_valid", + "doc": { + "brief": "Check packet is valid", + "return": "true, packet is valid; false, packet is invalid", + "maixpy": "maix.video.Packet.is_valid", + "py_doc": "Check packet is valid\n\nReturns: true, packet is valid; false, packet is invalid\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_valid()", + "py_def": "def is_valid(self) -> bool" + }, + "set_pts": { + "type": "func", + "name": "set_pts", + "doc": { + "brief": "Set pts", + "param": { + "pts": "presentation time stamp. unit: time_base" + }, + "return": "true, packet is valid; false, packet is invalid", + "maixpy": "maix.video.Packet.set_pts", + "py_doc": "Set pts\n\nArgs:\n - pts: presentation time stamp. unit: time_base\n\n\nReturns: true, packet is valid; false, packet is invalid\n" + }, + "args": [ + [ + "uint64_t", + "pts", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void set_pts(uint64_t pts)", + "py_def": "def set_pts(self, pts: int) -> None" + }, + "set_dts": { + "type": "func", + "name": "set_dts", + "doc": { + "brief": "Set dts", + "param": { + "dts": "decoding time stamp. unit: time_base" + }, + "return": "true, packet is valid; false, packet is invalid", + "maixpy": "maix.video.Packet.set_dts", + "py_doc": "Set dts\n\nArgs:\n - dts: decoding time stamp. unit: time_base\n\n\nReturns: true, packet is valid; false, packet is invalid\n" + }, + "args": [ + [ + "uint64_t", + "dts", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void set_dts(uint64_t dts)", + "py_def": "def set_dts(self, dts: int) -> None" + }, + "set_duration": { + "type": "func", + "name": "set_duration", + "doc": { + "brief": "Set duration", + "param": { + "duration": "packet display time. 
unit: time_base" + }, + "return": "true, packet is valid; false, packet is invalid", + "maixpy": "maix.video.Packet.set_duration", + "py_doc": "Set duration\n\nArgs:\n - duration: packet display time. unit: time_base\n\n\nReturns: true, packet is valid; false, packet is invalid\n" + }, + "args": [ + [ + "uint64_t", + "duration", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void set_duration(uint64_t duration)", + "py_def": "def set_duration(self, duration: int) -> None" + } + }, + "def": "class Packet" + }, + "Encoder": { + "type": "class", + "name": "Encoder", + "doc": { + "brief": "Encode class", + "maixpy": "maix.video.Encoder", + "py_doc": "Encode class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Encoder", + "doc": { + "brief": "Construct a new Video object", + "param": { + "width": "picture width. this value may be set automatically. default is 2560.", + "height": "picture height. this value may be set automatically. default is 1440.", + "format": "picture format. default is image::Format::FMT_YVU420SP. @see image::Format", + "type": "video encode/decode type. default is ENC_H265_CBR. @see EncodeType", + "framerate": "frame rate. framerate default is 30, means 30 frames per second\nfor video. 1/time_base is not the average frame rate if the frame rate is not constant.", + "gop": "for h264/h265 encoding, the interval between two I-frames, default is 50.", + "bitrate": "for h264/h265 encoding, used to limit the bandwidth used by compressed data, default is 3000kbps", + "time_base": "frame time base. time_base default is 1000, means 1/1000 ms (not used)", + "capture": "enable capture, if true, you can use capture() function to get an image object", + "block": "This parameter determines whether encoding should block until it is complete.\nIf set to true, it will wait until encoding is finished before returning.\nIf set to false, it will return the current encoding result on the next call." + }, + "maixpy": "maix.video.Encoder.__init__", + "maixcdk": "maix.video.Encoder.Encoder", + "py_doc": "Construct a new Video object\n\nArgs:\n - width: picture width. this value may be set automatically. default is 2560.\n - height: picture height. this value may be set automatically. default is 1440.\n - format: picture format. default is image::Format::FMT_YVU420SP. @see image::Format\n - type: video encode/decode type. default is ENC_H265_CBR. @see EncodeType\n - framerate: frame rate. framerate default is 30, means 30 frames per second\nfor video. 1/time_base is not the average frame rate if the frame rate is not constant.\n - gop: for h264/h265 encoding, the interval between two I-frames, default is 50.\n - bitrate: for h264/h265 encoding, used to limit the bandwidth used by compressed data, default is 3000kbps\n - time_base: frame time base. 
time_base default is 1000, means 1/1000 ms (not used)\n - capture: enable capture, if true, you can use capture() function to get an image object\n - block: This parameter determines whether encoding should block until it is complete.\nIf set to true, it will wait until encoding is finished before returning.\nIf set to false, it will return the current encoding result on the next call.\n" + }, + "args": [ + [ + "std::string", + "path", + "\"\"" + ], + [ + "int", + "width", + "2560" + ], + [ + "int", + "height", + "1440" + ], + [ + "image::Format", + "format", + "image::Format::FMT_YVU420SP" + ], + [ + "video::VideoType", + "type", + "video::VideoType::VIDEO_H264" + ], + [ + "int", + "framerate", + "30" + ], + [ + "int", + "gop", + "50" + ], + [ + "int", + "bitrate", + "3000 * 1000" + ], + [ + "int", + "time_base", + "1000" + ], + [ + "bool", + "capture", + "false" + ], + [ + "bool", + "block", + "true" + ] + ], + "ret_type": null, + "static": false, + "def": "Encoder(std::string path = \"\", int width = 2560, int height = 1440, image::Format format = image::Format::FMT_YVU420SP, video::VideoType type = video::VideoType::VIDEO_H264, int framerate = 30, int gop = 50, int bitrate = 3000 * 1000, int time_base = 1000, bool capture = false, bool block = true)", + "py_def": "def __init__(self, path: str = '', width: int = 2560, height: int = 1440, format: maix.image.Format = ..., type: VideoType = ..., framerate: int = 30, gop: int = 50, bitrate: int = 3000000, time_base: int = 1000, capture: bool = False, block: bool = True) -> None" + }, + "bind_camera": { + "type": "func", + "name": "bind_camera", + "doc": { + "brief": "Bind camera", + "param": { + "camera": "camera object" + }, + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.video.Encoder.bind_camera", + "py_doc": "Bind camera\n\nArgs:\n - camera: camera object\n\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [ + [ + "camera::Camera *", + "camera", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err bind_camera(camera::Camera *camera)", + "py_def": "def bind_camera(self, camera: maix.camera.Camera) -> maix.err.Err" + }, + "encode": { + "type": "func", + "name": "encode", + "doc": { + "brief": "Encode image.", + "param": { + "img": "the image will be encode.\nif the img is NULL, this function will try to get image from camera, you must use bind_camera() function to bind the camera.", + "pcm": "the pcm data will be encode." 
+ }, + "return": "encode result", + "maixpy": "maix.video.Encoder.encode", + "py_doc": "Encode image.\n\nArgs:\n - img: the image will be encode.\nif the img is NULL, this function will try to get image from camera, you must use bind_camera() function to bind the camera.\n - pcm: the pcm data will be encode.\n\n\nReturns: encode result\n" + }, + "args": [ + [ + "image::Image *", + "img", + "maix::video::Encoder::NoneImage" + ], + [ + "Bytes *", + "pcm", + "maix::video::Encoder::NoneBytes" + ] + ], + "ret_type": "video::Frame*", + "static": false, + "def": "video::Frame *encode(image::Image *img = maix::video::Encoder::NoneImage, Bytes *pcm = maix::video::Encoder::NoneBytes)", + "py_def": "def encode(self, img: maix.image.Image = ..., pcm: maix.Bytes(bytes) = b'') -> Frame" + }, + "capture": { + "type": "func", + "name": "capture", + "doc": { + "brief": "Capture image", + "attention": "Each time encode is called, the last captured image will be released.", + "return": "error code", + "maixpy": "maix.video.Encoder.capture", + "py_doc": "Capture image\n\nReturns: error code\n" + }, + "args": [], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *capture()", + "py_def": "def capture(self) -> maix.image.Image" + }, + "width": { + "type": "func", + "name": "width", + "doc": { + "brief": "Get video width", + "return": "video width", + "maixpy": "maix.video.Encoder.width", + "py_doc": "Get video width\n\nReturns: video width\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int width()", + "py_def": "def width(self) -> int" + }, + "height": { + "type": "func", + "name": "height", + "doc": { + "brief": "Get video height", + "return": "video height", + "maixpy": "maix.video.Encoder.height", + "py_doc": "Get video height\n\nReturns: video height\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int height()", + "py_def": "def height(self) -> int" + }, + "type": { + "type": "func", + "name": "type", + "doc": { + "brief": "Get video encode type", + "return": "VideoType", + "maixpy": "maix.video.Encoder.type", + "py_doc": "Get video encode type\n\nReturns: VideoType\n" + }, + "args": [], + "ret_type": "video::VideoType", + "static": false, + "def": "video::VideoType type()", + "py_def": "def type(self) -> VideoType" + }, + "framerate": { + "type": "func", + "name": "framerate", + "doc": { + "brief": "Get video encode framerate", + "return": "frame rate", + "maixpy": "maix.video.Encoder.framerate", + "py_doc": "Get video encode framerate\n\nReturns: frame rate\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int framerate()", + "py_def": "def framerate(self) -> int" + }, + "gop": { + "type": "func", + "name": "gop", + "doc": { + "brief": "Get video encode gop", + "return": "gop value", + "maixpy": "maix.video.Encoder.gop", + "py_doc": "Get video encode gop\n\nReturns: gop value\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int gop()", + "py_def": "def gop(self) -> int" + }, + "bitrate": { + "type": "func", + "name": "bitrate", + "doc": { + "brief": "Get video encode bitrate", + "return": "bitrate value", + "maixpy": "maix.video.Encoder.bitrate", + "py_doc": "Get video encode bitrate\n\nReturns: bitrate value\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int bitrate()", + "py_def": "def bitrate(self) -> int" + }, + "time_base": { + "type": "func", + "name": "time_base", + "doc": { + "brief": "Get video encode time base", + "return": "time base value", + "maixpy": 
"maix.video.Encoder.time_base", + "py_doc": "Get video encode time base\n\nReturns: time base value\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int time_base()", + "py_def": "def time_base(self) -> int" + } + }, + "def": "class Encoder" + }, + "Decoder": { + "type": "class", + "name": "Decoder", + "doc": { + "brief": "Decoder class", + "maixpy": "maix.video.Decoder", + "py_doc": "Decoder class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Decoder", + "doc": { + "brief": "Construct a new decoder object", + "param": { + "path": "Path to the file to be decoded. Supports files with .264 and .mp4 extensions. Note that only mp4 files containing h.264 streams are supported.", + "format": "Decoded output format, currently only support YUV420SP" + }, + "maixpy": "maix.video.Decoder.__init__", + "maixcdk": "maix.video.Decoder.Decoder", + "py_doc": "Construct a new decoder object\n\nArgs:\n - path: Path to the file to be decoded. Supports files with .264 and .mp4 extensions. Note that only mp4 files containing h.264 streams are supported.\n - format: Decoded output format, currently only support YUV420SP\n" + }, + "args": [ + [ + "std::string", + "path", + null + ], + [ + "image::Format", + "format", + "image::Format::FMT_YVU420SP" + ] + ], + "ret_type": null, + "static": false, + "def": "Decoder(std::string path, image::Format format = image::Format::FMT_YVU420SP)", + "py_def": "def __init__(self, path: str, format: maix.image.Format = ...) -> None" + }, + "decode_video": { + "type": "func", + "name": "decode_video", + "doc": { + "brief": "Decode the video stream, returning the image of the next frame each time.", + "param": { + "block": "Whether it blocks or not. If true, it will wait for the decoding to complete and return the current frame.\nIf false, it will return the result of the previous frame's decoding. If the previous frame's decoding result is empty,\nit will return an unknown Context, and you can use the media_type method of the Context to determine if a valid result exists.\ndefault is true." + }, + "return": "Decoded context information.", + "maixpy": "maix.video.Decoder.decode_video", + "py_doc": "Decode the video stream, returning the image of the next frame each time.\n\nArgs:\n - block: Whether it blocks or not. If true, it will wait for the decoding to complete and return the current frame.\nIf false, it will return the result of the previous frame's decoding. 
If the previous frame's decoding result is empty,\nit will return an unknown Context, and you can use the media_type method of the Context to determine if a valid result exists.\ndefault is true.\n\n\nReturns: Decoded context information.\n" + }, + "args": [ + [ + "bool", + "block", + "true" + ] + ], + "ret_type": "video::Context *", + "static": false, + "def": "video::Context * decode_video(bool block = true)", + "py_def": "def decode_video(self, block: bool = True) -> Context" + }, + "decode_audio": { + "type": "func", + "name": "decode_audio", + "doc": { + "brief": "Decode the audio stream, returning the audio data of the next frame each time.", + "return": "Decoded context information.", + "maixpy": "maix.video.Decoder.decode_audio", + "py_doc": "Decode the audio stream, returning the audio data of the next frame each time.\n\nReturns: Decoded context information.\n" + }, + "args": [], + "ret_type": "video::Context *", + "static": false, + "def": "video::Context * decode_audio()", + "py_def": "def decode_audio(self) -> Context" + }, + "decode": { + "type": "func", + "name": "decode", + "doc": { + "brief": "Decode the video and audio stream", + "param": { + "block": "Whether it blocks or not. If true, it will wait for the decoding to complete and return the current frame.\nIf false, it will return the result of the previous frame's decoding. If the previous frame's decoding result is empty,\nit will return an unknown Context, and you can use the media_type method of the Context to determine if a valid result exists.\ndefault is true." + }, + "return": "Decoded context information.", + "maixpy": "maix.video.Decoder.decode", + "py_doc": "Decode the video and audio stream\n\nArgs:\n - block: Whether it blocks or not. If true, it will wait for the decoding to complete and return the current frame.\nIf false, it will return the result of the previous frame's decoding. 
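Editor's note: decode_audio() returns the same Context type, and get_pcm() hands over the raw samples. A rough sketch, assuming "/root/test.mp4" exists and contains an audio track; how end-of-stream is signalled is not spelled out in this excerpt, so the loop simply reads a bounded number of chunks and stops on a non-audio context:

    from maix import video

    dec = video.Decoder("/root/test.mp4")        # hypothetical input file
    if dec.has_audio():
        for _ in range(100):
            ctx = dec.decode_audio()
            if ctx.media_type() != video.MediaType.MEDIA_TYPE_AUDIO:
                break                            # assumption: a non-audio context means no more data
            pcm = ctx.get_pcm()                  # caller owns the bytes; call once per context
            print("pcm chunk:", len(pcm), "bytes,", ctx.audio_sample_rate(), "Hz,", ctx.audio_channels(), "ch")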
If the previous frame's decoding result is empty,\nit will return an unknown Context, and you can use the media_type method of the Context to determine if a valid result exists.\ndefault is true.\n\n\nReturns: Decoded context information.\n" + }, + "args": [ + [ + "bool", + "block", + "true" + ] + ], + "ret_type": "video::Context *", + "static": false, + "def": "video::Context * decode(bool block = true)", + "py_def": "def decode(self, block: bool = True) -> Context" + }, + "width": { + "type": "func", + "name": "width", + "doc": { + "brief": "Get the video width", + "return": "video width", + "maixpy": "maix.video.Decoder.width", + "py_doc": "Get the video width\n\nReturns: video width\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int width()", + "py_def": "def width(self) -> int" + }, + "height": { + "type": "func", + "name": "height", + "doc": { + "brief": "Get the video height", + "return": "video height", + "maixpy": "maix.video.Decoder.height", + "py_doc": "Get the video height\n\nReturns: video height\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int height()", + "py_def": "def height(self) -> int" + }, + "bitrate": { + "type": "func", + "name": "bitrate", + "doc": { + "brief": "Get the video bitrate", + "return": "bitrate value", + "maixpy": "maix.video.Decoder.bitrate", + "py_doc": "Get the video bitrate\n\nReturns: bitrate value\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int bitrate()", + "py_def": "def bitrate(self) -> int" + }, + "fps": { + "type": "func", + "name": "fps", + "doc": { + "brief": "Get the video fps", + "return": "fps value", + "maixpy": "maix.video.Decoder.fps", + "py_doc": "Get the video fps\n\nReturns: fps value\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int fps()", + "py_def": "def fps(self) -> int" + }, + "seek": { + "type": "func", + "name": "seek", + "doc": { + "brief": "Seek to the required playback position", + "param": { + "time": "timestamp value, unit: s" + }, + "return": "return the current position, unit: s", + "maixpy": "maix.video.Decoder.seek", + "py_doc": "Seek to the required playback position\n\nArgs:\n - time: timestamp value, unit: s\n\n\nReturns: return the current position, unit: s\n" + }, + "args": [ + [ + "double", + "time", + "-1" + ] + ], + "ret_type": "double", + "static": false, + "def": "double seek(double time = -1)", + "py_def": "def seek(self, time: float = -1) -> float" + }, + "duration": { + "type": "func", + "name": "duration", + "doc": { + "brief": "Get the maximum duration of the video. If it returns 0, it means it cannot be predicted.", + "return": "duration value, unit: s", + "maixpy": "maix.video.Decoder.duration", + "py_doc": "Get the maximum duration of the video. If it returns 0, it means it cannot be predicted.\n\nReturns: duration value, unit: s\n" + }, + "args": [], + "ret_type": "double", + "static": false, + "def": "double duration()", + "py_def": "def duration(self) -> float" + }, + "timebase": { + "type": "func", + "name": "timebase", + "doc": { + "brief": "Get the time base.", + "maixpy": "maix.video.Decoder.timebase", + "py_doc": "Get the time base." 
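Editor's note: the video path is symmetrical: decode_video() in blocking mode returns one frame per call, while seek() and duration() (both in seconds) handle positioning. A sketch against the same hypothetical file, with the display side omitted:

    from maix import video

    dec = video.Decoder("/root/test.mp4")        # hypothetical input, H.264 in mp4 per the doc above
    print("resolution:", dec.width(), "x", dec.height(),
          "fps:", dec.fps(), "duration:", dec.duration(), "s")
    dec.seek(1.0)                                # jump to t = 1 s
    while True:
        ctx = dec.decode_video(block=True)
        if ctx.media_type() != video.MediaType.MEDIA_TYPE_VIDEO:
            break                                # assumption: a non-video context signals end of stream
        img = ctx.image()                        # decoded image; the caller is responsible for it
        print("frame at", video.timebase_to_ms(dec.timebase(), ctx.pts()), "ms")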
+ }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector timebase()", + "py_def": "def timebase(self) -> list[int]" + }, + "has_audio": { + "type": "func", + "name": "has_audio", + "doc": { + "brief": "If find audio data, return true", + "maixpy": "maix.video.Decoder.has_audio", + "py_doc": "If find audio data, return true" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool has_audio()", + "py_def": "def has_audio(self) -> bool" + }, + "has_video": { + "type": "func", + "name": "has_video", + "doc": { + "brief": "If find video data, return true", + "maixpy": "maix.video.Decoder.has_video", + "py_doc": "If find video data, return true" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool has_video()", + "py_def": "def has_video(self) -> bool" + } + }, + "def": "class Decoder" + }, + "Video": { + "type": "class", + "name": "Video", + "doc": { + "brief": "Video class", + "maixpy": "maix.video.Video", + "py_doc": "Video class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Video", + "doc": { + "brief": "Construct a new Video object", + "param": { + "path": "video path. the path determines the location where you load or save the file, if path is none, the video module will not save or load file.\nxxx.h265 means video format is H265, xxx.mp4 means video format is MP4", + "width": "picture width. this value may be set automatically. default is 2560.", + "height": "picture height. this value may be set automatically. default is 1440.", + "format": "picture pixel format. this value may be set automatically. default is FMT_YVU420SP.", + "time_base": "frame time base. time_base default is 30, means 1/30 ms", + "framerate": "frame rate. framerate default is 30, means 30 frames per second\nfor video. 1/time_base is not the average frame rate if the frame rate is not constant.", + "capture": "enable capture, if true, you can use capture() function to get an image object", + "open": "If true, video will automatically call open() after creation. default is true." + }, + "maixpy": "maix.video.Video.__init__", + "maixcdk": "maix.video.Video.Video", + "py_doc": "Construct a new Video object\n\nArgs:\n - path: video path. the path determines the location where you load or save the file, if path is none, the video module will not save or load file.\nxxx.h265 means video format is H265, xxx.mp4 means video format is MP4\n - width: picture width. this value may be set automatically. default is 2560.\n - height: picture height. this value may be set automatically. default is 1440.\n - format: picture pixel format. this value may be set automatically. default is FMT_YVU420SP.\n - time_base: frame time base. time_base default is 30, means 1/30 ms\n - framerate: frame rate. framerate default is 30, means 30 frames per second\nfor video. 1/time_base is not the average frame rate if the frame rate is not constant.\n - capture: enable capture, if true, you can use capture() function to get an image object\n - open: If true, video will automatically call open() after creation. 
default is true.\n" + }, + "args": [ + [ + "std::string", + "path", + "std::string()" + ], + [ + "int", + "width", + "2560" + ], + [ + "int", + "height", + "1440" + ], + [ + "image::Format", + "format", + "image::Format::FMT_YVU420SP" + ], + [ + "int", + "time_base", + "30" + ], + [ + "int", + "framerate", + "30" + ], + [ + "bool", + "capture", + "false" + ], + [ + "bool", + "open", + "true" + ] + ], + "ret_type": null, + "static": false, + "def": "Video(std::string path = std::string(), int width = 2560, int height = 1440, image::Format format = image::Format::FMT_YVU420SP, int time_base = 30, int framerate = 30, bool capture = false, bool open = true)", + "py_def": "def __init__(self, path: str = '', width: int = 2560, height: int = 1440, format: maix.image.Format = ..., time_base: int = 30, framerate: int = 30, capture: bool = False, open: bool = True) -> None" + }, + "open": { + "type": "func", + "name": "open", + "doc": { + "brief": "Open video and run", + "param": { + "path": "video path. the path determines the location where you load or save the file, if path is none, the video module will not save or load file.\nxxx.h265 means video format is H265, xxx.mp4 means video format is MP4", + "fps": "video fps" + }, + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.video.Video.open", + "py_doc": "Open video and run\n\nArgs:\n - path: video path. the path determines the location where you load or save the file, if path is none, the video module will not save or load file.\nxxx.h265 means video format is H265, xxx.mp4 means video format is MP4\n - fps: video fps\n\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [ + [ + "std::string", + "path", + "std::string()" + ], + [ + "double", + "fps", + "30.0" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err open(std::string path = std::string(), double fps = 30.0)", + "py_def": "def open(self, path: str = '', fps: float = 30.0) -> maix.err.Err" + }, + "close": { + "type": "func", + "name": "close", + "doc": { + "brief": "Close video", + "maixpy": "maix.video.Video.close", + "py_doc": "Close video" + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void close()", + "py_def": "def close(self) -> None" + }, + "bind_camera": { + "type": "func", + "name": "bind_camera", + "doc": { + "brief": "Bind camera", + "param": { + "camera": "camera object" + }, + "return": "error code, err::ERR_NONE means success, others means failed", + "maixpy": "maix.video.Video.bind_camera", + "py_doc": "Bind camera\n\nArgs:\n - camera: camera object\n\n\nReturns: error code, err::ERR_NONE means success, others means failed\n" + }, + "args": [ + [ + "camera::Camera *", + "camera", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err bind_camera(camera::Camera *camera)", + "py_def": "def bind_camera(self, camera: maix.camera.Camera) -> maix.err.Err" + }, + "encode": { + "type": "func", + "name": "encode", + "doc": { + "brief": "Encode image.", + "param": { + "img": "the image will be encode.\nif the img is NULL, this function will try to get image from camera, you must use bind_camera() function to bind the camera." 
+ }, + "return": "encode result", + "maixpy": "maix.video.Video.encode", + "py_doc": "Encode image.\n\nArgs:\n - img: the image will be encode.\nif the img is NULL, this function will try to get image from camera, you must use bind_camera() function to bind the camera.\n\n\nReturns: encode result\n" + }, + "args": [ + [ + "image::Image *", + "img", + "maix::video::Video::NoneImage" + ] + ], + "ret_type": "video::Packet*", + "static": false, + "def": "video::Packet *encode(image::Image *img = maix::video::Video::NoneImage)", + "py_def": "def encode(self, img: maix.image.Image = ...) -> Packet" + }, + "decode": { + "type": "func", + "name": "decode", + "doc": { + "brief": "Decode frame", + "param": { + "frame": "the frame will be decode" + }, + "return": "decode result", + "maixpy": "maix.video.Video.decode", + "py_doc": "Decode frame\n\nArgs:\n - frame: the frame will be decode\n\n\nReturns: decode result\n" + }, + "args": [ + [ + "video::Frame *", + "frame", + "nullptr" + ] + ], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *decode(video::Frame *frame = nullptr)", + "py_def": "def decode(self, frame: Frame = None) -> maix.image.Image" + }, + "finish": { + "type": "func", + "name": "finish", + "doc": { + "brief": "Encode or decode finish", + "return": "error code", + "maixpy": "maix.video.Video.finish", + "py_doc": "Encode or decode finish\n\nReturns: error code\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err finish()", + "py_def": "def finish(self) -> maix.err.Err" + }, + "capture": { + "type": "func", + "name": "capture", + "doc": { + "brief": "Capture image", + "attention": "Each time encode is called, the last captured image will be released.", + "return": "error code", + "maixpy": "maix.video.Video.capture", + "py_doc": "Capture image\n\nReturns: error code\n" + }, + "args": [], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *capture()", + "py_def": "def capture(self) -> maix.image.Image" + }, + "is_recording": { + "type": "func", + "name": "is_recording", + "doc": { + "brief": "Check if video is recording", + "return": "true if video is recording, false if not", + "maixpy": "maix.video.Video.is_recording", + "py_doc": "Check if video is recording\n\nReturns: true if video is recording, false if not\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_recording()", + "py_def": "def is_recording(self) -> bool" + }, + "is_opened": { + "type": "func", + "name": "is_opened", + "doc": { + "brief": "Check if video is opened", + "return": "true if video is opened, false if not", + "maixpy": "maix.video.Video.is_opened", + "py_doc": "Check if video is opened\n\nReturns: true if video is opened, false if not\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_opened()", + "py_def": "def is_opened(self) -> bool" + }, + "is_closed": { + "type": "func", + "name": "is_closed", + "doc": { + "brief": "check video device is closed or not", + "return": "closed or not, bool type", + "maixpy": "maix.video.Video.is_closed", + "py_doc": "check video device is closed or not\n\nReturns: closed or not, bool type\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_closed()", + "py_def": "def is_closed(self) -> bool" + }, + "width": { + "type": "func", + "name": "width", + "doc": { + "brief": "Get video width", + "return": "video width", + "maixpy": "maix.video.Video.width", + "py_doc": "Get video width\n\nReturns: video width\n" + }, + 
"args": [], + "ret_type": "int", + "static": false, + "def": "int width()", + "py_def": "def width(self) -> int" + }, + "height": { + "type": "func", + "name": "height", + "doc": { + "brief": "Get video height", + "return": "video height", + "maixpy": "maix.video.Video.height", + "py_doc": "Get video height\n\nReturns: video height\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int height()", + "py_def": "def height(self) -> int" + } + }, + "def": "class Video" + }, + "VideoRecorder": { + "type": "class", + "name": "VideoRecorder", + "doc": { + "brief": "Video Recorder class. This module is not fully supported and may be deprecated in the future.", + "maixpy": "maix.video.VideoRecorder", + "py_doc": "Video Recorder class. This module is not fully supported and may be deprecated in the future." + }, + "members": { + "__init__": { + "type": "func", + "name": "VideoRecorder", + "doc": { + "brief": "Construct a new VideoRecorder object. This is an object that integrates recording, video capturing, and display functions, which can be used to achieve high-resolution video input when needed.", + "param": { + "open": "If true, video will automatically call open() after creation. default is true." + }, + "maixpy": "maix.video.VideoRecorder.__init__", + "maixcdk": "maix.video.VideoRecorder.VideoRecorder", + "py_doc": "Construct a new VideoRecorder object. This is an object that integrates recording, video capturing, and display functions, which can be used to achieve high-resolution video input when needed.\n\nArgs:\n - open: If true, video will automatically call open() after creation. default is true.\n" + }, + "args": [ + [ + "bool", + "open", + "true" + ] + ], + "ret_type": null, + "static": false, + "def": "VideoRecorder(bool open = true)", + "py_def": "def __init__(self, open: bool = True) -> None" + }, + "lock": { + "type": "func", + "name": "lock", + "doc": { + "brief": "lock video", + "param": { + "timeout": "timeout in ms. unit:ms" + }, + "return": "error code", + "maixpy": "maix.video.VideoRecorder.lock", + "py_doc": "lock video\n\nArgs:\n - timeout: timeout in ms. 
unit:ms\n\n\nReturns: error code\n" + }, + "args": [ + [ + "int64_t", + "timeout", + "-1" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err lock(int64_t timeout = -1)", + "py_def": "def lock(self, timeout: int = -1) -> maix.err.Err" + }, + "unlock": { + "type": "func", + "name": "unlock", + "doc": { + "brief": "unlock video", + "return": "error code", + "maixpy": "maix.video.VideoRecorder.unlock", + "py_doc": "unlock video\n\nReturns: error code\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err unlock()", + "py_def": "def unlock(self) -> maix.err.Err" + }, + "open": { + "type": "func", + "name": "open", + "doc": { + "brief": "Start a thread to handle the input function.", + "return": "error code", + "maixpy": "maix.video.VideoRecorder.open", + "py_doc": "Start a thread to handle the input function.\n\nReturns: error code\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err open()", + "py_def": "def open(self) -> maix.err.Err" + }, + "close": { + "type": "func", + "name": "close", + "doc": { + "brief": "Stop the thread, and reset the object.", + "return": "error code", + "maixpy": "maix.video.VideoRecorder.close", + "py_doc": "Stop the thread, and reset the object.\n\nReturns: error code\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err close()", + "py_def": "def close(self) -> maix.err.Err" + }, + "is_opened": { + "type": "func", + "name": "is_opened", + "doc": { + "brief": "Check whether the object is opened.", + "maixpy": "maix.video.VideoRecorder.is_opened", + "py_doc": "Check whether the object is opened." + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_opened()", + "py_def": "def is_opened(self) -> bool" + }, + "bind_display": { + "type": "func", + "name": "bind_display", + "doc": { + "brief": "Bind a Display object. if this object is not bound, it will not be displayed.", + "param": { + "display": "display object", + "fit": "fit mode. It is recommended to fill in FIT_COVER or FIT_FILL. For maixcam, using FIT_CONTAIN may affect the\nfunctionality of the second layer created by add_channel() in the Display. default is FIT_COVER." + }, + "return": "error code", + "maixpy": "maix.video.VideoRecorder.bind_display", + "py_doc": "Bind a Display object. if this object is not bound, it will not be displayed.\n\nArgs:\n - display: display object\n - fit: fit mode. It is recommended to fill in FIT_COVER or FIT_FILL. For maixcam, using FIT_CONTAIN may affect the\nfunctionality of the second layer created by add_channel() in the Display. default is FIT_COVER.\n\n\nReturns: error code\n" + }, + "args": [ + [ + "display::Display *", + "display", + null + ], + [ + "image::Fit", + "fit", + "image::FIT_COVER" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err bind_display(display::Display *display, image::Fit fit = image::FIT_COVER)", + "py_def": "def bind_display(self, display: maix.display.Display, fit: maix.image.Fit = ...) -> maix.err.Err" + }, + "bind_camera": { + "type": "func", + "name": "bind_camera", + "doc": { + "brief": "Bind a Camera object. if this object is not bound, images cannot be captured.", + "param": { + "camera": "camera object" + }, + "return": "error code", + "maixpy": "maix.video.VideoRecorder.bind_camera", + "py_doc": "Bind a Camera object. 
if this object is not bound, images cannot be captured.\n\nArgs:\n - camera: camera object\n\n\nReturns: error code\n" + }, + "args": [ + [ + "camera::Camera *", + "camera", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err bind_camera(camera::Camera *camera)", + "py_def": "def bind_camera(self, camera: maix.camera.Camera) -> maix.err.Err" + }, + "bind_audio": { + "type": "func", + "name": "bind_audio", + "doc": { + "brief": "Bind a AudioRecorder object. if this object is not bound, audio cannot be captured.", + "param": { + "audio": "audio recorder object" + }, + "return": "error code", + "maixpy": "maix.video.VideoRecorder.bind_audio", + "py_doc": "Bind a AudioRecorder object. if this object is not bound, audio cannot be captured.\n\nArgs:\n - audio: audio recorder object\n\n\nReturns: error code\n" + }, + "args": [ + [ + "audio::Recorder *", + "audio", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err bind_audio(audio::Recorder *audio)", + "py_def": "def bind_audio(self, audio: maix.audio.Recorder) -> maix.err.Err" + }, + "bind_imu": { + "type": "func", + "name": "bind_imu", + "doc": { + "brief": "Bind a IMU object. if this object is not bound, imu data cannot be captured.", + "param": { + "imu": "imu object" + }, + "return": "error code", + "maixpy": "maix.video.VideoRecorder.bind_imu", + "py_doc": "Bind a IMU object. if this object is not bound, imu data cannot be captured.\n\nArgs:\n - imu: imu object\n\n\nReturns: error code\n" + }, + "args": [ + [ + "void *", + "imu", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err bind_imu(void *imu)", + "py_def": "def bind_imu(self, imu: capsule) -> maix.err.Err" + }, + "reset": { + "type": "func", + "name": "reset", + "doc": { + "brief": "Reset the video recorder.", + "note": "It will not reset the bound object; if you have already bound the display using bind_display(), there is no need to rebind the display after calling reset().", + "return": "error code", + "maixpy": "maix.video.VideoRecorder.reset", + "py_doc": "Reset the video recorder.\n\nReturns: error code\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err reset()", + "py_def": "def reset(self) -> maix.err.Err" + }, + "config_path": { + "type": "func", + "name": "config_path", + "doc": { + "brief": "The recorded video will be saved to this path, and this API cannot be called during runtime.", + "param": { + "path": "The path of the video file to be saved" + }, + "return": "error code", + "maixpy": "maix.video.VideoRecorder.config_path", + "py_doc": "The recorded video will be saved to this path, and this API cannot be called during runtime.\n\nArgs:\n - path: The path of the video file to be saved\n\n\nReturns: error code\n" + }, + "args": [ + [ + "std::string", + "path", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err config_path(std::string path)", + "py_def": "def config_path(self, path: str) -> maix.err.Err" + }, + "get_path": { + "type": "func", + "name": "get_path", + "doc": { + "brief": "Get the path of the video file to be saved", + "return": "path", + "maixpy": "maix.video.VideoRecorder.get_path", + "py_doc": "Get the path of the video file to be saved\n\nReturns: path\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string get_path()", + "py_def": "def get_path(self) -> str" + }, + "config_snapshot": { + "type": "func", + "name": "config_snapshot", + "doc": { + "brief": "Set the 
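Editor's note: VideoRecorder is the integrated recorder: bind the devices first, then configure the output before starting. The bind_* and config_path() calls below are documented above; the camera and display constructors come from modules outside this excerpt, and the resolution/fps/bitrate knobs plus the actual start/stop calls are documented just after this point, so treat those parts as assumptions:

    from maix import camera, display, video

    cam = camera.Camera(640, 480)                # assumed, from maix.camera
    disp = display.Display()                     # assumed, from maix.display
    vr = video.VideoRecorder()                   # open=True starts the worker thread
    vr.bind_camera(cam)
    vr.bind_display(disp)                        # optional preview, FIT_COVER by default
    vr.config_path("/root/record.mp4")           # where the recording will be saved
    # resolution, fps and bitrate are set with the config_* calls documented below;
    # starting and stopping the recording uses APIs documented later in this file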
snapshot parameters", + "note": "Enabling snapshot functionality may result in some performance loss.", + "param": { + "enable": "enable or disable snapshot", + "resolution": "image resolution of snapshot", + "format": "image format of snapshot" + }, + "return": "error code", + "maixpy": "maix.video.VideoRecorder.config_snapshot", + "py_doc": "Set the snapshot parameters\n\nArgs:\n - enable: enable or disable snapshot\n - resolution: image resolution of snapshot\n - format: image format of snapshot\n\n\nReturns: error code\n" + }, + "args": [ + [ + "bool", + "enable", + null + ], + [ + "std::vector", + "resolution", + "std::vector()" + ], + [ + "image::Format", + "format", + "image::Format::FMT_YVU420SP" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err config_snapshot(bool enable, std::vector resolution = std::vector(), image::Format format = image::Format::FMT_YVU420SP)", + "py_def": "def config_snapshot(self, enable: bool, resolution: list[int] = [], format: maix.image.Format = ...) -> maix.err.Err" + }, + "config_resolution": { + "type": "func", + "name": "config_resolution", + "doc": { + "brief": "Set the resolution of the video, and this API cannot be called during runtime.", + "note": "You must bind the camera first, and this interface will modify the camera's resolution. The width must be divisible by 32.", + "param": { + "resolution": "The resolution of the video" + }, + "return": "error code", + "maixpy": "maix.video.VideoRecorder.config_resolution", + "py_doc": "Set the resolution of the video, and this API cannot be called during runtime.\n\nArgs:\n - resolution: The resolution of the video\n\n\nReturns: error code\n" + }, + "args": [ + [ + "std::vector", + "resolution", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err config_resolution(std::vector resolution)", + "py_def": "def config_resolution(self, resolution: list[int]) -> maix.err.Err" + }, + "get_resolution": { + "type": "func", + "name": "get_resolution", + "doc": { + "brief": "Get the resolution of the video", + "return": "the resolution of the video", + "maixpy": "maix.video.VideoRecorder.get_resolution", + "py_doc": "Get the resolution of the video\n\nReturns: the resolution of the video\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector get_resolution()", + "py_def": "def get_resolution(self) -> list[int]" + }, + "config_fps": { + "type": "func", + "name": "config_fps", + "doc": { + "brief": "Set the fps of the video, and this API cannot be called during runtime.", + "note": "This interface only affect the fps of the encoded file.", + "return": "error code", + "maixpy": "maix.video.VideoRecorder.config_fps", + "py_doc": "Set the fps of the video, and this API cannot be called during runtime.\n\nReturns: error code\n" + }, + "args": [ + [ + "int", + "fps", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err config_fps(int fps)", + "py_def": "def config_fps(self, fps: int) -> maix.err.Err" + }, + "get_fps": { + "type": "func", + "name": "get_fps", + "doc": { + "brief": "Get the fps of the video.", + "return": "fps value", + "maixpy": "maix.video.VideoRecorder.get_fps", + "py_doc": "Get the fps of the video.\n\nReturns: fps value\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int get_fps()", + "py_def": "def get_fps(self) -> int" + }, + "config_bitrate": { + "type": "func", + "name": "config_bitrate", + "doc": { + "brief": "Set the bitrate of the video, and this API cannot 
be called during runtime.", + "return": "error code", + "maixpy": "maix.video.VideoRecorder.config_bitrate", + "py_doc": "Set the bitrate of the video, and this API cannot be called during runtime.\n\nReturns: error code\n" + }, + "args": [ + [ + "int", + "bitrate", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err config_bitrate(int bitrate)", + "py_def": "def config_bitrate(self, bitrate: int) -> maix.err.Err" + }, + "get_bitrate": { + "type": "func", + "name": "get_bitrate", + "doc": { + "brief": "Get the bitrate of the video.", + "return": "bitrate value", + "maixpy": "maix.video.VideoRecorder.get_bitrate", + "py_doc": "Get the bitrate of the video.\n\nReturns: bitrate value\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int get_bitrate()", + "py_def": "def get_bitrate(self) -> int" + }, + "mute": { + "type": "func", + "name": "mute", + "doc": { + "brief": "Set/Get the mute of the video", + "param": { + "data": "If the parameter is true, mute; if false, unmute; if no parameter is provided, return the mute status." + }, + "return": "error code", + "maixpy": "maix.video.VideoRecorder.mute", + "py_doc": "Set/Get the mute of the video\n\nArgs:\n - data: If the parameter is true, mute; if false, unmute; if no parameter is provided, return the mute status.\n\n\nReturns: error code\n" + }, + "args": [ + [ + "int", + "data", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int mute(int data = -1)", + "py_def": "def mute(self, data: int = -1) -> int" + }, + "volume": { + "type": "func", + "name": "volume", + "doc": { + "brief": "Set/Get the volume of the video", + "param": { + "data": "The volume of the video, the range is 0-100. if no parameter is provided, return the volume." + }, + "return": "error code", + "maixpy": "maix.video.VideoRecorder.volume", + "py_doc": "Set/Get the volume of the video\n\nArgs:\n - data: The volume of the video, the range is 0-100. if no parameter is provided, return the volume.\n\n\nReturns: error code\n" + }, + "args": [ + [ + "int", + "data", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int volume(int data = -1)", + "py_def": "def volume(self, data: int = -1) -> int" + }, + "seek": { + "type": "func", + "name": "seek", + "doc": { + "brief": "Get the current position of the video", + "return": "current position, unit: ms", + "maixpy": "maix.video.VideoRecorder.seek", + "py_doc": "Get the current position of the video\n\nReturns: current position, unit: ms\n" + }, + "args": [], + "ret_type": "int64_t", + "static": false, + "def": "int64_t seek()", + "py_def": "def seek(self) -> int" + }, + "record_start": { + "type": "func", + "name": "record_start", + "doc": { + "brief": "Start recording", + "note": "You must bind the camera at a minimum during input. 
Additionally,\nif you bind a display, the input image will be shown,\nif you bind a audio, audio will be recorded,\nif you bind a IMU, IMU data will be logged.", + "return": "error code", + "maixpy": "maix.video.VideoRecorder.record_start", + "py_doc": "Start recording\n\nReturns: error code\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err record_start()", + "py_def": "def record_start(self) -> maix.err.Err" + }, + "snapshot": { + "type": "func", + "name": "snapshot", + "doc": { + "brief": "Take a snapshot", + "return": "image::Image", + "maixpy": "maix.video.VideoRecorder.snapshot", + "py_doc": "Take a snapshot\n\nReturns: image::Image\n" + }, + "args": [], + "ret_type": "image::Image*", + "static": false, + "def": "image::Image *snapshot()", + "py_def": "def snapshot(self) -> maix.image.Image" + }, + "record_finish": { + "type": "func", + "name": "record_finish", + "doc": { + "brief": "Stop recording and save the video", + "return": "error code", + "maixpy": "maix.video.VideoRecorder.record_finish", + "py_doc": "Stop recording and save the video\n\nReturns: error code\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err record_finish()", + "py_def": "def record_finish(self) -> maix.err.Err" + }, + "draw_rect": { + "type": "func", + "name": "draw_rect", + "doc": { + "brief": "Draw a rect on the video", + "param": { + "id": "id of the rect, range is [0, 15]", + "x": "x coordinate", + "y": "y coordinate", + "w": "width", + "h": "height", + "color": "color", + "tickness": "The line width of the rectangular box; if set to -1, it indicates that the rectangular box will be filled.", + "hidden": "Hide or show the rectangular box" + }, + "return": "error code", + "maixpy": "maix.video.VideoRecorder.draw_rect", + "py_doc": "Draw a rect on the video\n\nArgs:\n - id: id of the rect, range is [0, 15]\n - x: x coordinate\n - y: y coordinate\n - w: width\n - h: height\n - color: color\n - tickness: The line width of the rectangular box; if set to -1, it indicates that the rectangular box will be filled.\n - hidden: Hide or show the rectangular box\n\n\nReturns: error code\n" + }, + "args": [ + [ + "int", + "id", + null + ], + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "int", + "w", + null + ], + [ + "int", + "h", + null + ], + [ + "image::Color", + "color", + "image::COLOR_WHITE" + ], + [ + "int", + "thickness", + "-1" + ], + [ + "bool", + "hidden", + "false" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err draw_rect(int id, int x, int y, int w, int h, image::Color color = image::COLOR_WHITE, int thickness = -1, bool hidden = false)", + "py_def": "def draw_rect(self, id: int, x: int, y: int, w: int, h: int, color: maix.image.Color = ..., thickness: int = -1, hidden: bool = False) -> maix.err.Err" + } + }, + "def": "class VideoRecorder" + } + }, + "auto_add": false + }, + "network": { + "type": "module", + "doc": { + "brief": "maix.network module" + }, + "members": { + "have_network": { + "type": "func", + "name": "have_network", + "doc": { + "brief": "Return if device have network(WiFi/Eth etc.)", + "return": "True if have network, else False.", + "maixpy": "maix.network.have_network", + "py_doc": "Return if device have network(WiFi/Eth etc.)\n\nReturns: True if have network, else False.\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool have_network()", + "py_def": "def have_network() -> bool" + }, + "wifi": { + "type": "module", + "doc": { + "brief": 
"maix.network.wifi module" + }, + "members": { + "AP_Info": { + "type": "class", + "name": "AP_Info", + "doc": { + "brief": "WiFi AP info", + "maixpy": "maix.network.wifi.AP_Info", + "py_doc": "WiFi AP info" + }, + "members": { + "ssid": { + "type": "var", + "name": "ssid", + "doc": { + "brief": "WiFi AP info SSID", + "maixpy": "maix.network.wifi.AP_Info.ssid", + "py_doc": "WiFi AP info SSID" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector ssid" + }, + "bssid": { + "type": "var", + "name": "bssid", + "doc": { + "brief": "WiFi AP info BSSID", + "maixpy": "maix.network.wifi.AP_Info.bssid", + "py_doc": "WiFi AP info BSSID" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::string bssid" + }, + "security": { + "type": "var", + "name": "security", + "doc": { + "brief": "WiFi AP info security", + "maixpy": "maix.network.wifi.AP_Info.security", + "py_doc": "WiFi AP info security" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::string security" + }, + "channel": { + "type": "var", + "name": "channel", + "doc": { + "brief": "WiFi AP info channel", + "maixpy": "maix.network.wifi.AP_Info.channel", + "py_doc": "WiFi AP info channel" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int channel" + }, + "frequency": { + "type": "var", + "name": "frequency", + "doc": { + "brief": "WiFi AP info frequency", + "maixpy": "maix.network.wifi.AP_Info.frequency", + "py_doc": "WiFi AP info frequency" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int frequency" + }, + "rssi": { + "type": "var", + "name": "rssi", + "doc": { + "brief": "WiFi AP info rssi", + "maixpy": "maix.network.wifi.AP_Info.rssi", + "py_doc": "WiFi AP info rssi" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int rssi" + }, + "ssid_str": { + "type": "func", + "name": "ssid_str", + "doc": { + "brief": "WiFi AP info ssid_str", + "maixpy": "maix.network.wifi.AP_Info.ssid_str", + "py_doc": "WiFi AP info ssid_str" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string ssid_str()", + "py_def": "def ssid_str(self) -> str" + } + }, + "def": "class AP_Info" + }, + "list_devices": { + "type": "func", + "name": "list_devices", + "doc": { + "brief": "List WiFi interfaces", + "return": "WiFi interface list, string type", + "maixpy": "maix.network.wifi.list_devices", + "py_doc": "List WiFi interfaces\n\nReturns: WiFi interface list, string type\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector list_devices()", + "py_def": "def list_devices() -> list[str]" + }, + "Wifi": { + "type": "class", + "name": "Wifi", + "doc": { + "brief": "Wifi class", + "maixpy": "maix.network.wifi.Wifi", + "py_doc": "Wifi class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Wifi", + "doc": { + "brief": "Wifi class", + "param": { + "iface": "wifi interface name, default is wlan0" + }, + "maixpy": "maix.network.wifi.Wifi.__init__", + "maixcdk": "maix.network.wifi.Wifi.Wifi", + "py_doc": "Wifi class\n\nArgs:\n - iface: wifi interface name, default is wlan0\n" + }, + "args": [ + [ + "std::string", + "iface", + "\"wlan0\"" + ] + ], + "ret_type": null, + "static": false, + "def": "Wifi(std::string iface = \"wlan0\")", + "py_def": "def __init__(self, iface: str = 'wlan0') -> None" + }, + "get_ip": { + "type": "func", + "name": "get_ip", + "doc": { + "brief": "Get current WiFi ip", + "return": "ip, string type, if network not connected, 
will return empty string.", + "maixpy": "maix.network.wifi.Wifi.get_ip", + "py_doc": "Get current WiFi ip\n\nReturns: ip, string type, if network not connected, will return empty string.\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string get_ip()", + "py_def": "def get_ip(self) -> str" + }, + "get_mac": { + "type": "func", + "name": "get_mac", + "doc": { + "brief": "Get current WiFi MAC address", + "return": "MAC address, string type.", + "maixpy": "maix.network.wifi.Wifi.get_mac", + "py_doc": "Get current WiFi MAC address\n\nReturns: MAC address, string type.\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string get_mac()", + "py_def": "def get_mac(self) -> str" + }, + "get_ssid": { + "type": "func", + "name": "get_ssid", + "doc": { + "brief": "Get current WiFi SSID", + "param": { + "from_cache": "if true, will not read config from file, directly use ssid in cache.\nattention, the first call of this method will automatically read config from file, and calling the connect method will set the cache." + }, + "return": "SSID, string type.", + "maixpy": "maix.network.wifi.Wifi.get_ssid", + "py_doc": "Get current WiFi SSID\n\nArgs:\n - from_cache: if true, will not read config from file, directly use ssid in cache.\nattention, the first call of this method will automatically read config from file, and calling the connect method will set the cache.\n\n\nReturns: SSID, string type.\n" + }, + "args": [ + [ + "bool", + "from_cache", + "true" + ] + ], + "ret_type": "std::string", + "static": false, + "def": "std::string get_ssid(bool from_cache = true)", + "py_def": "def get_ssid(self, from_cache: bool = True) -> str" + }, + "get_gateway": { + "type": "func", + "name": "get_gateway", + "doc": { + "brief": "Get current WiFi gateway ip", + "return": "gateway ip, string type, if network not connected, will return empty string.", + "maixpy": "maix.network.wifi.Wifi.get_gateway", + "py_doc": "Get current WiFi gateway ip\n\nReturns: gateway ip, string type, if network not connected, will return empty string.\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string get_gateway()", + "py_def": "def get_gateway(self) -> str" + }, + "start_scan": { + "type": "func", + "name": "start_scan", + "doc": { + "brief": "Start scanning surrounding WiFi AP info in the background.", + "return": "If success, return err.Err.ERR_NONE, else means failed.", + "maixpy": "maix.network.wifi.Wifi.start_scan", + "py_doc": "Start scanning surrounding WiFi AP info in the background.\n\nReturns: If success, return err.Err.ERR_NONE, else means failed.\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err start_scan()", + "py_def": "def start_scan(self) -> maix.err.Err" + }, + "get_scan_result": { + "type": "func", + "name": "get_scan_result", + "doc": { + "brief": "Get WiFi scan AP info.", + "return": "wifi.AP_Info list.", + "maixpy": "maix.network.wifi.Wifi.get_scan_result", + "py_doc": "Get WiFi scan AP info.\n\nReturns: wifi.AP_Info list.\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector get_scan_result()", + "py_def": "def get_scan_result(self) -> list[AP_Info]" + }, + "stop_scan": { + "type": "func", + "name": "stop_scan", + "doc": { + "brief": "Stop WiFi scan AP info.", + "maixpy": "maix.network.wifi.Wifi.stop_scan", + "py_doc": "Stop WiFi scan AP info." 
+ }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void stop_scan()", + "py_def": "def stop_scan(self) -> None" + }, + "connect": { + "type": "func", + "name": "connect", + "doc": { + "brief": "Connect to WiFi AP.", + "param": { + "ssid": "SSID of AP", + "password": "password of AP, if no password, leave it empty.", + "wait": "wait for got IP or failed or timeout.", + "timeout": "connect timeout internal, unit second." + }, + "return": "If success, return err.Err.ERR_NONE, else means failed.", + "maixpy": "maix.network.wifi.Wifi.connect", + "py_doc": "Connect to WiFi AP.\n\nArgs:\n - ssid: SSID of AP\n - password: password of AP, if no password, leave it empty.\n - wait: wait for got IP or failed or timeout.\n - timeout: connect timeout internal, unit second.\n\n\nReturns: If success, return err.Err.ERR_NONE, else means failed.\n" + }, + "args": [ + [ + "const std::string &", + "ssid", + null + ], + [ + "const std::string &", + "password", + null + ], + [ + "bool", + "wait", + "true" + ], + [ + "int", + "timeout", + "60" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err connect(const std::string &ssid, const std::string &password, bool wait = true, int timeout = 60)", + "py_def": "def connect(self, ssid: str, password: str, wait: bool = True, timeout: int = 60) -> maix.err.Err" + }, + "disconnect": { + "type": "func", + "name": "disconnect", + "doc": { + "brief": "Disconnect from WiFi AP.", + "return": "If success, return err.Err.ERR_NONE, else means failed.", + "maixpy": "maix.network.wifi.Wifi.disconnect", + "py_doc": "Disconnect from WiFi AP.\n\nReturns: If success, return err.Err.ERR_NONE, else means failed.\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err disconnect()", + "py_def": "def disconnect(self) -> maix.err.Err" + }, + "is_connected": { + "type": "func", + "name": "is_connected", + "doc": { + "brief": "See if WiFi is connected to AP.", + "return": "If connected return true, else false.", + "maixpy": "maix.network.wifi.Wifi.is_connected", + "py_doc": "See if WiFi is connected to AP.\n\nReturns: If connected return true, else false.\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_connected()", + "py_def": "def is_connected(self) -> bool" + }, + "start_ap": { + "type": "func", + "name": "start_ap", + "doc": { + "brief": "Start WiFi AP.", + "param": { + "ssid": "SSID of AP.", + "password": "password of AP, if no password, leave it empty.", + "ip": "ip address of hostap, default empty string means auto generated one according to hardware.", + "netmask": "netmask, default 255.255.255.0, now only support 255.255.255.0 .", + "mode": "WiFi mode, default g(IEEE 802.11g (2.4 GHz)), a = IEEE 802.11a (5 GHz), b = IEEE 802.11b (2.4 GHz).", + "channel": "WiFi channel number, 0 means auto select. MaixCAM not support auto, will default channel 1.", + "hidden": "hidden SSID or not." + }, + "return": "If success, return err.Err.ERR_NONE, else means failed.", + "maixpy": "maix.network.wifi.Wifi.start_ap", + "py_doc": "Start WiFi AP.\n\nArgs:\n - ssid: SSID of AP.\n - password: password of AP, if no password, leave it empty.\n - ip: ip address of hostap, default empty string means auto generated one according to hardware.\n - netmask: netmask, default 255.255.255.0, now only support 255.255.255.0 .\n - mode: WiFi mode, default g(IEEE 802.11g (2.4 GHz)), a = IEEE 802.11a (5 GHz), b = IEEE 802.11b (2.4 GHz).\n - channel: WiFi channel number, 0 means auto select. 
MaixCAM not support auto, will default channel 1.\n - hidden: hidden SSID or not.\n\n\nReturns: If success, return err.Err.ERR_NONE, else means failed.\n" + }, + "args": [ + [ + "const std::string &", + "ssid", + null + ], + [ + "const std::string &", + "password", + null + ], + [ + "std::string", + "mode", + "\"g\"" + ], + [ + "int", + "channel", + "0" + ], + [ + "const std::string &", + "ip", + "\"192.168.66.1\"" + ], + [ + "const std::string &", + "netmask", + "\"255.255.255.0\"" + ], + [ + "bool", + "hidden", + "false" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err start_ap(const std::string &ssid, const std::string &password,\n std::string mode = \"g\", int channel = 0,\n const std::string &ip = \"192.168.66.1\", const std::string &netmask = \"255.255.255.0\",\n bool hidden = false)", + "py_def": "def start_ap(self, ssid: str, password: str, mode: str = 'g', channel: int = 0, ip: str = '192.168.66.1', netmask: str = '255.255.255.0', hidden: bool = False) -> maix.err.Err" + }, + "stop_ap": { + "type": "func", + "name": "stop_ap", + "doc": { + "brief": "Stop WiFi AP.", + "return": "If success, return err.Err.ERR_NONE, else means failed.", + "maixpy": "maix.network.wifi.Wifi.stop_ap", + "py_doc": "Stop WiFi AP.\n\nReturns: If success, return err.Err.ERR_NONE, else means failed.\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err stop_ap()", + "py_def": "def stop_ap(self) -> maix.err.Err" + }, + "is_ap_mode": { + "type": "func", + "name": "is_ap_mode", + "doc": { + "brief": "Whether WiFi is AP mode", + "return": "True if AP mode now, or False.", + "maixpy": "maix.network.wifi.Wifi.is_ap_mode", + "py_doc": "Whether WiFi is AP mode\n\nReturns: True if AP mode now, or False.\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_ap_mode()", + "py_def": "def is_ap_mode(self) -> bool" + } + }, + "def": "class Wifi" + } + }, + "auto_add": true + } + }, + "auto_add": true + }, + "comm": { + "type": "module", + "doc": { + "brief": "maix.comm module" + }, + "members": { + "add_default_comm_listener": { + "type": "func", + "name": "add_default_comm_listener", + "doc": { + "brief": "Add default CommProtocol listener.\\nWhen the application uses this port, the listening thread will immediately\\nrelease the port resources and exit. If you need to start the default listening thread again,\\nplease release the default port resources and then call this function.", + "maixpy": "maix.comm.add_default_comm_listener", + "py_doc": "Add default CommProtocol listener.\nWhen the application uses this port, the listening thread will immediately\nrelease the port resources and exit. If you need to start the default listening thread again,\nplease release the default port resources and then call this function." 
+ }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void add_default_comm_listener()" + }, + "rm_default_comm_listener": { + "type": "func", + "name": "rm_default_comm_listener", + "doc": { + "brief": "Remove default CommProtocol listener.", + "return": "bool type.", + "maixpy": "maix.comm.rm_default_comm_listener", + "py_doc": "Remove default CommProtocol listener.\n\nReturns: bool type.\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool rm_default_comm_listener()" + }, + "CommProtocol": { + "type": "class", + "name": "CommProtocol", + "doc": { + "brief": "Class for communication protocol", + "maixpy": "maix.comm.CommProtocol", + "py_doc": "Class for communication protocol" + }, + "members": { + "__init__": { + "type": "func", + "name": "CommProtocol", + "doc": { + "brief": "Construct a new CommProtocol object", + "param": { + "buff_size": "buffer size, default to 1024 bytes" + }, + "maixpy": "maix.comm.CommProtocol.__init__", + "maixcdk": "maix.comm.CommProtocol.CommProtocol", + "py_doc": "Construct a new CommProtocol object\n\nArgs:\n - buff_size: buffer size, default to 1024 bytes\n" + }, + "args": [ + [ + "int", + "buff_size", + "1024" + ], + [ + "uint32_t", + "header", + "maix::protocol::HEADER" + ] + ], + "ret_type": null, + "static": false, + "def": "CommProtocol(int buff_size = 1024, uint32_t header=maix::protocol::HEADER)", + "py_def": "def __init__(self, buff_size: int = 1024, header: int = 3148663466) -> None" + }, + "get_msg": { + "type": "func", + "name": "get_msg", + "doc": { + "brief": "Read data to buffer, and try to decode it as maix.protocol.MSG object", + "param": { + "timeout": "unit ms, 0 means return immediatly, -1 means block util have msg, >0 means block until have msg or timeout." + }, + "return": "decoded data, if nullptr, means no valid frame found.\nAttentioin, delete it after use in C++.", + "maixpy": "maix.comm.CommProtocol.get_msg", + "py_doc": "Read data to buffer, and try to decode it as maix.protocol.MSG object\n\nArgs:\n - timeout: unit ms, 0 means return immediatly, -1 means block util have msg, >0 means block until have msg or timeout.\n\n\nReturns: decoded data, if nullptr, means no valid frame found.\nAttentioin, delete it after use in C++.\n" + }, + "args": [ + [ + "int", + "timeout", + "0" + ] + ], + "ret_type": "protocol::MSG*", + "static": false, + "def": "protocol::MSG *get_msg(int timeout = 0)", + "py_def": "def get_msg(self, timeout: int = 0) -> ..." 
+ }, + "resp_ok": { + "type": "func", + "name": "resp_ok", + "doc": { + "brief": "Send response ok(success) message", + "param": { + "cmd": "CMD value", + "body": "response body, can be null" + }, + "return": "encoded data, if nullptr, means error, and the error code is -err.Err.\nAttentioin, delete it after use in C++.", + "maixpy": "maix.comm.CommProtocol.resp_ok", + "py_doc": "Send response ok(success) message\n\nArgs:\n - cmd: CMD value\n - body: response body, can be null\n\n\nReturns: encoded data, if nullptr, means error, and the error code is -err.Err.\nAttentioin, delete it after use in C++.\n" + }, + "args": [ + [ + "uint8_t", + "cmd", + null + ], + [ + "Bytes *", + "body", + "nullptr" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err resp_ok(uint8_t cmd, Bytes *body = nullptr)", + "py_def": "def resp_ok(self, cmd: int, body: maix.Bytes(bytes) = None) -> maix.err.Err" + }, + "report": { + "type": "func", + "name": "report", + "doc": { + "brief": "Send report message", + "param": { + "cmd": "CMD value", + "body": "report body, can be null" + }, + "return": "encoded data, if nullptr, means error, and the error code is -err.Err.\nAttentioin, delete it after use in C++.", + "maixpy": "maix.comm.CommProtocol.report", + "py_doc": "Send report message\n\nArgs:\n - cmd: CMD value\n - body: report body, can be null\n\n\nReturns: encoded data, if nullptr, means error, and the error code is -err.Err.\nAttentioin, delete it after use in C++.\n" + }, + "args": [ + [ + "uint8_t", + "cmd", + null + ], + [ + "Bytes *", + "body", + "nullptr" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err report(uint8_t cmd, Bytes *body = nullptr)", + "py_def": "def report(self, cmd: int, body: maix.Bytes(bytes) = None) -> maix.err.Err" + }, + "resp_err": { + "type": "func", + "name": "resp_err", + "doc": { + "brief": "Encode response error message to buffer", + "param": { + "cmd": "CMD value", + "code": "error code", + "msg": "error message" + }, + "return": "encoded data, if nullptr, means error, and the error code is -err.Err.\nAttentioin, delete it after use in C++.", + "maixpy": "maix.comm.CommProtocol.resp_err", + "py_doc": "Encode response error message to buffer\n\nArgs:\n - cmd: CMD value\n - code: error code\n - msg: error message\n\n\nReturns: encoded data, if nullptr, means error, and the error code is -err.Err.\nAttentioin, delete it after use in C++.\n" + }, + "args": [ + [ + "uint8_t", + "cmd", + null + ], + [ + "err::Err", + "code", + null + ], + [ + "const std::string &", + "msg", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err resp_err(uint8_t cmd, err::Err code, const std::string &msg)", + "py_def": "def resp_err(self, cmd: int, code: maix.err.Err, msg: str) -> maix.err.Err" + } + }, + "def": "class CommProtocol" + } + }, + "auto_add": true + }, + "fs": { + "type": "module", + "doc": { + "brief": "maix.fs module" + }, + "members": { + "SEEK": { + "type": "enum", + "name": "SEEK", + "doc": { + "brief": "SEEK enums", + "maixpy": "maix.fs.SEEK", + "py_doc": "SEEK enums" + }, + "values": [ + [ + "SEEK_SET", + "0", + "Seek from beginning of file." + ], + [ + "SEEK_CUR", + "1", + "Seek from current position." + ], + [ + "SEEK_END", + "2", + "Seek from end of file." 
+ ] + ], + "def": "enum SEEK\n {\n SEEK_SET = 0, // Seek from beginning of file.\n SEEK_CUR = 1, // Seek from current position.\n SEEK_END = 2, // Seek from end of file.\n }" + }, + "isabs": { + "type": "func", + "name": "isabs", + "doc": { + "brief": "Check if the path is absolute path", + "param": { + "path": "path to check" + }, + "return": "true if path is absolute path", + "maixpy": "maix.fs.isabs", + "py_doc": "Check if the path is absolute path\n\nArgs:\n - path: path to check\n\n\nReturns: true if path is absolute path\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ] + ], + "ret_type": "bool", + "static": false, + "def": "bool isabs(const std::string &path)", + "py_def": "def isabs(path: str) -> bool" + }, + "isdir": { + "type": "func", + "name": "isdir", + "doc": { + "brief": "Check if the path is a directory, if not exist, throw exception", + "param": { + "path": "path to check" + }, + "return": "true if path is a directory", + "maixpy": "maix.fs.isdir", + "py_doc": "Check if the path is a directory, if not exist, throw exception\n\nArgs:\n - path: path to check\n\n\nReturns: true if path is a directory\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ] + ], + "ret_type": "bool", + "static": false, + "def": "bool isdir(const std::string &path)", + "py_def": "def isdir(path: str) -> bool" + }, + "isfile": { + "type": "func", + "name": "isfile", + "doc": { + "brief": "Check if the path is a file, if not exist, throw exception", + "param": { + "path": "path to check" + }, + "return": "true if path is a file", + "maixpy": "maix.fs.isfile", + "py_doc": "Check if the path is a file, if not exist, throw exception\n\nArgs:\n - path: path to check\n\n\nReturns: true if path is a file\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ] + ], + "ret_type": "bool", + "static": false, + "def": "bool isfile(const std::string &path)", + "py_def": "def isfile(path: str) -> bool" + }, + "islink": { + "type": "func", + "name": "islink", + "doc": { + "brief": "Check if the path is a link, if not exist, throw exception", + "param": { + "path": "path to check" + }, + "return": "true if path is a link", + "maixpy": "maix.fs.islink", + "py_doc": "Check if the path is a link, if not exist, throw exception\n\nArgs:\n - path: path to check\n\n\nReturns: true if path is a link\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ] + ], + "ret_type": "bool", + "static": false, + "def": "bool islink(const std::string &path)", + "py_def": "def islink(path: str) -> bool" + }, + "symlink": { + "type": "func", + "name": "symlink", + "doc": { + "brief": "Create soft link", + "param": { + "src": "real file path", + "link": "link file path", + "force": "force link, if already have link file, will delet it first then create." 
+ }, + "maixpy": "maix.fs.symlink", + "py_doc": "Create soft link\n\nArgs:\n - src: real file path\n - link: link file path\n - force: force link, if already have link file, will delet it first then create.\n" + }, + "args": [ + [ + "const std::string &", + "src", + null + ], + [ + "const std::string &", + "link", + null + ], + [ + "bool", + "force", + "false" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err symlink(const std::string &src, const std::string &link, bool force = false)", + "py_def": "def symlink(src: str, link: str, force: bool = False) -> maix.err.Err" + }, + "exists": { + "type": "func", + "name": "exists", + "doc": { + "brief": "Check if the path exists", + "param": { + "path": "path to check" + }, + "return": "true if path exists", + "maixpy": "maix.fs.exists", + "py_doc": "Check if the path exists\n\nArgs:\n - path: path to check\n\n\nReturns: true if path exists\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ] + ], + "ret_type": "bool", + "static": false, + "def": "bool exists(const std::string &path)", + "py_def": "def exists(path: str) -> bool" + }, + "mkdir": { + "type": "func", + "name": "mkdir", + "doc": { + "brief": "Create a directory recursively", + "param": { + "path": "path to create", + "exist_ok": "if true, also return true if directory already exists", + "recursive": "if true, create directory recursively, otherwise, only create one directory, default is true" + }, + "return": "err::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed", + "maixpy": "maix.fs.mkdir", + "py_doc": "Create a directory recursively\n\nArgs:\n - path: path to create\n - exist_ok: if true, also return true if directory already exists\n - recursive: if true, create directory recursively, otherwise, only create one directory, default is true\n\n\nReturns: err::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ], + [ + "bool", + "exist_ok", + "true" + ], + [ + "bool", + "recursive", + "true" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err mkdir(const std::string &path, bool exist_ok = true, bool recursive = true)", + "py_def": "def mkdir(path: str, exist_ok: bool = True, recursive: bool = True) -> maix.err.Err" + }, + "rmdir": { + "type": "func", + "name": "rmdir", + "doc": { + "brief": "Remove a directory", + "param": { + "path": "path to remove", + "recursive": "if true, remove directory recursively, otherwise, only remove empty directory, default is false" + }, + "return": "err::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed", + "maixpy": "maix.fs.rmdir", + "py_doc": "Remove a directory\n\nArgs:\n - path: path to remove\n - recursive: if true, remove directory recursively, otherwise, only remove empty directory, default is false\n\n\nReturns: err::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ], + [ + "bool", + "recursive", + "false" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err rmdir(const std::string &path, bool recursive = false)", + "py_def": "def rmdir(path: str, recursive: bool = False) -> maix.err.Err" + }, + "remove": { + "type": "func", + "name": "remove", + "doc": { + "brief": "Remove a file", + "param": { + "path": "path to remove" + }, + "return": "err::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed", + 
"maixpy": "maix.fs.remove", + "py_doc": "Remove a file\n\nArgs:\n - path: path to remove\n\n\nReturns: err::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err remove(const std::string &path)", + "py_def": "def remove(path: str) -> maix.err.Err" + }, + "rename": { + "type": "func", + "name": "rename", + "doc": { + "brief": "Rename a file or directory", + "param": { + "src": "source path", + "dst": "destination path, if destination dirs not exist, will auto create" + }, + "return": "err::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed", + "maixpy": "maix.fs.rename", + "py_doc": "Rename a file or directory\n\nArgs:\n - src: source path\n - dst: destination path, if destination dirs not exist, will auto create\n\n\nReturns: err::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed\n" + }, + "args": [ + [ + "const std::string &", + "src", + null + ], + [ + "const std::string &", + "dst", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err rename(const std::string &src, const std::string &dst)", + "py_def": "def rename(src: str, dst: str) -> maix.err.Err" + }, + "sync": { + "type": "func", + "name": "sync", + "doc": { + "brief": "Sync files, ensure they're wrriten to disk from RAM", + "maixpy": "maix.fs.sync", + "py_doc": "Sync files, ensure they're wrriten to disk from RAM" + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void sync()", + "py_def": "def sync() -> None" + }, + "getsize": { + "type": "func", + "name": "getsize", + "doc": { + "brief": "Get file size", + "param": { + "path": "path to get size" + }, + "return": "file size if success, -err::Err code if failed", + "maixpy": "maix.fs.getsize", + "py_doc": "Get file size\n\nArgs:\n - path: path to get size\n\n\nReturns: file size if success, -err::Err code if failed\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ] + ], + "ret_type": "int", + "static": false, + "def": "int getsize(const std::string &path)", + "py_def": "def getsize(path: str) -> int" + }, + "dirname": { + "type": "func", + "name": "dirname", + "doc": { + "brief": "Get directory name of path", + "param": { + "path": "path to get dirname" + }, + "return": "dirname if success, empty string if failed", + "maixpy": "maix.fs.dirname", + "py_doc": "Get directory name of path\n\nArgs:\n - path: path to get dirname\n\n\nReturns: dirname if success, empty string if failed\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ] + ], + "ret_type": "std::string", + "static": false, + "def": "std::string dirname(const std::string &path)", + "py_def": "def dirname(path: str) -> str" + }, + "basename": { + "type": "func", + "name": "basename", + "doc": { + "brief": "Get base name of path", + "param": { + "path": "path to get basename" + }, + "return": "basename if success, empty string if failed", + "maixpy": "maix.fs.basename", + "py_doc": "Get base name of path\n\nArgs:\n - path: path to get basename\n\n\nReturns: basename if success, empty string if failed\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ] + ], + "ret_type": "std::string", + "static": false, + "def": "std::string basename(const std::string &path)", + "py_def": "def basename(path: str) -> str" + }, + "abspath": { + "type": "func", + "name": "abspath", + "doc": { + "brief": "Get absolute path", + "param": { 
+ "path": "path to get absolute path" + }, + "return": "absolute path if success, empty string if failed", + "maixpy": "maix.fs.abspath", + "py_doc": "Get absolute path\n\nArgs:\n - path: path to get absolute path\n\n\nReturns: absolute path if success, empty string if failed\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ] + ], + "ret_type": "std::string", + "static": false, + "def": "std::string abspath(const std::string &path)", + "py_def": "def abspath(path: str) -> str" + }, + "getcwd": { + "type": "func", + "name": "getcwd", + "doc": { + "brief": "Get current working directory", + "return": "current working directory absolute path", + "maixpy": "maix.fs.getcwd", + "py_doc": "Get current working directory\n\nReturns: current working directory absolute path\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string getcwd()", + "py_def": "def getcwd() -> str" + }, + "realpath": { + "type": "func", + "name": "realpath", + "doc": { + "brief": "Get realpath of path", + "param": { + "path": "path to get realpath" + }, + "return": "realpath if success, empty string if failed", + "maixpy": "maix.fs.realpath", + "py_doc": "Get realpath of path\n\nArgs:\n - path: path to get realpath\n\n\nReturns: realpath if success, empty string if failed\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ] + ], + "ret_type": "std::string", + "static": false, + "def": "std::string realpath(const std::string &path)", + "py_def": "def realpath(path: str) -> str" + }, + "splitext": { + "type": "func", + "name": "splitext", + "doc": { + "brief": "Get file extension", + "param": { + "path": "path to get extension" + }, + "return": "prefix_path and extension list if success, empty string if failed", + "maixpy": "maix.fs.splitext", + "py_doc": "Get file extension\n\nArgs:\n - path: path to get extension\n\n\nReturns: prefix_path and extension list if success, empty string if failed\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector splitext(const std::string &path)", + "py_def": "def splitext(path: str) -> list[str]" + }, + "listdir": { + "type": "func", + "name": "listdir", + "doc": { + "brief": "List files in directory", + "param": { + "path": "path to list", + "recursive": "if true, list recursively, otherwise, only list current directory, default is false", + "full_path": "if true, return full path, otherwise, only return basename, default is false" + }, + "return": "files list if success, nullptr if failed, you should manually delete it in C++.", + "maixpy": "maix.fs.listdir", + "py_doc": "List files in directory\n\nArgs:\n - path: path to list\n - recursive: if true, list recursively, otherwise, only list current directory, default is false\n - full_path: if true, return full path, otherwise, only return basename, default is false\n\n\nReturns: files list if success, nullptr if failed, you should manually delete it in C++.\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ], + [ + "bool", + "recursive", + "false" + ], + [ + "bool", + "full_path", + "false" + ] + ], + "ret_type": "std::vector*", + "static": false, + "def": "std::vector *listdir(const std::string &path, bool recursive = false, bool full_path = false)", + "py_def": "def listdir(path: str, recursive: bool = False, full_path: bool = False) -> list[str]" + }, + "File": { + "type": "class", + "name": "File", + "doc": { + "brief": "File read write ops", + "maixpy": 
"maix.fs.File", + "py_doc": "File read write ops" + }, + "members": { + "__init__": { + "type": "func", + "name": "File", + "doc": { + "brief": "Construct File object", + "maixpy": "maix.fs.File.__init__", + "maixcdk": "maix.fs.File.File", + "py_doc": "Construct File object" + }, + "args": [], + "ret_type": null, + "static": false, + "def": "File()", + "py_def": "def __init__(self) -> None" + }, + "open": { + "type": "func", + "name": "open", + "doc": { + "brief": "Open a file", + "param": { + "path": "path to open", + "mode": "open mode, support \"r\", \"w\", \"a\", \"r+\", \"w+\", \"a+\", \"rb\", \"wb\", \"ab\", \"rb+\", \"wb+\", \"ab+\"" + }, + "return": "err::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed", + "maixpy": "maix.fs.File.open", + "py_doc": "Open a file\n\nArgs:\n - path: path to open\n - mode: open mode, support \"r\", \"w\", \"a\", \"r+\", \"w+\", \"a+\", \"rb\", \"wb\", \"ab\", \"rb+\", \"wb+\", \"ab+\"\n\n\nReturns: err::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ], + [ + "const std::string &", + "mode", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err open(const std::string &path, const std::string &mode)", + "py_def": "def open(self, path: str, mode: str) -> maix.err.Err" + }, + "close": { + "type": "func", + "name": "close", + "doc": { + "brief": "Close a file", + "maixpy": "maix.fs.File.close", + "py_doc": "Close a file" + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void close()", + "py_def": "def close(self) -> None" + }, + "read": { + "type": "func", + "name": "read", + "doc": { + "brief": "Read data from file API2", + "param": { + "size": "max read size" + }, + "return": "bytes data if success(need delete manually in C/C++), nullptr if failed", + "maixpy": "maix.fs.File.read", + "py_doc": "Read data from file API2\n\nArgs:\n - size: max read size\n\n\nReturns: bytes data if success(need delete manually in C/C++), nullptr if failed\n" + }, + "args": [ + [ + "int", + "size", + null + ] + ], + "ret_type": "std::vector*", + "static": false, + "def": "std::vector *read(int size)", + "py_def": "def read(self, size: int) -> list[int]" + }, + "readline": { + "type": "func", + "name": "readline", + "doc": { + "brief": "Read line from file", + "return": "line if success, empty string if failed. You need to delete the returned object manually in C/C++.", + "maixpy": "maix.fs.File.readline", + "py_doc": "Read line from file\n\nReturns: line if success, empty string if failed. 
You need to delete the returned object manually in C/C++.\n" + }, + "args": [], + "ret_type": "std::string*", + "static": false, + "def": "std::string *readline()", + "py_def": "def readline(self) -> str" + }, + "eof": { + "type": "func", + "name": "eof", + "doc": { + "brief": "End of file or not", + "return": "0 if not reach end of file, else eof.", + "maixpy": "maix.fs.File.eof", + "py_doc": "End of file or not\n\nReturns: 0 if not reach end of file, else eof.\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int eof()", + "py_def": "def eof(self) -> int" + }, + "write": { + "type": "func", + "name": "write", + "doc": { + "brief": "Write data to file API2", + "param": { + "buf": "buffer to write" + }, + "return": "write size if success, -err::Err code if failed", + "maixpy": "maix.fs.File.write", + "py_doc": "Write data to file API2\n\nArgs:\n - buf: buffer to write\n\n\nReturns: write size if success, -err::Err code if failed\n" + }, + "args": [ + [ + "const std::vector &", + "buf", + null + ] + ], + "ret_type": "int", + "static": false, + "def": "int write(const std::vector &buf)", + "py_def": "def write(self, buf: list[int]) -> int" + }, + "seek": { + "type": "func", + "name": "seek", + "doc": { + "brief": "Seek file position", + "param": { + "offset": "offset to seek", + "whence": "@see maix.fs.SEEK" + }, + "return": "new position if success, -err::Err code if failed", + "maixpy": "maix.fs.File.seek", + "py_doc": "Seek file position\n\nArgs:\n - offset: offset to seek\n - whence: @see maix.fs.SEEK\n\n\nReturns: new position if success, -err::Err code if failed\n" + }, + "args": [ + [ + "int", + "offset", + null + ], + [ + "int", + "whence", + null + ] + ], + "ret_type": "int", + "static": false, + "def": "int seek(int offset, int whence)", + "py_def": "def seek(self, offset: int, whence: int) -> int" + }, + "tell": { + "type": "func", + "name": "tell", + "doc": { + "brief": "Get file position", + "return": "file position if success, -err::Err code if failed", + "maixpy": "maix.fs.File.tell", + "py_doc": "Get file position\n\nReturns: file position if success, -err::Err code if failed\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int tell()", + "py_def": "def tell(self) -> int" + }, + "flush": { + "type": "func", + "name": "flush", + "doc": { + "brief": "Flush file", + "return": "err::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed", + "maixpy": "maix.fs.File.flush", + "py_doc": "Flush file\n\nReturns: err::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err flush()", + "py_def": "def flush(self) -> maix.err.Err" + } + }, + "def": "class File" + }, + "open": { + "type": "func", + "name": "open", + "doc": { + "brief": "Open a file, and return a File object", + "param": { + "path": "path to open", + "mode": "open mode, support \"r\", \"w\", \"a\", \"r+\", \"w+\", \"a+\", \"rb\", \"wb\", \"ab\", \"rb+\", \"wb+\", \"ab+\"" + }, + "return": "File object if success(need to delete object manually in C/C++), nullptr if failed", + "maixpy": "maix.fs.open", + "py_doc": "Open a file, and return a File object\n\nArgs:\n - path: path to open\n - mode: open mode, support \"r\", \"w\", \"a\", \"r+\", \"w+\", \"a+\", \"rb\", \"wb\", \"ab\", \"rb+\", \"wb+\", \"ab+\"\n\n\nReturns: File object if success(need to delete object manually in C/C++), nullptr if failed\n" + }, + "args": [ + [ + "const std::string &", + 
"path", + null + ], + [ + "const std::string &", + "mode", + null + ] + ], + "ret_type": "fs::File*", + "static": false, + "def": "fs::File *open(const std::string &path, const std::string &mode)", + "py_def": "def open(path: str, mode: str) -> File" + }, + "tempdir": { + "type": "func", + "name": "tempdir", + "doc": { + "brief": "Get temp files directory", + "return": "temp files directory", + "maixpy": "maix.fs.tempdir", + "py_doc": "Get temp files directory\n\nReturns: temp files directory\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string tempdir()", + "py_def": "def tempdir() -> str" + } + }, + "auto_add": true + }, + "app": { + "type": "module", + "doc": { + "brief": "maix.app module" + }, + "members": { + "Version": { + "type": "class", + "name": "Version", + "doc": { + "brief": "APP version", + "maixpy": "maix.app.Version", + "py_doc": "APP version" + }, + "members": { + "__str__": { + "type": "func", + "name": "__str__", + "doc": { + "brief": "Convert to string, e.g. 1.0.0", + "maixpy": "maix.app.Version.__str__", + "py_doc": "Convert to string, e.g. 1.0.0" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string __str__()", + "py_def": "def __str__(self) -> str" + }, + "from_str": { + "type": "func", + "name": "from_str", + "doc": { + "brief": "Convert from string, e.g. \\\"1.0.0\\\"", + "maixpy": "maix.app.Version.from_str", + "py_doc": "Convert from string, e.g. \"1.0.0\"" + }, + "args": [ + [ + "const string &", + "version_str", + null + ] + ], + "ret_type": "app::Version", + "static": true, + "def": "static app::Version from_str(const string &version_str)", + "py_def": "def from_str(version_str: str) -> Version" + } + }, + "def": "class Version" + }, + "APP_Info": { + "type": "class", + "name": "APP_Info", + "doc": { + "brief": "APP info", + "maixpy": "maix.app.APP_Info", + "py_doc": "APP info" + }, + "members": { + "id": { + "type": "var", + "name": "id", + "doc": { + "brief": "APP id", + "maixpy": "maix.app.APP_Info.id", + "py_doc": "APP id" + }, + "value": null, + "static": false, + "readonly": false, + "def": "string id" + }, + "name": { + "type": "var", + "name": "name", + "doc": { + "brief": "APP name", + "maixpy": "maix.app.APP_Info.name", + "py_doc": "APP name" + }, + "value": null, + "static": false, + "readonly": false, + "def": "string name" + }, + "icon": { + "type": "var", + "name": "icon", + "doc": { + "brief": "APP icon", + "maixpy": "maix.app.APP_Info.icon", + "py_doc": "APP icon" + }, + "value": null, + "static": false, + "readonly": false, + "def": "string icon" + }, + "version": { + "type": "var", + "name": "version", + "doc": { + "brief": "APP version", + "maixpy": "maix.app.APP_Info.version", + "py_doc": "APP version" + }, + "value": null, + "static": false, + "readonly": false, + "def": "Version version" + }, + "exec": { + "type": "var", + "name": "exec", + "doc": { + "brief": "APP exec", + "maixpy": "maix.app.APP_Info.exec", + "py_doc": "APP exec" + }, + "value": null, + "static": false, + "readonly": false, + "def": "string exec" + }, + "author": { + "type": "var", + "name": "author", + "doc": { + "brief": "APP author", + "maixpy": "maix.app.APP_Info.author", + "py_doc": "APP author" + }, + "value": null, + "static": false, + "readonly": false, + "def": "string author" + }, + "desc": { + "type": "var", + "name": "desc", + "doc": { + "brief": "APP desc", + "maixpy": "maix.app.APP_Info.desc", + "py_doc": "APP desc" + }, + "value": null, + "static": false, + "readonly": false, + 
"def": "string desc" + }, + "names": { + "type": "var", + "name": "names", + "doc": { + "brief": "APP names", + "maixpy": "maix.app.APP_Info.names", + "py_doc": "APP names" + }, + "value": null, + "static": false, + "readonly": false, + "def": "map names" + }, + "descs": { + "type": "var", + "name": "descs", + "doc": { + "brief": "APP descs", + "maixpy": "maix.app.APP_Info.descs", + "py_doc": "APP descs" + }, + "value": null, + "static": false, + "readonly": false, + "def": "map descs" + } + }, + "def": "class APP_Info" + }, + "app_id": { + "type": "func", + "name": "app_id", + "doc": { + "brief": "Get current APP ID.", + "return": "APP ID.", + "maixpy": "maix.app.app_id", + "py_doc": "Get current APP ID.\n\nReturns: APP ID.\n" + }, + "args": [], + "ret_type": "string", + "static": false, + "def": "string app_id()", + "py_def": "def app_id() -> str" + }, + "set_app_id": { + "type": "func", + "name": "set_app_id", + "doc": { + "brief": "Set current APP ID.", + "param": { + "app_id": "APP ID." + }, + "maixpy": "maix.app.set_app_id", + "py_doc": "Set current APP ID.\n\nArgs:\n - app_id: APP ID.\n" + }, + "args": [ + [ + "const string &", + "app_id", + null + ] + ], + "ret_type": "string", + "static": false, + "def": "string set_app_id(const string &app_id)", + "py_def": "def set_app_id(app_id: str) -> str" + }, + "get_apps_info_path": { + "type": "func", + "name": "get_apps_info_path", + "doc": { + "brief": "Get APP info file path.", + "maixpy": "maix.app.get_apps_info_path", + "py_doc": "Get APP info file path." + }, + "args": [], + "ret_type": "string", + "static": false, + "def": "string get_apps_info_path()", + "py_def": "def get_apps_info_path() -> str" + }, + "get_apps_info": { + "type": "func", + "name": "get_apps_info", + "doc": { + "brief": "Get APP info list.", + "param": { + "ignore_launcher": "if true, ignore launcher APP. default false.", + "ignore_app_store": "if true, ignore app store APP. default false." + }, + "return": "APP info list. APP_Info object list.", + "maixpy": "maix.app.get_apps_info", + "py_doc": "Get APP info list.\n\nArgs:\n - ignore_launcher: if true, ignore launcher APP. default false.\n - ignore_app_store: if true, ignore app store APP. default false.\n\n\nReturns: APP info list. 
APP_Info object list.\n" + }, + "args": [ + [ + "bool", + "ignore_launcher", + "false" + ], + [ + "bool", + "ignore_app_store", + "false" + ] + ], + "ret_type": "vector&", + "static": false, + "def": "vector &get_apps_info(bool ignore_launcher = false, bool ignore_app_store = false)", + "py_def": "def get_apps_info(ignore_launcher: bool = False, ignore_app_store: bool = False) -> list[APP_Info]" + }, + "get_app_info": { + "type": "func", + "name": "get_app_info", + "doc": { + "brief": "Get app info by app id.", + "return": "app.APP_Info type.", + "maixpy": "maix.app.get_app_info", + "py_doc": "Get app info by app id.\n\nReturns: app.APP_Info type.\n" + }, + "args": [ + [ + "const std::string &", + "app_id", + null + ] + ], + "ret_type": "app::APP_Info", + "static": false, + "def": "app::APP_Info get_app_info(const std::string &app_id)", + "py_def": "def get_app_info(app_id: str) -> APP_Info" + }, + "get_app_data_path": { + "type": "func", + "name": "get_app_data_path", + "doc": { + "brief": "Get APP info, APP can store private data in this directory.", + "return": "APP data path \"./data\", just return the data folder in current path because APP executed in app install path or project path.\nSo, you must execute your program in you project path to use the project/data folder when you debug your APP.", + "maixpy": "maix.app.get_app_data_path", + "py_doc": "Get APP info, APP can store private data in this directory.\n\nReturns: APP data path \"./data\", just return the data folder in current path because APP executed in app install path or project path.\nSo, you must execute your program in you project path to use the project/data folder when you debug your APP.\n" + }, + "args": [], + "ret_type": "string", + "static": false, + "def": "string get_app_data_path()", + "py_def": "def get_app_data_path() -> str" + }, + "get_app_path": { + "type": "func", + "name": "get_app_path", + "doc": { + "brief": "Get APP path.", + "param": { + "app_id": "APP ID, if empty, return current APP path, else return the APP path by app_id." 
+ }, + "return": "APP path, just return the current path because APP executed in app install path or project path.\nSo, you must execute your program in you project path to use the project/data folder when you debug your APP.", + "maixpy": "maix.app.get_app_path", + "py_doc": "Get APP path.\n\nArgs:\n - app_id: APP ID, if empty, return current APP path, else return the APP path by app_id.\n\n\nReturns: APP path, just return the current path because APP executed in app install path or project path.\nSo, you must execute your program in you project path to use the project/data folder when you debug your APP.\n" + }, + "args": [ + [ + "const string &", + "app_id", + "\"\"" + ] + ], + "ret_type": "string", + "static": false, + "def": "string get_app_path(const string &app_id = \"\")", + "py_def": "def get_app_path(app_id: str = '') -> str" + }, + "get_tmp_path": { + "type": "func", + "name": "get_tmp_path", + "doc": { + "brief": "Get global temporary data path, APPs can use this path as temporary data directory.", + "return": "temporary data path.", + "maixpy": "maix.app.get_tmp_path", + "py_doc": "Get global temporary data path, APPs can use this path as temporary data directory.\n\nReturns: temporary data path.\n" + }, + "args": [], + "ret_type": "string", + "static": false, + "def": "string get_tmp_path()", + "py_def": "def get_tmp_path() -> str" + }, + "get_share_path": { + "type": "func", + "name": "get_share_path", + "doc": { + "brief": "Get data path of share, shared data like picture and video will put in this directory", + "return": "share data path.", + "maixpy": "maix.app.get_share_path", + "py_doc": "Get data path of share, shared data like picture and video will put in this directory\n\nReturns: share data path.\n" + }, + "args": [], + "ret_type": "string", + "static": false, + "def": "string get_share_path()", + "py_def": "def get_share_path() -> str" + }, + "get_picture_path": { + "type": "func", + "name": "get_picture_path", + "doc": { + "brief": "Get picture path of share, shared picture will put in this directory", + "return": "share picture path.", + "maixpy": "maix.app.get_picture_path", + "py_doc": "Get picture path of share, shared picture will put in this directory\n\nReturns: share picture path.\n" + }, + "args": [], + "ret_type": "string", + "static": false, + "def": "string get_picture_path()", + "py_def": "def get_picture_path() -> str" + }, + "get_video_path": { + "type": "func", + "name": "get_video_path", + "doc": { + "brief": "Get video path of share, shared video will put in this directory", + "return": "share video path.", + "maixpy": "maix.app.get_video_path", + "py_doc": "Get video path of share, shared video will put in this directory\n\nReturns: share video path.\n" + }, + "args": [], + "ret_type": "string", + "static": false, + "def": "string get_video_path()", + "py_def": "def get_video_path() -> str" + }, + "get_font_path": { + "type": "func", + "name": "get_font_path", + "doc": { + "brief": "Get font path of share, shared font will put in this directory", + "return": "share font path.", + "maixpy": "maix.app.get_font_path", + "py_doc": "Get font path of share, shared font will put in this directory\n\nReturns: share font path.\n" + }, + "args": [], + "ret_type": "string", + "static": false, + "def": "string get_font_path()", + "py_def": "def get_font_path() -> str" + }, + "get_icon_path": { + "type": "func", + "name": "get_icon_path", + "doc": { + "brief": "Get icon path of share, shared icon will put in this directory", + "return": "share icon path.", 
+ "maixpy": "maix.app.get_icon_path", + "py_doc": "Get icon path of share, shared icon will put in this directory\n\nReturns: share icon path.\n" + }, + "args": [], + "ret_type": "string", + "static": false, + "def": "string get_icon_path()", + "py_def": "def get_icon_path() -> str" + }, + "get_sys_config_kv": { + "type": "func", + "name": "get_sys_config_kv", + "doc": { + "brief": "Get system config item value.", + "param": { + "item": "name of setting item, e.g. wifi, language. more see settings APP.", + "key": "config key, e.g. for wifi, key can be ssid, for language, key can be locale.", + "value": "default value, if not found, return this value.", + "from_cache": "if true, read from cache, if false, read from file." + }, + "return": "config value, always string type, if not found, return empty string.", + "maixpy": "maix.app.get_sys_config_kv", + "py_doc": "Get system config item value.\n\nArgs:\n - item: name of setting item, e.g. wifi, language. more see settings APP.\n - key: config key, e.g. for wifi, key can be ssid, for language, key can be locale.\n - value: default value, if not found, return this value.\n - from_cache: if true, read from cache, if false, read from file.\n\n\nReturns: config value, always string type, if not found, return empty string.\n" + }, + "args": [ + [ + "const string &", + "item", + null + ], + [ + "const string &", + "key", + null + ], + [ + "const string &", + "value", + "\"\"" + ], + [ + "bool", + "from_cache", + "true" + ] + ], + "ret_type": "string", + "static": false, + "def": "string get_sys_config_kv(const string &item, const string &key, const string &value = \"\", bool from_cache = true)", + "py_def": "def get_sys_config_kv(item: str, key: str, value: str = '', from_cache: bool = True) -> str" + }, + "get_app_config_kv": { + "type": "func", + "name": "get_app_config_kv", + "doc": { + "brief": "Get APP config item value.", + "param": { + "item": "name of setting item, e.g. user_info", + "key": "config key, e.g. for user_info, key can be name, age etc.", + "value": "default value, if not found, return this value.", + "from_cache": "if true, read from cache, if false, read from file." + }, + "return": "config value, always string type, if not found, return empty string.", + "maixpy": "maix.app.get_app_config_kv", + "py_doc": "Get APP config item value.\n\nArgs:\n - item: name of setting item, e.g. user_info\n - key: config key, e.g. for user_info, key can be name, age etc.\n - value: default value, if not found, return this value.\n - from_cache: if true, read from cache, if false, read from file.\n\n\nReturns: config value, always string type, if not found, return empty string.\n" + }, + "args": [ + [ + "const string &", + "item", + null + ], + [ + "const string &", + "key", + null + ], + [ + "const string &", + "value", + "\"\"" + ], + [ + "bool", + "from_cache", + "true" + ] + ], + "ret_type": "string", + "static": false, + "def": "string get_app_config_kv(const string &item, const string &key, const string &value = \"\", bool from_cache = true)", + "py_def": "def get_app_config_kv(item: str, key: str, value: str = '', from_cache: bool = True) -> str" + }, + "set_app_config_kv": { + "type": "func", + "name": "set_app_config_kv", + "doc": { + "brief": "Set APP config item value.", + "param": { + "item": "name of setting item, e.g. user_info", + "key": "config key, e.g. for user_info, key can be name, age etc.", + "value": "config value, always string type.", + "write_file": "if true, write to file, if false, just write to cache." 
+ }, + "return": "err::Err", + "maixpy": "maix.app.set_app_config_kv", + "py_doc": "Set APP config item value.\n\nArgs:\n - item: name of setting item, e.g. user_info\n - key: config key, e.g. for user_info, key can be name, age etc.\n - value: config value, always string type.\n - write_file: if true, write to file, if false, just write to cache.\n\n\nReturns: err::Err\n" + }, + "args": [ + [ + "const string &", + "item", + null + ], + [ + "const string &", + "key", + null + ], + [ + "const string &", + "value", + null + ], + [ + "bool", + "write_file", + "true" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err set_app_config_kv(const string &item, const string &key, const string &value, bool write_file = true)", + "py_def": "def set_app_config_kv(item: str, key: str, value: str, write_file: bool = True) -> maix.err.Err" + }, + "get_app_config_path": { + "type": "func", + "name": "get_app_config_path", + "doc": { + "brief": "Get APP config path, ini format, so you can use your own ini parser to parse it like `configparser` in Python.\\nAll APP config info is recommended to store in this file.", + "return": "APP config path(ini format).", + "maixpy": "maix.app.get_app_config_path", + "py_doc": "Get APP config path, ini format, so you can use your own ini parser to parse it like `configparser` in Python.\nAll APP config info is recommended to store in this file.\n\nReturns: APP config path(ini format).\n" + }, + "args": [], + "ret_type": "string", + "static": false, + "def": "string get_app_config_path()", + "py_def": "def get_app_config_path() -> str" + }, + "set_exit_msg": { + "type": "func", + "name": "set_exit_msg", + "doc": { + "brief": "Set APP exit code and exit message.\\nIf code != 0, the launcher will show a dialog to user, and display the msg.", + "param": { + "code": "exit code, 0 means success, other means error, if code is 0, do nothing.", + "msg": "exit message, if code is 0, msg is not used." + }, + "return": "exit code, the same as arg @code.", + "maixpy": "maix.app.set_exit_msg", + "py_doc": "Set APP exit code and exit message.\nIf code != 0, the launcher will show a dialog to user, and display the msg.\n\nArgs:\n - code: exit code, 0 means success, other means error, if code is 0, do nothing.\n - msg: exit message, if code is 0, msg is not used.\n\n\nReturns: exit code, the same as arg @code.\n" + }, + "args": [ + [ + "err::Err", + "code", + null + ], + [ + "const string &", + "msg", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err set_exit_msg(err::Err code, const string &msg)", + "py_def": "def set_exit_msg(code: maix.err.Err, msg: str) -> maix.err.Err" + }, + "get_exit_msg": { + "type": "func", + "name": "get_exit_msg", + "doc": { + "brief": "Get APP exit code and exit message.", + "param": { + "cache": "if true, read from cache, if false, read from file. default false." + }, + "return": "exit return app_id, exit code and exit message.", + "maixpy": "maix.app.get_exit_msg", + "py_doc": "Get APP exit code and exit message.\n\nArgs:\n - cache: if true, read from cache, if false, read from file. 
default false.\n\n\nReturns: exit return app_id, exit code and exit message.\n" + }, + "args": [ + [ + "bool", + "cache", + "false" + ] + ], + "ret_type": "tuple", + "static": false, + "def": "tuple get_exit_msg(bool cache = false)", + "py_def": "def get_exit_msg(cache: bool = False) -> tuple[str, maix.err.Err, str]" + }, + "have_exit_msg": { + "type": "func", + "name": "have_exit_msg", + "doc": { + "brief": "Check if have exit msg", + "param": { + "cache": "if true, just check from cache, if false, check from file. default false." + }, + "return": "true if have exit msg, false if not.", + "maixpy": "maix.app.have_exit_msg", + "py_doc": "Check if have exit msg\n\nArgs:\n - cache: if true, just check from cache, if false, check from file. default false.\n\n\nReturns: true if have exit msg, false if not.\n" + }, + "args": [ + [ + "bool", + "cache", + "false" + ] + ], + "ret_type": "bool", + "static": false, + "def": "bool have_exit_msg(bool cache = false)", + "py_def": "def have_exit_msg(cache: bool = False) -> bool" + }, + "switch_app": { + "type": "func", + "name": "switch_app", + "doc": { + "brief": "Exit this APP and start another APP(by launcher).\\nCall this API will call set_exit_flag(true), you should check app::need_exit() in your code.\\nAnd exit this APP if app::need_exit() return true.", + "param": { + "app_id": "APP ID which will be started. app_id and idx must have one is valid.", + "idx": "APP index. app_id and idx must have one is valid.", + "start_param": "string type, will send to app, app can get this param by `app.get_start_param()`" + }, + "attention": "If app id or idx the same as current app, do nothing.", + "maixpy": "maix.app.switch_app", + "py_doc": "Exit this APP and start another APP(by launcher).\nCall this API will call set_exit_flag(true), you should check app::need_exit() in your code.\nAnd exit this APP if app::need_exit() return true.\n\nArgs:\n - app_id: APP ID which will be started. app_id and idx must have one is valid.\n - idx: APP index. 
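The exit-message APIs above form a small handshake with the launcher: the APP records a code and message before exiting, and the launcher (or a later run) can query it. A hedged sketch; the specific err.Err member used here is only illustrative:

from maix import app, err

# In the APP, just before exiting on failure:
app.set_exit_msg(err.Err.ERR_RUNTIME, "camera open failed")   # error code is illustrative

# On the reading side:
if app.have_exit_msg():
    app_id, code, msg = app.get_exit_msg()
    print(app_id, code, msg)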
app_id and idx must have one is valid.\n - start_param: string type, will send to app, app can get this param by `app.get_start_param()`\n" + }, + "args": [ + [ + "const string &", + "app_id", + null + ], + [ + "int", + "idx", + "-1" + ], + [ + "const std::string &", + "start_param", + "\"\"" + ] + ], + "ret_type": "void", + "static": false, + "def": "void switch_app(const string &app_id, int idx = -1, const std::string &start_param = \"\")", + "py_def": "def switch_app(app_id: str, idx: int = -1, start_param: str = '') -> None" + }, + "get_start_param": { + "type": "func", + "name": "get_start_param", + "doc": { + "brief": "Get start param set by caller", + "return": "param, string type", + "maixpy": "maix.app.get_start_param", + "py_doc": "Get start param set by caller\n\nReturns: param, string type\n" + }, + "args": [], + "ret_type": "const std::string", + "static": false, + "def": "const std::string get_start_param()", + "py_def": "def get_start_param() -> str" + }, + "need_exit": { + "type": "func", + "name": "need_exit", + "doc": { + "brief": "Shoule this APP exit?", + "return": "true if this APP should exit, false if not.", + "attention": "This API is a function, not a variable.", + "maixpy": "maix.app.need_exit", + "py_doc": "Shoule this APP exit?\n\nReturns: true if this APP should exit, false if not.\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool need_exit()", + "py_def": "def need_exit() -> bool" + }, + "running": { + "type": "func", + "name": "running", + "doc": { + "brief": "App should running? The same as !app::need_exit() (not app::need_exit() in MaixPy).", + "return": "true if this APP should running, false if not.", + "attention": "This API is a function, not a variable.", + "maixpy": "maix.app.running", + "py_doc": "App should running? The same as !app::need_exit() (not app::need_exit() in MaixPy).\n\nReturns: true if this APP should running, false if not.\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool running()", + "py_def": "def running() -> bool" + }, + "set_exit_flag": { + "type": "func", + "name": "set_exit_flag", + "doc": { + "brief": "Set exit flag. You can get exit flag by app.need_exit().", + "param": { + "exit": "true if this APP should exit, false if not." + }, + "maixpy": "maix.app.set_exit_flag", + "py_doc": "Set exit flag. 
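need_exit()/running() and switch_app() together define the cooperative exit model described above: switching APPs only sets the exit flag, and the main loop is expected to notice it. A minimal MaixPy main-loop sketch (the "settings" app_id and the start parameter are illustrative values):

from maix import app, time

print("started with:", app.get_start_param())   # whatever the caller passed to switch_app()

while not app.need_exit():                      # equivalently: while app.running():
    time.sleep_ms(100)                          # real per-frame work goes here

# To hand over to another APP (this also sets our own exit flag):
# app.switch_app("settings", start_param="page=wifi")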
You can get exit flag by app.need_exit().\n\nArgs:\n - exit: true if this APP should exit, false if not.\n" + }, + "args": [ + [ + "bool", + "exit", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void set_exit_flag(bool exit)", + "py_def": "def set_exit_flag(exit: bool) -> None" + } + }, + "auto_add": true + }, + "protocol": { + "type": "module", + "doc": { + "brief": "maix.protocol module" + }, + "members": { + "VERSION": { + "type": "var", + "name": "", + "doc": { + "brief": "protocol version", + "maixpy": "maix.protocol.VERSION", + "py_doc": "protocol version" + }, + "value": "1", + "static": false, + "readonly": true, + "def": "const uint8_t VERSION = 1" + }, + "HEADER": { + "type": "var", + "name": "HEADER", + "doc": { + "brief": "protocol header", + "maixpy": "maix.protocol.HEADER", + "py_doc": "protocol header" + }, + "value": null, + "static": false, + "readonly": false, + "def": "extern uint32_t HEADER" + }, + "CMD": { + "type": "enum", + "name": "CMD", + "doc": { + "brief": "protocol cmd, more doc see MaixCDK document's convention doc", + "note": "max app custom CMD value should < CMD_APP_MAX", + "maixpy": "maix.protocol.CMD", + "py_doc": "protocol cmd, more doc see MaixCDK document's convention doc" + }, + "values": [ + [ + "CMD_APP_MAX", + "0xC8", + "200, max app custom CMD value should < CMD_APP_MAX" + ], + [ + "CMD_SET_REPORT", + "0xF8", + "set auto upload data mode" + ], + [ + "CMD_APP_LIST", + "0xF9", + "" + ], + [ + "CMD_START_APP", + "0xFA", + "" + ], + [ + "CMD_EXIT_APP", + "0xFB", + "" + ], + [ + "CMD_CUR_APP_INFO", + "0xFC", + "" + ], + [ + "CMD_APP_INFO", + "0xFD", + "" + ], + [ + "CMD_KEY", + "0xFE", + "" + ], + [ + "CMD_TOUCH", + "0xFF", + "" + ] + ], + "def": "enum CMD\n {\n CMD_APP_MAX = 0xC8, // 200, max app custom CMD value should < CMD_APP_MAX\n\n CMD_SET_REPORT = 0xF8, // set auto upload data mode\n CMD_APP_LIST = 0xF9,\n CMD_START_APP = 0xFA,\n CMD_EXIT_APP = 0xFB,\n CMD_CUR_APP_INFO = 0xFC,\n CMD_APP_INFO = 0xFD,\n CMD_KEY = 0xFE,\n CMD_TOUCH = 0xFF,\n }" + }, + "FLAGS": { + "type": "enum", + "name": "FLAGS", + "doc": { + "brief": "protocol flags, more doc see MaixCDK document's convention doc", + "maixpy": "maix.protocol.FLAGS", + "py_doc": "protocol flags, more doc see MaixCDK document's convention doc" + }, + "values": [ + [ + "FLAG_REQ", + "0x00", + "" + ], + [ + "FLAG_RESP", + "0x80", + "" + ], + [ + "FLAG_IS_RESP_MASK", + "0x80", + "" + ], + [ + "FLAG_RESP_OK", + "0x40", + "" + ], + [ + "FLAG_RESP_ERR", + "0x00", + "" + ], + [ + "FLAG_RESP_OK_MASK", + "0x40", + "" + ], + [ + "FLAG_REPORT", + "0x20", + "" + ], + [ + "FLAG_REPORT_MASK", + "0x20", + "" + ], + [ + "FLAG_VERSION_MASK", + "0x03", + "" + ] + ], + "def": "enum FLAGS\n {\n FLAG_REQ = 0x00,\n FLAG_RESP = 0x80,\n FLAG_IS_RESP_MASK = 0x80,\n\n FLAG_RESP_OK = 0x40,\n FLAG_RESP_ERR = 0x00,\n FLAG_RESP_OK_MASK = 0x40,\n\n FLAG_REPORT = 0x20,\n FLAG_REPORT_MASK = 0x20,\n\n FLAG_VERSION_MASK = 0x03\n }" + }, + "MSG": { + "type": "class", + "name": "MSG", + "doc": { + "brief": "protocol msg", + "maixpy": "maix.protocol.MSG", + "py_doc": "protocol msg" + }, + "members": { + "version": { + "type": "var", + "name": "version", + "doc": { + "brief": "protocol version", + "maixpy": "maix.protocol.MSG.version", + "py_doc": "protocol version" + }, + "value": null, + "static": false, + "readonly": false, + "def": "uint8_t version" + }, + "resp_ok": { + "type": "var", + "name": "resp_ok", + "doc": { + "brief": "Indicate response message type, true means CMD valid and the CMD processed 
correctly, (only for response msg)", + "maixpy": "maix.protocol.MSG.resp_ok", + "py_doc": "Indicate response message type, true means CMD valid and the CMD processed correctly, (only for response msg)" + }, + "value": null, + "static": false, + "readonly": false, + "def": "uint8_t resp_ok" + }, + "has_been_replied": { + "type": "var", + "name": "has_been_replied{false}", + "doc": { + "brief": "Flag whether CMD has been processed and responded to CMD sender.\\nE.g. CMD CMD_START_APP will be automatically processed in CommProtocol.get_msg function,\\nso the return msg will set this flag to true.", + "maixpy": "maix.protocol.MSG.has_been_replied", + "py_doc": "Flag whether CMD has been processed and responded to CMD sender.\nE.g. CMD CMD_START_APP will be automatically processed in CommProtocol.get_msg function,\nso the return msg will set this flag to true." + }, + "value": null, + "static": false, + "readonly": false, + "def": "bool has_been_replied{false}" + }, + "cmd": { + "type": "var", + "name": "cmd", + "doc": { + "brief": "CMD value", + "maixpy": "maix.protocol.MSG.cmd", + "py_doc": "CMD value" + }, + "value": null, + "static": false, + "readonly": false, + "def": "uint8_t cmd" + }, + "is_resp": { + "type": "var", + "name": "is_resp", + "doc": { + "brief": "message is response or not, contrast with is_req", + "maixpy": "maix.protocol.MSG.is_resp", + "py_doc": "message is response or not, contrast with is_req" + }, + "value": null, + "static": false, + "readonly": false, + "def": "bool is_resp" + }, + "body_len": { + "type": "var", + "name": "body_len", + "doc": { + "brief": "Message body length, read only, use set_body() to update", + "attention": "DO NOT manually change this value", + "maixpy": "maix.protocol.MSG.body_len", + "py_doc": "Message body length, read only, use set_body() to update" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int body_len" + }, + "encode_resp_ok": { + "type": "func", + "name": "encode_resp_ok", + "doc": { + "brief": "Encode response ok(success) message", + "param": { + "body": "response body, can be null" + }, + "return": "encoded data, if nullptr, means error, and the error code is -err.Err", + "maixpy": "maix.protocol.MSG.encode_resp_ok", + "py_doc": "Encode response ok(success) message\n\nArgs:\n - body: response body, can be null\n\n\nReturns: encoded data, if nullptr, means error, and the error code is -err.Err\n" + }, + "args": [ + [ + "Bytes *", + "body", + "nullptr" + ] + ], + "ret_type": "Bytes*", + "static": false, + "def": "Bytes *encode_resp_ok(Bytes *body = nullptr)", + "py_def": "def encode_resp_ok(*args, **kwargs)" + }, + "encode_report": { + "type": "func", + "name": "encode_report", + "doc": { + "brief": "Encode proactively report message", + "param": { + "body": "report body, can be null" + }, + "return": "encoded data, if nullptr, means error, and the error code is -err.Err", + "maixpy": "maix.protocol.MSG.encode_report", + "py_doc": "Encode proactively report message\n\nArgs:\n - body: report body, can be null\n\n\nReturns: encoded data, if nullptr, means error, and the error code is -err.Err\n" + }, + "args": [ + [ + "Bytes *", + "body", + "nullptr" + ] + ], + "ret_type": "Bytes*", + "static": false, + "def": "Bytes *encode_report(Bytes *body = nullptr)", + "py_def": "def encode_report(*args, **kwargs)" + }, + "encode_resp_err": { + "type": "func", + "name": "encode_resp_err", + "doc": { + "brief": "Encode response error message", + "param": { + "code": "error code", + "msg": "error message" + }, + "return": 
"encoded data, if nullptr, means error, and the error code is -err.Err", + "maixpy": "maix.protocol.MSG.encode_resp_err", + "py_doc": "Encode response error message\n\nArgs:\n - code: error code\n - msg: error message\n\n\nReturns: encoded data, if nullptr, means error, and the error code is -err.Err\n" + }, + "args": [ + [ + "err::Err", + "code", + null + ], + [ + "const std::string &", + "msg", + null + ] + ], + "ret_type": "Bytes*", + "static": false, + "def": "Bytes *encode_resp_err(err::Err code, const std::string &msg)", + "py_def": "def encode_resp_err(*args, **kwargs)" + }, + "set_body": { + "type": "func", + "name": "set_body", + "doc": { + "brief": "Update message body", + "param": { + "body_new": "new body data" + }, + "maixpy": "maix.protocol.MSG.set_body", + "py_doc": "Update message body\n\nArgs:\n - body_new: new body data\n" + }, + "args": [ + [ + "Bytes *", + "body_new", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void set_body(Bytes *body_new)", + "py_def": "def set_body(self, body_new: maix.Bytes(bytes)) -> None" + }, + "get_body": { + "type": "func", + "name": "get_body", + "doc": { + "brief": "Get message body", + "return": "message body, bytes type", + "maixpy": "maix.protocol.MSG.get_body", + "py_doc": "Get message body\n\nReturns: message body, bytes type\n" + }, + "args": [], + "ret_type": "Bytes*", + "static": false, + "def": "Bytes *get_body()", + "py_def": "def get_body(*args, **kwargs)" + } + }, + "def": "class MSG" + }, + "Protocol": { + "type": "class", + "name": "Protocol", + "doc": { + "brief": "Communicate protocol", + "maixpy": "maix.protocol.Protocol", + "py_doc": "Communicate protocol" + }, + "members": { + "__init__": { + "type": "func", + "name": "Protocol", + "doc": { + "brief": "Construct a new Protocol object", + "param": { + "buff_size": "Data queue buffer size" + }, + "maixpy": "maix.protocol.Protocol.__init__", + "maixcdk": "maix.protocol.Protocol.Protocol", + "py_doc": "Construct a new Protocol object\n\nArgs:\n - buff_size: Data queue buffer size\n" + }, + "args": [ + [ + "int", + "buff_size", + "1024" + ], + [ + "uint32_t", + "header", + "maix::protocol::HEADER" + ] + ], + "ret_type": null, + "static": false, + "def": "Protocol(int buff_size = 1024, uint32_t header=maix::protocol::HEADER)", + "py_def": "def __init__(self, buff_size: int = 1024, header: int = 3148663466) -> None" + }, + "buff_size": { + "type": "func", + "name": "buff_size", + "doc": { + "brief": "Data queue buffer size", + "maixpy": "maix.protocol.Protocol.buff_size", + "py_doc": "Data queue buffer size" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int buff_size()", + "py_def": "def buff_size(self) -> int" + }, + "push_data": { + "type": "func", + "name": "push_data", + "doc": { + "brief": "Add data to data queue", + "param": { + "new_data": "new data" + }, + "return": "error code, maybe err.Err.ERR_BUFF_FULL", + "maixpy": "maix.protocol.Protocol.push_data", + "py_doc": "Add data to data queue\n\nArgs:\n - new_data: new data\n\n\nReturns: error code, maybe err.Err.ERR_BUFF_FULL\n" + }, + "args": [ + [ + "const Bytes *", + "new_data", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err push_data(const Bytes *new_data)", + "py_def": "def push_data(self, new_data: maix.Bytes(bytes)) -> maix.err.Err" + }, + "decode": { + "type": "func", + "name": "decode", + "doc": { + "brief": "Decode data in data queue and return a message", + "param": { + "new_data": "new data add to data queue, if null, only decode." 
+ }, + "return": "decoded message, if nullptr, means no message decoded.", + "maixpy": "maix.protocol.Protocol.decode", + "py_doc": "Decode data in data queue and return a message\n\nArgs:\n - new_data: new data add to data queue, if null, only decode.\n\n\nReturns: decoded message, if nullptr, means no message decoded.\n" + }, + "args": [ + [ + "const Bytes *", + "new_data", + "nullptr" + ] + ], + "ret_type": "protocol::MSG*", + "static": false, + "def": "protocol::MSG *decode(const Bytes *new_data = nullptr)", + "py_def": "def decode(self, new_data: maix.Bytes(bytes) = None) -> MSG" + }, + "encode_resp_ok": { + "type": "func", + "name": "encode_resp_ok", + "doc": { + "brief": "Encode response ok(success) message to buffer", + "param": { + "cmd": "CMD value", + "body": "response body, can be null" + }, + "return": "encoded data, if nullptr, means error, and the error code is -err.Err", + "maixpy": "maix.protocol.Protocol.encode_resp_ok", + "py_doc": "Encode response ok(success) message to buffer\n\nArgs:\n - cmd: CMD value\n - body: response body, can be null\n\n\nReturns: encoded data, if nullptr, means error, and the error code is -err.Err\n" + }, + "args": [ + [ + "uint8_t", + "cmd", + null + ], + [ + "Bytes *", + "body", + "nullptr" + ] + ], + "ret_type": "Bytes*", + "static": false, + "def": "Bytes *encode_resp_ok(uint8_t cmd, Bytes *body = nullptr)", + "py_def": "def encode_resp_ok(*args, **kwargs)" + }, + "encode_report": { + "type": "func", + "name": "encode_report", + "doc": { + "brief": "Encode proactively report message to buffer", + "param": { + "cmd": "CMD value", + "body": "report body, can be null" + }, + "return": "encoded data, if nullptr, means error, and the error code is -err.Err", + "maixpy": "maix.protocol.Protocol.encode_report", + "py_doc": "Encode proactively report message to buffer\n\nArgs:\n - cmd: CMD value\n - body: report body, can be null\n\n\nReturns: encoded data, if nullptr, means error, and the error code is -err.Err\n" + }, + "args": [ + [ + "uint8_t", + "cmd", + null + ], + [ + "Bytes *", + "body", + "nullptr" + ] + ], + "ret_type": "Bytes*", + "static": false, + "def": "Bytes *encode_report(uint8_t cmd, Bytes *body = nullptr)", + "py_def": "def encode_report(*args, **kwargs)" + }, + "encode_resp_err": { + "type": "func", + "name": "encode_resp_err", + "doc": { + "brief": "Encode response error message to buffer", + "param": { + "cmd": "CMD value", + "code": "error code", + "msg": "error message" + }, + "return": "encoded data, if nullptr, means error, and the error code is -err.Err", + "maixpy": "maix.protocol.Protocol.encode_resp_err", + "py_doc": "Encode response error message to buffer\n\nArgs:\n - cmd: CMD value\n - code: error code\n - msg: error message\n\n\nReturns: encoded data, if nullptr, means error, and the error code is -err.Err\n" + }, + "args": [ + [ + "uint8_t", + "cmd", + null + ], + [ + "err::Err", + "code", + null + ], + [ + "const std::string &", + "msg", + null + ] + ], + "ret_type": "Bytes*", + "static": false, + "def": "Bytes *encode_resp_err(uint8_t cmd, err::Err code, const std::string &msg)", + "py_def": "def encode_resp_err(*args, **kwargs)" + } + }, + "def": "class Protocol" + }, + "crc16_IBM": { + "type": "func", + "name": "crc16_IBM", + "doc": { + "brief": "CRC16-IBM", + "param": { + "data": "data, bytes type." 
+ }, + "return": "CRC16-IBM value, uint16_t type.", + "maixpy": "maix.protocol.crc16_IBM", + "py_doc": "CRC16-IBM\n\nArgs:\n - data: data, bytes type.\n\n\nReturns: CRC16-IBM value, uint16_t type.\n" + }, + "args": [ + [ + "const Bytes *", + "data", + null + ] + ], + "ret_type": "uint16_t", + "static": false, + "def": "uint16_t crc16_IBM(const Bytes *data)", + "py_def": "def crc16_IBM(data: maix.Bytes(bytes)) -> int" + } + }, + "auto_add": true + }, + "time": { + "type": "module", + "doc": { + "brief": "maix.time module" + }, + "members": { + "time": { + "type": "func", + "name": "time", + "doc": { + "brief": "Get current time in s", + "return": "current time in s, double type", + "attention": "If board have no RTC battery, when bootup and connect to network,\nsystem will automatically sync time by NTP, will cause time() have big change,\ne.g. before NTP: 10(s), after: 1718590639.5149617(s).\nIf you want to calculate time interval, please use ticks_s().", + "maixpy": "maix.time.time", + "py_doc": "Get current time in s\n\nReturns: current time in s, double type\n" + }, + "args": [], + "ret_type": "double", + "static": false, + "def": "double time()", + "py_def": "def time() -> float" + }, + "time_ms": { + "type": "func", + "name": "time_ms", + "doc": { + "brief": "Get current time in ms", + "return": "current time in ms, uint64_t type", + "attention": "If board have no RTC battery, when bootup and connect to network,\nsystem will automatically sync time by NTP, will cause time() have big change,\ne.g. before NTP: 10000(ms), after: 1718590639000(ms)\nIf you want to calculate time interval, please use ticks_ms().", + "maixpy": "maix.time.time_ms", + "py_doc": "Get current time in ms\n\nReturns: current time in ms, uint64_t type\n" + }, + "args": [], + "ret_type": "uint64_t", + "static": false, + "def": "uint64_t time_ms()", + "py_def": "def time_ms() -> int" + }, + "time_s": { + "type": "func", + "name": "time_s", + "doc": { + "brief": "Get current time in s", + "return": "current time in s, uint64_t type", + "attention": "If board have no RTC battery, when bootup and connect to network,\nsystem will automatically sync time by NTP, will cause time() have big change,\ne.g. before NTP: 10(s), after: 1718590639(s)", + "maixpy": "maix.time.time_s", + "py_doc": "Get current time in s\n\nReturns: current time in s, uint64_t type\n" + }, + "args": [], + "ret_type": "uint64_t", + "static": false, + "def": "uint64_t time_s()", + "py_def": "def time_s() -> int" + }, + "time_us": { + "type": "func", + "name": "time_us", + "doc": { + "brief": "Get current time in us", + "return": "current time in us, uint64_t type", + "attention": "If board have no RTC battery, when bootup and connect to network,\nsystem will automatically sync time by NTP, will cause time() have big change,\ne.g. 
before NTP: 10000000(us), after: 1718590639000000(s)\nIf you want to calculate time interval, please use ticks_us().", + "maixpy": "maix.time.time_us", + "py_doc": "Get current time in us\n\nReturns: current time in us, uint64_t type\n" + }, + "args": [], + "ret_type": "uint64_t", + "static": false, + "def": "uint64_t time_us()", + "py_def": "def time_us() -> int" + }, + "time_diff": { + "type": "func", + "name": "time_diff", + "doc": { + "brief": "Calculate time difference in s.", + "param": { + "last": "last time", + "now": "current time, can be -1 if use current time" + }, + "return": "time difference", + "attention": "If board have no RTC battery, when bootup and connect to network,\nsystem will automatically sync time by NTP, will cause time() have big change, and lead to big value.\ne.g. before NTP: 1(s), after: 1718590500(s)\nIf you want to calculate time interval, please use ticks_diff().", + "maixpy": "maix.time.time_diff", + "py_doc": "Calculate time difference in s.\n\nArgs:\n - last: last time\n - now: current time, can be -1 if use current time\n\n\nReturns: time difference\n" + }, + "args": [ + [ + "double", + "last", + null + ], + [ + "double", + "now", + "-1" + ] + ], + "ret_type": "double", + "static": false, + "def": "double time_diff(double last, double now = -1)", + "py_def": "def time_diff(last: float, now: float = -1) -> float" + }, + "ticks_s": { + "type": "func", + "name": "ticks_s", + "doc": { + "brief": "Get current time in s since bootup", + "return": "current time in s, double type", + "maixpy": "maix.time.ticks_s", + "py_doc": "Get current time in s since bootup\n\nReturns: current time in s, double type\n" + }, + "args": [], + "ret_type": "double", + "static": false, + "def": "double ticks_s()", + "py_def": "def ticks_s() -> float" + }, + "ticks_ms": { + "type": "func", + "name": "ticks_ms", + "doc": { + "brief": "Get current time in ms since bootup", + "return": "current time in ms, uint64_t type", + "maixpy": "maix.time.ticks_ms", + "py_doc": "Get current time in ms since bootup\n\nReturns: current time in ms, uint64_t type\n" + }, + "args": [], + "ret_type": "uint64_t", + "static": false, + "def": "uint64_t ticks_ms()", + "py_def": "def ticks_ms() -> int" + }, + "ticks_us": { + "type": "func", + "name": "ticks_us", + "doc": { + "brief": "Get current time in us since bootup", + "return": "current time in us, uint64_t type", + "maixpy": "maix.time.ticks_us", + "py_doc": "Get current time in us since bootup\n\nReturns: current time in us, uint64_t type\n" + }, + "args": [], + "ret_type": "uint64_t", + "static": false, + "def": "uint64_t ticks_us()", + "py_def": "def ticks_us() -> int" + }, + "ticks_diff": { + "type": "func", + "name": "ticks_diff", + "doc": { + "brief": "Calculate time difference in s.", + "param": { + "last": "last time", + "now": "current time, can be -1 if use current time" + }, + "return": "time difference", + "maixpy": "maix.time.ticks_diff", + "py_doc": "Calculate time difference in s.\n\nArgs:\n - last: last time\n - now: current time, can be -1 if use current time\n\n\nReturns: time difference\n" + }, + "args": [ + [ + "double", + "last", + null + ], + [ + "double", + "now", + "-1" + ] + ], + "ret_type": "double", + "static": false, + "def": "double ticks_diff(double last, double now = -1)", + "py_def": "def ticks_diff(last: float, now: float = -1) -> float" + }, + "sleep": { + "type": "func", + "name": "sleep", + "doc": { + "brief": "Sleep seconds", + "param": { + "s": "seconds, double type" + }, + "maixpy": "maix.time.sleep", + 
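The attention notes above boil down to one rule: wall-clock time (time(), time_ms(), ...) can jump when NTP sync happens, so intervals should be measured with the ticks_* family. For example:

from maix import time

t0 = time.ticks_s()                            # monotonic seconds since boot
time.sleep(0.2)
print("elapsed:", time.ticks_diff(t0), "s")    # ~0.2, unaffected by NTP

print("epoch seconds:", time.time())           # may jump once the system syncs time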
"py_doc": "Sleep seconds\n\nArgs:\n - s: seconds, double type\n" + }, + "args": [ + [ + "double", + "s", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void sleep(double s)" + }, + "sleep_ms": { + "type": "func", + "name": "sleep_ms", + "doc": { + "brief": "Sleep milliseconds", + "param": { + "ms": "milliseconds, uint64_t type" + }, + "maixpy": "maix.time.sleep_ms", + "py_doc": "Sleep milliseconds\n\nArgs:\n - ms: milliseconds, uint64_t type\n" + }, + "args": [ + [ + "uint64_t", + "ms", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void sleep_ms(uint64_t ms)" + }, + "sleep_us": { + "type": "func", + "name": "sleep_us", + "doc": { + "brief": "Sleep microseconds", + "param": { + "us": "microseconds, uint64_t type" + }, + "maixpy": "maix.time.sleep_us", + "py_doc": "Sleep microseconds\n\nArgs:\n - us: microseconds, uint64_t type\n" + }, + "args": [ + [ + "uint64_t", + "us", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void sleep_us(uint64_t us)" + }, + "fps": { + "type": "func", + "name": "fps", + "doc": { + "brief": "Calculate FPS since last call this method.\\nAttention, this method is not multi thread safe, only call this method in one threads.\\nIf you want to use in multi threads, please use time.FPS class.\\nFPS is average value of recent n(buff_len) times, and you can call fps_set_buff_len(10) to change buffer length, default is 20.\\nMultiple invoke this function will calculate fps between two invoke, and you can also call fps_start() fisrt to manually assign fps calulate start point.", + "return": "float type, current fps since last call this method", + "maixpy": "maix.time.fps", + "py_doc": "Calculate FPS since last call this method.\nAttention, this method is not multi thread safe, only call this method in one threads.\nIf you want to use in multi threads, please use time.FPS class.\nFPS is average value of recent n(buff_len) times, and you can call fps_set_buff_len(10) to change buffer length, default is 20.\nMultiple invoke this function will calculate fps between two invoke, and you can also call fps_start() fisrt to manually assign fps calulate start point.\n\nReturns: float type, current fps since last call this method\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float fps()", + "py_def": "def fps() -> float" + }, + "fps_start": { + "type": "func", + "name": "fps_start", + "doc": { + "brief": "Manually set fps calculation start point, then you can call fps() function to calculate fps between fps_start() and fps().", + "maixpy": "maix.time.fps_start", + "py_doc": "Manually set fps calculation start point, then you can call fps() function to calculate fps between fps_start() and fps()." + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void fps_start()", + "py_def": "def fps_start() -> None" + }, + "fps_set_buff_len": { + "type": "func", + "name": "fps_set_buff_len", + "doc": { + "brief": "Set fps method buffer length, by default the buffer length is 10.", + "param": { + "len": "Buffer length to store recent fps value." 
+ }, + "maixpy": "maix.time.fps_set_buff_len", + "py_doc": "Set fps method buffer length, by default the buffer length is 10.\n\nArgs:\n - len: Buffer length to store recent fps value.\n" + }, + "args": [ + [ + "int", + "len", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void fps_set_buff_len(int len)", + "py_def": "def fps_set_buff_len(len: int) -> None" + }, + "FPS": { + "type": "class", + "name": "FPS", + "doc": { + "brief": "FPS class to use average filter to calculate FPS.", + "maixpy": "maix.time.FPS", + "py_doc": "FPS class to use average filter to calculate FPS." + }, + "members": { + "__init__": { + "type": "func", + "name": "FPS", + "doc": { + "brief": "FPS class constructor", + "param": { + "buff_len": "Average buffer length, default 20, that is, fps() function will return the average fps in recent buff_len times fps." + }, + "maixpy": "maix.time.FPS.__init__", + "maixcdk": "maix.time.FPS.FPS", + "py_doc": "FPS class constructor\n\nArgs:\n - buff_len: Average buffer length, default 20, that is, fps() function will return the average fps in recent buff_len times fps.\n" + }, + "args": [ + [ + "int", + "buff_len", + "20" + ] + ], + "ret_type": null, + "static": false, + "def": "FPS(int buff_len = 20)", + "py_def": "def __init__(self, buff_len: int = 20) -> None" + }, + "start": { + "type": "func", + "name": "start", + "doc": { + "brief": "Manually set fps calculation start point, then you can call fps() function to calculate fps between start() and fps().", + "maixpy": "maix.time.FPS.start", + "py_doc": "Manually set fps calculation start point, then you can call fps() function to calculate fps between start() and fps()." + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void start()", + "py_def": "def start(self) -> None" + }, + "fps": { + "type": "func", + "name": "fps", + "doc": { + "brief": "The same as end function.", + "return": "float type, current fps since last call this method", + "maixpy": "maix.time.FPS.fps", + "py_doc": "The same as end function.\n\nReturns: float type, current fps since last call this method\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float fps()", + "overload": [ + { + "type": "func", + "name": "end", + "doc": { + "brief": "Calculate FPS since last call this method.\\nFPS is average value of recent n(buff_len) times, and you can call fps_set_buff_len(10) to change buffer length, default is 20.\\nMultiple invoke this function will calculate fps between two invoke, and you can also call fps_start() fisrt to manually assign fps calulate start point.", + "return": "float type, current fps since last call this method", + "maixpy": "maix.time.FPS.fps", + "py_doc": "Calculate FPS since last call this method.\nFPS is average value of recent n(buff_len) times, and you can call fps_set_buff_len(10) to change buffer length, default is 20.\nMultiple invoke this function will calculate fps between two invoke, and you can also call fps_start() fisrt to manually assign fps calulate start point.\n\nReturns: float type, current fps since last call this method\n" + }, + "args": [], + "ret_type": "inline float", + "static": false, + "def": "inline float end()" + } + ], + "py_def": "def fps(self) -> float" + }, + "set_buff_len": { + "type": "func", + "name": "set_buff_len", + "doc": { + "brief": "Set fps method buffer length, by default the buffer length is 10.", + "param": { + "len": "Buffer length to store recent fps value." 
+ }, + "maixpy": "maix.time.FPS.set_buff_len", + "py_doc": "Set fps method buffer length, by default the buffer length is 10.\n\nArgs:\n - len: Buffer length to store recent fps value.\n" + }, + "args": [ + [ + "int", + "len", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void set_buff_len(int len)", + "py_def": "def set_buff_len(self, len: int) -> None" + } + }, + "def": "class FPS" + }, + "DateTime": { + "type": "class", + "name": "DateTime", + "doc": { + "brief": "Date and time class", + "maixpy": "maix.time.DateTime", + "py_doc": "Date and time class" + }, + "members": { + "year": { + "type": "var", + "name": "year", + "doc": { + "brief": "Year", + "maixpy": "maix.time.DateTime.year", + "py_doc": "Year" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int year" + }, + "month": { + "type": "var", + "name": "month", + "doc": { + "brief": "Month, 1~12", + "maixpy": "maix.time.DateTime.month", + "py_doc": "Month, 1~12" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int month" + }, + "day": { + "type": "var", + "name": "day", + "doc": { + "brief": "Day", + "maixpy": "maix.time.DateTime.day", + "py_doc": "Day" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int day" + }, + "hour": { + "type": "var", + "name": "hour", + "doc": { + "brief": "Hour", + "maixpy": "maix.time.DateTime.hour", + "py_doc": "Hour" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int hour" + }, + "minute": { + "type": "var", + "name": "minute", + "doc": { + "brief": "Minute", + "maixpy": "maix.time.DateTime.minute", + "py_doc": "Minute" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int minute" + }, + "second": { + "type": "var", + "name": "second", + "doc": { + "brief": "Second", + "maixpy": "maix.time.DateTime.second", + "py_doc": "Second" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int second" + }, + "microsecond": { + "type": "var", + "name": "microsecond", + "doc": { + "brief": "Microsecond", + "maixpy": "maix.time.DateTime.microsecond", + "py_doc": "Microsecond" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int microsecond" + }, + "yearday": { + "type": "var", + "name": "yearday", + "doc": { + "brief": "Year day", + "maixpy": "maix.time.DateTime.yearday", + "py_doc": "Year day" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int yearday" + }, + "weekday": { + "type": "var", + "name": "weekday", + "doc": { + "brief": "Weekday, 0 is Monday, 6 is Sunday", + "maixpy": "maix.time.DateTime.weekday", + "py_doc": "Weekday, 0 is Monday, 6 is Sunday" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int weekday" + }, + "zone": { + "type": "var", + "name": "zone", + "doc": { + "brief": "Time zone", + "maixpy": "maix.time.DateTime.zone", + "py_doc": "Time zone" + }, + "value": null, + "static": false, + "readonly": false, + "def": "float zone" + }, + "zone_name": { + "type": "var", + "name": "zone_name", + "doc": { + "brief": "Time zone name", + "maixpy": "maix.time.DateTime.zone_name", + "py_doc": "Time zone name" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::string zone_name" + }, + "__init__": { + "type": "func", + "name": "DateTime", + "doc": { + "brief": "Constructor", + "param": { + "year": "year", + "month": "month", + "day": "day", + "hour": "hour", + "minute": "minute", + "second": "second", + "microsecond": "microsecond", + "yearday": "year day", + 
"weekday": "weekday", + "zone": "time zone" + }, + "maixcdk": "maix.time.DateTime.DateTime", + "maixpy": "maix.time.DateTime.__init__", + "py_doc": "Constructor\n\nArgs:\n - year: year\n - month: month\n - day: day\n - hour: hour\n - minute: minute\n - second: second\n - microsecond: microsecond\n - yearday: year day\n - weekday: weekday\n - zone: time zone\n" + }, + "args": [ + [ + "int", + "year", + "0" + ], + [ + "int", + "month", + "0" + ], + [ + "int", + "day", + "0" + ], + [ + "int", + "hour", + "0" + ], + [ + "int", + "minute", + "0" + ], + [ + "int", + "second", + "0" + ], + [ + "int", + "microsecond", + "0" + ], + [ + "int", + "yearday", + "0" + ], + [ + "int", + "weekday", + "0" + ], + [ + "int", + "zone", + "0" + ] + ], + "ret_type": null, + "static": false, + "def": "DateTime(int year = 0, int month = 0, int day = 0, int hour = 0, int minute = 0, int second = 0, int microsecond = 0, int yearday = 0, int weekday = 0, int zone = 0)", + "py_def": "def __init__(self, year: int = 0, month: int = 0, day: int = 0, hour: int = 0, minute: int = 0, second: int = 0, microsecond: int = 0, yearday: int = 0, weekday: int = 0, zone: int = 0) -> None" + }, + "strftime": { + "type": "func", + "name": "strftime", + "doc": { + "brief": "Convert to string", + "return": "date time string", + "maixpy": "maix.time.DateTime.strftime", + "py_doc": "Convert to string\n\nReturns: date time string\n" + }, + "args": [ + [ + "const std::string &", + "format", + null + ] + ], + "ret_type": "std::string", + "static": false, + "def": "std::string strftime(const std::string &format)", + "py_def": "def strftime(self, format: str) -> str" + }, + "timestamp": { + "type": "func", + "name": "timestamp", + "doc": { + "brief": "Convert to float timestamp", + "return": "float timestamp", + "maixpy": "maix.time.DateTime.timestamp", + "py_doc": "Convert to float timestamp\n\nReturns: float timestamp\n" + }, + "args": [], + "ret_type": "double", + "static": false, + "def": "double timestamp()", + "py_def": "def timestamp(self) -> float" + } + }, + "def": "class DateTime" + }, + "now": { + "type": "func", + "name": "now", + "doc": { + "brief": "Get current UTC date and time", + "return": "current date and time, DateTime type", + "maixpy": "maix.time.now", + "py_doc": "Get current UTC date and time\n\nReturns: current date and time, DateTime type\n" + }, + "args": [], + "ret_type": "time::DateTime*", + "static": false, + "def": "time::DateTime *now()", + "py_def": "def now() -> DateTime" + }, + "localtime": { + "type": "func", + "name": "localtime", + "doc": { + "brief": "Get local time", + "return": "local time, DateTime type", + "maixpy": "maix.time.localtime", + "py_doc": "Get local time\n\nReturns: local time, DateTime type\n" + }, + "args": [], + "ret_type": "time::DateTime*", + "static": false, + "def": "time::DateTime *localtime()", + "py_def": "def localtime() -> DateTime" + }, + "strptime": { + "type": "func", + "name": "strptime", + "doc": { + "brief": "DateTime from string", + "param": { + "str": "date time string", + "format": "date time format" + }, + "return": "DateTime", + "maixpy": "maix.time.strptime", + "py_doc": "DateTime from string\n\nArgs:\n - str: date time string\n - format: date time format\n\n\nReturns: DateTime\n" + }, + "args": [ + [ + "const std::string &", + "str", + null + ], + [ + "const std::string &", + "format", + null + ] + ], + "ret_type": "time::DateTime*", + "static": false, + "def": "time::DateTime *strptime(const std::string &str, const std::string &format)", + "py_def": "def 
strptime(str: str, format: str) -> DateTime" + }, + "gmtime": { + "type": "func", + "name": "gmtime", + "doc": { + "brief": "timestamp to DateTime(time zone is UTC (value 0))", + "param": { + "timestamp": "double timestamp" + }, + "return": "DateTime", + "maixpy": "maix.time.gmtime", + "py_doc": "timestamp to DateTime(time zone is UTC (value 0))\n\nArgs:\n - timestamp: double timestamp\n\n\nReturns: DateTime\n" + }, + "args": [ + [ + "double", + "timestamp", + null + ] + ], + "ret_type": "time::DateTime*", + "static": false, + "def": "time::DateTime *gmtime(double timestamp)", + "py_def": "def gmtime(timestamp: float) -> DateTime" + }, + "timezone": { + "type": "func", + "name": "timezone", + "doc": { + "brief": "Set or get timezone", + "param": { + "timezone": "string type, can be empty and default to empty, if empty, only return crrent timezone, a \"region/city\" string, e.g. Asia/Shanghai, Etc/UTC, you can get all by list_timezones function." + }, + "return": "string type, return current timezone setting.", + "attention": "when set new timezone, time setting not take effect in this process for some API, so you need to restart program.", + "maixpy": "maix.time.timezone", + "py_doc": "Set or get timezone\n\nArgs:\n - timezone: string type, can be empty and default to empty, if empty, only return crrent timezone, a \"region/city\" string, e.g. Asia/Shanghai, Etc/UTC, you can get all by list_timezones function.\n\n\nReturns: string type, return current timezone setting.\n" + }, + "args": [ + [ + "const std::string &", + "timezone", + "\"\"" + ] + ], + "ret_type": "std::string", + "static": false, + "def": "std::string timezone(const std::string &timezone = \"\")", + "overload": [ + { + "type": "func", + "name": "timezone2", + "doc": { + "brief": "Set or get timezone", + "param": { + "region": "string type, which region to set, can be empty means only get current, default empty.", + "city": "string type, which city to set, can be empty means only get current, default empty." 
+ }, + "return": "list type, return current timezone setting, first is region, second is city.", + "attention": "when set new timezone, time setting not take effect in this process for some API, so you need to restart program.", + "maixpy": "maix.time.timezone", + "py_doc": "Set or get timezone\n\nArgs:\n - region: string type, which region to set, can be empty means only get current, default empty.\n - city: string type, which city to set, can be empty means only get current, default empty.\n\n\nReturns: list type, return current timezone setting, first is region, second is city.\n" + }, + "args": [ + [ + "const std::string &", + "region", + "\"\"" + ], + [ + "const std::string &", + "city", + "\"\"" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector timezone2(const std::string ®ion = \"\", const std::string &city = \"\")" + } + ], + "py_def": "def timezone(timezone: str = '') -> str" + }, + "list_timezones": { + "type": "func", + "name": "list_timezones", + "doc": { + "brief": "List all timezone info", + "return": "A dict with key are regions, and value are region's cities.", + "maixpy": "maix.time.list_timezones", + "py_doc": "List all timezone info\n\nReturns: A dict with key are regions, and value are region's cities.\n" + }, + "args": [], + "ret_type": "std::map>", + "static": false, + "def": "std::map> list_timezones()", + "py_def": "def list_timezones() -> dict[str, list[str]]" + }, + "ntp_timetuple": { + "type": "func", + "name": "ntp_timetuple", + "doc": { + "brief": "Retrieves time from an NTP server\\nThis function fetches the current time from the specified NTP server and port,\\nreturning a tuple containing the time details.", + "param": { + "host": "The hostname or IP address of the NTP server.", + "port": "The port number of the NTP server. Use -1 for the default port 123.", + "retry": "The number of retry attempts. Must be at least 1.", + "timeout_ms": "The timeout duration in milliseconds. Must be non-negative." + }, + "return": "A list of 6 elements: [year, month, day, hour, minute, second]", + "maixpy": "maix.time.ntp_timetuple", + "py_doc": "Retrieves time from an NTP server\nThis function fetches the current time from the specified NTP server and port,\nreturning a tuple containing the time details.\n\nArgs:\n - host: The hostname or IP address of the NTP server.\n - port: The port number of the NTP server. Use -1 for the default port 123.\n - retry: The number of retry attempts. Must be at least 1.\n - timeout_ms: The timeout duration in milliseconds. 
Must be non-negative.\n\n\nReturns: A list of 6 elements: [year, month, day, hour, minute, second]\n" + }, + "args": [ + [ + "std::string", + "host", + null + ], + [ + "int", + "port", + "-1" + ], + [ + "uint8_t", + "retry", + "3" + ], + [ + "int", + "timeout_ms", + "0" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector ntp_timetuple(std::string host, int port=-1, uint8_t retry=3, int timeout_ms=0)", + "py_def": "def ntp_timetuple(host: str, port: int = -1, retry: int = 3, timeout_ms: int = 0) -> list[int]" + }, + "ntp_timetuple_with_config": { + "type": "func", + "name": "ntp_timetuple_with_config", + "doc": { + "brief": "Retrieves time from an NTP server using a configuration file\\nThis function reads the configuration from a YAML file to fetch the current time\\nfrom a list of specified NTP servers, returning a tuple containing the time details.", + "param": { + "path": "The path to the YAML configuration file, which should include:\n- Config:\n- retry: Number of retry attempts (must be at least 1)\n- total_timeout_ms: Total timeout duration in milliseconds (must be non-negative)\n- NtpServers:\n- host: Hostname or IP address of the NTP server\n- port: Port number of the NTP server (use 123 for default)\nExample YAML configuration:\nConfig:\n- retry: 3\n- total_timeout_ms: 10000\nNtpServers:\n- host: \"pool.ntp.org\"\nport: 123\n- host: \"time.nist.gov\"\nport: 123\n- host: \"time.windows.com\"\nport: 123" + }, + "return": "A list of 6 elements: [year, month, day, hour, minute, second]", + "maixpy": "maix.time.ntp_timetuple_with_config", + "py_doc": "Retrieves time from an NTP server using a configuration file\nThis function reads the configuration from a YAML file to fetch the current time\nfrom a list of specified NTP servers, returning a tuple containing the time details.\n\nArgs:\n - path: The path to the YAML configuration file, which should include:\n- Config:\n- retry: Number of retry attempts (must be at least 1)\n- total_timeout_ms: Total timeout duration in milliseconds (must be non-negative)\n- NtpServers:\n- host: Hostname or IP address of the NTP server\n- port: Port number of the NTP server (use 123 for default)\nExample YAML configuration:\nConfig:\n- retry: 3\n- total_timeout_ms: 10000\nNtpServers:\n- host: \"pool.ntp.org\"\nport: 123\n- host: \"time.nist.gov\"\nport: 123\n- host: \"time.windows.com\"\nport: 123\n\n\nReturns: A list of 6 elements: [year, month, day, hour, minute, second]\n" + }, + "args": [ + [ + "std::string", + "path", + null + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector ntp_timetuple_with_config(std::string path)", + "py_def": "def ntp_timetuple_with_config(path: str) -> list[int]" + }, + "ntp_sync_sys_time": { + "type": "func", + "name": "ntp_sync_sys_time", + "doc": { + "brief": "Retrieves time from an NTP server and synchronizes the system time\\nThis function fetches the current time from the specified NTP server and port,\\nthen synchronizes the system time with the retrieved time.", + "param": { + "host": "The hostname or IP address of the NTP server.", + "port": "The port number of the NTP server. Use 123 for the default port.", + "retry": "The number of retry attempts. Must be at least 1.", + "timeout_ms": "The timeout duration in milliseconds. Must be non-negative." 
+ }, + "return": "A list of 6 elements: [year, month, day, hour, minute, second]", + "maixpy": "maix.time.ntp_sync_sys_time", + "py_doc": "Retrieves time from an NTP server and synchronizes the system time\nThis function fetches the current time from the specified NTP server and port,\nthen synchronizes the system time with the retrieved time.\n\nArgs:\n - host: The hostname or IP address of the NTP server.\n - port: The port number of the NTP server. Use 123 for the default port.\n - retry: The number of retry attempts. Must be at least 1.\n - timeout_ms: The timeout duration in milliseconds. Must be non-negative.\n\n\nReturns: A list of 6 elements: [year, month, day, hour, minute, second]\n" + }, + "args": [ + [ + "std::string", + "host", + null + ], + [ + "int", + "port", + "-1" + ], + [ + "uint8_t", + "retry", + "3" + ], + [ + "int", + "timeout_ms", + "0" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector ntp_sync_sys_time(std::string host, int port=-1, uint8_t retry=3, int timeout_ms=0)", + "py_def": "def ntp_sync_sys_time(host: str, port: int = -1, retry: int = 3, timeout_ms: int = 0) -> list[int]" + }, + "ntp_sync_sys_time_with_config": { + "type": "func", + "name": "ntp_sync_sys_time_with_config", + "doc": { + "brief": "Retrieves time from an NTP server using a configuration file and synchronizes the system time\\nThis function reads the configuration from a YAML file to fetch the current time\\nfrom a list of specified NTP servers, then synchronizes the system time with the retrieved time.", + "param": { + "path": "The path to the YAML configuration file, which should include:\n- Config:\n- retry: Number of retry attempts (must be at least 1)\n- total_timeout_ms: Total timeout duration in milliseconds (must be non-negative)\n- NtpServers:\n- host: Hostname or IP address of the NTP server\n- port: Port number of the NTP server (use 123 for default)\nExample YAML configuration:\nConfig:\n- retry: 3\n- total_timeout_ms: 10000\nNtpServers:\n- host: \"pool.ntp.org\"\nport: 123\n- host: \"time.nist.gov\"\nport: 123\n- host: \"time.windows.com\"\nport: 123" + }, + "return": "A vector of integers containing the time details: [year, month, day, hour, minute, second]", + "maixpy": "maix.time.ntp_sync_sys_time_with_config", + "py_doc": "Retrieves time from an NTP server using a configuration file and synchronizes the system time\nThis function reads the configuration from a YAML file to fetch the current time\nfrom a list of specified NTP servers, then synchronizes the system time with the retrieved time.\n\nArgs:\n - path: The path to the YAML configuration file, which should include:\n- Config:\n- retry: Number of retry attempts (must be at least 1)\n- total_timeout_ms: Total timeout duration in milliseconds (must be non-negative)\n- NtpServers:\n- host: Hostname or IP address of the NTP server\n- port: Port number of the NTP server (use 123 for default)\nExample YAML configuration:\nConfig:\n- retry: 3\n- total_timeout_ms: 10000\nNtpServers:\n- host: \"pool.ntp.org\"\nport: 123\n- host: \"time.nist.gov\"\nport: 123\n- host: \"time.windows.com\"\nport: 123\n\n\nReturns: A vector of integers containing the time details: [year, month, day, hour, minute, second]\n" + }, + "args": [ + [ + "std::string", + "path", + null + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector ntp_sync_sys_time_with_config(std::string path)", + "py_def": "def ntp_sync_sys_time_with_config(path: str) -> list[int]" + } + }, + "auto_add": true + }, + "example": { + 
"type": "module", + "doc": { + "brief": "example module, this will be maix.example module in MaixPy, maix::example namespace in MaixCDK", + "maixpy": "maix.example", + "py_doc": "example module, this will be maix.example module in MaixPy, maix::example namespace in MaixCDK" + }, + "members": { + "Test": { + "type": "class", + "name": "Test", + "doc": { + "brief": "Test class", + "maixpy": "maix.example.Test", + "py_doc": "Test class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Test", + "doc": { + "brief": "Test constructor", + "maixpy": "maix.example.Test.__init__", + "maixcdk": "maix.example.Test.Test", + "py_doc": "Test constructor" + }, + "args": [], + "ret_type": null, + "static": false, + "def": "Test()", + "py_def": "def __init__(self) -> None" + } + }, + "def": "class Test" + }, + "Kind": { + "type": "enum", + "name": "Kind", + "doc": { + "brief": "Example enum(not recommend! See Kind2)", + "maixpy": "maix.example.Kind", + "py_doc": "Example enum(not recommend! See Kind2)" + }, + "values": [ + [ + "KIND_NONE", + "0", + "Kind none, value always 0, other enum value will auto increase" + ], + [ + "KIND_DOG", + "", + "Kind dog" + ], + [ + "KIND_CAT", + "", + "Kind cat, value is auto generated according to KING_DOG" + ], + [ + "KIND_BIRD", + "", + "" + ], + [ + "KIND_MAX", + "", + "Max Kind quantity\nYou can get max Kind value by KIND_MAX - 1" + ] + ], + "def": "enum Kind\n {\n KIND_NONE = 0, /** Kind none, value always 0, other enum value will auto increase */\n KIND_DOG, /** Kind dog*/\n KIND_CAT, // Kind cat, value is auto generated according to KING_DOG\n KIND_BIRD,\n KIND_MAX /* Max Kind quantity,\n You can get max Kind value by KIND_MAX - 1\n */\n }" + }, + "Kind2": { + "type": "enum", + "name": "class", + "doc": { + "brief": "Example enum class(recommend!)", + "maixpy": "maix.example.Kind2", + "py_doc": "Example enum class(recommend!)" + }, + "values": [ + [ + "NONE", + "0", + "Kind none, value always 0, other enum value will auto increase" + ], + [ + "DOG", + "", + "Kind dog" + ], + [ + "CAT", + "", + "Kind cat, value is auto generated according to KING_DOG" + ], + [ + "BIRD", + "", + "" + ], + [ + "MAX", + "", + "Max Kind quantity\nYou can get max Kind value by KIND_MAX - 1" + ] + ], + "def": "enum class Kind2\n {\n NONE = 0, /** Kind none, value always 0, other enum value will auto increase */\n DOG, /** Kind dog*/\n CAT, // Kind cat, value is auto generated according to KING_DOG\n BIRD,\n MAX /* Max Kind quantity,\n You can get max Kind value by KIND_MAX - 1\n */\n }" + }, + "var1": { + "type": "var", + "name": "", + "doc": { + "brief": "Example module variable", + "attention": "It's a copy of this variable in MaixPy,\nso change it in C++ (e.g. update var in hello function) will not take effect the var inMaixPy.\nSo we add const for this var to avoid this mistake.", + "maixpy": "maix.example.var1", + "py_doc": "Example module variable" + }, + "value": "\"Sipeed\"", + "static": false, + "readonly": true, + "def": "const std::string var1 = \"Sipeed\"" + }, + "list_var": { + "type": "var", + "name": "", + "doc": { + "brief": "Tensor data type size in bytes", + "attention": [ + "DO NOT use C/C++ array directly for python API, the python wrapper not support it.\nUse std::vector instead.", + "It's a copy of this variable in MaixPy,\nso change it in C++ (e.g. update var in hello function) will not take effect the var inMaixPy.\nSo we add const for this var to avoid this mistake." 
+ ], + "maixpy": "maix.example.list_var", + "py_doc": "Tensor data type size in bytes" + }, + "value": "{\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9}", + "static": false, + "readonly": true, + "def": "const std::vector list_var = {\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9}" + }, + "test_var": { + "type": "var", + "name": "", + "doc": { + "brief": "Example module variable test_var", + "attention": "It's a copy of this variable in MaixPy, so if you change it in C++, it will not take effect in MaixPy.\nAnd change it in MaixPy will not take effect in C++ as well !!!\nIf you want to use vars shared between C++ and MaixPy, you can create a class and use its member.", + "maixpy": "maix.example.test_var", + "py_doc": "Example module variable test_var" + }, + "value": "100", + "static": false, + "readonly": false, + "def": "int test_var = 100" + }, + "hello": { + "type": "func", + "name": "hello", + "doc": { + "brief": "say hello to someone", + "param": { + "name": "direction [in], name of someone, string type" + }, + "return": "string type, content is hello + name", + "maixpy": "maix.example.hello", + "py_doc": "say hello to someone\n\nArgs:\n - name: direction [in], name of someone, string type\n\n\nReturns: string type, content is hello + name\n" + }, + "args": [ + [ + "std::string", + "name", + null + ] + ], + "ret_type": "std::string", + "static": false, + "def": "std::string hello(std::string name)", + "py_def": "def hello(name: str) -> str" + }, + "Example": { + "type": "class", + "name": "Example", + "doc": { + "brief": "Example class\\nthis class will be export to MaixPy as maix.example.Example", + "maixpy": "maix.example.Example", + "py_doc": "Example class\nthis class will be export to MaixPy as maix.example.Example" + }, + "members": { + "__init__": { + "type": "func", + "name": "Example", + "doc": { + "brief": "Example constructor\\nthis constructor will be export to MaixPy as maix.example.Example.__init__", + "param": { + "name": "direction [in], name of Example, string type", + "age": "direction [in], age of Example, int type, default is 18, value range is [0, 100]" + }, + "attention": "to make auto generate code work, param Kind should with full namespace name `example::Kind` instead of `Kind`,\nnamespace `maix` can be ignored.", + "maixpy": "maix.example.Example.__init__", + "maixcdk": "maix.example.Example.Example", + "py_doc": "Example constructor\nthis constructor will be export to MaixPy as maix.example.Example.__init__\n\nArgs:\n - name: direction [in], name of Example, string type\n - age: direction [in], age of Example, int type, default is 18, value range is [0, 100]\n" + }, + "args": [ + [ + "std::string &", + "name", + null + ], + [ + "int", + "age", + "18" + ], + [ + "example::Kind", + "pet", + "example::KIND_NONE" + ] + ], + "ret_type": null, + "static": false, + "def": "Example(std::string &name, int age = 18, example::Kind pet = example::KIND_NONE)", + "py_def": "def __init__(self, name: str, age: int = 18, pet: Kind = ...) 
-> None" + }, + "get_name": { + "type": "func", + "name": "get_name", + "doc": { + "brief": "get name of Example\\nyou can also get name by property `name`.", + "return": "name of Example, string type", + "maixpy": "maix.example.Example.get_name", + "py_doc": "get name of Example\nyou can also get name by property `name`.\n\nReturns: name of Example, string type\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string get_name()", + "py_def": "def get_name(self) -> str" + }, + "get_age": { + "type": "func", + "name": "get_age", + "doc": { + "brief": "get age of Example", + "return": "age of Example, int type, value range is [0, 100]", + "maixpy": "maix.example.Example.get_age", + "py_doc": "get age of Example\n\nReturns: age of Example, int type, value range is [0, 100]\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int get_age()", + "py_def": "def get_age(self) -> int" + }, + "set_name": { + "type": "func", + "name": "set_name", + "doc": { + "brief": "set name of Example", + "param": { + "name": "name of Example, string type" + }, + "maixpy": "maix.example.Example.set_name", + "py_doc": "set name of Example\n\nArgs:\n - name: name of Example, string type\n" + }, + "args": [ + [ + "std::string", + "name", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void set_name(std::string name)", + "py_def": "def set_name(self, name: str) -> None" + }, + "set_age": { + "type": "func", + "name": "set_age", + "doc": { + "brief": "set age of Example", + "param": { + "age": "age of Example, int type, value range is [0, 100]" + }, + "maixpy": "maix.example.Example.set_age", + "py_doc": "set age of Example\n\nArgs:\n - age: age of Example, int type, value range is [0, 100]\n" + }, + "args": [ + [ + "int", + "age", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void set_age(int age)", + "py_def": "def set_age(self, age: int) -> None" + }, + "set_pet": { + "type": "func", + "name": "set_pet", + "doc": { + "brief": "Example enum member", + "attention": "", + "maixpy": "maix.example.Example.set_pet", + "py_doc": "Example enum member" + }, + "args": [ + [ + "example::Kind", + "pet", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void set_pet(example::Kind pet)", + "py_def": "def set_pet(self, pet: Kind) -> None" + }, + "get_pet": { + "type": "func", + "name": "get_pet", + "doc": { + "brief": "Example enum member", + "maixpy": "maix.example.Example.get_pet", + "py_doc": "Example enum member" + }, + "args": [], + "ret_type": "example::Kind", + "static": false, + "def": "example::Kind get_pet()", + "py_def": "def get_pet(self) -> Kind" + }, + "get_list": { + "type": "func", + "name": "get_list", + "doc": { + "brief": "get list example", + "param": { + "in": "direction [in], input list, items are int type.\nIn MaixPy, you can pass list or tuple to this API" + }, + "return": "list, items are int type, content is [1, 2, 3] + in. Alloc item, del in MaixPy will auto free memory.", + "maixpy": "maix.example.Example.get_list", + "py_doc": "get list example\n\nArgs:\n - in: direction [in], input list, items are int type.\nIn MaixPy, you can pass list or tuple to this API\n\n\nReturns: list, items are int type, content is [1, 2, 3] + in. 
Alloc item, del in MaixPy will auto free memory.\n" + }, + "args": [ + [ + "std::vector", + "in", + null + ] + ], + "ret_type": "std::vector*", + "static": false, + "def": "std::vector *get_list(std::vector in)", + "py_def": "def get_list(self, in: list[int]) -> list[int]" + }, + "get_dict": { + "type": "func", + "name": "get_dict", + "doc": { + "brief": "Example dict API", + "param": { + "in": "direction [in], input dict, key is string type, value is int type.\nIn MaixPy, you can pass `dict` to this API" + }, + "return": "dict, key is string type, value is int type, content is {\"a\": 1} + in\nIn MaixPy, return type is `dict` object", + "maixpy": "maix.example.Example.get_dict", + "py_doc": "Example dict API\n\nArgs:\n - in: direction [in], input dict, key is string type, value is int type.\nIn MaixPy, you can pass `dict` to this API\n\n\nReturns: dict, key is string type, value is int type, content is {\"a\": 1} + in\nIn MaixPy, return type is `dict` object\n" + }, + "args": [ + [ + "std::map &", + "in", + null + ] + ], + "ret_type": "std::map", + "static": false, + "def": "std::map get_dict(std::map &in)", + "py_def": "def get_dict(self, in: dict[str, int]) -> dict[str, int]" + }, + "hello": { + "type": "func", + "name": "hello", + "doc": { + "brief": "say hello to someone", + "param": { + "name": "name of someone, string type" + }, + "return": "string type, content is Example::hello_str + name", + "maixpy": "maix.example.Example.hello", + "py_doc": "say hello to someone\n\nArgs:\n - name: name of someone, string type\n\n\nReturns: string type, content is Example::hello_str + name\n" + }, + "args": [ + [ + "std::string", + "name", + null + ] + ], + "ret_type": "std::string", + "static": true, + "def": "static std::string hello(std::string name)", + "py_def": "def hello(name: str) -> str" + }, + "hello_bytes": { + "type": "func", + "name": "hello_bytes", + "doc": { + "brief": "param is bytes example", + "param": { + "bytes": "bytes type param" + }, + "return": "bytes type, return value is bytes changed value", + "maixpy": "maix.example.Example.hello_bytes", + "py_doc": "param is bytes example\n\nArgs:\n - bytes: bytes type param\n\n\nReturns: bytes type, return value is bytes changed value\n" + }, + "args": [ + [ + "Bytes &", + "bytes", + null + ] + ], + "ret_type": "Bytes*", + "static": true, + "def": "static Bytes *hello_bytes(Bytes &bytes)", + "py_def": "def hello_bytes(*args, **kwargs)" + }, + "callback": { + "type": "func", + "name": "callback", + "doc": { + "brief": "Callback example", + "param": { + "cb": "callback function, param is two int type, return is int type" + }, + "return": "int type, return value is cb's return value.", + "maixpy": "maix.example.Example.callback", + "py_doc": "Callback example\n\nArgs:\n - cb: callback function, param is two int type, return is int type\n\n\nReturns: int type, return value is cb's return value.\n" + }, + "args": [ + [ + "std::function", + "cb", + null + ] + ], + "ret_type": "int", + "static": true, + "def": "static int callback(std::function cb)", + "py_def": "def callback(cb: typing.Callable[[int, int], int]) -> int" + }, + "callback2": { + "type": "func", + "name": "callback2", + "doc": { + "brief": "Callback example", + "param": { + "cb": "callback function, param is a int list type and int type, return is int type" + }, + "return": "int type, return value is cb's return value.", + "maixpy": "maix.example.Example.callback2", + "py_doc": "Callback example\n\nArgs:\n - cb: callback function, param is a int list type and int type, 
return is int type\n\n\nReturns: int type, return value is cb's return value.\n" + }, + "args": [ + [ + "std::function, int)>", + "cb", + null + ] + ], + "ret_type": "int", + "static": true, + "def": "static int callback2(std::function, int)> cb)", + "py_def": "def callback2(cb: typing.Callable[[list[int], int], int]) -> int" + }, + "hello_dict": { + "type": "func", + "name": "hello_dict", + "doc": { + "brief": "Dict param example", + "param": { + "dict": "dict type param, key is string type, value is int type" + }, + "maixpy": "maix.example.Example.hello_dict", + "py_doc": "Dict param example\n\nArgs:\n - dict: dict type param, key is string type, value is int type\n" + }, + "args": [ + [ + "std::map *", + "dict", + null + ] + ], + "ret_type": "std::map*", + "static": true, + "def": "static std::map *hello_dict(std::map *dict)", + "py_def": "def hello_dict(dict: dict[str, int]) -> dict[str, int]" + }, + "name": { + "type": "var", + "name": "name", + "doc": { + "brief": "name member of Example", + "maixpy": "maix.example.Example.name", + "py_doc": "name member of Example" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::string name" + }, + "age": { + "type": "var", + "name": "age", + "doc": { + "brief": "age member of Example, value range should be [0, 100]", + "maixpy": "maix.example.Example.age", + "py_doc": "age member of Example, value range should be [0, 100]" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int age" + }, + "hello_str": { + "type": "var", + "name": "hello_str", + "doc": { + "brief": "hello_str member of Example, default value is \\\"hello \\\"", + "maixpy": "maix.example.Example.hello_str", + "py_doc": "hello_str member of Example, default value is \"hello \"" + }, + "value": null, + "static": true, + "readonly": false, + "def": "static std::string hello_str" + }, + "var1": { + "type": "var", + "name": "", + "doc": { + "brief": "Example module readonly variable", + "maixpy": "maix.example.Example.var1", + "py_doc": "Example module readonly variable" + }, + "value": "\"Example.var1\"", + "static": false, + "readonly": true, + "def": "const std::string var1 = \"Example.var1\"" + }, + "var2": { + "type": "var", + "name": "", + "doc": { + "brief": "Example module readonly variable", + "maixpy": "maix.example.Example.var2\n:readonly", + "py_doc": "Example module readonly variable" + }, + "value": "\"Example.var2\"", + "static": false, + "readonly": true, + "def": "std::string var2 = \"Example.var2\"" + }, + "dict_test": { + "type": "func", + "name": "dict_test", + "doc": { + "brief": "dict_test, return dict type, and element is pointer type(alloc in C++).\\nHere when the returned Tensor object will auto delete by Python GC.", + "maixpy": "maix.example.Example.dict_test", + "py_doc": "dict_test, return dict type, and element is pointer type(alloc in C++).\nHere when the returned Tensor object will auto delete by Python GC." 
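The maix.example entries above exist mainly as binding demos; a small sketch of how the documented members fit together, assuming the usual `from maix import example` import style and enum access as `example.Kind.KIND_DOG` per the enum definition above:

    from maix import example

    e = example.Example("Tom", age=20, pet=example.Kind.KIND_DOG)
    print(e.get_name(), e.get_age())        # "Tom" 20
    e.set_name("Jerry")
    print(example.Example.hello(e.name))    # static method: hello_str + name
    print(e.get_list([4, 5]))               # [1, 2, 3] + input -> [1, 2, 3, 4, 5]
    print(e.get_dict({"b": 2}))             # {"a": 1} + input
    print(example.Example.callback(lambda a, b: a + b))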
+ }, + "args": [], + "ret_type": "std::map*", + "static": true, + "def": "static std::map *dict_test()", + "py_def": "def dict_test() -> dict[str, Test]" + } + }, + "def": "class Example" + }, + "change_arg_name": { + "type": "func", + "name": "change_arg_name", + "doc": { + "brief": "Change arg name example", + "param": { + "e": "Example object" + }, + "return": "same as arg", + "maixpy": "maix.example.change_arg_name", + "py_doc": "Change arg name example\n\nArgs:\n - e: Example object\n\n\nReturns: same as arg\n" + }, + "args": [ + [ + "example::Example *", + "e", + null + ] + ], + "ret_type": "example::Example*", + "static": false, + "def": "example::Example *change_arg_name(example::Example *e)", + "py_def": "def change_arg_name(e: Example) -> Example" + }, + "change_arg_name2": { + "type": "func", + "name": "change_arg_name2", + "doc": { + "brief": "Change arg name example", + "param": { + "e": "Example object" + }, + "maixpy": "maix.example.change_arg_name2", + "py_doc": "Change arg name example\n\nArgs:\n - e: Example object\n" + }, + "args": [ + [ + "example::Example &", + "e", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void change_arg_name2(example::Example &e)", + "py_def": "def change_arg_name2(e: Example) -> None" + } + }, + "auto_add": false + }, + "util": { + "type": "module", + "doc": { + "brief": "maix.util module" + }, + "members": { + "do_exit_function": { + "type": "func", + "name": "do_exit_function", + "doc": { + "brief": "exec all of exit function", + "maixpy": "maix.util.do_exit_function", + "py_doc": "exec all of exit function" + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void do_exit_function()", + "py_def": "def do_exit_function() -> None" + }, + "register_atexit": { + "type": "func", + "name": "register_atexit", + "doc": { + "brief": "Registering default processes that need to be executed on exit", + "maixpy": "maix.util.register_atexit", + "py_doc": "Registering default processes that need to be executed on exit" + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void register_atexit()", + "py_def": "def register_atexit() -> None" + } + }, + "auto_add": true + }, + "thread": { + "type": "module", + "doc": { + "brief": "maix.thread module" + }, + "members": { + "Thread": { + "type": "class", + "name": "Thread", + "doc": { + "brief": "thread class", + "maixpy": "maix.thread.Thread", + "py_doc": "thread class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Thread", + "doc": { + "brief": "create thread", + "param": { + "func": "direction [in], thread function, one `args` parameter, void* type, no return value", + "args": "direction [in], thread function parameter" + }, + "maixpy": "maix.thread.Thread.__init__", + "maixcdk": "maix.thread.Thread.Thread", + "py_doc": "create thread\n\nArgs:\n - func: direction [in], thread function, one `args` parameter, void* type, no return value\n - args: direction [in], thread function parameter\n" + }, + "args": [ + [ + "std::function", + "func", + null + ], + [ + "void *", + "args", + "nullptr" + ] + ], + "ret_type": null, + "static": false, + "def": "Thread(std::function func, void *args = nullptr)", + "py_def": "def __init__(self, func: typing.Callable[[capsule], None], args: capsule = None) -> None" + }, + "join": { + "type": "func", + "name": "join", + "doc": { + "brief": "wait thread exit", + "maixpy": "maix.thread.Thread.join", + "py_doc": "wait thread exit" + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void 
join()", + "py_def": "def join(self) -> None" + }, + "detach": { + "type": "func", + "name": "detach", + "doc": { + "brief": "detach thread, detach will auto start thread and you can't use join anymore.", + "maixpy": "maix.thread.Thread.detach", + "py_doc": "detach thread, detach will auto start thread and you can't use join anymore." + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void detach()", + "py_def": "def detach(self) -> None" + }, + "joinable": { + "type": "func", + "name": "joinable", + "doc": { + "brief": "Check if thread is joinable", + "return": "true if thread is joinable", + "maixpy": "maix.thread.Thread.joinable", + "py_doc": "Check if thread is joinable\n\nReturns: true if thread is joinable\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool joinable()", + "py_def": "def joinable(self) -> bool" + } + }, + "def": "class Thread" + } + }, + "auto_add": true + }, + "sys": { + "type": "module", + "doc": { + "brief": "maix.sys module" + }, + "members": { + "os_version": { + "type": "func", + "name": "os_version", + "doc": { + "brief": "Get system version", + "return": "version string, e.g. \"maixcam-2024-08-13-maixpy-v4.4.20\"", + "maixpy": "maix.sys.os_version", + "py_doc": "Get system version\n\nReturns: version string, e.g. \"maixcam-2024-08-13-maixpy-v4.4.20\"\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string os_version()", + "py_def": "def os_version() -> str" + }, + "maixpy_version": { + "type": "func", + "name": "maixpy_version", + "doc": { + "brief": "Get MaixPy version, if get failed will return empty string.", + "return": "version string, e.g. \"4.4.21\"", + "maixpy": "maix.sys.maixpy_version", + "py_doc": "Get MaixPy version, if get failed will return empty string.\n\nReturns: version string, e.g. \"4.4.21\"\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string maixpy_version()", + "py_def": "def maixpy_version() -> str" + }, + "device_name": { + "type": "func", + "name": "device_name", + "doc": { + "brief": "Get device name", + "return": "device name, e.g. \"MaixCAM\"", + "maixpy": "maix.sys.device_name", + "py_doc": "Get device name\n\nReturns: device name, e.g. \"MaixCAM\"\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string device_name()", + "py_def": "def device_name() -> str" + }, + "host_name": { + "type": "func", + "name": "host_name", + "doc": { + "brief": "Get host name", + "return": "host name, e.g. \"maixcam-2f9f\"", + "maixpy": "maix.sys.host_name", + "py_doc": "Get host name\n\nReturns: host name, e.g. \"maixcam-2f9f\"\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string host_name()", + "py_def": "def host_name() -> str" + }, + "host_domain": { + "type": "func", + "name": "host_domain", + "doc": { + "brief": "Get host domain", + "return": "host domain, e.g. \"maixcam-2f9f.local\"", + "maixpy": "maix.sys.host_domain", + "py_doc": "Get host domain\n\nReturns: host domain, e.g. \"maixcam-2f9f.local\"\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string host_domain()", + "py_def": "def host_domain() -> str" + }, + "ip_address": { + "type": "func", + "name": "ip_address", + "doc": { + "brief": "Get ip address", + "return": "ip address, dict type, e.g. 
{\"eth0\": \"192.168.0.195\", \"wlan0\": \"192.168.0.123\", \"usb0\": \"10.47.159.1\"}", + "maixpy": "maix.sys.ip_address", + "py_doc": "Get ip address\n\nReturns: ip address, dict type, e.g. {\"eth0\": \"192.168.0.195\", \"wlan0\": \"192.168.0.123\", \"usb0\": \"10.47.159.1\"}\n" + }, + "args": [], + "ret_type": "std::map", + "static": false, + "def": "std::map ip_address()", + "py_def": "def ip_address() -> dict[str, str]" + }, + "mac_address": { + "type": "func", + "name": "mac_address", + "doc": { + "brief": "Get mac address", + "return": "mac address, dict type, e.g. {\"eth0\": \"00:0c:29:2f:9f:00\", \"wlan0\": \"00:0c:29:2f:9f:01\", \"usb0\": \"00:0c:29:2f:9f:02\"}", + "maixpy": "maix.sys.mac_address", + "py_doc": "Get mac address\n\nReturns: mac address, dict type, e.g. {\"eth0\": \"00:0c:29:2f:9f:00\", \"wlan0\": \"00:0c:29:2f:9f:01\", \"usb0\": \"00:0c:29:2f:9f:02\"}\n" + }, + "args": [], + "ret_type": "std::map", + "static": false, + "def": "std::map mac_address()", + "py_def": "def mac_address() -> dict[str, str]" + }, + "device_key": { + "type": "func", + "name": "device_key", + "doc": { + "brief": "Get device key, can be unique id of device", + "return": "device key, 32 bytes hex string, e.g. \"1234567890abcdef1234567890abcdef\"", + "maixpy": "maix.sys.device_key", + "py_doc": "Get device key, can be unique id of device\n\nReturns: device key, 32 bytes hex string, e.g. \"1234567890abcdef1234567890abcdef\"\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string device_key()", + "py_def": "def device_key() -> str" + }, + "memory_info": { + "type": "func", + "name": "memory_info", + "doc": { + "brief": "Get memory info", + "return": "memory info, dict type, e.g. {\"total\": 1024, \"used\": 512, \"hw_total\": 256*1024*1024}\ntotal: total memory size in Byte.\nused: used memory size in Byte.\nhw_total: total memory size in Byte of hardware, the total <= hw_total\uff0c\nOS kernel may reserve some memory for some hardware like camera, npu, display etc.", + "maixpy": "maix.sys.memory_info", + "py_doc": "Get memory info\n\nReturns: memory info, dict type, e.g. {\"total\": 1024, \"used\": 512, \"hw_total\": 256*1024*1024}\ntotal: total memory size in Byte.\nused: used memory size in Byte.\nhw_total: total memory size in Byte of hardware, the total <= hw_total\uff0c\nOS kernel may reserve some memory for some hardware like camera, npu, display etc.\n" + }, + "args": [], + "ret_type": "std::map", + "static": false, + "def": "std::map memory_info()", + "py_def": "def memory_info() -> dict[str, int]" + }, + "bytes_to_human": { + "type": "func", + "name": "bytes_to_human", + "doc": { + "brief": "Bytes to human readable string", + "param": { + "bytes:": "bytes size\uff0ce.g. 1234B = 1234/1024 = 1.205 KB", + "precision:": "decimal precision, default 2", + "base:": "base number, default 1024", + "unit:": "unit string, e.g. \"B\"", + "sep:": "separator string, e.g. \" \"" + }, + "return": "human readable string, e.g. \"1.21 KB\"", + "maixpy": "maix.sys.bytes_to_human", + "py_doc": "Bytes to human readable string\n\nArgs:\n - bytes:: bytes size\uff0ce.g. 1234B = 1234/1024 = 1.205 KB\n - precision:: decimal precision, default 2\n - base:: base number, default 1024\n - unit:: unit string, e.g. \"B\"\n - sep:: separator string, e.g. \" \"\n\n\nReturns: human readable string, e.g. 
\"1.21 KB\"\n" + }, + "args": [ + [ + "unsigned long long", + "bytes", + null + ], + [ + "int", + "precision", + "2" + ], + [ + "int", + "base", + "1024" + ], + [ + "const std::string &", + "unit", + "\"B\"" + ], + [ + "const std::string &", + "sep", + "\" \"" + ] + ], + "ret_type": "std::string", + "static": false, + "def": "std::string bytes_to_human(unsigned long long bytes, int precision = 2, int base = 1024, const std::string &unit = \"B\", const std::string &sep = \" \")", + "py_def": "def bytes_to_human(bytes: int, precision: int = 2, base: int = 1024, unit: str = 'B', sep: str = ' ') -> str" + }, + "cpu_freq": { + "type": "func", + "name": "cpu_freq", + "doc": { + "brief": "Get CPU frequency", + "return": "CPU frequency, dict type, e.g. {\"cpu0\": 1000000000, \"cpu1\": 1000000000}", + "maixpy": "maix.sys.cpu_freq", + "py_doc": "Get CPU frequency\n\nReturns: CPU frequency, dict type, e.g. {\"cpu0\": 1000000000, \"cpu1\": 1000000000}\n" + }, + "args": [], + "ret_type": "std::map", + "static": false, + "def": "std::map cpu_freq()", + "py_def": "def cpu_freq() -> dict[str, int]" + }, + "cpu_temp": { + "type": "func", + "name": "cpu_temp", + "doc": { + "brief": "Get CPU temperature", + "return": "CPU temperature, unit dgree, dict type, e.g. {\"cpu\": 50.0, \"cpu0\": 50, \"cpu1\": 50}", + "maixpy": "maix.sys.cpu_temp", + "py_doc": "Get CPU temperature\n\nReturns: CPU temperature, unit dgree, dict type, e.g. {\"cpu\": 50.0, \"cpu0\": 50, \"cpu1\": 50}\n" + }, + "args": [], + "ret_type": "std::map", + "static": false, + "def": "std::map cpu_temp()", + "py_def": "def cpu_temp() -> dict[str, float]" + }, + "cpu_usage": { + "type": "func", + "name": "cpu_usage", + "doc": { + "brief": "Get CPU usage", + "return": "CPU usage, dict type, e.g. {\"cpu\": 50.0, \"cpu0\": 50, \"cpu1\": 50}", + "maixpy": "maix.sys.cpu_usage", + "py_doc": "Get CPU usage\n\nReturns: CPU usage, dict type, e.g. {\"cpu\": 50.0, \"cpu0\": 50, \"cpu1\": 50}\n" + }, + "args": [], + "ret_type": "std::map", + "static": false, + "def": "std::map cpu_usage()", + "py_def": "def cpu_usage() -> dict[str, float]" + }, + "npu_freq": { + "type": "func", + "name": "npu_freq", + "doc": { + "brief": "Get NPU frequency", + "return": "NPU frequency, dict type, e.g. {\"npu0\": 500000000}", + "maixpy": "maix.sys.npu_freq", + "py_doc": "Get NPU frequency\n\nReturns: NPU frequency, dict type, e.g. {\"npu0\": 500000000}\n" + }, + "args": [], + "ret_type": "std::map", + "static": false, + "def": "std::map npu_freq()", + "py_def": "def npu_freq() -> dict[str, int]" + }, + "disk_usage": { + "type": "func", + "name": "disk_usage", + "doc": { + "brief": "Get disk usage", + "param": { + "path:": "disk path, default \"/\"" + }, + "return": "disk usage, dict type, e.g. {\"total\": 1024, \"used\": 512}", + "maixpy": "maix.sys.disk_usage", + "py_doc": "Get disk usage\n\nArgs:\n - path:: disk path, default \"/\"\n\n\nReturns: disk usage, dict type, e.g. {\"total\": 1024, \"used\": 512}\n" + }, + "args": [ + [ + "const std::string &", + "path", + "\"/\"" + ] + ], + "ret_type": "std::map", + "static": false, + "def": "std::map disk_usage(const std::string &path = \"/\")", + "py_def": "def disk_usage(path: str = '/') -> dict[str, int]" + }, + "disk_partitions": { + "type": "func", + "name": "disk_partitions", + "doc": { + "brief": "Get disk partition and mount point info", + "param": { + "only_disk": "only return real disk, tempfs sysfs etc. not return, default true." + }, + "return": "disk partition and mount point info, list type, e.g. 
[{\"device\": \"/dev/mmcblk0p1\", \"mountpoint\": \"/mnt/sdcard\", \"fstype\": \"vfat\"}]", + "maixpy": "maix.sys.disk_partitions", + "py_doc": "Get disk partition and mount point info\n\nArgs:\n - only_disk: only return real disk, tempfs sysfs etc. not return, default true.\n\n\nReturns: disk partition and mount point info, list type, e.g. [{\"device\": \"/dev/mmcblk0p1\", \"mountpoint\": \"/mnt/sdcard\", \"fstype\": \"vfat\"}]\n" + }, + "args": [ + [ + "bool", + "only_disk", + "true" + ] + ], + "ret_type": "std::vector>", + "static": false, + "def": "std::vector> disk_partitions(bool only_disk = true)", + "py_def": "def disk_partitions(only_disk: bool = True) -> list[dict[str, str]]" + }, + "register_default_signal_handle": { + "type": "func", + "name": "register_default_signal_handle", + "doc": { + "brief": "register default signal handle", + "maixpy": "maix.sys.register_default_signal_handle", + "py_doc": "register default signal handle" + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void register_default_signal_handle()" + }, + "poweroff": { + "type": "func", + "name": "poweroff", + "doc": { + "brief": "Power off device", + "maixpy": "maix.sys.poweroff", + "py_doc": "Power off device" + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void poweroff()", + "py_def": "def poweroff() -> None" + }, + "reboot": { + "type": "func", + "name": "reboot", + "doc": { + "brief": "Power off device and power on", + "maixpy": "maix.sys.reboot", + "py_doc": "Power off device and power on" + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void reboot()", + "py_def": "def reboot() -> None" + } + }, + "auto_add": true + }, + "i18n": { + "type": "module", + "doc": { + "brief": "maix.i18n module" + }, + "members": { + "locales": { + "type": "var", + "name": "", + "doc": { + "brief": "i18n locales list", + "maixpy": "maix.i18n.locales", + "py_doc": "i18n locales list" + }, + "value": "{\n \"en\",\n \"zh\",\n \"zh-tw\",\n \"ja\"}", + "static": true, + "readonly": false, + "def": "static std::vector locales = {\n \"en\",\n \"zh\",\n \"zh-tw\",\n \"ja\"}" + }, + "names": { + "type": "var", + "name": "", + "doc": { + "brief": "i18n language names list", + "maixpy": "maix.i18n.names", + "py_doc": "i18n language names list" + }, + "value": "{\n \"English\",\n \"\u7b80\u4f53\u4e2d\u6587\",\n \"\u7e41\u9ad4\u4e2d\u6587\",\n \"\u65e5\u672c\u8a9e\"}", + "static": false, + "readonly": true, + "def": "const static std::vector names = {\n \"English\",\n \"\u7b80\u4f53\u4e2d\u6587\",\n \"\u7e41\u9ad4\u4e2d\u6587\",\n \"\u65e5\u672c\u8a9e\"}" + }, + "get_locale": { + "type": "func", + "name": "get_locale", + "doc": { + "brief": "Get system config of locale.", + "return": "language locale, e.g. en, zh, zh_CN, zh_TW, etc.", + "maixpy": "maix.i18n.get_locale", + "py_doc": "Get system config of locale.\n\nReturns: language locale, e.g. en, zh, zh_CN, zh_TW, etc.\n" + }, + "args": [], + "ret_type": "string", + "static": false, + "def": "string get_locale()", + "py_def": "def get_locale() -> str" + }, + "get_language_name": { + "type": "func", + "name": "get_language_name", + "doc": { + "brief": "Get system config of language name.", + "return": "language name, e.g. English, \u7b80\u4f53\u4e2d\u6587, \u7e41\u9ad4\u4e2d\u6587, etc.", + "maixpy": "maix.i18n.get_language_name", + "py_doc": "Get system config of language name.\n\nReturns: language name, e.g. 
English, \u7b80\u4f53\u4e2d\u6587, \u7e41\u9ad4\u4e2d\u6587, etc.\n" + }, + "args": [], + "ret_type": "string", + "static": false, + "def": "string get_language_name()", + "py_def": "def get_language_name() -> str" + }, + "load_trans_yaml": { + "type": "func", + "name": "load_trans_yaml", + "doc": { + "brief": "Load translations from yaml files.", + "param": { + "locales_dir": "translation yaml files directory." + }, + "return": "A dict contains all translations, e.g. {\"zh\":{\"hello\": \"\u4f60\u597d\"}, \"en\":{\"hello\": \"hello\"}}, you should delete it after use in C++.", + "maixpy": "maix.i18n.load_trans_yaml", + "py_doc": "Load translations from yaml files.\n\nArgs:\n - locales_dir: translation yaml files directory.\n\n\nReturns: A dict contains all translations, e.g. {\"zh\":{\"hello\": \"\u4f60\u597d\"}, \"en\":{\"hello\": \"hello\"}}, you should delete it after use in C++.\n" + }, + "args": [ + [ + "const std::string &", + "locales_dir", + null + ] + ], + "ret_type": "const std::map>*", + "static": false, + "def": "const std::map> *load_trans_yaml(const std::string &locales_dir)", + "py_def": "def load_trans_yaml(locales_dir: str) -> dict[str, dict[str, str]]" + }, + "Trans": { + "type": "class", + "name": "Trans", + "doc": { + "brief": "Translate helper class.", + "maixpy": "maix.i18n.Trans", + "py_doc": "Translate helper class." + }, + "members": { + "__init__": { + "type": "func", + "name": "Trans", + "doc": { + "brief": "Translate helper class constructor.\\nBy default locale is get by `i18n.get_locale()` function which set by system settings.\\nBut you can also manually set by `set_locale` function temporarily.", + "param": { + "locales_dict": "locales dict, e.g. {\"zh\": {\"Confirm\": \"\u786e\u8ba4\", \"OK\": \"\u597d\u7684\"}, \"en\": {\"Confirm\": \"Confirm\", \"OK\": \"OK\"}}" + }, + "maixpy": "maix.i18n.Trans.__init__", + "maixcdk": "maix.i18n.Trans.Trans", + "py_doc": "Translate helper class constructor.\nBy default locale is get by `i18n.get_locale()` function which set by system settings.\nBut you can also manually set by `set_locale` function temporarily.\n\nArgs:\n - locales_dict: locales dict, e.g. {\"zh\": {\"Confirm\": \"\u786e\u8ba4\", \"OK\": \"\u597d\u7684\"}, \"en\": {\"Confirm\": \"Confirm\", \"OK\": \"OK\"}}\n" + }, + "args": [ + [ + "const std::map> &", + "locales_dict", + "std::map>()" + ] + ], + "ret_type": null, + "static": false, + "def": "Trans(const std::map> &locales_dict = std::map>())", + "py_def": "def __init__(self, locales_dict: dict[str, dict[str, str]] = {}) -> None" + }, + "load": { + "type": "func", + "name": "load", + "doc": { + "brief": "Load translation from yaml files generated by `maixtool i18n` command.", + "param": { + "locales_dir": "the translation files directory." + }, + "return": "err.Err type, no error will return err.Err.ERR_NONE.", + "maixpy": "maix.i18n.Trans.load", + "py_doc": "Load translation from yaml files generated by `maixtool i18n` command.\n\nArgs:\n - locales_dir: the translation files directory.\n\n\nReturns: err.Err type, no error will return err.Err.ERR_NONE.\n" + }, + "args": [ + [ + "const std::string &", + "locales_dir", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err load(const std::string &locales_dir)", + "py_def": "def load(self, locales_dir: str) -> maix.err.Err" + }, + "update_dict": { + "type": "func", + "name": "update_dict", + "doc": { + "brief": "Update translation dict.", + "param": { + "dict": "the new translation dict." 
+ }, + "return": "err.Err type, no error will return err.Err.ERR_NONE.", + "maixpy": "maix.i18n.Trans.update_dict", + "py_doc": "Update translation dict.\n\nArgs:\n - dict: the new translation dict.\n\n\nReturns: err.Err type, no error will return err.Err.ERR_NONE.\n" + }, + "args": [ + [ + "const std::map> &", + "dict", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err update_dict(const std::map> &dict)", + "py_def": "def update_dict(self, dict: dict[str, dict[str, str]]) -> maix.err.Err" + }, + "tr": { + "type": "func", + "name": "tr", + "doc": { + "brief": "Translate string by key.", + "param": { + "key": "string key, e.g. \"Confirm\"", + "locale": "locale name, if not assign, use default locale set by system settings or set_locale function." + }, + "return": "translated string, if find translation, return it, or return key, e.g. \"\u786e\u8ba4\", \"Confirm\", etc.", + "maixpy": "maix.i18n.Trans.tr", + "py_doc": "Translate string by key.\n\nArgs:\n - key: string key, e.g. \"Confirm\"\n - locale: locale name, if not assign, use default locale set by system settings or set_locale function.\n\n\nReturns: translated string, if find translation, return it, or return key, e.g. \"\u786e\u8ba4\", \"Confirm\", etc.\n" + }, + "args": [ + [ + "const string &", + "key", + null + ], + [ + "const string", + "locale", + "\"\"" + ] + ], + "ret_type": "string", + "static": false, + "def": "string tr(const string &key, const string locale = \"\")", + "py_def": "def tr(self, key: str, locale: str = '') -> str" + }, + "set_locale": { + "type": "func", + "name": "set_locale", + "doc": { + "brief": "Set locale temporarily, will not affect system settings.", + "param": { + "locale": "locale name, e.g. \"zh\", \"en\", etc. @see maix.i18n.locales" + }, + "maixpy": "maix.i18n.Trans.set_locale", + "py_doc": "Set locale temporarily, will not affect system settings.\n\nArgs:\n - locale: locale name, e.g. \"zh\", \"en\", etc. @see maix.i18n.locales\n" + }, + "args": [ + [ + "const string &", + "locale", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void set_locale(const string &locale)", + "py_def": "def set_locale(self, locale: str) -> None" + }, + "get_locale": { + "type": "func", + "name": "get_locale", + "doc": { + "brief": "Get current locale.", + "return": "locale name, e.g. \"zh\", \"en\", etc. @see maix.i18n.locales", + "maixpy": "maix.i18n.Trans.get_locale", + "py_doc": "Get current locale.\n\nReturns: locale name, e.g. \"zh\", \"en\", etc. 
@see maix.i18n.locales\n" + }, + "args": [], + "ret_type": "string", + "static": false, + "def": "string get_locale()", + "py_def": "def get_locale(self) -> str" + } + }, + "def": "class Trans" + } + }, + "auto_add": true + }, + "peripheral": { + "type": "module", + "doc": { + "brief": "Chip's peripheral driver", + "maixpy": "maix.peripheral", + "py_doc": "Chip's peripheral driver" + }, + "members": { + "key": { + "type": "module", + "doc": { + "brief": "maix.peripheral.key module" + }, + "members": { + "Keys": { + "type": "enum", + "name": "Keys{", + "doc": { + "brief": "Keys enum, id the same as linux input.h(input-event-codes.h)", + "maixpy": "maix.peripheral.key.Keys", + "py_doc": "Keys enum, id the same as linux input.h(input-event-codes.h)" + }, + "values": [ + [ + "KEY_NONE", + "0x000", + "" + ], + [ + "KEY_ESC", + "0x001", + "" + ], + [ + "KEY_OK", + "0x160", + "" + ], + [ + "KEY_OPTION", + "0x165", + "" + ], + [ + "KEY_NEXT", + "0x197", + "" + ], + [ + "KEY_PREV", + "0x19c", + "" + ] + ], + "def": "enum Keys{\n KEY_NONE = 0x000,\n KEY_ESC = 0x001,\n KEY_OK = 0x160,\n KEY_OPTION = 0x165,\n KEY_NEXT = 0x197,\n KEY_PREV = 0x19c\n }" + }, + "State": { + "type": "enum", + "name": "State{", + "doc": { + "brief": "Key state enum", + "maixpy": "maix.peripheral.key.State", + "py_doc": "Key state enum" + }, + "values": [ + [ + "KEY_RELEASED", + "0", + "" + ], + [ + "KEY_PRESSED", + "1", + "" + ] + ], + "def": "enum State{\n KEY_RELEASED = 0,\n KEY_PRESSED = 1,\n }" + }, + "Key": { + "type": "class", + "name": "Key", + "doc": { + "brief": "Key input class", + "maixpy": "maix.peripheral.key.Key", + "py_doc": "Key input class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Key", + "doc": { + "brief": "Key Device constructor", + "param": { + "callback": "When key triggered and callback is not empty(empty In MaixPy is None, in C++ is nullptr),\ncallback will be called with args key(key.Keys) and value(key.State).\nIf set to null, you can get key value by read() function.\nThis callback called in a standalone thread, so you can block a while in callback, and you should be carefully when operate shared data.", + "open": "auto open device in constructor, if false, you need call open() to open device" + }, + "maixpy": "maix.peripheral.key.Key.__init__", + "maixcdk": "maix.peripheral.key.Key.Key", + "py_doc": "Key Device constructor\n\nArgs:\n - callback: When key triggered and callback is not empty(empty In MaixPy is None, in C++ is nullptr),\ncallback will be called with args key(key.Keys) and value(key.State).\nIf set to null, you can get key value by read() function.\nThis callback called in a standalone thread, so you can block a while in callback, and you should be carefully when operate shared data.\n - open: auto open device in constructor, if false, you need call open() to open device\n" + }, + "args": [ + [ + "std::function", + "callback", + "nullptr" + ], + [ + "bool", + "open", + "true" + ] + ], + "ret_type": null, + "static": false, + "def": "Key(std::function callback = nullptr, bool open = true)", + "py_def": "def __init__(self, callback: typing.Callable[[int, int], None] = None, open: bool = True) -> None" + }, + "open": { + "type": "func", + "name": "open", + "doc": { + "brief": "Open(Initialize) key device, if already opened, will close first and then open.", + "return": "err::Err type, err.Err.ERR_NONE means success", + "maixpy": "maix.peripheral.key.Key.open", + "py_doc": "Open(Initialize) key device, if already opened, will close first and then open.\n\nReturns: 
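A short sketch of the i18n API above; the translation dict mirrors the example in the Trans constructor doc, and the yaml directory in the last line is a placeholder:

    from maix import i18n

    print(i18n.get_locale(), i18n.get_language_name())

    trans = i18n.Trans({
        "zh": {"Confirm": "确认", "OK": "好的"},
        "en": {"Confirm": "Confirm", "OK": "OK"},
    })
    print(trans.tr("Confirm"))     # uses the system locale by default
    trans.set_locale("zh")         # temporary override, system settings unchanged
    print(trans.tr("OK"))
    # or load yaml files generated by `maixtool i18n`:
    # err = trans.load("./locales")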
err::Err type, err.Err.ERR_NONE means success\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err open()", + "py_def": "def open(self) -> maix.err.Err" + }, + "close": { + "type": "func", + "name": "close", + "doc": { + "brief": "Close key device", + "return": "err::Err type, err.Err.ERR_NONE means success", + "maixpy": "maix.peripheral.key.Key.close", + "py_doc": "Close key device\n\nReturns: err::Err type, err.Err.ERR_NONE means success\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err close()", + "py_def": "def close(self) -> maix.err.Err" + }, + "is_opened": { + "type": "func", + "name": "is_opened", + "doc": { + "brief": "Check key device is opened", + "return": "bool type, true means opened, false means closed", + "maixpy": "maix.peripheral.key.Key.is_opened", + "py_doc": "Check key device is opened\n\nReturns: bool type, true means opened, false means closed\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_opened()", + "py_def": "def is_opened(self) -> bool" + }, + "read": { + "type": "func", + "name": "read", + "doc": { + "brief": "Read key input, and return key and value, if callback is set, DO NOT call this function manually.", + "return": "list type, first is key(maix.key.Keys), second is value(maix.key.State), if no key input, return [0, 0]", + "throw": "If read failed, will throw maix.err.Exception.", + "maixpy": "maix.peripheral.key.Key.read", + "py_doc": "Read key input, and return key and value, if callback is set, DO NOT call this function manually.\n\nReturns: list type, first is key(maix.key.Keys), second is value(maix.key.State), if no key input, return [0, 0]\n" + }, + "args": [], + "ret_type": "std::pair", + "static": false, + "def": "std::pair read()", + "py_def": "def read(self) -> tuple[int, int]" + } + }, + "def": "class Key" + }, + "add_default_listener": { + "type": "func", + "name": "add_default_listener", + "doc": { + "brief": "Add default listener, if you want to exit app when press ok button, you can just call this function.\\nThis function is auto called in MaixPy' startup code, so you don't need to call it in MaixPy.\\nCreate Key object will auto call rm_default_listener() to cancel the default ok button function.\\nWhen ok button pressed, a SIGINT signal will be raise and call app.set_exit_flag(True).", + "maixpy": "maix.peripheral.key.add_default_listener", + "py_doc": "Add default listener, if you want to exit app when press ok button, you can just call this function.\nThis function is auto called in MaixPy' startup code, so you don't need to call it in MaixPy.\nCreate Key object will auto call rm_default_listener() to cancel the default ok button function.\nWhen ok button pressed, a SIGINT signal will be raise and call app.set_exit_flag(True)." + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void add_default_listener()" + }, + "rm_default_listener": { + "type": "func", + "name": "rm_default_listener", + "doc": { + "brief": "Remove default listener, if you want to cancel the default ok button function(exit app), you can just call this function.", + "maixpy": "maix.peripheral.key.rm_default_listener", + "py_doc": "Remove default listener, if you want to cancel the default ok button function(exit app), you can just call this function." 
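A polling sketch for maix.peripheral.key.Key based on the doc above: with no callback, read() is used directly and returns [0, 0] until a key event arrives; Keys/State member access follows the enum definitions listed earlier:

    from maix.peripheral import key

    k = key.Key()                      # no callback, so poll with read()
    while True:
        code, state = k.read()         # [0, 0] when there is no key event
        if code == key.Keys.KEY_OK and state == key.State.KEY_PRESSED:
            print("OK pressed")
            break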
+ }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void rm_default_listener()" + } + }, + "auto_add": true + }, + "i2c": { + "type": "module", + "doc": { + "brief": "maix.peripheral.i2c module" + }, + "members": { + "AddrSize": { + "type": "enum", + "name": "AddrSize", + "doc": { + "brief": "Address size enum", + "maixpy": "maix.peripheral.i2c.AddrSize", + "py_doc": "Address size enum" + }, + "values": [ + [ + "SEVEN_BIT", + "7", + "7-bit address mode" + ], + [ + "TEN_BIT", + "10", + "10-bit address mode" + ] + ], + "def": "enum AddrSize\n {\n SEVEN_BIT = 7, // 7-bit address mode\n TEN_BIT = 10 // 10-bit address mode\n }" + }, + "Mode": { + "type": "enum", + "name": "Mode", + "doc": { + "brief": "I2C mode enum", + "maixpy": "maix.peripheral.i2c.Mode", + "py_doc": "I2C mode enum" + }, + "values": [ + [ + "MASTER", + "0x00", + "master mode" + ], + [ + "SLAVE", + "0x01", + "slave mode" + ] + ], + "def": "enum Mode\n {\n MASTER = 0x00, // master mode\n SLAVE = 0x01 // slave mode\n }" + }, + "list_devices": { + "type": "func", + "name": "list_devices", + "doc": { + "brief": "Get supported i2c bus devices.", + "return": "i2c bus devices list, int type, is the i2c bus id.", + "maixpy": "maix.peripheral.i2c.list_devices", + "py_doc": "Get supported i2c bus devices.\n\nReturns: i2c bus devices list, int type, is the i2c bus id.\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector list_devices()", + "py_def": "def list_devices() -> list[int]" + }, + "I2C": { + "type": "class", + "name": "I2C", + "doc": { + "brief": "Peripheral i2c class", + "maixpy": "maix.peripheral.i2c.I2C", + "py_doc": "Peripheral i2c class" + }, + "members": { + "__init__": { + "type": "func", + "name": "I2C", + "doc": { + "brief": "I2C Device constructor\\nthis constructor will be export to MaixPy as _maix.example.Example.__init__", + "param": { + "id": "direction [in], i2c bus id, int type, e.g. 0, 1, 2", + "freq": "direction [in], i2c clock, int type, default is 100000(100kbit/s), will auto set fast mode if freq > 100000.", + "mode": "direction [in], mode of i2c, i2c.Mode.SLAVE or i2c.Mode.MASTER.", + "addr_size": "direction [in], address length of i2c, i2c.AddrSize.SEVEN_BIT or i2c.AddrSize.TEN_BIT." + }, + "throw": "err::Exception if open i2c device failed.", + "maixpy": "maix.peripheral.i2c.I2C.__init__", + "py_doc": "I2C Device constructor\nthis constructor will be export to MaixPy as _maix.example.Example.__init__\n\nArgs:\n - id: direction [in], i2c bus id, int type, e.g. 0, 1, 2\n - freq: direction [in], i2c clock, int type, default is 100000(100kbit/s), will auto set fast mode if freq > 100000.\n - mode: direction [in], mode of i2c, i2c.Mode.SLAVE or i2c.Mode.MASTER.\n - addr_size: direction [in], address length of i2c, i2c.AddrSize.SEVEN_BIT or i2c.AddrSize.TEN_BIT.\n" + }, + "args": [ + [ + "int", + "id", + null + ], + [ + "i2c::Mode", + "mode", + null + ], + [ + "int", + "freq", + "100000" + ], + [ + "i2c::AddrSize", + "addr_size", + "i2c::AddrSize::SEVEN_BIT" + ] + ], + "ret_type": null, + "static": false, + "def": "I2C(int id, i2c::Mode mode, int freq = 100000, i2c::AddrSize addr_size = i2c::AddrSize::SEVEN_BIT)", + "py_def": "def __init__(self, id: int, mode: Mode, freq: int = 100000, addr_size: AddrSize = ...) -> None" + }, + "scan": { + "type": "func", + "name": "scan", + "doc": { + "brief": "scan all i2c salve address on the bus", + "param": { + "addr": "If -1, only scan this addr, or scan from 0x08~0x77, default -1." 
+ }, + "return": "the list of i2c slave address, int list type.", + "maixpy": "maix.peripheral.i2c.I2C.scan", + "py_doc": "scan all i2c salve address on the bus\n\nArgs:\n - addr: If -1, only scan this addr, or scan from 0x08~0x77, default -1.\n\n\nReturns: the list of i2c slave address, int list type.\n" + }, + "args": [ + [ + "int", + "addr", + "-1" + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector scan(int addr = -1)", + "py_def": "def scan(self, addr: int = -1) -> list[int]" + }, + "writeto": { + "type": "func", + "name": "writeto", + "doc": { + "brief": "write data to i2c slave", + "param": { + "addr": "direction [in], i2c slave address, int type", + "data": "direction [in], data to write, bytes type.\nNote: The range of value should be in [0,255]." + }, + "return": "if success, return the length of written data, error occurred will return -err::Err.", + "maixpy": "maix.peripheral.i2c.I2C.writeto", + "py_doc": "write data to i2c slave\n\nArgs:\n - addr: direction [in], i2c slave address, int type\n - data: direction [in], data to write, bytes type.\nNote: The range of value should be in [0,255].\n\n\nReturns: if success, return the length of written data, error occurred will return -err::Err.\n" + }, + "args": [ + [ + "int", + "addr", + null + ], + [ + "const Bytes &", + "data", + null + ] + ], + "ret_type": "int", + "static": false, + "def": "int writeto(int addr, const Bytes &data)", + "py_def": "def writeto(self, addr: int, data: maix.Bytes(bytes)) -> int" + }, + "readfrom": { + "type": "func", + "name": "readfrom", + "doc": { + "brief": "read data from i2c slave", + "param": { + "addr": "direction [in], i2c slave address, int type", + "len": "direction [in], data length to read, int type" + }, + "return": "the list of data read from i2c slave, bytes type, you should delete it after use in C++.\nIf read failed, return nullptr in C++, None in MaixPy.", + "maixpy": "maix.peripheral.i2c.I2C.readfrom", + "py_doc": "read data from i2c slave\n\nArgs:\n - addr: direction [in], i2c slave address, int type\n - len: direction [in], data length to read, int type\n\n\nReturns: the list of data read from i2c slave, bytes type, you should delete it after use in C++.\nIf read failed, return nullptr in C++, None in MaixPy.\n" + }, + "args": [ + [ + "int", + "addr", + null + ], + [ + "int", + "len", + null + ] + ], + "ret_type": "Bytes*", + "static": false, + "def": "Bytes* readfrom(int addr, int len)", + "py_def": "def readfrom(*args, **kwargs)" + }, + "writeto_mem": { + "type": "func", + "name": "writeto_mem", + "doc": { + "brief": "write data to i2c slave's memory address", + "param": { + "addr": "direction [in], i2c slave address, int type", + "mem_addr": "direction [in], memory address want to write, int type.", + "data": "direction [in], data to write, bytes type.", + "mem_addr_size": "direction [in], memory address size, default is 8.", + "mem_addr_le": "direction [in], memory address little endian, default is false, that is send high byte first." 
+ }, + "return": "data length written if success, error occurred will return -err::Err.", + "maixpy": "maix.peripheral.i2c.I2C.writeto_mem", + "py_doc": "write data to i2c slave's memory address\n\nArgs:\n - addr: direction [in], i2c slave address, int type\n - mem_addr: direction [in], memory address want to write, int type.\n - data: direction [in], data to write, bytes type.\n - mem_addr_size: direction [in], memory address size, default is 8.\n - mem_addr_le: direction [in], memory address little endian, default is false, that is send high byte first.\n\n\nReturns: data length written if success, error occurred will return -err::Err.\n" + }, + "args": [ + [ + "int", + "addr", + null + ], + [ + "int", + "mem_addr", + null + ], + [ + "const Bytes &", + "data", + null + ], + [ + "int", + "mem_addr_size", + "8" + ], + [ + "bool", + "mem_addr_le", + "false" + ] + ], + "ret_type": "int", + "static": false, + "def": "int writeto_mem(int addr, int mem_addr, const Bytes &data, int mem_addr_size = 8, bool mem_addr_le = false)", + "py_def": "def writeto_mem(self, addr: int, mem_addr: int, data: maix.Bytes(bytes), mem_addr_size: int = 8, mem_addr_le: bool = False) -> int" + }, + "readfrom_mem": { + "type": "func", + "name": "readfrom_mem", + "doc": { + "brief": "read data from i2c slave", + "param": { + "addr": "direction [in], i2c slave address, int type", + "mem_addr": "direction [in], memory address want to read, int type.", + "len": "direction [in], data length to read, int type", + "mem_addr_size": "direction [in], memory address size, default is 8.", + "mem_addr_le": "direction [in], memory address little endian, default is false, that is send high byte first." + }, + "return": "the list of data read from i2c slave, bytes type, you should delete it after use in C++.\nIf read failed, return nullptr in C++, None in MaixPy.", + "maixpy": "maix.peripheral.i2c.I2C.readfrom_mem", + "py_doc": "read data from i2c slave\n\nArgs:\n - addr: direction [in], i2c slave address, int type\n - mem_addr: direction [in], memory address want to read, int type.\n - len: direction [in], data length to read, int type\n - mem_addr_size: direction [in], memory address size, default is 8.\n - mem_addr_le: direction [in], memory address little endian, default is false, that is send high byte first.\n\n\nReturns: the list of data read from i2c slave, bytes type, you should delete it after use in C++.\nIf read failed, return nullptr in C++, None in MaixPy.\n" + }, + "args": [ + [ + "int", + "addr", + null + ], + [ + "int", + "mem_addr", + null + ], + [ + "int", + "len", + null + ], + [ + "int", + "mem_addr_size", + "8" + ], + [ + "bool", + "mem_addr_le", + "false" + ] + ], + "ret_type": "Bytes*", + "static": false, + "def": "Bytes* readfrom_mem(int addr, int mem_addr, int len, int mem_addr_size = 8, bool mem_addr_le = false)", + "py_def": "def readfrom_mem(*args, **kwargs)" + } + }, + "def": "class I2C" + } + }, + "auto_add": true + }, + "spi": { + "type": "module", + "doc": { + "brief": "maix.peripheral.spi module" + }, + "members": { + "Mode": { + "type": "enum", + "name": "Mode", + "doc": { + "brief": "SPI mode enum", + "maixpy": "maix.peripheral.spi.Mode", + "py_doc": "SPI mode enum" + }, + "values": [ + [ + "MASTER", + "0x0", + "spi master mode" + ], + [ + "SLAVE", + "0x1", + "spi slave mode" + ] + ], + "def": "enum Mode\n {\n MASTER = 0x0, // spi master mode\n SLAVE = 0x1, // spi slave mode\n }" + }, + "SPI": { + "type": "class", + "name": "SPI", + "doc": { + "brief": "Peripheral spi class", + "maixpy": 
"maix.peripheral.spi.SPI", + "py_doc": "Peripheral spi class" + }, + "members": { + "__init__": { + "type": "func", + "name": "SPI", + "doc": { + "brief": "SPI constructor", + "param": { + "id": "direction [in], spi bus id, int type", + "mode": "direction [in], mode of spi, spi.Mode type, spi.Mode.MASTER or spi.Mode.SLAVE.", + "freq": "direction [in], freq of spi, int type", + "polarity": "direction [in], polarity of spi, 0 means idle level of clock is low, 1 means high, int type, default is 0.", + "phase": "direction [in], phase of spi, 0 means data is captured on the first edge of the SPI clock cycle, 1 means second, int type, default is 0.", + "bits": "direction [in], bits of spi, int type, default is 8.", + "cs_enable": "direction [in], cs pin active level, default is 0(low)", + "soft_cs": "direction [in], not use hardware cs, bool type, if set true, you can operate cs pin use gpio manually.", + "cs": "direction [in], soft cs pin number, std::string type, default is \"GPIOA19\", if SPI support multi hardware cs, you can set it to other value." + }, + "maixpy": "maix.peripheral.spi.SPI.__init__", + "py_doc": "SPI constructor\n\nArgs:\n - id: direction [in], spi bus id, int type\n - mode: direction [in], mode of spi, spi.Mode type, spi.Mode.MASTER or spi.Mode.SLAVE.\n - freq: direction [in], freq of spi, int type\n - polarity: direction [in], polarity of spi, 0 means idle level of clock is low, 1 means high, int type, default is 0.\n - phase: direction [in], phase of spi, 0 means data is captured on the first edge of the SPI clock cycle, 1 means second, int type, default is 0.\n - bits: direction [in], bits of spi, int type, default is 8.\n - cs_enable: direction [in], cs pin active level, default is 0(low)\n - soft_cs: direction [in], not use hardware cs, bool type, if set true, you can operate cs pin use gpio manually.\n - cs: direction [in], soft cs pin number, std::string type, default is \"GPIOA19\", if SPI support multi hardware cs, you can set it to other value.\n" + }, + "args": [ + [ + "int", + "id", + null + ], + [ + "spi::Mode", + "mode", + null + ], + [ + "int", + "freq", + null + ], + [ + "int", + "polarity", + "0" + ], + [ + "int", + "phase", + "0" + ], + [ + "int", + "bits", + "8" + ], + [ + "unsigned char", + "cs_enable", + "0" + ], + [ + "bool", + "soft_cs", + "false" + ], + [ + "std::string", + "cs", + "\"GPIOA19\"" + ] + ], + "ret_type": null, + "static": false, + "def": "SPI(int id, spi::Mode mode, int freq, int polarity = 0, int phase = 0,\n int bits = 8, unsigned char cs_enable=0, bool soft_cs = false, std::string cs = \"GPIOA19\")", + "py_def": "def __init__(self, id: int, mode: Mode, freq: int, polarity: int = 0, phase: int = 0, bits: int = 8, cs_enable: int = 0, soft_cs: bool = False, cs: str = 'GPIOA19') -> None" + }, + "read": { + "type": "func", + "name": "read", + "doc": { + "brief": "read data from spi", + "param": { + "length": "direction [in], read length, int type" + }, + "return": "bytes data, Bytes type in C++, bytes type in MaixPy. You need to delete it manually after use in C++.", + "maixpy": "maix.peripheral.spi.SPI.read", + "py_doc": "read data from spi\n\nArgs:\n - length: direction [in], read length, int type\n\n\nReturns: bytes data, Bytes type in C++, bytes type in MaixPy. 
You need to delete it manually after use in C++.\n" + }, + "args": [ + [ + "int", + "length", + null + ] + ], + "ret_type": "Bytes*", + "static": false, + "def": "Bytes *read(int length)", + "py_def": "def read(*args, **kwargs)" + }, + "write": { + "type": "func", + "name": "write", + "doc": { + "brief": "write data to spi", + "param": { + "data": "direction [in], data to write, Bytes type in C++, bytes type in MaixPy" + }, + "return": "write length, int type, if write failed, return -err::Err code.", + "maixpy": "maix.peripheral.spi.SPI.write", + "py_doc": "write data to spi\n\nArgs:\n - data: direction [in], data to write, Bytes type in C++, bytes type in MaixPy\n\n\nReturns: write length, int type, if write failed, return -err::Err code.\n" + }, + "args": [ + [ + "Bytes *", + "data", + null + ] + ], + "ret_type": "int", + "static": false, + "def": "int write(Bytes *data)", + "py_def": "def write(self, data: maix.Bytes(bytes)) -> int" + }, + "write_read": { + "type": "func", + "name": "write_read", + "doc": { + "brief": "write data to spi and read data from spi at the same time.", + "param": { + "data": "direction [in], data to write, Bytes type in C++, bytes type in MaixPy", + "read_len": "direction [in], read length, int type, should > 0." + }, + "return": "read data, Bytes type in C++, bytes type in MaixPy. You need to delete it manually after use in C++.", + "maixpy": "maix.peripheral.spi.SPI.write_read", + "py_doc": "write data to spi and read data from spi at the same time.\n\nArgs:\n - data: direction [in], data to write, Bytes type in C++, bytes type in MaixPy\n - read_len: direction [in], read length, int type, should > 0.\n\n\nReturns: read data, Bytes type in C++, bytes type in MaixPy. You need to delete it manually after use in C++.\n" + }, + "args": [ + [ + "Bytes *", + "data", + null + ], + [ + "int", + "read_len", + null + ] + ], + "ret_type": "Bytes*", + "static": false, + "def": "Bytes *write_read(Bytes *data, int read_len)", + "py_def": "def write_read(*args, **kwargs)" + }, + "is_busy": { + "type": "func", + "name": "is_busy", + "doc": { + "brief": "get busy status of spi", + "return": "busy status, bool type", + "maixpy": "maix.peripheral.spi.SPI.is_busy", + "py_doc": "get busy status of spi\n\nReturns: busy status, bool type\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_busy()", + "py_def": "def is_busy(self) -> bool" + } + }, + "def": "class SPI" + } + }, + "auto_add": true + }, + "pwm": { + "type": "module", + "doc": { + "brief": "maix.peripheral.pwm module" + }, + "members": { + "PWM": { + "type": "class", + "name": "PWM", + "doc": { + "brief": "Peripheral pwm class", + "maixpy": "maix.peripheral.pwm.PWM", + "py_doc": "Peripheral pwm class" + }, + "members": { + "__init__": { + "type": "func", + "name": "PWM", + "doc": { + "brief": "PWM constructor", + "param": { + "pin": "direction [in], pwm id, int type, like 0, 1, 2 etc.", + "freq": "direction [in], pwm frequency, unit: Hz. int type. default is 1000", + "duty": "direction [in], pwm duty. double type. range is [0, 100], default is 0.", + "enable": "direction [in], enable pwm output right now. bool type. default is true, if false, you need to call enable() to enable pwm output.", + "duty_val": "direction [in], pwm duty value, int type. default -1 means not set and auto calculate by freq and duty.\nThis arg directly set pwm duty value, if set, will ignore duty arg.\nduty_val = duty / 100 * T_ns, T_ns = 1 / freq * 1000000000." 
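A sketch of maix.peripheral.spi.SPI in master mode from the constructor/read/write docs above; the bus id, frequency and bytes sent are illustrative:

    from maix.peripheral import spi

    dev = spi.SPI(4, spi.Mode.MASTER, 1000000)   # bus id 4 / 1 MHz are example values
    tx = bytes([0x90, 0x00, 0x00, 0x00])
    rx = dev.write_read(tx, len(tx))             # full-duplex: send tx, read the same length back
    print(rx)
    dev.write(bytes([0x06]))                     # write-only transfer
    print(dev.read(2), dev.is_busy())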
+ }, + "throw": "If args error or init pwm failed, will throw err::Exception", + "maixpy": "maix.peripheral.pwm.PWM.__init__", + "py_doc": "PWM constructor\n\nArgs:\n - pin: direction [in], pwm id, int type, like 0, 1, 2 etc.\n - freq: direction [in], pwm frequency, unit: Hz. int type. default is 1000\n - duty: direction [in], pwm duty. double type. range is [0, 100], default is 0.\n - enable: direction [in], enable pwm output right now. bool type. default is true, if false, you need to call enable() to enable pwm output.\n - duty_val: direction [in], pwm duty value, int type. default -1 means not set and auto calculate by freq and duty.\nThis arg directly set pwm duty value, if set, will ignore duty arg.\nduty_val = duty / 100 * T_ns, T_ns = 1 / freq * 1000000000.\n" + }, + "args": [ + [ + "int", + "id", + null + ], + [ + "int", + "freq", + "1000" + ], + [ + "double", + "duty", + "0" + ], + [ + "bool", + "enable", + "true" + ], + [ + "int", + "duty_val", + "-1" + ] + ], + "ret_type": null, + "static": false, + "def": "PWM(int id, int freq = 1000, double duty = 0, bool enable = true, int duty_val = -1)", + "py_def": "def __init__(self, id: int, freq: int = 1000, duty: float = 0, enable: bool = True, duty_val: int = -1) -> None" + }, + "duty": { + "type": "func", + "name": "duty", + "doc": { + "brief": "get or set pwm duty", + "param": { + "duty": "direction [in], pwm duty, double type, value in [0, 100], default -1 means only read." + }, + "return": "current duty, float type, if set and set failed will return -err::Err", + "maixpy": "maix.peripheral.pwm.PWM.duty", + "py_doc": "get or set pwm duty\n\nArgs:\n - duty: direction [in], pwm duty, double type, value in [0, 100], default -1 means only read.\n\n\nReturns: current duty, float type, if set and set failed will return -err::Err\n" + }, + "args": [ + [ + "double", + "duty", + "-1" + ] + ], + "ret_type": "double", + "static": false, + "def": "double duty(double duty = -1)", + "py_def": "def duty(self, duty: float = -1) -> float" + }, + "duty_val": { + "type": "func", + "name": "duty_val", + "doc": { + "brief": "set pwm duty value", + "param": { + "duty_val": "direction [in], pwm duty value. int type. default is -1\nduty_val > 0 means set duty_val\nduty_val == -1 or not set, return current duty_val" + }, + "return": "int type\nwhen get duty_val, return current duty_val, else return -err::Err code.", + "maixpy": "maix.peripheral.pwm.PWM.duty_val", + "py_doc": "set pwm duty value\n\nArgs:\n - duty_val: direction [in], pwm duty value. int type. default is -1\nduty_val > 0 means set duty_val\nduty_val == -1 or not set, return current duty_val\n\n\nReturns: int type\nwhen get duty_val, return current duty_val, else return -err::Err code.\n" + }, + "args": [ + [ + "int", + "duty_val", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int duty_val(int duty_val = -1)", + "py_def": "def duty_val(self, duty_val: int = -1) -> int" + }, + "freq": { + "type": "func", + "name": "freq", + "doc": { + "brief": "get or set pwm frequency", + "param": { + "freq": "direction [in], pwm frequency. int type. default is -1\nfreq >= 0, set freq\nfreq == -1 or not set, return current freq" + }, + "return": "int type, current freq, if set and set failed will return -err::Err", + "maixpy": "maix.peripheral.pwm.PWM.freq", + "py_doc": "get or set pwm frequency\n\nArgs:\n - freq: direction [in], pwm frequency. int type. 
default is -1\nfreq >= 0, set freq\nfreq == -1 or not set, return current freq\n\n\nReturns: int type, current freq, if set and set failed will return -err::Err\n" + }, + "args": [ + [ + "int", + "freq", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int freq(int freq = -1)", + "py_def": "def freq(self, freq: int = -1) -> int" + }, + "enable": { + "type": "func", + "name": "enable", + "doc": { + "brief": "set pwm enable", + "return": "err::Err type, err.Err.ERR_NONE means success", + "maixpy": "maix.peripheral.pwm.PWM.enable", + "py_doc": "set pwm enable\n\nReturns: err::Err type, err.Err.ERR_NONE means success\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err enable()", + "py_def": "def enable(self) -> maix.err.Err" + }, + "disable": { + "type": "func", + "name": "disable", + "doc": { + "brief": "set pwm disable", + "return": "err::Err type, err.Err.ERR_NONE means success", + "maixpy": "maix.peripheral.pwm.PWM.disable", + "py_doc": "set pwm disable\n\nReturns: err::Err type, err.Err.ERR_NONE means success\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err disable()", + "py_def": "def disable(self) -> maix.err.Err" + }, + "is_enabled": { + "type": "func", + "name": "is_enabled", + "doc": { + "brief": "get pwm enable status", + "return": "bool type, true means enable, false means disable", + "maixpy": "maix.peripheral.pwm.PWM.is_enabled", + "py_doc": "get pwm enable status\n\nReturns: bool type, true means enable, false means disable\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_enabled()", + "py_def": "def is_enabled(self) -> bool" + } + }, + "def": "class PWM" + } + }, + "auto_add": true + }, + "wdt": { + "type": "module", + "doc": { + "brief": "maix.peripheral.wdt module" + }, + "members": { + "WDT": { + "type": "class", + "name": "WDT", + "doc": { + "brief": "Peripheral wdt class", + "maixpy": "maix.peripheral.wdt.WDT", + "py_doc": "Peripheral wdt class" + }, + "members": { + "__init__": { + "type": "func", + "name": "WDT", + "doc": { + "brief": "WDT constructor, after construct, the wdt will auto start.", + "param": { + "id": "direction [in], id of wdt, int type", + "feed_ms": "direction [in], feed interval, int type, unit is ms, you must feed wdt in this interval, or system will restart." 
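A short sketch of the PWM class above. The import path follows the maixpy key; channel id 7 is a placeholder, and on real hardware the pin usually has to be mapped to its PWM function first (see the pinmap module later in this file).

from maix.peripheral import pwm

p = pwm.PWM(7, freq=1000, duty=50)   # 1 kHz, 50 % duty, output enabled by default
print(p.duty())                      # calling with no argument (duty=-1) only reads
p.freq(2000)                         # switch to 2 kHz
p.disable()
print(p.is_enabled())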
+ }, + "maixpy": "maix.peripheral.wdt.WDT.__init__", + "py_doc": "WDT constructor, after construct, the wdt will auto start.\n\nArgs:\n - id: direction [in], id of wdt, int type\n - feed_ms: direction [in], feed interval, int type, unit is ms, you must feed wdt in this interval, or system will restart.\n" + }, + "args": [ + [ + "int", + "id", + null + ], + [ + "int", + "feed_ms", + null + ] + ], + "ret_type": null, + "static": false, + "def": "WDT(int id, int feed_ms)", + "py_def": "def __init__(self, id: int, feed_ms: int) -> None" + }, + "feed": { + "type": "func", + "name": "feed", + "doc": { + "brief": "feed wdt", + "return": "error code, if feed success, return err::ERR_NONE", + "maixpy": "maix.peripheral.wdt.WDT.feed", + "py_doc": "feed wdt\n\nReturns: error code, if feed success, return err::ERR_NONE\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int feed()", + "py_def": "def feed(self) -> int" + }, + "stop": { + "type": "func", + "name": "stop", + "doc": { + "brief": "stop wdt", + "maixpy": "maix.peripheral.wdt.WDT.stop", + "py_doc": "stop wdt" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int stop()", + "py_def": "def stop(self) -> int" + }, + "restart": { + "type": "func", + "name": "restart", + "doc": { + "brief": "restart wdt, stop and start watchdog timer.", + "maixpy": "maix.peripheral.wdt.WDT.restart", + "py_doc": "restart wdt, stop and start watchdog timer." + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int restart()", + "py_def": "def restart(self) -> int" + } + }, + "def": "class WDT" + } + }, + "auto_add": true + }, + "adc": { + "type": "module", + "doc": { + "brief": "maix.peripheral.adc module" + }, + "members": { + "RES_BIT_8": { + "type": "var", + "name": "", + "doc": { + "brief": "8-bit resolution, supported by the actual hardware", + "maixpy": "maix.peripheral.adc.RES_BIT_8", + "py_doc": "8-bit resolution, supported by the actual hardware" + }, + "value": "8", + "static": false, + "readonly": true, + "def": "const int RES_BIT_8 = 8" + }, + "RES_BIT_10": { + "type": "var", + "name": "", + "doc": { + "brief": "10-bit resolution, supported by the actual hardware", + "maixpy": "maix.peripheral.adc.RES_BIT_10", + "py_doc": "10-bit resolution, supported by the actual hardware" + }, + "value": "10", + "static": false, + "readonly": true, + "def": "const int RES_BIT_10 = 10" + }, + "RES_BIT_12": { + "type": "var", + "name": "", + "doc": { + "brief": "12-bit resolution, supported by the actual hardware", + "maixpy": "maix.peripheral.adc.RES_BIT_12", + "py_doc": "12-bit resolution, supported by the actual hardware" + }, + "value": "12", + "static": false, + "readonly": true, + "def": "const int RES_BIT_12 = 12" + }, + "RES_BIT_16": { + "type": "var", + "name": "", + "doc": { + "brief": "16-bit resolution, supported by the actual hardware", + "maixpy": "maix.peripheral.adc.RES_BIT_16", + "py_doc": "16-bit resolution, supported by the actual hardware" + }, + "value": "16", + "static": false, + "readonly": true, + "def": "const int RES_BIT_16 = 16" + }, + "ADC": { + "type": "class", + "name": "ADC", + "doc": { + "brief": "Peripheral adc class", + "maixpy": "maix.peripheral.adc.ADC", + "py_doc": "Peripheral adc class" + }, + "members": { + "__init__": { + "type": "func", + "name": "ADC", + "doc": { + "brief": "ADC constructor", + "param": { + "pin": "direction [in], adc pin, int type", + "resolution": "direction [in], adc resolution. 
default is -1, means use default resolution\noption:\nresolution = adc.RES_BIT_8, means 8-bit resolution\nresolution = adc.RES_BIT_10, means 10-bit resolution\nresolution = adc.RES_BIT_12, means 12-bit resolution\nresolution = adc.RES_BIT_16, means 16-bit resolution\nthe default resolution is determined by actual hardware.", + "vref": "direction [in], adc refer voltage. default is -1, means use default refer voltage.\nthe default vref is determined by actual hardware. range: [0.0, 10.0]" + }, + "maixpy": "maix.peripheral.adc.ADC.__init__", + "py_doc": "ADC constructor\n\nArgs:\n - pin: direction [in], adc pin, int type\n - resolution: direction [in], adc resolution. default is -1, means use default resolution\noption:\nresolution = adc.RES_BIT_8, means 8-bit resolution\nresolution = adc.RES_BIT_10, means 10-bit resolution\nresolution = adc.RES_BIT_12, means 12-bit resolution\nresolution = adc.RES_BIT_16, means 16-bit resolution\nthe default resolution is determined by actual hardware.\n - vref: direction [in], adc refer voltage. default is -1, means use default refer voltage.\nthe default vref is determined by actual hardware. range: [0.0, 10.0]\n" + }, + "args": [ + [ + "int", + "pin", + null + ], + [ + "int", + "resolution", + null + ], + [ + "float", + "vref", + "-1" + ] + ], + "ret_type": null, + "static": false, + "def": "ADC(int pin, int resolution, float vref = -1)", + "py_def": "def __init__(self, pin: int, resolution: int, vref: float = -1) -> None" + }, + "read": { + "type": "func", + "name": "read", + "doc": { + "brief": "read adc value", + "return": "adc data, int type\nif resolution is 8-bit, return value range is [0, 255]\nif resolution is 10-bit, return value range is [0, 1023]\nif resolution is 12-bit, return value range is [0, 4095]\nif resolution is 16-bit, return value range is [0, 65535]", + "maixpy": "maix.peripheral.adc.ADC.read", + "py_doc": "read adc value\n\nReturns: adc data, int type\nif resolution is 8-bit, return value range is [0, 255]\nif resolution is 10-bit, return value range is [0, 1023]\nif resolution is 12-bit, return value range is [0, 4095]\nif resolution is 16-bit, return value range is [0, 65535]\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int read()", + "py_def": "def read(self) -> int" + }, + "read_vol": { + "type": "func", + "name": "read_vol", + "doc": { + "brief": "read adc voltage", + "return": "adc voltage, float type\u3002the range is [0.0, vref]", + "maixpy": "maix.peripheral.adc.ADC.read_vol", + "py_doc": "read adc voltage\n\nReturns: adc voltage, float type\u3002the range is [0.0, vref]\n" + }, + "args": [], + "ret_type": "float", + "static": false, + "def": "float read_vol()", + "py_def": "def read_vol(self) -> float" + } + }, + "def": "class ADC" + } + }, + "auto_add": true + }, + "pinmap": { + "type": "module", + "doc": { + "brief": "maix.peripheral.pinmap module" + }, + "members": { + "get_pins": { + "type": "func", + "name": "get_pins", + "doc": { + "brief": "Get all pins of devices", + "return": "pin name list, string type.", + "maixpy": "maix.peripheral.pinmap.get_pins", + "py_doc": "Get all pins of devices\n\nReturns: pin name list, string type.\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector get_pins()", + "py_def": "def get_pins() -> list[str]" + }, + "get_pin_functions": { + "type": "func", + "name": "get_pin_functions", + "doc": { + "brief": "Get all function of a pin", + "param": { + "pin": "pin name, string type." 
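Reading the ADC documented above could look like the sketch below; pin 0 is a placeholder, while adc.RES_BIT_12 is one of the resolution constants defined in this module.

from maix.peripheral import adc

a = adc.ADC(0, adc.RES_BIT_12)   # 12-bit resolution, so read() returns 0..4095
raw = a.read()
volts = a.read_vol()             # voltage in [0.0, vref]
print(raw, volts)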
+ }, + "return": "function list, function name is string type.", + "throw": "If pin name error will throwout err.Err.ERR_ARGS error.", + "maixpy": "maix.peripheral.pinmap.get_pin_functions", + "py_doc": "Get all function of a pin\n\nArgs:\n - pin: pin name, string type.\n\n\nReturns: function list, function name is string type.\n" + }, + "args": [ + [ + "const std::string &", + "pin", + null + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector get_pin_functions(const std::string &pin)", + "py_def": "def get_pin_functions(pin: str) -> list[str]" + }, + "set_pin_function": { + "type": "func", + "name": "set_pin_function", + "doc": { + "brief": "Set function of a pin", + "param": { + "pin": "pin name, string type.", + "func": "which function should this pin use." + }, + "return": "if set ok, will return err.Err.ERR_NONE, else error occurs.", + "maixpy": "maix.peripheral.pinmap.set_pin_function", + "py_doc": "Set function of a pin\n\nArgs:\n - pin: pin name, string type.\n - func: which function should this pin use.\n\n\nReturns: if set ok, will return err.Err.ERR_NONE, else error occurs.\n" + }, + "args": [ + [ + "const std::string &", + "pin", + null + ], + [ + "const std::string &", + "func", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err set_pin_function(const std::string &pin, const std::string &func)", + "py_def": "def set_pin_function(pin: str, func: str) -> maix.err.Err" + } + }, + "auto_add": true + }, + "uart": { + "type": "module", + "doc": { + "brief": "maix uart peripheral driver", + "maixpy": "maix.peripheral.uart", + "py_doc": "maix uart peripheral driver" + }, + "members": { + "PARITY": { + "type": "enum", + "name": "PARITY", + "doc": { + "brief": "uart parity enum", + "maixpy": "maix.peripheral.uart.PARITY", + "py_doc": "uart parity enum" + }, + "values": [ + [ + "PARITY_NONE", + "0x00", + "no parity" + ], + [ + "PARITY_ODD", + "0x01", + "odd parity" + ], + [ + "PARITY_EVEN", + "0x02", + "even parity" + ], + [ + "PARITY_MAX", + "", + "" + ] + ], + "def": "enum PARITY\n {\n PARITY_NONE = 0x00, // no parity\n PARITY_ODD = 0x01, // odd parity\n PARITY_EVEN = 0x02, // even parity\n PARITY_MAX\n }" + }, + "STOP": { + "type": "enum", + "name": "STOP", + "doc": { + "brief": "uart stop bits", + "maixpy": "maix.peripheral.uart.STOP", + "py_doc": "uart stop bits" + }, + "values": [ + [ + "STOP_1", + "0x01", + "1 stop bit" + ], + [ + "STOP_2", + "0x02", + "2 stop bits" + ], + [ + "STOP_1_5", + "0x03", + "1.5 stop bits" + ], + [ + "STOP_MAX", + "", + "" + ] + ], + "def": "enum STOP\n {\n STOP_1 = 0x01, // 1 stop bit\n STOP_2 = 0x02, // 2 stop bits\n STOP_1_5 = 0x03, // 1.5 stop bits\n STOP_MAX\n }" + }, + "BITS": { + "type": "enum", + "name": "BITS", + "doc": { + "brief": "uart stop bits", + "maixpy": "maix.peripheral.uart.BITS", + "py_doc": "uart stop bits" + }, + "values": [ + [ + "BITS_5", + "5", + "5 data bits" + ], + [ + "BITS_6", + "6", + "6 data bits" + ], + [ + "BITS_7", + "7", + "7 data bits" + ], + [ + "BITS_8", + "8", + "8 data bits" + ], + [ + "BITS_MAX", + "", + "" + ] + ], + "def": "enum BITS\n {\n BITS_5 = 5, // 5 data bits\n BITS_6 = 6, // 6 data bits\n BITS_7 = 7, // 7 data bits\n BITS_8 = 8, // 8 data bits\n BITS_MAX\n }" + }, + "FLOW_CTRL": { + "type": "enum", + "name": "FLOW_CTRL", + "doc": { + "brief": "uart flow control", + "maixpy": "maix.peripheral.uart.FLOW_CTRL", + "py_doc": "uart flow control" + }, + "values": [ + [ + "FLOW_CTRL_NONE", + "0", + "no flow control" + ], + [ + "FLOW_CTRL_HW", + "1", + 
"hardware flow control" + ], + [ + "FLOW_CTRL_MAX", + "", + "" + ] + ], + "def": "enum FLOW_CTRL\n {\n FLOW_CTRL_NONE = 0, // no flow control\n FLOW_CTRL_HW = 1, // hardware flow control\n FLOW_CTRL_MAX\n }" + }, + "list_devices": { + "type": "func", + "name": "list_devices", + "doc": { + "brief": "Get supported uart ports.", + "return": "uart ports list, string type.", + "maixpy": "maix.peripheral.uart.list_devices", + "py_doc": "Get supported uart ports.\n\nReturns: uart ports list, string type.\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector list_devices()", + "py_def": "def list_devices() -> list[str]" + }, + "UART": { + "type": "class", + "name": "UART", + "doc": { + "brief": "maix uart peripheral driver", + "maixpy": "maix.peripheral.uart.UART", + "py_doc": "maix uart peripheral driver" + }, + "members": { + "__init__": { + "type": "func", + "name": "UART", + "doc": { + "brief": "UART constructor. You need to call open() to open the device.", + "param": { + "port": "uart port. string type, can get it by uart.list_devices().\nIf empty, will not open device in constructor, default empty.\nif not empty, will auto open device in constructor, open fail will throw err.Exception.", + "baudrate": "baudrate of uart. int type, default 115200.", + "databits": "databits, values @see uart.DATA_BITS", + "parity": "parity, values @see uart.PARITY", + "stopbits": "stopbits, values @see uart.STOP_BITS", + "flow_control": "flow_control, values @see uart.FLOW_CTRL" + }, + "maixpy": "maix.peripheral.uart.UART.__init__", + "py_doc": "UART constructor. You need to call open() to open the device.\n\nArgs:\n - port: uart port. string type, can get it by uart.list_devices().\nIf empty, will not open device in constructor, default empty.\nif not empty, will auto open device in constructor, open fail will throw err.Exception.\n - baudrate: baudrate of uart. int type, default 115200.\n - databits: databits, values @see uart.DATA_BITS\n - parity: parity, values @see uart.PARITY\n - stopbits: stopbits, values @see uart.STOP_BITS\n - flow_control: flow_control, values @see uart.FLOW_CTRL\n" + }, + "args": [ + [ + "const std::string &", + "port", + "\"\"" + ], + [ + "int", + "baudrate", + "115200" + ], + [ + "uart::BITS", + "databits", + "uart::BITS_8" + ], + [ + "uart::PARITY", + "parity", + "uart::PARITY_NONE" + ], + [ + "uart::STOP", + "stopbits", + "uart::STOP_1" + ], + [ + "uart::FLOW_CTRL", + "flow_ctrl", + "uart::FLOW_CTRL_NONE" + ] + ], + "ret_type": null, + "static": false, + "def": "UART(const std::string &port = \"\", int baudrate = 115200, uart::BITS databits = uart::BITS_8,\n uart::PARITY parity = uart::PARITY_NONE, uart::STOP stopbits = uart::STOP_1,\n uart::FLOW_CTRL flow_ctrl = uart::FLOW_CTRL_NONE)", + "py_def": "def __init__(self, port: str = '', baudrate: int = 115200, databits: BITS = ..., parity: PARITY = ..., stopbits: STOP = ..., flow_ctrl: FLOW_CTRL = ...) -> None" + }, + "set_port": { + "type": "func", + "name": "set_port", + "doc": { + "brief": "Set port", + "param": { + "port": "uart port. string type, can get it by uart.list_devices()." + }, + "return": "set port error code, err.Err type.", + "maixpy": "maix.peripheral.uart.UART.set_port", + "py_doc": "Set port\n\nArgs:\n - port: uart port. 
string type, can get it by uart.list_devices().\n\n\nReturns: set port error code, err.Err type.\n" + }, + "args": [ + [ + "const std::string &", + "port", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err set_port(const std::string &port)", + "py_def": "def set_port(self, port: str) -> maix.err.Err" + }, + "get_port": { + "type": "func", + "name": "get_port", + "doc": { + "brief": "Get port", + "return": "uart port, string type.", + "maixpy": "maix.peripheral.uart.UART.get_port", + "py_doc": "Get port\n\nReturns: uart port, string type.\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string get_port()", + "py_def": "def get_port(self) -> str" + }, + "set_baudrate": { + "type": "func", + "name": "set_baudrate", + "doc": { + "brief": "Set baud rate", + "param": { + "baudrate": "baudrate of uart. int type, default 115200." + }, + "return": "set baud rate error code, err.Err type.", + "maixpy": "maix.peripheral.uart.UART.set_baudrate", + "py_doc": "Set baud rate\n\nArgs:\n - baudrate: baudrate of uart. int type, default 115200.\n\n\nReturns: set baud rate error code, err.Err type.\n" + }, + "args": [ + [ + "int", + "baudrate", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err set_baudrate(int baudrate)", + "py_def": "def set_baudrate(self, baudrate: int) -> maix.err.Err" + }, + "get_baudrate": { + "type": "func", + "name": "get_baudrate", + "doc": { + "brief": "Get baud rate", + "return": "baud rate, int type.", + "maixpy": "maix.peripheral.uart.UART.get_baudrate", + "py_doc": "Get baud rate\n\nReturns: baud rate, int type.\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int get_baudrate()", + "py_def": "def get_baudrate(self) -> int" + }, + "open": { + "type": "func", + "name": "open", + "doc": { + "brief": "Open uart device, before open, port must be set in constructor or by set_port().\\nIf already opened, do nothing and return err.ERR_NONE.", + "return": "open device error code, err.Err type.", + "maixpy": "maix.peripheral.uart.UART.open", + "py_doc": "Open uart device, before open, port must be set in constructor or by set_port().\nIf already opened, do nothing and return err.ERR_NONE.\n\nReturns: open device error code, err.Err type.\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err open()", + "py_def": "def open(self) -> maix.err.Err" + }, + "is_open": { + "type": "func", + "name": "is_open", + "doc": { + "brief": "Check if device is opened.", + "return": "true if opened, false if not opened.", + "maixpy": "maix.peripheral.uart.UART.is_open", + "py_doc": "Check if device is opened.\n\nReturns: true if opened, false if not opened.\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_open()", + "py_def": "def is_open(self) -> bool" + }, + "close": { + "type": "func", + "name": "close", + "doc": { + "brief": "Close uart device, if already closed, do nothing and return err.ERR_NONE.", + "return": "close device error code, err.Err type.", + "maixpy": "maix.peripheral.uart.UART.close", + "py_doc": "Close uart device, if already closed, do nothing and return err.ERR_NONE.\n\nReturns: close device error code, err.Err type.\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err close()", + "py_def": "def close(self) -> maix.err.Err" + }, + "set_received_callback": { + "type": "func", + "name": "set_received_callback", + "doc": { + "brief": "Set received callback function", + 
"param": { + "callback": "function to call when received data" + }, + "maixpy": "maix.peripheral.uart.UART.set_received_callback", + "py_doc": "Set received callback function\n\nArgs:\n - callback: function to call when received data\n" + }, + "args": [ + [ + "std::function", + "callback", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void set_received_callback(std::function callback)", + "py_def": "def set_received_callback(self, callback: typing.Callable[[UART, maix.Bytes(bytes)], None]) -> None" + }, + "write_str": { + "type": "func", + "name": "write_str", + "doc": { + "brief": "Send string data", + "param": { + "str": "string data" + }, + "return": "sent data length, < 0 means error, value is -err.Err.", + "maixpy": "maix.peripheral.uart.UART.write_str", + "py_doc": "Send string data\n\nArgs:\n - str: string data\n\n\nReturns: sent data length, < 0 means error, value is -err.Err.\n" + }, + "args": [ + [ + "const std::string &", + "str", + null + ] + ], + "ret_type": "int", + "static": false, + "def": "int write_str(const std::string &str)", + "py_def": "def write_str(self, str: str) -> int" + }, + "write": { + "type": "func", + "name": "write", + "doc": { + "brief": "Send data to uart", + "param": { + "data": "direction [in], data to send, bytes type. If you want to send str type, use str.encode() to convert." + }, + "return": "sent length, int type, if < 0 means error, value is -err.Err.", + "maixpy": "maix.peripheral.uart.UART.write", + "py_doc": "Send data to uart\n\nArgs:\n - data: direction [in], data to send, bytes type. If you want to send str type, use str.encode() to convert.\n\n\nReturns: sent length, int type, if < 0 means error, value is -err.Err.\n" + }, + "args": [ + [ + "Bytes &", + "data", + null + ] + ], + "ret_type": "int", + "static": false, + "def": "int write(Bytes &data)", + "py_def": "def write(self, data: maix.Bytes(bytes)) -> int" + }, + "available": { + "type": "func", + "name": "available", + "doc": { + "brief": "Check if data available or wait data available.", + "param": { + "timeout": "unit ms, timeout to wait data, default 0.\n0 means check data available and return immediately,\n> 0 means wait until data available or timeout.\n- 1 means wait until data available." 
+ }, + "return": "available data number, 0 if timeout or no data, <0 if error, value is -err.Err, can be err::ERR_IO\uff0c err::ERR_CANCEL, err::ERR_NOT_OPEN.", + "throw": "err.Exception if fatal error.", + "maixpy": "maix.peripheral.uart.UART.available", + "py_doc": "Check if data available or wait data available.\n\nArgs:\n - timeout: unit ms, timeout to wait data, default 0.\n0 means check data available and return immediately,\n> 0 means wait until data available or timeout.\n- 1 means wait until data available.\n\n\nReturns: available data number, 0 if timeout or no data, <0 if error, value is -err.Err, can be err::ERR_IO\uff0c err::ERR_CANCEL, err::ERR_NOT_OPEN.\n" + }, + "args": [ + [ + "int", + "timeout", + "0" + ] + ], + "ret_type": "int", + "static": false, + "def": "int available(int timeout = 0)", + "py_def": "def available(self, timeout: int = 0) -> int" + }, + "read": { + "type": "func", + "name": "read", + "doc": { + "brief": "Recv data from uart", + "param": { + "len": "max data length want to receive, default -1.\n-1 means read data in uart receive buffer.\n>0 means read len data want to receive.\nother values is invalid.", + "timeout": "unit ms, timeout to receive data, default 0.\n0 means read data in uart receive buffer and return immediately,\n-1 means block until read len data,\n>0 means block until read len data or timeout." + }, + "return": "received data, bytes type.\nAttention, you need to delete the returned object yourself in C++.", + "throw": "Read failed will raise err.Exception error.", + "maixpy": "maix.peripheral.uart.UART.read", + "py_doc": "Recv data from uart\n\nArgs:\n - len: max data length want to receive, default -1.\n-1 means read data in uart receive buffer.\n>0 means read len data want to receive.\nother values is invalid.\n - timeout: unit ms, timeout to receive data, default 0.\n0 means read data in uart receive buffer and return immediately,\n-1 means block until read len data,\n>0 means block until read len data or timeout.\n\n\nReturns: received data, bytes type.\nAttention, you need to delete the returned object yourself in C++.\n" + }, + "args": [ + [ + "int", + "len", + "-1" + ], + [ + "int", + "timeout", + "0" + ] + ], + "ret_type": "Bytes*", + "static": false, + "def": "Bytes *read(int len = -1, int timeout = 0)", + "py_def": "def read(*args, **kwargs)" + }, + "readline": { + "type": "func", + "name": "readline", + "doc": { + "brief": "Read line from uart, that is read until '\\n' or '\\r\\n'.", + "param": { + "timeout": "unit ms, timeout to receive data, default -1 means block until read '\\n' or '\\r\\n'.\n> 0 means block until read '\\n' or '\\r\\n' or timeout." + }, + "return": "received data, bytes type. If timeout will return the current received data despite not read '\\n' or '\\r\\n'.\ne.g. If we want to read b'123\\n', but when we only read b'12', timeout, then return b'12'.", + "maixpy": "maix.peripheral.uart.UART.readline", + "py_doc": "Read line from uart, that is read until '\\n' or '\\r\\n'.\n\nArgs:\n - timeout: unit ms, timeout to receive data, default -1 means block until read '\\n' or '\\r\\n'.\n> 0 means block until read '\\n' or '\\r\\n' or timeout.\n\n\nReturns: received data, bytes type. If timeout will return the current received data despite not read '\\n' or '\\r\\n'.\ne.g. 
If we want to read b'123\\n', but when we only read b'12', timeout, then return b'12'.\n" + }, + "args": [ + [ + "int", + "timeout", + "-1" + ] + ], + "ret_type": "Bytes*", + "static": false, + "def": "Bytes *readline(int timeout = -1)", + "py_def": "def readline(*args, **kwargs)" + } + }, + "def": "class UART : public comm::CommBase" + } + }, + "auto_add": false + }, + "gpio": { + "type": "module", + "doc": { + "brief": "maix.peripheral.gpio module" + }, + "members": { + "Mode": { + "type": "enum", + "name": "Mode", + "doc": { + "brief": "GPIO mode", + "maixpy": "maix.peripheral.gpio.Mode", + "py_doc": "GPIO mode" + }, + "values": [ + [ + "IN", + "0x01", + "input mode" + ], + [ + "OUT", + "0x02", + "output mode" + ], + [ + "OUT_OD", + "0x03", + "output open drain mode" + ], + [ + "MODE_MAX", + "", + "" + ] + ], + "def": "enum Mode\n {\n IN = 0x01, // input mode\n OUT = 0x02, // output mode\n OUT_OD = 0x03, // output open drain mode\n MODE_MAX\n }" + }, + "Pull": { + "type": "enum", + "name": "Pull", + "doc": { + "brief": "GPIO pull mode", + "maixpy": "maix.peripheral.gpio.Pull", + "py_doc": "GPIO pull mode" + }, + "values": [ + [ + "PULL_NONE", + "0x00", + "pull none mode" + ], + [ + "PULL_UP", + "0x01", + "pull up mode" + ], + [ + "PULL_DOWN", + "0x02", + "pull down mode" + ], + [ + "PULL_MAX", + "", + "" + ] + ], + "def": "enum Pull\n {\n PULL_NONE = 0x00, // pull none mode\n PULL_UP = 0x01, // pull up mode\n PULL_DOWN = 0x02, // pull down mode\n PULL_MAX\n }" + }, + "GPIO": { + "type": "class", + "name": "GPIO", + "doc": { + "brief": "Peripheral gpio class", + "maixpy": "maix.peripheral.gpio.GPIO", + "py_doc": "Peripheral gpio class" + }, + "members": { + "__init__": { + "type": "func", + "name": "GPIO", + "doc": { + "brief": "GPIO constructor", + "param": { + "pin": "direction [in], gpio pin name, string type the same as board's pin name, e.g. \"B14\" or \"GPIOB14\", or number string like \"10\" if board no gpiochipe name.", + "mode": "direction [in], gpio mode. gpio.Mode type, default is gpio.Mode.IN (input) mode.", + "pull": "direction [in], gpio pull. gpio.Pull type, default is gpio.Pull.PULL_NONE (pull none) mode.\nFor input mode, this will set gpio default status(value), if set to gpio.Pull.PULL_NONE, gpio value will be floating.\nFor output mode, this will set gpio default status(value), if set to gpio.Pull.PULL_UP, gpio value will be 1, else 0." + }, + "throw": "err::Exception if open gpio device failed.", + "maixpy": "maix.peripheral.gpio.GPIO.__init__", + "py_doc": "GPIO constructor\n\nArgs:\n - pin: direction [in], gpio pin name, string type the same as board's pin name, e.g. \"B14\" or \"GPIOB14\", or number string like \"10\" if board no gpiochipe name.\n - mode: direction [in], gpio mode. gpio.Mode type, default is gpio.Mode.IN (input) mode.\n - pull: direction [in], gpio pull. 
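An end-to-end sketch of the UART class above; the port comes from list_devices(), while the baud rate and the 2000 ms timeout are arbitrary example values.

from maix.peripheral import uart

ports = uart.list_devices()
dev = uart.UART(ports[0], 115200)   # a non-empty port is opened in the constructor
dev.write_str("hello\r\n")
if dev.available(2000) > 0:         # wait up to 2000 ms for incoming data
    print(dev.read())               # default len=-1 reads the whole receive buffer
dev.close()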
gpio.Pull type, default is gpio.Pull.PULL_NONE (pull none) mode.\nFor input mode, this will set gpio default status(value), if set to gpio.Pull.PULL_NONE, gpio value will be floating.\nFor output mode, this will set gpio default status(value), if set to gpio.Pull.PULL_UP, gpio value will be 1, else 0.\n" + }, + "args": [ + [ + "std::string", + "pin", + null + ], + [ + "gpio::Mode", + "mode", + "gpio::Mode::IN" + ], + [ + "gpio::Pull", + "pull", + "gpio::Pull::PULL_NONE" + ] + ], + "ret_type": null, + "static": false, + "def": "GPIO(std::string pin, gpio::Mode mode = gpio::Mode::IN, gpio::Pull pull = gpio::Pull::PULL_NONE)", + "py_def": "def __init__(self, pin: str, mode: Mode = ..., pull: Pull = ...) -> None" + }, + "value": { + "type": "func", + "name": "value", + "doc": { + "brief": "set and get gpio value", + "param": { + "value": "direction [in], gpio value. int type.\n0, means write gpio to low level\n1, means write gpio to high level\n-1, means read gpio value, not set" + }, + "return": "int type, return gpio value, can be 0 or 1", + "maixpy": "maix.peripheral.gpio.GPIO.value", + "py_doc": "set and get gpio value\n\nArgs:\n - value: direction [in], gpio value. int type.\n0, means write gpio to low level\n1, means write gpio to high level\n-1, means read gpio value, not set\n\n\nReturns: int type, return gpio value, can be 0 or 1\n" + }, + "args": [ + [ + "int", + "value", + "-1" + ] + ], + "ret_type": "int", + "static": false, + "def": "int value(int value = -1)", + "py_def": "def value(self, value: int = -1) -> int" + }, + "high": { + "type": "func", + "name": "high", + "doc": { + "brief": "set gpio high (value to 1)", + "maixpy": "maix.peripheral.gpio.GPIO.high", + "py_doc": "set gpio high (value to 1)" + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void high()", + "py_def": "def high(self) -> None" + }, + "low": { + "type": "func", + "name": "low", + "doc": { + "brief": "set gpio low (value to 0)", + "maixpy": "maix.peripheral.gpio.GPIO.low", + "py_doc": "set gpio low (value to 0)" + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void low()", + "py_def": "def low(self) -> None" + }, + "toggle": { + "type": "func", + "name": "toggle", + "doc": { + "brief": "gpio toggle", + "maixpy": "maix.peripheral.gpio.GPIO.toggle", + "py_doc": "gpio toggle" + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void toggle()", + "py_def": "def toggle(self) -> None" + }, + "get_mode": { + "type": "func", + "name": "get_mode", + "doc": { + "brief": "gpio get mode", + "maixpy": "maix.peripheral.gpio.GPIO.get_mode", + "py_doc": "gpio get mode" + }, + "args": [], + "ret_type": "gpio::Mode", + "static": false, + "def": "gpio::Mode get_mode()", + "py_def": "def get_mode(self) -> Mode" + }, + "get_pull": { + "type": "func", + "name": "get_pull", + "doc": { + "brief": "get gpio pull", + "return": "gpio::Pull type", + "maixpy": "maix.peripheral.gpio.GPIO.get_pull", + "py_doc": "get gpio pull\n\nReturns: gpio::Pull type\n" + }, + "args": [], + "ret_type": "gpio::Pull", + "static": false, + "def": "gpio::Pull get_pull()", + "py_def": "def get_pull(self) -> Pull" + }, + "reset": { + "type": "func", + "name": "reset", + "doc": { + "brief": "reset gpio", + "param": { + "mode": "direction [in], gpio mode. gpio.Mode type", + "pull": "direction [in], gpio pull. 
gpio.Pull type\nFor input mode, this will set gpio default status(value), if set to gpio.Pull.PULL_NONE, gpio value will be floating.\nFor output mode, this will set gpio default status(value), if set to gpio.Pull.PULL_UP, gpio value will be 1, else 0." + }, + "return": "err::Err type", + "maixpy": "maix.peripheral.gpio.GPIO.reset", + "py_doc": "reset gpio\n\nArgs:\n - mode: direction [in], gpio mode. gpio.Mode type\n - pull: direction [in], gpio pull. gpio.Pull type\nFor input mode, this will set gpio default status(value), if set to gpio.Pull.PULL_NONE, gpio value will be floating.\nFor output mode, this will set gpio default status(value), if set to gpio.Pull.PULL_UP, gpio value will be 1, else 0.\n\n\nReturns: err::Err type\n" + }, + "args": [ + [ + "gpio::Mode", + "mode", + null + ], + [ + "gpio::Pull", + "pull", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err reset(gpio::Mode mode, gpio::Pull pull)", + "py_def": "def reset(self, mode: Mode, pull: Pull) -> maix.err.Err" + } + }, + "def": "class GPIO" + } + }, + "auto_add": true + }, + "hid": { + "type": "module", + "doc": { + "brief": "maix.peripheral.hid module" + }, + "members": { + "DeviceType": { + "type": "enum", + "name": "DeviceType", + "doc": { + "brief": "Device enum of hid", + "maixpy": "maix.peripheral.hid.DeviceType", + "py_doc": "Device enum of hid" + }, + "values": [ + [ + "DEVICE_MOUSE", + "0", + "" + ], + [ + "DEVICE_KEYBOARD", + "", + "" + ], + [ + "DEVICE_TOUCHPAD", + "", + "" + ] + ], + "def": "enum DeviceType {\n DEVICE_MOUSE = 0,\n DEVICE_KEYBOARD,\n DEVICE_TOUCHPAD\n }" + }, + "Hid": { + "type": "class", + "name": "Hid", + "doc": { + "brief": "Hid class", + "maixpy": "maix.peripheral.hid.Hid", + "py_doc": "Hid class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Hid", + "doc": { + "brief": "Hid Device constructor", + "param": { + "device_type": "Device type, used to select mouse, keyboard, or touchpad.", + "open": "auto open device in constructor, if false, you need call open() to open device" + }, + "maixpy": "maix.peripheral.hid.Hid.__init__", + "py_doc": "Hid Device constructor\n\nArgs:\n - device_type: Device type, used to select mouse, keyboard, or touchpad.\n - open: auto open device in constructor, if false, you need call open() to open device\n" + }, + "args": [ + [ + "hid::DeviceType", + "device_type", + null + ], + [ + "bool", + "open", + "true" + ] + ], + "ret_type": null, + "static": false, + "def": "Hid(hid::DeviceType device_type, bool open = true)", + "py_def": "def __init__(self, device_type: DeviceType, open: bool = True) -> None" + }, + "open": { + "type": "func", + "name": "open", + "doc": { + "brief": "Open hid device", + "return": "err::Err", + "maixpy": "maix.peripheral.hid.Hid.open", + "py_doc": "Open hid device\n\nReturns: err::Err\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err open()", + "py_def": "def open(self) -> maix.err.Err" + }, + "close": { + "type": "func", + "name": "close", + "doc": { + "brief": "Close hid device", + "return": "err::Err", + "maixpy": "maix.peripheral.hid.Hid.close", + "py_doc": "Close hid device\n\nReturns: err::Err\n" + }, + "args": [], + "ret_type": "err::Err", + "static": false, + "def": "err::Err close()", + "py_def": "def close(self) -> maix.err.Err" + }, + "write": { + "type": "func", + "name": "write", + "doc": { + "brief": "Write data to hid device", + "param": { + "data": "data to write\nFor the keyboard, 8 bytes of data need to be written, with the format as 
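Driving a pin with the GPIO class above might look like the sketch below. "B14" is simply the example name used in the constructor doc; the pin must exist on your board and already be routed to its GPIO function.

import time
from maix.peripheral import gpio

led = gpio.GPIO("B14", gpio.Mode.OUT, gpio.Pull.PULL_UP)  # PULL_UP: output starts at 1
for _ in range(5):
    led.toggle()
    time.sleep(0.5)
print(led.value())   # value() with the default -1 only reads the current level
led.low()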
follows:\ndata = [0x00, #\n0x00, #\n0x00, # Key value. Refer to the \"Universal Serial Bus HID Usage Tables\" section of the official documentation(https://www.usb.org).\n0x00, #\n0x00, #\n0x00, #\n0x00, #\n0x00] #\nFor the mouse, 4 bytes of data need to be written, with the format as follows:\ndata = [0x00, # Button state\n0x00: no button pressed\n0x01: press left button\n0x02: press right button\n0x04: press middle button\nx, # X-axis relative coordinates. Signed number, positive values for x indicate movement to the right\ny, # Y-axis relative coordinates. Signed number, positive values for y indicate movement downward\n0x00] # Wheel movement. Signed number, positive values indicate downward movement.\nFor the touchpad, 6 bytes of data need to be written, with the format as follows:\ndata = [0x00, # Button state (0: no button pressed, 0x01: press left button, 0x10, press right button.)\nx & 0xFF, (x >> 8) & 0xFF, # X-axis absolute coordinate, 0 means unused.\nNote: You must map the target position to the range [0x1, 0x7FFF]. This means x value = * 0x7FFF / \ny & 0xFF, (y >> 8) & 0xFF, # Y-axis absolute coordinate, 0 means unused.\nNote: You must map the target position to the range [0x1, 0x7FFF]. This means y value = * 0x7FFF / \n0x00, # Wheel movement. Signed number, positive values indicate downward movement." + }, + "return": "err::Err", + "maixpy": "maix.peripheral.hid.Hid.write", + "py_doc": "Write data to hid device\n\nArgs:\n - data: data to write\nFor the keyboard, 8 bytes of data need to be written, with the format as follows:\ndata = [0x00, #\n0x00, #\n0x00, # Key value. Refer to the \"Universal Serial Bus HID Usage Tables\" section of the official documentation(https://www.usb.org).\n0x00, #\n0x00, #\n0x00, #\n0x00, #\n0x00] #\nFor the mouse, 4 bytes of data need to be written, with the format as follows:\ndata = [0x00, # Button state\n0x00: no button pressed\n0x01: press left button\n0x02: press right button\n0x04: press middle button\nx, # X-axis relative coordinates. Signed number, positive values for x indicate movement to the right\ny, # Y-axis relative coordinates. Signed number, positive values for y indicate movement downward\n0x00] # Wheel movement. Signed number, positive values indicate downward movement.\nFor the touchpad, 6 bytes of data need to be written, with the format as follows:\ndata = [0x00, # Button state (0: no button pressed, 0x01: press left button, 0x10, press right button.)\nx & 0xFF, (x >> 8) & 0xFF, # X-axis absolute coordinate, 0 means unused.\nNote: You must map the target position to the range [0x1, 0x7FFF]. This means x value = * 0x7FFF / \ny & 0xFF, (y >> 8) & 0xFF, # Y-axis absolute coordinate, 0 means unused.\nNote: You must map the target position to the range [0x1, 0x7FFF]. This means y value = * 0x7FFF / \n0x00, # Wheel movement. 
Signed number, positive values indicate downward movement.\n\n\nReturns: err::Err\n" + }, + "args": [ + [ + "std::vector &", + "data", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err write(std::vector &data)", + "py_def": "def write(self, data: list[int]) -> maix.err.Err" + }, + "is_opened": { + "type": "func", + "name": "is_opened", + "doc": { + "brief": "Check if hid device is opened", + "return": "bool", + "maixpy": "maix.peripheral.hid.Hid.is_opened", + "py_doc": "Check if hid device is opened\n\nReturns: bool\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool is_opened()", + "py_def": "def is_opened(self) -> bool" + } + }, + "def": "class Hid" + } + }, + "auto_add": true + }, + "timer": { + "type": "module", + "doc": { + "brief": "maix.peripheral.timer module" + }, + "members": { + "TIMER": { + "type": "class", + "name": "TIMER", + "doc": { + "brief": "Peripheral timer class", + "maixpy": "maix.peripheral.timer.TIMER", + "py_doc": "Peripheral timer class" + }, + "members": { + "__init__": { + "type": "func", + "name": "TIMER", + "doc": { + "brief": "TIMER constructor", + "maixpy": "maix.peripheral.timer.TIMER.__init__", + "py_doc": "TIMER constructor" + }, + "args": [], + "ret_type": null, + "static": false, + "def": "TIMER()", + "py_def": "def __init__(self) -> None" + } + }, + "def": "class TIMER" + } + }, + "auto_add": true + } + }, + "auto_add": false + }, + "nn": { + "type": "module", + "doc": { + "brief": "maix.nn module" + }, + "members": { + "NanoTrack": { + "type": "class", + "name": "NanoTrack", + "doc": { + "brief": "NanoTrack class", + "maixpy": "maix.nn.NanoTrack", + "py_doc": "NanoTrack class" + }, + "members": { + "__init__": { + "type": "func", + "name": "NanoTrack", + "doc": { + "brief": "Constructor of NanoTrack class", + "param": { + "model": "model path, default empty, you can load model later by load function." + }, + "throw": "If model arg is not empty and load failed, will throw err::Exception.", + "maixpy": "maix.nn.NanoTrack.__init__", + "maixcdk": "maix.nn.NanoTrack.NanoTrack", + "py_doc": "Constructor of NanoTrack class\n\nArgs:\n - model: model path, default empty, you can load model later by load function.\n" + }, + "args": [ + [ + "const string &", + "model", + "\"\"" + ] + ], + "ret_type": null, + "static": false, + "def": "NanoTrack(const string &model = \"\")", + "py_def": "def __init__(self, model: str = '') -> None" + }, + "load": { + "type": "func", + "name": "load", + "doc": { + "brief": "Load model from file", + "param": { + "model": "Model path want to load" + }, + "return": "err::Err", + "maixpy": "maix.nn.NanoTrack.load", + "py_doc": "Load model from file\n\nArgs:\n - model: Model path want to load\n\n\nReturns: err::Err\n" + }, + "args": [ + [ + "const string &", + "model", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err load(const string &model)", + "py_def": "def load(self, model: str) -> maix.err.Err" + }, + "init": { + "type": "func", + "name": "init", + "doc": { + "brief": "Init tracker, give tacker first target image and target position.", + "param": { + "img": "Image want to detect, target should be in this image.", + "x": "the target position left top coordinate x.", + "y": "the target position left top coordinate y.", + "w": "the target width.", + "h": "the target height." 
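Using the Hid class above as a USB mouse, following the 4-byte report format described in write() (button state, signed x offset, signed y offset, wheel); this assumes the board's HID mouse device is enabled.

from maix.peripheral import hid

mouse = hid.Hid(hid.DeviceType.DEVICE_MOUSE)  # opened in the constructor by default
mouse.write([0x00, 10, 10, 0x00])             # no button, move 10 px right and 10 px down
print(mouse.is_opened())
mouse.close()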
+ }, + "throw": "If image format not match model input format, will throw err::Exception.", + "maixpy": "maix.nn.NanoTrack.init", + "py_doc": "Init tracker, give tacker first target image and target position.\n\nArgs:\n - img: Image want to detect, target should be in this image.\n - x: the target position left top coordinate x.\n - y: the target position left top coordinate y.\n - w: the target width.\n - h: the target height.\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "int", + "w", + null + ], + [ + "int", + "h", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void init(image::Image &img, int x, int y, int w, int h)", + "py_def": "def init(self, img: maix.image.Image, x: int, y: int, w: int, h: int) -> None" + }, + "track": { + "type": "func", + "name": "track", + "doc": { + "brief": "Track object acoording to last object position and the init function learned target feature.", + "param": { + "img": "image to detect object and track, can be any resolution, before detect it will crop a area according to last time target's position.", + "threshold": "If score < threshold, will see this new detection is invalid, but remain return this new detecion, default 0.9." + }, + "return": "object, position and score, and detect area in points's first 4 element(x, y, w, h, center_x, center_y, input_size, target_size)", + "maixpy": "maix.nn.NanoTrack.track", + "py_doc": "Track object acoording to last object position and the init function learned target feature.\n\nArgs:\n - img: image to detect object and track, can be any resolution, before detect it will crop a area according to last time target's position.\n - threshold: If score < threshold, will see this new detection is invalid, but remain return this new detecion, default 0.9.\n\n\nReturns: object, position and score, and detect area in points's first 4 element(x, y, w, h, center_x, center_y, input_size, target_size)\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "float", + "threshold", + "0.9" + ] + ], + "ret_type": "nn::Object", + "static": false, + "def": "nn::Object track(image::Image &img, float threshold = 0.9)", + "py_def": "def track(self, img: maix.image.Image, threshold: float = 0.9) -> ..." 
+ }, + "input_size": { + "type": "func", + "name": "input_size", + "doc": { + "brief": "Get model input size", + "return": "model input size", + "maixpy": "maix.nn.NanoTrack.input_size", + "py_doc": "Get model input size\n\nReturns: model input size\n" + }, + "args": [], + "ret_type": "image::Size", + "static": false, + "def": "image::Size input_size()", + "py_def": "def input_size(self) -> maix.image.Size" + }, + "input_width": { + "type": "func", + "name": "input_width", + "doc": { + "brief": "Get model input width", + "return": "model input size of width", + "maixpy": "maix.nn.NanoTrack.input_width", + "py_doc": "Get model input width\n\nReturns: model input size of width\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_width()", + "py_def": "def input_width(self) -> int" + }, + "input_height": { + "type": "func", + "name": "input_height", + "doc": { + "brief": "Get model input height", + "return": "model input size of height", + "maixpy": "maix.nn.NanoTrack.input_height", + "py_doc": "Get model input height\n\nReturns: model input size of height\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_height()", + "py_def": "def input_height(self) -> int" + }, + "input_format": { + "type": "func", + "name": "input_format", + "doc": { + "brief": "Get input image format", + "return": "input image format, image::Format type.", + "maixpy": "maix.nn.NanoTrack.input_format", + "py_doc": "Get input image format\n\nReturns: input image format, image::Format type.\n" + }, + "args": [], + "ret_type": "image::Format", + "static": false, + "def": "image::Format input_format()", + "py_def": "def input_format(self) -> maix.image.Format" + }, + "mean": { + "type": "var", + "name": "mean", + "doc": { + "brief": "Get mean value, list type", + "maixpy": "maix.nn.NanoTrack.mean", + "py_doc": "Get mean value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector mean" + }, + "scale": { + "type": "var", + "name": "scale", + "doc": { + "brief": "Get scale value, list type", + "maixpy": "maix.nn.NanoTrack.scale", + "py_doc": "Get scale value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector scale" + } + }, + "def": "class NanoTrack" + }, + "OCR_Box": { + "type": "class", + "name": "OCR_Box", + "doc": { + "brief": "Object for OCR detect box", + "maixpy": "maix.nn.OCR_Box", + "py_doc": "Object for OCR detect box" + }, + "members": { + "__init__": { + "type": "func", + "name": "OCR_Box", + "doc": { + "brief": "OCR_Box constructor", + "maixpy": "maix.nn.OCR_Box.__init__", + "maixcdk": "maix.nn.OCR_Box.OCR_Box", + "py_doc": "OCR_Box constructor" + }, + "args": [ + [ + "int", + "x1", + "0" + ], + [ + "int", + "y1", + "0" + ], + [ + "int", + "x2", + "0" + ], + [ + "int", + "y2", + "0" + ], + [ + "int", + "x3", + "0" + ], + [ + "int", + "y3", + "0" + ], + [ + "int", + "x4", + "0" + ], + [ + "int", + "y4", + "0" + ] + ], + "ret_type": null, + "static": false, + "def": "OCR_Box(int x1 = 0, int y1 = 0, int x2 = 0, int y2 = 0, int x3 = 0, int y3 = 0, int x4 = 0, int y4 = 0)", + "py_def": "def __init__(self, x1: int = 0, y1: int = 0, x2: int = 0, y2: int = 0, x3: int = 0, y3: int = 0, x4: int = 0, y4: int = 0) -> None" + }, + "x1": { + "type": "var", + "name": "x1", + "doc": { + "brief": "left top point of box", + "maixpy": "maix.nn.OCR_Box.x1", + "py_doc": "left top point of box" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int x1" + }, + "y1": { + 
"type": "var", + "name": "y1", + "doc": { + "brief": "left top point of box", + "maixpy": "maix.nn.OCR_Box.y1", + "py_doc": "left top point of box" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int y1" + }, + "x2": { + "type": "var", + "name": "x2", + "doc": { + "brief": "right top point of box", + "maixpy": "maix.nn.OCR_Box.x2", + "py_doc": "right top point of box" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int x2" + }, + "y2": { + "type": "var", + "name": "y2", + "doc": { + "brief": "right top point of box", + "maixpy": "maix.nn.OCR_Box.y2", + "py_doc": "right top point of box" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int y2" + }, + "x3": { + "type": "var", + "name": "x3", + "doc": { + "brief": "right bottom point of box", + "maixpy": "maix.nn.OCR_Box.x3", + "py_doc": "right bottom point of box" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int x3" + }, + "y3": { + "type": "var", + "name": "y3", + "doc": { + "brief": "right bottom point of box", + "maixpy": "maix.nn.OCR_Box.y3", + "py_doc": "right bottom point of box" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int y3" + }, + "x4": { + "type": "var", + "name": "x4", + "doc": { + "brief": "left bottom point of box", + "maixpy": "maix.nn.OCR_Box.x4", + "py_doc": "left bottom point of box" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int x4" + }, + "y4": { + "type": "var", + "name": "y4", + "doc": { + "brief": "left bottom point of box", + "maixpy": "maix.nn.OCR_Box.y4", + "py_doc": "left bottom point of box" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int y4" + }, + "to_list": { + "type": "func", + "name": "to_list", + "doc": { + "brief": "convert box point to a list type.", + "return": "list type, element is int type, value [x1, y1, x2, y2, x3, y3, x4, y4].", + "maixpy": "maix.nn.OCR_Box.to_list", + "py_doc": "convert box point to a list type.\n\nReturns: list type, element is int type, value [x1, y1, x2, y2, x3, y3, x4, y4].\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector to_list()", + "py_def": "def to_list(self) -> list[int]" + } + }, + "def": "class OCR_Box" + }, + "OCR_Object": { + "type": "class", + "name": "OCR_Object", + "doc": { + "brief": "Object for OCR detect result", + "maixpy": "maix.nn.OCR_Object", + "py_doc": "Object for OCR detect result" + }, + "members": { + "__init__": { + "type": "func", + "name": "OCR_Object", + "doc": { + "brief": "Constructor of Object for OCR detect result", + "param": { + "score": "score" + }, + "maixpy": "maix.nn.OCR_Object.__init__", + "maixcdk": "maix.nn.OCR_Object.OCR_Object", + "py_doc": "Constructor of Object for OCR detect result\n\nArgs:\n - score: score\n" + }, + "args": [ + [ + "const nn::OCR_Box &", + "box", + null + ], + [ + "const std::vector &", + "idx_list", + null + ], + [ + "const std::vector &", + "char_list", + null + ], + [ + "float", + "score", + "0" + ], + [ + "const std::vector &", + "char_pos", + "std::vector()" + ] + ], + "ret_type": null, + "static": false, + "def": "OCR_Object(const nn::OCR_Box &box, const std::vector &idx_list, const std::vector &char_list, float score = 0, const std::vector &char_pos = std::vector())", + "py_def": "def __init__(self, box: OCR_Box, idx_list: list[int], char_list: list[str], score: float = 0, char_pos: list[int] = []) -> None" + }, + "box": { + "type": "var", + "name": "box", + "doc": { + "brief": 
"OCR_Object box, 4 points box, first point at the left-top, clock-wise.", + "maixpy": "maix.nn.OCR_Object.box", + "py_doc": "OCR_Object box, 4 points box, first point at the left-top, clock-wise." + }, + "value": null, + "static": false, + "readonly": false, + "def": "nn::OCR_Box box" + }, + "score": { + "type": "var", + "name": "score", + "doc": { + "brief": "Object score", + "maixpy": "maix.nn.OCR_Object.score", + "py_doc": "Object score" + }, + "value": null, + "static": false, + "readonly": false, + "def": "float score" + }, + "idx_list": { + "type": "var", + "name": "idx_list", + "doc": { + "brief": "chars' idx list, element is int type.", + "maixpy": "maix.nn.OCR_Object.idx_list", + "py_doc": "chars' idx list, element is int type." + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector idx_list" + }, + "char_pos": { + "type": "var", + "name": "char_pos", + "doc": { + "brief": "Chars' position relative to left", + "maixpy": "maix.nn.OCR_Object.char_pos", + "py_doc": "Chars' position relative to left" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector char_pos" + }, + "char_str": { + "type": "func", + "name": "char_str", + "doc": { + "brief": "Get OCR_Object's charactors, return a string type.", + "return": "All charactors in string type.", + "maixpy": "maix.nn.OCR_Object.char_str", + "py_doc": "Get OCR_Object's charactors, return a string type.\n\nReturns: All charactors in string type.\n" + }, + "args": [], + "ret_type": "const std::string&", + "static": false, + "def": "const std::string &char_str()", + "py_def": "def char_str(self) -> str" + }, + "char_list": { + "type": "func", + "name": "char_list", + "doc": { + "brief": "Get OCR_Object's charactors, return a list type.", + "return": "All charactors in list type.", + "maixpy": "maix.nn.OCR_Object.char_list", + "py_doc": "Get OCR_Object's charactors, return a list type.\n\nReturns: All charactors in list type.\n" + }, + "args": [], + "ret_type": "const std::vector&", + "static": false, + "def": "const std::vector &char_list()", + "py_def": "def char_list(self) -> list[str]" + }, + "update_chars": { + "type": "func", + "name": "update_chars", + "doc": { + "brief": "Set OCR_Object's charactors", + "param": { + "char_list": "All charactors in list type." 
+ }, + "maixpy": "maix.nn.OCR_Object.update_chars", + "py_doc": "Set OCR_Object's charactors\n\nArgs:\n - char_list: All charactors in list type.\n" + }, + "args": [ + [ + "const std::vector &", + "char_list", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void update_chars(const std::vector &char_list)", + "py_def": "def update_chars(self, char_list: list[str]) -> None" + }, + "__str__": { + "type": "func", + "name": "to_str", + "doc": { + "brief": "OCR_Object info to string", + "return": "OCR_Object info string", + "maixpy": "maix.nn.OCR_Object.__str__", + "maixcdk": "maix.nn.OCR_Object.to_str", + "py_doc": "OCR_Object info to string\n\nReturns: OCR_Object info string\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string to_str()", + "py_def": "def __str__(self) -> str" + } + }, + "def": "class OCR_Object" + }, + "OCR_Objects": { + "type": "class", + "name": "OCR_Objects", + "doc": { + "brief": "OCR_Objects Class for detect result", + "maixpy": "maix.nn.OCR_Objects", + "py_doc": "OCR_Objects Class for detect result" + }, + "members": { + "__init__": { + "type": "func", + "name": "OCR_Objects", + "doc": { + "brief": "Constructor of OCR_Objects class", + "maixpy": "maix.nn.OCR_Objects.__init__", + "maixcdk": "maix.nn.OCR_Objects.OCR_Objects", + "py_doc": "Constructor of OCR_Objects class" + }, + "args": [], + "ret_type": null, + "static": false, + "def": "OCR_Objects()", + "py_def": "def __init__(self) -> None" + }, + "add": { + "type": "func", + "name": "add", + "doc": { + "brief": "Add object to objects", + "throw": "Throw exception if no memory", + "maixpy": "maix.nn.OCR_Objects.add", + "py_doc": "Add object to objects" + }, + "args": [ + [ + "const nn::OCR_Box &", + "box", + null + ], + [ + "const std::vector &", + "idx_list", + null + ], + [ + "const std::vector &", + "char_list", + null + ], + [ + "float", + "score", + "0" + ], + [ + "const std::vector &", + "char_pos", + "std::vector()" + ] + ], + "ret_type": "nn::OCR_Object&", + "static": false, + "def": "nn::OCR_Object &add(const nn::OCR_Box &box, const std::vector &idx_list, const std::vector &char_list, float score = 0, const std::vector &char_pos = std::vector())", + "py_def": "def add(self, box: OCR_Box, idx_list: list[int], char_list: list[str], score: float = 0, char_pos: list[int] = []) -> OCR_Object" + }, + "remove": { + "type": "func", + "name": "remove", + "doc": { + "brief": "Remove object form objects", + "maixpy": "maix.nn.OCR_Objects.remove", + "py_doc": "Remove object form objects" + }, + "args": [ + [ + "int", + "idx", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err remove(int idx)", + "py_def": "def remove(self, idx: int) -> maix.err.Err" + }, + "at": { + "type": "func", + "name": "at", + "doc": { + "brief": "Get object item", + "maixpy": "maix.nn.OCR_Objects.at", + "py_doc": "Get object item" + }, + "args": [ + [ + "int", + "idx", + null + ] + ], + "ret_type": "nn::OCR_Object&", + "static": false, + "def": "nn::OCR_Object &at(int idx)", + "py_def": "def at(self, idx: int) -> OCR_Object" + }, + "__item__": { + "type": "func", + "name": "operator[]", + "doc": { + "brief": "Get object item", + "maixpy": "maix.nn.OCR_Objects.__item__", + "maixcdk": "maix.nn.OCR_Objects.[]", + "py_doc": "Get object item" + }, + "args": [ + [ + "int", + "idx", + null + ] + ], + "ret_type": "nn::OCR_Object&", + "static": false, + "def": "nn::OCR_Object &operator[](int idx)", + "py_def": "def __item__(self, idx: int) -> OCR_Object" + }, + "__len__": 
{ + "type": "func", + "name": "size", + "doc": { + "brief": "Get size", + "maixpy": "maix.nn.OCR_Objects.__len__", + "maixcdk": "maix.nn.OCR_Objects.size", + "py_doc": "Get size" + }, + "args": [], + "ret_type": "size_t", + "static": false, + "def": "size_t size()", + "py_def": "def __len__(self) -> int" + }, + "__iter__": { + "type": "func", + "name": "begin", + "doc": { + "brief": "Begin", + "maixpy": "maix.nn.OCR_Objects.__iter__", + "maixcdk": "maix.nn.OCR_Objects.begin", + "py_doc": "Begin" + }, + "args": [], + "ret_type": "std::vector::iterator", + "static": false, + "def": "std::vector::iterator begin()", + "py_def": "def __iter__(self) -> typing.Iterator" + } + }, + "def": "class OCR_Objects" + }, + "SpeechDevice": { + "type": "enum", + "name": "SpeechDevice", + "doc": { + "brief": "speech device", + "maixpy": "maix.nn.SpeechDevice", + "py_doc": "speech device" + }, + "values": [ + [ + "DEVICE_NONE", + "-1", + "" + ], + [ + "DEVICE_PCM", + "", + "" + ], + [ + "DEVICE_MIC", + "", + "" + ], + [ + "DEVICE_WAV", + "", + "" + ] + ], + "def": "enum SpeechDevice {\n DEVICE_NONE = -1,\n DEVICE_PCM,\n DEVICE_MIC,\n DEVICE_WAV,\n}" + }, + "SpeechDecoder": { + "type": "enum", + "name": "SpeechDecoder", + "doc": { + "brief": "speech decoder type", + "maixpy": "maix.nn.SpeechDecoder", + "py_doc": "speech decoder type" + }, + "values": [ + [ + "DECODER_RAW", + "1", + "" + ], + [ + "DECODER_DIG", + "2", + "" + ], + [ + "DECODER_LVCSR", + "4", + "" + ], + [ + "DECODER_KWS", + "8", + "" + ], + [ + "DECODER_ALL", + "65535", + "" + ] + ], + "def": "enum SpeechDecoder {\n DECODER_RAW = 1,\n DECODER_DIG = 2,\n DECODER_LVCSR = 4,\n DECODER_KWS = 8,\n DECODER_ALL = 65535,\n}" + }, + "Speech": { + "type": "class", + "name": "Speech", + "doc": { + "brief": "Speech", + "maixpy": "maix.nn.Speech", + "py_doc": "Speech" + }, + "members": { + "__init__": { + "type": "func", + "name": "Speech", + "doc": { + "brief": "Construct a new Speech object", + "param": { + "model": "model path, default empty, you can load model later by load function." + }, + "throw": "If model arg is not empty and load failed, will throw err::Exception.", + "maixpy": "maix.nn.Speech.__init__", + "maixcdk": "maix.nn.Speech.Speech", + "py_doc": "Construct a new Speech object\n\nArgs:\n - model: model path, default empty, you can load model later by load function.\n" + }, + "args": [ + [ + "const string &", + "model", + "\"\"" + ] + ], + "ret_type": null, + "static": false, + "def": "Speech(const string &model = \"\")", + "overload": [ + { + "type": "func", + "name": "Speech", + "doc": { + "brief": "Construct a new Speech object", + "param": { + "model": "model path, default empty, you can load model later by load function." 
+ }, + "throw": "If model arg is not empty and load failed, will throw err::Exception.", + "maixpy": "maix.nn.Speech.__init__", + "maixcdk": "maix.nn.Speech.Speech", + "py_doc": "Construct a new Speech object\n\nArgs:\n - model: model path, default empty, you can load model later by load function.\n" + }, + "args": [ + [ + "const string &", + "model", + "\"\"" + ] + ], + "ret_type": null, + "static": false, + "def": "Speech(const string &model = \"\")" + } + ], + "py_def": "def __init__(self, model: str = '') -> None" + }, + "load": { + "type": "func", + "name": "load", + "doc": { + "brief": "Load model from file", + "param": { + "model": "Model path want to load" + }, + "return": "err::Err", + "maixpy": "maix.nn.Speech.load", + "py_doc": "Load model from file\n\nArgs:\n - model: Model path want to load\n\n\nReturns: err::Err\n" + }, + "args": [ + [ + "const string &", + "model", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err load(const string &model)", + "overload": [ + { + "type": "func", + "name": "load", + "doc": { + "brief": "Load model from file", + "param": { + "model": "Model path want to load" + }, + "return": "err::Err", + "maixpy": "maix.nn.Speech.load", + "py_doc": "Load model from file\n\nArgs:\n - model: Model path want to load\n\n\nReturns: err::Err\n" + }, + "args": [ + [ + "const string &", + "model", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err load(const string &model)" + } + ], + "py_def": "def load(self, model: str) -> maix.err.Err" + }, + "init": { + "type": "func", + "name": "init", + "doc": { + "brief": "Init the ASR library and select the type and name of the audio device.", + "param": { + "dev_type": "device type want to detect, can choose between WAV, PCM, or MIC.", + "device_name": "device name want to detect, can choose a WAV file, a PCM file, or a MIC device name." + }, + "throw": [ + "If am model is not loaded, will throw err::ERR_NOT_IMPL.", + "If device is not supported, will throw err::ERR_NOT_IMPL." + ], + "return": "err::Err type, if init success, return err::ERR_NONE", + "maixpy": "maix.nn.Speech.init", + "py_doc": "Init the ASR library and select the type and name of the audio device.\n\nArgs:\n - dev_type: device type want to detect, can choose between WAV, PCM, or MIC.\n - device_name: device name want to detect, can choose a WAV file, a PCM file, or a MIC device name.\n\n\nReturns: err::Err type, if init success, return err::ERR_NONE\n" + }, + "args": [ + [ + "nn::SpeechDevice", + "dev_type", + null + ], + [ + "const string &", + "device_name", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err init(nn::SpeechDevice dev_type, const string &device_name)", + "overload": [ + { + "type": "func", + "name": "init", + "doc": { + "brief": "Init the ASR library and select the type and name of the audio device.", + "param": { + "dev_type": "device type want to detect, can choose between WAV, PCM, or MIC.", + "device_name": "device name want to detect, can choose a WAV file, a PCM file, or a MIC device name." + }, + "throw": [ + "If am model is not loaded, will throw err::ERR_NOT_IMPL.", + "If device is not supported, will throw err::ERR_NOT_IMPL." 
+ ], + "return": "err::Err type, if init success, return err::ERR_NONE", + "maixpy": "maix.nn.Speech.init", + "py_doc": "Init the ASR library and select the type and name of the audio device.\n\nArgs:\n - dev_type: device type want to detect, can choose between WAV, PCM, or MIC.\n - device_name: device name want to detect, can choose a WAV file, a PCM file, or a MIC device name.\n\n\nReturns: err::Err type, if init success, return err::ERR_NONE\n" + }, + "args": [ + [ + "nn::SpeechDevice", + "dev_type", + null + ], + [ + "const string &", + "device_name", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err init(nn::SpeechDevice dev_type, const string &device_name)" + } + ], + "py_def": "def init(self, dev_type: SpeechDevice, device_name: str) -> maix.err.Err" + }, + "devive": { + "type": "func", + "name": "devive", + "doc": { + "brief": "Reset the device, usually used for PCM/WAV recognition,\\nsuch as identifying the next WAV file.", + "param": { + "dev_type": "device type want to detect, can choose between WAV, PCM, or MIC.", + "device_name": "device name want to detect, can choose a WAV file, a PCM file, or a MIC device name." + }, + "throw": "If device is not supported, will throw err::ERR_NOT_IMPL.", + "return": "err::Err type, if init success, return err::ERR_NONE", + "maixpy": "maix.nn.Speech.devive", + "py_doc": "Reset the device, usually used for PCM/WAV recognition,\nsuch as identifying the next WAV file.\n\nArgs:\n - dev_type: device type want to detect, can choose between WAV, PCM, or MIC.\n - device_name: device name want to detect, can choose a WAV file, a PCM file, or a MIC device name.\n\n\nReturns: err::Err type, if init success, return err::ERR_NONE\n" + }, + "args": [ + [ + "nn::SpeechDevice", + "dev_type", + null + ], + [ + "const string &", + "device_name", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err devive(nn::SpeechDevice dev_type, const string &device_name)", + "overload": [ + { + "type": "func", + "name": "devive", + "doc": { + "brief": "Reset the device, usually used for PCM/WAV recognition,\\nsuch as identifying the next WAV file.", + "param": { + "dev_type": "device type want to detect, can choose between WAV, PCM, or MIC.", + "device_name": "device name want to detect, can choose a WAV file, a PCM file, or a MIC device name." + }, + "throw": "If device is not supported, will throw err::ERR_NOT_IMPL.", + "return": "err::Err type, if init success, return err::ERR_NONE", + "maixpy": "maix.nn.Speech.devive", + "py_doc": "Reset the device, usually used for PCM/WAV recognition,\nsuch as identifying the next WAV file.\n\nArgs:\n - dev_type: device type want to detect, can choose between WAV, PCM, or MIC.\n - device_name: device name want to detect, can choose a WAV file, a PCM file, or a MIC device name.\n\n\nReturns: err::Err type, if init success, return err::ERR_NONE\n" + }, + "args": [ + [ + "nn::SpeechDevice", + "dev_type", + null + ], + [ + "const string &", + "device_name", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err devive(nn::SpeechDevice dev_type, const string &device_name)" + } + ], + "py_def": "def devive(self, dev_type: SpeechDevice, device_name: str) -> maix.err.Err" + }, + "deinit": { + "type": "func", + "name": "deinit", + "doc": { + "brief": "Deinit the ASR library.", + "maixpy": "maix.nn.Speech.deinit", + "py_doc": "Deinit the ASR library." 
+ }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void deinit()", + "overload": [ + { + "type": "func", + "name": "deinit", + "doc": { + "brief": "Deinit the ASR library.", + "maixpy": "maix.nn.Speech.deinit", + "py_doc": "Deinit the ASR library." + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void deinit()" + } + ], + "py_def": "def deinit(self) -> None" + }, + "dec_deinit": { + "type": "func", + "name": "dec_deinit", + "doc": { + "brief": "Deinit the decoder.", + "param": { + "decoder": "decoder type want to deinit\ncan choose between DECODER_RAW, DECODER_DIG, DECODER_LVCSR, DECODER_KWS or DECODER_ALL." + }, + "throw": "If device is not supported, will throw err::ERR_NOT_IMPL.", + "maixpy": "maix.nn.Speech.dec_deinit", + "py_doc": "Deinit the decoder.\n\nArgs:\n - decoder: decoder type want to deinit\ncan choose between DECODER_RAW, DECODER_DIG, DECODER_LVCSR, DECODER_KWS or DECODER_ALL.\n" + }, + "args": [ + [ + "nn::SpeechDecoder", + "decoder", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void dec_deinit(nn::SpeechDecoder decoder)", + "overload": [ + { + "type": "func", + "name": "dec_deinit", + "doc": { + "brief": "Deinit the decoder.", + "param": { + "decoder": "decoder type want to deinit\ncan choose between DECODER_RAW, DECODER_DIG, DECODER_LVCSR, DECODER_KWS or DECODER_ALL." + }, + "throw": "If device is not supported, will throw err::ERR_NOT_IMPL.", + "maixpy": "maix.nn.Speech.dec_deinit", + "py_doc": "Deinit the decoder.\n\nArgs:\n - decoder: decoder type want to deinit\ncan choose between DECODER_RAW, DECODER_DIG, DECODER_LVCSR, DECODER_KWS or DECODER_ALL.\n" + }, + "args": [ + [ + "nn::SpeechDecoder", + "decoder", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void dec_deinit(nn::SpeechDecoder decoder)" + } + ], + "py_def": "def dec_deinit(self, decoder: SpeechDecoder) -> None" + }, + "raw": { + "type": "func", + "name": "raw", + "doc": { + "brief": "Init raw decoder, it will output the prediction results of the original AM.", + "param": { + "callback": "raw decoder user callback." + }, + "return": "err::Err type, if init success, return err::ERR_NONE", + "maixpy": "maix.nn.Speech.raw", + "py_doc": "Init raw decoder, it will output the prediction results of the original AM.\n\nArgs:\n - callback: raw decoder user callback.\n\n\nReturns: err::Err type, if init success, return err::ERR_NONE\n" + }, + "args": [ + [ + "std::function, int)>", + "callback", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err raw(std::function, int)> callback)", + "overload": [ + { + "type": "func", + "name": "raw", + "doc": { + "brief": "Get raw decoder status", + "return": "bool, raw decoder status", + "maixpy": "maix.nn.Speech.raw", + "py_doc": "Get raw decoder status\n\nReturns: bool, raw decoder status\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool raw()" + }, + { + "type": "func", + "name": "raw", + "doc": { + "brief": "Init raw decoder, it will output the prediction results of the original AM.", + "param": { + "callback": "raw decoder user callback." 
+ }, + "return": "err::Err type, if init success, return err::ERR_NONE", + "maixpy": "maix.nn.Speech.raw", + "py_doc": "Init raw decoder, it will output the prediction results of the original AM.\n\nArgs:\n - callback: raw decoder user callback.\n\n\nReturns: err::Err type, if init success, return err::ERR_NONE\n" + }, + "args": [ + [ + "std::function, int)>", + "callback", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err raw(std::function, int)> callback)" + }, + { + "type": "func", + "name": "raw", + "doc": { + "brief": "Get raw decoder status", + "return": "bool, raw decoder status", + "maixpy": "maix.nn.Speech.raw", + "py_doc": "Get raw decoder status\n\nReturns: bool, raw decoder status\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool raw()" + } + ], + "py_def": "def raw(self, callback: typing.Callable[[list[pnyp_t], int], None]) -> maix.err.Err" + }, + "digit": { + "type": "func", + "name": "digit", + "doc": { + "brief": "Init digit decoder, it will output the Chinese digit recognition results within the last 4 seconds.", + "param": { + "blank": "If it exceeds this value, insert a '_' in the output result to indicate idle mute.", + "callback": "digit decoder user callback." + }, + "return": "err::Err type, if init success, return err::ERR_NONE", + "maixpy": "maix.nn.Speech.digit", + "py_doc": "Init digit decoder, it will output the Chinese digit recognition results within the last 4 seconds.\n\nArgs:\n - blank: If it exceeds this value, insert a '_' in the output result to indicate idle mute.\n - callback: digit decoder user callback.\n\n\nReturns: err::Err type, if init success, return err::ERR_NONE\n" + }, + "args": [ + [ + "int", + "blank", + null + ], + [ + "std::function", + "callback", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err digit(int blank, std::function callback)", + "overload": [ + { + "type": "func", + "name": "digit", + "doc": { + "brief": "Get digit decoder status", + "return": "bool, digit decoder status", + "maixpy": "maix.nn.Speech.digit", + "py_doc": "Get digit decoder status\n\nReturns: bool, digit decoder status\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool digit()" + }, + { + "type": "func", + "name": "digit", + "doc": { + "brief": "Init digit decoder, it will output the Chinese digit recognition results within the last 4 seconds.", + "param": { + "blank": "If it exceeds this value, insert a '_' in the output result to indicate idle mute.", + "callback": "digit decoder user callback." 
+ }, + "return": "err::Err type, if init success, return err::ERR_NONE", + "maixpy": "maix.nn.Speech.digit", + "py_doc": "Init digit decoder, it will output the Chinese digit recognition results within the last 4 seconds.\n\nArgs:\n - blank: If it exceeds this value, insert a '_' in the output result to indicate idle mute.\n - callback: digit decoder user callback.\n\n\nReturns: err::Err type, if init success, return err::ERR_NONE\n" + }, + "args": [ + [ + "int", + "blank", + null + ], + [ + "std::function", + "callback", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err digit(int blank, std::function callback)" + }, + { + "type": "func", + "name": "digit", + "doc": { + "brief": "Get digit decoder status", + "return": "bool, digit decoder status", + "maixpy": "maix.nn.Speech.digit", + "py_doc": "Get digit decoder status\n\nReturns: bool, digit decoder status\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool digit()" + } + ], + "py_def": "def digit(self, blank: int, callback: typing.Callable[[str, int], None]) -> maix.err.Err" + }, + "kws": { + "type": "func", + "name": "kws", + "doc": { + "brief": "Init kws decoder, it will output a probability list of all registered keywords in the latest frame,\\nusers can set their own thresholds for wake-up.", + "param": { + "kw_tbl": "Keyword list, filled in with spaces separated by pinyin, for example: xiao3 ai4 tong2 xue2", + "kw_gate": "kw_gate, keyword probability gate table, the number should be the same as kw_tbl", + "auto_similar": "Whether to perform automatic homophone processing,\nsetting it to true will automatically calculate the probability by using pinyin with different tones as homophones", + "callback": "digit decoder user callback." + }, + "return": "err::Err type, if init success, return err::ERR_NONE", + "maixpy": "maix.nn.Speech.kws", + "py_doc": "Init kws decoder, it will output a probability list of all registered keywords in the latest frame,\nusers can set their own thresholds for wake-up.\n\nArgs:\n - kw_tbl: Keyword list, filled in with spaces separated by pinyin, for example: xiao3 ai4 tong2 xue2\n - kw_gate: kw_gate, keyword probability gate table, the number should be the same as kw_tbl\n - auto_similar: Whether to perform automatic homophone processing,\nsetting it to true will automatically calculate the probability by using pinyin with different tones as homophones\n - callback: digit decoder user callback.\n\n\nReturns: err::Err type, if init success, return err::ERR_NONE\n" + }, + "args": [ + [ + "std::vector", + "kw_tbl", + null + ], + [ + "std::vector", + "kw_gate", + null + ], + [ + "std::function, int)>", + "callback", + null + ], + [ + "bool", + "auto_similar", + "true" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err kws(std::vector kw_tbl, std::vector kw_gate, std::function, int)> callback, bool auto_similar = true)", + "overload": [ + { + "type": "func", + "name": "kws", + "doc": { + "brief": "Get kws decoder status", + "return": "bool, kws decoder status", + "maixpy": "maix.nn.Speech.kws", + "py_doc": "Get kws decoder status\n\nReturns: bool, kws decoder status\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool kws()" + }, + { + "type": "func", + "name": "kws", + "doc": { + "brief": "Init kws decoder, it will output a probability list of all registered keywords in the latest frame,\\nusers can set their own thresholds for wake-up.", + "param": { + "kw_tbl": "Keyword list, filled in with spaces 
separated by pinyin, for example: xiao3 ai4 tong2 xue2", + "kw_gate": "kw_gate, keyword probability gate table, the number should be the same as kw_tbl", + "auto_similar": "Whether to perform automatic homophone processing,\nsetting it to true will automatically calculate the probability by using pinyin with different tones as homophones", + "callback": "digit decoder user callback." + }, + "return": "err::Err type, if init success, return err::ERR_NONE", + "maixpy": "maix.nn.Speech.kws", + "py_doc": "Init kws decoder, it will output a probability list of all registered keywords in the latest frame,\nusers can set their own thresholds for wake-up.\n\nArgs:\n - kw_tbl: Keyword list, filled in with spaces separated by pinyin, for example: xiao3 ai4 tong2 xue2\n - kw_gate: kw_gate, keyword probability gate table, the number should be the same as kw_tbl\n - auto_similar: Whether to perform automatic homophone processing,\nsetting it to true will automatically calculate the probability by using pinyin with different tones as homophones\n - callback: digit decoder user callback.\n\n\nReturns: err::Err type, if init success, return err::ERR_NONE\n" + }, + "args": [ + [ + "std::vector", + "kw_tbl", + null + ], + [ + "std::vector", + "kw_gate", + null + ], + [ + "std::function, int)>", + "callback", + null + ], + [ + "bool", + "auto_similar", + "true" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err kws(std::vector kw_tbl, std::vector kw_gate, std::function, int)> callback, bool auto_similar = true)" + }, + { + "type": "func", + "name": "kws", + "doc": { + "brief": "Get kws decoder status", + "return": "bool, kws decoder status", + "maixpy": "maix.nn.Speech.kws", + "py_doc": "Get kws decoder status\n\nReturns: bool, kws decoder status\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool kws()" + } + ], + "py_def": "def kws(self, kw_tbl: list[str], kw_gate: list[float], callback: typing.Callable[[list[float], int], None], auto_similar: bool = True) -> maix.err.Err" + }, + "lvcsr": { + "type": "func", + "name": "lvcsr", + "doc": { + "brief": "Init lvcsr decoder, it will output continuous speech recognition results (less than 1024 Chinese characters).", + "param": { + "sfst_name": "Sfst file path.", + "sym_name": "Sym file path (output symbol table).", + "phones_txt": "Path to phones.bin (pinyin table).", + "words_txt": "Path to words.bin (dictionary table).", + "callback": "lvcsr decoder user callback.", + "beam": "The beam size for WFST search is set to 8 by default, and it is recommended to be between 3 and 9.\nThe larger the size, the larger the search space, and the more accurate but slower the search.", + "bg_prob": "The absolute value of the natural logarithm of the default probability value for background pinyin\noutside of BEAM-CNT is set to 10 by default.", + "scale": "acoustics_cost = log(pny_prob)*scale.", + "mmap": "use mmap to load the WFST decoding image,\nIf set to true, the beam should be less than 5." 
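A usage sketch for the keyword-spotting (kws) decoder defined here, combined with the run loop documented further below; the ASR model path, the WAV file and the wake threshold are assumptions, not values taken from this API dump:

    from maix import nn

    speech = nn.Speech("/root/models/am_3332_192_int8.mud")    # assumed ASR model path
    # Recognize from a WAV file; DEVICE_MIC/DEVICE_PCM work the same way with a device name.
    speech.init(nn.SpeechDevice.DEVICE_WAV, "/root/test.wav")  # assumed file path

    def on_kws(probs: list, frames: int):
        # One probability per registered keyword for the latest frame.
        if probs[0] > 0.3:                                     # wake threshold chosen arbitrarily
            print("keyword hit, p = %.2f" % probs[0])

    # Keywords are space-separated pinyin; one gate value per keyword.
    speech.kws(["xiao3 ai4 tong2 xue2"], [0.1], on_kws, auto_similar=True)

    while True:
        n = speech.run(1)   # process one frame; returns the number of frames actually run
        if n < 1:           # assume end of the WAV file when fewer frames come back
            break
    speech.deinit()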
+ }, + "return": "err::Err type, if init success, return err::ERR_NONE", + "maixpy": "maix.nn.Speech.lvcsr", + "py_doc": "Init lvcsr decoder, it will output continuous speech recognition results (less than 1024 Chinese characters).\n\nArgs:\n - sfst_name: Sfst file path.\n - sym_name: Sym file path (output symbol table).\n - phones_txt: Path to phones.bin (pinyin table).\n - words_txt: Path to words.bin (dictionary table).\n - callback: lvcsr decoder user callback.\n - beam: The beam size for WFST search is set to 8 by default, and it is recommended to be between 3 and 9.\nThe larger the size, the larger the search space, and the more accurate but slower the search.\n - bg_prob: The absolute value of the natural logarithm of the default probability value for background pinyin\noutside of BEAM-CNT is set to 10 by default.\n - scale: acoustics_cost = log(pny_prob)*scale.\n - mmap: use mmap to load the WFST decoding image,\nIf set to true, the beam should be less than 5.\n\n\nReturns: err::Err type, if init success, return err::ERR_NONE\n" + }, + "args": [ + [ + "const string &", + "sfst_name", + null + ], + [ + "const string &", + "sym_name", + null + ], + [ + "const string &", + "phones_txt", + null + ], + [ + "const string &", + "words_txt", + null + ], + [ + "std::function, int)>", + "callback", + null + ], + [ + "float", + "beam", + "8" + ], + [ + "float", + "bg_prob", + "10" + ], + [ + "float", + "scale", + "0.5" + ], + [ + "bool", + "mmap", + "false" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err lvcsr(const string &sfst_name, const string &sym_name,\n const string &phones_txt, const string &words_txt, \n std::function, int)> callback,\n float beam = 8, float bg_prob = 10, float scale = 0.5, bool mmap = false)", + "overload": [ + { + "type": "func", + "name": "lvcsr", + "doc": { + "brief": "Get lvcsr decoder status", + "return": "bool, lvcsr decoder status", + "maixpy": "maix.nn.Speech.lvcsr", + "py_doc": "Get lvcsr decoder status\n\nReturns: bool, lvcsr decoder status\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool lvcsr()" + }, + { + "type": "func", + "name": "lvcsr", + "doc": { + "brief": "Init lvcsr decoder, it will output continuous speech recognition results (less than 1024 Chinese characters).", + "param": { + "sfst_name": "Sfst file path.", + "sym_name": "Sym file path (output symbol table).", + "phones_txt": "Path to phones.bin (pinyin table).", + "words_txt": "Path to words.bin (dictionary table).", + "callback": "lvcsr decoder user callback.", + "beam": "The beam size for WFST search is set to 8 by default, and it is recommended to be between 3 and 9.\nThe larger the size, the larger the search space, and the more accurate but slower the search.", + "bg_prob": "The absolute value of the natural logarithm of the default probability value for background pinyin\noutside of BEAM-CNT is set to 10 by default.", + "scale": "acoustics_cost = log(pny_prob)*scale.", + "mmap": "use mmap to load the WFST decoding image,\nIf set to true, the beam should be less than 5." 
+ }, + "return": "err::Err type, if init success, return err::ERR_NONE", + "maixpy": "maix.nn.Speech.lvcsr", + "py_doc": "Init lvcsr decoder, it will output continuous speech recognition results (less than 1024 Chinese characters).\n\nArgs:\n - sfst_name: Sfst file path.\n - sym_name: Sym file path (output symbol table).\n - phones_txt: Path to phones.bin (pinyin table).\n - words_txt: Path to words.bin (dictionary table).\n - callback: lvcsr decoder user callback.\n - beam: The beam size for WFST search is set to 8 by default, and it is recommended to be between 3 and 9.\nThe larger the size, the larger the search space, and the more accurate but slower the search.\n - bg_prob: The absolute value of the natural logarithm of the default probability value for background pinyin\noutside of BEAM-CNT is set to 10 by default.\n - scale: acoustics_cost = log(pny_prob)*scale.\n - mmap: use mmap to load the WFST decoding image,\nIf set to true, the beam should be less than 5.\n\n\nReturns: err::Err type, if init success, return err::ERR_NONE\n" + }, + "args": [ + [ + "const string &", + "sfst_name", + null + ], + [ + "const string &", + "sym_name", + null + ], + [ + "const string &", + "phones_txt", + null + ], + [ + "const string &", + "words_txt", + null + ], + [ + "std::function, int)>", + "callback", + null + ], + [ + "float", + "beam", + "8" + ], + [ + "float", + "bg_prob", + "10" + ], + [ + "float", + "scale", + "0.5" + ], + [ + "bool", + "mmap", + "false" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err lvcsr(const string &sfst_name, const string &sym_name,\n const string &phones_txt, const string &words_txt, \n std::function, int)> callback,\n float beam = 8, float bg_prob = 10, float scale = 0.5, bool mmap = false)" + }, + { + "type": "func", + "name": "lvcsr", + "doc": { + "brief": "Get lvcsr decoder status", + "return": "bool, lvcsr decoder status", + "maixpy": "maix.nn.Speech.lvcsr", + "py_doc": "Get lvcsr decoder status\n\nReturns: bool, lvcsr decoder status\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool lvcsr()" + } + ], + "py_def": "def lvcsr(self, sfst_name: str, sym_name: str, phones_txt: str, words_txt: str, callback: typing.Callable[[tuple[str, str], int], None], beam: float = 8, bg_prob: float = 10, scale: float = 0.5, mmap: bool = False) -> maix.err.Err" + }, + "run": { + "type": "func", + "name": "run", + "doc": { + "brief": "Run speech recognition, user can run 1 frame at a time and do other processing after running,\\nor it can run continuously within a thread and be stopped by an external thread.", + "param": { + "frame": "The number of frames per run." + }, + "return": "int type, return actual number of frames in the run.", + "maixpy": "maix.nn.Speech.run", + "py_doc": "Run speech recognition, user can run 1 frame at a time and do other processing after running,\nor it can run continuously within a thread and be stopped by an external thread.\n\nArgs:\n - frame: The number of frames per run.\n\n\nReturns: int type, return actual number of frames in the run.\n" + }, + "args": [ + [ + "int", + "frame", + null + ] + ], + "ret_type": "int", + "static": false, + "def": "int run(int frame)", + "overload": [ + { + "type": "func", + "name": "run", + "doc": { + "brief": "Run speech recognition, user can run 1 frame at a time and do other processing after running,\\nor it can run continuously within a thread and be stopped by an external thread.", + "param": { + "frame": "The number of frames per run." 
+ }, + "return": "int type, return actual number of frames in the run.", + "maixpy": "maix.nn.Speech.run", + "py_doc": "Run speech recognition, user can run 1 frame at a time and do other processing after running,\nor it can run continuously within a thread and be stopped by an external thread.\n\nArgs:\n - frame: The number of frames per run.\n\n\nReturns: int type, return actual number of frames in the run.\n" + }, + "args": [ + [ + "int", + "frame", + null + ] + ], + "ret_type": "int", + "static": false, + "def": "int run(int frame)" + } + ], + "py_def": "def run(self, frame: int) -> int" + }, + "clear": { + "type": "func", + "name": "clear", + "doc": { + "brief": "Reset internal cache operation", + "maixpy": "maix.nn.Speech.clear", + "py_doc": "Reset internal cache operation" + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void clear()", + "overload": [ + { + "type": "func", + "name": "clear", + "doc": { + "brief": "Reset internal cache operation", + "maixpy": "maix.nn.Speech.clear", + "py_doc": "Reset internal cache operation" + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void clear()" + } + ], + "py_def": "def clear(self) -> None" + }, + "frame_time": { + "type": "func", + "name": "frame_time", + "doc": { + "brief": "Get the time of one frame.", + "return": "int type, return the time of one frame.", + "maixpy": "maix.nn.Speech.frame_time", + "py_doc": "Get the time of one frame.\n\nReturns: int type, return the time of one frame.\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int frame_time()", + "overload": [ + { + "type": "func", + "name": "frame_time", + "doc": { + "brief": "Get the time of one frame.", + "return": "int type, return the time of one frame.", + "maixpy": "maix.nn.Speech.frame_time", + "py_doc": "Get the time of one frame.\n\nReturns: int type, return the time of one frame.\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int frame_time()" + } + ], + "py_def": "def frame_time(self) -> int" + }, + "vocab": { + "type": "func", + "name": "vocab", + "doc": { + "brief": "Get the acoustic model dictionary.", + "return": "std::pair type, return the dictionary and length.", + "maixpy": "maix.nn.Speech.vocab", + "py_doc": "Get the acoustic model dictionary.\n\nReturns: std::pair type, return the dictionary and length.\n" + }, + "args": [], + "ret_type": "std::pair", + "static": false, + "def": "std::pair vocab()", + "overload": [ + { + "type": "func", + "name": "vocab", + "doc": { + "brief": "Get the acoustic model dictionary.", + "return": "std::pair type, return the dictionary and length.", + "maixpy": "maix.nn.Speech.vocab", + "py_doc": "Get the acoustic model dictionary.\n\nReturns: std::pair type, return the dictionary and length.\n" + }, + "args": [], + "ret_type": "std::pair", + "static": false, + "def": "std::pair vocab()" + } + ], + "py_def": "def vocab(self) -> tuple[str, int]" + }, + "similar": { + "type": "func", + "name": "similar", + "doc": { + "brief": "Manually register mute words, and each pinyin can register up to 10 homophones,\\nplease note that using this interface to register homophones will overwrite,\\nthe homophone table automatically generated in the \\\"automatic homophone processing\\\" feature.", + "param": { + "dev_type": "device type want to detect, can choose between WAV, PCM, or MIC.", + "device_name": "device name want to detect, can choose a WAV file, a PCM file, or a MIC device name." 
+ }, + "return": "err::Err type, if init success, return err::ERR_NONE", + "maixpy": "maix.nn.Speech.similar", + "py_doc": "Manually register mute words, and each pinyin can register up to 10 homophones,\nplease note that using this interface to register homophones will overwrite,\nthe homophone table automatically generated in the \"automatic homophone processing\" feature.\n\nArgs:\n - dev_type: device type want to detect, can choose between WAV, PCM, or MIC.\n - device_name: device name want to detect, can choose a WAV file, a PCM file, or a MIC device name.\n\n\nReturns: err::Err type, if init success, return err::ERR_NONE\n" + }, + "args": [ + [ + "const string &", + "pny", + null + ], + [ + "std::vector", + "similar_pnys", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err similar(const string &pny, std::vector similar_pnys)", + "overload": [ + { + "type": "func", + "name": "similar", + "doc": { + "brief": "Manually register mute words, and each pinyin can register up to 10 homophones,\\nplease note that using this interface to register homophones will overwrite,\\nthe homophone table automatically generated in the \\\"automatic homophone processing\\\" feature.", + "param": { + "dev_type": "device type want to detect, can choose between WAV, PCM, or MIC.", + "device_name": "device name want to detect, can choose a WAV file, a PCM file, or a MIC device name." + }, + "return": "err::Err type, if init success, return err::ERR_NONE", + "maixpy": "maix.nn.Speech.similar", + "py_doc": "Manually register mute words, and each pinyin can register up to 10 homophones,\nplease note that using this interface to register homophones will overwrite,\nthe homophone table automatically generated in the \"automatic homophone processing\" feature.\n\nArgs:\n - dev_type: device type want to detect, can choose between WAV, PCM, or MIC.\n - device_name: device name want to detect, can choose a WAV file, a PCM file, or a MIC device name.\n\n\nReturns: err::Err type, if init success, return err::ERR_NONE\n" + }, + "args": [ + [ + "const string &", + "pny", + null + ], + [ + "std::vector", + "similar_pnys", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err similar(const string &pny, std::vector similar_pnys)" + } + ], + "py_def": "def similar(self, pny: str, similar_pnys: list[str]) -> maix.err.Err" + }, + "mean": { + "type": "var", + "name": "mean", + "doc": { + "brief": "Get mean value, list type", + "maixpy": "maix.nn.Speech.mean", + "py_doc": "Get mean value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector mean" + }, + "scale": { + "type": "var", + "name": "scale", + "doc": { + "brief": "Get scale value, list type", + "maixpy": "maix.nn.Speech.scale", + "py_doc": "Get scale value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector scale" + }, + "dev_type": { + "type": "func", + "name": "dev_type", + "doc": { + "brief": "get device type", + "return": "nn::SpeechDevice type, see SpeechDevice of this module", + "maixpy": "maix.nn.Speech.dev_type", + "py_doc": "get device type\n\nReturns: nn::SpeechDevice type, see SpeechDevice of this module\n" + }, + "args": [], + "ret_type": "nn::SpeechDevice", + "static": false, + "def": "nn::SpeechDevice dev_type()", + "overload": [ + { + "type": "func", + "name": "dev_type", + "doc": { + "brief": "get device type", + "return": "nn::SpeechDevice type, see SpeechDevice of this module", + "maixpy": 
"maix.nn.Speech.dev_type", + "py_doc": "get device type\n\nReturns: nn::SpeechDevice type, see SpeechDevice of this module\n" + }, + "args": [], + "ret_type": "nn::SpeechDevice", + "static": false, + "def": "nn::SpeechDevice dev_type()" + } + ], + "py_def": "def dev_type(self) -> SpeechDevice" + } + }, + "def": "class Speech" + }, + "YOLOv8": { + "type": "class", + "name": "YOLOv8", + "doc": { + "brief": "YOLOv8 class", + "maixpy": "maix.nn.YOLOv8", + "py_doc": "YOLOv8 class" + }, + "members": { + "__init__": { + "type": "func", + "name": "YOLOv8", + "doc": { + "brief": "Constructor of YOLOv8 class", + "param": { + "model": "model path, default empty, you can load model later by load function.", + "dual_buff": "direction [in], prepare dual input output buffer to accelarate forward, that is, when NPU is forwarding we not wait and prepare the next input buff.\nIf you want to ensure every time forward output the input's result, set this arg to false please.\nDefault true to ensure speed." + }, + "throw": "If model arg is not empty and load failed, will throw err::Exception.", + "maixpy": "maix.nn.YOLOv8.__init__", + "maixcdk": "maix.nn.YOLOv8.YOLOv8", + "py_doc": "Constructor of YOLOv8 class\n\nArgs:\n - model: model path, default empty, you can load model later by load function.\n - dual_buff: direction [in], prepare dual input output buffer to accelarate forward, that is, when NPU is forwarding we not wait and prepare the next input buff.\nIf you want to ensure every time forward output the input's result, set this arg to false please.\nDefault true to ensure speed.\n" + }, + "args": [ + [ + "const string &", + "model", + "\"\"" + ], + [ + "bool", + "dual_buff", + "true" + ] + ], + "ret_type": null, + "static": false, + "def": "YOLOv8(const string &model = \"\", bool dual_buff = true)", + "py_def": "def __init__(self, model: str = '', dual_buff: bool = True) -> None" + }, + "load": { + "type": "func", + "name": "load", + "doc": { + "brief": "Load model from file", + "param": { + "model": "Model path want to load" + }, + "return": "err::Err", + "maixpy": "maix.nn.YOLOv8.load", + "py_doc": "Load model from file\n\nArgs:\n - model: Model path want to load\n\n\nReturns: err::Err\n" + }, + "args": [ + [ + "const string &", + "model", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err load(const string &model)", + "py_def": "def load(self, model: str) -> maix.err.Err" + }, + "detect": { + "type": "func", + "name": "detect", + "doc": { + "brief": "Detect objects from image", + "param": { + "img": "Image want to detect, if image's size not match model input's, will auto resize with fit method.", + "conf_th": "Confidence threshold, default 0.5.", + "iou_th": "IoU threshold, default 0.45.", + "fit": "Resize method, default image.Fit.FIT_CONTAIN.", + "keypoint_th": "keypoint threshold, default 0.5, only for yolov8-pose model." + }, + "throw": "If image format not match model input format, will throw err::Exception.", + "return": "Object list. 
In C++, you should delete it after use.\nIf model is yolov8-pose, object's points have value, and if points' value < 0 means that point is invalid(conf < keypoint_th).", + "maixpy": "maix.nn.YOLOv8.detect", + "py_doc": "Detect objects from image\n\nArgs:\n - img: Image want to detect, if image's size not match model input's, will auto resize with fit method.\n - conf_th: Confidence threshold, default 0.5.\n - iou_th: IoU threshold, default 0.45.\n - fit: Resize method, default image.Fit.FIT_CONTAIN.\n - keypoint_th: keypoint threshold, default 0.5, only for yolov8-pose model.\n\n\nReturns: Object list. In C++, you should delete it after use.\nIf model is yolov8-pose, object's points have value, and if points' value < 0 means that point is invalid(conf < keypoint_th).\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "float", + "conf_th", + "0.5" + ], + [ + "float", + "iou_th", + "0.45" + ], + [ + "maix::image::Fit", + "fit", + "maix::image::FIT_CONTAIN" + ], + [ + "float", + "keypoint_th", + "0.5" + ] + ], + "ret_type": "nn::Objects*", + "static": false, + "def": "nn::Objects *detect(image::Image &img, float conf_th = 0.5, float iou_th = 0.45, maix::image::Fit fit = maix::image::FIT_CONTAIN, float keypoint_th = 0.5)", + "py_def": "def detect(self, img: maix.image.Image, conf_th: float = 0.5, iou_th: float = 0.45, fit: maix.image.Fit = ..., keypoint_th: float = 0.5) -> ..." + }, + "input_size": { + "type": "func", + "name": "input_size", + "doc": { + "brief": "Get model input size", + "return": "model input size", + "maixpy": "maix.nn.YOLOv8.input_size", + "py_doc": "Get model input size\n\nReturns: model input size\n" + }, + "args": [], + "ret_type": "image::Size", + "static": false, + "def": "image::Size input_size()", + "py_def": "def input_size(self) -> maix.image.Size" + }, + "input_width": { + "type": "func", + "name": "input_width", + "doc": { + "brief": "Get model input width", + "return": "model input size of width", + "maixpy": "maix.nn.YOLOv8.input_width", + "py_doc": "Get model input width\n\nReturns: model input size of width\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_width()", + "py_def": "def input_width(self) -> int" + }, + "input_height": { + "type": "func", + "name": "input_height", + "doc": { + "brief": "Get model input height", + "return": "model input size of height", + "maixpy": "maix.nn.YOLOv8.input_height", + "py_doc": "Get model input height\n\nReturns: model input size of height\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_height()", + "py_def": "def input_height(self) -> int" + }, + "input_format": { + "type": "func", + "name": "input_format", + "doc": { + "brief": "Get input image format", + "return": "input image format, image::Format type.", + "maixpy": "maix.nn.YOLOv8.input_format", + "py_doc": "Get input image format\n\nReturns: input image format, image::Format type.\n" + }, + "args": [], + "ret_type": "image::Format", + "static": false, + "def": "image::Format input_format()", + "py_def": "def input_format(self) -> maix.image.Format" + }, + "draw_pose": { + "type": "func", + "name": "draw_pose", + "doc": { + "brief": "Draw pose keypoints on image", + "param": { + "img": "image object, maix.image.Image type.", + "points": "keypoits, int list type, [x, y, x, y ...]", + "radius": "radius of points.", + "color": "color of points.", + "body": "true, if points' length is 17*2 and body is ture, will draw lines as human body, if set to false won't draw lines, default 
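A usage sketch for the YOLOv8 detector described here, assuming a model path and the camera/display modules that are documented outside this nn API dump:

    from maix import nn, camera, display, image

    detector = nn.YOLOv8(model="/root/models/yolov8n.mud", dual_buff=True)  # assumed model path
    cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
    disp = display.Display()

    while True:
        img = cam.read()
        objs = detector.detect(img, conf_th=0.5, iou_th=0.45)   # returns maix.nn.Objects
        for obj in objs:
            # Draw the bounding box and the label with its score.
            img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
            msg = "%s: %.2f" % (detector.labels[obj.class_id], obj.score)
            img.draw_string(obj.x, obj.y, msg, color=image.COLOR_RED)
        disp.show(img)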
true." + }, + "maixpy": "maix.nn.YOLOv8.draw_pose", + "py_doc": "Draw pose keypoints on image\n\nArgs:\n - img: image object, maix.image.Image type.\n - points: keypoits, int list type, [x, y, x, y ...]\n - radius: radius of points.\n - color: color of points.\n - body: true, if points' length is 17*2 and body is ture, will draw lines as human body, if set to false won't draw lines, default true.\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "std::vector", + "points", + null + ], + [ + "int", + "radius", + "4" + ], + [ + "image::Color", + "color", + "image::COLOR_RED" + ], + [ + "bool", + "body", + "true" + ] + ], + "ret_type": "void", + "static": false, + "def": "void draw_pose(image::Image &img, std::vector points, int radius = 4, image::Color color = image::COLOR_RED, bool body = true)", + "py_def": "def draw_pose(self, img: maix.image.Image, points: list[int], radius: int = 4, color: maix.image.Color = ..., body: bool = True) -> None" + }, + "draw_seg_mask": { + "type": "func", + "name": "draw_seg_mask", + "doc": { + "brief": "Draw segmentation on image", + "param": { + "img": "image object, maix.image.Image type.", + "seg_mask": "segmentation mask image by detect method, a grayscale image", + "threshold": "only mask's value > threshold will be draw on image, value from 0 to 255." + }, + "maixpy": "maix.nn.YOLOv8.draw_seg_mask", + "py_doc": "Draw segmentation on image\n\nArgs:\n - img: image object, maix.image.Image type.\n - seg_mask: segmentation mask image by detect method, a grayscale image\n - threshold: only mask's value > threshold will be draw on image, value from 0 to 255.\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "image::Image &", + "seg_mask", + null + ], + [ + "int", + "threshold", + "127" + ] + ], + "ret_type": "void", + "static": false, + "def": "void draw_seg_mask(image::Image &img, int x, int y, image::Image &seg_mask, int threshold = 127)", + "py_def": "def draw_seg_mask(self, img: maix.image.Image, x: int, y: int, seg_mask: maix.image.Image, threshold: int = 127) -> None" + }, + "labels": { + "type": "var", + "name": "labels", + "doc": { + "brief": "Labels list", + "maixpy": "maix.nn.YOLOv8.labels", + "py_doc": "Labels list" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector labels" + }, + "label_path": { + "type": "var", + "name": "label_path", + "doc": { + "brief": "Label file path", + "maixpy": "maix.nn.YOLOv8.label_path", + "py_doc": "Label file path" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::string label_path" + }, + "mean": { + "type": "var", + "name": "mean", + "doc": { + "brief": "Get mean value, list type", + "maixpy": "maix.nn.YOLOv8.mean", + "py_doc": "Get mean value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector mean" + }, + "scale": { + "type": "var", + "name": "scale", + "doc": { + "brief": "Get scale value, list type", + "maixpy": "maix.nn.YOLOv8.scale", + "py_doc": "Get scale value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector scale" + } + }, + "def": "class YOLOv8" + }, + "Object": { + "type": "class", + "name": "Object", + "doc": { + "brief": "Object for detect result", + "maixpy": "maix.nn.Object", + "py_doc": "Object for detect result" + }, + "members": { + "__init__": { + "type": "func", + "name": "Object", + "doc": { + "brief": "Constructor of Object for detect 
result", + "param": { + "x": "left top x", + "y": "left top y", + "w": "width", + "h": "height", + "class_id": "class id", + "score": "score" + }, + "maixpy": "maix.nn.Object.__init__", + "maixcdk": "maix.nn.Object.Object", + "py_doc": "Constructor of Object for detect result\n\nArgs:\n - x: left top x\n - y: left top y\n - w: width\n - h: height\n - class_id: class id\n - score: score\n" + }, + "args": [ + [ + "int", + "x", + "0" + ], + [ + "int", + "y", + "0" + ], + [ + "int", + "w", + "0" + ], + [ + "int", + "h", + "0" + ], + [ + "int", + "class_id", + "0" + ], + [ + "float", + "score", + "0" + ], + [ + "std::vector", + "points", + "std::vector()" + ] + ], + "ret_type": null, + "static": false, + "def": "Object(int x = 0, int y = 0, int w = 0, int h = 0, int class_id = 0, float score = 0, std::vector points = std::vector())", + "py_def": "def __init__(self, x: int = 0, y: int = 0, w: int = 0, h: int = 0, class_id: int = 0, score: float = 0, points: list[int] = []) -> None" + }, + "__str__": { + "type": "func", + "name": "to_str", + "doc": { + "brief": "Object info to string", + "return": "Object info string", + "maixpy": "maix.nn.Object.__str__", + "maixcdk": "maix.nn.Object.to_str", + "py_doc": "Object info to string\n\nReturns: Object info string\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string to_str()", + "py_def": "def __str__(self) -> str" + }, + "x": { + "type": "var", + "name": "x", + "doc": { + "brief": "Object left top coordinate x", + "maixpy": "maix.nn.Object.x", + "py_doc": "Object left top coordinate x" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int x" + }, + "y": { + "type": "var", + "name": "y", + "doc": { + "brief": "Object left top coordinate y", + "maixpy": "maix.nn.Object.y", + "py_doc": "Object left top coordinate y" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int y" + }, + "w": { + "type": "var", + "name": "w", + "doc": { + "brief": "Object width", + "maixpy": "maix.nn.Object.w", + "py_doc": "Object width" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int w" + }, + "h": { + "type": "var", + "name": "h", + "doc": { + "brief": "Object height", + "maixpy": "maix.nn.Object.h", + "py_doc": "Object height" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int h" + }, + "class_id": { + "type": "var", + "name": "class_id", + "doc": { + "brief": "Object class id", + "maixpy": "maix.nn.Object.class_id", + "py_doc": "Object class id" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int class_id" + }, + "score": { + "type": "var", + "name": "score", + "doc": { + "brief": "Object score", + "maixpy": "maix.nn.Object.score", + "py_doc": "Object score" + }, + "value": null, + "static": false, + "readonly": false, + "def": "float score" + }, + "points": { + "type": "var", + "name": "points", + "doc": { + "brief": "keypoints", + "maixpy": "maix.nn.Object.points", + "py_doc": "keypoints" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector points" + }, + "seg_mask": { + "type": "var", + "name": "seg_mask", + "doc": { + "brief": "segmentation mask, uint8 list type, shape is h * w but flattened to one dimension, value fron 0 to 255.", + "attention": "For efficiency, it's a pointer in C++, use this carefully!", + "maixpy": "maix.nn.Object.seg_mask", + "py_doc": "segmentation mask, uint8 list type, shape is h * w but flattened to one dimension, value fron 0 to 255." 
+ }, + "value": null, + "static": false, + "readonly": false, + "def": "image::Image *seg_mask" + } + }, + "def": "class Object" + }, + "ObjectFloat": { + "type": "class", + "name": "ObjectFloat", + "doc": { + "brief": "Object for detect result", + "maixpy": "maix.nn.ObjectFloat", + "py_doc": "Object for detect result" + }, + "members": { + "__init__": { + "type": "func", + "name": "ObjectFloat", + "doc": { + "brief": "Constructor of Object for detect result", + "param": { + "x": "left top x", + "y": "left top y", + "w": "width", + "h": "height", + "class_id": "class id", + "score": "score" + }, + "maixpy": "maix.nn.ObjectFloat.__init__", + "maixcdk": "maix.nn.ObjectFloat.ObjectFloat", + "py_doc": "Constructor of Object for detect result\n\nArgs:\n - x: left top x\n - y: left top y\n - w: width\n - h: height\n - class_id: class id\n - score: score\n" + }, + "args": [ + [ + "float", + "x", + "0" + ], + [ + "float", + "y", + "0" + ], + [ + "float", + "w", + "0" + ], + [ + "float", + "h", + "0" + ], + [ + "float", + "class_id", + "0" + ], + [ + "float", + "score", + "0" + ], + [ + "std::vector", + "points", + "std::vector()" + ] + ], + "ret_type": null, + "static": false, + "def": "ObjectFloat(float x = 0, float y = 0, float w = 0, float h = 0, float class_id = 0, float score = 0, std::vector points = std::vector())", + "py_def": "def __init__(self, x: float = 0, y: float = 0, w: float = 0, h: float = 0, class_id: float = 0, score: float = 0, points: list[float] = []) -> None" + }, + "__str__": { + "type": "func", + "name": "to_str", + "doc": { + "brief": "Object info to string", + "return": "Object info string", + "maixpy": "maix.nn.ObjectFloat.__str__", + "maixcdk": "maix.nn.ObjectFloat.to_str", + "py_doc": "Object info to string\n\nReturns: Object info string\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string to_str()", + "py_def": "def __str__(self) -> str" + }, + "x": { + "type": "var", + "name": "x", + "doc": { + "brief": "Object left top coordinate x", + "maixpy": "maix.nn.ObjectFloat.x", + "py_doc": "Object left top coordinate x" + }, + "value": null, + "static": false, + "readonly": false, + "def": "float x" + }, + "y": { + "type": "var", + "name": "y", + "doc": { + "brief": "Object left top coordinate y", + "maixpy": "maix.nn.ObjectFloat.y", + "py_doc": "Object left top coordinate y" + }, + "value": null, + "static": false, + "readonly": false, + "def": "float y" + }, + "w": { + "type": "var", + "name": "w", + "doc": { + "brief": "Object width", + "maixpy": "maix.nn.ObjectFloat.w", + "py_doc": "Object width" + }, + "value": null, + "static": false, + "readonly": false, + "def": "float w" + }, + "h": { + "type": "var", + "name": "h", + "doc": { + "brief": "Object height", + "maixpy": "maix.nn.ObjectFloat.h", + "py_doc": "Object height" + }, + "value": null, + "static": false, + "readonly": false, + "def": "float h" + }, + "class_id": { + "type": "var", + "name": "class_id", + "doc": { + "brief": "Object class id", + "maixpy": "maix.nn.ObjectFloat.class_id", + "py_doc": "Object class id" + }, + "value": null, + "static": false, + "readonly": false, + "def": "float class_id" + }, + "score": { + "type": "var", + "name": "score", + "doc": { + "brief": "Object score", + "maixpy": "maix.nn.ObjectFloat.score", + "py_doc": "Object score" + }, + "value": null, + "static": false, + "readonly": false, + "def": "float score" + }, + "points": { + "type": "var", + "name": "points", + "doc": { + "brief": "keypoints", + "maixpy": "maix.nn.ObjectFloat.points", 
+ "py_doc": "keypoints" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector points" + } + }, + "def": "class ObjectFloat" + }, + "Objects": { + "type": "class", + "name": "Objects", + "doc": { + "brief": "Objects Class for detect result", + "maixpy": "maix.nn.Objects", + "py_doc": "Objects Class for detect result" + }, + "members": { + "__init__": { + "type": "func", + "name": "Objects", + "doc": { + "brief": "Constructor of Objects class", + "maixpy": "maix.nn.Objects.__init__", + "maixcdk": "maix.nn.Objects.Objects", + "py_doc": "Constructor of Objects class" + }, + "args": [], + "ret_type": null, + "static": false, + "def": "Objects()", + "py_def": "def __init__(self) -> None" + }, + "add": { + "type": "func", + "name": "add", + "doc": { + "brief": "Add object to objects", + "throw": "Throw exception if no memory", + "maixpy": "maix.nn.Objects.add", + "py_doc": "Add object to objects" + }, + "args": [ + [ + "int", + "x", + "0" + ], + [ + "int", + "y", + "0" + ], + [ + "int", + "w", + "0" + ], + [ + "int", + "h", + "0" + ], + [ + "int", + "class_id", + "0" + ], + [ + "float", + "score", + "0" + ], + [ + "std::vector", + "points", + "std::vector()" + ] + ], + "ret_type": "nn::Object&", + "static": false, + "def": "nn::Object &add(int x = 0, int y = 0, int w = 0, int h = 0, int class_id = 0, float score = 0, std::vector points = std::vector())", + "py_def": "def add(self, x: int = 0, y: int = 0, w: int = 0, h: int = 0, class_id: int = 0, score: float = 0, points: list[int] = []) -> Object" + }, + "remove": { + "type": "func", + "name": "remove", + "doc": { + "brief": "Remove object form objects", + "maixpy": "maix.nn.Objects.remove", + "py_doc": "Remove object form objects" + }, + "args": [ + [ + "int", + "idx", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err remove(int idx)", + "py_def": "def remove(self, idx: int) -> maix.err.Err" + }, + "at": { + "type": "func", + "name": "at", + "doc": { + "brief": "Get object item", + "maixpy": "maix.nn.Objects.at", + "py_doc": "Get object item" + }, + "args": [ + [ + "int", + "idx", + null + ] + ], + "ret_type": "nn::Object&", + "static": false, + "def": "nn::Object &at(int idx)", + "py_def": "def at(self, idx: int) -> Object" + }, + "__item__": { + "type": "func", + "name": "operator[]", + "doc": { + "brief": "Get object item", + "maixpy": "maix.nn.Objects.__item__", + "maixcdk": "maix.nn.Objects.[]", + "py_doc": "Get object item" + }, + "args": [ + [ + "int", + "idx", + null + ] + ], + "ret_type": "nn::Object&", + "static": false, + "def": "nn::Object &operator[](int idx)", + "py_def": "def __item__(self, idx: int) -> Object" + }, + "__len__": { + "type": "func", + "name": "size", + "doc": { + "brief": "Get size", + "maixpy": "maix.nn.Objects.__len__", + "maixcdk": "maix.nn.Objects.size", + "py_doc": "Get size" + }, + "args": [], + "ret_type": "size_t", + "static": false, + "def": "size_t size()", + "py_def": "def __len__(self) -> int" + }, + "__iter__": { + "type": "func", + "name": "begin", + "doc": { + "brief": "Begin", + "maixpy": "maix.nn.Objects.__iter__", + "maixcdk": "maix.nn.Objects.begin", + "py_doc": "Begin" + }, + "args": [], + "ret_type": "std::vector::iterator", + "static": false, + "def": "std::vector::iterator begin()", + "py_def": "def __iter__(self) -> typing.Iterator" + } + }, + "def": "class Objects" + }, + "MUD": { + "type": "class", + "name": "MUD", + "doc": { + "brief": "MUD(model universal describe file) class", + "maixpy": "maix.nn.MUD", + "py_doc": "MUD(model 
universal describe file) class" + }, + "members": { + "__init__": { + "type": "func", + "name": "MUD", + "doc": { + "brief": "MUD constructor", + "param": { + "model_path": "direction [in], model file path, model format can be MUD(model universal describe file) file.\nIf model_path set, will load model from file, load failed will raise err.Exception.\nIf model_path not set, you can load model later by load function." + }, + "maixpy": "maix.nn.MUD.__init__", + "maixcdk": "maix.nn.MUD.MUD", + "py_doc": "MUD constructor\n\nArgs:\n - model_path: direction [in], model file path, model format can be MUD(model universal describe file) file.\nIf model_path set, will load model from file, load failed will raise err.Exception.\nIf model_path not set, you can load model later by load function.\n" + }, + "args": [ + [ + "const char *", + "model_path", + "nullptr" + ] + ], + "ret_type": null, + "static": false, + "def": "MUD(const char *model_path = nullptr)", + "py_def": "def __init__(self, model_path: str = None) -> None" + }, + "load": { + "type": "func", + "name": "load", + "doc": { + "brief": "Load model from file", + "param": { + "model_path": "direction [in], model file path, model format can be MUD(model universal describe file) file." + }, + "return": "error code, if load success, return err::ERR_NONE", + "maixpy": "maix.nn.MUD.load", + "py_doc": "Load model from file\n\nArgs:\n - model_path: direction [in], model file path, model format can be MUD(model universal describe file) file.\n\n\nReturns: error code, if load success, return err::ERR_NONE\n" + }, + "args": [ + [ + "const std::string &", + "model_path", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err load(const std::string &model_path)", + "py_def": "def load(self, model_path: str) -> maix.err.Err" + }, + "type": { + "type": "var", + "name": "type", + "doc": { + "brief": "Model type, string type", + "maixpy": "maix.nn.MUD.type", + "py_doc": "Model type, string type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::string type" + }, + "items": { + "type": "var", + "name": "items", + "doc": { + "brief": "Model config items, different model type has different config items", + "maixpy": "maix.nn.MUD.items", + "py_doc": "Model config items, different model type has different config items" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::map> items" + } + }, + "def": "class MUD" + }, + "LayerInfo": { + "type": "class", + "name": "LayerInfo", + "doc": { + "brief": "NN model layer info", + "maixpy": "maix.nn.LayerInfo", + "py_doc": "NN model layer info" + }, + "members": { + "__init__": { + "type": "func", + "name": "LayerInfo", + "doc": { + "brief": "LayerInfo constructor", + "param": { + "name": "direction [in], layer name", + "dtype": "direction [in], layer data type", + "shape": "direction [in], layer shape" + }, + "maixpy": "maix.nn.LayerInfo.__init__", + "maixcdk": "maix.nn.LayerInfo.LayerInfo", + "py_doc": "LayerInfo constructor\n\nArgs:\n - name: direction [in], layer name\n - dtype: direction [in], layer data type\n - shape: direction [in], layer shape\n" + }, + "args": [ + [ + "const std::string &", + "name", + "\"\"" + ], + [ + "tensor::DType", + "dtype", + "tensor::DType::FLOAT32" + ], + [ + "std::vector", + "shape", + "std::vector()" + ] + ], + "ret_type": null, + "static": false, + "def": "LayerInfo(const std::string &name = \"\", tensor::DType dtype = tensor::DType::FLOAT32, std::vector shape = std::vector())", + "py_def": "def __init__(self, 
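A short sketch of inspecting a model file through the MUD class defined above; only the file path is an assumption:

    from maix import nn

    mud = nn.MUD("/root/models/yolov8n.mud")   # assumed path to a MUD file on the device
    print("model type:", mud.type)             # e.g. the runtime the MUD file declares
    for section, kv in mud.items.items():      # items: section name -> key/value pairs
        print("[%s]" % section)
        for key, value in kv.items():
            print("  %s = %s" % (key, value))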
name: str = '', dtype: maix.tensor.DType = ..., shape: list[int] = []) -> None" + }, + "name": { + "type": "var", + "name": "name", + "doc": { + "brief": "Layer name", + "maixpy": "maix.nn.LayerInfo.name", + "py_doc": "Layer name" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::string name" + }, + "dtype": { + "type": "var", + "name": "dtype", + "doc": { + "brief": "Layer data type", + "attention": "If model is quantized, this is the real quantized data type like int8 float16,\nin most scene, inputs and outputs we actually use float32 in API like forward.", + "maixpy": "maix.nn.LayerInfo.dtype", + "py_doc": "Layer data type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "tensor::DType dtype" + }, + "shape": { + "type": "var", + "name": "shape", + "doc": { + "brief": "Layer shape", + "maixpy": "maix.nn.LayerInfo.shape", + "py_doc": "Layer shape" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector shape" + }, + "shape_int": { + "type": "func", + "name": "shape_int", + "doc": { + "brief": "Shape as one int type, multiply all dims of shape", + "maixpy": "maix.nn.LayerInfo.shape_int", + "py_doc": "Shape as one int type, multiply all dims of shape" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int shape_int()", + "py_def": "def shape_int(self) -> int" + }, + "to_str": { + "type": "func", + "name": "to_str", + "doc": { + "brief": "To string", + "maixpy": "maix.nn.LayerInfo.to_str", + "py_doc": "To string" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string to_str()", + "py_def": "def to_str(self) -> str" + }, + "__str__": { + "type": "func", + "name": "__str__", + "doc": { + "brief": "To string", + "maixpy": "maix.nn.LayerInfo.__str__", + "py_doc": "To string" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string __str__()", + "py_def": "def __str__(self) -> str" + } + }, + "def": "class LayerInfo" + }, + "NN": { + "type": "class", + "name": "NN", + "doc": { + "brief": "Neural network class", + "maixpy": "maix.nn.NN", + "py_doc": "Neural network class" + }, + "members": { + "__init__": { + "type": "func", + "name": "NN", + "doc": { + "brief": "Neural network constructor", + "param": { + "model": "direction [in], model file path, model format can be MUD(model universal describe file) file.\nIf model_path set, will load model from file, load failed will raise err.Exception.\nIf model_path not set, you can load model later by load function.", + "dual_buff": "direction [in], prepare dual input output buffer to accelarate forward, that is, when NPU is forwarding we not wait and prepare the next input buff.\nIf you want to ensure every time forward output the input's result, set this arg to false please.\nDefault true to ensure speed." 
+ }, + "maixpy": "maix.nn.NN.__init__", + "py_doc": "Neural network constructor\n\nArgs:\n - model: direction [in], model file path, model format can be MUD(model universal describe file) file.\nIf model_path set, will load model from file, load failed will raise err.Exception.\nIf model_path not set, you can load model later by load function.\n - dual_buff: direction [in], prepare dual input output buffer to accelarate forward, that is, when NPU is forwarding we not wait and prepare the next input buff.\nIf you want to ensure every time forward output the input's result, set this arg to false please.\nDefault true to ensure speed.\n" + }, + "args": [ + [ + "const std::string &", + "model", + "\"\"" + ], + [ + "bool", + "dual_buff", + "true" + ] + ], + "ret_type": null, + "static": false, + "def": "NN(const std::string &model = \"\", bool dual_buff = true)", + "py_def": "def __init__(self, model: str = '', dual_buff: bool = True) -> None" + }, + "load": { + "type": "func", + "name": "load", + "doc": { + "brief": "Load model from file", + "param": { + "model": "direction [in], model file path, model format can be MUD(model universal describe file) file." + }, + "return": "error code, if load success, return err::ERR_NONE", + "maixpy": "maix.nn.NN.load", + "py_doc": "Load model from file\n\nArgs:\n - model: direction [in], model file path, model format can be MUD(model universal describe file) file.\n\n\nReturns: error code, if load success, return err::ERR_NONE\n" + }, + "args": [ + [ + "const std::string &", + "model", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err load(const std::string &model)", + "py_def": "def load(self, model: str) -> maix.err.Err" + }, + "loaded": { + "type": "func", + "name": "loaded", + "doc": { + "brief": "Is model loaded", + "return": "true if model loaded, else false", + "maixpy": "maix.nn.NN.loaded", + "py_doc": "Is model loaded\n\nReturns: true if model loaded, else false\n" + }, + "args": [], + "ret_type": "bool", + "static": false, + "def": "bool loaded()", + "py_def": "def loaded(self) -> bool" + }, + "set_dual_buff": { + "type": "func", + "name": "set_dual_buff", + "doc": { + "brief": "Enable dual buff or disable dual buff", + "param": { + "enable": "true to enable, false to disable" + }, + "maixpy": "maix.nn.NN.set_dual_buff", + "py_doc": "Enable dual buff or disable dual buff\n\nArgs:\n - enable: true to enable, false to disable\n" + }, + "args": [ + [ + "bool", + "enable", + null + ] + ], + "ret_type": "void", + "static": false, + "def": "void set_dual_buff(bool enable)", + "py_def": "def set_dual_buff(self, enable: bool) -> None" + }, + "inputs_info": { + "type": "func", + "name": "inputs_info", + "doc": { + "brief": "Get model input layer info", + "return": "input layer info", + "maixpy": "maix.nn.NN.inputs_info", + "py_doc": "Get model input layer info\n\nReturns: input layer info\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector inputs_info()", + "py_def": "def inputs_info(self) -> list[LayerInfo]" + }, + "outputs_info": { + "type": "func", + "name": "outputs_info", + "doc": { + "brief": "Get model output layer info", + "return": "output layer info", + "maixpy": "maix.nn.NN.outputs_info", + "py_doc": "Get model output layer info\n\nReturns: output layer info\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector outputs_info()", + "py_def": "def outputs_info(self) -> list[LayerInfo]" + }, + "extra_info": { + "type": "func", + "name": 
"extra_info", + "doc": { + "brief": "Get model extra info define in MUD file", + "return": "extra info, dict type, key-value object, attention: key and value are all string type.", + "maixpy": "maix.nn.NN.extra_info", + "py_doc": "Get model extra info define in MUD file\n\nReturns: extra info, dict type, key-value object, attention: key and value are all string type.\n" + }, + "args": [], + "ret_type": "std::map", + "static": false, + "def": "std::map extra_info()", + "py_def": "def extra_info(self) -> dict[str, str]" + }, + "forward": { + "type": "func", + "name": "forward", + "doc": { + "brief": "forward run model, get output of model,\\nthis is specially for MaixPy, not efficient, but easy to use in MaixPy", + "param": { + "input": "direction [in], input tensor", + "copy_result": "If set true, will copy result to a new variable; else will use a internal memory, you can only use it until to the next forward.\nDefault true to avoid problems, you can set it to false manually to make speed faster.", + "dual_buff_wait": "bool type, only for dual_buff mode, if true, will inference this image and wait for result, default false." + }, + "return": "output tensor. In C++, you should manually delete tensors in return value and return value.\nIf dual_buff mode, it can be NULL(None in MaixPy) means not ready.", + "throw": "if error ocurrs like no memory or arg error, will raise err.Exception.", + "maixpy": "maix.nn.NN.forward", + "py_doc": "forward run model, get output of model,\nthis is specially for MaixPy, not efficient, but easy to use in MaixPy\n\nArgs:\n - input: direction [in], input tensor\n - copy_result: If set true, will copy result to a new variable; else will use a internal memory, you can only use it until to the next forward.\nDefault true to avoid problems, you can set it to false manually to make speed faster.\n - dual_buff_wait: bool type, only for dual_buff mode, if true, will inference this image and wait for result, default false.\n\n\nReturns: output tensor. In C++, you should manually delete tensors in return value and return value.\nIf dual_buff mode, it can be NULL(None in MaixPy) means not ready.\n" + }, + "args": [ + [ + "tensor::Tensors &", + "inputs", + null + ], + [ + "bool", + "copy_result", + "true" + ], + [ + "bool", + "dual_buff_wait", + "false" + ] + ], + "ret_type": "tensor::Tensors*", + "static": false, + "def": "tensor::Tensors *forward(tensor::Tensors &inputs, bool copy_result = true, bool dual_buff_wait = false)", + "py_def": "def forward(self, inputs: maix.tensor.Tensors, copy_result: bool = True, dual_buff_wait: bool = False) -> maix.tensor.Tensors" + }, + "forward_image": { + "type": "func", + "name": "forward_image", + "doc": { + "brief": "forward model, param is image", + "param": { + "img": "input image", + "mean": "mean value, a list type, e.g. [0.485, 0.456, 0.406], default is empty list means not normalize.", + "scale": "scale value, a list type, e.g. 
[1/0.229, 1/0.224, 1/0.225], default is empty list means not normalize.", + "fit": "fit mode, if the image size of input not equal to model's input, it will auto resize using this fit method,\ndefault is image.Fit.FIT_FILL for easy coordinate calculation, but for more accurate result, use image.Fit.FIT_CONTAIN is better.", + "copy_result": "If set true, will copy result to a new variable; else will use internal memory, you can only use it until the next forward.\nDefault true to avoid problems, you can set it to false manually to make speed faster.", + "dual_buff_wait": "bool type, only for dual_buff mode, if true, will run inference on this image and wait for result, default false." + }, + "return": "output tensor. In C++, you should manually delete tensors in return value and return value.\nIf dual_buff mode, it can be NULL(None in MaixPy) means not ready.", + "throw": "If error occurs, like arg error or alloc memory failed, will raise err.Exception.", + "maixpy": "maix.nn.NN.forward_image", + "py_doc": "forward model, param is image\n\nArgs:\n - img: input image\n - mean: mean value, a list type, e.g. [0.485, 0.456, 0.406], default is empty list means not normalize.\n - scale: scale value, a list type, e.g. [1/0.229, 1/0.224, 1/0.225], default is empty list means not normalize.\n - fit: fit mode, if the image size of input not equal to model's input, it will auto resize using this fit method,\ndefault is image.Fit.FIT_FILL for easy coordinate calculation, but for more accurate result, use image.Fit.FIT_CONTAIN is better.\n - copy_result: If set true, will copy result to a new variable; else will use internal memory, you can only use it until the next forward.\nDefault true to avoid problems, you can set it to false manually to make speed faster.\n - dual_buff_wait: bool type, only for dual_buff mode, if true, will run inference on this image and wait for result, default false.\n\n\nReturns: output tensor. 
In C++, you should manually delete tensors in return value and return value.\nIf dual_buff mode, it can be NULL(None in MaixPy) means not ready.\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "std::vector", + "mean", + "std::vector()" + ], + [ + "std::vector", + "scale", + "std::vector()" + ], + [ + "image::Fit", + "fit", + "image::Fit::FIT_FILL" + ], + [ + "bool", + "copy_result", + "true" + ], + [ + "bool", + "dual_buff_wait", + "false" + ] + ], + "ret_type": "tensor::Tensors*", + "static": false, + "def": "tensor::Tensors *forward_image(image::Image &img, std::vector mean = std::vector(), std::vector scale = std::vector(), image::Fit fit = image::Fit::FIT_FILL, bool copy_result = true, bool dual_buff_wait = false)", + "py_def": "def forward_image(self, img: maix.image.Image, mean: list[float] = [], scale: list[float] = [], fit: maix.image.Fit = ..., copy_result: bool = True, dual_buff_wait: bool = False) -> maix.tensor.Tensors" + } + }, + "def": "class NN" + }, + "FaceObject": { + "type": "class", + "name": "FaceObject", + "doc": { + "brief": "Face object", + "maixpy": "maix.nn.FaceObject", + "py_doc": "Face object" + }, + "members": { + "__init__": { + "type": "func", + "name": "FaceObject", + "doc": { + "brief": "Constructor", + "maixpy": "maix.nn.FaceObject.__init__", + "maixcdk": "maix.nn.FaceObject.FaceObject", + "py_doc": "Constructor" + }, + "args": [ + [ + "int", + "x", + "0" + ], + [ + "int", + "y", + "0" + ], + [ + "int", + "w", + "0" + ], + [ + "int", + "h", + "0" + ], + [ + "int", + "class_id", + "0" + ], + [ + "float", + "score", + "0" + ], + [ + "std::vector", + "points", + "std::vector()" + ], + [ + "std::vector", + "feature", + "std::vector()" + ], + [ + "image::Image", + "face", + "image::Image()" + ] + ], + "ret_type": null, + "static": false, + "def": "FaceObject(int x = 0, int y = 0, int w = 0, int h = 0, int class_id = 0, float score = 0, std::vector points = std::vector(), std::vector feature = std::vector(), image::Image face = image::Image())", + "py_def": "def __init__(self, x: int = 0, y: int = 0, w: int = 0, h: int = 0, class_id: int = 0, score: float = 0, points: list[int] = [], feature: list[float] = [], face: maix.image.Image = ...) 
-> None" + }, + "__str__": { + "type": "func", + "name": "to_str", + "doc": { + "brief": "FaceObject info to string", + "return": "FaceObject info string", + "maixpy": "maix.nn.FaceObject.__str__", + "maixcdk": "maix.nn.FaceObject.to_str", + "py_doc": "FaceObject info to string\n\nReturns: FaceObject info string\n" + }, + "args": [], + "ret_type": "std::string", + "static": false, + "def": "std::string to_str()", + "py_def": "def __str__(self) -> str" + }, + "x": { + "type": "var", + "name": "x", + "doc": { + "brief": "FaceObject left top coordinate x", + "maixpy": "maix.nn.FaceObject.x", + "py_doc": "FaceObject left top coordinate x" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int x" + }, + "y": { + "type": "var", + "name": "y", + "doc": { + "brief": "FaceObject left top coordinate y", + "maixpy": "maix.nn.FaceObject.y", + "py_doc": "FaceObject left top coordinate y" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int y" + }, + "w": { + "type": "var", + "name": "w", + "doc": { + "brief": "FaceObject width", + "maixpy": "maix.nn.FaceObject.w", + "py_doc": "FaceObject width" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int w" + }, + "h": { + "type": "var", + "name": "h", + "doc": { + "brief": "FaceObject height", + "maixpy": "maix.nn.FaceObject.h", + "py_doc": "FaceObject height" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int h" + }, + "class_id": { + "type": "var", + "name": "class_id", + "doc": { + "brief": "FaceObject class id", + "maixpy": "maix.nn.FaceObject.class_id", + "py_doc": "FaceObject class id" + }, + "value": null, + "static": false, + "readonly": false, + "def": "int class_id" + }, + "score": { + "type": "var", + "name": "score", + "doc": { + "brief": "FaceObject score", + "maixpy": "maix.nn.FaceObject.score", + "py_doc": "FaceObject score" + }, + "value": null, + "static": false, + "readonly": false, + "def": "float score" + }, + "points": { + "type": "var", + "name": "points", + "doc": { + "brief": "keypoints", + "maixpy": "maix.nn.FaceObject.points", + "py_doc": "keypoints" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector points" + }, + "feature": { + "type": "var", + "name": "feature", + "doc": { + "brief": "feature, float list type", + "maixpy": "maix.nn.FaceObject.feature", + "py_doc": "feature, float list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector feature" + }, + "face": { + "type": "var", + "name": "face", + "doc": { + "brief": "face image", + "maixpy": "maix.nn.FaceObject.face", + "py_doc": "face image" + }, + "value": null, + "static": false, + "readonly": false, + "def": "image::Image face" + } + }, + "def": "class FaceObject" + }, + "FaceRecognizer": { + "type": "class", + "name": "FaceRecognizer", + "doc": { + "brief": "FaceRecognizer class", + "maixpy": "maix.nn.FaceRecognizer", + "py_doc": "FaceRecognizer class" + }, + "members": { + "__init__": { + "type": "func", + "name": "FaceRecognizer", + "doc": { + "brief": "Constructor of FaceRecognizer class", + "param": { + "detect_model": "face detect model path, default empty, you can load model later by load function.", + "feature_model": "feature extract model", + "dual_buff": "direction [in], prepare dual input output buffer to accelarate forward, that is, when NPU is forwarding we not wait and prepare the next input buff.\nIf you want to ensure every time forward output the input's result, set this arg to false 
please.\nDefault true to ensure speed." + }, + "throw": "If model arg is not empty and load failed, will throw err::Exception.", + "maixpy": "maix.nn.FaceRecognizer.__init__", + "maixcdk": "maix.nn.FaceRecognizer.FaceRecognizer", + "py_doc": "Constructor of FaceRecognizer class\n\nArgs:\n - detect_model: face detect model path, default empty, you can load model later by load function.\n - feature_model: feature extract model\n - dual_buff: direction [in], prepare dual input output buffer to accelarate forward, that is, when NPU is forwarding we not wait and prepare the next input buff.\nIf you want to ensure every time forward output the input's result, set this arg to false please.\nDefault true to ensure speed.\n" + }, + "args": [ + [ + "const string &", + "detect_model", + "\"\"" + ], + [ + "const string &", + "feature_model", + "\"\"" + ], + [ + "bool", + "dual_buff", + "true" + ] + ], + "ret_type": null, + "static": false, + "def": "FaceRecognizer(const string &detect_model = \"\", const string &feature_model = \"\", bool dual_buff = true)", + "py_def": "def __init__(self, detect_model: str = '', feature_model: str = '', dual_buff: bool = True) -> None" + }, + "load": { + "type": "func", + "name": "load", + "doc": { + "brief": "Load model from file", + "param": { + "detect_model": "face detect model path, default empty, you can load model later by load function.", + "feature_model": "feature extract model" + }, + "return": "err::Err", + "maixpy": "maix.nn.FaceRecognizer.load", + "py_doc": "Load model from file\n\nArgs:\n - detect_model: face detect model path, default empty, you can load model later by load function.\n - feature_model: feature extract model\n\n\nReturns: err::Err\n" + }, + "args": [ + [ + "const string &", + "detect_model", + null + ], + [ + "const string &", + "feature_model", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err load(const string &detect_model, const string &feature_model)", + "py_def": "def load(self, detect_model: str, feature_model: str) -> maix.err.Err" + }, + "recognize": { + "type": "func", + "name": "recognize", + "doc": { + "brief": "Detect objects from image", + "param": { + "img": "Image want to detect, if image's size not match model input's, will auto resize with fit method.", + "conf_th": "Detect confidence threshold, default 0.5.", + "iou_th": "Detect IoU threshold, default 0.45.", + "compare_th": "Compare two face score threshold, default 0.8, if two faces' score < this value, will see this face fas unknown.", + "get_feature": "return feature or not, if true will copy features to result, if false will not copy feature to result to save time and memory.", + "get_face": "return face image or not, if true result object's face attribute will valid, or face sttribute is empty. Get face image will alloc memory and copy image, so will lead to slower speed.", + "fit": "Resize method, default image.Fit.FIT_CONTAIN." + }, + "throw": "If image format not match model input format, will throw err::Exception.", + "return": "FaceObject list. 
In C++, you should delete it after use.", + "maixpy": "maix.nn.FaceRecognizer.recognize", + "py_doc": "Detect objects from image\n\nArgs:\n - img: Image want to detect, if image's size not match model input's, will auto resize with fit method.\n - conf_th: Detect confidence threshold, default 0.5.\n - iou_th: Detect IoU threshold, default 0.45.\n - compare_th: Compare two face score threshold, default 0.8, if two faces' score < this value, will see this face fas unknown.\n - get_feature: return feature or not, if true will copy features to result, if false will not copy feature to result to save time and memory.\n - get_face: return face image or not, if true result object's face attribute will valid, or face sttribute is empty. Get face image will alloc memory and copy image, so will lead to slower speed.\n - fit: Resize method, default image.Fit.FIT_CONTAIN.\n\n\nReturns: FaceObject list. In C++, you should delete it after use.\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "float", + "conf_th", + "0.5" + ], + [ + "float", + "iou_th", + "0.45" + ], + [ + "float", + "compare_th", + "0.8" + ], + [ + "bool", + "get_feature", + "false" + ], + [ + "bool", + "get_face", + "false" + ], + [ + "maix::image::Fit", + "fit", + "maix::image::FIT_CONTAIN" + ] + ], + "ret_type": "std::vector*", + "static": false, + "def": "std::vector *recognize(image::Image &img, float conf_th = 0.5, float iou_th = 0.45, float compare_th = 0.8, bool get_feature = false, bool get_face = false, maix::image::Fit fit = maix::image::FIT_CONTAIN)", + "py_def": "def recognize(self, img: maix.image.Image, conf_th: float = 0.5, iou_th: float = 0.45, compare_th: float = 0.8, get_feature: bool = False, get_face: bool = False, fit: maix.image.Fit = ...) -> list[FaceObject]" + }, + "add_face": { + "type": "func", + "name": "add_face", + "doc": { + "brief": "Add face to lib", + "param": { + "face": "face object, find by recognize", + "label": "face label(name)" + }, + "maixpy": "maix.nn.FaceRecognizer.add_face", + "py_doc": "Add face to lib\n\nArgs:\n - face: face object, find by recognize\n - label: face label(name)\n" + }, + "args": [ + [ + "nn::FaceObject *", + "face", + null + ], + [ + "const std::string &", + "label", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err add_face(nn::FaceObject *face, const std::string &label)", + "py_def": "def add_face(self, face: FaceObject, label: str) -> maix.err.Err" + }, + "remove_face": { + "type": "func", + "name": "remove_face", + "doc": { + "brief": "remove face from lib", + "param": { + "idx": "index of face in lib, default -1 means use label, idx and label must have one, idx have high priotiry.", + "label": "which face to remove, default to empty string mean use idx, idx and label must have one, idx have high priotiry." 
+ }, + "maixpy": "maix.nn.FaceRecognizer.remove_face", + "py_doc": "remove face from lib\n\nArgs:\n - idx: index of face in lib, default -1 means use label, idx and label must have one, idx have high priotiry.\n - label: which face to remove, default to empty string mean use idx, idx and label must have one, idx have high priotiry.\n" + }, + "args": [ + [ + "int", + "idx", + "-1" + ], + [ + "const std::string &", + "label", + "\"\"" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err remove_face(int idx = -1, const std::string &label = \"\")", + "py_def": "def remove_face(self, idx: int = -1, label: str = '') -> maix.err.Err" + }, + "save_faces": { + "type": "func", + "name": "save_faces", + "doc": { + "brief": "Save faces info to a file", + "param": { + "path": "where to save, string type." + }, + "return": "err.Err type", + "maixpy": "maix.nn.FaceRecognizer.save_faces", + "py_doc": "Save faces info to a file\n\nArgs:\n - path: where to save, string type.\n\n\nReturns: err.Err type\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err save_faces(const std::string &path)", + "py_def": "def save_faces(self, path: str) -> maix.err.Err" + }, + "load_faces": { + "type": "func", + "name": "load_faces", + "doc": { + "brief": "Load faces info from a file", + "param": { + "path": "from where to load, string type." + }, + "return": "err::Err type", + "maixpy": "maix.nn.FaceRecognizer.load_faces", + "py_doc": "Load faces info from a file\n\nArgs:\n - path: from where to load, string type.\n\n\nReturns: err::Err type\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err load_faces(const std::string &path)", + "py_def": "def load_faces(self, path: str) -> maix.err.Err" + }, + "input_size": { + "type": "func", + "name": "input_size", + "doc": { + "brief": "Get model input size", + "return": "model input size", + "maixpy": "maix.nn.FaceRecognizer.input_size", + "py_doc": "Get model input size\n\nReturns: model input size\n" + }, + "args": [], + "ret_type": "image::Size", + "static": false, + "def": "image::Size input_size()", + "py_def": "def input_size(self) -> maix.image.Size" + }, + "input_width": { + "type": "func", + "name": "input_width", + "doc": { + "brief": "Get model input width", + "return": "model input size of width", + "maixpy": "maix.nn.FaceRecognizer.input_width", + "py_doc": "Get model input width\n\nReturns: model input size of width\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_width()", + "py_def": "def input_width(self) -> int" + }, + "input_height": { + "type": "func", + "name": "input_height", + "doc": { + "brief": "Get model input height", + "return": "model input size of height", + "maixpy": "maix.nn.FaceRecognizer.input_height", + "py_doc": "Get model input height\n\nReturns: model input size of height\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_height()", + "py_def": "def input_height(self) -> int" + }, + "input_format": { + "type": "func", + "name": "input_format", + "doc": { + "brief": "Get input image format", + "return": "input image format, image::Format type.", + "maixpy": "maix.nn.FaceRecognizer.input_format", + "py_doc": "Get input image format\n\nReturns: input image format, image::Format type.\n" + }, + "args": [], + "ret_type": "image::Format", + "static": false, + "def": "image::Format input_format()", + 
"py_def": "def input_format(self) -> maix.image.Format" + }, + "mean_detector": { + "type": "var", + "name": "mean_detector", + "doc": { + "brief": "Get mean value, list type", + "maixpy": "maix.nn.FaceRecognizer.mean_detector", + "py_doc": "Get mean value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector mean_detector" + }, + "scale_detector": { + "type": "var", + "name": "scale_detector", + "doc": { + "brief": "Get scale value, list type", + "maixpy": "maix.nn.FaceRecognizer.scale_detector", + "py_doc": "Get scale value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector scale_detector" + }, + "mean_feature": { + "type": "var", + "name": "mean_feature", + "doc": { + "brief": "Get mean value, list type", + "maixpy": "maix.nn.FaceRecognizer.mean_feature", + "py_doc": "Get mean value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector mean_feature" + }, + "scale_feature": { + "type": "var", + "name": "scale_feature", + "doc": { + "brief": "Get scale value, list type", + "maixpy": "maix.nn.FaceRecognizer.scale_feature", + "py_doc": "Get scale value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector scale_feature" + }, + "labels": { + "type": "var", + "name": "labels", + "doc": { + "brief": "labels, list type, first is \\\"unknown\\\"", + "maixpy": "maix.nn.FaceRecognizer.labels", + "py_doc": "labels, list type, first is \"unknown\"" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector labels" + }, + "features": { + "type": "var", + "name": "features", + "doc": { + "brief": "features", + "maixpy": "maix.nn.FaceRecognizer.features", + "py_doc": "features" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector> features" + } + }, + "def": "class FaceRecognizer" + }, + "SelfLearnClassifier": { + "type": "class", + "name": "SelfLearnClassifier", + "doc": { + "brief": "SelfLearnClassifier", + "maixpy": "maix.nn.SelfLearnClassifier", + "py_doc": "SelfLearnClassifier" + }, + "members": { + "__init__": { + "type": "func", + "name": "SelfLearnClassifier", + "doc": { + "brief": "Construct a new SelfLearnClassifier object", + "param": { + "model": "MUD model path, if empty, will not load model, you can call load_model() later.\nif not empty, will load model and will raise err::Exception if load failed.", + "dual_buff": "direction [in], prepare dual input output buffer to accelarate forward, that is, when NPU is forwarding we not wait and prepare the next input buff.\nIf you want to ensure every time forward output the input's result, set this arg to false please.\nDefault true to ensure speed." 
+ }, + "maixpy": "maix.nn.SelfLearnClassifier.__init__", + "maixcdk": "maix.nn.SelfLearnClassifier.SelfLearnClassifier", + "py_doc": "Construct a new SelfLearnClassifier object\n\nArgs:\n - model: MUD model path, if empty, will not load model, you can call load_model() later.\nif not empty, will load model and will raise err::Exception if load failed.\n - dual_buff: direction [in], prepare dual input output buffer to accelarate forward, that is, when NPU is forwarding we not wait and prepare the next input buff.\nIf you want to ensure every time forward output the input's result, set this arg to false please.\nDefault true to ensure speed.\n" + }, + "args": [ + [ + "const std::string &", + "model", + "\"\"" + ], + [ + "bool", + "dual_buff", + "true" + ] + ], + "ret_type": null, + "static": false, + "def": "SelfLearnClassifier(const std::string &model = \"\", bool dual_buff = true)", + "py_def": "def __init__(self, model: str = '', dual_buff: bool = True) -> None" + }, + "load_model": { + "type": "func", + "name": "load_model", + "doc": { + "brief": "Load model from file, model format is .mud,\\nMUD file should contain [extra] section, have key-values:\\n- model_type: classifier_no_top\\n- input_type: rgb or bgr\\n- mean: 123.675, 116.28, 103.53\\n- scale: 0.017124753831663668, 0.01750700280112045, 0.017429193899782137", + "param": { + "model": "MUD model path" + }, + "return": "error code, if load failed, return error code", + "maixpy": "maix.nn.SelfLearnClassifier.load_model", + "py_doc": "Load model from file, model format is .mud,\nMUD file should contain [extra] section, have key-values:\n- model_type: classifier_no_top\n- input_type: rgb or bgr\n- mean: 123.675, 116.28, 103.53\n- scale: 0.017124753831663668, 0.01750700280112045, 0.017429193899782137\n\nArgs:\n - model: MUD model path\n\n\nReturns: error code, if load failed, return error code\n" + }, + "args": [ + [ + "const string &", + "model", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err load_model(const string &model)", + "py_def": "def load_model(self, model: str) -> maix.err.Err" + }, + "classify": { + "type": "func", + "name": "classify", + "doc": { + "brief": "Classify image", + "param": { + "img": "image, format should match model input_type\uff0c or will raise err.Exception", + "fit": "image resize fit mode, default Fit.FIT_COVER, see image.Fit." + }, + "throw": "If error occurred, will raise err::Exception, you can find reason in log, mostly caused by args error or hardware error.", + "return": "result, a list of (idx, distance), smaller distance means more similar. In C++, you need to delete it after use.", + "maixpy": "maix.nn.SelfLearnClassifier.classify", + "py_doc": "Classify image\n\nArgs:\n - img: image, format should match model input_type\uff0c or will raise err.Exception\n - fit: image resize fit mode, default Fit.FIT_COVER, see image.Fit.\n\n\nReturns: result, a list of (idx, distance), smaller distance means more similar. In C++, you need to delete it after use.\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "image::Fit", + "fit", + "image::FIT_COVER" + ] + ], + "ret_type": "std::vector>*", + "static": false, + "def": "std::vector> *classify(image::Image &img, image::Fit fit = image::FIT_COVER)", + "py_def": "def classify(self, img: maix.image.Image, fit: maix.image.Fit = ...) 
-> list[tuple[int, float]]" + }, + "add_class": { + "type": "func", + "name": "add_class", + "doc": { + "brief": "Add a class to recognize", + "param": { + "img": "Add a image as a new class", + "fit": "image resize fit mode, default Fit.FIT_COVER, see image.Fit." + }, + "maixpy": "maix.nn.SelfLearnClassifier.add_class", + "py_doc": "Add a class to recognize\n\nArgs:\n - img: Add a image as a new class\n - fit: image resize fit mode, default Fit.FIT_COVER, see image.Fit.\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "image::Fit", + "fit", + "image::FIT_COVER" + ] + ], + "ret_type": "void", + "static": false, + "def": "void add_class(image::Image &img, image::Fit fit = image::FIT_COVER)", + "py_def": "def add_class(self, img: maix.image.Image, fit: maix.image.Fit = ...) -> None" + }, + "class_num": { + "type": "func", + "name": "class_num", + "doc": { + "brief": "Get class number", + "maixpy": "maix.nn.SelfLearnClassifier.class_num", + "py_doc": "Get class number" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int class_num()", + "py_def": "def class_num(self) -> int" + }, + "rm_class": { + "type": "func", + "name": "rm_class", + "doc": { + "brief": "Remove a class", + "param": { + "idx": "index, value from 0 to class_num();" + }, + "maixpy": "maix.nn.SelfLearnClassifier.rm_class", + "py_doc": "Remove a class\n\nArgs:\n - idx: index, value from 0 to class_num();\n" + }, + "args": [ + [ + "int", + "idx", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err rm_class(int idx)", + "py_def": "def rm_class(self, idx: int) -> maix.err.Err" + }, + "add_sample": { + "type": "func", + "name": "add_sample", + "doc": { + "brief": "Add sample, you should call learn method after add some samples to learn classes.\\nSample image can be any of classes we already added.", + "param": { + "img": "Add a image as a new sample." + }, + "maixpy": "maix.nn.SelfLearnClassifier.add_sample", + "py_doc": "Add sample, you should call learn method after add some samples to learn classes.\nSample image can be any of classes we already added.\n\nArgs:\n - img: Add a image as a new sample.\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "image::Fit", + "fit", + "image::FIT_COVER" + ] + ], + "ret_type": "void", + "static": false, + "def": "void add_sample(image::Image &img, image::Fit fit = image::FIT_COVER)", + "py_def": "def add_sample(self, img: maix.image.Image, fit: maix.image.Fit = ...) 
-> None" + }, + "rm_sample": { + "type": "func", + "name": "rm_sample", + "doc": { + "brief": "Remove a sample", + "param": { + "idx": "index, value from 0 to sample_num();" + }, + "maixpy": "maix.nn.SelfLearnClassifier.rm_sample", + "py_doc": "Remove a sample\n\nArgs:\n - idx: index, value from 0 to sample_num();\n" + }, + "args": [ + [ + "int", + "idx", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err rm_sample(int idx)", + "py_def": "def rm_sample(self, idx: int) -> maix.err.Err" + }, + "sample_num": { + "type": "func", + "name": "sample_num", + "doc": { + "brief": "Get sample number", + "maixpy": "maix.nn.SelfLearnClassifier.sample_num", + "py_doc": "Get sample number" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int sample_num()", + "py_def": "def sample_num(self) -> int" + }, + "learn": { + "type": "func", + "name": "learn", + "doc": { + "brief": "Start auto learn class features from classes image and samples.\\nYou should call this method after you add some samples.", + "return": "learn epoch(times), 0 means learn nothing.", + "maixpy": "maix.nn.SelfLearnClassifier.learn", + "py_doc": "Start auto learn class features from classes image and samples.\nYou should call this method after you add some samples.\n\nReturns: learn epoch(times), 0 means learn nothing.\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int learn()", + "py_def": "def learn(self) -> int" + }, + "clear": { + "type": "func", + "name": "clear", + "doc": { + "brief": "Clear all class and samples", + "maixpy": "maix.nn.SelfLearnClassifier.clear", + "py_doc": "Clear all class and samples" + }, + "args": [], + "ret_type": "void", + "static": false, + "def": "void clear()", + "py_def": "def clear(self) -> None" + }, + "input_size": { + "type": "func", + "name": "input_size", + "doc": { + "brief": "Get model input size, only for image input", + "return": "model input size", + "maixpy": "maix.nn.SelfLearnClassifier.input_size", + "py_doc": "Get model input size, only for image input\n\nReturns: model input size\n" + }, + "args": [], + "ret_type": "image::Size", + "static": false, + "def": "image::Size input_size()", + "py_def": "def input_size(self) -> maix.image.Size" + }, + "input_width": { + "type": "func", + "name": "input_width", + "doc": { + "brief": "Get model input width, only for image input", + "return": "model input size of width", + "maixpy": "maix.nn.SelfLearnClassifier.input_width", + "py_doc": "Get model input width, only for image input\n\nReturns: model input size of width\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_width()", + "py_def": "def input_width(self) -> int" + }, + "input_height": { + "type": "func", + "name": "input_height", + "doc": { + "brief": "Get model input height, only for image input", + "return": "model input size of height", + "maixpy": "maix.nn.SelfLearnClassifier.input_height", + "py_doc": "Get model input height, only for image input\n\nReturns: model input size of height\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_height()", + "py_def": "def input_height(self) -> int" + }, + "input_format": { + "type": "func", + "name": "input_format", + "doc": { + "brief": "Get input image format, only for image input", + "return": "input image format, image::Format type.", + "maixpy": "maix.nn.SelfLearnClassifier.input_format", + "py_doc": "Get input image format, only for image input\n\nReturns: input image format, image::Format type.\n" + }, + 
"args": [], + "ret_type": "image::Format", + "static": false, + "def": "image::Format input_format()", + "py_def": "def input_format(self) -> maix.image.Format" + }, + "input_shape": { + "type": "func", + "name": "input_shape", + "doc": { + "brief": "Get input shape, if have multiple input, only return first input shape", + "return": "input shape, list type", + "maixpy": "maix.nn.SelfLearnClassifier.input_shape", + "py_doc": "Get input shape, if have multiple input, only return first input shape\n\nReturns: input shape, list type\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector input_shape()", + "py_def": "def input_shape(self) -> list[int]" + }, + "save": { + "type": "func", + "name": "save", + "doc": { + "brief": "Save features and labels to a binary file", + "param": { + "path": "file path to save, e.g. /root/my_classes.bin", + "labels": "class labels, can be None, or length must equal to class num, or will return err::Err" + }, + "return": "maix.err.Err if labels exists but length not equal to class num, or save file failed, or class num is 0.", + "maixpy": "maix.nn.SelfLearnClassifier.save", + "py_doc": "Save features and labels to a binary file\n\nArgs:\n - path: file path to save, e.g. /root/my_classes.bin\n - labels: class labels, can be None, or length must equal to class num, or will return err::Err\n\n\nReturns: maix.err.Err if labels exists but length not equal to class num, or save file failed, or class num is 0.\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ], + [ + "const std::vector &", + "labels", + "std::vector()" + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err save(const std::string &path, const std::vector &labels = std::vector())", + "py_def": "def save(self, path: str, labels: list[str] = []) -> maix.err.Err" + }, + "load": { + "type": "func", + "name": "load", + "doc": { + "brief": "Load features info from binary file", + "param": { + "path": "feature info binary file path, e.g. /root/my_classes.bin" + }, + "maixpy": "maix.nn.SelfLearnClassifier.load", + "py_doc": "Load features info from binary file\n\nArgs:\n - path: feature info binary file path, e.g. 
/root/my_classes.bin\n" + }, + "args": [ + [ + "const std::string &", + "path", + null + ] + ], + "ret_type": "std::vector", + "static": false, + "def": "std::vector load(const std::string &path)", + "py_def": "def load(self, path: str) -> list[str]" + }, + "labels": { + "type": "var", + "name": "labels", + "doc": { + "brief": "Labels list", + "maixpy": "maix.nn.SelfLearnClassifier.labels", + "py_doc": "Labels list" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector labels" + }, + "label_path": { + "type": "var", + "name": "label_path", + "doc": { + "brief": "Label file path", + "maixpy": "maix.nn.SelfLearnClassifier.label_path", + "py_doc": "Label file path" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::string label_path" + }, + "mean": { + "type": "var", + "name": "mean", + "doc": { + "brief": "Get mean value, list type", + "maixpy": "maix.nn.SelfLearnClassifier.mean", + "py_doc": "Get mean value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector mean" + }, + "scale": { + "type": "var", + "name": "scale", + "doc": { + "brief": "Get scale value, list type", + "maixpy": "maix.nn.SelfLearnClassifier.scale", + "py_doc": "Get scale value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector scale" + } + }, + "def": "class SelfLearnClassifier" + }, + "YOLOv5": { + "type": "class", + "name": "YOLOv5", + "doc": { + "brief": "YOLOv5 class", + "maixpy": "maix.nn.YOLOv5", + "py_doc": "YOLOv5 class" + }, + "members": { + "__init__": { + "type": "func", + "name": "YOLOv5", + "doc": { + "brief": "Constructor of YOLOv5 class", + "param": { + "model": "model path, default empty, you can load model later by load function.", + "dual_buff": "direction [in], prepare dual input output buffer to accelarate forward, that is, when NPU is forwarding we not wait and prepare the next input buff.\nIf you want to ensure every time forward output the input's result, set this arg to false please.\nDefault true to ensure speed." 
+ }, + "throw": "If model arg is not empty and load failed, will throw err::Exception.", + "maixpy": "maix.nn.YOLOv5.__init__", + "maixcdk": "maix.nn.YOLOv5.YOLOv5", + "py_doc": "Constructor of YOLOv5 class\n\nArgs:\n - model: model path, default empty, you can load model later by load function.\n - dual_buff: direction [in], prepare dual input output buffer to accelarate forward, that is, when NPU is forwarding we not wait and prepare the next input buff.\nIf you want to ensure every time forward output the input's result, set this arg to false please.\nDefault true to ensure speed.\n" + }, + "args": [ + [ + "const string &", + "model", + "\"\"" + ], + [ + "bool", + "dual_buff", + "true" + ] + ], + "ret_type": null, + "static": false, + "def": "YOLOv5(const string &model = \"\", bool dual_buff = true)", + "py_def": "def __init__(self, model: str = '', dual_buff: bool = True) -> None" + }, + "load": { + "type": "func", + "name": "load", + "doc": { + "brief": "Load model from file", + "param": { + "model": "Model path want to load" + }, + "return": "err::Err", + "maixpy": "maix.nn.YOLOv5.load", + "py_doc": "Load model from file\n\nArgs:\n - model: Model path want to load\n\n\nReturns: err::Err\n" + }, + "args": [ + [ + "const string &", + "model", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err load(const string &model)", + "py_def": "def load(self, model: str) -> maix.err.Err" + }, + "detect": { + "type": "func", + "name": "detect", + "doc": { + "brief": "Detect objects from image", + "param": { + "img": "Image want to detect, if image's size not match model input's, will auto resize with fit method.", + "conf_th": "Confidence threshold, default 0.5.", + "iou_th": "IoU threshold, default 0.45.", + "fit": "Resize method, default image.Fit.FIT_CONTAIN." + }, + "throw": "If image format not match model input format, will throw err::Exception.", + "return": "Object list. In C++, you should delete it after use.", + "maixpy": "maix.nn.YOLOv5.detect", + "py_doc": "Detect objects from image\n\nArgs:\n - img: Image want to detect, if image's size not match model input's, will auto resize with fit method.\n - conf_th: Confidence threshold, default 0.5.\n - iou_th: IoU threshold, default 0.45.\n - fit: Resize method, default image.Fit.FIT_CONTAIN.\n\n\nReturns: Object list. In C++, you should delete it after use.\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "float", + "conf_th", + "0.5" + ], + [ + "float", + "iou_th", + "0.45" + ], + [ + "maix::image::Fit", + "fit", + "maix::image::FIT_CONTAIN" + ] + ], + "ret_type": "std::vector*", + "static": false, + "def": "std::vector *detect(image::Image &img, float conf_th = 0.5, float iou_th = 0.45, maix::image::Fit fit = maix::image::FIT_CONTAIN)", + "py_def": "def detect(self, img: maix.image.Image, conf_th: float = 0.5, iou_th: float = 0.45, fit: maix.image.Fit = ...) 
-> list[Object]" + }, + "input_size": { + "type": "func", + "name": "input_size", + "doc": { + "brief": "Get model input size", + "return": "model input size", + "maixpy": "maix.nn.YOLOv5.input_size", + "py_doc": "Get model input size\n\nReturns: model input size\n" + }, + "args": [], + "ret_type": "image::Size", + "static": false, + "def": "image::Size input_size()", + "py_def": "def input_size(self) -> maix.image.Size" + }, + "input_width": { + "type": "func", + "name": "input_width", + "doc": { + "brief": "Get model input width", + "return": "model input size of width", + "maixpy": "maix.nn.YOLOv5.input_width", + "py_doc": "Get model input width\n\nReturns: model input size of width\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_width()", + "py_def": "def input_width(self) -> int" + }, + "input_height": { + "type": "func", + "name": "input_height", + "doc": { + "brief": "Get model input height", + "return": "model input size of height", + "maixpy": "maix.nn.YOLOv5.input_height", + "py_doc": "Get model input height\n\nReturns: model input size of height\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_height()", + "py_def": "def input_height(self) -> int" + }, + "input_format": { + "type": "func", + "name": "input_format", + "doc": { + "brief": "Get input image format", + "return": "input image format, image::Format type.", + "maixpy": "maix.nn.YOLOv5.input_format", + "py_doc": "Get input image format\n\nReturns: input image format, image::Format type.\n" + }, + "args": [], + "ret_type": "image::Format", + "static": false, + "def": "image::Format input_format()", + "py_def": "def input_format(self) -> maix.image.Format" + }, + "labels": { + "type": "var", + "name": "labels", + "doc": { + "brief": "Labels list", + "maixpy": "maix.nn.YOLOv5.labels", + "py_doc": "Labels list" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector labels" + }, + "label_path": { + "type": "var", + "name": "label_path", + "doc": { + "brief": "Label file path", + "maixpy": "maix.nn.YOLOv5.label_path", + "py_doc": "Label file path" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::string label_path" + }, + "mean": { + "type": "var", + "name": "mean", + "doc": { + "brief": "Get mean value, list type", + "maixpy": "maix.nn.YOLOv5.mean", + "py_doc": "Get mean value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector mean" + }, + "scale": { + "type": "var", + "name": "scale", + "doc": { + "brief": "Get scale value, list type", + "maixpy": "maix.nn.YOLOv5.scale", + "py_doc": "Get scale value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector scale" + }, + "anchors": { + "type": "var", + "name": "anchors", + "doc": { + "brief": "Get anchors", + "maixpy": "maix.nn.YOLOv5.anchors", + "py_doc": "Get anchors" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector anchors" + } + }, + "def": "class YOLOv5" + }, + "YOLO11": { + "type": "class", + "name": "YOLO11", + "doc": { + "brief": "YOLO11 class", + "maixpy": "maix.nn.YOLO11", + "py_doc": "YOLO11 class" + }, + "members": { + "__init__": { + "type": "func", + "name": "YOLO11", + "doc": { + "brief": "Constructor of YOLO11 class", + "param": { + "model": "model path, default empty, you can load model later by load function.", + "dual_buff": "direction [in], prepare dual input output buffer to accelarate forward, that is, when NPU is 
forwarding we not wait and prepare the next input buff.\nIf you want to ensure every time forward output the input's result, set this arg to false please.\nDefault true to ensure speed." + }, + "throw": "If model arg is not empty and load failed, will throw err::Exception.", + "maixpy": "maix.nn.YOLO11.__init__", + "maixcdk": "maix.nn.YOLO11.YOLO11", + "py_doc": "Constructor of YOLO11 class\n\nArgs:\n - model: model path, default empty, you can load model later by load function.\n - dual_buff: direction [in], prepare dual input output buffer to accelarate forward, that is, when NPU is forwarding we not wait and prepare the next input buff.\nIf you want to ensure every time forward output the input's result, set this arg to false please.\nDefault true to ensure speed.\n" + }, + "args": [ + [ + "const string &", + "model", + "\"\"" + ], + [ + "bool", + "dual_buff", + "true" + ] + ], + "ret_type": null, + "static": false, + "def": "YOLO11(const string &model = \"\", bool dual_buff = true)", + "py_def": "def __init__(self, model: str = '', dual_buff: bool = True) -> None" + }, + "load": { + "type": "func", + "name": "load", + "doc": { + "brief": "Load model from file", + "param": { + "model": "Model path want to load" + }, + "return": "err::Err", + "maixpy": "maix.nn.YOLO11.load", + "py_doc": "Load model from file\n\nArgs:\n - model: Model path want to load\n\n\nReturns: err::Err\n" + }, + "args": [ + [ + "const string &", + "model", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err load(const string &model)", + "py_def": "def load(self, model: str) -> maix.err.Err" + }, + "detect": { + "type": "func", + "name": "detect", + "doc": { + "brief": "Detect objects from image", + "param": { + "img": "Image want to detect, if image's size not match model input's, will auto resize with fit method.", + "conf_th": "Confidence threshold, default 0.5.", + "iou_th": "IoU threshold, default 0.45.", + "fit": "Resize method, default image.Fit.FIT_CONTAIN.", + "keypoint_th": "keypoint threshold, default 0.5, only for yolo11-pose model." + }, + "throw": "If image format not match model input format, will throw err::Exception.", + "return": "Object list. In C++, you should delete it after use.\nIf model is yolo11-pose, object's points have value, and if points' value < 0 means that point is invalid(conf < keypoint_th).", + "maixpy": "maix.nn.YOLO11.detect", + "py_doc": "Detect objects from image\n\nArgs:\n - img: Image want to detect, if image's size not match model input's, will auto resize with fit method.\n - conf_th: Confidence threshold, default 0.5.\n - iou_th: IoU threshold, default 0.45.\n - fit: Resize method, default image.Fit.FIT_CONTAIN.\n - keypoint_th: keypoint threshold, default 0.5, only for yolo11-pose model.\n\n\nReturns: Object list. 
In C++, you should delete it after use.\nIf model is yolo11-pose, object's points have value, and if points' value < 0 means that point is invalid(conf < keypoint_th).\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "float", + "conf_th", + "0.5" + ], + [ + "float", + "iou_th", + "0.45" + ], + [ + "maix::image::Fit", + "fit", + "maix::image::FIT_CONTAIN" + ], + [ + "float", + "keypoint_th", + "0.5" + ] + ], + "ret_type": "nn::Objects*", + "static": false, + "def": "nn::Objects *detect(image::Image &img, float conf_th = 0.5, float iou_th = 0.45, maix::image::Fit fit = maix::image::FIT_CONTAIN, float keypoint_th = 0.5)", + "py_def": "def detect(self, img: maix.image.Image, conf_th: float = 0.5, iou_th: float = 0.45, fit: maix.image.Fit = ..., keypoint_th: float = 0.5) -> Objects" + }, + "input_size": { + "type": "func", + "name": "input_size", + "doc": { + "brief": "Get model input size", + "return": "model input size", + "maixpy": "maix.nn.YOLO11.input_size", + "py_doc": "Get model input size\n\nReturns: model input size\n" + }, + "args": [], + "ret_type": "image::Size", + "static": false, + "def": "image::Size input_size()", + "py_def": "def input_size(self) -> maix.image.Size" + }, + "input_width": { + "type": "func", + "name": "input_width", + "doc": { + "brief": "Get model input width", + "return": "model input size of width", + "maixpy": "maix.nn.YOLO11.input_width", + "py_doc": "Get model input width\n\nReturns: model input size of width\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_width()", + "py_def": "def input_width(self) -> int" + }, + "input_height": { + "type": "func", + "name": "input_height", + "doc": { + "brief": "Get model input height", + "return": "model input size of height", + "maixpy": "maix.nn.YOLO11.input_height", + "py_doc": "Get model input height\n\nReturns: model input size of height\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_height()", + "py_def": "def input_height(self) -> int" + }, + "input_format": { + "type": "func", + "name": "input_format", + "doc": { + "brief": "Get input image format", + "return": "input image format, image::Format type.", + "maixpy": "maix.nn.YOLO11.input_format", + "py_doc": "Get input image format\n\nReturns: input image format, image::Format type.\n" + }, + "args": [], + "ret_type": "image::Format", + "static": false, + "def": "image::Format input_format()", + "py_def": "def input_format(self) -> maix.image.Format" + }, + "draw_pose": { + "type": "func", + "name": "draw_pose", + "doc": { + "brief": "Draw pose keypoints on image", + "param": { + "img": "image object, maix.image.Image type.", + "points": "keypoints, int list type, [x, y, x, y ...]", + "radius": "radius of points.", + "color": "color of points.", + "body": "true, if points' length is 17*2 and body is true, will draw lines as human body, if set to false won't draw lines, default true." 
+ }, + "maixpy": "maix.nn.YOLO11.draw_pose", + "py_doc": "Draw pose keypoints on image\n\nArgs:\n - img: image object, maix.image.Image type.\n - points: keypoits, int list type, [x, y, x, y ...]\n - radius: radius of points.\n - color: color of points.\n - body: true, if points' length is 17*2 and body is ture, will draw lines as human body, if set to false won't draw lines, default true.\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "std::vector", + "points", + null + ], + [ + "int", + "radius", + "4" + ], + [ + "image::Color", + "color", + "image::COLOR_RED" + ], + [ + "bool", + "body", + "true" + ] + ], + "ret_type": "void", + "static": false, + "def": "void draw_pose(image::Image &img, std::vector points, int radius = 4, image::Color color = image::COLOR_RED, bool body = true)", + "py_def": "def draw_pose(self, img: maix.image.Image, points: list[int], radius: int = 4, color: maix.image.Color = ..., body: bool = True) -> None" + }, + "draw_seg_mask": { + "type": "func", + "name": "draw_seg_mask", + "doc": { + "brief": "Draw segmentation on image", + "param": { + "img": "image object, maix.image.Image type.", + "seg_mask": "segmentation mask image by detect method, a grayscale image", + "threshold": "only mask's value > threshold will be draw on image, value from 0 to 255." + }, + "maixpy": "maix.nn.YOLO11.draw_seg_mask", + "py_doc": "Draw segmentation on image\n\nArgs:\n - img: image object, maix.image.Image type.\n - seg_mask: segmentation mask image by detect method, a grayscale image\n - threshold: only mask's value > threshold will be draw on image, value from 0 to 255.\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "image::Image &", + "seg_mask", + null + ], + [ + "int", + "threshold", + "127" + ] + ], + "ret_type": "void", + "static": false, + "def": "void draw_seg_mask(image::Image &img, int x, int y, image::Image &seg_mask, int threshold = 127)", + "py_def": "def draw_seg_mask(self, img: maix.image.Image, x: int, y: int, seg_mask: maix.image.Image, threshold: int = 127) -> None" + }, + "labels": { + "type": "var", + "name": "labels", + "doc": { + "brief": "Labels list", + "maixpy": "maix.nn.YOLO11.labels", + "py_doc": "Labels list" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector labels" + }, + "label_path": { + "type": "var", + "name": "label_path", + "doc": { + "brief": "Label file path", + "maixpy": "maix.nn.YOLO11.label_path", + "py_doc": "Label file path" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::string label_path" + }, + "mean": { + "type": "var", + "name": "mean", + "doc": { + "brief": "Get mean value, list type", + "maixpy": "maix.nn.YOLO11.mean", + "py_doc": "Get mean value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector mean" + }, + "scale": { + "type": "var", + "name": "scale", + "doc": { + "brief": "Get scale value, list type", + "maixpy": "maix.nn.YOLO11.scale", + "py_doc": "Get scale value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector scale" + } + }, + "def": "class YOLO11" + }, + "Classifier": { + "type": "class", + "name": "Classifier", + "doc": { + "brief": "Classifier", + "maixpy": "maix.nn.Classifier", + "py_doc": "Classifier" + }, + "members": { + "__init__": { + "type": "func", + "name": "Classifier", + "doc": { + "brief": "Construct a new Classifier object", + "param": { + 
"model": "MUD model path, if empty, will not load model, you can call load() later.\nif not empty, will load model and will raise err::Exception if load failed.", + "dual_buff": "direction [in], prepare dual input output buffer to accelarate forward, that is, when NPU is forwarding we not wait and prepare the next input buff.\nIf you want to ensure every time forward output the input's result, set this arg to false please.\nDefault true to ensure speed." + }, + "maixpy": "maix.nn.Classifier.__init__", + "maixcdk": "maix.nn.Classifier.Classifier", + "py_doc": "Construct a new Classifier object\n\nArgs:\n - model: MUD model path, if empty, will not load model, you can call load() later.\nif not empty, will load model and will raise err::Exception if load failed.\n - dual_buff: direction [in], prepare dual input output buffer to accelarate forward, that is, when NPU is forwarding we not wait and prepare the next input buff.\nIf you want to ensure every time forward output the input's result, set this arg to false please.\nDefault true to ensure speed.\n" + }, + "args": [ + [ + "const string &", + "model", + "\"\"" + ], + [ + "bool", + "dual_buff", + "true" + ] + ], + "ret_type": null, + "static": false, + "def": "Classifier(const string &model = \"\", bool dual_buff = true)", + "py_def": "def __init__(self, model: str = '', dual_buff: bool = True) -> None" + }, + "load": { + "type": "func", + "name": "load", + "doc": { + "brief": "Load model from file, model format is .mud,\\nMUD file should contain [extra] section, have key-values:\\n- model_type: classifier\\n- input_type: rgb or bgr\\n- mean: 123.675, 116.28, 103.53\\n- scale: 0.017124753831663668, 0.01750700280112045, 0.017429193899782137\\n- labels: imagenet_classes.txt", + "param": { + "model": "MUD model path" + }, + "return": "error code, if load failed, return error code", + "maixpy": "maix.nn.Classifier.load", + "py_doc": "Load model from file, model format is .mud,\nMUD file should contain [extra] section, have key-values:\n- model_type: classifier\n- input_type: rgb or bgr\n- mean: 123.675, 116.28, 103.53\n- scale: 0.017124753831663668, 0.01750700280112045, 0.017429193899782137\n- labels: imagenet_classes.txt\n\nArgs:\n - model: MUD model path\n\n\nReturns: error code, if load failed, return error code\n" + }, + "args": [ + [ + "const string &", + "model", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err load(const string &model)", + "py_def": "def load(self, model: str) -> maix.err.Err" + }, + "classify": { + "type": "func", + "name": "classify", + "doc": { + "brief": "Forward image to model, get result. Only for image input, use classify_raw for tensor input.", + "param": { + "img": "image, format should match model input_type\uff0c or will raise err.Exception", + "softmax": "if true, will do softmax to result, or will return raw value", + "fit": "image resize fit mode, default Fit.FIT_COVER, see image.Fit." + }, + "throw": "If error occurred, will raise err::Exception, you can find reason in log, mostly caused by args error or hardware error.", + "return": "result, a list of (label, score). If in dual_buff mode, value can be one element list and score is zero when not ready. In C++, you need to delete it after use.", + "maixpy": "maix.nn.Classifier.classify", + "py_doc": "Forward image to model, get result. 
Only for image input, use classify_raw for tensor input.\n\nArgs:\n - img: image, format should match model input_type\uff0c or will raise err.Exception\n - softmax: if true, will do softmax to result, or will return raw value\n - fit: image resize fit mode, default Fit.FIT_COVER, see image.Fit.\n\n\nReturns: result, a list of (label, score). If in dual_buff mode, value can be one element list and score is zero when not ready. In C++, you need to delete it after use.\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "bool", + "softmax", + "true" + ], + [ + "image::Fit", + "fit", + "image::FIT_COVER" + ] + ], + "ret_type": "std::vector>*", + "static": false, + "def": "std::vector> *classify(image::Image &img, bool softmax = true, image::Fit fit = image::FIT_COVER)", + "py_def": "def classify(self, img: maix.image.Image, softmax: bool = True, fit: maix.image.Fit = ...) -> list[tuple[int, float]]" + }, + "classify_raw": { + "type": "func", + "name": "classify_raw", + "doc": { + "brief": "Forward tensor data to model, get result", + "param": { + "data": "tensor data, format should match model input_type\uff0c or will raise err.Excetion", + "softmax": "if true, will do softmax to result, or will return raw value" + }, + "throw": "If error occurred, will raise err::Exception, you can find reason in log, mostly caused by args error or hardware error.", + "return": "result, a list of (label, score). In C++, you need to delete it after use.", + "maixpy": "maix.nn.Classifier.classify_raw", + "py_doc": "Forward tensor data to model, get result\n\nArgs:\n - data: tensor data, format should match model input_type\uff0c or will raise err.Excetion\n - softmax: if true, will do softmax to result, or will return raw value\n\n\nReturns: result, a list of (label, score). 
In C++, you need to delete it after use.\n" + }, + "args": [ + [ + "tensor::Tensor &", + "data", + null + ], + [ + "bool", + "softmax", + "true" + ] + ], + "ret_type": "std::vector>*", + "static": false, + "def": "std::vector> *classify_raw(tensor::Tensor &data, bool softmax = true)", + "py_def": "def classify_raw(self, data: maix.tensor.Tensor, softmax: bool = True) -> list[tuple[int, float]]" + }, + "input_size": { + "type": "func", + "name": "input_size", + "doc": { + "brief": "Get model input size, only for image input", + "return": "model input size", + "maixpy": "maix.nn.Classifier.input_size", + "py_doc": "Get model input size, only for image input\n\nReturns: model input size\n" + }, + "args": [], + "ret_type": "image::Size", + "static": false, + "def": "image::Size input_size()", + "py_def": "def input_size(self) -> maix.image.Size" + }, + "input_width": { + "type": "func", + "name": "input_width", + "doc": { + "brief": "Get model input width, only for image input", + "return": "model input size of width", + "maixpy": "maix.nn.Classifier.input_width", + "py_doc": "Get model input width, only for image input\n\nReturns: model input size of width\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_width()", + "py_def": "def input_width(self) -> int" + }, + "input_height": { + "type": "func", + "name": "input_height", + "doc": { + "brief": "Get model input height, only for image input", + "return": "model input size of height", + "maixpy": "maix.nn.Classifier.input_height", + "py_doc": "Get model input height, only for image input\n\nReturns: model input size of height\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_height()", + "py_def": "def input_height(self) -> int" + }, + "input_format": { + "type": "func", + "name": "input_format", + "doc": { + "brief": "Get input image format, only for image input", + "return": "input image format, image::Format type.", + "maixpy": "maix.nn.Classifier.input_format", + "py_doc": "Get input image format, only for image input\n\nReturns: input image format, image::Format type.\n" + }, + "args": [], + "ret_type": "image::Format", + "static": false, + "def": "image::Format input_format()", + "py_def": "def input_format(self) -> maix.image.Format" + }, + "input_shape": { + "type": "func", + "name": "input_shape", + "doc": { + "brief": "Get input shape, if have multiple input, only return first input shape", + "return": "input shape, list type", + "maixpy": "maix.nn.Classifier.input_shape", + "py_doc": "Get input shape, if have multiple input, only return first input shape\n\nReturns: input shape, list type\n" + }, + "args": [], + "ret_type": "std::vector", + "static": false, + "def": "std::vector input_shape()", + "py_def": "def input_shape(self) -> list[int]" + }, + "labels": { + "type": "var", + "name": "labels", + "doc": { + "brief": "Labels list", + "maixpy": "maix.nn.Classifier.labels", + "py_doc": "Labels list" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector labels" + }, + "label_path": { + "type": "var", + "name": "label_path", + "doc": { + "brief": "Label file path", + "maixpy": "maix.nn.Classifier.label_path", + "py_doc": "Label file path" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::string label_path" + }, + "mean": { + "type": "var", + "name": "mean", + "doc": { + "brief": "Get mean value, list type", + "maixpy": "maix.nn.Classifier.mean", + "py_doc": "Get mean value, list type" + }, + "value": null, + 
"static": false, + "readonly": false, + "def": "std::vector mean" + }, + "scale": { + "type": "var", + "name": "scale", + "doc": { + "brief": "Get scale value, list type", + "maixpy": "maix.nn.Classifier.scale", + "py_doc": "Get scale value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector scale" + } + }, + "def": "class Classifier" + }, + "Retinaface": { + "type": "class", + "name": "Retinaface", + "doc": { + "brief": "Retinaface class", + "maixpy": "maix.nn.Retinaface", + "py_doc": "Retinaface class" + }, + "members": { + "__init__": { + "type": "func", + "name": "Retinaface", + "doc": { + "brief": "Constructor of Retinaface class", + "param": { + "model": "model path, default empty, you can load model later by load function.", + "dual_buff": "direction [in], prepare dual input output buffer to accelarate forward, that is, when NPU is forwarding we not wait and prepare the next input buff.\nIf you want to ensure every time forward output the input's result, set this arg to false please.\nDefault true to ensure speed." + }, + "throw": "If model arg is not empty and load failed, will throw err::Exception.", + "maixpy": "maix.nn.Retinaface.__init__", + "maixcdk": "maix.nn.Retinaface.Retinaface", + "py_doc": "Constructor of Retinaface class\n\nArgs:\n - model: model path, default empty, you can load model later by load function.\n - dual_buff: direction [in], prepare dual input output buffer to accelarate forward, that is, when NPU is forwarding we not wait and prepare the next input buff.\nIf you want to ensure every time forward output the input's result, set this arg to false please.\nDefault true to ensure speed.\n" + }, + "args": [ + [ + "const string &", + "model", + "\"\"" + ], + [ + "bool", + "dual_buff", + "true" + ] + ], + "ret_type": null, + "static": false, + "def": "Retinaface(const string &model = \"\", bool dual_buff = true)", + "py_def": "def __init__(self, model: str = '', dual_buff: bool = True) -> None" + }, + "load": { + "type": "func", + "name": "load", + "doc": { + "brief": "Load model from file", + "param": { + "model": "Model path want to load" + }, + "return": "err::Err", + "maixpy": "maix.nn.Retinaface.load", + "py_doc": "Load model from file\n\nArgs:\n - model: Model path want to load\n\n\nReturns: err::Err\n" + }, + "args": [ + [ + "const string &", + "model", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err load(const string &model)", + "py_def": "def load(self, model: str) -> maix.err.Err" + }, + "detect": { + "type": "func", + "name": "detect", + "doc": { + "brief": "Detect objects from image", + "param": { + "img": "Image want to detect, if image's size not match model input's, will auto resize with fit method.", + "conf_th": "Confidence threshold, default 0.4.", + "iou_th": "IoU threshold, default 0.45.", + "fit": "Resize method, default image.Fit.FIT_CONTAIN." + }, + "throw": "If image format not match model input format, will throw err::Exception.", + "return": "Object list. In C++, you should delete it after use.", + "maixpy": "maix.nn.Retinaface.detect", + "py_doc": "Detect objects from image\n\nArgs:\n - img: Image want to detect, if image's size not match model input's, will auto resize with fit method.\n - conf_th: Confidence threshold, default 0.4.\n - iou_th: IoU threshold, default 0.45.\n - fit: Resize method, default image.Fit.FIT_CONTAIN.\n\n\nReturns: Object list. 
In C++, you should delete it after use.\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "float", + "conf_th", + "0.4" + ], + [ + "float", + "iou_th", + "0.45" + ], + [ + "maix::image::Fit", + "fit", + "maix::image::FIT_CONTAIN" + ] + ], + "ret_type": "std::vector*", + "static": false, + "def": "std::vector *detect(image::Image &img, float conf_th = 0.4, float iou_th = 0.45, maix::image::Fit fit = maix::image::FIT_CONTAIN)", + "py_def": "def detect(self, img: maix.image.Image, conf_th: float = 0.4, iou_th: float = 0.45, fit: maix.image.Fit = ...) -> list[Object]" + }, + "input_size": { + "type": "func", + "name": "input_size", + "doc": { + "brief": "Get model input size", + "return": "model input size", + "maixpy": "maix.nn.Retinaface.input_size", + "py_doc": "Get model input size\n\nReturns: model input size\n" + }, + "args": [], + "ret_type": "image::Size", + "static": false, + "def": "image::Size input_size()", + "py_def": "def input_size(self) -> maix.image.Size" + }, + "input_width": { + "type": "func", + "name": "input_width", + "doc": { + "brief": "Get model input width", + "return": "model input size of width", + "maixpy": "maix.nn.Retinaface.input_width", + "py_doc": "Get model input width\n\nReturns: model input size of width\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_width()", + "py_def": "def input_width(self) -> int" + }, + "input_height": { + "type": "func", + "name": "input_height", + "doc": { + "brief": "Get model input height", + "return": "model input size of height", + "maixpy": "maix.nn.Retinaface.input_height", + "py_doc": "Get model input height\n\nReturns: model input size of height\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_height()", + "py_def": "def input_height(self) -> int" + }, + "input_format": { + "type": "func", + "name": "input_format", + "doc": { + "brief": "Get input image format", + "return": "input image format, image::Format type.", + "maixpy": "maix.nn.Retinaface.input_format", + "py_doc": "Get input image format\n\nReturns: input image format, image::Format type.\n" + }, + "args": [], + "ret_type": "image::Format", + "static": false, + "def": "image::Format input_format()", + "py_def": "def input_format(self) -> maix.image.Format" + }, + "mean": { + "type": "var", + "name": "mean", + "doc": { + "brief": "Get mean value, list type", + "maixpy": "maix.nn.Retinaface.mean", + "py_doc": "Get mean value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector mean" + }, + "scale": { + "type": "var", + "name": "scale", + "doc": { + "brief": "Get scale value, list type", + "maixpy": "maix.nn.Retinaface.scale", + "py_doc": "Get scale value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector scale" + } + }, + "def": "class Retinaface" + }, + "F": { + "type": "module", + "doc": { + "brief": "maix.nn.F module" + }, + "members": { + "softmax": { + "type": "func", + "name": "softmax", + "doc": { + "brief": "Softmax, only support 1D tensor, multi-dimension tensor will be treated as 1D tensor", + "param": { + "tensor": "input tensor", + "replace": "change input tensor data directly, if not, will create a new tensor" + }, + "throw": "If arg error, will raise err.Exception error", + "return": "output tensor, if arg replace is true, return the arg tensor's address.\nIf not replace, return a new object, so In C++, you should delete it manually in this case!", + "maixpy": 
"maix.nn.F.softmax", + "py_doc": "Softmax, only support 1D tensor, multi-dimension tensor will be treated as 1D tensor\n\nArgs:\n - tensor: input tensor\n - replace: change input tensor data directly, if not, will create a new tensor\n\n\nReturns: output tensor, if arg replace is true, return the arg tensor's address.\nIf not replace, return a new object, so In C++, you should delete it manually in this case!\n" + }, + "args": [ + [ + "tensor::Tensor *", + "tensor", + null + ], + [ + "bool", + "replace", + null + ] + ], + "ret_type": "tensor::Tensor*", + "static": false, + "def": "tensor::Tensor *softmax(tensor::Tensor *tensor, bool replace)", + "py_def": "def softmax(tensor: maix.tensor.Tensor, replace: bool) -> maix.tensor.Tensor" + } + }, + "auto_add": true + }, + "FaceDetector": { + "type": "class", + "name": "FaceDetector", + "doc": { + "brief": "FaceDetector class", + "maixpy": "maix.nn.FaceDetector", + "py_doc": "FaceDetector class" + }, + "members": { + "__init__": { + "type": "func", + "name": "FaceDetector", + "doc": { + "brief": "Constructor of FaceDetector class", + "param": { + "model": "model path, default empty, you can load model later by load function.", + "dual_buff": "direction [in], prepare dual input output buffer to accelarate forward, that is, when NPU is forwarding we not wait and prepare the next input buff.\nIf you want to ensure every time forward output the input's result, set this arg to false please.\nDefault true to ensure speed." + }, + "throw": "If model arg is not empty and load failed, will throw err::Exception.", + "maixpy": "maix.nn.FaceDetector.__init__", + "maixcdk": "maix.nn.FaceDetector.FaceDetector", + "py_doc": "Constructor of FaceDetector class\n\nArgs:\n - model: model path, default empty, you can load model later by load function.\n - dual_buff: direction [in], prepare dual input output buffer to accelarate forward, that is, when NPU is forwarding we not wait and prepare the next input buff.\nIf you want to ensure every time forward output the input's result, set this arg to false please.\nDefault true to ensure speed.\n" + }, + "args": [ + [ + "const string &", + "model", + "\"\"" + ], + [ + "bool", + "dual_buff", + "true" + ] + ], + "ret_type": null, + "static": false, + "def": "FaceDetector(const string &model = \"\", bool dual_buff = true)", + "py_def": "def __init__(self, model: str = '', dual_buff: bool = True) -> None" + }, + "load": { + "type": "func", + "name": "load", + "doc": { + "brief": "Load model from file", + "param": { + "model": "Model path want to load" + }, + "return": "err::Err", + "maixpy": "maix.nn.FaceDetector.load", + "py_doc": "Load model from file\n\nArgs:\n - model: Model path want to load\n\n\nReturns: err::Err\n" + }, + "args": [ + [ + "const string &", + "model", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err load(const string &model)", + "py_def": "def load(self, model: str) -> maix.err.Err" + }, + "detect": { + "type": "func", + "name": "detect", + "doc": { + "brief": "Detect objects from image", + "param": { + "img": "Image want to detect, if image's size not match model input's, will auto resize with fit method.", + "conf_th": "Confidence threshold, default 0.5.", + "iou_th": "IoU threshold, default 0.45.", + "fit": "Resize method, default image.Fit.FIT_CONTAIN." + }, + "throw": "If image format not match model input format, will throw err::Exception.", + "return": "Object list. 
In C++, you should delete it after use.", + "maixpy": "maix.nn.FaceDetector.detect", + "py_doc": "Detect objects from image\n\nArgs:\n - img: Image want to detect, if image's size not match model input's, will auto resize with fit method.\n - conf_th: Confidence threshold, default 0.5.\n - iou_th: IoU threshold, default 0.45.\n - fit: Resize method, default image.Fit.FIT_CONTAIN.\n\n\nReturns: Object list. In C++, you should delete it after use.\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "float", + "conf_th", + "0.5" + ], + [ + "float", + "iou_th", + "0.45" + ], + [ + "maix::image::Fit", + "fit", + "maix::image::FIT_CONTAIN" + ] + ], + "ret_type": "std::vector*", + "static": false, + "def": "std::vector *detect(image::Image &img, float conf_th = 0.5, float iou_th = 0.45, maix::image::Fit fit = maix::image::FIT_CONTAIN)", + "py_def": "def detect(self, img: maix.image.Image, conf_th: float = 0.5, iou_th: float = 0.45, fit: maix.image.Fit = ...) -> list[Object]" + }, + "input_size": { + "type": "func", + "name": "input_size", + "doc": { + "brief": "Get model input size", + "return": "model input size", + "maixpy": "maix.nn.FaceDetector.input_size", + "py_doc": "Get model input size\n\nReturns: model input size\n" + }, + "args": [], + "ret_type": "image::Size", + "static": false, + "def": "image::Size input_size()", + "py_def": "def input_size(self) -> maix.image.Size" + }, + "input_width": { + "type": "func", + "name": "input_width", + "doc": { + "brief": "Get model input width", + "return": "model input size of width", + "maixpy": "maix.nn.FaceDetector.input_width", + "py_doc": "Get model input width\n\nReturns: model input size of width\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_width()", + "py_def": "def input_width(self) -> int" + }, + "input_height": { + "type": "func", + "name": "input_height", + "doc": { + "brief": "Get model input height", + "return": "model input size of height", + "maixpy": "maix.nn.FaceDetector.input_height", + "py_doc": "Get model input height\n\nReturns: model input size of height\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_height()", + "py_def": "def input_height(self) -> int" + }, + "input_format": { + "type": "func", + "name": "input_format", + "doc": { + "brief": "Get input image format", + "return": "input image format, image::Format type.", + "maixpy": "maix.nn.FaceDetector.input_format", + "py_doc": "Get input image format\n\nReturns: input image format, image::Format type.\n" + }, + "args": [], + "ret_type": "image::Format", + "static": false, + "def": "image::Format input_format()", + "py_def": "def input_format(self) -> maix.image.Format" + }, + "mean": { + "type": "var", + "name": "mean", + "doc": { + "brief": "Get mean value, list type", + "maixpy": "maix.nn.FaceDetector.mean", + "py_doc": "Get mean value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector mean" + }, + "scale": { + "type": "var", + "name": "scale", + "doc": { + "brief": "Get scale value, list type", + "maixpy": "maix.nn.FaceDetector.scale", + "py_doc": "Get scale value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector scale" + } + }, + "def": "class FaceDetector" + }, + "PP_OCR": { + "type": "class", + "name": "PP_OCR", + "doc": { + "brief": "PP_OCR class", + "maixpy": "maix.nn.PP_OCR", + "py_doc": "PP_OCR class" + }, + "members": { + "__init__": { + "type": "func", + "name": "PP_OCR", + 
"doc": { + "brief": "Constructor of PP_OCR class", + "param": { + "model": "model path, default empty, you can load model later by load function." + }, + "throw": "If model arg is not empty and load failed, will throw err::Exception.", + "maixpy": "maix.nn.PP_OCR.__init__", + "maixcdk": "maix.nn.PP_OCR.PP_OCR", + "py_doc": "Constructor of PP_OCR class\n\nArgs:\n - model: model path, default empty, you can load model later by load function.\n" + }, + "args": [ + [ + "const string &", + "model", + "\"\"" + ] + ], + "ret_type": null, + "static": false, + "def": "PP_OCR(const string &model = \"\")", + "py_def": "def __init__(self, model: str = '') -> None" + }, + "load": { + "type": "func", + "name": "load", + "doc": { + "brief": "Load model from file", + "param": { + "model": "Model path want to load" + }, + "return": "err::Err", + "maixpy": "maix.nn.PP_OCR.load", + "py_doc": "Load model from file\n\nArgs:\n - model: Model path want to load\n\n\nReturns: err::Err\n" + }, + "args": [ + [ + "const string &", + "model", + null + ] + ], + "ret_type": "err::Err", + "static": false, + "def": "err::Err load(const string &model)", + "py_def": "def load(self, model: str) -> maix.err.Err" + }, + "detect": { + "type": "func", + "name": "detect", + "doc": { + "brief": "Detect objects from image", + "param": { + "img": "Image want to detect, if image's size not match model input's, will auto resize with fit method.", + "thresh": "Confidence threshold where pixels have charactor, default 0.3.", + "box_thresh": "Box threshold, the box prob higher than this value will be valid, default 0.6.", + "fit": "Resize method, default image.Fit.FIT_CONTAIN.", + "char_box": "Calculate every charactor's box, default false, if true then you can get charactor's box by nn.OCR_Object's char_boxes attribute." + }, + "throw": "If image format not match model input format or no memory, will throw err::Exception.", + "return": "nn.OCR_Objects type. In C++, you should delete it after use.", + "maixpy": "maix.nn.PP_OCR.detect", + "py_doc": "Detect objects from image\n\nArgs:\n - img: Image want to detect, if image's size not match model input's, will auto resize with fit method.\n - thresh: Confidence threshold where pixels have charactor, default 0.3.\n - box_thresh: Box threshold, the box prob higher than this value will be valid, default 0.6.\n - fit: Resize method, default image.Fit.FIT_CONTAIN.\n - char_box: Calculate every charactor's box, default false, if true then you can get charactor's box by nn.OCR_Object's char_boxes attribute.\n\n\nReturns: nn.OCR_Objects type. 
In C++, you should delete it after use.\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "float", + "thresh", + "0.3" + ], + [ + "float", + "box_thresh", + "0.6" + ], + [ + "maix::image::Fit", + "fit", + "maix::image::FIT_CONTAIN" + ], + [ + "bool", + "char_box", + "false" + ] + ], + "ret_type": "nn::OCR_Objects*", + "static": false, + "def": "nn::OCR_Objects *detect(image::Image &img, float thresh = 0.3, float box_thresh = 0.6, maix::image::Fit fit = maix::image::FIT_CONTAIN, bool char_box = false)", + "py_def": "def detect(self, img: maix.image.Image, thresh: float = 0.3, box_thresh: float = 0.6, fit: maix.image.Fit = ..., char_box: bool = False) -> OCR_Objects" + }, + "recognize": { + "type": "func", + "name": "recognize", + "doc": { + "brief": "Only recognize, not detect", + "param": { + "img": "image to recognize chractors, can be a stanrd cropped charactors image,\nif crop image not standard, you can use box_points to assgin where the charactors' 4 corner is.", + "box_points": "list type, length must be 8 or 0, default empty means not transfer image to standard image.\n4 points postiion, format: [x1, y1, x2, y2, x3, y3, x4, y4], point 1 at the left-top, point 2 right-top...", + "char_box": "Calculate every charactor's box, default false, if true then you can get charactor's box by nn.OCR_Object's char_boxes attribute." + }, + "maixpy": "maix.nn.PP_OCR.recognize", + "py_doc": "Only recognize, not detect\n\nArgs:\n - img: image to recognize chractors, can be a stanrd cropped charactors image,\nif crop image not standard, you can use box_points to assgin where the charactors' 4 corner is.\n - box_points: list type, length must be 8 or 0, default empty means not transfer image to standard image.\n4 points postiion, format: [x1, y1, x2, y2, x3, y3, x4, y4], point 1 at the left-top, point 2 right-top...\n - char_box: Calculate every charactor's box, default false, if true then you can get charactor's box by nn.OCR_Object's char_boxes attribute.\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "const std::vector &", + "box_points", + "std::vector()" + ] + ], + "ret_type": "nn::OCR_Object*", + "static": false, + "def": "nn::OCR_Object *recognize(image::Image &img, const std::vector &box_points = std::vector())", + "py_def": "def recognize(self, img: maix.image.Image, box_points: list[int] = []) -> OCR_Object" + }, + "draw_seg_mask": { + "type": "func", + "name": "draw_seg_mask", + "doc": { + "brief": "Draw segmentation on image", + "param": { + "img": "image object, maix.image.Image type.", + "seg_mask": "segmentation mask image by detect method, a grayscale image", + "threshold": "only mask's value > threshold will be draw on image, value from 0 to 255." 
+ }, + "maixpy": "maix.nn.PP_OCR.draw_seg_mask", + "py_doc": "Draw segmentation on image\n\nArgs:\n - img: image object, maix.image.Image type.\n - seg_mask: segmentation mask image by detect method, a grayscale image\n - threshold: only mask's value > threshold will be draw on image, value from 0 to 255.\n" + }, + "args": [ + [ + "image::Image &", + "img", + null + ], + [ + "int", + "x", + null + ], + [ + "int", + "y", + null + ], + [ + "image::Image &", + "seg_mask", + null + ], + [ + "int", + "threshold", + "127" + ] + ], + "ret_type": "void", + "static": false, + "def": "void draw_seg_mask(image::Image &img, int x, int y, image::Image &seg_mask, int threshold = 127)", + "py_def": "def draw_seg_mask(self, img: maix.image.Image, x: int, y: int, seg_mask: maix.image.Image, threshold: int = 127) -> None" + }, + "input_size": { + "type": "func", + "name": "input_size", + "doc": { + "brief": "Get model input size", + "return": "model input size", + "maixpy": "maix.nn.PP_OCR.input_size", + "py_doc": "Get model input size\n\nReturns: model input size\n" + }, + "args": [], + "ret_type": "image::Size", + "static": false, + "def": "image::Size input_size()", + "py_def": "def input_size(self) -> maix.image.Size" + }, + "input_width": { + "type": "func", + "name": "input_width", + "doc": { + "brief": "Get model input width", + "return": "model input size of width", + "maixpy": "maix.nn.PP_OCR.input_width", + "py_doc": "Get model input width\n\nReturns: model input size of width\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_width()", + "py_def": "def input_width(self) -> int" + }, + "input_height": { + "type": "func", + "name": "input_height", + "doc": { + "brief": "Get model input height", + "return": "model input size of height", + "maixpy": "maix.nn.PP_OCR.input_height", + "py_doc": "Get model input height\n\nReturns: model input size of height\n" + }, + "args": [], + "ret_type": "int", + "static": false, + "def": "int input_height()", + "py_def": "def input_height(self) -> int" + }, + "input_format": { + "type": "func", + "name": "input_format", + "doc": { + "brief": "Get input image format", + "return": "input image format, image::Format type.", + "maixpy": "maix.nn.PP_OCR.input_format", + "py_doc": "Get input image format\n\nReturns: input image format, image::Format type.\n" + }, + "args": [], + "ret_type": "image::Format", + "static": false, + "def": "image::Format input_format()", + "py_def": "def input_format(self) -> maix.image.Format" + }, + "mean": { + "type": "var", + "name": "mean", + "doc": { + "brief": "Get mean value, list type", + "maixpy": "maix.nn.PP_OCR.mean", + "py_doc": "Get mean value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector mean" + }, + "scale": { + "type": "var", + "name": "scale", + "doc": { + "brief": "Get scale value, list type", + "maixpy": "maix.nn.PP_OCR.scale", + "py_doc": "Get scale value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector scale" + }, + "rec_mean": { + "type": "var", + "name": "rec_mean", + "doc": { + "brief": "Get mean value, list type", + "maixpy": "maix.nn.PP_OCR.rec_mean", + "py_doc": "Get mean value, list type" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector rec_mean" + }, + "rec_scale": { + "type": "var", + "name": "rec_scale", + "doc": { + "brief": "Get scale value, list type", + "maixpy": "maix.nn.PP_OCR.rec_scale", + "py_doc": "Get scale value, list type" + }, + "value": 
null, + "static": false, + "readonly": false, + "def": "std::vector rec_scale" + }, + "labels": { + "type": "var", + "name": "labels", + "doc": { + "brief": "labels (charactors)", + "maixpy": "maix.nn.PP_OCR.labels", + "py_doc": "labels (charactors)" + }, + "value": null, + "static": false, + "readonly": false, + "def": "std::vector labels" + }, + "det": { + "type": "var", + "name": "det", + "doc": { + "brief": "model have detect model", + "maixpy": "maix.nn.PP_OCR.det", + "py_doc": "model have detect model" + }, + "value": null, + "static": false, + "readonly": false, + "def": "bool det" + }, + "rec": { + "type": "var", + "name": "rec", + "doc": { + "brief": "model have recognize model", + "maixpy": "maix.nn.PP_OCR.rec", + "py_doc": "model have recognize model" + }, + "value": null, + "static": false, + "readonly": false, + "def": "bool rec" + } + }, + "def": "class PP_OCR" + } + }, + "auto_add": true + } + } + } + } +} \ No newline at end of file diff --git a/maixpy/api/config.json b/maixpy/api/config.json new file mode 100644 index 00000000..ff4f116e --- /dev/null +++ b/maixpy/api/config.json @@ -0,0 +1,4 @@ +{ + "import": "config_en", + "name": "MaixPy API Reference" +} diff --git a/maixpy/api/index.html b/maixpy/api/index.html new file mode 100644 index 00000000..e667bab3 --- /dev/null +++ b/maixpy/api/index.html @@ -0,0 +1,432 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy API -- Maix AI machine vision platform Python API - MaixPy + + + + + + + + + + +
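The maix.nn classes dumped in the JSON above (YOLO11, Classifier, Retinaface, FaceDetector, PP_OCR) are easier to read next to a usage sketch. The following is a minimal, hypothetical MaixPy example of YOLO11.detect as documented above: the model path is an assumption, the constructor is assumed to accept a model path the same way Classifier does, and the elements of the returned nn.Objects are assumed to expose x, y, w, h, class_id and score.

from maix import camera, display, image, nn, app

# Assumed model path -- replace with a YOLO11 .mud model actually present on the device
detector = nn.YOLO11(model="/root/models/yolo11n.mud")
cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
disp = display.Display()

while not app.need_exit():
    img = cam.read()
    # thresholds mirror the defaults documented above
    objs = detector.detect(img, conf_th=0.5, iou_th=0.45)
    for obj in objs:
        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
        img.draw_string(obj.x, obj.y, f"{detector.labels[obj.class_id]}: {obj.score:.2f}", color=image.COLOR_RED)
    disp.show(img)

Classifier is used the same way: construct it with a classification .mud model and call classify(img) to get a list of (label index, score) pairs.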
+ +
+ +
+
+
+
+ +

MaixPy API -- Maix AI machine vision platform Python API

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

You can read API doc at MaixPy API on Sipeed Wiki

+

If you want to preview the API doc offline, build MaixPy; the API doc will be generated in the MaixPy/docs/api/ directory.

+
+

For MaixPy developers: this API documentation is generated from the source code, DO NOT edit this file manually!

+
+

MaixPy API documentation, modules:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
modulebrief
maix.errmaix.err module
maix.tensormaix.tensor module
maix.imagemaix.image module, image related definition and functions
maix.cameramaix.camera module, access camera device and get image from it
maix.displaymaix.display module, control display device and show image on it
maix.ext_devmaix.ext_dev module
maix.audiomaix.audio module
maix.trackermaix.tracker module
maix.httpmaix.http module
maix.rtspmaix.rtsp module
maix.rtmpmaix.rtmp module
maix.touchscreenmaix.touchscreen module
maix.videomaix.video module
maix.networkmaix.network module
maix.commmaix.comm module
maix.fsmaix.fs module
maix.appmaix.app module
maix.protocolmaix.protocol module
maix.timemaix.time module
maix.exampleexample module, this will be maix.example module in MaixPy, maix::example namespace in MaixCDK
maix.utilmaix.util module
maix.threadmaix.thread module
maix.sysmaix.sys module
maix.i18nmaix.i18n module
maix.peripheralChip's peripheral driver
maix.nnmaix.nn module
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/app.html b/maixpy/api/maix/app.html new file mode 100644 index 00000000..9b7b8703 --- /dev/null +++ b/maixpy/api/maix/app.html @@ -0,0 +1,1327 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.app - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.app

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.app module

+
+

You can use maix.app to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

app_id

+ +
def app_id() -> str
+
+

Get current APP ID.

+ + + + + + + + + + + + + +
itemdescription
returnAPP ID.
+
+

C++ definition code:

+ +
string app_id()
+
+
+

set_app_id

+ +
def set_app_id(app_id: str) -> str
+
+

Set current APP ID.

+ + + + + + + + + + + + + +
itemdescription
paramapp_id: APP ID.
+
+

C++ definition code:

+ +
string set_app_id(const string &app_id)
+
+
+

get_apps_info_path

+ +
def get_apps_info_path() -> str
+
+

Get APP info file path.

+
+

C++ definition code:

+ +
string get_apps_info_path()
+
+
+

get_apps_info

+ +
def get_apps_info(ignore_launcher: bool = False, ignore_app_store: bool = False) -> list[APP_Info]
+
+

Get APP info list.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramignore_launcher: if true, ignore launcher APP. default false.
ignore_app_store: if true, ignore app store APP. default false.
returnAPP info list. APP_Info object list.
+
+

C++ definition code:

+ +
vector<app::APP_Info> &get_apps_info(bool ignore_launcher = false, bool ignore_app_store = false)
+
+
+
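As a quick, hypothetical sketch of get_apps_info (the APP_Info fields id, name and version are documented further down this page):

from maix import app

# Enumerate installed APPs, skipping the launcher and the app store entries
for info in app.get_apps_info(ignore_launcher=True, ignore_app_store=True):
    print(info.id, str(info.version), info.name)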

get_app_info

+ +
def get_app_info(app_id: str) -> APP_Info
+
+

Get app info by app id.

+ + + + + + + + + + + + + +
itemdescription
returnapp.APP_Info type.
+
+

C++ definition code:

+ +
app::APP_Info get_app_info(const std::string &app_id)
+
+
+

get_app_data_path

+ +
def get_app_data_path() -> str
+
+

Get APP data path; the APP can store private data in this directory.

+ + + + + + + + + + + + + +
itemdescription
returnAPP data path "./data", just return the data folder in current path because APP executed in app install path or project path.
So you must execute your program in your project path to use the project/data folder when you debug your APP.
+
+

C++ definition code:

+ +
string get_app_data_path()
+
+
+

get_app_path

+ +
def get_app_path(app_id: str = '') -> str
+
+

Get APP path.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramapp_id: APP ID, if empty, return current APP path, else return the APP path by app_id.
returnAPP path, just return the current path because APP executed in app install path or project path.
So you must execute your program in your project path to use the project/data folder when you debug your APP.
+
+

C++ definition code:

+ +
string get_app_path(const string &app_id = "")
+
+
+

get_tmp_path

+ +
def get_tmp_path() -> str
+
+

Get global temporary data path; APPs can use this path as a temporary data directory.

+ + + + + + + + + + + + + +
itemdescription
returntemporary data path.
+
+

C++ definition code:

+ +
string get_tmp_path()
+
+
+

get_share_path

+ +
def get_share_path() -> str
+
+

Get shared data path; shared data such as pictures and videos will be put in this directory

+ + + + + + + + + + + + + +
itemdescription
returnshare data path.
+
+

C++ definition code:

+ +
string get_share_path()
+
+
+

get_picture_path

+ +
def get_picture_path() -> str
+
+

Get shared picture path; shared pictures will be put in this directory

+ + + + + + + + + + + + + +
itemdescription
returnshare picture path.
+
+

C++ definition code:

+ +
string get_picture_path()
+
+
+

get_video_path

+ +
def get_video_path() -> str
+
+

Get shared video path; shared videos will be put in this directory

+ + + + + + + + + + + + + +
itemdescription
returnshare video path.
+
+

C++ definition code:

+ +
string get_video_path()
+
+
+

get_font_path

+ +
def get_font_path() -> str
+
+

Get shared font path; shared fonts will be put in this directory

+ + + + + + + + + + + + + +
itemdescription
returnshare font path.
+
+

C++ definition code:

+ +
string get_font_path()
+
+
+

get_icon_path

+ +
def get_icon_path() -> str
+
+

Get shared icon path; shared icons will be put in this directory

+ + + + + + + + + + + + + +
itemdescription
returnshare icon path.
+
+

C++ definition code:

+ +
string get_icon_path()
+
+
+

get_sys_config_kv

+ +
def get_sys_config_kv(item: str, key: str, value: str = '', from_cache: bool = True) -> str
+
+

Get system config item value.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramitem: name of setting item, e.g. wifi, language. more see settings APP.
key: config key, e.g. for wifi, key can be ssid, for language, key can be locale.
value: default value, if not found, return this value.
from_cache: if true, read from cache, if false, read from file.
returnconfig value, always string type, if not found, return empty string.
+
+

C++ definition code:

+ +
string get_sys_config_kv(const string &item, const string &key, const string &value = "", bool from_cache = true)
+
+
+

get_app_config_kv

+ +
def get_app_config_kv(item: str, key: str, value: str = '', from_cache: bool = True) -> str
+
+

Get APP config item value.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramitem: name of setting item, e.g. user_info
key: config key, e.g. for user_info, key can be name, age etc.
value: default value, if not found, return this value.
from_cache: if true, read from cache, if false, read from file.
returnconfig value, always string type, if not found, return empty string.
+
+

C++ definition code:

+ +
string get_app_config_kv(const string &item, const string &key, const string &value = "", bool from_cache = true)
+
+
+

set_app_config_kv

+ +
def set_app_config_kv(item: str, key: str, value: str, write_file: bool = True) -> maix.err.Err
+
+

Set APP config item value.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramitem: name of setting item, e.g. user_info
key: config key, e.g. for user_info, key can be name, age etc.
value: config value, always string type.
write_file: if true, write to file, if false, just write to cache.
returnerr::Err
+
+

C++ definition code:

+ +
err::Err set_app_config_kv(const string &item, const string &key, const string &value, bool write_file = true)
+
+
+
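A short, hypothetical sketch of the key-value config helpers above; the wifi/ssid and user_info/name item and key names come from the parameter descriptions, while the values are made up:

from maix import app

# System setting: returns an empty string if the key is not set
ssid = app.get_sys_config_kv("wifi", "ssid")

# APP-private setting with a default value, then persist an updated value to
# the APP's ini config file (see get_app_config_path below)
name = app.get_app_config_kv("user_info", "name", value="anonymous")
app.set_app_config_kv("user_info", "name", "maix_user")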

get_app_config_path

+ +
def get_app_config_path() -> str
+
+

Get APP config path (ini format), so you can parse it with your own ini parser, such as configparser in Python.\nIt is recommended to store all APP config info in this file.

+ + + + + + + + + + + + + +
itemdescription
returnAPP config path(ini format).
+
+

C++ definition code:

+ +
string get_app_config_path()
+
+
+

set_exit_msg

+ +
def set_exit_msg(code: maix.err.Err, msg: str) -> maix.err.Err
+
+

Set APP exit code and exit message.\nIf code != 0, the launcher will show a dialog to the user and display the msg.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramcode: exit code, 0 means success, other means error, if code is 0, do nothing.
msg: exit message, if code is 0, msg is not used.
returnexit code, the same as arg @code.
+
+

C++ definition code:

+ +
err::Err set_exit_msg(err::Err code, const string &msg)
+
+
+

get_exit_msg

+ +
def get_exit_msg(cache: bool = False) -> tuple[str, maix.err.Err, str]
+
+

Get APP exit code and exit message.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramcache: if true, read from cache, if false, read from file. default false.
returnexit return app_id, exit code and exit message.
+
+

C++ definition code:

+ +
tuple<string, err::Err, string> get_exit_msg(bool cache = false)
+
+
+

have_exit_msg

+ +
def have_exit_msg(cache: bool = False) -> bool
+
+

Check whether there is an exit msg

+ + + + + + + + + + + + + + + + + +
itemdescription
paramcache: if true, just check from cache, if false, check from file. default false.
returntrue if have exit msg, false if not.
+
+

C++ definition code:

+ +
bool have_exit_msg(bool cache = false)
+
+
+

switch_app

+ +
def switch_app(app_id: str, idx: int = -1, start_param: str = '') -> None
+
+

Exit this APP and start another APP (via the launcher).\nCalling this API will call set_exit_flag(true); you should check app::need_exit() in your code,\nand exit this APP if app::need_exit() returns true.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramapp_id: APP ID which will be started. app_id and idx must have one is valid.
idx: APP index. app_id and idx must have one is valid.
start_param: string type, will send to app, app can get this param by app.get_start_param()
attentionIf app id or idx the same as current app, do nothing.
+
+

C++ definition code:

+ +
void switch_app(const string &app_id, int idx = -1, const std::string &start_param = "")
+
+
+

get_start_param

+ +
def get_start_param() -> str
+
+

Get start param set by caller

+ + + + + + + + + + + + + +
itemdescription
returnparam, string type
+
+

C++ definition code:

+ +
const std::string get_start_param()
+
+
+

need_exit

+ +
def need_exit() -> bool
+
+

Should this APP exit?

+ + + + + + + + + + + + + + + + + +
itemdescription
returntrue if this APP should exit, false if not.
attentionThis API is a function, not a variable.
+
+

C++ definition code:

+ +
bool need_exit()
+
+
+
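The exit-flag APIs above are what a MaixPy APP's main loop is normally built around. A minimal sketch follows; the loop body is placeholder work, and time.sleep_ms is assumed from the maix.time module listed in the index:

from maix import app, time

# Run until the launcher (or a call to app.set_exit_flag(True)) asks us to stop
while not app.need_exit():
    # ... one iteration of the APP's work ...
    time.sleep_ms(100)

On a fatal error an APP can also call app.set_exit_msg() before leaving the loop so the launcher can show the reason to the user.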

running

+ +
def running() -> bool
+
+

Should the APP keep running? The same as !app::need_exit() (not app.need_exit() in MaixPy).

+ + + + + + + + + + + + + + + + + +
itemdescription
returntrue if this APP should running, false if not.
attentionThis API is a function, not a variable.
+
+

C++ definition code:

+ +
bool running()
+
+
+

set_exit_flag

+ +
def set_exit_flag(exit: bool) -> None
+
+

Set exit flag. You can get exit flag by app.need_exit().

+ + + + + + + + + + + + + +
itemdescription
paramexit: true if this APP should exit, false if not.
+
+

C++ definition code:

+ +
void set_exit_flag(bool exit)
+
+
+

Class

+

Version

+

APP version

+
+

C++ definition code:

+ +
class Version
+
+
+

__str__

+ +
def __str__(self) -> str
+
+

Convert to string, e.g. 1.0.0

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ definition code:

+ +
std::string __str__()
+
+
+

from_str

+ +
def from_str(version_str: str) -> Version
+
+

Convert from string, e.g. "1.0.0"

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticTrue
+
+

C++ definition code:

+ +
static app::Version from_str(const string &version_str)
+
+
+

APP_Info

+

APP info

+
+

C++ definition code:

+ +
class APP_Info
+
+
+

id

+

APP id

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ definition code:

+ +
string id
+
+
+

name

+

APP name

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ definition code:

+ +
string name
+
+
+

icon

+

APP icon

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ definition code:

+ +
string icon
+
+
+

version

+

APP version

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ definition code:

+ +
Version version
+
+
+

exec

+

APP exec

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ definition code:

+ +
string exec
+
+
+

author

+

APP author

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ definition code:

+ +
string author
+
+
+

desc

+

APP desc

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ definition code:

+ +
string desc
+
+
+

names

+

APP names

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ definition code:

+ +
map<string, string> names
+
+
+

descs

+

APP descs

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ definition code:

+ +
map<string, string> descs
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/audio.html b/maixpy/api/maix/audio.html new file mode 100644 index 00000000..37b6774f --- /dev/null +++ b/maixpy/api/maix/audio.html @@ -0,0 +1,861 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.audio - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.audio

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.audio module

+
+

You can use maix.audio to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Format

+

Audio type

+ + + + + + + + + + + + + +
itemdescribe
valuesFMT_NONE: format invalid
FMT_S8: signed 8 bits
FMT_S16_LE: signed 16 bits, little endian
FMT_S32_LE: signed 32 bits, little endian
FMT_S16_BE: signed 16 bits, big endian
FMT_S32_BE: signed 32 bits, big endian
FMT_U8: unsigned 8 bits
FMT_U16_LE: unsigned 16 bits, little endian
FMT_U32_LE: unsigned 32 bits, little endian
FMT_U16_BE: unsigned 16 bits, big endian
FMT_U32_BE: unsigned 32 bits, big endian
+
+

C++ definition code:

+ +
enum Format
+    {
+        FMT_NONE = 0,       // format invalid
+        FMT_S8,             // signed 8 bits
+        FMT_S16_LE,         // signed 16 bits, little endian
+        FMT_S32_LE,         // signed 32 bits, little endian
+        FMT_S16_BE,         // signed 16 bits, big endian
+        FMT_S32_BE,         // signed 32 bits, big endian
+        FMT_U8,             // unsigned 8 bits
+        FMT_U16_LE,         // unsigned 16 bits, little endian
+        FMT_U32_LE,         // unsigned 32 bits, little endian
+        FMT_U16_BE,         // unsigned 16 bits, big endian
+        FMT_U32_BE,         // unsigned 32 bits, big endian
+    }
+
+
+

Variable

+

Function

+

Class

+

Recorder

+

Recorder class

+
+

C++ definition code:

+ +
class Recorder
+
+
+

__init__

+ +
def __init__(self, path: str = '', sample_rate: int = 48000, format: Format = ..., channel: int = 1) -> None
+
+

Construct a new Recorder object. Currently only PCM and WAV formats are supported.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampath: record path. the path determines the location where you save the file, if path is none, the audio module will not save file.
sample_rate: record sample rate, default is 48000(48KHz), means 48000 samples per second.
format: record sample format, default is audio::Format::FMT_S16_LE, means sampling 16 bits at a time and save as signed 16 bits, little endian. see @audio::Format
channel: record sample channel, default is 1, means 1 channel sampling at the same time
staticFalse
+
+

C++ definition code:

+ +
Recorder(std::string path = std::string(), int sample_rate = 48000, audio::Format format = audio::Format::FMT_S16_LE, int channel = 1)
+
+
+

volume

+ +
def volume(self, value: int = -1) -> int
+
+

Set/Get record volume

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramvalue: volume value, If you use this parameter, audio will set the value to volume,
if you don't, it will return the current volume. range is [0, 100].
returnthe current volume
staticFalse
+
+

C++ definition code:

+ +
int volume(int value = -1)
+
+
+

mute

+ +
def mute(self, data: int = -1) -> bool
+
+

Mute

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdata: mute data, If you set this parameter to true, audio will set the value to mute,
if you don't, it will return the current mute status.
returnReturns whether mute is currently enabled.
staticFalse
+
+

C++ definition code:

+ +
bool mute(int data = -1)
+
+
+

record

+ +
def record(*args, **kwargs)
+
+

Record: read all cached data in the buffer and return it. If there is no audio data in the buffer, empty data may be returned.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramrecord_ms: Block and record audio data lasting record_ms milliseconds and save it to a file, the return value does not return audio data. Only valid if the initialisation path is set.
returnpcm data. datatype @see Bytes. If you pass in record_ms parameter, the return value is an empty Bytes object.
staticFalse
+
+

C++ definition code:

+ +
maix::Bytes *record(int record_ms = -1)
+
+
+

finish

+ +
def finish(self) -> maix.err.Err
+
+

Finish the recording; if you passed in a path, this API will save the audio data to the file.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ definition code:

+ +
err::Err finish()
+
+
+
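A minimal recording sketch built from the Recorder API above; the output path is an assumption, and it is assumed that the .wav extension selects the WAV container mentioned in the constructor doc:

from maix import audio

# 48 kHz, mono, default FMT_S16_LE samples; the path is hypothetical
recorder = audio.Recorder("/root/record.wav", sample_rate=48000, channel=1)
recorder.record(3000)   # block for 3000 ms and save the audio to the file
recorder.finish()       # finalize the recording file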

sample_rate

+ +
def sample_rate(self) -> int
+
+

Get sample rate

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturns sample rate
staticFalse
+
+

C++ definition code:

+ +
int sample_rate()
+
+
+

format

+ +
def format(self) -> Format
+
+

Get sample format

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturns sample format
staticFalse
+
+

C++ definition code:

+ +
audio::Format format()
+
+
+

channel

+ +
def channel(self) -> int
+
+

Get sample channel

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturns sample channel
staticFalse
+
+

C++ definition code:

+ +
int channel()
+
+
+

Player

+

Player class

+
+

C++ definition code:

+ +
class Player
+
+
+

__init__

+ +
def __init__(self, path: str = '', sample_rate: int = 48000, format: Format = ..., channel: int = 1) -> None
+
+

Construct a new Player object

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampath: player path. the path determines the location where you save the file, if path is none, the audio module will not save file.
sample_rate: player sample rate, default is 48000(48KHz), means 48000 samples per second.
format: player sample format, default is audio::Format::FMT_S16_LE, means sampling 16 bits at a time and save as signed 16 bits, little endian. see @audio::Format
channel: player sample channel, default is 1, means 1 channel sampling at the same time
staticFalse
+
+

C++ definition code:

+ +
Player(std::string path = std::string(), int sample_rate = 48000, audio::Format format = audio::Format::FMT_S16_LE, int channel = 1)
+
+
+

volume

+ +
def volume(self, value: int = -1) -> int
+
+

Set/Get player volume (not supported yet)

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramvalue: volume value, If you use this parameter, audio will set the value to volume,
if you don't, it will return the current volume.
returnthe current volume
staticFalse
+
+

C++ definition code:

+ +
int volume(int value = -1)
+
+
+

play

+ +
def play(self, data: maix.Bytes(bytes) = b'') -> maix.err.Err
+
+

Play

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdata: audio data, must be raw data
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ definition code:

+ +
err::Err play(maix::Bytes *data = maix::audio::Player::NoneBytes)
+
+
+
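A matching playback sketch. That play() with no data argument plays the file passed to the constructor is inferred from the path parameter and the NoneBytes default above, so treat it as an assumption:

from maix import audio

player = audio.Player("/root/record.wav")  # file written in the Recorder sketch above
player.play()                              # assumed to play the constructor's file when no data is given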

sample_rate

+ +
def sample_rate(self) -> int
+
+

Get sample rate

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturns sample rate
staticFalse
+
+

C++ definition code:

+ +
int sample_rate()
+
+
+

format

+ +
def format(self) -> Format
+
+

Get sample format

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturns sample format
staticFalse
+
+

C++ definition code:

+ +
audio::Format format()
+
+
+

channel

+ +
def channel(self) -> int
+
+

Get sample channel

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturns sample channel
staticFalse
+
+

C++ definition code:

+ +
int channel()
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/camera.html b/maixpy/api/maix/camera.html new file mode 100644 index 00000000..136da33c --- /dev/null +++ b/maixpy/api/maix/camera.html @@ -0,0 +1,1604 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.camera - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.camera

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.camera module, access camera device and get image from it

+
+

You can use maix.camera to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

list_devices

+ +
def list_devices() -> list[str]
+
+

List all supported camera devices.

+ + + + + + + + + + + + + +
itemdescription
returnReturns the path to the camera device.
+
+

C++ definition code:

+ +
std::vector<std::string> list_devices()
+
+
+

set_regs_enable

+ +
def set_regs_enable(enable: bool = True) -> None
+
+

Enable manual setting of camera registers. Default is false. If set to true, the driver will not set camera registers itself, so you can set registers manually via the write_reg API.

+ + + + + + + + + + + + + +
itemdescription
paramenable: enable/disable set camera registers
+
+

C++ definition code:

+ +
void set_regs_enable(bool enable = true)
+
+
+

Class

+

Camera

+

Camera class

+
+

C++ definition code:

+ +
class Camera
+
+
+

__init__

+ +
def __init__(self, width: int = -1, height: int = -1, format: maix.image.Format = ..., device: str = None, fps: float = -1, buff_num: int = 3, open: bool = True, raw: bool = False) -> None
+
+

Construct a new Camera object.\nMaximum supported resolution is 2560x1440.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramwidth: camera width, default is -1, means auto, mostly means max width of camera support
height: camera height, default is -1, means auto, mostly means max height of camera support
format: camera output format, default is image.Format.FMT_RGB888
device: camera device path, you can get devices by list_devices method, by default(value is NULL(None in MaixPy)) means the first device
fps: camera fps, default is -1, means auto, mostly means max fps of camera support
buff_num: camera buffer number, default is 3, means 3 buffer, one used by user, one used for cache the next frame,
more than one buffer will accelerate image read speed, but will cost more memory.
open: If true, camera will automatically call open() after creation. default is true.
raw: If true, you can use read_raw() to capture the raw image output from the sensor.
staticFalse
+
+

C++ defination code:

+ +
Camera(int width = -1, int height = -1, image::Format format = image::FMT_RGB888, const char *device = nullptr, double fps = -1, int buff_num = 3, bool open = true, bool raw = false)
+
+
+

get_ch_nums

+ +
def get_ch_nums(self) -> int
+
+

Get the number of channels supported by the camera.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the maximum number of channels.
staticFalse
+
+

C++ defination code:

+ +
int get_ch_nums()
+
+
+

open

+ +
def open(self, width: int = -1, height: int = -1, format: maix.image.Format = ..., fps: float = -1, buff_num: int = -1) -> maix.err.Err
+
+

Open camera and run

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramwidth: camera width, default is -1, means auto, mostly means max width of camera support
height: camera height, default is -1, means auto, mostly means max height of camera support
format: camera output format, default same as the constructor's format argument
fps: camera fps, default is -1, means auto, mostly means max fps of camera support
buff_num: camera buffer number, default is 3, means 3 buffer, one used by user, one used for cache the next frame,
more than one buffer will accelerate image read speed, but will cost more memory.
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ defination code:

+ +
err::Err open(int width = -1, int height = -1, image::Format format = image::FMT_INVALID, double fps = -1, int buff_num = -1)
+
+
+

read

+ +
def read(self, buff: capsule = None, buff_size: int = 0, block: bool = True, block_ms: int = -1) -> maix.image.Image
+
+

Get one frame image from the camera buffer; the open method must be called before read.\nIf open has not been called, it will be called automatically; if opening fails, an exception is thrown!\nSo calling open before read is recommended.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parambuff: buffer to store image data, if buff is nullptr, will alloc memory automatically.
In MaixPy, default is None; you can create an image.Image object, then pass img.data() to buff.
block: block read, default is true, means block until an image is read successfully,
if set to false, will return nullptr if no image is in the buffer
block_ms: block read timeout, unit: ms
returnimage::Image object, if failed, return nullptr, you should delete it manually in C++
staticFalse
+
+

C++ defination code:

+ +
image::Image *read(void *buff = nullptr, size_t buff_size = 0, bool block = true, int block_ms = -1)
+
+
+
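A minimal capture loop in MaixPy (the resolution is just an example; app.need_exit() is the usual MaixPy exit check):

from maix import camera, app

cam = camera.Camera(640, 480)      # RGB888 by default
while not app.need_exit():
    img = cam.read()               # blocks until a frame is available
    print(img.width(), img.height())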

read_raw

+ +
def read_raw(self) -> maix.image.Image
+
+

Read the raw image and obtain the width, height, and format of the raw image through the returned Image object.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
noteThe raw image is in a Bayer format, and its width and height are affected by the driver. Modifying the size and format is generally not allowed.
returnimage::Image object, if failed, return nullptr, you should delete it manually in C++
staticFalse
+
+

C++ defination code:

+ +
image::Image *read_raw()
+
+
+

clear_buff

+ +
def clear_buff(self) -> None
+
+

Clear buff to ensure the next read image is the latest image

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void clear_buff()
+
+
+

skip_frames

+ +
def skip_frames(self, num: int) -> None
+
+

Read and drop some frames; this is usually used to avoid unstable images right after the camera is opened.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramnum: number of frames to read and drop
staticFalse
+
+

C++ defination code:

+ +
void skip_frames(int num)
+
+
+

close

+ +
def close(self) -> None
+
+

Close camera

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void close()
+
+
+

add_channel

+ +
def add_channel(self, width: int = -1, height: int = -1, format: maix.image.Format = ..., fps: float = -1, buff_num: int = 3, open: bool = True) -> Camera
+
+

Add a new channel and return a new Camera object, you can use close() to close this channel.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramwidth: camera width, default is -1, means auto, mostly means max width of camera support
height: camera height, default is -1, means auto, mostly means max height of camera support
format: camera output format, default is RGB888
fps: camera fps, default is -1, means auto, mostly means max fps of camera support
buff_num: camera buffer number, default is 3, means 3 buffer, one used by user, one used for cache the next frame,
more than one buffer will accelerate image read speed, but will cost more memory.
open: If true, camera will automatically call open() after creation. default is true.
returnnew Camera object
staticFalse
+
+

C++ defination code:

+ +
camera::Camera *add_channel(int width = -1, int height = -1, image::Format format = image::FMT_RGB888, double fps = -1, int buff_num = 3, bool open = true)
+
+
+

is_opened

+ +
def is_opened(self) -> bool
+
+

Check if camera is opened

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returntrue if camera is opened, false if not
staticFalse
+
+

C++ defination code:

+ +
bool is_opened()
+
+
+

is_closed

+ +
def is_closed(self) -> bool
+
+

check camera device is closed or not

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnclosed or not, bool type
staticFalse
+
+

C++ defination code:

+ +
bool is_closed()
+
+
+

width

+ +
def width(self) -> int
+
+

Get camera width

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returncamera width
staticFalse
+
+

C++ defination code:

+ +
int width()
+
+
+

height

+ +
def height(self) -> int
+
+

Get camera height

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returncamera height
staticFalse
+
+

C++ defination code:

+ +
int height()
+
+
+

fps

+ +
def fps(self) -> float
+
+

Get camera fps

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returncamera fps
staticFalse
+
+

C++ defination code:

+ +
double fps()
+
+
+

format

+ +
def format(self) -> maix.image.Format
+
+

Get camera output format

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returncamera output format, image::Format object
staticFalse
+
+

C++ defination code:

+ +
image::Format format()
+
+
+

buff_num

+ +
def buff_num(self) -> int
+
+

Get camera buffer number

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returncamera buffer number
staticFalse
+
+

C++ defination code:

+ +
int buff_num()
+
+
+

hmirror

+ +
def hmirror(self, value: int = -1) -> int
+
+

Set/Get camera horizontal mirror

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returncamera horizontal mirror
staticFalse
+
+

C++ defination code:

+ +
int hmirror(int value = -1)
+
+
+

vflip

+ +
def vflip(self, value: int = -1) -> int
+
+

Set/Get camera vertical flip

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returncamera vertical flip
staticFalse
+
+

C++ defination code:

+ +
int vflip(int value = -1)
+
+
+

device

+ +
def device(self) -> str
+
+

Get camera device path

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returncamera device path
staticFalse
+
+

C++ defination code:

+ +
std::string device()
+
+
+

write_reg

+ +
def write_reg(self, addr: int, data: int, bit_width: int = 8) -> maix.err.Err
+
+

Write camera register

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramaddr: register address
data: register data
bit_width: register data bit width, default is 8
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ defination code:

+ +
err::Err write_reg(int addr, int data, int bit_width = 8)
+
+
+

read_reg

+ +
def read_reg(self, addr: int, bit_width: int = 8) -> int
+
+

Read camera register

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramaddr: register address
bit_width: register data bit width, default is 8
returnregister data, -1 means failed
staticFalse
+
+

C++ defination code:

+ +
int read_reg(int addr, int bit_width = 8)
+
+
+
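A sketch of manual register access; the register address and value below are hypothetical, check your sensor datasheet:

from maix import camera

cam = camera.Camera(640, 480)
camera.set_regs_enable(True)   # stop the driver from setting registers itself
REG = 0x3500                   # hypothetical register address
cam.write_reg(REG, 0x10)       # 8-bit access by default
print(hex(cam.read_reg(REG)))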

show_colorbar

+ +
def show_colorbar(self, enable: bool) -> maix.err.Err
+
+

Camera output color bar image for test

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramenable: enable/disable color bar
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ defination code:

+ +
err::Err show_colorbar(bool enable)
+
+
+

get_channel

+ +
def get_channel(self) -> int
+
+

Get channel of camera

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnchannel number
staticFalse
+
+

C++ defination code:

+ +
int get_channel()
+
+
+

set_resolution

+ +
def set_resolution(self, width: int, height: int) -> maix.err.Err
+
+

Set camera resolution

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramwidth: new width
height: new height
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ defination code:

+ +
err::Err set_resolution(int width, int height)
+
+
+

set_fps

+ +
def set_fps(self, fps: float) -> maix.err.Err
+
+

Set camera fps

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramfps: new fps
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ defination code:

+ +
err::Err set_fps(double fps)
+
+
+

exposure

+ +
def exposure(self, value: int = -1) -> int
+
+

Set/Get camera exposure

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
attentionThis method will affect the isp and thus the image, so please be careful with it.
paramvalue: exposure time. unit: us
If value == -1, return exposure time.
If value != 0, set and return exposure time.
returncamera exposure time
staticFalse
+
+

C++ defination code:

+ +
int exposure(int value = -1)
+
+
+
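A sketch of reading and adjusting exposure and gain; the values are hypothetical and sensor dependent:

from maix import camera

cam = camera.Camera(640, 480)
print("exposure (us):", cam.exposure())   # no value -> read back the current setting
cam.exposure(10000)                       # hypothetical: 10 ms exposure
cam.gain(1024)                            # hypothetical gain value, range depends on the sensor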

gain

+ +
def gain(self, value: int = -1) -> int
+
+

Set/Get camera gain

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
attentionThis method will affect the isp and thus the image, so please be careful with it.
paramvalue: camera gain.
If value == -1, returns camera gain.
If value != 0, set and return camera gain.
returncamera gain
staticFalse
+
+

C++ defination code:

+ +
int gain(int value = -1)
+
+
+

luma

+ +
def luma(self, value: int = -1) -> int
+
+

Set/Get camera luma

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
attentionThis method will affect the isp and thus the image, so please be careful with it.
paramvalue: luma value, range is [0, 100]
If value == -1, returns luma value.
If value != 0, set and return luma value.
returnreturns luma value
staticFalse
+
+

C++ defination code:

+ +
int luma(int value = -1)
+
+
+

constrast

+ +
def constrast(self, value: int = -1) -> int
+
+

Set/Get camera constrast

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
attentionThis method will affect the isp and thus the image, so please be careful with it.
paramvalue: constrast value, range is [0, 100]
If value == -1, returns constrast value.
If value != 0, set and return constrast value.
returnreturns constrast value
staticFalse
+
+

C++ defination code:

+ +
int constrast(int value = -1)
+
+
+

saturation

+ +
def saturation(self, value: int = -1) -> int
+
+

Set/Get camera saturation

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
attentionThis method will affect the isp and thus the image, so please be careful with it.
paramvalue: saturation value, range is [0, 100]
If value == -1, returns saturation value.
If value != 0, set and return saturation value.
returnreturns saturation value
staticFalse
+
+

C++ defination code:

+ +
int saturation(int value = -1)
+
+
+

awb_mode

+ +
def awb_mode(self, value: int = -1) -> int
+
+

Set/Get white balance mode (deprecated interface)

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
attentionThis method will affect the isp and thus the image, so please be careful with it.
This interface may be deprecated in the future, and there may be incompatibilities in the definition of the parameters of the new interface
paramvalue: value = 0, means set white balance to auto mode, value = 1, means set white balance to manual mode, default is auto mode.
returnreturns awb mode
staticFalse
+
+

C++ defination code:

+ +
int awb_mode(int value = -1)
+
+
+

set_awb

+ +
def set_awb(self, mode: int = -1) -> int
+
+

Set/Get white balance mode

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
attentionThis method will affect the isp and thus the image, so please be careful with it.
paramvalue: value = 0, means set white balance to manual mode, value = 1, means set white balance to auto mode, default is auto mode.
returnreturns awb mode
staticFalse
+
+

C++ defination code:

+ +
int set_awb(int mode = -1)
+
+
+

exp_mode

+ +
def exp_mode(self, value: int = -1) -> int
+
+

Set/Get exposure mode (deprecated interface)

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
attentionThis method will affect the isp and thus the image, so please be careful with it.
This interface may be deprecated in the future, and there may be incompatibilities in the definition of the parameters of the new interface
paramvalue: value = 0, means set exposure to auto mode, value = 1, means set exposure to manual mode, default is auto mode.
returnreturns exposure mode
staticFalse
+
+

C++ defination code:

+ +
int exp_mode(int value = -1)
+
+
+

set_windowing

+ +
def set_windowing(self, roi: list[int]) -> maix.err.Err
+
+

Set window size of camera

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramroi: Supports two input formats: [x,y,w,h] sets the coordinates and size of the window;
[w,h] sets only the size of the window, which is then centred.
returnerror code
staticFalse
+
+

C++ defination code:

+ +
err::Err set_windowing(std::vector<int> roi)
+
+
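For example, cropping the sensor output to a centred window, or to an explicit region:

from maix import camera

cam = camera.Camera(1280, 720)
cam.set_windowing([640, 480])             # [w, h]: centred window
# cam.set_windowing([100, 100, 640, 480]) # [x, y, w, h]: explicit position and size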
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/comm.html b/maixpy/api/maix/comm.html new file mode 100644 index 00000000..22da1794 --- /dev/null +++ b/maixpy/api/maix/comm.html @@ -0,0 +1,547 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.comm - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.comm

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.comm module

+
+

You can use maix.comm to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

add_default_comm_listener

+

Add default CommProtocol listener.\nWhen the application uses this port, the listening thread will immediately\nrelease the port resources and exit. If you need to start the default listening thread again,\nplease release the default port resources and then call this function.

+
+

C++ defination code:

+ +
void add_default_comm_listener()
+
+
+

rm_default_comm_listener

+

Remove default CommProtocol listener.

+ + + + + + + + + + + + + +
itemdescription
returnbool type.
+
+

C++ defination code:

+ +
bool rm_default_comm_listener()
+
+
+

Class

+

CommProtocol

+

Class for communication protocol

+
+

C++ defination code:

+ +
class CommProtocol
+
+
+

__init__

+ +
def __init__(self, buff_size: int = 1024, header: int = 3148663466) -> None
+
+

Construct a new CommProtocol object

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parambuff_size: buffer size, default to 1024 bytes
staticFalse
+
+

C++ defination code:

+ +
CommProtocol(int buff_size = 1024, uint32_t header=maix::protocol::HEADER)
+
+
+

get_msg

+ +
def get_msg(self, timeout: int = 0) -> ...
+
+

Read data to buffer, and try to decode it as maix.protocol.MSG object

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramtimeout: unit ms, 0 means return immediately, -1 means block until a msg arrives, >0 means block until a msg arrives or timeout.
returndecoded data, if nullptr, means no valid frame found.
Attention, delete it after use in C++.
staticFalse
+
+

C++ defination code:

+ +
protocol::MSG *get_msg(int timeout = 0)
+
+
+

resp_ok

+ +
def resp_ok(self, cmd: int, body: maix.Bytes(bytes) = None) -> maix.err.Err
+
+

Send response ok(success) message

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcmd: CMD value
body: response body, can be null
returnencoded data, if nullptr, means error, and the error code is -err.Err.
Attention, delete it after use in C++.
staticFalse
+
+

C++ defination code:

+ +
err::Err resp_ok(uint8_t cmd, Bytes *body = nullptr)
+
+
+

report

+ +
def report(self, cmd: int, body: maix.Bytes(bytes) = None) -> maix.err.Err
+
+

Send report message

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcmd: CMD value
body: report body, can be null
returnencoded data, if nullptr, means error, and the error code is -err.Err.
Attention, delete it after use in C++.
staticFalse
+
+

C++ defination code:

+ +
err::Err report(uint8_t cmd, Bytes *body = nullptr)
+
+
+

resp_err

+ +
def resp_err(self, cmd: int, code: maix.err.Err, msg: str) -> maix.err.Err
+
+

Encode response error message to buffer

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcmd: CMD value
code: error code
msg: error message
returnencoded data, if nullptr, means error, and the error code is -err.Err.
Attention, delete it after use in C++.
staticFalse
+
+

C++ defination code:

+ +
err::Err resp_err(uint8_t cmd, err::Err code, const std::string &msg)
+
+
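A sketch of a request/response loop. CMD_PING is a hypothetical application command, and the sketch assumes the decoded message exposes its cmd field as in maix.protocol.MSG:

from maix import comm, err, app

CMD_PING = 0x01                    # hypothetical application command id
p = comm.CommProtocol(1024)
while not app.need_exit():
    msg = p.get_msg(100)           # wait up to 100 ms for a frame
    if msg is None:
        continue
    if msg.cmd == CMD_PING:
        p.resp_ok(msg.cmd, b"pong")
    else:
        p.resp_err(msg.cmd, err.Err.ERR_ARGS, "unknown command")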
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/display.html b/maixpy/api/maix/display.html new file mode 100644 index 00000000..894b690f --- /dev/null +++ b/maixpy/api/maix/display.html @@ -0,0 +1,913 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.display - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.display

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.display module, control display device and show image on it

+
+

You can use maix.display to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

send_to_maixvision

+ +
def send_to_maixvision(img: maix.image.Image) -> None
+
+

Send image to MaixVision work station if connected.\nIf you want to debug your program and don't want to initialize the display, use this method.

+ + + + + + + + + + + + + +
itemdescription
paramimg: image to send, image.Image object
+
+

C++ defination code:

+ +
void send_to_maixvision(image::Image &img)
+
+
+
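A minimal sketch sending a frame to MaixVision without opening the local screen:

from maix import camera, display

cam = camera.Camera(640, 480)
img = cam.read()
display.send_to_maixvision(img)   # only has an effect when MaixVision is connected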

Class

+

Display

+

Display class

+
+

C++ defination code:

+ +
class Display
+
+
+

__init__

+ +
def __init__(self, width: int = -1, height: int = -1, format: maix.image.Format = ..., device: str = None, open: bool = True) -> None
+
+

Construct a new Display object

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramwidth: display width, by default(value is -1) means auto detect,
if width > max device supported width, will auto set to max device supported width
height: display height, by default(value is -1) means auto detect,
if height > max device supported height, will auto set to max device supported height
device: display device name, you can get devices by list_devices method, by default(value is NULL(None in MaixPy)) means the first device
open: If true, display will automatically call open() after creation. default is true.
staticFalse
+
+

C++ defination code:

+ +
Display(int width = -1, int height = -1, image::Format format = image::FMT_RGB888, const char *device = nullptr, bool open = true)
+
+
+

width

+ +
def width(self) -> int
+
+

Get display width

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnwidth
staticFalse
+
+

C++ defination code:

+ +
int width()
+
+
+

height

+ +
def height(self) -> int
+
+

Get display height

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramch: channel to get, by default(value is 0) means the first channel
returnheight
staticFalse
+
+

C++ defination code:

+ +
int height()
+
+
+

size

+ +
def size(self) -> list[int]
+
+

Get display size

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramch: channel to get, by default(value is 0) means the first channel
returnsize A list type in MaixPy, [width, height]
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> size()
+
+
+

format

+ +
def format(self) -> maix.image.Format
+
+

Get display format

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnformat
staticFalse
+
+

C++ defination code:

+ +
image::Format format()
+
+
+

open

+ +
def open(self, width: int = -1, height: int = -1, format: maix.image.Format = ...) -> maix.err.Err
+
+

open display device, if already opened, will return err.ERR_NONE.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramwidth: display width, default is -1, means auto, mostly means max width of display support
height: display height, default is -1, means auto, mostly means max height of display support
format: display output format, default is RGB888
returnerror code
staticFalse
+
+

C++ defination code:

+ +
err::Err open(int width = -1, int height = -1, image::Format format = image::FMT_INVALID)
+
+
+

close

+ +
def close(self) -> maix.err.Err
+
+

close display device

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code
staticFalse
+
+

C++ defination code:

+ +
err::Err close()
+
+
+

add_channel

+ +
def add_channel(self, width: int = -1, height: int = -1, format: maix.image.Format = ..., open: bool = True) -> Display
+
+

Add a new channel and return a new Display object, you can use close() to close this channel.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
attentionIf a new disp channel is created, it is recommended to set fit=image::FIT_COVER or fit=image::FIT_FILL when running show for the main channel,
otherwise the display of the new disp channel may be abnormal.
paramwidth: display width, default is -1, means auto, mostly means max width of display support. Maximum width must not exceed the main channel.
height: display height, default is -1, means auto, mostly means max height of display support. Maximum height must not exceed the main channel.
format: display output format, default is FMT_BGRA8888
open: If true, display will automatically call open() after creation. default is true.
returnnew Display object
staticFalse
+
+

C++ defination code:

+ +
display::Display *add_channel(int width = -1, int height = -1, image::Format format = image::FMT_BGRA8888, bool open = true)
+
+
+

is_opened

+ +
def is_opened(self) -> bool
+
+

check display device is opened or not

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnopened or not, bool type
staticFalse
+
+

C++ defination code:

+ +
bool is_opened()
+
+
+

is_closed

+ +
def is_closed(self) -> bool
+
+

check display device is closed or not

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnclosed or not, bool type
staticFalse
+
+

C++ defination code:

+ +
bool is_closed()
+
+
+

show

+ +
def show(self, img: maix.image.Image, fit: maix.image.Fit = ...) -> maix.err.Err
+
+

show image on display device, and will also send to MaixVision work station if connected.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: image to show, image.Image object,
if the size of image smaller than display size, will show in the center of display;
if the size of image bigger than display size, will auto resize to display size and keep ratio, fill blank with black color.
fit: image in screen fit mode, by default(value is image.FIT_CONTAIN), @see image.Fit for more details
e.g. image.FIT_CONTAIN means resize image to fit display size and keep ratio, fill blank with black color.
returnerror code
staticFalse
+
+

C++ defination code:

+ +
err::Err show(image::Image &img, image::Fit fit = image::FIT_CONTAIN)
+
+
+
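The typical MaixPy preview loop combining Camera and Display:

from maix import camera, display, app

cam = camera.Camera(640, 480)
disp = display.Display()
while not app.need_exit():
    img = cam.read()
    disp.show(img)       # image.FIT_CONTAIN by default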

device

+ +
def device(self) -> str
+
+

Get display device path

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returndisplay device path
staticFalse
+
+

C++ defination code:

+ +
std::string device()
+
+
+

set_backlight

+ +
def set_backlight(self, value: float) -> None
+
+

Set display backlight

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramvalue: backlight value, float type, range is [0, 100]
staticFalse
+
+

C++ defination code:

+ +
void set_backlight(float value)
+
+
+

get_backlight

+ +
def get_backlight(self) -> float
+
+

Get display backlight

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnvalue backlight value, float type, range is [0, 100]
staticFalse
+
+

C++ defination code:

+ +
float get_backlight()
+
+
+

set_hmirror

+ +
def set_hmirror(self, en: bool) -> maix.err.Err
+
+

Set display mirror

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramen: enable/disable mirror
staticFalse
+
+

C++ defination code:

+ +
err::Err set_hmirror(bool en)
+
+
+

set_vflip

+ +
def set_vflip(self, en: bool) -> maix.err.Err
+
+

Set display flip

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramen: enable/disable flip
staticFalse
+
+

C++ defination code:

+ +
err::Err set_vflip(bool en)
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/err.html b/maixpy/api/maix/err.html new file mode 100644 index 00000000..fd62217a --- /dev/null +++ b/maixpy/api/maix/err.html @@ -0,0 +1,537 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.err - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.err

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.err module

+
+

You can use maix.err to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Err

+

Maix Error code

+ + + + + + + + + + + + + +
itemdescribe
valuesERR_NONE: No error
ERR_ARGS: Invalid arguments
ERR_NO_MEM: No memory
ERR_NOT_IMPL: Not implemented
ERR_NOT_READY: Not ready
ERR_NOT_INIT: Not initialized
ERR_NOT_OPEN: Not opened
ERR_NOT_PERMIT: Not permitted
ERR_REOPEN: Re-open
ERR_BUSY: Busy
ERR_READ: Read error
ERR_WRITE: Write error
ERR_TIMEOUT: Timeout
ERR_RUNTIME: Runtime error
ERR_IO: IO error
ERR_NOT_FOUND: Not found
ERR_ALREAY_EXIST: Already exist
ERR_BUFF_FULL: Buffer full
ERR_BUFF_EMPTY: Buffer empty
ERR_CANCEL: Cancel
ERR_OVERFLOW: Overflow
ERR_MAX:
+
+

C++ defination code:

+ +
enum Err
+    {
+        // !!! fixed error code, DO NOT change number already defined, only append new error code
+        ERR_NONE        = 0,   // No error
+        ERR_ARGS           ,   // Invalid arguments
+        ERR_NO_MEM         ,   // No memory
+        ERR_NOT_IMPL       ,   // Not implemented
+        ERR_NOT_READY      ,   // Not ready
+        ERR_NOT_INIT       ,   // Not initialized
+        ERR_NOT_OPEN       ,   // Not opened
+        ERR_NOT_PERMIT     ,   // Not permitted
+        ERR_REOPEN         ,   // Re-open
+        ERR_BUSY           ,   // Busy
+        ERR_READ           ,   // Read error
+        ERR_WRITE          ,   // Write error
+        ERR_TIMEOUT        ,   // Timeout
+        ERR_RUNTIME        ,   // Runtime error
+        ERR_IO             ,   // IO error
+        ERR_NOT_FOUND      ,   // Not found
+        ERR_ALREAY_EXIST   ,   // Already exist
+        ERR_BUFF_FULL      ,   // Buffer full
+        ERR_BUFF_EMPTY     ,   // Buffer empty
+        ERR_CANCEL         ,   // Cancel
+        ERR_OVERFLOW       ,   // Overflow
+        ERR_MAX,
+    }
+
+
+

Variable

+

Function

+

to_str

+ +
def to_str(e: Err) -> str
+
+

Error code to string

+ + + + + + + + + + + + + + + + + +
itemdescription
parame: direction [in], error code, err::Err type
returnerror string
+
+

C++ defination code:

+ +
std::string to_str(err::Err e)
+
+
+

get_error

+ +
def get_error() -> str
+
+

get last error string

+ + + + + + + + + + + + + +
itemdescription
returnerror string
+
+

C++ defination code:

+ +
std::string& get_error()
+
+
+

set_error

+ +
def set_error(str: str) -> None
+
+

set last error string

+ + + + + + + + + + + + + +
itemdescription
paramstr: direction [in], error string
+
+

C++ defination code:

+ +
void set_error(const std::string &str)
+
+
+

check_raise

+ +
def check_raise(e: Err, msg: str = '') -> None
+
+

Check error code, if not ERR_NONE, raise err.Exception

+ + + + + + + + + + + + + +
itemdescription
parame: direction [in], error code, err::Err type
msg: direction [in], error message
+
+

C++ defination code:

+ +
void check_raise(err::Err e, const std::string &msg = "")
+
+
+
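A small sketch of the error helpers in MaixPy:

from maix import err

print(err.to_str(err.Err.ERR_TIMEOUT))    # human-readable name of an error code
err.check_raise(err.Err.ERR_NONE)         # does nothing when the code is ERR_NONE
try:
    err.check_raise(err.Err.ERR_TIMEOUT, "demo failure")
except err.Exception as e:
    print("caught:", e)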

check_bool_raise

+ +
def check_bool_raise(ok: bool, msg: str = '') -> None
+
+

Check condition, if false, raise err.Exception

+ + + + + + + + + + + + + +
itemdescription
paramok: direction [in], condition, if true, do nothing, if false, raise err.Exception
msg: direction [in], error message
+
+

C++ defination code:

+ +
void check_bool_raise(bool ok, const std::string &msg = "")
+
+
+

check_null_raise

+ +
def check_null_raise(ptr: capsule, msg: str = '') -> None
+
+

Check NULL pointer, if NULL, raise exception

+ + + + + + + + + + + + + +
itemdescription
paramptr: direction [in], pointer
msg: direction [in], error message
+
+

C++ defination code:

+ +
void check_null_raise(void *ptr, const std::string &msg = "")
+
+
+

Class

+

Exception

+

Maix Exception

+
+

C++ defination code:

+ +
class Exception : public std::exception
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/example.html b/maixpy/api/maix/example.html new file mode 100644 index 00000000..ce6fae30 --- /dev/null +++ b/maixpy/api/maix/example.html @@ -0,0 +1,1274 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.example - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.example

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

example module, this will be maix.example module in MaixPy, maix::example namespace in MaixCDK

+
+

You can use maix.example to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Kind

+

Example enum (not recommended! See Kind2)

+ + + + + + + + + + + + + +
itemdescribe
valuesKIND_NONE: Kind none, value always 0, other enum value will auto increase
KIND_DOG: Kind dog
KIND_CAT: Kind cat, value is auto generated according to KING_DOG
KIND_BIRD:
KIND_MAX: Max Kind quantity
You can get max Kind value by KIND_MAX - 1
+
+

C++ defination code:

+ +
enum Kind
+        {
+            KIND_NONE = 0, /** Kind none, value always 0, other enum value will auto increase */
+            KIND_DOG,      /** Kind dog*/
+            KIND_CAT,      // Kind cat, value is auto generated according to KING_DOG
+            KIND_BIRD,
+            KIND_MAX /* Max Kind quantity,
+                        You can get max Kind value by KIND_MAX - 1
+                     */
+        }
+
+
+

Kind2

+

Example enum class(recommend!)

+ + + + + + + + + + + + + +
itemdescribe
valuesNONE: Kind none, value always 0, other enum value will auto increase
DOG: Kind dog
CAT: Kind cat, value is auto generated according to KING_DOG
BIRD:
MAX: Max Kind quantity
You can get max Kind value by KIND_MAX - 1
+
+

C++ defination code:

+ +
enum class Kind2
+        {
+            NONE = 0, /** Kind none, value always 0, other enum value will auto increase */
+            DOG,      /** Kind dog*/
+            CAT,      // Kind cat, value is auto generated according to KING_DOG
+            BIRD,
+            MAX       /* Max Kind quantity,
+                         You can get max Kind value by KIND_MAX - 1
+                      */
+        }
+
+
+

Variable

+

var1

+

Example module variable

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
attentionIt's a copy of this variable in MaixPy,
so changing it in C++ (e.g. updating the var in the hello function) will not affect the var in MaixPy.
So we add const to this var to avoid this mistake.
value"Sipeed"
readonlyTrue
+
+

C++ defination code:

+ +
const std::string var1 = "Sipeed"
+
+
+

list_var

+

Tensor data type size in bytes

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
attention1. DO NOT use a C/C++ array directly for the python API, the python wrapper does not support it.
Use std::vector instead.
2. It's a copy of this variable in MaixPy,
so changing it in C++ (e.g. updating the var in the hello function) will not affect the var in MaixPy.
So we add const to this var to avoid this mistake.
value{
0, 1, 2, 3, 4, 5, 6, 7, 8, 9}
readonlyTrue
+
+

C++ defination code:

+ +
const std::vector<int> list_var = {
+            0, 1, 2, 3, 4, 5, 6, 7, 8, 9}
+
+
+

test_var

+

Example module variable test_var

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
attentionIt's a copy of this variable in MaixPy, so if you change it in C++, it will not take effect in MaixPy.
And changing it in MaixPy will not take effect in C++ either!!!
If you want to use vars shared between C++ and MaixPy, you can create a class and use its member.
value100
readonlyFalse
+
+

C++ defination code:

+ +
int test_var = 100
+
+
+

Function

+

hello

+ +
def hello(name: str) -> str
+
+

say hello to someone

+ + + + + + + + + + + + + + + + + +
itemdescription
paramname: direction [in], name of someone, string type
returnstring type, content is hello + name
+
+

C++ defination code:

+ +
std::string hello(std::string name)
+
+
+
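For instance, in MaixPy:

from maix import example

print(example.hello("MaixPy"))   # prints "hello" + the given name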

change_arg_name

+ +
def change_arg_name(e: Example) -> Example
+
+

Change arg name example

+ + + + + + + + + + + + + + + + + +
itemdescription
parame: Example object
returnsame as arg
+
+

C++ defination code:

+ +
example::Example *change_arg_name(example::Example *e)
+
+
+

change_arg_name2

+ +
def change_arg_name2(e: Example) -> None
+
+

Change arg name example

+ + + + + + + + + + + + + +
itemdescription
parame: Example object
+
+

C++ defination code:

+ +
void change_arg_name2(example::Example &e)
+
+
+

Class

+

Test

+

Test class

+
+

C++ defination code:

+ +
class Test
+
+
+

__init__

+ +
def __init__(self) -> None
+
+

Test constructor

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
Test()
+
+
+

Example

+

Example class\nthis class will be exported to MaixPy as maix.example.Example

+
+

C++ defination code:

+ +
class Example
+
+
+

__init__

+ +
def __init__(self, name: str, age: int = 18, pet: Kind = ...) -> None
+
+

Example constructor\nthis constructor will be exported to MaixPy as maix.example.Example.init

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramname: direction [in], name of Example, string type
age: direction [in], age of Example, int type, default is 18, value range is [0, 100]
attentionto make the auto-generated code work, the param Kind should be written with its full namespace name example::Kind instead of Kind,
the namespace maix can be omitted.
staticFalse
+
+

C++ defination code:

+ +
Example(std::string &name, int age = 18, example::Kind pet = example::KIND_NONE)
+
+
+

get_name

+ +
def get_name(self) -> str
+
+

get name of Example\nyou can also get name by property name.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnname of Example, string type
staticFalse
+
+

C++ defination code:

+ +
std::string get_name()
+
+
+

get_age

+ +
def get_age(self) -> int
+
+

get age of Example

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnage of Example, int type, value range is [0, 100]
staticFalse
+
+

C++ defination code:

+ +
int get_age()
+
+
+

set_name

+ +
def set_name(self, name: str) -> None
+
+

set name of Example

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramname: name of Example, string type
staticFalse
+
+

C++ defination code:

+ +
void set_name(std::string name)
+
+
+

set_age

+ +
def set_age(self, age: int) -> None
+
+

set age of Example

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramage: age of Example, int type, value range is [0, 100]
staticFalse
+
+

C++ defination code:

+ +
void set_age(int age)
+
+
+

set_pet

+ +
def set_pet(self, pet: Kind) -> None
+
+

Example enum member

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
attention
staticFalse
+
+

C++ defination code:

+ +
void set_pet(example::Kind pet)
+
+
+

get_pet

+ +
def get_pet(self) -> Kind
+
+

Example enum member

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
example::Kind get_pet()
+
+
+

get_list

+ +
def get_list(self, in: list[int]) -> list[int]
+
+

get list example

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramin: direction [in], input list, items are int type.
In MaixPy, you can pass list or tuple to this API
returnlist, items are int type, content is [1, 2, 3] + in. The items are allocated in C++; del in MaixPy will auto free the memory.
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> *get_list(std::vector<int> in)
+
+
+

get_dict

+ +
def get_dict(self, in: dict[str, int]) -> dict[str, int]
+
+

Example dict API

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramin: direction [in], input dict, key is string type, value is int type.
In MaixPy, you can pass dict to this API
returndict, key is string type, value is int type, content is {"a": 1} + in
In MaixPy, return type is dict object
staticFalse
+
+

C++ defination code:

+ +
std::map<std::string, int> get_dict(std::map<std::string, int> &in)
+
+
+

hello

+ +
def hello(name: str) -> str
+
+

say hello to someone

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramname: name of someone, string type
returnstring type, content is Example::hello_str + name
staticTrue
+
+

C++ defination code:

+ +
static std::string hello(std::string name)
+
+
+

hello_bytes

+ +
def hello_bytes(*args, **kwargs)
+
+

param is bytes example

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parambytes: bytes type param
returnbytes type, return value is bytes changed value
staticTrue
+
+

C++ defination code:

+ +
static Bytes *hello_bytes(Bytes &bytes)
+
+
+

callback

+ +
def callback(cb: typing.Callable[[int, int], int]) -> int
+
+

Callback example

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcb: callback function, param is two int type, return is int type
returnint type, return value is cb's return value.
staticTrue
+
+

C++ defination code:

+ +
static int callback(std::function<int(int, int)> cb)
+
+
+
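A sketch of passing a Python callable where C++ expects std::function<int(int, int)>; the two arguments are supplied by the C++ side:

from maix import example

res = example.Example.callback(lambda a, b: a + b)
print(res)   # the callback's return value, as propagated back from C++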

callback2

+ +
def callback2(cb: typing.Callable[[list[int], int], int]) -> int
+
+

Callback example

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcb: callback function, param is a int list type and int type, return is int type
returnint type, return value is cb's return value.
staticTrue
+
+

C++ defination code:

+ +
static int callback2(std::function<int(std::vector<int>, int)> cb)
+
+
+

hello_dict

+ +
def hello_dict(dict: dict[str, int]) -> dict[str, int]
+
+

Dict param example

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdict: dict type param, key is string type, value is int type
staticTrue
+
+

C++ defination code:

+ +
static std::map<std::string, int> *hello_dict(std::map<std::string, int> *dict)
+
+
+

name

+

name member of Example

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::string name
+
+
+

age

+

age member of Example, value range should be [0, 100]

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int age
+
+
+

hello_str

+

hello_str member of Example, default value is "hello "

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticTrue
readonlyFalse
+
+

C++ defination code:

+ +
static std::string hello_str
+
+
+

var1

+

Example module readonly variable

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyTrue
+
+

C++ defination code:

+ +
const std::string var1 = "Example.var1"
+
+
+

var2

+

Example module readonly variable

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyTrue
+
+

C++ defination code:

+ +
std::string var2 = "Example.var2"
+
+
+

dict_test

+ +
def dict_test() -> dict[str, Test]
+
+

dict_test, return dict type, and element is pointer type (alloc in C++).\nThe returned objects will be automatically deleted by the Python GC.

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticTrue
+
+

C++ defination code:

+ +
static std::map<std::string, example::Test *> *dict_test()
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/ext_dev.html b/maixpy/api/maix/ext_dev.html new file mode 100644 index 00000000..35cfe1f7 --- /dev/null +++ b/maixpy/api/maix/ext_dev.html @@ -0,0 +1,353 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.ext_dev - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.ext_dev

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.ext_dev module

+
+

You can use maix.ext_dev to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+ + + + + + + + + + + + + + + + + + + + + + + + + +
modulebrief
imumaix.ext_dev.imu module
qmi8658maix.ext_dev.qmi8658 module
tmc2209maix.ext_dev.tmc2209 module
bm8563maix.ext_dev.bm8563 module
+

Enum

+

Variable

+

Function

+

Class

+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/ext_dev/bm8563.html b/maixpy/api/maix/ext_dev/bm8563.html new file mode 100644 index 00000000..e82cd7ae --- /dev/null +++ b/maixpy/api/maix/ext_dev/bm8563.html @@ -0,0 +1,575 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.ext_dev.bm8563 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.ext_dev.bm8563

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.ext_dev.bm8563 module

+
+

You can use maix.ext_dev.bm8563 to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

Class

+

BM8563

+

Peripheral BM8563 class

+
+

C++ defination code:

+ +
class BM8563
+
+
+

__init__

+ +
def __init__(self, i2c_bus: int = -1) -> None
+
+

BM8563 constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parami2c_bus: i2c bus number.
staticFalse
+
+

C++ defination code:

+ +
BM8563(int i2c_bus=-1)
+
+
+

datetime

+ +
def datetime(self, timetuple: list[int] = []) -> list[int]
+
+

Get or set the date and time of the BM8563.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramtimetuple: time tuple, like (year, month, day[, hour[, minute[, second]]])
returntime tuple, like (year, month, day[, hour[, minute[, second]]])
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> datetime(std::vector<int> timetuple=std::vector<int>())
+
+
+
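A sketch of reading and setting the RTC; the date below is just an example:

from maix.ext_dev import bm8563

rtc = bm8563.BM8563()                     # -1: use the on-board i2c bus
print(rtc.datetime())                     # (year, month, day, hour, minute, second)
rtc.datetime([2024, 12, 31, 23, 59, 0])   # set a new date/time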

init

+ +
def init(self, timetuple: list[int]) -> maix.err.Err
+
+

Initialise the BM8563.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramtimetuple: time tuple, like (year, month, day[, hour[, minute[, second]]])
returnerr::Err type, if init success, return err::ERR_NONE
staticFalse
+
+

C++ defination code:

+ +
err::Err init(std::vector<int> timetuple)
+
+
+

now

+ +
def now(self) -> list[int]
+
+

Get the current datetime.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returntime tuple, like (year, month, day[, hour[, minute[, second]]])
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> now()
+
+
+

deinit

+ +
def deinit(self) -> maix.err.Err
+
+

Deinit the BM8563.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerr::Err err::Err type, if deinit success, return err::ERR_NONE
staticFalse
+
+

C++ defination code:

+ +
err::Err deinit()
+
+
+

hctosys

+ +
def hctosys(self) -> maix.err.Err
+
+

Set the system time from the BM8563

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerr::Err type
staticFalse
+
+

C++ defination code:

+ +
err::Err hctosys()
+
+
+

systohc

+ +
def systohc(self) -> maix.err.Err
+
+

Set the BM8563 from the system time

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerr::Err type
staticFalse
+
+

C++ defination code:

+ +
err::Err systohc()
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/ext_dev/imu.html b/maixpy/api/maix/ext_dev/imu.html new file mode 100644 index 00000000..fe3716e7 --- /dev/null +++ b/maixpy/api/maix/ext_dev/imu.html @@ -0,0 +1,803 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.ext_dev.imu - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.ext_dev.imu

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.ext_dev.imu module

+
+

You can use maix.ext_dev.imu to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Mode

+

imu mode

+ + + + + + + + + + + + + +
itemdescribe
valuesACC_ONLY:
GYRO_ONLY:
DUAL:
+
+

C++ defination code:

+ +
enum class Mode {
+    ACC_ONLY = 0,
+    GYRO_ONLY,
+    DUAL
+}
+
+
+

AccScale

+

imu acc scale

+ + + + + + + + + + + + + +
itemdescribe
valuesACC_SCALE_2G:
ACC_SCALE_4G:
ACC_SCALE_8G:
ACC_SCALE_16G:
+
+

C++ defination code:

+ +
enum class AccScale {
+    ACC_SCALE_2G = 0,
+    ACC_SCALE_4G,
+    ACC_SCALE_8G,
+    ACC_SCALE_16G
+}
+
+
+

AccOdr

+

imu acc output data rate

+ + + + + + + + + + + + + +
itemdescribe
valuesACC_ODR_8000: Accelerometer ODR set to 8000 Hz.
ACC_ODR_4000: Accelerometer ODR set to 4000 Hz.
ACC_ODR_2000: Accelerometer ODR set to 2000 Hz.
ACC_ODR_1000: Accelerometer ODR set to 1000 Hz.
ACC_ODR_500: Accelerometer ODR set to 500 Hz.
ACC_ODR_250: Accelerometer ODR set to 250 Hz.
ACC_ODR_125: Accelerometer ODR set to 125 Hz.
ACC_ODR_62_5: Accelerometer ODR set to 62.5 Hz.
ACC_ODR_31_25: Accelerometer ODR set to 31.25 Hz.
ACC_ODR_128: Accelerometer ODR set to 128 Hz.
ACC_ODR_21: Accelerometer ODR set to 21 Hz.
ACC_ODR_11: Accelerometer ODR set to 11 Hz.
ACC_ODR_3: Accelerometer ODR set to 3 Hz.
+
+

C++ defination code:

+ +
enum class AccOdr {
+    ACC_ODR_8000,      // Accelerometer ODR set to 8000 Hz.
+    ACC_ODR_4000,      // Accelerometer ODR set to 4000 Hz.
+    ACC_ODR_2000,      // Accelerometer ODR set to 2000 Hz.
+    ACC_ODR_1000,      // Accelerometer ODR set to 1000 Hz.
+    ACC_ODR_500,       // Accelerometer ODR set to 500 Hz.
+    ACC_ODR_250,       // Accelerometer ODR set to 250 Hz.
+    ACC_ODR_125,       // Accelerometer ODR set to 125 Hz.
+    ACC_ODR_62_5,      // Accelerometer ODR set to 62.5 Hz.
+    ACC_ODR_31_25,     // Accelerometer ODR set to 31.25 Hz.
+    ACC_ODR_128 = 12,  // Accelerometer ODR set to 128 Hz.
+    ACC_ODR_21,        // Accelerometer ODR set to 21 Hz.
+    ACC_ODR_11,        // Accelerometer ODR set to 11 Hz.
+    ACC_ODR_3,         // Accelerometer ODR set to 3 Hz.
+}
+
+
+

GyroScale

+

imu gyro scale

+ + + + + + + + + + + + + +
itemdescribe
valuesGYRO_SCALE_16DPS: Gyroscope scale set to ±16 degrees per second.
GYRO_SCALE_32DPS: Gyroscope scale set to ±32 degrees per second.
GYRO_SCALE_64DPS: Gyroscope scale set to ±64 degrees per second.
GYRO_SCALE_128DPS: Gyroscope scale set to ±128 degrees per second.
GYRO_SCALE_256DPS: Gyroscope scale set to ±256 degrees per second.
GYRO_SCALE_512DPS: Gyroscope scale set to ±512 degrees per second.
GYRO_SCALE_1024DPS: Gyroscope scale set to ±1024 degrees per second.
GYRO_SCALE_2048DPS: Gyroscope scale set to ±2048 degrees per second.
+
+

C++ defination code:

+ +
enum class GyroScale {
+    GYRO_SCALE_16DPS = 0,       // Gyroscope scale set to ±16 degrees per second.
+    GYRO_SCALE_32DPS,            // Gyroscope scale set to ±32 degrees per second.
+    GYRO_SCALE_64DPS,            // Gyroscope scale set to ±64 degrees per second.
+    GYRO_SCALE_128DPS,           // Gyroscope scale set to ±128 degrees per second.
+    GYRO_SCALE_256DPS,           // Gyroscope scale set to ±256 degrees per second.
+    GYRO_SCALE_512DPS,           // Gyroscope scale set to ±512 degrees per second.
+    GYRO_SCALE_1024DPS,          // Gyroscope scale set to ±1024 degrees per second.
+    GYRO_SCALE_2048DPS,          // Gyroscope scale set to ±2048 degrees per second.
+}
+
+
+

GyroOdr

+

imu gyro output data rate

+ + + + + + + + + + + + + +
itemdescribe
valuesGYRO_ODR_8000: Gyroscope ODR set to 8000 Hz.
GYRO_ODR_4000: Gyroscope ODR set to 4000 Hz.
GYRO_ODR_2000: Gyroscope ODR set to 2000 Hz.
GYRO_ODR_1000: Gyroscope ODR set to 1000 Hz.
GYRO_ODR_500: Gyroscope ODR set to 500 Hz.
GYRO_ODR_250: Gyroscope ODR set to 250 Hz.
GYRO_ODR_125: Gyroscope ODR set to 125 Hz.
GYRO_ODR_62_5: Gyroscope ODR set to 62.5 Hz.
GYRO_ODR_31_25: Gyroscope ODR set to 31.25 Hz.
+
+

C++ defination code:

+ +
enum class GyroOdr {
+    GYRO_ODR_8000,     // Gyroscope ODR set to 8000 Hz.
+    GYRO_ODR_4000,     // Gyroscope ODR set to 4000 Hz.
+    GYRO_ODR_2000,     // Gyroscope ODR set to 2000 Hz.
+    GYRO_ODR_1000,     // Gyroscope ODR set to 1000 Hz.
+    GYRO_ODR_500,      // Gyroscope ODR set to 500 Hz.
+    GYRO_ODR_250,      // Gyroscope ODR set to 250 Hz.
+    GYRO_ODR_125,      // Gyroscope ODR set to 125 Hz.
+    GYRO_ODR_62_5,     // Gyroscope ODR set to 62.5 Hz.
+    GYRO_ODR_31_25,    // Gyroscope ODR set to 31.25 Hz.
+}
+
+
+

Variable

+

Function

+

Class

+

IMU

+

QMI8656 driver class

+
+

C++ defination code:

+ +
class IMU
+
+
+

__init__

+ +
def __init__(self, driver: str, i2c_bus: int = -1, addr: int = 107, freq: int = 400000, mode: Mode = ..., acc_scale: AccScale = ..., acc_odr: AccOdr = ..., gyro_scale: GyroScale = ..., gyro_odr: GyroOdr = ..., block: bool = True) -> None
+
+

Construct a new IMU object, will open IMU

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdriver: driver name, only support "qmi8656"
i2c_bus: i2c bus number. Automatically selects the on-board imu when -1 is passed in.
addr: IMU i2c addr.
freq: IMU freq
mode: IMU Mode: ACC_ONLY/GYRO_ONLY/DUAL
acc_scale: acc scale, see @imu::AccScale
acc_odr: acc output data rate, see @imu::AccOdr
gyro_scale: gyro scale, see @imu::GyroScale
gyro_odr: gyro output data rate, see @imu::GyroOdr
block: block or non-block, default is true
staticFalse
+
+

C++ defination code:

+ +
IMU(std::string driver, int i2c_bus=-1, int addr=0x6B, int freq=400000,
+            maix::ext_dev::imu::Mode mode=maix::ext_dev::imu::Mode::DUAL,
+            maix::ext_dev::imu::AccScale acc_scale=maix::ext_dev::imu::AccScale::ACC_SCALE_2G,
+            maix::ext_dev::imu::AccOdr acc_odr=maix::ext_dev::imu::AccOdr::ACC_ODR_8000,
+            maix::ext_dev::imu::GyroScale gyro_scale=maix::ext_dev::imu::GyroScale::GYRO_SCALE_16DPS,
+            maix::ext_dev::imu::GyroOdr gyro_odr=maix::ext_dev::imu::GyroOdr::GYRO_ODR_8000,
+            bool block=true)
+
+
+

read

+ +
def read(self) -> list[float]
+
+

Read data from IMU.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnlist type. If only one of the outputs is initialized, only [x,y,z] of that output will be returned.
If all outputs are initialized, [acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z] is returned.
staticFalse
+
+

C++ defination code:

+ +
std::vector<float> read()
+
+
+
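A sketch of reading the on-board IMU in the default DUAL mode; the driver name string is the one listed above:

from maix.ext_dev import imu

sensor = imu.IMU("qmi8656")    # driver name from this doc; i2c_bus=-1 selects the on-board IMU
data = sensor.read()           # [acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z] in DUAL mode
print(data)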

calculate_calibration

+ +
def calculate_calibration(self, time_ms: int = 30000) -> maix.err.Err
+
+

Calculate calibration, save calibration data to /maixapp/shart/imu_calibration

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramtime_ms: maximum calculation time, unit:ms
returnerr::Err
staticFalse
+
+

C++ defination code:

+ +
err::Err calculate_calibration(uint64_t time_ms = 30 * 1000)
+
+
+

get_calibration

+ +
def get_calibration(self) -> list[float]
+
+

Get calibration data

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn an array, format is [acc_x_bias, acc_y_bias, acc_z_bias, gyro_x_bias, gyro_y_bias, gyro_z_bias]
If the calibration file cannot be found, an empty array will be returned.
staticFalse
+
+

C++ defination code:

+ +
std::vector<double> get_calibration()
+
+
+

Gcsv

+

Gcsv class

+
+

C++ defination code:

+ +
class Gcsv
+
+
+

__init__

+ +
def __init__(self) -> None
+
+

Construct a new IMU object

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
Gcsv()
+
+
+

open

+ +
def open(self, path: str, tscale: float = 0.001, gscale: float = 1, ascale: float = 1, mscale: float = 1, version: str = '1.3', id: str = 'imu', orientation: str = 'YxZ') -> maix.err.Err
+
+

Open a file

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampath: the path where data will be saved
tscale: time scale, default is 0.001
gscale: gyroscope scale factor, default is 1, unit:g
ascale: accelerometer scale factor, default is 1, unit:radians/second
mscale: magnetometer scale factor, default is 1(unused)
version: version number, default is "1.3"
id: identifier for the IMU, default is "imu"
orientation: sensor orientation, default is "YxZ"
returnerror code
staticFalse
+
+

C++ defination code:

+ +
err::Err open(std::string path, double tscale = 0.001, double gscale = 1, double ascale = 1, double mscale = 1, std::string version = "1.3", std::string id = "imu", std::string orientation = "YxZ")
+
+
+

close

+ +
def close(self) -> maix.err.Err
+
+

Close file

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code
staticFalse
+
+

C++ defination code:

+ +
err::Err close()
+
+
+

is_opened

+ +
def is_opened(self) -> bool
+
+

Check if the object is already open

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returntrue, opened; false, not opened
staticFalse
+
+

C++ defination code:

+ +
bool is_opened()
+
+
+

write

+ +
def write(self, timestamp: float, gyro: list[float], acc: list[float], mag: list[float] = []) -> maix.err.Err
+
+

Write imu data to gcsv file

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramt: Timestamp of the current data. The actual value is equal to t * tscale. unit:s
gyro: Gyroscope data must be an array consisting of x, y, and z-axis data. The actual value is equal to gyro * gscale. unit:g
acc: Acceleration data must be an array consisting of x, y, and z-axis data. The actual value is equal to acc * ascale.unit:radians/second
mag: Magnetic data must be an array consisting of x, y, and z-axis data. Currently not supported.
staticFalse
+
+

C++ defination code:

+ +
err::Err write(double timestamp, std::vector<double> gyro, std::vector<double> acc, std::vector<double> mag = std::vector<double>())
+
+
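A minimal logging sketch tying Gcsv open()/write()/close() together (assumptions: the Gcsv and IMU classes live in maix.ext_dev.imu, the IMU constructor arguments are omitted, and the log path is only an example):
from maix.ext_dev import imu

sensor = imu.IMU()                         # constructor arguments omitted (assumption)
gcsv = imu.Gcsv()
gcsv.open("/root/log.gcsv", tscale=0.001)  # stored time = timestamp * tscale
t_ms = 0
for _ in range(100):
    data = sensor.read()                   # [acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z]
    gcsv.write(t_ms, data[3:6], data[0:3]) # gyro first, then acc, as in the write() signature
    t_ms += 10
gcsv.close()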
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/ext_dev/qmi8658.html b/maixpy/api/maix/ext_dev/qmi8658.html new file mode 100644 index 00000000..c1ce9ca2 --- /dev/null +++ b/maixpy/api/maix/ext_dev/qmi8658.html @@ -0,0 +1,408 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.ext_dev.qmi8658 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.ext_dev.qmi8658

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.ext_dev.qmi8658 module

+
+

You can use maix.ext_dev.qmi8658 to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

Class

+

QMI8658

+

QMI8658 driver class

+
+

C++ defination code:

+ +
class QMI8658
+
+
+

__init__

+ +
def __init__(self, i2c_bus: int = -1, addr: int = 107, freq: int = 400000, mode: maix.ext_dev.imu.Mode = ..., acc_scale: maix.ext_dev.imu.AccScale = ..., acc_odr: maix.ext_dev.imu.AccOdr = ..., gyro_scale: maix.ext_dev.imu.GyroScale = ..., gyro_odr: maix.ext_dev.imu.GyroOdr = ..., block: bool = True) -> None
+
+

Construct a new QMI8658 object, will open QMI8658

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parami2c_bus: i2c bus number. Automatically selects the on-board qmi8658 when -1 is passed in.
addr: QMI8658 i2c addr.
freq: QMI8658 freq
mode: QMI8658 Mode: ACC_ONLY/GYRO_ONLY/DUAL
acc_scale: acc scale, see @qmi8658::AccScale
acc_odr: acc output data rate, see @qmi8658::AccOdr
gyro_scale: gyro scale, see @qmi8658::GyroScale
gyro_odr: gyro output data rate, see @qmi8658::GyroOdr
block: block or non-block, default is true
staticFalse
+
+

C++ defination code:

+ +
QMI8658(int i2c_bus=-1, int addr=0x6B, int freq=400000,
+            maix::ext_dev::imu::Mode mode=maix::ext_dev::imu::Mode::DUAL,
+            maix::ext_dev::imu::AccScale acc_scale=maix::ext_dev::imu::AccScale::ACC_SCALE_2G,
+            maix::ext_dev::imu::AccOdr acc_odr=maix::ext_dev::imu::AccOdr::ACC_ODR_8000,
+            maix::ext_dev::imu::GyroScale gyro_scale=maix::ext_dev::imu::GyroScale::GYRO_SCALE_16DPS,
+            maix::ext_dev::imu::GyroOdr gyro_odr=maix::ext_dev::imu::GyroOdr::GYRO_ODR_8000,
+            bool block=true)
+
+
+

read

+ +
def read(self) -> list[float]
+
+

Read data from QMI8658.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnlist type. If only one of the outputs is initialized, only [x,y,z] of that output will be returned.
If all outputs are initialized, [acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z] is returned.
staticFalse
+
+

C++ defination code:

+ +
std::vector<float> read()
+
+
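A minimal sketch using the documented constructor defaults; passing i2c_bus=-1 selects the on-board sensor:
import time
from maix.ext_dev import qmi8658

sensor = qmi8658.QMI8658(i2c_bus=-1)   # default mode is DUAL (acc + gyro)
while True:
    print(sensor.read())               # [acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z] in DUAL mode
    time.sleep(0.01)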
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/ext_dev/tmc2209.html b/maixpy/api/maix/ext_dev/tmc2209.html new file mode 100644 index 00000000..6201f9ee --- /dev/null +++ b/maixpy/api/maix/ext_dev/tmc2209.html @@ -0,0 +1,886 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.ext_dev.tmc2209 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.ext_dev.tmc2209

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.ext_dev.tmc2209 module

+
+

You can use maix.ext_dev.tmc2209 to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

slide_scan

+ +
def slide_scan(port: str, addr: int, baud: int, step_angle: float, micro_step: int, round_mm: float, speed_mm_s: float, dir: bool = True, use_internal_sense_resistors: bool = True, run_current_per: int = 100, hold_current_per: int = 100, conf_save_path: str = './slide_conf.bin', force_update: bool = True) -> None
+
+

Scan and initialize the slide with the given parameters

+ + + + + + + + + + + + + +
itemdescription
paramport: UART port, string type.
addr: TMC2209 UART address, range 0x00~0x03, integer type.
baud: UART baud rate, integer type.
step_angle: Motor step angle, float type.
micro_step: Motor micro step, options: 1/2/4/8/16/32/64/128/256, integer type.
round_mm: Round distance in mm, float type.
speed_mm_s: Speed of the slide in mm/s, float type.
dir: Direction of movement, boolean type. Default is true.
use_internal_sense_resistors: Enable internal sense resistors if true, disable if false, boolean type. Default is true.
run_current_per: Motor run current percentage, range 0~100(%), integer type. Default is 100%.
hold_current_per: Motor hold current percentage, range 0~100(%), integer type. Default is 100%.
conf_save_path: Configuration save path, string type. Default is "./slide_conf.bin".
force_update: Force update the configuration if true, boolean type. Default is true.
+
+

C++ defination code:

+ +
void slide_scan(const char* port, uint8_t addr, long baud, /* Uart init param */
+                float step_angle, uint16_t micro_step, float round_mm, /* Motor init param */
+                float speed_mm_s, bool dir=true, bool use_internal_sense_resistors=true, uint8_t run_current_per=100,
+                uint8_t hold_current_per=100, const std::string conf_save_path="./slide_conf.bin",
+                bool force_update=true  /* Driver init param */)
+
+
+
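An example call as a sketch; the UART port, address and motor parameters below are placeholders that must match the actual hardware:
from maix.ext_dev import tmc2209

# scan the slide once to generate the stall-detection configuration file
tmc2209.slide_scan("/dev/ttyS1", 0x00, 115200,   # placeholder port / addr / baud
                   1.8, 256, 60.0,               # step_angle, micro_step, round_mm
                   60.0,                         # speed_mm_s
                   conf_save_path="./slide_conf.bin")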

slide_test

+ +
def slide_test(port: str, addr: int, baud: int, step_angle: float, micro_step: int, round_mm: float, speed_mm_s: float, dir: bool = True, use_internal_sense_resistors: bool = True, run_current_per: int = 100, hold_current_per: int = 100, conf_save_path: str = './slide_conf.bin') -> None
+
+

Test the slide with the given parameters. This function tests the slide by moving it in the specified direction until a stall condition is detected, as defined in the configuration file.

+ + + + + + + + + + + + + +
itemdescription
paramport: UART port, string type.
addr: TMC2209 UART address, range 0x00~0x03, integer type.
baud: UART baud rate, integer type.
step_angle: Motor step angle, float type.
micro_step: Motor micro step, options: 1/2/4/8/16/32/64/128/256, integer type.
round_mm: Round distance in mm, float type.
speed_mm_s: Speed of the slide in mm/s, float type.
dir: Direction of movement, boolean type. Default is true.
use_internal_sense_resistors: Enable internal sense resistors if true, disable if false, boolean type. Default is true.
run_current_per: Motor run current percentage, range 0~100(%), integer type. Default is 100%.
hold_current_per: Motor hold current percentage, range 0~100(%), integer type. Default is 100%.
conf_save_path: Configuration save path, string type. Default is "./slide_conf.bin".
+
+

C++ defination code:

+ +
void slide_test(const char* port, uint8_t addr, long baud, /* Uart init param */
+                float step_angle, uint16_t micro_step, float round_mm, /* Motor init param */
+                float speed_mm_s, bool dir=true, bool use_internal_sense_resistors=true, uint8_t run_current_per=100,
+                uint8_t hold_current_per=100, const std::string conf_save_path="./slide_conf.bin"/* Driver init param */)
+
+
+

Class

+

Slide

+

Slide Class

+
+

C++ defination code:

+ +
class Slide
+
+
+

__init__

+ +
def __init__(self, port: str, addr: int, baud: int, step_angle: float, micro_step: int, round_mm: float, speed_mm_s: float = -1, use_internal_sense_resistors: bool = True, run_current_per: int = 100, hold_current_per: int = 100, cfg_file_path: str = '') -> None
+
+

Constructor for Slide. Initializes the Slide object with the specified parameters.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramport: UART port, string type.
addr: TMC2209 UART address, range 0x00~0x03, integer type.
baud: UART baud rate, integer type.
step_angle: Motor step angle, float type.
micro_step: Motor micro step, options: 1/2/4/8/16/32/64/128/256, integer type.
round_mm: Round distance in mm, float type.
speed_mm_s: Speed of the slide in mm/s, float type. Default is -1, indicating the use of a default speed factor.
use_internal_sense_resistors: Enable internal sense resistors if TRUE, disable if FALSE, boolean type. Default is TRUE.
run_current_per: Motor run current percentage, range 0~100(%), integer type. Default is 100%.
hold_current_per: Motor hold current percentage, range 0~100(%), integer type. Default is 100%.
cfg_file_path: Configuration file path, string type. Default is an empty string, indicating no configuration file.
staticFalse
+
+

C++ defination code:

+ +
Slide(const char* port, uint8_t addr, long baud, /* Uart init param */
+            float step_angle, uint16_t micro_step, float round_mm,   /* Motor init param */
+            float speed_mm_s=-1, bool use_internal_sense_resistors=true, uint8_t run_current_per=100,
+            uint8_t hold_current_per=100, std::string cfg_file_path="" /* Driver init param */)
+
+
+

load_conf

+ +
def load_conf(self, path: str) -> None
+
+

Load configuration from a file. Loads the configuration settings for the slide from the specified file path.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampath: Path to the configuration file, string type.
staticFalse
+
+

C++ defination code:

+ +
void load_conf(std::string path)
+
+
+

move

+ +
def move(self, oft: float, speed_mm_s: int = -1, check: bool = True) -> None
+
+

Move the slide by a specified length. Moves the slide by the specified length at the given speed. Optionally checks for stall conditions.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramoft: Length to move, float type.
speed_mm_s: Speed in mm/s. Default is -1, indicating the use of the default speed set during initialization.
check: Enable movement check if true, boolean type. Default is true.
staticFalse
+
+

C++ defination code:

+ +
void move(float oft, int speed_mm_s=-1, bool check=true)
+
+
+
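A minimal usage sketch for the Slide class, assuming a configuration file was produced by slide_scan earlier; port, address and motor parameters are placeholders:
from maix.ext_dev import tmc2209

slide = tmc2209.Slide("/dev/ttyS1", 0x00, 115200,   # placeholder port / addr / baud
                      1.8, 256, 60.0,               # step_angle, micro_step, round_mm
                      cfg_file_path="./slide_conf.bin")
slide.move(50)    # move +50 mm at the default speed
slide.move(-50)   # move back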

reset

+ +
def reset(self, dir: bool = False, speed_mm_s: int = -1) -> None
+
+

Reset the slide position. Resets the slide position in the specified direction at the given speed.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdir: Direction of reset, boolean type. Default is false.
speed_mm_s: Speed in mm/s. Default is -1, indicating the use of the speed set during initialization.
staticFalse
+
+

C++ defination code:

+ +
void reset(bool dir=false, int speed_mm_s=-1)
+
+
+

stop_default_per

+

Get or set the stop default percentage. Retrieves or sets the stop default percentage. If the parameter is -1, it returns the current setting.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramper: Stop default percentage, range 0~100(%), integer type. Default is -1, indicating no change.
returnint Current stop default percentage if per is -1, otherwise the new set percentage.
staticFalse
+
+

C++ defination code:

+ +
int stop_default_per(int per=-1)
+
+
+

run_current_per

+ +
def run_current_per(self, per: int = -1) -> int
+
+

Get or set the run current percentage. Retrieves or sets the run current percentage. If the parameter is -1, it returns the current setting.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramper: Run current percentage, range 0~100(%), integer type. Default is -1, indicating no change.
returnint Current run current percentage if per is -1, otherwise the new set percentage.
staticFalse
+
+

C++ defination code:

+ +
int run_current_per(int per=-1)
+
+
+

hold_current_per

+ +
def hold_current_per(self, per: int = -1) -> int
+
+

Get or set the hold current percentage. Retrieves or sets the hold current percentage. If the parameter is -1, it returns the current setting.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramper: Hold current percentage, range 0~100(%), integer type. Default is -1, indicating no change.
returnint Current hold current percentage if per is -1, otherwise the new set percentage.
staticFalse
+
+

C++ defination code:

+ +
int hold_current_per(int per=-1)
+
+
+

use_internal_sense_resistors

+ +
def use_internal_sense_resistors(self, b: bool = True) -> None
+
+

Enable or disable internal sense resistors. Enables or disables the internal sense resistors based on the provided boolean value.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramb: Boolean value to enable (true) or disable (false) internal sense resistors. Default is true.
staticFalse
+
+

C++ defination code:

+ +
void use_internal_sense_resistors(bool b=true)
+
+
+

ScrewSlide

+

ScrewSlide Class

+
+

C++ defination code:

+ +
class ScrewSlide
+
+
+

__init__

+ +
def __init__(self, port: str, addr: int, baud: int, step_angle: float, micro_step: int, screw_pitch: float, speed_mm_s: float = -1, use_internal_sense_resistors: bool = True, run_current_per: int = 100, hold_current_per: int = 100) -> None
+
+

Constructor for ScrewSlide

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramport: UART port, string type.
addr: TMC2209 UART address, range 0x00~0x03, integer type.
baud: UART baud rate, integer type.
step_angle: Motor step angle, float type.
micro_step: Motor micro step, options: 1/2/4/8/16/32/64/128/256, integer type.
screw_pitch: Screw pitch of the slide, float type.
speed_mm_s: Speed of the slide in mm/s, 10 means 10mm/s, float type.
Default is -1, indicating the use of a default speed factor.
use_internal_sense_resistors: Enable internal sense resistors if TRUE,
disable if FALSE, boolean type. Default is TRUE.
run_current_per: Motor run current percentage, range 0~100(%), integer type. Default is 100%.
hold_current_per: Motor hold current percentage, range 0~100(%), integer type. Default is 100%.
staticFalse
+
+

C++ defination code:

+ +
ScrewSlide(const char* port, uint8_t addr, long baud, /* Uart init param */
+                float step_angle, uint16_t micro_step, float screw_pitch,   /* Motor init param */
+                float speed_mm_s=-1, bool use_internal_sense_resistors=true, uint8_t run_current_per=100,
+                uint8_t hold_current_per=100)
+
+
+

move

+ +
def move(self, oft: float, speed_mm_s: int = -1, callback: typing.Callable[[float], bool] = None) -> None
+
+

Move the slide by a specified length

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramoft: Length to move, 10 means 10mm, float type.
Positive values move the slide in the positive direction, negative values move it in the opposite direction.
speed_mm_s: Speed in mm/s. Default is -1, indicating the use of the default speed set during initialization.
callback: Callback function to be called during movement.
The callback function receives the current progress percentage (0~100%) of the movement.
If the callback returns true, the move operation will be terminated immediately. Default is nullptr.
staticFalse
+
+

C++ defination code:

+ +
void move(float oft, int speed_mm_s=-1, std::function<bool(float)> callback=nullptr)
+
+
+
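A sketch showing the progress callback; returning True from the callback stops the move immediately. Port, address and motor parameters are placeholders:
from maix.ext_dev import tmc2209

def on_progress(percent):
    print("moved %.1f%%" % percent)
    return False                                 # return True to abort the move

screw = tmc2209.ScrewSlide("/dev/ttyS1", 0x00, 115200,   # placeholder port / addr / baud
                           1.8, 256, 2.0)                 # step_angle, micro_step, screw_pitch (mm)
screw.move(10, callback=on_progress)             # move +10 mm while reporting progress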

reset

+ +
def reset(self, callback: typing.Callable[[], bool], dir: bool = False, speed_mm_s: int = -1) -> None
+
+

Reset the slide position

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcallback: Callback function to be called during the reset loop.
The reset operation will only terminate if the callback returns true.
dir: Direction of reset. Default is false.
speed_mm_s: Speed in mm/s. Default is -1, indicating the use of the speed set during initialization.
staticFalse
+
+

C++ defination code:

+ +
void reset(std::function<bool(void)> callback, bool dir=false, int speed_mm_s=-1)
+
+
+

run_current_per

+ +
def run_current_per(self, per: int = -1) -> int
+
+

Get or set the run current percentage

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramper: Run current percentage, range 0~100(%).
Default is -1, indicating no change and returning the current run current percentage.
returnint Current run current percentage if per is -1, otherwise the new set percentage.
staticFalse
+
+

C++ defination code:

+ +
int run_current_per(int per=-1)
+
+
+

hold_current_per

+ +
def hold_current_per(self, per: int = -1) -> int
+
+

Get or set the hold current percentage

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramper: Hold current percentage, range 0~100(%). Default is -1, indicating no change and returning the current hold current percentage.
returnint Current hold current percentage if per is -1, otherwise the new set percentage.
staticFalse
+
+

C++ defination code:

+ +
int hold_current_per(int per=-1)
+
+
+

use_internal_sense_resistors

+ +
def use_internal_sense_resistors(self, b: bool = True) -> None
+
+

Enable or disable internal sense resistors

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramb: Boolean value to enable (true) or disable (false) internal sense resistors. Default is true.
staticFalse
+
+

C++ defination code:

+ +
void use_internal_sense_resistors(bool b=true)
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/fs.html b/maixpy/api/maix/fs.html new file mode 100644 index 00000000..963f8d9e --- /dev/null +++ b/maixpy/api/maix/fs.html @@ -0,0 +1,1280 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.fs - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.fs

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.fs module

+
+

You can use maix.fs to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

SEEK

+

SEEK enums

+ + + + + + + + + + + + + +
itemdescribe
valuesSEEK_SET: Seek from beginning of file.
SEEK_CUR: Seek from current position.
SEEK_END: Seek from end of file.
+
+

C++ defination code:

+ +
enum SEEK
+    {
+        SEEK_SET = 0,  // Seek from beginning of file.
+        SEEK_CUR = 1,  // Seek from current position.
+        SEEK_END = 2,  // Seek from end of file.
+    }
+
+
+

Variable

+

Function

+

isabs

+ +
def isabs(path: str) -> bool
+
+

Check if the path is absolute path

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath: path to check
returntrue if path is absolute path
+
+

C++ defination code:

+ +
bool isabs(const std::string &path)
+
+
+

isdir

+ +
def isdir(path: str) -> bool
+
+

Check if the path is a directory, if not exist, throw exception

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath: path to check
returntrue if path is a directory
+
+

C++ defination code:

+ +
bool isdir(const std::string &path)
+
+
+

isfile

+ +
def isfile(path: str) -> bool
+
+

Check if the path is a file, if not exist, throw exception

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath: path to check
returntrue if path is a file
+
+

C++ defination code:

+ +
bool isfile(const std::string &path)
+
+
+ + +
def islink(path: str) -> bool
+
+

Check if the path is a link, if not exist, throw exception

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath: path to check
returntrue if path is a link
+
+

C++ defination code:

+ +
bool islink(const std::string &path)
+
+
+ + +
def symlink(src: str, link: str, force: bool = False) -> maix.err.Err
+
+

Create soft link

+ + + + + + + + + + + + + +
itemdescription
paramsrc: real file path
link: link file path
force: force link; if the link file already exists, it will be deleted first and then created.
+
+

C++ defination code:

+ +
err::Err symlink(const std::string &src, const std::string &link, bool force = false)
+
+
+

exists

+ +
def exists(path: str) -> bool
+
+

Check if the path exists

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath: path to check
returntrue if path exists
+
+

C++ defination code:

+ +
bool exists(const std::string &path)
+
+
+

mkdir

+ +
def mkdir(path: str, exist_ok: bool = True, recursive: bool = True) -> maix.err.Err
+
+

Create a directory recursively

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath: path to create
exist_ok: if true, also return true if directory already exists
recursive: if true, create directory recursively, otherwise, only create one directory, default is true
returnerr::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed
+
+

C++ defination code:

+ +
err::Err mkdir(const std::string &path, bool exist_ok = true, bool recursive = true)
+
+
+

rmdir

+ +
def rmdir(path: str, recursive: bool = False) -> maix.err.Err
+
+

Remove a directory

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath: path to remove
recursive: if true, remove directory recursively, otherwise, only remove empty directory, default is false
returnerr::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed
+
+

C++ defination code:

+ +
err::Err rmdir(const std::string &path, bool recursive = false)
+
+
+

remove

+ +
def remove(path: str) -> maix.err.Err
+
+

Remove a file

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath: path to remove
returnerr::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed
+
+

C++ defination code:

+ +
err::Err remove(const std::string &path)
+
+
+

rename

+ +
def rename(src: str, dst: str) -> maix.err.Err
+
+

Rename a file or directory

+ + + + + + + + + + + + + + + + + +
itemdescription
paramsrc: source path
dst: destination path, if destination dirs not exist, will auto create
returnerr::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed
+
+

C++ defination code:

+ +
err::Err rename(const std::string &src, const std::string &dst)
+
+
+

sync

+ +
def sync() -> None
+
+

Sync files, ensure they're written to disk from RAM

+
+

C++ defination code:

+ +
void sync()
+
+
+

getsize

+ +
def getsize(path: str) -> int
+
+

Get file size

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath: path to get size
returnfile size if success, -err::Err code if failed
+
+

C++ defination code:

+ +
int getsize(const std::string &path)
+
+
+

dirname

+ +
def dirname(path: str) -> str
+
+

Get directory name of path

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath: path to get dirname
returndirname if success, empty string if failed
+
+

C++ defination code:

+ +
std::string dirname(const std::string &path)
+
+
+

basename

+ +
def basename(path: str) -> str
+
+

Get base name of path

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath: path to get basename
returnbasename if success, empty string if failed
+
+

C++ defination code:

+ +
std::string basename(const std::string &path)
+
+
+

abspath

+ +
def abspath(path: str) -> str
+
+

Get absolute path

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath: path to get absolute path
returnabsolute path if success, empty string if failed
+
+

C++ defination code:

+ +
std::string abspath(const std::string &path)
+
+
+

getcwd

+ +
def getcwd() -> str
+
+

Get current working directory

+ + + + + + + + + + + + + +
itemdescription
returncurrent working directory absolute path
+
+

C++ defination code:

+ +
std::string getcwd()
+
+
+

realpath

+ +
def realpath(path: str) -> str
+
+

Get realpath of path

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath: path to get realpath
returnrealpath if success, empty string if failed
+
+

C++ defination code:

+ +
std::string realpath(const std::string &path)
+
+
+

splitext

+ +
def splitext(path: str) -> list[str]
+
+

Get file extension

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath: path to get extension
returnprefix_path and extension list if success, empty string if failed
+
+

C++ defination code:

+ +
std::vector<std::string> splitext(const std::string &path)
+
+
+

listdir

+ +
def listdir(path: str, recursive: bool = False, full_path: bool = False) -> list[str]
+
+

List files in directory

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath: path to list
recursive: if true, list recursively, otherwise, only list current directory, default is false
full_path: if true, return full path, otherwise, only return basename, default is false
returnfiles list if success, nullptr if failed, you should manually delete it in C++.
+
+

C++ defination code:

+ +
std::vector<std::string> *listdir(const std::string &path, bool recursive = false, bool full_path = false)
+
+
+

open

+ +
def open(path: str, mode: str) -> File
+
+

Open a file, and return a File object

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath: path to open
mode: open mode, support "r", "w", "a", "r+", "w+", "a+", "rb", "wb", "ab", "rb+", "wb+", "ab+"
returnFile object if success(need to delete object manually in C/C++), nullptr if failed
+
+

C++ defination code:

+ +
fs::File *open(const std::string &path, const std::string &mode)
+
+
+
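A small end-to-end sketch using the functions above (the paths are only examples):
from maix import fs

fs.mkdir("/root/test_dir")                        # recursive by default
f = fs.open("/root/test_dir/hello.txt", "w")
f.write(list(b"hello maix"))                      # File.write() takes a list of byte values
f.close()

f = fs.open("/root/test_dir/hello.txt", "r")
data = f.read(fs.getsize("/root/test_dir/hello.txt"))
print(bytes(data))                                # File.read() returns a list of byte values
f.close()
print(fs.listdir("/root/test_dir"))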

tempdir

+ +
def tempdir() -> str
+
+

Get temp files directory

+ + + + + + + + + + + + + +
itemdescription
returntemp files directory
+
+

C++ defination code:

+ +
std::string tempdir()
+
+
+

Class

+

File

+

File read write ops

+
+

C++ defination code:

+ +
class File
+
+
+

__init__

+ +
def __init__(self) -> None
+
+

Construct File object

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
File()
+
+
+

open

+ +
def open(self, path: str, mode: str) -> maix.err.Err
+
+

Open a file

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampath: path to open
mode: open mode, support "r", "w", "a", "r+", "w+", "a+", "rb", "wb", "ab", "rb+", "wb+", "ab+"
returnerr::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed
staticFalse
+
+

C++ defination code:

+ +
err::Err open(const std::string &path, const std::string &mode)
+
+
+

close

+ +
def close(self) -> None
+
+

Close a file

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void close()
+
+
+

read

+ +
def read(self, size: int) -> list[int]
+
+

Read data from file API2

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramsize: max read size
returnbytes data if success(need delete manually in C/C++), nullptr if failed
staticFalse
+
+

C++ defination code:

+ +
std::vector<uint8_t> *read(int size)
+
+
+

readline

+ +
def readline(self) -> str
+
+

Read line from file

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnline if success, empty string if failed. You need to delete the returned object manually in C/C++.
staticFalse
+
+

C++ defination code:

+ +
std::string *readline()
+
+
+

eof

+ +
def eof(self) -> int
+
+

End of file or not

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
return0 if not reach end of file, else eof.
staticFalse
+
+

C++ defination code:

+ +
int eof()
+
+
+

write

+ +
def write(self, buf: list[int]) -> int
+
+

Write data to file API2

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parambuf: buffer to write
returnwrite size if success, -err::Err code if failed
staticFalse
+
+

C++ defination code:

+ +
int write(const std::vector<uint8_t> &buf)
+
+
+

seek

+ +
def seek(self, offset: int, whence: int) -> int
+
+

Seek file position

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramoffset: offset to seek
whence: @see maix.fs.SEEK
returnnew position if success, -err::Err code if failed
staticFalse
+
+

C++ defination code:

+ +
int seek(int offset, int whence)
+
+
+

tell

+ +
def tell(self) -> int
+
+

Get file position

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnfile position if success, -err::Err code if failed
staticFalse
+
+

C++ defination code:

+ +
int tell()
+
+
+

flush

+ +
def flush(self) -> maix.err.Err
+
+

Flush file

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerr::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed
staticFalse
+
+

C++ defination code:

+ +
err::Err flush()
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/http.html b/maixpy/api/maix/http.html new file mode 100644 index 00000000..dc72bd04 --- /dev/null +++ b/maixpy/api/maix/http.html @@ -0,0 +1,576 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.http - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.http

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.http module

+
+

You can use maix.http to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

Class

+

JpegStreamer

+

JpegStreamer class

+
+

C++ defination code:

+ +
class JpegStreamer
+
+
+

__init__

+ +
def __init__(self, host: str = '', port: int = 8000, client_number: int = 16) -> None
+
+

Construct a new jpeg streamer object

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
noteYou can get the picture stream through http://host:port/stream, or through http://ip:port, and you can customize the page style with set_html()
paramhost: http host
port: http port, default is 8000
client_number: the max number of client
staticFalse
+
+

C++ defination code:

+ +
JpegStreamer(std::string host = std::string(), int port = 8000, int client_number = 16)
+
+
+

start

+ +
def start(self) -> maix.err.Err
+
+

Start the jpeg stream

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ defination code:

+ +
err::Err start()
+
+
+

stop

+

Stop the jpeg stream (http server)

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ defination code:

+ +
err::Err stop()
+
+
+

write

+ +
def write(self, img: maix.image.Image) -> maix.err.Err
+
+

Write data to http

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: image object
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ defination code:

+ +
err::Err write(image::Image *img)
+
+
+
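A typical streaming loop as a sketch; the JpegStreamer calls follow the signatures above, while the maix.camera usage is an assumption (see the camera module documentation):
from maix import http, camera

cam = camera.Camera(640, 480)             # camera usage here is an assumption
streamer = http.JpegStreamer(port=8000)
streamer.start()                          # open http://<device-ip>:8000 in a browser
while True:
    img = cam.read()
    streamer.write(img)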

set_html

+ +
def set_html(self, data: str) -> maix.err.Err
+
+

Add your own page style with this API. The default page is a minimal HTML page that simply shows a "JPG Stream" title.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdata: html code
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ defination code:

+ +
err::Err set_html(std::string data)
+
+
+

host

+ +
def host(self) -> str
+
+

Get host

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnhost name
staticFalse
+
+

C++ defination code:

+ +
std::string host()
+
+
+

port

+ +
def port(self) -> int
+
+

Get port

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnport
staticFalse
+
+

C++ defination code:

+ +
int port()
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/i18n.html b/maixpy/api/maix/i18n.html new file mode 100644 index 00000000..c288f8cb --- /dev/null +++ b/maixpy/api/maix/i18n.html @@ -0,0 +1,685 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.i18n - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.i18n

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.i18n module

+
+

You can use maix.i18n to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

locales

+

i18n locales list

+ + + + + + + + + + + + + + + + + +
itemdescription
value{
"en",
"zh",
"zh-tw",
"ja"}
readonlyFalse
+
+

C++ defination code:

+ +
static std::vector<std::string> locales = {
+        "en",
+        "zh",
+        "zh-tw",
+        "ja"}
+
+
+

names

+

i18n language names list

+ + + + + + + + + + + + + + + + + +
itemdescription
value{
"English",
"简体中文",
"繁體中文",
"日本語"}
readonlyTrue
+
+

C++ defination code:

+ +
const static std::vector<std::string> names = {
+        "English",
+        "简体中文",
+        "繁體中文",
+        "日本語"}
+
+
+

Function

+

get_locale

+ +
def get_locale() -> str
+
+

Get system config of locale.

+ + + + + + + + + + + + + +
itemdescription
returnlanguage locale, e.g. en, zh, zh_CN, zh_TW, etc.
+
+

C++ defination code:

+ +
string get_locale()
+
+
+

get_language_name

+ +
def get_language_name() -> str
+
+

Get system config of language name.

+ + + + + + + + + + + + + +
itemdescription
returnlanguage name, e.g. English, 简体中文, 繁體中文, etc.
+
+

C++ defination code:

+ +
string get_language_name()
+
+
+

load_trans_yaml

+ +
def load_trans_yaml(locales_dir: str) -> dict[str, dict[str, str]]
+
+

Load translations from yaml files.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramlocales_dir: translation yaml files directory.
returnA dict contains all translations, e.g. {"zh":{"hello": "你好"}, "en":{"hello": "hello"}}, you should delete it after use in C++.
+
+

C++ defination code:

+ +
const std::map<string, std::map<string, string>> *load_trans_yaml(const std::string &locales_dir)
+
+
+

Class

+

Trans

+

Translate helper class.

+
+

C++ defination code:

+ +
class Trans
+
+
+

__init__

+ +
def __init__(self, locales_dict: dict[str, dict[str, str]] = {}) -> None
+
+

Translate helper class constructor. By default the locale is obtained by the i18n.get_locale() function, which is set by system settings. But you can also set it manually and temporarily with the set_locale function.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramlocales_dict: locales dict, e.g. {"zh": {"Confirm": "确认", "OK": "好的"}, "en": {"Confirm": "Confirm", "OK": "OK"}}
staticFalse
+
+

C++ defination code:

+ +
Trans(const std::map<string, const std::map<string, string>> &locales_dict = std::map<string, const std::map<string, string>>())
+
+
+

load

+ +
def load(self, locales_dir: str) -> maix.err.Err
+
+

Load translation from yaml files generated by maixtool i18n command.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramlocales_dir: the translation files directory.
returnerr.Err type, no error will return err.Err.ERR_NONE.
staticFalse
+
+

C++ defination code:

+ +
err::Err load(const std::string &locales_dir)
+
+
+

update_dict

+ +
def update_dict(self, dict: dict[str, dict[str, str]]) -> maix.err.Err
+
+

Update translation dict.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdict: the new translation dict.
returnerr.Err type, no error will return err.Err.ERR_NONE.
staticFalse
+
+

C++ defination code:

+ +
err::Err update_dict(const std::map<std::string, const std::map<std::string, std::string>> &dict)
+
+
+

tr

+ +
def tr(self, key: str, locale: str = '') -> str
+
+

Translate string by key.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramkey: string key, e.g. "Confirm"
locale: locale name, if not assign, use default locale set by system settings or set_locale function.
returntranslated string; if a translation is found it is returned, otherwise the key itself is returned, e.g. "确认", "Confirm", etc.
staticFalse
+
+

C++ defination code:

+ +
string tr(const string &key, const string locale = "")
+
+
+
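A minimal sketch with an inline locales dict (the dict content is only an example):
from maix import i18n

trans = i18n.Trans({
    "zh": {"Confirm": "确认", "OK": "好的"},
    "en": {"Confirm": "Confirm", "OK": "OK"},
})
print(trans.tr("Confirm"))    # uses the system locale by default
trans.set_locale("zh")        # temporary override, does not change system settings
print(trans.tr("Confirm"))    # -> 确认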

set_locale

+ +
def set_locale(self, locale: str) -> None
+
+

Set locale temporarily, will not affect system settings.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramlocale: locale name, e.g. "zh", "en", etc. @see maix.i18n.locales
staticFalse
+
+

C++ defination code:

+ +
void set_locale(const string &locale)
+
+
+

get_locale

+ +
def get_locale(self) -> str
+
+

Get current locale.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnlocale name, e.g. "zh", "en", etc. @see maix.i18n.locales
staticFalse
+
+

C++ defination code:

+ +
string get_locale()
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/image.html b/maixpy/api/maix/image.html new file mode 100644 index 00000000..9becc925 --- /dev/null +++ b/maixpy/api/maix/image.html @@ -0,0 +1,12037 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.image - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.image

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.image module, image related definition and functions

+
+

You can use maix.image to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Format

+

Image formats

+ + + + + + + + + + + + + + + + + +
itemdescribe
attentionfor MaixPy firmware developers, update this enum will also need to update the fmt_size and fmt_names too !!!
valuesFMT_RGB888: RGBRGB...RGB, R at the lowest address
FMT_BGR888: BGRBGR...BGR, B at the lowest address
FMT_RGBA8888: RGBARGBA...RGBA, R at the lowest address
FMT_BGRA8888: BGRABGRA...BGRA, B at the lowest address
FMT_RGB565:
FMT_BGR565:
FMT_YUV422SP: YYY...UVUVUV...UVUV
FMT_YUV422P: YYY...UUU...VVV
FMT_YVU420SP: YYY...VUVUVU...VUVU, NV21
FMT_YUV420SP: YYY...UVUVUV...UVUV, NV12
FMT_YVU420P: YYY...VVV...UUU
FMT_YUV420P: YYY...UUU...VVV
FMT_GRAYSCALE:
FMT_BGGR6: 6-bit Bayer format with a BGGR pattern.
FMT_GBRG6: 6-bit Bayer format with a GBRG pattern.
FMT_GRBG6: 6-bit Bayer format with a GRBG pattern.
FMT_RGGB6: 6-bit Bayer format with a RGGB pattern.
FMT_BGGR8: 8-bit Bayer format with a BGGR pattern.
FMT_GBRG8: 8-bit Bayer format with a GBRG pattern.
FMT_GRBG8: 8-bit Bayer format with a GRBG pattern.
FMT_RGGB8: 8-bit Bayer format with a RGGB pattern.
FMT_BGGR10: 10-bit Bayer format with a BGGR pattern.
FMT_GBRG10: 10-bit Bayer format with a GBRG pattern.
FMT_GRBG10: 10-bit Bayer format with a GRBG pattern.
FMT_RGGB10: 10-bit Bayer format with a RGGB pattern.
FMT_BGGR12: 12-bit Bayer format with a BGGR pattern.
FMT_GBRG12: 12-bit Bayer format with a GBRG pattern.
FMT_GRBG12: 12-bit Bayer format with a GRBG pattern.
FMT_RGGB12: 12-bit Bayer format with a RGGB pattern.
FMT_UNCOMPRESSED_MAX:
FMT_COMPRESSED_MIN:
FMT_JPEG:
FMT_PNG:
FMT_COMPRESSED_MAX:
FMT_INVALID: format not valid
+
+

C++ defination code:

+ +
enum Format
+    {
+        FMT_RGB888 = 0, // RGBRGB...RGB, R at the lowest address
+        FMT_BGR888,     // BGRBGR...BGR, B at the lowest address
+        FMT_RGBA8888,   // RGBARGBA...RGBA, R at the lowest address
+        FMT_BGRA8888,   // BGRABGRA...BGRA, B at the lowest address
+        FMT_RGB565,
+        FMT_BGR565,
+        FMT_YUV422SP, // YYY...UVUVUV...UVUV
+        FMT_YUV422P,  // YYY...UUU...VVV
+        FMT_YVU420SP, // YYY...VUVUVU...VUVU, NV21
+        FMT_YUV420SP, // YYY...UVUVUV...UVUV, NV12
+        FMT_YVU420P,  // YYY...VVV...UUU
+        FMT_YUV420P,  // YYY...UUU...VVV
+        FMT_GRAYSCALE,
+        FMT_BGGR6,      // 6-bit Bayer format with a BGGR pattern.
+        FMT_GBRG6,      // 6-bit Bayer format with a GBRG pattern.
+        FMT_GRBG6,      // 6-bit Bayer format with a GRBG pattern.
+        FMT_RGGB6,      // 6-bit Bayer format with a RGGB pattern.
+        FMT_BGGR8,      // 8-bit Bayer format with a BGGR pattern.
+        FMT_GBRG8,      // 8-bit Bayer format with a GBRG pattern.
+        FMT_GRBG8,      // 8-bit Bayer format with a GRBG pattern.
+        FMT_RGGB8,      // 8-bit Bayer format with a RGGB pattern.
+        FMT_BGGR10,     // 10-bit Bayer format with a BGGR pattern.
+        FMT_GBRG10,     // 10-bit Bayer format with a GBRG pattern.
+        FMT_GRBG10,     // 10-bit Bayer format with a GRBG pattern.
+        FMT_RGGB10,     // 10-bit Bayer format with a RGGB pattern.
+        FMT_BGGR12,     // 12-bit Bayer format with a BGGR pattern.
+        FMT_GBRG12,     // 12-bit Bayer format with a GBRG pattern.
+        FMT_GRBG12,     // 12-bit Bayer format with a GRBG pattern.
+        FMT_RGGB12,     // 12-bit Bayer format with a RGGB pattern.
+        FMT_UNCOMPRESSED_MAX,
+
+        // compressed format below, not compressed should define upper
+        FMT_COMPRESSED_MIN,
+        FMT_JPEG,
+        FMT_PNG,
+        FMT_COMPRESSED_MAX,
+
+        FMT_INVALID = 0xFF  // format not valid
+    }
+
+
+

Fit

+

Object fit method

+ + + + + + + + + + + + + +
itemdescribe
valuesFIT_NONE: no object fit, keep original
FIT_FILL: width to new width, height to new height, may be stretch
FIT_CONTAIN: keep aspect ratio, fill blank area with black color
FIT_COVER: keep aspect ratio, crop image to fit new size
FIT_MAX:
+
+

C++ defination code:

+ +
enum Fit
+    {
+        FIT_NONE = -1, // no object fit, keep original
+        FIT_FILL = 0,  // width to new width, height to new height, may be stretch
+        FIT_CONTAIN,   // keep aspect ratio, fill blank area with black color
+        FIT_COVER,     // keep aspect ratio, crop image to fit new size
+        FIT_MAX
+    }
+
+
+

ResizeMethod

+

Resize method

+ + + + + + + + + + + + + +
itemdescribe
valuesNEAREST:
BILINEAR:
BICUBIC:
AREA:
LANCZOS:
HAMMING:
RESIZE_METHOD_MAX:
+
+

C++ defination code:

+ +
enum ResizeMethod
+    {
+        NEAREST = 0,
+        BILINEAR,
+        BICUBIC,
+        AREA,
+        LANCZOS,
+        HAMMING,
+        RESIZE_METHOD_MAX
+    }
+
+
+

ApriltagFamilies

+

Family of apriltag

+ + + + + + + + + + + + + +
itemdescribe
valuesTAG16H5:
TAG25H7:
TAG25H9:
TAG36H10:
TAG36H11:
ARTOOLKIT:
+
+

C++ defination code:

+ +
enum ApriltagFamilies
+    {
+        TAG16H5   = 1,
+        TAG25H7   = 2,
+        TAG25H9   = 4,
+        TAG36H10  = 8,
+        TAG36H11  = 16,
+        ARTOOLKIT = 32
+    }
+
+
+

TemplateMatch

+

Template match method

+ + + + + + + + + + + + + +
itemdescribe
valuesSEARCH_EX: Exhaustive search
SEARCH_DS: Diamond search
+
+

C++ defination code:

+ +
enum TemplateMatch
+    {
+        SEARCH_EX,  // Exhaustive search
+        SEARCH_DS,  // Diamond search
+    }
+
+
+

CornerDetector

+

CornerDetector class

+ + + + + + + + + + + + + +
itemdescribe
valuesCORNER_FAST:
CORNER_AGAST:
+
+

C++ defination code:

+ +
enum CornerDetector
+    {
+        CORNER_FAST,
+        CORNER_AGAST
+    }
+
+
+

EdgeDetector

+

EdgeDetector class

+ + + + + + + + + + + + + +
itemdescribe
valuesEDGE_CANNY:
EDGE_SIMPLE:
+
+

C++ defination code:

+ +
enum EdgeDetector
+    {
+        EDGE_CANNY,
+        EDGE_SIMPLE,
+    }
+
+
+

Variable

+

fmt_size

+

Image format size in bytes

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
attentionIt's a copy of this variable in MaixPy,
so changing it in C++ (e.g. updating the var in the hello function) will not affect the var in MaixPy.
So we add const for this var to avoid this mistake.
value{
3,
3,
4,
4,
2,
2,
2,
2,
1.5,
1.5,
1.5,
1.5,
1, // grayscale
0.75, // 6-bit Bayer format
0.75, // 6-bit Bayer format
0.75, // 6-bit Bayer format
0.75, // 6-bit Bayer format
1, // 8-bit Bayer format
1, // 8-bit Bayer format
1, // 8-bit Bayer format
1, // 8-bit Bayer format
1.25, // 10-bit Bayer format
1.25, // 10-bit Bayer format
1.25, // 10-bit Bayer format
1.25, // 10-bit Bayer format
1.5, // 12-bit Bayer format
1.5, // 12-bit Bayer format
1.5, // 12-bit Bayer format
1.5, // 12-bit Bayer format
0, // uncompereed_max
0, // compressed_min
1, // jpeg
1, // png
0, // compressed_max
0 // invalid
}
readonlyTrue
+
+

C++ defination code:

+ +
const std::vector<float> fmt_size = {
+        3,
+        3,
+        4,
+        4,
+        2,
+        2,
+        2,
+        2,
+        1.5,
+        1.5,
+        1.5,
+        1.5,
+        1, // grayscale
+        0.75,   // 6-bit Bayer format
+        0.75,   // 6-bit Bayer format
+        0.75,   // 6-bit Bayer format
+        0.75,   // 6-bit Bayer format
+        1,      // 8-bit Bayer format
+        1,      // 8-bit Bayer format
+        1,      // 8-bit Bayer format
+        1,      // 8-bit Bayer format
+        1.25,   // 10-bit Bayer format
+        1.25,   // 10-bit Bayer format
+        1.25,   // 10-bit Bayer format
+        1.25,   // 10-bit Bayer format
+        1.5,    // 12-bit Bayer format
+        1.5,    // 12-bit Bayer format
+        1.5,    // 12-bit Bayer format
+        1.5,    // 12-bit Bayer format
+        0, // uncompereed_max
+        0, // compressed_min
+        1, // jpeg
+        1, // png
+        0, // compressed_max
+        0  // invalid
+        }
+
+
+

fmt_names

+

Image format string

+ + + + + + + + + + + + + + + + + +
itemdescription
value{
"RGB888",
"BGR888",
"RGBA8888",
"BGRA8888",
"RGB565",
"BGR565",
"YUV422SP",
"YUV422P",
"YVU420SP",
"YUV420SP",
"YVU420P",
"YUV420P",
"GRAYSCALE",
"BGGR6",
"GBRG6",
"GRBG6",
"RG6B6",
"BGGR8",
"GBRG8",
"GRBG8",
"RG6B8",
"BGGR10",
"GBRG10",
"GRBG10",
"RG6B10",
"BGGR12",
"GBRG12",
"GRBG12",
"RG6B12",
"UNCOMPRESSED_MAX",
"COMPRESSED_MIN",
"JPEG",
"PNG",
"COMPRESSED_MAX",
"INVALID"
}
readonlyTrue
+
+

C++ defination code:

+ +
const std::vector<std::string> fmt_names = {
+        "RGB888",
+        "BGR888",
+        "RGBA8888",
+        "BGRA8888",
+        "RGB565",
+        "BGR565",
+        "YUV422SP",
+        "YUV422P",
+        "YVU420SP",
+        "YUV420SP",
+        "YVU420P",
+        "YUV420P",
+        "GRAYSCALE",
+        "BGGR6",
+        "GBRG6",
+        "GRBG6",
+        "RG6B6",
+        "BGGR8",
+        "GBRG8",
+        "GRBG8",
+        "RG6B8",
+        "BGGR10",
+        "GBRG10",
+        "GRBG10",
+        "RG6B10",
+        "BGGR12",
+        "GBRG12",
+        "GRBG12",
+        "RG6B12",
+        "UNCOMPRESSED_MAX",
+        "COMPRESSED_MIN",
+        "JPEG",
+        "PNG",
+        "COMPRESSED_MAX",
+        "INVALID"
+        }
+
+
+

COLOR_WHITE

+

Predefined color white

+ + + + + + + + + + + + + + + + + +
itemdescription
valueimage::Color::from_rgb(255, 255, 255)
readonlyTrue
+
+

C++ defination code:

+ +
const image::Color COLOR_WHITE = image::Color::from_rgb(255, 255, 255)
+
+
+

COLOR_BLACK

+

Predefined color black

+ + + + + + + + + + + + + + + + + +
itemdescription
valueimage::Color::from_rgb(0, 0, 0)
readonlyTrue
+
+

C++ defination code:

+ +
const image::Color COLOR_BLACK = image::Color::from_rgb(0, 0, 0)
+
+
+

COLOR_RED

+

Predefined color red

+ + + + + + + + + + + + + + + + + +
itemdescription
valueimage::Color::from_rgb(255, 0, 0)
readonlyTrue
+
+

C++ defination code:

+ +
const image::Color COLOR_RED = image::Color::from_rgb(255, 0, 0)
+
+
+

COLOR_GREEN

+

Predefined color green

+ + + + + + + + + + + + + + + + + +
itemdescription
valueimage::Color::from_rgb(0, 255, 0)
readonlyTrue
+
+

C++ defination code:

+ +
const image::Color COLOR_GREEN = image::Color::from_rgb(0, 255, 0)
+
+
+

COLOR_BLUE

+

Predefined color blue

+ + + + + + + + + + + + + + + + + +
itemdescription
valueimage::Color::from_rgb(0, 0, 255)
readonlyTrue
+
+

C++ defination code:

+ +
const image::Color COLOR_BLUE = image::Color::from_rgb(0, 0, 255)
+
+
+

COLOR_YELLOW

+

Predefined color yellow

+ + + + + + + + + + + + + + + + + +
itemdescription
valueimage::Color::from_rgb(255, 255, 0)
readonlyTrue
+
+

C++ defination code:

+ +
const image::Color COLOR_YELLOW = image::Color::from_rgb(255, 255, 0)
+
+
+

COLOR_PURPLE

+

Predefined color purple

+ + + + + + + + + + + + + + + + + +
itemdescription
valueimage::Color::from_rgb(143, 0, 255)
readonlyTrue
+
+

C++ defination code:

+ +
const image::Color COLOR_PURPLE = image::Color::from_rgb(143, 0, 255)
+
+
+

COLOR_ORANGE

+

Predefined color orange

+ + + + + + + + + + + + + + + + + +
itemdescription
valueimage::Color::from_rgb(255, 127, 0)
readonlyTrue
+
+

C++ defination code:

+ +
const image::Color COLOR_ORANGE = image::Color::from_rgb(255, 127, 0)
+
+
+

COLOR_GRAY

+

Predefined color gray

+ + + + + + + + + + + + + + + + + +
itemdescription
valueimage::Color::from_rgb(127, 127, 127)
readonlyTrue
+
+

C++ defination code:

+ +
const image::Color COLOR_GRAY = image::Color::from_rgb(127, 127, 127)
+
+
+

Function

+

resize_map_pos

+ +
def resize_map_pos(w_in: int, h_in: int, w_out: int, h_out: int, fit: Fit, x: int, y: int, w: int = -1, h: int = -1) -> list[int]
+
+

map point position or rectangle position from one image size to another image size(resize)

+ + + + + + + + + + + + + + + + + +
itemdescription
paramw_in: original image width
h_in: original image height
w_out: target image width
h_out: target image height
fit: resize method, see maix.image.Fit
x: original point x, or rectangle left-top point's x
y: original point y, or rectangle left-top point's y
w: original rectangle width, can be -1 if not use this arg, default -1.
h: original rectangle height, can be -1 if not use this arg, default -1.
returnlist type, [x, y] if map point, [x, y, w, h] if resize rectangle.
+
+

C++ defination code:

+ +
std::vector<int> resize_map_pos(int w_in, int h_in, int w_out, int h_out, image::Fit fit, int x, int y, int w = -1, int h = -1)
+
+
+
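A sketch mapping a rectangle from an original image into its resized copy (the sizes and coordinates are arbitrary examples):
from maix import image

# a 640x480 image resized to 320x240 with FIT_CONTAIN:
# map the rectangle (10, 20, 100, 80) into the resized image
x, y, w, h = image.resize_map_pos(640, 480, 320, 240, image.Fit.FIT_CONTAIN,
                                  10, 20, 100, 80)
print(x, y, w, h)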

resize_map_pos (overload 1)

+

map point position or rectangle position from this image size to another image size(resize)

+ + + + + + + + + + + + + + + + + +
itemdescription
paramw_out: target image width
h_out: target image height
fit: resize method, see maix.image.Fit
x: original point x, or rectangle left-top point's x
y: original point y, or rectangle left-top point's y
w: original rectangle width, can be -1 if not use this arg, default -1.
h: original rectangle height, can be -1 if not use this arg, default -1.
returnlist type, [x, y] if map point, [x, y, w, h] if resize rectangle.
+
+

C++ defination code:

+ +
std::vector<int> resize_map_pos(int w_out, int h_out, image::Fit fit, int x, int y, int w = -1, int h = -1)
+
+
+

resize_map_pos_reverse

+ +
def resize_map_pos_reverse(w_in: int, h_in: int, w_out: int, h_out: int, fit: Fit, x: int, y: int, w: int = -1, h: int = -1) -> list[int]
+
+

Reverse of the resize_map_pos method: when we call the image.resize method to resize image 'a' to image 'b', we want to know the original position on 'a' given a known point on 'b'.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramw_in: original image width
h_in: original image height
w_out: image width after resize
h_out: image height after resize
fit: resize method, see maix.image.Fit
x: point x on the resized image, or rectangle left-top point's x
y: point y on the resized image, or rectangle left-top point's y
w: rectangle width on the resized image, can be -1 if not use this arg, default -1.
h: rectangle height on the resized image, can be -1 if not use this arg, default -1.
returnlist type, [x, y] if map point, [x, y, w, h] if resize rectangle.
+
+

C++ defination code:

+ +
std::vector<int> resize_map_pos_reverse(int w_in, int h_in, int w_out, int h_out, image::Fit fit, int x, int y, int w = -1, int h = -1)
+
+
+

load

+ +
def load(path: str, format: Format = ...) -> Image
+
+

Load image from file, and convert to Image object

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath: image file path
format: read as this format, if not match, will convert to this format, by default is RGB888
returnImage object, if load failed, will return None(nullptr in C++), so you should care about it.
+
+

C++ defination code:

+ +
image::Image *load(const char *path, image::Format format = image::Format::FMT_RGB888)
+
+
+
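A minimal load sketch (the path is only an example); note the None check, since load() returns None on failure:
from maix import image

img = image.load("/root/test.jpg", image.Format.FMT_RGB888)
if img is None:
    raise Exception("load image failed")
print(img)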

from_bytes

+ +
def from_bytes(width: int, height: int, format: Format, data: maix.Bytes(bytes), copy: bool = True) -> Image
+
+

Create image from bytes

+ + + + + + + + + + + + + + + + + +
itemdescription
paramwidth: image width
height: image height
format: image format
data: image data, if data is None, will malloc memory for image data
If the image is in jpeg format, data must be filled in.
copy: if true and data is not None, will copy data to new buffer, else will use data directly. default is true to avoid memory leak.
Use it carefully!!!
returnImage object
+
+

C++ defination code:

+ +
image::Image *from_bytes(int width, int height, image::Format format, Bytes *data, bool copy = true)
+
+
+

load_font

+ +
def load_font(name: str, path: str, size: int = 16) -> maix.err.Err
+
+

Load font from file

+ + + + + + + + + + + + + + + + + +
itemdescription
paramname: font name, used to identify font
path: font file path, support ttf, ttc, otf
size: font size, font height, by default is 16
returnerror code, err::ERR_NONE is ok, other is error
+
+

C++ defination code:

+ +
err::Err load_font(const std::string &name, const char *path, int size = 16)
+
+
+

set_default_font

+

Set default font; if this method is not called, the default is hershey_plain

+ + + + + + + + + + + + + + + + + +
itemdescription
paramname: font name, supported names can be get by fonts()
returnerror code, err::ERR_NONE is ok, other is error
+
+

C++ defination code:

+ +
err::Err set_default_font(const std::string &name)
+
+
+

fonts

+ +
def fonts() -> list[str]
+
+

Get all loaded fonts

+ + + + + + + + + + + + + +
itemdescription
returnall loaded fonts, string list type
+
+

C++ defination code:

+ +
std::vector<std::string> *fonts()
+
+
+

string_size

+ +
def string_size(string: str, scale: float = 1, thickness: int = 1, font: str = '') -> Size
+
+

Get text rendered width and height

+ + + + + + + + + + + + + + + + + +
itemdescription
paramstring: text content
scale: font scale, by default(value is 1)
thickness: text thickness(line width), by default(value is 1)
returntext rendered width and height, [width, height]
+
+

C++ defination code:

+ +
image::Size string_size(std::string string, float scale = 1, int thickness = 1, const std::string &font = "")
+
+
+
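A sketch combining load_font, set_default_font and string_size (the font name and path are placeholders):
from maix import image

image.load_font("myfont", "/root/my_font.ttf", size=20)   # placeholder font path
image.set_default_font("myfont")
size = image.string_size("Hello MaixPy", scale=1.5)
print(size.width(), size.height())                        # Size.width()/height() are documented below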

cv2image

+ +
def cv2image(array: numpy.ndarray[numpy.uint8], bgr: bool = True, copy: bool = True) -> Image
+
+

OpenCV Mat(numpy array object) to Image object

+ + + + + + + + + + + + + + + + + +
itemdescription
paramarray: numpy array object, must be a 3-dim or 2-dim continuous array with shape hwc or hw
bgr: if set bgr, the return image will be marked as BGR888 or BGRA8888 format, grayscale will ignore this arg.
copy: if true, will alloc new buffer and copy data, else will directly use array's data buffer, default true.
Use this arg carefully: when set to false, the array MUST be kept alive until the returned img of this function is no longer used, otherwise the program will crash.
returnImage object
+
+

C++ defination code:

+ +
image::Image *cv2image(py::array_t<uint8_t, py::array::c_style> array, bool bgr = true, bool copy = true)
+
+
+

image2cv

+ +
def image2cv(img: Image, ensure_bgr: bool = True, copy: bool = True) -> numpy.ndarray[numpy.uint8]
+
+

Image object to OpenCV Mat(numpy array object)

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
paramimg: Image object, maix.image.Image type.
ensure_bgr: auto convert to BGR888 or BGRA8888 if img format is not BGR or BGRA, if set to false, will not auto convert and directly use img's data, default true.
copy: Whether to alloc a new image and copy data or not; if ensure_bgr is set and img is not in bgr or bgra format, data is always copied.
If not copied, the array object will directly use img's data buffer, which is faster, but changing the array will affect img's data. Default true.
attentiontake care of ensure_bgr and copy param.
returnnumpy array object
+
+

C++ defination code:

+ +
py::array_t<uint8_t, py::array::c_style> image2cv(image::Image *img, bool ensure_bgr = true, bool copy = true)
+
+
+
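A round-trip sketch between numpy arrays and Image objects using the two functions above:
import numpy as np
from maix import image

arr = np.zeros((240, 320, 3), dtype=np.uint8)              # HWC layout, treated as BGR888 because bgr=True
img = image.cv2image(arr, bgr=True, copy=True)             # numpy -> maix.image.Image
back = image.image2cv(img, ensure_bgr=True, copy=True)     # maix.image.Image -> numpy
print(back.shape)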

Class

+

Size

+

Image size type

+
+

C++ defination code:

+ +
class Size
+
+
+

__init__

+ +
def __init__(self, width: int = 0, height: int = 0) -> None
+
+

Construct a new Size object

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramwidth: image width
height: image height
staticFalse
+
+

C++ defination code:

+ +
Size(int width = 0, int height = 0)
+
+
+

width

+ +
def width(self, width: int = -1) -> int
+
+

width of size

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramwidth: set new width, if not set, only return current width
staticFalse
+
+

C++ defination code:

+ +
int width(int width = -1)
+
+
+

height

+ +
def height(self, height: int = -1) -> int
+
+

height of size

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramheight: set new height, if not set, only return current height
staticFalse
+
+

C++ defination code:

+ +
int height(int height = -1)
+
+
+

__getitem__

+ +
def __getitem__(self, index: int) -> int
+
+

Subscript operator

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramindex: 0 for width, 1 for height
returnint& width or height
staticFalse
+
+

C++ defination code:

+ +
int &operator[](int index)
+
+
+

__str__

+ +
def __str__(self) -> str
+
+

to string

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
std::string __str__()
+
+
+
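A short sketch of the `Size` accessors documented above: `width()`/`height()` act as getters when called without an argument and as setters otherwise, and `[0]`/`[1]` index width and height.

```python
from maix import image

s = image.Size(320, 240)
print(s.width(), s.height())   # 320 240
s.width(640)                   # pass a value to set a new width
print(s[0], s[1])              # 640 240  (index 0 = width, 1 = height)
print(str(s))                  # human-readable form via __str__
```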

Line

+

Line class

+
+

C++ defination code:

+ +
class Line
+
+
+

__init__

+ +
def __init__(self, x1: int, y1: int, x2: int, y2: int, magnitude: int = 0, theta: int = 0, rho: int = 0) -> None
+
+

Line constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx1: coordinate x1 of the straight line
y1: coordinate y1 of the straight line
x2: coordinate x2 of the straight line
y2: coordinate y2 of the straight line
magnitude: magnitude of the straight line after Hough transformation
theta: angle of the straight line after Hough transformation
rho: p-value of the straight line after Hough transformation
staticFalse
+
+

C++ defination code:

+ +
Line(int x1, int y1, int x2, int y2, int magnitude = 0, int theta = 0, int rho = 0)
+
+
+

__getitem__

+ +
def __getitem__(self, index: int) -> int
+
+

Subscript operator

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramindex: [0] get x1 of line
[1] get y1 of line
[2] get x2 of line
[3] get y2 of line
[4] get length of line
[5] get magnitude of the straight line after Hough transformation
[6] get angle of the straight line after Hough transformation (0-179 degrees)
[7] get p-value of the straight line after Hough transformation
returnint&
staticFalse
+
+

C++ defination code:

+ +
int &__getitem__(int index)
+
+
+

x1

+ +
def x1(self) -> int
+
+

get x1 of line

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn x1 of the line, type is int
staticFalse
+
+

C++ defination code:

+ +
int x1()
+
+
+

y1

+ +
def y1(self) -> int
+
+

get y1 of line

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn y1 of the line, type is int
staticFalse
+
+

C++ defination code:

+ +
int y1()
+
+
+

x2

+ +
def x2(self) -> int
+
+

get x2 of line

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn x2 of the line, type is int
staticFalse
+
+

C++ defination code:

+ +
int x2()
+
+
+

y2

+ +
def y2(self) -> int
+
+

get y2 of line

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn y2 of the line, type is int
staticFalse
+
+

C++ defination code:

+ +
int y2()
+
+
+

length

+ +
def length(self) -> int
+
+

get length of line

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn length of the line, type is int
staticFalse
+
+

C++ defination code:

+ +
int length()
+
+
+

magnitude

+ +
def magnitude(self) -> int
+
+

get magnitude of the straight line after Hough transformation

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn magnitude, type is int
staticFalse
+
+

C++ defination code:

+ +
int magnitude()
+
+
+

theta

+ +
def theta(self) -> int
+
+

get angle of the straight line after Hough transformation (0-179 degrees)

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn angle, type is int
staticFalse
+
+

C++ defination code:

+ +
int theta()
+
+
+

rho

+ +
def rho(self) -> int
+
+

get p-value of the straight line after Hough transformation

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn p-value, type is int
staticFalse
+
+

C++ defination code:

+ +
int rho()
+
+
+
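A sketch of the `Line` accessors. The object is constructed directly here; in practice `Line` results usually come from a line-finding method on `Image`, which is not documented in this section.

```python
from maix import image

# positional args: x1, y1, x2, y2, magnitude, theta, rho
ln = image.Line(10, 20, 110, 220, 50, 30, 12)
print(ln.x1(), ln.y1(), ln.x2(), ln.y2())
print(ln.length(), ln.magnitude())
print(ln.theta(), ln[6])       # both return the angle (0-179 degrees)
print(ln.rho())
```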

Rect

+

Rect class

+
+

C++ defination code:

+ +
class Rect
+
+
+

__init__

+ +
def __init__(self, corners: list[list[int]], x: int, y: int, w: int, h: int, magnitude: int = 0) -> None
+
+

Rect constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcorners: corners of rect
x: coordinate x of the rectangle
y: coordinate y of the rectangle
w: width of the rectangle
h: height of the rectangle
magnitude: magnitude of the rectangle after Hough transformation
staticFalse
+
+

C++ defination code:

+ +
Rect(std::vector<std::vector<int>> &corners, int x, int y, int w, int h, int magnitude = 0)
+
+
+

__getitem__

+ +
def __getitem__(self, index: int) -> int
+
+

Subscript operator

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramindex: [0] get x of rect
[1] get y of rect
[2] get w of rect
[3] get h of rect
[4] get magnitude of the rect after Hough transformation
returnint&
staticFalse
+
+

C++ defination code:

+ +
int &__getitem__(int index)
+
+
+

corners

+ +
def corners(self) -> list[list[int]]
+
+

get corners of rect

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn the coordinate of the rect.
staticFalse
+
+

C++ defination code:

+ +
std::vector<std::vector<int>> corners()
+
+
+

rect

+ +
def rect(self) -> list[int]
+
+

get rectangle of rect

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn the rectangle of the rect. format is {x, y, w, h}, type is std::vector
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> rect()
+
+
+

x

+ +
def x(self) -> int
+
+

get x of rect

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn x of the rect, type is int
staticFalse
+
+

C++ defination code:

+ +
int x()
+
+
+

y

+ +
def y(self) -> int
+
+

get y of rect

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn y of the rect, type is int
staticFalse
+
+

C++ defination code:

+ +
int y()
+
+
+

w

+ +
def w(self) -> int
+
+

get w of rect

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn w of the rect, type is int
staticFalse
+
+

C++ defination code:

+ +
int w()
+
+
+

h

+ +
def h(self) -> int
+
+

get h of rect

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn h of the rect, type is int
staticFalse
+
+

C++ defination code:

+ +
int h()
+
+
+

magnitude

+ +
def magnitude(self) -> int
+
+

get magnitude of the rect after Hough transformation

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn magnitude, type is int
staticFalse
+
+

C++ defination code:

+ +
int magnitude()
+
+
+

Circle

+

circle class

+
+

C++ defination code:

+ +
class Circle
+
+
+

__init__

+ +
def __init__(self, x: int, y: int, r: int, magnitude: int) -> None
+
+

Circle constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx: coordinate x of the circle
y: coordinate y of the circle
r: coordinate r of the circle
magnitude: magnitude of the circle after Hough transformation
staticFalse
+
+

C++ defination code:

+ +
Circle(int x, int y, int r, int magnitude)
+
+
+

__getitem__

+ +
def __getitem__(self, index: int) -> int
+
+

Subscript operator

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramindex: [0] get x of circle
[1] get y of circle
[2] get r of circle
[3] get magnitude of the circle after Hough transformation
returnint&
staticFalse
+
+

C++ defination code:

+ +
int &__getitem__(int index)
+
+
+

x

+ +
def x(self) -> int
+
+

get x of circle

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn x of the circle, type is int
staticFalse
+
+

C++ defination code:

+ +
int x()
+
+
+

y

+ +
def y(self) -> int
+
+

get y of circle

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn y of the circle, type is int
staticFalse
+
+

C++ defination code:

+ +
int y()
+
+
+

r

+ +
def r(self) -> int
+
+

get r of circle

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn r of the circle, type is int
staticFalse
+
+

C++ defination code:

+ +
int r()
+
+
+

magnitude

+ +
def magnitude(self) -> int
+
+

get magnitude of the circle after Hough transformation

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn magnitude, type is int
staticFalse
+
+

C++ defination code:

+ +
int magnitude()
+
+
+
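A hedged sketch of consuming `Circle` results; the camera setup and `find_circles()` are assumptions not documented in this section, while the accessor calls are the ones documented above.

```python
from maix import camera

cam = camera.Camera(320, 240)          # assumed camera API
img = cam.read()
for c in img.find_circles():           # find_circles() is assumed
    print(c.x(), c.y(), c.r(), c.magnitude())
    print(c[2])                        # index 2 also returns the radius
```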

Blob

+

Blob class

+
+

C++ defination code:

+ +
class Blob
+
+
+

__init__

+ +
def __init__(self, rect: list[int], corners: list[list[int]], mini_corners: list[list[int]], cx: float, cy: float, pixels: int, rotation: float, code: int, count: int, perimeter: int, roundness: float, x_hist_bins: list[int], y_hist_bins: list[int]) -> None
+
+

Blob constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramrect: blob rect, type is std::vector
corners: blob corners, type is std::vector<std::vector>
mini_corners: blob mini_corners, type is std::vector<std::vector>
cx: blob center x, type is float
cy: blob center y, type is float
pixels: blob pixels, type is int
rotation: blob rotation, type is float
code: blob code, type is int
count: blob count, type is int
perimeter: blob perimeter, type is int
roundness: blob roundness, type is float
x_hist_bins: blob x_hist_bins, type is std::vector
y_hist_bins: blob y_hist_bins, type is std::vector
staticFalse
+
+

C++ defination code:

+ +
Blob(std::vector<int> &rect, std::vector<std::vector<int>> &corners, std::vector<std::vector<int>> &mini_corners,float cx, float cy, int pixels, float rotation, int code, int count, int perimeter, float roundness, std::vector<int> &x_hist_bins, std::vector<int> &y_hist_bins)
+
+
+

__getitem__

+ +
def __getitem__(self, index: int) -> int
+
+

Subscript operator

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramindex: [0] Returns the blob’s bounding box x coordinate
[1] Returns the blob’s bounding box y coordinate
[2] Returns the blob’s bounding box w coordinate
[3] Returns the blob’s bounding box h coordinate
[4] Returns the number of pixels that are part of this blob
[5] Returns the centroid x position of the blob
[6] Returns the centroid y position of the blob
returnint&
staticFalse
+
+

C++ defination code:

+ +
int &__getitem__(int index)
+
+
+

corners

+ +
def corners(self) -> list[list[int]]
+
+

get blob corners

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns a list of 4 (x,y) tuples of the 4 corners of the object.
In the reference layout, (x0, y0) is the top-left corner, (x1, y1) the top-right, (x2, y2) the bottom-right and (x3, y3) the bottom-left.
note: the order of corners may change
staticFalse
+
+

C++ defination code:

+ +
std::vector<std::vector<int>> corners()
+
+
+

mini_corners

+ +
def mini_corners(self) -> list[list[int]]
+
+

get blob mini corners

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns a list of 4 (x,y) tuples of the 4 corners than bound the min area rectangle of the blob.
In the reference layout, (x0, y0) is the top-left corner, (x1, y1) the top-right, (x2, y2) the bottom-right and (x3, y3) the bottom-left.
note: the order of corners may change
staticFalse
+
+

C++ defination code:

+ +
std::vector<std::vector<int>> mini_corners()
+
+
+

rect

+ +
def rect(self) -> list[int]
+
+

get blob rect

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the bounding rectangle as (x, y, w, h), where (x, y) is the upper-left corner and w, h are the width and height (see x(), y(), w() and h()).
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> rect()
+
+
+

x

+ +
def x(self) -> int
+
+

get blob x of the upper left coordinate

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the x coordinate of the upper left corner of the rectangle.
staticFalse
+
+

C++ defination code:

+ +
int x()
+
+
+

y

+ +
def y(self) -> int
+
+

get blob y of the upper left coordinate

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the y coordinate of the upper left corner of the rectangle.
staticFalse
+
+

C++ defination code:

+ +
int y()
+
+
+

w

+ +
def w(self) -> int
+
+

get blob width

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the blob’s bounding box w coordinate
staticFalse
+
+

C++ defination code:

+ +
int w()
+
+
+

h

+ +
def h(self) -> int
+
+

get blob height

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the blob’s bounding box h coordinate
staticFalse
+
+

C++ defination code:

+ +
int h()
+
+
+

pixels

+ +
def pixels(self) -> int
+
+

get blob pixels

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the number of pixels that are part of this blob.
staticFalse
+
+

C++ defination code:

+ +
int pixels()
+
+
+

cx

+ +
def cx(self) -> int
+
+

get blob center x

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the centroid x position of the blob
staticFalse
+
+

C++ defination code:

+ +
int cx()
+
+
+

cy

+ +
def cy(self) -> int
+
+

get blob center y

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the centroid y position of the blob
staticFalse
+
+

C++ defination code:

+ +
int cy()
+
+
+

cxf

+ +
def cxf(self) -> float
+
+

get blob center x

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the centroid x position of the blob
staticFalse
+
+

C++ defination code:

+ +
float cxf()
+
+
+

cyf

+ +
def cyf(self) -> float
+
+

get blob center y

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the centroid y position of the blob
staticFalse
+
+

C++ defination code:

+ +
float cyf()
+
+
+

rotation

+ +
def rotation(self) -> float
+
+

get blob rotation

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the rotation of the blob in radians (float). If the blob is like a pencil or pen this value will be unique for 0-180 degrees.
staticFalse
+
+

C++ defination code:

+ +
float rotation()
+
+
+

rotation_rad

+ +
def rotation_rad(self) -> float
+
+

get blob rotation_rad

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the rotation of the blob in radians
staticFalse
+
+

C++ defination code:

+ +
float rotation_rad()
+
+
+

rotation_deg

+ +
def rotation_deg(self) -> int
+
+

get blob rotation_deg

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the rotation of the blob in degrees.
staticFalse
+
+

C++ defination code:

+ +
int rotation_deg()
+
+
+

code

+ +
def code(self) -> int
+
+

get blob code

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns a 32-bit binary number with a bit set in it for each color threshold that’s part of this blob
staticFalse
+
+

C++ defination code:

+ +
int code()
+
+
+

count

+ +
def count(self) -> int
+
+

get blob count

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the number of blobs merged into this blob.
staticFalse
+
+

C++ defination code:

+ +
int count()
+
+
+

perimeter

+ +
def perimeter(self) -> int
+
+

get blob perimeter

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the number of pixels on this blob’s perimeter.
staticFalse
+
+

C++ defination code:

+ +
int perimeter()
+
+
+

roundness

+ +
def roundness(self) -> float
+
+

get blob roundness

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns a value between 0 and 1 representing how round the object is
staticFalse
+
+

C++ defination code:

+ +
float roundness()
+
+
+

elongation

+ +
def elongation(self) -> float
+
+

get blob elongation

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns a value between 0 and 1 representing how long (not round) the object is
staticFalse
+
+

C++ defination code:

+ +
float elongation()
+
+
+

area

+ +
def area(self) -> int
+
+

get blob area

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the area of the bounding box around the blob
staticFalse
+
+

C++ defination code:

+ +
int area()
+
+
+

density

+ +
def density(self) -> float
+
+

get blob density

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the density ratio of the blob
staticFalse
+
+

C++ defination code:

+ +
float density()
+
+
+

extent

+ +
def extent(self) -> float
+
+

Alias for blob.density()

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the density ratio of the blob
staticFalse
+
+

C++ defination code:

+ +
float extent()
+
+
+

compactness

+ +
def compactness(self) -> float
+
+

get blob compactness

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the compactness ratio of the blob
staticFalse
+
+

C++ defination code:

+ +
float compactness()
+
+
+

solidity

+ +
def solidity(self) -> float
+
+

get blob solidity

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the solidity ratio of the blob
staticFalse
+
+

C++ defination code:

+ +
float solidity()
+
+
+

convexity

+ +
def convexity(self) -> float
+
+

get blob convexity

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns a value between 0 and 1 representing how convex the object is
staticFalse
+
+

C++ defination code:

+ +
float convexity()
+
+
+

x_hist_bins

+ +
def x_hist_bins(self) -> list[int]
+
+

get blob x_hist_bins

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the x_hist_bins of the blob
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> x_hist_bins()
+
+
+

y_hist_bins

+ +
def y_hist_bins(self) -> list[int]
+
+

get blob y_hist_bins

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the y_hist_bins of the blob
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> y_hist_bins()
+
+
+

major_axis_line

+ +
def major_axis_line(self) -> list[int]
+
+

get blob major_axis_line

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns a line tuple (x1, y1, x2, y2) of the major axis of the blob.
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> major_axis_line()
+
+
+

minor_axis_line

+ +
def minor_axis_line(self) -> list[int]
+
+

get blob minor_axis_line

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns a line tuple (x1, y1, x2, y2) of the minor axis of the blob.
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> minor_axis_line()
+
+
+

enclosing_circle

+ +
def enclosing_circle(self) -> list[int]
+
+

get blob enclosing_circle

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns a circle tuple (x, y, r) of the circle that encloses the min area rectangle of a blob.
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> enclosing_circle()
+
+
+

enclosed_ellipse

+ +
def enclosed_ellipse(self) -> list[int]
+
+

get blob enclosed_ellipse

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns an ellipse tuple (x, y, rx, ry, rotation) of the ellipse that fits inside of the min area rectangle of a blob.
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> enclosed_ellipse()
+
+
+
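A hedged sketch of consuming `Blob` results. The camera setup and `find_blobs()` (including its threshold format and keyword arguments) are assumptions not documented in this section; the accessor calls are the ones documented above.

```python
from maix import camera

cam = camera.Camera(320, 240)                    # assumed camera API
img = cam.read()
thresholds = [[0, 80, 40, 80, 10, 80]]           # assumed LAB threshold format
for b in img.find_blobs(thresholds, pixels_threshold=100):   # find_blobs() is assumed
    print(b.x(), b.y(), b.w(), b.h())            # bounding box
    print(b.cx(), b.cy(), b.pixels())            # centroid and pixel count
    print(b.rotation_deg(), b.roundness(), b.elongation())
```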

QRCode

+

QRCode class

+
+

C++ defination code:

+ +
class QRCode
+
+
+

__init__

+ +
def __init__(self, rect: list[int], corners: list[list[int]], payload: str, version: int, ecc_level: int, mask: int, data_type: int, eci: int) -> None
+
+

QRCode constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramrect: rect of corners, type is std::vector
corners: corners of QRCode
payload: payload of the QRCode
version: version of the QRCode
ecc_level: ecc_level of the QRCode
mask: mask of the QRCode
data_type: data_type of the QRCode
eci: eci of the QRCode
staticFalse
+
+

C++ defination code:

+ +
QRCode(std::vector<int> &rect, std::vector<std::vector<int>> &corners, std::string &payload, int version, int ecc_level, int mask, int data_type, int eci)
+
+
+

__getitem__

+ +
def __getitem__(self, index: int) -> int
+
+

Subscript operator

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramindex: [0] Returns the qrcode’s bounding box x coordinate
[1] Returns the qrcode’s bounding box y coordinate
[2] Returns the qrcode’s bounding box w coordinate
[3] Returns the qrcode’s bounding box h coordinate
[4] Not support this index, try to use payload() method
[5] Returns the version of qrcode
[6] Returns the error correction level of qrcode
[7] Returns the mask of qrcode
[8] Returns the datatype of qrcode
[9] Returns the eci of qrcode
returnint&
staticFalse
+
+

C++ defination code:

+ +
int &__getitem__(int index)
+
+
+

corners

+ +
def corners(self) -> list[list[int]]
+
+

get coordinate of QRCode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn the coordinate of the QRCode.
staticFalse
+
+

C++ defination code:

+ +
std::vector<std::vector<int>> corners()
+
+
+

rect

+ +
def rect(self) -> list[int]
+
+

get rectangle of QRCode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn the rectangle of the QRCode. format is {x, y, w, h}, type is std::vector
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> rect()
+
+
+

x

+ +
def x(self) -> int
+
+

get x of QRCode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn x of the QRCode, type is int
staticFalse
+
+

C++ defination code:

+ +
int x()
+
+
+

y

+ +
def y(self) -> int
+
+

get y of QRCode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn y of the QRCode, type is int
staticFalse
+
+

C++ defination code:

+ +
int y()
+
+
+

w

+ +
def w(self) -> int
+
+

get w of QRCode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn w of the QRCode, type is int
staticFalse
+
+

C++ defination code:

+ +
int w()
+
+
+

h

+ +
def h(self) -> int
+
+

get h of QRCode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn h of the QRCode, type is int
staticFalse
+
+

C++ defination code:

+ +
int h()
+
+
+

payload

+ +
def payload(self) -> str
+
+

get QRCode payload

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn payload of the QRCode, type is std::string
staticFalse
+
+

C++ defination code:

+ +
std::string payload()
+
+
+

version

+ +
def version(self) -> int
+
+

get QRCode version

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn version of the QRCode
staticFalse
+
+

C++ defination code:

+ +
int version()
+
+
+

ecc_level

+ +
def ecc_level(self) -> int
+
+

get QRCode error correction level

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn error correction level of the QRCode
staticFalse
+
+

C++ defination code:

+ +
int ecc_level()
+
+
+

mask

+ +
def mask(self) -> int
+
+

get QRCode mask

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn mask of the QRCode
staticFalse
+
+

C++ defination code:

+ +
int mask()
+
+
+

data_type

+ +
def data_type(self) -> int
+
+

get QRCode dataType

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn data type of the QRCode
staticFalse
+
+

C++ defination code:

+ +
int data_type()
+
+
+

eci

+ +
def eci(self) -> int
+
+

get QRCode eci

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn eci of the QRCode
staticFalse
+
+

C++ defination code:

+ +
int eci()
+
+
+

is_numeric

+ +
def is_numeric(self) -> bool
+
+

check QRCode is numeric

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn true if the result type of the QRCode is numeric
staticFalse
+
+

C++ defination code:

+ +
bool is_numeric()
+
+
+

is_alphanumeric

+ +
def is_alphanumeric(self) -> bool
+
+

check QRCode is alphanumeric

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn true if the result type of the QRCode is alphanumeric
staticFalse
+
+

C++ defination code:

+ +
bool is_alphanumeric()
+
+
+

is_binary

+ +
def is_binary(self) -> bool
+
+

check QRCode is binary

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn true if the result type of the QRCode is binary
staticFalse
+
+

C++ defination code:

+ +
bool is_binary()
+
+
+

is_kanji

+ +
def is_kanji(self) -> bool
+
+

check QRCode is kanji

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn true if the result type of the QRCode is kanji
staticFalse
+
+

C++ defination code:

+ +
bool is_kanji()
+
+
+
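A hedged sketch of reading `QRCode` results; the camera setup and `find_qrcodes()` are assumptions not documented in this section, while the accessors are documented above.

```python
from maix import camera

cam = camera.Camera(320, 240)          # assumed camera API
img = cam.read()
for qr in img.find_qrcodes():          # find_qrcodes() is assumed
    print(qr.payload(), qr.version(), qr.ecc_level())
    print(qr.rect())                   # [x, y, w, h]
    if qr.is_numeric():
        print("numeric-only payload")
```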

AprilTag

+

AprilTag class

+
+

C++ defination code:

+ +
class AprilTag
+
+
+

__init__

+ +
def __init__(self, rect: list[int], corners: list[list[int]], id: int, famliy: int, centroid_x: float, centroid_y: float, rotation: float, decision_margin: float, hamming: int, goodness: float, x_translation: float, y_translation: float, z_translation: float, x_rotation: float, y_rotation: float, z_rotation: float) -> None
+
+

AprilTag constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramrect: Includes the top-left corner and the width and height of the rectangle. format is {x, y, w, h}, type is std::vector
corners: Includes the four corners of the rectangle. format is {{x0, y0}, {x1, y1}, {x2, y2}, {x3, y3}}, type is std::vector<std::vector>
id: The id of the AprilTag
famliy: The family of the AprilTag
centroid_x: The x coordinate of the center of the AprilTag
centroid_y: The y coordinate of the center of the AprilTag
rotation: The rotation of the AprilTag
decision_margin: The decision_margin of the AprilTag
hamming: The hamming of the AprilTag
goodness: The goodness of the AprilTag
x_translation: The x_translation of the AprilTag
y_translation: The y_translation of the AprilTag
z_translation: The z_translation of the AprilTag
x_rotation: The x_rotation of the AprilTag
y_rotation: The y_rotation of the AprilTag
z_rotation: The z_rotation of the AprilTag
staticFalse
+
+

C++ defination code:

+ +
AprilTag(std::vector<int> &rect, std::vector<std::vector<int>> &corners, int id, int famliy, float centroid_x, float centroid_y, float rotation, float decision_margin, int hamming, float goodness, float x_translation, float y_translation, float z_translation, float x_rotation, float y_rotation, float z_rotation)
+
+
+

__getitem__

+ +
def __getitem__(self, index: int) -> int
+
+

Subscript operator

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramindex: [0] Returns the apriltag’s bounding box x coordinate
[1] Returns the apriltag’s bounding box y coordinate
[2] Returns the apriltag’s bounding box w coordinate
[3] Returns the apriltag’s bounding box h coordinate
[4] Returns the apriltag’s id
[5] Returns the apriltag’s family
[6] Not support
[7] Not support
[8] Not support
[9] Not support
[10] Returns the apriltag’s hamming
[11] Not support
[12] Not support
[13] Not support
[14] Not support
[15] Not support
[16] Not support
[17] Not support
returnint&
staticFalse
+
+

C++ defination code:

+ +
int &__getitem__(int index)
+
+
+

corners

+ +
def corners(self) -> list[list[int]]
+
+

get coordinate of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn the coordinate of the AprilTag.
staticFalse
+
+

C++ defination code:

+ +
std::vector<std::vector<int>> corners()
+
+
+

rect

+ +
def rect(self) -> list[int]
+
+

get rectangle of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn the rectangle of the AprilTag. format is {x, y, w, h}, type is std::vector
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> rect()
+
+
+

x

+ +
def x(self) -> int
+
+

get x of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn x of the AprilTag, type is int
staticFalse
+
+

C++ defination code:

+ +
int x()
+
+
+

y

+ +
def y(self) -> int
+
+

get y of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn y of the AprilTag, type is int
staticFalse
+
+

C++ defination code:

+ +
int y()
+
+
+

w

+ +
def w(self) -> int
+
+

get w of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn w of the AprilTag, type is int
staticFalse
+
+

C++ defination code:

+ +
int w()
+
+
+

h

+ +
def h(self) -> int
+
+

get h of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn h of the AprilTag, type is int
staticFalse
+
+

C++ defination code:

+ +
int h()
+
+
+

id

+ +
def id(self) -> int
+
+

get id of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn id of the AprilTag, type is int
staticFalse
+
+

C++ defination code:

+ +
int id()
+
+
+

family

+ +
def family(self) -> int
+
+

get family of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn family of the AprilTag, type is int
staticFalse
+
+

C++ defination code:

+ +
int family()
+
+
+

cx

+ +
def cx(self) -> int
+
+

get cx of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn cx of the AprilTag, type is int
staticFalse
+
+

C++ defination code:

+ +
int cx()
+
+
+

cxf

+ +
def cxf(self) -> float
+
+

get cxf of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn cxf of the AprilTag, type is float
staticFalse
+
+

C++ defination code:

+ +
float cxf()
+
+
+

cy

+ +
def cy(self) -> int
+
+

get cy of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn cy of the AprilTag, type is int
staticFalse
+
+

C++ defination code:

+ +
int cy()
+
+
+

cyf

+ +
def cyf(self) -> float
+
+

get cyf of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn cyf of the AprilTag, type is float
staticFalse
+
+

C++ defination code:

+ +
float cyf()
+
+
+

rotation

+ +
def rotation(self) -> float
+
+

get rotation of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn rotation of the AprilTag, type is float
staticFalse
+
+

C++ defination code:

+ +
float rotation()
+
+
+

decision_margin

+ +
def decision_margin(self) -> float
+
+

Get decision_margin of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the quality of the apriltag match (0.0 - 1.0) where 1.0 is the best.
staticFalse
+
+

C++ defination code:

+ +
float decision_margin()
+
+
+

hamming

+ +
def hamming(self) -> int
+
+

get hamming of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the number of accepted bit errors for this tag.
A return of 0 means 0 bit errors will be accepted.
1 is TAG25H7, meaning up to 1 bit error may be accepted.
2 is TAG25H9, meaning up to 3 bit errors may be accepted.
3 is TAG36H10, meaning up to 3 bit errors may be accepted.
4 is TAG36H11, meaning up to 4 bit errors may be accepted.
5 is ARTOOLKIT, meaning 0 bit errors will be accepted.
staticFalse
+
+

C++ defination code:

+ +
int hamming()
+
+
+

goodness

+ +
def goodness(self) -> float
+
+

get goodness of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn goodness of the AprilTag, type is float
Note: This value is always 0.0 for now.
staticFalse
+
+

C++ defination code:

+ +
float goodness()
+
+
+

x_translation

+ +
def x_translation(self) -> float
+
+

get x_translation of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn x_translation of the AprilTag, type is float
staticFalse
+
+

C++ defination code:

+ +
float x_translation()
+
+
+

y_translation

+ +
def y_translation(self) -> float
+
+

get y_translation of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn y_translation of the AprilTag, type is float
staticFalse
+
+

C++ defination code:

+ +
float y_translation()
+
+
+

z_translation

+ +
def z_translation(self) -> float
+
+

get z_translation of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn z_translation of the AprilTag, type is float
staticFalse
+
+

C++ defination code:

+ +
float z_translation()
+
+
+

x_rotation

+ +
def x_rotation(self) -> float
+
+

get x_rotation of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn x_rotation of the AprilTag, type is float
staticFalse
+
+

C++ defination code:

+ +
float x_rotation()
+
+
+

y_rotation

+ +
def y_rotation(self) -> float
+
+

get y_rotation of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn y_rotation of the AprilTag, type is float
staticFalse
+
+

C++ defination code:

+ +
float y_rotation()
+
+
+

z_rotation

+ +
def z_rotation(self) -> float
+
+

get z_rotation of AprilTag

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn z_rotation of the AprilTag, type is float
staticFalse
+
+

C++ defination code:

+ +
float z_rotation()
+
+
+
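A hedged sketch of reading `AprilTag` results; the camera setup and `find_apriltags()` are assumptions not documented in this section, while the accessors are documented above.

```python
from maix import camera

cam = camera.Camera(320, 240)              # assumed camera API
img = cam.read()
for tag in img.find_apriltags():           # find_apriltags() is assumed
    print(tag.id(), tag.family())
    print(tag.cx(), tag.cy(), tag.rotation())
    print(tag.x_translation(), tag.y_translation(), tag.z_translation())
```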

DataMatrix

+

DataMatrix class

+
+

C++ defination code:

+ +
class DataMatrix
+
+
+

__init__

+ +
def __init__(self, rect: list[int], corners: list[list[int]], payload: str, rotation: float, rows: int, columns: int, capacity: int, padding: int) -> None
+
+

DataMatrix constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramrect: Includes the top-left corner and the width and height of the rectangle. format is {x, y, w, h}, type is std::vector
corners: Includes the four corners of the rectangle. format is {{x0, y0}, {x1, y1}, {x2, y2}, {x3, y3}}, type is std::vector<std::vector>
payload: The payload of the DataMatrix
rotation: The rotation of the DataMatrix
rows: The rows of the DataMatrix
columns: The columns of the DataMatrix
capacity: The capacity of the DataMatrix
padding: The padding of the DataMatrix
staticFalse
+
+

C++ defination code:

+ +
DataMatrix(std::vector<int> &rect, std::vector<std::vector<int>> &corners, std::string &payload, float rotation, int rows, int columns, int capacity, int padding)
+
+
+

__getitem__

+ +
def __getitem__(self, index: int) -> int
+
+

Subscript operator

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramindex: [0] get x of DataMatrix
[1] get y of DataMatrix
[2] get w of DataMatrix
[3] get h of DataMatrix
[4] Not support this index, try to use payload() method
[5] Not support this index, try to use rotation() method
[6] get rows of DataMatrix
[7] get columns of DataMatrix
[8] get capacity of DataMatrix
[9] get padding of DataMatrix
returnint&
staticFalse
+
+

C++ defination code:

+ +
int &__getitem__(int index)
+
+
+

corners

+ +
def corners(self) -> list[list[int]]
+
+

get coordinate of DataMatrix

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn the coordinate of the DataMatrix.
staticFalse
+
+

C++ defination code:

+ +
std::vector<std::vector<int>> corners()
+
+
+

rect

+ +
def rect(self) -> list[int]
+
+

get rectangle of DataMatrix

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn the rectangle of the DataMatrix. format is {x, y, w, h}, type is std::vector
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> rect()
+
+
+

x

+ +
def x(self) -> int
+
+

get x of DataMatrix

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn x of the DataMatrix, type is int
staticFalse
+
+

C++ defination code:

+ +
int x()
+
+
+

y

+ +
def y(self) -> int
+
+

get y of DataMatrix

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn y of the DataMatrix, type is int
staticFalse
+
+

C++ defination code:

+ +
int y()
+
+
+

w

+ +
def w(self) -> int
+
+

get w of DataMatrix

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn w of the DataMatrix, type is int
staticFalse
+
+

C++ defination code:

+ +
int w()
+
+
+

h

+ +
def h(self) -> int
+
+

get h of DataMatrix

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn h of the DataMatrix, type is int
staticFalse
+
+

C++ defination code:

+ +
int h()
+
+
+

payload

+ +
def payload(self) -> str
+
+

get payload of DataMatrix

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn payload of the DataMatrix, type is std::string
staticFalse
+
+

C++ defination code:

+ +
std::string payload()
+
+
+

rotation

+ +
def rotation(self) -> float
+
+

get rotation of DataMatrix

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn rotation of the DataMatrix, type is float
staticFalse
+
+

C++ defination code:

+ +
float rotation()
+
+
+

rows

+ +
def rows(self) -> int
+
+

get rows of DataMatrix

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn rows of the DataMatrix, type is int
staticFalse
+
+

C++ defination code:

+ +
int rows()
+
+
+

columns

+ +
def columns(self) -> int
+
+

get columns of DataMatrix

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn columns of the DataMatrix, type is int
staticFalse
+
+

C++ defination code:

+ +
int columns()
+
+
+

capacity

+ +
def capacity(self) -> int
+
+

get capacity of DataMatrix

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturns how many characters could fit in this data matrix, type is int
staticFalse
+
+

C++ defination code:

+ +
int capacity()
+
+
+

padding

+ +
def padding(self) -> int
+
+

get padding of DataMatrix

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturns how many unused characters are in this data matrix, type is int
staticFalse
+
+

C++ defination code:

+ +
int padding()
+
+
+
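A hedged sketch of reading `DataMatrix` results; the camera setup and `find_datamatrices()` are assumptions not documented in this section, while the accessors are documented above.

```python
from maix import camera

cam = camera.Camera(320, 240)                  # assumed camera API
img = cam.read()
for dm in img.find_datamatrices():             # find_datamatrices() is assumed
    print(dm.payload(), dm.rotation())
    print(dm.rows(), dm.columns(), dm.capacity(), dm.padding())
```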

BarCode

+

BarCode class

+
+

C++ defination code:

+ +
class BarCode
+
+
+

__init__

+ +
def __init__(self, rect: list[int], corners: list[list[int]], payload: str, type: int, rotation: float, quality: int) -> None
+
+

BarCode constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramrect: Includes the top-left corner and the width and height of the rectangle. format is {x, y, w, h}, type is std::vector
corners: Includes the four corners of the rectangle. format is {{x0, y0}, {x1, y1}, {x2, y2}, {x3, y3}}, type is std::vector<std::vector>
payload: The payload of the BarCode
type: The type of the BarCode
rotation: The rotation of the BarCode
quality: The quality of the BarCode
staticFalse
+
+

C++ defination code:

+ +
BarCode(std::vector<int> &rect, std::vector<std::vector<int>> &corners, std::string &payload, int type, float rotation, int quality)
+
+
+

__getitem__

+ +
def __getitem__(self, index: int) -> int
+
+

Subscript operator

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramindex: [0] get x of BarCode
[1] get y of BarCode
[2] get w of BarCode
[3] get h of BarCode
[4] Not support this index, try to use payload() method
[5] get type of BarCode
[6] Not support this index, try to use rotation() method
[7] get quality of BarCode
returnint&
staticFalse
+
+

C++ defination code:

+ +
int &__getitem__(int index)
+
+
+

corners

+ +
def corners(self) -> list[list[int]]
+
+

get coordinate of BarCode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn the coordinate of the BarCode.
staticFalse
+
+

C++ defination code:

+ +
std::vector<std::vector<int>> corners()
+
+
+

rect

+ +
def rect(self) -> list[int]
+
+

get rectangle of BarCode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn the rectangle of the BarCode. format is {x, y, w, h}, type is std::vector
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> rect()
+
+
+

x

+ +
def x(self) -> int
+
+

get x of BarCode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn x of the BarCode, type is int
staticFalse
+
+

C++ defination code:

+ +
int x()
+
+
+

y

+ +
def y(self) -> int
+
+

get y of BarCode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn y of the BarCode, type is int
staticFalse
+
+

C++ defination code:

+ +
int y()
+
+
+

w

+ +
def w(self) -> int
+
+

get w of BarCode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn w of the BarCode, type is int
staticFalse
+
+

C++ defination code:

+ +
int w()
+
+
+

h

+ +
def h(self) -> int
+
+

get h of BarCode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn h of the BarCode, type is int
staticFalse
+
+

C++ defination code:

+ +
int h()
+
+
+

payload

+ +
def payload(self) -> str
+
+

get payload of BarCode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn payload of the BarCode, type is std::string
staticFalse
+
+

C++ defination code:

+ +
std::string payload()
+
+
+

type

+ +
def type(self) -> int
+
+

get type of BarCode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn type of the BarCode, type is int
staticFalse
+
+

C++ defination code:

+ +
int type()
+
+
+

rotation

+ +
def rotation(self) -> float
+
+

get rotation of BarCode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn rotation of the BarCode, type is float. FIXME: always return 0.0
staticFalse
+
+

C++ defination code:

+ +
float rotation()
+
+
+

quality

+ +
def quality(self) -> int
+
+

get quality of BarCode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn quality of the BarCode, type is int
staticFalse
+
+

C++ defination code:

+ +
int quality()
+
+
+
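A hedged sketch of reading `BarCode` results; the camera setup and `find_barcodes()` are assumptions not documented in this section, while the accessors are documented above.

```python
from maix import camera

cam = camera.Camera(320, 240)          # assumed camera API
img = cam.read()
for bc in img.find_barcodes():         # find_barcodes() is assumed
    print(bc.payload(), bc.type(), bc.quality())
    print(bc.rect())                   # [x, y, w, h]
```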

Statistics

+

Statistics class

+
+

C++ defination code:

+ +
class Statistics
+
+
+

__init__

+ +
def __init__(self, format: Format, l_statistics: list[int], a_statistics: list[int], b_statistics: list[int]) -> None
+
+

Statistics constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramformat: The statistics source image format
l_statistics: The statistics of the L channel. format is {mean, median, mode, std_dev, min, max, lq, uq}, type is std::vector
a_statistics: The statistics of the A channel. format is {mean, median, mode, std_dev, min, max, lq, uq}, type is std::vector
b_statistics: The statistics of the B channel. format is {mean, median, mode, std_dev, min, max, lq, uq}, type is std::vector
staticFalse
+
+

C++ defination code:

+ +
Statistics(image::Format format, std::vector<int> &l_statistics, std::vector<int> &a_statistics, std::vector<int> &b_statistics)
+
+
+

__getitem__

+ +
def __getitem__(self, index: int) -> int
+
+

Subscript operator

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramindex: array index
returnint&
staticFalse
+
+

C++ defination code:

+ +
int &__getitem__(int index)
+
+
+

format

+ +
def format(self) -> Format
+
+

get format of Statistics source image

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn format of the Statistics source image, type is image::Format
staticFalse
+
+

C++ defination code:

+ +
image::Format format()
+
+
+

l_mean

+ +
def l_mean(self) -> int
+
+

get L channel mean

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn L channel mean, type is int
staticFalse
+
+

C++ defination code:

+ +
int l_mean()
+
+
+

l_median

+ +
def l_median(self) -> int
+
+

get L channel median

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn L channel median, type is int
staticFalse
+
+

C++ defination code:

+ +
int l_median()
+
+
+

l_mode

+ +
def l_mode(self) -> int
+
+

get L channel mode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn L channel mode, type is int
staticFalse
+
+

C++ defination code:

+ +
int l_mode()
+
+
+

l_std_dev

+ +
def l_std_dev(self) -> int
+
+

get L channel std_dev

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn L channel std_dev, type is int
staticFalse
+
+

C++ defination code:

+ +
int l_std_dev()
+
+
+

l_min

+ +
def l_min(self) -> int
+
+

get L channel min

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn L channel min, type is int
staticFalse
+
+

C++ defination code:

+ +
int l_min()
+
+
+

l_max

+ +
def l_max(self) -> int
+
+

get L channel max

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn L channel max, type is int
staticFalse
+
+

C++ defination code:

+ +
int l_max()
+
+
+

l_lq

+ +
def l_lq(self) -> int
+
+

get L channel lq

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn L channel lq, type is int
staticFalse
+
+

C++ defination code:

+ +
int l_lq()
+
+
+

l_uq

+ +
def l_uq(self) -> int
+
+

get L channel uq

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn L channel uq, type is int
staticFalse
+
+

C++ defination code:

+ +
int l_uq()
+
+
+

a_mean

+ +
def a_mean(self) -> int
+
+

get A channel mean

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn A channel mean, type is int
staticFalse
+
+

C++ defination code:

+ +
int a_mean()
+
+
+

a_median

+ +
def a_median(self) -> int
+
+

get A channel median

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn A channel median, type is int
staticFalse
+
+

C++ defination code:

+ +
int a_median()
+
+
+

a_mode

+ +
def a_mode(self) -> int
+
+

get A channel mode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn A channel mode, type is int
staticFalse
+
+

C++ defination code:

+ +
int a_mode()
+
+
+

a_std_dev

+ +
def a_std_dev(self) -> int
+
+

get A channel std_dev

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn A channel std_dev, type is int
staticFalse
+
+

C++ defination code:

+ +
int a_std_dev()
+
+
+

a_min

+ +
def a_min(self) -> int
+
+

get A channel min

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn A channel min, type is int
staticFalse
+
+

C++ defination code:

+ +
int a_min()
+
+
+

a_max

+ +
def a_max(self) -> int
+
+

get A channel max

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn A channel max, type is int
staticFalse
+
+

C++ defination code:

+ +
int a_max()
+
+
+

a_lq

+ +
def a_lq(self) -> int
+
+

get A channel lq

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn A channel lq, type is int
staticFalse
+
+

C++ defination code:

+ +
int a_lq()
+
+
+

a_uq

+ +
def a_uq(self) -> int
+
+

get A channel uq

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn A channel uq, type is int
staticFalse
+
+

C++ defination code:

+ +
int a_uq()
+
+
+

b_mean

+ +
def b_mean(self) -> int
+
+

get B channel mean

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn B channel mean, type is int
staticFalse
+
+

C++ defination code:

+ +
int b_mean()
+
+
+

b_median

+ +
def b_median(self) -> int
+
+

get B channel median

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn B channel median, type is int
staticFalse
+
+

C++ defination code:

+ +
int b_median()
+
+
+

b_mode

+ +
def b_mode(self) -> int
+
+

get B channel mode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn B channel mode, type is int
staticFalse
+
+

C++ defination code:

+ +
int b_mode()
+
+
+

b_std_dev

+ +
def b_std_dev(self) -> int
+
+

get B channel std_dev

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn B channel std_dev, type is int
staticFalse
+
+

C++ defination code:

+ +
int b_std_dev()
+
+
+

b_min

+ +
def b_min(self) -> int
+
+

get B channel min

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn B channel min, type is int
staticFalse
+
+

C++ defination code:

+ +
int b_min()
+
+
+

b_max

+ +
def b_max(self) -> int
+
+

get B channel max

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn B channel max, type is int
staticFalse
+
+

C++ defination code:

+ +
int b_max()
+
+
+

b_lq

+ +
def b_lq(self) -> int
+
+

get B channel lq

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn B channel lq, type is int
staticFalse
+
+

C++ defination code:

+ +
int b_lq()
+
+
+

b_uq

+ +
def b_uq(self) -> int
+
+

get B channel uq

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn B channel uq, type is int
staticFalse
+
+

C++ defination code:

+ +
int b_uq()
+
+
+

Displacement

+

Displacement class

+
+

C++ defination code:

+ +
class Displacement
+
+
+

__init__

+ +
def __init__(self, x_translation: float, y_translation: float, rotation: float, scale: float, response: float) -> None
+
+

Displacement constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx_translation: The x_translation of the Displacement
y_translation: The y_translation of the Displacement
rotation: The rotation of the Displacement
scale: The scale of the Displacement
response: The response of the Displacement
staticFalse
+
+

C++ defination code:

+ +
Displacement(float x_translation, float y_translation, float rotation, float scale, float response)
+
+
+

__getitem__

+ +
def __getitem__(self, index: int) -> int
+
+

Subscript operator

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramindex: array index
returnint&
staticFalse
+
+

C++ defination code:

+ +
int &__getitem__(int index)
+
+
+

x_translation

+ +
def x_translation(self) -> float
+
+

get x_translation of Displacement

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn x_translation of the Displacement, type is float
staticFalse
+
+

C++ defination code:

+ +
float x_translation()
+
+
+

y_translation

+ +
def y_translation(self) -> float
+
+

get y_translation of Displacement

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn y_translation of the Displacement, type is float
staticFalse
+
+

C++ defination code:

+ +
float y_translation()
+
+
+

rotation

+ +
def rotation(self) -> float
+
+

get rotation of Displacement

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn rotation of the Displacement, type is float
staticFalse
+
+

C++ defination code:

+ +
float rotation()
+
+
+

scale

+ +
def scale(self) -> float
+
+

get scale of Displacement

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn scale of the Displacement, type is float
staticFalse
+
+

C++ defination code:

+ +
float scale()
+
+
+

response

+ +
def response(self) -> float
+
+

get response of Displacement

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn response of the Displacement, type is float
staticFalse
+
+

C++ defination code:

+ +
float response()
+
+
+

Percentile

+

Percentile class

+
+

C++ defination code:

+ +
class Percentile
+
+
+

__init__

+ +
def __init__(self, l_value: int, a_value: int = 0, b_value: int = 0) -> None
+
+

Percentile constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paraml_value: for grayscale image, it is grayscale percentile value (between 0 and 255).
for rgb888 image, it is l channel percentile value of lab (between 0 and 100).
a_value: for rgb888 image, it is a channel percentile value of lab format(between -128 and 127).
b_value: for rgb888 image, it is b channel percentile value of lab format(between -128 and 127).
staticFalse
+
+

C++ defination code:

+ +
Percentile(int l_value, int a_value = 0, int b_value = 0)
+
+
+

__getitem__

+ +
def __getitem__(self, index: int) -> int
+
+

Subscript operator

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
int &__getitem__(int index)
+
+
+

value

+ +
def value(self) -> int
+
+

Return the grayscale percentile value (between 0 and 255).

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturns grayscale percentile value
staticFalse
+
+

C++ defination code:

+ +
int value()
+
+
+

l_value

+ +
def l_value(self) -> int
+
+

Return the l channel percentile value of lab format (between 0 and 100).

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturns l channel percentile value
staticFalse
+
+

C++ defination code:

+ +
int l_value()
+
+
+

a_value

+ +
def a_value(self) -> int
+
+

Return the a channel percentile value of lab format (between -128 and 127).

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturns a channel percentile value
staticFalse
+
+

C++ defination code:

+ +
int a_value()
+
+
+

b_value

+ +
def b_value(self) -> int
+
+

Return the b channel percentile value of lab format (between -128 and 127).

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturns b channel percentile value
staticFalse
+
+

C++ defination code:

+ +
int b_value()
+
+
+

Threshold

+

Threshold class

+
+

C++ defination code:

+ +
class Threshold
+
+
+

__init__

+ +
def __init__(self, l_value: int, a_value: int = 0, b_value: int = 0) -> None
+
+

Threshold constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paraml_value: for grayscale image, it is grayscale threshold value (between 0 and 255).
for rgb888 image, it is l channel threshold value of lab (between 0 and 100).
a_value: for rgb888 image, it is a channel threshold value of lab format(between -128 and 127).
b_value: for rgb888 image, it is b channel threshold value of lab format(between -128 and 127).
staticFalse
+
+

C++ defination code:

+ +
Threshold(int l_value, int a_value = 0, int b_value = 0)
+
+
+

__getitem__

+ +
def __getitem__(self, index: int) -> int
+
+

Subscript operator

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
int &__getitem__(int index)
+
+
+

value

+ +
def value(self) -> int
+
+

Return the grayscale threshold value (between 0 and 255).

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturns grayscale threshold value
staticFalse
+
+

C++ defination code:

+ +
int value()
+
+
+

l_value

+ +
def l_value(self) -> int
+
+

Return the l channel threshold value of lab format (between 0 and 100).

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturns l channel percentile value
staticFalse
+
+

C++ defination code:

+ +
int l_value()
+
+
+

a_value

+ +
def a_value(self) -> int
+
+

Return the a channel threshold value of lab format (between -128 and 127).

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturns a channel percentile value
staticFalse
+
+

C++ defination code:

+ +
int a_value()
+
+
+

b_value

+ +
def b_value(self) -> int
+
+

Return the b channel threshold value of lab format (between -128 and 127).

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturns b channel percentile value
staticFalse
+
+

C++ defination code:

+ +
int b_value()
+
+
+

Histogram

+

Histogram class

+
+

C++ defination code:

+ +
class Histogram
+
+
+

__init__

+ +
def __init__(self, l_bin: list[float], a_bin: list[float], b_bin: list[float], format: Format = ...) -> None
+
+

Histogram constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paraml_bin: for grayscale image, it is the grayscale histogram value list (the range of element values in the list is 0 to 255).
for rgb888 image, it is the l channel histogram value list of lab (the range of element values in the list is 0 to 100).
a_bin: for rgb888 image, it is the a channel histogram value list of lab format (the range of element values in the list is -128 to 127).
b_bin: for rgb888 image, it is the b channel histogram value list of lab format (the range of element values in the list is -128 to 127).
format: format of the source image
staticFalse
+
+

C++ defination code:

+ +
Histogram(std::vector<float> l_bin, std::vector<float> a_bin, std::vector<float> b_bin, image::Format format = image::Format::FMT_RGB888)
+
+
+

__getitem__

+ +
def __getitem__(self, index: int) -> int
+
+

Subscript operator

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
int &__getitem__(int index)
+
+
+

bins

+ +
def bins(self) -> list[float]
+
+

Returns a list of floats for the grayscale histogram.

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
std::vector<float> bins()
+
+
+

l_bins

+ +
def l_bins(self) -> list[float]
+
+

Returns a list of floats for the RGB565 histogram LAB L channel.

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
std::vector<float> l_bins()
+
+
+

a_bins

+ +
def a_bins(self) -> list[float]
+
+

Returns a list of floats for the A channel of the LAB histogram.

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
std::vector<float> a_bins()
+
+
+

b_bins

+ +
def b_bins(self) -> list[float]
+
+

Returns a list of floats for the B channel of the LAB histogram.

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
std::vector<float> b_bins()
+
+
+

get_percentile

+ +
def get_percentile(self, percentile: float) -> Percentile
+
+

Computes the CDF of the histogram channels and returns an image::Percentile object

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampercentile: the values of the histogram at the passed in percentile (0.0 - 1.0) (float).
So, if you pass in 0.1 this method will tell you (going from left-to-right in the histogram)
what bin when summed into an accumulator caused the accumulator to cross 0.1. This is useful
to determine min (with 0.1) and max (with 0.9) of a color distribution without outlier effects
ruining your results for adaptive color tracking.
returnimage::Percentile object
staticFalse
+
+

C++ defination code:

+ +
image::Percentile get_percentile(float percentile)
+
+
+

get_threshold

+ +
def get_threshold(self) -> Threshold
+
+

Uses Otsu's Method to compute the optimal threshold values that split the histogram into two halves for each channel of the histogram and returns an image::Threshold object.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnimage::Threshold object
staticFalse
+
+

C++ defination code:

+ +
image::Threshold get_threshold()
+
+
+

get_statistics

+ +
def get_statistics(self) -> Statistics
+
+

Computes the mean, median, mode, standard deviation, min, max, lower quartile, and upper quartile of each color channel in the histogram and returns an image::Statistics object.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnimage::Statistics object
staticFalse
+
+

C++ defination code:

+ +
image::Statistics get_statistics()
+
+
+
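
A sketch of querying a Histogram with the methods above. The hand-made bins are purely illustrative (their lengths and values are assumptions); in real code the histogram normally comes from an image rather than being built by hand:

```python
from maix import image

# Dummy, uniform bins just to exercise the API
l_bin = [1.0 / 100] * 100
a_bin = [1.0 / 256] * 256
b_bin = [1.0 / 256] * 256
hist = image.Histogram(l_bin, a_bin, b_bin, image.Format.FMT_RGB888)

p_low = hist.get_percentile(0.1)    # image.Percentile: left tail of the distribution
p_high = hist.get_percentile(0.9)   # right tail, useful for adaptive color tracking
th = hist.get_threshold()           # Otsu split per channel -> image.Threshold
stats = hist.get_statistics()       # mean/median/mode/stdev/... -> image.Statistics
print(th.l_value(), th.a_value(), th.b_value())
```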

LBPKeyPoint

+

LBPKeyPoint class

+
+

C++ defination code:

+ +
class LBPKeyPoint
+
+
+

__init__

+ +
def __init__(self, data: list[int]) -> None
+
+

LBPKeyPoint constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdata: The data of the LBPKeyPoint
staticFalse
+
+

C++ defination code:

+ +
LBPKeyPoint(std::valarray<uint8_t> &data)
+
+
+

KeyPoint

+

KeyPoint class

+
+

C++ defination code:

+ +
class KeyPoint
+
+
+

__init__

+ +
def __init__(self, x: int, y: int, score: int, octave: int, angle: int, matched: int, desc: list[int]) -> None
+
+

KeyPoint constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx: The x of the KeyPoint
y: The y of the KeyPoint
score: The score of the KeyPoint
octave: The octave of the KeyPoint
angle: The angle of the KeyPoint
matched: The matched of the KeyPoint
desc: The desc of the KeyPoint
staticFalse
+
+

C++ defination code:

+ +
KeyPoint(uint16_t x, uint16_t y, uint16_t score, uint16_t octave, uint16_t angle, uint16_t matched, std::vector<uint8_t> &desc)
+
+
+

KPTMatch

+

KPTMatch class

+
+

C++ defination code:

+ +
class KPTMatch
+
+
+

__init__

+ +
def __init__(self, cx: int, cy: int, x: int, y: int, w: int, h: int, score: int, theta: int, match: int) -> None
+
+

KPTMatch constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcx: The cx of the KPTMatch
cy: The cy of the KPTMatch
x: The x of the KPTMatch
y: The y of the KPTMatch
w: The w of the KPTMatch
h: The h of the KPTMatch
score: The score of the KPTMatch
theta: The theta of the KPTMatch
match: The match of the KPTMatch
staticFalse
+
+

C++ defination code:

+ +
KPTMatch(int cx, int cy, int x, int y, int w, int h, int score, int theta, int match)
+
+
+

ORBKeyPoint

+

ORBKeyPoint class

+
+

C++ defination code:

+ +
class ORBKeyPoint
+
+
+

__init__

+ +
def __init__(self, data: list[KeyPoint], threshold: int, normalized: bool) -> None
+
+

ORBKeyPoint constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdata: The data of the ORBKeyPoint
threshold: The threshold of the ORBKeyPoint
normalized: The normalized of the ORBKeyPoint
staticFalse
+
+

C++ defination code:

+ +
ORBKeyPoint(std::vector<image::KeyPoint> &data, int threshold, bool normalized)
+
+
+

get_data

+ +
def get_data(self) -> list[KeyPoint]
+
+

get data of ORBKeyPoint

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnreturn data of the ORBKeyPoint, type is std::vector
staticFalse
+
+

C++ defination code:

+ +
std::vector<image::KeyPoint> get_data()
+
+
+

HaarCascade

+

HaarCascade class

+
+

C++ defination code:

+ +
class HaarCascade
+
+
+

__init__

+ +
def __init__(self) -> None
+
+

HaarCascade constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
HaarCascade()
+
+
+

Color

+

Color class

+
+

C++ defination code:

+ +
class Color
+
+
+

__init__

+ +
def __init__(self, ch1: int, ch2: int = 0, ch3: int = 0, alpha: float = 0, format: Format = ...) -> None
+
+

Color constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramalpha: alpha channel, value range: 0 ~ 1
staticFalse
+
+

C++ defination code:

+ +
Color(uint8_t ch1, uint8_t ch2 = 0, uint8_t ch3 = 0, float alpha = 0, image::Format format = image::FMT_GRAYSCALE)
+
+
+

r

+

Color red channel

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
uint8_t r
+
+
+

g

+

Color green channel

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
uint8_t g
+
+
+

b

+

Color blue channel

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
uint8_t b
+
+
+

alpha

+

Color alpha channel, value from 0.0 to 1.0, float value

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
float alpha
+
+
+

gray

+

Color gray channel

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
uint8_t gray
+
+
+

format

+

Color format

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
image::Format format
+
+
+

hex

+ +
def hex(self) -> int
+
+

Get color's hex value

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
uint32_t hex()
+
+
+

from_rgb

+ +
def from_rgb(r: int, g: int, b: int) -> Color
+
+

Create Color object from RGB channels

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticTrue
+
+

C++ defination code:

+ +
static image::Color from_rgb(uint8_t r, uint8_t g, uint8_t b)
+
+
+

from_bgr

+ +
def from_bgr(b: int, g: int, r: int) -> Color
+
+

Create Color object from BGR channels

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticTrue
+
+

C++ defination code:

+ +
static image::Color from_bgr(uint8_t b, uint8_t g, uint8_t r)
+
+
+

from_gray

+ +
def from_gray(gray: int) -> Color
+
+

Create Color object from gray channel

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticTrue
+
+

C++ defination code:

+ +
static image::Color from_gray(uint8_t gray)
+
+
+

from_rgba

+ +
def from_rgba(r: int, g: int, b: int, alpha: float) -> Color
+
+

Create Color object from RGBA channels

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramalpha: alpha channel, float value, value range: 0 ~ 1
staticTrue
+
+

C++ defination code:

+ +
static image::Color from_rgba(uint8_t r, uint8_t g, uint8_t b, float alpha)
+
+
+

from_bgra

+ +
def from_bgra(b: int, g: int, r: int, alpha: float) -> Color
+
+

Create Color object from BGRA channels

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramalpha: alpha channel, float value, value range: 0 ~ 1
staticTrue
+
+

C++ defination code:

+ +
static image::Color from_bgra(uint8_t b, uint8_t g, uint8_t r, float alpha)
+
+
+

from_hex

+ +
def from_hex(hex: int, format: Format) -> Color
+
+

Create Color object from hex value

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramhex: hex value, e.g. 0x0000FF00, the lower address is the first channel
format: color format, @see image::Format
staticTrue
+
+

C++ defination code:

+ +
static image::Color from_hex(uint32_t hex, image::Format &format)
+
+
+

to_format

+ +
def to_format(self, format: Format) -> None
+
+

Convert Color format

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramformat: format want to convert to, @see image::Format, only support RGB888, BGR888, RGBA8888, BGRA8888, GRAYSCALE.
staticFalse
+
+

C++ defination code:

+ +
void to_format(const image::Format &format)
+
+
+

to_format2

+ +
def to_format2(self, format: Format) -> Color
+
+

Convert color format and return a new Color object

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramformat: format want to convert to, @see image::Format, only support RGB888, BGR888, RGBA8888, BGRA8888, GRAYSCALE.
returnnew Color object, you need to delete it manually in C++.
staticFalse
+
+

C++ defination code:

+ +
image::Color *to_format2(const image::Format &format)
+
+
+
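
A short sketch of creating and converting Color objects with the constructors above; the channel values are arbitrary examples:

```python
from maix import image

red = image.Color.from_rgb(255, 0, 0)
print(red.r, red.g, red.b, hex(red.hex()))

semi = image.Color.from_rgba(0, 255, 0, 0.5)     # alpha in range 0 ~ 1

c = image.Color.from_rgb(30, 60, 90)
c.to_format(image.Format.FMT_GRAYSCALE)          # convert in place
print(c.gray)

g2 = red.to_format2(image.Format.FMT_GRAYSCALE)  # returns a new Color instead
print(g2.gray)
```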

Image

+

Image class

+
+

C++ defination code:

+ +
class Image
+
+
+

__init__

+ +
def __init__(self, width: int, height: int, format: Format = ...) -> None
+
+

Image constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramwidth: image width, should > 0
height: image height, should > 0
format: image format @see image::Format
staticFalse
+
+

C++ defination code:

+ +
Image(int width, int height, image::Format format = image::Format::FMT_RGB888)
+
+
+

format

+ +
def format(self) -> Format
+
+

Get image's format

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
seeimage.Format
staticFalse
+
+

C++ defination code:

+ +
image::Format format()
+
+
+

size

+ +
def size(self) -> Size
+
+

Get image's size, [width, height]

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
image::Size size()
+
+
+

data_size

+ +
def data_size(self) -> int
+
+

Get image's data size

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
int data_size()
+
+
+

width

+ +
def width(self) -> int
+
+

Get image's width

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
int width()
+
+
+

height

+ +
def height(self) -> int
+
+

Get image's height

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
int height()
+
+
+
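
A minimal sketch using the Image constructor and the getters above:

```python
from maix import image

img = image.Image(320, 240, image.Format.FMT_RGB888)
print(img.width(), img.height())   # 320 240
print(img.format())                # image.Format.FMT_RGB888
print(img.size())                  # [320, 240]
print(img.data_size())             # 320 * 240 * 3 bytes for RGB888
```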

data

+ +
def data(self) -> capsule
+
+

Get image's data pointer.\nIn MaixPy is capsule object.

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void *data()
+
+
+

__str__

+ +
def __str__(self) -> str
+
+

To string method

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
std::string __str__()
+
+
+

to_str

+ +
def to_str(self) -> str
+
+

To string method

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
std::string to_str()
+
+
+

get_pixel

+ +
def get_pixel(self, x: int, y: int, rgbtuple: bool = False) -> list[int]
+
+

Get pixel of image

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx: pixel's coordinate x. x must less than image's width
y: pixel's coordinate y. y must less than image's height
rgbtuple: controls the return format: if true the channels are returned as separate values, if false a single packed value is returned. default is false.
returnpixel value,
According to image format and rgbtuple, return different value:
format is FMT_RGB888, rgbtuple is true, return [R, G, B]; rgbtuple is false, return [RGB]
format is FMT_BGR888, rgbtuple is true, return [B, G, R]; rgbtuple is false, return [BGR]
format is FMT_GRAYSCALE, return [GRAY];
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> get_pixel(int x, int y, bool rgbtuple = false)
+
+
+

set_pixel

+ +
def set_pixel(self, x: int, y: int, pixel: list[int]) -> maix.err.Err
+
+

Set pixel of image

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx: pixel's coordinate x. x must less than image's width
y: pixel's coordinate y. y must less than image's height
pixel: pixel value, according to image format and size of pixel, has different operation:
format is FMT_RGB888, pixel size must be 1 or 3, if size is 1, will split pixel[0] to [R, G, B]; if size is 3, will use pixel directly
format is FMT_BGR888, pixel size must be 1 or 3, if size is 1, will split pixel[0] to [B, G, R]; if size is 3, will use pixel directly
format is FMT_GRAYSCALE, pixel size must be 1, will use pixel directly
returnerror code, Err::ERR_NONE is ok, other is error
staticFalse
+
+

C++ defination code:

+ +
err::Err set_pixel(int x, int y, std::vector<int> pixel)
+
+
+
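
A small sketch of set_pixel / get_pixel as documented above:

```python
from maix import image

img = image.Image(64, 64, image.Format.FMT_RGB888)
img.set_pixel(10, 20, [255, 0, 0])        # 3-element pixel, used directly
r, g, b = img.get_pixel(10, 20, rgbtuple=True)
packed = img.get_pixel(10, 20)            # rgbtuple=False: one packed value in a list
print(r, g, b, packed)
```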

to_tensor

+ +
def to_tensor(self, chw: bool = False, copy: bool = True) -> maix.tensor.Tensor
+
+

Convert Image object to tensor::Tensor object

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramchw: if true, the shape of tensor is [C, H, W], else [H, W, C]
copy: if true, will alloc memory for tensor data, else will use the memory of Image object
returntensor::Tensor object pointer, an allocated tensor object
staticFalse
+
+

C++ defination code:

+ +
tensor::Tensor *to_tensor(bool chw = false, bool copy = true)
+
+
+

to_bytes

+ +
def to_bytes(*args, **kwargs)
+
+

Get image's data and convert to array bytes

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcopy: if true, will allocate memory and copy data to a new buffer;
else will use the memory of the Image object directly: deleting the bytes object will not affect the Image object,
but deleting the Image object will make the bytes object invalid and may cause the program to crash.
So use this param carefully.
returnimage's data bytes, need be delete by caller in C++.
staticFalse
+
+

C++ defination code:

+ +
Bytes *to_bytes(bool copy = true)
+
+
+

to_format

+ +
def to_format(self, format: Format) -> Image
+
+

Convert image to specific format

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramformat: format want to convert to, @see image::Format, only support RGB888, BGR888, RGBA8888, BGRA8888, GRAYSCALE, JPEG.
returnnew image object. Need be delete by caller in C++.
throwerr.Exception, if conversion between the two formats is not supported, or the image is already in the target format, an exception will be raised
staticFalse
+
+

C++ defination code:

+ +
image::Image *to_format(const image::Format &format)
+
+
+

to_jpeg

+ +
def to_jpeg(self, quality: int = 95) -> Image
+
+

Convert image to jpeg

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramquality: the quality of jpg, default is 95. For MaixCAM supported range is (50, 100], if <= 50 will be fixed to 51.
returnnew image object. Need be delete by caller in C++.
throwerr.Exception, if conversion between the two formats is not supported, or the image is already in the target format, an exception will be raised
staticFalse
+
+

C++ defination code:

+ +
image::Image *to_jpeg(int quality = 95)
+
+
+
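
A sketch combining to_format, to_jpeg and to_bytes from the sections above; the quality value is only an example:

```python
from maix import image

img = image.Image(320, 240, image.Format.FMT_RGB888)
gray = img.to_format(image.Format.FMT_GRAYSCALE)   # returns a new image object
jpg = img.to_jpeg(quality=80)                      # JPEG-encoded copy
data = jpg.to_bytes()                              # encoded bytes (copy=True by default)
print(gray.format(), len(data))
```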

draw_image

+ +
def draw_image(self, x: int, y: int, img: Image) -> Image
+
+

Draw image on this image

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx: left top corner of image point's coordinate x
y: left top corner of image point's coordinate y
img: image object to draw; the drawn image's channel count must not exceed this image's channel count,
e.g. if caller is RGB888 and img is RGBA8888 an exception will be thrown, but caller RGBA8888 with img RGB888 or RGBA8888 is ok
returnthis image object self
staticFalse
+
+

C++ defination code:

+ +
image::Image *draw_image(int x, int y, image::Image &img)
+
+
+

draw_rect

+ +
def draw_rect(self, x: int, y: int, w: int, h: int, color: Color, thickness: int = 1) -> Image
+
+

Fill rectangle color to image

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx: left top corner of rectangle point's coordinate x
y: left top corner of rectangle point's coordinate y
w: rectangle width
h: rectangle height
color: rectangle color
thickness: rectangle thickness(line width), by default(value is 1), -1 means fill rectangle
returnthis image object self
staticFalse
+
+

C++ defination code:

+ +
image::Image *draw_rect(int x, int y, int w, int h, const image::Color &color, int thickness = 1)
+
+
+

draw_line

+ +
def draw_line(self, x1: int, y1: int, x2: int, y2: int, color: Color, thickness: int = 1) -> Image
+
+

Draw line on image

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx1: start point's coordinate x
y1: start point's coordinate y
x2: end point's coordinate x
y2: end point's coordinate y
color: line color @see image::Color
thickness: line thickness(line width), by default(value is 1)
returnthis image object self
staticFalse
+
+

C++ defination code:

+ +
image::Image *draw_line(int x1, int y1, int x2, int y2, const image::Color &color, int thickness = 1)
+
+
+

draw_circle

+ +
def draw_circle(self, x: int, y: int, radius: int, color: Color, thickness: int = 1) -> Image
+
+

Draw circle on image

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx: circle center point's coordinate x
y: circle center point's coordinate y
radius: circle radius
color: circle color @see image::Color
thickness: circle thickness(line width), by default(value is 1), -1 means fill circle
returnthis image object self
staticFalse
+
+

C++ defination code:

+ +
image::Image *draw_circle(int x, int y, int radius, const image::Color &color, int thickness = 1)
+
+
+

draw_ellipse

+ +
def draw_ellipse(self, x: int, y: int, a: int, b: int, angle: float, start_angle: float, end_angle: float, color: Color, thickness: int = 1) -> Image
+
+

Draw ellipse on image

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx: ellipse center point's coordinate x
y: ellipse center point's coordinate y
a: ellipse major axis length
b: ellipse minor axis length
angle: ellipse rotation angle
start_angle: ellipse start angle
end_angle: ellipse end angle
color: ellipse color @see image::Color
thickness: ellipse thickness(line width), by default(value is 1), -1 means fill ellipse
returnthis image object self
staticFalse
+
+

C++ defination code:

+ +
image::Image *draw_ellipse(int x, int y, int a, int b, float angle, float start_angle, float end_angle, const image::Color &color, int thickness = 1)
+
+
+

draw_string

+ +
def draw_string(self, x: int, y: int, textstring: str, color: Color = ..., scale: float = 1, thickness: int = -1, wrap: bool = True, wrap_space: int = 4, font: str = '') -> Image
+
+

Draw text on image

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx: text left top point's coordinate x
y: text left top point's coordinate y
textstring: text content
color: text color @see image::Color, default is white
scale: font scale, by default(value is 1)
thickness: text thickness(line width), if negative, the glyph is filled, by default(value is -1)
wrap: if true, will auto wrap text to next line if text width > image width, by default(value is true)
returnthis image object self
staticFalse
+
+

C++ defination code:

+ +
image::Image *draw_string(int x, int y, const std::string &textstring, const image::Color &color = image::COLOR_WHITE, float scale = 1, int thickness = -1,
+                                bool wrap = true, int wrap_space = 4, const std::string &font = "")
+
+
+
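
A sketch chaining the drawing helpers above; coordinates and colors are arbitrary:

```python
from maix import image

img = image.Image(320, 240, image.Format.FMT_RGB888)
red = image.Color.from_rgb(255, 0, 0)
white = image.Color.from_rgb(255, 255, 255)

img.draw_rect(10, 10, 100, 60, red, thickness=2)
img.draw_rect(120, 10, 50, 50, red, thickness=-1)   # -1 fills the rectangle
img.draw_line(0, 0, 319, 239, white)
img.draw_circle(160, 120, 30, white)
img.draw_string(10, 200, "hello MaixPy", color=white, scale=1.5)
```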

draw_cross

+ +
def draw_cross(self, x: int, y: int, color: Color, size: int = 5, thickness: int = 1) -> Image
+
+

Draw cross on image

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx: cross center point's coordinate x
y: cross center point's coordinate y
color: cross color @see image::Color
size: how long the lines of the cross extend, by default(value is 5). So the line length is 2 * size + thickness
thickness: cross thickness(line width), by default(value is 1)
staticFalse
+
+

C++ defination code:

+ +
image::Image *draw_cross(int x, int y, const image::Color &color, int size = 5, int thickness = 1)
+
+
+

draw_arrow

+ +
def draw_arrow(self, x0: int, y0: int, x1: int, y1: int, color: Color, thickness: int = 1) -> Image
+
+

Draw arrow on image

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx0: start coordinate of the arrow x0
y0: start coordinate of the arrow y0
x1: end coordinate of the arrow x1
y1: end coordinate of the arrow y1
color: arrow color @see image::Color
thickness: arrow thickness(line width), by default(value is 1)
returnthis image object self
staticFalse
+
+

C++ defination code:

+ +
image::Image *draw_arrow(int x0, int y0, int x1, int y1, const image::Color &color, int thickness = 1)
+
+
+

draw_edges

+ +
def draw_edges(self, corners: list[list[int]], color: Color, size: int = 0, thickness: int = 1, fill: bool = False) -> Image
+
+

Draw edges on image

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcorners: edges, [[x0, y0], [x1, y1], [x2, y2], [x3, y3]]
color: edges color @see image::Color
size: radius of the circle drawn at each corner. TODO: support in the future
thickness: edges thickness(line width), by default(value is 1)
fill: if true, will fill edges, by default(value is false)
returnthis image object self
staticFalse
+
+

C++ defination code:

+ +
image::Image *draw_edges(std::vector<std::vector<int>> corners, const image::Color &color, int size = 0, int thickness = 1, bool fill = false)
+
+
+

draw_keypoints

+ +
def draw_keypoints(self, keypoints: list[int], color: Color, size: int = 4, thickness: int = -1, line_thickness: int = 0) -> Image
+
+

Draw keypoints on image

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramkeypoints: keypoints, [x1, y1, x2, y2...] or [x1, y1, rotation_angle_in_degrees, x2, y2, rotation_angle_in_degrees2] (TODO: rotation_angle_in_degrees support in the future)
color: keypoints color @see image::Color
size: size of keypoints(radius)
thickness: keypoints thickness(line width), by default(value is -1 means fill circle)
line_thickness: line thickness, default 0 means not draw lines, > 0 will draw lines connect points.
returnthis image object self
staticFalse
+
+

C++ defination code:

+ +
image::Image *draw_keypoints(const std::vector<int> &keypoints, const image::Color &color, int size = 4, int thickness = -1, int line_thickness = 0)
+
+
+

resize

+ +
def resize(self, width: int, height: int, object_fit: Fit = ..., method: ResizeMethod = ...) -> Image
+
+

Resize image, will create a new resized image object

+ + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramwidth: new width, if value is -1, will use height to calculate aspect ratio
height: new height, if value is -1, will use width to calculate aspect ratio
object_fit: fill, contain, cover, by default is fill
method: resize method, by default is NEAREST (see the C++ definition code below)
returnAlways returns a new resized image object even if the size does not change, so in C++ you should take care of the return value to avoid a memory leak.
It's better to check whether the size has changed before calling this function to make the program more efficient, e.g.
if (img->width() != width || img->height() != height)
    img = img->resize(width, height);
staticFalse
+
+

C++ defination code:

+ +
image::Image *resize(int width, int height, image::Fit object_fit = image::Fit::FIT_FILL, image::ResizeMethod method = image::ResizeMethod::NEAREST)
+
+
+
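
A sketch of resize that follows the advice in the return description above: only resize when the size actually differs, and use -1 to keep the aspect ratio:

```python
from maix import image

img = image.Image(320, 240, image.Format.FMT_RGB888)

# resize always returns a new object, so skip it when nothing changes
if img.width() != 160 or img.height() != 120:
    img = img.resize(160, 120)

# height = 60, width computed from the aspect ratio
thumb = img.resize(-1, 60)
print(thumb.width(), thumb.height())
```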

affine

+ +
def affine(self, src_points: list[int], dst_points: list[int], width: int = -1, height: int = -1, method: ResizeMethod = ...) -> Image
+
+

Affine transform image, will create a new transformed image object

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramsrc_points: three source points, [x1, y1, x2, y2, x3, y3]
dst_points: three destination points, [x1, y1, x2, y2, x3, y3]
width: new width, if value is -1, will use height to calculate aspect ratio
height: new height, if value is -1, will use width to calculate aspect ratio
method: resize method, by default is bilinear
returnnew transformed image object
staticFalse
+
+

C++ defination code:

+ +
image::Image *affine(std::vector<int> src_points, std::vector<int> dst_points, int width = -1, int height = -1, image::ResizeMethod method = image::ResizeMethod::BILINEAR)
+
+
+

copy

+ +
def copy(self) -> Image
+
+

Copy image, will create a new copied image object

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnnew copied image object
staticFalse
+
+

C++ defination code:

+ +
image::Image *copy()
+
+
+

crop

+ +
def crop(self, x: int, y: int, w: int, h: int) -> Image
+
+

Crop image, will create a new cropped image object

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx: left top corner of crop rectangle point's coordinate x
y: left top corner of crop rectangle point's coordinate y
w: crop rectangle width
h: crop rectangle height
returnnew cropped image object
staticFalse
+
+

C++ defination code:

+ +
image::Image *crop(int x, int y, int w, int h)
+
+
+

rotate

+ +
def rotate(self, angle: float, width: int = -1, height: int = -1, method: ResizeMethod = ...) -> Image
+
+

Rotate image, will create a new rotated image object

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramangle: anti-clockwise rotate angle, if angle is 90 or 270, and width or height is -1, will swap width and height, or will throw exception
width: new width, if value is -1, will use height to calculate aspect ratio
height: new height, if value is -1, will use width to calculate aspect ratio
method: resize method, by default is bilinear
returnnew rotated image object
staticFalse
+
+

C++ defination code:

+ +
image::Image *rotate(float angle, int width = -1, int height = -1, image::ResizeMethod method = image::ResizeMethod::BILINEAR)
+
+
+
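
A small sketch of crop and rotate as documented above:

```python
from maix import image

img = image.Image(320, 240, image.Format.FMT_RGB888)
roi = img.crop(40, 30, 160, 120)   # new 160x120 image
rot = roi.rotate(90)               # width/height default to -1, so they are swapped
print(rot.width(), rot.height())   # 120 160
```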

mean_pool

+ +
def mean_pool(self, x_div: int, y_div: int, copy: bool = False) -> Image
+
+

Finds the mean of x_div * y_div squares in the image and returns the modified image composed of the mean of each square.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx_div: The width of the squares.
y_div: The height of the squares.
copy: Select whether to return a new image or modify the original image. default is false.
If true, returns a new image composed of the mean of each square; If false, returns the modified image composed of the mean of each square.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *mean_pool(int x_div, int y_div, bool copy = false)
+
+
+

midpoint_pool

+ +
def midpoint_pool(self, x_div: int, y_div: int, bias: float = 0.5, copy: bool = False) -> Image
+
+

Finds the midpoint of x_div * y_div squares in the image and returns the modified image composed of the midpoint of each square.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx_div: The width of the squares.
y_div: The height of the squares.
bias: The bias of the midpoint. default is 0.5.
midpoint value is equal to (max * bias + min * (1 - bias))
copy: Select whether to return a new image or modify the original image. default is false.
If true, returns a new image composed of the midpoint of each square; If false, returns the modified image composed of the midpoint of each square.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *midpoint_pool(int x_div, int y_div, double bias = 0.5, bool copy = false)
+
+
+

compress

+ +
def compress(self, quality: int = 95) -> Image
+
+

JPEG compresses the image in place, the same as the to_jpeg function; it's recommended to use to_jpeg instead.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramquality: The quality of the compressed image. default is 95.
returnReturns the compressed JPEG image
staticFalse
+
+

C++ defination code:

+ +
image::Image *compress(int quality = 95)
+
+
+

clear

+ +
def clear(self, mask: Image = None) -> Image
+
+

Sets all pixels in the image to zero

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *clear(image::Image *mask = nullptr)
+
+
+

mask_rectange

+ +
def mask_rectange(self, x: int = -1, y: int = -1, w: int = -1, h: int = -1) -> Image
+
+

Zeros a rectangular part of the image. If no arguments are supplied this method zeros the center of the image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx: The x coordinate of the top left corner of the rectangle.
y: The y coordinate of the top left corner of the rectangle.
w: The width of the rectangle.
h: The height of the rectangle.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *mask_rectange(int x = -1, int y = -1, int w = -1, int h = -1)
+
+
+

mask_circle

+ +
def mask_circle(self, x: int = -1, y: int = -1, radius: int = -1) -> Image
+
+

Zeros a circular part of the image. If no arguments are supplied this method zeros the center of the image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx: The x coordinate of the center of the circle.
y: The y coordinate of the center of the circle.
radius: The radius of the circle.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *mask_circle(int x = -1, int y = -1, int radius = -1)
+
+
+

mask_ellipse

+ +
def mask_ellipse(self, x: int = -1, y: int = -1, radius_x: int = -1, radius_y: int = -1, rotation_angle_in_degrees: float = 0) -> Image
+
+

Zeros an elliptical part of the image. If no arguments are supplied this method zeros the center of the image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx: The x coordinate of the center of the ellipse.
y: The y coordinate of the center of the ellipse.
radius_x: The radius of the ellipse in the x direction.
radius_y: The radius of the ellipse in the y direction.
rotation_angle_in_degrees: The rotation angle of the ellipse in degrees.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *mask_ellipse(int x = -1, int y = -1, int radius_x = -1, int radius_y = -1, float rotation_angle_in_degrees = 0)
+
+
+
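
A sketch of the mask_* helpers above (note the API spells the first one mask_rectange):

```python
from maix import image

img = image.Image(320, 240, image.Format.FMT_RGB888)
img.mask_circle(160, 120, 50)            # zero a circle around the given center
img.mask_rectange()                      # no arguments: zeros the center of the image
img.mask_ellipse(160, 120, 80, 40, 30)   # rotated ellipse, angle in degrees
```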

binary

+ +
def binary(self, thresholds: list[list[int]] = [], invert: bool = False, zero: bool = False, mask: Image = None, to_bitmap: bool = False, copy: bool = False) -> Image
+
+

Sets all pixels in the image to black or white depending on if the pixel is inside of a threshold in the threshold list thresholds or not.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
noteFor GRAYSCALE format, Lmin and Lmax range is [0, 255]. For RGB888 format, Lmin and Lmax range is [0, 100].
paramthresholds: You can define multiple thresholds.
For GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.
For RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.
Where the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.
invert: If true, the thresholds will be inverted before the operation. default is false.
zero: If zero is true, the image will be set the pixels within the threshold to 0, other pixels remain unchanged. If zero is false, the image will be set to black or white. default is false.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
to_bitmap: If true, the image will be converted to a bitmap image before thresholding. default is false. TODO: support in the future
copy: Select whether to return a new image or modify the original image. default is false.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *binary(std::vector<std::vector<int>> thresholds = std::vector<std::vector<int>>(), bool invert = false, bool zero = false, image::Image *mask = nullptr, bool to_bitmap = false, bool copy = false)
+
+
+
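
A sketch of binary with an LAB threshold list as described above; the threshold numbers are placeholders, not tuned values:

```python
from maix import image

img = image.Image(320, 240, image.Format.FMT_RGB888)
# One threshold: [Lmin, Lmax, Amin, Amax, Bmin, Bmax]
thresholds = [[0, 80, 20, 80, 10, 80]]
img.binary(thresholds)                                   # in place: pixels become black/white
copy_bw = img.binary(thresholds, invert=True, copy=True) # or work on a new image instead
```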

invert

+ +
def invert(self) -> Image
+
+

Inverts the image in place.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the image after the operation is completed
staticFalse
+
+

C++ defination code:

+ +
image::Image *invert()
+
+
+

b_and

+ +
def b_and(self, other: Image, mask: Image = None) -> Image
+
+

Performs a bitwise and operation between the image and the other image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramother: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *b_and(image::Image *other, image::Image *mask = nullptr)
+
+
+

b_nand

+ +
def b_nand(self, other: Image, mask: Image = None) -> Image
+
+

Performs a bitwise nand operation between the image and the other image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramother: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *b_nand(image::Image *other, image::Image *mask = nullptr)
+
+
+

b_or

+ +
def b_or(self, other: Image, mask: Image = None) -> Image
+
+

Performs a bitwise or operation between the image and the other image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramother: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *b_or(image::Image *other, image::Image *mask = nullptr)
+
+
+

b_nor

+ +
def b_nor(self, other: Image, mask: Image = None) -> Image
+
+

Performs a bitwise nor operation between the image and the other image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramother: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *b_nor(image::Image *other, image::Image *mask = nullptr)
+
+
+

b_xor

+ +
def b_xor(self, other: Image, mask: Image = None) -> Image
+
+

Performs a bitwise xor operation between the image and the other image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramother: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *b_xor(image::Image *other, image::Image *mask = nullptr)
+
+
+

b_xnor

+ +
def b_xnor(self, other: Image, mask: Image = None) -> Image
+
+

Performs a bitwise xnor operation between the image and the other image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramother: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *b_xnor(image::Image *other, image::Image *mask = nullptr)
+
+
+

awb

+ +
def awb(self, max: bool = False) -> Image
+
+

Performs an auto white balance operation on the image. TODO: support in the future

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammax: if True uses the white-patch algorithm instead. default is false.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *awb(bool max = false)
+
+
+

ccm

+ +
def ccm(self, matrix: list[float]) -> Image
+
+

Multiplies the passed (3x3) or (4x3) floating-point color-correction-matrix with the image.\nnote: Grayscale format is not supported.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammatrix: The color correction matrix to use. 3x3 or 4x3 matrix.
Weights may either be positive or negative, and the sum of each column in the 3x3 matrix should generally be 1.
example:
{
1, 0, 0,
0, 1, 0,
0, 0, 1,
}
Where the last row of the 4x3 matrix is an offset per color channel. If you add an offset you may wish to make the
weights sum to less than 1 to account for the offset.
example:
{
1, 0, 0,
0, 1, 0,
0, 0, 1,
0, 0, 0,
}
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *ccm(std::vector<float> &matrix)
+
+
+

gamma

+ +
def gamma(self, gamma: float = 1.0, contrast: float = 1.0, brightness: float = 0.0) -> Image
+
+

Quickly changes the image gamma, contrast, and brightness. Create a array whose size is usually 255,\nand use the parameters gamma, contrast, and brightness to calculate the value of the array, and then map the\nimage pixel value through the value of the array.\nThe calculation method for array is: array[array_idx] = (powf((array_idx / 255.0), (1 / gamma)) * contrast + brightness) * scale,\npowf is a function used to calculate floating point power.\narray is the array used for mapping.\narray_idx is the index of the array, the maximum value is determined according to the image format, usually 255.\nscale is a constant, the value is determined by the image format, usually 255.\nMapping method:\nAssume that a pixel value in the image is 128, then map the pixel value to the value of array[128]\nUsers can adjust the value of the array through the gamma, contrast, and brightness parameters.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramgamma: The contrast gamma greater than 1.0 makes the image darker in a non-linear manner while less than 1.0 makes the image brighter. default is 1.0.
contrast: The contrast value greater than 1.0 makes the image brighter in a linear manner while less than 1.0 makes the image darker. default is 1.0.
brightness: The brightness value greater than 0.0 makes the image brighter in a constant manner while less than 0.0 makes the image darker. default is 0.0.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *gamma(double gamma = 1.0, double contrast = 1.0, double brightness = 0.0)
+
+
+
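
A sketch of gamma, following the lookup-table formula described above; the parameter values are only examples:

```python
from maix import image

img = image.Image(320, 240, image.Format.FMT_RGB888)
# each pixel p is mapped through roughly:
#   ((p / 255) ** (1 / gamma) * contrast + brightness) * 255
img.gamma(gamma=1.5, contrast=1.0, brightness=0.0)
```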

gamma_corr

+ +
def gamma_corr(self, gamma: float, contrast: float = 1.0, brightness: float = 0.0) -> Image
+
+

Alias for Image.gamma.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramgamma: The contrast gamma greater than 1.0 makes the image darker in a non-linear manner while less than 1.0 makes the image brighter. default is 1.0.
contrast: The contrast value greater than 1.0 makes the image brighter in a linear manner while less than 1.0 makes the image darker. default is 1.0.
brightness: The brightness value greater than 0.0 makes the image brighter in a constant manner while less than 0.0 makes the image darker. default is 0.0.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *gamma_corr(double gamma, double contrast = 1.0, double brightness = 0.0)
+
+
+

negate

+ +
def negate(self) -> Image
+
+

Flips (numerically inverts) all pixels values in an image

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *negate()
+
+
+

replace

+ +
def replace(self, other: Image = None, hmirror: bool = False, vflip: bool = False, transpose: bool = False, mask: Image = None) -> Image
+
+

Replaces all pixels in the image with the corresponding pixels in the other image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramother: The other image should be an image and should be the same size as the image being operated on.
hmirror: If true, the image will be horizontally mirrored before the operation. default is false.
vflip: If true, the image will be vertically flipped before the operation. default is false.
transpose: If true, the image can be used to rotate 90 degrees or 270 degrees.
hmirror = false, vflip = false, transpose = false, the image will not be rotated.
hmirror = false, vflip = true, transpose = true, the image will be rotated 90 degrees.
hmirror = true, vflip = true, transpose = false, the image will be rotated 180 degrees.
hmirror = true, vflip = false, transpose = true, the image will be rotated 270 degrees.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *replace(image::Image *other = nullptr, bool hmirror = false, bool vflip = false, bool transpose = false, image::Image *mask = nullptr)
+
+
+
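
A sketch of the hmirror/vflip/transpose combinations from the table above, applied to copies so the original is untouched:

```python
from maix import image

img = image.Image(320, 240, image.Format.FMT_RGB888)
rot90 = img.copy().replace(vflip=True, transpose=True)
rot180 = img.copy().replace(hmirror=True, vflip=True)
rot270 = img.copy().replace(hmirror=True, transpose=True)
mirrored = img.copy().replace(hmirror=True)
```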

set

+ +
def set(self, other: Image, hmirror: bool = False, vflip: bool = False, transpose: bool = False, mask: Image = None) -> Image
+
+

Alias for Image::replace.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramother: The other image should be an image and should be the same size as the image being operated on.
hmirror: If true, the image will be horizontally mirrored before the operation. default is false.
vflip: If true, the image will be vertically flipped before the operation. default is false.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *set(image::Image *other, bool hmirror = false, bool vflip = false, bool transpose = false, image::Image *mask = nullptr)
+
+
+

add

+ +
def add(self, other: Image, mask: Image = None) -> Image
+
+

Adds the other image to the image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramother: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *add(image::Image *other, image::Image *mask = nullptr)
+
+
+

sub

+ +
def sub(self, other: Image, reverse: bool = False, mask: Image = None) -> Image
+
+

Subtracts the other image from the image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramother: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
reverse: If true, the image will be reversed before the operation. default is false.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *sub(image::Image *other, bool reverse = false, image::Image *mask = nullptr)
+
+
+

mul

+ +
def mul(self, other: Image, invert: bool = False, mask: Image = None) -> Image
+
+

Multiplies the image by the other image.\nNote: This method is meant for image blending and cannot multiply the pixels in the image by a scalar like 2.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramother: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
invert: If true, changes the multiplication operation from a*b to 1/((1/a)*(1/b)).
In particular, this lightens the image instead of darkening it (e.g. multiply versus burn operations). default is false.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *mul(image::Image *other, bool invert = false, image::Image *mask = nullptr)
+
+
+

div

+ +
def div(self, other: Image, invert: bool = False, mod: bool = False, mask: Image = None) -> Image
+
+

Divides the image by the other image.\nThis method is meant for image blending and cannot divide the pixels in the image by a scalar like 2.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramother: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
invert: If true, changes the division direction from a/b to b/a. default is false.
mod: If true, changes the division operation to the modulus operation. default is false.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *div(image::Image *other, bool invert = false, bool mod = false, image::Image *mask = nullptr)
+
+
+

min

+ +
def min(self, other: Image, mask: Image = None) -> Image
+
+

Calculate the minimum of each pixel in the image and the other image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramother: The other image should be an image and should be the same size as the image being operated on.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *min(image::Image *other, image::Image *mask = nullptr)
+
+
+

max

+ +
def max(self, other: Image, mask: Image = None) -> Image
+
+

Calculate the maximum of each pixel in the image and the other image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramother: The other image should be an image and should be the same size as the image being operated on.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *max(image::Image *other, image::Image *mask = nullptr)
+
+
+

difference

+ +
def difference(self, other: Image, mask: Image = None) -> Image
+
+

Calculate the absolute value of the difference between each pixel in the image and the other image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramother: The other image should be an image and should be the same size as the image being operated on.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *difference(image::Image *other, image::Image *mask = nullptr)
+
+
+

blend

+ +
def blend(self, other: Image, alpha: int = 128, mask: Image = None) -> Image
+
+

Blends the image with the other image.\nres = alpha * this_img / 256 + (256 - alpha) * other_img / 256

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramother: The other image should be an image and should be the same size as the image being operated on.
alpha: The alpha value of the blend, the value range is [0, 256],default is 128.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *blend(image::Image *other, int alpha = 128, image::Image *mask = nullptr)
+
+
+
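
A sketch of blend using the formula above (alpha=128 gives an even mix of the two images):

```python
from maix import image

a = image.Image(320, 240, image.Format.FMT_RGB888)
b = image.Image(320, 240, image.Format.FMT_RGB888)
# res = alpha * a / 256 + (256 - alpha) * b / 256
a.blend(b, alpha=128)
```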

histeq

+ +
def histeq(self, adaptive: bool = False, clip_limit: int = -1, mask: Image = None) -> Image
+
+

Runs the histogram equalization algorithm on the image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramadaptive: If true, an adaptive histogram equalization method will be run on the image instead, which generally gives better results than non-adaptive histogram equalization but has a longer run time. default is false.
clip_limit: Provides a way to limit the contrast of the adaptive histogram equalization. Use a small value for this, like 10, to produce good histogram equalized contrast limited images. default is -1.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *histeq(bool adaptive = false, int clip_limit = -1, image::Image *mask = nullptr)
+
+
+

mean

+ +
def mean(self, size: int, threshold: bool = False, offset: int = 0, invert: bool = False, mask: Image = None) -> Image
+
+

Standard mean blurring filter using a box filter.\nThe parameters offset and invert are valid when threshold is True.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramsize: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
threshold: If true, which will enable adaptive thresholding of the image which sets pixels to white or black based on a pixel’s brightness in relation to the brightness of the kernel of pixels around them.
default is false.
offset: The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.
invert: If true, the image will be inverted before the operation. default is false.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *mean(int size, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr)
+
+
+

median

+ +
def median(self, size: int, percentile: float = 0.5, threshold: bool = False, offset: int = 0, invert: bool = False, mask: Image = None) -> Image
+
+

Runs the median filter on the image. The median filter is the best filter for smoothing surfaces while preserving edges but it is very slow.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramsize: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
percentile: This parameter controls the percentile of the value used in the kernel. You can set this to 0 for a min filter, 0.25 for a lower quartile filter, 0.75 for an upper quartile filter, and 1.0 for a max filter. default is 0.5.
threshold: If true, which will enable adaptive thresholding of the image which sets pixels to white or black based on a pixel’s brightness in relation to the brightness of the kernel of pixels around them.
default is false.
offset: The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.
invert: If true, the image will be inverted before the operation. default is false.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *median(int size, double percentile = 0.5, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr)
+
+
+
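
A sketch of the mean and median filters above; the kernel size argument follows the ((size * 2) + 1) rule described in the tables:

```python
from maix import image

img = image.Image(320, 240, image.Format.FMT_GRAYSCALE)
img.mean(1)                     # 3x3 box blur
img.median(2, percentile=0.5)   # 5x5 median filter, edge-preserving but slow
```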

mode

+ +
def mode(self, size: int, threshold: bool = False, offset: int = 0, invert: bool = False, mask: Image = None) -> Image
+
+

Runs the mode filter on the image by replacing each pixel with the mode of their neighbors.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramsize: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
threshold: If true, adaptive thresholding is enabled, which sets pixels to white or black based on each pixel's brightness relative to the brightness of the kernel of pixels around it.
default is false.
offset: The larger the offset value, the more lower-brightness pixels in the original image will be set to white. default is 0.
invert: If true, the image will be inverted before the operation. default is false.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *mode(int size, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr)
+
+
+

midpoint

+ +
def midpoint(self, size: int, bias: float = 0.5, threshold: bool = False, offset: int = 0, invert: bool = False, mask: Image = None) -> Image
+
+

Runs the midpoint filter on the image. This filter finds the midpoint (max * bias + min * (1 - bias)) of each pixel neighborhood in the image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramsize: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
bias: The bias of the midpoint. default is 0.5.
threshold: If true, adaptive thresholding is enabled, which sets pixels to white or black based on each pixel's brightness relative to the brightness of the kernel of pixels around it.
default is false.
offset: The larger the offset value, the more lower-brightness pixels in the original image will be set to white. default is 0.
invert: If true, the image will be inverted before the operation. default is false.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *midpoint(int size, double bias = 0.5, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr)
+
+
+

morph

+ +
def morph(self, size: int, kernel: list[int], mul: float = -1, add: float = 0.0, threshold: bool = False, offset: int = 0, invert: bool = False, mask: Image = None) -> Image
+
+

Convolves the image by a filter kernel. This allows you to do general purpose convolutions on an image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramsize: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
kernel: The kernel used for convolution, given as a flat list of numbers whose length equals the actual kernel width times height (e.g. 9 values for a 3x3 kernel).
mul: This parameter is used to multiply the convolved pixel results. default is auto.
add: This parameter is the value to be added to each convolution pixel result. default is 0.0.
threshold: If true, adaptive thresholding is enabled, which sets pixels to white or black based on each pixel's brightness relative to the brightness of the kernel of pixels around it.
default is false.
offset: The larger the offset value, the more lower-brightness pixels in the original image will be set to white. default is 0.
invert: If true, the image will be inverted before the operation. default is false.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *morph(int size, std::vector<int> kernel, float mul = -1, float add = 0.0, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr)
+
+
+
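Example: a minimal sketch assuming `img` is an existing maix.image.Image; the 3x3 sharpen kernel values are placeholders chosen for illustration:

# flat list of ((size*2)+1)^2 values, here 9 values for size=1 (3x3)
sharpen_kernel = [-1, -1, -1,
                  -1,  9, -1,
                  -1, -1, -1]
out = img.morph(1, sharpen_kernel)   # mul=-1 keeps the automatic multiplier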

gaussian

+ +
def gaussian(self, size: int, unsharp: bool = False, mul: float = -1, add: float = 0.0, threshold: bool = False, offset: int = 0, invert: bool = False, mask: Image = None) -> Image
+
+

Convolves the image with a smoothing Gaussian kernel.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramsize: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
unsharp: If true, this method will perform an unsharp mask operation instead of gaussian filtering operation, this improves the clarity of image edges. default is false.
mul: This parameter is used to multiply the convolved pixel results. default is auto.
add: This parameter is the value to be added to each convolution pixel result. default is 0.0.
threshold: If true, adaptive thresholding is enabled, which sets pixels to white or black based on each pixel's brightness relative to the brightness of the kernel of pixels around it.
default is false.
offset: The larger the offset value, the more lower-brightness pixels in the original image will be set to white. default is 0.
invert: If true, the image will be inverted before the operation. default is false.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *gaussian(int size, bool unsharp = false, float mul = -1, float add = 0.0, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr)
+
+
+
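Example: a minimal sketch assuming `img` is an existing maix.image.Image:

smoothed = img.gaussian(1)                  # 3x3 Gaussian blur
sharpened = img.gaussian(1, unsharp=True)   # unsharp mask to enhance edge clarity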

laplacian

+ +
def laplacian(self, size: int, sharpen: bool = False, mul: float = -1, add: float = 0.0, threshold: bool = False, offset: int = 0, invert: bool = False, mask: Image = None) -> Image
+
+

Convolves the image with an edge-detecting Laplacian kernel.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramsize: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
sharpen: If true, this method sharpens the image instead of producing an unthresholded edge-detection image. Increase the kernel size to improve image clarity. default is false.
mul: This parameter is used to multiply the convolved pixel results. default is auto.
add: This parameter is the value to be added to each convolution pixel result. default is 0.0.
threshold: If true, adaptive thresholding is enabled, which sets pixels to white or black based on each pixel's brightness relative to the brightness of the kernel of pixels around it.
default is false.
offset: The larger the offset value, the more lower-brightness pixels in the original image will be set to white. default is 0.
invert: If true, the image will be inverted before the operation. default is false.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *laplacian(int size, bool sharpen = false, float mul = -1, float add = 0.0, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr)
+
+
+

bilateral

+ +
def bilateral(self, size: int, color_sigma: float = 0.1, space_sigma: float = 1, threshold: bool = False, offset: int = 0, invert: bool = False, mask: Image = None) -> Image
+
+

Convolves the image by a bilateral filter.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramsize: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
color_sigma: Controls how closely colors are matched using the bilateral filter. default is 0.1.
space_sigma: Controls how closely pixels space-wise are blurred with each other. default is 1.
threshold: If true, adaptive thresholding is enabled, which sets pixels to white or black based on each pixel's brightness relative to the brightness of the kernel of pixels around it.
default is false.
offset: The larger the offset value, the more lower-brightness pixels in the original image will be set to white. default is 0.
invert: If true, the image will be inverted before the operation. default is false.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *bilateral(int size, double color_sigma = 0.1, double space_sigma = 1, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr)
+
+
+

linpolar

+ +
def linpolar(self, reverse: bool = False) -> Image
+
+

Re-projects an image from Cartesian coordinates to linear polar coordinates.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramreverse: If true, the image will be reverse polar transformed. default is false.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *linpolar(bool reverse = false)
+
+
+

logpolar

+ +
def logpolar(self, reverse: bool = False) -> Image
+
+

Re-projects an image from Cartesian coordinates to log polar coordinates.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramreverse: If true, the image will be reverse polar transformed. default is false.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *logpolar(bool reverse = false)
+
+
+

lens_corr

+ +
def lens_corr(self, strength: float = 1.8, zoom: float = 1.0, x_corr: float = 0.0, y_corr: float = 0.0) -> Image
+
+

Performs a lens correction operation on the image. TODO: support in the future

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramstrength: The strength of the lens correction. default is 1.8.
zoom: The zoom of the lens correction. default is 1.0.
x_corr: The x correction of the lens correction. default is 0.0.
y_corr: The y correction of the lens correction. default is 0.0.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *lens_corr(double strength = 1.8, double zoom = 1.0, double x_corr = 0.0, double y_corr = 0.0)
+
+
+

rotation_corr

+ +
def rotation_corr(self, x_rotation: float = 0.0, y_rotation: float = 0.0, z_rotation: float = 0.0, x_translation: float = 0.0, y_translation: float = 0.0, zoom: float = 1.0, fov: float = 60.0, corners: list[float] = []) -> Image
+
+

Performs a rotation correction operation on the image. TODO: support in the future

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx_rotation: The x rotation of the rotation correction. default is 0.0.
y_rotation: The y rotation of the rotation correction. default is 0.0.
z_rotation: The z rotation of the rotation correction. default is 0.0.
x_translation: The x translation of the rotation correction. default is 0.0.
y_translation: The y translation of the rotation correction. default is 0.0.
zoom: The zoom of the rotation correction. default is 1.0.
fov: The fov of the rotation correction. default is 60.0.
corners: The corners of the rotation correction. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *rotation_corr(double x_rotation = 0.0, double y_rotation = 0.0, double z_rotation = 0.0, double x_translation = 0.0, double y_translation = 0.0, double zoom = 1.0, double fov = 60.0, std::vector<float> corners = std::vector<float>())
+
+
+

get_histogram

+ +
def get_histogram(self, thresholds: list[list[int]] = [], invert: bool = False, roi: list[int] = [], bins: int = -1, l_bins: int = 100, a_bins: int = 256, b_bins: int = 256, difference: Image = None) -> Histogram
+
+

Computes the normalized histogram on all color channels and returns an image::Histogram object.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
noteFor GRAYSCALE format, Lmin and Lmax range is [0, 255]. For RGB888 format, Lmin and Lmax range is [0, 100].
paramthresholds: You can define multiple thresholds.
For GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.
For RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.
Where the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.
invert: If true, the thresholds will be inverted before the operation. default is false.
roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
default is None, means whole image.
bins: The number of bins to use for the histogram.
In GRAYSCALE format, setting range is [2, 256], default is 100.
In RGB888 format, setting range is [2, 100], default is 100.
l_bins: The number of bins to use for the l channel of the histogram. Only valid in RGB888 format.
If an invalid value is set, bins will be used instead. The setting range is [2, 100], default is 100.
a_bins: The number of bins to use for the a channel of the histogram.
Only valid in RGB888 format.The setting range is [2, 256], default is 256.
b_bins: The number of bins to use for the b channel of the histogram.
Only valid in RGB888 format. The setting range is [2, 256], default is 256.
difference: difference may be set to an image object to cause this method to operate on the difference image between the current image and the difference image object.
default is None.
returnReturns image::Histogram object
staticFalse
+
+

C++ defination code:

+ +
image::Histogram get_histogram(std::vector<std::vector<int>> thresholds = std::vector<std::vector<int>>(), bool invert = false, std::vector<int> roi = std::vector<int>(), int bins = -1, int l_bins = 100, int a_bins = 256, int b_bins = 256, image::Image *difference = nullptr)
+
+
+
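Example: a minimal sketch assuming `img` is an RGB888 maix.image.Image; the roi values are placeholders:

hist = img.get_histogram(bins=50, roi=[0, 0, 100, 100])   # histogram of the top-left 100x100 region
print(hist)   # further accessors are provided by the image.Histogram class documented elsewhere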

get_statistics

+ +
def get_statistics(self, thresholds: list[list[int]] = [], invert: bool = False, roi: list[int] = [], bins: int = -1, l_bins: int = -1, a_bins: int = -1, b_bins: int = -1, difference: Image = None) -> Statistics
+
+

Gets the statistics of the image. TODO: support in the future

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
noteFor GRAYSCALE format, Lmin and Lmax range is [0, 255]. For RGB888 format, Lmin and Lmax range is [0, 100].
paramthresholds: You can define multiple thresholds.
For GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.
For RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.
Where the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.
invert: If true, the image will be inverted before the operation. default is false.
roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
default is None, means whole image.
bins: The number of bins to use for the statistics. default is -1.
l_bins: The number of bins to use for the l channel of the statistics. default is -1.
a_bins: The number of bins to use for the a channel of the statistics. default is -1.
b_bins: The number of bins to use for the b channel of the statistics. default is -1.
difference: The difference image to use for the statistics. default is None.
returnReturns the statistics of the image
staticFalse
+
+

C++ defination code:

+ +
image::Statistics get_statistics(std::vector<std::vector<int>> thresholds = std::vector<std::vector<int>>(), bool invert = false, std::vector<int> roi = std::vector<int>(), int bins = -1, int l_bins = -1, int a_bins = -1, int b_bins = -1, image::Image *difference = nullptr)
+
+
+

get_regression

+ +
def get_regression(self, thresholds: list[list[int]] = [], invert: bool = False, roi: list[int] = [], x_stride: int = 2, y_stride: int = 1, area_threshold: int = 10, pixels_threshold: int = 10, robust: bool = False) -> list[Line]
+
+

Gets the regression of the image.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
noteFor GRAYSCALE format, Lmin and Lmax range is [0, 255]. For RGB888 format, Lmin and Lmax range is [0, 100].
paramthresholds: You can define multiple thresholds.
For GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.
For RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.
Where the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.
invert: If true, the image will be inverted before the operation. default is false.
roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
default is None, means whole image.
x_stride: The x stride to use for the regression. default is 2.
y_stride: The y stride to use for the regression. default is 1.
area_threshold: The area threshold to use for the regression. default is 10.
pixels_threshold: The pixels threshold to use for the regression. default is 10.
robust: If true, the regression will be robust. default is false.
returnReturns the regression of the image
staticFalse
+
+

C++ defination code:

+ +
std::vector<image::Line> get_regression(std::vector<std::vector<int>> thresholds = std::vector<std::vector<int>>(), bool invert = false, std::vector<int> roi = std::vector<int>(), int x_stride = 2, int y_stride = 1, int area_threshold = 10, int pixels_threshold = 10, bool robust = false)
+
+
+
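Example: a line-following style sketch assuming `img` is a GRAYSCALE maix.image.Image; the [0, 60] threshold is a placeholder that selects dark pixels:

lines = img.get_regression([[0, 60]], area_threshold=100, pixels_threshold=100, robust=True)
for l in lines:
    print(l)   # image.Line accessors (e.g. theta/rho) are assumed from the Line class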

save

+ +
def save(self, path: str, quality: int = 95) -> maix.err.Err
+
+

Save image to file

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampath: file path
quality: image quality, default is 95; supported for the jpeg and png formats
returnerror code, err::ERR_NONE is ok, other is error
staticFalse
+
+

C++ defination code:

+ +
err::Err save(const char *path, int quality = 95)
+
+
+
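Example: a minimal sketch assuming `img` is an existing maix.image.Image and the path is arbitrary:

from maix import err
e = img.save("/root/snapshot.jpg", quality=80)
if e != err.Err.ERR_NONE:
    print("save failed:", e)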

flood_fill

+ +
def flood_fill(self, x: int, y: int, seed_threshold: float = 0.05, floating_threshold: float = 0.05, color: Color = ..., invert: bool = False, clear_background: bool = False, mask: Image = None) -> Image
+
+

Flood fills a region of the image starting from location x, y.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx: The x coordinate of the seed point.
y: The y coordinate of the seed point.
seed_threshold: The seed_threshold value controls how different any pixel in the fill area may be from the original starting pixel. default is 0.05.
floating_threshold: The floating_threshold value controls how different any pixel in the fill area may be from any neighbor pixels. default is 0.05.
color: The color to fill the region with. default is white.
invert: If true, the image will be inverted before the operation. default is false.
clear_background: If true, the background will be cleared before the operation. default is false.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None. FIXME: the mask image works abnormally
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *flood_fill(int x, int y, float seed_threshold = 0.05, float floating_threshold = 0.05, image::Color color = image::COLOR_WHITE, bool invert = false, bool clear_background = false, image::Image *mask = nullptr)
+
+
+

erode

+ +
def erode(self, size: int, threshold: int = -1, mask: Image = None) -> Image
+
+

Erodes the image in place.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramsize: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
threshold: The number of pixels in the kernel that are not 0. If it is less than or equal to the threshold, set the center pixel to black. default is (kernel_size - 1).
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *erode(int size, int threshold = -1, image::Image *mask = nullptr)
+
+
+

dilate

+ +
def dilate(self, size: int, threshold: int = 0, mask: Image = None) -> Image
+
+

Dilates the image in place.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramsize: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
threshold: The number of pixels in the kernel that are not 0. If it is greater than or equal to the threshold, set the center pixel to white. default is 0.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *dilate(int size, int threshold = 0, image::Image *mask = nullptr)
+
+
+
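Example: a minimal sketch assuming `img` is a binary (black/white) maix.image.Image, e.g. produced by a thresholding filter; eroding then dilating removes small white speckles:

img.erode(1)    # 3x3 kernel, removes isolated white pixels
img.dilate(1)   # grows the remaining white regions back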

open

+ +
def open(self, size: int, threshold: int = 0, mask: Image = None) -> Image
+
+

Performs erosion and dilation on an image in order.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramsize: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
threshold: The threshold for erosion and dilation; the actual threshold for erosion is (kernel_size - 1 - threshold), and the actual threshold for dilation is threshold. default is 0.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *open(int size, int threshold = 0, image::Image *mask = nullptr)
+
+
+

close

+ +
def close(self, size: int, threshold: int = 0, mask: Image = None) -> Image
+
+

Performs dilation and erosion on an image in order.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramsize: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
threshold: The threshold for erosion and dilation; the actual threshold for erosion is (kernel_size - 1 - threshold), and the actual threshold for dilation is threshold. default is 0.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *close(int size, int threshold = 0, image::Image *mask = nullptr)
+
+
+

top_hat

+ +
def top_hat(self, size: int, threshold: int = 0, mask: Image = None) -> Image
+
+

Returns the image difference of the image and Image.open()’ed image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramsize: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
threshold: As the threshold for open method. default is 0.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *top_hat(int size, int threshold = 0, image::Image *mask = nullptr)
+
+
+

black_hat

+ +
def black_hat(self, size: int, threshold: int = 0, mask: Image = None) -> Image
+
+

Returns the image difference of the image and Image.close()’ed image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramsize: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
threshold: As the threshold for close method. default is 0.
mask: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
Only pixels set in the mask are modified. default is None.
returnReturns the image after the operation is completed.
staticFalse
+
+

C++ defination code:

+ +
image::Image *black_hat(int size, int threshold = 0, image::Image *mask = nullptr)
+
+
+

find_blobs

+ +
def find_blobs(self, thresholds: list[list[int]] = [], invert: bool = False, roi: list[int] = [], x_stride: int = 2, y_stride: int = 1, area_threshold: int = 10, pixels_threshold: int = 10, merge: bool = False, margin: int = 0, x_hist_bins_max: int = 0, y_hist_bins_max: int = 0) -> list[Blob]
+
+

Finds all blobs in the image and returns a list of image.Blob objects which describe each blob.\nPlease see the image.Blob object for more information.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
noteFor GRAYSCALE format, Lmin and Lmax range is [0, 255]. For RGB888 format, Lmin and Lmax range is [0, 100].
paramthresholds: You can define multiple thresholds.
For GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.
For RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.
Where the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.
invert: if true, will invert thresholds before find blobs, default is false
roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
default is None, means whole image.
x_stride: the number of x pixels to skip when searching for blobs. default is 2
y_stride: the number of y pixels to skip when searching for blobs. default is 1
area_threshold: if the blob area is smaller than area_threshold, the blob is not returned. default is 10
pixels_threshold: if the number of pixels in the blob is smaller than pixels_threshold, the blob is not returned. default is 10.
when x_stride and y_stride are equal to 1, pixels_threshold is equivalent to area_threshold
merge: if true, merges all blobs not filtered out whose bounding rectangles intersect each other. default is false
margin: margin can be used to increase or decrease the size of the bounding rectangles for blobs during the intersection test.
For example, with a margin of 1, blobs whose bounding rectangles are 1 pixel apart will be merged. default is 0
x_hist_bins_max: if set to non-zero, populates a histogram buffer in each blob object with an x_histogram projection of all columns in the object. This value then sets the number of bins for that projection.
y_hist_bins_max: if set to non-zero, populates a histogram buffer in each blob object with a y_histogram projection of all rows in the object. This value then sets the number of bins for that projection.
returnReturns the found blobs as a list (blob1, blob2, ...); you can use image.Blob class methods for further operations.
staticFalse
+
+

C++ defination code:

+ +
std::vector<image::Blob> find_blobs(std::vector<std::vector<int>> thresholds = std::vector<std::vector<int>>(), bool invert = false, std::vector<int> roi = std::vector<int>(), int x_stride = 2, int y_stride = 1, int area_threshold = 10, int pixels_threshold = 10, bool merge = false, int margin = 0, int x_hist_bins_max = 0, int y_hist_bins_max = 0)
+
+
+
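Example: a minimal sketch assuming `img` is an RGB888 maix.image.Image; the LAB threshold is a placeholder for "red-ish" pixels and must be tuned for a real scene:

thresholds = [[0, 80, 40, 80, 10, 80]]   # [Lmin, Lmax, Amin, Amax, Bmin, Bmax]
blobs = img.find_blobs(thresholds, pixels_threshold=500, merge=True)
for b in blobs:
    print(b)   # image.Blob accessors (x/y/w/h etc.) are assumed from the Blob class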

find_lines

+ +
def find_lines(self, roi: list[int] = [], x_stride: int = 2, y_stride: int = 1, threshold: float = 1000, theta_margin: float = 25, rho_margin: float = 25) -> list[Line]
+
+

Find lines in image

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramroi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
default is None, means whole image.
x_stride: x stride is the number of x pixels to skip when doing the hough transform. default is 2
y_stride: y_stride is the number of y pixels to skip when doing the hough transform. default is 1
threshold: controls which lines are detected from the hough transform. Only lines with a magnitude greater than or equal to threshold are returned.
The right value of threshold for your application is image dependent. default is 1000.
theta_margin: theta_margin controls the merging of detected lines. default is 25.
rho_margin: rho_margin controls the merging of detected lines. default is 25.
returnReturns the found lines as a list (line1, line2, ...); you can use image.Line class methods for further operations
staticFalse
+
+

C++ defination code:

+ +
std::vector<image::Line> find_lines(std::vector<int> roi = std::vector<int>(), int x_stride = 2, int y_stride = 1, double threshold = 1000, double theta_margin = 25, double rho_margin = 25)
+
+
+
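Example: a minimal sketch assuming `img` is an existing maix.image.Image; the threshold is image dependent and 1000 is just the default:

lines = img.find_lines(threshold=1000, theta_margin=25, rho_margin=25)
for l in lines:
    print(l)   # image.Line accessors are assumed from the Line class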

find_line_segments

+ +
def find_line_segments(self, roi: list[int] = [], merge_distance: int = 0, max_theta_difference: int = 15) -> list[Line]
+
+

Finds all line segments in the image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramroi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
default is None, means whole image.
merge_distance: The maximum distance between two lines to merge them. default is 0.
max_theta_difference: The maximum difference between two lines to merge them. default is 15.
returnReturns the found line segments as a list (line1, line2, ...); you can use image.Line class methods for further operations
staticFalse
+
+

C++ defination code:

+ +
std::vector<image::Line> find_line_segments(std::vector<int> roi = std::vector<int>(), int merge_distance = 0, int max_theta_difference = 15)
+
+
+

find_circles

+ +
def find_circles(self, roi: list[int] = [], x_stride: int = 2, y_stride: int = 1, threshold: int = 2000, x_margin: int = 10, y_margin: int = 10, r_margin: int = 10, r_min: int = 2, r_max: int = -1, r_step: int = 2) -> list[Circle]
+
+

Find circles in image

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramroi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
default is None, means whole image.
x_stride: x stride is the number of x pixels to skip when doing the hough transform. default is 2
y_stride: y_stride is the number of y pixels to skip when doing the hough transform. default is 1
threshold: threshold controls what circles are detected from the hough transform. Only circles with a magnitude greater than or equal to threshold are returned.
The right value of threshold for your application is image dependent.
x_margin: x_margin controls the merging of detected circles. Circles which are x_margin, y_margin, and r_margin pixels apart are merged. default is 10
y_margin: y_margin controls the merging of detected circles. Circles which are x_margin, y_margin, and r_margin pixels apart are merged. default is 10
r_margin: r_margin controls the merging of detected circles. Circles which are x_margin, y_margin, and r_margin pixels apart are merged. default is 10
r_min: r_min controls the minimum circle radius detected. Increase this to speed up the algorithm. default is 2
r_max: r_max controls the maximum circle radius detected. Decrease this to speed up the algorithm. default is min(roi.w / 2, roi.h / 2)
r_step: r_step controls how to step the radius detection by. default is 2.
returnReturns the found circles as a list (circle1, circle2, ...); you can use image.Circle class methods for further operations
staticFalse
+
+

C++ defination code:

+ +
std::vector<image::Circle> find_circles(std::vector<int> roi = std::vector<int>(), int x_stride = 2, int y_stride = 1, int threshold = 2000, int x_margin = 10, int y_margin = 10, int r_margin = 10, int r_min = 2, int r_max = -1, int r_step = 2)
+
+
+
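Example: a minimal sketch assuming `img` is an existing maix.image.Image; limiting r_min/r_max is one way to speed up the search:

circles = img.find_circles(threshold=2000, r_min=5, r_max=40, r_step=2)
for c in circles:
    print(c)   # image.Circle accessors (x/y/r etc.) are assumed from the Circle class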

find_rects

+ +
def find_rects(self, roi: list[int] = [], threshold: int = 10000) -> list[Rect]
+
+

Finds all rects in the image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramroi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
default is None, means whole image.
threshold: The threshold to use for the rects. default is 10000.
returnReturns the rects of the image
staticFalse
+
+

C++ defination code:

+ +
std::vector<image::Rect> find_rects(std::vector<int> roi = std::vector<int>(), int threshold = 10000)
+
+
+

find_qrcodes

+ +
def find_qrcodes(self, roi: list[int] = []) -> list[QRCode]
+
+

Finds all qrcodes in the image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramroi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
default is None, means whole image.
returnReturns the qrcodes of the image
staticFalse
+
+

C++ defination code:

+ +
std::vector<image::QRCode> find_qrcodes(std::vector<int> roi = std::vector<int>())
+
+
+
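Example: a minimal sketch assuming `img` is an image that contains a QR code:

qrcodes = img.find_qrcodes()
for qr in qrcodes:
    print(qr)   # image.QRCode accessors (e.g. payload/rect) are assumed from the QRCode class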

find_apriltags

+ +
def find_apriltags(self, roi: list[int] = [], families: ApriltagFamilies = ..., fx: float = -1, fy: float = -1, cx: int = -1, cy: int = -1) -> list[AprilTag]
+
+

Finds all apriltags in the image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramroi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
default is None, means whole image.
families: The families to use for the apriltags. default is TAG36H11.
fx: The camera X focal length in pixels, default is -1.
fy: The camera Y focal length in pixels, default is -1.
cx: The camera X center in pixels, default is image.width / 2.
cy: The camera Y center in pixels, default is image.height / 2.
returnReturns the apriltags of the image
staticFalse
+
+

C++ defination code:

+ +
std::vector<image::AprilTag> find_apriltags(std::vector<int> roi = std::vector<int>(), image::ApriltagFamilies families = image::ApriltagFamilies::TAG36H11, float fx = -1, float fy = -1, int cx = -1, int cy = -1)
+
+
+
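Example: a minimal sketch assuming `img` is an existing maix.image.Image; the Python enum path is assumed to follow the C++ definition above, and TAG36H11 is the default family:

from maix import image
tags = img.find_apriltags(families=image.ApriltagFamilies.TAG36H11)
for t in tags:
    print(t)   # image.AprilTag accessors (id, rect, rotation, ...) are assumed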

find_datamatrices

+ +
def find_datamatrices(self, roi: list[int] = [], effort: int = 200) -> list[DataMatrix]
+
+

Finds all datamatrices in the image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramroi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
default is None, means whole image.
effort: Controls how much time to spend trying to find data matrix matches. default is 200.
returnReturns the datamatrices of the image
staticFalse
+
+

C++ defination code:

+ +
std::vector<image::DataMatrix> find_datamatrices(std::vector<int> roi = std::vector<int>(), int effort = 200)
+
+
+

find_barcodes

+ +
def find_barcodes(self, roi: list[int] = []) -> list[BarCode]
+
+

Finds all barcodes in the image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramroi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
default is None, means whole image.
returnReturns the barcodes of the image
staticFalse
+
+

C++ defination code:

+ +
std::vector<image::BarCode> find_barcodes(std::vector<int> roi = std::vector<int>())
+
+
+

find_displacement

+ +
def find_displacement(self, template_image: Image, roi: list[int] = [], template_roi: list[int] = [], logpolar: bool = False) -> Displacement
+
+

Finds the displacement between the image and the template. TODO: support in the future\nnote: this method must be used on power-of-2 image sizes

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramtemplate_image: The template image.
roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
default is None, means whole image.
template_roi: The region-of-interest rectangle (x, y, w, h) to work in. If not specified, it is equal to the image rectangle.
logpolar: If true, it will instead find rotation and scale changes between the two images. default is false.
returnReturns the displacement of the image
staticFalse
+
+

C++ defination code:

+ +
image::Displacement find_displacement(image::Image &template_image, std::vector<int> roi = std::vector<int>(), std::vector<int> template_roi = std::vector<int>(), bool logpolar = false)
+
+
+

find_template

+ +
def find_template(self, template_image: Image, threshold: float, roi: list[int] = [], step: int = 2, search: TemplateMatch = ...) -> list[int]
+
+

Finds the template in the image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramtemplate_image: The template image.
threshold: A floating point number (0.0-1.0) where a higher threshold prevents false positives but lowers the detection rate, while a lower threshold does the opposite.
roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
default is None, means whole image. Only valid in SEARCH_EX mode.
step: The step size to use for the template. default is 2. Only valid in SEARCH_EX mode
search: The search method to use for the template. default is SEARCH_EX.
returnReturns a bounding box tuple (x, y, w, h) for the matching location otherwise None.
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> find_template(image::Image &template_image, float threshold, std::vector<int> roi = std::vector<int>(), int step = 2, image::TemplateMatch search = image::TemplateMatch::SEARCH_EX)
+
+
+

find_features

+ +
def find_features(self, cascade: int, threshold: float = 0.5, scale: float = 1.5, roi: list[int] = []) -> list[int]
+
+

Finds the features in the image. TODO: support in the future

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcascade: The cascade to use for the features. default is CASCADE_FRONTALFACE_ALT.
threshold: The threshold to use for the features. default is 0.5.
scale: The scale to use for the features. default is 1.5.
roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
default is None, means whole image.
returnReturns the features of the image
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> find_features(int cascade, float threshold = 0.5, float scale = 1.5, std::vector<int> roi = std::vector<int>())
+
+
+

find_lbp

+ +
def find_lbp(self, roi: list[int] = []) -> LBPKeyPoint
+
+

Finds the lbp in the image. TODO: support in the future.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramroi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
default is None, means whole image.
returnReturns the lbp of the image
staticFalse
+
+

C++ defination code:

+ +
image::LBPKeyPoint find_lbp(std::vector<int> roi = std::vector<int>())
+
+
+

find_keypoints

+ +
def find_keypoints(self, roi: list[int] = [], threshold: int = 20, normalized: bool = False, scale_factor: float = 1.5, max_keypoints: int = 100, corner_detector: CornerDetector = ...) -> ORBKeyPoint
+
+

Finds the keypoints in the image. TODO: support in the future.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramroi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
default is None, means whole image.
threshold: The threshold to use for the keypoints. default is 20.
normalized: If true, the image will be normalized before the operation. default is false.
scale_factor: The scale factor to use for the keypoints. default is 1.5.
max_keypoints: The maximum number of keypoints to use for the keypoints. default is 100.
corner_detector: The corner detector to use for the keypoints. default is CORNER_AGAST.
returnReturns the keypoints of the image
staticFalse
+
+

C++ defination code:

+ +
image::ORBKeyPoint find_keypoints(std::vector<int> roi = std::vector<int>(), int threshold = 20, bool normalized = false, float scale_factor = 1.5, int max_keypoints = 100, image::CornerDetector corner_detector = image::CornerDetector::CORNER_AGAST)
+
+
+

find_edges

+ +
def find_edges(self, edge_type: EdgeDetector, roi: list[int] = [], threshold: list[int] = [100, 200]) -> Image
+
+

Finds the edges in the image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramedge_type: The edge type to use for the edges. default is EDGE_CANNY.
roi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
default is None, means whole image.
threshold: The [low, high] thresholds to use for the edges. default is [100, 200].
returnReturns the edges of the image
staticFalse
+
+

C++ defination code:

+ +
image::Image* find_edges(image::EdgeDetector edge_type, std::vector<int> roi = std::vector<int>(), std::vector<int> threshold = std::vector<int>({100, 200}))
+
+
+

find_hog

+ +
def find_hog(self, roi: list[int] = [], size: int = 8) -> Image
+
+

Finds the hog in the image. TODO: support in the future

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramroi: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
default is None, means whole image.
size: The size to use for the hog. default is 8.
returnReturns the hog of the image
staticFalse
+
+

C++ defination code:

+ +
image::Image* find_hog(std::vector<int> roi = std::vector<int>(), int size = 8)
+
+
+

match_lbp_descriptor

+ +
def match_lbp_descriptor(self, desc1: LBPKeyPoint, desc2: LBPKeyPoint) -> int
+
+

Matches the lbp descriptor of the image. TODO: support in the future

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdesc1: The descriptor to use for the match.
desc2: The descriptor to use for the match.
returnReturns the match of the image
staticFalse
+
+

C++ defination code:

+ +
int match_lbp_descriptor(image::LBPKeyPoint &desc1, image::LBPKeyPoint &desc2)
+
+
+

match_orb_descriptor

+ +
def match_orb_descriptor(self, desc1: ORBKeyPoint, desc2: ORBKeyPoint, threshold: int = 95, filter_outliers: bool = False) -> KPTMatch
+
+

Matches the orb descriptor of the image. TODO: support in the future

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdesc1: The descriptor to use for the match.
desc2: The descriptor to use for the match.
threshold: The threshold to use for the match. default is 95.
filter_outliers: If true, the image will be filter_outliers before the operation. default is false.
returnReturns the match of the image
staticFalse
+
+

C++ defination code:

+ +
image::KPTMatch match_orb_descriptor(image::ORBKeyPoint &desc1, image::ORBKeyPoint &desc2, int threshold = 95, bool filter_outliers = false)
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/network.html b/maixpy/api/maix/network.html new file mode 100644 index 00000000..cd8f3837 --- /dev/null +++ b/maixpy/api/maix/network.html @@ -0,0 +1,366 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.network - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.network

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.network module

+
+

You can use maix.network to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+ + + + + + + + + + + + + +
modulebrief
wifimaix.network.wifi module
+

Enum

+

Variable

+

Function

+

have_network

+ +
def have_network() -> bool
+
+

Return whether the device has a network connection (WiFi/Eth etc.)

+ + + + + + + + + + + + + +
itemdescription
returnTrue if have network, else False.
+
+

C++ defination code:

+ +
bool have_network()
+
+
+
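Example: a minimal sketch checking connectivity before doing any online work:

from maix import network

if network.have_network():
    print("network is available")
else:
    print("no WiFi/Ethernet connection")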

Class

+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/network/wifi.html b/maixpy/api/maix/network/wifi.html new file mode 100644 index 00000000..e4c6f1c0 --- /dev/null +++ b/maixpy/api/maix/network/wifi.html @@ -0,0 +1,1051 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.network.wifi - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.network.wifi

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.network.wifi module

+
+

You can use maix.network.wifi to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

list_devices

+ +
def list_devices() -> list[str]
+
+

List WiFi interfaces

+ + + + + + + + + + + + + +
itemdescription
returnWiFi interface list, string type
+
+

C++ defination code:

+ +
std::vector<std::string> list_devices()
+
+
+

Class

+

AP_Info

+

WiFi AP info

+
+

C++ defination code:

+ +
class AP_Info
+
+
+

ssid

+

WiFi AP info SSID

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<uint8_t> ssid
+
+
+

bssid

+

WiFi AP info BSSID

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::string bssid
+
+
+

security

+

WiFi AP info security

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::string security
+
+
+

channel

+

WiFi AP info channel

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int channel
+
+
+

frequency

+

WiFi AP info frequency

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int frequency
+
+
+

rssi

+

WiFi AP info rssi

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int rssi
+
+
+

ssid_str

+ +
def ssid_str(self) -> str
+
+

WiFi AP info ssid_str

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
std::string ssid_str()
+
+
+

Wifi

+

Wifi class

+
+

C++ defination code:

+ +
class Wifi
+
+
+

__init__

+ +
def __init__(self, iface: str = 'wlan0') -> None
+
+

Wifi class

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramiface: wifi interface name, default is wlan0
staticFalse
+
+

C++ defination code:

+ +
Wifi(std::string iface = "wlan0")
+
+
+

get_ip

+ +
def get_ip(self) -> str
+
+

Get current WiFi ip

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnip, string type, if network not connected, will return empty string.
staticFalse
+
+

C++ defination code:

+ +
std::string get_ip()
+
+
+

get_mac

+ +
def get_mac(self) -> str
+
+

Get current WiFi MAC address

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnMAC address, string type.
staticFalse
+
+

C++ defination code:

+ +
std::string get_mac()
+
+
+

get_ssid

+ +
def get_ssid(self, from_cache: bool = True) -> str
+
+

Get current WiFi SSID

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramfrom_cache: if true, will not read config from file, direct use ssid in cache.
attention, first time call this method will auto matically read config from file, and if call connect method will set cache.
returnSSID, string type.
staticFalse
+
+

C++ defination code:

+ +
std::string get_ssid(bool from_cache = true)
+
+
+

get_gateway

+ +
def get_gateway(self) -> str
+
+

Get current WiFi gateway IP

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returngateway IP, string type; if the network is not connected, an empty string is returned.
staticFalse
+
+

C++ defination code:

+ +
std::string get_gateway()
+
+
+

start_scan

+ +
def start_scan(self) -> maix.err.Err
+
+

Start scanning for surrounding WiFi APs in the background.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnIf success, return err.Err.ERR_NONE, else means failed.
staticFalse
+
+

C++ defination code:

+ +
err::Err start_scan()
+
+
+

get_scan_result

+ +
def get_scan_result(self) -> list[AP_Info]
+
+

Get WiFi scan AP info.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnwifi.AP_Info list.
staticFalse
+
+

C++ defination code:

+ +
std::vector<network::wifi::AP_Info> get_scan_result()
+
+
+
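Example: a minimal scan sketch; the 3 second wait is an arbitrary delay for the background scan, and "wlan0" is the default interface:

import time
from maix import network, err

w = network.wifi.Wifi()
if w.start_scan() == err.Err.ERR_NONE:
    time.sleep(3)                   # let the background scan collect some APs
    for ap in w.get_scan_result():  # list of wifi.AP_Info
        print(ap.ssid_str(), ap.channel, ap.rssi)
    w.stop_scan()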

stop_scan

+ +
def stop_scan(self) -> None
+
+

Stop scanning WiFi AP info.

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void stop_scan()
+
+
+

connect

+ +
def connect(self, ssid: str, password: str, wait: bool = True, timeout: int = 60) -> maix.err.Err
+
+

Connect to WiFi AP.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramssid: SSID of AP
password: password of AP, if no password, leave it empty.
wait: wait until an IP is obtained, the connection fails, or the timeout is reached.
timeout: connection timeout, in seconds.
returnIf success, return err.Err.ERR_NONE, else means failed.
staticFalse
+
+

C++ defination code:

+ +
err::Err connect(const std::string &ssid, const std::string &password, bool wait = true, int timeout = 60)
+
+
+
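Example: a minimal sketch; "my_ssid" and "my_password" are placeholders:

from maix import network, err

w = network.wifi.Wifi()   # "wlan0" by default
e = w.connect("my_ssid", "my_password", wait=True, timeout=60)
if e == err.Err.ERR_NONE and w.is_connected():
    print("ip:", w.get_ip(), "gateway:", w.get_gateway())
else:
    print("connect failed:", e)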

disconnect

+ +
def disconnect(self) -> maix.err.Err
+
+

Disconnect from WiFi AP.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnIf success, return err.Err.ERR_NONE, else means failed.
staticFalse
+
+

C++ defination code:

+ +
err::Err disconnect()
+
+
+

is_connected

+ +
def is_connected(self) -> bool
+
+

See if WiFi is connected to AP.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnIf connected return true, else false.
staticFalse
+
+

C++ defination code:

+ +
bool is_connected()
+
+
+

start_ap

+ +
def start_ap(self, ssid: str, password: str, mode: str = 'g', channel: int = 0, ip: str = '192.168.66.1', netmask: str = '255.255.255.0', hidden: bool = False) -> maix.err.Err
+
+

Start WiFi AP.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramssid: SSID of AP.
password: password of AP, if no password, leave it empty.
ip: ip address of the AP, default is 192.168.66.1; an empty string means one is auto generated according to hardware.
netmask: netmask, default 255.255.255.0; currently only 255.255.255.0 is supported.
mode: WiFi mode, default g (IEEE 802.11g, 2.4 GHz); a = IEEE 802.11a (5 GHz), b = IEEE 802.11b (2.4 GHz).
channel: WiFi channel number, 0 means auto select. MaixCAM does not support auto select and will default to channel 1.
hidden: hidden SSID or not.
returnIf success, return err.Err.ERR_NONE, else means failed.
staticFalse
+
+

C++ defination code:

+ +
err::Err start_ap(const std::string &ssid, const std::string &password,
+                          std::string mode = "g", int channel = 0,
+                          const std::string &ip = "192.168.66.1", const std::string &netmask = "255.255.255.0",
+                          bool hidden = false)
+
+
+

stop_ap

+ +
def stop_ap(self) -> maix.err.Err
+
+

Stop WiFi AP.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnIf success, return err.Err.ERR_NONE, else means failed.
staticFalse
+
+

C++ defination code:

+ +
err::Err stop_ap()
+
+
+

is_ap_mode

+ +
def is_ap_mode(self) -> bool
+
+

Whether WiFi is AP mode

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnTrue if AP mode now, or False.
staticFalse
+
+

C++ defination code:

+ +
bool is_ap_mode()
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/nn.html b/maixpy/api/maix/nn.html new file mode 100644 index 00000000..875f5ebf --- /dev/null +++ b/maixpy/api/maix/nn.html @@ -0,0 +1,9008 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.nn - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.nn

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.nn module

+
+

You can use maix.nn to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+ + + + + + + + + + + + + +
modulebrief
Fmaix.nn.F module
+

Enum

+

SpeechDevice

+

speech device

+ + + + + + + + + + + + + +
itemdescribe
valuesDEVICE_NONE:
DEVICE_PCM:
DEVICE_MIC:
DEVICE_WAV:
+
+

C++ defination code:

+ +
enum SpeechDevice {
+    DEVICE_NONE = -1,
+    DEVICE_PCM,
+    DEVICE_MIC,
+    DEVICE_WAV,
+}
+
+
+

SpeechDecoder

+

speech decoder type

+ + + + + + + + + + + + + +
itemdescribe
valuesDECODER_RAW:
DECODER_DIG:
DECODER_LVCSR:
DECODER_KWS:
DECODER_ALL:
+
+

C++ defination code:

+ +
enum SpeechDecoder {
+    DECODER_RAW = 1,
+    DECODER_DIG = 2,
+    DECODER_LVCSR = 4,
+    DECODER_KWS = 8,
+    DECODER_ALL = 65535,
+}
+
+
+

Variable

+

Function

+

Class

+

NanoTrack

+

NanoTrack class

+
+

C++ defination code:

+ +
class NanoTrack
+
+
+

__init__

+ +
def __init__(self, model: str = '') -> None
+
+

Constructor of NanoTrack class

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: model path, default empty, you can load model later by load function.
throwIf model arg is not empty and load failed, will throw err::Exception.
staticFalse
+
+

C++ defination code:

+ +
NanoTrack(const string &model = "")
+
+
+

load

+ +
def load(self, model: str) -> maix.err.Err
+
+

Load model from file

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: Model path want to load
returnerr::Err
staticFalse
+
+

C++ defination code:

+ +
err::Err load(const string &model)
+
+
+

init

+ +
def init(self, img: maix.image.Image, x: int, y: int, w: int, h: int) -> None
+
+

Init tracker: give the tracker the first target image and the target position.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: Image want to detect, target should be in this image.
x: the target position left top coordinate x.
y: the target position left top coordinate y.
w: the target width.
h: the target height.
throwIf image format not match model input format, will throw err::Exception.
staticFalse
+
+

C++ defination code:

+ +
void init(image::Image &img, int x, int y, int w, int h)
+
+
+

track

+ +
def track(self, img: maix.image.Image, threshold: float = 0.9) -> ...
+
+

Track the object according to the last object position and the target feature learned by the init function.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: image in which to detect and track the object, can be any resolution; before detection it will crop an area according to the target's last position.
threshold: if score < threshold, the new detection is considered invalid, but it is still returned; default 0.9.
returnobject position and score, and the detect area in the points' first 4 elements (x, y, w, h, center_x, center_y, input_size, target_size)
staticFalse
+
+

C++ defination code:

+ +
nn::Object track(image::Image &img, float threshold = 0.9)
+
+
+
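As a concrete illustration of the init/track flow above, here is a minimal MaixPy tracking loop; the model path, camera resolution and the initial target box are placeholder assumptions:

from maix import camera, display, image, nn

tracker = nn.NanoTrack("/root/models/nanotrack.mud")   # placeholder model path
cam = camera.Camera(320, 240)
disp = display.Display()

img = cam.read()
tracker.init(img, 100, 80, 60, 60)            # first frame plus target box (x, y, w, h)

while True:
    img = cam.read()
    obj = tracker.track(img, threshold=0.9)   # nn.Object with position and score
    img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
    disp.show(img)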

input_size

+ +
def input_size(self) -> maix.image.Size
+
+

Get model input size

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size
staticFalse
+
+

C++ defination code:

+ +
image::Size input_size()
+
+
+

input_width

+ +
def input_width(self) -> int
+
+

Get model input width

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of width
staticFalse
+
+

C++ defination code:

+ +
int input_width()
+
+
+

input_height

+ +
def input_height(self) -> int
+
+

Get model input height

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of height
staticFalse
+
+

C++ defination code:

+ +
int input_height()
+
+
+

input_format

+ +
def input_format(self) -> maix.image.Format
+
+

Get input image format

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returninput image format, image::Format type.
staticFalse
+
+

C++ defination code:

+ +
image::Format input_format()
+
+
+

mean

+

Get mean value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> mean
+
+
+

scale

+

Get scale value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> scale
+
+
+

OCR_Box

+

Object for OCR detect box

+
+

C++ defination code:

+ +
class OCR_Box
+
+
+

__init__

+ +
def __init__(self, x1: int = 0, y1: int = 0, x2: int = 0, y2: int = 0, x3: int = 0, y3: int = 0, x4: int = 0, y4: int = 0) -> None
+
+

OCR_Box constructor

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
OCR_Box(int x1 = 0, int y1 = 0, int x2 = 0, int y2 = 0, int x3 = 0, int y3 = 0, int x4 = 0, int y4 = 0)
+
+
+

x1

+

left top point of box

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int x1
+
+
+

y1

+

left top point of box

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int y1
+
+
+

x2

+

right top point of box

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int x2
+
+
+

y2

+

right top point of box

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int y2
+
+
+

x3

+

right bottom point of box

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int x3
+
+
+

y3

+

right bottom point of box

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int y3
+
+
+

x4

+

left bottom point of box

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int x4
+
+
+

y4

+

left bottom point of box

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int y4
+
+
+

to_list

+ +
def to_list(self) -> list[int]
+
+

convert box point to a list type.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnlist type, element is int type, value [x1, y1, x2, y2, x3, y3, x4, y4].
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> to_list()
+
+
+

OCR_Object

+

Object for OCR detect result

+
+

C++ defination code:

+ +
class OCR_Object
+
+
+

__init__

+ +
def __init__(self, box: OCR_Box, idx_list: list[int], char_list: list[str], score: float = 0, char_pos: list[int] = []) -> None
+
+

Constructor of Object for OCR detect result

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramscore: score
staticFalse
+
+

C++ defination code:

+ +
OCR_Object(const nn::OCR_Box &box, const std::vector<int> &idx_list, const std::vector<std::string> &char_list, float score = 0, const std::vector<int> &char_pos = std::vector<int>())
+
+
+

box

+

OCR_Object box: a 4-point box, first point at the left-top, clockwise.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
nn::OCR_Box box
+
+
+

score

+

Object score

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
float score
+
+
+

idx_list

+

chars' idx list, element is int type.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<int> idx_list
+
+
+

char_pos

+

Chars' position relative to left

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<int> char_pos
+
+
+

char_str

+ +
def char_str(self) -> str
+
+

Get OCR_Object's characters, return a string type.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnAll characters in string type.
staticFalse
+
+

C++ defination code:

+ +
const std::string &char_str()
+
+
+

char_list

+ +
def char_list(self) -> list[str]
+
+

Get OCR_Object's characters, return a list type.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnAll characters in list type.
staticFalse
+
+

C++ defination code:

+ +
const std::vector<std::string> &char_list()
+
+
+

update_chars

+ +
def update_chars(self, char_list: list[str]) -> None
+
+

Set OCR_Object's characters

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramchar_list: All characters in list type.
staticFalse
+
+

C++ defination code:

+ +
void update_chars(const std::vector<std::string> &char_list)
+
+
+

__str__

+ +
def __str__(self) -> str
+
+

OCR_Object info to string

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnOCR_Object info string
staticFalse
+
+

C++ defination code:

+ +
std::string to_str()
+
+
+

OCR_Objects

+

OCR_Objects Class for detect result

+
+

C++ defination code:

+ +
class OCR_Objects
+
+
+

__init__

+ +
def __init__(self) -> None
+
+

Constructor of OCR_Objects class

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
OCR_Objects()
+
+
+

add

+ +
def add(self, box: OCR_Box, idx_list: list[int], char_list: list[str], score: float = 0, char_pos: list[int] = []) -> OCR_Object
+
+

Add object to objects

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
throwThrow exception if no memory
staticFalse
+
+

C++ defination code:

+ +
nn::OCR_Object &add(const nn::OCR_Box &box, const std::vector<int> &idx_list, const std::vector<std::string> &char_list, float score = 0, const std::vector<int> &char_pos = std::vector<int>())
+
+
+

remove

+ +
def remove(self, idx: int) -> maix.err.Err
+
+

Remove object from objects

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
err::Err remove(int idx)
+
+
+

at

+ +
def at(self, idx: int) -> OCR_Object
+
+

Get object item

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
nn::OCR_Object &at(int idx)
+
+
+

__item__

+ +
def __item__(self, idx: int) -> OCR_Object
+
+

Get object item

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
nn::OCR_Object &operator[](int idx)
+
+
+

__len__

+ +
def __len__(self) -> int
+
+

Get size

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
size_t size()
+
+
+

__iter__

+ +
def __iter__(self) -> typing.Iterator
+
+

Begin

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
std::vector<OCR_Object*>::iterator begin()
+
+
+
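Since OCR_Objects supports len/index/iteration as listed above, results can be consumed like a normal Python sequence. A short sketch, assuming objs is an nn.OCR_Objects instance returned by an OCR detector elsewhere in maix.nn:

# `objs` is assumed to be an nn.OCR_Objects produced elsewhere (e.g. by an OCR model's detect call)
for obj in objs:                                  # yields nn.OCR_Object items
    print("text :", obj.char_str())               # recognized characters as one string
    print("score:", obj.score)
    x1, y1, x2, y2, x3, y3, x4, y4 = obj.box.to_list()   # 4 corner points, left-top first, clockwise
print("total objects:", len(objs))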

Speech

+

Speech

+
+

C++ defination code:

+ +
class Speech
+
+
+

__init__

+ +
def __init__(self, model: str = '') -> None
+
+

Construct a new Speech object

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: model path, default empty, you can load model later by load function.
throwIf model arg is not empty and load failed, will throw err::Exception.
staticFalse
+
+

C++ defination code:

+ +
Speech(const string &model = "")
+
+
+

__init__ (overload 1)

+

Construct a new Speech object

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: model path, default empty, you can load model later by load function.
throwIf model arg is not empty and load failed, will throw err::Exception.
staticFalse
+
+

C++ defination code:

+ +
Speech(const string &model = "")
+
+
+

load

+ +
def load(self, model: str) -> maix.err.Err
+
+

Load model from file

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: Model path want to load
returnerr::Err
staticFalse
+
+

C++ defination code:

+ +
err::Err load(const string &model)
+
+
+

load (overload 1)

+

Load model from file

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: Model path want to load
returnerr::Err
staticFalse
+
+

C++ defination code:

+ +
err::Err load(const string &model)
+
+
+

init

+ +
def init(self, dev_type: SpeechDevice, device_name: str) -> maix.err.Err
+
+

Init the ASR library and select the type and name of the audio device.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdev_type: device type want to detect, can choose between WAV, PCM, or MIC.
device_name: device name want to detect, can choose a WAV file, a PCM file, or a MIC device name.
throw1. If the acoustic model (AM) is not loaded, will throw err::ERR_NOT_IMPL.
2. If device is not supported, will throw err::ERR_NOT_IMPL.
returnerr::Err type, if init success, return err::ERR_NONE
staticFalse
+
+

C++ defination code:

+ +
err::Err init(nn::SpeechDevice dev_type, const string &device_name)
+
+
+

init (overload 1)

+

Init the ASR library and select the type and name of the audio device.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdev_type: device type want to detect, can choose between WAV, PCM, or MIC.
device_name: device name want to detect, can choose a WAV file, a PCM file, or a MIC device name.
throw1. If the acoustic model (AM) is not loaded, will throw err::ERR_NOT_IMPL.
2. If device is not supported, will throw err::ERR_NOT_IMPL.
returnerr::Err type, if init success, return err::ERR_NONE
staticFalse
+
+

C++ defination code:

+ +
err::Err init(nn::SpeechDevice dev_type, const string &device_name)
+
+
+

devive

+ +
def devive(self, dev_type: SpeechDevice, device_name: str) -> maix.err.Err
+
+

Reset the device, usually used for PCM/WAV recognition, such as identifying the next WAV file.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdev_type: device type want to detect, can choose between WAV, PCM, or MIC.
device_name: device name want to detect, can choose a WAV file, a PCM file, or a MIC device name.
throwIf device is not supported, will throw err::ERR_NOT_IMPL.
returnerr::Err type, if init success, return err::ERR_NONE
staticFalse
+
+

C++ defination code:

+ +
err::Err devive(nn::SpeechDevice dev_type, const string &device_name)
+
+
+

devive (overload 1)

+

Reset the device, usually used for PCM/WAV recognition, such as identifying the next WAV file.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdev_type: device type want to detect, can choose between WAV, PCM, or MIC.
device_name: device name want to detect, can choose a WAV file, a PCM file, or a MIC device name.
throwIf device is not supported, will throw err::ERR_NOT_IMPL.
returnerr::Err type, if init success, return err::ERR_NONE
staticFalse
+
+

C++ defination code:

+ +
err::Err devive(nn::SpeechDevice dev_type, const string &device_name)
+
+
+

deinit

+ +
def deinit(self) -> None
+
+

Deinit the ASR library.

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void deinit()
+
+
+

deinit (overload 1)

+

Deinit the ASR library.

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void deinit()
+
+
+

dec_deinit

+ +
def dec_deinit(self, decoder: SpeechDecoder) -> None
+
+

Deinit the decoder.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdecoder: decoder type want to deinit
can choose between DECODER_RAW, DECODER_DIG, DECODER_LVCSR, DECODER_KWS or DECODER_ALL.
throwIf device is not supported, will throw err::ERR_NOT_IMPL.
staticFalse
+
+

C++ defination code:

+ +
void dec_deinit(nn::SpeechDecoder decoder)
+
+
+

dec_deinit (overload 1)

+

Deinit the decoder.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdecoder: decoder type want to deinit
can choose between DECODER_RAW, DECODER_DIG, DECODER_LVCSR, DECODER_KWS or DECODER_ALL.
throwIf device is not supported, will throw err::ERR_NOT_IMPL.
staticFalse
+
+

C++ defination code:

+ +
void dec_deinit(nn::SpeechDecoder decoder)
+
+
+

raw

+ +
def raw(self, callback: typing.Callable[[list[pnyp_t], int], None]) -> maix.err.Err
+
+

Init raw decoder, it will output the prediction results of the original AM.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcallback: raw decoder user callback.
returnerr::Err type, if init success, return err::ERR_NONE
staticFalse
+
+

C++ defination code:

+ +
err::Err raw(std::function<void(std::vector<pnyp_t>, int)> callback)
+
+
+

raw (overload 1)

+

Get raw decoder status

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnbool, raw decoder status
staticFalse
+
+

C++ defination code:

+ +
bool raw()
+
+
+

raw (overload 2)

+

Init raw decoder, it will output the prediction results of the original AM.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcallback: raw decoder user callback.
returnerr::Err type, if init success, return err::ERR_NONE
staticFalse
+
+

C++ defination code:

+ +
err::Err raw(std::function<void(std::vector<pnyp_t>, int)> callback)
+
+
+

raw (overload 3)

+

Get raw decoder status

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnbool, raw decoder status
staticFalse
+
+

C++ defination code:

+ +
bool raw()
+
+
+

digit

+ +
def digit(self, blank: int, callback: typing.Callable[[str, int], None]) -> maix.err.Err
+
+

Init digit decoder, it will output the Chinese digit recognition results within the last 4 seconds.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramblank: if the idle (mute) duration exceeds this value, insert a '_' in the output result to indicate idle mute.
callback: digit decoder user callback.
returnerr::Err type, if init success, return err::ERR_NONE
staticFalse
+
+

C++ defination code:

+ +
err::Err digit(int blank, std::function<void(char*, int)> callback)
+
+
+

digit (overload 1)

+

Get digit decoder status

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnbool, digit decoder status
staticFalse
+
+

C++ defination code:

+ +
bool digit()
+
+
+

digit (overload 2)

+

Init digit decoder, it will output the Chinese digit recognition results within the last 4 seconds.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramblank: if the idle (mute) duration exceeds this value, insert a '_' in the output result to indicate idle mute.
callback: digit decoder user callback.
returnerr::Err type, if init success, return err::ERR_NONE
staticFalse
+
+

C++ defination code:

+ +
err::Err digit(int blank, std::function<void(char*, int)> callback)
+
+
+

digit (overload 3)

+

Get digit decoder status

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnbool, digit decoder status
staticFalse
+
+

C++ defination code:

+ +
bool digit()
+
+
+

kws

+ +
def kws(self, kw_tbl: list[str], kw_gate: list[float], callback: typing.Callable[[list[float], int], None], auto_similar: bool = True) -> maix.err.Err
+
+

Init kws decoder, it will output a probability list of all registered keywords in the latest frame; users can set their own thresholds for wake-up.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramkw_tbl: keyword list, each keyword written as pinyin syllables separated by spaces, for example: xiao3 ai4 tong2 xue2
kw_gate: keyword probability gate table, the number of entries should be the same as kw_tbl
auto_similar: whether to perform automatic homophone processing;
setting it to true will automatically calculate the probability by using pinyin with different tones as homophones
callback: kws decoder user callback.
returnerr::Err type, if init success, return err::ERR_NONE
staticFalse
+
+

C++ defination code:

+ +
err::Err kws(std::vector<string> kw_tbl, std::vector<float> kw_gate, std::function<void(std::vector<float>, int)> callback, bool auto_similar = true)
+
+
+

kws (overload 1)

+

Get kws decoder status

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnbool, kws decoder status
staticFalse
+
+

C++ defination code:

+ +
bool kws()
+
+
+

kws (overload 2)

+

Init kws decoder, it will output a probability list of all registered keywords in the latest frame; users can set their own thresholds for wake-up.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramkw_tbl: keyword list, each keyword written as pinyin syllables separated by spaces, for example: xiao3 ai4 tong2 xue2
kw_gate: keyword probability gate table, the number of entries should be the same as kw_tbl
auto_similar: whether to perform automatic homophone processing;
setting it to true will automatically calculate the probability by using pinyin with different tones as homophones
callback: kws decoder user callback.
returnerr::Err type, if init success, return err::ERR_NONE
staticFalse
+
+

C++ defination code:

+ +
err::Err kws(std::vector<string> kw_tbl, std::vector<float> kw_gate, std::function<void(std::vector<float>, int)> callback, bool auto_similar = true)
+
+
+

kws (overload 3)

+

Get kws decoder status

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnbool, kws decoder status
staticFalse
+
+

C++ defination code:

+ +
bool kws()
+
+
+

lvcsr

+ +
def lvcsr(self, sfst_name: str, sym_name: str, phones_txt: str, words_txt: str, callback: typing.Callable[[tuple[str, str], int], None], beam: float = 8, bg_prob: float = 10, scale: float = 0.5, mmap: bool = False) -> maix.err.Err
+
+

Init lvcsr decoder, it will output continuous speech recognition results (less than 1024 Chinese characters).

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramsfst_name: Sfst file path.
sym_name: Sym file path (output symbol table).
phones_txt: Path to phones.bin (pinyin table).
words_txt: Path to words.bin (dictionary table).
callback: lvcsr decoder user callback.
beam: The beam size for WFST search is set to 8 by default, and it is recommended to be between 3 and 9.
The larger the size, the larger the search space, and the more accurate but slower the search.
bg_prob: The absolute value of the natural logarithm of the default probability value for background pinyin
outside of BEAM-CNT is set to 10 by default.
scale: acoustics_cost = log(pny_prob) * scale.
mmap: use mmap to load the WFST decoding image,
If set to true, the beam should be less than 5.
returnerr::Err type, if init success, return err::ERR_NONE
staticFalse
+
+

C++ defination code:

+ +
err::Err lvcsr(const string &sfst_name, const string &sym_name,
+                       const string &phones_txt, const string &words_txt, 
+                       std::function<void(std::pair<char*, char*>, int)> callback,
+                       float beam = 8, float bg_prob = 10, float scale = 0.5, bool mmap = false)
+
+
+

lvcsr (overload 1)

+

Get lvcsr decoder status

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnbool, lvcsr decoder status
staticFalse
+
+

C++ defination code:

+ +
bool lvcsr()
+
+
+

lvcsr (overload 2)

+

Init lvcsr decoder, it will output continuous speech recognition results (less than 1024 Chinese characters).

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramsfst_name: Sfst file path.
sym_name: Sym file path (output symbol table).
phones_txt: Path to phones.bin (pinyin table).
words_txt: Path to words.bin (dictionary table).
callback: lvcsr decoder user callback.
beam: The beam size for WFST search is set to 8 by default, and it is recommended to be between 3 and 9.
The larger the size, the larger the search space, and the more accurate but slower the search.
bg_prob: The absolute value of the natural logarithm of the default probability value for background pinyin
outside of BEAM-CNT is set to 10 by default.
scale: acoustics_cost = log(pny_prob) * scale.
mmap: use mmap to load the WFST decoding image,
If set to true, the beam should be less than 5.
returnerr::Err type, if init success, return err::ERR_NONE
staticFalse
+
+

C++ defination code:

+ +
err::Err lvcsr(const string &sfst_name, const string &sym_name,
+                       const string &phones_txt, const string &words_txt, 
+                       std::function<void(std::pair<char*, char*>, int)> callback,
+                       float beam = 8, float bg_prob = 10, float scale = 0.5, bool mmap = false)
+
+
+

lvcsr (overload 3)

+

Get lvcsr decoder status

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnbool, lvcsr decoder status
staticFalse
+
+

C++ defination code:

+ +
bool lvcsr()
+
+
+

run

+ +
def run(self, frame: int) -> int
+
+

Run speech recognition. The user can run 1 frame at a time and do other processing after running, or it can run continuously within a thread and be stopped by an external thread.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramframe: The number of frames per run.
returnint type, return actual number of frames in the run.
staticFalse
+
+

C++ defination code:

+ +
int run(int frame)
+
+
+
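Combining init(), kws() and run() from this class, a keyword-spotting loop could look like the sketch below; the model path, microphone device name, keyword and wake-up threshold are placeholder assumptions:

from maix import nn

speech = nn.Speech("/root/models/am_3332_192.mud")      # placeholder model path
speech.init(nn.SpeechDevice.DEVICE_MIC, "hw:0,0")       # placeholder MIC device name

kw_tbl  = ["xiao3 ai4 tong2 xue2"]   # keywords as pinyin separated by spaces
kw_gate = [0.1]                      # one probability gate per keyword

def on_kws(probs, frames):
    # probs holds the latest probability of each registered keyword
    if probs[0] > 0.9:               # placeholder wake-up threshold
        print("keyword detected, p = %.2f" % probs[0])

speech.kws(kw_tbl, kw_gate, on_kws, auto_similar=True)

while True:
    frames = speech.run(1)           # process one frame per iteration
    if frames < 1:                   # no more audio (e.g. end of a WAV/PCM source)
        break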

run (overload 1)

+

Run speech recognition. The user can run 1 frame at a time and do other processing after running, or it can run continuously within a thread and be stopped by an external thread.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramframe: The number of frames per run.
returnint type, return actual number of frames in the run.
staticFalse
+
+

C++ defination code:

+ +
int run(int frame)
+
+
+

clear

+ +
def clear(self) -> None
+
+

Reset internal cache operation

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void clear()
+
+
+

clear (overload 1)

+

Reset internal cache operation

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void clear()
+
+
+

frame_time

+ +
def frame_time(self) -> int
+
+

Get the time of one frame.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnint type, return the time of one frame.
staticFalse
+
+

C++ defination code:

+ +
int frame_time()
+
+
+

frame_time (overload 1)

+

Get the time of one frame.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnint type, return the time of one frame.
staticFalse
+
+

C++ defination code:

+ +
int frame_time()
+
+
+

vocab

+ +
def vocab(self) -> tuple[str, int]
+
+

Get the acoustic model dictionary.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnstd::pair<char*, int> type, return the dictionary and length.
staticFalse
+
+

C++ defination code:

+ +
std::pair<char*, int> vocab()
+
+
+

vocab (overload 1)

+

Get the acoustic model dictionary.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnstd::pair<char*, int> type, return the dictionary and length.
staticFalse
+
+

C++ defination code:

+ +
std::pair<char*, int> vocab()
+
+
+

similar

+ +
def similar(self, pny: str, similar_pnys: list[str]) -> maix.err.Err
+
+

Manually register mute words; each pinyin can register up to 10 homophones. Please note that using this interface to register homophones will overwrite the homophone table automatically generated by the "automatic homophone processing" feature.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampny: the pinyin to register similar pinyins for.
similar_pnys: a list of similar pinyins (homophones) for this pinyin, up to 10.
returnerr::Err type, if init success, return err::ERR_NONE
staticFalse
+
+

C++ defination code:

+ +
err::Err similar(const string &pny, std::vector<std::string> similar_pnys)
+
+
+

similar (overload 1)

+

Manually register mute words; each pinyin can register up to 10 homophones. Please note that using this interface to register homophones will overwrite the homophone table automatically generated by the "automatic homophone processing" feature.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampny: the pinyin to register similar pinyins for.
similar_pnys: a list of similar pinyins (homophones) for this pinyin, up to 10.
returnerr::Err type, if init success, return err::ERR_NONE
staticFalse
+
+

C++ defination code:

+ +
err::Err similar(const string &pny, std::vector<std::string> similar_pnys)
+
+
+

mean

+

Get mean value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> mean
+
+
+

scale

+

Get scale value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> scale
+
+
+

dev_type

+ +
def dev_type(self) -> SpeechDevice
+
+

get device type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnnn::SpeechDevice type, see SpeechDevice of this module
staticFalse
+
+

C++ defination code:

+ +
nn::SpeechDevice dev_type()
+
+
+

dev_type (overload 1)

+

get device type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnnn::SpeechDevice type, see SpeechDevice of this module
staticFalse
+
+

C++ defination code:

+ +
nn::SpeechDevice dev_type()
+
+
+

YOLOv8

+

YOLOv8 class

+
+

C++ defination code:

+ +
class YOLOv8
+
+
+

__init__

+ +
def __init__(self, model: str = '', dual_buff: bool = True) -> None
+
+

Constructor of YOLOv8 class

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: model path, default empty, you can load model later by load function.
dual_buff: direction [in], prepare dual input/output buffers to accelerate forward, that is, while the NPU is forwarding we do not wait and instead prepare the next input buffer.
If you want to ensure that every forward outputs the result of its own input, please set this arg to false.
Default true to ensure speed.
throwIf model arg is not empty and load failed, will throw err::Exception.
staticFalse
+
+

C++ defination code:

+ +
YOLOv8(const string &model = "", bool dual_buff = true)
+
+
+

load

+ +
def load(self, model: str) -> maix.err.Err
+
+

Load model from file

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: Model path want to load
returnerr::Err
staticFalse
+
+

C++ defination code:

+ +
err::Err load(const string &model)
+
+
+

detect

+ +
def detect(self, img: maix.image.Image, conf_th: float = 0.5, iou_th: float = 0.45, fit: maix.image.Fit = ..., keypoint_th: float = 0.5) -> ...
+
+

Detect objects from image

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: Image want to detect, if image's size not match model input's, will auto resize with fit method.
conf_th: Confidence threshold, default 0.5.
iou_th: IoU threshold, default 0.45.
fit: Resize method, default image.Fit.FIT_CONTAIN.
keypoint_th: keypoint threshold, default 0.5, only for yolov8-pose model.
throwIf image format not match model input format, will throw err::Exception.
returnObject list. In C++, you should delete it after use.
If the model is yolov8-pose, the object's points have values, and a point value < 0 means that point is invalid (conf < keypoint_th).
staticFalse
+
+

C++ defination code:

+ +
nn::Objects *detect(image::Image &img, float conf_th = 0.5, float iou_th = 0.45, maix::image::Fit fit = maix::image::FIT_CONTAIN, float keypoint_th = 0.5)
+
+
+
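A typical detection loop built on detect() above might look like the minimal MaixPy sketch below; the model path is a placeholder, and labels/Objects are documented further down this page:

from maix import camera, display, image, nn

detector = nn.YOLOv8("/root/models/yolov8n.mud")    # placeholder model path
cam = camera.Camera(detector.input_width(), detector.input_height())
disp = display.Display()

while True:
    img = cam.read()
    objs = detector.detect(img, conf_th=0.5, iou_th=0.45)
    for obj in objs:                                # nn.Objects is iterable and yields nn.Object
        msg = "%s: %.2f" % (detector.labels[obj.class_id], obj.score)
        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
        img.draw_string(obj.x, obj.y, msg, color=image.COLOR_RED)
    disp.show(img)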

input_size

+ +
def input_size(self) -> maix.image.Size
+
+

Get model input size

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size
staticFalse
+
+

C++ defination code:

+ +
image::Size input_size()
+
+
+

input_width

+ +
def input_width(self) -> int
+
+

Get model input width

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of width
staticFalse
+
+

C++ defination code:

+ +
int input_width()
+
+
+

input_height

+ +
def input_height(self) -> int
+
+

Get model input height

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of height
staticFalse
+
+

C++ defination code:

+ +
int input_height()
+
+
+

input_format

+ +
def input_format(self) -> maix.image.Format
+
+

Get input image format

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returninput image format, image::Format type.
staticFalse
+
+

C++ defination code:

+ +
image::Format input_format()
+
+
+

draw_pose

+ +
def draw_pose(self, img: maix.image.Image, points: list[int], radius: int = 4, color: maix.image.Color = ..., body: bool = True) -> None
+
+

Draw pose keypoints on image

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: image object, maix.image.Image type.
points: keypoints, int list type, [x, y, x, y ...]
radius: radius of points.
color: color of points.
body: if points' length is 17*2 and body is true, will draw lines as a human body; if set to false, won't draw lines. Default true.
staticFalse
+
+

C++ defination code:

+ +
void draw_pose(image::Image &img, std::vector<int> points, int radius = 4, image::Color color = image::COLOR_RED, bool body = true)
+
+
+
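For a yolov8-pose model, each detected object's points list can be passed straight to draw_pose(); a short sketch, assuming detector, img and objs come from a detect() call like the one shown earlier:

# assuming `objs` came from detector.detect(img, ...) with a yolov8-pose model
for obj in objs:
    img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
    # points is [x, y, x, y, ...]; values < 0 mark invalid keypoints (conf < keypoint_th)
    detector.draw_pose(img, obj.points, radius=4, color=image.COLOR_RED, body=True)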

draw_seg_mask

+ +
def draw_seg_mask(self, img: maix.image.Image, x: int, y: int, seg_mask: maix.image.Image, threshold: int = 127) -> None
+
+

Draw segmentation on image

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: image object, maix.image.Image type.
seg_mask: segmentation mask image from the detect method, a grayscale image
threshold: only mask values > threshold will be drawn on the image, value from 0 to 255.
staticFalse
+
+

C++ defination code:

+ +
void draw_seg_mask(image::Image &img, int x, int y, image::Image &seg_mask, int threshold = 127)
+
+
+

labels

+

Labels list

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<string> labels
+
+
+

label_path

+

Label file path

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::string label_path
+
+
+

mean

+

Get mean value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> mean
+
+
+

scale

+

Get scale value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> scale
+
+
+

Object

+

Object for detect result

+
+

C++ defination code:

+ +
class Object
+
+
+

__init__

+ +
def __init__(self, x: int = 0, y: int = 0, w: int = 0, h: int = 0, class_id: int = 0, score: float = 0, points: list[int] = []) -> None
+
+

Constructor of Object for detect result

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx: left top x
y: left top y
w: width
h: height
class_id: class id
score: score
staticFalse
+
+

C++ defination code:

+ +
Object(int x = 0, int y = 0, int w = 0, int h = 0, int class_id = 0, float score = 0, std::vector<int> points = std::vector<int>())
+
+
+

__str__

+ +
def __str__(self) -> str
+
+

Object info to string

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnObject info string
staticFalse
+
+

C++ defination code:

+ +
std::string to_str()
+
+
+

x

+

Object left top coordinate x

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int x
+
+
+

y

+

Object left top coordinate y

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int y
+
+
+

w

+

Object width

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int w
+
+
+

h

+

Object height

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int h
+
+
+

class_id

+

Object class id

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int class_id
+
+
+

score

+

Object score

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
float score
+
+
+

points

+

keypoints

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<int> points
+
+
+

seg_mask

+

segmentation mask, uint8 list type, shape is h * w but flattened to one dimension, value from 0 to 255.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
attentionFor efficiency, it's a pointer in C++, use this carefully!
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
image::Image *seg_mask
+
+
+

ObjectFloat

+

Object for detect result

+
+

C++ defination code:

+ +
class ObjectFloat
+
+
+

__init__

+ +
def __init__(self, x: float = 0, y: float = 0, w: float = 0, h: float = 0, class_id: float = 0, score: float = 0, points: list[float] = []) -> None
+
+

Constructor of Object for detect result

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx: left top x
y: left top y
w: width
h: height
class_id: class id
score: score
staticFalse
+
+

C++ defination code:

+ +
ObjectFloat(float x = 0, float y = 0, float w = 0, float h = 0, float class_id = 0, float score = 0, std::vector<float> points = std::vector<float>())
+
+
+

__str__

+ +
def __str__(self) -> str
+
+

Object info to string

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnObject info string
staticFalse
+
+

C++ defination code:

+ +
std::string to_str()
+
+
+

x

+

Object left top coordinate x

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
float x
+
+
+

y

+

Object left top coordinate y

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
float y
+
+
+

w

+

Object width

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
float w
+
+
+

h

+

Object height

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
float h
+
+
+

class_id

+

Object class id

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
float class_id
+
+
+

score

+

Object score

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
float score
+
+
+

points

+

keypoints

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> points
+
+
+

Objects

+

Objects Class for detect result

+
+

C++ defination code:

+ +
class Objects
+
+
+

__init__

+ +
def __init__(self) -> None
+
+

Constructor of Objects class

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
Objects()
+
+
+

add

+ +
def add(self, x: int = 0, y: int = 0, w: int = 0, h: int = 0, class_id: int = 0, score: float = 0, points: list[int] = []) -> Object
+
+

Add object to objects

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
throwThrow exception if no memory
staticFalse
+
+

C++ defination code:

+ +
nn::Object &add(int x = 0, int y = 0, int w = 0, int h = 0, int class_id = 0, float score = 0, std::vector<int> points = std::vector<int>())
+
+
+

remove

+ +
def remove(self, idx: int) -> maix.err.Err
+
+

Remove object from objects

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
err::Err remove(int idx)
+
+
+

at

+ +
def at(self, idx: int) -> Object
+
+

Get object item

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
nn::Object &at(int idx)
+
+
+

__item__

+ +
def __item__(self, idx: int) -> Object
+
+

Get object item

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
nn::Object &operator[](int idx)
+
+
+

__len__

+ +
def __len__(self) -> int
+
+

Get size

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
size_t size()
+
+
+

__iter__

+ +
def __iter__(self) -> typing.Iterator
+
+

Begin

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
std::vector<Object*>::iterator begin()
+
+
+

MUD

+

MUD(model universal describe file) class

+
+

C++ defination code:

+ +
class MUD
+
+
+

__init__

+ +
def __init__(self, model_path: str = None) -> None
+
+

MUD constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel_path: direction [in], model file path, model format can be MUD(model universal describe file) file.
If model_path is set, will load the model from file; if loading fails, err.Exception will be raised.
If model_path is not set, you can load the model later with the load function.
staticFalse
+
+

C++ defination code:

+ +
MUD(const char *model_path = nullptr)
+
+
+

load

+ +
def load(self, model_path: str) -> maix.err.Err
+
+

Load model from file

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel_path: direction [in], model file path, model format can be MUD(model universal describe file) file.
returnerror code, if load success, return err::ERR_NONE
staticFalse
+
+

C++ defination code:

+ +
err::Err load(const std::string &model_path)
+
+
+

type

+

Model type, string type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::string type
+
+
+

items

+

Model config items, different model type has different config items

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::map<std::string, std::map<std::string, std::string>> items
+
+
+
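Because a MUD file is only a "model universal describe" file, it can be inspected without running the model. A small sketch follows; the model path is a placeholder, and items is expected to map to a nested dict in MaixPy:

from maix import nn

mud = nn.MUD("/root/models/yolov8n.mud")    # placeholder model path; load failure raises err.Exception
print("model type:", mud.type)
for section, kv in mud.items.items():       # items: {section: {key: value}}, all strings
    print("[%s]" % section)
    for k, v in kv.items():
        print("  %s = %s" % (k, v))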

LayerInfo

+

NN model layer info

+
+

C++ defination code:

+ +
class LayerInfo
+
+
+

__init__

+ +
def __init__(self, name: str = '', dtype: maix.tensor.DType = ..., shape: list[int] = []) -> None
+
+

LayerInfo constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramname: direction [in], layer name
dtype: direction [in], layer data type
shape: direction [in], layer shape
staticFalse
+
+

C++ defination code:

+ +
LayerInfo(const std::string &name =  "", tensor::DType dtype = tensor::DType::FLOAT32, std::vector<int> shape = std::vector<int>())
+
+
+

name

+

Layer name

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::string   name
+
+
+

dtype

+

Layer data type

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
attentionIf model is quantized, this is the real quantized data type like int8 float16,
in most scenes, inputs and outputs actually use float32 in APIs like forward.
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
tensor::DType dtype
+
+
+

shape

+

Layer shape

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<int> shape
+
+
+

shape_int

+ +
def shape_int(self) -> int
+
+

Shape as one int type, multiply all dims of shape

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
int shape_int()
+
+
+

to_str

+ +
def to_str(self) -> str
+
+

To string

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
std::string to_str()
+
+
+

__str__

+ +
def __str__(self) -> str
+
+

To string

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
std::string __str__()
+
+
+

NN

+

Neural network class

+
+

C++ defination code:

+ +
class NN
+
+
+

__init__

+ +
def __init__(self, model: str = '', dual_buff: bool = True) -> None
+
+

Neural network constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: direction [in], model file path, model format can be MUD(model universal describe file) file.
If model_path is set, will load the model from file; if loading fails, err.Exception will be raised.
If model_path is not set, you can load the model later with the load function.
dual_buff: direction [in], prepare dual input/output buffers to accelerate forward, that is, while the NPU is forwarding we do not wait and instead prepare the next input buffer.
If you want to ensure that every forward outputs the result of its own input, please set this arg to false.
Default true to ensure speed.
staticFalse
+
+

C++ defination code:

+ +
NN(const std::string &model = "", bool dual_buff = true)
+
+
+

load

+ +
def load(self, model: str) -> maix.err.Err
+
+

Load model from file

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: direction [in], model file path, model format can be MUD(model universal describe file) file.
returnerror code, if load success, return err::ERR_NONE
staticFalse
+
+

C++ defination code:

+ +
err::Err load(const std::string &model)
+
+
+

loaded

+ +
def loaded(self) -> bool
+
+

Is model loaded

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returntrue if model loaded, else false
staticFalse
+
+

C++ defination code:

+ +
bool loaded()
+
+
+

set_dual_buff

+ +
def set_dual_buff(self, enable: bool) -> None
+
+

Enable dual buff or disable dual buff

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramenable: true to enable, false to disable
staticFalse
+
+

C++ defination code:

+ +
void set_dual_buff(bool enable)
+
+
+

inputs_info

+ +
def inputs_info(self) -> list[LayerInfo]
+
+

Get model input layer info

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returninput layer info
staticFalse
+
+

C++ defination code:

+ +
std::vector<nn::LayerInfo> inputs_info()
+
+
+

outputs_info

+ +
def outputs_info(self) -> list[LayerInfo]
+
+

Get model output layer info

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnoutput layer info
staticFalse
+
+

C++ defination code:

+ +
std::vector<nn::LayerInfo> outputs_info()
+
+
+

extra_info

+ +
def extra_info(self) -> dict[str, str]
+
+

Get model extra info define in MUD file

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnextra info, dict type, key-value object, attention: key and value are all string type.
staticFalse
+
+

C++ defination code:

+ +
std::map<std::string, std::string> extra_info()
+
+
+

forward

+ +
def forward(self, inputs: maix.tensor.Tensors, copy_result: bool = True, dual_buff_wait: bool = False) -> maix.tensor.Tensors
+
+

Forward run model and get the output of the model. This is specially for MaixPy; not efficient, but easy to use in MaixPy.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paraminput: direction [in], input tensor
copy_result: If set to true, will copy the result to a new variable; else will use internal memory, and you can only use it until the next forward.
Default true to avoid problems; you can set it to false manually to make it faster.
dual_buff_wait: bool type, only for dual_buff mode; if true, will run inference on this input and wait for the result, default false.
returnoutput tensor. In C++, you should manually delete the tensors inside the return value as well as the return value itself.
In dual_buff mode, it can be NULL (None in MaixPy), which means the result is not ready.
throwIf an error occurs, like no memory or an argument error, err.Exception will be raised.
staticFalse
+
+

C++ defination code:

+ +
tensor::Tensors *forward(tensor::Tensors &inputs, bool copy_result = true, bool dual_buff_wait = false)
+
+
+

forward_image

+ +
def forward_image(self, img: maix.image.Image, mean: list[float] = [], scale: list[float] = [], fit: maix.image.Fit = ..., copy_result: bool = True, dual_buff_wait: bool = False) -> maix.tensor.Tensors
+
+

forward model, param is image

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: input image
mean: mean value, a list type, e.g. [0.485, 0.456, 0.406], default is an empty list which means no normalization.
scale: scale value, a list type, e.g. [1/0.229, 1/0.224, 1/0.225], default is an empty list which means no normalization.
fit: fit mode; if the input image size is not equal to the model's input size, it will be auto resized using this fit method,
default is image.Fit.FIT_FILL for easy coordinate calculation, but for a more accurate result image.Fit.FIT_CONTAIN is better.
copy_result: If set to true, will copy the result to a new variable; else will use internal memory, and you can only use it until the next forward.
Default true to avoid problems; you can set it to false manually to make it faster.
dual_buff_wait: bool type, only for dual_buff mode; if true, will run inference on this image and wait for the result, default false.
returnoutput tensor. In C++, you should manually delete the tensors inside the return value as well as the return value itself.
In dual_buff mode, it can be NULL (None in MaixPy), which means the result is not ready.
throwIf error occurs, like arg error or alloc memory failed, will raise err.Exception.
staticFalse
+
+

C++ defination code:

+ +
tensor::Tensors *forward_image(image::Image &img, std::vector<float> mean = std::vector<float>(), std::vector<float> scale = std::vector<float>(), image::Fit fit = image::Fit::FIT_FILL, bool copy_result = true, bool dual_buff_wait = false)
+
+
+
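For models without a dedicated wrapper class, the generic NN path above can be used directly. The sketch below is a minimal example, where the model path and input resolution are placeholders and the mean/scale values are the examples from the parameter table:

from maix import camera, image, nn

net = nn.NN("/root/models/my_model.mud", dual_buff=False)   # placeholder path; dual_buff off for simplicity
print("inputs :", net.inputs_info())
print("outputs:", net.outputs_info())

cam = camera.Camera(224, 224)        # use your model's real input resolution here
img = cam.read()
outs = net.forward_image(img,
                         mean=[0.485, 0.456, 0.406],
                         scale=[1 / 0.229, 1 / 0.224, 1 / 0.225],
                         fit=image.Fit.FIT_CONTAIN)
# outs is a maix.tensor.Tensors object; how to decode it depends on the model's output layers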

FaceObject

+

Face object

+
+

C++ defination code:

+ +
class FaceObject
+
+
+

__init__

+ +
def __init__(self, x: int = 0, y: int = 0, w: int = 0, h: int = 0, class_id: int = 0, score: float = 0, points: list[int] = [], feature: list[float] = [], face: maix.image.Image = ...) -> None
+
+

Constructor

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
FaceObject(int x = 0, int y = 0, int w = 0, int h = 0, int class_id = 0, float score = 0, std::vector<int> points = std::vector<int>(), std::vector<float> feature = std::vector<float>(), image::Image face = image::Image())
+
+
+

__str__

+ +
def __str__(self) -> str
+
+

FaceObject info to string

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnFaceObject info string
staticFalse
+
+

C++ defination code:

+ +
std::string to_str()
+
+
+

x

+

FaceObject left top coordinate x

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int x
+
+
+

y

+

FaceObject left top coordinate y

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int y
+
+
+

w

+

FaceObject width

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int w
+
+
+

h

+

FaceObject height

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int h
+
+
+

class_id

+

FaceObject class id

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int class_id
+
+
+

score

+

FaceObject score

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
float score
+
+
+

points

+

keypoints

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<int> points
+
+
+

feature

+

feature, float list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> feature
+
+
+

face

+

face image

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
image::Image face
+
+
+

FaceRecognizer

+

FaceRecognizer class

+
+

C++ defination code:

+ +
class FaceRecognizer
+
+
+

__init__

+ +
def __init__(self, detect_model: str = '', feature_model: str = '', dual_buff: bool = True) -> None
+
+

Constructor of FaceRecognizer class

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdetect_model: face detect model path, default empty, you can load model later by load function.
feature_model: feature extract model
dual_buff: direction [in], prepare dual input/output buffers to accelerate forward, that is, while the NPU is forwarding we do not wait and instead prepare the next input buffer.
If you want to ensure that every forward outputs the result of its own input, please set this arg to false.
Default true to ensure speed.
throwIf model arg is not empty and load failed, will throw err::Exception.
staticFalse
+
+

C++ defination code:

+ +
FaceRecognizer(const string &detect_model = "", const string &feature_model = "", bool dual_buff = true)
+
+
+

load

+ +
def load(self, detect_model: str, feature_model: str) -> maix.err.Err
+
+

Load model from file

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdetect_model: face detect model path, default empty, you can load model later by load function.
feature_model: feature extract model
returnerr::Err
staticFalse
+
+

C++ defination code:

+ +
err::Err load(const string &detect_model, const string &feature_model)
+
+
+

recognize

+ +
def recognize(self, img: maix.image.Image, conf_th: float = 0.5, iou_th: float = 0.45, compare_th: float = 0.8, get_feature: bool = False, get_face: bool = False, fit: maix.image.Fit = ...) -> list[FaceObject]
+
+

Detect objects from image

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: Image want to detect, if image's size not match model input's, will auto resize with fit method.
conf_th: Detect confidence threshold, default 0.5.
iou_th: Detect IoU threshold, default 0.45.
compare_th: face comparison score threshold, default 0.8; if two faces' score < this value, this face is treated as unknown.
get_feature: return feature or not; if true, will copy features to the result, if false, will not copy features to the result to save time and memory.
get_face: return face image or not; if true, the result object's face attribute will be valid, otherwise the face attribute is empty. Getting the face image will allocate memory and copy the image, so it will be slower.
fit: Resize method, default image.Fit.FIT_CONTAIN.
throwIf image format not match model input format, will throw err::Exception.
returnFaceObject list. In C++, you should delete it after use.
staticFalse
+
+

C++ defination code:

+ +
std::vector<nn::FaceObject> *recognize(image::Image &img, float conf_th = 0.5, float iou_th = 0.45, float compare_th = 0.8, bool get_feature = false, bool get_face = false, maix::image::Fit fit = maix::image::FIT_CONTAIN)
+
+
+

add_face

+ +
def add_face(self, face: FaceObject, label: str) -> maix.err.Err
+
+

Add face to lib

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramface: face object, find by recognize
label: face label(name)
staticFalse
+
+

C++ defination code:

+ +
err::Err add_face(nn::FaceObject *face, const std::string &label)
+
+
+

remove_face

+ +
def remove_face(self, idx: int = -1, label: str = '') -> maix.err.Err
+
+

remove face from lib

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramidx: index of the face in the lib, default -1 means use label; idx and label must have one, and idx has higher priority.
label: which face to remove, default empty string means use idx; idx and label must have one, and idx has higher priority.
staticFalse
+
+

C++ defination code:

+ +
err::Err remove_face(int idx = -1, const std::string &label = "")
+
+
+

save_faces

+ +
def save_faces(self, path: str) -> maix.err.Err
+
+

Save faces info to a file

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampath: where to save, string type.
returnerr.Err type
staticFalse
+
+

C++ defination code:

+ +
err::Err save_faces(const std::string &path)
+
+
+

load_faces

+ +
def load_faces(self, path: str) -> maix.err.Err
+
+

Load faces info from a file

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampath: from where to load, string type.
returnerr::Err type
staticFalse
+
+

C++ defination code:

+ +
err::Err load_faces(const std::string &path)
+
+
+
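The recognize/add_face/save_faces/load_faces methods above fit together roughly as in the sketch below; the model paths, the face library path and the label are placeholders:

from maix import camera, display, image, nn

rec = nn.FaceRecognizer("/root/models/face_detect.mud",     # placeholder model paths
                        "/root/models/face_feature.mud")
rec.load_faces("/root/faces.bin")   # placeholder path; returns err.Err (ignore the error if no library was saved yet)

cam = camera.Camera(rec.input_width(), rec.input_height())
disp = display.Display()

while True:
    img = cam.read()
    faces = rec.recognize(img, conf_th=0.5, iou_th=0.45, compare_th=0.8)
    for face in faces:
        name = rec.labels[face.class_id]             # labels[0] is "unknown"
        img.draw_rect(face.x, face.y, face.w, face.h, color=image.COLOR_RED)
        img.draw_string(face.x, face.y, "%s %.2f" % (name, face.score), color=image.COLOR_RED)
        # to register the current face: rec.add_face(face, "alice"); rec.save_faces("/root/faces.bin")
    disp.show(img)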

input_size

+ +
def input_size(self) -> maix.image.Size
+
+

Get model input size

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size
staticFalse
+
+

C++ defination code:

+ +
image::Size input_size()
+
+
+

input_width

+ +
def input_width(self) -> int
+
+

Get model input width

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of width
staticFalse
+
+

C++ defination code:

+ +
int input_width()
+
+
+

input_height

+ +
def input_height(self) -> int
+
+

Get model input height

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of height
staticFalse
+
+

C++ defination code:

+ +
int input_height()
+
+
+

input_format

+ +
def input_format(self) -> maix.image.Format
+
+

Get input image format

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returninput image format, image::Format type.
staticFalse
+
+

C++ defination code:

+ +
image::Format input_format()
+
+
+

mean_detector

+

Get mean value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> mean_detector
+
+
+

scale_detector

+

Get scale value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> scale_detector
+
+
+

mean_feature

+

Get mean value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> mean_feature
+
+
+

scale_feature

+

Get scale value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> scale_feature
+
+
+

labels

+

labels, list type, first is "unknown"

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<std::string> labels
+
+
+

features

+

features

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<std::vector<float>> features
+
+
+

SelfLearnClassifier

+

SelfLearnClassifier

+
+

C++ defination code:

+ +
class SelfLearnClassifier
+
+
+

__init__

+ +
def __init__(self, model: str = '', dual_buff: bool = True) -> None
+
+

Construct a new SelfLearnClassifier object

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: MUD model path, if empty, will not load model, you can call load_model() later.
if not empty, will load model and will raise err::Exception if load failed.
dual_buff: direction [in], prepare dual input/output buffers to accelerate forward, that is, while the NPU is forwarding we do not wait and instead prepare the next input buffer.
If you want to ensure every forward outputs the result of the current input, set this arg to false.
Default true to ensure speed.
staticFalse
+
+

C++ defination code:

+ +
SelfLearnClassifier(const std::string &model = "", bool dual_buff = true)
+
+
+

load_model

+ +
def load_model(self, model: str) -> maix.err.Err
+
+

Load model from file, model format is .mud,\nMUD file should contain [extra] section, have key-values:\n- model_type: classifier_no_top\n- input_type: rgb or bgr\n- mean: 123.675, 116.28, 103.53\n- scale: 0.017124753831663668, 0.01750700280112045, 0.017429193899782137

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: MUD model path
returnerror code, if load failed, return error code
staticFalse
+
+

C++ defination code:

+ +
err::Err load_model(const string &model)
+
+
+

classify

+ +
def classify(self, img: maix.image.Image, fit: maix.image.Fit = ...) -> list[tuple[int, float]]
+
+

Classify image

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: image, format should match model input_type, or will raise err.Exception
fit: image resize fit mode, default Fit.FIT_COVER, see image.Fit.
throwIf error occurred, will raise err::Exception, you can find reason in log, mostly caused by args error or hardware error.
returnresult, a list of (idx, distance), smaller distance means more similar. In C++, you need to delete it after use.
staticFalse
+
+

C++ defination code:

+ +
std::vector<std::pair<int, float>> *classify(image::Image &img, image::Fit fit = image::FIT_COVER)
+
+
+

add_class

+ +
def add_class(self, img: maix.image.Image, fit: maix.image.Fit = ...) -> None
+
+

Add a class to recognize

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: Add a image as a new class
fit: image resize fit mode, default Fit.FIT_COVER, see image.Fit.
staticFalse
+
+

C++ defination code:

+ +
void add_class(image::Image &img, image::Fit fit = image::FIT_COVER)
+
+
+

class_num

+ +
def class_num(self) -> int
+
+

Get class number

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
int class_num()
+
+
+

rm_class

+ +
def rm_class(self, idx: int) -> maix.err.Err
+
+

Remove a class

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramidx: index, value from 0 to class_num();
staticFalse
+
+

C++ defination code:

+ +
err::Err rm_class(int idx)
+
+
+

add_sample

+ +
def add_sample(self, img: maix.image.Image, fit: maix.image.Fit = ...) -> None
+
+

Add a sample; you should call the learn method after adding some samples to learn the classes.\nA sample image can belong to any of the classes already added.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: Add a image as a new sample.
staticFalse
+
+

C++ defination code:

+ +
void add_sample(image::Image &img, image::Fit fit = image::FIT_COVER)
+
+
+

rm_sample

+ +
def rm_sample(self, idx: int) -> maix.err.Err
+
+

Remove a sample

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramidx: index, value from 0 to sample_num();
staticFalse
+
+

C++ defination code:

+ +
err::Err rm_sample(int idx)
+
+
+

sample_num

+ +
def sample_num(self) -> int
+
+

Get sample number

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
int sample_num()
+
+
+

learn

+ +
def learn(self) -> int
+
+

Start auto-learning class features from the class images and samples.\nYou should call this method after you add some samples.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnlearn epoch(times), 0 means learn nothing.
staticFalse
+
+

C++ defination code:

+ +
int learn()
+
+
+
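The typical SelfLearnClassifier flow uses add_class, add_sample, learn and classify together. The sketch below is illustrative: the model path is hypothetical and the maix.camera usage is an assumption.

MaixPy example (sketch):

from maix import nn, camera

classifier = nn.SelfLearnClassifier(model="/root/models/classifier_no_top.mud")  # hypothetical path
cam = camera.Camera(classifier.input_width(), classifier.input_height(), classifier.input_format())

# 1. Add one image per class to recognize.
classifier.add_class(cam.read())
classifier.add_class(cam.read())

# 2. Add extra sample images belonging to the classes above.
for _ in range(10):
    classifier.add_sample(cam.read())

# 3. Learn features, then classify new frames.
print("learn epochs:", classifier.learn())
res = classifier.classify(cam.read())            # list of (idx, distance), smaller distance = more similar
idx, distance = min(res, key=lambda r: r[1])     # pick the most similar class
print("best match class:", idx, "distance:", distance)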

clear

+ +
def clear(self) -> None
+
+

Clear all class and samples

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void clear()
+
+
+

input_size

+ +
def input_size(self) -> maix.image.Size
+
+

Get model input size, only for image input

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size
staticFalse
+
+

C++ defination code:

+ +
image::Size input_size()
+
+
+

input_width

+ +
def input_width(self) -> int
+
+

Get model input width, only for image input

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of width
staticFalse
+
+

C++ defination code:

+ +
int input_width()
+
+
+

input_height

+ +
def input_height(self) -> int
+
+

Get model input height, only for image input

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of height
staticFalse
+
+

C++ defination code:

+ +
int input_height()
+
+
+

input_format

+ +
def input_format(self) -> maix.image.Format
+
+

Get input image format, only for image input

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returninput image format, image::Format type.
staticFalse
+
+

C++ defination code:

+ +
image::Format input_format()
+
+
+

input_shape

+ +
def input_shape(self) -> list[int]
+
+

Get input shape, if have multiple input, only return first input shape

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returninput shape, list type
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> input_shape()
+
+
+

save

+ +
def save(self, path: str, labels: list[str] = []) -> maix.err.Err
+
+

Save features and labels to a binary file

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampath: file path to save, e.g. /root/my_classes.bin
labels: class labels, can be None, or its length must equal the class number, otherwise err::Err will be returned
returnmaix.err.Err if labels exists but its length does not equal the class number, or saving the file failed, or the class number is 0.
staticFalse
+
+

C++ defination code:

+ +
err::Err save(const std::string &path, const std::vector<std::string> &labels = std::vector<std::string>())
+
+
+

load

+ +
def load(self, path: str) -> list[str]
+
+

Load features info from binary file

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampath: feature info binary file path, e.g. /root/my_classes.bin
staticFalse
+
+

C++ defination code:

+ +
std::vector<std::string> load(const std::string &path)
+
+
+
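Persisting and restoring learned classes is a two-call round trip using save and load above; the label names here are placeholders.

MaixPy example (sketch):

# Save learned features plus optional labels, then reload them later.
classifier.save("/root/my_classes.bin", labels=["class_a", "class_b"])
labels = classifier.load("/root/my_classes.bin")
print("loaded:", labels)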

labels

+

Labels list

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<string> labels
+
+
+

label_path

+

Label file path

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::string label_path
+
+
+

mean

+

Get mean value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> mean
+
+
+

scale

+

Get scale value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> scale
+
+
+

YOLOv5

+

YOLOv5 class

+
+

C++ defination code:

+ +
class YOLOv5
+
+
+

__init__

+ +
def __init__(self, model: str = '', dual_buff: bool = True) -> None
+
+

Constructor of YOLOv5 class

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: model path, default empty, you can load model later by load function.
dual_buff: direction [in], prepare dual input/output buffers to accelerate forward, that is, while the NPU is forwarding we do not wait and instead prepare the next input buffer.
If you want to ensure every forward outputs the result of the current input, set this arg to false.
Default true to ensure speed.
throwIf model arg is not empty and load failed, will throw err::Exception.
staticFalse
+
+

C++ defination code:

+ +
YOLOv5(const string &model = "", bool dual_buff = true)
+
+
+

load

+ +
def load(self, model: str) -> maix.err.Err
+
+

Load model from file

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: Model path want to load
returnerr::Err
staticFalse
+
+

C++ defination code:

+ +
err::Err load(const string &model)
+
+
+

detect

+ +
def detect(self, img: maix.image.Image, conf_th: float = 0.5, iou_th: float = 0.45, fit: maix.image.Fit = ...) -> list[Object]
+
+

Detect objects from image

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: Image want to detect, if image's size not match model input's, will auto resize with fit method.
conf_th: Confidence threshold, default 0.5.
iou_th: IoU threshold, default 0.45.
fit: Resize method, default image.Fit.FIT_CONTAIN.
throwIf image format not match model input format, will throw err::Exception.
returnObject list. In C++, you should delete it after use.
staticFalse
+
+

C++ defination code:

+ +
std::vector<nn::Object> *detect(image::Image &img, float conf_th = 0.5, float iou_th = 0.45, maix::image::Fit fit = maix::image::FIT_CONTAIN)
+
+
+
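A minimal MaixPy detection loop for the detect method above; the model path, the camera/display usage and the nn.Object attribute names (x, y, w, h, class_id, score) are assumptions for illustration, documented elsewhere.

MaixPy example (sketch):

from maix import nn, camera, display, image, app

detector = nn.YOLOv5(model="/root/models/yolov5s.mud")  # hypothetical model path
cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
dis = display.Display()
while not app.need_exit():
    img = cam.read()
    objs = detector.detect(img, conf_th=0.5, iou_th=0.45)
    for obj in objs:
        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
        img.draw_string(obj.x, obj.y, f"{detector.labels[obj.class_id]}: {obj.score:.2f}", color=image.COLOR_RED)
    dis.show(img)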

input_size

+ +
def input_size(self) -> maix.image.Size
+
+

Get model input size

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size
staticFalse
+
+

C++ defination code:

+ +
image::Size input_size()
+
+
+

input_width

+ +
def input_width(self) -> int
+
+

Get model input width

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of width
staticFalse
+
+

C++ defination code:

+ +
int input_width()
+
+
+

input_height

+ +
def input_height(self) -> int
+
+

Get model input height

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of height
staticFalse
+
+

C++ defination code:

+ +
int input_height()
+
+
+

input_format

+ +
def input_format(self) -> maix.image.Format
+
+

Get input image format

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returninput image format, image::Format type.
staticFalse
+
+

C++ defination code:

+ +
image::Format input_format()
+
+
+

labels

+

Labels list

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<string> labels
+
+
+

label_path

+

Label file path

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::string label_path
+
+
+

mean

+

Get mean value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> mean
+
+
+

scale

+

Get scale value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> scale
+
+
+

anchors

+

Get anchors

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> anchors
+
+
+

YOLO11

+

YOLO11 class

+
+

C++ defination code:

+ +
class YOLO11
+
+
+

__init__

+ +
def __init__(self, model: str = '', dual_buff: bool = True) -> None
+
+

Constructor of YOLO11 class

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: model path, default empty, you can load model later by load function.
dual_buff: direction [in], prepare dual input/output buffers to accelerate forward, that is, while the NPU is forwarding we do not wait and instead prepare the next input buffer.
If you want to ensure every forward outputs the result of the current input, set this arg to false.
Default true to ensure speed.
throwIf model arg is not empty and load failed, will throw err::Exception.
staticFalse
+
+

C++ defination code:

+ +
YOLO11(const string &model = "", bool dual_buff = true)
+
+
+

load

+ +
def load(self, model: str) -> maix.err.Err
+
+

Load model from file

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: Model path want to load
returnerr::Err
staticFalse
+
+

C++ defination code:

+ +
err::Err load(const string &model)
+
+
+

detect

+ +
def detect(self, img: maix.image.Image, conf_th: float = 0.5, iou_th: float = 0.45, fit: maix.image.Fit = ..., keypoint_th: float = 0.5) -> Objects
+
+

Detect objects from image

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: Image want to detect, if image's size not match model input's, will auto resize with fit method.
conf_th: Confidence threshold, default 0.5.
iou_th: IoU threshold, default 0.45.
fit: Resize method, default image.Fit.FIT_CONTAIN.
keypoint_th: keypoint threshold, default 0.5, only for yolo11-pose model.
throwIf image format not match model input format, will throw err::Exception.
returnObject list. In C++, you should delete it after use.
If model is yolo11-pose, object's points have value, and if points' value < 0 means that point is invalid(conf < keypoint_th).
staticFalse
+
+

C++ defination code:

+ +
nn::Objects *detect(image::Image &img, float conf_th = 0.5, float iou_th = 0.45, maix::image::Fit fit = maix::image::FIT_CONTAIN, float keypoint_th = 0.5)
+
+
+
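For a yolo11-pose model the same detect call also returns keypoints, which can be passed to draw_pose (documented below). The model path and the obj.points attribute are assumptions for illustration, and img is a frame read as in the YOLOv5 example above.

MaixPy example (sketch):

from maix import nn

detector = nn.YOLO11(model="/root/models/yolo11n_pose.mud")  # hypothetical model path
objs = detector.detect(img, conf_th=0.5, iou_th=0.45, keypoint_th=0.5)
for obj in objs:
    if len(obj.points) > 0:
        # negative point values mean the keypoint is invalid (conf < keypoint_th)
        detector.draw_pose(img, obj.points, radius=4, body=True)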

input_size

+ +
def input_size(self) -> maix.image.Size
+
+

Get model input size

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size
staticFalse
+
+

C++ defination code:

+ +
image::Size input_size()
+
+
+

input_width

+ +
def input_width(self) -> int
+
+

Get model input width

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of width
staticFalse
+
+

C++ defination code:

+ +
int input_width()
+
+
+

input_height

+ +
def input_height(self) -> int
+
+

Get model input height

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of height
staticFalse
+
+

C++ defination code:

+ +
int input_height()
+
+
+

input_format

+ +
def input_format(self) -> maix.image.Format
+
+

Get input image format

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returninput image format, image::Format type.
staticFalse
+
+

C++ defination code:

+ +
image::Format input_format()
+
+
+

draw_pose

+ +
def draw_pose(self, img: maix.image.Image, points: list[int], radius: int = 4, color: maix.image.Color = ..., body: bool = True) -> None
+
+

Draw pose keypoints on image

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: image object, maix.image.Image type.
points: keypoints, int list type, [x, y, x, y ...]
radius: radius of points.
color: color of points.
body: if true and the points' length is 17*2, lines will be drawn as a human body; if set to false, no lines will be drawn. Default true.
staticFalse
+
+

C++ defination code:

+ +
void draw_pose(image::Image &img, std::vector<int> points, int radius = 4, image::Color color = image::COLOR_RED, bool body = true)
+
+
+

draw_seg_mask

+ +
def draw_seg_mask(self, img: maix.image.Image, x: int, y: int, seg_mask: maix.image.Image, threshold: int = 127) -> None
+
+

Draw segmentation on image

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: image object, maix.image.Image type.
seg_mask: segmentation mask image by detect method, a grayscale image
threshold: only mask's value > threshold will be draw on image, value from 0 to 255.
staticFalse
+
+

C++ defination code:

+ +
void draw_seg_mask(image::Image &img, int x, int y, image::Image &seg_mask, int threshold = 127)
+
+
+

labels

+

Labels list

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<string> labels
+
+
+

label_path

+

Label file path

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::string label_path
+
+
+

mean

+

Get mean value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> mean
+
+
+

scale

+

Get scale value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> scale
+
+
+

Classifier

+

Classifier

+
+

C++ defination code:

+ +
class Classifier
+
+
+

__init__

+ +
def __init__(self, model: str = '', dual_buff: bool = True) -> None
+
+

Construct a new Classifier object

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: MUD model path, if empty, will not load model, you can call load() later.
if not empty, will load model and will raise err::Exception if load failed.
dual_buff: direction [in], prepare dual input/output buffers to accelerate forward, that is, while the NPU is forwarding we do not wait and instead prepare the next input buffer.
If you want to ensure every forward outputs the result of the current input, set this arg to false.
Default true to ensure speed.
staticFalse
+
+

C++ defination code:

+ +
Classifier(const string &model = "", bool dual_buff = true)
+
+
+

load

+ +
def load(self, model: str) -> maix.err.Err
+
+

Load model from file, model format is .mud,\nMUD file should contain [extra] section, have key-values:\n- model_type: classifier\n- input_type: rgb or bgr\n- mean: 123.675, 116.28, 103.53\n- scale: 0.017124753831663668, 0.01750700280112045, 0.017429193899782137\n- labels: imagenet_classes.txt

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: MUD model path
returnerror code, if load failed, return error code
staticFalse
+
+

C++ defination code:

+ +
err::Err load(const string &model)
+
+
+

classify

+ +
def classify(self, img: maix.image.Image, softmax: bool = True, fit: maix.image.Fit = ...) -> list[tuple[int, float]]
+
+

Forward image to model, get result. Only for image input, use classify_raw for tensor input.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: image, format should match model input_type, or will raise err.Exception
softmax: if true, will do softmax to result, or will return raw value
fit: image resize fit mode, default Fit.FIT_COVER, see image.Fit.
throwIf error occurred, will raise err::Exception, you can find reason in log, mostly caused by args error or hardware error.
returnresult, a list of (label, score). If in dual_buff mode, value can be one element list and score is zero when not ready. In C++, you need to delete it after use.
staticFalse
+
+

C++ defination code:

+ +
std::vector<std::pair<int, float>> *classify(image::Image &img, bool softmax = true, image::Fit fit = image::FIT_COVER)
+
+
+
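A short sketch of the classify call above; the model path and camera usage are illustrative assumptions.

MaixPy example (sketch):

from maix import nn, camera

classifier = nn.Classifier(model="/root/models/mobilenetv2.mud")  # hypothetical model path
cam = camera.Camera(classifier.input_width(), classifier.input_height(), classifier.input_format())
res = classifier.classify(cam.read(), softmax=True)   # list of (label index, score)
idx, score = max(res, key=lambda r: r[1])             # in dual_buff mode the first result may not be ready yet
print(classifier.labels[idx], score)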

classify_raw

+ +
def classify_raw(self, data: maix.tensor.Tensor, softmax: bool = True) -> list[tuple[int, float]]
+
+

Forward tensor data to model, get result

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdata: tensor data, format should match model input_type, or will raise err.Excetion
softmax: if true, will do softmax to result, or will return raw value
throwIf error occurred, will raise err::Exception, you can find reason in log, mostly caused by args error or hardware error.
returnresult, a list of (label, score). In C++, you need to delete it after use.
staticFalse
+
+

C++ defination code:

+ +
std::vector<std::pair<int, float>> *classify_raw(tensor::Tensor &data, bool softmax = true)
+
+
+

input_size

+ +
def input_size(self) -> maix.image.Size
+
+

Get model input size, only for image input

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size
staticFalse
+
+

C++ defination code:

+ +
image::Size input_size()
+
+
+

input_width

+ +
def input_width(self) -> int
+
+

Get model input width, only for image input

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of width
staticFalse
+
+

C++ defination code:

+ +
int input_width()
+
+
+

input_height

+ +
def input_height(self) -> int
+
+

Get model input height, only for image input

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of height
staticFalse
+
+

C++ defination code:

+ +
int input_height()
+
+
+

input_format

+ +
def input_format(self) -> maix.image.Format
+
+

Get input image format, only for image input

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returninput image format, image::Format type.
staticFalse
+
+

C++ defination code:

+ +
image::Format input_format()
+
+
+

input_shape

+ +
def input_shape(self) -> list[int]
+
+

Get input shape, if have multiple input, only return first input shape

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returninput shape, list type
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> input_shape()
+
+
+

labels

+

Labels list

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<string> labels
+
+
+

label_path

+

Label file path

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::string label_path
+
+
+

mean

+

Get mean value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> mean
+
+
+

scale

+

Get scale value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> scale
+
+
+

Retinaface

+

Retinaface class

+
+

C++ defination code:

+ +
class Retinaface
+
+
+

__init__

+ +
def __init__(self, model: str = '', dual_buff: bool = True) -> None
+
+

Constructor of Retinaface class

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: model path, default empty, you can load model later by load function.
dual_buff: direction [in], prepare dual input/output buffers to accelerate forward, that is, while the NPU is forwarding we do not wait and instead prepare the next input buffer.
If you want to ensure every forward outputs the result of the current input, set this arg to false.
Default true to ensure speed.
throwIf model arg is not empty and load failed, will throw err::Exception.
staticFalse
+
+

C++ defination code:

+ +
Retinaface(const string &model = "", bool dual_buff = true)
+
+
+

load

+ +
def load(self, model: str) -> maix.err.Err
+
+

Load model from file

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: Model path want to load
returnerr::Err
staticFalse
+
+

C++ defination code:

+ +
err::Err load(const string &model)
+
+
+

detect

+ +
def detect(self, img: maix.image.Image, conf_th: float = 0.4, iou_th: float = 0.45, fit: maix.image.Fit = ...) -> list[Object]
+
+

Detect objects from image

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: Image want to detect, if image's size not match model input's, will auto resize with fit method.
conf_th: Confidence threshold, default 0.4.
iou_th: IoU threshold, default 0.45.
fit: Resize method, default image.Fit.FIT_CONTAIN.
throwIf image format not match model input format, will throw err::Exception.
returnObject list. In C++, you should delete it after use.
staticFalse
+
+

C++ defination code:

+ +
std::vector<nn::Object> *detect(image::Image &img, float conf_th = 0.4, float iou_th = 0.45, maix::image::Fit fit = maix::image::FIT_CONTAIN)
+
+
+
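Usage mirrors the other detectors on this page; img is a frame matching the model input (see the camera examples above), and the model path and nn.Object attribute names are illustrative assumptions.

MaixPy example (sketch):

from maix import nn, image

detector = nn.Retinaface(model="/root/models/retinaface.mud")  # hypothetical model path
faces = detector.detect(img, conf_th=0.4, iou_th=0.45)
for face in faces:
    img.draw_rect(face.x, face.y, face.w, face.h, color=image.COLOR_RED)
print("faces found:", len(faces))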

input_size

+ +
def input_size(self) -> maix.image.Size
+
+

Get model input size

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size
staticFalse
+
+

C++ defination code:

+ +
image::Size input_size()
+
+
+

input_width

+ +
def input_width(self) -> int
+
+

Get model input width

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of width
staticFalse
+
+

C++ defination code:

+ +
int input_width()
+
+
+

input_height

+ +
def input_height(self) -> int
+
+

Get model input height

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of height
staticFalse
+
+

C++ defination code:

+ +
int input_height()
+
+
+

input_format

+ +
def input_format(self) -> maix.image.Format
+
+

Get input image format

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returninput image format, image::Format type.
staticFalse
+
+

C++ defination code:

+ +
image::Format input_format()
+
+
+

mean

+

Get mean value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> mean
+
+
+

scale

+

Get scale value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> scale
+
+
+

FaceDetector

+

FaceDetector class

+
+

C++ defination code:

+ +
class FaceDetector
+
+
+

__init__

+ +
def __init__(self, model: str = '', dual_buff: bool = True) -> None
+
+

Constructor of FaceDetector class

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: model path, default empty, you can load model later by load function.
dual_buff: direction [in], prepare dual input/output buffers to accelerate forward, that is, while the NPU is forwarding we do not wait and instead prepare the next input buffer.
If you want to ensure every forward outputs the result of the current input, set this arg to false.
Default true to ensure speed.
throwIf model arg is not empty and load failed, will throw err::Exception.
staticFalse
+
+

C++ defination code:

+ +
FaceDetector(const string &model = "", bool dual_buff = true)
+
+
+

load

+ +
def load(self, model: str) -> maix.err.Err
+
+

Load model from file

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: Model path want to load
returnerr::Err
staticFalse
+
+

C++ defination code:

+ +
err::Err load(const string &model)
+
+
+

detect

+ +
def detect(self, img: maix.image.Image, conf_th: float = 0.5, iou_th: float = 0.45, fit: maix.image.Fit = ...) -> list[Object]
+
+

Detect objects from image

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: Image want to detect, if image's size not match model input's, will auto resize with fit method.
conf_th: Confidence threshold, default 0.5.
iou_th: IoU threshold, default 0.45.
fit: Resize method, default image.Fit.FIT_CONTAIN.
throwIf image format not match model input format, will throw err::Exception.
returnObject list. In C++, you should delete it after use.
staticFalse
+
+

C++ defination code:

+ +
std::vector<nn::Object> *detect(image::Image &img, float conf_th = 0.5, float iou_th = 0.45, maix::image::Fit fit = maix::image::FIT_CONTAIN)
+
+
+
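FaceDetector.detect is used the same way; a minimal sketch with a hypothetical model path, where img is a frame read as in the camera examples above.

MaixPy example (sketch):

from maix import nn

detector = nn.FaceDetector(model="/root/models/face_detector.mud")  # hypothetical model path
faces = detector.detect(img, conf_th=0.5, iou_th=0.45)
print("detected %d face(s)" % len(faces))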

input_size

+ +
def input_size(self) -> maix.image.Size
+
+

Get model input size

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size
staticFalse
+
+

C++ defination code:

+ +
image::Size input_size()
+
+
+

input_width

+ +
def input_width(self) -> int
+
+

Get model input width

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of width
staticFalse
+
+

C++ defination code:

+ +
int input_width()
+
+
+

input_height

+ +
def input_height(self) -> int
+
+

Get model input height

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of height
staticFalse
+
+

C++ defination code:

+ +
int input_height()
+
+
+

input_format

+ +
def input_format(self) -> maix.image.Format
+
+

Get input image format

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returninput image format, image::Format type.
staticFalse
+
+

C++ defination code:

+ +
image::Format input_format()
+
+
+

mean

+

Get mean value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> mean
+
+
+

scale

+

Get scale value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> scale
+
+
+

PP_OCR

+

PP_OCR class

+
+

C++ defination code:

+ +
class PP_OCR
+
+
+

__init__

+ +
def __init__(self, model: str = '') -> None
+
+

Constructor of PP_OCR class

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: model path, default empty, you can load model later by load function.
throwIf model arg is not empty and load failed, will throw err::Exception.
staticFalse
+
+

C++ defination code:

+ +
PP_OCR(const string &model = "")
+
+
+

load

+ +
def load(self, model: str) -> maix.err.Err
+
+

Load model from file

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammodel: Model path want to load
returnerr::Err
staticFalse
+
+

C++ defination code:

+ +
err::Err load(const string &model)
+
+
+

detect

+ +
def detect(self, img: maix.image.Image, thresh: float = 0.3, box_thresh: float = 0.6, fit: maix.image.Fit = ..., char_box: bool = False) -> OCR_Objects
+
+

Detect objects from image

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: Image want to detect, if image's size not match model input's, will auto resize with fit method.
thresh: Confidence threshold for pixels that contain a character, default 0.3.
box_thresh: Box threshold, the box prob higher than this value will be valid, default 0.6.
fit: Resize method, default image.Fit.FIT_CONTAIN.
char_box: Calculate every character's box, default false; if true you can get each character's box via nn.OCR_Object's char_boxes attribute.
throwIf image format not match model input format or no memory, will throw err::Exception.
returnnn.OCR_Objects type. In C++, you should delete it after use.
staticFalse
+
+

C++ defination code:

+ +
nn::OCR_Objects *detect(image::Image &img, float thresh = 0.3, float box_thresh = 0.6, maix::image::Fit fit = maix::image::FIT_CONTAIN, bool char_box = false)
+
+
+

recognize

+ +
def recognize(self, img: maix.image.Image, box_points: list[int] = []) -> OCR_Object
+
+

Only recognize, not detect

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: image to recognize characters from, can be a standard cropped character image,
if the cropped image is not standard, you can use box_points to assign where the characters' 4 corners are.
box_points: list type, length must be 8 or 0, default empty means do not transform the image to a standard image.
4 points' positions, format: [x1, y1, x2, y2, x3, y3, x4, y4], point 1 at the left-top, point 2 at the right-top...
char_box: Calculate every character's box, default false; if true you can get each character's box via nn.OCR_Object's char_boxes attribute.
staticFalse
+
+

C++ defination code:

+ +
nn::OCR_Object *recognize(image::Image &img, const std::vector<int> &box_points = std::vector<int>())
+
+
+
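A sketch combining detect and recognize above; the model path is hypothetical, img is a frame or loaded image, and the text accessors of nn.OCR_Object are documented with that class elsewhere, not on this page.

MaixPy example (sketch):

from maix import nn

ocr = nn.PP_OCR(model="/root/models/pp_ocr.mud")   # hypothetical model path

# Detection + recognition in one call (the det and rec attributes below indicate
# which parts the loaded model provides).
objs = ocr.detect(img, thresh=0.3, box_thresh=0.6)
print("text regions found:", len(objs))

# Recognition only, on an already cropped text image; pass 8 box_points (4 corners)
# if the crop is skewed, or leave it empty for a standard crop.
res = ocr.recognize(img, box_points=[])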

draw_seg_mask

+ +
def draw_seg_mask(self, img: maix.image.Image, x: int, y: int, seg_mask: maix.image.Image, threshold: int = 127) -> None
+
+

Draw segmentation on image

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: image object, maix.image.Image type.
seg_mask: segmentation mask image by detect method, a grayscale image
threshold: only mask's value > threshold will be draw on image, value from 0 to 255.
staticFalse
+
+

C++ defination code:

+ +
void draw_seg_mask(image::Image &img, int x, int y, image::Image &seg_mask, int threshold = 127)
+
+
+

input_size

+ +
def input_size(self) -> maix.image.Size
+
+

Get model input size

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size
staticFalse
+
+

C++ defination code:

+ +
image::Size input_size()
+
+
+

input_width

+ +
def input_width(self) -> int
+
+

Get model input width

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of width
staticFalse
+
+

C++ defination code:

+ +
int input_width()
+
+
+

input_height

+ +
def input_height(self) -> int
+
+

Get model input height

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmodel input size of height
staticFalse
+
+

C++ defination code:

+ +
int input_height()
+
+
+

input_format

+ +
def input_format(self) -> maix.image.Format
+
+

Get input image format

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returninput image format, image::Format type.
staticFalse
+
+

C++ defination code:

+ +
image::Format input_format()
+
+
+

mean

+

Get mean value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> mean
+
+
+

scale

+

Get scale value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> scale
+
+
+

rec_mean

+

Get mean value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> rec_mean
+
+
+

rec_scale

+

Get scale value, list type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<float> rec_scale
+
+
+

labels

+

labels (charactors)

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::vector<std::string> labels
+
+
+

det

+

model have detect model

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
bool det
+
+
+

rec

+

model have recognize model

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
bool rec
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/nn/F.html b/maixpy/api/maix/nn/F.html new file mode 100644 index 00000000..de1dad72 --- /dev/null +++ b/maixpy/api/maix/nn/F.html @@ -0,0 +1,356 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.nn.F - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.nn.F

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.nn.F module

+
+

You can use maix.nn.F to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

softmax

+ +
def softmax(tensor: maix.tensor.Tensor, replace: bool) -> maix.tensor.Tensor
+
+

Softmax, only support 1D tensor, multi-dimension tensor will be treated as 1D tensor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
paramtensor: input tensor
replace: change input tensor data directly, if not, will create a new tensor
throwIf arg error, will raise err.Exception error
returnoutput tensor, if arg replace is true, return the arg tensor's address.
If not replace, return a new object, so in C++ you should delete it manually in this case!
+
+

C++ defination code:

+ +
tensor::Tensor *softmax(tensor::Tensor *tensor, bool replace)
+
+
+
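A small sketch of the replace semantics described above; constructing the input tensor is documented in maix.tensor, so `t` is just a placeholder here.

MaixPy example (sketch):

from maix.nn import F

# `t` is a 1-D maix.tensor.Tensor of raw scores (see maix.tensor for how to create one).
out = F.softmax(t, False)   # replace=False: returns a new tensor (delete it manually in C++)
F.softmax(t, True)          # replace=True: modifies t in place and returns it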

Class

+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/peripheral.html b/maixpy/api/maix/peripheral.html new file mode 100644 index 00000000..506a6d4a --- /dev/null +++ b/maixpy/api/maix/peripheral.html @@ -0,0 +1,381 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.peripheral - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.peripheral

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Chip's peripheral driver

+
+

You can use maix.peripheral to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
modulebrief
keymaix.peripheral.key module
i2cmaix.peripheral.i2c module
spimaix.peripheral.spi module
pwmmaix.peripheral.pwm module
wdtmaix.peripheral.wdt module
adcmaix.peripheral.adc module
pinmapmaix.peripheral.pinmap module
uartmaix uart peripheral driver
gpiomaix.peripheral.gpio module
hidmaix.peripheral.hid module
timermaix.peripheral.timer module
+

Enum

+

Variable

+

Function

+

Class

+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/peripheral/adc.html b/maixpy/api/maix/peripheral/adc.html new file mode 100644 index 00000000..3e99074c --- /dev/null +++ b/maixpy/api/maix/peripheral/adc.html @@ -0,0 +1,539 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.peripheral.adc - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.peripheral.adc

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.peripheral.adc module

+
+

You can use maix.peripheral.adc to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

RES_BIT_8

+

8-bit resolution, supported by the actual hardware

+ + + + + + + + + + + + + + + + + +
itemdescription
value8
readonlyTrue
+
+

C++ defination code:

+ +
const int RES_BIT_8 = 8
+
+
+

RES_BIT_10

+

10-bit resolution, supported by the actual hardware

+ + + + + + + + + + + + + + + + + +
itemdescription
value10
readonlyTrue
+
+

C++ defination code:

+ +
const int RES_BIT_10 = 10
+
+
+

RES_BIT_12

+

12-bit resolution, supported by the actual hardware

+ + + + + + + + + + + + + + + + + +
itemdescription
value12
readonlyTrue
+
+

C++ defination code:

+ +
const int RES_BIT_12 = 12
+
+
+

RES_BIT_16

+

16-bit resolution, supported by the actual hardware

+ + + + + + + + + + + + + + + + + +
itemdescription
value16
readonlyTrue
+
+

C++ defination code:

+ +
const int RES_BIT_16 = 16
+
+
+

Function

+

Class

+

ADC

+

Peripheral adc class

+
+

C++ defination code:

+ +
class ADC
+
+
+

__init__

+ +
def __init__(self, pin: int, resolution: int, vref: float = -1) -> None
+
+

ADC constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampin: direction [in], adc pin, int type
resolution: direction [in], adc resolution. default is -1, means use default resolution
option:
resolution = adc.RES_BIT_8, means 8-bit resolution
resolution = adc.RES_BIT_10, means 10-bit resolution
resolution = adc.RES_BIT_12, means 12-bit resolution
resolution = adc.RES_BIT_16, means 16-bit resolution
the default resolution is determined by actual hardware.
vref: direction [in], adc refer voltage. default is -1, means use default refer voltage.
the default vref is determined by actual hardware. range: [0.0, 10.0]
staticFalse
+
+

C++ defination code:

+ +
ADC(int pin, int resolution, float vref = -1)
+
+
+

read

+ +
def read(self) -> int
+
+

read adc value

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnadc data, int type
if resolution is 8-bit, return value range is [0, 255]
if resolution is 10-bit, return value range is [0, 1023]
if resolution is 12-bit, return value range is [0, 4095]
if resolution is 16-bit, return value range is [0, 65535]
staticFalse
+
+

C++ defination code:

+ +
int read()
+
+
+

read_vol

+ +
def read_vol(self) -> float
+
+

read adc voltage

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnadc voltage, float type. The range is [0.0, vref]
staticFalse
+
+

C++ defination code:

+ +
float read_vol()
+
+
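A minimal read sketch for the two methods above; the pin number is board specific and only an illustrative choice.

MaixPy example (sketch):

from maix.peripheral import adc

a = adc.ADC(0, adc.RES_BIT_12)   # pin 0 is only an example, check your board's pinout
raw = a.read()                   # 12-bit resolution: 0 ~ 4095
vol = a.read_vol()               # voltage in [0.0, vref]
print(raw, vol)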
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/peripheral/gpio.html b/maixpy/api/maix/peripheral/gpio.html new file mode 100644 index 00000000..17481726 --- /dev/null +++ b/maixpy/api/maix/peripheral/gpio.html @@ -0,0 +1,652 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.peripheral.gpio - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.peripheral.gpio

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.peripheral.gpio module

+
+

You can use maix.peripheral.gpio to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Mode

+

GPIO mode

+ + + + + + + + + + + + + +
itemdescribe
valuesIN: input mode
OUT: output mode
OUT_OD: output open drain mode
MODE_MAX:
+
+

C++ defination code:

+ +
enum Mode
+    {
+        IN     = 0x01,     // input mode
+        OUT    = 0x02,     // output mode
+        OUT_OD = 0x03,     // output open drain mode
+        MODE_MAX
+    }
+
+
+

Pull

+

GPIO pull mode

+ + + + + + + + + + + + + +
itemdescribe
valuesPULL_NONE: pull none mode
PULL_UP: pull up mode
PULL_DOWN: pull down mode
PULL_MAX:
+
+

C++ defination code:

+ +
enum Pull
+    {
+        PULL_NONE = 0x00,  // pull none mode
+        PULL_UP   = 0x01,  // pull up mode
+        PULL_DOWN = 0x02,  // pull down mode
+        PULL_MAX
+    }
+
+
+

Variable

+

Function

+

Class

+

GPIO

+

Peripheral gpio class

+
+

C++ defination code:

+ +
class GPIO
+
+
+

__init__

+ +
def __init__(self, pin: str, mode: Mode = ..., pull: Pull = ...) -> None
+
+

GPIO constructor

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampin: direction [in], gpio pin name, string type, the same as the board's pin name, e.g. "B14" or "GPIOB14", or a number string like "10" if the board has no gpiochip name.
mode: direction [in], gpio mode. gpio.Mode type, default is gpio.Mode.IN (input) mode.
pull: direction [in], gpio pull. gpio.Pull type, default is gpio.Pull.PULL_NONE (pull none) mode.
For input mode, this will set gpio default status(value), if set to gpio.Pull.PULL_NONE, gpio value will be floating.
For output mode, this will set gpio default status(value), if set to gpio.Pull.PULL_UP, gpio value will be 1, else 0.
throwerr::Exception if open gpio device failed.
staticFalse
+
+

C++ defination code:

+ +
GPIO(std::string pin, gpio::Mode mode = gpio::Mode::IN, gpio::Pull pull = gpio::Pull::PULL_NONE)
+
+
+

value

+ +
def value(self, value: int = -1) -> int
+
+

set and get gpio value

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramvalue: direction [in], gpio value. int type.
0, means write gpio to low level
1, means write gpio to high level
-1, means read gpio value, not set
returnint type, return gpio value, can be 0 or 1
staticFalse
+
+

C++ defination code:

+ +
int value(int value = -1)
+
+
+

high

+ +
def high(self) -> None
+
+

set gpio high (value to 1)

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void high()
+
+
+

low

+ +
def low(self) -> None
+
+

set gpio low (value to 0)

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void low()
+
+
+

toggle

+ +
def toggle(self) -> None
+
+

gpio toggle

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void toggle()
+
+
+
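A small sketch of the output-mode calls above; the pin name is board specific and only an example (on some boards you also need to set the pin function with the pinmap module first).

MaixPy example (sketch):

from maix.peripheral import gpio

io = gpio.GPIO("A14", gpio.Mode.OUT)   # "A14" is an example pin name
io.high()            # drive high (1)
io.low()             # drive low (0)
io.toggle()          # invert the current level
print(io.value())    # read back: 0 or 1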

get_mode

+ +
def get_mode(self) -> Mode
+
+

gpio get mode

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
gpio::Mode get_mode()
+
+
+

get_pull

+ +
def get_pull(self) -> Pull
+
+

get gpio pull

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returngpio::Pull type
staticFalse
+
+

C++ defination code:

+ +
gpio::Pull get_pull()
+
+
+

reset

+ +
def reset(self, mode: Mode, pull: Pull) -> maix.err.Err
+
+

reset gpio

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammode: direction [in], gpio mode. gpio.Mode type
pull: direction [in], gpio pull. gpio.Pull type
For input mode, this will set gpio default status(value), if set to gpio.Pull.PULL_NONE, gpio value will be floating.
For output mode, this will set gpio default status(value), if set to gpio.Pull.PULL_UP, gpio value will be 1, else 0.
returnerr::Err type
staticFalse
+
+

C++ defination code:

+ +
err::Err reset(gpio::Mode mode, gpio::Pull pull)
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/peripheral/hid.html b/maixpy/api/maix/peripheral/hid.html new file mode 100644 index 00000000..a286a781 --- /dev/null +++ b/maixpy/api/maix/peripheral/hid.html @@ -0,0 +1,531 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.peripheral.hid - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.peripheral.hid

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.peripheral.hid module

+
+

You can use maix.peripheral.hid to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

DeviceType

+

Device enum of hid

+ + + + + + + + + + + + + +
itemdescribe
valuesDEVICE_MOUSE:
DEVICE_KEYBOARD:
DEVICE_TOUCHPAD:
+
+

C++ defination code:

+ +
enum DeviceType {
+        DEVICE_MOUSE = 0,
+        DEVICE_KEYBOARD,
+        DEVICE_TOUCHPAD
+    }
+
+
+

Variable

+

Function

+

Class

+

Hid

+

Hid class

+
+

C++ defination code:

+ +
class Hid
+
+
+

__init__

+ +
def __init__(self, device_type: DeviceType, open: bool = True) -> None
+
+

Hid Device constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdevice_type: Device type, used to select mouse, keyboard, or touchpad.
open: auto open device in constructor; if false, you need to call open() to open the device
staticFalse
+
+

C++ defination code:

+ +
Hid(hid::DeviceType device_type, bool open = true)
+
+
+

open

+ +
def open(self) -> maix.err.Err
+
+

Open hid device

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerr::Err
staticFalse
+
+

C++ defination code:

+ +
err::Err open()
+
+
+

close

+ +
def close(self) -> maix.err.Err
+
+

Close hid device

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerr::Err
staticFalse
+
+

C++ defination code:

+ +
err::Err close()
+
+
+

write

+ +
def write(self, data: list[int]) -> maix.err.Err
+
+

Write data to hid device

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdata: data to write
For the keyboard, 8 bytes of data need to be written, with the format as follows:
data = [0x00, #
0x00, #
0x00, # Key value. Refer to the "Universal Serial Bus HID Usage Tables" section of the official documentation(https://www.usb.org).
0x00, #
0x00, #
0x00, #
0x00, #
0x00] #
For the mouse, 4 bytes of data need to be written, with the format as follows:
data = [0x00, # Button state
0x00: no button pressed
0x01: press left button
0x02: press right button
0x04: press middle button
x, # X-axis relative coordinates. Signed number, positive values for x indicate movement to the right
y, # Y-axis relative coordinates. Signed number, positive values for y indicate movement downward
0x00] # Wheel movement. Signed number, positive values indicate downward movement.
For the touchpad, 6 bytes of data need to be written, with the format as follows:
data = [0x00, # Button state (0: no button pressed, 0x01: press left button, 0x10, press right button.)
x & 0xFF, (x >> 8) & 0xFF, # X-axis absolute coordinate, 0 means unused.
Note: You must map the target position to the range [0x1, 0x7FFF]. This means x value = <position_to_move> * 0x7FFF / <actual_screen_width>
y & 0xFF, (y >> 8) & 0xFF, # Y-axis absolute coordinate, 0 means unused.
Note: You must map the target position to the range [0x1, 0x7FFF]. This means y value = <position_to_move> * 0x7FFF / <actual_screen_height>
0x00, # Wheel movement. Signed number, positive values indicate downward movement.
returnerr::Err
staticFalse
+
+

C++ defination code:

+ +
err::Err write(std::vector<int> &data)
+
+
+
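A relative mouse-move sketch following the 4-byte format described above; whether the HID gadget is enabled depends on the device configuration.

MaixPy example (sketch):

from maix.peripheral import hid

mouse = hid.Hid(hid.DeviceType.DEVICE_MOUSE)   # auto opens by default
# [button state, x offset, y offset, wheel]; positive x is right, positive y is down
mouse.write([0x00, 10, -5, 0])                 # no button, move right 10 and up 5
mouse.close()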

is_opened

+ +
def is_opened(self) -> bool
+
+

Check if hid device is opened

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnbool
staticFalse
+
+

C++ defination code:

+ +
bool is_opened()
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/peripheral/i2c.html b/maixpy/api/maix/peripheral/i2c.html new file mode 100644 index 00000000..09e1812c --- /dev/null +++ b/maixpy/api/maix/peripheral/i2c.html @@ -0,0 +1,635 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.peripheral.i2c - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.peripheral.i2c

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.peripheral.i2c module

+
+

You can use maix.peripheral.i2c to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

AddrSize

+

Address size enum

+ + + + + + + + + + + + + +
itemdescribe
valuesSEVEN_BIT: 7-bit address mode
TEN_BIT: 10-bit address mode
+
+

C++ defination code:

+ +
enum AddrSize
+    {
+        SEVEN_BIT = 7,   // 7-bit address mode
+        TEN_BIT   = 10   // 10-bit address mode
+    }
+
+
+

Mode

+

I2C mode enum

+ + + + + + + + + + + + + +
itemdescribe
valuesMASTER: master mode
SLAVE: slave mode
+
+

C++ defination code:

+ +
enum Mode
+    {
+        MASTER = 0x00, // master mode
+        SLAVE = 0x01   // slave mode
+    }
+
+
+

Variable

+

Function

+

list_devices

+ +
def list_devices() -> list[int]
+
+

Get supported i2c bus devices.

+ + + + + + + + + + + + + +
itemdescription
returni2c bus devices list, int type, is the i2c bus id.
+
+

C++ defination code:

+ +
std::vector<int> list_devices()
+
+
+

Class

+

I2C

+

Peripheral i2c class

+
+

C++ defination code:

+ +
class I2C
+
+
+

__init__

+ +
def __init__(self, id: int, mode: Mode, freq: int = 100000, addr_size: AddrSize = ...) -> None
+
+

I2C Device constructor\nthis constructor will be export to MaixPy as _maix.example.Example.init

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramid: direction [in], i2c bus id, int type, e.g. 0, 1, 2
freq: direction [in], i2c clock, int type, default is 100000(100kbit/s), will auto set fast mode if freq > 100000.
mode: direction [in], mode of i2c, i2c.Mode.SLAVE or i2c.Mode.MASTER.
addr_size: direction [in], address length of i2c, i2c.AddrSize.SEVEN_BIT or i2c.AddrSize.TEN_BIT.
throwerr::Exception if open i2c device failed.
staticFalse
+
+

C++ defination code:

+ +
I2C(int id, i2c::Mode mode, int freq = 100000, i2c::AddrSize addr_size = i2c::AddrSize::SEVEN_BIT)
+
+
+

scan

+ +
def scan(self, addr: int = -1) -> list[int]
+
+

scan all i2c slave addresses on the bus

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramaddr: if not -1, only scan this addr; otherwise scan from 0x08~0x77. Default -1.
returnthe list of i2c slave address, int list type.
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> scan(int addr = -1)
+
+
+

writeto

+ +
def writeto(self, addr: int, data: maix.Bytes(bytes)) -> int
+
+

write data to i2c slave

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramaddr: direction [in], i2c slave address, int type
data: direction [in], data to write, bytes type.
Note: The range of value should be in [0,255].
returnif success, return the length of written data, error occurred will return -err::Err.
staticFalse
+
+

C++ defination code:

+ +
int writeto(int addr, const Bytes &data)
+
+
+

readfrom

+ +
def readfrom(*args, **kwargs)
+
+

read data from i2c slave

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramaddr: direction [in], i2c slave address, int type
len: direction [in], data length to read, int type
returnthe list of data read from i2c slave, bytes type, you should delete it after use in C++.
If read failed, return nullptr in C++, None in MaixPy.
staticFalse
+
+

C++ defination code:

+ +
Bytes* readfrom(int addr, int len)
+
+
+

writeto_mem

+ +
def writeto_mem(self, addr: int, mem_addr: int, data: maix.Bytes(bytes), mem_addr_size: int = 8, mem_addr_le: bool = False) -> int
+
+

write data to i2c slave's memory address

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramaddr: direction [in], i2c slave address, int type
mem_addr: direction [in], memory address want to write, int type.
data: direction [in], data to write, bytes type.
mem_addr_size: direction [in], memory address size, default is 8.
mem_addr_le: direction [in], memory address little endian, default is false, that is send high byte first.
returndata length written if success, error occurred will return -err::Err.
staticFalse
+
+

C++ defination code:

+ +
int writeto_mem(int addr, int mem_addr, const Bytes &data, int mem_addr_size = 8, bool mem_addr_le = false)
+
+
+

readfrom_mem

+ +
def readfrom_mem(*args, **kwargs)
+
+

read data from i2c slave

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramaddr: direction [in], i2c slave address, int type
mem_addr: direction [in], memory address want to read, int type.
len: direction [in], data length to read, int type
mem_addr_size: direction [in], memory address size, default is 8.
mem_addr_le: direction [in], memory address little endian, default is false, that is send high byte first.
returnthe list of data read from i2c slave, bytes type, you should delete it after use in C++.
If read failed, return nullptr in C++, None in MaixPy.
staticFalse
+
+

C++ defination code:

+ +
Bytes* readfrom_mem(int addr, int mem_addr, int len, int mem_addr_size = 8, bool mem_addr_le = false)
+
+
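A master-mode sketch tying writeto_mem and readfrom_mem together; the bus id and slave/register addresses are illustrative only.

MaixPy example (sketch):

from maix.peripheral import i2c

bus = i2c.I2C(1, i2c.Mode.MASTER)            # bus 1 is only an example, see list_devices()
print(bus.scan())                            # all slave addresses found on the bus
bus.writeto_mem(0x34, 0x10, bytes([0x01]))   # write 0x01 to register 0x10 of slave 0x34
data = bus.readfrom_mem(0x34, 0x10, 1)       # read 1 byte back (None on failure)
print(data)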
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/peripheral/key.html b/maixpy/api/maix/peripheral/key.html new file mode 100644 index 00000000..5920a7ba --- /dev/null +++ b/maixpy/api/maix/peripheral/key.html @@ -0,0 +1,575 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.peripheral.key - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.peripheral.key

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.peripheral.key module

+
+

You can use maix.peripheral.key to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Keys

+

Keys enum, id the same as linux input.h(input-event-codes.h)

+ + + + + + + + + + + + + +
itemdescribe
valuesKEY_NONE:
KEY_ESC:
KEY_OK:
KEY_OPTION:
KEY_NEXT:
KEY_PREV:
+
+

C++ defination code:

+ +
enum Keys{
+        KEY_NONE   = 0x000,
+        KEY_ESC    = 0x001,
+        KEY_OK     = 0x160,
+        KEY_OPTION = 0x165,
+        KEY_NEXT   = 0x197,
+        KEY_PREV   = 0x19c
+    }
+
+
+

State

+

Key state enum

+ + + + + + + + + + + + + +
itemdescribe
valuesKEY_RELEASED:
KEY_PRESSED:
+
+

C++ defination code:

+ +
enum State{
+        KEY_RELEASED  = 0,
+        KEY_PRESSED   = 1,
+    }
+
+
+

Variable

+

Function

+

add_default_listener

+

Add the default listener; if you want the app to exit when the OK button is pressed, just call this function.\nThis function is called automatically in MaixPy's startup code, so you don't need to call it in MaixPy.\nCreating a Key object automatically calls rm_default_listener() to cancel the default OK button behavior.\nWhen the OK button is pressed, a SIGINT signal is raised and app.set_exit_flag(True) is called.

+
+

C++ defination code:

+ +
void add_default_listener()
+
+
+

rm_default_listener

+

Remove the default listener; if you want to cancel the default OK button behavior (exit the app), just call this function.

+
+

C++ defination code:

+ +
void rm_default_listener()
+
+
+

Class

+

Key

+

Key input class

+
+

C++ defination code:

+ +
class Key
+
+
+

__init__

+ +
def __init__(self, callback: typing.Callable[[int, int], None] = None, open: bool = True) -> None
+
+

Key Device constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcallback: when a key is triggered and callback is not empty (empty is None in MaixPy, nullptr in C++),
the callback will be called with the arguments key (key.Keys) and value (key.State).
If set to null, you can get the key value with the read() function.
The callback is called in a standalone thread, so you can block for a while inside it, but be careful when operating on shared data.
open: automatically open the device in the constructor; if false, you need to call open() to open the device
staticFalse
+
+

C++ defination code:

+ +
Key(std::function<void(int, int)> callback = nullptr, bool open = true)
+
+
+

open

+ +
def open(self) -> maix.err.Err
+
+

Open (initialize) the key device; if already opened, it will be closed first and then reopened.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerr::Err type, err.Err.ERR_NONE means success
staticFalse
+
+

C++ defination code:

+ +
err::Err open()
+
+
+

close

+ +
def close(self) -> maix.err.Err
+
+

Close key device

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerr::Err type, err.Err.ERR_NONE means success
staticFalse
+
+

C++ defination code:

+ +
err::Err close()
+
+
+

is_opened

+ +
def is_opened(self) -> bool
+
+

Check key device is opened

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnbool type, true means opened, false means closed
staticFalse
+
+

C++ defination code:

+ +
bool is_opened()
+
+
+

read

+ +
def read(self) -> tuple[int, int]
+
+

Read key input and return key and value; if a callback is set, DO NOT call this function manually.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnlist type, first is key(maix.key.Keys), second is value(maix.key.State), if no key input, return [0, 0]
throwIf read failed, will throw maix.err.Exception.
staticFalse
+
+

C++ defination code:

+ +
std::pair<int, int> read()
+
+
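Example (MaixPy), a minimal sketch of the two usage styles of Key (callback or polling read()); app.need_exit() from maix.app is assumed for the main loop and is not documented in this section:
import time
from maix import app
from maix.peripheral import key

def on_key(k: int, state: int):
    # k is a key.Keys value, state is a key.State value
    print("key:", k, "state:", state)

key_dev = key.Key(on_key)        # callback style; constructing Key cancels the default OK-button handler
# polling style instead: key_dev = key.Key(None), then call key_dev.read() in a loop
while not app.need_exit():
    time.sleep(0.1)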
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/peripheral/pinmap.html b/maixpy/api/maix/peripheral/pinmap.html new file mode 100644 index 00000000..54f5d21c --- /dev/null +++ b/maixpy/api/maix/peripheral/pinmap.html @@ -0,0 +1,415 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.peripheral.pinmap - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.peripheral.pinmap

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.peripheral.pinmap module

+
+

You can use maix.peripheral.pinmap to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

get_pins

+ +
def get_pins() -> list[str]
+
+

Get all pins of devices

+ + + + + + + + + + + + + +
itemdescription
returnpin name list, string type.
+
+

C++ defination code:

+ +
std::vector<std::string> get_pins()
+
+
+

get_pin_functions

+ +
def get_pin_functions(pin: str) -> list[str]
+
+

Get all functions of a pin

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
parampin: pin name, string type.
returnfunction list, function name is string type.
throwIf the pin name is invalid, an err.Err.ERR_ARGS error will be thrown.
+
+

C++ defination code:

+ +
std::vector<std::string> get_pin_functions(const std::string &pin)
+
+
+

set_pin_function

+ +
def set_pin_function(pin: str, func: str) -> maix.err.Err
+
+

Set function of a pin

+ + + + + + + + + + + + + + + + + +
itemdescription
parampin: pin name, string type.
func: which function should this pin use.
returnerr.Err.ERR_NONE if set successfully, otherwise an error code.
+
+

C++ defination code:

+ +
err::Err set_pin_function(const std::string &pin, const std::string &func)
+
+
+
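Example (MaixPy), a minimal sketch using only the functions above; real pin and function names depend on the board, so they are taken from the returned lists rather than hard-coded:
from maix import err
from maix.peripheral import pinmap

pins = pinmap.get_pins()                        # all pin names of the device
print(pins)
funcs = pinmap.get_pin_functions(pins[0])       # functions available on the first pin
print(funcs)
e = pinmap.set_pin_function(pins[0], funcs[0])  # switch the pin to one of its own functions
print("ok" if e == err.Err.ERR_NONE else e)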

Class

+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/peripheral/pwm.html b/maixpy/api/maix/peripheral/pwm.html new file mode 100644 index 00000000..3e17158c --- /dev/null +++ b/maixpy/api/maix/peripheral/pwm.html @@ -0,0 +1,583 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.peripheral.pwm - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.peripheral.pwm

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.peripheral.pwm module

+
+

You can use maix.peripheral.pwm to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

Class

+

PWM

+

Peripheral pwm class

+
+

C++ defination code:

+ +
class PWM
+
+
+

__init__

+ +
def __init__(self, id: int, freq: int = 1000, duty: float = 0, enable: bool = True, duty_val: int = -1) -> None
+
+

PWM constructor

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramid: direction [in], pwm id, int type, e.g. 0, 1, 2 etc.
freq: direction [in], pwm frequency, unit: Hz. int type. default is 1000
duty: direction [in], pwm duty. double type. range is [0, 100], default is 0.
enable: direction [in], enable pwm output right now. bool type. default is true, if false, you need to call enable() to enable pwm output.
duty_val: direction [in], pwm duty value, int type. default -1 means not set and auto calculate by freq and duty.
This arg directly set pwm duty value, if set, will ignore duty arg.
duty_val = duty / 100 * T_ns, T_ns = 1 / freq * 1000000000.
throwIf args error or init pwm failed, will throw err::Exception
staticFalse
+
+

C++ defination code:

+ +
PWM(int id, int freq = 1000, double duty = 0, bool enable = true, int duty_val = -1)
+
+
+

duty

+ +
def duty(self, duty: float = -1) -> float
+
+

get or set pwm duty

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramduty: direction [in], pwm duty, double type, value in [0, 100], default -1 means only read.
returncurrent duty, float type, if set and set failed will return -err::Err
staticFalse
+
+

C++ defination code:

+ +
double duty(double duty = -1)
+
+
+
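Example (MaixPy), a minimal sketch of the constructor and duty(); the PWM id 7 is a hypothetical channel, check the board's pinmap before using it:
from maix.peripheral import pwm

out = pwm.PWM(7, freq=1000, duty=50, enable=True)  # hypothetical PWM id 7, 1 kHz, 50% duty
print("current duty:", out.duty())                 # duty() with the default -1 only reads
out.duty(25)                                       # change duty to 25%
out.disable()                                      # stop output; enable() restarts it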

duty_val

+ +
def duty_val(self, duty_val: int = -1) -> int
+
+

get or set pwm duty value

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramduty_val: direction [in], pwm duty value. int type. default is -1
duty_val > 0 means set duty_val
duty_val == -1 or not set, return current duty_val
returnint type
when get duty_val, return current duty_val, else return -err::Err code.
staticFalse
+
+

C++ defination code:

+ +
int duty_val(int duty_val = -1)
+
+
+
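A concrete reading of the duty_val formula given in the constructor above (the frequency and duty values are arbitrary examples):
freq = 1000                          # Hz
duty = 25                            # percent
T_ns = 1 / freq * 1000000000         # period: 1_000_000 ns
duty_val = int(duty / 100 * T_ns)    # 250_000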

freq

+ +
def freq(self, freq: int = -1) -> int
+
+

get or set pwm frequency

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramfreq: direction [in], pwm frequency. int type. default is -1
freq >= 0, set freq
freq == -1 or not set, return current freq
returnint type, current freq, if set and set failed will return -err::Err
staticFalse
+
+

C++ defination code:

+ +
int freq(int freq = -1)
+
+
+

enable

+ +
def enable(self) -> maix.err.Err
+
+

set pwm enable

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerr::Err type, err.Err.ERR_NONE means success
staticFalse
+
+

C++ defination code:

+ +
err::Err enable()
+
+
+

disable

+ +
def disable(self) -> maix.err.Err
+
+

set pwm disable

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerr::Err type, err.Err.ERR_NONE means success
staticFalse
+
+

C++ defination code:

+ +
err::Err disable()
+
+
+

is_enabled

+ +
def is_enabled(self) -> bool
+
+

get pwm enable status

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnbool type, true means enable, false means disable
staticFalse
+
+

C++ defination code:

+ +
bool is_enabled()
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/peripheral/spi.html b/maixpy/api/maix/peripheral/spi.html new file mode 100644 index 00000000..e9030052 --- /dev/null +++ b/maixpy/api/maix/peripheral/spi.html @@ -0,0 +1,540 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.peripheral.spi - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.peripheral.spi

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.peripheral.spi module

+
+

You can use maix.peripheral.spi to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Mode

+

SPI mode enum

+ + + + + + + + + + + + + +
itemdescribe
valuesMASTER: spi master mode
SLAVE: spi slave mode
+
+

C++ defination code:

+ +
enum Mode
+    {
+        MASTER = 0x0, // spi master mode
+        SLAVE = 0x1,  // spi slave mode
+    }
+
+
+

Variable

+

Function

+

Class

+

SPI

+

Peripheral spi class

+
+

C++ defination code:

+ +
class SPI
+
+
+

__init__

+ +
def __init__(self, id: int, mode: Mode, freq: int, polarity: int = 0, phase: int = 0, bits: int = 8, cs_enable: int = 0, soft_cs: bool = False, cs: str = 'GPIOA19') -> None
+
+

SPI constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramid: direction [in], spi bus id, int type
mode: direction [in], mode of spi, spi.Mode type, spi.Mode.MASTER or spi.Mode.SLAVE.
freq: direction [in], freq of spi, int type
polarity: direction [in], polarity of spi, 0 means idle level of clock is low, 1 means high, int type, default is 0.
phase: direction [in], phase of spi, 0 means data is captured on the first edge of the SPI clock cycle, 1 means second, int type, default is 0.
bits: direction [in], bits of spi, int type, default is 8.
cs_enable: direction [in], cs pin active level, default is 0(low)
soft_cs: direction [in], do not use hardware cs, bool type; if set to true, you can operate the cs pin manually via gpio.
cs: direction [in], soft cs pin name, std::string type, default is "GPIOA19"; if the SPI supports multiple hardware cs pins, you can set it to another value.
staticFalse
+
+

C++ defination code:

+ +
SPI(int id, spi::Mode mode, int freq, int polarity = 0, int phase = 0,
+            int bits = 8, unsigned char cs_enable=0, bool soft_cs = false, std::string cs = "GPIOA19")
+
+
+

read

+ +
def read(*args, **kwargs)
+
+

read data from spi

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramlength: direction [in], read length, int type
returnbytes data, Bytes type in C++, bytes type in MaixPy. You need to delete it manually after use in C++.
staticFalse
+
+

C++ defination code:

+ +
Bytes *read(int length)
+
+
+

write

+ +
def write(self, data: maix.Bytes(bytes)) -> int
+
+

write data to spi

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdata: direction [in], data to write, Bytes type in C++, bytes type in MaixPy
returnwrite length, int type, if write failed, return -err::Err code.
staticFalse
+
+

C++ defination code:

+ +
int write(Bytes *data)
+
+
+

write_read

+ +
def write_read(*args, **kwargs)
+
+

write data to spi and read data from spi at the same time.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdata: direction [in], data to write, Bytes type in C++, bytes type in MaixPy
read_len: direction [in], read length, int type, should > 0.
returnread data, Bytes type in C++, bytes type in MaixPy. You need to delete it manually after use in C++.
staticFalse
+
+

C++ defination code:

+ +
Bytes *write_read(Bytes *data, int read_len)
+
+
+
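Example (MaixPy), a minimal sketch of write() and write_read(); the bus id 4 and the command byte 0x9f are hypothetical values:
from maix.peripheral import spi

dev = spi.SPI(4, spi.Mode.MASTER, 400000)   # hypothetical bus id, master mode, 400 kHz
dev.write(b"\x9f")                          # send one (hypothetical) command byte
rx = dev.write_read(b"\x9f", 3)             # send the command and read 3 bytes in one transfer
print(rx)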

is_busy

+ +
def is_busy(self) -> bool
+
+

get busy status of spi

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnbusy status, bool type
staticFalse
+
+

C++ defination code:

+ +
bool is_busy()
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/peripheral/timer.html b/maixpy/api/maix/peripheral/timer.html new file mode 100644 index 00000000..35a31fd8 --- /dev/null +++ b/maixpy/api/maix/peripheral/timer.html @@ -0,0 +1,365 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.peripheral.timer - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.peripheral.timer

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.peripheral.timer module

+
+

You can use maix.peripheral.timer to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

Class

+

TIMER

+

Peripheral timer class

+
+

C++ defination code:

+ +
class TIMER
+
+
+

__init__

+ +
def __init__(self) -> None
+
+

TIMER constructor

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
TIMER()
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/peripheral/uart.html b/maixpy/api/maix/peripheral/uart.html new file mode 100644 index 00000000..45512b93 --- /dev/null +++ b/maixpy/api/maix/peripheral/uart.html @@ -0,0 +1,973 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.peripheral.uart - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.peripheral.uart

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix uart peripheral driver

+
+

You can use maix.peripheral.uart to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

PARITY

+

uart parity enum

+ + + + + + + + + + + + + +
itemdescribe
valuesPARITY_NONE: no parity
PARITY_ODD: odd parity
PARITY_EVEN: even parity
PARITY_MAX:
+
+

C++ defination code:

+ +
enum PARITY
+    {
+        PARITY_NONE = 0x00,  // no parity
+        PARITY_ODD  = 0x01,  // odd parity
+        PARITY_EVEN = 0x02,  // even parity
+        PARITY_MAX
+    }
+
+
+

STOP

+

uart stop bits

+ + + + + + + + + + + + + +
itemdescribe
valuesSTOP_1: 1 stop bit
STOP_2: 2 stop bits
STOP_1_5: 1.5 stop bits
STOP_MAX:
+
+

C++ defination code:

+ +
enum STOP
+    {
+        STOP_1   = 0x01,  // 1 stop bit
+        STOP_2   = 0x02,  // 2 stop bits
+        STOP_1_5 = 0x03,  // 1.5 stop bits
+        STOP_MAX
+    }
+
+
+

BITS

+

uart data bits

+ + + + + + + + + + + + + +
itemdescribe
valuesBITS_5: 5 data bits
BITS_6: 6 data bits
BITS_7: 7 data bits
BITS_8: 8 data bits
BITS_MAX:
+
+

C++ defination code:

+ +
enum BITS
+    {
+        BITS_5 = 5,  // 5 data bits
+        BITS_6 = 6,  // 6 data bits
+        BITS_7 = 7,  // 7 data bits
+        BITS_8 = 8,  // 8 data bits
+        BITS_MAX
+    }
+
+
+

FLOW_CTRL

+

uart flow control

+ + + + + + + + + + + + + +
itemdescribe
valuesFLOW_CTRL_NONE: no flow control
FLOW_CTRL_HW: hardware flow control
FLOW_CTRL_MAX:
+
+

C++ defination code:

+ +
enum FLOW_CTRL
+    {
+        FLOW_CTRL_NONE = 0,  // no flow control
+        FLOW_CTRL_HW   = 1,  // hardware flow control
+        FLOW_CTRL_MAX
+    }
+
+
+

Variable

+

Function

+

list_devices

+ +
def list_devices() -> list[str]
+
+

Get supported uart ports.

+ + + + + + + + + + + + + +
itemdescription
returnuart ports list, string type.
+
+

C++ defination code:

+ +
std::vector<std::string> list_devices()
+
+
+

Class

+

UART

+

maix uart peripheral driver

+
+

C++ defination code:

+ +
class UART : public comm::CommBase
+
+
+

__init__

+ +
def __init__(self, port: str = '', baudrate: int = 115200, databits: BITS = ..., parity: PARITY = ..., stopbits: STOP = ..., flow_ctrl: FLOW_CTRL = ...) -> None
+
+

UART constructor. You need to call open() to open the device.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramport: uart port. string type, can get it by uart.list_devices().
If empty, will not open device in constructor, default empty.
if not empty, will auto open device in constructor, open fail will throw err.Exception.
baudrate: baudrate of uart. int type, default 115200.
databits: data bits, values @see uart.BITS
parity: parity, values @see uart.PARITY
stopbits: stop bits, values @see uart.STOP
flow_ctrl: flow control, values @see uart.FLOW_CTRL
staticFalse
+
+

C++ defination code:

+ +
UART(const std::string &port = "", int baudrate = 115200, uart::BITS databits = uart::BITS_8,
+            uart::PARITY parity = uart::PARITY_NONE, uart::STOP stopbits = uart::STOP_1,
+            uart::FLOW_CTRL flow_ctrl = uart::FLOW_CTRL_NONE)
+
+
+

set_port

+ +
def set_port(self, port: str) -> maix.err.Err
+
+

Set port

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramport: uart port. string type, can get it by uart.list_devices().
returnset port error code, err.Err type.
staticFalse
+
+

C++ defination code:

+ +
err::Err set_port(const std::string &port)
+
+
+

get_port

+ +
def get_port(self) -> str
+
+

Get port

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnuart port, string type.
staticFalse
+
+

C++ defination code:

+ +
std::string get_port()
+
+
+

set_baudrate

+ +
def set_baudrate(self, baudrate: int) -> maix.err.Err
+
+

Set baud rate

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parambaudrate: baudrate of uart. int type, default 115200.
returnset baud rate error code, err.Err type.
staticFalse
+
+

C++ defination code:

+ +
err::Err set_baudrate(int baudrate)
+
+
+

get_baudrate

+ +
def get_baudrate(self) -> int
+
+

Get baud rate

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnbaud rate, int type.
staticFalse
+
+

C++ defination code:

+ +
int get_baudrate()
+
+
+

open

+ +
def open(self) -> maix.err.Err
+
+

Open uart device, before open, port must be set in constructor or by set_port().\nIf already opened, do nothing and return err.ERR_NONE.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnopen device error code, err.Err type.
staticFalse
+
+

C++ defination code:

+ +
err::Err open()
+
+
+

is_open

+ +
def is_open(self) -> bool
+
+

Check if device is opened.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returntrue if opened, false if not opened.
staticFalse
+
+

C++ defination code:

+ +
bool is_open()
+
+
+

close

+ +
def close(self) -> maix.err.Err
+
+

Close uart device, if already closed, do nothing and return err.ERR_NONE.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnclose device error code, err.Err type.
staticFalse
+
+

C++ defination code:

+ +
err::Err close()
+
+
+

set_received_callback

+ +
def set_received_callback(self, callback: typing.Callable[[UART, maix.Bytes(bytes)], None]) -> None
+
+

Set received callback function

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcallback: function to call when received data
staticFalse
+
+

C++ defination code:

+ +
void set_received_callback(std::function<void(uart::UART&, Bytes&)> callback)
+
+
+
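Example (MaixPy), a minimal sketch of the callback receive style; the port is taken from list_devices() instead of being hard-coded:
from maix.peripheral import uart

def on_received(serial: uart.UART, data: bytes):
    # called from the receive thread with the newly received bytes
    print("received:", data)
    serial.write(data)               # echo the data back

ports = uart.list_devices()
dev = uart.UART(ports[0], 115200)    # first supported port, 115200 baud
dev.set_received_callback(on_received)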

write_str

+ +
def write_str(self, str: str) -> int
+
+

Send string data

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramstr: string data
returnsent data length, < 0 means error, value is -err.Err.
staticFalse
+
+

C++ defination code:

+ +
int write_str(const std::string &str)
+
+
+

write

+ +
def write(self, data: maix.Bytes(bytes)) -> int
+
+

Send data to uart

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdata: direction [in], data to send, bytes type. If you want to send str type, use str.encode() to convert.
returnsent length, int type, if < 0 means error, value is -err.Err.
staticFalse
+
+

C++ defination code:

+ +
int write(Bytes &data)
+
+
+

available

+ +
def available(self, timeout: int = 0) -> int
+
+

Check if data available or wait data available.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramtimeout: unit ms, timeout to wait data, default 0.
0 means check data available and return immediately,
> 0 means wait until data available or timeout.
-1 means wait until data available.
returnavailable data number, 0 if timeout or no data, <0 if error, value is -err.Err, can be err::ERR_IO, err::ERR_CANCEL, err::ERR_NOT_OPEN.
throwerr.Exception if fatal error.
staticFalse
+
+

C++ defination code:

+ +
int available(int timeout = 0)
+
+
+

read

+ +
def read(*args, **kwargs)
+
+

Recv data from uart

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramlen: max data length to receive, default -1.
-1 means read all data currently in the uart receive buffer.
>0 means read up to len bytes.
other values are invalid.
timeout: unit ms, timeout to receive data, default 0.
0 means read data in uart receive buffer and return immediately,
-1 means block until read len data,
>0 means block until read len data or timeout.
returnreceived data, bytes type.
Attention, you need to delete the returned object yourself in C++.
throwRead failed will raise err.Exception error.
staticFalse
+
+

C++ defination code:

+ +
Bytes *read(int len = -1, int timeout = 0)
+
+
+
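Example (MaixPy), a minimal polling sketch combining write_str(), available() and read(); "/dev/ttyS0" is a hypothetical port, use uart.list_devices() to find a real one:
from maix.peripheral import uart

dev = uart.UART("/dev/ttyS0", 115200)   # hypothetical port name
dev.write_str("hello\r\n")
if dev.available(2000) > 0:             # wait up to 2000 ms for incoming data
    data = dev.read()                   # read whatever is in the receive buffer
    print("got:", data)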

readline

+ +
def readline(*args, **kwargs)
+
+

Read line from uart, that is read until '\n' or '\r\n'.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramtimeout: unit ms, timeout to receive data, default -1 means block until read '\n' or '\r\n'.
> 0 means block until read '\n' or '\r\n' or timeout.
returnreceived data, bytes type. On timeout, the data received so far is returned even though '\n' or '\r\n' was not read.
e.g. if we want to read b'123\n' but only b'12' has arrived when the timeout expires, b'12' is returned.
staticFalse
+
+

C++ defination code:

+ +
Bytes *readline(int timeout = -1)
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/peripheral/wdt.html b/maixpy/api/maix/peripheral/wdt.html new file mode 100644 index 00000000..b9232a25 --- /dev/null +++ b/maixpy/api/maix/peripheral/wdt.html @@ -0,0 +1,460 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.peripheral.wdt - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.peripheral.wdt

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.peripheral.wdt module

+
+

You can use maix.peripheral.wdt to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

Class

+

WDT

+

Peripheral wdt class

+
+

C++ defination code:

+ +
class WDT
+
+
+

__init__

+ +
def __init__(self, id: int, feed_ms: int) -> None
+
+

WDT constructor; after construction, the wdt starts automatically.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramid: direction [in], id of wdt, int type
feed_ms: direction [in], feed interval, int type, unit is ms, you must feed wdt in this interval, or system will restart.
staticFalse
+
+

C++ defination code:

+ +
WDT(int id, int feed_ms)
+
+
+

feed

+ +
def feed(self) -> int
+
+

feed wdt

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code, if feed success, return err::ERR_NONE
staticFalse
+
+

C++ defination code:

+ +
int feed()
+
+
+
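Example (MaixPy), a minimal sketch; the watchdog id 0 and the 3000 ms feed interval are arbitrary example values:
import time
from maix.peripheral import wdt

dog = wdt.WDT(0, 3000)      # the watchdog starts automatically after construction
while True:
    # the real work goes here and must finish within the feed interval
    time.sleep(1)
    dog.feed()              # returns err.Err.ERR_NONE (success) or an error code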

stop

+ +
def stop(self) -> int
+
+

stop wdt

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
int stop()
+
+
+

restart

+ +
def restart(self) -> int
+
+

restart wdt, stop and start watchdog timer.

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
int restart()
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/protocol.html b/maixpy/api/maix/protocol.html new file mode 100644 index 00000000..b77f5e46 --- /dev/null +++ b/maixpy/api/maix/protocol.html @@ -0,0 +1,1103 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.protocol - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.protocol

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.protocol module

+
+

You can use maix.protocol to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

CMD

+

protocol cmd, more doc see MaixCDK document's convention doc

+ + + + + + + + + + + + + + + + + +
itemdescribe
notemax app custom CMD value should < CMD_APP_MAX
valuesCMD_APP_MAX: 200, max app custom CMD value should < CMD_APP_MAX
CMD_SET_REPORT: set auto upload data mode
CMD_APP_LIST:
CMD_START_APP:
CMD_EXIT_APP:
CMD_CUR_APP_INFO:
CMD_APP_INFO:
CMD_KEY:
CMD_TOUCH:
+
+

C++ defination code:

+ +
enum CMD
+        {
+            CMD_APP_MAX = 0xC8,     //  200, max app custom CMD value should < CMD_APP_MAX
+
+            CMD_SET_REPORT   = 0xF8, // set auto upload data mode
+            CMD_APP_LIST     = 0xF9,
+            CMD_START_APP    = 0xFA,
+            CMD_EXIT_APP     = 0xFB,
+            CMD_CUR_APP_INFO = 0xFC,
+            CMD_APP_INFO     = 0xFD,
+            CMD_KEY          = 0xFE,
+            CMD_TOUCH        = 0xFF,
+        }
+
+
+

FLAGS

+

protocol flags, more doc see MaixCDK document's convention doc

+ + + + + + + + + + + + + +
itemdescribe
valuesFLAG_REQ:
FLAG_RESP:
FLAG_IS_RESP_MASK:
FLAG_RESP_OK:
FLAG_RESP_ERR:
FLAG_RESP_OK_MASK:
FLAG_REPORT:
FLAG_REPORT_MASK:
FLAG_VERSION_MASK:
+
+

C++ defination code:

+ +
enum FLAGS
+        {
+            FLAG_REQ = 0x00,
+            FLAG_RESP = 0x80,
+            FLAG_IS_RESP_MASK = 0x80,
+
+            FLAG_RESP_OK = 0x40,
+            FLAG_RESP_ERR = 0x00,
+            FLAG_RESP_OK_MASK = 0x40,
+
+            FLAG_REPORT = 0x20,
+            FLAG_REPORT_MASK = 0x20,
+
+            FLAG_VERSION_MASK = 0x03
+        }
+
+
+

Variable

+

VERSION

+

protocol version

+ + + + + + + + + + + + + + + + + +
itemdescription
value1
readonlyTrue
+
+

C++ defination code:

+ +
const uint8_t VERSION = 1
+
+
+ +

HEADER

+
protocol header

+ + + + + + + + + + + + + +
itemdescription
readonlyFalse
+
+

C++ defination code:

+ +
extern uint32_t HEADER
+
+
+

Function

+

crc16_IBM

+ +
def crc16_IBM(data: maix.Bytes(bytes)) -> int
+
+

CRC16-IBM

+ + + + + + + + + + + + + + + + + +
itemdescription
paramdata: data, bytes type.
returnCRC16-IBM value, uint16_t type.
+
+

C++ defination code:

+ +
uint16_t crc16_IBM(const Bytes *data)
+
+
+
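Example (MaixPy), a one-line use of crc16_IBM on an arbitrary payload:
from maix import protocol

frame = b"\x01\x02\x03\x04"            # arbitrary example payload
print(hex(protocol.crc16_IBM(frame)))  # 16-bit CRC of the payload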

Class

+

MSG

+

protocol msg

+
+

C++ defination code:

+ +
class MSG
+
+
+

version

+

protocol version

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
uint8_t version
+
+
+

resp_ok

+

Indicates the response message type: true means the CMD is valid and was processed correctly (only for response messages).

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
uint8_t resp_ok
+
+
+

has_been_replied

+

Flag indicating whether the CMD has been processed and a response has been sent to the CMD sender.\nE.g. CMD_START_APP is automatically processed in the CommProtocol.get_msg function,\nso the returned msg will have this flag set to true.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
bool has_been_replied{false}
+
+
+

cmd

+

CMD value

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
uint8_t cmd
+
+
+

is_resp

+

message is response or not, contrast with is_req

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
bool is_resp
+
+
+

body_len

+

Message body length, read only, use set_body() to update

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
attentionDO NOT manually change this value
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int body_len
+
+
+

encode_resp_ok

+ +
def encode_resp_ok(*args, **kwargs)
+
+

Encode response ok(success) message

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parambody: response body, can be null
returnencoded data, if nullptr, means error, and the error code is -err.Err
staticFalse
+
+

C++ defination code:

+ +
Bytes *encode_resp_ok(Bytes *body = nullptr)
+
+
+

encode_report

+ +
def encode_report(*args, **kwargs)
+
+

Encode proactively report message

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parambody: report body, can be null
returnencoded data, if nullptr, means error, and the error code is -err.Err
staticFalse
+
+

C++ defination code:

+ +
Bytes *encode_report(Bytes *body = nullptr)
+
+
+

encode_resp_err

+ +
def encode_resp_err(*args, **kwargs)
+
+

Encode response error message

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcode: error code
msg: error message
returnencoded data, if nullptr, means error, and the error code is -err.Err
staticFalse
+
+

C++ defination code:

+ +
Bytes *encode_resp_err(err::Err code, const std::string &msg)
+
+
+

set_body

+ +
def set_body(self, body_new: maix.Bytes(bytes)) -> None
+
+

Update message body

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parambody_new: new body data
staticFalse
+
+

C++ defination code:

+ +
void set_body(Bytes *body_new)
+
+
+

get_body

+ +
def get_body(*args, **kwargs)
+
+

Get message body

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnmessage body, bytes type
staticFalse
+
+

C++ defination code:

+ +
Bytes *get_body()
+
+
+

Protocol

+

Communicate protocol

+
+

C++ defination code:

+ +
class Protocol
+
+
+

__init__

+ +
def __init__(self, buff_size: int = 1024, header: int = 3148663466) -> None
+
+

Construct a new Protocol object

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parambuff_size: Data queue buffer size
staticFalse
+
+

C++ defination code:

+ +
Protocol(int buff_size = 1024, uint32_t header=maix::protocol::HEADER)
+
+
+

buff_size

+ +
def buff_size(self) -> int
+
+

Data queue buffer size

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
int buff_size()
+
+
+

push_data

+ +
def push_data(self, new_data: maix.Bytes(bytes)) -> maix.err.Err
+
+

Add data to data queue

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramnew_data: new data
returnerror code, maybe err.Err.ERR_BUFF_FULL
staticFalse
+
+

C++ defination code:

+ +
err::Err push_data(const Bytes *new_data)
+
+
+

decode

+ +
def decode(self, new_data: maix.Bytes(bytes) = None) -> MSG
+
+

Decode data in data queue and return a message

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramnew_data: new data to add to the data queue; if null, only decode.
returndecoded message, if nullptr, means no message decoded.
staticFalse
+
+

C++ defination code:

+ +
protocol::MSG *decode(const Bytes *new_data = nullptr)
+
+
+
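Example (MaixPy), a minimal sketch of the decode / respond flow; where the raw bytes come from (UART, TCP, ...) is left open, so the transport calls are only placeholders:
from maix import protocol

p = protocol.Protocol(buff_size=1024)
raw = b""                                # placeholder: bytes received from some transport
msg = p.decode(raw)                      # feed new data and try to decode one message
if msg is not None:
    print("cmd:", msg.cmd, "is_resp:", msg.is_resp)
    resp = p.encode_resp_ok(msg.cmd)     # build an OK response frame for the same CMD
    # send resp back over the same transport here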

encode_resp_ok

+ +
def encode_resp_ok(*args, **kwargs)
+
+

Encode response ok(success) message to buffer

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcmd: CMD value
body: response body, can be null
returnencoded data, if nullptr, means error, and the error code is -err.Err
staticFalse
+
+

C++ defination code:

+ +
Bytes *encode_resp_ok(uint8_t cmd, Bytes *body = nullptr)
+
+
+

encode_report

+ +
def encode_report(*args, **kwargs)
+
+

Encode proactively report message to buffer

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcmd: CMD value
body: report body, can be null
returnencoded data, if nullptr, means error, and the error code is -err.Err
staticFalse
+
+

C++ defination code:

+ +
Bytes *encode_report(uint8_t cmd, Bytes *body = nullptr)
+
+
+

encode_resp_err

+ +
def encode_resp_err(*args, **kwargs)
+
+

Encode response error message to buffer

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcmd: CMD value
code: error code
msg: error message
returnencoded data, if nullptr, means error, and the error code is -err.Err
staticFalse
+
+

C++ defination code:

+ +
Bytes *encode_resp_err(uint8_t cmd, err::Err code, const std::string &msg)
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/rtmp.html b/maixpy/api/maix/rtmp.html new file mode 100644 index 00000000..f2f354b2 --- /dev/null +++ b/maixpy/api/maix/rtmp.html @@ -0,0 +1,645 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.rtmp - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.rtmp

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.rtmp module

+
+

You can use maix.rtmp to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

TagType

+

Video type

+ + + + + + + + + + + + + +
itemdescribe
valuesTAG_NONE:
TAG_VIDEO:
TAG_AUDIO:
TAG_SCRIPT:
+
+

C++ defination code:

+ +
enum TagType
+    {
+        TAG_NONE,
+        TAG_VIDEO,
+        TAG_AUDIO,
+        TAG_SCRIPT,
+    }
+
+
+

Variable

+

Function

+

Class

+

Rtmp

+

Rtmp class

+
+

C++ defination code:

+ +
class Rtmp
+
+
+

__init__

+ +
def __init__(self, host: str = 'localhost', port: int = 1935, app: str = '', stream: str = '', bitrate: int = 1000000) -> None
+
+

Construct a new Rtmp object

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
noteRtmp url : rtmp://host:prot/app/stream
example:
r = Rtmp("localhost", 1935, "live", "stream")
means rtmp url is rtmp://localhost:1935/live/stream
paramhost: rtmp ip
port: rtmp port, default is 1935.
app: rtmp app name
stream: rtmp stream name
bitrate: rtmp bitrate, default is 1000 * 1000
staticFalse
+
+

C++ defination code:

+ +
Rtmp(std::string host = "localhost", int port = 1935, std::string app = std::string(), std::string stream = std::string(), int bitrate = 1000 * 1000)
+
+
+

push_video

+ +
def push_video(self) -> int
+
+

Get bitrate

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnbitrate
staticFalse
+
+

C++ defination code:

+ +
int bitrate()
+
+
+

bind_camera

+ +
def bind_camera(self, cam: maix.camera.Camera) -> maix.err.Err
+
+

Bind camera

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
noteIf the cam object is bound, the cam object cannot be used elsewhere.
paramcam: camera object
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ defination code:

+ +
err::Err bind_camera(camera::Camera *cam)
+
+
+

get_camera

+ +
def get_camera(self) -> maix.camera.Camera
+
+

If you bind a camera, return the camera object.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnCamera object
staticFalse
+
+

C++ defination code:

+ +
camera::Camera *get_camera()
+
+
+

start

+ +
def start(self, path: str = '') -> maix.err.Err
+
+

Start push stream

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
noteonly support flv file now
parampath: file path; if a file path is passed, the file is pushed cyclically, otherwise if a camera is bound, the camera image is pushed.
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ defination code:

+ +
err::Err start(std::string path = std::string())
+
+
+
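Example (MaixPy), a minimal push sketch; the host, app/stream names and camera resolution are hypothetical, and camera.Camera is assumed to take width/height as documented in the camera module:
from maix import camera, rtmp

cam = camera.Camera(1280, 720)                         # assumed resolution
r = rtmp.Rtmp("192.168.0.30", 1935, "live", "stream")  # pushes to rtmp://192.168.0.30:1935/live/stream
r.bind_camera(cam)
r.start()            # push bound camera images; pass a .flv path instead to push a file cyclically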

stop

+ +
def stop(self) -> maix.err.Err
+
+

Stop push stream

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ defination code:

+ +
err::Err stop()
+
+
+

get_path

+ +
def get_path(self) -> str
+
+

Get the file path of the push stream

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnfile path
staticFalse
+
+

C++ defination code:

+ +
std::string get_path()
+
+
+

is_started

+

Check whether push streaming has started

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnIf rtmp thread is running, returns true
staticFalse
+
+

C++ defination code:

+ +
bool is_started()
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/rtsp.html b/maixpy/api/maix/rtsp.html new file mode 100644 index 00000000..2239ba30 --- /dev/null +++ b/maixpy/api/maix/rtsp.html @@ -0,0 +1,948 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.rtsp - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.rtsp

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.rtsp module

+
+

You can use maix.rtsp to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

RtspStreamType

+

The stream type of rtsp

+ + + + + + + + + + + + + +
itemdescribe
valuesRTSP_STREAM_NONE: format invalid
RTSP_STREAM_H265:
+
+

C++ defination code:

+ +
enum RtspStreamType
+    {
+        RTSP_STREAM_NONE = 0,  // format invalid
+        RTSP_STREAM_H265,
+    }
+
+
+

Variable

+

Function

+

Class

+

Region

+

Region class

+
+

C++ defination code:

+ +
class Region
+
+
+

__init__

+ +
def __init__(self, x: int, y: int, width: int, height: int, format: maix.image.Format, camera: maix.camera.Camera) -> None
+
+

Construct a new Region object

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx: region coordinate x
y: region coordinate y
width: region width
height: region height
format: region format
camera: bind region to camera
staticFalse
+
+

C++ defination code:

+ +
Region(int x, int y, int width, int height, image::Format format, camera::Camera *camera)
+
+
+

get_canvas

+ +
def get_canvas(self) -> maix.image.Image
+
+

Return an image object from region

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnimage object
staticFalse
+
+

C++ defination code:

+ +
image::Image *get_canvas()
+
+
+

update_canvas

+ +
def update_canvas(self) -> maix.err.Err
+
+

Update canvas

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code
staticFalse
+
+

C++ defination code:

+ +
err::Err update_canvas()
+
+
+

Rtsp

+

Rtsp class

+
+

C++ defination code:

+ +
class Rtsp
+
+
+

__init__

+ +
def __init__(self, ip: str = '', port: int = 8554, fps: int = 30, stream_type: RtspStreamType = ...) -> None
+
+

Construct a new Rtsp object

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramip: rtsp ip
port: rtsp port
fps: rtsp fps
stream_type: rtsp stream type
staticFalse
+
+

C++ defination code:

+ +
Rtsp(std::string ip = std::string(), int port = 8554, int fps = 30, rtsp::RtspStreamType stream_type = rtsp::RtspStreamType::RTSP_STREAM_H265)
+
+
+

start

+ +
def start(self) -> maix.err.Err
+
+

start rtsp

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ defination code:

+ +
err::Err start()
+
+
+

stop

+

stop rtsp

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ defination code:

+ +
err::Err stop()
+
+
+

bind_camera

+ +
def bind_camera(self, camera: maix.camera.Camera) -> maix.err.Err
+
+

Bind camera

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcamera: camera object
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ defination code:

+ +
err::Err bind_camera(camera::Camera *camera)
+
+
+

write

+ +
def write(self, frame: ...) -> maix.err.Err
+
+

Write data to rtsp

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramframe: video frame data
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ defination code:

+ +
err::Err write(video::Frame &frame)
+
+
+

get_url

+ +
def get_url(self) -> str
+
+

Get url of rtsp

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnurl of rtsp
staticFalse
+
+

C++ defination code:

+ +
std::string get_url()
+
+
+
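Example (MaixPy), a minimal streaming sketch; the camera resolution is a hypothetical choice and camera.Camera is assumed from the camera module:
from maix import camera, rtsp

cam = camera.Camera(1280, 720)          # assumed resolution
server = rtsp.Rtsp(port=8554, fps=30)
server.bind_camera(cam)
server.start()
print("play with:", server.get_url())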

get_urls

+ +
def get_urls(self) -> list[str]
+
+

Get url list of rtsp

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnurl list of rtsp
staticFalse
+
+

C++ defination code:

+ +
std::vector<std::string> get_urls()
+
+
+

to_camera

+ +
def to_camera(self) -> maix.camera.Camera
+
+

Get camera object from rtsp

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returncamera object
staticFalse
+
+

C++ defination code:

+ +
camera::Camera *to_camera()
+
+
+

rtsp_is_start

+ +
def rtsp_is_start(self) -> bool
+
+

return rtsp start status

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returntrue means rtsp is start, false means rtsp is stop.
staticFalse
+
+

C++ defination code:

+ +
bool rtsp_is_start()
+
+
+

add_region

+ +
def add_region(self, x: int, y: int, width: int, height: int, format: maix.image.Format = ...) -> Region
+
+

return a region object, you can draw image on the region.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramx: region coordinate x
y: region coordinate y
width: region width
height: region height
format: region format, support Format::FMT_BGRA8888 only
returnthe region object
staticFalse
+
+

C++ defination code:

+ +
rtsp::Region *add_region(int x, int y, int width, int height, image::Format format = image::Format::FMT_BGRA8888)
+
+
+

update_region

+ +
def update_region(self, region: Region) -> maix.err.Err
+
+

update and show region

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code
staticFalse
+
+

C++ defination code:

+ +
err::Err update_region(rtsp::Region &region)
+
+
+

del_region

+ +
def del_region(self, region: Region) -> maix.err.Err
+
+

del region

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code
staticFalse
+
+

C++ defination code:

+ +
err::Err del_region(rtsp::Region *region)
+
+
+

draw_rect

+ +
def draw_rect(self, id: int, x: int, y: int, width: int, height: int, color: maix.image.Color, thickness: int = 1) -> maix.err.Err
+
+

Draw a rectangle on the canvas

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramid: region id
x: rectangle coordinate x
y: rectangle coordinate y
width: rectangle width
height: rectangle height
color: rectangle color
thickness: rectangle thickness. If you set it to -1, the rectangle will be filled.
returnerror code
staticFalse
+
+

C++ defination code:

+ +
err::Err draw_rect(int id, int x, int y, int width, int height, image::Color color, int thickness = 1)
+
+
+

draw_string

+ +
def draw_string(self, id: int, x: int, y: int, str: str, color: maix.image.Color, size: int = 16, thickness: int = 1) -> maix.err.Err
+
+

Draw a string on the canvas

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramid: region id
x: string coordinate x
y: string coordinate y
str: string
color: string color
size: string size
thickness: string thickness
returnerror code
staticFalse
+
+

C++ defination code:

+ +
err::Err draw_string(int id, int x, int y, const char *str, image::Color color, int size = 16, int thickness = 1)
+
+
+ + +
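Example (MaixPy), a sketch of overlaying content on the stream with a Region, continuing the Rtsp example above; the region size and position are arbitrary:
region = server.add_region(0, 0, 320, 64)   # 320x64 BGRA8888 overlay at the top-left corner
canvas = region.get_canvas()                # an image.Image you can draw on with the maix.image API
# ... draw text / shapes on canvas here ...
region.update_canvas()                      # push the drawn content to the stream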
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/sys.html b/maixpy/api/maix/sys.html new file mode 100644 index 00000000..e8a4ce60 --- /dev/null +++ b/maixpy/api/maix/sys.html @@ -0,0 +1,770 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.sys - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.sys

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.sys module

+
+

You can use maix.sys to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

os_version

+ +
def os_version() -> str
+
+

Get system version

+ + + + + + + + + + + + + +
itemdescription
returnversion string, e.g. "maixcam-2024-08-13-maixpy-v4.4.20"
+
+

C++ defination code:

+ +
std::string os_version()
+
+
+

maixpy_version

+ +
def maixpy_version() -> str
+
+

Get MaixPy version, if get failed will return empty string.

+ + + + + + + + + + + + + +
itemdescription
returnversion string, e.g. "4.4.21"
+
+

C++ defination code:

+ +
std::string maixpy_version()
+
+
+

device_name

+ +
def device_name() -> str
+
+

Get device name

+ + + + + + + + + + + + + +
itemdescription
returndevice name, e.g. "MaixCAM"
+
+

C++ defination code:

+ +
std::string device_name()
+
+
+

host_name

+ +
def host_name() -> str
+
+

Get host name

+ + + + + + + + + + + + + +
itemdescription
returnhost name, e.g. "maixcam-2f9f"
+
+

C++ defination code:

+ +
std::string host_name()
+
+
+

host_domain

+ +
def host_domain() -> str
+
+

Get host domain

+ + + + + + + + + + + + + +
itemdescription
returnhost domain, e.g. "maixcam-2f9f.local"
+
+

C++ defination code:

+ +
std::string host_domain()
+
+
+

ip_address

+ +
def ip_address() -> dict[str, str]
+
+

Get ip address

+ + + + + + + + + + + + + +
itemdescription
returnip address, dict type, e.g. {"eth0": "192.168.0.195", "wlan0": "192.168.0.123", "usb0": "10.47.159.1"}
+
+

C++ defination code:

+ +
std::map<std::string, std::string> ip_address()
+
+
+

mac_address

+ +
def mac_address() -> dict[str, str]
+
+

Get mac address

+ + + + + + + + + + + + + +
itemdescription
returnmac address, dict type, e.g. {"eth0": "00:0c:29:2f:9f:00", "wlan0": "00:0c:29:2f:9f:01", "usb0": "00:0c:29:2f:9f:02"}
+
+

C++ defination code:

+ +
std::map<std::string, std::string> mac_address()
+
+
+

device_key

+ +
def device_key() -> str
+
+

Get device key, can be unique id of device

+ + + + + + + + + + + + + +
itemdescription
returndevice key, 32 bytes hex string, e.g. "1234567890abcdef1234567890abcdef"
+
+

C++ defination code:

+ +
std::string device_key()
+
+
+

memory_info

+ +
def memory_info() -> dict[str, int]
+
+

Get memory info

+ + + + + + + + + + + + + +
itemdescription
returnmemory info, dict type, e.g. {"total": 1024, "used": 512, "hw_total": 25610241024}
total: total memory size in Byte.
used: used memory size in Byte.
hw_total: total memory size in Byte of hardware, the total <= hw_total,
OS kernel may reserve some memory for some hardware like camera, npu, display etc.
+
+

C++ defination code:

+ +
std::map<std::string, int> memory_info()
+
+
+

bytes_to_human

+ +
def bytes_to_human(bytes: int, precision: int = 2, base: int = 1024, unit: str = 'B', sep: str = ' ') -> str
+
+

Bytes to human readable string

+ + + + + + + + + + + + + + + + + +
itemdescription
parambytes: bytes size, e.g. 1234 B = 1234/1024 = 1.205 KB
precision: decimal precision, default 2
base: base number, default 1024
unit: unit string, e.g. "B"
sep: separator string, e.g. " "
returnhuman readable string, e.g. "1.21 KB"
+
+

C++ defination code:

+ +
std::string bytes_to_human(unsigned long long bytes, int precision = 2, int base = 1024, const std::string &unit = "B", const std::string &sep = " ")
+
+
+
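Example (MaixPy), a minimal sketch printing a few of the values above (the module is imported under an alias to avoid shadowing Python's built-in sys):
from maix import sys as maix_sys

mem = maix_sys.memory_info()
print("memory:", maix_sys.bytes_to_human(mem["used"]),
      "/", maix_sys.bytes_to_human(mem["total"]))
print("cpu temp:", maix_sys.cpu_temp())
print("ip addresses:", maix_sys.ip_address())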

cpu_freq

+ +
def cpu_freq() -> dict[str, int]
+
+

Get CPU frequency

+ + + + + + + + + + + + + +
itemdescription
returnCPU frequency, dict type, e.g. {"cpu0": 1000000000, "cpu1": 1000000000}
+
+

C++ defination code:

+ +
std::map<std::string, unsigned long> cpu_freq()
+
+
+

cpu_temp

+ +
def cpu_temp() -> dict[str, float]
+
+

Get CPU temperature

+ + + + + + + + + + + + + +
itemdescription
returnCPU temperature, unit dgree, dict type, e.g. {"cpu": 50.0, "cpu0": 50, "cpu1": 50}
+
+

C++ defination code:

+ +
std::map<std::string, float> cpu_temp()
+
+
+

cpu_usage

+ +
def cpu_usage() -> dict[str, float]
+
+

Get CPU usage

+ + + + + + + + + + + + + +
itemdescription
returnCPU usage, dict type, e.g. {"cpu": 50.0, "cpu0": 50, "cpu1": 50}
+
+

C++ defination code:

+ +
std::map<std::string, float> cpu_usage()
+
+
+

npu_freq

+ +
def npu_freq() -> dict[str, int]
+
+

Get NPU frequency

+ + + + + + + + + + + + + +
itemdescription
returnNPU frequency, dict type, e.g. {"npu0": 500000000}
+
+

C++ defination code:

+ +
std::map<std::string, unsigned long> npu_freq()
+
+
+

disk_usage

+ +
def disk_usage(path: str = '/') -> dict[str, int]
+
+

Get disk usage

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath:: disk path, default "/"
returndisk usage, dict type, e.g. {"total": 1024, "used": 512}
+
+

C++ defination code:

+ +
std::map<std::string, unsigned long long> disk_usage(const std::string &path = "/")
+
+
+

disk_partitions

+ +
def disk_partitions(only_disk: bool = True) -> list[dict[str, str]]
+
+

Get disk partition and mount point info

+ + + + + + + + + + + + + + + + + +
itemdescription
paramonly_disk: only return real disks; tmpfs, sysfs etc. are not returned, default true.
returndisk partition and mount point info, list type, e.g. [{"device": "/dev/mmcblk0p1", "mountpoint": "/mnt/sdcard", "fstype": "vfat"}]
+
+

C++ defination code:

+ +
std::vector<std::map<std::string, std::string>> disk_partitions(bool only_disk = true)
+
+
+

register_default_signal_handle

+

register default signal handle

+
+

C++ defination code:

+ +
void register_default_signal_handle()
+
+
+

poweroff

+ +
def poweroff() -> None
+
+

Power off device

+
+

C++ defination code:

+ +
void poweroff()
+
+
+

reboot

+ +
def reboot() -> None
+
+

Reboot the device (power off and then power on)

+
+

C++ defination code:

+ +
void reboot()
+
+
+

Class

+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/tensor.html b/maixpy/api/maix/tensor.html new file mode 100644 index 00000000..db1b614f --- /dev/null +++ b/maixpy/api/maix/tensor.html @@ -0,0 +1,1256 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.tensor - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.tensor

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.tensor module

+
+

You can use maix.tensor to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

DType

+

Tensor data types

+ + + + + + + + + + + + + +
itemdescribe
valuesUINT8:
INT8:
UINT16:
INT16:
UINT32:
INT32:
FLOAT16:
FLOAT32:
FLOAT64:
BOOL:
DTYPE_MAX:
+
+

C++ defination code:

+ +
enum DType
+        {
+            UINT8 = 0,
+            INT8,
+            UINT16,
+            INT16,
+            UINT32,
+            INT32,
+            FLOAT16,
+            FLOAT32,
+            FLOAT64,
+            BOOL,
+            // STRING,
+            // OBJECT,
+            DTYPE_MAX
+        }
+
+
+

Variable

+

dtype_size

+

Tensor data type size in bytes

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
attentionIt's a copy of this variable in MaixPy,
so changing it in C++ (e.g. updating the var in a hello function) will not affect the var in MaixPy.
So we add const to this var to avoid this mistake.
value{
1, // UINT8
1, // INT8
2, // UINT16
2, // INT16
4, // UINT32
4, // INT32
2, // FLOAT16
4, // FLOAT32
8, // FLOAT64
1, // BOOL
// 1, // STRING
// 1, // OBJECT
0
}
readonlyTrue
+
+

C++ defination code:

+ +
const std::vector<int> dtype_size = {
+            1, // UINT8
+            1, // INT8
+            2, // UINT16
+            2, // INT16
+            4, // UINT32
+            4, // INT32
+            2, // FLOAT16
+            4, // FLOAT32
+            8, // FLOAT64
+            1, // BOOL
+            // 1, // STRING
+            // 1, // OBJECT
+            0
+        }
+
+
+

dtype_name

+

Tensor data type name

+ + + + + + + + + + + + + + + + + +
itemdescription
value{
"uint8",
"int8",
"uint16",
"int16",
"uint32",
"int32",
"float16",
"float32",
"float64",
"bool",
// "string",
// "object",
"invalid"
}
readonlyTrue
+
+

C++ defination code:

+ +
const std::vector<std::string> dtype_name = {
+            "uint8",
+            "int8",
+            "uint16",
+            "int16",
+            "uint32",
+            "int32",
+            "float16",
+            "float32",
+            "float64",
+            "bool",
+            // "string",
+            // "object",
+            "invalid"
+        }
+
+
+

Function

+

tensor_from_numpy_float32

+ +
def tensor_from_numpy_float32(array: numpy.ndarray[numpy.float32], copy: bool = True) -> Tensor
+
+

float32 type numpy ndarray object to tensor.Tensor object.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramarray: numpy array object.
copy: if true, will alloc new buffer and copy data, else will directly use array's data buffer, default true.
Use this arg carefully: when set to false, the array MUST be kept alive until the returned tensor is no longer used, or the program will crash.
returntensor.Tensor object.
+
+

C++ defination code:

+ +
tensor::Tensor *tensor_from_numpy_float32(py::array_t<float, py::array::c_style> array, bool copy = true)
+
+
+

tensor_from_numpy_uint8

+ +
def tensor_from_numpy_uint8(array: numpy.ndarray[numpy.uint8], copy: bool = True) -> Tensor
+
+

uint8 type numpy ndarray object to tensor.Tensor object.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramarray: numpy array object.
copy: if true, will alloc new buffer and copy data, else will directly use array's data buffer, default true.
Use this arg carefully: when set to false, the array MUST be kept alive until the returned tensor is no longer used, or the program will crash.
returntensor.Tensor object.
+
+

C++ defination code:

+ +
tensor::Tensor *tensor_from_numpy_uint8(py::array_t<uint8_t, py::array::c_style> array, bool copy = true)
+
+
+

tensor_from_numpy_int8

+ +
def tensor_from_numpy_int8(array: numpy.ndarray[numpy.int8], copy: bool = True) -> Tensor
+
+

int8 type numpy ndarray object to tensor.Tensor object.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramarray: numpy array object.
copy: if true, will alloc new buffer and copy data, else will directly use array's data buffer, default true.
Use this arg carefully: when set to false, the array MUST be kept alive until the returned tensor is no longer used, or the program will crash.
returntensor.Tensor object.
+
+

C++ defination code:

+ +
tensor::Tensor *tensor_from_numpy_int8(py::array_t<int8_t, py::array::c_style> array, bool copy = true)
+
+
+

tensor_to_numpy_float32

+ +
def tensor_to_numpy_float32(t: Tensor, copy: bool = True) -> numpy.ndarray[numpy.float32]
+
+

tensor.Tensor object to float32 type numpy ndarray object.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramt: tensor.Tensor object.
copy: Whether alloc new Tensor and copy data or not,
if not copied, the array object will directly use the arg's data buffer, which is faster, but changing the array will affect the arg's data; default true.
returnnumpy array object
+
+

C++ defination code:

+ +
py::array_t<float, py::array::c_style> tensor_to_numpy_float32(tensor::Tensor *t, bool copy = true)
+
+
+

tensor_to_numpy_uint8

+ +
def tensor_to_numpy_uint8(t: Tensor, copy: bool = True) -> numpy.ndarray[numpy.uint8]
+
+

tensor.Tensor object to uint8 type numpy ndarray object.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramt: tensor.Tensor object.
copy: Whether alloc new Tensor and copy data or not,
if not copied, the array object will directly use the arg's data buffer, which is faster, but changing the array will affect the arg's data; default true.
returnnumpy array object
+
+

C++ defination code:

+ +
py::array_t<uint8_t, py::array::c_style> tensor_to_numpy_uint8(tensor::Tensor *t, bool copy = true)
+
+
+

tensor_to_numpy_int8

+ +
def tensor_to_numpy_int8(t: Tensor, copy: bool = True) -> numpy.ndarray[numpy.int8]
+
+

tensor.Tensor object to int8 type numpy ndarray object.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramt: tensor.Tensor object.
copy: whether to allocate a new buffer and copy the data.
If not copied, the returned array directly uses the tensor's data buffer, which is faster, but changing the array will also change the tensor's data. Default is true.
returnnumpy array object
+
+

C++ defination code:

+ +
py::array_t<int8_t, py::array::c_style> tensor_to_numpy_int8(tensor::Tensor *t, bool copy = true)
+
+
+

Class

+

Tensor

+

Tensor class

+
+

C++ defination code:

+ +
class Tensor
+
+
+

__init__

+ +
def __init__(self, shape: list[int], dtype: DType) -> None
+
+

Tensor constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramshape: tensor shape, a int list
dtype: tensor element data type, see DType of this module
staticFalse
+
+

C++ defination code:

+ +
Tensor(std::vector<int> shape, tensor::DType dtype)
+
+
+

to_str

+ +
def to_str(self) -> str
+
+

To string

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
std::string to_str()
+
+
+

__str__

+ +
def __str__(self) -> str
+
+

To string

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
std::string __str__()
+
+
+

shape

+ +
def shape(self) -> list[int]
+
+

get tensor shape

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returntensor shape, a int list
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> shape()
+
+
+

expand_dims

+ +
def expand_dims(self, axis: int) -> None
+
+

expand tensor shape

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramaxis: axis to expand
staticFalse
+
+

C++ defination code:

+ +
void expand_dims(int axis)
+
+
+

reshape

+ +
def reshape(self, shape: list[int]) -> None
+
+

reshape tensor shape; if the total size does not match, it will throw an err::Exception

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramshape: new shape
staticFalse
+
+

C++ defination code:

+ +
void reshape(std::vector<int> shape)
+
+
+
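As a small illustration of the shape-manipulation methods above (a sketch; the DType member name FLOAT32 is assumed from the dtype_name list at the top of this module):

from maix import tensor

t = tensor.Tensor([2, 3], tensor.DType.FLOAT32)
print(t.shape())        # [2, 3]
t.reshape([3, 2])       # total size must stay the same, otherwise err::Exception
t.expand_dims(0)        # shape becomes [1, 3, 2]
print(t)                # uses __str__ / to_str()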

flatten

+ +
def flatten(self) -> None
+
+

Flatten tensor shape to 1D

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void flatten()
+
+
+

dtype

+ +
def dtype(self) -> DType
+
+

get tensor data type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returntensor data type, see DType of this module
staticFalse
+
+

C++ defination code:

+ +
tensor::DType  dtype()
+
+
+

to_float_list

+ +
def to_float_list(self) -> list[float]
+
+

get tensor data and return a list

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnlist type data
staticFalse
+
+

C++ defination code:

+ +
std::valarray<float>* to_float_list()
+
+
+

argmax

+ +
def argmax(self, axis: int = 65535) -> Tensor
+
+

argmax of tensor

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramaxis: by default the index is into the flattened array, otherwise along the specified axis; an invalid axis will throw an err::Exception
returnargmax result, you need to delete it after use in C++.
staticFalse
+
+

C++ defination code:

+ +
tensor::Tensor *argmax(int axis = 0xffff)
+
+
+

argmax1

+ +
def argmax1(self) -> int
+
+

argmax1: index of the maximum value of the flattened data

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnargmax result, int type
staticFalse
+
+

C++ defination code:

+ +
int argmax1()
+
+
+

Tensors

+

Tensors

+
+

C++ defination code:

+ +
class Tensors
+
+
+

__init__

+ +
def __init__(self) -> None
+
+

Constructor of Tensors

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
Tensors()
+
+
+

add_tensor

+ +
def add_tensor(self, key: str, tensor: Tensor, copy: bool, auto_delete: bool) -> None
+
+

Add tensor

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void add_tensor(const std::string &key, tensor::Tensor *tensor, bool copy, bool auto_delete)
+
+
+

rm_tensor

+ +
def rm_tensor(self, key: str) -> None
+
+

Remove tensor

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void rm_tensor(const std::string &key)
+
+
+

clear

+ +
def clear(self) -> None
+
+

Clear tensors

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void clear()
+
+
+

get_tensor

+ +
def get_tensor(self, key: str) -> Tensor
+
+

Get tensor by key

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
tensor::Tensor &get_tensor(const std::string &key)
+
+
+

__getitem__

+ +
def __getitem__(self, key: str) -> Tensor
+
+

Operator []

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
tensor::Tensor &operator[](const std::string &key)
+
+
+

__len__

+ +
def __len__(self) -> int
+
+

Size

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
size_t size()
+
+
+

keys

+ +
def keys(self) -> list[str]
+
+

Get names

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
std::vector<std::string> keys()
+
+
+

tensors

+

Tensors data, dict type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::map<std::string, tensor::Tensor*> tensors
+
+
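A short sketch of how the Tensors container ties the methods above together (the DType member name and the exact semantics of the copy/auto_delete flags are assumptions here):

from maix import tensor

ts = tensor.Tensors()
t = tensor.Tensor([1, 10], tensor.DType.FLOAT32)
ts.add_tensor("output0", t, True, True)   # copy and auto_delete flags as documented above
print(len(ts), ts.keys())                 # 1 ['output0']
out = ts["output0"]                       # same as ts.get_tensor("output0")
print(out.shape())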
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/thread.html b/maixpy/api/maix/thread.html new file mode 100644 index 00000000..91655f58 --- /dev/null +++ b/maixpy/api/maix/thread.html @@ -0,0 +1,460 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.thread - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.thread

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.thread module

+
+

You can use maix.thread to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

Class

+

Thread

+

thread class

+
+

C++ defination code:

+ +
class Thread
+
+
+

__init__

+ +
def __init__(self, func: typing.Callable[[capsule], None], args: capsule = None) -> None
+
+

create thread

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramfunc: direction [in], thread function, one args parameter, void* type, no return value
args: direction [in], thread function parameter
staticFalse
+
+

C++ defination code:

+ +
Thread(std::function<void(void *)> func, void *args = nullptr)
+
+
+

join

+ +
def join(self) -> None
+
+

wait for the thread to exit

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void join()
+
+
+

detach

+ +
def detach(self) -> None
+
+

Detach the thread; detach will automatically start the thread and you can no longer use join.

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void detach()
+
+
+

joinable

+ +
def joinable(self) -> bool
+
+

Check if thread is joinable

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returntrue if thread is joinable
staticFalse
+
+

C++ defination code:

+ +
bool joinable()
+
+
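A minimal MaixPy sketch of the lifecycle described above (create, run, join):

from maix import thread

def worker(args):
    # args is the optional parameter passed to Thread(); None by default
    print("worker running")

t = thread.Thread(worker)   # args defaults to None
t.join()                    # block until worker() returns
# Alternatively, call t.detach() instead of join() to let it run independently.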
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/time.html b/maixpy/api/maix/time.html new file mode 100644 index 00000000..7bfcbe91 --- /dev/null +++ b/maixpy/api/maix/time.html @@ -0,0 +1,1623 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.time - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.time

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.time module

+
+

You can use maix.time to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

time

+ +
def time() -> float
+
+

Get current time in s

+ + + + + + + + + + + + + + + + + +
itemdescription
returncurrent time in s, double type
attentionIf the board has no RTC battery, when it boots up and connects to the network,
the system will automatically sync time via NTP, which causes time() to change drastically,
e.g. before NTP: 10(s), after: 1718590639.5149617(s).
If you want to calculate a time interval, please use ticks_s().
+
+

C++ defination code:

+ +
double time()
+
+
+

time_ms

+ +
def time_ms() -> int
+
+

Get current time in ms

+ + + + + + + + + + + + + + + + + +
itemdescription
returncurrent time in ms, uint64_t type
attentionIf the board has no RTC battery, when it boots up and connects to the network,
the system will automatically sync time via NTP, which causes time() to change drastically,
e.g. before NTP: 10000(ms), after: 1718590639000(ms)
If you want to calculate a time interval, please use ticks_ms().
+
+

C++ defination code:

+ +
uint64_t time_ms()
+
+
+

time_s

+ +
def time_s() -> int
+
+

Get current time in s

+ + + + + + + + + + + + + + + + + +
itemdescription
returncurrent time in s, uint64_t type
attentionIf the board has no RTC battery, when it boots up and connects to the network,
the system will automatically sync time via NTP, which causes time() to change drastically,
e.g. before NTP: 10(s), after: 1718590639(s)
+
+

C++ defination code:

+ +
uint64_t time_s()
+
+
+

time_us

+ +
def time_us() -> int
+
+

Get current time in us

+ + + + + + + + + + + + + + + + + +
itemdescription
returncurrent time in us, uint64_t type
attentionIf the board has no RTC battery, when it boots up and connects to the network,
the system will automatically sync time via NTP, which causes time() to change drastically,
e.g. before NTP: 10000000(us), after: 1718590639000000(us)
If you want to calculate a time interval, please use ticks_us().
+
+

C++ defination code:

+ +
uint64_t time_us()
+
+
+

time_diff

+ +
def time_diff(last: float, now: float = -1) -> float
+
+

Calculate time difference in s.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
paramlast: last time
now: current time, can be -1 if use current time
returntime difference
attentionIf the board has no RTC battery, when it boots up and connects to the network,
the system will automatically sync time via NTP, which causes time() to change drastically and can produce a very large difference,
e.g. before NTP: 1(s), after: 1718590500(s)
If you want to calculate a time interval, please use ticks_diff().
+
+

C++ defination code:

+ +
double time_diff(double last, double now = -1)
+
+
+

ticks_s

+ +
def ticks_s() -> float
+
+

Get current time in s since bootup

+ + + + + + + + + + + + + +
itemdescription
returncurrent time in s, double type
+
+

C++ defination code:

+ +
double ticks_s()
+
+
+

ticks_ms

+ +
def ticks_ms() -> int
+
+

Get current time in ms since bootup

+ + + + + + + + + + + + + +
itemdescription
returncurrent time in ms, uint64_t type
+
+

C++ defination code:

+ +
uint64_t ticks_ms()
+
+
+

ticks_us

+ +
def ticks_us() -> int
+
+

Get current time in us since bootup

+ + + + + + + + + + + + + +
itemdescription
returncurrent time in us, uint64_t type
+
+

C++ defination code:

+ +
uint64_t ticks_us()
+
+
+

ticks_diff

+ +
def ticks_diff(last: float, now: float = -1) -> float
+
+

Calculate time difference in s.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramlast: last time
now: current time, can be -1 if use current time
returntime difference
+
+

C++ defination code:

+ +
double ticks_diff(double last, double now = -1)
+
+
+
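To measure an interval robustly (immune to the NTP jumps described in the attention notes above), use the ticks_* functions; a short sketch:

from maix import time

start = time.ticks_ms()
time.sleep_ms(100)                              # stand-in for real work
print("elapsed ms:", time.ticks_ms() - start)

start_s = time.ticks_s()
time.sleep(0.1)
print("elapsed s:", time.ticks_diff(start_s))   # now defaults to the current ticks value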

sleep

+

Sleep seconds

+ + + + + + + + + + + + + +
itemdescription
params: seconds, double type
+
+

C++ defination code:

+ +
void sleep(double s)
+
+
+

sleep_ms

+

Sleep milliseconds

+ + + + + + + + + + + + + +
itemdescription
paramms: milliseconds, uint64_t type
+
+

C++ defination code:

+ +
void sleep_ms(uint64_t ms)
+
+
+

sleep_us

+

Sleep microseconds

+ + + + + + + + + + + + + +
itemdescription
paramus: microseconds, uint64_t type
+
+

C++ defination code:

+ +
void sleep_us(uint64_t us)
+
+
+

fps

+ +
def fps() -> float
+
+

Calculate FPS since the last call to this method.
Attention: this method is not thread-safe, only call it from one thread; if you want to use it in multiple threads, please use the time.FPS class.
FPS is the average of the most recent n (buff_len) samples, and you can call fps_set_buff_len(10) to change the buffer length, default is 20.
Calling this function repeatedly calculates the fps between two calls, and you can also call fps_start() first to manually set the fps calculation start point.

+ + + + + + + + + + + + + +
itemdescription
returnfloat type, current fps since last call this method
+
+

C++ defination code:

+ +
float fps()
+
+
+

fps_start

+ +
def fps_start() -> None
+
+

Manually set fps calculation start point, then you can call fps() function to calculate fps between fps_start() and fps().

+
+

C++ defination code:

+ +
void fps_start()
+
+
+

fps_set_buff_len

+ +
def fps_set_buff_len(len: int) -> None
+
+

Set fps method buffer length, by default the buffer length is 10.

+ + + + + + + + + + + + + +
itemdescription
paramlen: Buffer length to store recent fps value.
+
+

C++ defination code:

+ +
void fps_set_buff_len(int len)
+
+
+
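A typical main-loop sketch using these module-level helpers (single-threaded, per the attention note on fps()):

from maix import time

time.fps_set_buff_len(10)     # average over the last 10 samples instead of the default
for _ in range(100):
    time.sleep_ms(10)         # stand-in for grabbing and processing a frame
    print("fps:", time.fps())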

now

+ +
def now() -> DateTime
+
+

Get current UTC date and time

+ + + + + + + + + + + + + +
itemdescription
returncurrent date and time, DateTime type
+
+

C++ defination code:

+ +
time::DateTime *now()
+
+
+

localtime

+ +
def localtime() -> DateTime
+
+

Get local time

+ + + + + + + + + + + + + +
itemdescription
returnlocal time, DateTime type
+
+

C++ defination code:

+ +
time::DateTime *localtime()
+
+
+

strptime

+ +
def strptime(str: str, format: str) -> DateTime
+
+

DateTime from string

+ + + + + + + + + + + + + + + + + +
itemdescription
paramstr: date time string
format: date time format
returnDateTime
+
+

C++ defination code:

+ +
time::DateTime *strptime(const std::string &str, const std::string &format)
+
+
+

gmtime

+ +
def gmtime(timestamp: float) -> DateTime
+
+

timestamp to DateTime(time zone is UTC (value 0))

+ + + + + + + + + + + + + + + + + +
itemdescription
paramtimestamp: double timestamp
returnDateTime
+
+

C++ defination code:

+ +
time::DateTime *gmtime(double timestamp)
+
+
+

timezone

+ +
def timezone(timezone: str = '') -> str
+
+

Set or get timezone

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
paramtimezone: string type, can be empty (default empty). If empty, only returns the current timezone. The value is a "region/city" string, e.g. Asia/Shanghai, Etc/UTC; you can get all values with the list_timezones function.
returnstring type, return current timezone setting.
attentionwhen setting a new timezone, the change does not take effect in the current process for some APIs, so you need to restart the program.
+
+

C++ defination code:

+ +
std::string timezone(const std::string &timezone = "")
+
+
+

timezone (overload 1)

+

Set or get timezone

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
paramregion: string type, which region to set, can be empty means only get current, default empty.
city: string type, which city to set, can be empty means only get current, default empty.
returnlist type, return current timezone setting, first is region, second is city.
attentionwhen setting a new timezone, the change does not take effect in the current process for some APIs, so you need to restart the program.
+
+

C++ defination code:

+ +
std::vector<std::string> timezone2(const std::string &region = "", const std::string &city = "")
+
+
+

list_timezones

+ +
def list_timezones() -> dict[str, list[str]]
+
+

List all timezone info

+ + + + + + + + + + + + + +
itemdescription
returnA dict with key are regions, and value are region's cities.
+
+

C++ defination code:

+ +
std::map<std::string, std::vector<std::string>> list_timezones()
+
+
+
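A short sketch of reading and setting the timezone with the functions above:

from maix import time

print(time.timezone())              # current setting, e.g. "Etc/UTC"
zones = time.list_timezones()       # dict: region -> list of cities
print(zones.get("Asia", [])[:3])
# time.timezone("Asia/Shanghai")    # set; restart the program for all APIs to pick it up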

ntp_timetuple

+ +
def ntp_timetuple(host: str, port: int = -1, retry: int = 3, timeout_ms: int = 0) -> list[int]
+
+

Retrieves time from an NTP server.
This function fetches the current time from the specified NTP server and port, returning a tuple containing the time details.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramhost: The hostname or IP address of the NTP server.
port: The port number of the NTP server. Use -1 for the default port 123.
retry: The number of retry attempts. Must be at least 1.
timeout_ms: The timeout duration in milliseconds. Must be non-negative.
returnA list of 6 elements: [year, month, day, hour, minute, second]
+
+

C++ defination code:

+ +
std::vector<int> ntp_timetuple(std::string host, int port=-1, uint8_t retry=3, int timeout_ms=0)
+
+
+

ntp_timetuple_with_config

+ +
def ntp_timetuple_with_config(path: str) -> list[int]
+
+

Retrieves time from an NTP server using a configuration file.
This function reads the configuration from a YAML file to fetch the current time from a list of specified NTP servers, returning a tuple containing the time details.

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath: The path to the YAML configuration file, which should include:
- Config:
- retry: Number of retry attempts (must be at least 1)
- total_timeout_ms: Total timeout duration in milliseconds (must be non-negative)
- NtpServers:
- host: Hostname or IP address of the NTP server
- port: Port number of the NTP server (use 123 for default)
Example YAML configuration:
Config:
- retry: 3
- total_timeout_ms: 10000
NtpServers:
- host: "pool.ntp.org"
port: 123
- host: "time.nist.gov"
port: 123
- host: "time.windows.com"
port: 123
returnA list of 6 elements: [year, month, day, hour, minute, second]
+
+

C++ defination code:

+ +
std::vector<int> ntp_timetuple_with_config(std::string path)
+
+
+

ntp_sync_sys_time

+ +
def ntp_sync_sys_time(host: str, port: int = -1, retry: int = 3, timeout_ms: int = 0) -> list[int]
+
+

Retrieves time from an NTP server and synchronizes the system time.
This function fetches the current time from the specified NTP server and port, then synchronizes the system time with the retrieved time.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramhost: The hostname or IP address of the NTP server.
port: The port number of the NTP server. Use -1 for the default port 123.
retry: The number of retry attempts. Must be at least 1.
timeout_ms: The timeout duration in milliseconds. Must be non-negative.
returnA list of 6 elements: [year, month, day, hour, minute, second]
+
+

C++ defination code:

+ +
std::vector<int> ntp_sync_sys_time(std::string host, int port=-1, uint8_t retry=3, int timeout_ms=0)
+
+
+
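A minimal sketch of syncing the system clock from a public NTP server (pool.ntp.org is taken from the YAML example above):

from maix import time

t = time.ntp_sync_sys_time("pool.ntp.org")     # port=-1 selects the default port 123
if t:
    year, month, day, hour, minute, second = t
    print("synced to:", year, month, day, hour, minute, second)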

ntp_sync_sys_time_with_config

+ +
def ntp_sync_sys_time_with_config(path: str) -> list[int]
+
+

Retrieves time from an NTP server using a configuration file and synchronizes the system time.
This function reads the configuration from a YAML file to fetch the current time from a list of specified NTP servers, then synchronizes the system time with the retrieved time.

+ + + + + + + + + + + + + + + + + +
itemdescription
parampath: The path to the YAML configuration file, which should include:
- Config:
- retry: Number of retry attempts (must be at least 1)
- total_timeout_ms: Total timeout duration in milliseconds (must be non-negative)
- NtpServers:
- host: Hostname or IP address of the NTP server
- port: Port number of the NTP server (use 123 for default)
Example YAML configuration:
Config:
- retry: 3
- total_timeout_ms: 10000
NtpServers:
- host: "pool.ntp.org"
port: 123
- host: "time.nist.gov"
port: 123
- host: "time.windows.com"
port: 123
returnA vector of integers containing the time details: [year, month, day, hour, minute, second]
+
+

C++ defination code:

+ +
std::vector<int> ntp_sync_sys_time_with_config(std::string path)
+
+
+

Class

+

FPS

+

FPS class to use average filter to calculate FPS.

+
+

C++ defination code:

+ +
class FPS
+
+
+

__init__

+ +
def __init__(self, buff_len: int = 20) -> None
+
+

FPS class constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parambuff_len: average buffer length, default 20; that is, fps() will return the average fps over the most recent buff_len samples.
staticFalse
+
+

C++ defination code:

+ +
FPS(int buff_len = 20)
+
+
+

start

+ +
def start(self) -> None
+
+

Manually set fps calculation start point, then you can call fps() function to calculate fps between start() and fps().

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
void start()
+
+
+

fps

+ +
def fps(self) -> float
+
+

The same as end function.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnfloat type, current fps since last call this method
staticFalse
+
+

C++ defination code:

+ +
float fps()
+
+
+

fps (overload 1)

+

Calculate FPS since the last call to this method.
FPS is the average of the most recent n (buff_len) samples, and you can call fps_set_buff_len(10) to change the buffer length, default is 20.
Calling this function repeatedly calculates the fps between two calls, and you can also call fps_start() first to manually set the fps calculation start point.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnfloat type, current fps since last call this method
staticFalse
+
+

C++ defination code:

+ +
inline float end()
+
+
+

set_buff_len

+ +
def set_buff_len(self, len: int) -> None
+
+

Set fps method buffer length, by default the buffer length is 10.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramlen: Buffer length to store recent fps value.
staticFalse
+
+

C++ defination code:

+ +
void set_buff_len(int len)
+
+
+

DateTime

+

Date and time class

+
+

C++ defination code:

+ +
class DateTime
+
+
+

year

+

Year

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int year
+
+
+

month

+

Month, 1~12

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int month
+
+
+

day

+

Day

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int day
+
+
+

hour

+

Hour

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int hour
+
+
+

minute

+

Minute

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int minute
+
+
+

second

+

Second

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int second
+
+
+

microsecond

+

Microsecond

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int microsecond
+
+
+

yearday

+

Year day

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int yearday
+
+
+

weekday

+

Weekday, 0 is Monday, 6 is Sunday

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int weekday
+
+
+

zone

+

Time zone

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
float zone
+
+
+

zone_name

+

Time zone name

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::string zone_name
+
+
+

__init__

+ +
def __init__(self, year: int = 0, month: int = 0, day: int = 0, hour: int = 0, minute: int = 0, second: int = 0, microsecond: int = 0, yearday: int = 0, weekday: int = 0, zone: int = 0) -> None
+
+

Constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramyear: year
month: month
day: day
hour: hour
minute: minute
second: second
microsecond: microsecond
yearday: year day
weekday: weekday
zone: time zone
staticFalse
+
+

C++ defination code:

+ +
DateTime(int year = 0, int month = 0, int day = 0, int hour = 0, int minute = 0, int second = 0, int microsecond = 0, int yearday = 0, int weekday = 0, int zone = 0)
+
+
+

strftime

+ +
def strftime(self, format: str) -> str
+
+

Convert to string

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returndate time string
staticFalse
+
+

C++ defination code:

+ +
std::string strftime(const std::string &format)
+
+
+

timestamp

+ +
def timestamp(self) -> float
+
+

Convert to float timestamp

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnfloat timestamp
staticFalse
+
+

C++ defination code:

+ +
double timestamp()
+
+
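Putting the DateTime helpers together (a sketch; the strftime/strptime format directives are assumed to follow the usual C strftime conventions):

from maix import time

dt = time.localtime()
print(dt.strftime("%Y-%m-%d %H:%M:%S"), dt.zone_name)
print(dt.timestamp())                      # seconds as float

dt2 = time.strptime("2024-06-17 12:00:00", "%Y-%m-%d %H:%M:%S")
print(dt2.year, dt2.month, dt2.day, dt2.weekday)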
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/touchscreen.html b/maixpy/api/maix/touchscreen.html new file mode 100644 index 00000000..a04f281c --- /dev/null +++ b/maixpy/api/maix/touchscreen.html @@ -0,0 +1,576 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.touchscreen - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.touchscreen

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.touchscreen module

+
+

You can use maix.touchscreen to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

Class

+

TouchScreen

+

TouchScreen class

+
+

C++ defination code:

+ +
class TouchScreen
+
+
+

__init__

+ +
def __init__(self, device: str = '', open: bool = True) -> None
+
+

Construct a new TouchScreen object

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdevice: touchscreen device path, you can get devices by the list_devices method; by default (value is NULL in C++, None in MaixPy) the first device is used
open: If true, touchscreen will automatically call open() after creation. default is true.
staticFalse
+
+

C++ defination code:

+ +
TouchScreen(const std::string &device = "", bool open = true)
+
+
+

open

+ +
def open(self) -> maix.err.Err
+
+

open touchscreen device

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ defination code:

+ +
err::Err open()
+
+
+

close

+ +
def close(self) -> maix.err.Err
+
+

close touchscreen device

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ defination code:

+ +
err::Err close()
+
+
+

read

+ +
def read(self) -> list[int]
+
+

read touchscreen device

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
attentionThis method will discard duplicate events in the buffer, that is:
if there are too many move events in the buffer when this method is called, only the last one is returned,
and if a pressed or released event is read, it returns immediately.
returnReturns a list include x, y, pressed state
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> read()
+
+
+

read (overload 1)

+

read touchscreen device

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
attentionThis method returns immediately if there is an event, so it's better to use available() to check whether more events are in the buffer;
if your program calls read() at too long an interval, too many events piling up in the buffer will slow your program down.
returnReturns a list include x, y, pressed state
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> read0()
+
+
+

available

+ +
def available(self, timeout: int = 0) -> bool
+
+

Check whether there is data to read from the touchscreen; for an event-driven touchscreen this means whether an event is pending

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramtimeout: -1 means block, 0 means no block, >0 means timeout, default is 0, unit is ms.
returntrue if need to read(have event), false if not
staticFalse
+
+

C++ defination code:

+ +
bool available(int timeout = 0)
+
+
+

is_opened

+ +
def is_opened(self) -> bool
+
+

Check if touchscreen is opened

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returntrue if touchscreen is opened, false if not
staticFalse
+
+

C++ defination code:

+ +
bool is_opened()
+
+
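A small polling-loop sketch using the methods above:

from maix import touchscreen

ts = touchscreen.TouchScreen()          # first device, open() called automatically
while ts.is_opened():
    if ts.available(20):                # wait up to 20 ms for an event
        x, y, pressed = ts.read()
        print(x, y, pressed)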
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/tracker.html b/maixpy/api/maix/tracker.html new file mode 100644 index 00000000..48c42e56 --- /dev/null +++ b/maixpy/api/maix/tracker.html @@ -0,0 +1,836 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.tracker - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.tracker

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.tracker module

+
+

You can use maix.tracker to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

Class

+

Object

+

tracker.Object class

+
+

C++ defination code:

+ +
class Object
+
+
+

__init__

+ +
def __init__(self, x: int, y: int, w: int, h: int, class_id: int, score: float) -> None
+
+

tracker.Object class constructor

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
Object(const int &x, const int &y, const int &w, const int &h, const int &class_id, const float &score)
+
+
+

x

+

position x attribute.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int x
+
+
+

y

+

position y attribute.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int y
+
+
+

w

+

position rectangle width.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int w
+
+
+

h

+

position rectangle height.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int h
+
+
+

class_id

+

object class id, int type.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
int class_id
+
+
+

score

+

object score(prob).

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
float score
+
+
+

Track

+

tracker.Track class

+
+

C++ defination code:

+ +
class Track
+
+
+

__init__

+ +
def __init__(self, id: int, score: float, lost: bool, start_frame_id: int, frame_id: int) -> None
+
+

tracker.Track class constructor

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
Track(const size_t &id, const float &score, const bool &lost, const size_t &start_frame_id, const size_t &frame_id)
+
+
+

id

+

track id.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
size_t id
+
+
+

score

+

track score(prob).

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
float score
+
+
+

lost

+

whether this track lost.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
bool lost
+
+
+

start_frame_id

+

track start frame id.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
size_t start_frame_id
+
+
+

frame_id

+

track current frame id.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
size_t frame_id
+
+
+

history

+

track position history, the last one is latest position.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typevar
staticFalse
readonlyFalse
+
+

C++ defination code:

+ +
std::deque<tracker::Object> history
+
+
+

ByteTracker

+

tracker.ByteTracker class

+
+

C++ defination code:

+ +
class ByteTracker
+
+
+

__init__

+ +
def __init__(self, max_lost_buff_num: int = 60, track_thresh: float = 0.5, high_thresh: float = 0.6, match_thresh: float = 0.8, max_history: int = 20) -> None
+
+

tracker.ByteTracker class constructor

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammax_lost_buff_num: the frames for keep lost tracks.
track_thresh: tracking confidence threshold.
high_thresh: threshold to add to new track.
match_thresh: matching threshold for tracking, e.g. if an object's iou between two frames is < match_thresh, we consider them the same object.
max_history: maximum length of a track's position history.
staticFalse
+
+

C++ defination code:

+ +
ByteTracker(const int &max_lost_buff_num = 60,
+                    const float &track_thresh = 0.5,
+                    const float &high_thresh = 0.6,
+                    const float &match_thresh = 0.8,
+                    const int &max_history = 20)
+
+
+

update

+ +
def update(self, objs: list[Object]) -> list[Track]
+
+

update tracks according to current detected objects.

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
std::vector<tracker::Track> update(const std::vector<tracker::Object> &objs)
+
+
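A per-frame sketch of feeding detections into the tracker (the detection values here are hypothetical):

from maix import tracker

bt = tracker.ByteTracker(max_lost_buff_num=60, track_thresh=0.5,
                         high_thresh=0.6, match_thresh=0.8, max_history=20)

# one frame's detections: x, y, w, h, class_id, score (made-up numbers)
objs = [tracker.Object(10, 20, 50, 80, 0, 0.9),
        tracker.Object(200, 40, 60, 90, 0, 0.8)]
tracks = bt.update(objs)
for t in tracks:
    last = t.history[-1]                # latest position of this track
    print(t.id, t.lost, last.x, last.y, last.w, last.h)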
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/util.html b/maixpy/api/maix/util.html new file mode 100644 index 00000000..2d6db1be --- /dev/null +++ b/maixpy/api/maix/util.html @@ -0,0 +1,350 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.util - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.util

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.util module

+
+

You can use maix.util to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

Variable

+

Function

+

do_exit_function

+ +
def do_exit_function() -> None
+
+

exec all of exit function

+
+

C++ defination code:

+ +
void do_exit_function()
+
+
+

register_atexit

+ +
def register_atexit() -> None
+
+

Registering default processes that need to be executed on exit

+
+

C++ defination code:

+ +
void register_atexit()
+
+
+

Class

+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/maix/video.html b/maixpy/api/maix/video.html new file mode 100644 index 00000000..e734ad21 --- /dev/null +++ b/maixpy/api/maix/video.html @@ -0,0 +1,3833 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + maix.video - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

maix.video

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

maix.video module

+
+

You can use maix.video to access this module with MaixPy
+This module is generated from MaixPy and MaixCDK

+
+

Module

+

No module

+

Enum

+

VideoType

+

Video type

+ + + + + + + + + + + + + +
itemdescribe
valuesVIDEO_NONE: format invalid
VIDEO_ENC_H265_CBR: Deprecated
VIDEO_ENC_MP4_CBR: Deprecated
VIDEO_DEC_H265_CBR: Deprecated
VIDEO_DEC_MP4_CBR: Deprecated
VIDEO_H264_CBR: Deprecated
VIDEO_H265_CBR: Deprecated
VIDEO_H264_CBR_MP4: Deprecated
VIDEO_H265_CBR_MP4: Deprecated
VIDEO_H264:
VIDEO_H264_MP4:
VIDEO_H264_FLV:
VIDEO_H265:
VIDEO_H265_MP4:
+
+

C++ defination code:

+ +
enum VideoType
+    {
+        VIDEO_NONE = 0,  // format invalid
+        VIDEO_ENC_H265_CBR,     // Deprecated
+        VIDEO_ENC_MP4_CBR,      // Deprecated
+        VIDEO_DEC_H265_CBR,     // Deprecated
+        VIDEO_DEC_MP4_CBR,      // Deprecated
+        VIDEO_H264_CBR,         // Deprecated
+        VIDEO_H265_CBR,         // Deprecated
+        VIDEO_H264_CBR_MP4,     // Deprecated
+        VIDEO_H265_CBR_MP4,     // Deprecated
+
+        VIDEO_H264,
+        VIDEO_H264_MP4,
+        VIDEO_H264_FLV,
+        VIDEO_H265,
+        VIDEO_H265_MP4,
+    }
+
+
+

MediaType

+

Video type

+ + + + + + + + + + + + + +
itemdescribe
valuesMEDIA_TYPE_UNKNOWN: Represents an unknown media type, which is usually treated as AVMEDIA_TYPE_DATA.
MEDIA_TYPE_VIDEO: Represents a video stream, such as video content encoded in H.264, MPEG-4, etc.
MEDIA_TYPE_AUDIO: Represents an audio stream, such as audio content encoded in AAC, MP3, etc.
MEDIA_TYPE_DATA: Represents opaque data streams that are usually continuous. This type of stream is not necessarily audio or video and may be used for other data purposes.
MEDIA_TYPE_SUBTITLE: Represents a subtitle stream used for displaying text or subtitle information, such as SRT, ASS, etc.
MEDIA_TYPE_ATTACHMENT: Represents attachment streams that are usually sparse. Attachment streams can include images, fonts, or other files that need to be bundled with the media.
MEDIA_TYPE_NB: Represents the number of media types (count) and indicates the total number of media types defined in this enumeration. It is not a media type itself but is used for counting enumeration items.
+
+

C++ defination code:

+ +
enum MediaType
+    {
+        MEDIA_TYPE_UNKNOWN = -1,    // Represents an unknown media type, which is usually treated as AVMEDIA_TYPE_DATA.
+        MEDIA_TYPE_VIDEO,           // Represents a video stream, such as video content encoded in H.264, MPEG-4, etc.
+        MEDIA_TYPE_AUDIO,           // Represents an audio stream, such as audio content encoded in AAC, MP3, etc.
+        MEDIA_TYPE_DATA,            // Represents opaque data streams that are usually continuous. This type of stream is not necessarily audio or video and may be used for other data purposes.
+        MEDIA_TYPE_SUBTITLE,        // Represents a subtitle stream used for displaying text or subtitle information, such as SRT, ASS, etc.
+        MEDIA_TYPE_ATTACHMENT,      // Represents attachment streams that are usually sparse. Attachment streams can include images, fonts, or other files that need to be bundled with the media.
+        MEDIA_TYPE_NB               // Represents the number of media types (count) and indicates the total number of media types defined in this enumeration. It is not a media type itself but is used for counting enumeration items.
+    }
+
+
+

Variable

+

Function

+

timebase_to_us

+ +
def timebase_to_us(timebase: list[int], value: int) -> float
+
+

Convert a value in timebase units to microseconds. value * 1000000 / (timebase[1] / timebase[0])

+ + + + + + + + + + + + + + + + + +
itemdescription
paramtimebase: Time base, used as the unit for calculating playback time. It must be an array containing two parameters,
in the format [num, den], where the first parameter is the numerator of the time base, and the second parameter is the denominator of the time base.
value: Input value
returnReturn the result in microseconds.
+
+

C++ defination code:

+ +
double timebase_to_us(std::vector<int> timebase, uint64_t value)
+
+
+

timebase_to_ms

+ +
def timebase_to_ms(timebase: list[int], value: int) -> float
+
+

Convert a value in timebase units to milliseconds.

+ + + + + + + + + + + + + + + + + +
itemdescription
paramtimebase: Time base, used as the unit for calculating playback time. It must be an array containing two parameters,
in the format [num, den], where the first parameter is the numerator of the time base, and the second parameter is the denominator of the time base.
value: Input value
returnReturn the result in milliseconds.
+
+

C++ defination code:

+ +
double timebase_to_ms(std::vector<int> timebase, uint64_t value)
+
+
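A quick numeric check of the conversion formula above:

from maix import video

# With a 1/90000 timebase, a value of 90000 is exactly one second:
print(video.timebase_to_ms([1, 90000], 90000))   # 1000.0
print(video.timebase_to_us([1, 90000], 90000))   # 1000000.0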
+

Class

+

Context

+

Context class

+
+

C++ defination code:

+ +
class Context
+
+
+

__init__

+ +
def __init__(self, media_type: MediaType, timebase: list[int]) -> None
+
+

Construct a new Context object

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parammedia_type: the media type of this context (video, audio, etc.), @see MediaType
timebase: Time base, used as the unit for calculating playback time. It must be an array containing two parameters,
in the format [num, den], where the first parameter is the numerator of the time base, and the second parameter is the denominator of the time base.
staticFalse
+
+

C++ defination code:

+ +
Context(video::MediaType media_type, std::vector<int> timebase)
+
+
+

audio_sample_rate

+ +
def audio_sample_rate(self) -> int
+
+

Get sample rate of audio (only valid in the context of audio)

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnsample rate
staticFalse
+
+

C++ defination code:

+ +
int audio_sample_rate()
+
+
+

audio_sample_rate (overload 1)

+

Get sample rate of audio (only valid in the context of audio)

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnsample rate
staticFalse
+
+

C++ defination code:

+ +
int audio_sample_rate()
+
+
+

audio_channels

+ +
def audio_channels(self) -> int
+
+

Get channels of audio (only valid in the context of audio)

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnchannels
staticFalse
+
+

C++ defination code:

+ +
int audio_channels()
+
+
+

audio_channels (overload 1)

+

Get channels of audio (only valid in the context of audio)

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnchannels
staticFalse
+
+

C++ defination code:

+ +
int audio_channels()
+
+
+

audio_format

+ +
def audio_format(self) -> maix.audio.Format
+
+

Get format of audio (only valid in the context of audio)

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnaudio format. @see audio::Format
staticFalse
+
+

C++ defination code:

+ +
audio::Format audio_format()
+
+
+

audio_format (overload 1)

+

Get format of audio (only valid in the context of audio)

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnaudio format. @see audio::Format
staticFalse
+
+

C++ defination code:

+ +
audio::Format audio_format()
+
+
+

set_pcm

+ +
def set_pcm(self, data: maix.Bytes(bytes), duration: int = 0, pts: int = 0, copy: bool = True) -> maix.err.Err
+
+

Set pcm data (only valid in the context of audio)

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramduration: Duration of the current pcm. unit: timebase
pts: The start time of this pcm playback. If it is 0, it means this parameter is not supported. unit: timebase
returnerr::Err
staticFalse
+
+

C++ defination code:

+ +
err::Err set_pcm(maix::Bytes *data, int duration = 0, uint64_t pts = 0, bool copy = true)
+
+
+

get_pcm

+ +
def get_pcm(*args, **kwargs)
+
+

Get pcm data (only valid in the context of audio)

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
attentionNote that if you call this interface, you are responsible for releasing the memory of the data, and this interface cannot be called again.
returnBytes
staticFalse
+
+

C++ defination code:

+ +
Bytes *get_pcm()
+
+
+

image

+ +
def image(self) -> maix.image.Image
+
+

Retrieve the image data to be played.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
attentionNote that if you call this interface, you are responsible for releasing the memory of the image, and this interface cannot be called again.
staticFalse
+
+

C++ defination code:

+ +
image::Image *image()
+
+
+

media_type

+ +
def media_type(self) -> MediaType
+
+

Get the media type to determine whether it is video, audio, or another media type.

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
video::MediaType media_type()
+
+
+

pts

+ +
def pts(self) -> int
+
+

Get the start time of the current playback, in units of the time base.

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
uint64_t pts()
+
+
+

last_pts

+ +
def last_pts(self) -> int
+
+

Get the start time of the previous playback, in units of time base.

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
uint64_t last_pts()
+
+
+

timebase

+ +
def timebase(self) -> list[int]
+
+

Get the time base.

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
std::vector<int> timebase()
+
+
+

duration

+ +
def duration(self) -> int
+
+

Duration of the current frame. unit: timebase

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
int duration()
+
+
+

duration_us

+ +
def duration_us(self) -> int
+
+

Duration of the current frame. unit: us

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ defination code:

+ +
uint64_t duration_us()
+
+
+

Frame

+

Frame class

+
+

C++ defination code:

+ +
class Frame
+
+
+

to_bytes

+ +
def to_bytes(*args, **kwargs)
+
+

Get raw data of packet

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcopy: if true, will alloc memory and copy data to new buffer
returnraw data
staticFalse
+
+

C++ defination code:

+ +
Bytes *to_bytes(bool copy = false)
+
+
+

size

+ +
def size(self) -> int
+
+

Get raw data size of packet

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnsize of raw data
staticFalse
+
+

C++ defination code:

+ +
size_t size()
+
+
+

is_valid

+ +
def is_valid(self) -> bool
+
+

Check packet is valid

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returntrue, packet is valid; false, packet is invalid
staticFalse
+
+

C++ defination code:

+ +
bool is_valid()
+
+
+

set_pts

+ +
def set_pts(self, pts: int) -> None
+
+

Set pts

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampts: presentation time stamp. unit: time_base
staticFalse
+
+

C++ defination code:

+ +
void set_pts(uint64_t pts)
+
+
+

set_dts

+ +
def set_dts(self, dts: int) -> None
+
+

Set dts

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdts: decoding time stamp. unit: time_base
staticFalse
+
+

C++ defination code:

+ +
void set_dts(uint64_t dts)
+
+
+

set_duration

+ +
def set_duration(self, duration: int) -> None
+
+

Set duration

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramduration: packet display time. unit: time_base
staticFalse
+
+

C++ defination code:

+ +
void set_duration(uint64_t duration)
+
+
+

get_pts

+ +
def get_pts(self) -> int
+
+

Get pts

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampts: presentation time stamp. unit: time_base
returnpts value
staticFalse
+
+

C++ defination code:

+ +
uint64_t get_pts()
+
+
+

get_dts

+ +
def get_dts(self) -> int
+
+

Get dts

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdts: decoding time stamp. unit: time_base
returndts value
staticFalse
+
+

C++ defination code:

+ +
uint64_t get_dts()
+
+
+

get_duration

+ +
def get_duration(self) -> int
+
+

Get duration

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnduration value
staticFalse
+
+

C++ defination code:

+ +
uint64_t get_duration()
+
+
+

type

+ +
def type(self) -> VideoType
+
+

Get frame type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnvideo type. @see video::VideoType
staticFalse
+
+

C++ defination code:

+ +
video::VideoType type()
+
+
+

Packet

+

Packet class

+
+

C++ defination code:

+ +
class Packet
+
+
+

__init__

+ +
def __init__(self, data: int, len: int, pts: int = -1, dts: int = -1, duration: int = 0) -> None
+
+

Packet constructor.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdata: src data pointer, use pointers directly without copying.
Note: this object will try to free this memory
len: data len
pts: presentation time stamp. unit: time_base
dts: decoding time stamp. unit: time_base
duration: packet display time. unit: time_base
staticFalse
+
+

C++ defination code:

+ +
Packet(uint8_t *data, int len, uint64_t pts = -1, uint64_t dts = -1, int64_t duration = 0)
+
+
+

get

+ +
def get(self) -> list[int]
+
+

Get raw data of packet

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnraw data
staticFalse
+
+

C++ defination code:

+ +
std::vector<uint8_t> get()
+
+
+

data

+ +
def data(self) -> int
+
+

Get raw data of packet

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnraw data
staticFalse
+
+

C++ defination code:

+ +
uint8_t *data()
+
+
+

data_size

+ +
def data_size(self) -> int
+
+

Get raw data size of packet

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnsize of raw data
staticFalse
+
+

C++ defination code:

+ +
size_t data_size()
+
+
+

is_valid

+ +
def is_valid(self) -> bool
+
+

Check packet is valid

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returntrue, packet is valid; false, packet is invalid
staticFalse
+
+

C++ defination code:

+ +
bool is_valid()
+
+
+

set_pts

+ +
def set_pts(self, pts: int) -> None
+
+

Set pts

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampts: presentation time stamp. unit: time_base
returntrue, packet is valid; false, packet is invalid
staticFalse
+
+

C++ defination code:

+ +
void set_pts(uint64_t pts)
+
+
+

set_dts

+ +
def set_dts(self, dts: int) -> None
+
+

Set dts

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdts: decoding time stamp. unit: time_base
returntrue, packet is valid; false, packet is invalid
staticFalse
+
+

C++ defination code:

+ +
void set_dts(uint64_t dts)
+
+
+

set_duration

+ +
def set_duration(self, duration: int) -> None
+
+

Set duration

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramduration: packet display time. unit: time_base
returntrue, packet is valid; false, packet is invalid
staticFalse
+
+

C++ defination code:

+ +
void set_duration(uint64_t duration)
+
+
+

Encoder

+

Encoder class

+
+

C++ defination code:

+ +
class Encoder
+
+
+

__init__

+ +
def __init__(self, path: str = '', width: int = 2560, height: int = 1440, format: maix.image.Format = ..., type: VideoType = ..., framerate: int = 30, gop: int = 50, bitrate: int = 3000000, time_base: int = 1000, capture: bool = False, block: bool = True) -> None
+
+

Construct a new Video object

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramwidth: picture width. this value may be set automatically. default is 2560.
height: picture height. this value may be set automatically. default is 1440.
format: picture format. default is image::Format::FMT_YVU420SP. @see image::Format
type: video encode/decode type. default is VIDEO_H264. @see VideoType
framerate: frame rate. framerate default is 30, means 30 frames per second
for video. 1/time_base is not the average frame rate if the frame rate is not constant.
gop: for h264/h265 encoding, the interval between two I-frames, default is 50.
bitrate: for h264/h265 encoding, used to limit the bandwidth used by compressed data, default is 3000kbps
time_base: frame time base. time_base default is 1000, means 1/1000 ms (not used)
capture: enable capture, if true, you can use capture() function to get an image object
block: This parameter determines whether encoding should block until it is complete.
If set to true, it will wait until encoding is finished before returning.
If set to false, it will return the current encoding result on the next call.
staticFalse
+
+

C++ defination code:

+ +
Encoder(std::string path = "", int width = 2560, int height = 1440, image::Format format = image::Format::FMT_YVU420SP, video::VideoType type = video::VideoType::VIDEO_H264, int framerate = 30, int gop = 50, int bitrate = 3000 * 1000, int time_base = 1000, bool capture = false, bool block = true)
+
+
+

bind_camera

+ +
def bind_camera(self, camera: maix.camera.Camera) -> maix.err.Err
+
+

Bind camera

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcamera: camera object
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ defination code:

+ +
err::Err bind_camera(camera::Camera *camera)
+
+
+

encode

+ +
def encode(self, img: maix.image.Image = ..., pcm: maix.Bytes(bytes) = b'') -> Frame
+
+

Encode image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: the image will be encode.
if the img is NULL, this function will try to get image from camera, you must use bind_camera() function to bind the camera.
pcm: the pcm data will be encode.
returnencode result
staticFalse
+
+

C++ defination code:

+ +
video::Frame *encode(image::Image *img = maix::video::Encoder::NoneImage, Bytes *pcm = maix::video::Encoder::NoneBytes)
+
+
+
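A minimal recording sketch with a bound camera (the camera.Camera constructor arguments and the output path are assumptions here, not taken from this page):

from maix import camera, video

cam = camera.Camera(2560, 1440)                     # resolution matching the encoder (assumed args)
enc = video.Encoder("/root/output.mp4", 2560, 1440,
                    type=video.VideoType.VIDEO_H264_MP4)
enc.bind_camera(cam)
for _ in range(300):                                # ~10 s at the default 30 fps
    enc.encode()                                    # with a bound camera, encode() grabs the frame itself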

capture

+ +
def capture(self) -> maix.image.Image
+
+

Capture image

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
attentionEach time encode is called, the last captured image will be released.
returnthe captured image (image::Image object)
staticFalse
+
+

C++ defination code:

+ +
image::Image *capture()
+
+
+

width

+ +
def width(self) -> int
+
+

Get video width

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnvideo width
staticFalse
+
+

C++ defination code:

+ +
int width()
+
+
+

height

+ +
def height(self) -> int
+
+

Get video height

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnvideo height
staticFalse
+
+

C++ defination code:

+ +
int height()
+
+
+

type

+ +
def type(self) -> VideoType
+
+

Get video encode type

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnVideoType
staticFalse
+
+

C++ defination code:

+ +
video::VideoType type()
+
+
+

framerate

+ +
def framerate(self) -> int
+
+

Get video encode framerate

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnframe rate
staticFalse
+
+

C++ defination code:

+ +
int framerate()
+
+
+

gop

+ +
def gop(self) -> int
+
+

Get video encode gop

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returngop value
staticFalse
+
+

C++ defination code:

+ +
int gop()
+
+
+

bitrate

+ +
def bitrate(self) -> int
+
+

Get video encode bitrate

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnbitrate value
staticFalse
+
+

C++ defination code:

+ +
int bitrate()
+
+
+

time_base

+ +
def time_base(self) -> int
+
+

Get video encode time base

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returntime base value
staticFalse
+
+

C++ defination code:

+ +
int time_base()
+
+
+

Decoder

+

Decoder class

+
+

C++ defination code:

+ +
class Decoder
+
+
+

__init__

+ +
def __init__(self, path: str, format: maix.image.Format = ...) -> None
+
+

Construct a new decoder object

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampath: Path to the file to be decoded. Supports files with .264 and .mp4 extensions. Note that only mp4 files containing h.264 streams are supported.
format: Decoded output format, currently only support YUV420SP
staticFalse
+
+

C++ defination code:

+ +
Decoder(std::string path, image::Format format = image::Format::FMT_YVU420SP)
+
+
+

decode_video

+ +
def decode_video(self, block: bool = True) -> Context
+
+

Decode the video stream, returning the image of the next frame each time.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramblock: Whether it blocks or not. If true, it will wait for the decoding to complete and return the current frame.
If false, it will return the result of the previous frame's decoding. If the previous frame's decoding result is empty,
it will return an unknown Context, and you can use the media_type method of the Context to determine if a valid result exists.
default is true.
returnDecoded context information.
staticFalse
+
+

C++ defination code:

+ +
video::Context * decode_video(bool block = true)
+
+
+

decode_audio

+ +
def decode_audio(self) -> Context
+
+

Decode the audio stream, returning the audio data of the next frame each time.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnDecoded context information.
staticFalse
+
+

C++ definition code:

+ +
video::Context * decode_audio()
+
+
+

decode

+ +
def decode(self, block: bool = True) -> Context
+
+

Decode the video and audio stream

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramblock: Whether it blocks or not. If true, it will wait for the decoding to complete and return the current frame.
If false, it will return the result of the previous frame's decoding. If the previous frame's decoding result is empty,
it will return an unknown Context, and you can use the media_type method of the Context to determine if a valid result exists.
default is true.
returnDecoded context information.
staticFalse
+
+

C++ definition code:

+ +
video::Context * decode(bool block = true)
+
+
+

width

+ +
def width(self) -> int
+
+

Get the video width

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnvideo width
staticFalse
+
+

C++ definition code:

+ +
int width()
+
+
+

height

+ +
def height(self) -> int
+
+

Get the video height

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnvideo height
staticFalse
+
+

C++ definition code:

+ +
int height()
+
+
+

bitrate

+ +
def bitrate(self) -> int
+
+

Get the video bitrate

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnbitrate value
staticFalse
+
+

C++ definition code:

+ +
int bitrate()
+
+
+

fps

+ +
def fps(self) -> int
+
+

Get the video fps

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnfps value
staticFalse
+
+

C++ definition code:

+ +
int fps()
+
+
+

seek

+ +
def seek(self, time: float = -1) -> float
+
+

Seek to the required playback position

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramtime: timestamp value, unit: s
returnreturn the current position, unit: s
staticFalse
+
+

C++ definition code:

+ +
double seek(double time = -1)
+
+
+

duration

+ +
def duration(self) -> float
+
+

Get the maximum duration of the video. If it returns 0, it means it cannot be predicted.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnduration value, unit: s
staticFalse
+
+

C++ definition code:

+ +
double duration()
+
+
+
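Continuing the decoding sketch above, duration() and seek() work together to jump around the file; both values are in seconds.

total = decoder.duration()          # 0 means the duration cannot be determined
if total > 0:
    pos = decoder.seek(total / 2)   # jump to the middle of the file
    print("now at", pos, "of", total, "seconds")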

timebase

+ +
def timebase(self) -> list[int]
+
+

Get the time base.

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ definition code:

+ +
std::vector<int> timebase()
+
+
+

has_audio

+ +
def has_audio(self) -> bool
+
+

Return true if audio data is found

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ definition code:

+ +
bool has_audio()
+
+
+

has_video

+ +
def has_video(self) -> bool
+
+

Return true if video data is found

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ definition code:

+ +
bool has_video()
+
+
+

Video

+

Video class

+
+

C++ definition code:

+ +
class Video
+
+
+

__init__

+ +
def __init__(self, path: str = '', width: int = 2560, height: int = 1440, format: maix.image.Format = ..., time_base: int = 30, framerate: int = 30, capture: bool = False, open: bool = True) -> None
+
+

Construct a new Video object

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampath: video path. the path determines the location where you load or save the file, if path is none, the video module will not save or load file.
xxx.h265 means video format is H265, xxx.mp4 means video format is MP4
width: picture width. this value may be set automatically. default is 2560.
height: picture height. this value may be set automatically. default is 1440.
format: picture pixel format. this value may be set automatically. default is FMT_YVU420SP.
time_base: frame time base. time_base default is 30, meaning each time-base tick is 1/30 s
framerate: frame rate. framerate default is 30, means 30 frames per second
for video. 1/time_base is not the average frame rate if the frame rate is not constant.
capture: enable capture, if true, you can use capture() function to get an image object
open: If true, video will automatically call open() after creation. default is true.
staticFalse
+
+

C++ definition code:

+ +
Video(std::string path = std::string(), int width = 2560, int height = 1440, image::Format format = image::Format::FMT_YVU420SP, int time_base = 30, int framerate = 30, bool capture = false, bool open = true)
+
+
+

open

+ +
def open(self, path: str = '', fps: float = 30.0) -> maix.err.Err
+
+

Open video and run

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampath: video path. the path determines the location where you load or save the file, if path is none, the video module will not save or load file.
xxx.h265 means video format is H265, xxx.mp4 means video format is MP4
fps: video fps
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ definition code:

+ +
err::Err open(std::string path = std::string(), double fps = 30.0)
+
+
+

close

+ +
def close(self) -> None
+
+

Close video

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ definition code:

+ +
void close()
+
+
+

bind_camera

+ +
def bind_camera(self, camera: maix.camera.Camera) -> maix.err.Err
+
+

Bind camera

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcamera: camera object
returnerror code, err::ERR_NONE means success, others means failed
staticFalse
+
+

C++ definition code:

+ +
err::Err bind_camera(camera::Camera *camera)
+
+
+

encode

+ +
def encode(self, img: maix.image.Image = ...) -> Packet
+
+

Encode image.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimg: the image to be encoded.
if the img is NULL, this function will try to get image from camera, you must use bind_camera() function to bind the camera.
returnencode result
staticFalse
+
+

C++ definition code:

+ +
video::Packet *encode(image::Image *img = maix::video::Video::NoneImage)
+
+
+
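A rough recording sketch with the Video class: bind a camera, call encode() without an image so frames are pulled from the bound camera, then finish() to flush the file. The save path and frame count are placeholders.

from maix import video, camera, app

cam = camera.Camera(2560, 1440)       # match the Video object's default resolution
v = video.Video("/root/output.mp4")   # placeholder save path; open=True by default
v.bind_camera(cam)

for _ in range(300):                  # roughly 10 s at 30 fps
    if app.need_exit():
        break
    packet = v.encode()               # no img given, so the frame comes from the bound camera
v.finish()                            # finalize and close the file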

decode

+ +
def decode(self, frame: Frame = None) -> maix.image.Image
+
+

Decode frame

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramframe: the frame to be decoded
returndecode result
staticFalse
+
+

C++ definition code:

+ +
image::Image *decode(video::Frame *frame = nullptr)
+
+
+

finish

+ +
def finish(self) -> maix.err.Err
+
+

Finish encoding or decoding

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code
staticFalse
+
+

C++ definition code:

+ +
err::Err finish()
+
+
+

capture

+ +
def capture(self) -> maix.image.Image
+
+

Capture image

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
attentionEach time encode is called, the last captured image will be released.
returnthe captured image object
staticFalse
+
+

C++ definition code:

+ +
image::Image *capture()
+
+
+

is_recording

+ +
def is_recording(self) -> bool
+
+

Check if video is recording

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returntrue if video is recording, false if not
staticFalse
+
+

C++ definition code:

+ +
bool is_recording()
+
+
+

is_opened

+ +
def is_opened(self) -> bool
+
+

Check if video is opened

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returntrue if video is opened, false if not
staticFalse
+
+

C++ definition code:

+ +
bool is_opened()
+
+
+

is_closed

+ +
def is_closed(self) -> bool
+
+

Check whether the video device is closed or not

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnclosed or not, bool type
staticFalse
+
+

C++ definition code:

+ +
bool is_closed()
+
+
+

width

+ +
def width(self) -> int
+
+

Get video width

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnvideo width
staticFalse
+
+

C++ definition code:

+ +
int width()
+
+
+

height

+ +
def height(self) -> int
+
+

Get video height

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnvideo height
staticFalse
+
+

C++ definition code:

+ +
int height()
+
+
+

VideoRecorder

+

Video Recorder class. This module is not fully supported and may be deprecated in the future.

+
+

C++ definition code:

+ +
class VideoRecorder
+
+
+

__init__

+ +
def __init__(self, open: bool = True) -> None
+
+

Construct a new VideoRecorder object. This is an object that integrates recording, video capturing, and display functions, which can be used to achieve high-resolution video input when needed.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramopen: If true, video will automatically call open() after creation. default is true.
staticFalse
+
+

C++ definition code:

+ +
VideoRecorder(bool open = true)
+
+
+

lock

+ +
def lock(self, timeout: int = -1) -> maix.err.Err
+
+

lock video

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramtimeout: timeout in ms. unit:ms
returnerror code
staticFalse
+
+

C++ definition code:

+ +
err::Err lock(int64_t timeout = -1)
+
+
+

unlock

+ +
def unlock(self) -> maix.err.Err
+
+

unlock video

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code
staticFalse
+
+

C++ definition code:

+ +
err::Err unlock()
+
+
+

open

+ +
def open(self) -> maix.err.Err
+
+

Start a thread to handle the input function.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code
staticFalse
+
+

C++ definition code:

+ +
err::Err open()
+
+
+

close

+ +
def close(self) -> maix.err.Err
+
+

Stop the thread, and reset the object.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code
staticFalse
+
+

C++ definition code:

+ +
err::Err close()
+
+
+

is_opened

+ +
def is_opened(self) -> bool
+
+

Check whether the object is opened.

+ + + + + + + + + + + + + + + + + +
itemdescription
typefunc
staticFalse
+
+

C++ definition code:

+ +
bool is_opened()
+
+
+

bind_display

+ +
def bind_display(self, display: maix.display.Display, fit: maix.image.Fit = ...) -> maix.err.Err
+
+

Bind a Display object. If this object is not bound, the image will not be displayed.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdisplay: display object
fit: fit mode. It is recommended to fill in FIT_COVER or FIT_FILL. For maixcam, using FIT_CONTAIN may affect the
functionality of the second layer created by add_channel() in the Display. default is FIT_COVER.
returnerror code
staticFalse
+
+

C++ definition code:

+ +
err::Err bind_display(display::Display *display, image::Fit fit = image::FIT_COVER)
+
+
+

bind_camera

+ +
def bind_camera(self, camera: maix.camera.Camera) -> maix.err.Err
+
+

Bind a Camera object. If this object is not bound, images cannot be captured.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramcamera: camera object
returnerror code
staticFalse
+
+

C++ definition code:

+ +
err::Err bind_camera(camera::Camera *camera)
+
+
+

bind_audio

+ +
def bind_audio(self, audio: maix.audio.Recorder) -> maix.err.Err
+
+

Bind an AudioRecorder object. If this object is not bound, audio cannot be captured.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramaudio: audio recorder object
returnerror code
staticFalse
+
+

C++ definition code:

+ +
err::Err bind_audio(audio::Recorder *audio)
+
+
+

bind_imu

+ +
def bind_imu(self, imu: capsule) -> maix.err.Err
+
+

Bind an IMU object. If this object is not bound, IMU data cannot be captured.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramimu: imu object
returnerror code
staticFalse
+
+

C++ definition code:

+ +
err::Err bind_imu(void *imu)
+
+
+

reset

+ +
def reset(self) -> maix.err.Err
+
+

Reset the video recorder.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
noteIt will not reset the bound object; if you have already bound the display using bind_display(), there is no need to rebind the display after calling reset().
returnerror code
staticFalse
+
+

C++ definition code:

+ +
err::Err reset()
+
+
+

config_path

+ +
def config_path(self, path: str) -> maix.err.Err
+
+

The recorded video will be saved to this path, and this API cannot be called during runtime.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
parampath: The path of the video file to be saved
returnerror code
staticFalse
+
+

C++ definition code:

+ +
err::Err config_path(std::string path)
+
+
+

get_path

+ +
def get_path(self) -> str
+
+

Get the path of the video file to be saved

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnpath
staticFalse
+
+

C++ definition code:

+ +
std::string get_path()
+
+
+

config_snapshot

+ +
def config_snapshot(self, enable: bool, resolution: list[int] = [], format: maix.image.Format = ...) -> maix.err.Err
+
+

Set the snapshot parameters

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
noteEnabling snapshot functionality may result in some performance loss.
paramenable: enable or disable snapshot
resolution: image resolution of snapshot
format: image format of snapshot
returnerror code
staticFalse
+
+

C++ definition code:

+ +
err::Err config_snapshot(bool enable, std::vector<int> resolution = std::vector<int>(), image::Format format = image::Format::FMT_YVU420SP)
+
+
+

config_resolution

+ +
def config_resolution(self, resolution: list[int]) -> maix.err.Err
+
+

Set the resolution of the video, and this API cannot be called during runtime.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
noteYou must bind the camera first, and this interface will modify the camera's resolution. The width must be divisible by 32.
paramresolution: The resolution of the video
returnerror code
staticFalse
+
+

C++ definition code:

+ +
err::Err config_resolution(std::vector<int> resolution)
+
+
+

get_resolution

+ +
def get_resolution(self) -> list[int]
+
+

Get the resolution of the video

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnthe resolution of the video
staticFalse
+
+

C++ definition code:

+ +
std::vector<int> get_resolution()
+
+
+

config_fps

+ +
def config_fps(self, fps: int) -> maix.err.Err
+
+

Set the fps of the video, and this API cannot be called during runtime.

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
noteThis interface only affects the fps of the encoded file.
returnerror code
staticFalse
+
+

C++ definition code:

+ +
err::Err config_fps(int fps)
+
+
+

get_fps

+ +
def get_fps(self) -> int
+
+

Get the fps of the video.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnfps value
staticFalse
+
+

C++ definition code:

+ +
int get_fps()
+
+
+

config_bitrate

+ +
def config_bitrate(self, bitrate: int) -> maix.err.Err
+
+

Set the bitrate of the video, and this API cannot be called during runtime.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code
staticFalse
+
+

C++ definition code:

+ +
err::Err config_bitrate(int bitrate)
+
+
+

get_bitrate

+ +
def get_bitrate(self) -> int
+
+

Get the bitrate of the video.

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnbitrate value
staticFalse
+
+

C++ definition code:

+ +
int get_bitrate()
+
+
+

mute

+ +
def mute(self, data: int = -1) -> int
+
+

Set/Get the mute of the video

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdata: If the parameter is true, mute; if false, unmute; if no parameter is provided, return the mute status.
returnerror code
staticFalse
+
+

C++ definition code:

+ +
int mute(int data = -1)
+
+
+

volume

+ +
def volume(self, data: int = -1) -> int
+
+

Set/Get the volume of the video

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramdata: The volume of the video, the range is 0-100. if no parameter is provided, return the volume.
returnerror code
staticFalse
+
+

C++ definition code:

+ +
int volume(int data = -1)
+
+
+

seek

+ +
def seek(self) -> int
+
+

Get the current position of the video

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returncurrent position, unit: ms
staticFalse
+
+

C++ definition code:

+ +
int64_t seek()
+
+
+

record_start

+ +
def record_start(self) -> maix.err.Err
+
+

Start recording

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
noteYou must bind the camera at a minimum during input. Additionally,
if you bind a display, the input image will be shown,
if you bind an audio recorder, audio will be recorded,
if you bind an IMU, IMU data will be logged.
returnerror code
staticFalse
+
+

C++ definition code:

+ +
err::Err record_start()
+
+
+

snapshot

+ +
def snapshot(self) -> maix.image.Image
+
+

Take a snapshot

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnimage::Image
staticFalse
+
+

C++ definition code:

+ +
image::Image *snapshot()
+
+
+

record_finish

+ +
def record_finish(self) -> maix.err.Err
+
+

Stop recording and save the video

+ + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
returnerror code
staticFalse
+
+

C++ definition code:

+ +
err::Err record_finish()
+
+
+
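Putting the APIs above together, a typical recording session looks roughly like the sketch below. The save path and resolutions are placeholders, and all config_* calls must happen before record_start().

from maix import video, camera, display
import time

cam = camera.Camera(640, 480)
disp = display.Display()

recorder = video.VideoRecorder()
recorder.bind_camera(cam)
recorder.bind_display(disp)                  # default fit mode (FIT_COVER)
recorder.config_path("/root/output.mp4")     # placeholder save path
recorder.config_resolution([1280, 720])      # width must be divisible by 32
recorder.config_fps(30)
recorder.config_snapshot(True, [640, 480])   # optional: enable snapshots

recorder.record_start()
time.sleep(5)                                # record for about 5 seconds
img = recorder.snapshot()                    # grab a frame while recording
recorder.record_finish()                     # stop recording and save the file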

draw_rect

+ +
def draw_rect(self, id: int, x: int, y: int, w: int, h: int, color: maix.image.Color = ..., thickness: int = -1, hidden: bool = False) -> maix.err.Err
+
+

Draw a rect on the video

+ + + + + + + + + + + + + + + + + + + + + + + + + +
itemdescription
typefunc
paramid: id of the rect, range is [0, 15]
x: x coordinate
y: y coordinate
w: width
h: height
color: color
thickness: The line width of the rectangular box; if set to -1, it indicates that the rectangular box will be filled.
hidden: Hide or show the rectangular box
returnerror code
staticFalse
+
+

C++ definition code:

+ +
err::Err draw_rect(int id, int x, int y, int w, int h, image::Color color = image::COLOR_WHITE, int thickness = -1, bool hidden = false)
+
+
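Continuing the recording sketch above, an overlay rectangle (for example a detection box) can be drawn and later hidden by its id while recording; the coordinates below are arbitrary.

from maix import image

recorder.draw_rect(0, 10, 10, 100, 100, image.COLOR_WHITE, 2)                 # draw box id 0 with a 2 px outline
recorder.draw_rect(0, 10, 10, 100, 100, image.COLOR_WHITE, 2, hidden=True)    # hide box id 0 again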
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/api/sidebar.yaml b/maixpy/api/sidebar.yaml new file mode 100644 index 00000000..f7c86ba8 --- /dev/null +++ b/maixpy/api/sidebar.yaml @@ -0,0 +1,139 @@ +items: +- file: README.md + label: Brief +- collapsed: false + items: + - collapsed: false + file: maix/err.md + label: err + - collapsed: false + file: maix/tensor.md + label: tensor + - collapsed: false + file: maix/image.md + label: image + - collapsed: false + file: maix/camera.md + label: camera + - collapsed: false + file: maix/display.md + label: display + - collapsed: false + file: maix/ext_dev.md + items: + - collapsed: false + file: maix/ext_dev/imu.md + label: imu + - collapsed: false + file: maix/ext_dev/qmi8658.md + label: qmi8658 + - collapsed: false + file: maix/ext_dev/tmc2209.md + label: tmc2209 + - collapsed: false + file: maix/ext_dev/bm8563.md + label: bm8563 + label: ext_dev + - collapsed: false + file: maix/audio.md + label: audio + - collapsed: false + file: maix/tracker.md + label: tracker + - collapsed: false + file: maix/http.md + label: http + - collapsed: false + file: maix/rtsp.md + label: rtsp + - collapsed: false + file: maix/rtmp.md + label: rtmp + - collapsed: false + file: maix/touchscreen.md + label: touchscreen + - collapsed: false + file: maix/video.md + label: video + - collapsed: false + file: maix/network.md + items: + - collapsed: false + file: maix/network/wifi.md + label: wifi + label: network + - collapsed: false + file: maix/comm.md + label: comm + - collapsed: false + file: maix/fs.md + label: fs + - collapsed: false + file: maix/app.md + label: app + - collapsed: false + file: maix/protocol.md + label: protocol + - collapsed: false + file: maix/time.md + label: time + - collapsed: false + file: maix/example.md + label: example + - collapsed: false + file: maix/util.md + label: util + - collapsed: false + file: maix/thread.md + label: thread + - collapsed: false + file: maix/sys.md + label: sys + - collapsed: false + file: maix/i18n.md + label: i18n + - collapsed: false + file: maix/peripheral.md + items: + - collapsed: false + file: maix/peripheral/key.md + label: key + - collapsed: false + file: maix/peripheral/i2c.md + label: i2c + - collapsed: false + file: maix/peripheral/spi.md + label: spi + - collapsed: false + file: maix/peripheral/pwm.md + label: pwm + - collapsed: false + file: maix/peripheral/wdt.md + label: wdt + - collapsed: false + file: maix/peripheral/adc.md + label: adc + - collapsed: false + file: maix/peripheral/pinmap.md + label: pinmap + - collapsed: false + file: maix/peripheral/uart.md + label: uart + - collapsed: false + file: maix/peripheral/gpio.md + label: gpio + - collapsed: false + file: maix/peripheral/hid.md + label: hid + - collapsed: false + file: maix/peripheral/timer.md + label: timer + label: peripheral + - collapsed: false + file: maix/nn.md + items: + - collapsed: false + file: maix/nn/F.md + label: F + label: nn + label: maix diff --git a/maixpy/config.json b/maixpy/config.json new file mode 100644 index 00000000..7689a260 --- /dev/null +++ b/maixpy/config.json @@ -0,0 +1,5 @@ +{ + "import": "config_zh", + "class": "md_page", + "name": "MaixPy 页面" +} diff --git a/maixpy/doc/assets/body_keypoints.jpg b/maixpy/doc/assets/body_keypoints.jpg new file mode 100644 index 00000000..b0fd0057 Binary files /dev/null and b/maixpy/doc/assets/body_keypoints.jpg differ diff --git a/maixpy/doc/assets/face_detection.jpg b/maixpy/doc/assets/face_detection.jpg new file mode 100644 
index 00000000..5e89913d Binary files /dev/null and b/maixpy/doc/assets/face_detection.jpg differ diff --git a/maixpy/doc/assets/face_recognize.jpg b/maixpy/doc/assets/face_recognize.jpg new file mode 100644 index 00000000..ec5ff7a3 Binary files /dev/null and b/maixpy/doc/assets/face_recognize.jpg differ diff --git a/maixpy/doc/assets/face_tracking1.jpg b/maixpy/doc/assets/face_tracking1.jpg new file mode 100644 index 00000000..f22215de Binary files /dev/null and b/maixpy/doc/assets/face_tracking1.jpg differ diff --git a/maixpy/doc/assets/face_tracking2.jpg b/maixpy/doc/assets/face_tracking2.jpg new file mode 100644 index 00000000..082fd1d8 Binary files /dev/null and b/maixpy/doc/assets/face_tracking2.jpg differ diff --git a/maixpy/doc/assets/gpio_led.png b/maixpy/doc/assets/gpio_led.png new file mode 100644 index 00000000..dda8ecfb Binary files /dev/null and b/maixpy/doc/assets/gpio_led.png differ diff --git a/maixpy/doc/assets/maixvision_browser.jpg b/maixpy/doc/assets/maixvision_browser.jpg new file mode 100644 index 00000000..3bcda9b9 Binary files /dev/null and b/maixpy/doc/assets/maixvision_browser.jpg differ diff --git a/maixpy/doc/assets/maixvision_browser2.jpg b/maixpy/doc/assets/maixvision_browser2.jpg new file mode 100644 index 00000000..94c2895c Binary files /dev/null and b/maixpy/doc/assets/maixvision_browser2.jpg differ diff --git a/maixpy/doc/assets/ocr.jpg b/maixpy/doc/assets/ocr.jpg new file mode 100644 index 00000000..e51de16a Binary files /dev/null and b/maixpy/doc/assets/ocr.jpg differ diff --git a/maixpy/doc/assets/self_learn_classifier.jpg b/maixpy/doc/assets/self_learn_classifier.jpg new file mode 100644 index 00000000..a401b9ef Binary files /dev/null and b/maixpy/doc/assets/self_learn_classifier.jpg differ diff --git a/maixpy/doc/assets/yolov5s_onnx.jpg b/maixpy/doc/assets/yolov5s_onnx.jpg new file mode 100644 index 00000000..fa6c2d4a Binary files /dev/null and b/maixpy/doc/assets/yolov5s_onnx.jpg differ diff --git a/maixpy/doc/assets/yolov8_out1.jpg b/maixpy/doc/assets/yolov8_out1.jpg new file mode 100644 index 00000000..0d932619 Binary files /dev/null and b/maixpy/doc/assets/yolov8_out1.jpg differ diff --git a/maixpy/doc/assets/yolov8_out2.jpg b/maixpy/doc/assets/yolov8_out2.jpg new file mode 100644 index 00000000..5116fddd Binary files /dev/null and b/maixpy/doc/assets/yolov8_out2.jpg differ diff --git a/maixpy/doc/assets/yolov8_seg.jpg b/maixpy/doc/assets/yolov8_seg.jpg new file mode 100644 index 00000000..316875be Binary files /dev/null and b/maixpy/doc/assets/yolov8_seg.jpg differ diff --git a/maixpy/doc/en/README_no_screen.html b/maixpy/doc/en/README_no_screen.html new file mode 100644 index 00000000..4de7c0e1 --- /dev/null +++ b/maixpy/doc/en/README_no_screen.html @@ -0,0 +1,501 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Screenless Edition Quick Start - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Screenless Edition Quick Start

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

About This Document

+

As mentioned in the Quick Start Guide, it is strongly recommended to purchase the version with a screen for development, as it provides a better development experience, including using the built-in APP, accessing apps from the MaixHub App Store, and easier debugging (e.g., common settings can be completed directly by touching the screen interface, and images can be viewed in real-time).

+

However, if you are unable to purchase the version with a screen or require a screenless version for mass production, please refer to this document.

+

Getting a MaixCAM Device

+ +

Initial Setup

+

Preparing the TF Image Card and Inserting it into the Device

+

If your package includes a TF card, it already contains the factory image. If the TF card was not installed in the device during manufacturing, carefully open the case (be careful not to disconnect any cables inside) and insert the TF card. Additionally, since the factory firmware may be outdated, it is essential to update the system to the latest version by following the Upgrade and Flash System instructions; otherwise, some applications and APIs may not function properly.

+

If you did not purchase a TF card, you will need to flash the system onto your own TF card. Follow the Upgrade and Flash System guide, then install the card into the board.

+

Powering On

+

Use a Type-C data cable to connect the MaixCAM device to provide power and wait for the device to boot.

+

Firstly: Ensure that the USB cable is of good quality and that the USB port on your computer is reliable (power supply >= 5V 500mA, normal interference resistance). The first boot may take about 20 seconds, after which your computer will detect one or two virtual network adapters (visible in your computer's network manager).

+

If the virtual network adapter is not detected:

+
    +
  • Ensure that you purchased the TF card package. If you have confirmed that the TF card is inserted into the device, try updating to the latest system.
  • +
  • If you did not purchase the TF card package, you need to flash the latest system onto the TF card following the Upgrade and Flash System guide.
  • +
  • Check if the USB connection is loose and whether the USB cable is of good quality; you can try using a better-quality cable.
  • +
  • Ensure that the USB port provides sufficient power. You can try another USB port or even another computer if possible.
  • +
+

Preparing to Connect the Computer and Device

+

To enable communication between your computer (PC) and the device (MaixCAM), they need to be on the same local area network. Two methods are provided; we will first use Method 1:

+
    +
  • Method 1: Wired connection. The device connects to the computer via a USB cable, and it will be recognized as a virtual USB network adapter, placing it on the same local area network as the computer. If you encounter issues, refer to the FAQ for common problems.
  • +
+
+How to use Method 1 (the wired USB connection) on different computer systems: +

There are two default USB virtual network adapter drivers (NCM and RNDIS) to meet the needs of different systems:

+
    +
  • Windows: All Windows systems will automatically install the RNDIS driver. Only Win11 will automatically install the NCM driver. Either one that works is fine (NCM is faster than RNDIS).
      +
    • Open Task Manager -> Performance, and you will see a virtual Ethernet connection with an IP, for example, 10.131.167.100 is the computer's IP, and the device's IP is the same except the last digit changed to 1, i.e., 10.131.167.1. If it's Win11, you will see two virtual network adapters; you can use any one of the IPs.
    • +
    • Additionally, you can open the Device Manager on your computer (search Device Manager in the search bar). If the RNDIS and NCM drivers are correctly installed, either one that works is fine:
      +RNDIS ok NCM ok
    • +
    +
  • +
  • Linux: No extra setup is required. Just plug in the USB cable. Use ifconfig or ip addr to see usb0 and usb1 network adapters, and you can use either IP. Note that the IP, for example, 10.131.167.100, is the computer's IP, and the device's IP is the same except the last digit changed to 1, i.e., 10.131.167.1.
  • +
  • MacOS: Check the usb network adapter in System Settings -> Network. Note that the IP, for example, 10.131.167.100, is the computer's IP, and the device's IP is the same except the last digit changed to 1, i.e., 10.131.167.1.
  • +
+
+
+
    +
  • Method 2: Wireless connection. The device connects to the same router or WiFi hotspot that the computer is connected to (if you experience screen lag or high latency with WiFi, use a wired connection). There are two methods for connecting to a wireless hotspot:
      +
    • Modify the wifi.ssid and wifi.pass files in the TF card's boot partition and reboot to connect. Modification methods:
        +
      • If you are familiar with SSH, you can connect to the device via SSH (if wired connection is available) and modify the files in the /boot directory.
      • +
      • You can also enter upgrade mode as described in the previous section, after which a USB drive will appear on the computer. Modify the files in it, ensuring to safely eject the drive before rebooting.
      • +
      • You can also use a card reader, and a USB drive will appear on the computer. Modify the wifi.ssid and wifi.pass files in it, ensuring to safely eject the drive before rebooting.
      • +
      +
    • +
    • If the wired connection is already available, you can follow the next step and use MaixVision to run code. Modify the tools/wifi_connect.py script with your SSID and PASSWORD, then run it.
    • +
    +
  • +
+

Preparing the Development Environment

+
    +
  • First, ensure that the computer and device are on the same local area network.
  • +
  • Download and install MaixVision.
  • +
  • Use a Type-C cable to connect the device and computer, open MaixVision, and click the Connect button at the bottom left. The software will automatically search for the device. Wait a moment until the device appears, then click the device to connect.
  • +
+

If the device is not detected, you can find solutions in the FAQ.

+

Here is a video tutorial on using MaixVision:

+

+

Connecting to the Internet

+

The first run requires a network connection to activate the device and install the runtime library. If you do not have a router, you can use your phone to create a hotspot.

+

In MaixVision, modify the tools/wifi_connect.py script with your SSID and PASSWORD, then run it. For other WiFi connection methods, see the previous section.

+
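For reference, the connection logic inside that script is roughly the sketch below. The Wifi class and method names are assumptions based on the maix.network.wifi API; the shipped tools/wifi_connect.py remains the authoritative version.

from maix import network, err

SSID = "your_ssid"          # replace with your router or hotspot name
PASSWORD = "your_password"  # replace with your WiFi password

w = network.wifi.Wifi()
e = w.connect(SSID, PASSWORD, wait=True, timeout=60)   # assumed signature
err.check_raise(e, "connect WiFi failed")
print("connected, device IP:", w.get_ip())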

Upgrading the Runtime Library

+

This step is very important!!! If this step is not completed, other applications and features may not function properly (e.g., crashing).

+
    +
  • First, ensure that the WiFi connection from the previous step is completed and that you have an IP address with internet access.
  • +
  • Run the tools/install_runtime.py script from the MaixVision examples to install the latest runtime library.
  • +
+

If Request failed or a similar error appears, please check if the network is connected and able to access the internet. If the problem persists, take a photo and contact customer service for assistance.

+

Running Examples

+

Click on the Example Code on the left side of MaixVision, select an example, and click the Run button at the bottom left to send the code to the device for execution.

+

For example:

+
    +
  • hello_maix.py, click the Run button, and you will see messages printed by the device in the MaixVision terminal, and an image will appear in the top right corner.
  • +
  • camera_display.py, this example opens the camera and displays the camera feed on the screen.
  • +
+ +
from maix import camera, display, app
+
+disp = display.Display()          # Create a display object and initialize the screen
+cam = camera.Camera(640, 480)     # Create a camera object, manually setting the resolution to 640x480, and initialize the camera
+while not app.need_exit():        # Keep looping until the program exits (can exit by pressing the device's function button or clicking the stop button in MaixVision)
+    img = cam.read()              # Read the camera feed into the img variable, print(img) can be used to print img details
+    disp.show(img)                # Display img on the screen
+
+
    +
  • yolov5.py detects objects in the camera feed, draws bounding boxes around them, and displays them on the screen. It supports detecting 80 different objects. For more details, see YOLOv5 Object Detection.
  • +
+

You can try other examples on your own.

+
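As a reference for what the object-detection example does, the body of yolov5.py looks roughly like the sketch below; the model path, thresholds, and attribute names follow the example shipped with MaixPy and may differ slightly in your firmware.

from maix import camera, display, image, nn, app

detector = nn.YOLOv5(model="/root/models/yolov5s.mud")   # model shipped with the system image
cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
disp = display.Display()

while not app.need_exit():
    img = cam.read()
    objs = detector.detect(img, conf_th=0.5, iou_th=0.45)
    for obj in objs:
        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
        img.draw_string(obj.x, obj.y, f"{detector.labels[obj.class_id]}: {obj.score:.2f}", color=image.COLOR_RED)
    disp.show(img)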
+

If you experience image lag when using the camera examples, it may be due to poor network connection, low-quality USB cable, or poor USB port quality on the host. Try changing the connection method or using a different cable, USB port, or computer.

+
+

Installing Applications on the Device

+

The above steps allow you to run code on the device. Once MaixVision is disconnected, the code will stop running. If you want the code to appear in the boot menu, you can package it as an application and install it on the device.

+

Click the install application button at the bottom left of MaixVision, fill in the application information, and it will be installed on the device. You will then see the application on the device.
+You can also choose to package the application and share it on the MaixHub App Store.

+
+

The default examples do not include an explicit exit function. Press the device's function button to exit the application (for MaixCAM, it is the user button).

+
+

If you want the program to start automatically at boot, you can modify and run the tools/set_autostart.py script.

+

Next Steps

+

If you like what you've seen so far, please make sure to visit GitHub and give the MaixPy open-source project a star (you need to log in to GitHub first). Your star and support are our motivation to keep maintaining and adding new features!

+

You have now completed a basic usage and development process. Next, you can learn more about MaixPy syntax and features by following the directory on the left. If you encounter any issues with the API, you can find help in the API Documentation.

+

It's best to learn with a specific goal in mind, such as working on an interesting project. This will improve your learning experience. You can also share your projects and experiences on the MaixHub Sharing Platform to earn cash rewards!

+

Frequently Asked Questions (FAQ)

+

If you encounter any issues, first check the FAQ. If you can't find a solution, you can ask questions in the forum or group below, or submit a code issue on MaixPy issue.

+

Share and Communicate

+
    +
  • MaixHub Project and Experience Sharing: Share your projects and experiences to earn cash rewards. To receive official rewards, your content should meet the following criteria:
      +
    • Reproducibility: A fairly complete project reproduction process.
    • +
    • Showcase: Projects without a detailed reproduction process but with an attractive presentation.
    • +
    • Bug Solution Experience: Share your process and specific solution to a difficult problem.
    • +
    +
  • +
  • MaixPy Official Forum (for questions and discussions)
  • +
  • QQ Group: (It's recommended to post first before asking in the QQ group so others can quickly understand your problem and reproduction process)
      +
    • MaixPy (v4) AI Vision Group: 862340358
    • +
    +
  • +
  • Telegram: MaixPy
  • +
  • MaixPy Code Issues: MaixPy issue
  • +
  • For business cooperation or bulk purchases, please contact support@sipeed.com.
  • +
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/ai_model_converter/maixcam.html b/maixpy/doc/en/ai_model_converter/maixcam.html new file mode 100644 index 00000000..8878103e --- /dev/null +++ b/maixpy/doc/en/ai_model_converter/maixcam.html @@ -0,0 +1,551 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Convert ONNX Model to a Format Usable by MaixCAM / MaixPy (MUD) - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Convert ONNX Model to a Format Usable by MaixCAM / MaixPy (MUD)

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

Models trained on a computer cannot be directly used by MaixCAM due to its limited hardware performance. Generally, we need to perform INT8 quantization to reduce computation and convert the model into a format supported by MaixCAM.

+

This article explains how to convert an ONNX model into a format that MaixCAM can use (MUD model).

+

Model File Formats Supported by MaixCAM

+

MUD (Model Universal Description file) is a model description file supported by MaixPy, used to unify model files across different platforms, making MaixPy code cross-platform compatible. It is essentially a text file in ini format and can be edited with a text editor.
+Typically, a MUD file is accompanied by one or more actual model files. For MaixCAM, the actual model file is in .cvimodel format, with the MUD file providing some descriptive information.

+

For example, a YOLOv8 model consists of two files: yolov8n.mud and yolov8n.cvimodel. The former contains:

+ +
[basic]
+type = cvimodel
+model = yolov8n.cvimodel
+
+[extra]
+model_type = yolov8
+input_type = rgb
+mean = 0, 0, 0
+scale = 0.00392156862745098, 0.00392156862745098, 0.00392156862745098
+labels = person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair dryer, toothbrush
+
+

This file specifies the model type as cvimodel and the model path relative to the MUD file as yolov8n.cvimodel. It also includes information such as preprocessing mean and scale (which should match the preprocessing method used during training), and labels representing the 80 categories for object detection.

+

When using this model, place both files in the same directory.

+
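Once both files are on the device, loading the model from MaixPy looks roughly like the sketch below; the class and method names follow the YOLOv8 example shipped with MaixPy, and the model path is a placeholder.

from maix import camera, display, image, nn, app

detector = nn.YOLOv8(model="/root/models/yolov8n.mud")   # the .cvimodel must sit next to the .mud file
cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
disp = display.Display()

while not app.need_exit():
    img = cam.read()
    for obj in detector.detect(img, conf_th=0.5, iou_th=0.45):
        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
    disp.show(img)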

Preparing the ONNX Model

+

Prepare your ONNX model and view it on https://netron.app/ to ensure that the operators used in your model are supported by the conversion tool. The list of supported operators can be found in the CVITEK_TPU_SDK Developer Guide.pdf available from Sophgo's TPU SDK.

+

Identify Appropriate Quantization Output Nodes

+

Models usually have post-processing nodes that are handled by the CPU. We need to strip these out as they can affect quantization quality and potentially cause quantization to fail.

+

For example, in YOLOv5:

+

YOLOv5 ONNX Model

+

There are three conv layers, with subsequent calculations handled by the CPU. For quantization, use the outputs of these conv layers as the final outputs of the model. The output names in this case are /model.24/m.0/Conv_output_0,/model.24/m.1/Conv_output_0,/model.24/m.2/Conv_output_0.

+

Setting Up the Model Conversion Environment

+

The model conversion uses Sophgo's https://github.com/sophgo/tpu-mlir. We will install it in a Docker environment to avoid compatibility issues with the host machine.

+

Install Docker

+

Follow the official Docker installation documentation.

+

For example:

+ +
# Install dependencies for Docker
+sudo apt-get update
+sudo apt-get install apt-transport-https ca-certificates curl gnupg-agent software-properties-common
+# Add the official Docker source
+curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
+sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
+# Install Docker
+sudo apt-get update
+sudo apt-get install docker-ce docker-ce-cli containerd.io
+
+

Pull the Docker Image

+ +
docker pull sophgo/tpuc_dev:latest
+
+
+

If pulling from within China, you may experience slow speeds. Consider setting up a local mirror. You can search for instructions or refer to Docker Proxy and Mirror Setup.

+
+

Run the Container

+ +
docker run --privileged --name tpu-env -v /home/$USER/data:/home/$USER/data -it sophgo/tpuc_dev
+
+

This command starts a container named tpu-env, mounting the ~/data directory from the host to the container's ~/data, enabling file sharing and path consistency.

+

To start the container next time, use docker start tpu-env && docker attach tpu-env.

+

Install tpu-mlir

+

Download the whl file from GitHub and place it in the ~/data directory. Install it in the container:

+ +
pip install tpu_mlir*.whl # Replace with the downloaded file name
+
+

Running model_transform.py should display help information, indicating a successful installation.

+

Writing the Conversion Script

+

The conversion mainly involves two commands: model_transform.py and model_deploy.py. To simplify the process, create a script convert_yolov5_to_cvimodel.sh:

+ +
#!/bin/bash
+
+set -e
+
+net_name=yolov5s
+input_w=640
+input_h=640
+
+# mean: 0, 0, 0
+# std: 255, 255, 255
+
+# mean
+# 1/std
+
+# mean: 0, 0, 0
+# scale: 0.00392156862745098, 0.00392156862745098, 0.00392156862745098
+
+mkdir -p workspace
+cd workspace
+
+# convert to mlir
+model_transform.py \
+--model_name ${net_name} \
+--model_def ../${net_name}.onnx \
+--input_shapes [[1,3,${input_h},${input_w}]] \
+--mean "0,0,0" \
+--scale "0.00392156862745098,0.00392156862745098,0.00392156862745098" \
+--keep_aspect_ratio \
+--pixel_format rgb \
+--channel_format nchw \
+--output_names "/model.24/m.0/Conv_output_0,/model.24/m.1/Conv_output_0,/model.24/m.2/Conv_output_0" \
+--test_input ../dog.jpg \
+--test_result ${net_name}_top_outputs.npz \
+--tolerance 0.99,0.99 \
+--mlir ${net_name}.mlir
+
+# export bf16 model
+#   not use --quant_input, use float32 for easy coding
+model_deploy.py \
+--mlir ${net_name}.mlir \
+--quantize BF16 \
+--processor cv181x \
+--test_input ${net_name}_in_f32.npz \
+--test_reference ${net_name}_top_outputs.npz \
+--model ${net_name}_bf16.cvimodel
+
+echo "calibrate for int8 model"
+# export int8 model
+run_calibration.py ${net_name}.mlir \
+--dataset ../images \
+--input_num 200 \
+-o ${net_name}_cali_table
+
+echo "convert to int8 model"
+# export int8 model
+#    add --quant_input, use int8 for faster processing in maix.nn.NN.forward_image
+model_deploy.py \
+--mlir ${net_name}.mlir \
+--quantize INT8 \
+--quant_input \
+--calibration_table ${net_name}_cali_table \
+--processor cv181x \
+--test_input ${net_name}_in_f32.npz \
+--test_reference ${net_name}_top_outputs.npz \
+--tolerance 0.9,0.6 \
+--model ${net_name}_int8.cvimodel
+
+

Key parameters include:

+
    +
  • output_names: Names of the output nodes we identified earlier.
  • +
  • mean, scale: Preprocessing methods used during training. For instance, YOLOv5 preprocesses the image by subtracting mean and dividing by std. In this example, mean is 0 and std is 255, meaning the scale is 1/std. Modify these according to your model's preprocessing method.
  • +
  • test_input: The image used for testing during conversion. In this script, it's ../dog.jpg, so ensure this image is placed in the same directory as the script. Replace it according to your model.
  • +
  • tolerance: Allowed error margin before and after quantization. If errors during conversion indicate values lower than this threshold, it means the converted model might have significant deviation from the ONNX model. If acceptable, you can lower this threshold. Often, this requires optimizing the model and carefully examining post-processing.
  • +
+

+
    +
  • quantize: The data type for quantization. Generally, INT8 models are used on MaixCAM. Although a BF16 model is also converted here, INT8 is preferred for speed, while BF16 can be considered if INT8 conversion is not feasible or if precision is critical.
  • +
  • dataset: The dataset used for quantization. For YOLOv5, it's a folder of images. Copy a subset of typical images from the coco dataset. Use --input_num to specify the number of images used (should be ≤ the actual number in the images directory).
  • +
+

Running the Conversion Script

+

Run the script with:

+ +
chmod +x convert_yolov5_to_cvimodel.sh && ./convert_yolov5_to_cvimodel.sh
+
+

Wait for the conversion to complete.

+

If errors occur, carefully review the previous explanations for potential issues with parameters or output layers.

+

Upon successful conversion, the workspace folder will contain a ${net_name}_int8.cvimodel file (yolov5s_int8.cvimodel in this example).

+

Writing the MUD File

+

Modify the MUD file according to your model. For YOLOv5, the MUD file looks like this. Change labels to match your trained model:

+ +
[basic]
+type = cvimodel
+model = yolov5s.cvimodel
+
+[extra]
+model_type = yolov5
+input_type = rgb
+mean = 0, 0, 0
+scale = 0.00392156862745098, 0.00392156862745098, 0.00392156862745098
+anchors = 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326
+labels = person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair dryer, toothbrush
+
+

The basic section specifies the model file type and path, necessary for loading and running the model using the maix.nn.NN class in MaixPy or MaixCDK.

+

The extra section varies based on the model. It includes parameters such as preprocessing, post-processing, and labels. For YOLOv5, you can download its model, copy, and modify it.

+

If you need to support a new model not currently supported by MaixPy, define the extra parameters based on the model's preprocessing and post-processing requirements, then write the corresponding decoding class. If you prefer not to modify the MaixPy C++ source code, you can use the maix.nn.NN class to load the model and handle post-processing in Python, though this is less efficient.

+
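That raw-inference path looks roughly like the sketch below; the model path, input size, and post-processing are placeholders that depend entirely on your model.

from maix import nn, image

net = nn.NN("my_model.mud")                     # placeholder MUD file describing your .cvimodel
img = image.load("test.jpg").resize(224, 224)   # resize to your model's input size
outputs = net.forward_image(img)                # run inference, returns the raw output tensors
# decode "outputs" in Python according to your model's head (e.g. sigmoid, NMS, argmax, ...)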

Writing Post-processing Code

+

If you modify the mud file based on supported models, you can directly use the corresponding code in MaixPy or MaixCDK. If you need to support new models, design the mud file and write the preprocessing and post-processing code:

+
    +
  1. Option 1: Use maix.nn.NN in MaixPy to load the model, then use the forward or forward_image function to run the model and process the output with Python functions.
  2. +
  3. Option 2: In MaixCDK, refer to YOLOv5 source code, add a new hpp file, and create a class to process your model. Modify all functions and class @maixpy annotations, compile the MaixPy project, and call the new class to run the model in MaixPy.
  4. +
+

You can submit the source code (Pull Request) to the main MaixPy repository to contribute to the community and share new models on MaixHub for rewards ranging from 30 to 2000 yuan based on quality!

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/audio/ai_classify.html b/maixpy/doc/en/audio/ai_classify.html new file mode 100644 index 00000000..b9c4051a --- /dev/null +++ b/maixpy/doc/en/audio/ai_classify.html @@ -0,0 +1,364 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy AI voice classify - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy AI voice classify

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

TODO: To be completed. If you need it urgently, you can first port the model yourself or process the audio into a spectrogram using FFT, and then train an AI classification model based on the image representation.

+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/audio/digit.html b/maixpy/doc/en/audio/digit.html new file mode 100644 index 00000000..5c185615 --- /dev/null +++ b/maixpy/doc/en/audio/digit.html @@ -0,0 +1,511 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Continuous Chinese digit recognition - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Continuous Chinese digit recognition

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
Date | Version | Author | Update content
2024-10-08 | 1.0.0 | 916BGAI | Initial document
+
+
+ +
+
+ +

Introduction

+

MaixCAM has ported the Maix-Speech offline speech library, enabling continuous Chinese numeral recognition, keyword recognition, and large vocabulary speech recognition capabilities. It supports audio recognition in PCM and WAV formats, and can accept input recognition via the onboard microphone.

+

Maix-Speech

+

Maix-Speech is an offline speech library specifically designed for embedded environments. It features deep optimization of speech recognition algorithms, achieving a significant lead in memory usage while maintaining excellent WER. For more details on the principles, please refer to the open-source project.

+

Continuous Chinese digit recognition

+ +
from maix import app, nn
+
+speech = nn.Speech("/root/models/am_3332_192_int8.mud")
+speech.init(nn.SpeechDevice.DEVICE_MIC, "hw:0,0")
+
+def callback(data: str, len: int):
+    print(data)
+
+speech.digit(640, callback)
+
+while not app.need_exit():
+    frames = speech.run(1)
+    if frames < 1:
+        print("run out\n")
+        speech.deinit()
+        break
+
+

Usage

+
    +
  1. Import the app and nn modules
  2. +
+ +
from maix import app, nn
+
+
    +
  1. Load the acoustic model
  2. +
+ +
speech = nn.Speech("/root/models/am_3332_192_int8.mud")
+
+
    +
  • You can also load the am_7332 acoustic model; larger models provide higher accuracy but consume more resources.
  • +
+
    +
  1. Choose the corresponding audio device
  2. +
+ +
speech.init(nn.SpeechDevice.DEVICE_MIC, "hw:0,0")
+
+
    +
  • This uses the onboard microphone and supports both WAV and PCM audio as input devices.
  • +
+ +
speech.init(nn.SpeechDevice.DEVICE_WAV, "path/audio.wav")   # Using WAV audio input
+
+ +
speech.init(nn.SpeechDevice.DEVICE_PCM, "path/audio.pcm")   # Using PCM audio input
+
+
    +
  • Note that WAV must be 16KHz sample rate with S16_LE storage format. You can use the arecord tool for conversion.
  • +
+ +
arecord -d 5 -r 16000 -c 1 -f S16_LE audio.wav
+
+
    +
  • When recognizing PCM/WAV , if you want to reset the data source, such as for the next WAV file recognition, you can use the speech.devive method, which will automatically clear the cache:
  • +
+ +
speech.devive(nn.SpeechDevice.DEVICE_WAV, "path/next.wav")
+
+
    +
  1. Set up the decoder
  2. +
+ +
def callback(data: str, len: int):
+    print(data)
+
+speech.digit(640, callback)
+
+
    +
  • Users can register several decoders (or none), which decode the results from the acoustic model and execute the corresponding user callback. Here, a digit decoder is registered to output the Chinese digit recognition results from the last 4 seconds. The returned recognition results are in string format and support 0123456789 .(dot) S(ten) B(hundred) Q(thousand) W(ten thousand). For other decoder usages, please refer to the sections on Real-time voice recognition and keyword recognition.

    +
  • +
  • When setting the digit decoder, you need to specify a blank value; exceeding this value (in ms) will insert a _ in the output results to indicate idle silence.

    +
  • +
  • After registering the decoder, use the speech.deinit() method to clear the initialization.

    +
  • +
+
    +
  1. Recognition
  2. +
+ +
while not app.need_exit():
+    frames = speech.run(1)
+    if frames < 1:
+        print("run out\n")
+        speech.deinit()
+        break
+
+
    +
  • Use the speech.run method to run speech recognition. The parameter specifies the number of frames to run each time, returning the actual number of frames processed. Users can choose to run 1 frame each time and then perform other processing, or run continuously in a single thread, stopping it with an external thread.
  • +
+

Recognition Results

+

If the above program runs successfully, speaking into the onboard microphone will yield continuous Chinese digit recognition results, such as:

+ +
_0123456789
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/audio/keyword.html b/maixpy/doc/en/audio/keyword.html new file mode 100644 index 00000000..e48cdf69 --- /dev/null +++ b/maixpy/doc/en/audio/keyword.html @@ -0,0 +1,540 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Keyword recognition - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Keyword recognition

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
Date | Version | Author | Update content
2024-10-08 | 1.0.0 | 916BGAI | Initial document
+
+
+ +
+
+ +

Introduction

+

MaixCAM has ported the Maix-Speech offline speech library, enabling continuous Chinese numeral recognition, keyword recognition, and large vocabulary speech recognition capabilities. It supports audio recognition in PCM and WAV formats, and can accept input recognition via the onboard microphone.

+

Maix-Speech

+

Maix-Speech is an offline speech library specifically designed for embedded environments. It features deep optimization of speech recognition algorithms, achieving a significant lead in memory usage while maintaining excellent WER. For more details on the principles, please refer to the open-source project.

+

Keyword recognition

+ +
from maix import app, nn
+
+speech = nn.Speech("/root/models/am_3332_192_int8.mud")
+speech.init(nn.SpeechDevice.DEVICE_MIC, "hw:0,0")
+
+kw_tbl = ['xiao3 ai4 tong2 xue2',
+          'ni3 hao3',
+          'tian1 qi4 zen3 me yang4']
+kw_gate = [0.1, 0.1, 0.1]
+
+def callback(data:list[float], len: int):
+    for i in range(len):
+        print(f"\tkw{i}: {data[i]:.3f};", end=' ')
+    print("\n")
+
+speech.kws(kw_tbl, kw_gate, callback, True)
+
+while not app.need_exit():
+    frames = speech.run(1)
+    if frames < 1:
+        print("run out\n")
+        speech.deinit()
+        break
+
+

Usage

+
    +
  1. Import the app and nn modules
  2. +
+ +
from maix import app, nn
+
+
    +
  1. Load the acoustic model
  2. +
+ +
speech = nn.Speech("/root/models/am_3332_192_int8.mud")
+
+
    +
  • You can also load the am_7332 acoustic model; larger models provide higher accuracy but consume more resources.
  • +
+
    +
  1. Choose the corresponding audio device
  2. +
+ +
speech.init(nn.SpeechDevice.DEVICE_MIC, "hw:0,0")
+
+
    +
  • This uses the onboard microphone and supports both WAV and PCM audio as input devices.
  • +
+ +
speech.init(nn.SpeechDevice.DEVICE_WAV, "path/audio.wav")   # Using WAV audio input
+
+ +
speech.init(nn.SpeechDevice.DEVICE_PCM, "path/audio.pcm")   # Using PCM audio input
+
+
    +
  • Note that WAV must be 16KHz sample rate with S16_LE storage format. You can use the arecord tool for conversion.
  • +
+ +
arecord -d 5 -r 16000 -c 1 -f S16_LE audio.wav
+
+
    +
  • When recognizing PCM/WAV files, if you want to switch the data source (for example, to recognize the next WAV file), you can use the speech.devive method, which automatically clears the cache:
  • +
+ +
speech.devive(nn.SpeechDevice.DEVICE_WAV, "path/next.wav")
+
+
    +
  1. Set up the decoder
  2. +
+ +
kw_tbl = ['xiao3 ai4 tong2 xue2',
+          'ni3 hao3',
+          'tian1 qi4 zen3 me yang4']
+kw_gate = [0.1, 0.1, 0.1]
+
+def callback(data:list[float], len: int):
+    for i in range(len):
+        print(f"\tkw{i}: {data[i]:.3f};", end=' ')
+    print("\n")
+
+speech.kws(kw_tbl, kw_gate, callback, True)
+
+
    +
  • Users can register several decoders (or none); they decode the results from the acoustic model and invoke the corresponding user callback. Here, a kws decoder is registered, which outputs the probabilities of all registered keywords for the most recent frame. Users can observe these probability values and set their own activation thresholds (a minimal sketch of this is shown after this list). For other decoder usages, please refer to the sections on Real-time voice recognition and continuous Chinese numeral recognition.

    +
  • +
  • When setting up the kws decoder, you need to provide a keyword list (each keyword written as space-separated Pinyin), a list of probability thresholds for the keywords in the same order, and specify whether to enable automatic near-sound processing. If set to True, different tones of the same Pinyin will be treated as similar sounds so their probabilities are accumulated. Finally, you need to set a callback function to handle the decoded data.

    +
  • +
  • Users can also manually register near-sound words using the speech.similar method, with a maximum of 10 near-sound words registered for each Pinyin. (Note that using this interface to register near-sound words will override the near-sound table generated by enabling automatic near-sound processing.)

    +
  • +
+ +
similar_char = ['zhen3', 'zheng3']
+speech.similar('zen3', similar_char)
+
+
    +
  • After you are done with the registered decoder(s), use the speech.deinit() method to clear the initialization.
  • +
+
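+

As mentioned above, you can observe the probability values and define your own activation threshold. Below is a minimal, illustrative sketch of a callback that could be passed to speech.kws in place of the one in the example above; ACTIVATE_THRESHOLD is an example value, not part of the MaixPy API.

+
+ +
ACTIVATE_THRESHOLD = 0.8   # example value, tune it based on the observed probabilities
+
+def callback(data: list[float], length: int):
+    for i in range(length):
+        if data[i] > ACTIVATE_THRESHOLD:
+            print(f"keyword '{kw_tbl[i]}' activated, p={data[i]:.3f}")
+
+speech.kws(kw_tbl, kw_gate, callback, True)
+
+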
    +
  1. Recognition
  2. +
+ +
while not app.need_exit():
+    frames = speech.run(1)
+    if frames < 1:
+        print("run out\n")
+        speech.deinit()
+        break
+
+
    +
  • Use the speech.run method to run speech recognition. The parameter specifies the number of frames to process per call, and the method returns the number of frames actually processed. You can run one frame at a time and do other processing in between, or run it continuously in a dedicated thread and stop it from another thread.
  • +
+

Recognition Results

+

If the above program runs successfully, speaking into the onboard microphone will yield keyword recognition results, such as:

+ +
kws log 2.048s, len 24
+decoder_kws_init get 3 kws
+  00, xiao3 ai4 tong2 xue2
+  01, ni3 hao3
+  02, tian1 qi4 zen3 me yang4
+find shared memory(491520),  saved:491520
+    kw0: 0.959; 	kw1: 0.000; 	kw2: 0.000;     # xiao3 ai4 tong2 xue2
+    kw0: 0.000; 	kw1: 0.930; 	kw2: 0.000;     # ni3 hao3
+    kw0: 0.000; 	kw1: 0.000; 	kw2: 0.961;     # tian1 qi4 zen3 me yang4
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/audio/play.html b/maixpy/doc/en/audio/play.html new file mode 100644 index 00000000..b3c58b25 --- /dev/null +++ b/maixpy/doc/en/audio/play.html @@ -0,0 +1,523 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Playback Audio - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Playback Audio

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
DateVersionAuthorUpdate content
2024-05-201.0.0lxowalle + + Initial document + +
+
+
+ +
+
+ +

Introduction

+

This document provides instructions on how to play audio.

+

How to use

+

Hardware operation

+

image-20240520134637905

+

The MaixCAM does not have a built-in speaker, so you will need to solder a 1 W speaker yourself. As shown in the diagram above, the speaker should be soldered to the VOP and VON pins marked Speaker.

+

Note: If the MaixCAM already has copper posts soldered to these pins, the speaker can be soldered directly to the posts, or soldered on the other side of the board for a neater appearance.

+

Code

+

Playing a WAV file

+ +
from maix import audio, time, app
+
+p = audio.Player("/root/output.wav")
+
+p.play()
+
+while not app.need_exit():
+    time.sleep_ms(10)
+print("play finish!")
+
+

Steps:

+
    +
  1. Import the audio, time and app modules:

    + +
    from maix import audio, time, app
    +
    +
  2. +
  3. Initialize the player:

    + +
    p = audio.Player("/root/output.wav")
    +
    +
  4. +
+
    +
  • Note that the default sample rate is 48 kHz, the default sample format is signed 16-bit little-endian, and the default number of channels is 1. You can also customise the parameters, for example p = audio.Player(sample_rate=48000, format=audio.Format.FMT_S16_LE, channel=1). So far only a sample rate of 48000, the FMT_S16_LE format, and 1 channel have been tested.
  • +
  • If it is a .wav file, the sample rate, sample format and sample channel are automatically obtained.
  • +
+
    +
  1. Playing audio

    + +
    p.play()
    +
    +
  2. +
+
    +
  • This call blocks until all audio data has been written, but not until it has actually finished playing. If you exit the program right after calling play(), some of the pending audio data may be lost.
  • +
+
    +
  1. Done
  2. +
+

Playback with PCM data

+ +
from maix import audio, time, app
+
+p = audio.Player()
+
+with open('/root/output.pcm', 'rb') as f:
+    ctx = f.read()
+
+p.play(bytes(ctx))
+
+while not app.need_exit():
+    time.sleep_ms(10)
+
+print("play finish!")
+
+

Steps:

+
    +
  1. Import the audio, time and app modules:

    + +
    from maix import audio, time, app
    +
    +
  2. +
  3. Initialize the player:

    + +
    p = audio.Player()
    +
    +
  4. +
+
    +
  • Note that the default sample rate is 48 kHz, the default sample format is signed 16-bit little-endian, and the default number of channels is 1. You can also customise the parameters, for example p = audio.Player(sample_rate=48000, format=audio.Format.FMT_S16_LE, channel=1). So far only a sample rate of 48000, the FMT_S16_LE format, and 1 channel have been tested.
  • +
+
    +
  1. Open and playback a PCM file

    + +
      with open('/root/output.pcm', 'rb') as f:
    +      ctx = f.read()
    +
    +  p.play(bytes(ctx))
    +
    +  while not app.need_exit():
    +    time.sleep_ms(10)
    +
    +
  2. +
+
    +
  • with open('xxx', 'rb') as f: opens file xxx and returns the file object f
  • +
  • ctx = f.read() reads the contents of the file into ctx
  • +
  • p.play(bytes(ctx)) plays the audio, p is the opened player object, ctx is the PCM data converted to type bytes
  • +
  • time.sleep_ms(10): the loop waits for playback to finish, because playback is performed asynchronously; if the program exits too early, the audio may not be played completely.
  • +
+
    +
  1. Done
  2. +
+

Other

+

The Player and Recorder modules still have some bugs to be worked out; make sure they are created before other modules (Camera module, Display module, etc.). For example:

+ +
from maix import audio, camera
+
+# Create Player and Recorder first.
+p = audio.Player()
+r = audio.Recorder()
+
+# Then create the Camera
+c = camera.Camera()
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/audio/recognize.html b/maixpy/doc/en/audio/recognize.html new file mode 100644 index 00000000..d2bc4769 --- /dev/null +++ b/maixpy/doc/en/audio/recognize.html @@ -0,0 +1,520 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Real-time voice recognition - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Real-time voice recognition

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
DateVersionAuthorUpdate content
2024-10-081.0.0916BGAI + + Initial document + +
+
+
+ +
+
+ +

Introduction

+

MaixCAM has ported the Maix-Speech offline speech library, enabling continuous Chinese numeral recognition, keyword recognition, and large vocabulary speech recognition capabilities. It supports recognition of PCM and WAV audio, and can also take input directly from the onboard microphone for recognition.

+

Maix-Speech

+

Maix-Speech is an offline speech library specifically designed for embedded environments. Its speech recognition algorithms are deeply optimized, giving it a large advantage in memory usage while maintaining an excellent word error rate (WER). For more details on the principles, please refer to the open-source project.

+

Continuous Large Vocabulary Speech Recognition

+ +
from maix import app, nn
+
+speech = nn.Speech("/root/models/am_3332_192_int8.mud")
+speech.init(nn.SpeechDevice.DEVICE_MIC, "hw:0,0")
+
+def callback(data: tuple[str, str], len: int):
+    print(data)
+
+lmS_path = "/root/models/lmS/"
+
+speech.lvcsr(lmS_path + "lg_6m.sfst", lmS_path + "lg_6m.sym", \
+             lmS_path + "phones.bin", lmS_path + "words_utf.bin", \
+             callback)
+
+while not app.need_exit():
+    frames = speech.run(1)
+    if frames < 1:
+        print("run out\n")
+        speech.deinit()
+        break
+
+

Usage

+
    +
  1. Import the app and nn modules
  2. +
+ +
from maix import app, nn
+
+
    +
  1. Load the acoustic model
  2. +
+ +
speech = nn.Speech("/root/models/am_3332_192_int8.mud")
+
+
    +
  • You can also load the am_7332 acoustic model; larger models provide higher accuracy but consume more resources.
  • +
+
    +
  1. Choose the corresponding audio device
  2. +
+ +
speech.init(nn.SpeechDevice.DEVICE_MIC, "hw:0,0")
+
+
    +
  • This example uses the onboard microphone; WAV and PCM audio files are also supported as input devices.
  • +
+ +
speech.init(nn.SpeechDevice.DEVICE_WAV, "path/audio.wav")   # Using WAV audio input
+
+ +
speech.init(nn.SpeechDevice.DEVICE_PCM, "path/audio.pcm")   # Using PCM audio input
+
+
    +
  • Note that WAV input must have a 16 kHz sample rate and the S16_LE sample format. You can use the arecord tool to record audio in this format, for example:
  • +
+ +
arecord -d 5 -r 16000 -c 1 -f S16_LE audio.wav
+
+
    +
  • When recognizing PCM/WAV files, if you want to switch the data source (for example, to recognize the next WAV file), you can use the speech.devive method, which automatically clears the cache:
  • +
+ +
speech.devive(nn.SpeechDevice.DEVICE_WAV, "path/next.wav")
+
+
    +
  1. Set up the decoder
  2. +
+ +
def callback(data: tuple[str, str], len: int):
+    print(data)
+
+lmS_path = "/root/models/lmS/"
+
+speech.lvcsr(lmS_path + "lg_6m.sfst", lmS_path + "lg_6m.sym", \
+             lmS_path + "phones.bin", lmS_path + "words_utf.bin", \
+             callback)
+
+
    +
  • Users can register several decoders (or none), which decode the results from the acoustic model and execute the corresponding user callback. Here, a lvcsr decoder is registered to output continuous speech recognition results (for fewer than 1024 Chinese characters). For other decoder usages, please refer to the sections on continuous Chinese numeral recognition and keyword recognition.

    +
  • +
  • When setting up the lvcsr decoder, you need to specify the paths for the sfst file, the sym file (output symbol table), the path for phones.bin (phonetic table), and the path for words.bin (dictionary). Lastly, a callback function must be set to handle the decoded data (a small sketch of unpacking the result is shown after this list).

    +
  • +
  • After you are done with the registered decoder(s), use the speech.deinit() method to clear the initialization.

    +
  • +
+
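+

As a small illustrative sketch of handling the decoded data: based on the sample output in the Recognition Results section below, data[0] is the recognized Chinese text and data[1] is the corresponding Pinyin, so the callback can unpack them like this.

+
+ +
def callback(data: tuple[str, str], length: int):
+    chinese, pinyin = data
+    print("text  :", chinese)
+    print("pinyin:", pinyin)
+
+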
    +
  1. Recognition
  2. +
+ +
while not app.need_exit():
+    frames = speech.run(1)
+    if frames < 1:
+        print("run out\n")
+        speech.deinit()
+        break
+
+
    +
  • Use the speech.run method to run speech recognition. The parameter specifies the number of frames to process per call, and the method returns the number of frames actually processed. You can run one frame at a time and do other processing in between, or run it continuously in a dedicated thread and stop it from another thread.
  • +
+

Recognition Results

+

If the above program runs successfully, speaking into the onboard microphone will yield real-time speech recognition results, such as:

+ +
### SIL to clear decoder!
+('今天天气 怎么样 ', 'jin1 tian1 tian1 qi4 zen3 me yang4 ')
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/audio/record.html b/maixpy/doc/en/audio/record.html new file mode 100644 index 00000000..0fc86df6 --- /dev/null +++ b/maixpy/doc/en/audio/record.html @@ -0,0 +1,502 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Audio Record - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Audio Record

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
DateVersionAuthorUpdate content
2024-05-201.0.0lxowalle + + Initial document + +
+
+
+ +
+
+ +

Introduction

+

This document describes how to record audio; recording in PCM and WAV formats is supported.

+

The MaixCAM has a microphone on board, so you can use the recording function directly.

+

How to use

+

Getting PCM data

+

If you don't pass path when constructing a Recorder object, it will only record audio and not save it to a file, but you can save it to a file manually.

+ +
from maix import audio, time, app
+
+r = audio.Recorder()
+r.volume(12)
+print("sample_rate:{} format:{} channel:{}".format(r.sample_rate(), r.format(), r.channel()))
+
+while not app.need_exit():
+    data = r.record()
+    print("data size", len(data))
+
+    time.sleep_ms(10)
+
+print("record finish!")
+
+

Steps:

+
    +
  1. Import the audio, time and app modules:

    + +
    from maix import audio, time, app
    +
    +
  2. +
  3. Initialize Recorder

    + +
    r = audio.Recorder()
    +r.volume(12)
    +
    +
      +
    • Note that the default sample rate is 48 kHz, the default sample format is signed 16-bit little-endian, and the default number of channels is 1. You can also customise the parameters, for example r = audio.Recorder(sample_rate=48000, format=audio.Format.FMT_S16_LE, channel=1). So far only a sample rate of 48000, the FMT_S16_LE format, and 1 channel have been tested.

      +
    • +
    • r.volume(12) sets the volume; the volume range is [0, 100].

      +
    • +
    +
  4. +
  5. Start recording

    + +
    data = r.record()
    +
    +
      +
    • data is a bytes object containing the currently recorded audio in PCM format. The PCM format is set when initialising the Recorder object, see step 2. Note that if you call record too quickly and there is no data in the audio buffer yet, an empty bytes object may be returned.
    • +
    +
  6. +
  7. Done. In your own applications you can perform voice processing on the PCM data returned by r.record() (a sketch of manually saving this data to a file is shown after this list).

    +
  8. +
+
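+

As mentioned at the start of this section, when no path is passed you can save the recorded PCM data to a file yourself. Below is a minimal, illustrative sketch; the output path /root/output.pcm is just an example.

+
+ +
from maix import audio, time, app
+
+r = audio.Recorder()
+r.volume(12)
+
+# Manually append each recorded PCM chunk to a file.
+with open("/root/output.pcm", "wb") as f:
+    while not app.need_exit():
+        data = r.record()
+        if data:                # record() may return empty bytes
+            f.write(data)
+        time.sleep_ms(10)
+
+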

Record audio and save to WAV format

+

If you pass path when constructing a Recorder object, the recorded audio will be saved to that file, and you can still get the currently recorded PCM data via the record method. path only supports paths with .pcm and .wav suffixes; when recording to .wav, the record method does not return the WAV header, only the PCM data.

+ +
from maix import audio, time, app
+
+r = audio.Recorder("/root/output.wav")
+r.volume(12)
+print("sample_rate:{} format:{} channel:{}".format(r.sample_rate(), r.format(), r.channel()))
+
+while not app.need_exit():
+    data = r.record()
+    print("data size", len(data))
+
+    time.sleep_ms(10)
+
+print("record finish!")
+
+

The code means basically the same as above.

+

Record audio and save to WAV format (blocking)

+

If the record_ms parameter is passed when recording, the call blocks until the duration set by record_ms (in milliseconds) has elapsed.

+ +
from maix import audio, time, app
+
+r = audio.Recorder("/root/output.wav")
+r.volume(12)
+print("sample_rate:{} format:{} channel:{}".format(r.sample_rate(), r.format(), r.channel()))
+
+r.record(5000)
+
+print("record finish!")
+
+

The above example records for 5000 ms and saves the result in WAV format. It blocks inside the record method for the whole recording period; note that no PCM data is returned when record_ms is set.

+

Other

+

The Player and Recorder modules still have some bugs to be worked out; make sure they are created before other modules (Camera module, Display module, etc.). For example:

+ +
from maix import audio, camera
+
+# Create Player and Recorder first.
+p = audio.Player()
+r = audio.Recorder()
+
+# Then create the Camera
+c = camera.Camera()
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/audio/synthesis.html b/maixpy/doc/en/audio/synthesis.html new file mode 100644 index 00000000..388226be --- /dev/null +++ b/maixpy/doc/en/audio/synthesis.html @@ -0,0 +1,374 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy speech synthesis - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy speech synthesis

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

TODO: coming soon~

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/basic/app.html b/maixpy/doc/en/basic/app.html new file mode 100644 index 00000000..137dc071 --- /dev/null +++ b/maixpy/doc/en/basic/app.html @@ -0,0 +1,441 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy App development and app stores - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy App development and app stores

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Where to Find Applications

+

After powering on, the device will automatically enter the application selection interface. All built-in applications are available on the MaixHub App Store, where you can find corresponding app descriptions and usage instructions.

+

Where to Find Source Code

+

You can find the source code links (if available) on the app pages in the App Store. The source code for official integrated applications is located in the MaixPy/projects directory or the MaixCDK/projects directory.

+

Installing Applications

+

Frequently used settings include Settings -> Language and Settings -> WiFi.

+

The App Store application can be used to upgrade and install apps. Once connected to a WiFi network with internet access, you can scan to install apps from the MaixHub App Store.

+

Introduction to Application Ecosystem

+

In order to make the development board ready to use out of the box, make it easy for users to use without barriers, enable developers to share their interesting applications, and provide effective channels for receiving feedback and even profits, we have launched a simple application framework, including:

+
    +
  • App Store: Developers can upload and share applications, which users can download and use without needing to develop them. Developers can receive certain cash rewards (from MaixHub or user tips).
  • +
  • Pre-installed Apps: Officially provided, commonly used applications such as color block detection, AI object detection and tracking, QR code scanning, and face recognition, which users can use directly or use as a serial module.
  • +
  • MaixPy + MaixCDK Software Development Kit: Using MaixPy or MaixCDK, you can quickly develop embedded AI visual and audio applications in Python or C/C++, efficiently realizing your interesting ideas.
  • +
  • MaixVision Desktop Development Tool: A brand-new desktop code development tool for quick start, debugging, running, uploading code, installing applications to devices, one-click development, and even support for graphical block-based programming, making it easy for elementary school students to get started.
  • +
+

Everyone is welcome to pay attention to the App Store and share their applications in the store to build a vibrant community together.

+

Packaging Applications

+

Using MaixPy + MaixVison makes it easy to develop, package, and install applications:

+
    +
  • Develop applications with MaixPy in MaixVision, which can be a single file or a project directory.
  • +
  • Connect the device.
  • +
  • Click the "Install" button at the bottom-left corner of MaixVision, fill in the basic information of the application in the popup window, where the ID is used to identify the application. A device cannot simultaneously install different applications with the same ID, so the ID should be different from the IDs of applications on MaixHub. The application name can be duplicated. You can also upload an icon.
  • +
  • Click "Package Application" to package the application into an installer. If you want to upload it to the MaixHub App Store, you can use this packaged file.
  • +
  • Click "Install Application" to install the packaged application on the device.
  • +
  • Disconnect from the device, and you will see your application in the device's app selection interface. Simply click on it to run the application.
  • +
+
+

If you develop with MaixCDK, you can use maixcdk release to package an application. Refer to the MaixCDK documentation for specifics.

+
+

Exiting Applications

+

If you have developed a relatively simple application without a user interface and a back button, you can exit the application by pressing the device's function button (usually labeled as USER, FUNC, or OK) or the back button (if available, MaixCAM does not have this button by default).

+

Installing Applications

+
    +
  • Method 1: Use the App Store application on the device. Find the application on the App Store, connect the device to the internet, and scan the code to install.

    +
  • +
  • Method 2: Install using a local installation package. Transfer the package to the device's file system, for example, to /root/my_app_v1.0.0.zip, and then run the following code. Make sure to modify the pkg_path variable to the correct path; you can also find this script in MaixPy's examples/tools/install_app.py:

    +
  • +
+ +
import os
+
+def install_app(pkg_path):
+    if not os.path.exists(pkg_path):
+        raise Exception(f"Package {pkg_path} not found")
+    cmd = f"/maixapp/apps/app_store/app_store install {pkg_path}"
+    err_code = os.system(cmd)
+    if err_code != 0:
+        print("[ERROR] Install failed, error code:", err_code)
+    else:
+        print(f"Install {pkg_path} success")
+
+pkg_path = "/root/my_app_v1.0.0.zip"
+
+install_app(pkg_path)
+
+
    +
  • Method 3:
      +
    • For applications developed using MaixPy, run maixtool deploy in the project root directory (which contains app.yaml and main.py). A QR code will be displayed. Keep the device and computer on the same local network, and use the App Store on the device to scan the QR code corresponding to the local network address for online installation.
    • +
    • For applications developed using MaixCDK, run maixcdk deploy in the project root directory. A QR code will be displayed. Keep the device and computer on the same local network, and use the App Store on the device to scan the QR code corresponding to the local network address for online installation.
    • +
    +
  • +
+

Basic Guidelines for Application Development

+
    +
  • Since touchscreens are standard, it is recommended to create a simple interface with touch interaction. You can refer to examples for implementation methods.
  • +
  • Avoid making interfaces and buttons too small: the MaixCAM's default screen is 2.3 inches with a 552x368 resolution and a high PPI, so make sure fingers can tap accurately without mistakes.
  • +
  • Implement a simple serial interaction for the main functionality of each application based on the serial protocol (see example). This way, users can directly use it as a serial module. For instance, in a face detection application, you can output coordinates via the serial port when a face is detected (a minimal sketch is shown after this list).
  • +
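+

Below is a minimal, illustrative sketch of reporting results over the serial port. It assumes the maix uart module and /dev/ttyS0 as the serial device, and uses a simple text line rather than the official serial protocol; a real module-style application should follow the serial protocol documentation mentioned above.

+
+ +
from maix import uart
+
+serial = uart.UART("/dev/ttyS0", 115200)
+
+def report_face(x: int, y: int, w: int, h: int):
+    # Send detected face coordinates as a simple text line.
+    serial.write(f"face:{x},{y},{w},{h}\n".encode())
+
+report_face(10, 20, 64, 64)
+
+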
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/basic/app_usage.html b/maixpy/doc/en/basic/app_usage.html new file mode 100644 index 00000000..b86352c4 --- /dev/null +++ b/maixpy/doc/en/basic/app_usage.html @@ -0,0 +1,162 @@ + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Application User Guide - MaixPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/basic/auto_start.html b/maixpy/doc/en/basic/auto_start.html new file mode 100644 index 00000000..ca9774a7 --- /dev/null +++ b/maixpy/doc/en/basic/auto_start.html @@ -0,0 +1,446 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy/MaixCAM Application Auto-Start at Boot - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy/MaixCAM Application Auto-Start at Boot

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Packaged applications can be set to automatically start when the device boots up, bypassing the application menu and directly launching the specified application.

+

Method One for Setting Application Auto-Start

+

First, package and install the application, then go to Settings -> Auto-Start on your device to select the application you want to auto-start. To cancel auto-start, you can also adjust it here.

+

Method Two for Setting Application Auto-Start

+

Run the Python script below to set it up: modify the new_autostart_app_id variable in the script to the app_id you want to set. All installed app_ids are printed when the script runs, so you can run it once to find the desired app_id, modify the variable, and then run it again. To cancel the autostart setting, set the variable to None.

+

This script can also be found in the MaixPy examples under examples/tools as set_autostart.py:

+ +
import configparser, os
+
+def parse_apps_info():
+    info_path = "/maixapp/apps/app.info"
+    conf = configparser.ConfigParser()
+    conf.read(info_path)
+    version = conf["basic"]["version"]
+    apps = {}
+    for id in list(conf.keys()):
+        if id in ["basic", "DEFAULT"]:
+            continue
+        apps[id] = conf[id]
+    return apps
+
+def list_apps():
+    apps = parse_apps_info()
+    print(f"APP num: {len(apps)}")
+    for i, (id, info) in enumerate(apps.items()):
+        name_zh = info.get("name[zh]", "")
+        print(f"{i + 1}. [{info['name']}] {name_zh}:")
+        print(f"    id: {id}")
+        print(f"    exec: {info['exec']}")
+        print(f"    author: {info['author']}")
+        print(f"    desc: {info['desc']}")
+        print(f"    desc_zh: {info.get('desc', 'None')}")
+        print("")
+
+
+def get_curr_autostart_app():
+    path = "/maixapp/auto_start.txt"
+    if os.path.exists(path):
+        with open(path, "r") as f:
+            app_id = f.readline().strip()
+            return app_id
+    return None
+
+def set_autostart_app(app_id):
+    path = "/maixapp/auto_start.txt"
+    if not app_id:
+        if os.path.exists(path):
+            os.remove(path)
+        return
+    with open(path, "w") as f:
+        f.write(app_id)
+
+if __name__ == "__main__":
+    # new_autostart_app_id = "settings"   # change to app_id you want to set
+    new_autostart_app_id = None           # remove autostart
+
+    list_apps()
+    print("Before set autostart appid:", get_curr_autostart_app())
+    set_autostart_app(new_autostart_app_id)
+    print("Current autostart appid:", get_curr_autostart_app())
+
+
+

Method Three for Setting Application Auto-Start

+

You can also modify the /maixapp/auto_start.txt file in your device to set it up. For methods on file transfer, refer to the previous documentation.

+
    +
  • First, determine the id of the application you want to set. This is set when you package the application; if it's not an application you packaged yourself, you can install it on the device and check the folder names under the device's /maixapp/apps/ directory, which are the application names (or you can download and check the device's /maixapp/apps/app.info file, where the application id is indicated inside the [] brackets).
  • +
  • Then write the id into the /maixapp/auto_start.txt file. (You can create the file locally on your computer, and then transfer it to the device using MaixVision.)
  • +
  • To cancel, delete the /maixapp/auto_start.txt file on the device.
  • +
+

Other Methods

+

For MaixCAM, since the underlying system is Linux, if you are familiar with Linux, you can edit the startup scripts in /etc/rc.local or /etc/init.d.

+

However, it is important to note that this method may cause the application to continue running when MaixVision connects, thereby occupying resources (such as the screen and camera) which might prevent MaixVision from running programs normally. The first two methods allow MaixVision to terminate the program upon connection to run its own programs.

+

Thus, this method is more suitable for running background processes that do not occupy screen and camera resources. Generally, if you are not familiar with Linux, it is not recommended to use this method.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/basic/linux_basic.html b/maixpy/doc/en/basic/linux_basic.html new file mode 100644 index 00000000..482ae06f --- /dev/null +++ b/maixpy/doc/en/basic/linux_basic.html @@ -0,0 +1,422 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Basic Knowledge of Linux - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Basic Knowledge of Linux

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

For beginners just starting out, you can skip this chapter for now and come back to it after mastering the basics of MaixPy development.

+

The MaixCAM hardware supported by the latest MaixPy runs a Linux system, so MaixPy development is ultimately based on Linux. Although Sipeed has done a lot of work on MaixPy so that developers can enjoy using it without any knowledge of Linux, there may be situations where some low-level operations are necessary. For the convenience of developers unfamiliar with Linux, this section covers some basic Linux knowledge.

+

Why Linux System is Needed

+

Specific reasons can be researched individually. Here are a few examples in simplified terms that may not sound too technical but are easy for beginners to understand:

+
    +
  • On microcontrollers, our program is usually one big loop, but with Linux we can run multiple programs simultaneously, each appearing to run independently, with the operating system scheduling the actual execution.
  • +
  • With a large community of Linux-based developers, required functionalities and drivers can be easily found without the need to implement them from scratch.
  • +
  • Linux offers a rich set of accompanying software tools for convenient development and debugging. Some Linux common tools not mentioned in this tutorial can theoretically be used as well.
  • +
+

File System

+

What is a file system?

+
    +
  • Similar to a computer's file system, Linux manages hardware disks using a file system, making it easy for us to read and write data to the disk.
  • +
  • For students who have learned about microcontrollers but not familiar with file system development, imagine having a Flash or TF card where data can be read and written through APIs even after power loss. However, Flash has read/write limitations, requiring a program to ensure its longevity. A file system is like a mature program that manages the Flash space and read/write operations. By calling the file system's APIs, we can significantly reduce development work and ensure stability and security with proven programs.
  • +
+

Transferring Files between Computer and Device (Development Board)

+

Since the device has Linux and a file system, how do we send files to it?

+

For MaixPy, we offer MaixVision for file management in future versions. Before that, you can use the following method:

+

Here we mainly discuss transferring files through the network. Other methods can be explored on your own by searching for "transferring files to Linux":

+
    +
  • Ensure the device and computer are connected to the same local network, for example:
      +
    • When the MaixCAM's USB port is connected to the computer, a virtual network card is created which can be seen in the device manager on the computer, and the device's IP can be found in the device's Settings -> Device Information.
    • +
    • Alternatively, connect to the same local network on the device through Settings -> WiFi.
    • +
    +
  • +
  • Use SCP or SFTP protocols on the computer to transfer files to the device. There are many specific software options and methods, such as:
      +
    • On Windows, you can use WinSCP, FileZilla, or the scp command.
    • +
    • On Linux, use FileZilla or the scp command.
    • +
    • On Mac, use FileZilla or the scp command.
    • +
    +
  • +
+

Terminal and Command Line

+

The terminal is a tool for communicating with and operating the Linux system, similar to Windows' cmd or PowerShell.

+

For example, we can enter ssh root@maixcam-xxxx.local in PowerShell on Windows or in a terminal on Linux (the device's specific name can be found in Settings -> Device Information) to connect to the device through the terminal (both the username and the password are root).

+

Then, we can operate the device by entering commands. For instance, the ls command lists the files in the device's current directory, while cd switches to a different directory (similar to clicking folders in the file manager on a computer):

+ +
cd /     # Switch to the root directory
+ls       # Display all files in the current directory (root directory)
+
+

This will display similar content as below:

+ +
bin         lib         media       root        tmp
+boot        lib64       mnt         run         usr
+dev         linuxrc     opt         sbin        var
+etc         lost+found  proc        sys
+
+

For more command learning, please search for Linux command line usage tutorials on your own. This is just to introduce beginners to basic concepts so that when developers mention them, they can understand what they mean.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/basic/maixpy_upgrade.html b/maixpy/doc/en/basic/maixpy_upgrade.html new file mode 100644 index 00000000..7a70eba4 --- /dev/null +++ b/maixpy/doc/en/basic/maixpy_upgrade.html @@ -0,0 +1,394 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM Update MaixPy. - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM Update MaixPy.

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

There are two methods to begin with. If you are new to this and want to keep things simple, you can try using the pre-installed MaixPy firmware on the TF card that comes with the device. You can consider updating it later.

+

However, since we don't know when the TF card you received was manufactured, it is recommended to update the system.

+

Updating the System Directly (Highly Recommended)

+

Follow the steps in Upgrading and Flashing the System to upgrade to the latest system, which already includes the newest MaixPy firmware.

+

Updating Only the MaixPy Firmware

+

Check the latest version information and release notes in the MaixPy repository release page. It includes details about the MaixPy firmware and the system information corresponding to each version.

+

If you prefer not to update the system (since system changes are usually minimal, you can check if there are any system-related changes in the MaixPy update notes before deciding whether to update the system), you can simply update the MaixPy firmware.

+
    +
  • Set up WiFi in the settings to connect the system to the internet.
  • +
  • Click on Update MaixPy in the settings app to proceed with the update.
  • +
+

You can also execute Python code that calls the system command to install it:

+ +
import os
+
+os.system("pip install MaixPy -U")
+
+
+

If you are comfortable using the terminal, you can also update MaixPy by using pip install MaixPy -U in the terminal.

+
+

You can also download the wheel file (.whl format) manually, send it to the device (for the transfer method, see MaixVision Usage), and then install it with the pip install *****.whl command.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/basic/maixvision.html b/maixpy/doc/en/basic/maixvision.html new file mode 100644 index 00000000..d4cbda58 --- /dev/null +++ b/maixpy/doc/en/basic/maixvision.html @@ -0,0 +1,456 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixVision -- MaixCAM MaixPy Programming IDE + Graphical Block Programming - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixVision -- MaixCAM MaixPy Programming IDE + Graphical Block Programming

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

MaixVision is a development tool specifically designed for the Maix ecosystem, supporting MaixPy programming and graphical block programming. It allows for online operation and debugging, real-time image preview, and synchronizing images from device displays, which is convenient for debugging and development.

+

It also supports packaging and installing applications on devices, allowing users to easily generate and install applications with one click.

+

In addition, it integrates several handy tools for development, such as file management, threshold editor, QR code generator, and more.

+

Download

+

Visit the MaixVision homepage to download.

+

Using MaixPy Programming and Online Running

+

Follow the steps in Quick Start to connect your device, and you can easily use MaixPy programming and run it online.

+

Real-time Image Preview

+

MaixPy provides a display module that can show images on the screen. Also, when the show method of the display module is called, it sends the image to be displayed on MaixVision, for example:

+ +
from maix import display, camera
+
+cam = camera.Camera(640, 480)
+disp = display.Display()
+while 1:
+    disp.show(cam.read())
+
+

Here we use the camera to capture an image, then display it on the screen using the disp.show() method, and also send it to MaixVision for display.

+

When we click the 'pause' button in the top right corner, it will stop sending images to MaixVision.

+

Code Auto Completion

+

Code suggestions depend on local Python packages installed on your computer. To enable code suggestions, you need to install Python on your computer and the required Python packages.

+
    +
  • To install Python, visit the Python official website.
  • +
  • To install the required packages, for MaixPy, for instance, you need to install the MaixPy package on your computer using pip install MaixPy. If MaixPy gets updated, you should update it on both your computer and device. On your computer, manually execute pip install MaixPy -U in the terminal. For device updates, update directly in the Settings application.
  • +
+
+

Users in China can use a local mirror pip install -i https://pypi.tuna.tsinghua.edu.cn/simple MaixPy.

+
+
    +
  • Restart MaixVision to see the code suggestions.
  • +
+
+

If suggestions still do not appear, you can manually set the path to the Python executable in settings and restart.

+
+
+

Note that installing Python packages on your computer is just for code suggestions. The actual code runs on the device (development board), and the device must also have the corresponding packages to run properly.

+
+
+

Additionally, even though the MaixPy package is installed on your computer, due to our limited resources we cannot guarantee that the maix package can be used directly in your computer's Python; please run it on supported devices.

+
+

Calculating the Image Histogram

+

In the previous step, we could see the image in real-time in MaixVision. By selecting an area with the mouse, we can view the histogram for that area at the bottom of the screen, displaying different color channels.

+

This feature is helpful when finding suitable parameters for some image processing algorithms.

+

Distinguishing Between Device File System and Computer File System

+

Here we have an important concept to grasp: distinguish between the Device File System and the Computer File System.

+
    +
  • Computer File System: Operates on the computer. Opening a file or project in MaixVision accesses files on the computer, and saving is automatically done to the computer's file system.
  • +
  • Device File System: The program sends the code to the device for execution, so the files used in the code are read from the device's file system.
  • +
+

A common issue is when students save a file on the computer, such as D:\data\a.jpg, and then use this file on the device with img = image.load("D:\data\a.jpg"). Naturally, the file cannot be found because the device does not have D:\data\a.jpg.

+

For specifics on how to send files from the computer to the device, refer to the following section.

+

Transferring Files to the Device

+

First, connect to the device, then click the button to browse the device file system, as shown below. Then you can upload files to the device or download files to the computer.

+

maixvision_browser2

+

maixvision_browser

+
+Alternatively, other tools can be used, click to expand +

First, know the device's IP address or name, which MaixVision can find, or see in the device's Settings->System Information, such as maixcam-xxxx.local or 192.168.0.123.
+ The username and password are root, using the SFTP protocol for file transfer, and the port number is 22.

+

There are many useful tools available for different systems:

+

Windows

+

Use WinSCP or FileZilla to connect to the device and transfer files, choosing the SFTP protocol and entering the device and account information to connect.

+

Specific instructions can be searched online.

+

Linux

+

In the terminal, use the scp command to transfer files to the device, such as:

+ +
scp /path/to/your/file.py root@maixcam-xxxx.local:/root
+
+

Mac

+
    +
  • Method 1: In the terminal, use the scp command to transfer files to the device, such as:
  • +
+ +
scp /path/to/your/file.py root@maixcam-xxxx.local:/root
+
+
    +
  • Method 2: Use FileZilla or other tools to connect to the device and transfer files, choosing the SFTP protocol and entering the device and account information to connect.
  • +
+
+
+

Using Graphical Block Programming

+

Under development, please stay tuned.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/basic/os.html b/maixpy/doc/en/basic/os.html new file mode 100644 index 00000000..84a00340 --- /dev/null +++ b/maixpy/doc/en/basic/os.html @@ -0,0 +1,391 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Upgrade and burn system. - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Upgrade and burn system.

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

If you have purchased the official (Sipeed) package with a TF card, typically the system has already been pre-programmed at the factory and can be used directly without further steps.

+

However, to avoid using an outdated version of the pre-programmed system, it is highly recommended to first upgrade to the latest system following the tutorial.

+

Obtaining the Latest System

+

Visit the MaixPy Release page to find the latest system image file, such as maixcam_os_20240401_maixpy_v4.1.0.xz.

+

Alternate link: Sourceforge

+

How to Confirm if System Upgrade is Needed

+
    +
  • Upon booting up to the main menu, click on Settings, then Device Info to check the system's version number.

    +
  • +
  • Visit the MaixPy Release History page to review the update logs, which contain information on MaixPy firmware and system image updates. If there are significant updates after your current version, it is advisable to upgrade.

    +
    +

    If the latest system update only includes routine MaixPy firmware updates compared to your current system, you may choose not to upgrade. You can simply update MaixPy separately in Settings under Update MaixPy.

    +
    +
  • +
+

Burning the System Image to MaixCAM

+

Refer to the hardware documentation MaixCAM System Burning tutorial. Note that if the conditions for USB Burning are met, it is recommended to use the USB Burning method. The USB burning method does not require removing the TF card.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/basic/python.html b/maixpy/doc/en/basic/python.html new file mode 100644 index 00000000..1545c062 --- /dev/null +++ b/maixpy/doc/en/basic/python.html @@ -0,0 +1,414 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Basic Knowledge of Python - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Basic Knowledge of Python

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

The tutorial documentation of MaixPy does not delve into specific Python syntax tutorials because there are already too many excellent Python tutorials available. Here, we only introduce what needs to be learned, provide guidance on directions and paths.

+

Introduction to Python

+

Python is an interpreted, object-oriented, dynamically typed high-level programming language.

+
    +
  • Interpreted: It does not require compilation and runs directly. The advantage is rapid development; a minor drawback is slower execution speed, since the code is interpreted on each run. However, most often the bottleneck lies in the developer's code rather than the language itself.
  • +
  • Object-oriented: It supports object-oriented programming, allowing the definition of classes and objects. Compared to procedural languages, it is easier to organize code. For more details, please search independently.
  • +
  • Dynamically typed: Variables do not need to declare types, can be assigned directly, and the type will be automatically determined based on the assignment. This reduces code volume, but can also lead to type errors, requiring the developer's attention.
  • +
+

In conclusion, for developers unfamiliar with Python, it is very easy to get started as Python offers plenty of ready-to-use libraries, a large developer community, short application development cycles, making it highly worthwhile to learn!

+

Python Environment Setup

+

You can install Python on your computer according to the Python tutorial you are following for learning.
+Alternatively, you can connect to a device in MaixVision and then run the program on the development board.

+

What Python Basics are Needed to Use MaixPy?

+
    +
  • Basic concepts of Python.
  • +
  • Basic concepts of object-oriented programming.
  • +
  • Basic syntax of Python, including:
      +
    • Tab indentation alignment syntax.
    • +
    • Variables, functions, classes, objects, comments, etc.
    • +
    • Control statements such as if, for, while, etc.
    • +
    • Modules and importing modules.
    • +
    • Basic data types such as int, float, str, list, dict, tuple, etc.
    • +
    • Difference between bytes and str, and conversion.
    • +
    • Exception handling, try-except.
    • +
    • Common built-in functions like print, open, len, range, etc.
    • +
    • Common built-in modules like os, sys, time, random, math, etc.
    • +
    +
  • +
+

Mastering the above foundational knowledge will enable you to smoothly program with MaixPy. With the help of subsequent tutorials and examples, if unsure, you can refer to search engines, official documentation, or ask ChatGPT to successfully complete your development tasks.

+

For Developers Experienced in Another Object-Oriented Programming Language

+

If you are already proficient in an object-oriented language like C++/Java/C#, you simply need to quickly review Python syntax before starting to use it.

+

You can refer to resources like Runoob Tutorial or the Python Official Tutorial.

+

Alternatively, you can explore individual developers' blogs, such as Wow! It's Python.

+

For Developers with C Language Experience but No Object-Oriented Programming Experience

+

If you only know C and lack understanding of object-oriented concepts, you can start by learning about object-oriented programming concepts before diving into Python. It's relatively quick and you can search for video tutorials for entry-level guidance.

+

After following introductory video tutorials, you can then refer to documentation tutorials such as Runoob Tutorial or the Python Official Tutorial to get started!

+

Once you have acquired the basic knowledge, you can start using MaixPy for programming based on the documentation and examples.

+

For Programming Beginners

+

If you have never dealt with programming before, you will need to start learning Python from scratch. Python is also quite suitable as an introductory language. You can search for video tutorials for specific guidance.

+

After mastering the basic syntax, you will be able to use MaixPy for programming by following examples provided.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/basic/python_pkgs.html b/maixpy/doc/en/basic/python_pkgs.html new file mode 100644 index 00000000..6f3997ad --- /dev/null +++ b/maixpy/doc/en/basic/python_pkgs.html @@ -0,0 +1,392 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Add extra Python packages. - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Add extra Python packages.

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

MaixPy is based on the Python language and provides a wide range of functionalities and APIs for embedded application development. In addition to this, you can also use other Python packages to extend its functionality.

+

Installing Additional Python Packages

+
+

Please note that not all Python packages are supported. Generally, only pure Python packages are supported, not C extension packages. C extension packages may require you to manually cross-compile them on a computer (which is quite complex and won't be covered here).

+
+

Method 1: Installing Using Python Code

+

You can install the package you need in MaixVision using Python code, for example:

+ +
import os
+os.system("pip install package_name")
+
+

To update a package, you can use:

+ +
import os
+os.system("pip install --upgrade package_name")
+
+

Method 2: Installing Using the Terminal and pip Command

+

Follow the terminal usage method introduced in Linux Basics and use pip install package_name to install the package you need.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/basic/view_src_code.html b/maixpy/doc/en/basic/view_src_code.html new file mode 100644 index 00000000..542fbe9c --- /dev/null +++ b/maixpy/doc/en/basic/view_src_code.html @@ -0,0 +1,432 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy How to Find the Source Code Corresponding to MaixPy API - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy How to Find the Source Code Corresponding to MaixPy API

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

MaixPy is implemented based on Python, with some functions written in Python and most of the underlying code written in C/C++. This ensures efficient performance.

+

If you have questions while using a function, you can consult this document and the API documentation. If your doubts are still unresolved, you can find the underlying implementation source code using the method described in this article. You are also welcome to contribute to the documentation or code, and become a MaixPy developer!

+

Check the Documentation First

+

Always check the documentation first: https://wiki.sipeed.com/maixpy/, then check the API documentation: https://wiki.sipeed.com/maixpy/api/index.html.

+

The API documentation is only available in English because it is generated from the comments in the code, which are all in English. If you can't understand English, you can use a translation tool.

+

How to Find the Source Code Corresponding to the API

+

There are two open-source repositories: MaixPy and MaixCDK. MaixPy is the project repository containing part of the MaixPy source code, all documents, and examples; MaixCDK contains most of the underlying C/C++ implementations of MaixPy APIs.

+

You can download these two repositories or view them directly on the web.

+

Don't forget to give them a star so more people can see them!

+

Finding C/C++ Written APIs

+

Assume we want to find the maix.image.Image.find_blobs function as an example. First, let's try to find it manually:

+
    +
  • Since this is a vision-related API, we look in the components/vision/include directory of MaixCDK and see a maix_image.hpp header file, where we might find it.
  • +
  • Searching for find_blobs in maix_image.hpp, we immediately find the function declaration:
  • +
+ +
std::vector<image::Blob> find_blobs(std::vector<std::vector<int>> thresholds = std::vector<std::vector<int>>(), bool invert = false, std::vector<int> roi = std::vector<int>(), int x_stride = 2, int y_stride = 1, int area_threshold = 10, int pixels_threshold = 10, bool merge = false, int margin = 0, int x_hist_bins_max = 0, int y_hist_bins_max = 0);
+
+
    +
  • We also notice that there are comments before the function declaration, from which the API documentation is automatically generated. If you compare the API documentation with this comment, you will find them identical. Modifying this comment and recompiling will generate updated API documentation.
  • +
  • This is just the function declaration. We find that there is no such function in components/vision/src/maix_image.cpp. However, we see components/vision/src/maix_image_find_blobs.cpp, indicating that the function is written in a separate cpp file. Here, we can see the function's source code.
  • +
+

Finding APIs Written with Pybind11

+

If you can't find it in MaixCDK, look in MaixPy/components.

+
+

In the above code, you'll notice that the first parameter we use in find_blobs is of type list, i.e., [[...]], while the C/C++ definition is std::vector<std::vector<int>>. This is because we use pybind11 to automatically convert the std::vector type to list type.

+
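+

For illustration, a hypothetical call of this API from Python (the threshold values are just example numbers): the outer and inner Python lists are converted by pybind11 to std::vector<std::vector<int>>.

+
+ +
from maix import camera
+
+cam = camera.Camera(640, 480)
+img = cam.read()
+# [[L_min, L_max, A_min, A_max, B_min, B_max]] -- example LAB thresholds
+blobs = img.find_blobs([[0, 80, 40, 80, 10, 80]], pixels_threshold=500)
+print(len(blobs), "blobs found")
+
+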
+

For some types like numpy's array, which are inconvenient to define in MaixCDK, we use the pybind11 definitions in MaixPy/components. For example, the maix.image.image2cv method uses pybind11 related code here.

+

How to Modify the Code

+

After finding the code, modify it directly and compile the firmware following the build documentation.

+

How to Add Code

+

Copy other APIs, write a function, and add complete comments. Include an extra @maixpy maix.xxx.xxx tag in the comments, where xxx is the module and API name you want to add. Then compile the firmware.

+

Refer to MaixCDK/components/basic/include/maix_api_example.hpp.

+

API parameters and return values automatically convert from basic C++ types to Python types, making it very simple. See the pybind11 automatic type conversion list for details.

+

For example, to add maix.my_module.my_func, create a header file in the appropriate place in MaixCDK (preferably following the current folder classification) and add the code:

+ +
namespace maix::my_module
+{
+    /**
+     * My function, add two integers.
+     * @param a arg a, int type
+     * @param b arg b, int type
+     * @return int type, will return a + b
+     * @maixpy maix.my_module.my_func
+     */
+    int my_func(int a, int b);
+}
+
+

Then add a cpp file:

+ +
namespace maix::my_module
+{
+    int my_func(int a, int b)
+    {
+        return a + b;
+    }
+}
+
+

Compile MaixPy to generate the whl file and install it on the device to use the maix.my_module.my_func function.
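Once that firmware (or wheel) is installed, using the new API from Python is just a normal import. This is hypothetical, since maix.my_module only exists if you built the example above:

from maix import my_module

print(my_module.my_func(1, 2))   # prints 3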

+

How to Contribute Code

+

If you find any unfinished APIs or bugs in MaixPy, feel free to submit a PR (Pull Request) to the MaixPy repository. For detailed submission methods, see Contributing Documentation and Code.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/config.json b/maixpy/doc/en/config.json new file mode 100644 index 00000000..2c779f44 --- /dev/null +++ b/maixpy/doc/en/config.json @@ -0,0 +1,4 @@ +{ + "import": "config_en", + "name": "MaixPy English Documentation" +} diff --git a/maixpy/doc/en/faq.html b/maixpy/doc/en/faq.html new file mode 100644 index 00000000..0383047e --- /dev/null +++ b/maixpy/doc/en/faq.html @@ -0,0 +1,511 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy FAQ (Frequently Asked Questions) - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy FAQ (Frequently Asked Questions)

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +
+

This page lists common questions and solutions related to MaixPy. If you encounter any issues, please search for answers here first.
+Additionally, there are other resources:

+ +
+

MaixVision cannot find the device?

+

First, confirm whether the connection method is WiFi or USB cable.
+WiFi:

+
    +
  • Ensure that WiFi is correctly connected and has obtained an IP address. You can view the IP in Settings -> Device Info or Settings -> WiFi.
  • +
+

USB Cable:

+
    +
  • Ensure that the device is connected to the computer via a Type-C data cable, and the device is powered on and has entered the function selection interface.
  • +
  • Ensure that the device driver is installed:
      +
    • On Windows, check if there is a USB virtual network adapter device in Device Manager. If there is an exclamation mark, it means the driver is not installed properly. Follow the instructions in Quick Start to install the driver.
    • +
    • On Linux, you can check if there is a usb0 device by running ifconfig or ip addr, or check all USB devices with lsusb. Linux already includes the driver, so if the device is not recognized, check the hardware connection, ensure the device system is up-to-date, and ensure the device has booted up properly.
    • +
    • On macOS, follow the same steps as Linux.
    • +
    +
  • +
  • Additionally, check the quality of the USB cable and try using a high-quality cable.
  • +
  • Additionally, check the quality of the computer's USB port. For example, some small form factor PCs have poor EMI design on their USB ports, and connecting a good quality USB hub may allow the device to work. You can also try a different USB port or a different computer.
  • +
+

MaixVision camera example shows choppy video

+

The default GC4653 camera has a maximum frame rate of 30 frames per second (FPS). Under normal circumstances, the MaixVision display should not appear choppy to the naked eye. If choppiness occurs, first consider transmission issues:

+
    +
  • Check the network connection quality, such as WiFi.
  • +
  • If using a USB connection, check the USB cable quality, computer USB port quality, and try using a different computer, USB port, or USB cable for comparison.
  • +
+

What is the difference between MaixPy v4 and v1/v3?

+
    +
  • MaixPy v4 uses the Python language and is the culmination of the experiences from v1 and v3, offering better supporting software and ecosystem, more features, simpler usage, and more comprehensive documentation. While the hardware has significant improvements, the pricing is even more affordable compared to the other two versions. Additionally, it provides compatibility with the K210 user experience and API, making it easier for users to migrate quickly from v1 to v4.
  • +
  • v1 used the Micropython language and had many limitations, such as limited third-party library support. Additionally, due to the hardware performance limitations of the Maix-I (K210), there was not enough memory, limited AI model support, and lack of hardware acceleration for many codecs.
  • +
  • v3 also used the Python language and was based on the Maix-II-Dock (v831) hardware. However, the hardware had limited AI model support, and the Allwinner ecosystem was not open enough, with an incomplete API. This version was only intended for use with the Maix-II-Dock (v831) and will not receive further updates.
  • +
+

Does MaixPy currently only support MaixCAM, or can it work with other boards using the same chipset?

+

MaixPy currently only supports the MaixCAM series of boards. Other boards using the same chipset, including Sipeed's boards like the LicheeRV-Nano, are not supported. It is strongly recommended not to attempt using MaixPy with other boards, as it may result in device damage (such as smoke or screen burn), for which you will be solely responsible.

+

In the future, Sipeed's Maix series of products will continue to be supported by MaixPy. If you have any needs that cannot be met by MaixCAM, you can post your requirements on the MaixHub Discussion Forum or send an email to support@sipeed.com.

+

Can I use a camera or screen other than the officially bundled ones?

+

It is not recommended to use cameras or screens other than the officially bundled ones, unless you have sufficient software and hardware knowledge and experience. Otherwise, it may result in device damage.

+

The officially bundled accessories have been fine-tuned for both software and hardware, ensuring the best performance and allowing for out-of-the-box usage. Other accessories may have different interfaces, drivers, and software, requiring you to calibrate them yourself, which is an extremely complex process.

+

However, if you are an expert, we welcome you to submit a pull request!

+

Model running error: cvimodel built for xxxcv181x CANNOT run on platform cv181x.

+

Failure to parse the model file is generally caused by file corruption. Ensure that your model file is not damaged. For example:

+
    +
  • Editing a binary file with an editor caused the file to become corrupted. For example, opening a cvimodel file with MaixVision can corrupt the binary file due to MaixVision's auto-save feature. Therefore, do not open and save binary files with text editors like MaixVision (this issue will be fixed in a future update of MaixVision by removing the auto-save feature).
  • +
  • If it was downloaded from the internet, make sure the download was not corrupted. Typically, files on the internet provide sha256sum/md5 checksums. After downloading, you can compare these values; for specific methods, please search online or ask ChatGPT.
  • +
  • If it comes from a compressed archive, ensure that the decompression process was error-free. You can decompress the archive again to make sure there were no errors in the process.
  • +
  • Ensure that the file was not damaged during the transfer to the device. You can compare the sha256sum values of the file on the device and on your computer (a short Python sketch for this follows the list); for other methods, you can also search online or ask ChatGPT.
  • +
+
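A minimal sketch of the checksum comparison in Python; the model path is only an example. Run the same code on the computer and on the device and compare the two printed values, which must be identical:

import hashlib

def sha256_of(path: str) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(65536), b""):   # read in blocks to keep memory usage low
            h.update(chunk)
    return h.hexdigest()

print(sha256_of("/root/models/yolov5s_224_int8.cvimodel"))   # example path, adjust to your file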

Power-on Black Screen, No Display on the Screen

+

Refer to MaixCAM FAQ

+

Why doesn’t the computer detect a serial port when connecting via USB to MaixCAM?

+

The USB port on the MaixCAM is a USB 2.0 interface of the chip, not a USB-to-serial interface, so it is normal for no serial port to appear when connected to a computer.

+

How do you communicate without a USB-to-serial connection?
+By default, the USB will simulate a USB network card. When you connect the USB to your computer, a virtual network card will appear. According to the instructions in the Quick Start Guide, you can use MaixVision to communicate with MaixCAM to run code, preview images, manage files, and other functions.

+

Additionally, since the USB simulates a network card, you can also use standard SSH software to connect to MaixCAM for communication. Alternatively, you can connect via WiFi and communicate within the same local network.

+

If you need to use the serial port, there are two situations:

+
    +
  1. Serial communication with a computer: You need to purchase any USB-to-serial module to connect the computer's USB port with the board's serial port (for MaixCAM, it's the UART0 pins A16 (TX) and A17 (RX), or you can use the TX and RX pins on the USB adapter board that comes with the MaixCAM package, which are also the A16 and A17 pins and are functionally equivalent).

    +
  2. +
  • 3. Serial communication with another MCU/SOC: Directly connect MaixCAM's A16 (TX) and A17 (RX) to the MCU's RX and TX pins (a minimal MaixPy sketch for opening this serial port follows this list).

    +
  4. +
+
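A minimal sketch of sending and receiving on that port from MaixPy; the device name /dev/ttyS0 and the exact read/write method names are assumptions here, so double-check them against the UART usage documentation:

from maix import uart

# UART0 corresponds to the A16 (TX) / A17 (RX) pins mentioned above.
serial = uart.UART("/dev/ttyS0", 115200)   # device name is an assumption, verify it on your firmware
serial.write_str("hello from MaixCAM\r\n")
data = serial.read()
if data:
    print("received:", data)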

Red Screen, Initialization Display Failed, Please Check FAQ

+

The message indicates that the display driver initialization failed.
+As of July 2024, the underlying display driver for MaixCAM is initialized together with the camera driver. Therefore, this issue is most likely caused by a failure in the camera driver initialization.
+To resolve this issue:

+
    +
  • Try updating to the latest system and install the latest runtime libraries (very important!!!). The runtime libraries need to work in conjunction with the system drivers, and version mismatches may cause errors. Updating to the latest system image and installing the latest runtime libraries should generally resolve the issue.
  • +
  • Multiple processes may be trying to use the camera or display driver at the same time; the simplest fix is to reboot the device.
  • +
  • Check for hardware connection issues with the camera. Ensure that the camera is properly connected and not damaged.
  • +
+

What are the differences between Runtime, MaixPy, and system image? Which one should I upgrade?

+
    +
  • Runtime is the runtime environment. Many system functions depend on it, including MaixPy. If you encounter the problem of being unable to run the program, first check and update it online.

    +
  • +
  • The system image includes the basic operating system, hardware drivers, built-in applications, and MaixPy firmware, etc. It is the basic environment. It is best to keep it up to date, especially in the Release page. If the version update mentions that the system has been updated, it is strongly recommended to update the system, because some MaixPy functions may depend on the drivers in the system.

    +
  • +
+
+

Updating the system will format all previous data. Please back up useful data in the device system before updating.

+
+
    +
  • MaixPy is the library that MaixPy programs depend on at runtime. If you do not need new system features, and the update log does not mention important system-level changes such as driver updates, you can update MaixPy alone.
  • +
+

Error Loading MUD Model File: *****.cvimodel not exists, load model failed

+
    +
  • Check that the .mud file you are trying to load actually exists on the device (note that it must be on the device, not on the computer; transfer it to the device first).
  • +
  • Verify that the model path you wrote is correct.
  • +
  • If you have changed the file name, note that the MUD file is a model description file and can be edited with a text editor. The actual model file is the .cvimodel file (for MaixCAM). The .mud file specifies the file name and path of the .cvimodel. Therefore, if you have changed the file name of .cvimodel, you also need to modify the model path in the .mud file. For example, here is the mud file for the Yolov5 model:
  • +
+ +
[basic]
+type = cvimodel
+model = yolov5s_224_int8.cvimodel
+
+[extra]
+model_type = yolov5
+input_type = rgb
+mean = 0, 0, 0
+scale = 0.00392156862745098, 0.00392156862745098, 0.00392156862745098
+anchors = 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326
+labels = person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair dryer, toothbrush
+
+

Here, the model is specified as the yolov5s_224_int8.cvimodel file relative to the directory of this .mud file. If you have changed yolov5s_224_int8.cvimodel to another name, you need to update it here as well.
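For context, a typical way such a .mud model is loaded from MaixPy looks like the sketch below. The path is only an example and must exist on the device; the nn.YOLOv5 calls follow the pattern used in the official YOLOv5 example, but verify them against the nn API documentation:

from maix import camera, nn

detector = nn.YOLOv5(model="/root/models/yolov5s_224_int8.mud")   # example path on the device
cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
img = cam.read()
objs = detector.detect(img, conf_th=0.5, iou_th=0.45)
for obj in objs:
    print(detector.labels[obj.label], obj.score, obj.x, obj.y, obj.w, obj.h)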

+

MaixVision Shows Red Wavy Line on import maix

+

This error occurs because MaixVision's code hinting feature cannot find the maix module. It's important to understand that MaixVision's code hinting relies on the local Python packages on your computer, while the code execution depends on the Python packages on the device. To enable MaixVision's code hinting, you need to install Python and the MaixPy package on your computer. For more details, refer to the MaixVision User Documentation.

+

MaixCAM starts very slowly, even exceeding 1 minute, or the screen flickers

+

This is mostly due to insufficient power supply. MaixCAM requires a voltage of around 5V and a current between 150mA and 500mA. If you encounter this issue, you can use a USB to TTL module to connect MaixCAM's serial port to a computer. You may see a message like Card did not respond to voltage select! : -110, indicating insufficient power supply. Simply switch to a more stable power supply to resolve the problem.

+

For MaixCAM, it draws 400mA during startup, 250mA in standby mode with the screen on, and 400mA~500mA when running AI models at full speed. Therefore, ensuring a stable power supply is very important!

+

MaixCAM shows a black screen and does not boot, or is stuck on the LOGO screen

+

Refer to MaixCAM FAQ

+

MaixVision Program Stuck on "start running ..."

+

When the MaixVision log output window prints the message start running ..., it indicates that the program has been sent to the device and has begun executing. What gets printed afterward depends on your program. For instance, if you call print("hello"), it will print hello. If your program doesn't include any print statements, then there will be no logs displayed.

+

So, the program isn't actually stuck; it's just that your program hasn't output anything, so no logs are shown. You can try adding print("xxx") in your code to generate output, which is the simplest way to debug your program.

+

Why Does the Hardware Have 256MB of Memory, But Only 128MB is Available in the System?

+

The remaining memory is reserved for low-level drivers and the kernel, which are used for operating the camera, display, hardware encoding/decoding, NPU, and other drivers. You can check the memory used by these drivers (known as ION memory in CVITEK systems) by running cat /sys/kernel/debug/ion/cvi_carveout_heap_dump/summary. For other memory usage, you can run cat /proc/meminfo.

+

If you want to adjust the memory allocation, you need to compile the system yourself and modify ION_SIZE in the memmap.py file located in the LicheeRV-Nano-Build/build/boards/sg200x/sg2002_licheervnano_sd/ directory (refer to the system customization documentation).

+

Why Am I Unable to Install the Runtime Library, and an Error "Request Failed" Is Displayed?

+
    +
  • Ensure that the device is successfully connected to the internet. You can try connecting to a different mobile hotspot.
  • +
  • Verify that the system image you flashed is the latest version.
  • +
  • If you see an error related to DNS resolution failure, it might be due to DNS settings issues on your network. You can try connecting to a different mobile hotspot or manually modify the DNS server settings in /boot/resolv.conf (modifying this file requires a reboot) and /etc/resolv.conf (modifying this file does not require a reboot, but rebooting will overwrite it with the contents of the former).
  • +
  • Make sure you have purchased a genuine MaixCAM from Sipeed.
  • +
  • Contact customer service, providing the system version and device_key (which can be found after disconnecting from MaixVision or, if you have a screen, in System Settings -> System Information).
  • +
+

Compile error: type not registered yet?

+ +
from ._maix.peripheral.key import add_default_listener
+ImportError: arg(): could not convert default argument into a Python object (type not registered yet?). #define
+
+

The error indicates that an object has not been defined as a Python object. In MaixPy, this is usually caused by an issue with the order of automatic API generation. For example, if there is an API declared with @maixpy in a.hpp, and another API in b.hpp that uses a definition from a.hpp as a parameter, then b.hpp depends on a.hpp. However, the current MaixPy compilation script does not perform dependency scanning. To resolve this, you need to manually specify the scan order in the components/maix/headers_priority.txt file in the MaixPy project, ensuring that a.hpp is scanned before b.hpp.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/gui/i18n.html b/maixpy/doc/en/gui/i18n.html new file mode 100644 index 00000000..3a5ab9a7 --- /dev/null +++ b/maixpy/doc/en/gui/i18n.html @@ -0,0 +1,489 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy MaixCAM i18n (Internationalization) Multi-Language Implementation - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy MaixCAM i18n (Internationalization) Multi-Language Implementation

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction to i18n (Internationalization)

+

i18n is an abbreviation for internationalization, which aims to switch languages according to the user's region or preference.

+

Commonly used languages, such as Chinese and English, have corresponding region codes (LCID). For example, the region code for Chinese is zh, English is en, and Japanese is ja. There are also secondary region codes, like Simplified Chinese corresponding to zh-cn. Generally, implementing zh is sufficient.

+

For region codes, you can refer to Windows Locale Codes or check Wikipedia.

+

Using i18n in MaixPy MaixCAM

+

The general user process is as follows:

+
    +
  • Initially, users can select the system language in the system settings, with the factory default being en (English).
  • +
  • Then, the program can get the current system locale using maix.i18n.get_locale() (see the short sketch after this list).
  • +
  • The program displays the corresponding language strings based on the system locale.
  • +
+
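A short sketch of that step, reading the system locale; the returned code follows the region codes described above:

from maix import i18n

locale = i18n.get_locale()    # e.g. "en" or "zh", depending on the system language setting
print("current system locale:", locale)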

For applications, the tricky part is the third step, which involves looking up the corresponding strings based on the locale settings. Here are two methods to achieve this, depending on your needs:

+

Using a Dictionary Directly Without Translation Files

+

If your program only has a few strings, you can manually specify the translation dictionary:

+ +
from maix import i18n
+
+trans_dict = {
+    "zh": {
+        "hello": "你好"
+    },
+    "en": {
+    }
+}
+
+trans = i18n.Trans(trans_dict)
+tr = trans.tr
+
+trans.set_locale("zh")
+print(tr("hello"))
+print(tr("my friend"))
+
+

Here, trans.set_locale("zh") temporarily sets the language to Chinese. Running this will print 你好 and my friend; since there is no translation for my friend, it is returned as-is.

+

Automatically Scanning and Generating a Dictionary, and Loading from Translation Files

+

This method is more suitable for scenarios with many strings to translate.

+

In the previous method, we manually specified string translations, which is convenient for simple scenarios. However, if there are too many strings, manually editing the dictionary can easily result in omissions. Therefore, we need the program to automatically find the strings that need translation and generate translation files, which we only need to translate.

+

In MaixPy, the maix.i18n.Trans class is provided to load translation files in multiple languages. By calling its tr() function and passing in the text to be translated, you can get the translation. For example:

+ +
from maix import i18n, err
+trans = i18n.Trans()
+tr = trans.tr
+
+e = trans.load("locales")
+err.check_raise(e, "load translation yamls failed")
+
+print(tr("hello"))
+
+

Here, the translation files are loaded from the locales folder in the current directory, and the system prints hello according to the language settings, such as 你好 for Chinese.

+

Translation Files: Since translation files are used here, how are these files created?
+First, we need to know which text needs translation, which are the strings called by the tr function. So we just need to search for all strings that use the tr function in the source code to find all the strings that need translation.
+The usage process is as follows:

+
    +
  • Create a project folder to store the code entry main.py, and open this project folder with MaixVision for easy operation.
  • +
  • Write main.py, using the tr function to call the strings that need translation.
  • +
  • MaixPy provides a scanning tool. First, make sure maixtool is installed (pip install maixtool -U on the computer terminal to install or upgrade).
  • +
  • Then, in the directory, use the computer terminal to execute maixtool i18n -d . r to scan for strings that need translation and generate a locales directory containing translation files for Chinese and English. For more languages, execute maixtool i18n -h for help.
  • +
  • The generated files are key-value pairs. For example, in zh.yaml, hello: hello means the Chinese translation of hello is currently just hello, which is obviously not yet translated, so manually change hello: hello to hello: 你好. Make sure to use a text editor that supports UTF-8 encoding; on Windows especially, avoid saving the file in GBK encoding to prevent errors. You can use MaixVision or VSCode for editing.
  • +
  • Then run the project, or package the project into an installation package, remember to include the locales directory.
  • +
  • If the source code is updated later, execute the maixtool command again to update the files. It will update the previously translated files. If you are worried about accidental overwriting, you can back up the files first and then delete the backup after confirming everything is correct.
  • +
+

This way, your program will change the language according to the system settings. You can also manually call trans.set_locale("zh") to temporarily switch the language for debugging.

+

Displaying Translations on the Interface

+

The previous examples used the print function to display translations. If you want to display them on the interface, you need font support. English is supported by default, but languages with large glyph sets, such as Chinese, are not.
+For example:

+ +
from maix import i18n, image, display, app, time
+
+trans_dict = {
+    "zh": {
+        "hello": "你好"
+    },
+    "en": {
+    }
+}
+
+trans = i18n.Trans(trans_dict)
+tr = trans.tr
+trans.set_locale("zh")
+
+disp = display.Display()
+img = image.Image(disp.width(), disp.height())
+
+img.draw_string(10, 10, tr("hello"), image.COLOR_WHITE, scale=2)
+disp.show(img)
+
+while not app.need_exit():
+    time.sleep_ms(100)
+
+

Running this will show a bunch of ? because there is no Chinese font library. For the image module, you can load a font library. The system has a built-in Chinese font library, or you can use your own font library:

+ +
from maix import i18n, image, display, app, time
+
+trans_dict = {
+    "zh": {
+        "hello": "你好"
+    },
+    "en": {
+    }
+}
+
+trans = i18n.Trans(trans_dict)
+tr = trans.tr
+trans.set_locale("zh")
+
+disp = display.Display()
+
+image.load_font("sourcehansans", "/maixapp/share/font/SourceHanSansCN-Regular.otf", size = 24)
+image.set_default_font("sourcehansans")
+
+img = image.Image(disp.width(), disp.height())
+img.draw_string(10, 10, tr("hello"), image.COLOR_WHITE, scale=2)
+disp.show(img)
+
+while not app.need_exit():
+    time.sleep_ms(100)
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/index.html b/maixpy/doc/en/index.html new file mode 100644 index 00000000..63f6c39b --- /dev/null +++ b/maixpy/doc/en/index.html @@ -0,0 +1,604 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Quick Start - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Quick Start

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ + + + +
+
+

For an introduction to MaixPy, please see the MaixPy official website homepage
+Please give the MaixPy project a Star ⭐️ to encourage us to develop more features if you like MaixPy.

+
+

Before Start

+
    +
  • Please carefully follow the steps outlined in this document. Do not skip any sections, and compare your actions accordingly.
  • +
  • Pay close attention to the table of contents on the left. Be sure to read through the basic sections thoroughly and patiently.
  • +
  • Before asking questions, first search the documentation in the left-hand table of contents and review the FAQ.
  • +
  • This document is the MaixPy v4 Tutorial. Be mindful not to confuse it with the MaixPy-v1 (K210 series), and ensure you are referring to the correct documentation.
  • +
+

Get a MaixCAM Device

+

maixcam_pro

+ +

It is recommended to purchase the bundle with a TF card, camera, 2.3-inch touchscreen, case, Type-C data cable, Type-C one-to-two mini board, and 4P serial port socket+cable, which will be convenient for later use and development. The following tutorials assume that you already have these accessories (including the screen).

+

It is highly recommended to purchase a package that includes a screen, as it greatly enhances the development experience. If you do not need a screen for actual deployment in a production environment, you can start with a screen-included kit for initial development, and then either remove the screen or purchase a screenless version for mass production later on.

+
    +
  • Power Supply: A stable power supply is crucial. MaixCAM requires a steady 5V 500mA power supply. Insufficient power can result in failure to boot or crashes during operation. This is especially true for some computer USB ports, which may provide unstable power.

    +
  • +
  • TF Card Reader: Used for flashing the system, essential.

    +
  • +
  • USB to serial port module: If you want to debug serial communication with PC, it is recommended to prepare one. You can buy any one from Taobao or buy them together at Sipeed store, such as this dual serial port to USB module.

    +
  • +
+
+

Note that currently only the MaixCAM development board is supported. Other development boards with the same chip are not supported, including Sipeed's development boards with the same chip. Please be careful not to purchase the wrong board, which could result in unnecessary waste of time and money.

+
+

For Devices Without a Screen

+

If you are using the screenless version, please refer to the Quick Start (Screenless Version) document.

+

Getting Started

+

Prepare the TF Image Card and Insert it into the Device

+

If the package you purchased includes a TF card, it already contains the factory image. If the TF card was not installed in the device at the factory, you will first need to carefully open the case (be careful not to tear the ribbon cables inside) and then insert the TF card. Additionally, since the firmware from the factory may be outdated, it is highly recommended to follow the instructions on Upgrading and Flashing the System to upgrade the system to the latest version.

+

If you did not purchase a TF card, you need to flash the system onto a self-provided TF card. Please refer to Upgrading and Flashing the System for the flashing method, and then install it on the board.

+

Power On

+

Use a Type-C data cable to connect the MaixCAM device and power it on. Wait for the device to boot up and enter the function selection interface.

+

maixcam_font

+

If the screen does not display:

+
    +
  • Please confirm that you purchased the bundled TF card. If you confirm that you have a TF card and it is inserted into the device, you can try updating to the latest system.
  • +
  • If you did not purchase the TF card bundle, you need to follow the instructions in Upgrading and Flashing the System to flash the latest system onto the TF card.
  • +
  • Also, ensure that the screen and camera cables are not loose. The screen cable can easily come off when opening the case, so be careful.
  • +
+

Connect to the Network

+

For the first run, you need to connect to the network, as you will need it later to activate the device and use the IDE.
+If you don't have a router, you can use your phone to open a hotspot.

+

Click Settings on the device and select WiFi. There are two ways to connect to the WiFi hotspot:

+
    +
  • Scan the WiFi sharing code:
      +
    • Use your phone to share the WiFi hotspot QR code, or go to maixhub.com/wifi to generate a QR code.
    • +
    • Click the Scan QR code button, the camera screen will appear, scan the QR code generated previously to connect.
    • +
    +
  • +
  • Search for hotspots:
      +
    • Click the Scan button to start scanning the surrounding WiFi, you can click multiple times to refresh the list.
    • +
    • Find your WiFi hotspot.
    • +
    • Enter the password and click the Connect button to connect.
    • +
    +
  • +
+

Then wait for the IP address to be obtained, which may take 10 to 30 seconds. If the interface does not refresh, you can exit the WiFi function and re-enter to view it, or you can also see the IP information in Settings -> Device Information.

+

Update the Runtime Libraries

+

This step is very important!!! If this step is not done properly, other applications and functions may not work (e.g., they may crash).

+
    +
  • First, ensure that you have completed the previous step of connecting to WiFi and have obtained an IP address to access the internet.
  • +
  • On the device, click Settings, and select Install Runtime Libraries.
  • +
  • After the installation is complete, you will see that it has been updated to the latest version. Then exit.
  • +
+

If it shows Request failed or 请求失败 (Request failed), please first check if the network is connected. You need to be able to connect to the internet. If it still doesn't work, please take a photo and contact customer service for assistance.

+

Use Built-in Applications

+

Many applications are built-in, such as Find Blobs, AI Detector, Line Follower, etc. For example, Find Blobs:

+ +

Please explore other applications on your own. More applications will be updated in the future. For usage documentation and application updates, please see the MaixHub App Store.

+

Note: The applications only include a part of the functionality that MaixPy can achieve. Using MaixPy, you can create even more features.

+

Use as a Serial Module

+
+

If you want to use the device as the main controller (or if you don't understand what a serial module is), you can skip this step.

+
+

The built-in applications can be used directly as serial modules, such as Find Blobs, Find Faces, Find QR Codes, etc.
+Note that the serial port can only directly connect to other microcontrollers. If you want to communicate with a computer via a serial port, you must provide a USB-to-serial module yourself.

+

Usage:

+
    +
  • Hardware connection: You can connect the device to the Type-C one-to-two mini board (for the MaixCAM-Pro, use the 6-pin interface), which allows you to connect the device via serial to your main controller, such as an Arduino, Raspberry Pi, or STM32.
  • +
  • Open the application you want to use, such as QR code recognition. When the device scans a QR code, it will send the result to your main controller via serial.
  • +
+
+

The serial baud rate is 115200, the data format is 8N1, and the protocol follows the Maix Serial Communication Protocol Standard. You can find the corresponding application introduction on the MaixHub APP to view the protocol.
+If the app does not provide serial output, you can add it yourself: follow the function examples and the UART usage documentation to add the feature and the serial output.

+
+

Preparing to Connect Computer and Device

+

To enable communication between the computer (PC) and the device (MaixCAM), we need to ensure they are on the same local area network. There are two methods to achieve this:

+
    +
  • Method 1 (Highly Recommended): Wireless Connection. Connect the device to the same router or Wi-Fi hotspot that the computer is connected to via Wi-Fi. Go to the device's Settings -> WiFi Settings and connect to your Wi-Fi. (If you experience screen lag or high latency with Wi-Fi, you can try Method 2 for a wired connection.)
  • +
+

+
    +
  • Method Two: Wired Connection. The device connects to the computer via a USB cable, and the device will emulate as a USB network adapter. This way, the device and the computer will be on the same local network through the USB connection. It is recommended to start with WiFi because although a wired connection offers stable transmission, it may encounter issues such as faulty cables, poor connection, or driver problems. If you encounter any issues, you can refer to the common problems in the FAQ.
  • +
+
+Method Two: Driver Installation on Different Computer Systems: +

By default, there are two types of USB virtual network adapter drivers (NCM and RNDIS drivers) to meet the needs of different systems. You can also disable the unused virtual network adapter on the device under Settings -> USB Settings:

+
    +
  • Windows: All Windows systems will automatically install the RNDIS driver, while only Windows 11 will automatically install the NCM driver. As long as one of the drivers works, it is sufficient.
      +
    • Open Task Manager -> Performance, and you should see a virtual Ethernet with an IP address such as 10.131.167.100, which is the computer's IP address. The device's IP address is the same but with the last digit changed to 1, i.e., 10.131.167.1. If you are using Windows 11, you will see two virtual network adapters; you can use either IP address.
    • +
    • Additionally, you can open Device Manager (search for Device Manager in the search bar). The RNDIS and NCM drivers should be correctly installed, as shown below:
      +RNDIS ok NCM ok
    • +
    +
  • +
  • Linux: No additional setup is required. Simply plug in the USB cable. Use ifconfig or ip addr to see the usb0 and usb1 network interfaces, and either IP address can be used. Note: The IP address you see, such as 10.131.167.100, is the computer's IP address, and the device's IP address is the same but with the last digit changed to 1, i.e., 10.131.167.1.
  • +
  • MacOS: Check for the usb network adapter under System Settings -> Network. Note: The IP address you see, such as 10.131.167.100, is the computer's IP address, and the device's IP address is the same but with the last digit changed to 1, i.e., 10.131.167.1.
  • +
+
+
+

Preparing the Development Environment

+
    +
  • First, ensure that the computer and the device are on the same local network as per the previous step.
  • +
  • Download and install MaixVision.
  • +
  • Connect the device and the computer using a Type-C cable. Open MaixVision, click the “Connect” button in the lower left corner, and it will automatically search for the device. Wait for a moment until the device appears, then click the connection button next to the device to connect.
  • +
+

If no device is detected, you can also manually enter the device's IP address, which can be found on the device under Settings -> Device Info. You can also find solutions in the FAQ.

+

After a successful connection, the function selection interface on the device will disappear, and the screen will turn black, releasing all hardware resources. If there is still an image displayed, you can disconnect and reconnect.

+

Here is a video example of using MaixVision:

+

+

Run Examples

+

Click Example Code on the left side of MaixVision, select an example, and click the Run button in the bottom left to send the code to the device for execution.

+

For example:

+
    +
  • hello_maix.py: Click the Run button, and you will see messages printed from the device in the MaixVision terminal, as well as an image in the upper right corner.
  • +
  • camera_display.py: This example will open the camera and display the camera view on the screen.
  • +
+ +
from maix import camera, display, app
+
+disp = display.Display()          # Construct a display object and initialize the screen
+cam = camera.Camera(640, 480)     # Construct a camera object, manually set the resolution to 640x480, and initialize the camera
+while not app.need_exit():        # Keep looping until the program exits (you can exit by pressing the function key on the device or clicking the stop button in MaixVision)
+    img = cam.read()              # Read the camera view and save it to the variable img, you can print(img) to print the details of img
+    disp.show(img)                # Display img on the screen
+
+
    +
  • yolov5.py will detect objects in the camera view, draw bounding boxes around them, and display them on the screen. It supports detection of 80 object types. For more details, please see YOLOv5 Object Detection.
  • +
+

You can try other examples on your own.

+
+

If you encounter image display stuttering when using the camera examples, it may be due to poor network connectivity, or the quality of the USB cable or the host's USB being too poor. You can try changing the connection method or replacing the cable, host USB port, or computer.

+
+

Install Applications on the Device

+

The above examples run code on the device, but the code will stop running when MaixVision is disconnected. If you want the code to appear in the boot menu, you can package it as an application and install it on the device.

+

Click the Install App button in the bottom left corner of MaixVision, fill in the application information, and the application will be installed on the device. Then you will be able to see the application on the device.
+ You can also choose to package the application and share your application to the MaixHub App Store.

+
+

The default examples do not explicitly write an exit function, so you can exit the application by pressing the function key on the device. (For MaixCAM, it is the user key.)

+
+

If you want the program to start automatically on boot, you can set it in Settings -> Boot Startup.

+

More MaixVision usage refer to MaixVision documentation

+

Next Steps

+

If you like what you've seen so far, please be sure to give the MaixPy open-source project a star on GitHub (you need to log in to GitHub first). Your star and recognition is the motivation for us to continue maintaining and adding new features!

+

Up to this point, you've experienced the usage and development workflow. Next, you can learn about MaixPy syntax and related features. Please follow the left sidebar to learn. If you have any questions about using the API, you can look it up in the API documentation.

+

It's best to learn with a specific purpose in mind, such as working on an interesting small project. This way, the learning effect will be better. You can share your projects and experiences on the MaixHub Share Plaza and receive cash rewards!

+

Frequently Asked Questions (FAQ)

+

If you encounter any problems, please check the FAQ first. If you cannot find a solution there, you can ask in the forums or groups below, or submit a source code issue on MaixPy issue.

+

Share and Discuss

+
    +
  • MaixHub Project and Experience Sharing: Share your projects and experiences, and receive cash rewards. The basic requirements for receiving official rewards are:
      +
    • Reproducible: A relatively complete process for reproducing the project.
    • +
    • Showcase: No detailed project reproduction process, but an attractive project demonstration.
    • +
    • Bug-solving experience: Sharing the process and specific solution for resolving a particular issue.
    • +
    +
  • +
  • MaixPy Official Forum (for asking questions and discussion)
  • +
  • Telegram: MaixPy
  • +
  • MaixPy Source Code Issues: MaixPy issue
  • +
  • For business cooperation or bulk purchases, please contact support@sipeed.com.
  • +
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/modules/acc.html b/maixpy/doc/en/modules/acc.html new file mode 100644 index 00000000..df699811 --- /dev/null +++ b/maixpy/doc/en/modules/acc.html @@ -0,0 +1,404 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Reading the Accelerometer and Attitude Calculation with MaixCAM MaixPy - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Reading the Accelerometer and Attitude Calculation with MaixCAM MaixPy

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction to IMU

+

For the MaixCAM-Pro, it has an onboard QMI8658 chip that integrates a three-axis gyroscope and a three-axis accelerometer. This chip can provide high-precision data on attitude, motion, and position, making it suitable for various applications that require accurate motion detection, such as drones, robots, game controllers, and virtual reality devices. The QMI8658 features low power consumption, high stability, and high sensitivity. Below is an introduction to using the IMU module to obtain attitude data.

+
+

MaixCAM does not have an onboard accelerometer, but you can connect one externally using an IIC driver.

+
+

Using IMU in MaixPy

+

Example:

+ +
from maix.ext_dev import imu
+
+i = imu.IMU("qmi8658", mode=imu.Mode.DUAL,
+                              acc_scale=imu.AccScale.ACC_SCALE_2G,
+                              acc_odr=imu.AccOdr.ACC_ODR_8000,
+                              gyro_scale=imu.GyroScale.GYRO_SCALE_16DPS,
+                              gyro_odr=imu.GyroOdr.GYRO_ODR_8000)
+
+while True:
+    data = i.read()
+    print("\n-------------------")
+    print(f"acc x: {data[0]}")
+    print(f"acc y: {data[1]}")
+    print(f"acc z: {data[2]}")
+    print(f"gyro x: {data[3]}")
+    print(f"gyro y: {data[4]}")
+    print(f"gyro z: {data[5]}")
+    print(f"temp: {data[6]}")
+    print("-------------------\n")
+
+

Initialize the IMU object according to your needs, and then call read() to get the raw data read from the IMU.

+

If the mode parameter is set to DUAL, the data returned by read() will be [acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z, temp]. If mode is set to only one of ACC or GYRO, it will return only the corresponding [x, y, z, temp]. For example, if ACC is selected, read() will return [acc_x, acc_y, acc_z, temp].
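As a small example of the attitude part of this page's title: with only the accelerometer you can already estimate static pitch and roll from gravity. This sketch assumes the IMU constructor's remaining parameters have usable defaults (otherwise pass them as in the example above) and is a simple tilt estimate, not full sensor fusion:

from maix.ext_dev import imu
import math

i = imu.IMU("qmi8658")   # assumes default mode/scale/odr values; see the full example above
data = i.read()
acc_x, acc_y, acc_z = data[0], data[1], data[2]
# Static tilt estimate using gravity only (no gyroscope fusion):
pitch = math.degrees(math.atan2(-acc_x, math.sqrt(acc_y * acc_y + acc_z * acc_z)))
roll = math.degrees(math.atan2(acc_y, acc_z))
print(f"pitch: {pitch:.2f} deg, roll: {roll:.2f} deg")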

+

For detailed information on the IMU API, please refer to the IMU API Documentation

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/modules/bm8653.html b/maixpy/doc/en/modules/bm8653.html new file mode 100644 index 00000000..e05a921f --- /dev/null +++ b/maixpy/doc/en/modules/bm8653.html @@ -0,0 +1,467 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy bm8653 Driver Instructions - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy bm8653 Driver Instructions

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
Date | Version | Author | Update content
2024-08-27 | 1.0.0 | iawak9lkm | Initial document
+
+
+ +
+
+ +

Introduction to BM8653

+

BM8653 is a real-time clock (RTC) chip widely used in various electronic devices to provide accurate time and date information. It features low power consumption and high precision, capable of continuing to operate via a backup battery when the device is powered off, ensuring the continuity and accuracy of time.

+

Using BM8653 in MaixPy

+

Using BM8653 in MaixPy is straightforward; you only need to know which I2C bus your platform's BM8653 is mounted on. The onboard BM8563 on the MaixCAM Pro is mounted on I2C-4.

+

Example:

+ +
from maix import ext_dev, pinmap, err, time
+
+### Enable I2C
+# ret = pinmap.set_pin_function("PIN_NAME", "I2Cx_SCL")
+# if ret != err.Err.ERR_NONE:
+#     print("Failed in function pinmap...")
+#     exit(-1)
+# ret = pinmap.set_pin_function("PIN_NAME", "I2Cx_SDA")
+# if ret != err.Err.ERR_NONE:
+#     print("Failed in function pinmap...")
+#     exit(-1)
+
+BM8653_I2CBUS_NUM = 4
+
+rtc = ext_dev.bm8563.BM8563(BM8653_I2CBUS_NUM)
+
+### 2020-12-31 23:59:45
+t = [2020, 12, 31, 23, 59, 45]
+
+# Set time
+rtc.datetime(t)
+
+while True:
+    rtc_now = rtc.datetime()
+    print(f"{rtc_now[0]}-{rtc_now[1]}-{rtc_now[2]} {rtc_now[3]}:{rtc_now[4]}:{rtc_now[5]}")
+    time.sleep(1)
+
+

If you are using the onboard BM8653 on the MaixCAM Pro, there is no need to enable I2C-4.

+

The example demonstrates reading from and writing to the BM8653, setting or retrieving the current time.

+

You can also use the following example to set the current time in the BM8653 to the system time, or set the current system time to the time in the BM8653.

+ +
from maix import ext_dev, pinmap, err, time
+
+### Enable I2C
+# ret = pinmap.set_pin_function("PIN_NAME", "I2Cx_SCL")
+# if ret != err.Err.ERR_NONE:
+#     print("Failed in function pinmap...")
+#     exit(-1)
+# ret = pinmap.set_pin_function("PIN_NAME", "I2Cx_SDA")
+# if ret != err.Err.ERR_NONE:
+#     print("Failed in function pinmap...")
+#     exit(-1)
+
+
+BM8653_I2CBUS_NUM = 4
+
+rtc = ext_dev.bm8563.BM8563(BM8653_I2CBUS_NUM)
+
+### Update RTC time from system
+rtc.systohc()
+
+### Update system time from RTC
+# rtc.hctosys()
+
+while True:
+    rtc_now = rtc.datetime()
+    print(f"{rtc_now[0]}-{rtc_now[1]}-{rtc_now[2]} {rtc_now[3]}:{rtc_now[4]}:{rtc_now[5]}")
+    time.sleep(1)
+
+

The underlying implementation of BM8653 is similar to the singleton pattern, ensuring that read and write operations on a single BM8653 are thread-safe. This means you can create BM8653 objects freely and read/write to BM8653 from any location without causing data race conditions.

+

The timetuple passed to the BM8653 object follows the format (year, month, day[, hour[, minute[, second]]]), meaning the first three parameters are mandatory, and any missing subsequent parameters will not modify the corresponding time. BM8653 guarantees that a returned timetuple being empty indicates an error, and if not empty, it will always contain a list of 6 elements: (year, month, day, hour, minute, second).

+

For detailed information on the BM8653 API, please refer to the BM8653 API Documentation

+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/modules/qmi8658.html b/maixpy/doc/en/modules/qmi8658.html new file mode 100644 index 00000000..9928df87 --- /dev/null +++ b/maixpy/doc/en/modules/qmi8658.html @@ -0,0 +1,441 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy qmi8658 Driver Instructions - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy qmi8658 Driver Instructions

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
Date | Version | Author | Update content
2024-08-27 | 1.0.0 | iawak9lkm | Initial document
+
+
+ +
+
+ +

Introduction to QMI8658

+

QMI8658 is an Inertial Measurement Unit (IMU) chip that integrates a three-axis gyroscope and a three-axis accelerometer. It provides high-precision attitude, motion, and position data, making it suitable for various applications requiring accurate motion detection, such as drones, robots, game controllers, and virtual reality devices. QMI8658 features low power consumption, high stability, and high sensitivity.

+

Using QMI8658 in MaixPy

+

Using QMI8658 in MaixPy is straightforward; you only need to know which I2C bus your platform's QMI8658 is mounted on. The onboard QMI8658 on the MaixCAM Pro is mounted on I2C-4.

+

Example:

+ +
from maix import ext_dev, pinmap, err, time
+
+### Enable I2C
+# ret = pinmap.set_pin_function("PIN_NAME", "I2Cx_SCL")
+# if ret != err.Err.ERR_NONE:
+#     print("Failed in function pinmap...")
+#     exit(-1)
+# ret = pinmap.set_pin_function("PIN_NAME", "I2Cx_SDA")
+# if ret != err.Err.ERR_NONE:
+#     print("Failed in function pinmap...")
+#     exit(-1)
+
+QMI8658_I2CBUS_NUM = 4
+
+imu = ext_dev.qmi8658.QMI8658(QMI8658_I2CBUS_NUM,
+                              mode=ext_dev.qmi8658.Mode.DUAL,
+                              acc_scale=ext_dev.qmi8658.AccScale.ACC_SCALE_2G,
+                              acc_odr=ext_dev.qmi8658.AccOdr.ACC_ODR_8000,
+                              gyro_scale=ext_dev.qmi8658.GyroScale.GYRO_SCALE_16DPS,
+                              gyro_odr=ext_dev.qmi8658.GyroOdr.GYRO_ODR_8000)
+
+while True:
+    data = imu.read()
+    print("\n-------------------")
+    print(f"acc x: {data[0]}")
+    print(f"acc y: {data[1]}")
+    print(f"acc z: {data[2]}")
+    print(f"gyro x: {data[3]}")
+    print(f"gyro y: {data[4]}")
+    print(f"gyro z: {data[5]}")
+    print(f"temp: {data[6]}")
+    print("-------------------\n")
+
+

Initialize the QMI8658 object according to your needs, and then call read() to get the raw data read from the QMI8658.

+

If the mode parameter is set to DUAL, the data returned by read() will be [acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z, temp]. If mode is set to only one of ACC or GYRO, it will return only the corresponding [x, y, z, temp]. For example, if ACC is selected, read() will return [acc_x, acc_y, acc_z, temp].

+

For detailed information on the QMI8658 API, please refer to the QMI8658 API Documentation

+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/modules/rtc.html b/maixpy/doc/en/modules/rtc.html new file mode 100644 index 00000000..c3463671 --- /dev/null +++ b/maixpy/doc/en/modules/rtc.html @@ -0,0 +1,379 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Using the RTC Module with MaixCAM MaixPy - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Using the RTC Module with MaixCAM MaixPy

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

The MaixCAM-Pro has an onboard RTC module, which will automatically synchronize the system time upon power-on and also sync time from the network. It will automatically re-sync when there are changes in network status.

+

Therefore, under normal circumstances, you don’t need to manually operate the RTC; you can directly use the system’s time API to get the current time.
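In other words, reading the current time is just a standard Python call, for example:

import time

# The system clock is kept correct by the onboard RTC and NTP synchronization,
# so the standard time API is usually all you need.
print(time.strftime("%Y-%m-%d %H:%M:%S"))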

+

If you do need to manually operate the RTC, please refer to bm8653 RTC Module Usage. Before manually operating the RTC, you can disable automatic synchronization by deleting the RTC and NTP-related services in the system’s /etc/init.d directory.

+
+

MaixCAM does not have an onboard RTC.

+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/modules/temp_humi.html b/maixpy/doc/en/modules/temp_humi.html new file mode 100644 index 00000000..2210738f --- /dev/null +++ b/maixpy/doc/en/modules/temp_humi.html @@ -0,0 +1,377 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Reading Temperature and Humidity Sensors with MaixCAM MaixPy - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Reading Temperature and Humidity Sensors with MaixCAM MaixPy

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

By attaching a temperature and humidity sensor module to MaixCAM, you can easily read the environmental temperature and humidity. This example uses the Si7021 sensor, which can be driven via I2C.

+

The complete code is available at MaixPy/examples/sensors/temp_humi_si7021.py.

+

Note that the system image needs to be version >= 2024.6.3_maixpy_v4.2.1.
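For reference, a minimal sketch of what the Si7021 readout looks like; the maix.i2c calls and the bus number are assumptions here (the official example file above is the authoritative version), while the command bytes and conversion formulas come from the Si7021 datasheet:

from maix import i2c
import time

SI7021_ADDR = 0x40        # 7-bit I2C address of the Si7021
CMD_MEASURE_RH = 0xF5     # measure relative humidity, no-hold master mode
CMD_MEASURE_TEMP = 0xF3   # measure temperature, no-hold master mode

bus = i2c.I2C(4, i2c.Mode.MASTER)   # bus number is an assumption, adjust to your wiring

def read_raw(cmd: int) -> int:
    bus.writeto(SI7021_ADDR, bytes([cmd]))
    time.sleep(0.03)                        # wait for the conversion to finish
    data = bus.readfrom(SI7021_ADDR, 2)
    return (data[0] << 8) | data[1]

humidity = 125.0 * read_raw(CMD_MEASURE_RH) / 65536 - 6            # %RH, datasheet formula
temperature = 175.72 * read_raw(CMD_MEASURE_TEMP) / 65536 - 46.85  # degrees Celsius
print(f"temperature: {temperature:.2f} C, humidity: {humidity:.2f} %RH")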

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/modules/thermal_cam.html b/maixpy/doc/en/modules/thermal_cam.html new file mode 100644 index 00000000..3bebda1a --- /dev/null +++ b/maixpy/doc/en/modules/thermal_cam.html @@ -0,0 +1,379 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Using Thermal Infrared Image Sensors with MaixCAM and MaixPy - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Using Thermal Infrared Image Sensors with MaixCAM and MaixPy

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Currently, the official hardware product is not yet available. If you only need low resolution, you can purchase a serial or I2C module from online platforms like Taobao and drive it yourself. This document will be updated when the official high-resolution module is released.

+

For thermal infrared camera modules, you might consider options such as K210 + MLX90640 Infrared Thermal Imager or Heimann HTPA 32x32d Thermal Infrared.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/modules/tmc2209.html b/maixpy/doc/en/modules/tmc2209.html new file mode 100644 index 00000000..2013c6bb --- /dev/null +++ b/maixpy/doc/en/modules/tmc2209.html @@ -0,0 +1,643 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy tmc2209 单串口驱动使用介绍 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy TMC2209 Single-UART Driver Usage Guide

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
Date | Version | Author | Update content
2024-08-21 | 1.0.0 | iawak9lkm | Initial document
+
+
+ +
+
+ +

Introduction to TMC2209

+

TMC2209 is a stepper motor driver chip produced by the German company Trinamic. It is designed specifically for 2-phase stepper motors, featuring low power consumption, high efficiency, and excellent noise suppression capabilities. TMC2209 supports currents up to 2.8A, making it suitable for various applications such as 3D printers, CNC machines, robots, etc.

+

Using TMC2209 to Drive Stepper Motors in MaixPy

+
    +
  • Ensure that your stepper motor is a 2-phase 4-wire type, and confirm the step angle of your motor (step_angle), the microstepping resolution you need (micro_step), and the distance the load moves per revolution of the motor (screw_pitch or round_mm). This information will help us configure the driver parameters later.

    +
  • +
  • Generally, TMC2209 driver boards on the market have the following pins (if you find it troublesome, you can purchase our TMC2209 driver board, link [not yet available, stay tuned]):

    + +
            ---------
    +     EN-|       |-VM
    +    MS1-|       |-GND
    +    MS2-|       |-2B
    +     RX-|       |-2A
    +     TX-|       |-1A
    +     NC-|       |-1B
    +   STEP-|       |-VDD
    +    DIR-|       |-GND
    +        ---------
    +
    +

    EN: EN is the enable pin. Connect this pin to GND to enable TMC2209 hardware-wise.

    +

    MS1: MS1 is one of the microstepping selection pins, used in conjunction with the MS2 pin to set the microstepping mode of the stepper motor.

    +

    MS2: MS2 is one of the microstepping selection pins, used in conjunction with the MS1 pin to set the microstepping mode of the stepper motor.

    +

    This driver program only uses the UART mode of TMC2209. In this mode, the two microstep selection pins are respectively AD0 (originally MS1) and AD1 (originally MS2). The level states of these two pins together form the UART address of the TMC2209, ranging from 0x00 to 0x03.

    +

    TX: TX is the serial communication transmit pin, used for communication with an external microcontroller via UART.

    +

    RX: RX is the serial communication receive pin, used for communication with an external microcontroller via UART.

    +

    When using both RX and TX on TMC2209, ensure there is a 1K ohm resistor between the RX of the TMC2209 driver board and the TX of the main control chip. Otherwise, communication data anomalies may occur.

    +

    NC: NC is the no-connect pin, indicating that this pin does not need to be connected during normal use.

    +

    STEP: STEP is the step signal input pin. Each pulse received advances the stepper motor by one step angle. Since this driver is purely UART-driven, this pin does not need to be connected and can be left floating.

    +

    DIR: DIR is the direction signal input pin, used to control the rotation direction of the stepper motor. When DIR is high, the motor rotates clockwise; when DIR is low, the motor rotates counterclockwise. Since this driver is purely UART-driven, this pin does not need to be connected and can be left floating.

    +

    VM: VM is the power input pin, connected to the positive terminal of the stepper motor's power supply.

    +

    GND: GND is the ground pin, connected to the negative terminal of the power supply.

    +

    2B, 2A, 1B, 1A: These pins are the phase output pins of the stepper motor, connected to the two phases of the motor's coils.

    +

    VDD: VDD is the logic power input pin, providing power to the internal logic circuits of the chip.

    +
  • +
  • Using TMC2209 Driver in MaixPy

    +
  • +
+

As an example, let's consider a stepper motor with a step angle of 18 degrees, a microstep resolution of 256, and a screw pitch of 3 mm:

+ +
from maix import pinmap, ext_dev, err, time
+
+port = "/dev/ttyS1"
+uart_addr = 0x00
+uart_baudrate = 115200
+step_angle = 18
+micro_step = 256
+screw_pitch = 3
+speed = 6
+use_internal_sense_resistors = True
+run_current_per = 100
+hold_current_per = 100
+
+
+if port == "/dev/ttyS1":
+    ret = pinmap.set_pin_function("A19", "UART1_TX")
+    if ret != err.Err.ERR_NONE:
+        print("Failed in function pinmap...")
+        exit(-1)
+    ret = pinmap.set_pin_function("A18", "UART1_RX")
+    if ret != err.Err.ERR_NONE:
+        print("Failed in function pinmap...")
+        exit(-1)
+
+slide = ext_dev.tmc2209.ScrewSlide(port, uart_addr, uart_baudrate,
+                            step_angle, micro_step, screw_pitch, speed,
+                            use_internal_sense_resistors, run_current_per, hold_current_per)
+
+def reset_callback() -> bool:
+    if 2 > 1:   # An event occurs (e.g., a sensor is triggered),
+                # indicating that the slide has moved to the boundary and the motor needs to stop.
+        print("Reset finish...")
+        return True
+    # Not occurred, no need to stop the motor.
+    return False
+
+def move_callback(per:float) -> bool:
+    # per is the percentage of the current distance moved by move()
+    # out of the total distance required for the current move(), ranging from 0 to 100.
+    print(f"Slide moving... {per}")
+    if per >= 50: # Example: Stop moving when 50% of the total distance for the current move() has been covered.
+        print(f"{per} >= 50%, stop.")
+        return True
+    return False
+
+
+slide.reset(reset_callback)
+
+slide.move(screw_pitch*2, -1, move_callback)
+slide.move(-screw_pitch)
+
+while True:
+    slide.move(screw_pitch*2)
+    slide.move(-(screw_pitch*2))
+    time.sleep_ms(100)
+
+

First, ensure that UART1 is enabled using pinmap in the program.

+

Then create a ScrewSlide object, using the internal sense resistors by default, and defaulting to 100% of the motor's running current and 100% of the motor's holding current. These parameters may need to be adjusted to match your motor.

+

Next, the routine declares a reset callback function and a move callback function, which are respectively passed into the reset() function and move() function. The reset() and move() functions call the callback functions periodically to confirm whether the motor needs to be stopped immediately (when the callback function returns True).

+

Both the move() and reset() functions are blocking functions, and they will only stop the motor and return when the callback function returns True (or when the specified length of movement is completed in the case of move()).

+

Using tmc2209 Driver for Stepper Motors with Constant Load in MaixPy

+

!!! Note: a screw-slide stepper motor should not automatically be treated as a constant-load stepper motor. Screw slides have limit stops that make the direction of travel of the load on the rod known, and during operation the slide often runs into those limit stops, so the motor load is not constant. Apply the same reasoning to other setups to judge whether a motor's load is truly constant.

+

In some application scenarios, the load on the stepper motor is constant throughout and only increases when the mechanism hits an edge and stalls. In such cases you can use the Slide class instead of the ScrewSlide class; Slide has stall detection functionality. Using ScrewSlide is also feasible; it has no stall detection but is more flexible. Please choose between these two classes based on your usage scenario; this section only discusses the Slide class.

+
    +
  • Implementation Principle
  • +
+

The TMC2209 has an internal register SG_RESULT, which stores data proportional to the remaining torque of the motor. If the motor load is constant, the variation in the register value is very small. When the motor stalls, the register value will rapidly decrease and maintain a lower value. By finding the running average value and stall average value of this register for the constant load motor, you can measure whether the motor is stalling at any given moment.

+
    +
  • Obtaining the Average Value of the SG_RESULT Register
  • +
+

The maix.ext_dev.tmc2209 module provides a function to obtain and save this average value, maix.ext_dev.tmc2209.slide_scan.

+

example:

+ +
from maix import ext_dev, pinmap, err
+
+port = "/dev/ttyS1"
+uart_addr = 0x00
+uart_baudrate = 115200
+step_angle = 1.8
+micro_step = 256
+round_mm = 60
+speed = 60
+use_internal_sense_resistors = True
+run_current_per = 100
+hold_current_per = 100
+
+if port == "/dev/ttyS1":
+    ret = pinmap.set_pin_function("A19", "UART1_TX")
+    if ret != err.Err.ERR_NONE:
+        print("Failed in function pinmap...")
+        exit(-1)
+    ret = pinmap.set_pin_function("A18", "UART1_RX")
+    if ret != err.Err.ERR_NONE:
+        print("Failed in function pinmap...")
+        exit(-1)
+
+ext_dev.tmc2209.slide_scan(port, uart_addr, uart_baudrate,
+                           step_angle, micro_step, round_mm, speed, True,
+                           True, run_current_per, hold_current_per,
+                           conf_save_path='./slide_scan_example.bin', force_update=False)
+
+

After configuring the serial port and driver parameters, call slide_scan. The last parameter of slide_scan, force_update, determines the behavior when the configuration file already exists:

+
+

If force_update is True, the old configuration will be overwritten with the new configuration.

+

If force_update is False, the running average value will be updated to the average of the new and old values, and the stall average value will be updated to the larger of the new and old stall average values (for example, if a slide has left and right boundaries, and the left boundary stall average value is less than the right boundary stall average value, meaning the right boundary is more prone to stalling than the left boundary, the easiest stalling average value will be saved).

+
+

After running this program, the stepper motor will continue to rotate forward until it encounters a stall. Wait about 300ms, then stop the program. The program will record the running average value of the SG_RESULT register and the stall average value to conf_save_path.

+

Subsequently, the Slide class can load this configuration file to stop the motor when a stall is detected.

+
    +
  • Verifying the Configuration File Values
  • +
+

You may wonder if this configuration is actually usable. The maix.ext_dev.tmc2209 module provides a function to test this configuration file, slide_test.

+

First, ensure the motor is in a stalled state, then modify the parameters to match those used when calling slide_scan, and execute the following code.

+

example

+ +
from maix import ext_dev, pinmap, err
+
+port = "/dev/ttyS1"
+uart_addr = 0x00
+uart_baudrate = 115200
+step_angle = 1.8
+micro_step = 256
+round_mm = 60
+speed = 60
+use_internal_sense_resistors = True
+run_current_per = 100
+hold_current_per = 100
+
+if port == "/dev/ttyS1":
+    ret = pinmap.set_pin_function("A19", "UART1_TX")
+    if ret != err.Err.ERR_NONE:
+        print("Failed in function pinmap...")
+        exit(-1)
+    ret = pinmap.set_pin_function("A18", "UART1_RX")
+    if ret != err.Err.ERR_NONE:
+        print("Failed in function pinmap...")
+        exit(-1)
+
+ext_dev.tmc2209.slide_test(port, uart_addr, uart_baudrate,
+                           step_angle, micro_step, round_mm, speed, True,
+                           True, run_current_per, hold_current_per,
+                           conf_save_path='./slide_scan_example.bin')
+
+

The motor will stop rotating instantly upon encountering a stall, and the program will end accordingly.

+

The stall stop logic for Slide.move() and Slide.reset() is the same.

+
    +
  • Using Slide
  • +
+

The approach to using Slide is essentially the same as using ScrewSlide, except that Slide removes the callback function and adds stall stop logic.

+

Slide can still be used without a configuration file. In that case the stall detection threshold is the running average measured when the motor starts moving, multiplied by Slide.stop_default_per()/100; the motor stops once the recent running average falls below this threshold. You can read and modify this percentage through Slide.stop_default_per().

+ +
from maix import pinmap, ext_dev, err, time
+
+port = "/dev/ttyS1"
+uart_addr = 0x00
+uart_baudrate = 115200
+step_angle = 1.8
+micro_step = 256
+round_mm = 60
+speed = 60
+use_internal_sense_resistors = True
+run_current_per = 100
+hold_current_per = 100
+
+if port == "/dev/ttyS1":
+    ret = pinmap.set_pin_function("A19", "UART1_TX")
+    if ret != err.Err.ERR_NONE:
+        print("Failed in function pinmap...")
+        exit(-1)
+    ret = pinmap.set_pin_function("A18", "UART1_RX")
+    if ret != err.Err.ERR_NONE:
+        print("Failed in function pinmap...")
+        exit(-1)
+
+slide = ext_dev.tmc2209.Slide(port, uart_addr, uart_baudrate,
+                              step_angle, micro_step, round_mm, speed,
+                              cfg_file_path="./slide_conf.bin")
+
+slide.reset()
+slide.move(60)
+slide.move(-60)
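If you construct Slide without a configuration file, you may want to inspect or tune the default stall threshold percentage described above. The snippet below is only a sketch: the getter/setter style of Slide.stop_default_per() is assumed from the description, so check the tmc2209 API documentation for the exact signature.

per = slide.stop_default_per()      # assumed getter: current threshold percentage
print("stall threshold percent:", per)
# slide.stop_default_per(60)        # assumed setter: stop once the running average
#                                   # drops below 60% of the start-up average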
+
+

Notes

+

This driver is implemented purely through UART, offering the advantage of requiring fewer pins to drive up to 4 motors with relatively high precision. The downside is that it is not suitable for applications requiring extremely high precision.

+

Known Issues:

+
    +
  • Do not use UART0 of MaixCAM as the driver's serial port, as it may cause MaixCAM to fail to boot properly.
  • +
+

!!! If you find any bugs, we welcome you to submit a PR to report them.

+

Disclaimer

+

This motor driver program (hereinafter referred to as the "Program") is developed by [Sipeed] based on the BSD-3 open source license repository janelia-arduino/TMC2209. The Program is intended for learning and research purposes only and is not guaranteed to work under all environmental conditions. Users assume all risks associated with the use of this Program.

+

[Sipeed] shall not be liable for any losses or damages arising from the use or inability to use the Program, including but not limited to direct, indirect, incidental, special, punitive, or consequential damages.

+

Users should conduct sufficient testing and validation to ensure that the Program meets their specific requirements and environment before using it in practical applications. [Sipeed] makes no express or implied warranties regarding the accuracy, reliability, completeness, or suitability of the Program.

+

Users are responsible for complying with all applicable laws and regulations when using the Program and ensuring that they do not infringe upon the legal rights of any third parties. [Sipeed] shall not be liable for any consequences resulting from users' violation of laws or infringement of third-party rights.

+

The interpretation of this disclaimer is reserved by [Sipeed], who also reserves the right to modify this disclaimer at any time.

+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/modules/tof.html b/maixpy/doc/en/modules/tof.html new file mode 100644 index 00000000..a9c6ba46 --- /dev/null +++ b/maixpy/doc/en/modules/tof.html @@ -0,0 +1,378 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Using TOF Modules for Distance Measurement and Terrain Detection with MaixCAM and MaixPy - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Using TOF Modules for Distance Measurement and Terrain Detection with MaixCAM and MaixPy

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Sipeed offers two additional TOF modules that can be used for distance measurement. These modules can be purchased and used with serial communication for your projects.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/network/flask.html b/maixpy/doc/en/network/flask.html new file mode 100644 index 00000000..ad06b245 --- /dev/null +++ b/maixpy/doc/en/network/flask.html @@ -0,0 +1,408 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Using Flask to Build an HTTP Web Server with MaixPy MaixCAM - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Using Flask to Build an HTTP Web Server with MaixPy MaixCAM

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

MaixPy is based on Python, so you can use the Python library Flask to quickly set up a web server. As it is a common Python library, you can find specific uses and methods online, so they won't be elaborated on here.

+

If you only want to create a page that displays camera images, you can also refer to the HTTP image server method in JPEG Streaming.

+

Simple HTTP Service Example

+

After running the following program, accessing http://device_ip:8000 in a computer browser will display the "hello world" text and an image.

+ +
from flask import Flask, request, send_file
+import maix  # not used directly, but importing maix lets the program listen for key events so it can be exited normally
+
+app = Flask(__name__)
+
+@app.route("/", methods=["GET", "POST"])
+def root():
+    print("========")
+    print(request.remote_addr)
+    print(f'headers:\n{request.headers}')
+    print(f'data: {request.data}')
+    print("========")
+    return 'hello world<br><img src="/img" style="background-color: black">'
+
+@app.route("/<path:path>")
+def hello(path):
+    print(path)
+    print(f'headers:\n{request.headers}')
+    print(f'data: {request.data}')
+    print("---------\n\n")
+    return f"hello from {path}"
+
+@app.route("/img")
+def img():
+    return send_file("/maixapp/share/icon/detector.png")
+
+if __name__ == "__main__":
+    app.run(host="0.0.0.0", port=8000)
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/network/http.html b/maixpy/doc/en/network/http.html new file mode 100644 index 00000000..ef690e7b --- /dev/null +++ b/maixpy/doc/en/network/http.html @@ -0,0 +1,393 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Using HTTP Network Communication with MaixPy MaixCAM - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Using HTTP Network Communication with MaixPy MaixCAM

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

HTTP is an application layer network protocol based on TCP. Through it, we can send and receive information to and from network servers, such as retrieving webpage content from a web server. For more information, you can search for HTTP.

+

Using HTTP Requests in MaixPy

+

Since MaixPy is based on Python, you can directly use the bundled requests library. requests is a robust and user-friendly library, so it won't be elaborated on here; please search for its documentation and tutorials for more information.

+

Here is an example of fetching the homepage content of https://example.com.

+ +
import requests
+
+url = 'https://example.com'
+response = requests.get(url)
+print("Response:")
+print("-- status code:", response.status_code)
+print("")
+print("-- headers:", response.headers)
+print("")
+print("-- content:", response.content)
+print("")
+print("-- text:", response.text)
+print("")
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/network/mqtt.html b/maixpy/doc/en/network/mqtt.html new file mode 100644 index 00000000..bfd1de3e --- /dev/null +++ b/maixpy/doc/en/network/mqtt.html @@ -0,0 +1,390 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Using MQTT with MaixPy MaixCAM for Message Subscription and Publishing - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Using MQTT with MaixPy MaixCAM for Message Subscription and Publishing

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

MQTT Introduction

+

MQTT allows for quick and easy real-time communication using a publish-subscribe model.

+

System components:

+
    +
  • MQTT Server (broker): Responsible for forwarding messages.
  • +
  • MQTT Clients: Subscribe to topics from the server, receive messages, and publish messages to specific topics on the server.
  • +
+

Communication process:

+
    +
  • Clients connect to the MQTT server.
  • +
  • Clients subscribe to topics they are interested in, such as topic1.
  • +
  • When other clients or the server publish information on the topic1 topic, it is pushed to the subscribing clients in real time.
  • +
  • Clients can also actively publish messages to specific topics. All clients subscribed to that topic will receive the messages. For example, if a client publishes a message to topic1, all clients subscribed to topic1 will receive it, including the publishing client itself.
  • +
+

Using MQTT in MaixPy MaixCAM

+

The paho-mqtt module can be used for this purpose. You can look up the usage of paho-mqtt online or refer to the examples in the MaixPy/examples repository.
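As a reference, here is a minimal paho-mqtt sketch that subscribes to a topic and publishes one message. It assumes the paho-mqtt 1.x callback API; the broker address and topic name are placeholders that you should replace with your own.

import paho.mqtt.client as mqtt

BROKER = "test.mosquitto.org"       # placeholder public test broker
TOPIC = "maixpy/demo/topic1"        # placeholder topic

def on_connect(client, userdata, flags, rc):
    print("connected, result code:", rc)
    client.subscribe(TOPIC)                       # subscribe after (re)connecting
    client.publish(TOPIC, "hello from MaixCAM")   # publish once connected

def on_message(client, userdata, msg):
    print("received:", msg.topic, msg.payload.decode())

client = mqtt.Client()              # paho-mqtt 1.x style constructor
client.on_connect = on_connect
client.on_message = on_message
client.connect(BROKER, 1883, keepalive=60)
client.loop_forever()               # blocks and dispatches the callbacks above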

+

If you are using an older system, you might need to manually install the paho-mqtt package. Installation instructions can be found in the Adding Extra Python Packages guide.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/network/network_settings.html b/maixpy/doc/en/network/network_settings.html new file mode 100644 index 00000000..2a3cd24c --- /dev/null +++ b/maixpy/doc/en/network/network_settings.html @@ -0,0 +1,405 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Network Settings for MaixPy MaixCAM WiFi Configuration - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Network Settings for MaixPy MaixCAM WiFi Configuration

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

To enable MaixCAM to use the network, it first needs to connect to the network via WiFi. MaixCAM provides several methods to connect to a WiFi hotspot.

+

Using the Built-in Settings Application

+

After powering on, enter the Settings application and select the WiFi function. You can connect by sharing a WiFi QR code from your phone or by generating a QR code at maixhub.com/wifi and scanning it. Alternatively, you can manually scan for WiFi hotspots and enter the password to connect.

+

Once connected successfully and the DHCP assigns an IP address, the IP will be displayed on the screen.

+

Connecting via MaixPy

+ +
from maix import network, err
+
+w = network.wifi.Wifi()
+print("IP:", w.get_ip())
+
+SSID = "Sipeed_Guest"
+PASSWORD = "qwert123"
+print("Connecting to", SSID)
+
+e = w.connect(SSID, PASSWORD, wait=True, timeout=60)
+err.check_raise(e, "Failed to connect to WiFi")
+print("IP:", w.get_ip())
+
+

DNS Server Configuration

+

In practice, some users may find that their router's DNS cannot resolve certain domain names. Therefore, the system sets default DNS servers in the /boot/resolv.conf file:

+ +
nameserver 114.114.114.114 # China
+nameserver 223.5.5.5 # Aliyun China
+nameserver 8.8.4.4 # Google
+nameserver 8.8.8.8 # Google
+nameserver 223.6.6.6 # Aliyun China
+
+

Generally, there is no need to modify this file. If you encounter DNS resolution issues, you can modify this file.

+

The actual configuration file used by the system is located at /etc/resolv.conf. This file is automatically copied from /boot/resolv.conf at startup. Therefore, the simplest solution after modification is to reboot.

+

If you prefer not to reboot, you need to modify both files simultaneously.
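For example, a small sketch that writes the same nameserver list to both files, so the change takes effect immediately and also survives a reboot (the nameservers below are simply taken from the defaults above; run this with sufficient permissions):

dns_conf = "nameserver 223.5.5.5\nnameserver 8.8.8.8\n"
for path in ("/boot/resolv.conf", "/etc/resolv.conf"):
    with open(path, "w") as f:   # overwrite both copies with the same content
        f.write(dns_conf)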

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/network/socket.html b/maixpy/doc/en/network/socket.html new file mode 100644 index 00000000..ea63c29e --- /dev/null +++ b/maixpy/doc/en/network/socket.html @@ -0,0 +1,486 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Using Socket for TCP/UDP Communication with MaixPy MaixCAM - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Using Socket for TCP/UDP Communication with MaixPy MaixCAM

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction to Sockets

+

Sockets are software abstractions for TCP/UDP communication. Through socket interfaces, we can perform TCP/UDP communication.

+

Since MaixPy is based on Python, we can directly use the built-in socket library for communication. For more documentation and tutorials, please search online.

+

Here, we introduce simple usage methods. With these example codes, you can perform basic TCP and UDP communication on MaixPy MaixCAM. Remember to modify the IP address and port number according to your actual situation.

+

Socket TCP Client

+

This example requests a TCP server, sends a message, waits for a response, and then closes the connection.

+ +
import socket
+
+def tcp_client(ip, port):
+    client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    server_address = (ip, port)
+    client_socket.connect(server_address)
+
+    try:
+        # Send data to the server
+        message = 'Hello, Server!'
+        print("Send:", message)
+        client_socket.sendall(message.encode('utf-8'))
+
+        # Receive the server's response
+        data = client_socket.recv(1024)
+        print('Received:', data.decode('utf-8'))
+    finally:
+        # Close the connection
+        client_socket.close()
+
+if __name__ == "__main__":
+    tcp_client("10.228.104.1", 8080)
+
+

Socket TCP Server

+

This example creates a socket server that continuously waits for client connections. Once a client connects, a thread is created to communicate with the client, reading the client's message and echoing it back.

+ +
import socket
+import threading
+
+local_ip = "0.0.0.0"
+local_port = 8080
+
+def receiveThread(conn, addr):
+    while True:
+        print('Reading...')
+        client_data = conn.recv(1024)
+        if not client_data:
+            break
+        print(client_data)
+        conn.sendall(client_data)
+    print(f"Client {addr} disconnected")
+
+ip_port = (local_ip, local_port)
+sk = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+sk.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+sk.bind(ip_port)
+sk.listen(50)
+
+print("Waiting for clients...")
+while True:
+    conn, addr = sk.accept()
+    print(f"Client {addr} connected")
+    # Create a new thread to communicate with this client
+    t = threading.Thread(target=receiveThread, args=(conn, addr))
+    t.daemon = True
+    t.start()
+
+

Socket UDP Client

+ +
import socket
+
+def udp_send(ip, port):
+    # Create a socket object
+    udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+
+    # Define the server's IP address and port number
+    server_address = (ip, port)
+
+    try:
+        # Send data to the server
+        message = 'Hello, Server!'
+        udp_socket.sendto(message.encode('utf-8'), server_address)
+    finally:
+        # Close the connection
+        udp_socket.close()
+
+# Call the function
+udp_send("10.228.104.1", 8080)
+
+

Socket UDP Server

+ +
import socket
+
+def udp_receive(ip, port):
+    # Create a socket object
+    udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+
+    # Define the server's IP address and port number
+    server_address = (ip, port)
+
+    # Bind the port
+    udp_socket.bind(server_address)
+
+    print('Waiting for a message...')
+
+    while True:
+        data, address = udp_socket.recvfrom(1024)
+        print('Received:', data.decode('utf-8'))
+        print('From:', address)
+
+    # Close the connection
+    udp_socket.close()
+
+# Call the function
+udp_receive('0.0.0.0', 8080)
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/network/websocket.html b/maixpy/doc/en/network/websocket.html new file mode 100644 index 00000000..0d67c7e4 --- /dev/null +++ b/maixpy/doc/en/network/websocket.html @@ -0,0 +1,433 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Using WebSocket with MaixPy MaixCAM - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Using WebSocket with MaixPy MaixCAM

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

Similar to sockets, WebSocket enables long-lived communication connections and supports communication with web pages.

+

Since MaixPy is based on Python, you can use the commonly available Python websockets and asyncio modules. For more detailed information, please refer to the documentation and tutorials available online.

+

WebSocket Client

+

The following example connects to a server, sends a message 10 times, and then ends the connection:

+ +
import asyncio
+import websockets
+import time
+
+async def send_msg(websocket):
+    count = 1
+    while count <= 10:
+        msg = f"hello {count}"
+        await websocket.send(msg)
+        recv_text = await websocket.recv()
+        print(f"Received: {recv_text}", end="\n")
+        count += 1
+        time.sleep(1)
+    await websocket.close(reason="client exit")
+
+async def main_logic(ip, port):
+    async with websockets.connect(f'ws://{ip}:{port}') as websocket:
+        await send_msg(websocket)
+
+ip = "10.228.104.100"
+port = 5678
+asyncio.get_event_loop().run_until_complete(main_logic(ip, port))
+
+

WebSocket Server

+

The following example accepts client connections and responds with ack for msg: followed by the received message.

+ +
import asyncio
+import websockets
+import functools
+
+async def recv_msg(websocket):
+    print("New client connected, recv_msg start")
+    while True:
+        try:
+            recv_text = await websocket.recv()
+        except Exception as e:
+            print("Receive failed")
+            break
+        print("Received:", recv_text)
+        response_text = f"ack for msg: {recv_text}"
+        await websocket.send(response_text)
+    print("recv_msg end")
+
+async def main_logic(websocket, path, other_param):
+    await recv_msg(websocket)
+
+ip = "0.0.0.0"
+port = 5678
+start_server = websockets.serve(functools.partial(main_logic, other_param="test_value"), ip, port)
+print("Start server")
+asyncio.get_event_loop().run_until_complete(start_server)
+print("Start server loop")
+asyncio.get_event_loop().run_forever()
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/no_translate.html b/maixpy/doc/en/no_translate.html new file mode 100644 index 00000000..053c80eb --- /dev/null +++ b/maixpy/doc/en/no_translate.html @@ -0,0 +1,390 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + no translation - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

no translation

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +
+
This page has not been translated yet
+
+ Please visit + +
+
+ +
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/peripheral/adc.html b/maixpy/doc/en/peripheral/adc.html new file mode 100644 index 00000000..b271c39a --- /dev/null +++ b/maixpy/doc/en/peripheral/adc.html @@ -0,0 +1,478 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Using ADC in MaixCAM MaixPy - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Using ADC in MaixCAM MaixPy

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
DateVersionAuthorUpdate content
2024-06-111.0.0iawak9lkm + + Initial document + +
+
+
+ +
+
+ +

ADC Introduction

+

An ADC (analog-to-digital converter) converts an input voltage signal into a digital output value. The digital value by itself has no absolute meaning; it only represents a size relative to a reference. Every ADC therefore needs a reference analog quantity as its conversion standard, usually the largest signal it can convert, and the ADC's digital output indicates the size of the input signal relative to that reference.

+

ADC peripherals generally have two main parameters: resolution and reference voltage.

+
    +
  • Resolution: The resolution of an ADC is expressed as the number of bits in a binary (or decimal) number. It describes the ability of the A/D converter to discriminate the input signal. Generally speaking, an A/D converter with n-bit output can distinguish 2^n different levels of input analog voltage, and the minimum value of input voltage that can be distinguished is 1/(2^n) of the full-scale input. For a given maximum input voltage, the more output bits, the higher the resolution.
  • +
  • Reference Voltage: the known voltage against which the unknown input is compared during conversion. The reference voltage can be thought of as the upper limit of the measurable input, and it can be reduced to improve resolution when the signal voltage is low.
  • +
+

With the board's ADC you can measure external voltages, check whether they fall within an expected range, or trigger specific actions when particular voltages are detected (for example, reading several buttons through a single ADC pin).

+

Using ADC in MaixPy

+

Using ADC with MaixPy is easy:

+ +
from maix.peripheral import adc
+from maix import time
+
+a = adc.ADC(0, adc.RES_BIT_12)
+
+raw_data = a.read()
+print(f"ADC raw data:{raw_data}")
+
+time.sleep_ms(50)
+
+vol = a.read_vol()
+print(f"ADC vol:{vol}")
+
+

Use ADC0 to read the raw conversion data from it, or read the voltage data directly from it.

+

See the ADC API documentation for a detailed description of the ADC API.

+

Some notes on MaixCAM's ADC

+

MaixCAM brings out one IO connected to the ADC: GPIO B3. (On MaixCAM-Pro, B3 is wired to the illumination LED, so the ADC cannot be used directly there.)

+

+

This IO is ADC by default and does not require additional configuration.

+

MaixCAM's ADC peripheral has 12-bit resolution, so its output ranges from 0 to 4095 and one step corresponds to 1/4096 of the reference voltage.
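As a quick check of these figures, using the nominal 1.5 V internal reference described below (this is the resolution at the SoC's ADC input, before the external divider):

bits = 12
vref = 1.5                        # nominal internal reference voltage in volts
levels = 2 ** bits                # 4096 distinct output codes
lsb_mv = vref / levels * 1000
print(f"levels: {levels}, 1 LSB ~= {lsb_mv:.3f} mV")   # about 0.366 mV per step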

+

The MaixCAM's ADC peripheral cannot sample faster than 320K samples per second, which is why the example above waits between ADC reads.

+

MaixCAM's ADC peripheral has an internal reference voltage of 1.5 V, which may vary slightly in practice. With a 1.5 V reference, the SoC's own ADC range is only 0 to 1.5 V. Because this range is small, MaixCAM adds a voltage divider circuit in front of the ADC pin to extend the measurable range. The full-scale voltage Vin_max of this divider is about 4.6 to 5.0 V; the spread comes from resistor tolerances in the circuit, the impedance of whatever is connected to the ADC pin, and the deviation of the internal reference. The API already uses a reasonably accurate default value, so this parameter normally does not need to be passed.

+

+

If you need high ADC accuracy, you can calculate the reference voltage for this voltage divider circuit by following the steps below:

+
    +
  • You need to first measure to get the actual input voltage of ADC_PIN, which we call Vin.

    +
  • +
  • Then you need to measure to get the actual input voltage at ADC1, which we call Vadc. The location of resistor R10 can be found in this BOM file.

    +
  • +
  • You need to keep the same voltage input to ADC_PIN as in step 1 and then execute these commands in the shell:

    + +
    echo 1 > /sys/class/cvi-saradc/cvi-saradc0/device/cv_saradc
    +cat /sys/class/cvi-saradc/cvi-saradc0/device/cv_saradc
    +
    +

    This gives you the raw measured value of the ADC, which we call adc_data.

    +
  • +
  • Note the resistance values of resistors R6 and R10 in the picture. Typically, MaixCAM uses 10 KΩ (10,000 Ω) for R6 and 5.1 KΩ (5,100 Ω) for R10.

    +
  • +
  • Finally, plug the results from the steps above into the following Python code to get the range [0, Vin_max] of the ADC_PIN port.

    + +
    def maixcam_get_vin_max(Vin:float, Vadc:float, adc_data:int, r6:int, r10:int, adc_max:int=4095):
    +    Vref = (Vadc/adc_data)*(adc_max+1)
    +    r3 = Vadc*r6/(Vin-Vadc)
    +    Vin_max = (Vref/r3)*(r6+r3)
    +    return Vin_max
    +
    +Vin = 3.3		# step 1
    +Vadc = 1.06		# step 2
    +adc_data=2700	# step 3
    +r6=10000		# step 4
    +r10=5100		# step 4
    +
    +if __name__ == '__main__':
    +    print(maixcam_get_vin_max(Vin, Vadc, adc_data, r6, r10))
    +
    +

    Now pass the result to the third parameter of adc.ADC() and you will get a highly accurate ADC.

    +
  • +
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/peripheral/gpio.html b/maixpy/doc/en/peripheral/gpio.html new file mode 100644 index 00000000..f5ca7a4c --- /dev/null +++ b/maixpy/doc/en/peripheral/gpio.html @@ -0,0 +1,432 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Using GPIO - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Using GPIO

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

GPIO lets you drive a pin high or low, or read a pin's input level; it is commonly used to read signals from, or output control signals to, external devices.

+

Note: The pins on the MaixCAM are tolerant to 3.3V. Do not input 5V voltage.

+

Using GPIO in MaixPy

+
+

MaixPy firmware version must be greater than 4.1.2 (4.1.2 itself is not sufficient).

+
+

First, we need to know which pins and GPIOs the device has. For MaixCAM, each pin corresponds to a GPIO controller, as shown in the figure:

+


+maixcam_pro_io

+

It is important to note that pins can be used not only as GPIOs but also for other functions like PWM. Before using them, we need to set the pin function to GPIO.

+

For example, on MaixCAM, some pins are already occupied by other functions by default, such as UART0 and WiFi (SDIO1 + A26), so it is not recommended to use them.

+

Other pins can be used. The A14 pin is connected to the onboard LED and serves as a system load indicator by default; once you initialize it as a GPIO, the system indicator function is automatically disabled and it behaves as a regular GPIO (note that A14 can only be used as an output). This way you can control the LED's on and off state.

+


+

The circuit diagram of the LED is shown in the figure. Therefore, we only need to provide a high signal to pin A14, and the LED will conduct and light up:
+

+ +
from maix import gpio, pinmap, time
+
+pinmap.set_pin_function("A14", "GPIOA14")
+led = gpio.GPIO("GPIOA14", gpio.Mode.OUT)
+led.value(0)
+
+while 1:
+    led.toggle()
+    time.sleep_ms(500)
+
+

Here we first use pinmap to set the A14 pin function to GPIO. For A14 this step could be omitted, since the pin only has the GPIO function, but other pins generally do need it, so the example keeps it for generality.

+

For more APIs, please refer to the GPIO API Documentation

+

GPIO in Input Mode

+ +
from maix import gpio, pinmap, time
+
+pinmap.set_pin_function("A19", "GPIOA19")
+led = gpio.GPIO("GPIOA19", gpio.Mode.IN)
+
+while 1:
+    print(led.value())
+    time.sleep_ms(1) # sleep to make cpu free
+
+


+

Using the MaixCAM-Pro Illumination LED

+

Both MaixCAM and MaixCAM-Pro have a small LED light connected to pin A14. Additionally, the MaixCAM-Pro has an onboard illumination LED connected to pin B3, which is turned on by a high signal and off by a low signal:

+ +
from maix import gpio, pinmap, time
+
+pinmap.set_pin_function("B3", "GPIOB3")
+led = gpio.GPIO("GPIOB3", gpio.Mode.OUT)
+led.value(0)
+
+while 1:
+    led.toggle()
+    time.sleep_ms(500)
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/peripheral/hid.html b/maixpy/doc/en/peripheral/hid.html new file mode 100644 index 00000000..a7ce7ef1 --- /dev/null +++ b/maixpy/doc/en/peripheral/hid.html @@ -0,0 +1,450 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Introduction to Using MaixCAM MaixPy HID Device - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Introduction to Using MaixCAM MaixPy HID Device

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

MaixPy currently supports emulating a USB keyboard, mouse, and touchscreen. This guide shows how to use MaixPy to control your PC over HID.

+

Preparation

+
+

MaixPy firmware version should be >= 4.5.1.

+
+

You must enable the HID device before using HID. There are two ways to do this:

+
    +
  1. Open the built-in Settings application on MaixCAM, go to USB Settings, tick the HID devices you need (such as Keyboard, Mouse, Touchscreen), click Confirm, and then restart MaixCAM.
  2. +
  3. Open Examples/tools/maixcam_switch_usb_mode.py in MaixVision, edit device_list to enable the HID devices you need, run it, and then restart MaixCAM.
  4. +
+

Note: Only 4 USB functions are supported at the same time, so at most 4 of ncm, rndis, keyboard, mouse, and touchpad can be enabled simultaneously; choose according to your actual needs. ncm and rndis are the USB network protocol devices and are enabled by default; you can turn them off if you don't need them.

+

Emulating a Keyboard in MaixPy

+

You need to enable HID Keyboard to run it.

+

The following example sends the characters r, s, t, u, v through the keyboard and then releases the keys.

+ +
from maix import hid, time
+
+keyboard = hid.Hid(hid.DeviceType.DEVICE_KEYBOARD)
+
+# Refer to the `Universal Serial Bus HID Usage Tables` section of the [USB HID Documentation](https://www.usb.org) for key numbers.
+keys = [21, 22, 23, 24, 25, 0]   # means [r, s, t, u, v, 0], 0 means release key.
+
+for key in keys:
+    keyboard.write([0, 0, key, 0, 0, 0, 0, 0])
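If you prefer typing plain lowercase letters instead of looking up key numbers, a small helper like the sketch below converts a character to its HID usage ID (lowercase letters a to z map to usage IDs 4 to 29, consistent with the r to v codes used above):

def char_to_hid(c: str) -> int:
    # lowercase ASCII letters 'a'..'z' map to HID usage IDs 4..29
    assert "a" <= c <= "z"
    return ord(c) - ord("a") + 4

for c in "hello":
    keyboard.write([0, 0, char_to_hid(c), 0, 0, 0, 0, 0])
    keyboard.write([0, 0, 0, 0, 0, 0, 0, 0])   # release between characters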
+
+
+

Emulating a Mouse in MaixPy

+

You need to enable HID Mouse to run it.

+

The following example moves the mouse 5 pixels every 100ms.

+ +
from maix import hid, time
+
+mouse = hid.Hid(hid.DeviceType.DEVICE_MOUSE)
+
+button = 0      # button state, 0 means release, 1 means left button pressed, 2 means right button pressed, 4 means wheel button pressed
+x_oft = 0       # offset relative to current position, value range is -127~127
+y_oft = 0       # offset relative to current position, value range is -127~127
+wheel_move = 0  # The distance the wheel has moved, the range of values is -127~127
+
+count = 0
+while True:
+    x_oft = 5   # relative offset per report; values must stay within -127~127
+    y_oft = 5
+    mouse.write([button, x_oft, y_oft, wheel_move])
+    time.sleep_ms(100)
+    count += 1
+    if count > 50:
+        break
+
+

Emulating a Touchpad in MaixPy

+

The HID Touchpad needs to be enabled to run.

+

The following example moves the touch point by 150 units every 100 ms. Note that the touchscreen coordinate system is absolute, not relative: you need to map the actual screen size onto the interval [1, 0x7FFF], where (1, 1) is the upper-left corner and (0x7FFF, 0x7FFF) is the lower-right corner.

+ +
from maix import hid, time
+
+touchpad = hid.Hid(hid.DeviceType.DEVICE_TOUCHPAD)
+
+def touchpad_set(button, x_oft, y_oft, wheel_move):
+    touchpad.write([button,                             # button state, 0 means release, 1 means left button pressed, 2 means right button pressed, 4 means wheel button pressed
+                    x_oft & 0xff, (x_oft >> 8) & 0xff,  # Absolute position, the leftmost is 1, the rightmost is 0x7fff, 0 means no operation, the value range is 0 to 0x7fff.
+                    y_oft & 0xff, (y_oft >> 8) & 0xff,  # Absolute position, the topmost is 1, the bottom is 0x7fff, 0 means no operation, the value range is 0 to 0x7fff
+                    wheel_move])                        # wheel move distance, value range is -127~127
+button = 0
+x_oft = 0
+y_oft = 0
+wheel_move = 0
+count = 0
+while True:
+    x_oft += 150
+    y_oft += 150
+    touchpad_set(button, x_oft, y_oft, wheel_move)
+    time.sleep_ms(100)
+    count += 1
+    if count > 50:
+        break
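The example above works directly in HID units. If you would rather think in screen pixels, a helper like the following sketch maps pixel coordinates into the absolute [1, 0x7FFF] range described above (the 1920x1080 resolution is only a placeholder for your target screen):

def pixel_to_hid(x: int, y: int, screen_w: int = 1920, screen_h: int = 1080):
    # scale pixel coordinates to the absolute HID range and clamp to [1, 0x7FFF]
    hx = max(1, min(0x7FFF, round(x * 0x7FFF / (screen_w - 1))))
    hy = max(1, min(0x7FFF, round(y * 0x7FFF / (screen_h - 1))))
    return hx, hy

hx, hy = pixel_to_hid(960, 540)      # roughly the center of the placeholder screen
touchpad_set(0, hx, hy, 0)           # move the touch point there, no button pressed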
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/peripheral/i2c.html b/maixpy/doc/en/peripheral/i2c.html new file mode 100644 index 00000000..e73943c2 --- /dev/null +++ b/maixpy/doc/en/peripheral/i2c.html @@ -0,0 +1,408 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Using I2C with MaixCAM MaixPy - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Using I2C with MaixCAM MaixPy

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +
+

Note: Requires MaixPy image and firmware >= 4.2.1

+
+

The I2C and corresponding pins of MaixCAM can be seen in the diagram:

+


+maixcam_pro_io

+

For MaixCAM, due to limited pin resources, the pins for I2C1 and I2C3 overlap with those of the WiFi module (SDIO1). Therefore, you can only use either WiFi or hardware I2C, but not both. Additionally, there is an I2C5, which is simulated by software at the lower driver level. It is recommended to use this one, as the drivers are already set up, and its use is the same as using hardware I2C.

+

By default, the pins for I2C5 are configured as GPIO. Therefore, before using the i2c module, you should first use the pinmap module to set the pin functions to I2C5 as follows:

+ +
from maix import i2c, pinmap
+
+pinmap.set_pin_function("A15", "I2C5_SCL")
+pinmap.set_pin_function("A27", "I2C5_SDA")
+
+bus1 = i2c.I2C(5, i2c.Mode.MASTER)
+slaves = bus1.scan()
+print("find slaves:", slaves)
+
+
+

For more APIs, see i2c API documentation.

+

As mentioned above, for the MaixCAM, you must choose between using hardware I2C and WiFi. If you need to use I2C, you must disable WiFi and use the pinmap module to set the pin functions for I2C, then operate using the maix.i2c module.

+
+

TODO: Provide a method to disable WiFi (requires disabling the WiFi driver in the system, which is more complex).

+
+ +
from maix import i2c, pinmap
+
+pinmap.set_pin_function("P18", "I2C1_SCL")
+pinmap.set_pin_function("P21", "I2C1_SDA")
+
+bus1 = i2c.I2C(1, i2c.Mode.MASTER)
+slaves = bus1.scan()
+print("find slaves:", slaves)
+
+
+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/peripheral/pinmap.html b/maixpy/doc/en/peripheral/pinmap.html new file mode 100644 index 00000000..34d1bb74 --- /dev/null +++ b/maixpy/doc/en/peripheral/pinmap.html @@ -0,0 +1,454 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Using PINMAP in MaixCAM MaixPy - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Using PINMAP in MaixCAM MaixPy

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
DateVersionAuthorUpdate content
2024-06-111.0.0iawak9lkm + + Initial document + +
+
+
+ +
+
+ +

Pinmap Introduction

+

In System on Chip (SoC) design, a pin usually has more than one function, and this design method is called pin multiplexing. There are several main reasons for this:

+
    +
  • It saves the number of SoC pins.

    +

    SoCs integrate a large number of functional modules, such as CPUs, GPUs, memory controllers, I/O interfaces, communication modules, and so on. Assigning separate pins for each function would result in a very large number of pins being required, increasing the complexity and cost of the package. Through pin multiplexing, one pin can support different functions in different modes, thus significantly reducing the total number of pins.

    +
  • +
  • It reduces the cost of chip packaging and manufacturing.

    +

    Designers can choose smaller package sizes by reducing the number of pins, thus reducing packaging and manufacturing costs. Smaller packages not only reduce material costs, but also reduce the amount of space the chip takes up on the board, facilitating the design of more compact electronic products.

    +
  • +
  • It improves design flexibility.

    +

    Pin-multiplexing provides greater design flexibility. Different combinations of pin functions may be required in different application scenarios, and different pin functions can be enabled according to specific needs through software configuration. For example, the same pin can be used as a UART interface in one practical application and an SPI bus interface in another.

    +
  • +
  • It simplifies the PCB layout.

    +

    Reducing the number of pins simplifies the layout design of a printed circuit board (PCB). Fewer pins mean fewer wiring layers and vias, which simplifies PCB design and reduces manufacturing challenges and costs.

    +
  • +
  • Optimize performance.

    +

    In some cases, signal paths and performance can be optimized by multiplexing pins. For example, by selecting the proper combination of pin functions, interference and noise in the signal transmission path can be reduced, improving the overall performance and reliability of the system.

    +
  • +
+

Pinmap displays and manages the individual pin configurations of the chip, which typically include the name of each pin and its function (usually multiple functions).

+

We use the MaixCAM GPIO A28 as an example.

+
    +
  • A28 is the pin name.
  • +
  • GPIOA28/UART2_TX/JTAG_TDI are the functions this pin supports, as listed in the SoC manual; at any given time the pin can serve only one of these three functions.
  • +
+

With Pinmap, we can set the specified chip pin for the specified function.

+

Using Pinmap in MaixPy

+

The following diagram lists the pin numbers and their functions on the MaixCAM board.

+


+maixcam_pro_io

+

Or read the SG2002 Chip Manual Pinmux section for the remaining pin numbers and functions.

+

It's actually quite easy to use Pinmap to manage pin functions through MaixPy.

+ +
from maix.peripheral import pinmap
+
+print(pinmap.get_pins())
+
+f = pinmap.get_pin_functions("A28")
+print(f"GPIO A28 pin functions:{f}")
+
+print(f"Set GPIO A28 to {f[0]} function")
+pinmap.set_pin_function("A28", f[0])
+
+

In the example, we start by listing all the pins available for management. Then we query GPIO A28 for all the functions available. Finally the function of the pin is set to the first function listed (GPIO).

+

For a more detailed description of the Pinmap API, see the Pinmap API documentation.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/peripheral/pwm.html b/maixpy/doc/en/peripheral/pwm.html new file mode 100644 index 00000000..13db3ae5 --- /dev/null +++ b/maixpy/doc/en/peripheral/pwm.html @@ -0,0 +1,415 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Using PWM in MaixCAM MaixPy - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Using PWM in MaixCAM MaixPy

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

To use PWM in MaixPy (v4), first set the pin function to PWM using pinmap.

+

Each PWM corresponds to a specific pin, as shown in the pin diagram of MaixCAM:

+


+maixcam_pro_io

+

We recommend using PWM6 and PWM7.

+

For MaixCAM, since WiFi uses all pins of SDIO1, PWM4~9 can only be used alternatively with WiFi.

+
+

TODO: Provide a method to disable WiFi (requires disabling the WiFi driver in the system, which is quite complex)

+
+

Using PWM to Control a Servo in MaixPy

+

Here we take controlling a servo as an example, using PWM7 and the A19 pin of MaixCAM:

+ +
from maix import pwm, time, pinmap
+
+SERVO_PERIOD = 50     # 50Hz 20ms
+SERVO_MIN_DUTY = 2.5  # 2.5% -> 0.5ms
+SERVO_MAX_DUTY = 12.5  # 12.5% -> 2.5ms
+
+# Use PWM7
+pwm_id = 7
+# !! set pinmap to use PWM7
+pinmap.set_pin_function("A19", "PWM7")
+
+def angle_to_duty(percent):
+    return (SERVO_MAX_DUTY - SERVO_MIN_DUTY) * percent / 100.0 + SERVO_MIN_DUTY
+
+out = pwm.PWM(pwm_id, freq=SERVO_PERIOD, duty=angle_to_duty(0), enable=True)
+
+for i in range(100):
+    out.duty(angle_to_duty(i))
+    time.sleep_ms(100)
+
+for i in range(100):
+    out.duty(angle_to_duty(100 - i))
+    time.sleep_ms(100)
+
+

This code controls the servo to rotate from the minimum angle to the maximum angle and then back to the minimum angle.

+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/peripheral/spi.html b/maixpy/doc/en/peripheral/spi.html new file mode 100644 index 00000000..2dc16064 --- /dev/null +++ b/maixpy/doc/en/peripheral/spi.html @@ -0,0 +1,521 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Using SPI in MaixCAM MaixPy - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Using SPI in MaixCAM MaixPy

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
DateVersionAuthorUpdate content
2024-06-111.0.0iawak9lkm + + Initial document + +
+
+
+ +
+
+ +

SPI Introduction

+

SPI (Serial Peripheral Interface) is a synchronous peripheral interface that enables the SoC to communicate serially with various peripheral devices to exchange information. Common peripherals are Flash RAM, network controllers, LCD display drivers, and A/D converters.

+

SPI uses Master-Slave mode, which supports one or more Slave devices.

+

On a hardware circuit, SPI usually consists of 4 wires which are:

+
    +
  • MISO (Master In, Slave Out): the device sends data on this pin in slave mode and receives data on it in master mode.
  • +
  • MOSI (Master Out, Slave In): the device sends data on this pin in master mode and receives data on it in slave mode.
  • +
  • SCK: Serial bus clock, output by the master device and input by the slave device.
  • +
  • NSS/CS: Slave Device Selection. It acts as a chip select pin, allowing the master device to communicate with specific slave devices individually, avoiding conflicts on the bus.
  • +
+

In terms of communication protocols, SPI behavior is generally like this:

+
    +
  • SPI supports one master device and multiple slave devices. When the master needs to communicate with a specific slave, it asserts the CS pin connected to that slave to enable the transfer. This means each slave device has only one CS pin for the master to select it, and the number of chip-select pins the master needs depends on how many slave devices are connected to its SPI bus.

    +
  • +
  • SPI has four modes, depending on the configuration of polarity (CPOL) and phase (CPHA).

    +

    Polarity affects the level of the clock signal when the SPI bus is idle.

    +
      +
    1. CPOL = 1, it indicates a high level at idle.
    2. +
    3. CPOL = 0, it indicates a low level at idle.
    4. +
    +

    The phase determines the edge at which the SPI bus acquires data. There are two types of edges, rising edge and falling edge.

    +
      +
    1. CPHA = 0, it indicates that sampling starts from the first edge.
    2. +
    3. CPHA = 1, it indicates that sampling starts from the second edge.
    4. +
    +

    Polarity and phase are combined to form the four modes of SPI:

    +
  • +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ModeCPOLCPHA
000
101
210
311
+
    +
  • SPI typically supports both full-duplex transmission and half-duplex transmission.

    +
  • +
  • SPI does not specify a maximum transmission rate or an addressing scheme, nor does it define an acknowledgement mechanism or flow-control rules.

    +
  • +
+

Using SPI in MaixPy

+

This is the pinout of MaixCAM.

+


+maixcam_pro_io

+

You need to use maix.peripheral.pinmap to complete the pin mapping for SPI before use.

+

Note: The MaixCAM's SPI can only be used as an SPI master device. MaixCAM's SPI does not currently support changing the active level of the hardware CS pins; all hardware CS pins are active low. If you need a different CS active level, configure a software CS pin and its active level through the SPI API. SPI4 is a software-emulated SPI with a measured maximum rate of 1.25 MHz; its usage is the same as hardware SPI.

+

Using SPI with MaixPy is easy:

+ +
from maix import spi, pinmap
+
+pin_function = {
+    "A24": "SPI4_CS",
+    "A23": "SPI4_MISO",
+    "A25": "SPI4_MOSI",
+    "A22": "SPI4_SCK"
+}
+
+for pin, func in pin_function.items():
+    if 0 != pinmap.set_pin_function(pin, func):
+        print(f"Failed: pin{pin}, func{func}")
+        exit(-1)
+
+
+spidev = spi.SPI(4, spi.Mode.MASTER, 1250000)
+
+### Example of full parameter passing.
+# spidev = spi.SPI(id=4,                  # SPI ID
+#                  mode=spi.Mode.MASTER,  # SPI mode
+#                  freq=1250000,          # SPI speed
+#                  polarity=0,            # CPOL 0/1, default is 0
+#                  phase=0,               # CPHA 0/1, default is 0
+#                  bits=8,                # Bits of SPI, default is 8
+#                  cs_enable=True,        # Use soft CS pin? True/False, default is False
+#                  cs='GPIOA19')          # Soft cs pin number, default is 'GPIOA19'
+
+b = bytes(range(0, 8))
+
+res = spidev.write_read(b, len(b))
+if res == b:
+    print("loopback test succeed")
+else:
+    print("loopback test failed")
+    print(f"send:{b}\nread:{res}")
+
+

Before running this example, connect the MOSI and MISO pins of this SPI together to form a loopback.

+

The example configures the required pins with pinmap and then performs a full-duplex transfer; the data read back should equal the data sent.

+

See the SPI API documentation for a more detailed description of the SPI API.

+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/peripheral/uart.html b/maixpy/doc/en/peripheral/uart.html new file mode 100644 index 00000000..57f5c64a --- /dev/null +++ b/maixpy/doc/en/peripheral/uart.html @@ -0,0 +1,652 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Introduction to Using MaixCAM MaixPy UART Serial Port - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Introduction to Using MaixCAM MaixPy UART Serial Port

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction to Serial Ports

+

A serial port is a communication method that includes the definitions of both hardware and communication protocols.

+
    +
  • Hardware includes:
      +
    • 3 pins: GND, RX, TX, with cross-connection for communication. RX and TX should be cross-connected, meaning one side's TX should connect to the other side's RX, and both sides' GND should be connected together.
    • +
    • Controller, usually inside the chip, also known as the UART peripheral. Generally, a chip can have one or more UART controllers, each with corresponding pins.
    • +
    +
  • +
  • Serial communication protocol: To ensure smooth communication between both parties, a set of protocols is established, specifying how communication should occur, including common parameters like baud rate and parity bit. Baud rate is the most commonly used parameter.
  • +
+

Using the serial port of the board, you can communicate data with other microcontrollers or SOCs. For example, human detection can be implemented on MaixCAM, and the detected coordinates can be sent to STM32/Arduino microcontrollers via the serial port.

+

Using Serial Port in MaixPy

+

MaixCAM's default configuration exposes a serial port through the USB port. By plugging in the Type-C adapter board, you can directly use the serial port pins. Alternatively, you can use the A16(TX) and A17(RX) pins directly on the board, which are equivalent to those exposed via the USB port, refer to IO interface image:

+


+maixcam_pro_io

+

When using the serial port exposed through USB on MaixCAM, note that the RX and TX pins on the Type-C adapter board swap when the connector is flipped (relative to the silk screen, with the Type-C female port facing forward). If communication fails, try flipping the Type-C connector to see if that resolves the issue. Although this is a design flaw, frequent plugging and unplugging is rare, so adapting to it is acceptable.

+

After connecting the two communicating boards (cross-connecting RX and TX and connecting both GND), you can use software for communication.

+

Using the serial port with MaixPy is simple:

+ +
from maix import uart
+
+device = "/dev/ttyS0"
+# ports = uart.list_devices() # List available serial ports
+
+serial = uart.UART(device, 115200)
+serial.write_str("hello world")
+print("received:", serial.read(timeout = 2000))
+
+

Here, we use the first serial port /dev/ttyS0, which is the serial port exposed via Type-C mentioned above.

+

More serial port APIs can be found in the UART API documentation.

+

MaixCAM Serial Port Usage Notes

+

TX Pin Notes

+

MaixCAM's TX (UART0) pin must not be in a pulled-down state during boot-up, or the device will fail to start. This is a characteristic of the chip. If you are designing a 3.3v to 5v level-shifting circuit, be sure not to default it to a pulled-down state and keep it floating (consider using a level-shifting chip).

+

If the device fails to boot, also check whether the TX pin is pulled down.

+

Connecting to a Computer via Serial Port

+

Developers may ask: Why doesn't the serial port device appear on the computer when the USB is plugged in? The answer is that the USB on the device defaults to a virtual USB network card without serial port functionality. To access the device's terminal, use SSH connection.

+

For MaixCAM, the serial port 0 from the Type-C adapter board is directly connected to the A16(TX) and A17(RX) pins. It can be connected directly to other devices, such as microcontrollers' serial port pins. To communicate with a computer, use a USB-to-serial converter board (such as this one).

+

Boot Log Output

+

It is important to note that MaixCAM's serial port 0 will output some boot logs during startup. After startup, the message serial ready will be printed. When communicating with a microcontroller, discard this information. If there are system startup issues, the boot log from serial port 0 can help diagnose the problem.

+

Sending Data

+

There are mainly two functions for sending data: write_str and write.

+

The write_str function is used to send strings, while write is used to send byte streams, i.e., str and bytes types, which can be converted to each other. For example:

+
    +
  • "A" can be converted to b"A" using the encode() method, and vice versa, b"A" can be converted back to "A" using the decode() method.
  • +
  • str cannot display some invisible characters, such as the ASCII value 0, which is generally \0 in strings and serves as a terminator. In bytes type, it can be stored as b"\x00".
  • +
  • This is more useful for non-ASCII encoded strings. For example, the Chinese character 好 in UTF-8 encoding is represented by the three bytes \xe5\xa5\xbd. We can use "好".encode("utf-8") to get b"\xe5\xa5\xbd", and b'\xe5\xa5\xbd'.decode("utf-8") to get "好".
  • +
+
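A quick demonstration of the conversions described above:

s = "A"
b = s.encode()                # str -> bytes: b'A'
print(b.decode())             # bytes -> str: "A"

zh = "好".encode("utf-8")     # b'\xe5\xa5\xbd'
print(zh, zh.decode("utf-8")) # back to "好"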

So if we need to send byte data, we can use the write() method to send it. For example:

+ +
bytes_content = b'\x01\x02\x03'
+serial.write(bytes_content)
+
+

Therefore, for the str type, you can use serial.write(str_content.encode()) instead of write_str to send it.

+

If you have other data types that you want to convert into a string to send, you can use Python string formatting to create a string. For example, to send I have xxx apple, where xxx is an integer variable, you can do:

+ +
num = 10
+content = "I have {} apple".format(num)
+content2 = f"I have {num} apple"
+content3 = "I have {:04d} apple".format(num)
+content4 = f"I have {num:d} apple"
+print(content)
+print(content2)
+print(content3)
+print(content4)
+print(type(content))
+serial.write_str(content)
+
+

Additionally, you can encode the data into a binary stream to send. For example, the first 4 bytes are hexadecimal AABBCCDD, followed by an int type value, and finally a 0xFF at the end. You can use struct.pack to encode it (if this is unclear, you can read the explanation later):

+ +
from struct import pack
+num = 10
+bytes_content = b'\xAA\xBB\xCC\xDD'
+bytes_content += pack("<i", num)
+bytes_content += b'\xFF'
+print(bytes_content, type(bytes_content))
+serial.write(bytes_content)
+
+

Here, pack("<i", num) encodes num as an int type, which is a 4-byte signed integer. The < symbol indicates little-endian encoding, with the low byte first. Here, num = 10, the 4-byte hexadecimal representation is 0x0000000A, and little-endian encoding puts the low byte 0x0A first, resulting in b'\x0A\x00\x00\x00'.

+
+

Here, we use i to encode int type data as an example. Other types, such as B for unsigned char, etc., can also be used. More struct.pack formatting options can be searched online with python struct pack.

+
+

In this way, the final data sent is AA BB CC DD 0A 00 00 00 FF as binary data.

+
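If the receiving side also runs Python, the same frame can be taken apart with struct.unpack; a minimal sketch of the reverse operation for the layout above:

from struct import unpack

frame = b'\xAA\xBB\xCC\xDD' + b'\x0A\x00\x00\x00' + b'\xFF'
header = frame[:4]                  # b'\xaa\xbb\xcc\xdd'
num = unpack("<i", frame[4:8])[0]   # little-endian int -> 10
tail = frame[8]                     # 0xFF
print(header, num, hex(tail))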

Receiving Data

+

Use the read method to read data directly:

+ +
while not app.need_exit():
+    data = serial.read()
+    if data:
+        print(data)
+    time.sleep_ms(1)
+
+

Similarly, the data obtained by the read method is also of the bytes type. Here, read reads a batch of data sent by the other party. If there is no data, it returns b'', which is an empty byte.

+

Here, time.sleep_ms(1) is used to sleep for 1ms, which frees up the CPU so that this thread does not occupy all CPU resources. 1ms does not affect the program's efficiency, especially in multithreading.

+

In addition, the read function has two parameters:

+
    +
  • len: Represents the maximum length you want to receive. The default is -1, meaning it will return as much as there is in the buffer. If you pass a value >0, it means it will return data up to that length.
  • +
  • timeout:
      +
    • The default 0 means it will return immediately with whatever data is in the buffer. If len is -1, it returns all data; if a length is specified, it returns data not exceeding that length.
    • +
    • <0 means it waits until data is received before returning. If len is -1, it waits for a batch of data and returns it (a blocking read of all data); if a length is specified, it waits until len bytes have been received before returning.
    • +
    • >0 means it will return after this time, regardless of whether data is received.
    • +
    +
  • +
+

It may seem complex, but here are some common parameter combinations:

+
    +
  • read(): equivalent to read(-1, 0); reads the data received in the buffer, usually a batch of data sent by the other party. It returns once the other party has paused sending (for longer than one character's transmission time).
  • +
  • read(len = -1, timeout = -1): Blocking read for a batch of data, waits for the other party to send data and returns only when there is no more data within one character's sending time.
  • +
  • read(len = 10, timeout = 1000): Blocking read for 10 characters, returns when 10 characters are read or 1000ms has passed without receiving any data.
  • +
+
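In code, these combinations look like this (serial is the UART object created earlier):

data = serial.read()                          # whatever is already in the buffer
data = serial.read(len = -1, timeout = -1)    # block until a batch of data arrives
data = serial.read(len = 10, timeout = 1000)  # up to 10 bytes, or give up after 1000 ms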

Setting a Callback Function for Receiving Data

+

In MCU development, a serial port interrupt event usually occurs when data is received. MaixPy has already handled the interrupt at the bottom layer, so developers don't need to handle the interrupt themselves. If you want to call a callback function upon receiving data, you can use set_received_callback to set the callback function:

+ +

+from maix import uart, app, time
+
+def on_received(serial : uart.UART, data : bytes):
+    print("received:", data)
+    # send back
+    serial.write(data)
+
+device = "/dev/ttyS0"
+
+serial = uart.UART(device, 115200)
+serial.set_received_callback(on_received)
+
+serial.write_str("hello\r\n")
+print("sent hello")
+print("wait data")
+
+while not app.need_exit():
+    time.sleep_ms(100) # sleep to make CPU free
+
+

When data is received, the set callback function will be called in another thread. Since it's called in another thread, unlike an interrupt function, you don't have to exit the function quickly. You can handle some tasks in the callback function before exiting, but be aware of common multithreading issues.

+

If you use the callback function method to receive data, do not use the read function to read it, or it will read incorrectly.

+

Using Other Serial Ports

+

Each pin may correspond to different peripheral functions, which is also known as pin multiplexing. As shown below, each pin corresponds to different functions. For example, pin A17 (silkscreen identification on the board) corresponds to GPIOA17, UART0_RX, and PWM5 functions. The default function is UART0_RX.

+


+maixcam_pro_io

+

By default, you can directly use UART0 as shown above. For other serial port pins, they are not set to the serial peripheral function by default, so you need to set the mapping to use other serial ports. Use pinmap.set_pin_function to set it.

+

Let's take UART1 as an example. First, set the pin mapping to choose the serial port function, then use the device number /dev/ttyS1. Note that uart.list_devices() will not return manually mapped serial ports by default, so you can directly pass the parameters manually:

+ +
from maix import app, uart, pinmap, time
+
+pinmap.set_pin_function("A18", "UART1_RX")
+pinmap.set_pin_function("A19", "UART1_TX")
+
+device = "/dev/ttyS1"
+
+serial1 = uart.UART(device, 115200)
+
+
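After the mapping, serial1 is used in exactly the same way as UART0 above, for example:

serial1.write_str("hello from UART1\r\n")
data = serial1.read(timeout = 2000)
if data:
    print("received:", data)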

Application Layer Communication Protocol

+

Concept and Character Protocol

+

Serial ports only define the hardware communication timing. To let the receiver understand the meaning of the character stream sent by the sender, an application communication protocol is usually established. For example, if the sender needs to send coordinates containing two integer values x, y, the following protocol is established:

+
    +
  • Frame Header: When I start sending the $ symbol, it means I'm about to start sending valid data.
  • +
+
+

Note: A start symbol is used because serial communication is stream-based. For example, sending 12345 twice may result in receiving 12345123 at some moment, with the 45 of the second frame not yet received. With start and end symbols we can delimit a complete data frame.

+
+
    +
  • The value range of x, y is 0~65535, i.e., an unsigned short integer (unsigned short). I'll first send x then y, separated by a comma, such as 10,20.
  • +
  • Frame Tail: Finally, I'll send a * to indicate that I've finished sending this data.
  • +
+

In this way, sending a data packet looks like $10,20* as a string. The other party can receive and parse it using C language:

+ +
// 1. Receive data
+// 2. Determine if the reception is complete based on the frame header and tail, and store the complete frame data in the buff array
+// 3. Parse a frame of data
+uint16_t x, y;
+sscanf(buff, "$%hu,%hu*", &x, &y);  // %hu matches uint16_t (unsigned short)
+
+

Thus, we have defined a simple character communication protocol with a certain degree of reliability. However, since we usually use parameters like 115200 8 N 1 for serial ports, where N means no parity check, we can add a checksum to our protocol at the end. For example:

+
    +
  • Here, we add a checksum value after x, y, ranging from 0 to 255. It is the sum of all previous characters modulo 255.
  • +
  • Taking $10,20 as an example, in Python, you can simply use the sum function: sum(b'$10,20') % 255 --> 20, and send $10,20,20*.
  • +
  • The receiver reads the checksum 20, calculates it in the same way as $10,20, and if it is also 20, it means no transmission error occurred. Otherwise, we assume a transmission error and discard the packet to wait for the next one.
  • +
+

In MaixPy, encoding a character protocol can be done using Python's string formatting feature:

+ +
x = 10
+y = 20
+content = "${},{}*".format(x, y)
+print(content)
+
+
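Appending the checksum described above is just one more formatting step; a minimal sketch:

x = 10
y = 20
payload = "${},{}".format(x, y)          # "$10,20"
checksum = sum(payload.encode()) % 255   # 20 for "$10,20"
frame = "{},{}*".format(payload, checksum)
print(frame)                             # $10,20,20*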

Binary Communication Protocol

+

The character protocol above transmits data as visible characters. The advantage is simplicity and human readability. The drawback is that the frame length varies and the data volume is larger. For example, $10,20* and $1000,2000* have different lengths, and 1000 takes 4 characters, i.e. 4 bytes, whereas an unsigned short integer (uint16) can represent values from 0~65535 in only two bytes, reducing the amount of data transmitted.

+

We also know visible characters can be converted to binary via ASCII tables, such as $1000 being 0x24 0x31 0x30 0x30 0x30 in binary, requiring 5 bytes. If we directly encode 1000 in binary as 0x03E8, we can send 0x24 0x03 0xE8 in just 3 bytes, reducing communication overhead.

+

Additionally, 0x03E8 is a 2-byte representation with 0xE8 as the low byte, transmitted first in little-endian encoding. The opposite is big-endian encoding. Both are fine as long as both parties agree on one.

+

In MaixPy, converting a number to bytes is simple with struct.pack. For example, 0x03E8 (decimal 1000):

+ +
from struct import pack
+b = pack("<H", 1000)
+print(b)
+
+

Here, <H indicates little-endian encoding, with H denoting a uint16 data type, resulting in b'\xe8\x03' as bytes.

+
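The receiving side can restore the number with struct.unpack using the same format string:

from struct import unpack

value = unpack("<H", b'\xe8\x03')[0]
print(value)  # 1000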

Similarly, binary protocols can have a frame header, data content, checksum, frame tail, or a frame length field instead of a frame tail, based on preference.

+

Built-in MaixPy Communication Protocol

+

MaixPy also includes a built-in communication protocol.

+

This communication protocol defines the format for communication between parties, making it easier to parse and recognize information. It's a binary protocol that includes a frame header, data content, and checksum. The complete protocol is defined in the Maix Serial Communication Protocol Standard. Those unfamiliar with communication protocols may find it challenging at first, but reviewing the example below multiple times can help with understanding.

+

For instance, if we have object detection, and we want to send the detected objects' information, such as coordinates, to another device (like STM32 or Arduino microcontrollers) via serial port:

+

Complete example: MaixPy/examples/protocol/comm_protocol_yolov5.py.

+

First, we need to detect objects. Refer to the yolov5 object detection example. Here, we omit other details and focus on the detection results:

+ +
while not app.need_exit():
+    img = cam.read()
+    objs = detector.detect(img, conf_th = 0.5, iou_th = 0.45)
+    for obj in objs:
+        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color = image.COLOR_RED)
+        msg = f'{detector.labels[obj.class_id]}: {obj.score:.2f}'
+        img.draw_string(obj.x, obj.y, msg, color = image.COLOR_RED)
+    dis.show(img)
+
+

You can see objs are multiple detection results. Here, we're drawing boxes on the screen, and we can find a way to send these results via the serial port.

+

We don't need to manually initialize the serial port; just use the built-in maix.comm and maix.protocol modules. Creating a comm.CommProtocol object automatically initializes the serial port, with a default baud rate of 115200. The serial protocol settings can be changed in the device's System Settings -> Communication Protocol.

+

The system settings may have other communication methods, such as tcp, with uart as the default. You can also use maix.app.get_sys_config_kv("comm", "method") to check if uart is currently set.

+ +
from maix import comm, protocol, app
+from maix.err import Err
+import struct
+
+def encode_objs(objs):
+    '''
+        encode objs info to bytes body for protocol
+        2B x(LE) + 2B y(LE) + 2B w(LE) + 2B h(LE) + 2B idx ...
+    '''
+    body = b""
+    for obj in objs:
+        body += struct.pack("<hhHHH", obj.x, obj.y, obj.w, obj.h, obj.class_id)
+    return body
+
+APP_CMD_ECHO = 0x01        # Custom command 1, for testing, not used here, reserved
+APP_CMD_DETECT_RES = 0x02  # Custom command 2, send detected object information
+                           # You can define more commands based on your application
+
+p = comm.CommProtocol(buff_size = 1024)
+
+while not app.need_exit():
+    # ...
+    objs = detector.detect(img, conf_th = 0.5, iou_th = 0.45)
+    if len(objs) > 0:
+        body = encode_objs(objs)
+        p.report(APP_CMD_DETECT_RES, body)
+    # ...
+
+

Here, the encode_objs function packages all detected object information into bytes type data, and the p.report function sends the result.

+

The content of body is simply defined as 2B x(LE) + 2B y(LE) + 2B w(LE) + 2B h(LE) + 2B idx ..., meaning:

+
    +
  • In this image, multiple objects are detected and arranged in order in body. Each target takes up 2+2+2+2+2 = 10 bytes, with body_len / 10 objects in total.
  • +
  • The 1st and 2nd bytes represent the x coordinate of the top-left corner of the detected object, in pixels. Since the yolov5 result can have negative values for this coordinate, we use a short type to represent it, with little-endian encoding (LE).
  • +
+
+

Little-endian here means the low byte is in front. For example, if the x coordinate is 100, hexadecimal 0x64, we use a two-byte short to represent it as 0x0064. Little-endian encoding puts 0x64 first, resulting in b'\x64\x00'.

+
+
    +
  • Similarly, encode the subsequent data in sequence, resulting in 10 bytes of bytes type data for each object.
  • +
  • Iterate through and encode all object information into a single bytes string.
  • +
+

When calling the report function, the protocol header, checksum, etc., are automatically added according to the protocol, allowing the other end to receive a complete data frame.

+

On the other end, data should be decoded according to the protocol. If the receiving end is also using MaixPy, you can directly do:

+ +
while not app.need_exit():
+    msg = p.get_msg()
+    if msg and msg.is_report and msg.cmd == APP_CMD_DETECT_RES:
+        print("receive objs:", decode_objs(msg.get_body()))
+        p.resp_ok(msg.cmd, b'1')
+
+
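decode_objs is not shown in this snippet (the complete example linked above contains the full code); a minimal sketch matching the 10-bytes-per-object layout produced by encode_objs could look like this:

import struct

def decode_objs(body: bytes):
    '''decode 2B x(LE) + 2B y(LE) + 2B w(LE) + 2B h(LE) + 2B idx ... into a list'''
    objs = []
    for i in range(0, len(body), 10):
        x, y, w, h, class_id = struct.unpack("<hhHHH", body[i:i+10])
        objs.append((x, y, w, h, class_id))
    return objs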

If the other device is something like STM32 or Arduino, you can refer to the C language functions in the appendix of the Maix Serial Communication Protocol Standard for encoding and decoding.

+

Other Tutorials

+ + + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/peripheral/wdt.html b/maixpy/doc/en/peripheral/wdt.html new file mode 100644 index 00000000..75c9b00c --- /dev/null +++ b/maixpy/doc/en/peripheral/wdt.html @@ -0,0 +1,393 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Using Watchdog Timer in MaixCAM MaixPy - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Using Watchdog Timer in MaixCAM MaixPy

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

To prevent program issues, a watchdog timer (WDT) is often used to automatically restart the system when the program encounters a problem.

+

The principle is that there is a countdown timer that we need to periodically reset within the program logic (also called "feeding the dog"). If our program gets stuck and fails to reset the countdown timer, the hardware will trigger a system reboot when the timer reaches 0.

+

Using WDT in MaixPy

+ +
from maix import wdt, app, time
+
+w = wdt.WDT(0, 1000)
+
+while not app.need_exit():
+    w.feed()
+    # Here, sleep operation is our task
+    # 200 ms is normal; if it exceeds 1000 ms, it will cause a system reset
+    time.sleep_ms(200)
+
+

This code sets up a watchdog timer that requires feeding every 1000 ms. If the program fails to feed the watchdog within this period, the system will reset.

+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/pro/compile_os.html b/maixpy/doc/en/pro/compile_os.html new file mode 100644 index 00000000..926c0dee --- /dev/null +++ b/maixpy/doc/en/pro/compile_os.html @@ -0,0 +1,409 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Compiling a System for MaixCAM MaixPy - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Compiling a System for MaixCAM MaixPy

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Why Customize the System?

+

Typically, you can download the latest system for MaixCAM directly from this link. However, there are some scenarios where you might need to customize the system:

+
    +
  • For example, if you are mass-producing 1,000 products and want each to have your own application that automatically starts on boot, without configuring each one individually, you can modify the builtin_files and package a system. Once this system is flashed onto the boards, they will all include your custom files, eliminating the need to copy them again after booting.
  • +
  • If the official system does not include the software packages or drivers you need, you can compile your own system and select the packages you want to include.
  • +
+

Obtaining the Base System

+

The principle is to use a system from this link as the base (note that this system cannot be directly flashed onto MaixCAM as it may damage the screen), then copy the MaixCAM-specific files into the base system and repackage it into a system usable by MaixCAM.

+

If you don't need to customize the base system, you can directly download the latest system image from here.

+

If the base system doesn't meet your requirements, such as needing to add or remove some software packages and drivers, follow the instructions in the LicheeRV-Nano-Build repository README to compile the system. It's recommended to use Docker for compilation to avoid environment issues and to use bash instead of zsh.

+

Remember, the compiled system should not be flashed directly onto MaixCAM, as it might damage the screen.

+

Copying Files for MaixCAM

+

Prepare the following:

+
    +
  • The base system, which is a .img or .img.xz file.
  • +
  • Additional files for MaixCAM can be downloaded from the MaixPy release page. Download the latest builtin_files.tar.xz.
  • +
+
+

If you need to add custom files to the system, extract builtin_files.tar.xz and add your files to the appropriate directory. For example, if you want a cat.jpg file to end up in the /root directory after flashing, simply place cat.jpg in the root directory of the extracted files.

+
+
    +
  • Download or clone the MaixPy source code locally.
  • +
  • Compile MaixPy to obtain the .whl installation package, or you can download the latest installation package from the MaixPy release page.
  • +
+

In the MaixPy/tools/os directory, run the following command:

+ +
./gen_os.sh <base_os_filepath> <maixpy_whl_filepath> <builtin_files_dir_path> <os_version_str> [skip_build_apps]
+
+

Here’s what each parameter means:

+
    +
  • base_os_filepath: The path to the base system, in .img or .img.xz format.
  • +
  • maixpy_whl_filepath: The MaixPy package, in .whl format.
  • +
  • builtin_files_dir_path: The custom files for MaixCAM, which can be downloaded from the MaixPy release page.
  • +
  • os_version_str: The system version, which should follow a format like maixcam-2024-08-16-maixpy-v4.4.21.
  • +
  • skip_build_apps: Optional argument to skip compiling the built-in applications. Set it to 1 to skip; if omitted, the built-in apps from MaixCDK and MaixPy are compiled and copied into the system.
  • +
+

Example command:

+ +
./gen_os.sh '/home/xxx/.../LicheeRV-Nano-Build/install/soc_sg2002_licheervnano_sd/images/2024-08-13-14-43-0de38f.img' ../../dist/MaixPy-4.4.21-py3-none-any.whl '/home/xxx/.../sys_builtin_files' maixcam-2024-08-15-maixpy-v4.4.21
+
+

After waiting for the built-in apps to compile and copy, you should find a maixcam-2024-08-15-maixpy-v4.4.21.img.xz system image in the MaixPy/tools/os/tmp directory.

+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/projects/face_tracking.html b/maixpy/doc/en/projects/face_tracking.html new file mode 100644 index 00000000..034e9b28 --- /dev/null +++ b/maixpy/doc/en/projects/face_tracking.html @@ -0,0 +1,485 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Face Tracking 2 axis servo gimbal - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Face Tracking 2 axis servo gimbal

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
DateVersionAuthorUpdate content
2024-06-111.0.0iawak9lkm + + Initial documentation + +
+
+
+ +
+
+ +

Before reading this article, make sure you know how to develop with MaixCAM. For details, please read Quick Start.

+

Source Code

+

Download APP

+

Description

+

Face recognition and tracking is accomplished using a gimbal consisting of two servos and MaixCAM.

+

+

+

Usage of this example program

+
    +
  • Assemble your Gimbal and MaixCAM.

    +
  • +
  • Modify the parameters in main.py.

    +

    Modify the MaixCAM pins used for each servo. The specified pins must have PWM capability. The servos.Servos constructor then configures the pins for PWM functionality.

    + +
    ROLL_PWM_PIN_NAME = "A17"
    +PITCH_PWM_PIN_NAME = "A16"
    +
    +

    Modify the initial positions of the two servos.

    + +
init_pitch = 80         # init position, value: [0, 100], means minimum angle to maximum angle of servo
    +init_roll = 50          # 50 means middle
    +
    +

    Modify the minimum and maximum PWM duty cycle for the range of motion of each of the two servos. NOTE: with certain gimbal configurations, driving a servo beyond its physical range of motion can have unintended consequences. Make sure there is no obstruction within the range of motion corresponding to the following settings.

    + +
    PITCH_DUTY_MIN  = 3.5   # The minimum duty cycle corresponding to the range of motion of the y-axis servo.
    +PITCH_DUTY_MAX  = 9.5   # Maximum duty cycle corresponding to the y-axis servo motion range.
    +ROLL_DUTY_MIN   = 2.5   # Minimum duty cycle for x-axis servos.
+ROLL_DUTY_MAX   = 12.5  # Maximum duty cycle for x-axis servos.
    +
    +

    You need to select the direction of motion of the servos.

    + +
    pitch_reverse = False                   # reverse out value direction
    +roll_reverse = True                     # reverse out value direction
    +
    +
  • +
  • Just execute the code at the end.

    +

    If you installed the application from MaixHub, click face_tracking in the launcher to execute the program.

    +

    If you got the source code from Github, you can import the project folder in MaixVision and execute the whole project. Please refer to MaixVision Description for more information about MaixVision.

    +

    Of course, you can also copy the whole project folder to our MaixCAM in your favorite way and execute it with python.

    +
  • +
  • If you want to exit the program, just press the button in the upper left corner.

    +

    +
  • +
+

FAQs

+
    +
  • The face tracking is not ideal.

    +

    Different Gimbal use different PID parameters, you can adjust the PID value to make the effect better.

    + +
    pitch_pid = [0.3, 0.0001, 0.0018, 0]    # [P I D I_max]
    +roll_pid  = [0.3, 0.0001, 0.0018, 0]    # [P I D I_max]
    +
    +
  • +
  • After tracking locks on, the gimbal keeps jittering slightly left and right for a while even though the face is motionless.

    +

    You can usually make this effect as small as possible by adjusting the PID; however, there is no way to avoid the jitter caused by the physical structure of the gimbal. You can try to adjust the deadband to minimize the jitter.

    + +
    target_ignore_limit = 0.08
    +# when target error < target_err_range*target_ignore_limit , set target error to 0
    +
    +
  • +
  • The display shows or the terminal prints PIN: XXX does not exist.

    +

    This is because the pin does not exist in the pinout of the MaixCAM board. Please select a pin with PWM function on MaixCAM.

    +
  • +
  • The display shows or the terminal prints Pin XXX doesn't have PWM function.

    +

    This is because the pin does not have a PWM function, you need to select a pin with a PWM function.

    +
  • +
+

How to track other objects

+
    +
  • In main.py there exists a class Target which is used to customize the target to be tracked.

    +
  • +
  • In __init__, initialize the objects you need to use, such as the camera.

    +
  • +
  • In __get_target(), you need to calculate the center point of the tracked object. If the tracked object is not present in the frame, return -1,-1 so that the program does nothing while the target is not found. You also need to call self.__exit_listener(img) and self.disp.show(img) before returning the point, so that the program can still interact with you properly.

    +
  • +
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/projects/index.html b/maixpy/doc/en/projects/index.html new file mode 100644 index 00000000..ce6cb75b --- /dev/null +++ b/maixpy/doc/en/projects/index.html @@ -0,0 +1,395 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Practical Projects with MaixCAM MaixPy, Introduction and Collection - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Practical Projects with MaixCAM MaixPy, Introduction and Collection

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

Here we provide some common practical project examples for community members to refer to and replicate for use. This also helps to inspire everyone to create more and better applications and projects.

+

There are several ways to find projects implemented with MaixPy:

+

MaixPy Official Documentation

+

You can find practical projects in the documentation on the left, such as "Line Following Car."

+

If you have a good project or a recommended project, you can also contribute by adding it to the documentation.

+

MaixHub Project Sharing Square

+

Projects can be found in the MaixHub Project Sharing section.

+

High-quality shares will also be linked to the MaixPy official documentation.

+

You can also share your project-making methods, which will receive official rewards (guaranteed) and cash tips from community members (usually, high-quality projects that meet urgent needs are more likely to be tipped).

+

Recommend Projects:

+ +

MaixHub App Sharing

+

In addition to project sharing, you can also find directly runnable applications at the MaixHub App Store, some of which might be written in MaixPy. If the author has provided the source code or written detailed tutorials, these can also be referred to.

+

Recommend Projects:

+ + + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/projects/line_tracking_robot.html b/maixpy/doc/en/projects/line_tracking_robot.html new file mode 100644 index 00000000..d50e5364 --- /dev/null +++ b/maixpy/doc/en/projects/line_tracking_robot.html @@ -0,0 +1,458 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Line Tracking Robot (/Car) - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Line Tracking Robot (/Car)

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
DateVersionAuthorUpdate content
2024-05-091.0.0lxowalle + + Initial documentation + +
+
+
+ +
+
+ +

Before reading this article, make sure you know how to develop with MaixCAM. For details, please read Quick Start.

+

Introduction

+

This article describes how to implement a line tracking robot using MaixPy.

+

How to implement line tracking robot using MaixPy

+
    +
  1. Preparation of MaixCAM and trolley
  2. Implementing the line tracking function
  3. Implement the trolley control function
+

Preparation of MaixCAM and trolley

+

TODO

+

Implementing the line tracking function

+

You can quickly find straight lines using the get_regression method of the image module; see Line tracking (./line_tracking.html).

+

Code:

+ +
from maix import camera, display, image
+
+cam = camera.Camera(320, 240)
+disp = display.Display()
+
+# thresholds = [[0, 80, 40, 80, 10, 80]]      # red
+thresholds = [[0, 80, -120, -10, 0, 30]]    # green
+# thresholds = [[0, 80, 30, 100, -120, -60]]  # blue
+
+while 1:
+    img = cam.read()
+
+    lines = img.get_regression(thresholds, area_threshold = 100)
+    for a in lines:
+        img.draw_line(a.x1(), a.y1(), a.x2(), a.y2(), image.COLOR_GREEN, 2)
+        theta = a.theta()
+        rho = a.rho()
+        if theta > 90:
+            theta = 270 - theta
+        else:
+            theta = 90 - theta
+        img.draw_string(0, 0, "theta: " + str(theta) + ", rho: " + str(rho), image.COLOR_BLUE)
+
+    disp.show(img)
+
+
+

The above code implements the function of finding a straight line, note:

+
    +
  • Use a.theta() to get the angle of the line.
  • +
  • Use a.rho() to get the distance between the line and the origin (the origin is in the upper left corner).
  • +
+

After finding the straight line with the code above, you can use a.theta() and a.rho() to control the direction of the trolley, as sketched below.

+

Implement the trolley control function

+

TODO

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/sidebar.yaml b/maixpy/doc/en/sidebar.yaml new file mode 100644 index 00000000..8f8a281f --- /dev/null +++ b/maixpy/doc/en/sidebar.yaml @@ -0,0 +1,192 @@ +items: +- file: README.md + label: Quick Start +- file: README_no_screen.md + label: Quick Start(no screen) +- file: faq.md + label: FAQ + +- label: Base + collapsed: false + items: + - file: basic/os.md + label: Burning system + - file: basic/app.md + label: App Usage and Development + - file: basic/maixpy_upgrade.md + label: Update MaixPy + - file: basic/maixvision.md + label: MaixVision usage + - file: basic/python.md + label: Python syntax + - file: basic/linux_basic.md + label: Linux fundamentals + - file: basic/python_pkgs.md + label: Add python packages + - file: basic/auto_start.md + label: Auto start + - file: basic/view_src_code.md + label: View API source code + +- label: Basic images and algorithms + items: + - file: vision/display.md + label: Screen usage + - file: vision/camera.md + label: Camera usage + - file: vision/image_ops.md + label: Image control + - file: vision/touchscreen.md + label: Touch screen + - file: vision/find_blobs.md + label: Finding color blocks + - file: vision/line_tracking.md + label: Line tracking + - file: vision/qrcode.md + label: QRcode identity + - file: vision/apriltag.md + label: AprilTag identity + - file: vision/opencv.md + label: OpenCV usage + - file: gui/i18n.md + label: I18N (Multi language) + +- label: AI Vision + items: + - file: vision/ai.md + label: AI vision knowledge + - file: vision/classify.md + label: AI object classification + - file: vision/yolov5.md + label: YOLO11/v8/v5 object detection + - file: vision/face_detection.md + label: Face and keypoints detection + - file: vision/face_recognition.md + label: Face recognition + - file: vision/body_key_points.md + label: Human critical point detection + - file: vision/segmentation.md + label: Image semantic segmentation + - file: vision/self_learn_classifier.md + label: Self-learning classifier + - file: vision/self_learn_detector.md + label: Self-learning detector + - file: vision/object_track.md + label: Object tracking and counting + - file: vision/ocr.md + label: OCR + - file: vision/maixhub_train.md + label: MaixHub online AI training + - file: vision/customize_model_yolov5.md + label: YOLOv5 model offline training + - file: vision/customize_model_yolov8.md + label: YOLO11/v8 model offline training + - file: ai_model_converter/maixcam.md + label: ONNX model to MaixCAM's + + +- label: AI audio + items: + - file: audio/record.md + label: Audio recording + - file: audio/play.md + label: Playing audio + - file: audio/recognize.md + label: Real-time voice recognition + - file: audio/digit.md + label: Continuous Chinese digit recognition + - file: audio/keyword.md + label: Keyword recognition + - file: audio/synthesis.md + label: Speech synthesis + +- label: Video + items: + - file: video/record.md + label: Video recording + - file: video/play.md + label: Playing video + - file: video/jpeg_streaming.md + label: JPEG streaming + - file: video/rtsp_streaming.md + label: RTSP streaming + - file: video/rtmp_streaming.md + label: RTMP streaming + +- label: Network + items: + - file: network/network_settings.md + label: Network settings + - file: network/http.md + label: http communication + - file: network/socket.md + label: socket communication + - file: network/mqtt.md + label: MQTT communication + - file: network/websocket.md + 
label: websocket communication + - file: network/flask.md + label: Flask Web server + +- label: On-chip peripherals + items: + - file: peripheral/pinmap.md + label: PINMAP + - file: peripheral/gpio.md + label: GPIO + - file: peripheral/uart.md + label: UART + - file: peripheral/i2c.md + label: I2C + - file: peripheral/pwm.md + label: PWM + - file: peripheral/spi.md + label: SPI + - file: peripheral/wdt.md + label: WDT watchdog + - file: peripheral/adc.md + label: ADC + - file: peripheral/hid.md + label: HID + +- label: Off-chip modules + items: + - file: modules/temp_humi.md + label: Temperature and humidity + - file: modules/acc.md + label: Accelerometer + - file: modules/rtc.md + label: RTC usage + - file: modules/tmc2209.md + label: Stepper motor TMC2209 + - file: modules/tof.md + label: TOF + - file: modules/thermal_cam.md + label: Thermal imaging + +- label: Projects + items: + - file: projects/README.md + label: Intro & Collection + - file: projects/line_tracking_robot.md + label: Line tracking robot + - file: projects/face_tracking.md + label: Face tracking 2-axis gimbal + +- label: Advanced + collapsed: false + items: + - file: source_code/contribute.md + label: Contribute + - file: source_code/build.md + label: Build source code + - file: source_code/faq.md + label: MaixPy Source FAQ + - file: source_code/add_c_module.md + label: Write in C/C++ + - file: source_code/maixcdk.md + label: MaixCDK development + - file: pro/compile_os.md + label: Build OS + + diff --git a/maixpy/doc/en/source_code/add_c_module.html b/maixpy/doc/en/source_code/add_c_module.html new file mode 100644 index 00000000..e66ed2fa --- /dev/null +++ b/maixpy/doc/en/source_code/add_c_module.html @@ -0,0 +1,437 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Adding a C/C++ Module to MaixCAM MaixPy - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Adding a C/C++ Module to MaixCAM MaixPy

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

Sometimes you need to execute a function efficiently, and Python's speed is insufficient. In such cases, you can implement the function using C/C++ or other compiled languages.

+

General Function Wrapping

+

If the function you want to wrap does not depend on other features of MaixPy, you can directly use the general methods for adding modules to Python using C/C++. You can search for methods like ffi or ctypes on the internet.

+
+

PRs are welcome to add more methods.

+
+

If Your Module Needs to Depend on Other MaixPy Basic APIs

+

Method 1

+

Directly modify the MaixPy firmware and then compile it. Refer to View MaixPy API Source Code. This method is the simplest and fastest. If the code is well-packaged, it can be merged into the official repository (by submitting a PR).

+
    +
  • Follow Compiling MaixPy Source Code to get the dist/***.whl installation package.
  • +
  • Send the .whl package from the dist directory to the device, then run the code import os; os.system("pip install /root/xxxxx.whl") (replace the path accordingly).
  • +
  • If installing the .whl package is too slow during debugging, you can use maixcdk build to compile and then use scp -r maix_xxx root@10.228.104.1:/usr/lib/python3.11/site-packages to directly copy it to the device system to overwrite the package. Adjust the package name and device IP as needed.
  • +
  • Once you have finished debugging and feel that the features you added are valuable, consider merging them into the official repository. You can learn how to do this by searching for keywords like "github submit PR" on search engines.
  • +
+

Modifying the code:
+As described in View MaixPy API Source Code, you can view and modify the source code, add C++ functions, and include comments. After compiling, you can call them in MaixPy. It's very simple.

+

For example:

+ +
namespace maix::test
+{
+    /**
+     * My function, add two integers.
+     * @param a arg a, int type
+     * @param b arg b, int type
+     * @return int type, a + b
+     * @maixpy maix.test.add
+     */
+    int add(int a, int b);
+}
+
+

Yes, simply write a C++ function. Note the @maixpy comment. During compilation, a Python function will be automatically generated. It's that simple! Then you can call the function with maix.test.add(1, 2).

+
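After rebuilding and installing the firmware, the generated API can be called from MaixPy like any other module:

from maix import test

print(test.add(1, 2))  # 3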

Method 2

+

Create a MaixPy module project based on an engineering template. This method is suitable for adding a package without modifying the MaixPy source code and still using MaixPy (MaixCDK) APIs. The method is as follows:

+
    +
  • First, compile MaixPy source code to ensure the compilation environment is set up correctly.
  • +
  • Copy the MaixPy/tools/maix_module project template to a new directory. It can be in the same directory as MaixPy. For example, copy all files and directories to the maix_xxx directory.
  • +
  • In the maix_xxx directory, run python init_files.py in the terminal to initialize the project files.
  • +
  • Change the project name: Modify the module_name.txt file to the desired module name, starting with maix_. This makes it easier for others to find your project on pypi.org or github.com.
  • +
  • Run python setup.py bdist_wheel linux in the project root directory to build for the computer.
  • +
  • After building, you can directly run python -c "import maix_xxx; maix_xxx.basic.print('Li Hua')" in the project root directory to test your module functions.
  • +
  • Run python setup.py bdist_wheel maixcam to build the package for MaixCAM. Note that the code prompt file (pyi file) can only be generated when building for the linux platform. Therefore, before releasing, first build for the linux platform to generate the code prompt file, then execute this command to generate the package for the MaixCAM platform.
  • +
  • Send the .whl package from the dist directory to the device, then run import os; os.system("pip install /root/xxxxx.whl") (replace the path accordingly).
  • +
  • If installing the .whl package is too slow during debugging, you can use maixcdk build to compile and then use scp -r maix_xxx root@10.228.104.1:/usr/lib/python3.11/site-packages to directly copy it to the device system to overwrite the package. Adjust the package name and device IP as needed.
  • +
  • Once you have debugged your code, consider open-sourcing it on github.com and uploading it to pypi.org. You can refer to the official documentation or search for tutorials on how to upload. Generally, you need to run pip install twine and then twine upload dist/maix_xxx***.whl. After completing this, feel free to share your achievements on maixhub.com/share!
  • +
+

Modifying the code:
+As described in View MaixPy API Source Code, add source files in the components/maix/include and components/maix/src directories, add C++ functions, and include comments. After compiling, you can call them directly. It's very simple.

+

For example:

+ +
namespace maix_xxx::test
+{
+    /**
+     * My function, add two integers.
+     * @param a arg a, int type
+     * @param b arg b, int type
+     * @return int type, a + b
+     * @maix_xxx maix_xxx.test.add
+     */
+    int add(int a, int b);
+}
+
+

Yes, simply write a C++ function. Note the @maix_xxx comment. During compilation, a Python function will be automatically generated. It's that simple! Then you can call the function with maix_xxx.test.add(1, 2).

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/source_code/build.html b/maixpy/doc/en/source_code/build.html new file mode 100644 index 00000000..39cf0465 --- /dev/null +++ b/maixpy/doc/en/source_code/build.html @@ -0,0 +1,442 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy develop source code guide - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy develop source code guide

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Get source code

+ +
mkdir -p ~/maix
+cd ~/maix
+git clone https://github.com/sipeed/MaixPy
+
+

Getting MaixCDK Source Code

+

The MaixPy project depends on MaixCDK. You need to clone it first and place it in a directory on your computer (do not place it under the MaixPy directory).

+ +
cd ~/maix
+git clone https://github.com/sipeed/MaixCDK
+
+

Then, you need to set the environment variable MAIXCDK_PATH to specify the path to MaixCDK, which can be added in ~/.bashrc or ~/.zshrc (depending on your shell):

+ +
export MAIXCDK_PATH=~/maix/MaixCDK
+
+

Only after successfully setting the environment variable can MaixPy locate the MaixCDK source code.

+

Build and pack to wheel

+ +
cd ~/maix/MaixPy
+python setup.py bdist_wheel maixcam
+
+

maixcam can be replaced with another board config; see the platform_names variable in setup.py.

+

After build success, you will find wheel file in dist directory, use pip install -U MaixPy****.whl on your device to install or upgrade.

+
+

python setup.py bdist_wheel maixcam --skip-build will not execute build command and only pack wheel, so you can use maixcdk menuconfig and maixcdk build first to customize building.

+
+
+

Additionally, if you are debugging APIs and need to install frequently, using pip can be slow. You can compile and then copy the maix directory directly to the /usr/lib/python3.11/site-packages directory on your device to overwrite the old files.

+
+

Build manually

+ +
maixcdk build
+
+

Run test after modify source code

+
    +
  • First, build source code by
  • +
+ +
maixcdk build
+
+
    +
  • If build for PC self(platform linux):
  • +
+

Then execute ./run.sh your_test_file_name.py to run python script.

+ +
cd test
+./run.sh examples/hello_maix.py
+
+
    +
  • If cross compile for board:
      +
    • The fastest way is to copy the maix directory to the device's /usr/lib/python3.11/site-packages/ directory, then run the script on the device.
    • +
    • Or pack wheel and install on device by pip install -U MaixPy****.whl, then run script on device.
    • +
    +
  • +
+

Preview documentation locally

+

Documentation in docs directory, use Markdown format, you can use teedoc to generate web version documentation.

+

The API documentation is generated when the MaixPy firmware is built; if you don't build MaixPy first, the API doc pages will be empty.

+ +
pip install teedoc -U
+cd docs
+teedoc install -i https://pypi.tuna.tsinghua.edu.cn/simple
+teedoc serve
+
+

Then visit http://127.0.0.1:2333 to preview documentation on web browser.

+

For developers who want to contribute

+

See MaixPy develop source code guide

+

If you encounter any problems when use source code, please refer to FAQ first.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/source_code/contribute.html b/maixpy/doc/en/source_code/contribute.html new file mode 100644 index 00000000..bd941a2d --- /dev/null +++ b/maixpy/doc/en/source_code/contribute.html @@ -0,0 +1,408 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Contributing to MaixCAM MaixPy Documentation Modification and Code Contribution - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Contributing to MaixCAM MaixPy Documentation Modification and Code Contribution

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Contributing to MaixPy Documentation Modification

+
    +
  • Click the "Edit this page" button in the top right corner of the documentation you want to modify to enter the GitHub source documentation page.
  • +
  • Make sure you are logged in to your GitHub account.
  • +
  • Click the pencil icon in the top right corner of the GitHub preview documentation page to modify the content.
  • +
  • GitHub will prompt you to fork a copy to your own repository. Click the "Fork" button.
  • +
+
+

This step forks the MaixPy source code repository to your own account, allowing you to freely modify it.

+
+
    +
  • Modify the documentation content, then fill in the modification description at the bottom of the page, and click "Commit changes".
  • +
  • Then find the "Pull requests" button in your repository and click to create a new Pull request.
  • +
  • In the pop-up page, fill in the modification description and click "Submit Pull request". Others and administrators can then see your modifications on the Pull requests page.
  • +
  • Wait for the administrator to review and approve, and your modifications will be merged into the MaixPy source code repository.
  • +
  • After the merge is successful, the documentation will be automatically updated to the MaixPy official documentation.
  • +
+
+

Due to CDN caching, it may take some time to see the update. For urgent updates, you can contact the administrator for manual refreshing.
+You can also visit en.wiki.sipeed.com/maixpy to view the GitHub Pages service version, which is updated in real-time without caching.

+
+

Contributing to MaixPy Code Contribution

+
    +
  • Visit the MaixPy code repository address: github.com/sipeed/MaixPy
  • +
  • Before modifying the code, it is best to create an issue first, describing the content you want to modify to let others know your ideas and plans, so that everyone can participate in the modification discussion and avoid duplication of effort.
  • +
  • Click the "Fork" button in the top right corner to fork a copy of the MaixPy code repository to your own account.
  • +
  • Then clone a copy of the code from your account to your local machine.
  • +
  • After modifying the code, commit it to your repository.
  • +
  • Then find the "Pull requests" button in your repository and click to create a new Pull request.
  • +
  • In the pop-up page, fill in the modification description and click "Submit Pull request". Others and administrators can then see your modifications on the Pull requests page.
  • +
  • Wait for the administrator to review and approve, and your modifications will be merged into the MaixPy source code repository.
  • +
+
+

Note that most of the MaixPy code is automatically generated from MaixCDK, so if you modify the C/C++ source code, you may need to modify the MaixCDK repository first.

+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/source_code/faq.html b/maixpy/doc/en/source_code/faq.html new file mode 100644 index 00000000..e7f0deb4 --- /dev/null +++ b/maixpy/doc/en/source_code/faq.html @@ -0,0 +1,390 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Source Code FAQ - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Source Code FAQ

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

subprocess.CalledProcessError: Command '('lsb_release', '-a')' returned non-zero exit status 1.

+

Edit /usr/bin/lsb_release as root, change the first line from #!/usr/bin/python3 to python3.

+

Then compile again and it should work.

+

ImportError: arg(): could not convert default argument 'format: maix::image::Format' in method '<class 'maix._maix.camera.Camera'>.init' into a Python object (type not registered yet?)

+

Pybind11 needs image::Format to be registered before it can be used in camera::Camera, so image::Format must be defined first in the generated build/maixpy_wrapper.cpp source file.

+

To achieve this, edit components/maix/headers_priority.txt; the header being depended on should be placed before the one that uses it.
+e.g.

+ +
maix_image.hpp
+maix_camera.hpp
+
+

/usr/bin/ld: /lib/libgdal.so.30: undefined reference to `std::condition_variable::wait(std::unique_lockstd::mutex&)@GLIBCXX_3.4.30' collect2: error: ld returned 1 exit status

+

This issue commonly arises when building for Linux and using a conda environment, due to some libraries in the conda environment having compilation parameter problems. The solution is to not use conda, or to individually locate the problematic library within conda and replace it with the system's version or simply delete it (the system will then locate the necessary library).

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/source_code/maixcdk.html b/maixpy/doc/en/source_code/maixcdk.html new file mode 100644 index 00000000..15355936 --- /dev/null +++ b/maixpy/doc/en/source_code/maixcdk.html @@ -0,0 +1,379 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM Switching to MaixCDK for C/C++ Application Development - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM Switching to MaixCDK for C/C++ Application Development

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

In addition to developing with MaixPy, there is also a corresponding C/C++ SDK available, called MaixCDK.

+

Introduction to MaixCDK

+

MaixPy is built on top of MaixCDK, and most of MaixPy's APIs are automatically generated based on MaixCDK's APIs. Therefore, any functionality available in MaixPy is also included in MaixCDK.
+If you are more familiar with C/C++ programming or require higher performance, you can use MaixCDK for development.

+

Using MaixCDK

+

The MaixCDK code repository is located at github.com/sipeed/MaixCDK, where you can find the MaixCDK code and documentation.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/video/jpeg_streaming.html b/maixpy/doc/en/video/jpeg_streaming.html new file mode 100644 index 00000000..2bb44a86 --- /dev/null +++ b/maixpy/doc/en/video/jpeg_streaming.html @@ -0,0 +1,542 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Video Stream JPEG Streaming / Sending Images to Server - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Video Stream JPEG Streaming / Sending Images to Server

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
DateVersionAuthorUpdate content
2024-05-201.0.1lxowalle + + update JPEG-HTTP usage + +
2024-04-031.0.0neucrack + + Initial document + +
+
+
+ +
+
+ +

Introduction

+

Sometimes it is necessary to send images to a server, or to push video from a webcam to a server, so here are two ways to do it.

+
    +
  • One of the simplest methods is to compress images into JPEG format and send them one by one to the server. Note, this is a very basic method and not a formal way to stream video. It is also not suitable for high-resolution, high-frame-rate video streams, as it involves sending images one by one. For more efficient video streaming, please use the RTSP or RTMP modules discussed later.

    +
  • +
  • Set up an HTTP server, so that the PC side can be accessed directly through the browser.

    +
  • +
+

Methods for pushing streams as a client

+ +
from maix import image
+import requests
+
+# create image
+img = image.Image(640, 480, image.Format.FMT_RGB)
+# draw something
+img.draw_rect(60, 60, 80, 80, image.Color.from_rgb(255, 0, 0))
+
+# convert to jpeg
+jpeg = img.to_format(image.Format.FMT_JPEG) # image.Format.FMT_PNG
+# get jpeg bytes
+jpeg_bytes = jpeg.to_bytes()
+
+# faster way, borrow memory from jpeg object,
+# but be careful, when jpeg object is deleted, jpeg_bytes object MUST NOT be used, or program will crash
+# jpeg_bytes = jpeg.to_bytes(copy = False)
+
+# send image binary bytes to server
+url = "http://192.168.0.123:8080/upload"
+res = requests.post(url, data=jpeg_bytes)
+print(res.status_code)
+print(res.text)
+
+

As you can see, the image is first converted into JPEG format, and then the binary JPEG data is sent to the server via an HTTP POST request.

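+

If you need a matching receiver on the PC side for the example above, the following is a minimal sketch using only the Python standard library (the port 8080 and the /upload path are simply the hypothetical values used in the URL above); it saves each received JPEG to a file.

+ +
from http.server import BaseHTTPRequestHandler, HTTPServer
+
+class UploadHandler(BaseHTTPRequestHandler):
+    def do_POST(self):
+        # read the raw JPEG bytes posted by the device
+        length = int(self.headers.get("Content-Length", 0))
+        jpeg_bytes = self.rfile.read(length)
+        with open("received.jpg", "wb") as f:  # overwrite with the latest frame
+            f.write(jpeg_bytes)
+        self.send_response(200)
+        self.end_headers()
+        self.wfile.write(b"ok")
+
+HTTPServer(("0.0.0.0", 8080), UploadHandler).serve_forever()
+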
+

Methods for pushing streams as a server

+ +
from maix import camera, time, app, http
+
+html = """<!DOCTYPE html>
+<html>
+<head>
+    <title>JPG Stream</title>
+</head>
+<body>
+    <h1>MaixPy JPG Stream</h1>
+    <img src="/stream" alt="Stream">
+</body>
+</html>"""
+
+cam = camera.Camera(320, 240)
+stream = http.JpegStreamer()
+stream.set_html(html)
+stream.start()
+
+print("http://{}:{}".format(stream.host(), stream.port()))
+while not app.need_exit():
+    t = time.ticks_ms()
+    img = cam.read()
+    jpg = img.to_jpeg()
+    stream.write(jpg)
+    print(f"time: {time.ticks_ms() - t}ms, fps: {1000 / (time.ticks_ms() - t)}")
+
+

Steps:

+
    +
  1. Import the image, camera and http modules:

    + +
    from maix import image, camera, http
    +
    +
  2. +
  3. Initialize the camera:

    + +
    cam = camera.Camera(320, 240)
    +
    +
  4. +
  5. Initialize Stream Object

    + +
    stream = http.JpegStreamer()
    +stream.start()
    +
    +
      +
    • http.JpegStreamer() creates a JpegStreamer object, which sets up an http server used to publish the jpeg image stream to clients.
    • +
    • stream.start() is used to start the http server.
    • +
    +
  6. +
  7. Custom html styles (optional)

    + +
    html = """<!DOCTYPE html>
    +<html>
    +<head>
    +    <title>JPG Stream</title>
    +</head>
    +<body>
    +    <h1>MaixPy JPG Stream</h1>
    +    <img src="/stream" alt="Stream">
    +</body>
    +</html>"""
    +
    +stream.set_html(html)
    +
    +
      +
    • html = xxx is the html code used to customise the style of your web page. Note that the core line is <img src="/stream" alt="Stream">; be sure not to miss it.
    • +
    • stream.set_html(html) is used to set the custom html code; this step is optional. The default browsing address is http://device_ip:8000.
    • +
    +
  8. +
  9. Getting images from the camera and pushing streams

    + +
    while 1:
    +    img = cam.read()
    +    jpg = img.to_jpeg()
    +    stream.write(jpg)
    +
    +
      +
    • img = cam.read() gets an image from the camera; when initialised as cam = camera.Camera(320, 240), the img object is an RGB image with a resolution of 320x240.
    • +
    • jpg = img.to_jpeg() converts the image to jpeg format
    • +
    • stream.write(jpg) writes the jpeg image to the stream; the http server will then send it to connected http clients.
    • +
    +
  10. +
    1. +
    2. Done. After running the code above, you can view the video stream directly in your browser; the default address is http://device_ip:8000. Open your browser and take a look! You can also read the stream programmatically from a PC; see the sketch after this list.
    3. +
    +
  11. +
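
Besides the browser, you can also read the stream from a PC program, as mentioned in the last step. This is only a sketch and assumes your OpenCV build can open MJPEG-over-HTTP streams (builds with FFmpeg support usually can); the device IP below is a placeholder.

+ +
import cv2
+
+# open the MJPEG stream published by JpegStreamer (replace with your device's IP and port)
+cap = cv2.VideoCapture("http://192.168.0.123:8000/stream")
+while True:
+    ok, frame = cap.read()
+    if not ok:
+        break
+    cv2.imshow("MaixCAM stream", frame)
+    if cv2.waitKey(1) == ord('q'):
+        break
+cap.release()
+cv2.destroyAllWindows()
+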
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/video/play.html b/maixpy/doc/en/video/play.html new file mode 100644 index 00000000..4a0fdc31 --- /dev/null +++ b/maixpy/doc/en/video/play.html @@ -0,0 +1,478 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy Playback Video - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy Playback Video

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
DateVersionAuthorUpdate content
2024-08-191.0.0lxowalle + + Initial document + +
+
+
+ +
+
+ +

Introduction

+

This document provides instructions for using the Play Video feature.

+

MaixPy supports playing h264, mp4 and flv video formats; note that currently only AVC (H.264) encoded mp4 and flv files are supported.

+

Play MP4 video

+

An example of playing an mp4 video; the path to the video file is /root/output.mp4.

+ +
from maix import video, display, app
+
+disp = display.Display()
+d = video.Decoder('/root/output.mp4')
+print(f'resolution: {d.width()}x{d.height()} bitrate: {d.bitrate()} fps: {d.fps()}')
+d.seek(0)
+while not app.need_exit():
+    ctx = d.decode_video()
+    if not ctx:
+        d.seek(0)
+        continue
+
+    img = ctx.image()
+    disp.show(img)
+    print(f'need wait : {ctx.duration_us()} us')
+
+

Steps:

+
    +
  1. Import the modules and initialise the display

    + +
    from maix import video, display, app
    +disp = display.Display()
    +
    +
      +
    • disp = display.Display() is used to initialise the display to show the decoded image
    • +
    +
  2. +
  3. Initialise the Decoder module

    + +
    d = video.Decoder('/root/output.mp4')
    +
    +
      +
    • d = video.Decoder('/root/output.mp4') is used to initialise the decoder and set the path of the video file to play. If you need to play an flv file, pass a path with the .flv suffix, such as {your_file_path}.flv; if you need to play an h264 file, pass a path with the .h264 suffix, such as {your_file_path}.h264
    • +
    +
  4. +
  5. Set the decoding location

    + +
    d.seek(0)
    +
    +
      +
    • d.seek(0) is used to set the playback position of the video, in seconds.
    • +
    +
  6. +
  7. Get the decoded image

    + +
    ctx = d.decode_video()
    +img = ctx.image()
    +
    +
      +
    • Each call returns a frame context, and you can obtain img through ctx.image(). Currently the decoded output only supports the NV21 format.
    • +
    +
  8. +
  9. Display the decoded image

    + +
    disp.show(img)
    +
    +
      +
    • When displaying images, ctx.duration_us() can be used to get the duration of each frame in microseconds; see the pacing sketch after this list.
    • +
    +
  10. +
  11. Done, see API documentation for more usage of Decoder.

    +
  12. +
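
The pacing sketch mentioned in step 9: sleep for the remaining frame duration so playback runs at roughly the original frame rate. This is an assumption about how you might use ctx.duration_us(), not part of the original example, and it assumes maix.time provides sleep_ms (otherwise time.sleep(wait_ms / 1000) does the same).

+ +
from maix import video, display, app, time
+
+disp = display.Display()
+d = video.Decoder('/root/output.mp4')
+while not app.need_exit():
+    ctx = d.decode_video()
+    if not ctx:
+        break                            # stop at the end of the file instead of looping
+    t = time.ticks_ms()
+    disp.show(ctx.image())
+    elapsed_ms = time.ticks_ms() - t
+    wait_ms = ctx.duration_us() / 1000 - elapsed_ms
+    if wait_ms > 0:
+        time.sleep_ms(int(wait_ms))      # assumed helper; time.sleep(wait_ms / 1000) also works
+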
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/video/record.html b/maixpy/doc/en/video/record.html new file mode 100644 index 00000000..490a6ca2 --- /dev/null +++ b/maixpy/doc/en/video/record.html @@ -0,0 +1,533 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Video Record - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Video Record

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
DateVersionAuthorUpdate content
2024-05-201.0.0lxowalle + + Initial document + +
+
+
+ +
+
+ +

Introduction

+

This document provides instructions on how to use the video recording feature

+

Example 1

+

An example of recording a video in h265 format.

+ +
from maix import video, image, camera, app, time
+
+cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP)
+e = video.Encoder()
+f = open('/root/output.h265', 'wb')
+
+record_ms = 2000
+start_ms = time.ticks_ms()
+while not app.need_exit():
+    img = cam.read()
+    frame = e.encode(img)
+
+    print(frame.size())
+    f.write(frame.to_bytes())
+
+    if time.ticks_ms() - start_ms > record_ms:
+        app.set_exit_flag(True)
+
+

Steps:

+
    +
  1. import module and Initialize the camera

    + +
    from maix import video, image, camera, app, time
    +cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP)
    +
    +
      +
    • camera.Camera() is used to initialise the camera; here the resolution is set to 640x480. Currently the Encoder only supports the NV21 format, so the image format is set to image.Format.FMT_YVU420SP.
    • +
    +
  2. +
  3. Initialise the Encoder module

    + +
    e = video.Encoder()
    +
    +
      +
    • The video.Encoder() module currently only accepts images in image.Format.FMT_YVU420SP format; it supports h265 and h264 encoding and defaults to h265. If you want to use h264 encoding, change the initialisation parameter to video.Encoder(type=video.VideoType.VIDEO_H264_CBR).
    • +
    • Note that only one encoder can exist at the same time
    • +
    +
  4. +
  5. Encoding the camera image

    + +
    img = cam.read()
    +frame = e.encode(img)
    +
    +
      +
    • img = cam.read() read camera image and save to img
    • +
    • frame = e.encode(img) encode img and save result to frame
    • +
    +
  6. +
  7. Save the encoded result to file

    + +
    f = open('/root/output.h265', 'wb')
    +f.write(frame.to_bytes(False))
    +
    +
      +
    • f = open(xxx) opens and creates a file
    • +
    • f.write(frame.to_bytes(False)) converts the encoding result frame to type bytes and then calls f.write() to write the data to the file
    • +
    +
  8. +
  9. Timed 2s exit

    + +
    record_ms = 2000
    +start_ms = time.ticks_ms()
    +while not app.need_exit():
    +    if time.ticks_ms() - start_ms > record_ms:
    +        app.set_exit_flag(True)
    +
    +
      +
    • This is the application logic for the timed exit; see the code above for details.
    • +
    +
  10. +
  11. Done

    +
  12. +
+

Example 2

+

An example of recording a video in h265 format.

+ +
from maix import video, time, image, camera, app
+
+cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP)
+e = video.Encoder(capture = True)
+e.bind_camera(cam)
+
+f = open('/root/output.h265', 'wb')
+
+record_ms = 2000
+start_ms = time.ticks_ms()
+while not app.need_exit():
+    frame = e.encode()
+    img = e.capture()
+
+    print(frame.size())
+    f.write(frame.to_bytes(True))
+
+    if time.ticks_ms() - start_ms > record_ms:
+        app.set_exit_flag(True)
+
+

Similar to example 1, except that the Encoder object's bind_camera method is called and the Encoder fetches the camera image itself, which takes advantage of hardware features to increase encoding speed.

+ +
e = video.Encoder(capture = True)
+e.bind_camera(cam)
+frame = e.encode()
+img = e.capture()
+
+
    +
  • e = video.Encoder(capture = True) enables the capture parameter so that the encoded input image can be retrieved during encoding
  • +
  • e.bind_camera(cam) binds the camera to the Encoder object
  • +
  • frame = e.encode() encodes without passing in img; the image is fetched from the bound camera internally
  • +
  • img = e.capture() captures the encoded image from the Encoder object, which can be used for image processing
  • +
+

Convert to MP4 format

+

If you want to record video in mp4 format, you can record H265 video first, and then use the ffmpeg tool in the system to convert to mp4 format.

+ +
import os
+
+# Pack h265 to mp4
+# /root/output.h265 is the h265 file path
+# /root/output.mp4 is the mp4 file path
+os.system('ffmpeg -loglevel quiet -i /root/output.h265 -c:v copy -c:a copy /root/output.mp4 -y')
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/video/rtmp_streaming.html b/maixpy/doc/en/video/rtmp_streaming.html new file mode 100644 index 00000000..20a1e311 --- /dev/null +++ b/maixpy/doc/en/video/rtmp_streaming.html @@ -0,0 +1,517 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Video Streaming RTMP Push Streaming - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Video Streaming RTMP Push Streaming

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
DateVersionAuthorUpdate content
2024-05-311.0.0lxowalle + + initial document + +
+
+
+ +
+
+ +

Introduction

+

This document provides methods for pushing H264 video streams via RTMP

+

How to use

+

The following example shows pushing an h264 video stream to rtmp://192.168.0.30:1935/live/stream

+ +
from maix import camera, time, rtmp, image
+
+cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP)
+
+# rtmp://192.168.0.30:1935/live/stream
+host = '192.168.0.30'
+port = 1935
+app = 'live'
+stream = 'stream'
+bitrate = 1000_000
+r = rtmp.Rtmp(host, port, app, stream, bitrate)
+r.bind_camera(cam)
+r.start()
+
+while True:
+    time.sleep(1)
+
+

Steps:

+
    +
  1. Import the camera, rtmp, time and image modules:

    + +
    from maix import camera, time, rtmp, image
    +
    +
  2. +
  3. Initialize the camera:

    + +
    cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP) # Initialise camera, output resolution 640x480 NV21 format
    +
    +
      +
    • Note that the RTMP module currently only supports the NV21 format, so the camera needs to be configured to output in NV21 format.
    • +
    +
  4. +
  5. Initialise and start the Rtmp object

    + +
    r = rtmp.Rtmp(host, port, app, stream, bitrate)
    +r.bind_camera(cam)
    +r.start()
    +
    +
      +
    • r = rtmp.Rtmp(host, port, app, stream, bitrate) is used to create an Rtmp object, where host refers to the ip address or domain of the rtmp server, app refers to the name of the application that is open to the rtmp server, and stream refers to the name of the rtmp stream, which can also be used as the key for pushing the stream
    • +
    • r.bind_camera(cam) is used to bind a Camera object; the original Camera object cannot be used after binding.
    • +
    • r.start() is used to start the rtmp stream.
    • +
    +
  6. +
  7. Done

    +
  8. +
+

Push streaming test to Bilibili

+

Launch bilibili live stream

+
    +
  1. Click on Live Streaming

    +

    +
  2. +
  3. Click on Live Streaming Settings

    +
  4. +
+

+
    +
  1. Find the live streaming address
  2. +
+

+
    +
  1. Scroll down, select a category, and click Start Live!
  2. +
+

+
    +
  1. Get the push stream address
  2. +
+

+
    +
  • server address: rtmp://live-push.bilivideo.com/live-bvc
  • +
  • key:?streamname=live_xxxx&key=1fbfxxxxxxxxxxxxxffe0&schedule=rtmp&pflag=1
  • +
+

Push stream address: rtmp://live-push.bilivideo.com/live-bvc/?streamname=live_xxxx&key=1fbfxxxxxxxxxxxxxffe0&schedule=rtmp&pflag=1

+

Run the RTMP client

+ +
from maix import camera, time, rtmp, image
+
+cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP)
+
+# rtmp://live-push.bilivideo.com/live-bvc/?streamname=live_xxxx&key=1fbfxxxxxxxxxxxxxffe0&schedule=rtmp&pflag=1
+host = 'live-push.bilivideo.com'
+port = 1935
+app = 'live-bvc'
+stream = '?streamname=live_xxxx&key=1fbfxxxxxxxxxxxxxffe0&schedule=rtmp&pflag=1'
+bitrate = 1000_000
+r = rtmp.Rtmp(host, port, app, stream, bitrate)
+r.bind_camera(cam)
+r.start()
+
+while True:
+    time.sleep(1)
+
+

Above, we obtained Bilibili's push stream address: rtmp://live-push.bilivideo.com/live-bvc/?streamname=live_xxxx&key=1fbfxxxxxxxxxxxxxffe0&schedule=rtmp&pflag=1

+

It can be broken down as follows (see the parsing sketch after this list):

+
    +
  1. server address is live-push.bilivideo.com
  2. +
  3. port is 1935, if there is no port number, the default is 1935
  4. +
  5. application name is live-bvc
  6. +
  7. stream name is ?streamname=live_xxxx&key=1fbfxxxxxxxxxxxxxffe0&schedule=rtmp&pflag=1
  8. +
+
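
If you prefer not to split the address by hand, the parsing sketch below breaks a push URL into the host, port, app and stream values expected by rtmp.Rtmp, using only the Python standard library (the URL is the placeholder one from above).

+ +
from urllib.parse import urlparse
+
+url = "rtmp://live-push.bilivideo.com/live-bvc/?streamname=live_xxxx&key=1fbfxxxxxxxxxxxxxffe0&schedule=rtmp&pflag=1"
+parsed = urlparse(url)
+host = parsed.hostname                       # live-push.bilivideo.com
+port = parsed.port if parsed.port else 1935  # default RTMP port
+app = parsed.path.strip("/")                 # live-bvc
+stream = "?" + parsed.query                  # ?streamname=...&key=...&schedule=rtmp&pflag=1
+print(host, port, app, stream)
+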

Run the code and you will be able to see the MaixCAM camera feed in the live stream. If the live stream does not show up, try closing the live stream first, then reopen it and run the code again.

+

Give it a try!

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/video/rtsp_streaming.html b/maixpy/doc/en/video/rtsp_streaming.html new file mode 100644 index 00000000..8b76eb64 --- /dev/null +++ b/maixpy/doc/en/video/rtsp_streaming.html @@ -0,0 +1,464 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Video Streaming RTSP Push Streaming - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Video Streaming RTSP Push Streaming

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
DateVersionAuthorUpdate content
2024-05-201.0.0lxowalle + + Initial documentation + +
+
+
+ +
+
+ +

Introduction

+

This document describes how to stream camera images via RTSP.

+

How to use

+ +
from maix import time, rtsp, camera, image
+
+server = rtsp.Rtsp()
+cam = camera.Camera(2560, 1440, image.Format.FMT_YVU420SP)
+server.bind_camera(cam)
+server.start()
+
+print(server.get_url())
+
+while True:
+    time.sleep(1)
+
+

Steps:

+
    +
  1. Import the time, rtsp, camera and image modules:

    + +
    from maix import time, rtsp, camera, image
    +
    +
  2. +
  3. Initialize the camera:

    + +
    cam = camera.Camera(2560, 1440, image.Format.FMT_YVU420SP) # Initialise camera, output resolution 2560x1440 NV21 format
    +
    +
      +
    • Note that the RTSP module currently only supports the NV21 format, so the camera needs to be configured to output in NV21 format.
    • +
    +
  4. +
  5. Initialise and start the Rtsp object

    + +
    server = rtsp.Rtsp()
    +server.bind_camera(cam)
    +server.start()
    +
    +
      +
    • server = rtsp.Rtsp() used to create an Rtsp object
    • +
    • server.bind_camera(cam) is used to bind a Camera object, after which the original Camera object can no longer be used.
    • +
    • server.start() is used to start the rtsp push stream.
    • +
    +
  6. +
  7. Print the URL of the current RTSP stream

    +

    print(server.get_url())

    +
      +
    • server.get_url() is used to get the playback address of RTSP.
    • +
    +
  8. +
  9. Finished. After running the above code, you can play the video stream with VLC software (tested with VLC 3.0.20); the default playback address is rtsp://device_ip:8554/live. You can also open the stream programmatically; see the sketch after this list.

    +
  10. +
+
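
The sketch mentioned in step 9: besides VLC, you can open the stream programmatically on a PC. This assumes an OpenCV build with RTSP/FFmpeg support, and the device IP below is a placeholder.

+ +
import cv2
+
+# open the RTSP stream published by the device (replace with your device's IP)
+cap = cv2.VideoCapture("rtsp://192.168.0.123:8554/live")
+while True:
+    ok, frame = cap.read()
+    if not ok:
+        break
+    cv2.imshow("MaixCAM RTSP", frame)
+    if cv2.waitKey(1) == ord('q'):
+        break
+cap.release()
+cv2.destroyAllWindows()
+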

OSD

+

Drawing lines and frames via OSD

+

TODO

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/ai.html b/maixpy/doc/en/vision/ai.html new file mode 100644 index 00000000..b39922bf --- /dev/null +++ b/maixpy/doc/en/vision/ai.html @@ -0,0 +1,417 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Basic Knowledge of AI Vision - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Basic Knowledge of AI Vision

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
DateVersionAuthorUpdate content
2024-04-031.0.0neucrack + + Initial documentation + +
+
+
+ +
+
+ +

Introduction

+

If you don't have an AI background, you can first read What is Artificial Intelligence (AI) and Machine Learning to understand the basic concepts of AI before learning about AI.

+

Then, the visual AI we use is generally based on the deep neural network learning method. If you are interested, you can check out Deep Neural Network (DNN) Basics.

+

Using Visual AI in MaixPy

+

Using visual AI in MaixPy is very simple. By default, commonly used AI models are provided, and you can use them directly without having to train the models yourself. You can find the maixcam models in the MaixHub Model Library.

+

Additionally, the underlying APIs have been well-encapsulated, and you only need to make simple calls to implement them.

+

If you want to train your own model, you can start with MaixHub Online Training. On the online platform, you can train models just by clicking, without the need to purchase expensive machines, set up complex development environments, or write code, making it very suitable for beginners and also for experienced users who are too lazy to read code.

+

Generally, once you have obtained the model file, you can transfer it to the device and call the MaixPy API to use it. The specific calling methods are discussed in the following sections.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/apriltag.html b/maixpy/doc/en/vision/apriltag.html new file mode 100644 index 00000000..17aaf5d1 --- /dev/null +++ b/maixpy/doc/en/vision/apriltag.html @@ -0,0 +1,531 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Apriltag Recognition - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Apriltag Recognition

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
DateVersionAuthorUpdate content
2024-04-031.0.0lxowalle + + Initial documentation + +
+
+
+ +
+
+ +

Before reading this article, make sure you are familiar with how to develop with MaixCAM. For more details, please read Quick Start.

+

Introduction

+

This article introduces how to use MaixPy to recognize Apriltag labels.

+

Using MaixPy to Recognize Apriltag Labels

+

MaixPy's maix.image.Image provides the find_apriltags method, which can be used to recognize Apriltag labels.

+

How to Recognize Apriltag Labels

+

A simple example of recognizing Apriltag labels and drawing bounding boxes:

+ +
from maix import image, camera, display
+
+cam = camera.Camera()
+disp = display.Display()
+
+families = image.ApriltagFamilies.TAG36H11
+x_scale = cam.width() / 160
+y_scale = cam.height() / 120
+
+while 1:
+    img = cam.read()
+
+    new_img = img.resize(160, 120)
+    apriltags = new_img.find_apriltags(families = families)
+    for a in apriltags:
+        corners = a.corners()
+
+        for i in range(4):
+            corners[i][0] = int(corners[i][0] * x_scale)
+            corners[i][1] = int(corners[i][1] * y_scale)
+        x = int(a.x() * x_scale)
+        y = int(a.y() * y_scale)
+        w = int(a.w() * x_scale)
+        h = int(a.h() * y_scale)
+
+        for i in range(4):
+            img.draw_line(corners[i][0], corners[i][1], corners[(i + 1) % 4][0], corners[(i + 1) % 4][1], image.COLOR_RED)
+        img.draw_string(x + w, y, "id: " + str(a.id()), image.COLOR_RED)
+        img.draw_string(x + w, y + 15, "family: " + str(a.family()), image.COLOR_RED)
+
+    disp.show(img)
+
+

Steps:

+
    +
  1. Import the image, camera, and display modules

    + +
    from maix import image, camera, display
    +
    +
  2. +
  3. Initialize the camera and display

    + +
    cam = camera.Camera()
    +disp = display.Display()
    +
    +
  4. +
  5. Get the image from the camera and display it

    + +
    while 1:
    +    img = cam.read()
    +    disp.show(img)
    +
    +
  6. +
  7. Call the find_apriltags method to recognize Apriltag labels in the camera image

    + +
    new_img = img.resize(160, 120)
    +apriltags = new_img.find_apriltags(families = families)
    +
    +
      +
    • img is the camera image obtained through cam.read()
    • +
    • img.resize(160, 120) is used to scale down the image to a smaller size, allowing the algorithm to compute faster with a smaller image
    • +
    • new_img.find_apriltags(families = families) is used to find Apriltag labels, and the query results are saved in apriltags for further processing. The families parameter is used to select the Apriltag family, defaulting to image.ApriltagFamilies.TAG36H11
    • +
    +
  8. +
  9. Process the recognized label results and display them on the screen

    + +
    for a in apriltags:
    +    # Get position information (and map coordinates to the original image)
    +    x = int(a.x() * x_scale)
    +    y = int(a.y() * y_scale)
    +    w = int(a.w() * x_scale)
    +    corners = a.corners()
    +    for i in range(4):
    +        corners[i][0] = int(corners[i][0] * x_scale)
    +        corners[i][1] = int(corners[i][1] * y_scale)
    +
    +    # Display
    +    for i in range(4):
    +        img.draw_line(corners[i][0], corners[i][1], corners[(i + 1) % 4][0], corners[(i + 1) % 4][1], image.COLOR_RED)
    +        img.draw_string(x + w, y, "id: " + str(a.id()), image.COLOR_RED)
    +        img.draw_string(x + w, y + 15, "family: " + str(a.family()), image.COLOR_RED)
    +        img.draw_string(x + w, y + 30, "rotation : " + str(180 * a.rotation() // 3.1415), image.COLOR_RED)
    +
    +
      +
    • Iterate through the members of apriltags, which is the result of scanning for Apriltag labels with new_img.find_apriltags(). If no labels are found, apriltags will be empty.
    • +
    • x_scale and y_scale are used to map coordinates. Since new_img is a scaled-down image, the coordinates of the Apriltag need to be mapped to be drawn correctly on the original image img.
    • +
    • a.corners() is used to get the coordinates of the four vertices of the detected label, and img.draw_line() uses these four vertex coordinates to draw the shape of the label.
    • +
    • img.draw_string is used to display the label content, where a.x() and a.y() are used to get the x and y coordinates of the top-left corner of the label, a.id() is used to get the label ID, a.family() is used to get the label family type, and a.rotation() is used to get the rotation angle of the label.
    • +
    +
  10. +
+

Common Parameter Explanations

+

Here are explanations for common parameters. If you can't find parameters to implement your application, you may need to consider using other algorithms or extending the required functionality based on the current algorithm's results.

+ + + + + + + + + + + + + + + + + + + + +
ParameterDescriptionExample
roiSet the rectangular region for the algorithm to compute. roi=[x, y, w, h], where x and y represent the coordinates of the top-left corner of the rectangle, and w and h represent the width and height of the rectangle. The default is the entire image.Compute the region with coordinates (50,50) and a width and height of 100:
img.find_apriltags(roi=[50, 50, 100, 100])
familiesApriltag label family typeScan for labels from the TAG36H11 family:
img.find_apriltags(families = image.ApriltagFamilies.TAG36H11)
+

This article introduces common methods. For more API information, please refer to the image section of the API documentation.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/body_key_points.html b/maixpy/doc/en/vision/body_key_points.html new file mode 100644 index 00000000..47681fa7 --- /dev/null +++ b/maixpy/doc/en/vision/body_key_points.html @@ -0,0 +1,429 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Human Pose Keypoint Detection - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Human Pose Keypoint Detection

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

Using MaixPy, you can easily detect the coordinates of keypoints on human joints, which can be used for posture detection, such as monitoring sitting posture or providing input for motion-based games.

+

MaixPy implements human pose detection based on YOLOv8-Pose / YOLO11-Pose, capable of detecting 17 keypoints on the human body.

+

+

Usage

+

You can easily implement this using the maix.nn.YOLOv8 or maix.nn.YOLO11 classes in MaixPy:

+ +
from maix import camera, display, image, nn, app
+
+detector = nn.YOLOv8(model="/root/models/yolov8n_pose.mud", dual_buff=True)
+# detector = nn.YOLO11(model="/root/models/yolo11n_pose.mud", dual_buff=True)
+
+cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
+dis = display.Display()
+
+while not app.need_exit():
+    img = cam.read()
+    objs = detector.detect(img, conf_th=0.5, iou_th=0.45, keypoint_th=0.5)
+    for obj in objs:
+        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
+        msg = f'{detector.labels[obj.class_id]}: {obj.score:.2f}'
+        img.draw_string(obj.x, obj.y, msg, color=image.COLOR_RED)
+        detector.draw_pose(img, obj.points, 8 if detector.input_width() > 480 else 4, image.COLOR_RED)
+    dis.show(img)
+
+

You can also find the code in the MaixPy/examples/vision directory.

+

Since YOLOv8-Pose is used here, the YOLOv8 class is also used, with the only difference being the model file compared to YOLOv8 object detection. The same applies to YOLO11. The detect function returns an additional points value, which is a list of int containing 17 keypoints. The points are arranged in order; for example, the first value is the x-coordinate of the nose, the second value is the y-coordinate of the nose, and so on:

+ +
1. Nose
+2. Left Eye
+3. Right Eye
+4. Left Ear
+5. Right Ear
+6. Left Shoulder
+7. Right Shoulder
+8. Left Elbow
+9. Right Elbow
+10. Left Wrist
+11. Right Wrist
+12. Left Hip
+13. Right Hip
+14. Left Knee
+15. Right Knee
+16. Left Ankle
+17. Right Ankle
+
+

If any of these parts are occluded, the value will be -1.

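+

For example, to pick out individual keypoints from obj.points, you can index the flat x, y list described above. This fragment is only a sketch and is meant to go inside the for obj in objs: loop of the example code; the index constants are just illustrative names.

+ +
# indices into the 17-keypoint list described above (0-based)
+NOSE = 0
+LEFT_WRIST = 9
+nose_x, nose_y = obj.points[NOSE * 2], obj.points[NOSE * 2 + 1]
+wrist_x, wrist_y = obj.points[LEFT_WRIST * 2], obj.points[LEFT_WRIST * 2 + 1]
+if nose_x >= 0:  # -1 means the keypoint is occluded / not detected
+    img.draw_rect(nose_x - 2, nose_y - 2, 4, 4, color=image.COLOR_RED)
+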
+

Models with More Resolutions

+

The default model input resolution is 320x224. If you want to use models with higher resolution, you can download and transfer them from the MaixHub model library:

+ +

Higher resolution generally provides better accuracy but at the cost of lower processing speed. Choose the model based on your application needs. If the provided resolution does not meet your requirements, you can train your own model using the source code from YOLOv8-Pose / YOLO11-Pose and export your own ONNX model, then convert it to a format supported by MaixCAM (methods are covered in later articles).

+

dual_buff for Double Buffering Acceleration

+

You may notice that dual_buff is used for model initialization (default value is True). Enabling the dual_buff parameter can improve efficiency and increase the frame rate. For more details and considerations, refer to the dual_buff Introduction.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/camera.html b/maixpy/doc/en/vision/camera.html new file mode 100644 index 00000000..f57d4f4e --- /dev/null +++ b/maixpy/doc/en/vision/camera.html @@ -0,0 +1,560 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Camera Usage - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Camera Usage

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
DateVersionAuthorUpdate content
2024-04-031.0.0neucrack + + Initial documentation + +
+
+
+ +
+
+ +

Introduction

+

The MaixCAM comes with a pre-installed GC4653 camera; an OS04A10 camera, a global shutter camera, and even an HDMI-to-MIPI module are available as options, and all of them can be used directly with simple API calls.

+

API Documentation

+

This article introduces common methods. For more API usage, refer to the documentation of the maix.camera module.

+

Camera Switching

+

Currently supported cameras:

+
    +
  • GC4653: M12 universal lens, 1/3" sensor, clear image quality, 4MP.
  • +
  • OS04A10: M12 universal lens, 1/1.8" large sensor, ultra-clear image quality, 4MP.
  • +
  • OV2685: Does not support lens replacement, lowest image quality, and lowest cost; generally not recommended for use.
  • +
  • SC035HGS: Monochrome global shutter camera, 0.3MP black-and-white, suitable for capturing high-speed objects.
  • +
+

The system will automatically switch; simply replace the hardware to use.

+

Getting Images from the Camera

+

Using MaixPy to easily get images:

+ +
from maix import camera
+
+cam = camera.Camera(640, 480)
+
+while 1:
+    img = cam.read()
+    print(img)
+
+

Here we import the camera module from the maix module, then create a Camera object, specifying the width and height of the image. Then, in a loop, we continuously read the images. The default output is in RGB format. If you need BGR format or other formats, please refer to the API documentation.

+ +
from maix import camera, image
+cam = camera.Camera(640, 480, image.Format.FMT_GRAYSCALE) # Set the output greyscale image
+
+

Also get the NV21 image

+ +
from maix import camera, image
+cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP) # set to output NV21 image
+
+

Note: You need to disable MaixVision's online browsing function if you set a very high resolution (e.g. 2560x1440), otherwise the code may run abnormally due to lack of memory.

+

Setting the frame rate of the camera

+

Currently the camera supports 30fps, 60fps and 80fps configurations. The frame rate is selected via the width, height and fps parameters passed when creating the Camera object. The maximum supported resolution is 1280x720 at 60/80fps, and 2560x1440 at 30fps.

+

Setting the frame rate to 30 fps

+ +
from maix import camera
+cam = camera.Camera(640, 480, fps=30) # set the frame rate to 30 fps
+# or
+cam = camera.Camera(1920, 1280) # Frame rate is set to 30 fps when resolution is higher than 1280x720
+
+

Set the frame rate to 60 fps

+ +
from maix import camera
+cam = camera.Camera(640, 480, fps=60) # Set frame rate to 60 fps
+# or
+cam = camera.Camera(640, 480) # Set frame rate to 60fps if resolution is less than or equal to 1280x720
+
+

Set the frame rate to 80 fps

+ +
from maix import camera
+cam = camera.Camera(640, 480, fps=80) # Set frame rate to 80 fps
+
+

Notes:

+
    +
  1. If Camera is passed a size larger than 1280x720, for example camera.Camera(1920, 1080, fps=60), the fps parameter will be ignored and the frame rate will remain at 30fps.
  2. +
  3. A 60/80fps frame will be offset by a few pixels compared to a 30fps frame, and the offset will need to be corrected if the viewing angle is critical.
  4. +
  3. Because 60/80fps and 30fps share the same ISP configuration, the image quality may differ slightly between the two frame rates in some environments.
  6. +
  7. The camera's performance depends on the system. Some systems may not support setting the camera to 80fps, which can result in strange patterns appearing on the screen. In such cases, please switch back to the normal 60fps setting.
  8. +
+

Image correction

+

In case of distortion such as fisheye, you can use the lens_corr function under the Image object to correct the distortion of the image. In general, you just need to increase or decrease the value of strength to adjust the image to the right effect.

+

+ +
from maix import camera, display, app, time
+
+cam = camera.Camera(320, 240)
+disp = display.Display()
+while not app.need_exit():
+    img = cam.read()
+    img = img.lens_corr(strength=1.5) # Adjust the strength value until the image is no longer distorted
+    disp.show(img)
+

+

Note that since the correction is done through software, it takes some time. Alternatively, you can use a distortion-free lens (inquire with the vendor) to solve the issue from a hardware perspective.

+

Skipping Initial Frames

+

During the brief initialization period of the camera, the image acquisition may not be stable, resulting in strange images. You can use the skip_frames function to skip the initial few frames:

+ +
cam = camera.Camera(640, 480)
+cam.skip_frames(30)           # Skip the first 30 frames
+
+

Displaying Images

+

MaixPy provides the display module, which can conveniently display images:

+ +
from maix import camera, display
+
+cam = camera.Camera(640, 480)
+disp = display.Display()
+
+while 1:
+    img = cam.read()
+    disp.show(img)
+
+

Setting the camera parameters

+

Set exposure time

+

Note that after setting the exposure time, the camera will switch to manual exposure mode, if you want to switch back to automatic exposure mode you need to run cam.exp_mode(0).

+ +
cam = camera.Camera()
+cam.exposure(1000)
+
+

Setting the gain

+

Note that after setting the gain, the camera will switch to manual exposure mode, to switch back to auto exposure mode you need to run cam.exp_mode(0). Customised gain values will only work in manual exposure mode.

+ +
cam = camera.Camera()
+cam.gain(100)
+
+

Setting the white balance

+ +
cam = camera.Camera()
+cam.awb_mode(1)     # 0: turn on auto white balance; 1: turn off auto white balance
+
+

Setting brightness, contrast and saturation

+ +
cam = camera.Camera()
+cam.luma(50) # Set brightness, range [0, 100]
+cam.constrast(50) # set contrast, range [0, 100]
+cam.saturation(50) # Set the saturation, range [0, 100].
+
+

Using a USB Camera

+

In addition to using the MIPI interface camera that comes with the development board, you can also use an external USB camera.
+Method:

+
    +
  • First, in the development board settings, select USB Mode under USB Settings and set it to HOST mode. If there is no screen available, you can use the examples/tools/maixcam_switch_usb_mode.py script to set it.
  • +
  • Currently (as of 2024.10.24), the maix.camera module does not yet support USB cameras, but you can use OpenCV for this.
  • +
+ +
from maix import image, display
+import cv2
+import sys
+
+cap = cv2.VideoCapture(0)
+cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640)
+cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 480)
+# cap.set(cv2.CAP_PROP_CONVERT_RGB, 0)
+
+disp = display.Display()
+
+if not cap.isOpened():
+    print("Unable to open camera")
+    sys.exit(1)
+print("Starting to read")
+while True:
+    ret, frame = cap.read()
+    if not ret:
+        print("Unable to read frame")
+        break
+    img = image.cv2image(frame, bgr=True, copy=False)
+    disp.show(img)
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/classify.html b/maixpy/doc/en/vision/classify.html new file mode 100644 index 00000000..447af719 --- /dev/null +++ b/maixpy/doc/en/vision/classify.html @@ -0,0 +1,406 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Using AI Models for Object Classification in MaixCAM MaixPy - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Using AI Models for Object Classification in MaixCAM MaixPy

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Object Classification Concept

+

For example, if there are two images in front of you, one with an apple and the other with an airplane, the task of object classification is to input these two images into an AI model one by one. The model will then output two results, one for apple and one for airplane.

+

Using Object Classification in MaixPy

+

MaixPy provides a pre-trained 1000 classification model based on the imagenet dataset, which can be used directly:

+ +
from maix import camera, display, image, nn
+
+classifier = nn.Classifier(model="/root/models/mobilenetv2.mud", dual_buff = True)
+cam = camera.Camera(classifier.input_width(), classifier.input_height(), classifier.input_format())
+dis = display.Display()
+
+while 1:
+    img = cam.read()
+    res = classifier.classify(img)
+    max_idx, max_prob = res[0]
+    msg = f"{max_prob:5.2f}: {classifier.labels[max_idx]}"
+    img.draw_string(10, 10, msg, image.COLOR_RED)
+    dis.show(img)
+
+

Result video:

+ +

Here, the camera captures an image, which is then passed to the classifier for recognition. The result is displayed on the screen.

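+

The classify() result is a list of (class index, probability) pairs, of which the example uses only the first (highest-probability) entry. As a small sketch, you can also print the top 5 predictions right after res = classifier.classify(img):

+ +
# print the 5 most likely classes (goes after res = classifier.classify(img) in the example above)
+for idx, prob in res[:5]:
+    print(f"{classifier.labels[idx]}: {prob:.3f}")
+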
+

For more API usage, refer to the documentation for the maix.nn module.

+

dual_buff Dual Buffer Acceleration

+

You may have noticed that the model initialization uses dual_buff (which defaults to True). Enabling the dual_buff parameter can improve running efficiency and increase the frame rate. For detailed principles and usage notes, see dual_buff Introduction.

+

Training Your Own Classification Model on MaixHub

+

If you want to train a classification model for specific images, visit MaixHub to learn and train the model. When creating a project, select "Classification Model", then simply upload your images to train. There's no need to set up a training environment or spend money on expensive GPUs—training can be done quickly with one click.

+

Offline Training for Your Own Classification Model

+

For offline training, you need to set up your environment. Search for keywords such as PyTorch classification model training or Mobilenet for guidance.
+After training the model, export it in ONNX format, then refer to the MaixCAM Model Conversion Documentation to convert it into a model format supported by MaixCAM. Finally, use the nn.Classifier class mentioned above to load the model.

+

The classification model can be Mobilenet or another model like ResNet. During model conversion, it's best to extract the layer just before softmax as the final output layer because the classifier.classify(img, softmax=True) function has softmax enabled by default—this means the function will perform a softmax calculation on the results. Therefore, the model itself doesn't need a softmax layer. However, if the model does include a softmax layer, you can specify not to execute it again by using: classifier.classify(img, softmax=False).

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/custmize_model.html b/maixpy/doc/en/vision/custmize_model.html new file mode 100644 index 00000000..d4e6d5d3 --- /dev/null +++ b/maixpy/doc/en/vision/custmize_model.html @@ -0,0 +1,368 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Please refer to MaixCAM Model Conversion, and find the model documentation you need to convert in the left directory, such as Custom YOLOv5 Model.

+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/customize_model_yolov5.html b/maixpy/doc/en/vision/customize_model_yolov5.html new file mode 100644 index 00000000..4c147b97 --- /dev/null +++ b/maixpy/doc/en/vision/customize_model_yolov5.html @@ -0,0 +1,464 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Offline Training of YOLOv5 Model for Custom Object Detection with MaixCAM MaixPy - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Offline Training of YOLOv5 Model for Custom Object Detection with MaixCAM MaixPy

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
DateVersionAuthorUpdate content
2024-6-20v1.0neucrack + + Documentation written + +
+
+
+ +
+
+ +

Introduction

+

The default official model provides detection for 80 types of objects. If this does not meet your needs, you can train your own detection objects using two methods:

+
    +
  • Use MaixHub Online Training, which is convenient and fast, without needing to buy a server or set up an environment, just a few clicks of the mouse.
  • +
  • Set up a training environment on your own computer or server.
  • +
+

The former is simple and quick, while the latter uses your own computer with no limit on the number of training images, but it is considerably more difficult.

+

Note: This article explains how to customize training, but some basic knowledge is assumed. If you do not have this knowledge, please learn it yourself:

+
    +
  • This article will not explain how to install the training environment. Please search and install it yourself (Pytorch environment installation) and test it.
  • +
  • This article will not explain the basic concepts of machine learning or basic Linux usage knowledge.
  • +
+

If you think there is something in this article that needs improvement, feel free to click Edit this article in the upper right corner to contribute and submit a documentation PR.

+

Process and Goals of this Article

+

To use our model on MaixPy (MaixCAM), the following process is required:

+
    +
  • Set up the training environment (this is not covered in this article, please search for Pytorch training environment setup).
  • +
  • Pull the yolov5 source code to your local machine.
  • +
  • Prepare the dataset and format it as required by the yolov5 project.
  • +
  • Train the model to get an onnx model file, which is the final output file of this article.
  • +
  • Convert the onnx model to a MUD file supported by MaixPy, which is detailed in MaixCAM Model Conversion.
  • +
  • Use MaixPy to load and run the model.
  • +
+

Reference Articles

+

Since this is a relatively common operational process, this article only provides an overview. For specific details, you can refer to the YOLOv5 official code and documentation (recommended), and search for training tutorials to ultimately export the onnx file.

+

Here are some articles from the MaixHub community:

+ +

If you find any good articles, feel free to modify this article and submit a PR.

+

Exporting YOLOv5 ONNX Model File

+

YOLOv5 provides an export option. Execute the following command in the yolov5 directory:

+ +
python export.py --weights ../yolov5s.pt --include onnx --img 224 320
+
+

This command loads the pt parameter file and converts it to onnx, while also specifying the resolution. Note that the height comes first, followed by the width. The model was trained with 640x640, but we re-specified the resolution to improve the running speed. The resolution 320x224 is used because it is closer to the MaixCAM screen ratio for better display. You can set it according to your needs.

+

MaixCAM MUD File

+

When converting onnx to mud format model files, refer to MaixCAM Model Conversion. You will eventually get a mud file and a cvimodel file. The content of the mud file is:

+ +
[basic]
+type = cvimodel
+model = yolov8n.cvimodel
+
+[extra]
+model_type = yolov8
+input_type = rgb
+mean = 0, 0, 0
+scale = 0.00392156862745098, 0.00392156862745098, 0.00392156862745098
+labels = person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair drier, toothbrush
+
+

Replace the parameters according to the content of your training. For example, if you train to detect digits 0-9, then just replace labels=0,1,2,3,4,5,6,7,8,9, and then place the two files in the same directory and load the mud file when running the model.

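+

The following loading sketch is not an authoritative reference: it assumes the maix.nn.YOLOv5 class follows the same detect() interface as the YOLOv8 examples elsewhere in these docs, and the model path is a placeholder for wherever you copied your mud and cvimodel files.

+ +
from maix import camera, display, image, nn, app
+
+detector = nn.YOLOv5(model="/root/models/my_yolov5s.mud")  # path to your converted mud file
+cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
+dis = display.Display()
+
+while not app.need_exit():
+    img = cam.read()
+    objs = detector.detect(img, conf_th=0.5, iou_th=0.45)
+    for obj in objs:
+        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
+        img.draw_string(obj.x, obj.y, f"{detector.labels[obj.class_id]}: {obj.score:.2f}", color=image.COLOR_RED)
+    dis.show(img)
+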
+

Upload share on MaixHub

+

Upload and share your model in the MaixHub model zoo; it is best to provide several resolutions for others to choose from.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/customize_model_yolov8.html b/maixpy/doc/en/vision/customize_model_yolov8.html new file mode 100644 index 00000000..6a089214 --- /dev/null +++ b/maixpy/doc/en/vision/customize_model_yolov8.html @@ -0,0 +1,531 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Offline Training for YOLO11/YOLOv8 Models on MaixCAM MaixPy to Customize Object and Keypoint Detection - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Offline Training for YOLO11/YOLOv8 Models on MaixCAM MaixPy to Customize Object and Keypoint Detection

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
DateVersionAuthorUpdate content
2024-10-10v2.0neucrack + + Added YOLO11 support + +
2024-06-21v1.0neucrack + + Document creation + +
+
+
+ +
+
+ +

Introduction

+

The default official model provides detection for 80 different objects. If this doesn't meet your needs, you can train your own model to detect custom objects, which can be done on your own computer or server by setting up a training environment.

+

YOLOv8 / YOLO11 not only supports object detection but also supports keypoint detection with YOLOv8-pose / YOLO11-pose. Apart from the official human keypoints, you can also create your own keypoint dataset to train models for detecting specific objects and keypoints.

+

Since YOLOv8 and YOLO11 mainly modify the internal network while the preprocessing and post-processing remain the same, the training and conversion steps for YOLOv8 and YOLO11 are identical, except for the output node names.

+

Note: This article explains how to train a custom model but assumes some basic knowledge. If you do not have this background, please learn it independently:

+
    +
  • This article will not cover how to set up the training environment; please search for how to install and test a PyTorch environment.
  • +
  • This article will not cover basic machine learning concepts or Linux-related knowledge.
  • +
+

If you think there are parts of this article that need improvement, please click on Edit this article at the top right and submit a PR to contribute to the documentation.

+

Process and Article Goal

+

To ensure our model can be used on MaixPy (MaixCAM), it must go through the following steps:

+
    +
  • Set up the training environment (not covered in this article, please search for how to set up a PyTorch training environment).
  • +
  • Clone the YOLO11/YOLOv8 source code locally.
  • +
  • Prepare the dataset and format it according to the YOLO11 / YOLOv8 project requirements.
  • +
  • Train the model to obtain an onnx model file, which is the final output of this article.
  • +
  • Convert the onnx model into a MUD file supported by MaixPy, as described in the MaixCAM Model Conversion article.
  • +
  • Use MaixPy to load and run the model.
  • +
+

Reference Articles

+

Since this process is quite general, this article only provides an overview. For specific details, please refer to the YOLO11 / YOLOv8 official code and documentation (recommended) and search for training tutorials to eventually export an ONNX file.

+

If you come across good articles, feel free to edit this one and submit a PR.

+

Exporting YOLO11 / YOLOv8 ONNX Models

+

Create an export_onnx.py file in the ultralytics directory:

+ +
from ultralytics import YOLO
+import sys
+
+print(sys.path)
+net_name = sys.argv[1] # yolov8n.pt yolov8n-pose.pt # https://docs.ultralytics.com/models/yolov8/#supported-tasks-and-modes
+input_width = int(sys.argv[2])
+input_height = int(sys.argv[3])
+
+# Load a model
+model = YOLO(net_name)  # load an official model
+# model = YOLO("path/to/best.pt")  # load a custom model
+
+# Predict with the model
+results = model("https://ultralytics.com/images/bus.jpg")  # predict on an image
+path = model.export(format="onnx", imgsz=[input_height, input_width])  # export the model to ONNX format
+print(path)
+
+

Then run python export_onnx.py yolov8n.pt 320 224 to export the onnx model. Here, we have redefined the input resolution. The model was originally trained with 640x640, but we use 320x224 to improve the processing speed and match the MaixCAM's screen aspect ratio for convenient display. You can set the resolution according to your own needs.

+

Converting to a Model Supported by MaixCAM and MUD File

+

MaixPy/MaixCDK currently supports YOLOv8 / YOLO11 for object detection, YOLOv8-pose / YOLO11-pose for keypoint detection, and YOLOv8-seg / YOLO11-seg for segmentation (as of 2024-10-10).

+

Follow MaixCAM Model Conversion to convert the model.

+

Pay attention to the model output node selection:

+
    +
  • Object detection:
      +
    • YOLOv8 extracts /model.22/dfl/conv/Conv_output_0,/model.22/Sigmoid_output_0 from ONNX as outputs.
    • +
    • YOLO11 extracts /model.23/dfl/conv/Conv_output_0,/model.23/Sigmoid_output_0.
    • +
    +
  • +
  • Keypoint detection:
      +
    • YOLOv8-pose extracts /model.22/dfl/conv/Conv_output_0,/model.22/Sigmoid_output_0,/model.22/Concat_output_0 as outputs.
    • +
    • YOLO11-pose extracts /model.23/dfl/conv/Conv_output_0,/model.23/Sigmoid_output_0,/model.23/Concat_output_0.
    • +
    +
  • +
  • Image segmentation:
      +
    • YOLOv8-seg extracts /model.22/dfl/conv/Conv_output_0,/model.22/Sigmoid_output_0,/model.22/Concat_output_0,output1.
    • +
    • YOLO11-seg extracts /model.23/dfl/conv/Conv_output_0,/model.23/Sigmoid_output_0,/model.23/Concat_output_0,output1.
    • +
    +
  • +
+

+

For object detection, the MUD file would be as follows (replace yolo11 for YOLO11):

+ +
[basic]
+type = cvimodel
+model = yolov8n.cvimodel
+
+[extra]
+model_type = yolov8
+input_type = rgb
+mean = 0, 0, 0
+scale = 0.00392156862745098, 0.00392156862745098, 0.00392156862745098
+labels = person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair dryer, toothbrush
+
+

Replace labels according to the objects you trained.

+

For keypoint detection (yolov8-pose), the MUD file would be (replace yolo11 for YOLO11):

+ +
[basic]
+type = cvimodel
+model = yolov8n_pose.cvimodel
+
+[extra]
+model_type = yolov8
+type = pose
+input_type = rgb
+mean = 0, 0, 0
+scale = 0.00392156862745098, 0.00392156862745098, 0.00392156862745098
+labels = person
+
+

The default model is for human pose detection, so labels only contains person. Replace it according to your detected objects.

+

For image segmentation (yolov8-seg), the MUD file would be (replace yolo11 for YOLO11):

+ +
[basic]
+type = cvimodel
+model = yolo11n-seg_320x224_int8.cvimodel
+
+[extra]
+model_type = yolov8
+input_type = rgb
+type = seg
+mean = 0, 0, 0
+scale = 0.00392156862745098, 0.00392156862745098, 0.00392156862745098
+labels = person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair dryer, toothbrush
+
+

Upload and Share on MaixHub

+

Visit the MaixHub Model Library to upload and share your model. Consider providing multiple resolutions for others to choose from.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/display.html b/maixpy/doc/en/vision/display.html new file mode 100644 index 00000000..0a4197b3 --- /dev/null +++ b/maixpy/doc/en/vision/display.html @@ -0,0 +1,514 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Screen Usage - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Screen Usage

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
Date | Version | Author | Update content
2024-03-31 | 1.0.0 | neucrack | Initial document
+
+
+ +
+
+ +

Introduction

+

MaixPy provides the display module, which can display images on the screen, and can also send images to MaixVision for display, facilitating debugging and development.

+

API Documentation

+

This document introduces commonly used methods. For more APIs, please refer to the display section of the API documentation.

+

Using the Screen

+
    +
  • Import the display module:
  • +
+ +
from maix import display
+
+
    +
  • Create a Display object:
  • +
+ +
disp = display.Display()
+
+
    +
  • Display an image:
  • +
+ +
disp.show(img)
+
+

Here, the img object is a maix.image.Image object, which can be obtained through the read method of the camera module, or loaded from an image file in the file system using the load method of the image module, or created as a blank image using the Image class of the image module.

+

For example:

+ +
from maix import image, display
+
+disp = display.Display()
+img = image.load("/root/dog.jpg")
+disp.show(img)
+
+

Here, you need to transfer the dog.jpg file to the /root directory on the device first.

+

Display text:

+ +
from maix import image, display
+
+disp = display.Display()
+img = image.Image(320, 240)
+img.draw_rect(0, 0, disp.width(), disp.height(), color=image.Color.from_rgb(255, 0, 0), thickness=-1)
+img.draw_rect(10, 10, 100, 100, color=image.Color.from_rgb(255, 0, 0))
+img.draw_string(10, 10, "Hello MaixPy!", color=image.Color.from_rgb(255, 255, 255))
+disp.show(img)
+
+

Read an image from the camera and display it:

+ +
from maix import camera, display, app
+
+disp = display.Display()
+cam = camera.Camera(320, 240)
+while not app.need_exit():
+    img = cam.read()
+    disp.show(img)
+
+
+

Here, while not app.need_exit(): is used to facilitate exiting the loop when the app.set_exit_flag() method is called elsewhere.
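For example, another part of your program (a callback, a timer, or another thread) can ask the loop to stop. This is a minimal sketch, assuming set_exit_flag takes a boolean flag as in recent MaixPy releases:

from maix import app

# Ask the main while-loop above to stop after its current iteration.
app.set_exit_flag(True)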

+
+

Adjusting Backlight Brightness

+

You can manually adjust the backlight brightness in the system's "Settings" app. If you want to adjust the backlight brightness programmatically, you can use the set_backlight method, with the parameter being the brightness percentage, ranging from 0 to 100:

+ +
disp.set_backlight(50)
+
+

Note that when the program exits and returns to the app selection interface, the backlight brightness will automatically revert to the system setting.

+

Displaying on MaixVision

+

When running code in MaixVision, images can be displayed on MaixVision for easier debugging and development.

+

When calling the show method, the image will be automatically compressed and sent to MaixVision for display.

+

Of course, if you don't have a screen, or if you want to save memory by not initializing the screen, you can also directly call the send_to_maixvision function of the maix.display module to send the image to MaixVision for display.

+ +
from maix import image, display
+
+img = image.Image(320, 240)
+disp = display.Display()
+
+img.draw_rect(0, 0, img.width(), img.height(), color=image.Color.from_rgb(255, 0, 0), thickness=-1)
+img.draw_rect(10, 10, 100, 100, color=image.Color.from_rgb(255, 0, 0))
+img.draw_string(10, 10, "Hello MaixPy!", color=image.Color.from_rgb(255, 255, 255))
+display.send_to_maixvision(img)
+
+

Replacing with Other Screen Models

+

If you wish to switch to a screen of a different size, you can consult and purchase from the store.

+

For MaixCAM, the following four screen options are currently supported:

+
    +
  • 2.3-inch 552x368 resolution capacitive touch screen: The default screen that comes with MaixCAM.
  • +
  • 2.4-inch 640x480 resolution capacitive touch screen: The default screen that comes with MaixCAM-Pro.
  • +
  • 5-inch 854x480 resolution non-touch screen: Note that this is a non-touch screen, similar in size to a mobile phone screen.
  • +
  • 7-inch 1280x800 resolution capacitive touch screen: A large 7-inch screen, suitable for scenarios requiring a fixed screen display.
  • +
+

The image refresh time difference between different screens is about 1-5 milliseconds, which is not significant; the main difference lies in the image resolution, which affects image processing time.

+

When replacing the screen, you must also modify the configuration file; otherwise, mismatched refresh timing could cause screen burn-in (leaving a ghost image on the screen). It’s important to follow the steps strictly as outlined below. If screen burn-in occurs, don’t panic; powering off and leaving it overnight usually resolves the issue.

+
    +
  • Follow the system burning documentation to burn the system. Once completed, a USB drive will appear.
  • +
  • Open the USB drive, and you will see a uEnv.txt file.
  • +
  • Edit the uEnv.txt file, modifying the pannel key value as follows:
      +
    • 2.3-inch (MaixCAM default screen): st7701_hd228001c31.
    • +
    • 2.4-inch (MaixCAM-Pro default screen): st7701_lct024bsi20.
    • +
    • 5-inch: st7701_dxq5d0019_V0, with the earlier (2023) test screen being st7701_dxq5d0019b480854.
    • +
    • 7-inch: mtd700920b, with the earlier (2023) test screen being zct2133v1.
    • +
    +
  • +
  • Save the uEnv.txt file, and click to eject the USB drive—do not just disconnect the power, or the file may be lost.
  • +
  • Press the board's reset button, or power cycle to restart.
  • +
+

The above method is the safest, ensuring the screen model is set correctly before powering on. If you have already burned the system, you can also modify the system’s /boot/uEnv.txt file and then reboot.
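If the system is already running, a rough sketch of switching the entry from MaixPy itself is shown below. It assumes uEnv.txt uses the usual key=value lines and that the key is named pannel as described above; pick your screen's value from the list, and reboot afterwards for the change to take effect:

path = "/boot/uEnv.txt"
new_panel = "st7701_lct024bsi20"  # example: the 2.4-inch MaixCAM-Pro screen; choose your value from the list above

with open(path) as f:
    lines = f.readlines()

with open(path, "w") as f:
    for line in lines:
        if line.startswith("pannel="):
            line = "pannel=" + new_panel + "\n"  # rewrite the screen model entry
        f.write(line)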

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/dual_buff.html b/maixpy/doc/en/vision/dual_buff.html new file mode 100644 index 00000000..3e4fd87c --- /dev/null +++ b/maixpy/doc/en/vision/dual_buff.html @@ -0,0 +1,396 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Introduction to Running Models in Dual Buffer Mode with MaixPy MaixCAM - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Introduction to Running Models in Dual Buffer Mode with MaixPy MaixCAM

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

You may have noticed the dual_buff=True parameter when initializing a model. For example, with YOLOv5:

+ +
from maix import camera, display, image, nn, app
+
+detector = nn.YOLOv5(model="/root/models/yolov5s.mud", dual_buff=True)
+# detector = nn.YOLOv8(model="/root/models/yolov8n.mud", dual_buff=True)
+
+cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
+dis = display.Display()
+
+while not app.need_exit():
+    img = cam.read()
+    objs = detector.detect(img, conf_th = 0.5, iou_th = 0.45)
+    for obj in objs:
+        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color = image.COLOR_RED)
+        msg = f'{detector.labels[obj.class_id]}: {obj.score:.2f}'
+        img.draw_string(obj.x, obj.y, msg, color = image.COLOR_RED)
+    dis.show(img)
+
+

Generally, this parameter defaults to True, unless you manually set dual_buff=False to disable the dual buffer function.

+

Enabling this feature improves running efficiency, thereby increasing the frame rate (assuming the camera's frame rate is not limited, the above code will halve the loop time on MaixCAM, effectively doubling the frame rate). However, there are drawbacks. The detect function returns the result of the previous call to the detect function, meaning there is a one-frame delay between the result and the input. If you want the detection result to match the input img rather than the previous frame, disable this feature. Additionally, due to the preparation of dual buffers, memory usage will increase. If you encounter insufficient memory issues, you will also need to disable this feature.
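For example, to make detect return the result for the frame you just passed in (or to reduce memory usage), create the detector with the flag turned off; this reuses the model path from the snippet above:

from maix import nn

# With dual_buff disabled, detect() blocks until the result for the current
# frame is ready, trading frame rate for an in-sync result and lower memory use.
detector = nn.YOLOv5(model="/root/models/yolov5s.mud", dual_buff=False)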

+

Principle

+

Model object detection involves several steps:

+
    +
  • Capturing the image
  • +
  • Image preprocessing
  • +
  • Model execution
  • +
  • Post-processing the results
  • +
+

Only the model execution step runs on the hardware NPU, while other steps run on the CPU.

+

If dual_buff is set to False, during detect, the CPU preprocesses (while the NPU is idle), then the NPU performs the computation (while the CPU is idle waiting for the NPU to finish), and then the CPU post-processes (while the NPU is idle). This process is linear and relatively simple. However, a problem arises because either the CPU or the NPU is always idle. When dual_buff=True is enabled, the CPU preprocesses and hands off to the NPU for computation. At this point, the CPU does not wait for the NPU to produce results but instead exits the detect function and proceeds to the next camera read and preprocess. Once the NPU finishes its computation, the CPU has already prepared the next data, immediately passing it to the NPU to continue computing without giving the NPU any idle time. This maximizes the efficient simultaneous operation of both the CPU and NPU.

+

However, note that if the camera frame rate is not high enough, it will still limit the overall frame rate.

+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/face_detection.html b/maixpy/doc/en/vision/face_detection.html new file mode 100644 index 00000000..e8715a1c --- /dev/null +++ b/maixpy/doc/en/vision/face_detection.html @@ -0,0 +1,429 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Face Detection and Keypoint Detection - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Face Detection and Keypoint Detection

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

Face detection can be applied in many scenarios, such as providing the face detection step for face recognition, or for face tracking applications, etc.

+

The face detection provided here can not only detect faces but also detect 5 key points, including two eyes, one nose, and two corners of the mouth.

+

face detection

+

Using Face Detection in MaixPy

+

MaixPy officially provides three face detection models from the open-source projects Face Detector 1MB with landmark, Retinaface, and YOLOv8-face.

+

All three models can be used. YOLOv8-face performs better but is slightly slower, so you can choose based on your testing.

+

Using YOLOv8-face (requires MaixPy version >= 4.3.8):

+ +
from maix import camera, display, image, nn, app
+
+detector = nn.YOLOv8(model="/root/models/yolov8n_face.mud", dual_buff = True)
+
+cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
+dis = display.Display()
+
+while not app.need_exit():
+    img = cam.read()
+    objs = detector.detect(img, conf_th = 0.5, iou_th = 0.45, keypoint_th = 0.5)
+    for obj in objs:
+        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color = image.COLOR_RED)
+        msg = f'{detector.labels[obj.class_id]}: {obj.score:.2f}'
+        img.draw_string(obj.x, obj.y, msg, color = image.COLOR_RED)
+        detector.draw_pose(img, obj.points, 2, image.COLOR_RED)
+    dis.show(img)
+
+

For the other two models:
+In the code below, one of the two model-loading lines is commented out; keep the line that matches the model you downloaded (Retinaface or the 1MB Face Detector).

+ +
from maix import camera, display, image, nn, app
+import math
+
+detector = nn.Retinaface(model="/root/models/retinaface.mud")
+# detector = nn.FaceDetector(model="/root/models/face_detector.mud")
+
+cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
+dis = display.Display()
+
+while not app.need_exit():
+    img = cam.read()
+    objs = detector.detect(img, conf_th = 0.4, iou_th = 0.45)
+    for obj in objs:
+        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color = image.COLOR_RED)
+        radius = math.ceil(obj.w / 10)
+        img.draw_keypoints(obj.points, image.COLOR_RED, size = radius if radius < 5 else 4)
+    dis.show(img)
+
+

Model Downloads and Other Resolution Models

+

Download the models; the compressed package contains multiple resolutions to choose from. Higher resolution models are more accurate but take longer to process:

+ +

dual_buff Dual Buffer Acceleration

+

You may have noticed that the model initialization uses dual_buff (which defaults to True). Enabling the dual_buff parameter can improve running efficiency and increase the frame rate. For detailed principles and usage notes, see dual_buff Introduction.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/face_recognition.html b/maixpy/doc/en/vision/face_recognition.html new file mode 100644 index 00000000..e6460705 --- /dev/null +++ b/maixpy/doc/en/vision/face_recognition.html @@ -0,0 +1,430 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Face Recognition - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Face Recognition

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction to Face Recognition

+

face_recognize

+

Face recognition involves identifying the location of faces in the current view and who they are.
+Thus, in addition to detecting faces, face recognition typically involves a database to store known and unknown individuals.

+

Recognition Principles

+
    +
  • Use AI models to detect faces, obtaining coordinates and features of facial components.
  • +
  • Use the coordinates of these features for affine transformation to align the face in the image to a standard face orientation, facilitating the extraction of facial features by the model.
  • +
  • Employ a feature extraction model to derive facial feature values.
  • +
  • Compare these features with those stored in the database (by calculating the cosine distance between the saved and the current facial features, identifying the face in the database with the smallest distance; if it's below a predefined threshold, it is recognized as the person in the database).
  • +
+
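The comparison in the last step is just a cosine distance over feature vectors. A minimal standalone sketch of how such a match could be scored (independent of the MaixPy API; the threshold value is only illustrative):

import math

def cosine_distance(a, b):
    # 1 - cosine similarity: 0 means identical direction, larger means less similar.
    dot = sum(x * y for x, y in zip(a, b))
    norm_a = math.sqrt(sum(x * x for x in a))
    norm_b = math.sqrt(sum(x * x for x in b))
    return 1.0 - dot / (norm_a * norm_b)

def match_face(feature, database, threshold=0.4):
    # Pick the database entry with the smallest distance; accept it only
    # if the distance is below the threshold, otherwise report "unknown".
    best_label, best_dist = "unknown", float("inf")
    for label, saved in database.items():
        d = cosine_distance(feature, saved)
        if d < best_dist:
            best_label, best_dist = label, d
    return best_label if best_dist < threshold else "unknown"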

Using MaixPy

+

MaixPy's maix.nn module provides a face recognition API, ready to use with built-in models. Additional models can also be downloaded from the MaixHub model repository (select the appropriate hardware platform, such as maixcam).

+

Recognition:

+ +
from maix import nn, camera, display, image
+import os
+import math
+
+recognizer = nn.FaceRecognizer(detect_model="/root/models/retinaface.mud", feature_model = "/root/models/face_feature.mud", dual_buff = True)
+if os.path.exists("/root/faces.bin"):
+    recognizer.load_faces("/root/faces.bin")
+cam = camera.Camera(recognizer.input_width(), recognizer.input_height(), recognizer.input_format())
+dis = display.Display()
+
+while 1:
+    img = cam.read()
+    faces = recognizer.recognize(img, 0.5, 0.45, 0.8)
+    for obj in faces:
+        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color = image.COLOR_RED)
+        radius = math.ceil(obj.w / 10)
+        img.draw_keypoints(obj.points, image.COLOR_RED, size = radius if radius < 5 else 4)
+        msg = f'{recognizer.labels[obj.class_id]}: {obj.score:.2f}'
+        img.draw_string(obj.x, obj.y, msg, color = image.COLOR_RED)
+    dis.show(img)
+
+

When you first run this code, it can detect faces but will not recognize them. We need to enter a mode to learn faces.

+
+

Here recognizer.labels[0] is by default unknown, and every new face added will automatically append to labels.

+
+

For example, you can learn faces when a user presses a button:

+ +
faces = recognizer.recognize(img, 0.5, 0.45, True)
+for face in faces:
+    print(face)
+    # This accounts for the scenario where multiple faces are present in one scene; obj.class_id of 0 means the face is not registered
+    # Write your own logic here
+    #   For instance, based on face’s class_id and coordinates, you can decide whether to add it to the database and facilitate user interaction, like pressing a button to register
+    recognizer.add_face(face, label) # label is the name you assign to the face
+recognizer.save_faces("/root/faces.bin")
+
+

Complete Example

+

A complete example is provided for recording unknown faces and recognizing faces with a button press. This can be found in the MaixPy example directory under nn_face_recognize.py.

+

dual_buff Dual Buffer Acceleration

+

You may have noticed that the model initialization uses dual_buff (which defaults to True). Enabling the dual_buff parameter can improve running efficiency and increase the frame rate. For detailed principles and usage notes, see dual_buff Introduction.

+

Replacing Other Default Recognition Models

+

The current recognition model (used to distinguish different individuals) is based on the MobileNetV2 model. If its accuracy does not meet your requirements, you can replace it with another model, such as the Insight Face ResNet50 model. Of course, you can also train your own model or find other pre-trained models and convert them into a format supported by MaixCAM. For the conversion method, refer to the MaixCAM Model Conversion Documentation, and you can write the mud file based on existing examples.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/find_blobs.html b/maixpy/doc/en/vision/find_blobs.html new file mode 100644 index 00000000..5e082e97 --- /dev/null +++ b/maixpy/doc/en/vision/find_blobs.html @@ -0,0 +1,574 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Find Blobs - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Find Blobs

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Date | Version | Author | Update content
2024-04-03 | 1.0.0 | neucrack | Initial documentation
2024-04-03 | 1.0.1 | lxowalle | Added detailed usage for finding blobs
+
+
+ +
+
+ +

Before reading this article, make sure you know how to develop with MaixCAM. For details, please read Quick Start.

+

Introduction

+

This article will introduce how to use MaixPy to find color blobs and how to use the default application of MaixCam to find color blobs.

+

In vision applications, finding color blobs is a very common requirement, such as robots finding color blobs, automated production lines finding color blobs, etc., which requires identifying specific color areas in the image and obtaining information such as the position and size of these areas.

+

Using MaixPy to Find Blobs

+

The maix.image.Image module in MaixPy provides the find_blobs method, which can conveniently find color blobs.

+

How to Find Blobs

+

A simple example to find color blobs and draw bounding boxes:

+ +
from maix import image, camera, display
+
+cam = camera.Camera(320, 240)
+disp = display.Display()
+
+# Select the corresponding configuration based on the color of the blob
+thresholds = [[0, 80, 40, 80, 10, 80]]      # red
+# thresholds = [[0, 80, -120, -10, 0, 30]]    # green
+# thresholds = [[0, 80, 30, 100, -120, -60]]  # blue
+
+while 1:
+    img = cam.read()
+    blobs = img.find_blobs(thresholds, pixels_threshold=500)
+    for blob in blobs:
+        img.draw_rect(blob[0], blob[1], blob[2], blob[3], image.COLOR_GREEN)
+    disp.show(img)
+
+

Steps:

+
    +
  1. Import the image, camera, and display modules

    + +
    from maix import image, camera, display
    +
    +
  2. +
  3. Initialize the camera and display

    + +
    cam = camera.Camera(320, 240)	# Initialize the camera with an output resolution of 320x240 in RGB format
    +disp = display.Display()
    +
    +
  4. +
  5. Get the image from the camera and display it

    + +
    while 1:
    +    img = cam.read()
    +    disp.show(img)
    +
    +
  6. +
  7. Call the find_blobs method to find color blobs in the camera image and draw them on the screen

    + +
    blobs = img.find_blobs(thresholds, pixels_threshold=500)
    +for blob in blobs:
    +    img.draw_rect(blob[0], blob[1], blob[2], blob[3], image.COLOR_GREEN)
    +
    +
      +
    • img is the camera image obtained through cam.read(). When initialized with cam = camera.Camera(320, 240), the img object is an RGB image with a resolution of 320x240.
    • +
    • img.find_blobs is used to find color blobs. thresholds is a list of color thresholds, where each element is a color threshold. Multiple thresholds can be passed in to find multiple colors simultaneously. Each color threshold is in the format [L_MIN, L_MAX, A_MIN, A_MAX, B_MIN, B_MAX], where L, A, and B are the three channels in the LAB color space. The L channel represents brightness, the A channel represents the red-green component, and the B channel represents the blue-yellow component. pixels_threshold is a pixel count threshold used to filter out unwanted small blobs.
    • +
    • img.draw_rect is used to draw bounding boxes around the color blobs. blob[0], blob[1], blob[2], and blob[3] represent the x-coordinate of the top-left corner, the y-coordinate of the top-left corner, the width, and the height of the blob, respectively.
    • +
    +
  8. +
+

Common Parameter Explanations

+

Here are explanations of commonly used parameters. If you cannot find parameters that can implement your application, you may need to consider using other algorithms or extending the required functionality based on the current algorithm's results.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Parameter | Description | Example
thresholds | Thresholds based on the LAB color space, thresholds=[[l_min, l_max, a_min, a_max, b_min, b_max]], representing the brightness range [l_min, l_max], the green-to-red component range [a_min, a_max], and the blue-to-yellow component range [b_min, b_max]. Multiple thresholds can be set simultaneously. | Set two thresholds to detect red and green: img.find_blobs(thresholds=[[0, 80, 40, 80, 10, 80], [0, 80, -120, -10, 0, 30]]) (red threshold is [0, 80, 40, 80, 10, 80], green threshold is [0, 80, -120, -10, 0, 30])
invert | Enable threshold inversion; when enabled, the passed thresholds are inverted. Default is False. | Enable threshold inversion: img.find_blobs(invert=True)
roi | Set the rectangular region for the algorithm to compute, roi=[x, y, w, h], where x and y are the coordinates of the top-left corner of the rectangle and w and h are its width and height. The default is the entire image. | Compute the region at (50, 50) with a width and height of 100: img.find_blobs(roi=[50, 50, 100, 100])
area_threshold | Filter out blobs with a pixel area smaller than area_threshold, in units of pixels. The default is 10. This parameter can be used to filter out useless small blobs. | Filter out blobs with an area smaller than 1000: img.find_blobs(area_threshold=1000)
pixels_threshold | Filter out blobs with fewer valid pixels than pixels_threshold. The default is 10. This parameter can be used to filter out useless small blobs. | Filter out blobs with fewer than 1000 valid pixels: img.find_blobs(pixels_threshold=1000)
+

This article introduces commonly used methods. For more APIs, please see the image section of the API documentation.

+

Setting Thresholds Offline

+

To quickly verify the function of find blobs, you can first use the find blobs application provided by MaixCam to experience the effect of finding color blobs.

+

Demo

+

Turn on the device, select Find Blobs application, then select the colour you want to identify, or customize the colour, then you can identify the corresponding colour, the setting bar at the bottom will show the threshold range, and the serial port will also output the coordinates and colour information of the identified coordinates.

+

+

source code address

+

Quick use

+

Using the default threshold

+

The find blobs app provides four configurations: red, green, blue, and user. Red, green, and blue are used to find red, green, and blue colour blobs, while user-customized thresholds are saved when the app exits and loaded again the next time the app is opened. For a quick experience, you can switch to the corresponding configuration by clicking the buttons at the bottom of the interface; the app interface is shown below:

+

+

Quick Debug Thresholds

+

Method of operation:

+
    +
  1. Aim the camera at the object you need to find and click on the target on the screen; the left side will then show a rectangle in the object's colour together with the LAB value of that colour.
  2. +
  3. Click on the rectangular box; the system will automatically set the LAB threshold, and the screen will then draw the edge of the object.
  4. +
+

The advantage of this method is that it is easy and quick to set the threshold and find the corresponding colour blob. The disadvantage is that it is not precise enough; you can fine-tune it manually in the next step.

+

Manually fine-tune the threshold

+

Method of operation:

+
    +
  1. Click on the Options icon in the lower left corner to enter configuration mode

    +
  2. +
  3. Aim the camera at the object you need to find and click on the target object on the screen; the left side will then show a rectangular box in the object's colour and display the LAB value of that colour.

    +
  4. +
  5. Click on the lower option L Min, L Max, A Min, A Max, B Min, B Max, and a slider will appear on the right to set the value of this option. These values correspond to the minimum and maximum values of the L, A and B channels of the LAB colour format.

    +
  6. +
  7. Referring to the LAB value of the object colour calculated in step 2, adjust L Min, L Max, A Min, A Max, B Min, B Max to the appropriate value to identify the corresponding colour block.

    +

    For example, LAB=(20, 50, 80), since L=20, in order to fit a certain range, let L Min=10, L Max=30; similarly, since A=50, let A Min=40, A Max=60; since B=80, let B Min=70, B Max=90.

    +
  8. +
+
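Putting the example values above into code, the manually tuned threshold could then be used with find_blobs like this (reusing the camera setup from the earlier example):

from maix import camera, display, image

cam = camera.Camera(320, 240)
disp = display.Display()

# L in [10, 30], A in [40, 60], B in [70, 90], from the LAB=(20, 50, 80) example above
thresholds = [[10, 30, 40, 60, 70, 90]]

while 1:
    img = cam.read()
    for blob in img.find_blobs(thresholds, pixels_threshold=500):
        img.draw_rect(blob[0], blob[1], blob[2], blob[3], image.COLOR_GREEN)
    disp.show(img)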

This method finds the threshold more precisely; combined with the Quick Debug Thresholds method above, it is easy to find the desired threshold.

+

Get recognition results via serial protocol

+

The find blobs app supports reporting information about detected color blobs via the serial port (default baud rate is 115200).

+

Since only one report message is sent, we can illustrate the content of the report message with an example.

+

For instance, if the report message is:

+ +
+
+AA CA AC BB 14 00 00 00 E1 08 EE 00 37 00 15 01 F7 FF 4E 01 19 00 27 01 5A 00 A7 20
+
+
    +
  • AA CA AC BB: Protocol header, content is fixed
  • +
  • 14 00 00 00: Data length, the total length excluding the protocol header and data length
  • +
  • E1: Flag, used to identify the serial message flag
  • +
  • 08: Command type, for the find blobs app application, this value is fixed at 0x08
  • +
  • EE 00 37 00 15 01 F7 FF 4E 01 19 00 27 01 5A 00: Coordinates of the four vertices of the found color blob, with each value represented by 2 bytes in little-endian format. EE 00 and 37 00 represent the first vertex coordinate as (238, 55), 15 01 and F7 FF represent the second vertex coordinate as (277, -9), 4E 01 and 19 00 represent the third vertex coordinate as (334, 25), 27 01 and 5A 00 represent the fourth vertex coordinate as (295, 90).
  • +
  • A7 20: CRC checksum value, used to verify if the frame data has errors during transmission.
  • +
+
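If you are parsing this frame on the receiving side (for example on a PC or another MCU), a minimal sketch based only on the layout described above could look like this (the CRC check is omitted):

import struct

# The example frame from above
frame = bytes.fromhex(
    "AA CA AC BB 14 00 00 00 E1 08 "
    "EE 00 37 00 15 01 F7 FF 4E 01 19 00 27 01 5A 00 A7 20"
)

assert frame[:4] == b"\xAA\xCA\xAC\xBB"          # protocol header
length = struct.unpack_from("<I", frame, 4)[0]   # data length, little-endian
flag, cmd = frame[8], frame[9]                   # flag 0xE1, command 0x08 for find blobs
coords = struct.unpack_from("<8h", frame, 10)    # four vertices, signed little-endian int16
vertices = [(coords[i], coords[i + 1]) for i in range(0, 8, 2)]
print(length, hex(flag), hex(cmd), vertices)
# expected: 20 0xe1 0x8 [(238, 55), (277, -9), (334, 25), (295, 90)]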

About the LAB Color Space

+

The LAB color space, like the RGB color space, is a way to represent colors. LAB can represent all colors visible to the human eye. If you need to learn more about LAB, you can search for relevant articles online, which will provide more details. However, for you, it should be sufficient to understand why LAB is advantageous for MaixPy.

+

Advantages of LAB for MaixPy:

+
    +
  1. The color gamut of the LAB color space is larger than that of RGB, so it can completely replace RGB.
  2. +
  3. In the LAB color space, since the L channel is the brightness channel, we often set it to a relatively large range (commonly [0, 80]), and when coding, we mainly focus on the A and B channels. This can save a lot of time spent struggling with how to select color thresholds.
  4. +
  5. The color perception in the LAB color space is more uniform and easier to debug with code. For example, if you only need to find red color blobs, you can fix the values of the L and B channels and only adjust the value of the A channel (in cases where high color accuracy is not required). For RGB channels, you generally need to adjust all three R, G, and B channels simultaneously to find suitable thresholds.
  6. +
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/image_ops.html b/maixpy/doc/en/vision/image_ops.html new file mode 100644 index 00000000..7c793e89 --- /dev/null +++ b/maixpy/doc/en/vision/image_ops.html @@ -0,0 +1,664 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Basic Image Operations - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Basic Image Operations

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
Date | Version | Author | Update content
2024-04-03 | 1.0.0 | neucrack | Initial document
+
+
+ +
+
+ +

Introduction

+

Images play a very important role in visual applications. Whether it's a picture or a video, since a video is essentially a series of frames, image processing is the foundation of visual applications.

+

API Documentation

+

This document introduces common methods. For more APIs, refer to the documentation of the maix.image module.

+

Image Formats

+

MaixPy provides a basic image module image, where the most important part is the image.Image class, which is used for image creation and various basic image operations, as well as image loading and saving.

+

There are many image formats, and we generally use image.Format.FMT_RGB888 or image.Format.FMT_RGBA8888 or image.Format.FMT_GRAYSCALE or image.Format.FMT_BGR888, etc.

+

We all know that the three colors RGB can synthesize any color, so in most cases, we use image.Format.FMT_RGB888, which is sufficient. RGB888 is RGB packed in memory, i.e., the arrangement in memory is:
+pixel1_red, pixel1_green, pixel1_blue, pixel2_red, pixel2_green, pixel2_blue, ... arranged in sequence.
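A quick way to convince yourself of this layout is to fill an image with a known colour and index into the raw bytes. This is a small check, assuming to_bytes (introduced later in this article) returns the packed RGB888 buffer described above:

from maix import image

img = image.Image(320, 240, image.Format.FMT_RGB888)
# Fill the whole image with a single colour (R=10, G=128, B=255)
img.draw_rect(0, 0, img.width(), img.height(), image.Color.from_rgb(10, 128, 255), thickness=-1)

data = img.to_bytes()                 # packed RGB888, 3 bytes per pixel
x, y = 5, 7
i = (y * img.width() + x) * 3         # byte offset of pixel (x, y)
print(data[i], data[i + 1], data[i + 2])  # expected: 10 128 255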

+

Creating an Image

+

Creating an image is very simple, you only need to specify the width and height of the image, and the image format:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+print(img)
+print(img.width(), img.height(), img.format())
+
+

320 is the width of the image, 240 is the height of the image, and image.Format.FMT_RGB888 is the format of the image. The format parameter can be omitted, and the default is image.Format.FMT_RGB888.

+

Here, you can get the width, height, and format of the image using img.width(), img.height(), and img.format().

+

Displaying on the Screen

+

MaixPy provides the maix.display.Display class, which can conveniently display images:

+ +
from maix import image, display
+
+disp = display.Display()
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+disp.show(img)
+
+

Note that here, since there is no image data, a black image is displayed. See the following sections for how to modify the image.

+

Reading Images from the File System

+

MaixPy provides the maix.image.load method, which can read images from the file system:

+ +
from maix import image
+
+img = image.load("/root/image.jpg")
+if img is None:
+    raise Exception(f"load image failed")
+print(img)
+
+

Note that here, /root/image.jpg has been transferred to the board in advance. You can refer to the previous tutorials for the method.
+It supports jpg and png image formats.

+

Saving Images to the File System

+

MaixPy's maix.image.Image provides the save method, which can save images to the file system:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+
+# do something with img
+img.save("/root/image.jpg")
+
+

Drawing Rectangles

+

image.Image provides the draw_rect method, which can draw rectangles on the image:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img.draw_rect(10, 10, 100, 100, image.Color.from_rgb(255, 0, 0))
+
+

Here, the parameters are: x, y, w, h, color. x and y are the coordinates of the top-left corner of the rectangle, w and h are the width and height of the rectangle, and color is the color of the rectangle, which can be created using the image.Color.from_rgb method.
+You can specify the line width of the rectangle using thickness, which defaults to 1.

+

You can also draw a solid rectangle by passing thickness=-1:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img.draw_rect(10, 10, 100, 100, (255, 0, 0), thickness=-1)
+
+

Writing Strings

+

image.Image provides the draw_string method, which can write text on the image:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img.draw_string(10, 10, "Hello MaixPy", image.Color.from_rgb(255, 0, 0))
+
+

Here, the parameters are: x, y, text, color. x and y are the coordinates of the top-left corner of the text, text is the text to be written, and color is the color of the text, which can be created using the image.Color.from_rgb method.

+

You can also enlarge the font by passing the scale parameter:

+ +
img.draw_string(10, 10, "Hello MaixPy", image.Color.from_rgb(255, 0, 0), scale=2)
+
+

Get the width and height of the font:

+ +
w, h = img.string_size("Hello MaixPy", scale=2)
+print(w, h)
+
+

Note that here, scale is the magnification factor, and the default is 1. It should be consistent with draw_string.

+

Chinese support and custom fonts

+

The image module supports loading ttf/otf fonts. The default font only supports English. If you want to display Chinese or custom fonts, you can first download the font file to the device and then load the font.
+The system also has several built-in fonts, under the /maixapp/share/font directory, code example:

+ +
from maix import image, display, app, time
+
+image.load_font("sourcehansans", "/maixapp/share/font/SourceHanSansCN-Regular.otf", size = 32)
+print("fonts:", image.fonts())
+image.set_default_font("sourcehansans")
+
+disp = display.Display()
+
+img = image.Image(disp.width(), disp.height())
+img.draw_string(2, 2, "Hello! Hello, world!", image.Color.from_rgba(255, 0, 0))
+
+disp.show(img)
+while not app.need_exit():
+    time.sleep(1)
+
+

Load the font file and then set the default font. Alternatively, you can skip setting the default font and instead pass the font parameter in the drawing function:

+ +
img.draw_string(2, 2, "你好!Hello, world!", image.Color.from_rgba(255, 0, 0), font="sourcehansans")
+
+

Note that the string_size method will also use the default font to calculate the size, and you can also use the font parameter to set the font to be calculated separately.
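For example, continuing the snippet above (the string is just a placeholder):

w, h = img.string_size("你好！Hello, world!", font="sourcehansans")
print(w, h)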

+

Drawing Lines

+

image.Image provides the draw_line method, which can draw lines on the image:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img.draw_line(10, 10, 100, 100, image.Color.from_rgb(255, 0, 0))
+
+

Here, the parameters are: x1, y1, x2, y2, color. x1 and y1 are the coordinates of the starting point of the line, x2 and y2 are the coordinates of the end point of the line, and color is the color of the line, which can be created using the image.Color.from_rgb method.

+

Drawing Circles

+

image.Image provides the draw_circle method, which can draw circles on the image:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img.draw_circle(100, 100, 50, image.Color.from_rgb(255, 0, 0))
+
+

Here, the parameters are: x, y, r, color. x and y are the coordinates of the center of the circle, r is the radius, and color is the color of the circle, which can be created using the image.Color.from_rgb method.

+

Resizing Images

+

image.Image provides the resize method, which can resize images:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img_new = img.resize(160, 120)
+print(img, img_new)
+
+

Note that here, the resize method returns a new image object, and the original image remains unchanged.

+

Cropping Images

+

image.Image provides the crop method, which can crop images:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img_new = img.crop(10, 10, 100, 100)
+print(img, img_new)
+
+

Note that here, the crop method returns a new image object, and the original image remains unchanged.

+

Rotating Images

+

image.Image provides the rotate method, which can rotate images:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img_new = img.rotate(90)
+print(img, img_new)
+
+

Note that here, the rotate method returns a new image object, and the original image remains unchanged.

+

Copying Images

+

image.Image provides the copy method, which can copy an independent image:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888) 
+img_new = img.copy()
+print(img, img_new)
+
+

Affine Transformations

+

image.Image provides the affine method, which can perform affine transformations. By providing the coordinates of three or more points in the current image and the corresponding coordinates in the target image, you can automatically perform operations such as rotation, scaling, and translation on the image to transform it into the target image:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img_new = img.affine([(10, 10), (100, 10), (10, 100)], [(10, 10), (100, 20), (20, 100)])
+print(img, img_new)
+
+

For more parameters and usage, please refer to the API documentation.

+

Drawing Keypoints

+

image.Image provides the draw_keypoints method, which can draw keypoints on the image:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+
+keypoints = [10, 10, 100, 10, 10, 100]
+img.draw_keypoints(keypoints, image.Color.from_rgb(255, 0, 0), size=10, thickness=1, fill=False)
+
+

This draws three red keypoints at the coordinates (10, 10), (100, 10), and (10, 100). The size of the keypoints is 10, the line width is 1, and they are not filled.

+

Drawing Crosses

+

image.Image provides the draw_cross method, which can draw crosses on the image:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img.draw_cross(100, 100, image.Color.from_rgb(255, 0, 0), size=5, thickness=1)
+
+

This draws a red cross at the coordinate (100, 100). The extension size of the cross is 5, so the length of the line segment is 2 * size + thickness, and the line width is 1.

+

Drawing Arrows

+

image.Image provides the draw_arrow method, which can draw arrows on the image:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img.draw_arrow(10, 10, 100, 100, image.Color.from_rgb(255, 0, 0), thickness=1)
+
+

This draws a red arrow starting from the coordinate (10, 10), with the end point at (100, 100), and a line width of 1.

+

Drawing Images

+

image.Image provides the draw_image method, which can draw images on the image:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img2 = image.Image(100, 100, image.Format.FMT_RGB888)
+img2.draw_rect(10, 10, 90, 90, image.Color.from_rgb(255, 0, 0))
+img.draw_image(10, 10, img2)
+
+

Converting Formats

+

image.Image provides the to_format method, which can convert image formats:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img_new = img.to_format(image.Format.FMT_BGR888)
+print(img, img_new)
+img_jpg = img.to_format(image.Format.FMT_JPEG)
+print(img, img_new)
+
+

Note that here, the to_format method returns a new image object, and the original image remains unchanged.

+

Converting between Numpy/OpenCV and maix.image.Image Formats

+

Refer to MaixPy use OpenCV documentation

+

Converting between bytes Data

+

image.Image provides the to_bytes method, which can convert an image to bytes data:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+data = img.to_bytes()
+print(type(data), len(data), img.data_size())
+
+img_jpeg = image.from_bytes(320, 240, image.Format.FMT_RGB888, data)
+print(img_jpeg)
+img = img_jpeg.to_format(image.Format.FMT_RGB888)
+print(img)
+
+

Here, to_bytes returns a new bytes object, which is independent memory and does not affect the original image.
+The image.Image constructor can directly construct an image object from bytes data by passing the data parameter. Note that the new image is also independent memory and does not affect data.

+

Since memory copying is involved, this method is relatively time-consuming and should not be used frequently.

+
+

If you want to optimize your program without copying (not recommended for casual use, as poorly written code can easily cause crashes), please refer to the API documentation.

+
+

More Basic API Usage

+

For more API usage, please refer to the documentation of the maix.image module.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/line_tracking.html b/maixpy/doc/en/vision/line_tracking.html new file mode 100644 index 00000000..d966e581 --- /dev/null +++ b/maixpy/doc/en/vision/line_tracking.html @@ -0,0 +1,595 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Line Tracking - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Line Tracking

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
Date | Version | Author | Update content
2024-05-09 | 1.0.0 | lxowalle | Initial document
+
+
+ +
+
+ +

Before reading this article, make sure you already know how to develop MaixCAM. For details, please read Quick Start.

+

Introduction

+

In vision applications, line tracking is often required, for example in line-following robots. In this article, we will describe:

+
    +
  • How to use MaixPy to track lines.

    +
  • +
  • How to track lines using MaixCAM's default application

    +
  • +
+

How to use MaixPy to track lines

+

The maix.image.Image module in MaixPy provides the get_regression method, which makes it easy to track lines.

+

Code example

+

A simple example of finding and drawing a line.

+ +
from maix import camera, display, image
+
+cam = camera.Camera(320, 240)
+disp = display.Display()
+
+# thresholds = [[0, 80, 40, 80, 10, 80]] # red
+thresholds = [[0, 80, -120, -10, 0, 30]] # green
+# thresholds = [[0, 80, 30, 100, -120, -60]] # blue
+
+while 1:
+    img = cam.read()
+
+    lines = img.get_regression(thresholds, area_threshold = 100)
+    for a in lines:
+        img.draw_line(a.x1(), a.y1(), a.x2(), a.y2(), image.COLOR_GREEN, 2)
+        theta = a.theta()
+        rho = a.rho()
+        if theta > 90:
+            theta = 270 - theta
+        else:
+            theta = 90 - theta
+        img.draw_string(0, 0, "theta: " + str(theta) + ", rho: " + str(rho), image.COLOR_BLUE)
+
+    disp.show(img)
+
+

Steps:

+
    +
  1. import image, camera, display modules

    + +
    from maix import image, camera, display
    +
    +
  2. +
  3. Initialize camera and display

    + +
    cam = camera.Camera(320, 240) # Initialise camera, output resolution 320x240 in RGB format.
    +disp = display.Display()
    +
    +
  4. +
  5. Get the image from the camera and display it

    + +
    while 1:
    +    img = cam.read()
    +    disp.show(img)
    +
    +
  6. +
  7. Call the get_regression method to find the straight line in the camera image and draw it to the screen

    + +
    lines = img.get_regression(thresholds, area_threshold = 100)
    +for a in lines:
    +   img.draw_line(a.x1(), a.y1(), a.x2(), a.y2(), image.COLOR_GREEN, 2)
    +   theta = a.theta()
    +   rho = a.rho()
    +   if theta > 90:
    +      theta = 270 - theta
    +   else:
    +      theta = 90 - theta
    +   img.draw_string(0, 0, "theta: " + str(theta) + ", rho: " + str(rho), image.COLOR_BLUE)
    +
    +
      +
    • img is the camera image read via cam.read(), when initialised as cam = camera.Camera(320, 240), the img object is an RGB image with a resolution of 320x240.
    • +
    • img.get_regression is used to find straight lines. thresholds is a list of colour thresholds; each element is one colour threshold, and multiple thresholds can be passed in to search for several colours at the same time. Each colour threshold has the format [L_MIN, L_MAX, A_MIN, A_MAX, B_MIN, B_MAX], where L, A, and B are the three channels of the LAB colour space: L is luminance, A is the red-green component, and B is the blue-yellow component. area_threshold (used in the example above) and pixels_threshold can be used to filter out unwanted small line segments.
    • +
    • for a in lines is used to iterate through the returned Line objects, where a is the current Line object. Normally the get_regression function will only return one Line object, but if you need to find more than one line, try the find_line method.
    • +
    • Use img.draw_line to draw the found line, a.x1(), a.y1(), a.x2(), a.y2() represent the coordinates of the ends of the line.
    • +
    • Use img.draw_string to show the angle between the line and the x-axis in the upper-left corner. a.theta() is the angle between the line and the y-axis, which is converted here to the angle relative to the x-axis for easier understanding; a.rho() is the length of the perpendicular from the origin to the line.
    • +
    +
  8. +
  9. Run the code through the maixvision, you can find the line, look at the effect!

    +

    image-20240509110204007

    +
  10. +
+

Common Parameter Explanations

+

Here are explanations of commonly used parameters. If you cannot find parameters that can implement your application, you may need to consider using other algorithms or extending the required functionality based on the current algorithm's results.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Parameter | Description | Example
thresholds | Thresholds based on the LAB color space, thresholds=[[l_min, l_max, a_min, a_max, b_min, b_max]], representing the brightness range [l_min, l_max], the green-to-red component range [a_min, a_max], and the blue-to-yellow component range [b_min, b_max]. Multiple thresholds can be set simultaneously. | Set two thresholds to detect red and green: img.find_blobs(thresholds=[[0, 80, 40, 80, 10, 80], [0, 80, -120, -10, 0, 30]]) (red threshold is [0, 80, 40, 80, 10, 80], green threshold is [0, 80, -120, -10, 0, 30])
invert | Enable threshold inversion; when enabled, the passed thresholds are inverted. Default is False. | Enable threshold inversion: img.find_blobs(invert=True)
roi | Set the rectangular region for the algorithm to compute, roi=[x, y, w, h], where x and y are the coordinates of the top-left corner of the rectangle and w and h are its width and height. The default is the entire image. | Compute the region at (50, 50) with a width and height of 100: img.find_blobs(roi=[50, 50, 100, 100])
area_threshold | Filter out blobs with a pixel area smaller than area_threshold, in units of pixels. The default is 10. This parameter can be used to filter out useless small blobs. | Filter out blobs with an area smaller than 1000: img.find_blobs(area_threshold=1000)
pixels_threshold | Filter out blobs with fewer valid pixels than pixels_threshold. The default is 10. This parameter can be used to filter out useless small blobs. | Filter out blobs with fewer than 1000 valid pixels: img.find_blobs(pixels_threshold=1000)
+

This article introduces commonly used methods. For more APIs, please see the image section of the API documentation.

+

Increasing the speed of line tracking

+

Here are a few ways to increase the speed of line tracking

+
    +
  1. Choose a suitable resolution

    +

    The larger the resolution, the slower the calculation speed, you can choose a more suitable resolution according to the recognition distance and accuracy requirements.

    +
  2. +
  3. Use a grayscale image

    +

    With grayscale recognition, the algorithm only processes one channel, so recognition is faster, which is very useful in single-colour environments. Note that when using grayscale images, only l_min and l_max are valid in the thresholds passed to get_regression.

    +

    Ways to get a grayscale image:

    + +
    # Example 1
    +cam = camera.Camera(320, 240, image.Format.FMT_GRAYSCALE)    	# Support after MaixPy v4.2.1
    +gray_img = cam.read()											# get gray scale image
    +
    +# Example 2
    +cam = camera.Camera(320, 240)
    +img = cam.read()
    +gray_img = img.to_format(image.Format.FMT_GRAYSCALE)			# get gray scale image
    +
    +
  4. +
+

How to track lines using MaixCAM's default application

+

To quickly verify the line tracking functionality, you can use the line_tracking application provided by MaixCAM to experience the line tracking effect.

+

How to use it

+
    +
  1. Select and open the Line tracking application.
  2. +
  3. Click on the line in the screen that needs to be identified and the colour of the line will be displayed on the left hand side
  4. +
  5. Click on the colour to be detected on the left (the colour below L A B in the screen)
  6. +
  7. The line will be identified and the coordinates and angle of the line will be output from the serial port.
  8. +
+

Demo

+

+

Advanced operations

+

Manually adjusting the LAB threshold to track lines

+

The application allows you to set the LAB threshold manually to track lines accurately.

+

Steps:

+
    +
  1. Click the options icon in the bottom-left corner to enter configuration mode.
  2. +
  3. Point the camera at the object you need to find, click on the target object on the screen, and the left side will display a rectangular frame of the object's color and show the LAB values of that color.
  4. +
  5. Click on the bottom options L Min, L Max, A Min, A Max, B Min, B Max. After clicking, a slider will appear on the right side to set the value for that option. These values correspond to the minimum and maximum values of the L, A, and B channels in the LAB color format, respectively.
  6. +
  7. Referring to the LAB values of the object color calculated in step 2, adjust L Min, L Max, A Min, A Max, B Min, B Max to appropriate values to identify the corresponding color blobs. For example, if LAB = (20, 50, 80), since L=20, to accommodate a certain range, set L Min=10 and L Max=30. Similarly, since A=50, set A Min=40 and A Max=60. Since B=80, set B Min=70 and B Max=90.
  8. +
+

Getting Detection Data via Serial Protocol

+

The line tracking application supports reporting detected straight line information via the serial port (default baud rate is 115200).

+

Since only one report message is sent, we can illustrate the content of the report message with an example.

+

For instance, if the report message is:

+ +
AA CA AC BB 0E 00 00 00 00 E1 09 FC 01 01 00 E9 01 6F 01 57 00 C1 C6
+
+
    +
  • AA CA AC BB: Protocol header, fixed content

    +
  • +
  • 0E 00 00 00: Data length, the total length excluding the protocol header and data length, here means the length is 14.

    +
  • +
  • E1: Flag bit, used to identify the serial message flag

    +
  • +
  • 09: Command type, for the line tracking application, this value is fixed at 0x09.

    +
  • +
  • FC 01 01 00 E9 01 6F 01 57 00: The coordinates and angle information for both ends of line, with each value represented as a 2-byte value in little-end format. FC 01 and 01 00 indicate that the coordinates of the first endpoint are (508, 1), E9 01 and 6F 01 indicate that the coordinates of the second endpoint are (489, 367), and 57 00 indicates that the angle of the line to the x-axis is 87 degrees

    +
  • +
  • C1 C6: CRC checksum value, used to verify if the frame data has errors during transmission.

    +
  • +
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/maixhub_train.html b/maixpy/doc/en/vision/maixhub_train.html new file mode 100644 index 00000000..29c0b739 --- /dev/null +++ b/maixpy/doc/en/vision/maixhub_train.html @@ -0,0 +1,433 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Using MaixHub to Train AI Models for MaixCAM MaixPy - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Using MaixHub to Train AI Models for MaixCAM MaixPy

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
Date | Version | Author | Update content
2024-04-03 | 1.0.0 | neucrack | Initial document
+
+
+ +
+
+ +

Introduction

+

MaixHub offers the functionality to train AI models online, directly within a browser. This eliminates the need for expensive hardware, complex development environments, or coding skills, making it highly suitable for beginners as well as experts who prefer not to delve into code.

+

Basic Steps to Train a Model Using MaixHub

+

Identify the Data and Model Types

+

To train an AI model, you first need to determine the type of data and model. As of April 2024, MaixHub provides models for image data including Object Classification Models and Object Detection Models. Object classification models are simpler than object detection models, as the latter require marking the position of objects within images, which can be more cumbersome. Object classification merely requires identifying what is in the image without needing coordinates, making it simpler and recommended for beginners.

+

Collect Data

+

As discussed in AI basics, training a model requires a dataset for the AI to learn from. For image training, you need to create a dataset and upload images to it.

+

Ensure the device is connected to the internet (WiFi).
+Open the MaixHub app on your device and choose to collect data to take photos and upload them directly to MaixHub. You need to create a dataset on MaixHub first, then click on device upload data, which will display a QR code. Scan this QR code with your device to connect to MaixHub.

+

It's important to distinguish between training and validation datasets. To ensure the performance during actual operation matches the training results, the validation dataset must be of the same image quality as those taken during actual operation. It's also advisable to use images taken by the device for the training set. If using internet images, restrict them to the training set only, as the closer the dataset is to actual operational conditions, the better.

+

Annotate Data

+

For classification models, images are annotated during upload by selecting the appropriate category for each image.

+

For object detection models, after uploading, you need to manually annotate each image by marking the coordinates, size, and category of the objects to be recognized.
+This annotation process can also be done offline on your own computer using software like labelimg, then imported into MaixHub using the dataset import feature.
+Utilize shortcuts during annotation to speed up the process. MaixHub will also add more annotation aids and automatic annotation tools in the future (there is already an automatic annotation tool available for videos that you can try).

+

Train the Model

+

Select training parameters, choose the corresponding device platform, select maixcam, and wait in the training queue. You can monitor the training progress in real-time and wait for it to complete.

+

Deploy the Model

+

Once training is complete, you can use the deploy function in the MaixHub app on your device to scan a code and deploy.
+The device will automatically download and run the model, storing it locally for future use.

+

If you find the recognition results satisfactory, you can share the model to the model library with a single click for others to use.

+

How to Use

+

Please visit MaixHub to register an account, then log in. There are video tutorials on the homepage for learning.

+

Note that if the tutorial uses the M2dock development board, the process is similar for MaixCAM, although the MaixHub application on the device might differ slightly. The overall process is the same, so please apply the knowledge flexibly.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/object_track.html b/maixpy/doc/en/vision/object_track.html new file mode 100644 index 00000000..021d5bc3 --- /dev/null +++ b/maixpy/doc/en/vision/object_track.html @@ -0,0 +1,407 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Object Tracking and Counting (e.g., Pedestrian Counting) - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Object Tracking and Counting (e.g., Pedestrian Counting)

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction to Object Tracking

+

Previously, we used YOLOv5, YOLOv8, or even find_blobs to detect objects. However, when there are multiple objects in the frame and we need to distinguish between each object, object tracking becomes necessary.

+

For instance, if there are five people moving in the frame, we need to assign each person a number and track their movement.

+

Applications:

+
    +
  • Pedestrian counting, such as counting the number of people passing through a certain area.
  • +
  • Counting workpieces, such as counting products on a production line.
  • +
  • Recording and recognizing the movement trajectories of objects.
  • +
+

MaixCAM/MaixPy Object Tracking and Pedestrian Counting Results

+

As shown in the video below, the system can track each person and count those who cross the yellow area from top to bottom (displayed in the lower-left corner):

+

+

Using MaixCAM/MaixPy for Object Tracking and Pedestrian Counting

+

You can directly install the application to experience it.
+You can also check the examples in the examples/vision/tracker directory.

+

The tracker_bytetrack.py example is a basic object tracking example and involves several steps:

+
    +
  • Use YOLOv5 or YOLOv8 to detect objects. This allows you to replace the model to detect different objects according to your needs.
  • +
  • Use the maix.tracker.ByteTracker algorithm for object tracking. Simply calling the update function will give you the results (the trajectory of each object in the frame), which is very straightforward.
  • +
+

Several parameters need to be adjusted according to your specific scenario. Refer to the example code and API documentation for detailed parameter descriptions:

+ +
# configs
+conf_threshold = 0.3       # detection threshold
+iou_threshold = 0.45       # detection IOU threshold
+max_lost_buff_time = 120   # the number of frames to keep lost tracks
+track_thresh = 0.4         # tracking confidence threshold
+high_thresh = 0.6          # threshold to add a new track
+match_thresh = 0.8         # matching threshold for tracking; if IOU < match_thresh between an object in two frames, they are considered the same object
+max_history_num = 5        # maximum length of a track's position history
+show_detect = False        # show detection
+valid_class_id = [0]       # classes used in the detection model
+
+
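
Putting the two steps together, each frame's detections are wrapped and passed to ByteTracker's update function. Below is a trimmed, hedged sketch based on the tracker_bytetrack.py example; the ByteTracker constructor argument order, the tracker.Object wrapper and the track fields (id, lost, history) are assumptions taken from that example, so check them against the current example code and API documentation before relying on them:

from maix import camera, display, image, nn, app, tracker
+
+detector = nn.YOLOv5(model="/root/models/yolov5s.mud")
+cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
+dis = display.Display()
+# argument order assumed from the example: lost buffer frames, track/high/match thresholds, history length
+bt = tracker.ByteTracker(120, 0.4, 0.6, 0.8, 5)
+
+while not app.need_exit():
+    img = cam.read()
+    objs = detector.detect(img, conf_th=0.3, iou_th=0.45)
+    # wrap detections for the tracker, keeping only class 0 (person in the default COCO model)
+    dets = [tracker.Object(o.x, o.y, o.w, o.h, o.class_id, o.score)
+            for o in objs if o.class_id == 0]
+    tracks = bt.update(dets)
+    for t in tracks:                 # each track is assumed to expose id / lost / history
+        if t.lost:
+            continue
+        o = t.history[-1]            # most recent position of this track
+        img.draw_rect(o.x, o.y, o.w, o.h, color=image.COLOR_RED)
+        img.draw_string(o.x, o.y, str(t.id), color=image.COLOR_RED)
+    dis.show(img)
+
+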

The tracker_bytetrack_count.py example adds pedestrian counting. To keep it simple, the example only implements counting for people walking from top to bottom. If a person is below the yellow area and their trajectory crosses into the yellow area, they are counted as crossing from top to bottom. You can write custom logic based on your specific application scenario.
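
One way to express such a crossing check is to compare a track's earliest and latest positions against the counting line. Below is a minimal, API-independent sketch for illustration only; counted_ids, line_y and the y-coordinate list are hypothetical names, not part of the MaixPy API:

# counted_ids, line_y and ys are hypothetical names used only for illustration
+def crossed_down(ys, line_y):
+    """True if a trajectory starts above the counting line (smaller y) and ends below it."""
+    return len(ys) >= 2 and ys[0] < line_y and ys[-1] >= line_y
+
+counted_ids = set()          # track ids that have already been counted
+
+def update_count(track_id, ys, line_y):
+    if track_id not in counted_ids and crossed_down(ys, line_y):
+        counted_ids.add(track_id)
+    return len(counted_ids)
+
+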

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/ocr.html b/maixpy/doc/en/vision/ocr.html new file mode 100644 index 00000000..ad308e77 --- /dev/null +++ b/maixpy/doc/en/vision/ocr.html @@ -0,0 +1,578 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + OCR Image Text Recognition with MaixCAM MaixPy - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

OCR Image Text Recognition with MaixCAM MaixPy

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction to OCR

+

OCR (Optical Character Recognition) refers to the visual recognition of text in images. It can be applied in various scenarios, such as:

+
    +
  • Recognizing text/numbers on cards
  • +
  • Extracting text from cards, such as ID cards
  • +
  • Digitizing paper documents
  • +
  • Reading digital displays, useful for meter reading and digitizing old instrument data
  • +
  • License plate recognition
  • +
+

Using OCR in MaixPy

+

MaixPy has integrated PaddleOCR, an open-source OCR algorithm developed by Baidu. For understanding the principles, you can refer to this open-source project.

+

OCR

+

First, ensure that your MaixPy version is >= 4.6.

+

Then, execute the code: (The complete, latest code can be found in the MaixPy repository; please refer to the source code.)

+ +
from maix import camera, display, image, nn, app
+
+model = "/root/models/pp_ocr.mud"
+ocr = nn.PP_OCR(model)
+
+cam = camera.Camera(ocr.input_width(), ocr.input_height(), ocr.input_format())
+dis = display.Display()
+
+image.load_font("ppocr", "/maixapp/share/font/ppocr_keys_v1.ttf", size = 20)
+image.set_default_font("ppocr")
+
+while not app.need_exit():
+    img = cam.read()
+    objs = ocr.detect(img)
+    for obj in objs:
+        points = obj.box.to_list()
+        img.draw_keypoints(points, image.COLOR_RED, 4, -1, 1)
+        img.draw_string(obj.box.x4, obj.box.y4, obj.char_str(), image.COLOR_RED)
+    dis.show(img)
+
+

You can see that ocr = nn.PP_OCR(model) loads the model, and then ocr.detect(img) detects and recognizes the text, displaying the results on the screen.

+

More Model Options

+

You can download more complete models with different input resolutions, languages, and versions from the MaixHub Model Download (MaixPy currently defaults to the pp_ocr.mud model, which uses PPOCRv3 for detection and v4 for recognition).

+

Recognizing Without Detection

+

If you already have a processed image with known coordinates for the four corners of the text, you can skip calling the detect function and simply call the recognize function. This way, it will only recognize the text in the image without detection.

+

Custom Models

+

The default model provides detection and recognition for Chinese and English text. If you have specific requirements, such as another language or only want to detect certain shapes without recognizing all types of text, you can download the corresponding model from the PaddleOCR Official Model Library and convert it to a format supported by MaixCAM.

+

The most complex part here is converting the model into a format usable by MaixCAM, which is a relatively complex process that requires basic Linux skills and adaptability.

+
    +
  • First, either train your model using PaddleOCR source code or download the official models. Choose PP-OCRv3 for detection because it is efficient and faster than v4, and download the v4 model for recognition; tests show that v3 does not perform well when quantized on MaixCAM.
  • +
  • Then, convert the model to ONNX:
  • +
+ +
model_path=./models/ch_PP-OCRv3_rec_infer
+paddle2onnx --model_dir ${model_path} --model_filename inference.pdmodel --params_filename inference.pdiparams --save_file ${model_path}/inference.onnx --opset_version 14 --enable_onnx_checker True
+
+
    +
  • Next, set up the environment according to the ONNX to MUD format model documentation and convert the model. Sample conversion scripts are provided in the appendix.
  • +
  • Finally, load and run it using MaixPy.
  • +
+

Appendix: Model Conversion Scripts

+

Detection:

+ +
#!/bin/bash
+
+set -e
+
+net_name=ch_PP_OCRv3_det
+input_w=320
+input_h=224
+output_name=sigmoid_0.tmp_0
+
+# scale 1/255.0
+# "mean": [0.485, 0.456, 0.406],
+# "std": [0.229, 0.224, 0.225],
+
+# mean: mean * 255
+# scale: 1/(std*255)
+
+# mean: 123.675, 116.28, 103.53
+# scale: 0.01712475, 0.017507, 0.01742919
+
+mkdir -p workspace
+cd workspace
+
+# convert to mlir
+model_transform.py \
+--model_name ${net_name} \
+--model_def ../${net_name}.onnx \
+--input_shapes [[1,3,${input_h},${input_w}]] \
+--mean "123.675,116.28,103.53" \
+--scale "0.01712475,0.017507,0.01742919" \
+--keep_aspect_ratio \
+--pixel_format bgr \
+--channel_format nchw \
+--output_names "${output_name}" \
+--test_input ../test_images/test3.jpg \
+--test_result ${net_name}_top_outputs.npz \
+--tolerance 0.99,0.99 \
+--mlir ${net_name}.mlir
+
+# export bf16 model
+# not use --quant_input, use float32 for easy coding
+model_deploy.py \
+--mlir ${net_name}.mlir \
+--quantize BF16 \
+--processor cv181x \
+--test_input ${net_name}_in_f32.npz \
+--test_reference ${net_name}_top_outputs.npz \
+--model ${net_name}_bf16.cvimodel
+
+echo "calibrate for int8 model"
+# export int8 model
+run_calibration.py ${net_name}.mlir \
+--dataset ../images \
+--input_num 200 \
+-o ${net_name}_cali_table
+
+echo "convert to int8 model"
+# export int8 model
+# add --quant_input, use int8 for faster processing in maix.nn.NN.forward_image
+model_deploy.py \
+--mlir ${net_name}.mlir \
+--quantize INT8 \
+--quant_input \
+--calibration_table ${net_name}_cali_table \
+--processor cv181x \
+--test_input ${net_name}_in_f32.npz \
+--test_reference ${net_name}_top_outputs.npz \
+--tolerance 0.9,0.5 \
+--model ${net_name}_int8.cvimodel
+
+

Recognition:

+ +
#!/bin/bash
+
+set -e
+
+# net_name=ch_PP_OCRv4_rec
+# output_name=softmax_11.tmp_0
+
+net_name=ch_PP_OCRv3_rec_infer_sophgo
+output_name=softmax_5.tmp_0
+
+input_w=320
+input_h=48
+cali_images=../images_crop_320
+
+# scale 1/255.0
+# "mean": [0.5, 0.5, 0.5],
+# "std": [0.5, 0.5, 0.5],
+
+# mean: mean * 255
+# scale: 1/(std*255)
+
+# mean: 127.5,127.5,127.5
+# scale: 0.00784313725490196,0.00784313725490196,0.00784313725490196
+
+mkdir -p workspace
+cd workspace
+
+# convert to mlir
+model_transform.py \
+--model_name ${net_name} \
+--model_def ../${net_name}.onnx \
+--input_shapes [[1,3,${input_h},${input_w}]] \
+--mean "127.5,127.5,127.5" \
+--scale "0.00784313725490196,0.00784313725490196,0.00784313725490196" \
+--keep_aspect_ratio \
+--pixel_format bgr \
+--channel_format nchw \
+--output_names "${output_name}" \
+--test_input ../test_images/test3.jpg \
+--test_result ${net_name}_top_outputs.npz \
+--tolerance 0.99,0.99 \
+--mlir ${net_name}.mlir
+
+# export bf16 model
+# not use --quant_input, use float32 for easy coding
+model_deploy.py \
+--mlir ${net_name}.mlir \
+--quantize BF16 \
+--processor cv181x \
+--test_input ${net_name}_in_f32.npz \
+--test_reference ${net_name}_top_outputs.npz \
+--model ${net_name}_bf16.cvimodel
+
+echo "calibrate for int8 model"
+# export int8 model
+run_calibration.py ${net_name}.mlir \
+--dataset $cali_images \
+--input_num 200 \
+-o ${net_name}_cali_table
+
+echo "convert to int8 model"
+# export int8 model
+# add --quant_input, use int8 for faster processing in maix.nn.NN.forward_image
+model_deploy.py \
+--mlir ${net_name}.mlir \
+--quantize INT8 \
+--quant_input \
+--calibration_table ${net_name}_cali_table \
+--processor cv181x \
+--test_input ${net_name}_in_f32.npz \
+--test_reference ${net_name}_top_outputs.npz \
+--tolerance 0.9,0.5 \
+--model ${net_name}_int8.cvimodel
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/opencv.html b/maixpy/doc/en/vision/opencv.html new file mode 100644 index 00000000..04749eac --- /dev/null +++ b/maixpy/doc/en/vision/opencv.html @@ -0,0 +1,523 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Use OpenCV - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Use OpenCV

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

For MaixCAM, since it uses Linux and the performance can support using the Python version of OpenCV, you can use the cv2 module directly in addition to the maix module.

+

The examples in this article and more can be found in MaixPy/examples/vision/opencv.

+

Note that OpenCV functions are basically CPU-calculated. If you can use maix modules, try not to use OpenCV, because many maix functions are hardware-accelerated.

+

Converting between Numpy/OpenCV and maix.image.Image Formats

+

You can convert maix.image.Image object to a numpy array, which can then be used by libraries such as numpy and opencv:

+ +
from maix import image, time, display, app
+
+disp = display.Display()
+
+while not app.need_exit():
+    img = image.Image(320, 240, image.Format.FMT_RGB888)
+    img.draw_rect(0, 0, 100, 100, image.COLOR_RED, thickness=-1)
+    t = time.ticks_ms()
+    img_bgr = image.image2cv(img, ensure_bgr=True, copy=True)
+    img2   = image.cv2image(img_bgr, bgr=True, copy=True)
+    print("time:", time.ticks_ms() - t)
+    print(type(img_bgr), img_bgr.shape)
+    print(type(img2), img2)
+    print("")
+    disp.show(img2)
+
+

The previous program is slower because each conversion involves a memory copy. Below is an optimized version for better performance. However, it is not recommended to use this unless you are aiming for extreme speed, as it is prone to errors:

+ +
from maix import image, time, display, app
+
+disp = display.Display()
+
+while not app.need_exit():
+    img = image.Image(320, 240, image.Format.FMT_RGB888)
+    img.draw_rect(0, 0, 100, 100, image.COLOR_RED, thickness=-1)
+
+    t = time.ticks_ms()
+    img_rgb = image.image2cv(img, ensure_bgr=False, copy=False)
+    img2 = image.cv2image(img_rgb, bgr=False, copy=False)
+    print("time:", time.ticks_ms() - t)
+    print(type(img_rgb), img_rgb.shape)
+    print(type(img2), img2)
+
+    disp.show(img2)
+
+
    +
  • In img_rgb = image.image2cv(img, ensure_bgr=False, copy=False), img_rgb directly uses the data from img without creating a memory copy. Note that the obtained img_rgb is an RGB image. Since OpenCV APIs assume the image is BGR, you need to be careful when using OpenCV APIs to process the image. If you are not sure, set ensure_bgr to True.
  • +
  • In img2 = image.cv2image(img_rgb, bgr=False, copy=False), setting copy to False means img2 directly uses the memory of img_rgb without creating a new memory copy, resulting in faster performance. However, be cautious because img_rgb must not be destroyed before img2 finishes using it; otherwise, the program will crash.
  • +
  • Note that since memory is borrowed, modifying the converted image will also affect the original image.
  • +
+

Load an Image

+ +
import cv2
+
+file_path = "/maixapp/share/icon/detector.png"
+img = cv2.imread(file_path)
+print(img)
+
+

Since the cv2 module is quite large, import cv2 may take some time.

+

Display Image on Screen

+

To display an image on the screen, convert it to a maix.image.Image object and then use display to show it:

+ +
from maix import display, image, time, app
+import cv2
+
+disp = display.Display()
+
+file_path = "/maixapp/share/icon/detector.png"
+img = cv2.imread(file_path)
+
+img_show = image.cv2image(img)
+disp.show(img_show)
+
+while not app.need_exit():
+    time.sleep(1)
+
+

Use OpenCV Functions

+

For example, edge detection:

+

Based on the code above, use the cv2.Canny function:

+ +
from maix import image, display, app, time
+import cv2
+
+file_path = "/maixapp/share/icon/detector.png"
+img0 = cv2.imread(file_path)
+
+disp = display.Display()
+
+while not app.need_exit():
+    img = img0.copy()
+
+    # canny method
+    t = time.ticks_ms()
+    edged = cv2.Canny(img, 180, 60)
+    t2 = time.ticks_ms() - t
+
+    # show by maix.display
+    t = time.ticks_ms()
+    img_show = image.cv2image(edged)
+    print(f"edge time: {t2}ms, convert time: {time.ticks_ms() - t}ms")
+    disp.show(img_show)
+
+

Use Camera

+

On a PC, we use OpenCV's VideoCapture class to read from the camera. For MaixCAM, OpenCV does not support this directly, so we use the maix.camera module to read from the camera and then use it with OpenCV.

+

Convert a maix.image.Image object to a numpy.ndarray object using the image.image2cv function:

+ +
from maix import image, display, app, time, camera
+import cv2
+
+disp = display.Display()
+cam = camera.Camera(320, 240, image.Format.FMT_BGR888)
+
+while not app.need_exit():
+    img = cam.read()
+
+    # convert maix.image.Image object to numpy.ndarray object
+    t = time.ticks_ms()
+    img = image.image2cv(img, ensure_bgr=False, copy=False)
+    print("time: ", time.ticks_ms() - t)
+
+    # canny method
+    edged = cv2.Canny(img, 180, 60)
+
+    # show by maix.display
+    img_show = image.cv2image(edged, bgr=True, copy=False)
+    disp.show(img_show)
+
+

Read USB camera

+

First, in the development board settings, select USB Mode under USB Settings and set it to HOST mode. If there is no screen available, you can use the examples/tools/maixcam_switch_usb_mode.py script to set it.

+ +
from maix import image, display, app
+import cv2
+import sys
+
+cap = cv2.VideoCapture(0)
+cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640)
+cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 480)
+# cap.set(cv2.CAP_PROP_CONVERT_RGB, 0)
+
+disp = display.Display()
+
+if not cap.isOpened():
+    print("Failed to open the camera")
+    sys.exit(1)
+print("Start reading frames")
+while not app.need_exit():
+    ret, frame = cap.read()
+    if not ret:
+        print("Failed to read frame")
+        break
+    img = image.cv2image(frame, bgr=True, copy=False)
+    disp.show(img)
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/qrcode.html b/maixpy/doc/en/vision/qrcode.html new file mode 100644 index 00000000..4cc18088 --- /dev/null +++ b/maixpy/doc/en/vision/qrcode.html @@ -0,0 +1,495 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy QR Code Recognition - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy QR Code Recognition

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + Update history +
+ + + + + + + + + + + + + + + + + + + +
DateVersionAuthorUpdate content
2024-04-031.0.0lxowalle + + Initial document + +
+
+
+ +
+
+ +

Before reading this article, make sure you are familiar with how to develop with MaixCAM. For details, please read Quick Start.

+

Introduction

+

This article explains how to use MaixPy for QR code recognition.

+

Using MaixPy to Recognize QR Codes

+

MaixPy's maix.image.Image includes the find_qrcodes method for QR code recognition.

+

How to Recognize QR Codes

+

A simple example that recognizes QR codes and draws a bounding box:

+ +
from maix import image, camera, display
+
+cam = camera.Camera(320, 240)
+disp = display.Display()
+
+while True:
+    img = cam.read()
+    qrcodes = img.find_qrcodes()
+    for qr in qrcodes:
+        corners = qr.corners()
+        for i in range(4):
+            img.draw_line(corners[i][0], corners[i][1], corners[(i + 1) % 4][0], corners[(i + 1) % 4][1], image.COLOR_RED)
+        img.draw_string(qr.x(), qr.y() - 15, qr.payload(), image.COLOR_RED)
+    disp.show(img)
+
+

Steps:

+
    +
  1. Import the image, camera, and display modules:

    + +
    from maix import image, camera, display
    +
    +
  2. +
  3. Initialize the camera and display:

    + +
    cam = camera.Camera(320, 240)  # Initialize the camera with a resolution of 320x240 in RGB format
    +disp = display.Display()
    +
    +
  4. +
  5. Capture and display images from the camera:

    + +
    while True:
    +    img = cam.read()
    +    disp.show(img)
    +
    +
  6. +
  7. Use the find_qrcodes method to detect QR codes in the camera image:

    + +
    qrcodes = img.find_qrcodes()
    +
    +
      +
    • img is the camera image captured by cam.read(). When initialized as cam = camera.Camera(320, 240), the img object is a 320x240 resolution RGB image.
    • +
    • img.find_qrcodes searches for QR codes and saves the results in qrcodes for further processing.
    • +
    +
  8. +
  9. Process and display the results of QR code recognition on the screen:

    + +
    for qr in qrcodes:
    +    corners = qr.corners()
    +    for i in range(4):
    +        img.draw_line(corners[i][0], corners[i][1], corners[(i + 1) % 4][0], corners[(i + 1) % 4][1], image.COLOR_RED)
    +    img.draw_string(qr.x(), qr.y() - 15, qr.payload(), image.COLOR_RED)
    +
    +
      +
    • qrcodes contains the results from img.find_qrcodes(). If no QR codes are found, qrcodes will be empty.
    • +
    • qr.corners() retrieves the coordinates of the four corners of the detected QR code. img.draw_line() uses these coordinates to draw the QR code outline.
    • +
    • img.draw_string displays information about the QR code content and position. qr.x() and qr.y() retrieve the x and y coordinates of the QR code's top-left corner, and qr.payload() retrieves the content of the QR code.
    • +
    +
  10. +
+

Common Parameter Explanation

+

List common parameters and their explanations. If you cannot find parameters that fit your application, consider whether to use a different algorithm or extend the functionality based on the current algorithm's results.

+ + + + + + + + + + + + + + + +
ParameterDescriptionExample
roiSets the rectangular area for the algorithm to compute, where roi=[x, y, w, h], x and y denote the top-left coordinates of the rectangle, and w and h denote the width and height of the rectangle, defaulting to the entire image.Compute the area with coordinates (50,50) and width and height of 100:
img.find_qrcodes(roi=[50, 50, 100, 100])
+
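
As a short illustration, the sketch below combines the example above with the roi parameter to search only a central region of a 320x240 camera frame; the region values are arbitrary examples, not recommended settings:

from maix import image, camera, display
+
+cam = camera.Camera(320, 240)
+disp = display.Display()
+roi = [60, 45, 200, 150]   # only search this central region of the 320x240 frame
+
+while True:
+    img = cam.read()
+    img.draw_rect(roi[0], roi[1], roi[2], roi[3], image.COLOR_WHITE)   # visualize the search region
+    qrcodes = img.find_qrcodes(roi=roi)
+    for qr in qrcodes:
+        img.draw_string(qr.x(), qr.y() - 15, qr.payload(), image.COLOR_RED)
+    disp.show(img)
+
+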

This article introduces common methods. For more API details, refer to the image section of the API documentation.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/segmentation.html b/maixpy/doc/en/vision/segmentation.html new file mode 100644 index 00000000..d7d3fb8f --- /dev/null +++ b/maixpy/doc/en/vision/segmentation.html @@ -0,0 +1,417 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Image Semantic Segmentation - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Image Semantic Segmentation

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

Image semantic segmentation refers to identifying specific objects in an image and recognizing the pixels that represent the parts of those objects. For example, in the image below, the human body and the dog are identified, and their body parts are segmented. This can be used for collision detection, autonomous vehicle navigation, area measurement, and more.

+

+

Image Semantic Segmentation with MaixPy

+

MaixPy includes YOLOv8-seg and YOLO11-seg for object detection and image segmentation.

+

MaixPy provides a model for 80 object categories from the COCO dataset by default.

+
+

To use YOLOv8, MaixPy version must be >= 4.4.0
+To use YOLO11, MaixPy version must be >= 4.7.0

+
+

The following code demonstrates the usage, and you can also find it in MaixPy examples.

+ +
from maix import camera, display, image, nn, app, time
+
+detector = nn.YOLOv8(model="/root/models/yolov8n_seg.mud", dual_buff=True)
+# detector = nn.YOLO11(model="/root/models/yolo11n_seg.mud", dual_buff=True)
+
+cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
+dis = display.Display()
+
+while not app.need_exit():
+    img = cam.read()
+    objs = detector.detect(img, conf_th=0.5, iou_th=0.45)
+    for obj in objs:
+        # img.draw_image(obj.x, obj.y, obj.seg_mask)
+        detector.draw_seg_mask(img, obj.x, obj.y, obj.seg_mask, threshold=127)
+        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
+        msg = f'{detector.labels[obj.class_id]}: {obj.score:.2f}'
+        img.draw_string(obj.x, obj.y, msg, color=image.COLOR_RED)
+    dis.show(img)
+
+
+

To switch between YOLOv8 and YOLO11, just modify the commented part of the above code.

+
+

Models with More Resolutions

+

The default model resolution is 320x224. For models with different resolutions, download them from the MaixHub model library:

+ +

dual_buff for Double Buffering Acceleration

+

You may notice that dual_buff is used for model initialization (default value is True). Enabling the dual_buff parameter can improve efficiency and increase the frame rate. For more details and considerations, refer to the dual_buff Introduction.

+

Customizing Your Own Object Segmentation Model

+

The provided models are based on the 80 categories from the COCO dataset. If this does not meet your needs, you can train your own specific object detection and segmentation model. Follow the instructions in Offline Training YOLOv8/YOLO11 to use the official YOLOv8/YOLO11 model training method, and then convert it to a model format supported by MaixCAM.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/self_learn_classifier.html b/maixpy/doc/en/vision/self_learn_classifier.html new file mode 100644 index 00000000..02aaf74e --- /dev/null +++ b/maixpy/doc/en/vision/self_learn_classifier.html @@ -0,0 +1,432 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Self-Learning Classifier - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Self-Learning Classifier

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction to MaixPy Self-Learning Classifier

+

Usually, to recognize new categories, we need to collect a dataset on a computer and retrain the model, which is a cumbersome and difficult process. Here, we provide a method that allows for instant learning of new objects directly on the device without the need for computer-side training, suitable for less complex scenarios.

+

For example, if there is a bottle and a phone in front of you, you can use the device to take a picture of each as the basis for two classifications. Then, you collect a few more pictures of them from different angles, extract their features and save them. During recognition, the feature values of the image are compared with the saved feature values, and the classification that is more similar to the saved features is considered the corresponding classification.

+

Using the Self-Learning Classifier in MaixPy

+

The default image comes with the Self-Learning Classification APP, which you can use directly to get familiar with the process.

+

+

Steps:

+
    +
  • Click the + Class button to collect n classification (class) images. The object needs to be within the white frame on the screen while collecting the images.
  • +
  • Click the + Sample button to collect m sample images. Collect some images for each classification. The order does not matter, and the number is flexible. It's best to take pictures from different angles, but not too different.
  • +
  • Click the Learn button to start learning. The device will automatically classify and learn based on the collected classification and sample images, obtaining the characteristics of the classifications.
  • +
  • Align the object with the center of the screen, recognize the image, and output the result. The screen will show the classification it belongs to and the similarity distance to this classification. The closer the similarity distance, the more similar it is.
  • +
  • The feature values learned by this APP are saved to /root/my_classes.bin, so the most recently learned classes are loaded automatically when you exit and reopen the application.
  • +
+

Simplified version of the code, for the complete version, please refer to the examples for the full code.

+ +
from maix import nn, image
+
+classifier = nn.SelfLearnClassifier(model="/root/models/mobilenetv2.mud", dual_buff = True)
+
+img1 = image.load("/root/1.jpg")
+img2 = image.load("/root/2.jpg")
+img3 = image.load("/root/3.jpg")
+sample_1 = image.load("/root/sample_1.jpg")
+sample_2 = image.load("/root/sample_2.jpg")
+sample_3 = image.load("/root/sample_3.jpg")
+sample_4 = image.load("/root/sample_4.jpg")
+sample_5 = image.load("/root/sample_5.jpg")
+sample_6 = image.load("/root/sample_6.jpg")
+
+classifier.add_class(img1)
+classifier.add_class(img2)
+classifier.add_class(img3)
+classifier.add_sample(sample_1)
+classifier.add_sample(sample_2)
+classifier.add_sample(sample_3)
+classifier.add_sample(sample_4)
+classifier.add_sample(sample_5)
+classifier.add_sample(sample_6)
+
+classifier.learn()
+
+img = image.load("/root/test.jpg")
+max_idx, max_score = classifier.classify(img)
+print(max_idx, max_score)
+
+

Storing and Loading Learned Feature Values

+

Use the save function to store the learned feature values. This will generate a binary file containing the feature values of the objects. When you need to use it again, simply use the load function to load the feature values.

+ +
classifier.save("/root/my_classes.bin")
+classifier.load("/root/my_classes.bin")
+
+

If you have named each classification and stored them in the labels variable, you can also use:

+ +
classifier.save("/root/my_classes.bin", labels=labels)
+labels = classifier.load("/root/my_classes.bin")
+
+

dual_buff Dual Buffer Acceleration

+

You may have noticed that the model initialization uses dual_buff (which defaults to True). Enabling the dual_buff parameter can improve running efficiency and increase the frame rate. For detailed principles and usage notes, see dual_buff Introduction.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/self_learn_detector.html b/maixpy/doc/en/vision/self_learn_detector.html new file mode 100644 index 00000000..c8d5115e --- /dev/null +++ b/maixpy/doc/en/vision/self_learn_detector.html @@ -0,0 +1,393 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Self-Learning Detection Tracker - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Self-Learning Detection Tracker

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

MaixPy Self-Learning Detection Tracker

+

Similar to the self-learning classifier, this tracker doesn't require training. You can simply select the target object by drawing a box around it, and the system will detect and track the object, making it quite useful in simple detection scenarios. Unlike the self-learning classifier, the detection tracker provides the coordinates and size of the object.

+

+

Using the Self-Learning Detection Tracker in MaixPy

+

MaixPy currently offers a single-target learning detection tracking algorithm. Once you select the target object, the tracker will continuously follow it. The algorithm used here is NanoTrack, which you can explore if you're interested in learning more about the underlying principles.

+

You can directly use the built-in self-learning tracking application after flashing the latest system image (>=2024.9.5_v4.5.0) to see the results.

+

To use it, call the maix.nn.NanoTrack class. After initializing the object, call the init method to specify the target to be detected, then call the track method to continuously track the target. Below is a simplified code example:

+ +
from maix import nn
+
+model_path = "/root/models/nanotrack.mud"
+tracker = nn.NanoTrack(model_path)
+tracker.init(img, x, y, w, h)
+pos = tracker.track(img)
+
+

Note that this uses a built-in model located in the system at /root/models. You can also download the model from the MaixHub model library.

+

For more detailed code, refer to MaixPy/examples/vision/ai_vision/nn_self_learn_tracker.py.

+

Other Self-Learning Tracking Algorithms

+

Currently, the NanoTrack algorithm is implemented, which is highly stable and reliable in simple scenarios and provides a sufficient frame rate. However, its limitations include the need for the object to return near the last disappearance point to be detected again if it goes out of view, and the fact that it can only detect one target at a time.

+

If you have better algorithms, you can refer to the existing NanoTrack implementation for guidance. Feel free to discuss or submit code PRs.

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/touchscreen.html b/maixpy/doc/en/vision/touchscreen.html new file mode 100644 index 00000000..2ca5d11c --- /dev/null +++ b/maixpy/doc/en/vision/touchscreen.html @@ -0,0 +1,462 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy / MaixCAM Touchscreen Usage Guide - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy / MaixCAM Touchscreen Usage Guide

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Introduction

+

MaixCAM comes equipped with a touchscreen, which, when used in conjunction with applications, can facilitate numerous engaging functionalities. We can utilize APIs to detect touch interactions on the touchscreen.

+

Reading Touch Input with MaixPy

+

MaixPy offers a straightforward maix.touchscreen.TouchScreen class for reading touch inputs. Here's an example:

+ +
from maix import touchscreen, app, time
+
+ts = touchscreen.TouchScreen()
+
+pressed_already = False
+last_x = 0
+last_y = 0
+last_pressed = False
+while not app.need_exit():
+    x, y, pressed = ts.read()
+    if x != last_x or y != last_y or pressed != last_pressed:
+        print(x, y, pressed)
+        last_x = x
+        last_y = y
+        last_pressed = pressed
+    if pressed:
+        pressed_already = True
+    else:
+        if pressed_already:
+            print(f"clicked, x: {x}, y: {y}")
+            pressed_already = False
+    time.sleep_ms(1)  # sleep some time to free some CPU usage
+
+

Interactivity with the Screen

+

Integrating the screen can enable various interactive user experiences. More examples can be found in the MaixPy/examples/vision/touchscreen directory.

+

As previously described, to display content on the screen, typically, a maix.image.Image object is created and displayed using disp.show(img). Implementing a button is as simple as drawing one on the image and then detecting touches within its area, ensuring that the image's dimensions match those of the screen:

+ +
from maix import touchscreen, app, time, display, image
+
+ts = touchscreen.TouchScreen()
+disp = display.Display()
+
+img = image.Image(disp.width(), disp.height())
+
+# draw exit button
+exit_label = "< Exit"
+size = image.string_size(exit_label)
+exit_btn_pos = [0, 0, 8*2 + size.width(), 12 * 2 + size.height()]
+img.draw_string(8, 12, exit_label, image.COLOR_WHITE)
+img.draw_rect(exit_btn_pos[0], exit_btn_pos[1], exit_btn_pos[2], exit_btn_pos[3], image.COLOR_WHITE, 2)
+
+def is_in_button(x, y, btn_pos):
+    return x > btn_pos[0] and x < btn_pos[0] + btn_pos[2] and y > btn_pos[1] and y < btn_pos[1] + btn_pos[3]
+
+while not app.need_exit():
+    x, y, pressed = ts.read()
+    if is_in_button(x, y, exit_btn_pos):
+        app.set_exit_flag(True)
+    img.draw_circle(x, y, 1, image.Color.from_rgb(255, 255, 255), 2)
+    disp.show(img)
+
+

Handling Different Screen and Image Sizes

+

In the example above, the img matches the screen size. If your img and screen sizes differ (e.g., using img = image.Image(240, 240) on a 640x480 screen), the default behavior of disp.show(img) is image.Fit.FIT_CONTAIN, which scales the image to 480x480 and fills the sides with black. If a button is drawn on the 240x240 image, such as at coordinates (0, 0, 60, 40), the button will also be scaled up. Thus, the coordinates for touch detection should be adjusted to ((640 - 480) / 2, 0, 480/240*60, 480/240*40), which translates to (80, 0, 120, 80).

+

For convenience in scaling images and quickly calculating the positions and sizes of points or rectangles in the scaled image, the image.resize_map_pos function is provided:

+ +
from maix import touchscreen, app, time, display, image
+
+ts = touchscreen.TouchScreen()
+disp = display.Display()
+
+img = image.Image(240, 240)
+img.draw_rect(0, 0, img.width(), img.height(), image.COLOR_WHITE)
+
+# draw exit button
+exit_label = "< Exit"
+size = image.string_size(exit_label)
+exit_btn_pos = [0, 0, 8*2 + size.width(), 12 * 2 + size.height()]
+img.draw_string(8, 12, exit_label, image.COLOR_WHITE)
+img.draw_rect(exit_btn_pos[0], exit_btn_pos[1], exit_btn_pos[2], exit_btn_pos[3],  image.COLOR_WHITE, 2)
+# map the button coordinates on the image to the corresponding coordinates on the screen
+exit_btn_disp_pos = image.resize_map_pos(img.width(), img.height(), disp.width(), disp.height(), image.Fit.FIT_CONTAIN, exit_btn_pos[0], exit_btn_pos[1], exit_btn_pos[2], exit_btn_pos[3])
+
+def is_in_button(x, y, btn_pos):
+    return x > btn_pos[0] and x < btn_pos[0] + btn_pos[2] and y > btn_pos[1] and y < btn_pos[1] + btn_pos[3]
+
+while not app.need_exit():
+    x, y, pressed = ts.read()
+    if is_in_button(x, y, exit_btn_disp_pos):
+        app.set_exit_flag(True)
+    # map the screen coordinates back to the corresponding image coordinates, then draw a point on the image
+    x, y = image.resize_map_pos_reverse(img.width(), img.height(), disp.width(), disp.height(), image.Fit.FIT_CONTAIN, x, y)
+    img.draw_circle(x, y, 1, image.Color.from_rgb(255, 255, 255), 2)
+    disp.show(img, fit=image.Fit.FIT_CONTAIN)
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/en/vision/yolov5.html b/maixpy/doc/en/vision/yolov5.html new file mode 100644 index 00000000..091854e0 --- /dev/null +++ b/maixpy/doc/en/vision/yolov5.html @@ -0,0 +1,512 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy MaixCAM Using YOLOv5 / YOLOv8 / YOLO11 for Object Detection - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy MaixCAM Using YOLOv5 / YOLOv8 / YOLO11 for Object Detection

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Object Detection Concept

+

Object detection refers to detecting the position and category of objects in images or videos, such as identifying apples or airplanes in a picture and marking their locations.

+

Unlike classification, object detection includes positional information. Therefore, the result of object detection is generally a rectangular box that marks the location of the object.

+

Object Detection in MaixPy

+

MaixPy provides YOLOv5, YOLOv8, and YOLO11 models by default, which can be used directly:

+
+

YOLOv8 requires MaixPy >= 4.3.0.
+YOLO11 requires MaixPy >= 4.7.0.

+
+ +
from maix import camera, display, image, nn, app
+
+detector = nn.YOLOv5(model="/root/models/yolov5s.mud", dual_buff=True)
+# detector = nn.YOLOv8(model="/root/models/yolov8n.mud", dual_buff=True)
+# detector = nn.YOLO11(model="/root/models/yolo11n.mud", dual_buff=True)
+
+cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
+dis = display.Display()
+
+while not app.need_exit():
+    img = cam.read()
+    objs = detector.detect(img, conf_th=0.5, iou_th=0.45)
+    for obj in objs:
+        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
+        msg = f'{detector.labels[obj.class_id]}: {obj.score:.2f}'
+        img.draw_string(obj.x, obj.y, msg, color=image.COLOR_RED)
+    dis.show(img)
+
+

Example video:

+
+
+

Here, the camera captures an image, passes it to the detector for detection, and then displays the results (classification name and location) on the screen.

+

You can switch between YOLO11, YOLOv5, and YOLOv8 simply by replacing the corresponding line and modifying the model file path.

+

For the list of 80 objects supported by the model, see the appendix of this document.

+

For more API usage, refer to the documentation for the maix.nn module.

+

dual_buff for Double Buffering Acceleration

+

You may notice that the model initialization uses dual_buff (default value is True). Enabling the dual_buff parameter can improve efficiency and increase the frame rate. For more details and usage considerations, see the dual_buff Introduction.

+

More Input Resolutions

+

The default model input resolution is 320x224, which closely matches the aspect ratio of the default screen. You can also download other model resolutions:

+

YOLOv5: https://maixhub.com/model/zoo/365
+YOLOv8: https://maixhub.com/model/zoo/400
+YOLO11: https://maixhub.com/model/zoo/453

+

Higher resolutions provide more accuracy, but take longer to process. Choose the appropriate resolution based on your application.

+

Which Model to Use: YOLOv5, YOLOv8, or YOLO11?

+

We provide three models: YOLOv5s, YOLOv8n, and YOLO11n. The YOLOv5s model is larger, while YOLOv8n and YOLO11n are slightly faster. According to official data, the accuracy is YOLO11n > YOLOv8n > YOLOv5s. You can test them to decide which works best for your situation.

+

Additionally, you may try YOLOv8s or YOLO11s, which will have a lower frame rate (e.g., yolov8s_320x224 is 10ms slower than yolov8n_320x224), but offer higher accuracy. You can download these models from the model library mentioned above or export them yourself from the official YOLO repository.

+

Different Resolutions for Camera and Model

+

If the resolution of img is different from the model's resolution when using the detector.detect(img) function, the function will automatically call img.resize to adjust the image to the model's input resolution. The default resize method is image.Fit.FIT_CONTAIN, which scales while maintaining the aspect ratio and fills the surrounding areas with black. The detected coordinates will also be automatically mapped back to the original img.

+
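
For example, the minimal sketch below only changes the camera resolution of the earlier example to 640x480 while keeping the 320x224 model; detect() handles the resize internally and the reported boxes are in 640x480 image coordinates:

from maix import camera, display, image, nn, app
+
+detector = nn.YOLOv5(model="/root/models/yolov5s.mud")
+cam = camera.Camera(640, 480)    # camera resolution differs from the 320x224 model input
+dis = display.Display()
+
+while not app.need_exit():
+    img = cam.read()
+    objs = detector.detect(img, conf_th=0.5, iou_th=0.45)   # resized internally, boxes mapped back to 640x480
+    for obj in objs:
+        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
+        img.draw_string(obj.x, obj.y, f'{detector.labels[obj.class_id]}: {obj.score:.2f}', color=image.COLOR_RED)
+    dis.show(img)
+
+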

Training Your Own Object Detection Model on MaixHub

+

If you need to detect specific objects beyond the 80 categories provided, visit MaixHub to learn and train an object detection model. Select "Object Detection Model" when creating a project. Refer to the MaixHub Online Training Documentation.

+

Alternatively, you can find models shared by community members at the MaixHub Model Library.

+

Training Your Own Object Detection Model Offline

+

We strongly recommend starting with MaixHub for online training, as the offline method is much more difficult and is not suitable for beginners. Some knowledge may not be explicitly covered here, so be prepared to do further research.

+

Refer to Training a Custom YOLOv5 Model or Training a Custom YOLOv8/YOLO11 Model Offline.

+

Appendix: 80 Classes

+

The 80 objects in the COCO dataset are:

+ +
person
+bicycle
+car
+motorcycle
+airplane
+bus
+train
+truck
+boat
+traffic light
+fire hydrant
+stop sign
+parking meter
+bench
+bird
+cat
+dog
+horse
+sheep
+cow
+elephant
+bear
+zebra
+giraffe
+backpack
+umbrella
+handbag
+tie
+suitcase
+frisbee
+skis
+snowboard
+sports ball
+kite
+baseball bat
+baseball glove
+skateboard
+surfboard
+tennis racket
+bottle
+wine glass
+cup
+fork
+knife
+spoon
+bowl
+banana
+apple
+sandwich
+orange
+broccoli
+carrot
+hot dog
+pizza
+donut
+cake
+chair
+couch
+potted plant
+bed
+dining table
+toilet
+tv
+laptop
+mouse
+remote
+keyboard
+cell phone
+microwave
+oven
+toaster
+sink
+refrigerator
+book
+clock
+vase
+scissors
+teddy bear
+hair drier
+toothbrush
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/README_no_screen.html b/maixpy/doc/zh/README_no_screen.html new file mode 100644 index 00000000..bdfdef90 --- /dev/null +++ b/maixpy/doc/zh/README_no_screen.html @@ -0,0 +1,502 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 无屏幕版快速开始 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 无屏幕版快速开始

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

关于本页文档

+

正如快速开始所述,开发时强烈推荐购买带屏幕版本,会有更好的开发体验,包括使用内置的 APP,以及使用 MaixHub 应用商店的 APP,以及方便调试(比如常用设置可以直接触摸点击界面完成,可以实时在屏幕看到图像等)。

+

当然,如果你实在没有条件购买带屏幕的,或者你在量产时需要无屏幕的版本,请看本文。

+

获得 MaixCAM 设备

+ +

上手配置

+

准备 TF 镜像卡和插入到设备

+

如果你买的套餐里面有 TF 卡,里面已经有出厂镜像了,如果出厂时 TF 卡没有安装到设备,需要先小心打开外壳(注意里面有排线连接不要扯断了),然后插入 TF 卡。另外因为出厂的固件可能比较老旧,务必按照升级和烧录系统先将系统升级到最新版本,否则可能会遇到某些应用 和 API 无法使用的问题。

+

如果没买 TF 卡,则需要将系统烧录进自备的 TF 卡中,烧录方法请看升级和烧录系统,然后再安装到板子。

+

上电开机

+

使用 Type-C 数据线连接 MaixCAM 设备给设备供电,等待设备开机。

+

首先:保证 USB 线材质量足够好,以及电脑 USB 端口质量够好(供电 >= 5v 500mA,抗干扰能力正常)。
+第一次等待 20 秒左右,然后电脑会出现一个或者两个虚拟网卡设备(可以在电脑的网络管理器看到)。

+

如果没有出现虚拟网卡设备:

+
    +
  • 请确认购买了配套的 TF 卡,如果确认有 TF 卡,并且已经插入到设备,可以尝试更新到最新的系统
  • +
  • 如果你没有购买 TF 卡套餐,你需要按照升级和烧录系统的方法烧录最新的系统到 TF 卡。
  • +
  • 请确认 USB 有没有松动,以及 USB 线材质量,可以换一根质量好点的线尝试。
  • +
  • 请确认 USB 口供电足够,可以换一个 USB 口,或者有条件在其它电脑试试。
  • +
+

准备连接电脑和设备

+

为了后面电脑(PC)能和 设备(MaixCAM)通信,我们要让它们在同一个局域网内,提供了两种方式,我们首先使用方法一:

+
    +
  • 方法一:有线连接, 设备通过 USB 线连接到电脑,设备会虚拟成一个 USB 网卡,这样和电脑就通过 USB 在同一局域网了,遇到问题也可以在 FAQ 中找常见问题。
  • +
+
+方法一(有线连接)在不同电脑系统中的驱动安装方法: +

默认会有两种 USB 虚拟网卡驱动(NCM 和 RNDIS驱动),以满足不同系统的需求:

+
    +
  • Windows: windows 所有系统会自动安装 RNDIS 驱动, 仅 Win11 会自动安装 NCM 驱动,两种驱动有一个能用就行(NCM 速度比 RNDIS 速度快)。
      +
    • 打开任务管理器 -> 性能,可以看到一个虚拟的以太网,并且可以看到 ip,比如 10.131.167.100 是电脑的 ip,设备的 ip 是最后一位改为 1,即 10.131.167.1。如果是 Win11 则会看到两个虚拟网卡,随便选择一个 IP 使用即可。
    • +
    • 另外也可以打开电脑的 设备管理器(搜索栏搜索设备管理器),检查 RNDIS 和 NCM 驱动是否被正确安装,两者有一个能用就行。
    • +
    +
  • +
  • Linux: 无需额外设置,插上 USB 线即可。使用 ifconfig 或者 ip addr 可以看到 usb0 和 usb1 网卡,两个 IP 都可以使用。注意这里看到的 ip 比如 10.131.167.100 是电脑的 ip,设备的 ip 是最后一位改为 1,即 10.131.167.1。
  • +
  • MacOS: 在 系统设置 -> 网络 里面查看到 usb 网卡。注意这里看到的 ip 比如 10.131.167.100 是电脑的 ip,设备的 ip 是最后一位改为 1,即 10.131.167.1。
  • +
+
+
+
    +
  • 方法二:无线连接, 设备使用 WiFi 连接到电脑连接的同一个路由器或者 WiFi 热点下(WiFi 如果出现画面卡顿或者延迟高的问题可以使用有线连接。),连接无线热点方式有两种:
      +
    • 修改 TF 的 boot 分区中的 wifi.ssidwifi.pass 文件,重启即可连接。修改方法:
        +
      • 如果你已经了解 SSH, 可以通过 ssh 连接到设备(如果有线连接可用)修改/boot目录下文件。
      • +
      • 也可以按照前面升级系统的方式进入升级模式后电脑会出现一个 U 盘,然后修改里面的文件即可,注意修改完要先 弹出U盘 再重启。
      • +
      • 也可以直接用 读卡器,电脑会出现一个U盘,修改其中的wifi.ssidwifi.pass文件即可,注意修改完要先 弹出U盘 再重启。
      • +
      +
    • +
    • 如果你有线已经可以使用,按照下一步已经可以使用 MaixVision 运行代码了,可以修改例程 tools/wifi_connect.py 中的 SSID 和 PASSWORD 然后运行即可。
    • +
    +
  • +
+

开发环境准备

+
    +
  • 首先保证上一步电脑和设备已经在同一个局域网中了。
  • +
  • 下载 MaixVision 并安装。
  • +
  • 使用 Type-C 连接设备和电脑,打开 MaixVision,点击左下角的“连接”按钮,会自动搜索设备,稍等一下就能看到设备,点击设备有点的连接按钮以连接设备。
  • +
+

如果没有扫描到设备, 也可以在 FAQ 中找到解决方法。

+

这里有 MaixVision 的使用示例视频:

+

+

联网

+

首次运行需要连接网络,以激活设备安装运行库。
+如果没有路由器可以用手机开一个热点。

+

MaixVision 修改例程 tools/wifi_connect.py 中的 SSID 和 PASSWORD 然后运行即可。其它连接 WiFi 的方法看前面的介绍。

+

升级运行库

+

这一步很重要 !!! 这一步如果不做好,其它应用和功能可能无法运行(比如闪退等)。

+
    +
  • 首先保证上一步连接 WiFi 已经完成,并且获取到 IP 地址能访问公网。
  • +
  • 运行 MaixVision 例程里面的 tools/install_runtime.py 来安装最新的运行库。
  • +
+

如果显示Request failed 或者请求失败,请先检查网络是否已经连接,需要能连接到互联网,如果还不行,请拍照联系客服处理即可。

+

运行例程

+

点击 MaixVision 左侧的示例代码,选择一个例程,点击左下角运行按钮将代码发送到设备上运行。

+

比如:

+
    +
  • hello_maix.py,点击运行按钮,就能看到 MaixVision 终端有来自设备打印的消息,以及右上角出现了图像。
  • +
  • camera_display.py,这个例程会打开摄像头并在屏幕上显示摄像头的画面。
  • +
+ +
from maix import camera, display, app
+
+disp = display.Display()          # 构造一个显示对象,并初始化屏幕
+cam = camera.Camera(640, 480)     # 构造一个摄像头对象,手动设置了分辨率为 640x480, 并初始化摄像头
+while not app.need_exit():        # 一直循环,直到程序退出(可以通过按下设备的功能按键退出或者 MaixVision 点击停止按钮退出)
+    img = cam.read()              # 读取摄像头画面保存到 img 变量,可以通过 print(img) 来打印 img 的详情
+    disp.show(img)                # 将 img 显示到屏幕上
+
+ +

其它例程可以自行尝试。

+
+

如果你使用相机例程遇到了图像显示卡顿,可能是网络不通畅,或者 USB 线质量或者主机 USB 质量太差造成,可以更换连接方式或者更换线缆、主机 USB 口或者电脑等。

+
+

安装应用到设备

+

上面是在设备中运行代码,MaixVision 断开后代码就会停止运行,如果想让代码出现在开机菜单中,可以打包成应用安装到设备上。

+

点击 MaixVision 左下侧的安装应用按钮,填写应用信息,会将应用安装到设备上,然后在设备上就能看到应用了。
+也可以选择打包应用,将你的应用分享到MaixHub 应用商店

+
+

默认例程没有显式编写退出功能,进入应用后按下设备的功能按键即可退出应用。(对于 MaixCAM 是 user 键)

+
+

如果想让程序开机自启动,可以修改并运行例程tools/set_autostart.py即可。

+

下一步

+

看到这里,如果你觉得不错,请务必来 github 给 MaixPy 开源项目点一个 star(需要先登录 github), 你的 star 和认同是我们不断维护和添加新功能的动力!

+

到这里你已经体验了一遍使用和开发流程了,接下来可以学习 MaixPy 语法和功能相关的内容,请按照左边的目录进行学习,如果遇到 API 使用问题,可以在API 文档中查找。

+

学习前最好带着自己学习的目的学,比如做一个有趣的小项目,这样学习效果会更好,项目和经验都可以分享到MaixHub 分享广场,会获得现金奖励哦!

+

常见问题 FAQ

+

遇到问题可以优先在 FAQ 里面找,找不到再在下面的论坛或者群询问,或者在 MaixPy issue 提交源码问题。

+

分享交流

+
    +
  • MaixHub 项目和经验分享 :分享你的项目和经验,获得现金打赏,获得官方打赏的基本要求:
      +
    • 可复现型:较为完整的项目制作复现过程。
    • +
    • 炫耀型:无详细的项目复现过程,但是项目展示效果吸引人。
    • +
    • Bug 解决经验型:解决了某个难题的过程和具体解决方法分享。
    • +
    +
  • +
  • MaixPy 官方论坛(提问和交流)
  • +
  • QQ 群: (建议在 QQ 群提问前先发个帖,方便群友快速了解你需要了什么问题,复现过程是怎样的)
      +
    • MaixPy (v4) AI 视觉交流大群: 862340358
    • +
    +
  • +
  • Telegram: MaixPy
  • +
  • MaixPy 源码问题: MaixPy issue
  • +
  • 商业合作或批量购买请联系 support@sipeed.com 。
  • +
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/ai_model_converter/maixcam.html b/maixpy/doc/zh/ai_model_converter/maixcam.html new file mode 100644 index 00000000..ffac741b --- /dev/null +++ b/maixpy/doc/zh/ai_model_converter/maixcam.html @@ -0,0 +1,559 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 将 ONNX 模型转换为 MaixCAM MaixPy 可以使用的模型(MUD) - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

将 ONNX 模型转换为 MaixCAM MaixPy 可以使用的模型(MUD)

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

简介

+

电脑上训练的模型不能直接给 MaixCAM 使用,因为 MaixCAM 的硬件性能有限,一般我们需要将模型进行INT8量化以减少计算量,并且转换为 MaixCAM 支持的模型格式。

+

本文介绍如何将 ONNX 模型转换为 MaixCAM 能使用的模型(MUD模型)。

+

MaixCAM 支持的模型文件格式

+

MUD(模型统一描述文件, model universal description file)是 MaixPy 支持的一种模型描述文件,用来统一不同平台的模型文件,方便 MaixPy 代码跨平台,本身是一个 ini格式的文本文件,可以使用文本编辑器编辑。
+一般 MUD 文件会伴随一个或者多个实际的模型文件,比如对于 MaixCAM, 实际的模型文件是.cvimodel格式, MUD 文件则是对它做了一些描述说明。

+

这里以 YOLOv8 模型文件举例,一共两个文件yolov8n.mudyolov8n.cvimodel,前者内容:

+ +
[basic]
+type = cvimodel
+model = yolov8n.cvimodel
+
+[extra]
+model_type = yolov8
+input_type = rgb
+mean = 0, 0, 0
+scale = 0.00392156862745098, 0.00392156862745098, 0.00392156862745098
+labels = person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair drier, toothbrush
+
+

可以看到, 指定了模型类别为cvimodel, 模型路径为相对mud文件的路径下的yolov8n.cvimodel文件;
+以及一些需要用到的信息,比如预处理meanscale,这里需要和训练的时候对模型输入的数据的预处理方法一致,labels则是检测对象的 80 种分类。

+

实际用这个模型的时候将两个文件放在同一个目录下即可。

+

准备 ONNX 模型

+

准备好你的 onnx 模型, 然后在https://netron.app/ 查看你的模型,确保你的模型使用的算子在转换工具的支持列表中,转换工具的支持列表可以在算能 TPU SDKCVITEK_TPU_SDK开发指南.pdf 中看到列表。

+

找出合适的量化输出节点

+

一般模型都有后处理节点,这部分是 CPU 进行运算的,我们将它们剥离出来,它们会影响到量化效果,可能会导致量化失败。

+

这里以YOLOv5 举例

+

+

可以看到这里有三个conv,后面的计算均由 CPU 进行,我们量化时就采取这几个conv的输出作为模型的最后输出,在这里输出名分别叫/model.24/m.0/Conv_output_0,/model.24/m.1/Conv_output_0,/model.24/m.2/Conv_output_0

+

安装模型转换环境

+

模型转换使用算能的https://github.com/sophgo/tpu-mlir,要安装它我们直接在 docker 环境中安装,防止我们电脑的环境不匹配,如果你没用过 docker,可以简单理解成它类似虚拟机。

+

安装 docker

+

参考docker 安装官方文档安装即可。

+

比如:

+ +
# 安装docker依赖的基础软件
+sudo apt-get update
+sudo apt-get install apt-transport-https ca-certificates curl gnupg-agent software-properties-common
+# 添加官方来源
+curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
+sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
+# 安装 docker
+sudo apt-get update
+sudo apt-get install docker-ce docker-ce-cli containerd.io
+
+

拉取 docker 镜像

+ +
docker pull sophgo/tpuc_dev:latest
+
+

如果docker拉取失败,可以通过以下方式进行下载:

+ +
wget https://sophon-file.sophon.cn/sophon-prod-s3/drive/24/06/14/12/sophgo-tpuc_dev-v3.2_191a433358ad.tar.gz
+docker load -i sophgo-tpuc_dev-v3.2_191a433358ad.tar.gz
+
+

这个方法参考tpu-mlir官方docker环境配置

+

此外你也可以设置国内的镜像,可自行搜索或者参考docker 设置代理,以及国内加速镜像设置

+

运行容器

+ +
docker run --privileged --name tpu-env -v /home/$USER/data:/home/$USER/data -it sophgo/tpuc_dev
+
+

这就起了一个容器,名叫tpu-env,并且把本机的~/data目录挂载到了容器的~/data,这样就实现了文件共享,并且和宿主机路径一致。

+

下次启动容器用docker start tpu-env && docker attach tpu-env即可。

+

安装 tpu-mlir

+

先到github下载 whl 文件,放到~/data目录下。
+在容器中执行命令安装:

+ +
pip install tpu_mlir*.whl # 这里就是下载文件的名字
+
+

执行model_transform.py 会有打印帮助信息就算是安装成功了。

+

编写转换脚本

+

转换模型主要就两个命令,model_transform.pymodel_deploy.py,主要麻烦的是参数,所以我们写一个脚本convert_yolov5_to_cvimodel.sh存下来方便修改。

+ +
#!/bin/bash
+
+set -e
+
+net_name=yolov5s
+input_w=640
+input_h=640
+
+# mean: 0, 0, 0
+# std: 255, 255, 255
+
+# mean
+# 1/std
+
+# mean: 0, 0, 0
+# scale: 0.00392156862745098, 0.00392156862745098, 0.00392156862745098
+
+mkdir -p workspace
+cd workspace
+
+# convert to mlir
+model_transform.py \
+--model_name ${net_name} \
+--model_def ../${net_name}.onnx \
+--input_shapes [[1,3,${input_h},${input_w}]] \
+--mean "0,0,0" \
+--scale "0.00392156862745098,0.00392156862745098,0.00392156862745098" \
+--keep_aspect_ratio \
+--pixel_format rgb \
+--channel_format nchw \
+--output_names "/model.24/m.0/Conv_output_0,/model.24/m.1/Conv_output_0,/model.24/m.2/Conv_output_0" \
+--test_input ../dog.jpg \
+--test_result ${net_name}_top_outputs.npz \
+--tolerance 0.99,0.99 \
+--mlir ${net_name}.mlir
+
+# export bf16 model
+#   not use --quant_input, use float32 for easy coding
+model_deploy.py \
+--mlir ${net_name}.mlir \
+--quantize BF16 \
+--processor cv181x \
+--test_input ${net_name}_in_f32.npz \
+--test_reference ${net_name}_top_outputs.npz \
+--model ${net_name}_bf16.cvimodel
+
+echo "calibrate for int8 model"
+# export int8 model
+run_calibration.py ${net_name}.mlir \
+--dataset ../images \
+--input_num 200 \
+-o ${net_name}_cali_table
+
+echo "convert to int8 model"
+# export int8 model
+#    add --quant_input, use int8 for faster processing in maix.nn.NN.forward_image
+model_deploy.py \
+--mlir ${net_name}.mlir \
+--quantize INT8 \
+--quant_input \
+--calibration_table ${net_name}_cali_table \
+--processor cv181x \
+--test_input ${net_name}_in_f32.npz \
+--test_reference ${net_name}_top_outputs.npz \
+--tolerance 0.9,0.6 \
+--model ${net_name}_int8.cvimodel
+
+

可以看到,这里有几个比较重要的参数:

+
    +
  • output_names 就是我们前面说到的输出节点的输出名。
  • +
  • mean, scale 就是训练时使用的预处理方法,比如 YOLOv5 官方代码的预处理是把图像 RGB 3 个通道分别减去 mean 再除以 std,并且默认 mean 为 0,std 为 255,即将图像的值归一化,这里的 scale 就是 1/std。你的模型需要根据实际的预处理方法修改。
  • +

+
    +
  • test_input 就是转换时用来测试的图像,这里是../dog.jpg,所以实际模型转换时我们需要在此脚本所在同目录放一张dog.jpg的图,你的模型根据你的实际情况替换图像。
  • +
  • tolerance 就是量化前后允许的误差,如果转换模型时报错提示值小于设置的这个值,说明转出来的模型可能相比 onnx 模型误差较大,如果你能够容忍,可以适当调小这个阈值让模型转换通过,不过大多数时候都是因为模型结构导致的,需要优化模型,以及仔细看后处理,把能去除的后处理去除了。
  • +
  • quantize 即量化的数据类型,在 MaixCAM 上我们一般用 INT8 模型,这里我们虽然也顺便转换了一个 BF16 模型,BF16 模型的好处时精度高,不过运行速率比较慢,能转成 INT8 就推荐先用 INT8,实在不能转换的或者精度要求高速度要求不高的再考虑 BF16。
  • +
  • dataset 表示用来量化的数据集,也是放在转换脚本同目录下,比如这里是images文件夹,里面放数据即可,对于 YOLOv5 来说就是图片,从 coco 数据集中复制一部分典型场景的图片过来即可。 用--input_num 可以指定实际使用图片的数量(小于等于 images 目录下实际的图片)。
  • +
+

执行转换脚本

+

直接执行 chmod +x convert_yolov5_to_cvimodel.sh && ./convert_yolov5_to_cvimodel.sh 等待转换完成。

+

如果出错了,请仔细看上一步的说明,是不是参数有问题,或者输出层选择得不合理等。

+

然后就能在workspace文件夹下看到有**_int8.cvimodel 文件了。

+

编写mud文件

+

根据你的模型情况修改mud文件,对于 YOLOv5 就如下,修改成你训练的labels就好了。

+ +
[basic]
+type = cvimodel
+model = yolov5s.cvimodel
+
+[extra]
+model_type = yolov5
+input_type = rgb
+mean = 0, 0, 0
+scale = 0.00392156862745098, 0.00392156862745098, 0.00392156862745098
+anchors = 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326
+labels = person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair drier, toothbrush
+
+

这里basic部分指定了模型文件类别和模型文件路径,是必要的参数,有了这个参数就能用MaixPy或者MaixCDK中的maix.nn.NN类来加载并运行模型了。

+

extra则根据不同模型的需求设计不同参数。
+比如这里对YOLOv5设计了这些参数,主要是 预处理、后处理、标签等参数。
+对于 MaixPy 已经支持了的模型可以直接下载其模型复制修改。
+也可以看具体的代码,比如YOLOv5 的源码,可以看到源码使用了哪些参数。

+

比如你用YOLOv5训练了检测数字0~9的模型,那么需要将labels改成0,1,2,3,4,5,6,7,8,9,其它参数如果你没改训练代码保持即可。

+

如果你需要移植 MaixPy 没有支持的模型,则可以根据模型的预处理和后处理情况定义 extra, 然后编写对应的解码类。如果你不想用C++修改 MaixPy 源码,你也可以用MaixPy 的maix.nn.NN类加载模型,然后用 forward 或者 forward_image 方法获得原始输出,在 Python 层面编写后处理也可以,只是运行效率比较低,不太推荐。

+
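下面是用 maix.nn.NN 获取原始输出的一个简化示意(模型路径为假设,forward_image 的具体参数和返回类型请以 API 文档为准,后处理需要根据你的模型自行实现):

from maix import nn, camera

model = nn.NN("/root/models/my_model.mud")   # 假设的模型路径,按实际情况修改
cam = camera.Camera(320, 224)

img = cam.read()
outputs = model.forward_image(img)   # 得到模型各输出层的原始张量
print(outputs)
# 在这里用 Python 对 outputs 做解码(后处理),注意纯 Python 后处理效率较低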

编写后处理代码

+

如上一步所说,如果是按照已经支持的模型的mud文件修改好,那直接调用MaixPy或者MaixCDK对应的代码加载即可。
+如果支持新模型,设计好 mud 文件后,你需要实际编写预处理和后处理,有两种方法:

+
    +
  • 一:MaixPy 用 maix.nn.NN加载模型,然后forward或者forward_image函数运行模型,获得输出,然后用 Python 函数编写后处理得到最终结果。
  • +
  • 二:在MaixCDK中,可以参考YOLOv5 的源码, 新增一个hpp文件,增加一个处理你的模型的类,并且修改所有函数和类的@maixpy注释,编写好了编译MaixPy项目,即可在MaixPy中调用新增的类来运行模型了。
  • +
+

支持了新模型后还可以将源码提交(Pull Request)到主MaixPy仓库中,成为MaixPy项目的一员,为社区做贡献,也可以到 MaixHub 分享 分享你新支持的模型,根据质量可以获得最少 30元 最高 2000元 的打赏!

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/audio/ai_classify.html b/maixpy/doc/zh/audio/ai_classify.html new file mode 100644 index 00000000..24259829 --- /dev/null +++ b/maixpy/doc/zh/audio/ai_classify.html @@ -0,0 +1,375 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy AI 声音分类 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy AI 声音分类

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

TODO: 待完成,如果你急需,可以先自行移植模型,或者先将声音用 FFT 处理成瀑布图,再以图片的方式进行训练 AI 分类识别。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/audio/digit.html b/maixpy/doc/zh/audio/digit.html new file mode 100644 index 00000000..b05ba0b6 --- /dev/null +++ b/maixpy/doc/zh/audio/digit.html @@ -0,0 +1,512 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 连续中文数字识别 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 连续中文数字识别

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期 | 版本 | 作者 | 更新内容
2024-10-08 | 1.0.0 | 916BGAI | 初版文档
+
+
+ +
+
+ +

简介

+

MaixCAM 移植了 Maix-Speech 离线语音库,实现了连续中文数字识别、关键词识别以及大词汇量语音识别功能。支持 PCM 和 WAV 格式的音频识别,且可通过板载麦克风进行输入识别。

+

Maix-Speech

+

Maix-Speech 是专为嵌入式环境设计的离线语音库,其针对语音识别算法进行了深度优化,在内存占用上达到了数量级上的领先,并且保持了优良的WER。如果想了解原理可查看该开源项目。

+

连续中文数字识别

+ +
from maix import app, nn
+
+speech = nn.Speech("/root/models/am_3332_192_int8.mud")
+speech.init(nn.SpeechDevice.DEVICE_MIC, "hw:0,0")
+
+def callback(data: str, len: int):
+    print(data)
+
+speech.digit(640, callback)
+
+while not app.need_exit():
+    frames = speech.run(1)
+    if frames < 1:
+        print("run out\n")
+        speech.deinit()
+        break
+
+

使用方法

+
    +
  1. 导入 app 和 nn 模块
  2. +
+ +
from maix import app, nn
+
+
    +
  1. 加载声学模型
  2. +
+ +
speech = nn.Speech("/root/models/am_3332_192_int8.mud")
+
+
    +
  • 也可以加载 am_7332 声学模型,模型越大精度越高但是消耗的资源也越大
  • +
+
    +
  1. 选择对应的音频设备
  2. +
+ +
speech.init(nn.SpeechDevice.DEVICE_MIC, "hw:0,0")
+
+
    +
  • 这里使用的是板载的麦克风,也可以选择 WAV 和 PCM 音频作为输入设备
  • +
+ +
speech.init(nn.SpeechDevice.DEVICE_WAV, "path/audio.wav")   # 使用 WAV 音频输入
+
+ +
speech.init(nn.SpeechDevice.DEVICE_PCM, "path/audio.pcm")   # 使用 PCM 音频输入
+
+
    +
  • 注意 WAV 需要是 16KHz 采样,S16_LE 存储格式,可以使用 arecord 工具转换
  • +
+ +
arecord -d 5 -r 16000 -c 1 -f S16_LE audio.wav
+
+
    +
  • PCM/WAV 识别时,如果想要重新设置数据源,例如进行下一个WAV文件的识别可以使用 speech.devive 方法,内部会自动进行缓存清除操作:
  • +
+ +
speech.devive(nn.SpeechDevice.DEVICE_WAV, "path/next.wav")
+
+
    +
  1. 设置解码器
  2. +
+ +
def callback(data: str, len: int):
+    print(data)
+
+speech.digit(640, callback)
+
+
    +
  • 用户可以注册若干个解码器(也可以不注册),解码器的作用是解码声学模型的结果,并执行对应的用户回调。这里注册了一个 digit 解码器用于输出最近4s内的中文数字识别结果。返回的识别结果为字符串形式,支持 0123456789 .(点) S(十) B(百) Q(千) W(万)。对于其他解码器的使用可以查看语音实时识别和关键词识别部分

    +
  • +
  • 设置 digit 解码器时需要设置 blank 值,超过该值(ms)则在输出结果里插入一个 _ 表示空闲静音

    +
  • +
  • 在注册完解码器后需要使用 speech.deinit() 方法清除初始化

    +
  • +
+
    +
  1. 识别
  2. +
+ +
while not app.need_exit():
+    frames = speech.run(1)
+    if frames < 1:
+        print("run out\n")
+        speech.deinit()
+        break
+
+
    +
  • 使用 speech.run 方法运行语音识别,传入的参数为每次运行的帧数,返回实际运行的帧数。用户可以选择每次运行 1 帧后进行其他处理,或在一个线程中持续运行,使用外部线程进行停止(本列表后面给出一个简单示意)。
  • +
+
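下面是在独立线程中持续运行识别的一个简单示意(沿用上面创建好的 speech 对象,threading 为 Python 标准库,仅供参考):

import threading
from maix import app

def run_speech():
    # 持续运行识别,程序退出(app.need_exit() 为 True)或识别数据耗尽时结束
    while not app.need_exit():
        frames = speech.run(1)
        if frames < 1:
            print("run out")
            break
    speech.deinit()

t = threading.Thread(target=run_speech, daemon=True)
t.start()
# 主线程可以在这里做其它事情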

识别结果

+

如果上述程序运行正常,对板载麦克风说话,会得到连续中文数字识别结果,如:

+ +
_0123456789
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/audio/keyword.html b/maixpy/doc/zh/audio/keyword.html new file mode 100644 index 00000000..7c22ce8b --- /dev/null +++ b/maixpy/doc/zh/audio/keyword.html @@ -0,0 +1,541 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 关键词识别 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 关键词识别

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期 | 版本 | 作者 | 更新内容
2024-10-08 | 1.0.0 | 916BGAI | 初版文档
+
+
+ +
+
+ +

简介

+

MaixCAM 移植了 Maix-Speech 离线语音库,实现了连续中文数字识别、关键词识别以及大词汇量语音识别功能。支持 PCM 和 WAV 格式的音频识别,且可通过板载麦克风进行输入识别。

+

Maix-Speech

+

Maix-Speech 是专为嵌入式环境设计的离线语音库,其针对语音识别算法进行了深度优化,在内存占用上达到了数量级上的领先,并且保持了优良的WER。如果想了解原理可查看该开源项目。

+

关键词识别

+ +
from maix import app, nn
+
+speech = nn.Speech("/root/models/am_3332_192_int8.mud")
+speech.init(nn.SpeechDevice.DEVICE_MIC, "hw:0,0")
+
+kw_tbl = ['xiao3 ai4 tong2 xue2',
+          'ni3 hao3',
+          'tian1 qi4 zen3 me yang4']
+kw_gate = [0.1, 0.1, 0.1]
+
+def callback(data:list[float], len: int):
+    for i in range(len):
+        print(f"\tkw{i}: {data[i]:.3f};", end=' ')
+    print("\n")
+
+speech.kws(kw_tbl, kw_gate, callback, True)
+
+while not app.need_exit():
+    frames = speech.run(1)
+    if frames < 1:
+        print("run out\n")
+        speech.deinit()
+        break
+
+

使用方法

+
    +
  1. 导入 app 和 nn 模块
  2. +
+ +
from maix import app, nn
+
+
    +
  1. 加载声学模型
  2. +
+ +
speech = nn.Speech("/root/models/am_3332_192_int8.mud")
+
+
    +
  • 也可以加载 am_7332 声学模型,模型越大精度越高但是消耗的资源也越大
  • +
+
    +
  1. 选择对应的音频设备
  2. +
+ +
speech.init(nn.SpeechDevice.DEVICE_MIC, "hw:0,0")
+
+
    +
  • 这里使用的是板载的麦克风,也可以选择 WAV 和 PCM 音频作为输入设备
  • +
+ +
speech.init(nn.SpeechDevice.DEVICE_WAV, "path/audio.wav")   # 使用 WAV 音频输入
+
+ +
speech.init(nn.SpeechDevice.DEVICE_PCM, "path/audio.pcm")   # 使用 PCM 音频输入
+
+
    +
  • 注意 WAV 需要是 16KHz 采样,S16_LE 存储格式,可以使用 arecord 工具转换
  • +
+ +
arecord -d 5 -r 16000 -c 1 -f S16_LE audio.wav
+
+
    +
  • PCM/WAV 识别时,如果想要重新设置数据源,例如进行下一个WAV文件的识别可以使用 speech.devive 方法,内部会自动进行缓存清除操作:
  • +
+ +
speech.devive(nn.SpeechDevice.DEVICE_WAV, "path/next.wav")
+
+
    +
  1. 设置解码器
  2. +
+ +
kw_tbl = ['xiao3 ai4 tong2 xue2',
+          'ni3 hao3',
+          'tian1 qi4 zen3 me yang4']
+kw_gate = [0.1, 0.1, 0.1]
+
+def callback(data:list[float], len: int):
+    for i in range(len):
+        print(f"\tkw{i}: {data[i]:.3f};", end=' ')
+    print("\n")
+
+speech.kws(kw_tbl, kw_gate, callback, True)
+
+
    +
  • 用户可以注册若干个解码器(也可以不注册),解码器的作用是解码声学模型的结果,并执行对应的用户回调。这里注册了一个 kws 解码器用于输出最近一帧所有注册的关键词的概率列表,用户可以观察概率值,自行设定阈值进行唤醒(本节后面给出一个简单示意)。对于其他解码器的使用可以查看语音实时识别和连续中文数字识别部分

    +
  • +
  • 设置 kws 解码器时需要设置 关键词列表(以拼音填写,音节之间用空格分隔)、关键词概率门限表(按关键词顺序排列即可)、是否进行 自动近音处理(设置为 True 则会自动将不同声调的拼音作为近音词来合计概率),最后还要设置一个回调函数用于处理解码出的数据。

    +
  • +
  • 用户还可以使用 speech.similar 方法手工注册近音词,每个拼音可以注册最多 10 个近音词。(注意,使用该接口注册近音词会覆盖使能 自动近音处理 里自动生成的近音表)

    +
  • +
+ +
similar_char = ['zhen3', 'zheng3']
+speech.similar('zen3', similar_char)
+
+
    +
  • 在注册完解码器后需要使用 speech.deinit() 方法清除初始化
  • +
+
    +
  1. 识别
  2. +
+ +
while not app.need_exit():
+    frames = speech.run(1)
+    if frames < 1:
+        print("run out\n")
+        speech.deinit()
+        break
+
+
    +
  • 使用 speech.run 方法运行语音识别,传入的参数为每次运行的帧数,返回实际运行的帧数。用户可以选择每次运行1帧后进行其他处理,或在一个线程中持续运行,使用外部线程进行停止。
  • +
+
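如果想在程序里根据概率自动判断是否唤醒,可以在回调里和上面定义的 kw_gate 比较,下面是一个简单示意(kw_tbl、kw_gate、speech 沿用上面的定义,仅供参考):

def callback(data: list[float], length: int):
    # 逐个关键词对比概率和门限,超过门限即认为被该关键词唤醒
    for i in range(length):
        if data[i] > kw_gate[i]:
            print("wake up by:", kw_tbl[i], "prob:", data[i])

speech.kws(kw_tbl, kw_gate, callback, True)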

识别结果

+

如果上述程序运行正常,对板载麦克风说话,会得到关键词识别结果,如:

+ +
kws log 2.048s, len 24
+decoder_kws_init get 3 kws
+  00, xiao3 ai4 tong2 xue2
+  01, ni3 hao3
+  02, tian1 qi4 zen3 me yang4
+find shared memory(491520),  saved:491520
+    kw0: 0.959; 	kw1: 0.000; 	kw2: 0.000;     # 小爱同学
+    kw0: 0.000; 	kw1: 0.930; 	kw2: 0.000;     # 你好
+    kw0: 0.000; 	kw1: 0.000; 	kw2: 0.961;     # 天气怎么样
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/audio/play.html b/maixpy/doc/zh/audio/play.html new file mode 100644 index 00000000..4489171e --- /dev/null +++ b/maixpy/doc/zh/audio/play.html @@ -0,0 +1,524 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 播放音频 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 播放音频

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期 | 版本 | 作者 | 更新内容
2024-05-20 | 1.0.0 | lxowalle | 初版文档
+
+
+ +
+
+ +

简介

+

本文档提供播放音频的使用方法

+

使用方法

+

硬件操作

+

image-20240520134637905

+

MaixCAM没有内置喇叭,因此需要自行焊接一个功率在1W内的喇叭。喇叭焊接的引脚见上图的Speaker对应的VOP和VON脚。

+

注:如果MaixCAM在这两个脚上连接了铜柱,则可以直接焊接在铜柱上,为了美观也可以焊接在板子的另一面。

+

编写代码

+

播放一个WAV文件

+ +
from maix import audio, time, app
+
+p = audio.Player("/root/output.wav")
+
+p.play()
+
+while not app.need_exit():
+    time.sleep_ms(10)
+print("play finish!")
+
+

步骤:

+
    +
  1. 导入audio、time和app模块

    + +
    from maix import audio, time, app
    +
    +
  2. +
  3. 初始化播放器

    + +
    p = audio.Player("/root/output.wav")
    +
    +
  4. +
+
    +
  • 默认的采样率是48k,采样格式为小端格式-有符号16位,采样通道为1。你也可以像这样自定义参数p = audio.Player(sample_rate=48000, format=audio.Format.FMT_S16_LE, channel = 1)。目前只测试过采样率48000,FMT_S16_LE格式,和采样通道数为1。
  • +
  • 如果是.wav文件,则会自动获取采样率、采样格式和采样通道。
  • +
+
    +
  1. 播放音频

    + +
    p.play()
    +
    +
  2. +
+
    +
  • 该方法会阻塞直到写入所有音频数据,但不会等到实际播放完所有音频数据才返回。如果调用play()后立即退出了程序,则部分待播放的音频数据可能会丢失。
  • +
+
    +
  1. 完成
  2. +
+

PCM数据播放

+ +
from maix import audio, time, app
+
+p = audio.Player()
+
+with open('/root/output.pcm', 'rb') as f:
+    ctx = f.read()
+
+p.play(bytes(ctx))
+
+while not app.need_exit():
+    time.sleep_ms(10)
+
+print("play finish!")
+
+

步骤:

+
    +
  1. 导入audio、time和app模块

    + +
    from maix import audio, time, app
    +
    +
  2. +
  3. 初始化播放器

    + +
    p = audio.Player()
    +
    +
  4. +
+
    +
  • 注意默认的采样率是48k,采样格式为小端格式-有符号16位,采样通道为1。你也可以像这样自定义参数p = audio.Player(sample_rate=48000, format=audio.Format.FMT_S16_LE, channel = 1)。目前只测试过采样率48000,FMT_S16_LE格式,和采样通道数为1
  • +
+
    +
  1. 打开并播放一个PCM文件

    + +
      with open('/root/output.pcm', 'rb') as f:
    +      ctx = f.read()
    +
    +  p.play(bytes(ctx))
    +
    +  while not app.need_exit():
    +    time.sleep_ms(10)
    +
    +
  2. +
+
    +
  • with open('xxx','rb') as f:打开文件xxx, 并获取文件对象f
  • +
  • ctx = f.read()将读取文件的内容到ctx
  • +
  • p.play(bytes(ctx))播放音频,p是已打开的播放器对象, ctx是转换为bytes类型的PCM数据
  • +
  • time.sleep_ms(10)这里有一个循环来等待播放完成,因为播放操作是异步执行的,如果提前退出了程序,那么可能导致音频不会完全播放。
  • +
+
    +
  1. 完成
  2. +
+

其他

+

PlayerRecorder模块有些bug待解决,请保证它们在其他模块(Camera模块,Display模块等)之前创建。例如:

+ +
# 先创建Player和Recorder
+p = audio.Player()
+r = audio.Recorder()
+
+# 再创建Camera
+c = camera.Camera()						
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/audio/recognize.html b/maixpy/doc/zh/audio/recognize.html new file mode 100644 index 00000000..84c25ef9 --- /dev/null +++ b/maixpy/doc/zh/audio/recognize.html @@ -0,0 +1,521 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 语音实时识别 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 语音实时识别

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期 | 版本 | 作者 | 更新内容
2024-10-08 | 1.0.0 | 916BGAI | 初版文档
+
+
+ +
+
+ +

简介

+

MaixCAM 移植了 Maix-Speech 离线语音库,实现了连续中文数字识别、关键词识别以及大词汇量语音识别功能。支持 PCM 和 WAV 格式的音频识别,且可通过板载麦克风进行输入识别。

+

Maix-Speech

+

Maix-Speech 是专为嵌入式环境设计的离线语音库,其针对语音识别算法进行了深度优化,在内存占用上达到了数量级上的领先,并且保持了优良的WER。如果想了解原理可查看该开源项目。

+

连续大词汇量语音识别

+ +
from maix import app, nn
+
+speech = nn.Speech("/root/models/am_3332_192_int8.mud")
+speech.init(nn.SpeechDevice.DEVICE_MIC, "hw:0,0")
+
+def callback(data: tuple[str, str], len: int):
+    print(data)
+
+lmS_path = "/root/models/lmS/"
+
+speech.lvcsr(lmS_path + "lg_6m.sfst", lmS_path + "lg_6m.sym", \
+             lmS_path + "phones.bin", lmS_path + "words_utf.bin", \
+             callback)
+
+while not app.need_exit():
+    frames = speech.run(1)
+    if frames < 1:
+        print("run out\n")
+        speech.deinit()
+        break
+
+

使用方法

+
    +
  1. 导入 app 和 nn 模块
  2. +
+ +
from maix import app, nn
+
+
    +
  1. 加载声学模型
  2. +
+ +
speech = nn.Speech("/root/models/am_3332_192_int8.mud")
+
+
    +
  • 也可以加载 am_7332 声学模型,模型越大精度越高但是消耗的资源也越大
  • +
+
    +
  1. 选择对应的音频设备
  2. +
+ +
speech.init(nn.SpeechDevice.DEVICE_MIC, "hw:0,0")
+
+
    +
  • 这里使用的是板载的麦克风,也可以选择 WAV 和 PCM 音频作为输入设备
  • +
+ +
speech.init(nn.SpeechDevice.DEVICE_WAV, "path/audio.wav")   # 使用 WAV 音频输入
+
+ +
speech.init(nn.SpeechDevice.DEVICE_PCM, "path/audio.pcm")   # 使用 PCM 音频输入
+
+
    +
  • 注意 WAV 需要是 16KHz 采样,S16_LE 存储格式,可以使用 arecord 工具转换
  • +
+ +
arecord -d 5 -r 16000 -c 1 -f S16_LE audio.wav
+
+
    +
  • PCM/WAV 识别时,如果想要重新设置数据源,例如进行下一个WAV文件的识别可以使用 speech.devive 方法,内部会自动进行缓存清除操作:
  • +
+ +
speech.devive(nn.SpeechDevice.DEVICE_WAV, "path/next.wav")
+
+
    +
  1. 设置解码器
  2. +
+ +
def callback(data: tuple[str, str], len: int):
+    print(data)
+
+lmS_path = "/root/models/lmS/"
+
+speech.lvcsr(lmS_path + "lg_6m.sfst", lmS_path + "lg_6m.sym", \
+             lmS_path + "phones.bin", lmS_path + "words_utf.bin", \
+             callback)
+
+
    +
  • 用户可以注册若干个解码器(也可以不注册),解码器的作用是解码声学模型的结果,并执行对应的用户回调。这里注册了一个 lvcsr 解码器用于输出连续语音识别结果(小于1024个汉字结果)。对于其他解码器的使用可以查看连续中文数字识别和关键词识别部分

    +
  • +
  • 设置 lvcsr 解码器时需要设置 sfst 文件路径,sym 文件路径(输出符号表),phones.bin 的路径(拼音表),和 words.bin 的路径(词典表)。最后还要设置一个回调函数用于处理解码出的数据。

    +
  • +
  • 在注册完解码器后需要使用 speech.deinit() 方法清除初始化

    +
  • +
+
    +
  1. 识别
  2. +
+ +
while not app.need_exit():
+    frames = speech.run(1)
+    if frames < 1:
+        print("run out\n")
+        speech.deinit()
+        break
+
+
    +
  • 使用 speech.run 方法运行语音识别,传入的参数为每次运行的帧数,返回实际运行的帧数。用户可以选择每次运行1帧后进行其他处理,或在一个线程中持续运行,使用外部线程进行停止。
  • +
+

识别结果

+

如果上述程序运行正常,对板载麦克风说话,会得到实时语言识别结果,如:

+ +
### SIL to clear decoder!
+('今天天气 怎么样 ', 'jin1 tian1 tian1 qi4 zen3 me yang4 ')
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/audio/record.html b/maixpy/doc/zh/audio/record.html new file mode 100644 index 00000000..96aab2f9 --- /dev/null +++ b/maixpy/doc/zh/audio/record.html @@ -0,0 +1,502 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 录音 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 录音

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期 | 版本 | 作者 | 更新内容
2024-05-20 | 1.0.0 | lxowalle | 初版文档
+
+
+ +
+
+ +

简介

+

本文档提供录音的使用方法,支持录入 PCM 和 WAV 格式的音频。

+

MaixCAM板载了麦克风,所以你可以直接使用录音功能。

+

使用方法

+

获取PCM数据

+

当构造Recorder对象时不传入path, 则只会录入音频后不会保存到文件中,当然你可以手动保存到文件。

+ +
from maix import audio, time, app
+
+r = audio.Recorder()
+r.volume(12)
+print("sample_rate:{} format:{} channel:{}".format(r.sample_rate(), r.format(), r.channel()))
+
+while not app.need_exit():
+    data = r.record()
+    print("data size", len(data))
+
+    time.sleep_ms(10)
+
+print("record finish!")
+
+

步骤:

+
    +
  1. 导入audio、time和app模块

    + +
    from maix import audio, time, app
    +
    +
  2. +
  3. 初始化录制器

    + +
    r = audio.Recorder()
    +r.volume(12)
    +
    +
      +
    • 注意默认的采样率是48k,采样格式为小端格式-有符号16位,采样通道为1。你也可以像这样自定义参数p = audio.Recorder(sample_rate=48000, format=audio.Format.FMT_S16_LE, channel = 1)。目前只测试过采样率48000,FMT_S16_LE格式,和采样通道数为1

      +
    • +
    • r.volume(12)用来设置音量,音量范围为[0,100]

      +
    • +
    +
  4. +
  5. 开始录制

    + +
    data = r.record()
    +
    +
      +
    • dataPCM格式的bytes类型数据,保存了当前录入的音频。PCM格式在初始化Recorder对象时设置,见步骤2。注意如果录制太快,音频缓冲区没有数据, 则有可能返回一个空的bytes数据。
    • +
    +
  6. +
  7. 完成,做自己的应用时可以对r.record()返回的PCM数据做语音处理。

    +
  8. +
+
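前面提到不传入 path 时也可以自己把 record() 返回的 PCM 数据保存到文件,下面是一个简单示意(文件路径为假设,按需修改):

from maix import audio, time, app

r = audio.Recorder()
r.volume(12)

# 把每次录到的 PCM 数据依次写入同一个文件
with open("/root/my_output.pcm", "wb") as f:
    while not app.need_exit():
        data = r.record()
        if data:
            f.write(data)
        time.sleep_ms(10)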

录制音频并保存为WAV格式

+

当构造Recorder对象时传入了path, 则录入的音频将会保存到path文件中,并且你也可以通过record方法获取当前录入的PCM数据。path只支持 .pcm 和 .wav 后缀的路径,并且当录入.wav时,record方法不会返回WAV头部信息,只会返回PCM数据。

+ +
from maix import audio, time, app
+
+r = audio.Recorder("/root/output.wav")
+r.volume(12)
+print("sample_rate:{} format:{} channel:{}".format(r.sample_rate(), r.format(), r.channel()))
+
+while not app.need_exit():
+    data = r.record()
+    print("data size", len(data))
+
+    time.sleep_ms(10)
+
+print("record finish!")
+
+

代码含义基本同上。

+

录制音频并保存为WAV格式(阻塞)

+

录入时如果设置了record_ms参数,录入音频会阻塞直到到达record_ms设置的时间,单位ms。

+ +
from maix import audio, time, app
+
+r = audio.Recorder("/root/output.wav")
+r.volume(12)
+print("sample_rate:{} format:{} channel:{}".format(r.sample_rate(), r.format(), r.channel()))
+
+r.record(5000)
+print("record finish!")
+
+

上面示例将会持续录入5000ms,并保存为WAV格式,录入期间将会阻塞在record方法中,注意当record设置了record_ms后不会返回PCM数据。

+

其他

+

PlayerRecorder模块有些bug待解决,请保证它们在其他模块(Camera模块,Display模块等)之前创建。例如:

+ +
# 先创建Player和Recorder
+p = audio.Player()
+r = audio.Recorder()
+
+# 再创建Camera
+c = camera.Camera()
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/audio/synthesis.html b/maixpy/doc/zh/audio/synthesis.html new file mode 100644 index 00000000..5784d35d --- /dev/null +++ b/maixpy/doc/zh/audio/synthesis.html @@ -0,0 +1,375 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 语音合成 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 语音合成

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

TODO: 正在赶来

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/basic/app.html b/maixpy/doc/zh/basic/app.html new file mode 100644 index 00000000..fc8480c0 --- /dev/null +++ b/maixpy/doc/zh/basic/app.html @@ -0,0 +1,441 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 应用开发和应用商店 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 应用开发和应用商店

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

哪里找应用

+

开机后会自动进入应用选择界面,内置各种应用均发布在 MaixHub 应用商店, 可以在这里找到对应应用的介绍和使用说明。

+

哪里找源码

+

源码可以在应用商店应用页面看到源码链接(如果有)。
+官方集成的应用源码都在 MaixPy/projects 目录 或者 MaixCDK/projects

+

安装应用

+

可以先设置语言 设置 -> 语言, 以及 设置 -> WiFi

+

应用商店应用可以用来升级和安装应用,连接上可以访问互联网的 WiFi 后,即可在MaixHub 应用商店扫码安装应用。

+

应用生态简介

+

为了让开发板做到开箱即用,以及方便用户无门槛地使用,以及方便开发者分享自己的有趣应用,并且能有有效的渠道获取到反馈甚至是收益,我们推出了一个简易的应用框架,包括:

+
    +
  • 应用商店: 开发者上传分享应用,用户无需开发直接下载使用,开发者可以获取到一定的现金收益(来自 MaixHub 官方以及用户打赏)。
  • +
  • 出厂内置大量应用: 官方提供了一些常用的应用,比如找色块、AI 物体检测追踪、找二维码、人脸识别等等,用户可以直接使用,也可以作为串口模块直接使用。
  • +
  • MaixPy + MaixCDK 软件开发包:使用 MaixPy 或者 MaixCDK 可以用 Python 或者 C/C++ 语言快速开发嵌入式 AI 视觉听觉应用,超高效率实现你的有趣想法。
  • +
  • MaixVision 配套电脑端开发工具: 全新的电脑端代码开发工具,快速上手、调试、运行、上传代码、安装应用到设备,一键式开发,甚至支持图像化积木式编程,小学生也能轻松上手。
  • +
+

大家可以多多关注应用商店,也可以在应用商店中分享自己的应用,大家一起共建活跃的社区。

+

打包应用

+

使用 MaixPy + MaixVison 可以方便地开发、打包、安装应用:

+
    +
  • 在 MaixVision 中使用 MaixPy 开发应用程序,可以是单个文件,也可以是一个工程目录。
  • +
  • 连接设备。
  • +
  • 点击 MaixVision 左下角的 安装 按钮,会弹出一个界面填写应用的基本信息,其中 id 用来唯一标识应用,一个设备不能同时安装相同 id 的不同应用,所以 id 应该与 MaixHub 上面已有的应用 id 不同(应用名字可以重复),另外还需要填写应用名、图标等信息。
  • +
  • 点击打包应用,会将应用打包成一个安装包,如果你要上传到 MaixHub 应用商店,用这个打包好的文件即可。
  • +
  • 点击 安装应用,这会将打包好的应用安装到设备。
  • +
  • 断开与设备的连接,就能看到设备功能选择界面多了一个你的应用,直接点进去就能运行。
  • +
+
+

如果你用 MaixCDK 开发,使用 maixcdk release 就能打包出来一个应用,具体看 MaixCDK 的文档。

+
+

退出应用

+

如果你只是写了比较简单的应用,没有做界面和返回按钮,默认可以按设备上的功能按键(一般是 USER 或者 FUNC 或者 OK 按钮)或者返回按钮(如果有这个按键,MaixCAM 默认没有这个按键)来退出应用。

+

安装应用

+
    +
  • 方法一: 设备使用应用商店应用,从应用商店找到应用,设备联网后,扫码安装。
  • +
  • 方法二: 使用安装包本地安装,将安装包传输到设备文件系统,比如/root/my_app_v1.0.0.zip,然后执行代码,注意修改pkg_path变量的路径,你也可以在MaixPyexamples/tools/install_app.py找到本代码:
  • +
+ +
import os
+
+def install_app(pkg_path):
+    if not os.path.exists(pkg_path):
+        raise Exception(f"package {pkg_path} not found")
+    cmd = f"/maixapp/apps/app_store/app_store install {pkg_path}"
+    err_code = os.system(cmd)
+    if err_code != 0:
+        print("[ERROR] Install failed, error code:", err_code)
+    else:
+        print(f"Install {pkg_path} success")
+
+pkg_path = "/root/my_app_v1.0.0.zip"
+
+install_app(pkg_path)
+
+
    +
  • 方法三:
      +
    • 如果是使用MaixPy开发的应用,在项目根目录(包含app.yamlmain.py)执行maixtool deploy会弹出一个二维码,保持设备和电脑在同一局域网,设备使用应用商店扫描对应的局域网地址二维码就能在线安装。
    • +
    • 如果是使用MaixCDK开发的应用,在项目根目录执行maixcdk deploy也会出现二维码,保持设备和电脑在同一局域网,设备使用应用商店扫描对应的局域网地址二维码就能在线安装。
    • +
    +
  • +
+

应用开发基本准则

+
    +
  • 因为默认都配了触摸屏幕,推荐都写一个简单的界面显示,最好有触摸交互。实现方法可以在例子里面找找参考。
  • +
  • 界面和按钮不要太小,因为 MaixCAM 默认的屏幕是 2.3寸 552x368分辨率,PPI 比较高屏幕比较小,要让手指能很容易戳到并且不会点错。
  • +
  • 每个应用实现的主要功能建议都实现一个简单的串口交互(基于串口协议,可参考例程),这样用户可以直接当成串口模块使用,比如人脸检测应用,可以在检测到人脸后通过串口输出坐标(本列表后面给出一个输出的简单示意)。
  • +
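下面是一个通过串口输出检测结果的简化示意(假设使用 /dev/ttyS0 即板上引出的 UART0,maix.uart 中 UART 类和 write_str 的具体用法请以串口使用文档和 API 文档为准):

from maix import uart

serial = uart.UART("/dev/ttyS0", 115200)

def report(x, y, w, h):
    # 以简单文本格式输出坐标,实际应用建议遵循 Maix 串口通信协议
    serial.write_str(f"face:{x},{y},{w},{h}\n")

report(10, 20, 100, 120)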
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/basic/app_usage.html b/maixpy/doc/zh/basic/app_usage.html new file mode 100644 index 00000000..4fe22040 --- /dev/null +++ b/maixpy/doc/zh/basic/app_usage.html @@ -0,0 +1,162 @@ + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 应用使用说明 - MaixPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/basic/auto_start.html b/maixpy/doc/zh/basic/auto_start.html new file mode 100644 index 00000000..be13a202 --- /dev/null +++ b/maixpy/doc/zh/basic/auto_start.html @@ -0,0 +1,447 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 应用开机自启 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 应用开机自启

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

打包安装好的应用可以设置开机自动启动,这样开机就不会显示应用菜单,直接进入指定的应用。

+

设置应用开机自启方法一

+

先打包安装好应用,然后在设备设置 -> 开机自启 设置中选择需要自动启动的应用即可,取消开机自启也是在这里设置。

+

设置应用开机自启方法二

+

运行 Python 脚本设置,修改脚本中的new_autostart_app_id 变量为你想设置的 app_id, 所有已经安装了的app_id会在执行脚本时打印出来,可以先执行一遍找到你想设置的app_id,修改变量再执行一遍即可,取消自动启动设置为None即可。
+此脚本也可以在MaixPyexamples/tools中找到set_autostart.py

+ +
import configparser, os
+
+def parse_apps_info():
+    info_path = "/maixapp/apps/app.info"
+    conf = configparser.ConfigParser()
+    conf.read(info_path)
+    version = conf["basic"]["version"]
+    apps = {}
+    for id in list(conf.keys()):
+        if id in ["basic", "DEFAULT"]:
+            continue
+        apps[id] = conf[id]
+    return apps
+
+def list_apps():
+    apps = parse_apps_info()
+    print(f"APP num: {len(apps)}")
+    for i, (id, info) in enumerate(apps.items()):
+        name_zh = info.get("name[zh]", "")
+        print(f"{i + 1}. [{info['name']}] {name_zh}:")
+        print(f"    id: {id}")
+        print(f"    exec: {info['exec']}")
+        print(f"    author: {info['author']}")
+        print(f"    desc: {info['desc']}")
+        print(f"    desc_zh: {info.get('desc', 'None')}")
+        print("")
+
+
+def get_curr_autostart_app():
+    path = "/maixapp/auto_start.txt"
+    if os.path.exists(path):
+        with open(path, "r") as f:
+            app_id = f.readline().strip()
+            return app_id
+    return None
+
+def set_autostart_app(app_id):
+    path = "/maixapp/auto_start.txt"
+    if not app_id:
+        if os.path.exists(path):
+            os.remove(path)
+        return
+    with open(path, "w") as f:
+        f.write(app_id)
+
+if __name__ == "__main__":
+    # new_autostart_app_id = "settings"   # change to app_id you want to set
+    new_autostart_app_id = None           # remove autostart
+
+    list_apps()
+    print("Before set autostart appid:", get_curr_autostart_app())
+    set_autostart_app(new_autostart_app_id)
+    print("Current autostart appid:", get_curr_autostart_app())
+
+
+

设置应用开机自启方法三

+

你也可以通过修改设备中的 /maixapp/auto_start.txt 文件来设置,查看和传输文件的方法请看前面的文档。

+
    +
  • 首先知道你需要设置的应用的 id 是什么。它是在你打包应用的时候设置的;如果不是你自己打包的应用,可以先安装到设备,设备/maixapp/apps/ 目录下的文件夹名就是应用 id(也可以下载查看设备的/maixapp/apps/app.info 文件,[]中括号部分就是应用id)。
  • +
  • 然后写入 id/maixapp/auto_start.txt 文件即可。(可以在电脑本地创建文件,然后 MaixVision 传输到设备。)
  • +
  • 如果要取消,删除设备上的 /maixapp/auto_start.txt 文件即可。
  • +
+

其它方法

+

对于 MaixCAM, 因为底层是 Linux, 如果你熟悉 Linux, 编辑/etc/rc.local 或者 /etc/init.d 下的启动脚本也可以。

+

但是需要注意的是,这种方式会让 MaixVision 在连接的时候无法停止这个应用,从而造成资源占用(比如屏幕和摄像头),MaixVision 可能无法正常跑程序;而前两种方法在 MaixVision 连接设备时可以正常让程序退出,以供 MaixVision 跑程序。

+

所以这种方法比较适合开机跑一些不会占用屏幕和摄像头等资源的后台进程,一般情况下如果你不熟悉 Linux 不建议这样操作。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/basic/linux_basic.html b/maixpy/doc/zh/basic/linux_basic.html new file mode 100644 index 00000000..daa1beea --- /dev/null +++ b/maixpy/doc/zh/basic/linux_basic.html @@ -0,0 +1,424 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Linux 基础知识 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Linux 基础知识

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

简介

+

本章内容对于刚入门的同学来说,可以先跳过此章节,在学会 MaixPy 基础开发后再来学习也是可以的。

+

最新的 MaixPy 支持的 MaixCAM 硬件支持跑 Linux 系统,所以 MaixPy 底层都是基于 Linux 系统进行开发的。
+虽然 Sipeed 开发的 MaixPy 已经为开发者们做了很多工作,即使不知道 Linux 系统知识也能愉快使用,但是以防在某些情况下需要一些底层操作,以及方便未接触过 Linux 的开发者学习,这里写一些 Linux 基础知识。

+

为什么需要 Linux 系统

+

具体的原因大家可以自行查阅,这里用通俗的看起来不太专业的话语简单举几个例子方便初学者理解:

+
    +
  • 在单片机中,我们的程序是一个死循环程序,用上 Linux 后我们可以同时跑很多程序,每个程序看起来都独立在同时运行,每个程序具体怎么执行的由操作系统实现。
  • +
  • 基于 Linux 的开发者众多,需要功能和驱动可以很方便地找到,不需要自己再实现一遍。
  • +
  • 基于 Linux 配套的软件工具丰富,可以很方便地进行开发和调试,比如在本教程没有提到的一些 Linux 通用工具理论上也是可以使用的。
  • +
+

文件系统

+

什么是文件系统?

+
    +
  • 就像电脑的文件系统一样,Linux 上会将硬件磁盘用文件系统进行管理,这样我们可以很方便地向磁盘读写数据。
  • +
  • 对于学过单片机没有接触过文件系统开发的同学来讲,可以理解为我们有一个 Flash 或者 TF 卡,我们可以通过 API 读写 Flash 存取数据,断电后也能保存数据,但是 Flash 具有读写寿命,我们往往需要写一套程序去保证 Flash 读写寿命,而文件系统就可以理解成这样一套成熟的程序,文件系统帮我们完成了具体如何管理 Flash 空间和读写,我们只需调用文件系统的 API 即可,大大减少了我们的开发工作量并且用成熟的程序保证了稳定性和安全性。
  • +
+

在电脑和设备(开发板)之间传输文件

+

既然设备有 Linux 和文件系统,那我们怎么发送文件到设备呢?

+

对于 MaixPy 我们配套了 MaixVision, 在后面的版本也会支持文件管理功能,在此之前可以用下面的方法:

+

这里我们主要介绍通过网络传输的方式,其它方式可自行探索传输文件到 Linux

+
    +
  • 确保设备和电脑连接到了同一个局域网,比如:
      +
    • MaixCAM 的 USB 口连接到电脑会创建一个虚拟网卡,在电脑端的设备管理器就能看到,设备的 IP 可以在设备的设置->设备信息中看到设备名和 IP。
    • +
    • 也可以在设备设置->WiFi中连接到和电脑相同的局域网。
    • +
    +
  • +
  • 电脑使用 SCP 或者 SFTP 协议传输文件到设备,具体的软件有很多,具体的软件和使用方法可以自行搜索,比如:
      +
    • 在 Windows 上可以使用 WinSCP 或者 FileZilla,或者 scp 命令等。
    • +
    • 在 Linux 上可以使用 FileZilla 或者 scp 命令 等。
    • +
    • 在 Mac 上可以使用 FileZilla 或者 scp 命令 等。
    • +
    +
  • +
+

终端和命令行

+

终端是用来与 Linux 系统进行通信和交互操作的工具(软件),类似于 Windows 的cmd或者PowerShell。

+

比如我们可以在电脑的 Windows 系统中的 powershell 或者 Linux系统中的 终端 工具中输入ssh root@maixcam-xxxx.local 这里具体的名字在设备的设置->设备信息中可以看到,这样我们就可以通过终端连接到设备了(用户名和密码都是root)。
+然后我们通过输入命令来操作设备,比如ls命令可以列出设备文件系统中当前目录下的文件,cd 用来切换当前所在的目录(就像电脑文件管理中点击文件夹切换目录一样),比如:

+ +
cd /     # 切换到根目录
+ls       # 显示当前目录(根目录)下的所有文件
+
+

然后会显示类似下面的内容:

+ +
bin         lib         media       root        tmp
+boot        lib64       mnt         run         usr
+dev         linuxrc     opt         sbin        var
+etc         lost+found  proc        sys
+
+

更多命令学习请自行搜索Linux 命令行使用教程,这里只是为了让初学者知道基本概念,这样有开发者提到时可以知道是什么意思。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/basic/maixpy_upgrade.html b/maixpy/doc/zh/basic/maixpy_upgrade.html new file mode 100644 index 00000000..0a03bb34 --- /dev/null +++ b/maixpy/doc/zh/basic/maixpy_upgrade.html @@ -0,0 +1,423 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM 更新 MaixPy - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM 更新 MaixPy

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

有两种方法,如果第一次上手使用,为了降低难度,可以直接使用出厂 TF 卡自带的 MaixPy 固件尝试,以后再考虑更新。

+

不过因为不知道你拿到手的是什么时候出厂的 TF 卡,所以建议都更新一下系统。

+

直接更新系统(强烈推荐)

+

按照 升级和烧录系统 中的操作升级到最新的系统,里面就包含了最新的 MaixPy 固件。

+

只更新 MaixPy 固件

+

MaixPy 仓库 release 页面 看到最新的版本信息和更新日志,其中包含了 MaixPy 固件信息,以及对应版本使用的系统信息。

+

如果不想更新系统(因为一般系统变动不大,可以看 MaixPy 更新日志中是否有系统改动相关,再决定是否更新系统),则可以只更新 MaixPy 固件。

+
    +
  • 在设置中设置 WiFi, 让系统联网。
  • +
  • 点击设置应用中的 更新 MaixPy 进行更新。
  • +
+

也可以执行 Python 代码调用系统命令来更新:

+ +
import os
+
+os.system("pip install MaixPy -U")
+
+

由于默认从pypi.org下载,中国国内速度可能比较慢,可以设置国内的镜像站点,修改下面代码的 server 变量来选择,此脚本在MaixPyexamples/tools 目录下也有,可以直接在MaixVision中运行。

+ +
import os
+
+def install_maixpy(server):
+    cmd = f"pip install maixpy -U -i {server}"
+    print("Start install now, wait patiently ...")
+    err = os.system(cmd)
+    if err != 0:
+        print("[ERROR] execute failed, code:", err)
+    else:
+        print("Install complete")
+
+
+servers = {
+    "pypi": "https://pypi.org/simple",
+    "aliyun": "https://mirrors.aliyun.com/pypi/simple",
+    "ustc": "https://pypi.mirrors.ustc.edu.cn/simple",
+    "163": "https://mirrors.163.com/pypi/simple",
+    "douban": "https://pypi.douban.com/simple",
+    "tuna": "https://pypi.tuna.tsinghua.edu.cn/simple"
+}
+
+# Select server based on your network
+server = servers["tuna"]
+
+install_maixpy(server)
+
+
+

如果你会使用终端, 也可以直接在终端中使用 pip install MaixPy -U 来更新 MaixPy。

+
+

另外你也可以手动下载wheel 文件(.whl格式)传输到设备(传输方法见后文MaixVision 使用)后通过 pip install ******.whl 命令来安装。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/basic/maixvision.html b/maixpy/doc/zh/basic/maixvision.html new file mode 100644 index 00000000..a5c4d70e --- /dev/null +++ b/maixpy/doc/zh/basic/maixvision.html @@ -0,0 +1,457 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixVision -- MaixCAM MaixPy 编程 IDE + 图形化积木编程 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixVision -- MaixCAM MaixPy 编程 IDE + 图形化积木编程

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

简介

+

MaixVision 是专为 Maix 生态打造的一款开发者编程工具,支持 MaixPy 编程和图形化积木编程,同时支持在线运行和调试,以及实时预览图像,可以同步设备显示屏的图像,方便调试和开发。

+

以及支持打包应用和安装应用到设备,方便用户一键生成、安装应用。

+

同时还集成一些方便开发的小工具,比如文件管理,阈值编辑器,二维码生成等等。

+

下载

+

访问 MaixVision 主页 下载。

+

使用 MaixPy 编程和在线运行

+

按照快速开始的步骤连接设备,我们可以很方便地使用 MaixPy 编程和在线运行。

+

实时预览图像

+

MaixPy 提供display模块,可以将图像显示到屏幕上,同时,在调用display模块的show方法时,会将图像发送到 MaixVision 显示,比如代码:

+ +
from maix import display, camera
+
+cam = camera.Camera(640, 480)
+disp = display.Display()
+while 1:
+    disp.show(cam.read())
+
+

这里我们用摄像头读取了图像,然后通过disp.show()方法将图像显示到屏幕上,同时也会发送到 MaixVision 显示。

+

当我们点击了右上角的暂停按钮,就会停止发送图像到 MaixVision 显示。

+

代码自动补全

+

代码提示依赖电脑本地的 Python 包,为了实现代码提示,我们需要在电脑中安装 Python,并且安装需要提示的 Python 包。

+
    +
  • 安装 Python 请访问 Python 官网安装。
  • +
  • 安装需要提示的包,比如对于 MaixPy, 你需要在电脑也安装一份 MaixPy 包,在电脑使用pip install MaixPy即可安装好;如果MaixPy更新了,你也需要在电脑和设备上都更新 MaixPy,电脑手动在终端执行pip install MaixPy -U即可,设备则直接在设置应用中更新即可。
  • +
+
+

中国国内用户可以使用国内镜像pip install -i https://pypi.tuna.tsinghua.edu.cn/simple MaixPy

+
+
    +
  • 重启 MaixVision 就能够看到代码提示了。
  • +
+
+

如果仍然不能提示,可以手动在设置中设置 python 可执行文件的路径后重启。

+
+
+

注意在电脑安装 Python 包这里只是为了用作代码提示,实际代码运行还是在设备(开发板)上,设备上也要有对应的包才能正常运行。

+
+
+

另外,虽然你在电脑上安装了 MaixPy 包,但是由于我们精力有限,我们不确保你能直接在电脑的 Python 导入 maix 包进行使用,请在支持的设备上运行。

+
+

计算图像的直方图

+

在上一步中我们可以在 MaixVision 中实时看到图像,我们用鼠标框选一个区域,图像下方就能看到这个区域的直方图了,选择不同的颜色表示方法,可以看到不同的颜色通道的直方图。

+

这个功能方便我们在做某些图像处理算法时找到一些合适的参数。

+

区分设备文件系统电脑文件系统

+

这里我们有一个比较重要的概念需要掌握:分清楚设备文件系统电脑文件系统

+
    +
  • 电脑文件系统:运行在电脑上,在 MaixVision 中打开文件或者工程都是打开的电脑里面的文件,保存也是自动保存到电脑的文件系统。
  • +
  • 设备文件系统:程序运行时会将程序发送到设备上运行,所以代码里面使用的文件都是从设备文件系统读取。
  • +
+

所以常见的问题是有同学在电脑上保存了文件D:\data\a.jpg,然后在设备上使用这个文件img = image.load("D:\data\a.jpg"),这样当然是找不到文件的,因为设备上没有D:\data\a.jpg这个文件。

+
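正确的做法是先把文件传输到设备的文件系统里,再用设备上的路径加载,一个简单示意(设备上的路径 /root/a.jpg 为假设,按实际情况修改):

from maix import image

img = image.load("/root/a.jpg")   # 加载的是设备文件系统中的 /root/a.jpg
print(img)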

具体如何将电脑的文件发送到设备上,参考下面的章节。

+

传输文件到设备

+

先连接设备,然后点击浏览设备文件系统的按钮,有两个入口,如下图,然后就能上传文件到设备,或者从设备下载文件到电脑了。

+

maixvision_browser2

+

maixvision_browser

+
+也可以用其它工具代替,点击展开 +

先知道设备的 ip 地址或者设备名称,MaixVision 就可以搜索到, 或者在设备设置->系统信息中看到,比如类似 maixcam-xxxx.local 或者 192.168.0.123
+ 用户名和密码都是 root, 使用 SFTP 协议传输文件,端口号是 22

+

然后不同系统下都有很多好用的软件:

+

Windows 下

+

使用 WinSCP 或者 FileZilla 等工具连接设备,将文件传输到设备上,选择 SFTP 协议填写设备和账号信息连接即可。

+

具体不懂的可以自行搜索。

+

Linux 下

+

终端使用 scp 命令传输文件到设备上,比如:

+ +
scp /path/to/your/file.py root@maixcam-xxxx.local:/root
+
+

Mac 下

+
    +
  • 方法一:终端使用 scp 命令传输文件到设备上,比如:
  • +
+ +
scp /path/to/your/file.py root@maixcam-xxxx.local:/root
+
+
    +
  • 方法二:使用 FileZilla 等工具连接设备,将文件传输到设备上,选择 SFTP 协议填写设备和账号信息连接即可。
  • +
+
+
+

使用图形化积木编程

+

开发中,敬请期待。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/basic/os.html b/maixpy/doc/zh/basic/os.html new file mode 100644 index 00000000..1ee21cac --- /dev/null +++ b/maixpy/doc/zh/basic/os.html @@ -0,0 +1,394 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 升级和烧录系统 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 升级和烧录系统

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

介绍

+

如果你购买了官方(Sipeed)的带 TF 卡的套餐,一般来说出厂已经烧录好了系统,可以跳过这一步直接使用。

+

但是为了防止出厂烧录的系统版本过旧,强烈建议 先按照教程 升级到最新 的系统。

+

获得最新系统

+

MaixPy 发布页面 找到最新的系统镜像文件,比如maixcam_os_20240401_maixpy_v4.1.0.xz

+
+

中国国内用户下载速度慢可以用迅雷下载,速度可能会快一些。
+或者使用例如 github.abskoop.workers.dev 这种代理网站下载。

+
+

备用地址:Sourceforge (同步可能不及时,建议优先上面的方式)

+

如何确认系统是否需要升级

+
    +
  • 在开机后的功能选择界面,点击设置,然后点击设备信息,可以看到系统的版本号。
  • +
  • MaixPy 发布历史页面查看更新日志,里面有 MaixPy 固件和系统镜像的更新说明,如果在你的版本后有重要更新,建议升级。
  • +
+
+

如果最新系统和当前系统对比只是 MaixPy 固件的常规更新,也可以不升级,在 设置 中的 更新 MaixPy 中单独更新 MaixPy,不过一般 不推荐 这样做。

+
+

烧录系统到 MaixCAM

+

参考 硬件文档中的 MaixCAM 系统烧录 教程,注意里面能满足 USB 烧录的条件则推荐使用 USB 烧录方式,USB 烧录方式不用拔 TF 卡。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/basic/python.html b/maixpy/doc/zh/basic/python.html new file mode 100644 index 00000000..77119b37 --- /dev/null +++ b/maixpy/doc/zh/basic/python.html @@ -0,0 +1,415 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Python 基础知识 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

Python 基础知识

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

MaixPy 的教程文档里面就不涉及具体的 Python 语法教程了,因为 Python 的教程实在是太多了,都做得很好,这里只介绍需要学什么,方向和线路指导即可。

+

Python 简介

+

Python 是一门解释性、面向对象、动态类型的高级编程语言。

+
    +
  • 解释性:不需要编译,直接运行,优点是开发快速,缺点是因为每次运行都要解释一遍代码,运行速度慢一点点,但是往往瓶颈还是开发者写的代码而不是语言本身。
  • +
  • 面向对象:支持面向对象编程,可以定义类和对象,相比面向过程语言,更容易组织代码。更多自行搜索。
  • +
  • 动态类型:变量不需要声明类型,可以直接赋值,类型会根据赋值自动确定,这样可以减少代码量,但是也容易出现类型错误,需要开发者自己注意。
  • +
+

总之,对于没有接触过 Python 的开发者来说,Python 非常容易上手,有大量现成的库,开发者群体巨大,开发应用周期短,非常值得学习!

+

Python 环境安装

+

你可以按照你学习的 Python 教程在电脑上安装 Python;
+也可以在 MaixVisioin 上连接设备后使用 MaixVision 编程然后在开发板运行。

+

使用 MaixPy 需要的 Python 基础有哪些?

+
    +
  • Python 的基本概念。
  • +
  • 面向对象编程的基本概念。
  • +
  • Python 的基本语法,包括:
      +
    • tab 缩进对齐语法
    • +
    • 变量、函数、类、对象、注释等
    • +
    • 控制语句比如 if、for、while 等等
    • +
    • 模块和导入模块
    • +
    • 基本数据类型比如 int、float、str、list、dict、tuple 等等
    • +
    • bytes 和 str 的区别和转换
    • +
    • 异常处理,try except
    • +
    • 常用的内置函数,比如 print、open、len、range 等等
    • +
    • 常用的内置模块,比如 os、sys、time、random、math 等等
    • +
    +
  • +
+

掌握以上的基础知识就可以顺畅使用 MaixPy 编程了,配合后面的教程和例程,在不懂的时候查询搜索引擎或者官方文档,或者问 ChatGPT 就能顺利完成开发。

+

对于已经有一门面向对象编程语言经验的开发者

+

如果你已经会一门面向对象语言比如 C++/Java/C# 等等,那只需要快速浏览一下 Python 的语法,就可以开始使用了。

+

比如 菜鸟教程 或者 Python 官方教程

+

或者个人开发者的博客,比如 哇!是 Python

+

对于没有面向对象编程经验但是有 C 语言经验的开发者

+

如果只学了 C,缺乏对面向对象的理解,那么可以先学习一下面向对象的概念,然后再学习 Python,也是比较快的,可以自行搜索视频教程入门。

+

跟着视频教程入门之后可以看看文档教程,比如 菜鸟教程 或者 Python 官方教程 就可以开动了!

+

在学了入门知识后,就可以按照 MaixPy 的文档和例程开始使用 MaixPy 编程了。

+

对于编程新手

+

如果你从未接触过编程,那么你需要重头开始学习 Python,Python 作为入门语言也是比较合适的,具体可以搜一搜视频教程。

+

在学会了基础语法后,就能按照例程使用 MaixPy 编程了。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/basic/python_pkgs.html b/maixpy/doc/zh/basic/python_pkgs.html new file mode 100644 index 00000000..965cc53f --- /dev/null +++ b/maixpy/doc/zh/basic/python_pkgs.html @@ -0,0 +1,393 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 添加额外的 Python 软件包 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 添加额外的 Python 软件包

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

简介

+

MaixPy 基于 Python 语言,提供了大量方便嵌入式应用开发的功能和 API,除此之外,你也可以使用其它的 Python 包来扩展功能。

+

安装额外的 Python 包

+
+

注意可能不是所有 Python 包都支持,一般只支持纯 Python 包,不支持 C 扩展包, C 扩展包可能需要你手动在电脑交叉编译(比较复杂,这里就不介绍了)。

+
+

方法一: 使用 Python 代码来安装

+

在 MaixVision 中使用 Python 代码来安装你需要的包,比如:

+ +
import os
+os.system("pip install 包名")
+
+

要更新一个包,可以使用:

+ +
import os
+os.system("pip install --upgrade 包名")
+
+

方法二: 终端使用 pip 命令安装

+

使用Linux 基础中介绍的终端使用方法,使用 pip install 包名 安装你需要的包。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/basic/view_src_code.html b/maixpy/doc/zh/basic/view_src_code.html new file mode 100644 index 00000000..e27724d9 --- /dev/null +++ b/maixpy/doc/zh/basic/view_src_code.html @@ -0,0 +1,436 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 如何找到 MaixPy API 对应的源码 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 如何找到 MaixPy API 对应的源码

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

简介

+

MaixPy 是基于 Python 实现,有部分函数是用 Python 编写,大多数底层代码都是使用 C/C++ 编写,这样可以保证运行效率。

+

如果我们在使用一个函数遇到疑问,我们可以查询本文档,以及 API 文档。
+如果仍然不能解决你的疑惑,那么可以直接按照本文的方法找到底层实现的源码找出答案,也欢迎一起贡献文档或代码,成为 MaixPy 开发者的一员

+

先看文档

+

一定要先看文档: https://wiki.sipeed.com/maixpy/, 然后看 API 文档:https://wiki.sipeed.com/maixpy/api/index.html

+

API 文档只有英文,原因是 API 文档是从代码的注释生成而来,代码中一律使用英文,看不懂英文可以使用翻译。

+

如何找到 API 对应的源码

+

首先有两个开源仓库,分别是 MaixPyMaixCDK
+MaixPy 是工程仓库,里面包含了 MaixPy 的部分源码,所有文档、例程;MaixCDK 包含了大多数 MaixPy API 的底层 C/C++ 实现。

+

我们可以把这两份代码下载下来,也可以直接在网页查看。

+

顺便记得给它们点一个 star 让更多人看到哦~

+

找到 C/C++ 编写的 API

+

现在假设我们要找到 maix.image.Image.find_blobs 函数为例, 首先我们尝试手动去找:

+
    +
  • 因为这是属于视觉相关的 API, 我们在 MaixCDKcomponents/vision/include 下面可以看到有一个 maix_image.hpp的头文件,猜测大概在这里面。
  • +
  • maix_image.hpp 搜索 find_blobs,马上就发现了函数声明:
  • +
+ +
std::vector<image::Blob> find_blobs(std::vector<std::vector<int>> thresholds = std::vector<std::vector<int>>(), bool invert = false, std::vector<int> roi = std::vector<int>(), int x_stride = 2, int y_stride = 1, int area_threshold = 10, int pixels_threshold = 10, bool merge = false, int margin = 0, int x_hist_bins_max = 0, int y_hist_bins_max = 0);
+
+
    +
  • 同时我们发现函数声明前面有注释,API 文档即从这份注释自动生成而来,如果你仔细对比 API 文档和这个注释会发现他们一模一样的,改动这个注释编译后会产生 API 文档。
  • +
  • 这只是函数声明,我们找到components/vision/src/maix_image.cpp,发现里面没有这个函数,仔细一看有个components/vision/src/maix_image_find_blobs.cpp,原来是将函数单独写了一个cpp,在里面我们就能看到函数的源代码了。
  • +
+

找到使用 Pybind11 编写的 API

+

如果 MaixCDK 里面找不到,那就可以到 MaixPy/components里面寻找。

+
+

上面的代码你会发现,我们在使用find_blobs时第一个参数是[[...]]这样的参数即list类型,C/C++ 定义第一个参数是std::vector<std::vector<int>>类型,原因是我们使用了pybind11自动将 std::vector 类型转换为了list类型。

+
+

而有一些类型在MaixCDK里面不方便定义,比如numpyarray类型,但是pybind11里面有相关的定义方便我们直接使用,但是又不想 MaixCDK 里面有 pybind11 相关的代码,所以我们在MaixPy/components 里面来写使用了 pybind11 相关的代码,比如maix.image.image2cv方法。

+

如何修改代码

+

在找到代码后,直接修改,然后按照编译文档编译出固件即可。

+

如何增加代码

+

照抄其它 API,写一个函数,然后添加完整的注释,注释中额外添加一个@maixpy maix.xxx.xxx,这里xxx即你想添加到的模块和API名,然后编译出固件即可。

+

可以参考MaixCDK/components/basic/includemaix_api_example.hpp

+

API 参数和返回值用基础的C++ 类型会自动转换为Python的类型,是不是十分简单.
+具体的类型转换参考pybind11 类型自动转换列表

+

比如我们希望增加一个maix.my_module.my_func,在MaixCDK中合适的地方(最好符合现在的文件夹分类)创建一个头文件,然后添加代码:

+ +
namespace maix::my_module
+{
+    /**
+     * My function, add two integer.
+     * @param a arg a, int type
+     * @param b arg b, int type
+     * @return int type, will a + b
+     * @maixpy maix.my_module.my_func
+     */
+    int my_func(int a, int b);
+}
+
+

然后增加一个cpp文件:

+ +
int my_func(int a, int b)
+{
+    return a + b;
+}
+
+

然后编译 MaixPy 生成whl文件,安装到设备即可使用maix.my_module.my_func函数。

+

如何贡献代码

+

如果你发现 MaixPy 有未完成的 API, 或者有 bug, 欢迎修改后提交 PR(Pull Request)到 MaixPy 仓库,具体提交方法看 贡献文档和代码

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/config.json b/maixpy/doc/zh/config.json new file mode 100644 index 00000000..b44cb82a --- /dev/null +++ b/maixpy/doc/zh/config.json @@ -0,0 +1,4 @@ +{ + "import": "config_zh", + "name": "MaixPy 中文文档" +} diff --git a/maixpy/doc/zh/faq.html b/maixpy/doc/zh/faq.html new file mode 100644 index 00000000..2faf7d45 --- /dev/null +++ b/maixpy/doc/zh/faq.html @@ -0,0 +1,518 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy FAQ(常见问题) - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy FAQ(常见问题)

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +
+

此页面列出了 MaixPy 相关的常见问题和解决方案,如果你遇到了问题,请先在这里找寻答案。
+另外还有其它途径:

+ +
+

MaixVision 无法搜索到设备?

+

先确认连接方式是 WiFi 还是 USB 线,
+WiFi:

+
    +
  • 确认 WiFi 是否正确连接上并且获取到 IP 地址, 可以在 设置->设备信息 或者设置->WiFi 里面看到 ip
  • +
+

USB线:

+
    +
  • 确保设备通过 Type-C 数据线连接到电脑,设备处于开机状态并且进入了功能选择界面。
  • +
  • 确保设备驱动已经安装:
      +
    • Windows 下可以在设备管理器中查看是否有 USB 虚拟网卡设备,如果有感叹号则是驱动没有安装好,按照快速开始 中的方法安装驱动即可。
    • +
    • Linux 下可以通过ifconfig或者ip addr查看是否有usb0设备,或者用lsusb查看所有 USB 设备。Linux 已经自带驱动,所以如果识别不到,请检查硬件连接、设备系统是否为最新,以及设备是否已经正常启动。
    • +
    • Mac OS 同 Linux 方法,或者在系统设置 -> 网络 里面看有没有 usb 网卡。
    • +
    +
  • +
  • 另外 检查 USB 线缆的质量,换一个高质量的线缆。
  • +
  • 另外 检查电脑 USB 口的质量,比如实测某些小主机 USB 口 EMI 设计太糟糕,外接一个质量好点的 USB HUB 反而可以使用了,也可以换 USB 口 或者直接换台电脑。
  • +
+

MaixVision 运行摄像头例程显示图像卡顿

+

默认配的 GC4653 摄像头最高帧率为 30 帧,摄像头例程正常情况下 MaixVision 的显示肉眼不会有卡顿感,如果卡顿,首先考虑传输问题:

+
    +
  • 检查网络连接质量,比如 WiFi。
  • +
  • 如果用的 USB 连接,检查 USB 线质量, 电脑 USB 口质量,可以尝试换台电脑或者 USB 口 或者 USB 线缆尝试对比。
  • +
+

此产品适合量产吗

+

答案:适合。

+
    +
  • 软件上使用 Python 即可稳定运行,方便开发也可靠。
  • +
  • 软件上另外支持和 MaixPy 相同 API 的 C++ SDK(MaixCDK),满足高效率和稳定要求。
  • +
  • 硬件上提供各种形式的 PCB 和外壳,核心板和整板都有,芯片供货稳定,如果有量产需求可以联系 support@sipeed.com 咨询。
  • +
  • 量大价更优。
  • +
+

MaixPy v4 和 v1 v3 有什么区别?

+
    +
  • MaixPy v4 使用 Python 语言,是吸取了 v1 v3 经验的集大成之作,有更好的配套软件和生态,更多的功能,更简单的使用方式和更完善的文档;硬件有很大提升的同时,价格和另外两者相当甚至更便宜;另外也做了兼容 K210 的使用体验和 API,方便用户从 v1 快速迁移到 v4。
  • +
  • v1 使用了 Micropython 语言,有很多局限性,比如第三方库支持有限;同时受限于 Maix-I (K210) 的硬件性能,内存不够用,AI 模型支持有限,很多编解码不支持硬件加速等缺点。
  • +
  • v3 也是使用了 Python 语言,基于 Maix-II-Dock (v831) 硬件,硬件 AI 模型支持有限,而且全志的基础生态不够开放,API 也不够完善,此版本仅作为 Maix-II-Dock (v831)上面使用,不会继续更新。
  • +
+

MaixPy 目前只支持 MaixCAM 吗,用其它同款芯片的板子行不行?

+

MaixPy 目前仅支持 MaixCAM 系列板子,其它同款芯片的板子也不支持(包括 Sipeed 的同款芯片板子 比如 LicheeRV-Nano),强烈不建议尝试,导致设备损坏(比如冒烟烧屏等)后果自负。

+

未来 Sipeed 出的 Maix 系列的产品都将继续得到 MaixPy 支持,目前如果 MaixCAM 有什么无法满足的需求,可以到 MaixHub 讨论版块 提出需求或者发送邮件到 support@sipeed.com.

+

可以用除了官方搭配的摄像头或者屏幕以外的自己的摄像头或者屏幕吗?

+

不建议这样操作,除非你有够丰富的软硬件知识和经验,否则可能导致设备损坏。

+

官方搭配的配件对应的软硬件是调教过的,表现效果是最好的,上手即可使用,其它配件可能接口不同,驱动不同,软件不同,需要自己去调教,这是一个非常复杂的过程,不建议尝试。

+

当然,如果你是大佬,我们也欢迎你提交 PR!

+

运行模型报错 cvimodel built for xxxcv181x CANNOT run on platform cv181x

+

解析模型文件失败了,一般情况是模型文件损坏造成的,确保你的模型文件是没有损坏的。
+比如:

+
    +
  • 用编辑器编辑了二进制文件导致文件损坏。比如用 maixvision 打开了 cvimodel 文件,由于 maixvision 的自动保存功能会破坏二进制文件,所以不要用 maixvision 等文本编辑器打开二进制文件并保存(后面 MaixVision 会修复这个问题,即去掉 maixvision 的自动保存功能)。
  • +
  • 如果是从网上下载的,保证下载没有出问题,一般网上的文件提供 sha256sum/md5 校验值,下载下来后可以对比一下,具体方法请自行搜索或者问 ChatGPT。
  • +
  • 如果是来自压缩包,请确认解压过程没有出错,可以从压缩包重新解压一遍保证中间没有出错。
  • +
  • 保证传输到设备的过程没有造成文件损坏,可以对比一下设备中的文件和电脑中的文件的 sha256sum 值(本列表后面给出一个用 Python 计算 sha256 的简单示意),具体方法也可以自行搜索或者问 ChatGPT。
  • +
+
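用 Python 计算文件 sha256 的简单示意(hashlib 为标准库,文件路径为假设,分别在电脑和设备上对同一个文件计算并对比结果是否一致即可):

import hashlib

def sha256sum(path):
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(65536), b""):
            h.update(chunk)
    return h.hexdigest()

print(sha256sum("/root/models/yolov5s_224_int8.cvimodel"))  # 路径按实际情况修改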

上电启动黑屏,屏幕无显示

+

请看 MaixCAM FAQ

+

通过 USB 连接了电脑和 MaixCAM 为什么电脑没有出现串口?

+

MaixCAM 的 USB 口是芯片的 USB2.0 接口,不是 USB 转串口接口,所以插上电脑不会出现串口,这是正常现象。
+没有 USB 转串口怎么通信呢?
+默认 USB 会模拟出 USB 网卡,所以当你将 USB 插上电脑时会出现虚拟网卡,按照 快速开始 中的说明可以使用 MaixVision 与 MaixCAM 通信实现代码运行、图像预览、文件管理等功能。
+另外,因为 USB 模拟了网卡,所以你也可以用通用的 SSH 软件连接 MaixCAM,实现通信。
+或者你也可以连接 WiFi 和电脑在同一个局域网下通信。

+

如果你要使用串口,分为两种情况:

+
    +
  1. 串口和电脑通信:需要自行购买任意一款 USB 转串口模块来连接电脑的 USB 和板子的串口(对于MaixCAM 是 UART0 的 A16(TX) 和 A17(RX) 引脚,或者连接 MaixCAM 套餐送的 USB 转接板引出的两个 TX RX 引脚,也是 A16 A17 引脚,是等效的)
  2. +
  3. 串口和其它 MCU/SOC 通信: 直接连接 MaixCAM 的 A16(TX)和 A17(RX) 到 单片机的 RX 和 TX 引脚即可。
  4. +
+

红色屏幕,提示初始化显示失败,请查看FAQ

+

从字面意思可以看到是显示驱动初始化失败了。
+MaixCAM 的底层的显示驱动目前(2024.7)是和摄像头驱动绑定在一起初始化的,所以遇到这个问题多半是摄像头驱动初始化失败了。
+解决方法:

+
    +
  • 尝试更新到最新的系统,并安装最新的运行库(重要!!!)。因为运行库需要和系统里面的驱动配合工作,版本不一致可能会出错,所以更新到最新的镜像并安装最新运行库后一般就能解决。
  • +
  • 有可能是多个进程一起企图占用驱动,最简单粗暴的方法就是重启。
  • +
  • 硬件上摄像头连接有问题,检查摄像头硬件连接,以及摄像头是否损坏。
  • +
+

Runtime、MaixPy、系统镜像有什么区别,我应该升级哪个?

+
    +
  • Runtime 是运行时环境,系统很多功能依赖这个,包括 MaixPy 也依赖此环境,遇到无法运行程序的问题首先联网检查更新这个。
  • +
  • 系统镜像包含了基本的操作系统、硬件驱动、内置应用,以及 MaixPy 固件等,是基础环境,最好是保持最新, 特别是在Release页面中版本更新中提到了系统有更新,则强烈建议更新系统,因为有些 MaixPy 功能可能依赖系统里面的驱动。
  • +
+
+

更新系统会格式化所有之前的数据,更新前请备份好设备系统中有用的数据。

+
+
    +
  • MaixPy 是运行 MaixPy 程序的依赖库,如果不需要更新系统功能,以及更新日志中没有提到系统有重要更新比如驱动,那可以单独更新 MaixPy 即可,不过以防有驱动变化,最好是直接重新烧录系统。
  • +
+

加载 MUD 模型文件报错 *****.cvimodel not exists, load model failed

+
    +
  • 检查设备中(注意不是电脑里面,需要传到设备里面去)是否真的存在你加载的 .mud 文件。
  • +
  • 检查你写的模型路径写错没有。
  • +
  • 如果你改过文件名,需要注意: MUD 文件是一个模型描述文件,可以用文本编辑器编辑,实际的模型文件是 .cvimodel 文件(对于MaixCAM),.mud 文件中指定了 .cvimodel 的文件名和路径,所以如果你改动了 .cvimodel的文件名,那么也要修改.mud文件中的model路径,比如这里 Yolov5 模型的 mud:
  • +
+ +
[basic]
+type = cvimodel
+model = yolov5s_224_int8.cvimodel
+
+[extra]
+model_type = yolov5
+input_type = rgb
+mean = 0, 0, 0
+scale = 0.00392156862745098, 0.00392156862745098, 0.00392156862745098
+anchors = 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326
+labels = person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair drier, toothbrush
+
+

这里制定了 model 为相对这个.mud文件目录的 yolov5s_224_int8.cvimodel 文件为模型文件,如果你改了yolov5s_224_int8.cvimodel 为其它名,那也需要改这里。

+

MaixVision import maix 显示红色波浪线

+

这是 MaixVision 的代码提示功能报错找不到 maix 模块。
这里需要搞清楚一个概念: MaixVision 的代码提示依赖的是电脑本地的 Python 包,代码运行依赖的是设备端的 Python 包,所以要让 MaixVision 能够提示,就要在电脑上也安装 Python 和 MaixPy 包。具体请看MaixVision 使用文档

+

MaixCAM 启动非常缓慢,甚至超过了 1 分钟,或者屏幕在闪动

+

多半是由于供电不足造成的, MaixCAM 需要 5v 150mA~500mA 左右的稳定供电,如果你遇到了这种现象,可以使用 USB 转 TTL 模块连接 MaixCAM 的串口到电脑,如果看到Card did not respond to voltage select! : -110 这样的字样,说明供电不足,换一个更加稳定的供电设备即可。
+对于 MaixCAM,在开机时会有 400mA 的电流,待机且屏幕有显示需要 250mA,全速运行 AI 模型需要 400mA~500mA 的电流,所以保证电源的稳定性十分重要!

+

MaixCAM 黑屏无法启动,或者卡在 LOGO 界面

+

参考MaixCAM FAQ

+

MaixVision 启动程序一直“卡在“ start running ...

+

MaixVision 的日志输出窗口在开始启动程序时会打印一句start running ...,代表程序已经发送到设备并开始执行,
+后面再打印(输出)什么取决于你的程序,比如你调用了print("hello") 则会打印hello,如果你的程序没有任何打印那就不会有任何日志。。。
+所以实际上不是卡住了,而是你的程序就没有输出过任何东西,自然也就不会显示任何日志了,可以尝试在自己的程序中加print("xxx")来打印,这也是我们调试程序最简单的方法。

+

为什么硬件有 256MB 内存,在系统里只能用 128MB 内存呢?

+

因为其它内存给底层驱动和内核预留了,用于摄像头、屏幕、硬件编解码、NPU 等驱动使用,可以通过 cat /sys/kernel/debug/ion/cvi_carveout_heap_dump/summary 看到驱动使用的内存(算能特有,叫 ION 内存),其它内存可以通过cat /proc/meminfo看到。如果你想调整内存分配,需要自己编译系统,修改 LicheeRV-Nano-Build 中 build/boards/sg200x/sg2002_licheervnano_sd/memmap.py 文件中的 ION_SIZE 来调整(看定制系统文档)。

+

为什么无法安装运行库,提示错误 请求失败!

+
    +
  • 请保证设备已经成功连接到互联网,可以换一个手机热点试试。
  • +
  • 确保系统镜像是烧录的最新的。
  • +
  • 如果提示 DNS 解析失败,可能是网络 DNS 设置问题,可以换一个手机热点试试,或者手动修改 /boot/resolv.conf(只修改这个文件需要重启) 和 /etc/resolv.conf(修改了这个文件不用重启,重启的作用就是把前者拷贝覆盖到这个文件)中的 DNS 服务器设置。
  • +
  • 确保你是从 Sipeed 购买的正版 MaixCAM。
  • +
  • 咨询客服,带上系统版本以及 device_key(可以在连接上 MaixVision 后点击断开连接按钮看到,有屏幕的也可以在系统设置->系统信息中看到)。
  • +
+

编译报错: type not registered yet?

+ +
from ._maix.peripheral.key import add_default_listener
+ImportError: arg(): could not convert default argument into a Python object (type not registered yet?). #define
+
+

显示有对象没有定义成 python 对象,在 MaixPy 中一般是由于自动扫描API生成时的顺序问题造成的,比如在a.hpp中有一个@maixpy声明的API, 在b.hpp 中有另一个API而且参数使用了a.hpp中的定义,那么可以说b.hpp需要依赖a.hpp,但目前MaixPy的编译脚本不会做依赖关系扫描,所以需要在MaixPy项目中的components/maix/headers_priority.txt文件中手动指定一下,a.hppb.hpp前面扫描就可以了。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/gui/i18n.html b/maixpy/doc/zh/gui/i18n.html new file mode 100644 index 00000000..14add373 --- /dev/null +++ b/maixpy/doc/zh/gui/i18n.html @@ -0,0 +1,491 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy MaixCAM i18n(国际化) 多语言实现 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy MaixCAM i18n(国际化) 多语言实现

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

i18n (国际化)简介

+

i18n 是国际化单词(internationalization)的简称,目的在与根据用户的地域或者喜好切换语言。

+

我们常用的 中文 和 英文 这个就是语言,语言有对应的地域编码( LCID),比如中文的地域编码为zh,英文为en,日文为ja,另外还有二级地域编码,比如简体中文对应zh-cn,一般我们实现zh即可。

+

地域编号可以参考Windows的地域编码表 或者看 wikipedia

+

MaixPy MaixCAM 中使用 i18n

+

用户使用大致流程如下:

+
    +
  • 首先用户使用时,在系统设置中可以选择系统语言,比如出厂默认是en即英文。
  • +
  • 然后程序通过maix.i18n.get_locale()可以获得当前系统设置的地域(用法见本列表后面的示例)。
  • +
  • 程序根据系统设置的地域显示对应语言的字符串。
  • +
+
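获取当前系统地域设置的简单示意:

from maix import i18n

locale = i18n.get_locale()
print(locale)   # 比如输出 "zh" 或 "en"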

对于应用程序来说,比较麻烦的地方就在这里的第三步,即根据地域设置查表获取对应的字符串,下面提供两种方法,根据自己的需求选择:

+

不使用翻译文件,直接使用字典

+

如果你的程序只有几个字符串,可以直接手动指定翻译字典:

+ +
from maix import i18n
+
+trans_dict = {
+    "zh": {
+        "hello": "你好"
+    },
+    "en": {
+    }
+}
+
+trans = i18n.Trans(trans_dict)
+tr = trans.tr
+
+trans.set_locale("zh")
+print(tr("hello"))
+print(tr("my friend"))
+
+

这里用trans.set_locale("zh")临时设置语言为中文了, 运行就会打印你好和my friend, 因为没给my friend添加翻译,所以原封不动地返回了。

+

自动扫描生成字典,并且从翻译文件加载

+

这种方法比较适合有大量需要翻译的字符串的场景。

+

前面的方法我们手动指定了字符串翻译,在简单场景很方便,但是如果字符串太多了,手动改字典很容易漏掉,所以我们需要程序自动帮我们找到需要翻译的字符串并生成翻译文件,我们只需要翻译一下文件就好了。

+

在 MaixPy 中,提供了maix.i18n.Trans 这个类,可以用来加载多种语言的翻译文件,调用其成员函数tr(),传入想要翻译的文字即可获得翻译,举例:

+ +
from maix import i18n, err
+trans = i18n.Trans()
+tr = trans.tr
+
+e = trans.load("locales")
+err.check_raise(e, "load translation yamls failed")
+
+print(tr("hello"))
+
+

这里从当前目录下的locales文件夹加载了翻译文件,然后根据系统的语言设置打印hello,比如中文就会打印你好

+

翻译文件: 既然这里加载用到了翻译文件,这些翻译文件怎么制作呢?
+首先我们需要知道我们需要翻译那些文字,显而易见,就是上面我们用函数tr调用的字符串,所以我们只需要搜索源码中所有用到了tr函数的字符串即可认为是我们需要翻译的所有字符串了。
+所以使用流程如下:

+
    +
  • 建立一个项目文件夹,里面存放代码入口main.py,可以使用 MaixVision 打开这个项目文件夹方便运行。
  • +
  • 编写main.py,让需要翻译的字符串都用上述的tr函数调用。
  • +
  • MaixPy 提供了一个扫描工具,首先确保安装了maixtool(电脑通过系统终端 pip install maixtool -U 命令来安装升级)。
  • +
  • 然后在目录下仍然使用电脑终端执行maixtool i18n -d . r来扫描需要翻译的字符串,并且生成一个locales目录,里面包含了中英文两种语言的翻译文件,如果要更多语言,执行maixtool i18n -h查看帮助。
  • +
  • 生成的文件是键值对组成的,比如zh.yaml中的hello: hello 的意思就是字符串hello中文翻译是hello,这显然不对,需要我们手动翻译一下,改成hello: 你好即可。注意编辑文件一定要用支持 UTF-8 编码的编辑器,特别是在Windows下不要将文件改为GBK编码了,不然会出错,可以用 MaixVision 或者 VsCode 编辑。
  • +
  • 然后运行项目,或者打包项目为安装包都可以,记得把 locales 目录也一起打包进去。
  • +
  • 如果后面又更新了源码,需要再次执行maixtool命令更新文件,更新会对之前已经翻译了的文件更新,如果你担心程序不小心将之前的错误覆盖,可以先自行备份一份,确认无误后再删除备份。
  • +
+

这样你的程序就会根据系统语言设置更改语言了,如果你调试程序也可以手动调用trans.set_locale("zh")来手动临时切换语言。

+

显示翻译到界面

+

前面的例子都是在调用print函数打印,如果想显示到界面上,还有一步要做,就是需要字库支持,对于英文来说默认都支持了,可是对于中文这种字库庞大的语言,默认是不支持的。
+比如:

+ +
from maix import i18n, image, display, app, time
+
+trans_dict = {
+    "zh": {
+        "hello": "你好"
+    },
+    "en": {
+    }
+}
+
+trans = i18n.Trans(trans_dict)
+tr = trans.tr
+trans.set_locale("zh")
+
+disp = display.Display()
+img = image.Image(disp.width(), disp.height())
+
+img.draw_string(10, 10, tr("hello"), image.COLOR_WHITE, scale=2)
+disp.show(img)
+
+while not app.need_exit():
+    time.sleep_ms(100)
+
+

运行会发现显示了一堆?,因为没有中文字库,对于image模块,可以加载字库,系统内置了一个中文字库,你也可以用你自己的字库:

+ +
from maix import i18n, image, display, app, time
+
+trans_dict = {
+    "zh": {
+        "hello": "你好"
+    },
+    "en": {
+    }
+}
+
+trans = i18n.Trans(trans_dict)
+tr = trans.tr
+trans.set_locale("zh")
+
+disp = display.Display()
+
+
+image.load_font("sourcehansans", "/maixapp/share/font/SourceHanSansCN-Regular.otf", size = 24)
+image.set_default_font("sourcehansans")
+
+img = image.Image(disp.width(), disp.height())
+img.draw_string(10, 10, tr("hello"), image.COLOR_WHITE, scale=2)
+disp.show(img)
+
+while not app.need_exit():
+    time.sleep_ms(100)
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/index.html b/maixpy/doc/zh/index.html new file mode 100644 index 00000000..2e7a5441 --- /dev/null +++ b/maixpy/doc/zh/index.html @@ -0,0 +1,619 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 快速开始 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 快速开始

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ + + + +
+
+

关于 MaixPy 介绍请看 MaixPy 官网首页
+喜欢 MaixPy 请给 MaixPy 项目 点个 Star ⭐️ 以鼓励我们开发更多功能。

+
+

写在前面

+
    +
  • 仔细阅读按照下面文档的步骤,不要遗漏内容,对比进行操作。
  • +
  • 左边目录请仔细查看,基础部分一定要耐心阅读完。
  • +
  • 提问前先在左边目录仔细查找文档,以及看FAQ
  • +
  • 本文档是MaixPy v4 教程文档,注意与 MaixPy-v1(k210系列)区别开,勿错看文档。
  • +
  • 也可以参考下面的视频上手教程,注意视频内容有更正在评论区和弹幕会补充,以最新的文档为准,更多视频教程可以到 B 站搜索 MaixCAM。
  • +
+ +

获得 MaixCAM 设备

+

maixcam_pro

+
    +
  • MaixCAM 主体,目前有几个版本,根据自己的需求买:

    + +
  • +
  • TF 卡: 系统安装在 TF 卡中,没有 TF 卡无法启动。

    +
  • +
  • 摄像头: 视觉相关应用需要摄像头,可以根据自己的使用场景和财力选择合适的摄像头型号。比如 OS01A10 成像质量比 GC4653 高。

    +
  • +
  • 触摸屏: 方便交互,官方默认集成的应用都需要触摸屏交互,可以大大提升交互体验、降低开发难度。

    +
  • +
  • 电源: 一个稳定的供电方式,MaixCAM 需要 5v 500mA 的稳定供电,如果供电不足可能会导致无法开机,或者运行过程中死机等情况。特别是有些电脑的 USB 口供电可能不稳定。

    +
  • +
  • TF 读卡器: 用来烧录系统,必备。

    +
  • +
  • USB转串口模块: 如果你想要电脑和 MaixCAM 之间串口通信,需要备一个,淘宝随便买一个就行,也可以直接在 Sipeed 店里一起买,比如这个双串口转USB模块

    +
  • +
+
+

注意,目前只支持 MaixCAM 系列开发板,其它同型号芯片的开发板均不支持,包括 Sipeed 的同型号芯片开发板,请注意不要买错造成不必要的时间和金钱浪费。

+
+

使用无屏幕版本

+

如果你使用的是无屏幕版本,请看快速开始(无屏幕版本)文档。

+

上手配置

+

准备 TF 镜像卡和插入到设备

+

如果你买的套餐里面有 TF 卡,里面已经有出厂镜像了,如果出厂时 TF 卡没有安装到设备,需要先小心打开外壳(注意里面有排线连接不要扯断了),然后插入 TF 卡。另外因为出厂的固件可能比较老旧,务必按照升级和烧录系统先将系统升级到最新版本,否则可能会遇到某些应用 和 API 无法使用的问题。

+

如果没买 TF 卡,则需要将系统烧录进自备的 TF 卡中,烧录方法请看升级和烧录系统,然后再安装到板子。

+

上电开机

+

使用 Type-C 数据线连接 MaixCAM 设备给设备供电,等待设备开机,开机会进入功能选择界面。

+

maixcam_font

+

如果屏幕没有显示

+
    +
  • 请确认购买了配套的 TF 卡,如果确认有 TF 卡,并且已经插入到设备,可以尝试更新到最新的系统
  • +
  • 如果你没有购买 TF 卡套餐,你需要按照升级和烧录系统的方法烧录最新的系统到 TF 卡。
  • +
  • 另外请确认屏幕和摄像头的排线没有松动,屏幕的排线在拆开外壳时很容易脱落,需要注意。
  • +
+

联网

+

首次运行需要连接网络,后面会激活设备和使用 IDE 会用到。
+如果没有路由器可以用手机开一个热点。

+

设备上点击 设置(Settings),选择WiFi,有两种方法连接 WiFi 热点:

+
    +
  • 扫描 WiFi 分享码:
      +
    • 使用手机分享WiFi热点二维码,或者到maixhub.com/wifi 生成一个二维码。
    • +
    • 点击扫描二维码按钮,会出现摄像头的画面,扫描前面生成的二维码进行连接。
    • +
    +
  • +
  • 搜索热点:
      +
    • 点击 扫描 按钮开始扫描周围 WiFi, 可以多次点击刷新列表。
    • +
    • 找到你的 WiFi 热点。
    • +
    • 输入密码点击连接按钮进行连接。
    • +
    +
  • +
+

然后等待获取到 IP 地址,这可能需要 10~30 秒,如果界面没有刷新可以退出WiFi功能重新进入查看,或者在设置 -> 设备信息 中也可以看到 IP 信息。

+

升级运行库

+

这一步很重要 !!! 这一步如果不做好,其它应用和功能可能无法运行(比如闪退等)。

+
    +
  • 首先保证上一步连接 WiFi 已经完成,并且获取到 IP 地址能访问公网。
  • +
  • 设备上点击 设置(Settings),选择安装运行库
  • +
  • 安装完成后可以看到更新到了最新版本,然后退出即可。
  • +
+

如果显示Request failed 或者请求失败,请先检查网络是否已经连接,需要能连接到互联网,如果还不行,请拍照联系客服处理即可。

+

使用内置应用

+

内置了很多应用,比如 找色块,AI 检测器,巡线等等,自学习检测举例:

+ +

其它的请自行摸索,以后还会更新更多应用,使用文档以及应用更新请看 MaixHub 应用商店

+

注意:应用只包含了 MaixPy 能实现的一部分功能,使用 MaixPy 能创造更多功能

+

作为串口模块使用

+
+

如果是想把设备当成主控使用(或者你还不懂什么是串口模块)可以跳过这一步。

+
+

内置的各种应用可以直接当成串口模块使用,比如找色块找人脸找二维码等等,
+注意这里串口仅能直接和其它单片机连接,如果要和电脑串口通信请自备一个 USB 转串口模块

+

使用方法:

+
    +
  • 硬件连接: 可以给设备接上Type-C一转二小板(对于 MaixCAM-Pro 是 6Pin 接口),这样我们就能将设备通过串口连接到你的主控上了,比如Arduino树莓派STM32等等。
  • +
  • 打开你想用的应用,比如二维码识别,当设备扫描到二维码就会通过串口把结果发送给你的主控了。
  • +
+
+

发送的串口波特率是 115200,数据格式是 8N1,协议遵循 Maix 串口通信协议标准,可以在MaixHub APP 找到对应的应用介绍查看协议。
+如果应用没有做串口输出结果,你也可以自己基于对应功能的例程,自行按照串口使用文档添加串口输出结果。

+
+

准备连接电脑和设备

+

为了后面电脑(PC)能和 设备(MaixCAM)通信,我们要让它们在同一个局域网内,提供了两种方式:

+
    +
  • 方法一 (强烈推荐):无线连接, 设备使用 WiFi 连接到电脑连接的同一个路由器或者 WiFi 热点下: 在设备的设置 -> WiFi 设置中连接到你的 WiFi 即可。(WiFi 如果出现画面卡顿或者延迟的问题可以尝试下面的方法二使用有线连接。)
  • +
  • 方法二:有线连接, 设备通过 USB 线连接到电脑,设备会虚拟成一个 USB 网卡,这样和电脑就通过 USB 在同一局域网了。推荐先用 WiFi 开始是因为有线虽然传输稳定但是可能会遇到线缆不良,接触不良,驱动等问题,遇到问题也可以在 FAQ 中找常见问题。
  • +
+
+方法二在不同电脑系统中驱动安装方法: +

默认会有两种 USB 虚拟网卡驱动(NCM 和 RNDIS驱动),以满足不同系统的需求,你也可以在设备端设置应用 -> USB设置 里面关掉不用的虚拟网卡:

+
    +
  • Windows: windows 所有系统会自动安装 RNDIS 驱动, 仅 Win11 会自动安装 NCM 驱动,两种驱动有一个能用就行
      +
    • 打开任务管理器 -> 性能,可以看到一个虚拟的以太网,并且可以看到 ip,比如 10.131.167.100 是电脑的 ip, 设备的 ip 是把最后一位改为1,即 10.131.167.1。如果是 Win11 则会看到两个虚拟网卡,随便选择一个 IP 使用即可。
    • +
    • 另外也可以打开电脑的 设备管理器(搜索栏搜索设备管理器), RNDIS 和 NCM 驱动被正确安装的效果:
      +RNDIS ok NCM ok
    • +
    +
  • +
  • Linux: 无需额外设置,插上 USB 线即可。 使用 ifconfig 或者 ip addr 查看到 usb0 和 usb1 网卡,两个 IP 都可以使用,注意 这里看到的 ip 比如 10.131.167.100 是电脑的 ip, 设备的 ip 是把最后一位改为1,即 10.131.167.1。
  • +
  • MacOS: 在系统设置->网络里面查看到 usb 网卡,注意 这里看到的 ip 比如 10.131.167.100 是电脑的 ip, 设备的 ip 是把最后一位改为1,即 10.131.167.1。
  • +
+
+
+

开发环境准备

+
    +
  • 首先保证上一步电脑和设备已经在同一个局域网中了。
  • +
  • 下载 MaixVision 并安装。
  • +
  • 使用 Type-C 连接设备和电脑,打开 MaixVision,点击左下角的“连接”按钮,会自动搜索设备,稍等一下就能看到设备,点击设备右侧的连接按钮以连接设备。
  • +
+

如果没有扫描到设备,你也可以在设备设置 -> 设备信息 中查看设备的 IP 地址手动输入, 也可以在 FAQ 中找到解决方法。

+

连接成功后,设备的功能选择界面会消失,屏幕会黑屏,释放了所有硬件资源,如果仍然有画面显示,可以断开连接重连。

+

这里有 MaixVision 的使用示例视频:

+

+

运行例程

+

点击 MaixVision 左侧的示例代码,选择一个例程,点击左下角运行按钮将代码发送到设备上运行。

+

比如:

+
    +
  • hello_maix.py,点击运行按钮,就能看到 MaixVision 终端有来自设备打印的消息,以及右上角出现了图像。
  • +
  • camera_display.py,这个例程会打开摄像头并在屏幕上显示摄像头的画面。
  • +
+ +
from maix import camera, display, app
+
+disp = display.Display()          # 构造一个显示对象,并初始化屏幕
+cam = camera.Camera(640, 480)     # 构造一个摄像头对象,手动设置了分辨率为 640x480, 并初始化摄像头
+while not app.need_exit():        # 一直循环,直到程序退出(可以通过按下设备的功能按键退出或者 MaixVision 点击停止按钮退出)
+    img = cam.read()              # 读取摄像头画面保存到 img 变量,可以通过 print(img) 来打印 img 的详情
+    disp.show(img)                # 将 img 显示到屏幕上
+
+ +

其它例程可以自行尝试。

+
+

如果你使用相机例程遇到了图像显示卡顿,可能是网络不通畅,或者 USB 线质量或者主机 USB 质量太差造成,可以更换连接方式或者更换线缆、主机 USB 口或者电脑等。

+
+

安装应用到设备

+

上面是在设备中运行代码,MaixVision 断开后代码就会停止运行,如果想让代码出现在开机菜单中,可以打包成应用安装到设备上。

+

点击 MaixVision 左下侧的安装应用按钮,填写应用信息,会将应用安装到设备上,然后在设备上就能看到应用了。
+也可以选择打包应用,将你的应用分享到MaixHub 应用商店

+
+

默认例程没有显式编写退出功能,进入应用后按下设备的功能按键即可退出应用。(对于 MaixCAM 是 user 键)

+
+

如果想让程序开机自启动,可以在 设置 -> 开机启动 中设置。

+

更多 MaixVision 使用请看 MaixVision 文档

+

下一步

+

看到这里,如果你觉得不错,请务必来 github 给 MaixPy 开源项目点一个 star(需要先登录 github), 你的 star 和认同是我们不断维护和添加新功能的动力!

+

到这里你已经体验了一遍使用和开发流程了,接下来可以学习 MaixPy 语法和功能相关的内容,请按照左边的目录进行学习,如果遇到 API 使用问题,可以在API 文档中查找。

+

学习前最好带着自己学习的目的学,比如做一个有趣的小项目,这样学习效果会更好,项目和经验都可以分享到MaixHub 分享广场,会获得现金奖励哦!

+

常见问题 FAQ

+

遇到问题可以优先在 FAQ 里面找,找不到再在下面的论坛或者群询问,或者在 MaixPy issue 提交源码问题。

+

分享交流

+
    +
  • MaixHub 项目和经验分享 :分享你的项目和经验,获得现金打赏,获得官方打赏的基本要求:
      +
    • 可复现型:较为完整的项目制作复现过程。
    • +
    • 炫耀型:无详细的项目复现过程,但是项目展示效果吸引人。
    • +
    • Bug 解决经验型:解决了某个难题的过程和具体解决方法分享。
    • +
    +
  • +
  • MaixPy 官方论坛(提问和交流)
  • +
  • QQ 群: (建议在 QQ 群提问前先发个帖,方便群友快速了解你遇到了什么问题,复现过程是怎样的)
      +
    • MaixPy (v4) AI 视觉交流大群: 862340358
    • +
    +
  • +
  • Telegram: MaixPy
  • +
  • MaixPy 源码问题: MaixPy issue
  • +
  • 商业合作或批量购买请联系 support@sipeed.com 。
  • +
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/modules/acc.html b/maixpy/doc/zh/modules/acc.html new file mode 100644 index 00000000..c5767cda --- /dev/null +++ b/maixpy/doc/zh/modules/acc.html @@ -0,0 +1,406 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 读取加速度计和姿态解算 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 读取加速度计和姿态解算

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

IMU 简介

+

对于 MaixCAM-Pro,板载了一款集成了三轴陀螺仪和三轴加速度计的QMI8658芯片, 它能够提供高精度的姿态、运动和位置数据,适用于各种需要精确运动检测的应用场景,如无人机、机器人、游戏控制器和虚拟现实设备等。QMI8658具有低功耗、高稳定性和高灵敏度的特点, 下面将介绍使用IMU模块来获取姿态数据。

+
+

MaixCAM 无板载加速度计,可自行外接使用 iic 驱动。

+
+

MaixPy 中使用 IMU

+

使用 IMU 模块从 QMI8658 读取数据.

+

示例代码:

+ +
from maix.ext_dev import imu
+
+i = imu.IMU("qmi8658", mode=imu.Mode.DUAL,
+                              acc_scale=imu.AccScale.ACC_SCALE_2G,
+                              acc_odr=imu.AccOdr.ACC_ODR_8000,
+                              gyro_scale=imu.GyroScale.GYRO_SCALE_16DPS,
+                              gyro_odr=imu.GyroOdr.GYRO_ODR_8000)
+
+while True:
+    data = i.read()
+    print("\n-------------------")
+    print(f"acc x: {data[0]}")
+    print(f"acc y: {data[1]}")
+    print(f"acc z: {data[2]}")
+    print(f"gyro x: {data[3]}")
+    print(f"gyro y: {data[4]}")
+    print(f"gyro z: {data[5]}")
+    print(f"temp: {data[6]}")
+    print("-------------------\n")
+
+

按照您的需求初始化 IMU 对象, 然后调用 read() 即可. read() 返回的是从 IMU 中读出的原始数据.

+

如果 mode 参数选择 DUAL, 则 read()返回的数据为 [acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z, temp], 如果 mode 只选择 ACC/GYRO 中的一个, 只会返回对应的 [x, y, z, temp], 例如选择 ACC, read() 会返回 [acc_x, acc_y, acc_z, temp].

+
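在此基础上,也可以用加速度计的原始数据粗略估算俯仰角(pitch)和横滚角(roll)。下面是一个简化示意(沿用上面 DUAL 模式的返回格式,仅用加速度计做静态姿态估算,没有做滤波和陀螺仪融合,实际应用建议自行做融合滤波):

from maix.ext_dev import imu
import math

i = imu.IMU("qmi8658", mode=imu.Mode.DUAL,
            acc_scale=imu.AccScale.ACC_SCALE_2G,
            acc_odr=imu.AccOdr.ACC_ODR_8000,
            gyro_scale=imu.GyroScale.GYRO_SCALE_16DPS,
            gyro_odr=imu.GyroOdr.GYRO_ODR_8000)

while True:
    acc_x, acc_y, acc_z = i.read()[:3]  # DUAL 模式下前三个值为加速度计原始数据
    # 仅用加速度计做静态姿态估算,设备剧烈运动时误差会比较大
    roll = math.degrees(math.atan2(acc_y, acc_z))
    pitch = math.degrees(math.atan2(-acc_x, math.sqrt(acc_y * acc_y + acc_z * acc_z)))
    print(f"pitch: {pitch:.2f}, roll: {roll:.2f}")
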

有关 IMU API 的详细说明请看 IMU API 文档

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/modules/bm8653.html b/maixpy/doc/zh/modules/bm8653.html new file mode 100644 index 00000000..1fcfcf15 --- /dev/null +++ b/maixpy/doc/zh/modules/bm8653.html @@ -0,0 +1,468 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy bm8653驱动说明 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy bm8653驱动说明

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期版本作者更新内容
2024-08-271.0.0iawak9lkm + + 初版文档 + +
+
+
+ +
+
+ +

BM8653 简介

+

BM8653是一款实时时钟(RTC)芯片,广泛应用于各种电子设备中,用于提供精确的时间和日期信息。它具有低功耗、高精度的特点,能够在设备断电的情况下通过备用电池继续运行,确保时间的连续性和准确性。

+

MaixPy 中使用 BM8653

+

在 MaixPy 中使用 BM8653 很简单, 您只需要知道您平台上的 BM8653 挂载在哪个 I2C 总线上. MaixCAM Pro 板载的 BM8563 挂载在 I2C-4 上.

+

示例代码:

+ +
from maix import ext_dev, pinmap, err, time
+
+### Enable I2C
+# ret = pinmap.set_pin_function("PIN_NAME", "I2Cx_SCL")
+# if ret != err.Err.ERR_NONE:
+#     print("Failed in function pinmap...")
+#     exit(-1)
+# ret = pinmap.set_pin_function("PIN_NAME", "I2Cx_SDA")
+# if ret != err.Err.ERR_NONE:
+#     print("Failed in function pinmap...")
+#     exit(-1)
+
+BM8653_I2CBUS_NUM = 4
+
+rtc = ext_dev.bm8563.BM8563(BM8653_I2CBUS_NUM)
+
+### 2020-12-31 23:59:45
+t = [2020, 12, 31, 23, 59, 45]
+
+# Set time
+rtc.datetime(t)
+
+while True:
+    rtc_now = rtc.datetime()
+    print(f"{rtc_now[0]}-{rtc_now[1]}-{rtc_now[2]} {rtc_now[3]}:{rtc_now[4]}:{rtc_now[5]}")
+    time.sleep(1)
+
+

如果您使用的是 MaixCAM Pro 板载的 BM8653, 无需使能 I2C-4.

+

示例中读写 BM8653, 设置或是读取当前时间.

+

您也可以通过以下示例将当前 BM8653 内的时间设置为系统时间, 或是将当前系统时间设置为 BM8653 内的时间.

+ +
from maix import ext_dev, pinmap, err, time
+
+### Enable I2C
+# ret = pinmap.set_pin_function("PIN_NAME", "I2Cx_SCL")
+# if ret != err.Err.ERR_NONE:
+#     print("Failed in function pinmap...")
+#     exit(-1)
+# ret = pinmap.set_pin_function("PIN_NAME", "I2Cx_SDA")
+# if ret != err.Err.ERR_NONE:
+#     print("Failed in function pinmap...")
+#     exit(-1)
+
+
+BM8653_I2CBUS_NUM = 4
+
+rtc = ext_dev.bm8563.BM8563(BM8653_I2CBUS_NUM)
+
+### Update RTC time from system
+rtc.systohc()
+
+### Update system time from RTC
+# rtc.hctosys()
+
+while True:
+    rtc_now = rtc.datetime()
+    print(f"{rtc_now[0]}-{rtc_now[1]}-{rtc_now[2]} {rtc_now[3]}:{rtc_now[4]}:{rtc_now[5]}")
+    time.sleep(1)
+
+

BM8653 的底层实现类似于单例模式, 本 API 可以保证对单个 BM8653 的读写是线程安全的. 也就意味着您可以随意的创建 BM8653 对象, 在任意地方读写 BM8653 均不会产生数据竞争.

+

传给 BM8653 对象的 timetuple 遵循 (year, month, day[, hour[, minute[, second]]]), 即必须要有前三个参数, 后续参数缺失部分代表的时间不会进行修改. BM8653 保证返回的 timetuple 为空时表示错误, 不为空时必定是含有6个元素的 List[], 其内容为(year, month, day, hour, minute, second).

+
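例如只想修改日期、保持时分秒不变时,可以只传前三个元素。下面是一个简单示意(日期数值仅为举例):

from maix import ext_dev

rtc = ext_dev.bm8563.BM8563(4)   # MaixCAM-Pro 板载 RTC 挂载在 I2C-4 上

rtc.datetime([2025, 1, 1])       # 只传 (year, month, day),时分秒保持不变
print(rtc.datetime())            # 返回 [year, month, day, hour, minute, second]
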

有关 BM8653 API 的详细说明请看 BM8653 API 文档

+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/modules/qmi8658.html b/maixpy/doc/zh/modules/qmi8658.html new file mode 100644 index 00000000..d1d3b571 --- /dev/null +++ b/maixpy/doc/zh/modules/qmi8658.html @@ -0,0 +1,442 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy qmi8658驱动说明 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy qmi8658驱动说明

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期版本作者更新内容
2024-08-271.0.0iawak9lkm + + 初版文档 + +
+
+
+ +
+
+ +

QMI8658 简介

+

QMI8658是一款集成了三轴陀螺仪和三轴加速度计的惯性测量单元(IMU)芯片. 它能够提供高精度的姿态、运动和位置数据,适用于各种需要精确运动检测的应用场景,如无人机、机器人、游戏控制器和虚拟现实设备等。QMI8658具有低功耗、高稳定性和高灵敏度的特点.

+

MaixPy 中使用 QMI8658

+

在 MaixPy 中使用 QMI8658 很简单, 您只需要知道您使用的平台上 QMI8658 挂载在哪个 I2C 总线上. MaixCAM Pro 板载的 QMI8658 挂载在 I2C-4 上.

+

示例代码:

+ +
from maix import ext_dev, pinmap, err, time
+
+### Enable I2C
+# ret = pinmap.set_pin_function("PIN_NAME", "I2Cx_SCL")
+# if ret != err.Err.ERR_NONE:
+#     print("Failed in function pinmap...")
+#     exit(-1)
+# ret = pinmap.set_pin_function("PIN_NAME", "I2Cx_SDA")
+# if ret != err.Err.ERR_NONE:
+#     print("Failed in function pinmap...")
+#     exit(-1)
+
+QMI8658_I2CBUS_NUM = 4
+
+imu = ext_dev.qmi8658.QMI8658(QMI8658_I2CBUS_NUM,
+                              mode=ext_dev.qmi8658.Mode.DUAL,
+                              acc_scale=ext_dev.qmi8658.AccScale.ACC_SCALE_2G,
+                              acc_odr=ext_dev.qmi8658.AccOdr.ACC_ODR_8000,
+                              gyro_scale=ext_dev.qmi8658.GyroScale.GYRO_SCALE_16DPS,
+                              gyro_odr=ext_dev.qmi8658.GyroOdr.GYRO_ODR_8000)
+
+while True:
+    data = imu.read()
+    print("\n-------------------")
+    print(f"acc x: {data[0]}")
+    print(f"acc y: {data[1]}")
+    print(f"acc z: {data[2]}")
+    print(f"gyro x: {data[3]}")
+    print(f"gyro y: {data[4]}")
+    print(f"gyro z: {data[5]}")
+    print(f"temp: {data[6]}")
+    print("-------------------\n")
+
+

按照您的需求初始化 QMI8658 对象, 然后调用 read() 即可. read() 返回的是从 QMI8658 中读出的原始数据.

+

如果 mode 参数选择 DUAL, 则 read()返回的数据为 [acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z, temp], 如果 mode 只选择 ACC/GYRO 中的一个, 只会返回对应的 [x, y, z, temp], 例如选择 ACC, read() 会返回 [acc_x, acc_y, acc_z, temp].

+

有关 QMI8658 API 的详细说明请看 QMI8658 API 文档

+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/modules/rtc.html b/maixpy/doc/zh/modules/rtc.html new file mode 100644 index 00000000..ec4b9c8b --- /dev/null +++ b/maixpy/doc/zh/modules/rtc.html @@ -0,0 +1,380 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 使用 RTC 模块 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 使用 RTC 模块

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

MaixCAM-Pro 板载了一个 RTC 模块,默认上电会自动同步系统时间,以及从网络同步时间,网络状态变化后也会自动同步。

+

所以一般情况不需要手动操作 RTC,直接使用系统的时间 API 获取时间即可。

+
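例如直接用 Python 标准库读取系统时间即可(示意代码):

from datetime import datetime

# 系统时间已由 RTC / NTP 自动同步,直接读取即可
print(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
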

如果一定要手动操作 RTC,请看bm8653 RTC 模块使用(手动操作前可以在系统 /etc/init.d目录下把 RTC 和 NTP 相关服务删掉以禁用自动同步)。

+
+

MaixCAM 无板载 RTC。

+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/modules/temp_humi.html b/maixpy/doc/zh/modules/temp_humi.html new file mode 100644 index 00000000..62b9fbc0 --- /dev/null +++ b/maixpy/doc/zh/modules/temp_humi.html @@ -0,0 +1,379 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 读取温湿度传感器 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 读取温湿度传感器

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

简介

+

通过给 MaixCAM 外挂一个温湿度传感器模块,可以轻松读取到环境温度和湿度,这里以 Si7021 这款传感器为例,通过 I2C 可以驱动它。

+

使用

+

完整的代码在MaixPy/examples/ext_dev/sensors/temp_humi_si7021.py

+

注意系统镜像需要 >= 2024.6.3_maixpy_v4.2.1 版本。
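
下面是一个读取 Si7021 的简化示意,仅供理解流程:其中 Si7021 接在软件 I2C5、地址 0x40,writeto/readfrom 方法名为假设(请以 i2c API 文档为准),测量命令与换算公式来自 Si7021 数据手册,完整写法请以官方例程 temp_humi_si7021.py 为准:

from maix import i2c, pinmap, time
import struct

# 假设:Si7021 接在软件 I2C5 上(A15=SCL, A27=SDA)
pinmap.set_pin_function("A15", "I2C5_SCL")
pinmap.set_pin_function("A27", "I2C5_SDA")

SI7021_ADDR = 0x40
bus = i2c.I2C(5, i2c.Mode.MASTER)

def read_raw(cmd):
    # 假设 i2c 对象提供 writeto/readfrom 方法,具体方法名请以 i2c API 文档为准
    bus.writeto(SI7021_ADDR, bytes([cmd]))
    time.sleep_ms(30)                       # 等待转换完成
    data = bus.readfrom(SI7021_ADDR, 2)     # 高字节在前
    return struct.unpack(">H", data)[0]

while True:
    humi_raw = read_raw(0xF5)               # 测湿度(No Hold Master 模式)
    temp_raw = read_raw(0xF3)               # 测温度(No Hold Master 模式)
    humi = 125.0 * humi_raw / 65536 - 6     # Si7021 数据手册中的换算公式
    temp = 175.72 * temp_raw / 65536 - 46.85
    print(f"temp: {temp:.2f} C, humi: {humi:.2f} %RH")
    time.sleep_ms(1000)
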

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/modules/thermal_cam.html b/maixpy/doc/zh/modules/thermal_cam.html new file mode 100644 index 00000000..67fd8fbb --- /dev/null +++ b/maixpy/doc/zh/modules/thermal_cam.html @@ -0,0 +1,376 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 使用热红外图像传感器 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 使用热红外图像传感器

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

目前官方还未上架硬件产品,如果只是小分辨率需求,可以自己淘宝买一个串口或者 IIC 模块进行驱动,后面官方上线高分辨率模块会更新本文档。

+

热红外摄像头模块可以参考 K210+MLX90640红外热像仪、热红外 heimann (海曼) HTPA 32x32d 等。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/modules/tmc2209.html b/maixpy/doc/zh/modules/tmc2209.html new file mode 100644 index 00000000..0055c8d0 --- /dev/null +++ b/maixpy/doc/zh/modules/tmc2209.html @@ -0,0 +1,644 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy tmc2209 单串口驱动使用介绍 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy tmc2209 单串口驱动使用介绍

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期版本作者更新内容
2024-08-211.0.0iawak9lkm + + 初版文档 + +
+
+
+ +
+
+ +

TMC2209 简介

+

TMC2209是一款由德国Trinamic公司生产的步进电机驱动芯片。它专为2相步进电机设计,具有低功耗、高效率和良好的噪声抑制能力。TMC2209支持高达2.8A的电流,适用于各种步进电机应用,如3D打印机、CNC机床、机器人等。

+

MaixPy 中使用 tmc2209 驱动步进电机

+
    +
  • 请确保您的步进电机为两相四线步进电机, 然后确认您的电机步进角度(step_angle), 需要使用的微步数(micro_step), 以及该电机旋转一圈时, 负载移动的距离(screw_pitch或round_mm). 以便我们后续配置驱动参数.

    +
  • +
  • 一般来说, 市面上的 TMC2209 的驱动板有以下这些引脚(如果您嫌麻烦, 可以采购我司在售的 TMC2209 驱动板, 链接[暂未上架,敬请期待]):

    + +
            ---------
    +     EN-|       |-VM
    +    MS1-|       |-GND
    +    MS2-|       |-2B
    +     RX-|       |-2A
    +     TX-|       |-1A
    +     NC-|       |-1B
    +   STEP-|       |-VDD
    +    DIR-|       |-GND
    +        ---------
    +
    +

    EN: EN 为使能脚, 将该引脚接到 GND 以硬件使能 TMC2209.

    +

    MS1: MS1 为微步进选择引脚之一,与 MS2 引脚配合使用,用于设置步进电机的微步进模式。

    +

    MS2: MS2 为微步进选择引脚之一,与 MS1 引脚配合使用,用于设置步进电机的微步进模式。

    +

    This driver program only supports the UART mode of TMC2209. In UART mode, the original microstep selection pins MS1 and MS2 are redefined as AD0 and AD1, respectively. The combination of the logic levels of these two pins determines the UART address of the TMC2209, with a value range from 0x00 to 0x03. This means that a single UART port can connect up to 4 TMC2209 drivers with different addresses. For example, when MS1 is at a low level (0) and MS2 is at a high level (1), the UART address is binary 0b10, which is hexadecimal 0x02.

    +

    TX: TX 为串行通信发送引脚,用于与外部微控制器进行串口通信。

    +

    RX: RX 为串行通信接收引脚,用于与外部微控制器进行串口通信。

    +

    在 TMC2209 上, 同时使用 RXTX 时, 请确保 TMC2209 驱动板 RX 与主控芯片 TX 间存在 1K 欧姆的电阻. 否则会出现通信数据异常.

    +

    NC: NC 为未连接引脚,表示该引脚在正常使用中不需要连接。

    +

    STEP: STEP 为步进信号输入引脚,每接收到一个脉冲信号,步进电机前进一个步进角度。因为本驱动为纯 UART 方式驱动,故该引脚不需要连接, 悬空即可.

    +

    DIR: DIR 为方向信号输入引脚,用于控制步进电机的旋转方向。当 DIR 为高电平时,电机顺时针旋转;当 DIR 为低电平时,电机逆时针旋转。因为本驱动为纯 UART 方式驱动,故该引脚不需要连接, 悬空即可.

    +

    VM: VM 为电源输入引脚,连接到步进电机的电源正极。

    +

    GND: GND 为接地引脚,连接到电源的负极。

    +

    2B, 2A, 1B, 1A: 这些引脚为步进电机的相位输出引脚,分别连接到步进电机的两相线圈。

    +

    VDD: VDD 为逻辑电源输入引脚,为芯片内部的逻辑电路提供电源。

    +
  • +
  • 使用 MaixPy 中的 TMC2209 驱动

    +
  • +
+

以一个步进角度为18,微步数为256,螺距为3mm的丝杆步进电机为例:

+ +
from maix import pinmap, ext_dev, err, time
+
+port = "/dev/ttyS1"
+uart_addr = 0x00
+uart_baudrate = 115200
+step_angle = 18
+micro_step = 256
+screw_pitch = 3
+speed = 6
+use_internal_sense_resistors = True
+run_current_per = 100
+hold_current_per = 100
+
+
+if port == "/dev/ttyS1":
+    ret = pinmap.set_pin_function("A19", "UART1_TX")
+    if ret != err.Err.ERR_NONE:
+        print("Failed in function pinmap...")
+        exit(-1)
+    ret = pinmap.set_pin_function("A18", "UART1_RX")
+    if ret != err.Err.ERR_NONE:
+        print("Failed in function pinmap...")
+        exit(-1)
+
+slide = ext_dev.tmc2209.ScrewSlide(port, uart_addr, uart_baudrate,
+                            step_angle, micro_step, screw_pitch, speed,
+                            use_internal_sense_resistors, run_current_per, hold_current_per)
+
+def reset_callback() -> bool:
+    if 2 > 1:   # An event occurs (e.g., a sensor is triggered),
+                # indicating that the slide has moved to the boundary and the motor needs to stop.
+        print("Reset finish...")
+        return True
+    # Not occurred, no need to stop the motor.
+    return False
+
+def move_callback(per:float) -> bool:
+    # per is the percentage of the current distance moved by move()
+    # out of the total distance required for the current move(), ranging from 0 to 100.
+    print(f"Slide moving... {per}")
+    if per >= 50: # Example: Stop moving when 50% of the total distance for the current move() has been covered.
+        print(f"{per} >= 50%, stop.")
+        return True
+    return False
+
+
+slide.reset(reset_callback)
+
+slide.move(screw_pitch*2, -1, move_callback)
+slide.move(-screw_pitch)
+
+while True:
+    slide.move(screw_pitch*2)
+    slide.move(-(screw_pitch*2))
+    time.sleep_ms(100)
+
+

程序中需要先使用 pinmap 确保 UART1 被启用.

+

然后创建一个 ScrewSlide 对象, 默认使用内部参考电阻, 默认使用 100% 的电机运行电流和 100% 的电机保持电流. 这些参数可能需要根据您的电机进行调整.

+

然后例程声明了一个reset回调函数和一个move回调函数并分别传入reset()函数和move()函数中. reset() 和 move() 会每隔一段时间调用回调函数以确认是否需要立即停止电机(当回调函数返回True).

+

move() 和 reset() 函数均为阻塞函数, 只有在回调函数返回True时(move还能在运动完指定长度时)停止电机并返回.

+

MaixPy 中使用 tmc2209 驱动恒定负载的步进电机

+

!!!丝杆步进电机携带恒定负载也不能视为带恒定负载的步进电机, 因为丝杆步进电机有限位装置以保证负载在杆上的运动方向是可知的, 丝杆步进电机运行时会与限位装置经常碰撞导致电机负载并不是恒定的. 其他情况举一反三即可知是否为恒定负载步进电机.

+

某些应用场景中, 步进电机全程的负载恒定, 只有在接触到边缘堵转时负载变高. 那么可以使用 Slide 类代替 ScrewSlide 类, 在这种情况下 Slide 具备堵转检测功能. 使用 ScrewSlide 也是可行的, 不具备堵转检测但是更加灵活. 请结合使用场景来选择这两个类, 本节只讲 Slide 类.

+
    +
  • 实现原理
  • +
+

TMC2209 内部存在一个寄存器 SG_RESULT, 该寄存器保存的数据与驱动电机剩余力矩成正比. 如果电机负载恒定, 该寄存器值变化幅度很小, 在堵转时, 该寄存器值将会快速减小并维持一个较低的值. 找到该恒定负载电机这个寄存器的运行平均值和堵转平均值, 即可衡量该电机在某时刻是否堵转.

+
    +
  • 获取 SG_RESULT 寄存器的平均值
  • +
+

maix.ext_dev.tmc2209 中提供了获取并保存该平均值的函数 maix.ext_dev.tmc2209.slide_scan.

+

example:

+ +
from maix import ext_dev, pinmap, err
+
+port = "/dev/ttyS1"
+uart_addr = 0x00
+uart_baudrate = 115200
+step_angle = 1.8
+micro_step = 256
+round_mm = 60
+speed = 60
+use_internal_sense_resistors = True
+run_current_per = 100
+hold_current_per = 100
+
+if port == "/dev/ttyS1":
+    ret = pinmap.set_pin_function("A19", "UART1_TX")
+    if ret != err.Err.ERR_NONE:
+        print("Failed in function pinmap...")
+        exit(-1)
+    ret = pinmap.set_pin_function("A18", "UART1_RX")
+    if ret != err.Err.ERR_NONE:
+        print("Failed in function pinmap...")
+        exit(-1)
+
+ext_dev.tmc2209.slide_scan(port, uart_addr, uart_baudrate,
+                           step_angle, micro_step, round_mm, speed, True,
+                           True, run_current_per, hold_current_per,
+                           conf_save_path='./slide_scan_example.bin', force_update=False)
+
+

配置好串口和驱动器参数, 然后调用 slide_scan. slide_scan 的最后一个参数 force_update 决定了在该配置文件已经存在时的行为:

+
+

如果 force_update 为 True, 将会用新的配置覆盖旧的配置

+

如果 force_update 为 False, 运行平均值将会更新为新旧值的平均值, 堵转平均值将会更新成新旧堵转平均值中较大的那一个值(例如一个滑台有左右边界, 左边界堵转平均值小于右边界堵转平均值, 也就是说右边界比左边界更容易堵转, 保存最容易堵转的平均值).

+
+

该程序执行后, 步进电机会一直保持正向旋转, 当遇到堵转时, 稍等300ms左右, 停止该程序. 程序会记录运行时的 SG_RESULT 寄存器平均值和堵转时的寄存器平均值到 conf_save_path 中.

+

后续 Slide 类可以加载该配置文件实现堵转时停止电机.

+
    +
  • 验证配置文件的值
  • +
+

或许您会好奇这个配置到底能不能用. maix.ext_dev.tmc2209 提供了测试该配置文件的函数 slide_test.

+

先保证电机未处于堵转状态, 然后修改参数以匹配您调用 slide_scan 的参数, 执行以下代码.

+

example

+ +
from maix import ext_dev, pinmap, err
+
+port = "/dev/ttyS1"
+uart_addr = 0x00
+uart_baudrate = 115200
+step_angle = 1.8
+micro_step = 256
+round_mm = 60
+speed = 60
+use_internal_sense_resistors = True
+run_current_per = 100
+hold_current_per = 100
+
+if port == "/dev/ttyS1":
+    ret = pinmap.set_pin_function("A19", "UART1_TX")
+    if ret != err.Err.ERR_NONE:
+        print("Failed in function pinmap...")
+        exit(-1)
+    ret = pinmap.set_pin_function("A18", "UART1_RX")
+    if ret != err.Err.ERR_NONE:
+        print("Failed in function pinmap...")
+        exit(-1)
+
+ext_dev.tmc2209.slide_test(port, uart_addr, uart_baudrate,
+                           step_angle, micro_step, round_mm, speed, True,
+                           True, run_current_per, hold_current_per,
+                           conf_save_path='./slide_scan_example.bin')
+
+

电机将会在堵转瞬间停止转动, 程序也会随之结束.

+

Slide.move()Slide.reset() 堵转停止逻辑也是如此.

+
    +
  • 使用 Slide
  • +
+

使用 Slide 的思路与 ScrewSlide 基本无异, 只是 Slide 取消了回调函数并增加了堵转停止逻辑.

+

如果使用 Slide 时未传入配置文件, Slide也是可以使用的. 堵转检测阈值为电机运行开始时的平均数*Slide.stop_default_per()/100. 电机运行近期平均数低于该值时电机停止. 可以通过 Slide.stop_default_per() 获取和修改该值.

+ +
from maix import pinmap, ext_dev, err, time
+
+port = "/dev/ttyS1"
+uart_addr = 0x00
+uart_baudrate = 115200
+step_angle = 1.8
+micro_step = 256
+round_mm = 60
+speed = 60
+use_internal_sense_resistors = True
+run_current_per = 100
+hold_current_per = 100
+
+if port == "/dev/ttyS1":
+    ret = pinmap.set_pin_function("A19", "UART1_TX")
+    if ret != err.Err.ERR_NONE:
+        print("Failed in function pinmap...")
+        exit(-1)
+    ret = pinmap.set_pin_function("A18", "UART1_RX")
+    if ret != err.Err.ERR_NONE:
+        print("Failed in function pinmap...")
+        exit(-1)
+
+slide = ext_dev.tmc2209.Slide(port, uart_addr, uart_baudrate,
+                              step_angle, micro_step, round_mm, speed,
+                              cfg_file_path="./slide_conf.bin")
+
+slide.reset()
+slide.move(60)
+slide.move(-60)
+
+

注意事项

+

本驱动程序由纯串口实现, 优点是占用引脚较少即可实现至多 4 个较高精度的电机驱动. 缺点是不适用于高精度要求的应用场景.

+

已知问题:

+
    +
  • 请勿使用 MaixCAM 的 UART0 作为驱动串口, 会导致 MaixCAM 无法正常开机等问题.
  • +
+

!!!如有BUG, 非常欢迎您提交 PR 反馈.

+

免责声明

+

本电机驱动程序(以下简称“程序”)是由[Sipeed]基于BSD-3开源协议的仓库 janelia-arduino/TMC2209 进行修改和使用的。本程序仅供学习和研究使用,不保证在所有环境和条件下都能正常工作。使用本程序的风险由用户自行承担。

+

[Sipeed]不对因使用或无法使用本程序而导致的任何损失或损害承担责任,包括但不限于直接损失、间接损失、偶然损失、特殊损失、惩罚性损失或后果性损失。

+

用户在实际应用中使用本程序前,应自行进行充分的测试和验证,确保程序符合其特定需求和环境。[Sipeed]不对程序的准确性、可靠性、完整性或适用性做出任何明示或暗示的保证。

+

用户在使用本程序时应遵守所有适用的法律法规,并确保不侵犯任何第三方的合法权益。[Sipeed]不对因用户违反法律法规或侵犯第三方权益而导致的任何后果承担责任。

+

本免责声明的解释权归[Sipeed]所有,并保留随时修改本免责声明的权利。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/modules/tof.html b/maixpy/doc/zh/modules/tof.html new file mode 100644 index 00000000..77445edb --- /dev/null +++ b/maixpy/doc/zh/modules/tof.html @@ -0,0 +1,375 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

Sipeed 官方有另外两款 TOF 模块 可以用来测距,可以购买后通过串口通信使用。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/network/flask.html b/maixpy/doc/zh/network/flask.html new file mode 100644 index 00000000..217cb1ab --- /dev/null +++ b/maixpy/doc/zh/network/flask.html @@ -0,0 +1,410 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy MaixCAM 使用 Flask 建立 HTTP 网页服务器 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy MaixCAM 使用 Flask 建立 HTTP 网页服务器

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

简介

+

MaixPy 基于 Python, 所以你可以使用 Python 库 Flask,通过它可以快速实现一个 Web 网页服务器,因为是 Python 通用的,具体的用处和使用方法可以自行搜索,这里不过多阐述。

+

如果你只是想做一个显示摄像头图像的页面,也可以参考JPEG 串流 中的 HTTP 图像服务器的方法。

+

简单的 HTTP 服务例程

+

运行下面的程序后,电脑浏览器访问 http://设备ip:8000 就会显示 hello world 字符和一张图片了。

+ +
from flask import Flask, request, send_file
+import maix # we not use it but we import it to listening key event to exit this program
+
+app = Flask(__name__)
+
+@app.route("/", methods=["GET", "POST"])
+def root():
+    print("========")
+    print(request.remote_addr)
+    print(f'headers:\n{request.headers}')
+    print(f'data: {request.data}')
+    print("========")
+    return 'hello world<br><img src="/img" style="background-color: black">'
+
+@app.route("/<path:path>")
+def hello(path):
+    print(path)
+    print(f'headers:\n{request.headers}')
+    print(f'data: {request.data}')
+    print("---------\n\n")
+    return f"hello from {path}"
+
+@app.route("/img")
+def img():
+    return send_file("/maixapp/share/icon/detector.png")
+
+if __name__ == "__main__":
+    app.run(host="0.0.0.0", port=8000)
+
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/network/http.html b/maixpy/doc/zh/network/http.html new file mode 100644 index 00000000..ee996d20 --- /dev/null +++ b/maixpy/doc/zh/network/http.html @@ -0,0 +1,395 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy MaixCAM 使用 http 网络通信 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy MaixCAM 使用 http 网络通信

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

简介

+

HTTP 是一个应用层网络协议,底层基于 TCP,通过它我们可以向网络服务器发送和接受信息,比如从网页服务器获取网页内容等。
+更多介绍可以自行搜索 HTTP。

+

在 MaixPy 使用 HTTP 请求

+

因为 MaixPy 基于 Python, 所以直接使用自带的 requests 库即可,requests 库是一个非常健全易用的库,这里就不进行过多的介绍,请自行搜索相关文档和教程使用。

+

这里举个例子,获取https://example.com 的首页内容。

+ +
import requests
+
+url = 'https://example.com'
+response = requests.get(url)
+print("Response:")
+print("-- status code:", response.status_code)
+print("")
+print("-- headers:", response.headers)
+print("")
+print("-- content:", response.content)
+print("")
+print("-- text:", response.text)
+print("")
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/network/mqtt.html b/maixpy/doc/zh/network/mqtt.html new file mode 100644 index 00000000..5cc26a45 --- /dev/null +++ b/maixpy/doc/zh/network/mqtt.html @@ -0,0 +1,391 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy MaixCAM 使用 MQTT 订阅发布消息 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy MaixCAM 使用 MQTT 订阅发布消息

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

MQTT 简介

+

使用 MQTT 可以快速简单地使用 订阅-发布 模型来进行实时通信。

+

系统组成:

+
    +
  • MQTT 服务器(broker),负责转发消息。
  • +
  • MQTT 客户端,从服务器订阅主题,并且接收消息,以及向服务器的特定主题发布消息。
  • +
+

通信过程:

+
    +
  • 客户端连接 MQTT 服务器。
  • +
  • 客户端订阅自己感兴趣的主题,比如topic1
  • +
  • 有其它客户端或者服务器发布topic1这个主题的信息时,会被实时推送到客户端。
  • +
  • 客户端也可以主动向特定的主题推送消息,其它订阅了这个主题的客户端都会收到,比如向自己订阅了的topic1推送消息自己也会收到。
  • +
+

MaixPy MaixCAM 中使用 MQTT

+

使用 paho-mqtt 这个模块即可,具体用法可以自行搜索paho-mqtt的用法,也可以参考MaixPy/examples中的例程。

+

如果你使用了早期的系统,可能需要手动安装一下paho-mqtt这个包,安装方法见添加额外的 Python 软件包
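
下面是一个最简的订阅-发布示意(基于 paho-mqtt 1.x 风格的 API,服务器地址和主题名均为假设值,请按实际情况修改):

import paho.mqtt.client as mqtt

BROKER = "test.mosquitto.org"   # 假设的公共测试服务器,请替换为自己的 broker
TOPIC = "maixcam/test"          # 假设的主题名

def on_connect(client, userdata, flags, rc):
    print("connected, code:", rc)
    client.subscribe(TOPIC)
    # 向自己订阅的主题发布一条消息,on_message 也会收到
    client.publish(TOPIC, "hello from MaixCAM")

def on_message(client, userdata, msg):
    print("received:", msg.topic, msg.payload)

client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
client.connect(BROKER, 1883, 60)
client.loop_forever()
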

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/network/network_settings.html b/maixpy/doc/zh/network/network_settings.html new file mode 100644 index 00000000..75f7e176 --- /dev/null +++ b/maixpy/doc/zh/network/network_settings.html @@ -0,0 +1,408 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy MaixCAM 网络设置 WiFi 设置 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy MaixCAM 网络设置 WiFi 设置

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

简介

+

要让 MaixCAM 能够使用网络,首先需要使用 WiFi 连接到网络。
+MaixCAM 提供了几种方法连接 WiFi 热点。

+

使用内置设置应用连接

+

开机后进入设置应用,选择WiFi功能,可以通过手机分享WiFi 二维码或者在 maixhub.com/wifi 生成二维码,然后扫码连接。
+也可以手动扫描WiFi热点,然后输入密码进行连接。

+

连接成功等待 DHCP 获得 IP 后界面会显示 IP。

+

通过 MaixPy 连接

+ +
from maix import network, err
+
+w = network.wifi.Wifi()
+print("ip:", w.get_ip())
+
+SSID = "Sipeed_Guest"
+PASSWORD = "qwert123"
+print("connect to", SSID)
+
+e = w.connect(SSID, PASSWORD, wait=True, timeout=60)
+err.check_raise(e, "connect wifi failed")
+print("ip:", w.get_ip())
+
+

DNS 服务器设置

+

实际使用时发现有些用户的路由器 DNS 解析可能解析不到某些域名,所以默认系统中在/boot/resolv.conf文件设置了 DNS 服务器

+ +
nameserver 114.114.114.114 # China
+nameserver 223.5.5.5 # aliyun China
+nameserver 8.8.4.4 # google
+nameserver 8.8.8.8 # google
+nameserver 223.6.6.6 # aliyun China
+
+

一般不需要修改,如果你的 DNS 解析遇到了问题可以修改这个文件。

+

实际系统用的配置文件路径是/etc/resolv.conf,开机时/boot/resolv.conf会被自动拷贝到/etc/resolv.conf,所以修改/boot/resolv.conf后直接重启最简单。

+

不想重启的话需要同时修改这两个文件。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/network/socket.html b/maixpy/doc/zh/network/socket.html new file mode 100644 index 00000000..e05a7351 --- /dev/null +++ b/maixpy/doc/zh/network/socket.html @@ -0,0 +1,488 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy MaixCAM 使用 socket 进行 TCP/UDP 通信 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy MaixCAM 使用 socket 进行 TCP/UDP 通信

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

socket 简介

+

socket 就是 TCP/UDP 通信在软件上的封装,通过 socket 接口,我们可以进行 TCP/UDP 通信。

+

MaixPy 由于基于 Python,我们可以直接使用内置的socket库进行通信,更多文档和使用教程可以自行搜索学习。

+

这里介绍简单的使用方法,通过这些示例代码,你可以在 MaixPy MaixCAM 上进行基本的 TCP 和 UDP 通信。
+记得根据实际情况修改 IP 地址和端口号。

+

socket TCP 客户端

+

这里请求 TCP 服务器,发送了一句消息并等待回应,然后关闭连接。

+ +
import socket
+def tcp_client(ip, port):
+    client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    server_address = (ip, port)
+    client_socket.connect(server_address)
+
+    try:
+        # 发送数据到服务器
+        message = 'Hello, Server!'
+        print("send:", message)
+        client_socket.sendall(message.encode('utf-8'))
+
+        # 接收服务器的响应
+        data = client_socket.recv(1024)
+        print('Received:', data.decode('utf-8'))
+    finally:
+        # 关闭连接
+        client_socket.close()
+
+if __name__ == "__main__":
+    tcp_client("10.228.104.1", 8080)
+
+

socket TCP 服务端

+

这里创建一个 socket 服务器,并且不停等待客户端连接,客户端连接后创建一个线程用以和客户端通信,读取客户端的信息并原样发送回去。

+ +
import socket
+import threading
+
+local_ip   = "0.0.0.0"
+local_port = 8080
+
+def receiveThread(conn, addr):
+    while True:
+        print('read...')
+        client_data = conn.recv(1024)
+        if not client_data:
+            break
+        print(client_data)
+        conn.sendall(client_data)
+    print(f"client {addr} disconnected")
+
+ip_port = (local_ip,local_port)
+sk = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+sk.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1)
+sk.bind(ip_port)
+sk.listen(50)
+
+print("accept now,wait for client")
+while True:
+    conn, addr = sk.accept()
+    print(f"client {addr} connected")
+    # create new thread to communicate for this client
+    t = threading.Thread(target=receiveThread,args=(conn, addr))
+    t.daemon = True
+    t.start()
+
+

socket UDP 客户端

+ +
import socket
+
+def udp_send(ip, port):
+    # 创建 socket 对象
+    udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+
+    # 定义服务器的 IP 地址和端口号
+    server_address = (ip, port)
+
+    try:
+        # 发送数据到服务器
+        message = 'Hello, Server!'
+        udp_socket.sendto(message.encode('utf-8'), server_address)
+
+    finally:
+        # 关闭连接
+        udp_socket.close()
+
+# 调用函数
+udp_send("10.228.104.1", 8080)
+
+

socket UDP 服务器

+ +
import socket
+
+def udp_receive(ip, port):
+    # 创建 socket 对象
+    udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+
+    # 定义服务器的 IP 地址和端口号
+    server_address = (ip, port)
+
+    # 绑定端口
+    udp_socket.bind(server_address)
+
+    print('Waiting for a message...')
+
+    while True:
+        data, address = udp_socket.recvfrom(1024)
+        print('Received:', data.decode('utf-8'))
+        print('From:', address)
+
+    # 关闭连接
+    udp_socket.close()
+
+# 调用函数
+udp_receive('0.0.0.0', 8080)
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/network/websocket.html b/maixpy/doc/zh/network/websocket.html new file mode 100644 index 00000000..608f446c --- /dev/null +++ b/maixpy/doc/zh/network/websocket.html @@ -0,0 +1,434 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy MaixCAM 使用 websocket - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy MaixCAM 使用 websocket

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

简介

+

类似 socket,使用 websocket 可以实现长链接通信,同时还支持和 web 页面通信。

+

因为 MaixPy 基于 Python,所以使用 Python 通用的 websocketsasyncio 模块即可,更多内容可以自行搜索学习。

+

websocket 客户端

+

连接服务器发送 10 次消息就结束:

+ +
import asyncio
+import websockets
+import time
+
+async def send_msg(websocket):
+    count = 1
+    while count <= 10:
+        msg = f"hello {count}"
+        await websocket.send(msg)
+        recv_text = await websocket.recv()
+        print(f"receive: {recv_text}", end="\n")
+        count += 1
+        time.sleep(1)
+    await websocket.close(reason="client exit")
+
+async def main_logic(ip, port):
+    async with websockets.connect(f'ws://{ip}:{port}') as websocket:
+        await send_msg(websocket)
+
+ip = "10.228.104.100"
+port = 5678
+asyncio.get_event_loop().run_until_complete(main_logic(ip, port))
+
+

websocket 服务端

+

接受客户端的连接并且客户端发送过来消息后,返回ack for msg: + 发送过来的消息。

+ +
import asyncio
+import websockets
+import functools
+
+async def recv_msg(websocket):
+    print("new client connected, recv_msg start")
+    while True:
+        try:
+            recv_text = await websocket.recv()
+        except Exception as e:
+            print("receive failed")
+            break
+        print("received:", recv_text)
+        response_text = f"ack for msg: {recv_text}"
+        await websocket.send(response_text)
+    print("recv_msg end")
+
+async def main_logic(websocket, path, other_param):
+    await recv_msg(websocket)
+
+ip = "0.0.0.0"
+port = 5678
+start_server = websockets.serve(functools.partial(main_logic, other_param="test_value"), ip, port)
+print("start server")
+asyncio.get_event_loop().run_until_complete(start_server)
+print("start server loop")
+asyncio.get_event_loop().run_forever()
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/peripheral/adc.html b/maixpy/doc/zh/peripheral/adc.html new file mode 100644 index 00000000..b52216a0 --- /dev/null +++ b/maixpy/doc/zh/peripheral/adc.html @@ -0,0 +1,479 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy ADC 使用介绍 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy ADC 使用介绍

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期版本作者更新内容
2024-06-111.0.0iawak9lkm + + 初版文档 + +
+
+
+ +
+
+ +

ADC 简介

+

ADC,即模拟信号数字转换器,将一个输入电压信号转换为一个输出的数字信号。由于数字信号本身不具有实际意义,仅仅表示一个相对大小。故任何一个模数转换器都需要一个参考模拟量作为转换的标准,参考标准一般为最大的可转换信号大小。而输出的数字量则表示输入信号相对于参考信号的大小。

+

ADC 外设一般有两个主要参数:分辨率和参考电压。

+
    +
  • 分辨率:ADC 的分辨率以输出二进制(或十进制)数的位数来表示。它说明 A/D 转换器对输入信号的分辨能力。一般来说,n 位输出的 A/D 转换器能区分 2^n 个不同等级的输入模拟电压,能区分输入电压的最小值为满量程输入的 1/(2^n)。在最大输入电压一定时,输出位数愈多,分辨率愈高。
  • +
  • 参考电压:ADC 参考电压是在 AD 转换过程中与已知电压进行比较来找到未知电压的值的电压。参考电压可以认为是最高上限电压,当信号电压较低时,可以降低参考电压来提高分辨率。
  • +
+

通过板子的 ADC,可以采集外部的电压,并让板子检验电压是否达标,或是在检测到特定的电压时执行特定的任务(例如 ADC 检测多个按钮)。

+

MaixPy 中使用 ADC

+

通过 MaixPy 使用 ADC 很简单:

+ +
from maix.peripheral import adc
+from maix import time
+
+a = adc.ADC(0, adc.RES_BIT_12)
+
+raw_data = a.read()
+print(f"ADC raw data:{raw_data}")
+
+time.sleep_ms(50)
+
+vol = a.read_vol()
+print(f"ADC vol:{vol}")
+
+

使用 ADC0,从中读取原始的转换数据,或是直接从中读取电压数据。

+

有关 ADC API 的详细说明请看 ADC API 文档

+

关于 MaixCAM ADC 外设的一些说明

+

MaixCAM 引出一个连接 ADC 的 IO,为 GPIO B3,如下图所示(对于MaixCAM-Pro 由于 B3 已经连接到了闪光灯, ADC 无法直接使用):

+

+

该 IO 默认为 ADC, 无需额外进行配置。

+

MaixCAM ADC 外设采样精度为 12bit,也就是说采样输出范围为 0~4095。采样精度为参考电压的 1/4096。

+

MaixCAM ADC 外设的扫描频率不能高于 320K/s,也就是上述示例中增加延时的原因。

+

MaixCAM ADC 外设内部参考电压Vref为 1.5V,实际使用时会有些许偏差。因为内部参考电压典型值为 1.5V,所以 Soc 的 ADC 量程为 0~1.5V。该量程的 ADC 应用范围较小,故 MaixCAM 额外为 ADC 外设设计了分压电路来增大 ADC 的应用范围,该分压电路如下图所示。由于电路中电阻阻值存在误差、ADC 外设有阻抗、内部参考电压有些许偏差,该分压电路的参考电压 Vin_max 约为 4.6~5.0V。API 中已经选择一个精度较高的默认值,一般情况下无需传递该参数。

+

+

若需要较高的精度,可以通过以下步骤计算出该分压电路的参考电压:

+
    +
  • 先测得 ADC_PIN 的实际输入电压 Vin。

    +
  • +
  • 然后测得 ADC1 处的实际输入电压 Vadc,电阻R10的位置可参考这个 BOM 表。

    +
  • +
  • 保持第一步的电压输入,在shell中执行以下命令:

    + +
    echo 1 > /sys/class/cvi-saradc/cvi-saradc0/device/cv_saradc
    +cat /sys/class/cvi-saradc/cvi-saradc0/device/cv_saradc
    +
    +

    此时你将获得 ADC 原始数据 adc_data。

    +
  • +
  • 接地电阻为 R10,另一个电阻为 R6, 记录它们的阻值。通常,MaixCAM 的 R6 阻值为 10KΩ(10 000Ω),R10 阻值为 5.1KΩ(5 100Ω)。

    +
  • +
  • 将上述参数传递给以下 python 代码,即可得出 ADC_PIN 端的量程 [0, Vin_max] (闭区间)。

    + +
    def maixcam_get_vin_max(Vin:float, Vadc:float, adc_data:int, r6:int, r10:int, adc_max:int=4095):
    +    Vref = (Vadc/adc_data)*(adc_max+1)
    +    r3 = Vadc*r6/(Vin-Vadc)
    +    Vin_max = (Vref/r3)*(r6+r3)
    +    return Vin_max
    +
    +Vin = 3.3		# step 1
    +Vadc = 1.06		# step 2
    +adc_data=2700	# step 3
    +r6=10000		# step 4
    +r10=5100		# step 4
    +
    +if __name__ == '__main__':
    +    print(maixcam_get_vin_max(Vin, Vadc, adc_data, r6, r10))
    +
    +

    现在将结果传递给 adc.ADC() 的第三个参数,你将获得一个高精度的 ADC。

    +
  • +
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/peripheral/gpio.html b/maixpy/doc/zh/peripheral/gpio.html new file mode 100644 index 00000000..c3e2609b --- /dev/null +++ b/maixpy/doc/zh/peripheral/gpio.html @@ -0,0 +1,424 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 使用 GPIO - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 使用 GPIO

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

简介

+

使用 GPIO 可以控制引脚输入或者输出高低电平,用来读取信号或者输出控制信号,十分常用。

+

注意 MaixCAM 的引脚是 3.3V 耐受,请勿输入 5V 电压。

+

MaixPy 中使用 GPIO

+

首先我们需要知道设备有哪些引脚和 GPIO,对于 MaixCAM 每个引脚都对应了一个 GPIO 控制器,如图:

+


+maixcam_pro_io

+

需要注意的是,引脚除了作为 GPIO 使用,还能用作其它功能比如 PWM 使用,使用前我们需要设置一下引脚的功能为 GPIO。

+

比如在 MaixCAM 上有些引脚默认已经被其它功能占用,比如 UART0, WiFi(SDIO1 + A26), 不建议使用它们。

+

其它的可以使用,另外 A14 引脚连接到了板载的 LED,默认是作为系统的负载提示灯,如果初始化它会自动取消系统提示灯功能作为普通 GPIO 被使用(注意A14只能作为输出),这样你就能控制这颗 LED 的亮灭了。

+

LED 的电路图如图所示,所以我们只需要给 A14 引脚一个高电平 LED 就会导通并亮起来:
+

+ +
from maix import gpio, pinmap, time
+
+pinmap.set_pin_function("A14", "GPIOA14")
+led = gpio.GPIO("GPIOA14", gpio.Mode.OUT)
+led.value(0)
+
+while 1:
+    led.toggle()
+    time.sleep_ms(500)
+
+

这里先使用pinmap设置了A14引脚的功能为GPIO,当然,对于A14因为只有GPIO功能,可以不设置,为了程序通用起见,其它引脚可能需要设置,所以这里例程设置了。

+

更多 API 请看 GPIO API 文档

+

GPIO 作为输入模式

+ +
from maix import gpio, pinmap, time
+
+pinmap.set_pin_function("A19", "GPIOA19")
+led = gpio.GPIO("GPIOA19", gpio.Mode.IN)
+
+while 1:
+    print(led.value())
+    time.sleep_ms(1) # sleep to make cpu free
+
+

MaixCAM-Pro 使用照明 LED

+

MaixCAM 和 MaixCAM-Pro 都有一个 LED 小灯,即接到了引脚 A14,另外 MaixCAM-Pro 还板载了一个照明 LED,连接到了 B3 引脚,也是高电平开启低电平关闭:

+ +
from maix import gpio, pinmap, time
+
+pinmap.set_pin_function("B3", "GPIOB3")
+led = gpio.GPIO("GPIOB3", gpio.Mode.OUT)
+led.value(0)
+
+while 1:
+    led.toggle()
+    time.sleep_ms(500)
+
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/peripheral/hid.html b/maixpy/doc/zh/peripheral/hid.html new file mode 100644 index 00000000..f168324c --- /dev/null +++ b/maixpy/doc/zh/peripheral/hid.html @@ -0,0 +1,451 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 使用 HID 设备 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 使用 HID 设备

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

简介

+

HID(Human Interface Device)设备是一类计算机外围设备,用于向计算机传输输入数据,或从计算机接收输出数据。HID 设备最常见的例子包括键盘、鼠标、游戏控制器、触摸屏、和手写板等。HID 协议是一种用于人机交互设备的通信协议,它允许这些设备通过 USB、蓝牙或其他连接方式与主机进行数据交换。MaixPy目前支持作为键盘、鼠标和触摸屏来使用,下面将会介入如何使用MaixPy通过HID来控制你的个人电脑~

+

一定要操作的前期准备

+
+

MaixPy 固件版本应该 >= 4.5.1

+
+

在操作HID前一定要先使能HID设备,有两种方法:

+
    +
  1. 打开MaixCAM自带的Settings应用,依次点击USB Settings->勾选需要的HID设备,如KeyboardMouseTouchscreen,然后点击Confirm后重启MaixCAM
  2. +
  3. 通过MaixVision中的Examples/tools/maixcam_switch_usb_mode.py示例,修改代码device_list中需要开启的HID设备,运行后重启MaixCAM
  4. +
+

注意:由于最多只支持4个USB设备,因此在ncmrndiskeyboardmousetouchpad之中只能同时启动4个设备,根据实际需求选择,其中ncmrndis是USB网络协议设备,如果不需要可以关掉,默认是打开的。

+

用MaixPy编写一个键盘

+

需要使能了HID Keyboard后才能运行。

+

下面示例中,通过键盘发送rstuv四个字符,然后松开按键。

+ +
from maix import hid, time
+
+keyboard = hid.Hid(hid.DeviceType.DEVICE_KEYBOARD)
+
+# 按键编号参考[USB HID文档](https://www.usb.org)的"Universal Serial Bus HID Usage Tables"部分
+keys = [21, 22, 23, 24, 25, 0]    # 表示[r, s, t, u, v, 0], 0表示松开按键
+
+for key in keys:
+    keyboard.write([0, 0, key, 0, 0, 0, 0, 0])
+
+
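键码和 ASCII 码并不相同,比如 a~z 对应键码 4~29(见上述 HID Usage Tables),可以写一个简单的辅助函数来发送一串小写字母(示意代码,报文格式与上面的例子相同):

from maix import hid

keyboard = hid.Hid(hid.DeviceType.DEVICE_KEYBOARD)

def send_lowercase(s):
    for c in s:
        code = ord(c) - ord('a') + 4                   # HID Usage Tables 中 'a' 为 4, ... 'z' 为 29
        keyboard.write([0, 0, code, 0, 0, 0, 0, 0])    # 按下
        keyboard.write([0, 0, 0, 0, 0, 0, 0, 0])       # 松开

send_lowercase("maix")   # 仅支持小写字母,其它字符需自行查表
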
+

用MaixPy编写一个鼠标

+

需要使能了HID Mouse后才能运行。

+

下面示例中,每隔100ms移动鼠标5个像素。

+ +
from maix import hid, time
+
+mouse = hid.Hid(hid.DeviceType.DEVICE_MOUSE)
+
+button = 0      # 按键状态,0表示松开,1表示按下左键,2表示按下右键,4表示按下滚轮键
+x_oft = 0       # 相对当前位置的偏移量,数值范围是-127~127
+y_oft = 0       # 相对当前位置的偏移量,数值范围是-127~127
+wheel_move = 0  # 滚轮移动距离,数值范围是-127~127
+
+count = 0
+while True:
+    x_oft += 5
+    y_oft += 5
+    mouse.write([button, x_oft, y_oft, wheel_move])
+    time.sleep_ms(100)
+    count += 1
+    if count > 50:
+        break
+
+

用MaixPy编写一个触摸屏

+

需要使能了HID Touchpad后才能运行。

+

下面示例中,每隔100ms移动触摸屏150个单位,注意触摸屏的坐标系是绝对坐标,而不是相对坐标,另外需要将屏幕实际尺寸映射到[1, 0x7FFF]区间,坐标(1,1)表示左上角,坐标(0x7FFF,0x7FFF)表示右下角。

+ +
from maix import hid, time
+
+touchpad = hid.Hid(hid.DeviceType.DEVICE_TOUCHPAD)
+
+def touchpad_set(button, x_oft, y_oft, wheel_move):
+    touchpad.write([button,                             # 按键状态,0表示松开,1表示按下左键,2表示按下右键,4表示按下滚轮键
+                    x_oft & 0xff, (x_oft >> 8) & 0xff,  # 绝对位置,最左为1, 最右为0x7fff,0表示不操作,数值范围是0~0x7fff
+                    y_oft & 0xff, (y_oft >> 8) & 0xff,  # 绝对位置,最上为1, 最下为0x7fff,0表示不操作,数值范围是0~0x7fff
+                    wheel_move])                        # 滚轮移动距离,数值范围是-127~127
+button = 0
+x_oft = 0
+y_oft = 0
+wheel_move = 0
+count = 0
+while True:
+    x_oft += 150
+    y_oft += 150
+    touchpad_set(button, x_oft, y_oft, wheel_move)
+    time.sleep_ms(100)
+    count += 1
+    if count > 50:
+        break
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/peripheral/i2c.html b/maixpy/doc/zh/peripheral/i2c.html new file mode 100644 index 00000000..3ce08abb --- /dev/null +++ b/maixpy/doc/zh/peripheral/i2c.html @@ -0,0 +1,410 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 使用 I2C - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 使用 I2C

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +
+

注意需要 MaixPy 镜像和固件 >= 4.2.1

+
+

MaixCAMI2C 及对应的 引脚 看图:

+


+maixcam_pro_io

+

对于 MaixCAM,由于引脚资源比较紧张,引出的 I2C1 I2C3 引脚和 WiFi 模块(SDIO1)重合了,所以 WiFi 和硬件 I2C 只能二选一使用。
+另外还有一个I2C5,是底层驱动软件模拟的,建议使用它,底层已经做好了驱动,使用时和使用硬件I2C一样。

+

默认I2C5的引脚是GPIO,所以使用i2c模块前先用pinmap设置以下引脚功能为I2C5

+ +
from maix import i2c, pinmap
+
+pinmap.set_pin_function("A15", "I2C5_SCL")
+pinmap.set_pin_function("A27", "I2C5_SDA")
+
+bus1 = i2c.I2C(5, i2c.Mode.MASTER)
+slaves = bus1.scan()
+print("find slaves:", slaves)
+
+
+

更多 API 看 i2c API 文档

+

如上面所说, 对于 MaixCAM 硬件 I2CWiFi 只能二选一,如果一定要用,需要禁用WiFi,使用pinmap模块设置引脚功能为 I2C,再使用maix.i2c模块操作。

+
+

TODO: 提供禁用 WiFi 的方法(需要系统里面禁用掉 WiFi 驱动,比较复杂)

+
+ +
from maix import i2c, pinmap
+
+pinmap.set_pin_function("P18", "I2C1_SCL")
+pinmap.set_pin_function("P21", "I2C1_SDA")
+
+bus1 = i2c.I2C(1, i2c.Mode.MASTER)
+slaves = bus1.scan()
+print("find slaves:", slaves)
+
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/peripheral/pinmap.html b/maixpy/doc/zh/peripheral/pinmap.html new file mode 100644 index 00000000..fd99ab2b --- /dev/null +++ b/maixpy/doc/zh/peripheral/pinmap.html @@ -0,0 +1,445 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy Pinmap 使用介绍 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy Pinmap 使用介绍

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期版本作者更新内容
2024-06-111.0.0iawak9lkm + + 初版文档 + +
+
+
+ +
+
+ +

管脚映射简介

+

在系统级芯片(System on Chip, SoC)设计中,一个管脚通常具有多个功能,这种设计方法被称为引脚复用。其原因主要有以下几个方面:

+
    +
  • 节省引脚数量:SoC 集成了大量的功能模块,如 CPU、GPU、内存控制器、I/O 接口、通信模块等。如果每个功能都分配独立的引脚,会导致需要的引脚数量非常庞大,增加封装的复杂性和成本。通过引脚复用,一个引脚可以在不同的模式下支持不同的功能,从而显著减少引脚的总数。
  • +
  • 降低芯片封装和制造成本:减少引脚数量可以选择更小的封装尺寸,从而降低封装和制造成本。小封装不仅降低了材料成本,还减少了芯片在电路板上的占用空间,有利于设计更紧凑的电子产品。
  • +
  • 提高设计灵活性:引脚复用提供了更大的设计灵活性。不同的应用场景可能需要不同的功能组合,通过软件配置可以根据具体需求启用不同的引脚功能。例如,同一个引脚在一个实际应用中可以作为 UART 通信接口,而在另一个实际应用中可以作为 SPI 总线接口。
  • +
  • 简化 PCB 布局:减少引脚数量可以简化印刷电路板(PCB)的布局设计。更少的引脚意味着更少的布线层数和过孔,从而简化了 PCB 设计,降低了生产难度和成本。
  • +
  • 优化性能:在某些情况下,通过复用引脚可以优化信号路径和性能。例如,通过选择适当的引脚功能组合,可以减少信号传输路径上的干扰和噪声,提高系统的整体性能和可靠性。
  • +
+

而 Pinmap 展示和管理芯片各个引脚配置,这些配置通常包括每个引脚的名称及其功能(通常有多个功能)。

+

以 MaixCAM GPIO A28为例子:

+
    +
  • A28 为引脚名称。
  • +
  • GPIOA28/UART2_TX/JTAG_TDI 为引脚功能(可从 SoC 手册查询),同一时间该引脚只能是这三个功能中的其中一个。
  • +
+

通过 Pinmap,可以设定指定的芯片引脚为指定的功能。

+

MaixPy 中使用Pinmap

+

对于 MaixCAM 板子上各个引脚的编号及其功能,可以参考下图:
+
+maixcam_pro_io

+

或是阅读 SG2002芯片手册 Pinmux 章节了解剩余的引脚的编号及功能。

+

介绍了那么多,其实通过 MaixPy 使用 Pinmap 来管理引脚功能很简单:

+ +
from maix import pinmap
+
+print(pinmap.get_pins())
+
+f = pinmap.get_pin_functions("A28")
+print(f"GPIO A28 pin functions:{f}")
+
+print(f"Set GPIO A28 to {f[0]} function")
+pinmap.set_pin_function("A28", f[0])
+
+

先列出了可供管脚映射的所有引脚,然后查询 GPIO A28 可供选择的引脚功能,最后将该引脚设置为该引脚的第一个功能(作为GPIO)。

+

更详细的 Pinmap 的 API 说明请看 Pinmap API 文档

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/peripheral/pwm.html b/maixpy/doc/zh/peripheral/pwm.html new file mode 100644 index 00000000..02b5af44 --- /dev/null +++ b/maixpy/doc/zh/peripheral/pwm.html @@ -0,0 +1,415 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 使用 PWM - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 使用 PWM

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

简介

+

在 MaixPy (v4) 中使用 PWM,先使用pinmap设置引脚的功能为 PWM,再使用。

+

以及每个 PWM 有对应的引脚,根据 MaixCAM 的引脚图可以看到:

+


+maixcam_pro_io

+

这里我们推荐使用PWM6PWM7

+

对于 MaixCAM 因为WiFi 使用了SDIO1的所有引脚,所以PWM4~9只能和WiFi二选一使用。

+
+

TODO: 提供禁用 WiFi 的方法(需要系统里面禁用掉 WiFi 驱动,比较复杂)

+
+

MaixPy 使用 PWM 控制舵机

+

这里我们以控制舵机为例, 使用MaixCAMPWM7A19引脚:

+ +
from maix import pwm, time, pinmap
+
+SERVO_PERIOD = 50     # 50Hz 20ms
+SERVO_MIN_DUTY = 2.5  # 2.5% -> 0.5ms
+SERVO_MAX_DUTY = 12.5  # 12.5% -> 2.5ms
+
+# Use PWM7
+pwm_id = 7
+# !! set pinmap to use PWM7
+pinmap.set_pin_function("A19", "PWM7")
+
+
+
+def angle_to_duty(percent):
+    return (SERVO_MAX_DUTY - SERVO_MIN_DUTY) * percent / 100.0 + SERVO_MIN_DUTY
+
+
+out = pwm.PWM(pwm_id, freq=SERVO_PERIOD, duty=angle_to_duty(0), enable=True)
+
+for i in range(100):
+    out.duty(angle_to_duty(i))
+    time.sleep_ms(100)
+
+for i in range(100):
+    out.duty(angle_to_duty(100 - i))
+    time.sleep_ms(100)
+
+

这里的功能是控制舵机从最小角度旋转到最大角度再旋转回最小角度。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/peripheral/spi.html b/maixpy/doc/zh/peripheral/spi.html new file mode 100644 index 00000000..b3d7d42c --- /dev/null +++ b/maixpy/doc/zh/peripheral/spi.html @@ -0,0 +1,523 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy SPI 串行外设接口使用介绍 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy SPI 串行外设接口使用介绍

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期版本作者更新内容
2024-06-111.0.0iawak9lkm + + 初版文档 + +
+
+
+ +
+
+ +

SPI 简介

+

SPI (Serial Peripheral Interface,即串行外设接口),是一种同步外设接口,它可以使 SoC 与各种外围设备以串行方式进行通信以交换信息。常见的外围设备有 Flash RAM、网络控制器、LCD显示驱动器和A/D转换器等。

+

SPI 采用主从模式(Master—Slave)架构,支持一个或多个Slave设备。

+

在硬件电路上,SPI 通常由 4 根线组成,它们分别是:

+
    +
  • MISO:即主设备输入从设备输出(Master Output Slave Input),该引脚在从模式下发送数据,在主模式下接收数据。
  • +
  • MOSI:即主设备输出从设备输入(Master Input Slave Output),该引脚在主模式下发送数据,在从模式下接收数据。
  • +
  • SCK:串行总线时钟,由主设备输出,从设备输入。
  • +
  • NSS/CS:从设备选择。它作为片选引脚,让主设备可以单独地与特定从设备通信,避免数据线上的冲突。
  • +
+

在通信协议上,SPI 行为一般如下:

+
    +
  • SPI 支持一主多从,主设备通过片选引脚来选择需要进行通信的从设备,一般情况下,从设备 SPI 接口只需一根片选引脚,而主设备的片选引脚数量等同于设备数量。主设备使能某个从设备的片选信号期间,该从设备会响应主设备的所有请求,其余从设备会忽略总线上的所有数据。

    +
  • +
  • SPI 有四种模式,取决于极性(CPOL)和相位(CPHA)的配置。

    +

    极性,影响 SPI 总线空闲时的时钟信号电平。

    +
      +
    1. CPOL = 1:表示空闲时是高电平
    2. +
    3. CPOL = 0:表示空闲时是低电平
    4. +
    +

    相位,决定 SPI 总线采集数据的跳变沿。

    +
      +
    1. CPHA = 0:表示从第一个跳变沿开始采样
    2. +
    3. CPHA = 1:表示从第二个跳变沿开始采样
    4. +
    +

    极性与相位组合成了 SPI 的四种模式:

    +
  • +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ModeCPOLCPHA
000
101
210
311
+
    +
  • SPI 通常支持全双工和半双工通信。

    +
  • +
  • SPI 不规定最大传输速率,没有地址方案;SPI 也没规定通信应答机制,没有规定流控制规则。

    +
  • +
+

SPI 是非常常见的通信接口,通过 SPI 接口,SoC 能控制各式各样的的外围设备。

+

MaixPy 中使用 SPI

+

MaixCAM 的引脚分布如下:

+


+maixcam_pro_io

+

使用前需要 maix.peripheral.pinmap 完成对 SPI 的管脚映射。

+

注意:MaixCAM 由于其 SPI 外设的限制,只能作为 SPI 主设备使用。MaixCAM 的 SPI 暂时不支持修改硬件 CS 引脚有效电平,所有 SPI 硬件 CS 的有效电平为低电平。如需要使用其他的 CS 有效电平,请在 SPI API 中配置软件 CS 引脚及其有效电平。SPI4 为软件模拟的 SPI,实测最大速率为 1.25MHz,使用方法与硬件 SPI 无异。

+

通过 MaixPy 使用 SPI 很简单:

+ +
from maix import spi, pinmap
+
+pin_function = {
+    "A24": "SPI4_CS",
+    "A23": "SPI4_MISO",
+    "A25": "SPI4_MOSI",
+    "A22": "SPI4_SCK"
+}
+
+for pin, func in pin_function.items():
+    if 0 != pinmap.set_pin_function(pin, func):
+        print(f"Failed: pin{pin}, func{func}")
+        exit(-1)
+
+
+spidev = spi.SPI(4, spi.Mode.MASTER, 1250000)
+
+### Example of full parameter passing.
+# spidev = spi.SPI(id=4,                  # SPI ID
+#                  mode=spi.Mode.MASTER,  # SPI mode
+#                  freq=1250000,          # SPI speed
+#                  polarity=0,            # CPOL 0/1, default is 0
+#                  phase=0,               # CPHA 0/1, default is 0
+#                  bits=8,                # Bits of SPI, default is 8
+#                  cs_enable=True,        # Use soft CS pin? True/False, default is False
+#                  cs='GPIOA19')          # Soft cs pin number, default is 'GPIOA19'
+
+b = bytes(range(0, 8))
+
+res = spidev.write_read(b, len(b))
+if res == b:
+    print("loopback test succeed")
+else:
+    print("loopback test failed")
+    print(f"send:{b}\nread:{res}")
+
+

请先连接该 SPI 的 MOSIMISO

+

先通过 pinmap 配置所需的引脚,然后启用全双工通信,返回值将等于发送值。

+

更多 SPI API 的详细说明请看 SPI API 文档

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/peripheral/uart.html b/maixpy/doc/zh/peripheral/uart.html new file mode 100644 index 00000000..93e2890e --- /dev/null +++ b/maixpy/doc/zh/peripheral/uart.html @@ -0,0 +1,659 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy UART 串口使用介绍 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy UART 串口使用介绍

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

串口简介

+

串口是一种通信方式,包含了硬件和通信协议的定义。

+
    +
  • 硬件包括:
      +
    • 3 个引脚: GNDRXTX,通信双发交叉连接 RX TX, 即一方 TX 发送到另一方的 RX, 双方 GND 连接到一起。
    • +
    • 控制器,一般在芯片内部,也叫 UART 外设,一般一个芯片有一个或者多个 UART 控制器,每个控制器有相对应的引脚。
    • +
    +
  • +
  • 串口通信协议: 为了让双方能顺利通信,规定了一套协议,即以什么样的时序通信,具体可以自行学习,常见的参数有 波特率 校验位等,波特率是我们用得最多的参数。
  • +
+

通过板子的串口,可以和其它单片机或者 SOC 进行数据通信,比如可以在 MaixCAM 上实现人体检测功能,检测到坐标后通过串口发送给 STM32/Arduino 单片机。

+

MaixPy 中使用串口

+

对于 MaixCAM 默认从 USB 口引出了一个串口,可以插上配套的 Type-C 转接小板,就能直接使用上面的串口引脚,
+也可以不用转接板,直接使用板子上的 A16(TX)A17(RX)引脚, 和 USB 口引出的是同样的引脚,是等效的,具体看接口图:

+


+maixcam_pro_io

+

对于 MaixCAM 使用 USB 引出的串口时需要注意,Type-C 正插和反插时,转接小板上的 RX 和 TX 会交换(默认 Type-C 母口朝前和丝印符合),所以当你发现无法通信时,有可能就是 RX TX 反了,可以尝试将 Type-C 翻转一面插再看看通信是否正常。这个算是设计缺陷,不过一般也不会经常拔插所以适应一下也能接受。

+

将两个通信的板子双方连接好后(通信双发交叉连接 RX TX, 即一方 TX 发送到另一方的 RX, 双方 GND 连接到一起),就可以使用软件了。

+

通过 MaixPy 使用串口很简单:

+ +
from maix import uart
+
+device = "/dev/ttyS0"
+# ports = uart.list_devices() # 列出当前可用的串口
+
+serial = uart.UART(device, 115200)
+serial.write_str("hello world")
+print("received:", serial.read(timeout = 2000))
+
+

这里使用了第一个串口/dev/ttyS0,也就是上面说的 Type-C 出 引出的串口。

+

更多串口的 API 在 UART API 文档

+

MaixCAM 串口使用注意点

+

TX 引脚注意点

+

MaixCAM 的 TX(UART0) 引脚在开机时不能是被拉低的状态,不然会导致无法开机,这是芯片的特性。如果你在做 3.3v 和 5v 的电平转换电路,要十分注意不要默认拉低,请保持浮空(可以考虑使用电平转换芯片)。

+

以及如果你发现无法开机,也可以先检查一下 TX 是否被拉低了。

+

串口连接电脑

+

有开发者可能会问:为什么插上 USB 电脑没出现串口设备?
+答: 因为设备的 USB 默认是 虚拟 USB 网卡,没有串口功能,如果要访问设备的终端,请使用 ssh 连接。

+

对于 MaixCAM, 从 Type-C 转接板引出的串口0直连到 A16(TX)A17(RX)引脚,可以直接接到其它设备比如单片机的串口引脚;
+如果要和电脑通信,需要使用 USB 转串口小板(比如这个)连接到电脑。

+

开机日志输出

+

需要注意的是, MaixCAM 的串口0 在开机时会打印一部分开机日志, 启动完毕后会打印serial ready字样,如果和单片机通信需要注意丢弃这部分信息,如果出现系统启动出现问题也可以通过查看串口0的开机打印来诊断问题。

+

发送数据

+

主要有两个函数write_strwrite函数。

+

write_str函数来发送字符串,write用来发送字节流,即strbytes类型,两者可以互相转换,比如:

+
    +
  • "A" 调用encode()方法变成b"A",反过来b"A"调用decode()方法变成"A"
  • +
  • str 没法显示一些不可见字符比如 ASCII 码中的值0,在字符串中也是\0一般作为结束符,在bytes类型中就可以用b"\x00"来储存。
  • +
  • 对于非 ASCII 编码的字符串更有用,比如UTF-8编码中中文是由三个字节\xe5\xa5\xbd来表示的,我们可以通过"好".encode("utf-8")得到b"\xe5\xa5\xbd",也可以通过b'\xe5\xa5\xbd'.decode("utf-8)得到"好"
  • +
+

所以如果我们需要发送字节数据,则用write()方法发送即可, 比如:

+ +
bytes_content = b'\x01\x02\x03'
+serial.write(bytes_content)
+
+

所以对于 str 类型,也可以不用write_str,而是使用serial.write(str_content.encode()) 来发送。

+

如果你有其它类型的数据,想将它们变成一个字符串发送,可以使用Python 字符串格式化来创建一个字符串,比如:
+想发送I have xxx apple,这里xxx 想用一个整型变量,则:

+ +
num = 10
+content = "I have {} apple".format(num)
+content2 = f"I have {num} apple"
+content3 = "I have {:04d} apple".format(num)
+content4 = f"I have {num:d} apple"
+print(content)
+print(content2)
+print(content3)
+print(content4)
+print(type(content))
+serial.write_str(content)
+
+

另外你也可以把数据编码成二进制流数据发送,比如前 4 个字节是十六进制的 AABBCCDD,中间发送一个 int 类型的数值,最后再加一个0xFF结尾,使用struct.pack来进行编码(看不懂可以看后文的介绍):

+ +
from struct import pack
+num = 10
+bytes_content = b'\xAA\xBB\xCC\xDD'
+bytes_content += pack("<i", num)
+bytes_content += b'\xFF'
+print(bytes_content, type(bytes_content))
+serial.write(bytes_content)
+
+

这里 pack("<i", num) 将 num 编码为 int 类型即 4 字节的有符号数,<符号意思是小端编码,低位在前,这里num = 10,十六进制 4 字节表示就是0x0000000A,小端就是把低字节0x0A放在前面,得到一个b'\x0A\x00\x00\x00'的字节类型数据。

+
+

这里只举例使用i编码int类型的数据,还有其它类型比如B表示unsigned char等等,更多的struct.pack格式化用法可以自行搜索python struct pack

+
+

这样最终发送的就是AA BB CC DD 0A 00 00 00 FF二进制数据了。

+

接收

+

使用read方法进行读取数据,直接:

+ +
while not app.need_exit():
+    data = serial.read()
+    if data:
+        print(data)
+    time.sleep_ms(1)
+
+

同样,read方法获得的数据也是bytes类型,这里read会读取对方一次性发送的一串数据,如果没有数据就是b''即空字节。
+这里用了time.sleep_ms(1)进行睡眠了1ms,用来释放 CPU,不让这个线程占用所有 CPU 资源,而且1ms不影响我们程序的效率,特别是在多线程时有用。

+

另外read函数有两个参数:

+
    +
  • len:代表想接收的最大长度,默认-1代表缓冲区有多少就返回多少,传>0的值则代表最多返回这个长度的数据。
  • +
  • timeout
      +
    • 默认 0 代表从缓冲区读取数据立马返回数据,如果len-1则返回所有数据,如果指定了len则返回长度不超过len 的数据。
    • +
    • <0 代表一直等待直到接收到了数据才返回,如果len-1则等待到接收到数据才返回(一串连续接收到的数据,即阻塞式读取所有数据),如果指定了len则等待接收数量达到len才返回。
    • +
    • >0 代表无论有没有接收到数据,超过这个时间就会返回。
    • +
    +
  • +
+

看起来有点复杂,常见的参数组合:

+
    +
  • read(): 即read(-1, 0),从缓冲区读取收到的数据,通常是对方一次性发来的一串数据,等到对方没有发送(一个字符的发送时间内没有再发)就立刻返回。
  • +
  • read(len = -1, timeout = -1): 阻塞式读取一串数据,等到对方发送了数据并且一个字符的发送时间内没有再发才返回。
  • +
  • read(len = 10, timeout = 1000): 阻塞式读取 10 个字符,读取到 10 个字符或者 超过 1000ms 还没收到就返回已经收到的数据。
  • +
+

设置接收回调函数

+

在 MCU 开发中,串口收到数据通常会有中断事件发生, MaixPy 已经在底层处理好了中断,开发者无需再处理中断。
+如果你想在接收到数据时调用一个回调函数,可以用set_received_callback设置回调函数:

+ +

+from maix import uart, app, time
+
+def on_received(serial : uart.UART, data : bytes):
+    print("received:", data)
+    # send back
+    serial.write(data)
+
+device = "/dev/ttyS0"
+
+serial = uart.UART(device, 115200)
+serial.set_received_callback(on_received)
+
+serial.write_str("hello\r\n")
+print("sent hello")
+print("wait data")
+
+while not app.need_exit():
+    time.sleep_ms(100) # sleep to make CPU free
+
+

在接收到数据后会在另外一个线程里调用设置的回调函数,因为是在另外的线程里调用的,所以不像中断函数要尽快退出函数,你可以在回调函数里处理一些事务再退出也是可以的,注意多线程常见问题。

+

使用回调函数的方式接收数据请不要再使用read函数读取,否则会读取出错。

+

使用其它串口

+

每个引脚可能可以对应不同的外设功能,这也叫引脚复用,如下图,每个引脚对应了不同功能,比如A17引脚(板子的丝引标识)对应了GPIOA17 UART0_RX PWM5 这三种功能,默认是UART0_RX

+


+maixcam_pro_io

+

默认我们就能像上面直接使用UART0,对于其它串口的引脚默认都不是串口外设功能,所以要使用其它串口,需要先设置一下映射,使用pinmap.set_pin_function来设置。

+

这里以使用UART1 为例,先设置引脚映射选择引脚功能为串口,然后设备编号使用/dev/ttyS1,注意uart.list_devices() 默认不会返回需要手动映射的串口,所以直接手动传参就可以了:

+ +
from maix import app, uart, pinmap, time
+
+pinmap.set_pin_function("A18", "UART1_RX")
+pinmap.set_pin_function("A19", "UART1_TX")
+
+device = "/dev/ttyS1"
+
+serial1 = uart.UART(device, 115200)
+
+

应用层通信协议

+

概念和字符协议

+

串口只是规定了保证硬件通信的时序,为了让接收方知道发送方发送的字符流的含义,我们一般会规定一个应用通信协议。
+比如发送方需要发送一个坐标,包含了x, y两个整型值,为了让接收方能理解我们发送的字节流的含义,我们规定:

+
    +
  • 帧头:当我开始发送$符号时,就代表我要开始发送有效的数据啦。
  • +
+
+

内容:设计一个开头符号的原因是串口是流式传输,比如发送两次12345有可能在某个时刻接收到了12345123这样的数据,第二帧的45还没有接收到,我们可以根据起始和结尾符号来判断一个完整的数据帧。

+
+
    +
  • x, y 的取值范围是 0~65535, 即两个字节的无符号短整型(unsinged short),我会先发 x 再发 y,用逗号隔开,比如10,20
  • +
  • 帧尾:最后我会再发一个*标记来代表我这次数据发送完成了。
  • +
+

这样发送一次数据就类似$10,20*这样一个字符串,对方如果用 C 语言接收和解析:

+ +
// 1. 接收数据
+// 2. 根据帧头帧尾判断是否接收完毕了,并将完整的一帧数据存到 buff 数组里面
+// 3. 解析一帧数据
+uint16_t x, y;
+sscanf(buff, "$%d,%d*", &x, &y);
+
+

这样我们就制定了最简单的字符通信协议,具有一定的可靠性。
+但是由于我们串口一般用的参数是115200 8 N 1,这里的N就是无奇偶校验,我们可以在自己的协议里面加一个校验值放在末尾,比如:

+
    +
  • 这里我们规定 x,y 后还有一个校验值,取值范围是 0 到 255,它的值为前面所有字符加起来的和对 255 取余。
  • +
  • 这里以 $10,20举例,在Python只需要使用sum函数就可以sum(b'$10,20') % 255 --> 20,最终发送$10,20,20*
  • +
  • 接收放接收到数据后读取到校验值20,然后自己也同样的方式计算一遍$10,20的校验值,如果也是20说明数据传输没有发生错误,如果不相同我们则可以认为数据传输过程中发生了错误,可以丢弃等下一个数据包。
  • +
+

比如在 MaixPy 中,我们需要编码一个字符协议,直接使用 Python 的字符串格式化功能即可:

+ +
x = 10
+y = 20
+content = "${},{}*".format(x, y)
+print(content)
+
+
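接收方如果也是 MaixPy(Python),可以按约定解析并校验收到的一帧数据,下面是一个简化示意(帧格式沿用上文 $x,y,校验值* 的约定,假设 frame 已经是按帧头帧尾切分好的一帧完整数据):

def parse_frame(frame: bytes):
    # 解析形如 b"$10,20,20*" 的一帧数据,返回 (x, y),格式错误或校验失败返回 None
    if not (frame.startswith(b"$") and frame.endswith(b"*")):
        return None
    fields = frame[1:-1].split(b",")
    if len(fields) != 3:
        return None
    x, y, checksum = int(fields[0]), int(fields[1]), int(fields[2])
    # 校验值为 "$x,y" 部分所有字节之和对 255 取余
    if sum(b"$" + fields[0] + b"," + fields[1]) % 255 != checksum:
        return None
    return x, y

print(parse_frame(b"$10,20,20*"))   # 输出 (10, 20)
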

二进制通信协议

+

上面的字符协议有个很明显的特征,我们都是用可见字符的方式在传输数据,传输数据时有点就是简单,人眼能直接看懂;
+缺点就是占用字符数量不固定,数据量比较大,比如$10,20*$1000,2000*,同样的格式,数值不同长度不同,这里1000用了 4 个字符也就是4个字节,我们都知道一个无符号短整型(uint16)类型的数据只需要两个字节就能表示0~65535的取值范围,用这种表示方法可以少传输数据。
+我们也知道可见字符可以通过ASCII码表转换成二进制表示形式,比如$1000查找ASCII码表换成二进制表示就是0x24 0x31 0x30 0x30 0x30一共 5 个字节,也就是我们实际传输数据的时候传输的二进制内容,如果现在我们用二进制的方式直接编码1000,即0x03E8,就可以直接发送0x24 0x03 0xE8,最终只需要发送 3 个字节,减少了通信开销。

+

另外这里0x03E8两个字节低位是0xE8,先发送低位0xE8我们称之为小端编码,反之则是大端编码,两个皆可,双方规定一致即可。

+

在 MaixPy 中,要将一个数值转换成 bytes 类型也很简单,使用struct.pack函数即可,比如这里的0x03E8也就是十进制的1000,我们用

+ +
from struct import pack
+b = pack("<H", 1000)
+print(b)
+
+

这里<H表示小端编码,H表示一个 uint16类型的数据,最终得到b'\xe8\x03'的 bytes 类型数据。

+

同样的,二进制协议也可以有 帧头,数据内容,校验值,帧尾等,也可以不要帧尾,而是设计一个帧长的字段,看个人喜好即可。

+
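接收方解码时同样可以用 struct.unpack,以上面发送的 帧头 AA BB CC DD + 4 字节小端 int + 帧尾 FF 为例(示意代码):

from struct import unpack

frame = b'\xAA\xBB\xCC\xDD\x0A\x00\x00\x00\xFF'

# 先检查帧头帧尾,再按小端解码出 int 数值
assert frame[:4] == b'\xAA\xBB\xCC\xDD' and frame[-1:] == b'\xFF'
num = unpack("<i", frame[4:8])[0]
print(num)   # 10
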

MaixPy 内置通信协议

+

另外 MaixPy 也内置了一个通信协议可以直接使用。

+

这里的通信协议即:规定通信双方的以什么样的格式来传输内容,方便双方解析识别信息,是一个二进制协议,包括帧头、数据内容、校验等。
+完整的协议定义在 Maix 串口通信协议标准
+没有接触过通信协议可能看起来有点困难,结合下面的例子多看几遍就能理解了。

+

比如我们现在有一个物体检测,我们想检测到物体后通过串口发送给其它设备(比如 STM32 单片机或者 Arduino 单片机),告诉其我们检测到了什么物体,坐标是多少。

+

完整的例程:MaixPy/examples/protocol/comm_protocol_yolov5.py

+

首先我们需要检测到物体,参考 yolov5 检测物体的例程即可,这里我们就省略其它细节,来看检测到的结果是什么样

+ +
while not app.need_exit():
+    img = cam.read()
+    objs = detector.detect(img, conf_th = 0.5, iou_th = 0.45)
+    for obj in objs:
+        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color = image.COLOR_RED)
+        msg = f'{detector.labels[obj.class_id]}: {obj.score:.2f}'
+        img.draw_string(obj.x, obj.y, msg, color = image.COLOR_RED)
+    dis.show(img)
+
+

可以看到objs是多个检测结果,这里在屏幕上进行画框了,我们也可以在这里想办法把结果通过串口发送出去。
+这里我们不需要手动初始化串口,直接使用内置的maix.comm, maix.protocol模块,调用comm.CommProtocol会自动初始化串口,默认波特率是115200,串口协议的相关设置可以在设备的 系统设置->通信协议 里面修改。
+系统设置里面可能还有其它通信方式比如tcp,默认是uart,你也可以通过maix.app.get_sys_config_kv("comm", "method")来获取到当前设置的是不是uart

+ +
from maix import comm, protocol, app
+from maix.err import Err
+import struct
+
+def encode_objs(objs):
+    '''
+        encode objs info to bytes body for protocol
+        2B x(LE) + 2B y(LE) + 2B w(LE) + 2B h(LE) + 2B idx ...
+    '''
+    body = b""
+    for obj in objs:
+        body += struct.pack("<hhHHH", obj.x, obj.y, obj.w, obj.h, obj.class_id)
+    return body
+
+APP_CMD_ECHO = 0x01        # 自定义命令 1, 测试用,这里没用到,保留
+APP_CMD_DETECT_RES = 0x02  # 自定义命令 2, 发送检测到的物体信息
+                           # 可以根据自己的应用自定义更多的命令
+
+p = comm.CommProtocol(buff_size = 1024)
+
+while not app.need_exit():
+    # ...
+    objs = detector.detect(img, conf_th = 0.5, iou_th = 0.45)
+    if len(objs) > 0:
+        body = encode_objs(objs)
+        p.report(APP_CMD_DETECT_RES, body)
+    # ...
+
+

这里通过encode_objs函数将所有检测到的物体信息打包成bytes类型的数据,然后用p.report函数将结果发送出去。

+

这里我们对body内容进行了一个简单的定义,即2B x(LE) + 2B y(LE) + 2B w(LE) + 2B h(LE) + 2B idx ...
+含义是:

+
    +
  • 这张图中检测到多个物体,在body中按顺序排列,每个目标占用 2+2+2+2+2 = 10 个字节的长度,一共有body_len / 10个物体。
  • +
  • 第1、2个字节代表识别到的物体的左上角的 x 坐标,单位是像素,因为 yolov5 的结果这个坐标值有可能为负数,所以我们用一个short类型的值来表示,这里使用了小端编码(LE)。
  • +
+
+

这里小端即数值的低字节在前,比如坐标 x 为 100, 十六进制为 0x64,我们用两个字节的short来表示就是0x0064,小端编码成 bytes 就是0x64在前, 结果就是b'\x64\x00'。

+
+
    +
  • 同理,将后面的数据都依次编码,一个物体得到一个10字节长的bytes类型数据。
  • +
  • 循环将所有物体信息编码并拼接成一个bytes
  • +
+

在调用report函数时,底层会自动按照协议拼接上协议头、校验和等等,这样在另一端就能收到一帧完整的数据了。

+

在另一端收到信息后也要按照协议进行解码,如果接收端也是用 MaixPy 可以直接:

+ +
while not app.need_exit():
+    msg = p.get_msg()
+    if msg and msg.is_report and msg.cmd == APP_CMD_DETECT_RES:
+        print("receive objs:", decode_objs(msg.get_body()))
+        p.resp_ok(msg.cmd, b'1')
+
+
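例程中的 decode_objs 就是按照前面约定的 2B x(LE) + 2B y(LE) + 2B w(LE) + 2B h(LE) + 2B idx 格式解包,一个可能的实现示意如下(仅供参考,具体以 MaixPy/examples/protocol 中的完整例程为准):

+ +

+import struct
+
+def decode_objs(body : bytes):
+    '''按每 10 字节一个目标解包,返回 [[x, y, w, h, class_id], ...]'''
+    objs = []
+    for i in range(0, len(body) // 10 * 10, 10):
+        objs.append(list(struct.unpack("<hhHHH", body[i:i+10])))
+    return objs
+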

如果是其它设备比如STM32或者Arduino则可以参考 Maix 串口通信协议标准 附录中的 C 语言函数进行编解码。

+

其它教程

+ + + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/peripheral/wdt.html b/maixpy/doc/zh/peripheral/wdt.html new file mode 100644 index 00000000..a06597ec --- /dev/null +++ b/maixpy/doc/zh/peripheral/wdt.html @@ -0,0 +1,390 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 使用看门狗定时器 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 使用看门狗定时器

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

简介

+

为了防止程序出现问题,常常会用到看门狗定时器(WDT), 在程序出问题时自动重启系统。

+

原理就是有一个倒计时计数器,我们需要在程序的逻辑中定期地去设置这个倒计时时间(也叫喂狗),如果我们的程序在哪儿卡住了导致没有定期去设置倒计时,倒计时到 0 后硬件就会触发系统重启。

+

MaixPy 中使用 WDT

+ +
from maix import wdt, app, time
+
+w = wdt.WDT(0, 1000)
+
+while not app.need_exit():
+    w.feed()
+    # here sleep op is our operation
+    # 200 ms is normal, if > 1000ms will cause system reset
+    time.sleep_ms(200)
+
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/pro/compile_os.html b/maixpy/doc/zh/pro/compile_os.html new file mode 100644 index 00000000..e27076ef --- /dev/null +++ b/maixpy/doc/zh/pro/compile_os.html @@ -0,0 +1,411 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 为 MaixCAM 编译系统 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

为 MaixCAM 编译系统

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

为什么需要定制系统

+

正常情况下你只需要从 https://github.com/sipeed/MaixPy/releases 下载到适合 MaixCAM 的最新系统使用即可。
+有些情况需要定制系统,比如:

+
    +
  • 比如你要量产 1k 个产品,都想放一个自己的应用,并且自动开机启动,不想一个一个配置,就可以改一下builtin_files,然后打包一个系统,所有板子一烧录就自带了自定义的文件,不用开机后再拷贝一次。
  • +
  • 现在官方的系统没有你想要的软件包或者驱动,可以自己编译系统,选择自己想要的软件包编译定制的系统。
  • +
+

基础系统获取

+

原理是:以 https://github.com/sipeed/LicheeRV-Nano-Build/releases 提供的系统作为基础系统(不能直接给 MaixCAM 烧录使用,否则有烧坏屏幕风险),然后将 MaixCAM 定制的相关文件拷贝到基础系统,重新打包成 MaixCAM 能用的系统。

+

如果不需要对基础系统进行自定义,直接从 https://github.com/sipeed/LicheeRV-Nano-Build/releases 下载最新的系统镜像包即可。

+

如果基础系统无法满足你的要求,比如你需要自定义增删一些软件包和驱动等,按照 https://github.com/sipeed/LicheeRV-Nano-Build README 文档进行编译, 尽量使用 docker 编译以避免遇到编译环境问题,以及使用bash,不要使用zsh

+

注意编译出来的系统不能直接给 MaixCAM 烧录使用,否则有烧坏屏幕风险。

+

为 MaixCAM 拷贝文件

+

准备以下内容:

+
    +
  • 基础系统,是一个 .img 或者 .img.xz 文件。
  • +
  • 对于 MaixCAM 还需要放一些额外的文件进去,到MaixPy release 下载最新的 builtin_files.tar.xz 文件。
  • +
+
+

如果你需要放一些自定义的文件进系统,可以解压后往目录里面添加,比如你想系统烧录后 /root 目录下就会有一个 cat.jpg, 那么就往解压出来的 root 目录下放一个 cat.jpg。

+
+
    +
  • 下载或克隆 MaixPy 源码到本地。
  • +
  • 编译 MaixPy 获得 .whl 安装包文件,你也可以到 MaixPy release 下载最新的安装包。
  • +
+

MaixPy/tools/os目录下,执行

+ +
./gen_os.sh <base_os_filepath> <maixpy_whl_filepath> <builtin_files_dir_path> <os_version_str> [skip_build_apps]"
+
+

这里参数说明:

+
    +
  • base_os_filepath: 基础系统路径, img 或者 img.xz 格式。
  • +
  • maixpy_whl_filepath: MaixPy 软件包, whl 格式。
  • +
  • builtin_files_dir_path: MaixCAM 自定义文件, 可以在 MaixPy release 下载到最新的。
  • +
  • os_version_str: 系统版本,格式要满足类似 maixcam-2024-08-16-maixpy-v4.4.21 的规范。
  • +
  • skip_build_apps: 跳过编译内置应用,可选参数,传 1 则会跳过,不传这个参数会将 MaixCDK 和 MaixPy 中的应用都编译并拷贝到系统中。
  • +
+

举例:

+ +
./gen_os.sh '/home/xxx/.../LicheeRV-Nano-Build/install/soc_sg2002_licheervnano_sd/images/2024-08-13-14-43-0de38f.img' ../../dist/MaixPy-4.4.21-py3-none-any.whl '/home/xxx/.../sys_builtin_files' maixcam-2024-08-15-maixpy-v4.4.21
+
+

等待编译内置应用以及拷贝完成,在 MaixPy/tools/os/tmp 目录下就会有一个maixcam-2024-08-15-maixpy-v4.4.21.img.xz系统镜像了。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/projects/face_tracking.html b/maixpy/doc/zh/projects/face_tracking.html new file mode 100644 index 00000000..269992f1 --- /dev/null +++ b/maixpy/doc/zh/projects/face_tracking.html @@ -0,0 +1,486 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 2轴舵机云台人脸追踪 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 2轴舵机云台人脸追踪

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期版本作者更新内容
2024-06-111.0.0iawak9lkm + + 初版文档 + +
+
+
+ +
+
+ +

阅读本文前,确保已经知晓如何开发MaixCAM,详情请阅读快速开始

+

源码地址

+

APP下载地址

+

简介

+

基于 MaixCAM 和云台的人脸追踪程序。实际效果如下图所示:

+

+

+

如何使用例程

+
    +
  • 组装好您的云台和MaixCAM。

    +
  • +
  • 修改 main.py 中的参数。

    +

    修改每个舵机使用的 MaixCAM 引脚,指定的引脚必须具备 PWM 功能。servos.Servos 会自行将该引脚配置为 PWM 功能。

    + +
    ROLL_PWM_PIN_NAME = "A17"
    +PITCH_PWM_PIN_NAME = "A16"
    +
    +

    修改两个舵机的初始位置。

    + +
    init_pitch = 80         # init position, value: [0, 100], means minimum angle to maximum angle of servo
    +init_roll = 50          # 50 means middle
    +
    +

    修改两个舵机各自的活动范围的最小最大 PWM 占空比。请注意,某些云台结构中的舵机超出物理限制的最大活动范围时可能会造成不可预期的后果,请务必保证以下设定值对应的舵机运动范围内无阻碍。

    + +
    PITCH_DUTY_MIN  = 3.5   # The minimum duty cycle corresponding to the range of motion of the y-axis servo.
    +PITCH_DUTY_MAX  = 9.5   # Maximum duty cycle corresponding to the y-axis servo motion range.
    +ROLL_DUTY_MIN   = 2.5   # Minimum duty cycle for x-axis servos.
    +ROLL_DUTY_MAX   = 12.5  # Maximum duty cycle for x-axis servos.
    +
    +

    选择舵机的运动方向。

    + +
    pitch_reverse = False                   # reverse out value direction
    +roll_reverse = True                     # reverse out value direction
    +
    +
  • +
  • 最后执行代码即可。

    +

    如果您是从 MaixHub 上安装的应用,在启动器中点击 face_tracking 即可执行本程序。

    +

    如果您是从 Github 上获取的源码, 您可以在 MaixVision 中导入该工程的文件夹执行整个工程即可。 MaixVision详情请参考 MaixVision说明

    +

    当然您也可以将整个工程文件夹通过您喜欢的方式拷贝到我们的 MaixCAM 上, 然后用 python 执行。

    +
  • +
  • 想要退出程序按左上角的按钮即可。

    +

    +
  • +
+

常见问题

+
    +
  • 人脸跟踪效果不理想。

    +

    不同的云台使用的 PID 参数不尽相同,您可以通过调节 PID 值来使得追踪效果更丝滑。

    + +
    pitch_pid = [0.3, 0.0001, 0.0018, 0]    # [P I D I_max]
    +roll_pid  = [0.3, 0.0001, 0.0018, 0]    # [P I D I_max]
    +
    +
  • +
  • 在完成跟踪后,云台对着不动的人脸小幅度左右抖动一段时间。

    +

    通常可以通过调节 PID 来使得该影响尽可能小;但是无法避免云台物理结构带来的抖动。可以尝试调节死区来减小抖动。

    + +
    target_ignore_limit = 0.08
    +# when target error < target_err_range*target_ignore_limit , set target error to 0
    +
    +
  • +
  • 显示屏显示或终端打印 PIN: XXX does not exist

    +

    这是因为 MaixCAM 板子上引出的引脚中并不存在该引脚,请在 MaixCAM 上选择一个带 PWM 功能的引脚。

    +
  • +
  • 显示屏显示或终端打印 Pin XXX doesn't have PWM function

    +

    这是因为 MaixCAM 板子上引出的这个引脚没有 PWM 功能,请选择一个带 PWM 功能的引脚。

    +
  • +
+

如何追踪其他物体

+
    +
  • main.py 中存在一个类 Target,该类用于自定义需要追踪的目标。

    +
  • +
  • __init__ 中,请初始化您需要用到的对象,比如摄像头等。

    +
  • +
  • __get_target() 中,您需要计算出被追踪物体的中心点,如果帧中不存在被追踪物体,请返回 -1,-1 以确保程序在未找到目标时暂时不做动作。同时,您也需要在返回坐标点之前调用 self.__exit_listener(img)self.disp.show(img) 确保程序能够与您正常的完成交互。

    +
  • +
+ + +
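根据上面的约定,一个 Target 类的大致骨架类似下面这样(仅为示意,这里假设用寻找色块的方式确定目标中心,阈值、退出监听等细节请以工程源码 main.py 为准):

+ +

+from maix import camera, display, image
+
+class Target:
+    def __init__(self):
+        # 初始化需要用到的对象,比如摄像头和屏幕
+        self.cam = camera.Camera(320, 240)
+        self.disp = display.Display()
+        self.thresholds = [[0, 80, 40, 80, 10, 80]]   # 假设追踪红色色块,阈值按实际调整
+
+    def __get_target(self):
+        img = self.cam.read()
+        blobs = img.find_blobs(self.thresholds)
+        self.disp.show(img)                # 返回坐标前先完成显示等交互
+        if not blobs:
+            return -1, -1                  # 没找到目标时返回 -1,-1,程序暂时不做动作
+        b = blobs[0]
+        return b.x() + b.w() // 2, b.y() + b.h() // 2   # 返回目标中心点
+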
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/projects/index.html b/maixpy/doc/zh/projects/index.html new file mode 100644 index 00000000..a019661b --- /dev/null +++ b/maixpy/doc/zh/projects/index.html @@ -0,0 +1,396 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 项目实战 介绍和汇总 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 项目实战 介绍和汇总

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

简介

+

这里提供一些常见的项目实战示例,方便社区成员可以参考复现使用,也方便激发大家的灵感做出更多更好的应用和项目出来。

+

要找到用 MaixPy 实现的相关的项目,有几种方式:

+

MaixPy 官方文档

+

也就是本文档左边目录可以找到的项目实战,比如小车巡线

+

如果你有好的项目,或者好的项目推荐,也可以贡献文档添加进来。

+

MaixHub 项目分享广场

+

MaixHub 项目分享 栏目可以找到项目分享。

+

有高质量的分享也会被链接到 MaixPy 官方文档。

+

你也可以分享你的项目制作方法,会获得官方(必获得)以及社区成员的现金打赏(通常高质量能解决急需需求的更容易被打赏)。

+

推荐项目:

+ +

MaixHub 应用分享

+

除了项目分享以外,还可以在MaixHub 应用商店 找到可以直接运行的应用,有部分应用可能是用 MaixPy 编写的,如果作者提供了源码或者写了详细的教程也都可以参考。

+

推荐项目:

+ + + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/projects/line_tracking_robot.html b/maixpy/doc/zh/projects/line_tracking_robot.html new file mode 100644 index 00000000..dbe673b4 --- /dev/null +++ b/maixpy/doc/zh/projects/line_tracking_robot.html @@ -0,0 +1,461 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 小车巡线 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 小车巡线

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期版本作者更新内容
2024-05-091.0.0lxowalle + + 初版文档 + +
+
+
+ +
+
+ +

阅读本文前,确保已经知晓如何开发MaixCAM,详情请阅读快速开始

+

简介

+

本文将介绍如何使用MaixPy实现寻线小车

+

如何使用MaixPy实现寻线小车

+
    +
  1. 准备MaixCAM与小车
  2. +
  3. 实现寻线功能
  4. +
  5. 实现小车控制功能
  6. +
+

准备MaixCAM与小车

+

TODO

+

实现寻线功能

+

使用image模块的get_regression可以快速寻找到直线,详情见寻找直线

+

代码实现:

+ +
from maix import camera, display, image
+
+cam = camera.Camera(320, 240)
+disp = display.Display()
+
+# thresholds = [[0, 80, 40, 80, 10, 80]]      # red
+thresholds = [[0, 80, -120, -10, 0, 30]]    # green
+# thresholds = [[0, 80, 30, 100, -120, -60]]  # blue
+
+while 1:
+    img = cam.read()
+
+    lines = img.get_regression(thresholds, area_threshold = 100)
+    for a in lines:
+        img.draw_line(a.x1(), a.y1(), a.x2(), a.y2(), image.COLOR_GREEN, 2)
+        theta = a.theta()
+        rho = a.rho()
+        if theta > 90:
+            theta = 270 - theta
+        else:
+            theta = 90 - theta
+        img.draw_string(0, 0, "theta: " + str(theta) + ", rho: " + str(rho), image.COLOR_BLUE)
+
+    disp.show(img)
+
+
+

上述代码实现了寻线功能,其中参数需注意:

+
    +
  • 设置合适的thresholds值来寻找到对应的直线
  • +
  • 设置合适的area_threshold值来过滤环境干扰,可以过滤一些面积小的直线
  • +
  • 使用a.theta()获取直线的角度
  • +
  • 使用a.rho()获取直线与原点(原点在左上角)的距离
  • +
+

根据实际环境调试好寻线参数后, 就可以利用a.theta()a.rho()控制小车方向了。

+

实现小车控制功能

+

TODO

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/sidebar.yaml b/maixpy/doc/zh/sidebar.yaml new file mode 100644 index 00000000..1b098e5d --- /dev/null +++ b/maixpy/doc/zh/sidebar.yaml @@ -0,0 +1,194 @@ +items: +- file: README.md + label: 快速开始 +- file: README_no_screen.md + label: 快速开始(无屏幕版) +- file: faq.md + label: FAQ 常见问题 + +- label: 基础 + collapsed: false + items: + - file: basic/os.md + label: 升级和烧录系统 + - file: basic/app.md + label: 应用使用-开发-商店 + - file: basic/maixpy_upgrade.md + label: 更新 MaixPy + - file: basic/maixvision.md + label: MaixVision 使用 + - file: basic/python.md + label: Python 语法 + - file: basic/linux_basic.md + label: Linux 基础知识 + - file: basic/python_pkgs.md + label: 添加额外的 Python 包 + - file: basic/auto_start.md + label: 开机自启 + - file: basic/view_src_code.md + label: 查看 API 对应源码 + +- label: 基础图像和算法 + items: + - file: vision/display.md + label: 屏幕使用 + - file: vision/camera.md + label: 摄像头使用 + - file: vision/image_ops.md + label: 基本图像操作 + - file: vision/touchscreen.md + label: 触摸屏使用 + - file: vision/find_blobs.md + label: 寻找色块 + - file: vision/line_tracking.md + label: 寻找直线 + - file: vision/qrcode.md + label: 二维码识别 + - file: vision/apriltag.md + label: AprilTag 识别 + - file: vision/opencv.md + label: 使用 OpenCV + - file: gui/i18n.md + label: I18N 国际化(多语言) + +- label: AI 视觉 + items: + - file: vision/ai.md + label: AI 视觉基本知识 + - file: vision/classify.md + label: AI 物体分类 + - file: vision/yolov5.md + label: YOLO11/v8/v5 物体检测 + - file: vision/face_detection.md + label: 人脸及关键点检测 + - file: vision/face_recognition.md + label: 人脸识别 + - file: vision/body_key_points.md + label: 人体关键点检测 + - file: vision/segmentation.md + label: 图像语义分割 + - file: vision/self_learn_classifier.md + label: 自学习分类器 + - file: vision/self_learn_detector.md + label: 自学习检测器 + - file: vision/object_track.md + label: 物体轨迹跟踪和计数 + - file: vision/ocr.md + label: OCR 文字识别 + - file: vision/maixhub_train.md + label: MaixHub 在线训练 AI 模型 + - file: vision/customize_model_yolov5.md + label: 离线训练 YOLOv5 模型 + - file: vision/customize_model_yolov8.md + label: 离线训练 YOLO11/YOLOv8 模型 + - file: ai_model_converter/maixcam.md + label: ONNX 模型转给 MaixCAM 用 + + +- label: AI 听觉 + items: + - file: audio/record.md + label: 录音 + - file: audio/play.md + label: 播放音频 + - file: audio/recognize.md + label: 语音实时识别 + - file: audio/digit.md + label: 连续中文数字识别 + - file: audio/keyword.md + label: 关键词识别 + - file: audio/synthesis.md + label: 语音合成 + - file: audio/ai_classify.md + label: AI 声音分类 + +- label: 视频 + items: + - file: video/record.md + label: 录像 + - file: video/play.md + label: 播放视频 + - file: video/jpeg_streaming.md + label: JPEG 串流 + - file: video/rtsp_streaming.md + label: RTSP 串流 + - file: video/rtmp_streaming.md + label: RTMP 串流 + +- label: 网络通信 + items: + - file: network/network_settings.md + label: 网络设置 + - file: network/http.md + label: http 网络通信 + - file: network/socket.md + label: socket 网络通信 + - file: network/mqtt.md + label: MQTT 网络通信 + - file: network/websocket.md + label: websocket 网络通信 + - file: network/flask.md + label: Flask Web 服务器 + +- label: 片上外设 + items: + - file: peripheral/pinmap.md + label: PINMAP 使用 + - file: peripheral/gpio.md + label: GPIO 和 点灯 + - file: peripheral/uart.md + label: UART 串口使用 + - file: peripheral/i2c.md + label: I2C 使用 + - file: peripheral/pwm.md + label: PWM 使用 + - file: peripheral/spi.md + label: SPI 使用 + - file: peripheral/wdt.md + label: WDT 看门狗使用 + - file: peripheral/adc.md + label: ADC 使用 + - file: peripheral/hid.md + label: HID Device 使用 + +- 
label: 片外模块 + items: + - file: modules/temp_humi.md + label: 温湿度传感器 + - file: modules/acc.md + label: 加速度计使用 + - file: modules/rtc.md + label: RTC 时钟模块使用 + - file: modules/tmc2209.md + label: 步进电机 TMC2209 + - file: modules/tof.md + label: TOF 测距 + - file: modules/thermal_cam.md + label: 热成像摄像头 + +- label: 项目实战 + items: + - file: projects/README.md + label: 介绍和汇总 + - file: projects/line_tracking_robot.md + label: 小车巡线 + - file: projects/face_tracking.md + label: 人脸追踪2轴云台 + +- label: 进阶 + collapsed: false + items: + - file: source_code/contribute.md + label: 贡献文档和代码 + - file: source_code/build.md + label: 构建 MaixPy 源码 + - file: source_code/faq.md + label: MaixPy 源码 FAQ + - file: source_code/add_c_module.md + label: 使用 C/C++ 写一个模块 + - file: source_code/maixcdk.md + label: 使用 MaixCDK 开发 + - file: pro/compile_os.md + label: 编译系统 + + diff --git a/maixpy/doc/zh/source_code/add_c_module.html b/maixpy/doc/zh/source_code/add_c_module.html new file mode 100644 index 00000000..353ab8c5 --- /dev/null +++ b/maixpy/doc/zh/source_code/add_c_module.html @@ -0,0 +1,440 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 给 MaixCAM MaixPy 添加一个 C/C++ 模块 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

给 MaixCAM MaixPy 添加一个 C/C++ 模块

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

简介

+

有时候需要高效地执行某个函数, Python 的速度无法满足时,就可以使用 C/C++ 或者其它编译型语言来实现。

+

通用函数封装

+

如果你想封装的函数实现的功能不依赖 MaixPy 的其它功能,直接使用 Python 使用 C/C++ 添加模块的通用方法,具体方法可以自行百度,比如 ffi, ctype 等

+
+

欢迎 PR 添加方法

+
+

如果你的模块还想依赖 MaixPy 的其它基础 API

+

方法一

+

直接修改 MaixPy 固件,然后编译过即可,参考 查看 MaixPy API 源码,这种方法最简单快捷,如果代码封装好了还能合并到官方仓库(提交 PR)。

+
    +
  • 按照编译 MaixPy 源码 通过即可获得dist/***.whl安装包。
  • +
  • dist目录下的.whl包发送到设备,然后使用运行代码import os;os.system("pip install /root/xxxxx.whl")即可(替换路径)。
  • +
  • 如果调试的时候觉得安装 .whl 包太慢了,可以使用maixcdk build 编译,然后使用scp -r maix root@10.228.104.1:/usr/lib/python3.11/site-packages直接拷贝到设备系统中覆盖原有的包,这里需要根据设备 ip 替换一下。
  • +
  • 当你调试好后如果觉得自己填加的功能不错,可以考虑合并到官方的仓库,具体方法可以搜索引擎搜索"github 提交 PR"相关关键词学习。
  • +
+

修改代码:
+正如 查看 MaixPy API 源码 文中所描述的查看和修改源码的方式,增加 C++ 函数,并且添加注释,然后编译后 MaixPy 中就能调用了,非常简单。

+

比如:

+ +
namespace maix::test
+{
+    /**
+     * My function, add two integer.
+     * @param a arg a, int type
+     * @param b arg b, int type
+     * @return int type, will a + b
+     * @maixpy maix.test.add
+     */
+    int add(int a, int b);
+}
+
+

没错,直接写一个 C++ 语法的函数,注意这里加了一个@maixpy 的注释,编译时会自动生成 Python 函数,就是这么简单!
+然后就能通过maix.test.add(1, 2) 来调用函数了。

+

方法二

+

基于工程模板创建一个 MaixPy 模块工程,这种方法适用于不想改动 MaixPy 源码,希望单独加一个包,并且还能用上 MaixPy(MaixCDK)的 API 的情况。方法如下:

+
    +
  • 首先编译 MaixPy 源码 通过,保证我们的编译环境没问题。
  • +
  • 复制一份 MaixPy/tools/maix_module 工程模板到一个新的目录,可以和MaixPy放在同一个目录。比如将所有文件和目录复制到了maix_xxx 目录下。
  • +
  • maix_xxx目录下,终端执行python init_files.py来初始化项目文件。
  • +
  • 修改项目名:修改module_name.txt 文件,改成你要的模块名称,必须以maix_开头,这样方便其它用户能在 pypi.org 或者 github.com 搜索到你的项目。
  • +
  • 和 MaixPy 一样执行python setup.py bdist_wheel linux 就可以开始为电脑构建。
  • +
  • 构建完成后可以直接在项目根目录执行python -c "import maix_xxx;maix_xxx.basic.print('Li Hua')"就能运行你的模块函数了。
  • +
  • 执行python setup.py bdist_wheel maixcam 就可以为MaixCAM 构建软件包了。需要注意的是,构建过程中的代码提示文件(pyi文件)只能在给linux 平台构建的时候生成,所以在正式发布的时候需要先执行上一步的linux平台构建生成代码提示文件,然后再执行本步的命令生成MaixCAM平台的软件包。
  • +
  • dist目录下的.whl包发送到设备,然后使用运行代码import os;os.system("pip install /root/xxxxx.whl")即可(替换路径)。
  • +
  • 如果调试的时候觉得安装 .whl 包太慢了,可以使用maixcdk build 编译,然后使用scp -r maix_xxx root@10.228.104.1:/usr/lib/python3.11/site-packages直接拷贝到设备系统中覆盖原有的包,这里需要根据你的包名和设备 ip 替换一下。
  • +
  • 当你调试好代码后,可以考虑将代码开源到github.com,并且上传到pypi.org(具体上传方法可以看官方文档或者搜索教程,大概就是pip install twine然后 twine upload dist/maix_xxx***.whl就可以了。),写好后欢迎到maixhub.com/share来分享告诉大家你的成果!
  • +
+

修改代码:
+正如 查看 MaixPy API 源码 文中所描述的查看和修改源码的方式,在components/maix/include 和 components/maix/src 下增加源文件,增加 C++ 函数,并且添加注释,然后编译后就直接能调用了,非常简单。
+比如:

+ +
namespace maix_xxx::test
+{
+    /**
+     * My function, add two integer.
+     * @param a arg a, int type
+     * @param b arg b, int type
+     * @return int type, will a + b
+     * @maix_xxx maix_xxx.test.add
+     */
+    int add(int a, int b);
+}
+
+

没错,直接写一个 C++ 语法的函数,注意这里加了一个@maix_xxx 的注释,编译时会自动生成 Python 函数,就是这么简单!
+然后就能通过maix_xxx.test.add(1, 2) 来调用函数了。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/source_code/build.html b/maixpy/doc/zh/source_code/build.html new file mode 100644 index 00000000..0881176f --- /dev/null +++ b/maixpy/doc/zh/source_code/build.html @@ -0,0 +1,443 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 开发源代码指南 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 开发源代码指南

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

获取源代码

+ +
mkdir -p ~/maix
+cd ~/maix
+git clone https://github.com/sipeed/MaixPy
+
+

获取 MaixCDK 源码

+

MaixPy 项目依赖于 MaixCDK,需要先克隆它,放到电脑的某个目录(勿放在 MaixPy 目录下)

+ +
cd ~/maix
+git clone https://github.com/sipeed/MaixCDK
+
+

然后需要设置环境变量 MAIXCDK_PATH 指定 MaixCDK 的路径,可以在 ~/.bashrc 或者~/.zshrc(根据你使用的shell决定)添加:

+ +
export MAIXCDK_PATH=~/maix/MaixCDK
+
+

只有在成功设置环境变量后, MaixPy 才能找到 MaixCDK 源码。

+

构建并打包成 wheel 文件

+ +
cd ~/maix/MaixPy
+python setup.py bdist_wheel maixcam
+
+

maixcam 可以被替换为其他板卡配置, 请查看 MaixPy/platforms 目录。

+

构建成功后, 你会在 dist 目录中找到 wheel 文件, 传输到设备(开发板),在设备终端中使用 pip install -U MaixPy****.whl 在你的设备上安装或升级。

+
+

python setup.py bdist_wheel maixcam --skip-build 不会执行构建命令, 只会打包 wheel 文件, 因此你可以先使用 maixcdk menuconfigmaixcdk build 来自定义构建。

+
+
+

另外如果你是在调试 API,需要频繁安装,使用 pip 安装会比较慢,可以直接编译后拷贝 maix 目录到设备的 /usr/lib/python3.11/site-packages目录下覆盖旧的文件即可。

+
+

手动构建

+ +
maixcdk build
+
+

修改源代码后运行测试

+
    +
  • 首先, 构建源代码
  • +
+ +
maixcdk build
+
+
    +
  • 如果为 PC 自身构建(平台 linux):
  • +
+

然后执行 ./run.sh your_test_file_name.py 来运行 Python 脚本。

+ +
cd test
+./run.sh examples/hello_maix.py
+
+
    +
  • 如果为板卡交叉编译:
      +
    • 最快的方式是将 maix 目录复制到设备的 /usr/lib/python3.11/site-packages/ 目录, 然后在设备上运行脚本。
    • +
    • 或者打包 wheel 文件并在设备上使用 pip install -U MaixPy****.whl 安装, 然后在设备上运行脚本。
    • +
    +
  • +
+

本地预览文档

+

文档位于 docs 目录, 使用 Markdown 格式, 你可以使用 teedoc 来生成网页版本的文档。

+

API 文档会在构建 MaixPy 固件时生成, 如果你没有构建 MaixPy, API 文档将会是空的

+ +
pip install teedoc -U
+cd docs
+teedoc install -i https://pypi.tuna.tsinghua.edu.cn/simple
+teedoc serve
+
+

然后访问 http://127.0.0.1:2333 在网页浏览器中预览文档。

+

对于想要贡献的开发者

+

请查看 MaixPy 开发源代码指南

+

如果在使用源代码时遇到任何问题, 请先参考 FAQ

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/source_code/contribute.html b/maixpy/doc/zh/source_code/contribute.html new file mode 100644 index 00000000..66a8381f --- /dev/null +++ b/maixpy/doc/zh/source_code/contribute.html @@ -0,0 +1,409 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 参与 MaixCAM MaixPy 文档修改和贡献代码 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

参与 MaixCAM MaixPy 文档修改和贡献代码

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

参与 MaixPy 文档修改

+
    +
  • 点击要修改的文档右上角的编辑本页按钮,进入 github 源文档页面。
  • +
  • 保证已经登录了 GitHub 账号。
  • +
  • 在 github 源文档页面点击右上角铅笔按钮修改文档内容。
  • +
  • github 会提示需要 fork 一份到自己的仓库,点击 fork 按钮。
  • +
+
+

这一步就是将 MaixPy 源码仓库复刻一份到你自己的账号下,这样你就可以自由修改了。

+
+
    +
  • 修改文档内容,然后在页面底部填写修改说明,点击提交修改。
  • +
  • 然后在你的仓库中找到 Pull requests 按钮,点击创建一个 Pull requests。
  • +
  • 然后在弹出的页面中填写修改说明,点击提交 Pull requests,其它人和管理员就可以在Pull requests 页面看到你的修改了。
  • +
  • 等待管理员审核通过后,你的修改就会合并到 MaixPy 源码仓库中了。
  • +
  • 合并成功后,文档会自动更新到 MaixPy 官方文档
  • +
+
+

文档经过 CDN 缓存了的,可能需要等待一段时间才能看到更新,紧急更新可以联系管理员手动刷新。
+也可以访问 en.wiki.sipeed.com/maixpy 查看 github pages 服务版本,这个是没有缓存实时更新的。

+
+

参与 MaixPy 代码贡献

+
    +
  • 访问 MaixPy 代码仓库地址:github.com/sipeed/MaixPy
  • +
  • 在修改代码前最好先创建一个 issue ,描述你要修改的内容让大家知道你的想法和计划,这样大家可以参与修改讨论,以免重复劳动。
  • +
  • 点击右上角的 fork 按钮,将 MaixPy 代码仓库复刻一份到你自己的账号下。
  • +
  • 然后在你的账号下 clone 一份代码到本地。
  • +
  • 修改代码后提交到你的仓库中。
  • +
  • 然后在你的仓库中找到 Pull requests 按钮,点击创建一个 Pull requests。
  • +
  • 然后在弹出的页面中填写修改说明,点击提交 Pull requests,其它人和管理员就可以在Pull requests 页面看到你的修改了。
  • +
  • 等待管理员审核通过后,你的修改就会合并到 MaixPy 源码仓库中了。
  • +
+
+

需要注意的是 MaixPy 的代码大多数是从 MaixCDK 自动生成的,所以如果你修改 C/C++ 源码,很有可能你需要先修改这个仓库。

+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/source_code/faq.html b/maixpy/doc/zh/source_code/faq.html new file mode 100644 index 00000000..02b28d11 --- /dev/null +++ b/maixpy/doc/zh/source_code/faq.html @@ -0,0 +1,391 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 源代码常见问题 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 源代码常见问题

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

subprocess.CalledProcessError: Command '('lsb_release', '-a')' returned non-zero exit status 1.

+

以 root 身份编辑 /usr/bin/lsb_release,将第一行从 #!/usr/bin/python3 更改为 python3

+

然后重新编译,应该就可以工作了。

+

ImportError: arg(): could not convert default argument 'format: maix::image::Format' in method '<class 'maix._maix.camera.Camera'>.init' into a Python object (type not registered yet?)

+

Pybind11 需要你先注册 image::Format,然后才能在 camera::Camera 中使用它,所以我们必须先在生成的 build/maixpy_wrapper.cpp 源文件中定义 image::Format

+

要实现这一点,请编辑 components/maix/headers_priority.txt,被依赖的应该放在依赖它的前面。
+例如:

+ +
maix_image.hpp
+maix_camera.hpp
+
+

/usr/bin/ld: /lib/libgdal.so.30: undefined reference to `std::condition_variable::wait(std::unique_lockstd::mutex&)@GLIBCXX_3.4.30' collect2: error: ld returned 1 exit status

+

一般在为 Linux 构建并且使用 conda 环境时容易出现,是 conda 环境中一些库的编译参数问题,解决方法是不用 conda 即可,或者单独找到 conda 中的那个库,替换成系统的或者直接删掉(会从系统找)。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/source_code/maixcdk.html b/maixpy/doc/zh/source_code/maixcdk.html new file mode 100644 index 00000000..1d6a5564 --- /dev/null +++ b/maixpy/doc/zh/source_code/maixcdk.html @@ -0,0 +1,380 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM 切换到 MaixCDK 使用 C/C++ 开发应用 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM 切换到 MaixCDK 使用 C/C++ 开发应用

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

除了使用 MaixPy 开发,还有对应的 C/C++ SDK 可以使用,项目名称为 MaixCDK

+

MaixCDK 介绍

+

MaixPy 基于 MaixCDK 构建,MaixPy 的大多数 API 都是基于 MaixCDK 的 API 自动生成的,所以 MaixPy 有的功能 MaixCDK 都包含。
+如果你更熟悉 C/C++ 编程,或者需要更高的性能,可以使用 MaixCDK 进行开发。

+

MaixCDK 使用

+

MaixCDK 代码仓库地址:github.com/sipeed/MaixCDK, 你可以在这里找到 MaixCDK 的代码和文档。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/video/jpeg_streaming.html b/maixpy/doc/zh/video/jpeg_streaming.html new file mode 100644 index 00000000..d669918d --- /dev/null +++ b/maixpy/doc/zh/video/jpeg_streaming.html @@ -0,0 +1,539 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 视频流 JPEG 推流 / 发送图片到服务器 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 视频流 JPEG 推流 / 发送图片到服务器

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
日期版本作者更新内容
2024-05-201.0.1lxowalle + + 更新JPEG-HTTP用法 + +
2024-04-031.0.0neucrack + + 初版文档 + +
+
+
+ +
+
+ +

简介

+

有时需要将图像发送到服务器,或者将摄像头的视频推送到服务器,这里提供两种方法:

+
    +
  • 一个最简单的方法,即压缩成 JPEG 图片,然后一张一张地发送到服务器。注意,这是一种最简单的方法,不算很正规的视频推流方法,也不适合高分辨率高帧率的视频流,因为这只是一张一张发送图片,如果要高效推送视频流,请使用后文的 RTSP 或者 RTMP 模块。
  • +
  • 建立一个HTTP服务器, 让PC端可以通过浏览器直接访问
  • +
+

作为客户端推流的方法

+ +
from maix import image
+import requests
+
+# create image
+img = image.Image(640, 480, image.Format.FMT_RGB)
+# draw something
+img.draw_rect(60, 60, 80, 80, image.Color.from_rgb(255, 0, 0))
+
+# convert to jpeg
+jpeg = img.to_format(image.Format.FMT_JPEG) # image.Format.FMT_PNG
+# get jpeg bytes
+jpeg_bytes = jpeg.to_bytes()
+
+# faster way, borrow memory from jpeg object,
+# but be carefully, when jpeg object is deleted, jpeg_bytes object MUST NOT be used, or program will crash
+# jpeg_bytes = jpeg.to_bytes(copy = False)
+
+# send image binary bytes to server
+url = "http://192.168.0.123:8080/upload"
+res = requests.post(url, data=jpeg_bytes)
+print(res.status_code)
+print(res.text)
+
+

可以看到,先将图片转换成了 JPEG 格式,然后将 JPEG 图片的二进制数据通过 HTTP POST 请求发送到服务器。

+

作为服务器推流的方法

+ +
from maix import camera, time, app, http
+
+html = """<!DOCTYPE html>
+<html>
+<head>
+    <title>JPG Stream</title>
+</head>
+<body>
+    <h1>MaixPy JPG Stream</h1>
+    <img src="/stream" alt="Stream">
+</body>
+</html>"""
+
+cam = camera.Camera(320, 240)
+stream = http.JpegStreamer()
+stream.set_html(html)
+stream.start()
+
+print("http://{}:{}".format(stream.host(), stream.port()))
+while not app.need_exit():
+    t = time.ticks_ms()
+    img = cam.read()
+    jpg = img.to_jpeg()
+    stream.write(jpg)
+    print(f"time: {time.ticks_ms() - t}ms, fps: {1000 / (time.ticks_ms() - t)}")
+
+

步骤:

+
    +
  1. 导入image、camera和http模块

    + +
    from maix import image, camera, http
    +
    +
  2. +
  3. 初始化摄像头

    + +
    cam = camera.Camera(320, 240) # 初始化摄像头,输出分辨率320x240 RGB格式
    +
    +
  4. +
  5. 初始化Stream对象

    + +
    stream = http.JpegStreamer()
    +stream.start()
    +
    +
      +
    • http.JpegStreamer()用来创建一个JpegStreamer对象,这个对象将会启动一个http服务器,用来向客户端发布jpeg图像流
    • +
    • stream.start()用来启动http服务器
    • +
    +
  6. +
  7. 自定义html样式(可选)

    + +
    html = """<!DOCTYPE html>
    +<html>
    +<head>
    +    <title>JPG Stream</title>
    +</head>
    +<body>
    +    <h1>MaixPy JPG Stream</h1>
    +    <img src="/stream" alt="Stream">
    +</body>
    +</html>"""
    +
    +stream.set_html(html)
    +
    +
      +
    • html = xxxhtml代码,可以用来定制自己的网页风格。注意核心代码是<img src="/stream" alt="Stream">,一定不要漏了这行代码。
    • +
    • stream.set_html(html)用来设置自定义的html代码,这一步是可选的。默认浏览地址是http://设备的ip:8000
    • +
    +
  8. +
  9. 从摄像头获取图片并推流

    + +
    while 1:
    +    img = cam.read()
    +    jpg = img.to_jpeg()
    +    stream.write(jpg)
    +
    +
      +
    • img = cam.read()从摄像头获取一张图像,当初始化的方式为cam = camera.Camera(320, 240)时,img对象是一张分辨率为320x240的RGB图。
    • +
    • jpg = img.to_jpeg()将图像转换为jpeg格式
    • +
    • stream.write(jpg)向流中写入图像数据,http服务器将会把这个图像发送到http客户端。
    • +
    +
  10. +
  11. 完成,运行上述代码后, 你可以通过浏览器直接看到视频流, 默认地址为http://设备的ip:8000。打开你的浏览器看看吧!

    +
  12. +
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/video/play.html b/maixpy/doc/zh/video/play.html new file mode 100644 index 00000000..2c3f1ddd --- /dev/null +++ b/maixpy/doc/zh/video/play.html @@ -0,0 +1,479 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy 播放视频 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy 播放视频

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期版本作者更新内容
2024-08-191.0.0lxowalle + + 初版文档 + +
+
+
+ +
+
+ +

简介

+

本文档提供播放视频功能的使用方法。

+

MaixPy支持播放h264mp4flv格式的视频,需要注意目前只支持avc编码的mp4flv文件

+

播放MP4视频

+

一个播放mp4视频的示例,视频文件路径为/root/output.mp4

+ +
from maix import video, display, app
+
+disp = display.Display()
+d = video.Decoder('/root/output.mp4')
+print(f'resolution: {d.width()}x{d.height()} bitrate: {d.bitrate()} fps: {d.fps()}')
+d.seek(0)
+while not app.need_exit():
+    ctx = d.decode_video()
+    if not ctx:
+        d.seek(0)
+        continue
+
+    img = ctx.image()
+    disp.show(img)
+    print(f'need wait : {ctx.duration_us()} us')
+
+

步骤:

+
    +
  1. 导入模块并初始化摄像头

    + +
    from maix import video, display, app
    +disp = display.Display()
    +
    +
      +
    • disp = display.Display()用来初始化显示屏,用于显示解码的图像
    • +
    +
  2. +
  3. 初始化Decoder模块

    + +
    d = video.Decoder('/root/output.mp4')
    +
    +
      +
    • d = video.Decoder('/root/output.mp4')用来初始化解码器,并设置需要播放的视频文件路径。如果你需要播放flv文件,则可以填写flv为后缀的文件路径,例如{your_file_path}.flv,如果你需要播放h264文件,则可以填写h264为后缀的文件路径,例如{your_file_path}.h264
    • +
    +
  4. +
  5. 设置解码的位置

    + +
    d.seek(0)
    +
    +
      +
    • 可以用来设置播放视频的位置,单位是秒
    • +
    +
  6. +
  7. 获取解码后的图像

    + +
    ctx = d.decode_video()
    +img = ctx.image()
    +
    +
      +
    • 每次调用都会返回一帧图像的上下文ctx,通过ctx.image()获取img。目前解码后只能支持输出NV21格式的图像
    • +
    +
  8. +
  9. 显示解码后的图像

    + +
    disp.show(img)
    +
    +
      +
    • 显示图像时使用ctx.duration_us()可以获取每帧图像的时长,单位是微秒
    • +
    +
  10. +
  11. 完成,更多Decoder的用法请看API文档

    +
  12. +
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/video/record.html b/maixpy/doc/zh/video/record.html new file mode 100644 index 00000000..d257dcca --- /dev/null +++ b/maixpy/doc/zh/video/record.html @@ -0,0 +1,534 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 录像 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 录像

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期版本作者更新内容
2024-05-201.0.0lxowalle + + 初版文档 + +
+
+
+ +
+
+ +

简介

+

本文档提供录像功能的使用方法

+

示例一

+

一个录制h265格式视频的示例

+ +
from maix import video, image, camera, app, time
+
+cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP)
+e = video.Encoder()
+f = open('/root/output.h265', 'wb')
+
+record_ms = 2000
+start_ms = time.ticks_ms()
+while not app.need_exit():
+    img = cam.read()
+    frame = e.encode(img)
+
+    print(frame.size())
+    f.write(frame.to_bytes())
+
+    if time.ticks_ms() - start_ms > record_ms:
+        app.set_exit_flag(True)
+
+

步骤:

+
    +
  1. 导入模块并初始化摄像头

    + +
    from maix import video, image, camera, app, time
    +cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP)
    +
    +
      +
    • camera.Camera()用来初始化摄像头, 这里初始化摄像头分辨率为640x480,注意目前Encoder只支持NV21格式,因此设置图像格式为image.Format.FMT_YVU420SP
    • +
    +
  2. +
  3. 初始化Encoder模块

    + +
    e = video.Encoder()
    +
    +
      +
    • video.Encoder()模块目前只支持处理image.Format.FMT_YVU420SP格式图像,支持h265h264编码, 默认为h265编码。如果你想使用h264编码,则可以修改初始化参数为video.Encoder(type=video.VideoType.VIDEO_H264_CBR)
    • +
    • 注意,同时只能存在一个编码器
    • +
    +
  4. +
  5. 编码摄像头的图像

    + +
    img = cam.read()
    +frame = e.encode(img)
    +
    +
      +
    • img = cam.read()读取摄像头图像并保存到img
    • +
    • frame = e.encode(img)img编码并保存结果到frame
    • +
    +
  6. +
  7. 保存编码结果到文件

    + +
    f = open('/root/output.h265', 'wb')
    +f.write(frame.to_bytes(False))
    +
    +
      +
    • f = open(xxx)打开并创建一个文件
    • +
    • f.write(frame.to_bytes(False))将编码结果frame转换为bytes类型,然后调用f.write()将数据写入文件中
    • +
    +
  8. +
  9. 定时2s退出

    + +
    record_ms = 2000
    +start_ms = time.ticks_ms()
    +while not app.need_exit():
    +    if time.ticks_ms() - start_ms > record_ms:
    +        app.set_exit_flag(True)
    +
    +
      +
    • 这里是定时退出的应用逻辑,自己看看吧
    • +
    +
  10. +
  11. 完成

    +
  12. +
+

示例二

+

另一个录制h265格式视频的示例

+ +
from maix import video, time, image, camera, app
+
+cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP)
+e = video.Encoder(capture = True)
+e.bind_camera(cam)
+
+f = open('/root/output.h265', 'wb')
+
+record_ms = 2000
+start_ms = time.ticks_ms()
+while not app.need_exit():
+    frame = e.encode()
+    img = e.capture()
+
+    print(frame.size())
+    f.write(frame.to_bytes(True))
+
+    if time.ticks_ms() - start_ms > record_ms:
+        app.set_exit_flag(True)
+
+

与示例一类似,区别在于调用了Encoder对象的bind_camera方法,Encoder主动取图,这样的优点是可以充分利用硬件特性,增加编码速率

+ +
e = video.Encoder(capture = True)
+e.bind_camera(cam)
+frame = e.encode()
+img = e.capture()
+
+
    +
  • e = video.Encoder(capture = True)使能了capture参数,让编码时可以抓取编码的图像
  • +
  • e.bind_camera(cam)将摄像头绑定到Encoder对象
  • +
  • frame = e.encode()编码时不需要再传入img,而是内部从摄像头取图
  • +
  • img = e.capture()Encoder对象中抓取编码的图像
  • +
+

转换为MP4格式

+

如果想要录制mp4格式视频,可以先录制好H265视频,再使用系统内的ffmpeg工具转换为mp4格式

+ +
import os
+
+# Pack h265 to mp4
+# /root/output.h265 是h265文件路径
+# /root/output.mp4  是mp4文件路径
+os.system('ffmpeg -loglevel quiet -i /root/output.h265 -c:v copy -c:a copy /root/output.mp4 -y')
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/video/rtmp_streaming.html b/maixpy/doc/zh/video/rtmp_streaming.html new file mode 100644 index 00000000..6ad7b766 --- /dev/null +++ b/maixpy/doc/zh/video/rtmp_streaming.html @@ -0,0 +1,517 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 视频流 RTMP 推流 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 视频流 RTMP 推流

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期版本作者更新内容
2024-05-201.0.0lxowalle + + 初版文档 + +
+
+
+ +
+
+ +

简介

+

本文档提供通过RTMP推送H264视频流的方法

+

使用方法

+ +
from maix import camera, time, rtmp, image
+
+cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP)
+
+# rtmp://192.168.0.30:1935/live/stream
+host = '192.168.0.30'
+port = 1935
+app = 'live'
+stream = 'stream'
+bitrate = 1000_000
+r = rtmp.Rtmp(host, port, app, stream, bitrate)
+r.bind_camera(cam)
+r.start()
+
+while True:
+    time.sleep(1)
+
+

步骤:

+
    +
  1. 导入camera, time, rtmp和image模块

    + +
    from maix import camera, time, rtmp, image
    +
    +
  2. +
  3. 初始化摄像头

    + +
    cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP) # 初始化摄像头,输出分辨率640x480 NV21格式
    +
    +
      +
    • 注意RTMP模块目前只支持NV21格式, 因此摄像头需要配置为NV21格式输出
    • +
    +
  4. +
  5. 初始化并启动Rtmp对象

    + +
    r = rtmp.Rtmp(host, port, app, stream, bitrate)
    +r.bind_camera(cam)
    +r.start()
    +
    +
      +
    • r = rtmp.Rtmp(host, port, app, stream, bitrate)用来创建一个Rtmp对象,其中host指rtmp服务器的ip地址或者域名,app指rtmp服务器开放的应用名,stream指rtmp流的名称,也可以作为本次推流的密钥
    • +
    • r.bind_camera(cam)用来绑定一个Camera对象, 绑定后原Camera对象将不能再使用
    • +
    • r.start()用来启动rtmp推流
    • +
    +
  6. +
  7. 完成

    +
  8. +
+

向Bilibili推流测试

+

启动bilibili直播

+
    +
  1. 点击直播

    +

    +
  2. +
  3. 点击开播设置

    +
  4. +
+

+
    +
  1. 通过我的直播间链接找到直播地址
  2. +
+

+
    +
  1. 往下翻,选择一个分类,再点击开始直播
  2. +
+

+
    +
  1. 执行步骤4后,可以看到
  2. +
+

+
    +
  • 直播服务器的地址为:rtmp://live-push.bilivideo.com/live-bvc
  • +
  • 串流密钥为:?streamname=live_xxxx&key=1fbfxxxxxxxxxxxxxffe0&schedule=rtmp&pflag=1
  • +
+

组合起来的rtmp推流地址就是:rtmp://live-push.bilivideo.com/live-bvc/?streamname=live_xxxx&key=1fbfxxxxxxxxxxxxxffe0&schedule=rtmp&pflag=1

+

运行RTMP客户端

+ +
from maix import camera, time, rtmp, image
+
+cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP)
+
+# rtmp://live-push.bilivideo.com/live-bvc/?streamname=live_xxxx&key=1fbfxxxxxxxxxxxxxffe0&schedule=rtmp&pflag=1
+host = 'live-push.bilivideo.com'
+port = 1935
+app = 'live-bvc'
+stream = '?streamname=live_xxxx&key=1fbfxxxxxxxxxxxxxffe0&schedule=rtmp&pflag=1'
+bitrate = 1000_000
+r = rtmp.Rtmp(host, port, app, stream, bitrate)
+r.bind_camera(cam)
+r.start()
+
+while True:
+    time.sleep(1)
+
+

上面拿到bilibili的推流地址为rtmp://live-push.bilivideo.com/live-bvc/?streamname=live_xxxx&key=1fbfxxxxxxxxxxxxxffe0&schedule=rtmp&pflag=1

+

可以拆出

+
    +
  1. 服务器地址为live-push.bilivideo.com
  2. +
  3. 端口号为1935,即没有端口号则默认为1935
  4. +
  5. 应用名为live-bvc
  6. +
  7. 流名称为?streamname=live_xxxx&key=1fbfxxxxxxxxxxxxxffe0&schedule=rtmp&pflag=1
  8. +
+

运行代码,就能在直播间看到maixcam的画面了,如果发现直播没有显示,可以尝试先关闭直播间,再重新打开直播间,再运行代码。

+

动手试试吧~

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/video/rtsp_streaming.html b/maixpy/doc/zh/video/rtsp_streaming.html new file mode 100644 index 00000000..b391f333 --- /dev/null +++ b/maixpy/doc/zh/video/rtsp_streaming.html @@ -0,0 +1,467 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 视频流 RTSP 推流 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 视频流 RTSP 推流

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期版本作者更新内容
2024-05-201.0.0lxowalle + + 初版文档 + +
+
+
+ +
+
+ +

简介

+

本文档提供通过RTSP推流摄像头画面的方法

+

使用方法

+ +
from maix import time, rtsp, camera, image
+
+cam = camera.Camera(2560, 1440, image.Format.FMT_YVU420SP)
+server = rtsp.Rtsp()
+server.bind_camera(cam)
+server.start()
+
+print(server.get_url())
+
+while True:
+    time.sleep(1)
+
+

步骤:

+
    +
  1. 导入time、rtsp、camera和image模块

    + +
    from maix import time, rtsp, camera, image
    +
    +
  2. +
  3. 初始化摄像头

    + +
    cam = camera.Camera(2560, 1440, image.Format.FMT_YVU420SP) # 初始化摄像头,输出分辨率2560x1440 NV21格式
    +
    +
      +
    • 注意RTSP模块目前只支持NV21格式, 因此摄像头需要配置为NV21格式输出
    • +
    +
  4. +
  5. 初始化并启动Rtsp对象

    + +
    server = rtsp.Rtsp()
    +server.bind_camera(cam)
    +server.start()
    +
    +
      +
    • server = rtsp.Rtsp()用来创建一个Rtsp对象
    • +
    • server.bind_camera(cam)用来绑定一个Camera对象, 绑定后原Camera对象将不能再使用
    • +
    • server.start()用来启动rtsp推流
    • +
    +
  6. +
  7. 打印当前RTSP流的URL

    + +
    print(server.get_url())
    +
    +
      +
    • server.get_url()用来获取RTSP播放地址
    • +
    +
  8. +
  9. 完成,运行上述代码后, 你可以通过VLC软件播放视频流, 已测试的VLC版本是3.0.20. 默认播放地址为rtsp://设备的ip:8554/live

    +
  10. +
+

OSD

+

通过OSD来实现画线与画框

+

TODO

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/vision/ai.html b/maixpy/doc/zh/vision/ai.html new file mode 100644 index 00000000..00aced74 --- /dev/null +++ b/maixpy/doc/zh/vision/ai.html @@ -0,0 +1,418 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy AI 视觉基本知识 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy AI 视觉基本知识

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期版本作者更新内容
2024-04-031.0.0neucrack + + 初版文档 + +
+
+
+ +
+
+ +

简介

+

如果没有 AI 基础,在学习 AI 前可以先看什么是人工智能(AI)和机器学习 了解一下 AI 的基本概念。

+

然后我们使用的视觉 AI 一般都是基于深度神经网络学习这个方法,有兴趣可以看看深度神经网络(DNN)基础知识

+

MaixPy 中使用视觉 AI

+

在 MaixPy 中使用视觉 AI 很简单,默认提供了常用的 AI 模型,不需要自己训练模型就可以直接使用,在MaixHub 模型库 中选择maixcam 就可以找到。

+

并且在底层已经封装好的 API,只需要简单的调用就可以实现。

+

如果你想训练自己的模型,也可以先从MaixHub 在线训练 开始,在线平台只需要点点点就能训练出模型,不需要购买昂贵的机器,不需要搭建复杂的开发环境,也不需要写代码,非常适合入门,也适合懒得翻代码的老手。

+

一般训练得到了模型文件,直接传输到设备上,调用 MaixPy 的 API 就可以使用了,具体的调用方法看后文。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/vision/apriltag.html b/maixpy/doc/zh/vision/apriltag.html new file mode 100644 index 00000000..7705f6d7 --- /dev/null +++ b/maixpy/doc/zh/vision/apriltag.html @@ -0,0 +1,532 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 识别 Apriltag 标签 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 识别 Apriltag 标签

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期版本作者更新内容
2024-04-031.0.0lxowalle + + 初版文档 + +
+
+
+ +
+
+ +

阅读本文前,确保已经知晓如何开发MaixCAM,详情请阅读快速开始

+

简介

+

本文介绍如何使用MaixPy来识别Apriltag标签

+

使用 MaixPy 识别Apriltag标签

+

MaixPy的 maix.image.Image中提供了find_apriltags方法,可以可以识别apriltag标签。

+

如何识别Apriltag标签

+

一个简单的示例,实现识别apriltag标签并画框

+ +
from maix import image, camera, display
+
+cam = camera.Camera()
+disp = display.Display()
+
+families = image.ApriltagFamilies.TAG36H11
+x_scale = cam.width() / 160
+y_scale = cam.height() / 120
+
+while 1:
+    img = cam.read()
+
+    new_img = img.resize(160, 120)
+    apriltags = new_img.find_apriltags(families = families)
+    for a in apriltags:
+        corners = a.corners()
+
+        for i in range(4):
+            corners[i][0] = int(corners[i][0] * x_scale)
+            corners[i][1] = int(corners[i][1] * y_scale)
+        x = int(a.x() * x_scale)
+        y = int(a.y() * y_scale)
+        w = int(a.w() * x_scale)
+        h = int(a.h() * y_scale)
+
+        for i in range(4):
+            img.draw_line(corners[i][0], corners[i][1], corners[(i + 1) % 4][0], corners[(i + 1) % 4][1], image.COLOR_RED)
+        img.draw_string(x + w, y, "id: " + str(a.id()), image.COLOR_RED)
+        img.draw_string(x + w, y + 15, "family: " + str(a.family()), image.COLOR_RED)
+
+    disp.show(img)
+
+

步骤:

+
    +
  1. 导入image、camera、display模块

    + +
    from maix import image, camera, display
    +
    +
  2. +
  3. 初始化摄像头和显示

    + +
    cam = camera.Camera()
    +disp = display.Display()
    +
    +
  4. +
  5. 从摄像头获取图片并显示

    + +
    while 1:
    +    img = cam.read()
    +    disp.show(img)
    +
    +
  6. +
  7. 调用find_apriltags方法识别摄像头图片中的apriltag标签

    + +
    new_img = img.resize(160, 120)
    +apriltags = new_img.find_apriltags(families = families)
    +
    +
      +
    • img是通过cam.read()读取到的摄像头图像
    • +
    • img.resize(160, 120)是用来将图像缩放得更小,用更小的图像来让算法计算得更快
    • +
    • new_img.find_apriltags(families = families)用来寻找apriltag标签,并将查询结果保存到apriltags,以供后续处理。其中families用来选择apriltag族,默认为image.ApriltagFamilies.TAG36H11
    • +
    +
  8. +
  9. 处理识别标签的结果并显示到屏幕上

    + +
    for a in apriltags:
    +    # 获取位置信息(并映射坐标到原图)
    +    x = int(a.x() * x_scale)
    +    y = int(a.y() * y_scale)
    +    w = int(a.w() * x_scale)
    +    corners = a.corners()
    +    for i in range(4):
    +        corners[i][0] = int(corners[i][0] * x_scale)
    +        corners[i][1] = int(corners[i][1] * y_scale)
    +
    +    # 显示
    +    for i in range(4):
    +        img.draw_line(corners[i][0], corners[i][1], corners[(i + 1) % 4][0], corners[(i + 1) % 4][1], image.COLOR_RED)
    +        img.draw_string(x + w, y, "id: " + str(a.id()), image.COLOR_RED)
    +        img.draw_string(x + w, y + 15, "family: " + str(a.family()), image.COLOR_RED)
    +        img.draw_string(x + w, y + 30, "rotation : " + str(180 * a.rotation() // 3.1415), image.COLOR_RED)
    +
    +
      +
    • 遍历apriltags的成员,apriltags是通过img.find_apriltags()扫描apriltag标签的结果,如果找不到标签则apriltags的成员为空
    • +
    • x_scaley_scale用来映射坐标,由于new_img是缩放后的图像,计算apriltag的坐标时需要经过映射后才能正常的画在原图img
    • +
    • a.corners()用来获取已扫描到的标签的四个顶点坐标,img.draw_line()利用这四个顶点坐标画出标签的形状
    • +
    • img.draw_string用来显示标签的内容,其中a.x()a.y()用来获取标签左上角坐标x和坐标y,a.id()用来获取标签的id,a.family()用来获取标签族类型,a.rotation()用来获取标签的旋转角度。
    • +
    +
  10. +
+

常用参数说明

+

列举常用参数说明,如果没有找到可以实现应用的参数,则需要考虑是否使用其他算法实现,或者基于目前算法的结果扩展所需的功能

+ + + + + + + + + + + + + + + + + + + + +
参数说明示例
roi设置算法计算的矩形区域,roi=[x, y, w, h],x,y表示矩形区域左上角坐标,w,h表示矩形区域的宽度和高度,默认为整张图片计算坐标为(50,50),宽和高为100的区域
img.find_apriltags(roi=[50, 50, 100, 100])
familiesapriltag标签家族类型扫描TAG36H11家族的标签
img.find_apriltags(families = image.ApriltagFamilies.TAG36H11)
+

本文介绍常用方法,更多 API 请看 API 文档的 image 部分。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/vision/body_key_points.html b/maixpy/doc/zh/vision/body_key_points.html new file mode 100644 index 00000000..8c90831e --- /dev/null +++ b/maixpy/doc/zh/vision/body_key_points.html @@ -0,0 +1,430 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 检测人体关键点姿态检测 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 检测人体关键点姿态检测

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

简介

+

使用 MaixPy 可以轻松检测人体关节的关键点的坐标,用在姿态检测比如坐姿检测,体感游戏输入等。

+

MaixPy 实现了基于 YOLOv8-Pose / YOLO11-Pose 的人体姿态检测,可以检测到人体17个关键点。

+

+

使用

+

使用 MaixPy 的 maix.nn.YOLOv8 或者 maix.nn.YOLO11 类可以轻松实现:

+ +
from maix import camera, display, image, nn, app
+
+detector = nn.YOLOv8(model="/root/models/yolov8n_pose.mud", dual_buff = True)
+# detector = nn.YOLO11(model="/root/models/yolo11n_pose.mud", dual_buff = True)
+
+cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
+dis = display.Display()
+
+while not app.need_exit():
+    img = cam.read()
+    objs = detector.detect(img, conf_th = 0.5, iou_th = 0.45, keypoint_th = 0.5)
+    for obj in objs:
+        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color = image.COLOR_RED)
+        msg = f'{detector.labels[obj.class_id]}: {obj.score:.2f}'
+        img.draw_string(obj.x, obj.y, msg, color = image.COLOR_RED)
+        detector.draw_pose(img, obj.points, 8 if detector.input_width() > 480 else 4, image.COLOR_RED)
+    dis.show(img)
+
+

另外代码也在MaixPy/examples/vision目录下可以找到。

+

可以看到因为用了YOLOv8-Pose 所以这里直接用了YOLOv8这个类,和YOLOv8物体检测模型只是模型文件不同, YOLO11同理,然后就是detect函数返回的结果多了points值,是一个int类型的list列表,一共17个点,按次序依次排列,比如第一个值是鼻子的 x 坐标, 第二个值是鼻子的 y 坐标,依次为:

+ +
1. 鼻子(Nose)
+2. 左眼(Left Eye)
+3. 右眼(Right Eye)
+4. 左耳(Left Ear)
+5. 右耳(Right Ear)
+6. 左肩(Left Shoulder)
+7. 右肩(Right Shoulder)
+8. 左肘(Left Elbow)
+9. 右肘(Right Elbow)
+10. 左手腕(Left Wrist)
+11. 右手腕(Right Wrist)
+12. 左髋(Left Hip)
+13. 右髋(Right Hip)
+14. 左膝(Left Knee)
+15. 右膝(Right Knee)
+16. 左脚踝(Left Ankle)
+17. 右脚踝(Right Ankle)
+
+

如果某些部位被遮挡,那么值为-1

+

更多输入分辨率模型

+

默认的模型是输入是320x224分辨率,如果你希望使用更大分辨率的模型,可以到 MaixHub 模型库下载并传输到设备使用:

+ +

分辨率越大理论上精度越高但是运行速度更低,根据你的使用场景选择,另外如果提供的分辨率不满足你的要求,你也可以自己到 YOLOv8-Pose / YOLO11-Pose 使用模型训练源码导出自己的 onnx 模型,然后转换为 MaixCAM 支持的模型(方法见后面的文章)。

+

dual_buff 双缓冲区加速

+

你可能注意到这里模型初始化使用了dual_buff(默认值就是 True),使能 dual_buff 参数可以加快运行效率,提高帧率,具体原理和使用注意点见 dual_buff 介绍

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/vision/camera.html b/maixpy/doc/zh/vision/camera.html new file mode 100644 index 00000000..657b4a7c --- /dev/null +++ b/maixpy/doc/zh/vision/camera.html @@ -0,0 +1,570 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 摄像头使用 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 摄像头使用

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
日期版本作者更新内容
2024-10-241.1.0neucrack + + 增加 USB 摄像头支持说明 + +
2024-08-211.0.1YWJ + + 修正文档部分bug,增加部分内容 + +
2024-04-031.0.0neucrack + + 初版文档 + +
+
+
+ +
+
+ +

简介

+

对于 MaixCAM 默认搭载了 GC4653 摄像头,或者可选的 OS04A10 摄像头或者全局快门摄像头,甚至是 HDMI 转 MIPI 模块,都可以直接用简单的 API 调用。

+

API 文档

+

本文介绍常用方法,更多 API 使用参考 maix.camera 模块的文档。

+

摄像头切换

+

目前支持的摄像头:

+
    +
  • GC4653:M12 通用镜头, 1/3" 传感器,画质清晰, 4M 像素。
  • +
  • OS04A10:M12 通用镜头,1/1.8" 大底传感器,画质超清, 4M像素。
  • +
  • OV2685:不支持镜头更换,1/5"传感器,2M 像素,画质最差,成本最低,一般不建议使用。
  • +
  • SC035HGS:黑白全局快门摄像头,30W黑白像素,适合拍摄高速物体。
  • +
+

系统会自动识别切换,只需把硬件换上即可使用。

+

获取摄像头的图像信息

+

使用 MaixPy 轻松获取:

+ +
from maix import camera
+
+cam = camera.Camera(640, 480)
+
+while 1:
+    img = cam.read()
+    print(img)
+
+

这里我们从maix模块导入camera模块,然后创建一个Camera对象,指定图像的宽度和高度。然后在一个循环中不断读取图像, 默认出的图为RGB格式,如果需要BGR格式,其它格式请看 API 文档。

+

你还可以获取灰度图像

+ +
from maix import camera, image
+cam = camera.Camera(640, 480, image.Format.FMT_GRAYSCALE)	# 设置输出灰度图像
+
+

还可以获取NV21图像

+ +
from maix import camera, image
+cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP)	# 设置输出NV21图像
+
+

注意:如果设置了很高的分辨率(例如2560x1440)时需要关闭MaixVision的在线浏览功能,否则可能会因为内存不足导致代码运行异常。

+

设置摄像头的帧率

+

目前摄像头支持30fps60fps80fps三种配置,由创建Camera对象时传入的widthheightfps参数来选择帧率,目前60/80fps下最大支持分辨率1280x72030fps下最大支持分辨率2560x1440

+

设置帧率为30帧

+ +
from maix import camera
+cam = camera.Camera(640, 480, fps=30)			# 设置帧率为30帧
+# or
+cam = camera.Camera(1920, 1280)             # 分辨率高于1280x720时帧率会设置为30帧
+
+

设置帧率为60帧

+ +
from maix import camera
+cam = camera.Camera(640, 480, fps=60)	        # 设置帧率为60帧
+# or
+cam = camera.Camera(640, 480)                  # 分辨率低于或等于1280x720时帧率会设置为80fps
+
+

设置帧率为80帧

+ +
from maix import camera
+cam = camera.Camera(640, 480, fps=80)	        # 设置帧率为80帧
+
+

注意:

+
    +
  1. 如果Camera传入的尺寸大于1280x720,例如写成camera.Camera(1920, 1080, fps=60),此时fps参数将会失效,帧率将保持在30fps
  2. +
  3. 60/80fps30fps的画面相比会有几个像素的偏移,在对视角有严格要求的应用下需要注意修正偏移。
  4. +
  5. 需要注意由于60/80fps30fps共用了isp配置,在某些环境下两种帧率下的画面画质会存在一些偏差。
  6. +
  7. 摄像头需要看体质,有些个体无法设置到80fps,会出现画面有奇怪的纹路,请换回正常的 60fps 使用。
  8. +
+

图像矫正

+

对于画面存在鱼眼等畸变的情况,可以使用Image对象下的lens_corr函数对图片进行畸变矫正。一般情况只需要调大和调小strength的值来将画面调整到合适效果即可。

+ +
from maix import camera, display,app,time
+
+cam = camera.Camera(320, 240)
+disp = display.Display()
+while not app.need_exit():
+    t = time.ticks_ms()
+    img = cam.read() 
+    img = img.lens_corr(strength=1.5)	# 调整strength的值直到画面不再畸变
+    disp.show(img)
+
+
+

注意由于是软件矫正,需要耗费一定时间,另外也可以直接用无畸变镜头(询问商家)从硬件层面解决。

+

跳过 开头的帧

+

摄像头初始化后的一小段时间,图像采集可能还没稳定,会出现奇怪的画面,可以通过skip_frames函数跳过开头的几帧:

+ +
cam = camera.Camera(640, 480)
+cam.skip_frames(30)           # 跳过开头的30帧
+
+

显示摄像头获取的图像

+

MaixPy 提供了display模块,可以方便的显示图像:

+ +
from maix import camera, display
+
+cam = camera.Camera(640, 480)
+disp = display.Display()
+
+while 1:
+    img = cam.read()
+    disp.show(img)
+
+

设置摄像头参数

+

设置曝光时间

+

注意设置曝光时间后,摄像头会切换到手动曝光模式,如果要切换回自动曝光模式需运行cam.exp_mode(0)

+ +
cam = camera.Camera()
+cam.exposure(1000)
+
+

设置增益

+

注意设置增益后,摄像头会切换到手动曝光模式,如果要切换回自动曝光模式需运行cam.exp_mode(0)。自定义的增益值只能在手动曝光模式下生效。

+ +
cam = camera.Camera()
+cam.gain(100)
+
+

设置白平衡

+ +
cam = camera.Camera()
+cam.awb_mode(1)			# 0,开启白平衡;1,关闭白平衡
+
+

设置亮度、对比度和饱和度

+ +
cam = camera.Camera()
+cam.luma(50)		    # 设置亮度,范围[0, 100]
+cam.constrast(50)		# 设置对比度,范围[0, 100]
+cam.saturation(50)		# 设置饱和度,范围[0, 100]
+
+

更改图片长宽

+ +
cam = camera.Camera(width=640, height=480)
+
+

+ +
cam = camera.Camera()
+cam.set_resolution(width=640, height=480)
+
+

使用 USB 摄像头

+

除了使用开发板自带的 MIPI 接口摄像头,你也可以外接 USB 摄像头。
+方法:

+
    +
  • 先在开发板设置里面USB设置中选择USB 模式HOST模式。如果没有屏幕,可以用examples/tools/maixcam_switch_usb_mode.py脚本进行设置。
  • +
  • maix.camera 模块目前(2024.10.24) 还不支持 USB 摄像头,不过你可以参考 OpenCV 使用 USB 摄像头
  • +
+ + +
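切换为 HOST 模式后,用 OpenCV 读取 USB 摄像头并显示到屏幕的大致写法如下(仅为示意,摄像头设备编号、分辨率以实际为准,且假设系统已带 opencv-python):

+ +

+import cv2
+from maix import display, image, app
+
+disp = display.Display()
+cap = cv2.VideoCapture(0)            # USB 摄像头设备编号,以实际枚举到的为准
+cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640)
+cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 480)
+
+while not app.need_exit():
+    ok, frame = cap.read()           # frame 为 BGR 格式的 numpy 数组
+    if not ok:
+        continue
+    img = image.cv2image(frame)      # 转成 maix.image.Image 后显示
+    disp.show(img)
+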
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/vision/classify.html b/maixpy/doc/zh/vision/classify.html new file mode 100644 index 00000000..661e7c9d --- /dev/null +++ b/maixpy/doc/zh/vision/classify.html @@ -0,0 +1,407 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 使用 AI 模型进行物体分类 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 使用 AI 模型进行物体分类

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

物体分类概念

+

比如眼前有两张图片,一张图里面是苹果,另一张是飞机,物体分类的任务就是把两张图分别依次输入给 AI 模型,模型会依次输出两个结果,一个是苹果,一个是飞机。

+

MaixPy 中使用物体分类

+

MaixPy 默认提供了 imagenet 数据集训练得到的 1000分类模型,可以直接使用:

+ +
from maix import camera, display, image, nn
+
+classifier = nn.Classifier(model="/root/models/mobilenetv2.mud", dual_buff = True)
+cam = camera.Camera(classifier.input_width(), classifier.input_height(), classifier.input_format())
+dis = display.Display()
+
+while 1:
+    img = cam.read()
+    res = classifier.classify(img)
+    max_idx, max_prob = res[0]
+    msg = f"{max_prob:5.2f}: {classifier.labels[max_idx]}"
+    img.draw_string(10, 10, msg, image.COLOR_RED)
+    dis.show(img)
+
+

效果视频:

+ +

这里使用了摄像头拍摄图像,然后传给 classifier进行识别,得出结果后,将结果显示在屏幕上。

+

更多 API 使用参考 maix.nn 模块的文档。

+

dual_buff 双缓冲区加速

+

你可能注意到这里模型初始化使用了dual_buff(默认值就是 True),使能 dual_buff 参数可以加快运行效率,提高帧率,具体原理和使用注意点见 dual_buff 介绍

+

使用 MaixHub 训练自己的分类模型

+

如果你想训练特定图像的分类模型,请到MaixHub 学习并训练分类模型,创建项目时选择分类模型,然后上传图片训练即可,无需搭建训练环境也无需花钱购买昂贵的GPU,快速一键训练。

+

离线训练自己的分类模型

+

离线训练需要自己搭建环境,请自行搜索 PyTorch 分类模型训练 Mobilenet等相关关键字进行参考。
+训练好模型后导出 onnx 格式的模型,然后参考 MaixCAM 模型转换文档 转换为 MaixCAM 支持的模型格式,最后使用上面的nn.Classifier类加载模型即可。

+

这里分类模型可以是 mobilenet 也可以是 其它模型比如 Resnet 等,模型转换时最好提取 softmax前一层作为最后的输出层,因为classifier.classify(img, softmax=True) 识别函数的softmax参数默认为True,即会对结果计算一次softmax,所以模型就不用softmax这一层了,当然如果模型包含了softmax层,也可以指定不再执行一遍softmaxclassifier.classify(img, softmax=False)

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/vision/custmize_model.html b/maixpy/doc/zh/vision/custmize_model.html new file mode 100644 index 00000000..6f2d8fa9 --- /dev/null +++ b/maixpy/doc/zh/vision/custmize_model.html @@ -0,0 +1,369 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

请看 MaixCAM 模型转换,以及在左边目录找到你要转换的模型文档,比如自定义 yolov5 模型

+ + +
+
+
+ + +
+
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/vision/customize_model_yolov5.html b/maixpy/doc/zh/vision/customize_model_yolov5.html new file mode 100644 index 00000000..278cc59f --- /dev/null +++ b/maixpy/doc/zh/vision/customize_model_yolov5.html @@ -0,0 +1,466 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 为 MaixCAM MaixPy 离线训练 YOLOv5 模型,自定义检测物体 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

为 MaixCAM MaixPy 离线训练 YOLOv5 模型,自定义检测物体

+ +
+
+
    + +
+
+
+
+ + + + +
+
+ + + +
+
+
+
+ + +
+
+ + +
+ + 更新历史 +
+ + + + + + + + + + + + + + + + + + + +
日期版本作者更新内容
2024-6-20v1.0neucrack + + 编写文档 + +
+
+
+ +
+
+ +

简介

+

默认官方提供了 80 种物体检测,如果不满足你的需求,可以自己训练检测的物体,两种方式:

+
    +
  • 使用 MaixHub 在线训练,方便快捷,无需购买服务器也无需搭建环境,点几下鼠标就完成。
  • +
  • 在自己的电脑或者服务器搭建训练环境训练。
  • +
+

前者好处是简单快速,后者是使用自己电脑,训练图片数量不受限制,但是后者难度会大非常多。

+

注意: 本文讲了如何自定义训练,但是有一些基础知识默认你已经拥有,如果没有请自行学习:

+
    +
  • 本文不会讲解如何安装训练环境,请自行搜索安装(Pytorch 环境安装)测试。
  • +
  • 本文不会讲解机器学习的基本概念、linux相关基础使用知识。
  • +
+

如果你觉得本文哪里需要改进,欢迎点击右上角编辑本文贡献并提交 文档 PR。

+

流程和本文目标

+

要想我们的模型能在 MaixPy (MaixCAM)上使用,需要经历以下过程:

+
    +
  • 搭建训练环境,本文略过,请自行搜索 pytorch 训练环境搭建。
  • +
  • 拉取 yolov5 源码到本地。
  • +
  • 准备数据集,并做成 yolov5 项目需要的格式。
  • +
  • 训练模型,得到一个 onnx 模型文件,也是本文的最终输出文件。
  • +
  • onnx模型转换成 MaixPy 支持的 MUD 文件,这个过程在MaixCAM 模型转换 一文中有详细介绍。
  • +
  • 使用 MaixPy 加载模型运行。
  • +
+

参考文章

+

因为是比较通用的操作过程,本文只给一个流程介绍,具体细节可以自行看 YOLOv5 官方代码和文档(推荐),以及搜索其训练教程,最终导出 onnx 文件即可。

+

这里有 MaixHub 的社区的几篇文章:

+ +

如果你有觉得讲得不错的文章欢迎修改本文并提交 PR。

+

YOLOv5 导出 ONNX 模型文件

+

YOLOv5 提供了导出选项,直接在yolov5目录下执行

+ +
python export.py --weights ../yolov5s.pt --include onnx --img 224 320
+
+

这里加载 pt 参数文件,转换成 onnx, 同时指定分辨率,注意这里 高在前,宽在后。
+模型训练的时候用的640x640,我们重新指定了分辨率方便提升运行速度,这里使用320x224的原因是和 MaixCAM 的屏幕比例比较相近方便显示,具体可以根据你的需求设置就好了。

+

MaixCAM MUD 文件

+

将 onnx 转换为 mud 格式的模型文件时,参照 MaixCAM 模型转换 即可,最终会得到一个mud文件和cvimodel文件,其中 mud 文件内容:

+ +
[basic]
+type = cvimodel
+model = yolov5s.cvimodel
+
+[extra]
+model_type = yolov5
+input_type = rgb
+mean = 0, 0, 0
+scale = 0.00392156862745098, 0.00392156862745098, 0.00392156862745098
+labels = person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair drier, toothbrush
+
+

根据你训练的内容替换参数即可,比如你训练检测 0~9 数字,那么只需要替换labels=0,1,2,3,4,5,6,7,8,9;运行模型时将两个文件(mud 和 cvimodel)放在同一个目录下,加载 mud 文件即可。
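转换完成后在 MaixPy 中的加载方式和官方模型一致,下面是一个简单示意(模型路径 /root/models/my_yolov5.mud 为假设的示例路径):

from maix import camera, display, image, nn, app

detector = nn.YOLOv5(model="/root/models/my_yolov5.mud")   # 你转换得到的 mud 文件
cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
dis = display.Display()

while not app.need_exit():
    img = cam.read()
    objs = detector.detect(img, conf_th=0.5, iou_th=0.45)
    for obj in objs:
        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
        img.draw_string(obj.x, obj.y, f"{detector.labels[obj.class_id]}: {obj.score:.2f}", color=image.COLOR_RED)
    dis.show(img)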

+

上传分享到 MaixHub

+

MaixHub 模型库 上传并分享你的模型,可以多提供几个分辨率供大家选择。

diff --git a/maixpy/doc/zh/vision/customize_model_yolov8.html b/maixpy/doc/zh/vision/customize_model_yolov8.html (new file)

为 MaixCAM MaixPy 离线训练 YOLO11/YOLOv8 模型,自定义检测物体、关键点检测

更新历史
日期 | 版本 | 作者 | 更新内容
2024-10-10 | v2.0 | neucrack | 增加 YOLO11 支持
2024-06-21 | v1.0 | neucrack | 编写文档
+
+
+ +
+
+ +

简介

+

默认官方提供了 80 种物体检测,如果不满足你的需求,可以自己训练检测的物体,可以在自己的电脑或者服务器搭建训练环境训练。

+

YOLOv8 / YOLO11 不光支持检测物体,还有 yolov8-pose / YOLO11-pose 支持关键点检测,除了官方的人体关键点,你还可以制作自己的关键点数据集来训练检测指定的物体和关键点。

+

因为 YOLOv8 和 YOLO11 主要是修改了内部网络,预处理和后处理都是一样的,所以 YOLOv8 和 YOLO11 的训练转换步骤相同,只是输出节点的名称不一样。

+

注意: 本文讲了如何自定义训练,但是有一些基础知识默认你已经拥有,如果没有请自行学习:

+
    +
  • 本文不会讲解如何安装训练环境,请自行搜索安装(Pytorch 环境安装)测试。
  • +
  • 本文不会讲解机器学习的基本概念、linux相关基础使用知识。
  • +
+

如果你觉得本文哪里需要改进,欢迎点击右上角编辑本文贡献并提交 文档 PR。

+

流程和本文目标

+

要想我们的模型能在 MaixPy (MaixCAM)上使用,需要经历以下过程:

+
    +
  • 搭建训练环境,本文略过,请自行搜索 pytorch 训练环境搭建。
  • +
  • 拉取 YOLO11/YOLOv8 源码到本地。
  • +
  • 准备数据集,并做成 YOLO11 / YOLOv8 项目需要的格式。
  • +
  • 训练模型,得到一个 onnx 模型文件,也是本文的最终输出文件。
  • +
  • onnx模型转换成 MaixPy 支持的 MUD 文件,这个过程在MaixCAM 模型转换 一文中有详细介绍。
  • +
  • 使用 MaixPy 加载模型运行。
  • +
+

参考文章

+

因为是比较通用的操作过程,本文只给一个流程介绍,具体细节可以自行看 YOLO11 / YOLOv8 官方代码和文档(推荐),以及搜索其训练教程,最终导出 onnx 文件即可。

+

如果你有觉得讲得不错的文章欢迎修改本文并提交 PR。

+

YOLO11 / YOLOv8 导出 onnx 模型

+

ultralytics 目录下创建一个export_onnx.py 文件

+ +
from ultralytics import YOLO
+import sys
+
+print(sys.path)
+net_name = sys.argv[1] # yolov8n.pt yolov8n-pose.pt # https://docs.ultralytics.com/models/yolov8/#supported-tasks-and-modes
+input_width = int(sys.argv[2])
+input_height = int(sys.argv[3])
+
+# Load a model
+model = YOLO(net_name)  # load an official model
+# model = YOLO("path/to/best.pt")  # load a custom model
+
+# Predict with the model
+results = model("https://ultralytics.com/images/bus.jpg")  # predict on an image
+path = model.export(format="onnx", imgsz=[input_height, input_width])  # export the model to ONNX format
+print(path)
+
+
+

然后执行python export_onnx.py yolov8n.pt 320 224 就能导出 onnx 模型了,这里重新指定了输入分辨率,模型训练的时候用的640x640,我们重新指定了分辨率方便提升运行速度,这里使用320x224的原因是和 MaixCAM 的屏幕比例比较相近方便显示,具体可以根据你的需求设置就好了。

+

转换为 MaixCAM 支持的模型以及 mud 文件

+

MaixPy/MaixCDK 目前(2024.10.10)支持 YOLOv8 / YOLO11 检测、YOLOv8-pose / YOLO11-pose 关键点检测,以及 YOLOv8-seg / YOLO11-seg 图像分割三种模型。

+

按照MaixCAM 模型转换 进行模型转换。

+

注意模型输出节点的选择:

+
    +
  • 检测模型:
      +
    • YOLOv8 提取 onnx 的 /model.22/dfl/conv/Conv_output_0,/model.22/Sigmoid_output_0 这两个输出。
    • +
    • YOLO11 提取/model.23/dfl/conv/Conv_output_0,/model.23/Sigmoid_output_0输出。
    • +
    +
  • +
  • 关键点检测:
      +
    • YOLOv8-pose 提取/model.22/dfl/conv/Conv_output_0,/model.22/Sigmoid_output_0,/model.22/Concat_output_0这三个输出。
    • +
    • YOLO11-pose 提取/model.23/dfl/conv/Conv_output_0,/model.23/Sigmoid_output_0,/model.23/Concat_output_0这三个输出。
    • +
    +
  • +
  • 图像分割:
      +
    • YOLOv8-seg 提取 /model.22/dfl/conv/Conv_output_0,/model.22/Sigmoid_output_0,/model.22/Concat_output_0,output1 这四个输出。
    • +
    • YOLO11-seg 提取 /model.23/dfl/conv/Conv_output_0,/model.23/Sigmoid_output_0,/model.23/Concat_output_0,output1四个输出。
    • +
    +
  • +
+

+

对于物体检测,mud 文件为(YOLO11 model_type 改为 yolo11)

+ +
[basic]
+type = cvimodel
+model = yolov8n.cvimodel
+
+[extra]
+model_type = yolov8
+input_type = rgb
+mean = 0, 0, 0
+scale = 0.00392156862745098, 0.00392156862745098, 0.00392156862745098
+labels = person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair drier, toothbrush
+
+

根据你训练的对象替换labels即可。

+

对于关键点检测(yolov8-pose), mud 文件为(YOLO11 model_type 改为 yolo11):

+ +
[basic]
+type = cvimodel
+model = yolov8n_pose.cvimodel
+
+[extra]
+model_type = yolov8
+type = pose
+input_type = rgb
+mean = 0, 0, 0
+scale = 0.00392156862745098, 0.00392156862745098, 0.00392156862745098
+labels = person
+
+

官方默认的是人体姿态关键点检测,所以labels只有一个 person,根据你检测的物体替换即可。
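关键点模型的加载和运行方式与检测模型类似,结果中会多出关键点坐标(obj.points),可以用 draw_pose 画出来,下面是一个简单示意(模型路径为假设的示例,按你转换得到的文件修改):

from maix import camera, display, image, nn, app

detector = nn.YOLOv8(model="/root/models/my_yolov8n_pose.mud")  # 你转换得到的关键点检测模型
cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
dis = display.Display()

while not app.need_exit():
    img = cam.read()
    objs = detector.detect(img, conf_th=0.5, iou_th=0.45, keypoint_th=0.5)
    for obj in objs:
        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
        detector.draw_pose(img, obj.points, 2, image.COLOR_RED)   # 画出每个关键点
    dis.show(img)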

+

对于图像分割(yolov8-seg), mud 文件(YOLO11 model_type 改为 yolo11):

+ +
[basic]
+type = cvimodel
+model = yolo11n-seg_320x224_int8.cvimodel
+
+[extra]
+model_type = yolov8
+input_type = rgb
+type = seg
+mean = 0, 0, 0
+scale = 0.00392156862745098, 0.00392156862745098, 0.00392156862745098
+labels = person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair drier, toothbrush
+
+

上传分享到 MaixHub

+

MaixHub 模型库 上传并分享你的模型,可以多提供几个分辨率供大家选择。

diff --git a/maixpy/doc/zh/vision/display.html b/maixpy/doc/zh/vision/display.html (new file)

MaixCAM MaixPy 屏幕使用

更新历史
日期 | 版本 | 作者 | 更新内容
2024-03-31 | 1.0.0 | neucrack | 初版文档
+
+
+ +
+
+ +

简介

+

MaixPy 提供了display模块,可以将图像显示到屏幕上,同时,也可以将图像发送到 MaixVision 显示,方便调试和开发。

+

API 文档

+

本文介绍常用方法,更多 API 请看 API 文档的 display 部分。

+

使用屏幕

+
    +
  • 导入display模块:
  • +
+ +
from maix import display
+
+
    +
  • 创建一个Display对象:
  • +
+ +
disp = display.Display()
+
+
    +
  • 显示图像:
  • +
+ +
disp.show(img)
+
+

这里img对象是maix.image.Image对象,可以通过camera模块的read方法获取,也可以通过image模块的load方法加载文件系统中的图像,也可以通过image模块的Image类创建一个空白图像。

+

比如:

+ +
from maix import image, display
+
+disp = display.Display()
+img = image.load("/root/dog.jpg")
+disp.show(img)
+
+

这里需要先把dog.jpg文件传到设备的/root目录下。

+

显示文字:

+ +
from maix import image, display
+
+disp = display.Display()
+img = image.Image(320, 240)
+img.draw_rect(0, 0, disp.width(), disp.height(), color=image.Color.from_rgb(255, 0, 0), thickness=-1)
+img.draw_rect(10, 10, 100, 100, color=image.Color.from_rgb(255, 0, 0))
+img.draw_string(10, 10, "Hello MaixPy!", color=image.Color.from_rgb(255, 255, 255))
+disp.show(img)
+
+

从摄像头读取图像并显示:

+ +
from maix import camera, display, app
+
+disp = display.Display()
+cam = camera.Camera(320, 240)
+while not app.need_exit():
+    img = cam.read()
+    disp.show(img)
+
+
+

这里用了一个while not app.need_exit(): 是方便程序在其它地方调用app.set_exit_flag()方法后退出循环。

+
+

调整背光亮度

+

在系统的设置应用中可以手动调整背光亮度,如果你想在程序中调整背光亮度,可以使用set_backlight方法,参数就是亮度百分比,取值范围是 0-100:

+ +
disp.set_backlight(50)
+
+

注意,程序退出回到应用选择界面后会自动恢复到系统设置的背光亮度。

+

显示到 MaixVision

+

在使用 MaixVision 运行代码时,能够将图像显示到 MaixVision 上,方便调试和开发。

+

在调用show方法时,会自动压缩图像并发送到 MaixVision 显示。

+

当然,如果你没有屏幕,或者为了节省内存不想初始化屏幕,也可以直接调用 maix.display 模块的send_to_maixvision方法发送图像到 MaixVision 显示。

+ +
from maix import image,display
+
+img = image.Image(320, 240)
+disp = display.Display()
+
+img.draw_rect(0, 0, img.width(), img.height(), color=image.Color.from_rgb(255, 0, 0), thickness=-1)
+img.draw_rect(10, 10, 100, 100, color=image.Color.from_rgb(255, 0, 0))
+img.draw_string(10, 10, "Hello MaixPy!", color=image.Color.from_rgb(255, 255, 255))
+display.send_to_maixvision(img)
+
+

更换其它型号屏幕

+

如果想换不同尺寸的屏幕,可以到商城咨询购买。
+对于 MaixCAM,目前支持 4 款屏幕:

+
    +
  • 2.3寸 552x368 分辨率电容触摸屏: MaixCAM 带的屏幕。
  • +
  • 2.4寸 640x480 分辨率电容触摸屏: MaixCAM-Pro 带的屏幕。
  • +
  • 5寸 854x480 分辨率无触摸屏: 注意无触摸,类似手机屏幕大小。
  • +
  • 7寸 1280x800 分辨率电容触摸屏:7寸大屏,适合更多需要固定屏幕观看场景。
  • +
+

不同屏幕的刷新图像时间差别在 1~5 毫秒,差别不是很大,主要的区别在于分辨率变大后图像处理耗时也会相应增加。

+

更换屏幕需要同时修改配置文件,否则可能刷新时序不同会导致烧屏(屏幕留下显示过的影子),所以需要注意,最好严格按照下面的步骤操作,如果出现了烧屏的问题也不要紧张,断电放置一晚上一般会恢复。

+
    +
  • 按照烧录系统的文档烧录系统,烧录完成后会有 U 盘出现。
  • +
  • 打开 U 盘内容,看到有一个 uEnv.txt 文件。
  • +
  • 编辑uEnv.txt文件,修改pannel键值,取值如下:
      +
    • 2.3寸(MaixCAM 自带屏幕):st7701_hd228001c31
    • +
    • 2.4寸(MaixCAM-Pro 自带屏幕): st7701_lct024bsi20
    • +
    • 5寸:st7701_dxq5d0019_V0 早期(2023年)测试屏幕st7701_dxq5d0019b480854
    • +
    • 7寸:mtd700920b,早期(2023年)测试屏幕用 zct2133v1
    • +
    +
  • +
  • 保存uEnv.txt,并且点击弹出 U 盘,不要直接断电,否则可能文件丢失。
  • +
  • 按下板子的reset按键,或者重新上电启动。
  • +
+

以上的方式最保险,保证上电前已经设置好了屏幕型号,如果你已经烧录好系统了,也可以修改系统的/boot/uEnv.txt文件然后重启。

diff --git a/maixpy/doc/zh/vision/dual_buff.html b/maixpy/doc/zh/vision/dual_buff.html (new file)

MaixCAM MaixPy MaixCAM 模型运行 dual_buff 模式介绍

简介

+

细心的你可能注意到模型运行相关的代码初始化时有一个参数dual_buff=True。
+比如 YOLOv5

+ +
from maix import camera, display, image, nn, app
+
+detector = nn.YOLOv5(model="/root/models/yolov5s.mud", dual_buff=True)
+# detector = nn.YOLOv8(model="/root/models/yolov8n.mud", dual_buff=True)
+
+cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
+dis = display.Display()
+
+while not app.need_exit():
+    img = cam.read()
+    objs = detector.detect(img, conf_th = 0.5, iou_th = 0.45)
+    for obj in objs:
+        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color = image.COLOR_RED)
+        msg = f'{detector.labels[obj.class_id]}: {obj.score:.2f}'
+        img.draw_string(obj.x, obj.y, msg, color = image.COLOR_RED)
+    dis.show(img)
+
+

一般来说这个参数默认就是True,除非手动设置dual_buff=False才会关闭 dual_buff功能。

+

使能这个功能后运行的效率会提升,即帧率会提升(以上代码假设摄像头的帧率没有限制的情况下,在 MaixCAM 上会减少循环一半的时间即帧率翻倍)。
+但是也有缺点,detect函数返回的结果是上一次调用detect函数的图的结果,所以结果和输入会有一帧的时间差,如果你希望detect出来的结果就是输入的img的结果而不是上一帧的结果,请禁用这个功能;另外由于准备了双份缓冲区,也会加大内存的使用,如果使用时发现内存不足,也需要禁用这个功能。
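如果你的场景要求 detect 的结果必须对应当前传入的这一帧,或者内存比较紧张,初始化时关闭即可,例如:

from maix import nn

# 关闭双缓冲:结果与当前帧严格对应,同时减少一份缓冲区的内存占用
detector = nn.YOLOv5(model="/root/models/yolov5s.mud", dual_buff=False)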

+

原理

+

模型检测物体分为了几步:

+
    +
  • 获取图像
  • +
  • 图像预处理
  • +
  • 模型运行
  • +
  • 结果后处理
  • +
+

其中只有模型运行这一步是在硬件 NPU 上运行的,其它步骤都在 CPU 上运行。

+

如果dual_buff设置为False,在detect的时候,CPU 先预处理(此时 NPU 空闲),然后给 NPU 运算(此时 CPU 空闲等待 NPU 运算结束),然后 CPU 后处理(NPU 空闲),整个过程是线性的,比较简单。
但是这里发现了问题,就是 CPU 和 NPU 两者总有一个空闲着的,当加了dual_buff=True,CPU 预处理后交给 NPU 运算,此时 CPU 不再等待 NPU 出结果,而是直接退出detect函数进行下一次摄像头读取和预处理,等 NPU 运算完成后,CPU 已经准备好了下一次的数据直接交给 NPU 继续运算,不给 NPU 喘息的机会,这样就充分利用了 CPU 和 NPU,高效地同时进行运算。

+

不过这里也需要注意,摄像头帧率如果不够高也会限制整体帧率。

diff --git a/maixpy/doc/zh/vision/face_detection.html b/maixpy/doc/zh/vision/face_detection.html (new file)

MaixCAM MaixPy 人脸检测和关键点检测

简介

+

人脸检测在很多地方都能用到,比如是为人脸识别提供人脸检测这一步骤,或者是人脸跟踪相关的应用等等。

+

这里提供的人脸检测不光可以检测到人脸,还能检测到 5 个关键点,包括两个眼睛,一个鼻子,一张嘴巴的两个嘴角。

+

face detection

+

MaixPy 中使用人脸检测

+

MaixPy 官方提供了三种人脸检测模型,分别来自开源项目 face detector 1MB with landmark、Retinaface 以及 YOLOv8-face。

+

这三种模型都可以用,YOLOv8-face 效果比较好但是速度略微慢一些,可以自己实际测试选择使用。

+

使用YOLOv8-face:(需要 MaixPy 版本 >= 4.3.8)

+ +
from maix import camera, display, image, nn, app
+
+detector = nn.YOLOv8(model="/root/models/yolov8n_face.mud", dual_buff = True)
+
+cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
+dis = display.Display()
+
+while not app.need_exit():
+    img = cam.read()
+    objs = detector.detect(img, conf_th = 0.5, iou_th = 0.45, keypoint_th = 0.5)
+    for obj in objs:
+        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color = image.COLOR_RED)
+        msg = f'{detector.labels[obj.class_id]}: {obj.score:.2f}'
+        img.draw_string(obj.x, obj.y, msg, color = image.COLOR_RED)
+        detector.draw_pose(img, obj.points, 2, image.COLOR_RED)
+    dis.show(img)
+
+

另外两种模型使用方法:
下面代码中有一行被注释掉了,两行分别对应加载 Retinaface 模型和 FaceDetector 模型,根据你下载的模型选择使用哪一行代码即可。

+ +
from maix import camera, display, image, nn, app
+import math
+
+
+detector = nn.Retinaface(model="/root/models/retinaface.mud")
+# detector = nn.FaceDetector(model="/root/models/face_detector.mud")
+
+cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
+dis = display.Display()
+
+while not app.need_exit():
+    img = cam.read()
+    objs = detector.detect(img, conf_th = 0.4, iou_th = 0.45)
+    for obj in objs:
+        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color = image.COLOR_RED)
+        radius = math.ceil(obj.w / 10)
+        img.draw_keypoints(obj.points, image.COLOR_RED, size = radius if radius < 5 else 4)
+    dis.show(img)
+
+
+

模型下载和其它分辨率模型

+

下载模型,下载的压缩包里面有多个分辨率可以选择,分辨率越高越精准但耗时更长:

+ +

dual_buff 双缓冲区加速

+

你可能注意到这里模型初始化使用了dual_buff(默认值就是 True),使能 dual_buff 参数可以加快运行效率,提高帧率,具体原理和使用注意点见 dual_buff 介绍

diff --git a/maixpy/doc/zh/vision/face_recognition.html b/maixpy/doc/zh/vision/face_recognition.html (new file)

MaixCAM MaixPy 人脸识别

人脸识别简介

+

face_recognize

+

人脸识别就是识别当前画面中的人脸的位置以及是谁。
+所以人脸识别除了要检测到人脸,一般会有一个库来保存认识的人和不认识的人。

+

识别原理

+
    +
  • 使用 AI 模型检测人脸,获得坐标和五官的坐标。
  • +
  • 利用五官的坐标仿射变换将图中的脸拉正对齐到标准脸的样子,方便模型提取脸的特征。
  • +
  • 使用特征提取模型提取脸的特征值。
  • +
  • 与库中记录的人脸特征值进行对比(计算保存的和当前画面中的脸的特征值的余弦距离,得出最小的距离的库中的人脸,小于设定的阈值就认为当前画面中就是这个库中的人)
  • +
+

MaixPy 使用

+

MaixPy 的 maix.nn 模块中提供了人脸识别的 API,可以直接使用,模型也内置了,也可以到 MaixHub 模型库 下载(筛选选择对应的硬件平台,比如 maixcam)。

+

识别:

+ +
from maix import nn, camera, display, image
+import os
+import math
+
+recognizer = nn.FaceRecognizer(detect_model="/root/models/retinaface.mud", feature_model = "/root/models/face_feature.mud", dual_buff=True)
+if os.path.exists("/root/faces.bin"):
+    recognizer.load_faces("/root/faces.bin")
+cam = camera.Camera(recognizer.input_width(), recognizer.input_height(), recognizer.input_format())
+dis = display.Display()
+
+while 1:
+    img = cam.read()
+    faces = recognizer.recognize(img, 0.5, 0.45, 0.8)
+    for obj in faces:
+        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color = image.COLOR_RED)
+        radius = math.ceil(obj.w / 10)
+        img.draw_keypoints(obj.points, image.COLOR_RED, size = radius if radius < 5 else 4)
+        msg = f'{recognizer.labels[obj.class_id]}: {obj.score:.2f}'
+        img.draw_string(obj.x, obj.y, msg, color = image.COLOR_RED)
+    dis.show(img)
+
+

第一次运行这个代码会发现能检测到人脸,但是都不认识,需要我们进入添加人脸模式学习人脸才行。

+
+

这里 recognizer.labels[0] 默认就是unknown,后面每添加一个人脸就会自动给 labels 增加一个。

+
+

比如可以在用户按下按键的时候学习人脸:

+ +
faces = recognizer.recognize(img, 0.5, 0.45, True)
+for face in faces:
+    print(face)
+    # 这里考虑到了一个画面中有多个人脸的情况, obj.class_id 为 0 代表是没有录入的人脸
+    # 这里写你自己的逻辑
+    #   比如可以在这里根据 face 的 class_id 和坐标决定要不要添加到库里面,以及可以做用户交互逻辑,比如按下按钮才录入等
+    recognizer.add_face(face, label) # label 是要给人脸取的标签(名字)
+recognizer.save_faces("/root/faces.bin")
+
+

完整例程

+

这里提供一个按键录入未知人脸,以及人脸识别的例程,可以在MaixPy 的 example 目录 找到nn_face_recognize.py

+

dual_buff 双缓冲区加速

+

你可能注意到这里模型初始化使用了dual_buff(默认值就是 True),使能 dual_buff 参数可以加快运行效率,提高帧率,具体原理和使用注意点见 dual_buff 介绍

+

更换其它默认识别模型

+

这里识别模型(区分不同人)用了 mobilenetv2 模型,如果不满足精度要求,可以更换成其它模型,比如 insight face resnet50 模型,当然你也可以自己训练或者找其它训练好的模型,转换成 MaixCAM 支持的模型即可,转换方法看MaixCAM 模型转换文档,mud 文件参考已有的文件写即可。

diff --git a/maixpy/doc/zh/vision/find_blobs.html b/maixpy/doc/zh/vision/find_blobs.html (new file)

MaixCAM MaixPy 寻找色块

更新历史
日期 | 版本 | 作者 | 更新内容
2024-04-03 | 1.0.0 | neucrack | 初版文档
2024-04-03 | 1.0.1 | lxowalle | 添加寻找色块的详细用法
+
+
+ +
+
+ +

阅读本文前,确保已经知晓如何开发MaixCAM,详情请阅读快速开始

+

简介

+

本文将介绍如何使用MaixPy来寻找色块,以及如何使用MaixCam的默认应用程序寻找色块。

+

在视觉应用中,寻找色块是一个非常常见的需求,比如机器人找色块,自动化生产线找色块等等,即需要识别画面中的特定的颜色区域,获取这个区域的位置和大小等信息。

+

使用 MaixPy 寻找色块

+

MaixPy的 maix.image.Image中提供了find_blobs方法,可以方便的找色块。

+

如何寻找色块

+

一个简单的示例,实现寻找色块并画框

+ +
from maix import image, camera, display
+
+cam = camera.Camera(320, 240)
+disp = display.Display()
+
+# 根据色块颜色选择对应配置
+thresholds = [[0, 80, 40, 80, 10, 80]]      # red
+# thresholds = [[0, 80, -120, -10, 0, 30]]    # green
+# thresholds = [[0, 80, 30, 100, -120, -60]]  # blue
+
+while 1:
+    img = cam.read()
+    blobs = img.find_blobs(thresholds, pixels_threshold=500)
+    for blob in blobs:
+        img.draw_rect(blob[0], blob[1], blob[2], blob[3], image.COLOR_GREEN)
+    disp.show(img)
+
+

步骤:

+
    +
  1. 导入image、camera、display模块

    + +
    from maix import image, camera, display
    +
    +
  2. +
  3. 初始化摄像头和显示

    + +
    cam = camera.Camera(320, 240)	# 初始化摄像头,输出分辨率320x240 RGB格式
    +disp = display.Display()
    +
    +
  4. +
  5. 从摄像头获取图片并显示

    + +
    while 1:
    +    img = cam.read()
    +    disp.show(img)
    +
    +
  6. +
  7. 调用find_blobs方法寻找摄像头图片中的色块,并画到屏幕上

    + +
    blobs = img.find_blobs(thresholds, pixels_threshold=500)
    +for blob in blobs:
    +    img.draw_rect(blob[0], blob[1], blob[2], blob[3], image.COLOR_GREEN)
    +
    +
      +
    • img是通过cam.read()读取到的摄像头图像,当初始化的方式为cam = camera.Camera(320, 240)时,img对象是一张分辨率为320x240的RGB图。
    • +
    • img.find_blobs用来寻找色块, thresholds 是一个颜色阈值列表,每个元素是一个颜色阈值,同时找到多个阈值就传入多个,每个颜色阈值的格式为 [L_MIN, L_MAX, A_MIN, A_MAX, B_MIN, B_MAX],这里的 LABLAB颜色空间的三个通道,L 通道是亮度,A 通道是红绿通道,B 通道是蓝黄通道。pixels_threshold是一个像素点数量的阈值,用来过滤一些不需要的小色块。
    • +
    • img.draw_rect用来画色块框,blob[0]、blob[1]、blob[2]、blob[3]分别代表色块左上角坐标x、色块左上角坐标y、色块宽度w和色块高度h。
    • +
    +
  8. +
+

常用参数说明

+

列举常用参数说明,如果没有找到可以实现应用的参数,则需要考虑是否使用其他算法实现,或者基于目前算法的结果扩展所需的功能

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
参数说明示例
thresholds基于lab颜色空间的阈值,threshold=[[l_min, l_max, a_min, a_max, b_min, b_max]],分别表示:
亮度范围为[l_min, l_max]
绿色到红色的分量范围为[a_min, a_max]
蓝色到黄色的分量范围为[b_min, b_max]
可同时设置多个阈值
设置两个阈值来检测红色和绿色
img.find_blobs(threshold=[[0, 80, 40, 80, 10, 80], [0, 80, -120, -10, 0, 30]])
红色阈值为[0, 80, 40, 80, 10, 80]
绿色阈值为[0, 80, -120, -10, 0, 30]
invert使能阈值反转,使能后传入阈值与实际阈值相反,默认为False使能阈值反转
img.find_blobs(invert=True)
roi设置算法计算的矩形区域,roi=[x, y, w, h],x,y表示矩形区域左上角坐标,w,h表示矩形区域的宽度和高度,默认为整张图片计算坐标为(50,50),宽和高为100的区域
img.find_blobs(roi=[50, 50, 100, 100])
area_threshold过滤像素面积小于area_threshold的色块,单位为像素点,默认为10。该参数可用于过滤一些无用的小色块过滤面积小于1000的色块
img.find_blobs(area_threshold=1000)
pixels_threshold过滤有效像素点小于pixels_threshold的色块,默认为10。该参数可用于过滤一些无用的小色块过滤有效像素点小于1000的色块
img.find_blobs(pixels_threshold=1000)
+
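这些参数可以组合使用,比如只在指定区域内找红色色块并过滤掉过小的色块(阈值沿用上面示例中的红色阈值,数值按你的实际场景调整):

blobs = img.find_blobs([[0, 80, 40, 80, 10, 80]], roi=[50, 50, 100, 100], pixels_threshold=500)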

本文介绍常用方法,更多 API 请看 API 文档的 image 部分。

+

离线设置阈值

+

为了快速验证寻找色块的功能,可以先使用MaixCam提供的寻找色块应用程序来体验寻找色块的效果。

+

演示

+

打开设备,选择找色块应用,然后在下方选择要识别的颜色,或者自定义颜色,即可以识别到对应的颜色了,在下方设置栏会显示当前设置的阈值范围,同时串口也会输出识别到的坐标和颜色信息。

+

+

源码地址

+

快速使用

+

使用默认阈值

+

寻找色块APP提供了redgreenblueuser四种配置,其中redgreenblue用来寻找红色绿色蓝色的色块,user自定义的阈值在程序退出时会保存下来,下次打开应用时会加载上一次调试的阈值。快速体验时通过点击界面下方按钮即可切换到对应配置,APP界面参考如下:

+

+

快速调试阈值

+

操作方法:

+
    +
  1. 摄像头对准需要寻找的物体,点击屏幕上的目标物体,此时左侧会显示该物体对应颜色的矩形框,并显示该物体颜色的LAB值。
  2. +
  3. 点击出现的矩形框,系统将会自动设置LAB阈值,此时画面将会画出该物体边缘。
  4. +
+

这个方法优点是方便,快捷,可以很快的设置阈值并找到对应的色块。缺点是还不够精确,可以在下一步中手动微调。

+

手动微调阈值

+

操作方法:

+
    +
  1. 点击左下角选项图标,进入配置模式

    +
  2. +
  3. 摄像头对准需要寻找的物体,点击屏幕上的目标物体,此时左侧会显示该物体对应颜色的矩形框,并显示该物体颜色的LAB值

    +
  4. +
  5. 点击下方选项L Min,L Max,A Min,A Max,B Min,B Max,点击后右侧会出现滑动条来设置该选项值。这些值分别对应LAB颜色格式的L通道、A通道和B通道的最小值和最大值

    +
  6. +
  7. 参考步骤2计算的物体颜色的LAB值,将L Min,L Max,A Min,A Max,B Min,B Max调整到合适的值,即可识别到对应的色块。

    +

    例如LAB=(20, 50, 80),由于L=20,为了适配一定范围让L Min=10L Max=30;同理,由于A=50,让A Min=40A Max=60; 由于B=80,让B Min=70B Max=90

    +
  8. +
+

这个方法可以更精确的找到合适的阈值,搭配快速调试阈值的方法,轻松找到想要的阈值。
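调好阈值后,把它填到代码里即可使用,比如按上面例子调出的阈值(数值仅为示例):

thresholds = [[10, 30, 40, 60, 70, 90]]   # 依次为 L Min/Max、A Min/Max、B Min/Max
blobs = img.find_blobs(thresholds, pixels_threshold=500)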

+

通过串口协议获取识别结果

+

寻找色块APP支持通过串口(默认波特率为115200)上报检测到的色块信息。

+

由于上报信息只有一条,这里直接用示例来说明上报信息的内容。

+

例如上报信息为:

+ +
AA CA AC BB 14 00 00 00 E1 08 EE 00 37 00 15 01 F7 FF 4E 01 19 00 27 01 5A 00 A7 20
+
+
    +
  • AA CA AC BB:协议头部,内容固定

    +
  • +
  • 14 00 00 00:数据长度,除了协议头部和数据长度外的总长度

    +
  • +
  • E1:标志位,用来标识串口消息标志

    +
  • +
  • 08:命令类型,对于寻找色块APP应用该值固定为0x08

    +
  • +
  • EE 00 37 00 15 01 F7 FF 4E 01 19 00 27 01 5A 00:已找到色块的四个顶点坐标,每个值用小端格式的2字节表示。EE 0037 00表示第一个顶点坐标为(238, 55),15 01F7 FF表示第二个顶点坐标为(277, -9),4E 0119 00表示第三个顶点坐标为(334, 25),27 015A 00表示第四个顶点坐标为(295, 90)。

    +
  • +
  • A7 20:CRC 校验值,用以校验帧数据在传输过程中是否出错

    +
  • +
+

关于LAB颜色空间

+

LAB颜色空间和RGB颜色空间一样是一种表示颜色的方法,LAB可以表示人眼能看到的所有颜色。如果需要深入了解LAB可以去网络上搜索相关文章,那样更详细;在这里你只需要了解为什么 MaixPy 选用 LAB,以及它带来的优势。

+

LAB对于MaixPy的优势:

+
    +
  1. LAB颜色空间的色域比RGB都要大,因此完全可以替换RGB。
  2. +
  3. LAB颜色空间下,由于L通道是亮度通道,我们常常设置到较大的范围即可(常用[0,80]),而编写代码时主要关注是A通道和B通道,这样可以减少大量的时间在纠结颜色阈值如何选择的问题上。
  4. +
  5. LAB颜色空间的颜色感知更均匀,更容易用代码调试。例如,对于只需要寻找红色色块,可以固定L通道和B通道值,只需要调整A通道的值即可(这是在颜色精度要求不高的情况下);如果是RGB通道则基本需要R、G、B三个通道同时变动才能找到合适的阈值。
  6. +
+ + +
diff --git a/maixpy/doc/zh/vision/image_ops.html b/maixpy/doc/zh/vision/image_ops.html (new file)

MaixCAM MaixPy 图像基础操作

更新历史
日期 | 版本 | 作者 | 更新内容
2024-07-08 | 1.1.0 | neucrack | 优化 cv 和 image 转换文档
2024-04-03 | 1.0.0 | neucrack | 初版文档
+
+
+ +
+
+ +

简介

+

视觉应用中图像占据非常重要的位置,不管是图片还是视频,因为视频本质上就是一帧帧的图像,所以图像处理是视觉应用的基础。

+

API 文档

+

本文介绍常用方法, 更多 API 参考 maix.image 模块的文档。

+

图像格式

+

MaixPy 提供基础图像模块image,里面最重要的就是image.Image类,用于图像的创建以及各种图像基础操作,以及图像加载和保存等。

+

图像格式有很多,一般我们用image.Format.FMT_RGB888 或者 image.Format.FMT_RGBA8888 或者 image.Format.FMT_GRAYSCALE或者image.Format.FMT_BGR888等。

+

大家知道 RGB 三色可以合成任意颜色,所以一般情况下我们使用 image.Format.FMT_RGB888就足够, RGB888 在内存中是 RGB packed 排列,即在内存中的排列:
+像素1_红色, 像素1_绿色, 像素1_蓝色, 像素2_红色, 像素2_绿色, 像素2_蓝色, ... 依次排列。

+

创建图像

+

创建图像很简单,只需要指定图像的宽度和高度以及图像格式即可:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+print(img)
+print(img.width(), img.height(), img.format())
+
+

320 是图像的宽度,240 是图像的高度,image.Format.FMT_RGB888 是图像的格式,格式参数可以省略,默认是image.Format.FMT_RGB888

+

这里通过img.width()img.height()img.format()可以获取图像的宽度、高度和格式。

+

显示到屏幕

+

MaixPy 提供了maix.display.Display类,可以方便的显示图像:

+ +
from maix import image, display
+
+disp = display.Display()
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+disp.show(img)
+
+

注意这里因为没有图像数据,所以显示的是黑色的图像,修改画面看后文。

+

从文件系统读取图像

+

MaixPy 提供了maix.image.load方法,可以从文件系统读取图像:

+ +
from maix import image
+
+img = image.load("/root/image.jpg")
+if img is None:
+    raise Exception(f"load image failed")
+print(img)
+
+

注意这里/root/image.jpg 是提前传输到了板子上的,方法可以看前面的教程。
+可以支持 jpgpng 格式的图像。

+

保存图像到文件系统

+

MaixPy 的maix.image.Image提供了save方法,可以保存图像到文件系统:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+
+# do something with img
+img.save("/root/image.jpg")
+
+

画框

+

image.Image提供了draw_rect方法,可以在图像上画框:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img.draw_rect(10, 10, 100, 100, image.Color.from_rgb(255, 0, 0))
+
+

这里的参数依次是:x, y, w, h, colorxy 是框的左上角坐标,wh 是框的宽度和高度,color 是框的颜色,可以使用image.Color.from_rgb方法创建颜色。
+可以用thickness指定框的线宽,默认是1

+

也可以画实心框,传参 thickness=-1 即可:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img.draw_rect(10, 10, 100, 100, (255, 0, 0), thickness=-1)
+
+

写字符串

+

image.Image提供了draw_string方法,可以在图像上写字:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img.draw_string(10, 10, "Hello MaixPy", image.Color.from_rgb(255, 0, 0))
+
+

这里的参数依次是:x, y, text, colorxy 是文字的左上角坐标,text 是要写的文字,color 是文字的颜色,可以使用image.Color.from_rgb方法创建颜色。

+

还可以放大字体,传参 scale 即可:

+ +
img.draw_string(10, 10, "Hello MaixPy", image.Color.from_rgb(255, 0, 0), scale=2)
+
+

获取字体的宽度和高度:

+ +
w, h = image.string_size("Hello MaixPy", scale=2)
+print(w, h)
+
+

注意这里scale是放大倍数,默认是1,和draw_string应该保持一致。

+

中文支持和自定义字体

+

image 模块支持加载ttf/otf字体,默认字体只支持英文,如果要显示中文或者自定义字体可以先下载字体文件到设备上,然后加载字体。
+系统也内置了几个字体,在/maixapp/share/font目录下面,代码示例:

+ +
from maix import image, display, app, time
+
+image.load_font("sourcehansans", "/maixapp/share/font/SourceHanSansCN-Regular.otf", size = 32)
+print("fonts:", image.fonts())
+image.set_default_font("sourcehansans")
+
+disp = display.Display()
+
+img = image.Image(disp.width(), disp.height())
+img.draw_string(2, 2, "你好!Hello, world!", image.Color.from_rgba(255, 0, 0, 0.8))
+
+disp.show(img)
+while not app.need_exit():
+    time.sleep(1)
+
+

加载字体文件,然后设置默认的字体,也可以不设置默认的字体,在写字的函数参数设置:

+ +
img.draw_string(2, 2, "你好!Hello, world!", image.Color.from_rgba(255, 0, 0, 0.8), font="sourcehansans")
+
+

注意 string_size方法也会使用设置的默认字体计算大小,也可以通过font参数单独设置要计算大小的字体。
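例如按上面加载的字体计算字符串占用的宽高(字体名沿用上文加载的 sourcehansans):

w, h = image.string_size("你好!Hello, world!", font="sourcehansans")
print(w, h)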

+

画线

+

image.Image提供了draw_line方法,可以在图像上画线:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img.draw_line(10, 10, 100, 100, image.Color.from_rgb(255, 0, 0))
+
+

这里的参数依次是:x1, y1, x2, y2, colorx1y1 是线的起点坐标,x2y2 是线的终点坐标,color 是线的颜色,可以使用image.Color.from_rgb方法创建颜色。

+

画圆

+

image.Image提供了draw_circle方法,可以在图像上画圆:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img.draw_circle(100, 100, 50, image.Color.from_rgb(255, 0, 0))
+
+

这里的参数依次是:x, y, r, colorxy 是圆心坐标,r 是半径,color 是圆的颜色,可以使用image.Color.from_rgb方法创建颜色。

+

缩放图像

+

image.Image提供了resize方法,可以缩放图像:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img_new = img.resize(160, 120)
+print(img, img_new)
+
+

注意这里resize方法返回一个新的图像对象,原图像不变。

+

剪裁图像

+

image.Image提供了crop方法,可以剪裁图像:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img_new = img.crop(10, 10, 100, 100)
+print(img, img_new)
+
+

注意这里crop方法返回一个新的图像对象,原图像不变。

+

旋转图像

+

image.Image提供了rotate方法,可以旋转图像:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img_new = img.rotate(90)
+print(img, img_new)
+
+

注意这里rotate方法返回一个新的图像对象,原图像不变。

+

拷贝图像

+

image.Image提供了copy方法,可以拷贝一份独立的图像:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img_new = img.copy()
+print(img, img_new)
+
+

仿射变换

+

image.Image提供了affine方法,可以进行仿射变换,即提供当前图中三个及以上的点坐标,以及目标图中对应的点坐标,可以自动进行图像的旋转、缩放、平移等操作变换到目标图像:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img_new = img.affine([(10, 10), (100, 10), (10, 100)], [(10, 10), (100, 20), (20, 100)])
+print(img, img_new)
+
+

更多参数和用法请参考 API 文档。

+

画关键点

+

image.Image提供了draw_keypoints方法,可以在图像上画关键点:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+
+keypoints = [10, 10, 100, 10, 10, 100]
+img.draw_keypoints(keypoints, image.Color.from_rgb(255, 0, 0), size=10, thickness=1, fill=False)
+
+

在坐标(10, 10)(100, 10)(10, 100)画三个红色的关键点,关键点的大小是10,线宽是1,不填充。

+

画十字

+

image.Image提供了draw_cross方法,可以在图像上画十字:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img.draw_cross(100, 100, image.Color.from_rgb(255, 0, 0), size=5, thickness=1)
+
+

在坐标(100, 100)画一个红色的十字,十字的延长大小是5,所以线段长度为2 * size + thickness, 线宽是1

+

画箭头

+

image.Image提供了draw_arrow方法,可以在图像上画箭头:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img.draw_arrow(10, 10, 100, 100, image.Color.from_rgb(255, 0, 0), thickness=1)
+
+

在坐标(10, 10)画一个红色的箭头,箭头的终点是(100, 100),线宽是1

+

画图

+

image.Image提供了draw_image方法,可以在图像上画图:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img2 = image.Image(100, 100, image.Format.FMT_RGB888)
+img2.draw_rect(10, 10, 90, 90, image.Color.from_rgb(255, 0, 0))
+img.draw_image(10, 10, img2)
+
+

转换格式

+

image.Image提供了to_format方法,可以转换图像格式:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+img_new = img.to_format(image.Format.FMT_BGR888)
+print(img, img_new)
+img_jpg = img.to_format(image.Format.FMT_JPEG)
+print(img, img_new)
+
+

注意这里to_format方法返回一个新的图像对象,原图像不变。

+

maix.image.Image 对象和 Numpy/OpenCV 格式互相转换

+

MaixPy 使用 OpenCV 文档

+

和 bytes 数据互相转换

+

image.Image提供了to_bytes方法,可以转换图像为bytes数据:

+ +
from maix import image
+
+img = image.Image(320, 240, image.Format.FMT_RGB888)
+data = img.to_bytes()
+print(type(data), len(data), img.data_size())
+
+img_jpeg = image.from_bytes(320, 240, image.Format.FMT_RGB888, data)
+print(img_jpeg)
+img = img_jpeg.to_format(image.Format.FMT_RGB888)
+print(img)
+
+

这里to_bytes获得一个新的bytes对象,是独立的内存,不会影响原图。
+image.Image构造函数中传入data参数可以直接从bytes数据构造图像对象,注意新的图像也是独立的内存,不会影响到data

+

因为涉及到内存拷贝,所以这个方法比较耗时,不建议频繁使用。

+
+

如果你想用不拷贝的方式优化程序(不建议轻易使用,写不好代码会导致程序容易崩溃,),请看 API 文档。

+
+

更多基础 API 使用方法

+

更多 API 使用方法请参考 maix.image 模块的文档。

diff --git a/maixpy/doc/zh/vision/line_tracking.html b/maixpy/doc/zh/vision/line_tracking.html (new file)

MaixCAM MaixPy 寻找直线

更新历史
日期 | 版本 | 作者 | 更新内容
2024-05-09 | 1.0.0 | lxowalle | 初版文档
+
+
+ +
+
+ +

阅读本文前,确保已经知晓如何开发MaixCAM,详情请阅读快速开始

+

简介

+

在视觉应用中,在巡迹小车、巡线机器人等应用中经常需要寻找线条的功能。本文将介绍:

+
    +
  • 如何使用MaixPy来实现巡线功能

    +
  • +
  • 如何使用MaixCam的默认应用程序巡线

    +
  • +
+

如何使用MaixPy来寻找直线

+

MaixPy的 maix.image.Image中提供了get_regression方法来寻找直线

+

代码示例

+

一个简单的示例,实现寻找并画出直线

+ +
from maix import camera, display, image
+
+cam = camera.Camera(320, 240)
+disp = display.Display()
+
+# thresholds = [[0, 80, 40, 80, 10, 80]]      # red
+thresholds = [[0, 80, -120, -10, 0, 30]]    # green
+# thresholds = [[0, 80, 30, 100, -120, -60]]  # blue
+
+while 1:
+    img = cam.read()
+
+    lines = img.get_regression(thresholds, area_threshold = 100)
+    for a in lines:
+        img.draw_line(a.x1(), a.y1(), a.x2(), a.y2(), image.COLOR_GREEN, 2)
+        theta = a.theta()
+        rho = a.rho()
+        if theta > 90:
+            theta = 270 - theta
+        else:
+            theta = 90 - theta
+        img.draw_string(0, 0, "theta: " + str(theta) + ", rho: " + str(rho), image.COLOR_BLUE)
+
+    disp.show(img)
+
+

步骤:

+
    +
  1. 导入image、camera、display模块

    + +
    from maix import image, camera, display
    +
    +
  2. +
  3. 初始化摄像头和显示

    + +
    cam = camera.Camera(320, 240)	# 初始化摄像头,输出分辨率320x240 RGB格式
    +disp = display.Display()
    +
    +
  4. +
  5. 从摄像头获取图片并显示

    + +
    while 1:
    +    img = cam.read()
    +    disp.show(img)
    +
    +
  6. +
  7. 调用get_regression方法寻找摄像头图片中的直线,并画到屏幕上

    + +
    lines = img.get_regression(thresholds, area_threshold = 100)
    +for a in lines:
    +   img.draw_line(a.x1(), a.y1(), a.x2(), a.y2(), image.COLOR_GREEN, 2)
    +   theta = a.theta()
    +   rho = a.rho()
    +   if theta > 90:
    +      theta = 270 - theta
    +   else:
    +      theta = 90 - theta
    +   img.draw_string(0, 0, "theta: " + str(theta) + ", rho: " + str(rho), image.COLOR_BLUE)
    +
    +
      +
    • img是通过cam.read()读取到的摄像头图像,当初始化的方式为cam = camera.Camera(320, 240)时,img对象是一张分辨率为320x240的RGB图。
    • +
    • img.get_regression用来寻找直线, thresholds 是一个颜色阈值列表,每个元素是一个颜色阈值,同时找到多个阈值就传入多个,每个颜色阈值的格式为 [L_MIN, L_MAX, A_MIN, A_MAX, B_MIN, B_MAX],这里的 L、A、B是LAB颜色空间的三个通道,L 通道是亮度,A 通道是红绿通道,B 通道是蓝黄通道。area_threshold是一个像素面积的阈值,用来过滤一些不需要的小直线。
    • +
    • for a in lines用来遍历返回的Line对象, 其中a就是当前的Line对象。通常get_regression函数只会返回一个Line对象,如果需要寻找多条直线,可以尝试使用find_line方法
    • +
    • 使用img.draw_line来画出找到的线条,a.x1(), a.y1(), a.x2(), a.y2()分别代表直线两端的坐标
    • +
    • 使用img.draw_string在左上角显示直线与x轴的夹角, a.theta()是直线与y轴的夹角, 这里为了方便理解转换成直线与x轴的夹角thetaa.rho()是原点与直线的垂线的长度.
    • +
    +
  8. +
  9. 通过maixvision运行代码,就可以寻线啦,看看效果吧

    +

    image-20240509110204007

    +
  10. +
+

常用参数说明

+

列举常用参数说明,如果没有找到可以实现应用的参数,则需要考虑是否使用其他算法实现,或者基于目前算法的结果扩展所需的功能

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
参数说明示例
thresholds基于lab颜色空间的阈值,threshold=[[l_min, l_max, a_min, a_max, b_min, b_max]],分别表示:
亮度范围为[l_min, l_max]
绿色到红色的分量范围为[a_min, a_max]
蓝色到黄色的分量范围为[b_min, b_max]
可同时设置多个阈值
设置两个阈值来检测红色和绿色
img.get_regression(threshold=[[0, 80, 40, 80, 10, 80], [0, 80, -120, -10, 0, 30]])
红色阈值为[0, 80, 40, 80, 10, 80]
绿色阈值为[0, 80, -120, -10, 0, 30]
invert使能阈值反转,使能后传入阈值与实际阈值相反,默认为False使能阈值反转
img.get_regression(invert=True)
roi设置算法计算的矩形区域,roi=[x, y, w, h],x,y表示矩形区域左上角坐标,w,h表示矩形区域的宽度和高度,默认为整张图片计算坐标为(50,50),宽和高为100的区域
img.get_regression(roi=[50, 50, 100, 100])
area_threshold过滤像素面积小于area_threshold的直线,单位为像素点,默认为10。该参数可用于过滤一些无用的小直线过滤面积小于1000的直线
img.get_regression(area_threshold=1000)
pixels_threshold过滤有效像素点小于pixels_threshold的直线,默认为10。该参数可用于过滤一些无用的小直线过滤有效像素点小于1000的直线
img.get_regression(pixels_threshold=1000)
+

本文介绍常用方法,更多 API 请看 API 文档的 image 部分。

+

提升巡线的速度

+

这里提供几个提升巡线速度的方法

+
    +
  1. 选择合适的分辨率

    +

    越大的分辨率计算速度越慢,可以根据识别距离和精度的要求来选择更合适的分辨率

    +
  2. +
  3. 使用灰度图识别

    +

    使用灰度图识别时,算法只会处理一个通道,有更快的识别速度,在颜色单一的环境会很有用。注意此时向get_regression传入thresholds时,只有l_minl_max有效。

    +

    获取灰度图的方法:

    + +
    # 方法1
    +cam = camera.Camera(320, 240, image.Format.FMT_GRAYSCALE)    # MaixPy v4.2.1后支持
    +gray_img = cam.read()										# 获取灰度图
    +
    +# 方法2
    +cam = camera.Camera(320, 240)
    +img = cam.read()
    +gray_img = img.to_format(image.Format.FMT_GRAYSCALE)			# 获取灰度图
    +
    +
  4. +
+

如何使用MaixCam的默认应用程序寻找直线

+

为了快速验证寻找直线的功能,可以先使用MaixCam提供的line_tracking应用程序来体验寻找直线的效果。

+

使用方法

+
    +
  1. 选择并打开Line tracking应用
  2. +
  3. 点击屏幕中需要识别的直线,左侧会显示该直线的颜色
  4. +
  5. 点击左侧(界面中L A B下方的颜色)需要检测的颜色
  6. +
  7. 此时就可以识别到对应的直线了,同时串口也会输出直线的坐标和角度信息。
  8. +
+

演示

+

+

进阶操作

+

手动设置LAB阈值寻找直线

+

APP提供手动设置LAB阈值来精确的寻找直线

+

操作方法:

+
    +
  1. 点击左下角选项图标,进入配置模式

    +
  2. +
  3. 摄像头对准需要寻找的物体,点击屏幕上的目标直线,此时界面中L A B下方会显示该物体对应颜色的矩形框,并显示该物体颜色的LAB值

    +
  4. +
  5. 点击下方选项L Min,L Max,A Min,A Max,B Min,B Max,点击后右侧会出现滑动条来设置该选项值。这些值分别对应LAB颜色格式的L通道、A通道和B通道的最小值和最大值

    +
  6. +
  7. 参考步骤2计算的物体颜色的LAB值,将L Min,L Max,A Min,A Max,B Min,B Max调整到合适的值,即可识别到对应的直线。

    +

    例如LAB=(20, 50, 80),由于L=20,为了适配一定范围让L Min=10L Max=30;同理,由于A=50,让A Min=40A Max=60; 由于B=80,让B Min=70B Max=90

    +
  8. +
+

通过串口协议获取检测数据

+

寻找直线应用支持通过串口(默认波特率为115200)上报检测到的直线信息。

+

由于上报信息只有一条,这里直接用示例来说明上报信息的内容。

+

例如上报信息为:

+ +
AA CA AC BB 0E 00 00 00 E1 09 FC 01 01 00 E9 01 6F 01 57 00 C1 C6
+
+
    +
  • AA CA AC BB:协议头部,内容固定

    +
  • +
  • 0E 00 00 00:数据长度,除了协议头部和数据长度外的总长度,这里表示长度为14

    +
  • +
  • E1:标志位,用来标识串口消息标志

    +
  • +
  • 09:命令类型,对于寻找直线APP应用该值固定为0x09

    +
  • +
  • FC 01 01 00 E9 01 6F 01 57 00:直线的两端坐标和角度信息,每个值用小端格式的2字节表示。FC 0101 00表示第一个端点坐标为(508, 1),E9 016F 01表示第二个端点坐标为(489, 367),57 00表示直线与x轴的角度为87度

    +
  • +
  • C1 C6:CRC 校验值,用以校验帧数据在传输过程中是否出错

    +
  • +
diff --git a/maixpy/doc/zh/vision/maixhub_train.html b/maixpy/doc/zh/vision/maixhub_train.html (new file)

MaixCAM MaixPy 使用 MaixHub 在线训练 AI 模型给 MaixPy 使用

更新历史
日期 | 版本 | 作者 | 更新内容
2024-04-03 | 1.0.0 | neucrack | 初版文档
+
+
+ +
+
+ +

简介

+

MaixHub 提供了在线训练 AI 模型的功能,可以直接在浏览器中训练模型,不需要购买昂贵的机器,不需要搭建复杂的开发环境,也不需要写代码,非常适合入门,也适合懒得翻代码的老手。

+

使用 MaixHub 训练模型的基本步骤

+

确认要识别的数据类型和模型类型

+

要训练一个 AI 模型,需要先确定是什么数据和模型,目前 MaixHub(2024.4)提供了图像数据的物体分类模型物体检测模型,都是图像识别模型, 物体分类模型物体检测模型 更简单,因为物体检测需要标注物体在图中的位置,会比较麻烦,物体分类则只需要给出图像中是什么,不需要坐标,所以更简单, 如果是初学者建议先从物体分类开始。

+

采集数据

+

如前面的 AI 基础所说,要训练模型,必须准备训练用的数据集让 AI 学习,对于图像训练,我们需要创建一个数据集,并且上传图片到数据集。

+

保证设备已经连接网络(WiFi)。
+打开设备上的 MaixHub 应用选择 采集数据 来拍照并一键上传到 MaixHub。需要先在 MaixHub 创建数据集,然后点击 设备 上传数据,会出现一个 二维码,设备扫描二维码来与MaixHub 建立连接。

+

注意要分清训练集和验证集的区别,要想实机运行的效果和训练效果相当,验证集的数据一定要和实机运行拍摄的图像质量一样,训练集也建议用设备拍摄的,如果要用网上的图片,一定只能用在训练集,不要用在验证集,因为数据量小,数据集与实机运行越接近越好。

+

标注数据

+

对于分类模型,在上传的时候就顺便已经标注好了,即上传时选择好了图片属于那个分类。

+

对于目标检测模型,上传完成后需要进行手动标注,即在每一张图中框出要被识别物体的坐标大小和分类。
+这个标注过程你也可以选择自己在自己的电脑中离线用比如 labelimg 这样的软件标注完毕后使用数据集中的导入功能导入到 MaixHub。
+标注时善用快捷键标注起来会更快,后面MaixHub 也会增加更多辅助标注和自动标注工具(目前在上传视频处有自动标注工具也可以尝试使用)。

+

训练模型

+

选择训练参数训练,选择对应的设备平台,选择 maixcam,等待排队训练,可以实时看到训练进度,等待完成即可。

+

部署模型

+

训练完成后,可以设备的 MaixHub 应用中选择 部署 功能,扫码进行部署。
+设备开会自动下载模型并且运行起来,模型会被存在本地,后面也能选择再次运行。

+

如果你觉得识别效果很不错,可以一键分享到模型库让更多人使用。

+

使用方法

+

请到 MaixHub 注册账号,然后登录,主页有视频教程,学习即可。

+

注意教程如果是使用了 M2dock 这个开发板,和 MaixCAM也是类似的,只是设备(板子)上使用的 MaixHub 应用可能稍微有点区别,大体上是相同的,请注意举一反三。

diff --git a/maixpy/doc/zh/vision/object_track.html b/maixpy/doc/zh/vision/object_track.html (new file)

MaixCAM MaixPy 物体轨迹追踪和计数(如人流计数)

轨迹追踪简介

+

前面我们使用 YOLOv5 YOLOv8 甚至是 find_blobs 都可以检测到物体,但是如果画面中同时存在多个物体,当我们需要区分每一个物体,就需要物体追踪功能了。

+

比如画面中同时有 5 个人在移动,我们需要给每个人编号,知道他们的行动轨迹。

+

应用:

+
    +
  • 人流计数,比如通过某个地段的人数量。
  • +
  • 工件计数,比如流水线对生产的产品进行计数。
  • +
  • 物体移动轨迹记录和识别。
  • +
+

MaixCAM/MaixPy 物体追踪和人流计数效果

+

效果如下视频,可以跟踪每个人,以及对从上往下跨越黄色区域的人进行计数(左下角):

+

+

MaixCAM / MaixPy 使用 物体追踪和人流计数

+

可以参考 直接安装应用 安装对应的 APP 体验效果。
也可以看examples/vision/tracker 下的例程。

+

其中tracker_bytetrack.py 例程是基本的物体跟踪例程,分为几个步骤:

+
    +
  • 使用 YOLOv5 或者 YOLOv8 检测物体,这样你就可以根据你自己要检测的物体更换模型即可检测不同物体。
  • +
  • 使用maix.tracker.ByteTracker 这个算法进行物体追踪,只需要调用一个update函数即可得到结果(画面中的每个轨迹),十分简单。
  • +
+

其中有几个参数根据自己的实际场景进行调整,具体参数以例程代码和 API 参数说明为准:

+ +
# configs
+conf_threshold = 0.3       # detect threshold
+iou_threshold = 0.45       # detect iou threshold
+max_lost_buff_time = 120   # the frames for keep lost tracks.
+track_thresh = 0.4         # tracking confidence threshold.
+high_thresh = 0.6          # threshold to add to new track.
+match_thresh = 0.8         # matching threshold for tracking, e.g. one object in two frame iou < match_thresh we think they are the same obj.
+max_history_num = 5        # max tack's position history length.
+show_detect = False        # show detect
+valid_class_id = [0]       # we used classes index in detect model。
+
+

tracker_bytetrack_count.py 例程则在此基础上增加了人流计数的功能,这里为了让例程更加简单,只写了一个判断人从上往下走的计数逻辑:即当人当前处在黄色区域以下,同时其历史轨迹经过黄色区域,就认为是从上往下跨越了黄色区域。
+实际在你的应用场景可以自己编写相关逻辑。
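下面给出一个计数逻辑的思路示意(伪代码性质,track.id、track.history 等属性名只是假设,实际请以 MaixPy 例程和 maix.tracker 的 API 文档为准):

counted = set()   # 已经计过数的轨迹 id

def update_count(tracks, yellow_y0, yellow_y1):
    # tracks 为 ByteTracker.update 返回的轨迹列表,yellow_y0/y1 为黄色区域上下边界(均为假设)
    count = 0
    for track in tracks:
        if track.id in counted:
            continue
        cur_y = track.history[-1][1]   # 轨迹最新位置的 y 坐标(假设的属性)
        passed = any(yellow_y0 <= y <= yellow_y1 for _, y in track.history[:-1])
        if cur_y > yellow_y1 and passed:   # 当前在区域下方,且历史轨迹经过过黄色区域
            counted.add(track.id)
            count += 1
    return count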

diff --git a/maixpy/doc/zh/vision/ocr.html b/maixpy/doc/zh/vision/ocr.html (new file)

MaixCAM MaixPy 实现 OCR 图片文字识别

OCR 简介

+

OCR(Optical Character Recognition,光学字符识别)即用视觉的方式识别图像中的文字。
+在很多场景会用到,比如:

+
    +
  • 识别文字/数字卡片
  • +
  • 提取卡片上的文字,比如身份证
  • +
  • 电子化纸质文档
  • +
  • 数显数字读取,可用于抄表、旧仪器数据电子化等
  • +
  • 车牌文字识别
  • +
+

MaixPy 中使用 OCR

+

MaixPy 移植了 PaddleOCR, 是由百度开源的一款 OCR 算法,想了解原理可以看这个开源项目。

+

OCR

+

首先保证 MaixPy 版本 >= 4.6

+

然后执行代码:(完整的最新的代码在MaixPy 仓库,以源码为准)

+ +
from maix import camera, display, image, nn, app
+
+model = "/root/models/pp_ocr.mud"
+ocr = nn.PP_OCR(model)
+
+cam = camera.Camera(ocr.input_width(), ocr.input_height(), ocr.input_format())
+dis = display.Display()
+
+image.load_font("ppocr", "/maixapp/share/font/ppocr_keys_v1.ttf", size = 20)
+image.set_default_font("ppocr")
+
+while not app.need_exit():
+    img = cam.read()
+    objs = ocr.detect(img)
+    for obj in objs:
+        points = obj.box.to_list()
+        img.draw_keypoints(points, image.COLOR_RED, 4, -1, 1)
+        img.draw_string(obj.box.x4, obj.box.y4, obj.char_str(), image.COLOR_RED)
+    dis.show(img)
+
+
+

可以看到用了ocr = nn.PP_OCR(model) 加载模型,然后用ocr.detect(img) 检测并且识别文字得到结果画在了屏幕上。

+

更多模型选择

+

MaixHub 模型下载 可以下载到更完整的模型,不同输入分辨率,不同语言,不同的版本(MaixPy 目前默认pp_ocr.mud 模型为 PPOCRv3 检测+v4识别)。

+

只识别不检测

+

如果你已经有处理好了的文字图,即一张图中已知文字的四个角坐标,可以不调用detect函数,而是只调用recognize函数,这样就不会检测,而是仅仅识别图片中的文字。
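调用方式大致如下(这里的参数和返回值形式只是假设的示意,实际请以 maix.nn 的 API 文档和 MaixPy 例程为准):

# 假设 img 已经是裁剪并矫正好的、只包含文字的图像
res = ocr.recognize(img)      # 只做识别,不做检测(具体参数请查 API 文档)
print(res.char_str())         # 打印识别出的文字(返回值形式为假设)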

+

自定义模型

+

默认模型提供了中文和英文文字检测识别模型,如果你有特殊的需求,比如其它语言,或者只想检测特定的图形不想检测所有类型的文字,
+可以到PaddleOCR 官方模型库 下载对应的模型然后转换成 MaixCAM 支持的模型格式即可。

+

这里最复杂的就是将模型转换成 MaixCAM 可用的模型,过程比较复杂,需要有基本的 Linux 使用基础以及灵活变通的能力。

+
    +
  • 首先自己使用 PaddleOCR 源码进行训练模型或者下载官方提供的模型,检测模型请选择 PP-OCRv3, 因为效果不错的同时速度比 v4 快,识别模型请下载 v4 模型,实测 v3 在 MaixCAM 上量化后效果不理想。
  • +
  • 然后将模型转成 onnx:
  • +
+ +
model_path=./models/ch_PP-OCRv3_rec_infer
+paddle2onnx --model_dir ${model_path} --model_filename inference.pdmodel --params_filename inference.pdiparams --save_file ${model_path}/inference.onnx --opset_version 14 --enable_onnx_checker True
+
+
    +
  • 然后按照onnx转MUD格式的模型文档 安装好环境再转换模型,这里在附录提供示例转换脚本。
  • +
  • 使用 MaixPy 加载运行即可。
  • +
+

附录:模型转换脚本

+

检测:

+ +
#!/bin/bash
+
+set -e
+
+net_name=ch_PP_OCRv3_det
+input_w=320
+input_h=224
+output_name=sigmoid_0.tmp_0
+
+# scale 1/255.0
+# "mean": [0.485, 0.456, 0.406],
+# "std": [0.229, 0.224, 0.225],
+
+# mean: mean * 255
+# scale: 1/(std*255)
+
+# mean: 123.675, 116.28, 103.53
+# scale: 0.01712475, 0.017507, 0.01742919
+
+mkdir -p workspace
+cd workspace
+
+# convert to mlir
+model_transform.py \
+--model_name ${net_name} \
+--model_def ../${net_name}.onnx \
+--input_shapes [[1,3,${input_h},${input_w}]] \
+--mean "123.675,116.28,103.53" \
+--scale "0.01712475,0.017507,0.01742919" \
+--keep_aspect_ratio \
+--pixel_format bgr \
+--channel_format nchw \
+--output_names "${output_name}" \
+--test_input ../test_images/test3.jpg \
+--test_result ${net_name}_top_outputs.npz \
+--tolerance 0.99,0.99 \
+--mlir ${net_name}.mlir
+
+
+# export bf16 model
+#   not use --quant_input, use float32 for easy coding
+model_deploy.py \
+--mlir ${net_name}.mlir \
+--quantize BF16 \
+--processor cv181x \
+--test_input ${net_name}_in_f32.npz \
+--test_reference ${net_name}_top_outputs.npz \
+--model ${net_name}_bf16.cvimodel
+
+echo "calibrate for int8 model"
+# export int8 model
+run_calibration.py ${net_name}.mlir \
+--dataset ../images \
+--input_num 200 \
+-o ${net_name}_cali_table
+
+echo "convert to int8 model"
+# export int8 model
+#    add --quant_input, use int8 for faster processing in maix.nn.NN.forward_image
+model_deploy.py \
+--mlir ${net_name}.mlir \
+--quantize INT8 \
+--quant_input \
+--calibration_table ${net_name}_cali_table \
+--processor cv181x \
+--test_input ${net_name}_in_f32.npz \
+--test_reference ${net_name}_top_outputs.npz \
+--tolerance 0.9,0.5 \
+--model ${net_name}_int8.cvimodel
+
+
+

识别:

+ +
#!/bin/bash
+
+set -e
+
+# net_name=ch_PP_OCRv4_rec
+# output_name=softmax_11.tmp_0
+
+net_name=ch_PP_OCRv3_rec_infer_sophgo
+output_name=softmax_5.tmp_0
+
+
+input_w=320
+input_h=48
+cali_images=../images_crop_320
+
+# scale 1/255.0
+# "mean": [0.5, 0.5, 0.5],
+# "std": [0.5, 0.5, 0.5],
+
+# mean: mean * 255
+# scale: 1/(std*255)
+
+# mean: 127.5,127.5,127.5
+# scale: 0.00784313725490196,0.00784313725490196,0.00784313725490196
+
+mkdir -p workspace
+cd workspace
+
+# convert to mlir
+model_transform.py \
+--model_name ${net_name} \
+--model_def ../${net_name}.onnx \
+--input_shapes [[1,3,${input_h},${input_w}]] \
+--mean "127.5,127.5,127.5" \
+--scale "0.00784313725490196,0.00784313725490196,0.00784313725490196" \
+--keep_aspect_ratio \
+--pixel_format bgr \
+--channel_format nchw \
+--output_names "${output_name}" \
+--test_input ../test_images/test3.jpg \
+--test_result ${net_name}_top_outputs.npz \
+--tolerance 0.99,0.99 \
+--mlir ${net_name}.mlir
+
+
+# export bf16 model
+#   not use --quant_input, use float32 for easy coding
+model_deploy.py \
+--mlir ${net_name}.mlir \
+--quantize BF16 \
+--processor cv181x \
+--test_input ${net_name}_in_f32.npz \
+--test_reference ${net_name}_top_outputs.npz \
+--model ${net_name}_bf16.cvimodel
+
+echo "calibrate for int8 model"
+# export int8 model
+run_calibration.py ${net_name}.mlir \
+--dataset $cali_images \
+--input_num 200 \
+-o ${net_name}_cali_table
+
+echo "convert to int8 model"
+# export int8 model
+#    add --quant_input, use int8 for faster processing in maix.nn.NN.forward_image
+model_deploy.py \
+--mlir ${net_name}.mlir \
+--quantize INT8 \
+--quant_input \
+--calibration_table ${net_name}_cali_table \
+--processor cv181x \
+--test_input ${net_name}_in_f32.npz \
+--test_reference ${net_name}_top_outputs.npz \
+--tolerance 0.9,0.5 \
+--model ${net_name}_int8.cvimodel
+
+
diff --git a/maixpy/doc/zh/vision/opencv.html b/maixpy/doc/zh/vision/opencv.html (new file)

MaixCAM MaixPy 使用 OpenCV

简介

+

对于 MaixCAM,因为使用了 Linux, 并且性能基本能够支撑使用Python版本的OpenCV,所以除了使用maix模块,你也可以直接使用cv2模块。

+

本文例程以及更多可以在MaixPy/examples/vision/opencv 中找到。

+

注意 OpenCV 的函数基本都是 CPU 计算的,能使用 maix 的模块尽量不使用 OpenCV,因为 maix 有很多函数都是经过硬件加速过的。

+

maix.image.Image 对象和 Numpy/OpenCV 格式互相转换

+

maix.image.Image对象可以转换成numpy数组,这样就能给numpyopencv等库使用:

+ +
from maix import image, time, display, app
+
+disp = display.Display()
+
+while not app.need_exit():
+    img = image.Image(320, 240, image.Format.FMT_RGB888)
+    img.draw_rect(0, 0, 100, 100, image.COLOR_RED, thickness=-1)
+    t = time.ticks_ms()
+    img_bgr = image.image2cv(img, ensure_bgr=True, copy=True)
+    img2   = image.cv2image(img_bgr, bgr=True, copy=True)
+    print("time:", time.ticks_ms() - t)
+    print(type(img_bgr), img_bgr.shape)
+    print(type(img2), img2)
+    print("")
+    disp.show(img2)
+
+

前面的程序因为每次转换都要拷贝一次内存,所以速度会比较慢,下面为优化速度版本,如果不是极限追求速度不建议使用,容易出错:

+ +
from maix import image, time, display, app
+
+disp = display.Display()
+
+while not app.need_exit():
+    img = image.Image(320, 240, image.Format.FMT_RGB888)
+    img.draw_rect(0, 0, 100, 100, image.COLOR_RED, thickness=-1)
+
+    t = time.ticks_ms()
+    img_rgb = image.image2cv(img, ensure_bgr=False, copy=False)
+    img2 = image.cv2image(img_rgb, bgr=False, copy=False)
+    print("time:", time.ticks_ms() - t)
+    print(type(img_rgb), img_rgb.shape)
+    print(type(img2), img2)
+
+    disp.show(img2)
+
+
    +
  • img_rgb = image.image2cv(img, ensure_bgr=False, copy=False)img_rgb 会直接使用 img 的数据,不会产生内存拷贝,注意此时得到的img_rgbRGB 图,opencv的 API 都是认为图是 BGR 的,所以用opencv的 API 操作图像时要注意,如果你无法掌控请设置ensure_bgrTrue
  • +
  • img2 = image.cv2image(img_rgb, bgr=False, copy=False)中设置了copyFalse,即直接使用img_rgb的内存,不会新拷贝一份内存,所以速度更快了,但是需要小心,在 img2 使用结束前img_bgr不能被销毁,否则程序会崩溃。
  • +
  • 注意因为借用了内存,所以更改转换后的图像也会影响到转换前的图像。
  • +
+

加载一张图片

+ +
import cv2
+
+file_path = "/maixapp/share/icon/detector.png"
+img = cv2.imread(file_path)
+print(img)
+
+

因为cv2模块比较臃肿,import cv2可能会需要一点时间。

+

显示图像到屏幕

+

但是由于直接使用了官方的 OpenCV,没有对接显示,所以要显示到屏幕上需要转换成maix.image.Image对象后再用display来显示:

+ +
from maix import display, image, time
+import cv2
+
+disp = display.Display()
+
+file_path = "/maixapp/share/icon/detector.png"
+img = cv2.imread(file_path)
+
+img_show = image.cv2image(img)
+disp.show(img_show)
+
+while not app.need_exit():
+    time.sleep(1)
+
+

使用 OpenCV 函数

+

以边缘检测为例:

+

基于上面的代码,使用cv2.Canny函数即可:

+ +
from maix import image, display, app, time
+import cv2
+
+file_path = "/maixapp/share/icon/detector.png"
+img0 = cv2.imread(file_path)
+
+disp = display.Display()
+
+while not app.need_exit():
+    img = img0.copy()
+
+    # canny method
+    t = time.ticks_ms()
+    edged = cv2.Canny(img, 180, 60)
+    t2 = time.ticks_ms() - t
+
+    # show by maix.display
+    t = time.ticks_ms()
+    img_show = image.cv2image(edged)
+    print(f"edge time: {t2}ms, convert time: {time.ticks_ms() - t}ms")
+    disp.show(img_show)
+
+

使用摄像头

+

在 PC 上, 我们使用 OpenCVVideoCapture类来读取摄像头,对于 MaixCAM, OpenCV 没有适配,我们可以用maix.camera 模块来读取摄像头,然后给OpenCV使用。

+

通过image.image2cv函数将maix.image.Image对象转为numpy.ndarray对象给OpenCV使用:

+ +
from maix import image, display, app, time, camera
+import cv2
+
+disp = display.Display()
+cam = camera.Camera(320, 240, image.Format.FMT_BGR888)
+
+while not app.need_exit():
+    img = cam.read()
+
+    # convert maix.image.Image object to numpy.ndarray object
+    t = time.ticks_ms()
+    img = image.image2cv(img, ensure_bgr=False, copy=False)
+    print("time: ", time.ticks_ms() - t)
+
+    # canny method
+    edged = cv2.Canny(img, 180, 60)
+
+    # show by maix.display
+    img_show = image.cv2image(edged, bgr=True, copy=False)
+    disp.show(img_show)
+
+

读取 USB 摄像头

+

先在开发板设置里面USB设置中选择USB 模式HOST模式。如果没有屏幕,可以用examples/tools/maixcam_switch_usb_mode.py脚本进行设置。

+ +
from maix import image, display, app
+import cv2
+import sys
+
+cap = cv2.VideoCapture(0)
+cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640)
+cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 480)
+# cap.set(cv2.CAP_PROP_CONVERT_RGB, 0)
+
+disp = display.Display()
+
+if not cap.isOpened():
+    print("无法打开摄像头")
+    sys.exit(1)
+print("开始读取")
+while not app.need_exit():
+    ret, frame = cap.read()
+    if not ret:
+        print("无法读取帧")
+        break
+    img = image.cv2image(frame, bgr=True, copy=False)
+    disp.show(img)
+
diff --git a/maixpy/doc/zh/vision/qrcode.html b/maixpy/doc/zh/vision/qrcode.html (new file)

MaixCAM MaixPy 二维码识别

更新历史
日期 | 版本 | 作者 | 更新内容
2024-04-03 | 1.0.0 | lxowalle | 初版文档
+
+
+ +
+
+ +

阅读本文前,确保已经知晓如何开发MaixCAM,详情请阅读快速开始

+

简介

+

本文介绍如何使用MaixPy来识别二维码

+

使用 MaixPy 识别二维码

+

MaixPy的 maix.image.Image中提供了find_qrcodes方法,用来识别二维码。

+

如何识别二维码

+

一个简单的示例,实现识别二维码并画框

+ +
from maix import image, camera, display
+
+cam = camera.Camera(320, 240)
+disp = display.Display()
+
+while 1:
+    img = cam.read()
+    qrcodes = img.find_qrcodes()
+    for qr in qrcodes:
+        corners = qr.corners()
+        for i in range(4):
+            img.draw_line(corners[i][0], corners[i][1], corners[(i + 1) % 4][0], corners[(i + 1) % 4][1], image.COLOR_RED)
+        img.draw_string(qr.x(), qr.y() - 15, qr.payload(), image.COLOR_RED)
+    disp.show(img)
+
+

步骤:

+
    +
  1. 导入image、camera、display模块

    + +
    from maix import image, camera, display
    +
    +
  2. +
  3. 初始化摄像头和显示

    + +
    cam = camera.Camera(320, 240) # 初始化摄像头,输出分辨率320x240 RGB格式
    +disp = display.Display()
    +
    +
  4. +
  5. 从摄像头获取图片并显示

    + +
    while 1:
    +    img = cam.read()
    +    disp.show(img)
    +
    +
  6. +
  7. 调用find_qrcodes方法识别摄像头中的二维码

    + +
    qrcodes = img.find_qrcodes()
    +
    +
      +
    • img是通过cam.read()读取到的摄像头图像,当初始化的方式为cam = camera.Camera(320, 240)时,img对象是一张分辨率为320x240的RGB图。
    • +
    • img.find_qrcodes用来寻找二维码,并将查询结果保存到qrocdes,以供后续处理
    • +
    +
  8. +
  9. 处理识别二维码的结果并显示到屏幕上

    + +
    for qr in qrcodes:
    +    corners = qr.corners()
    +    for i in range(4):
    +        img.draw_line(corners[i][0], corners[i][1], corners[(i + 1) % 4][0], corners[(i + 1) % 4][1], image.COLOR_RED)
    +    img.draw_string(qr.x(), qr.y() - 15, qr.payload(), image.COLOR_RED)
    +
    +
      +
    • qrcodes是通过img.find_qrcodes()查询二维码的结果,如果找不到二维码则qrcodes内部为空
    • +
    • qr.corners()用来获取已扫描到的二维码的四个顶点坐标,img.draw_line()利用这四个顶点坐标画出二维码的形状
    • +
    • img.draw_string用来显示二维码的内容和位置等信息,其中qr.x()qr.y()用来获取二维码左上角坐标x和坐标y,qr.payload()用来获取二维码的内容
    • +
    +
  10. +
+

常用参数说明

+

列举常用参数说明,如果没有找到可以实现应用的参数,则需要考虑是否使用其他算法实现,或者基于目前算法的结果扩展所需的功能

+ + + + + + + + + + + + + + + +
参数说明示例
roi设置算法计算的矩形区域,roi=[x, y, w, h],x,y表示矩形区域左上角坐标,w,h表示矩形区域的宽度和高度,默认为整张图片计算坐标为(50,50),宽和高为100的区域
img.find_qrcodes(roi=[50, 50, 100, 100])
+
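下面给出一个结合 roi 的完整示例(示意代码,假设摄像头分辨率为 320x240,roi 的数值仅为举例,请按实际需求调整):
from maix import image, camera, display

cam = camera.Camera(320, 240)
disp = display.Display()

# 只在画面中央 100x100 的区域内识别二维码(数值仅为示例)
roi = [110, 70, 100, 100]

while 1:
    img = cam.read()
    qrcodes = img.find_qrcodes(roi=roi)
    for qr in qrcodes:
        corners = qr.corners()
        for i in range(4):
            img.draw_line(corners[i][0], corners[i][1], corners[(i + 1) % 4][0], corners[(i + 1) % 4][1], image.COLOR_RED)
        img.draw_string(qr.x(), qr.y() - 15, qr.payload(), image.COLOR_RED)
    # 画出 roi 区域,方便对准
    img.draw_rect(roi[0], roi[1], roi[2], roi[3], image.COLOR_WHITE)
    disp.show(img)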

本文介绍常用方法,更多 API 请看 API 文档的 image 部分。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/vision/segmentation.html b/maixpy/doc/zh/vision/segmentation.html new file mode 100644 index 00000000..1d1d65fe --- /dev/null +++ b/maixpy/doc/zh/vision/segmentation.html @@ -0,0 +1,418 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 图像语义分割 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 图像语义分割

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

简介

+

图像语义分割,就是识别图中特定的物体,并且将属于该物体的像素识别出来,比如下图识别到了人体和狗的身体部分,可以拿来做碰撞检测、汽车自动导航、面积测算等等。

+

+

MaixPy 使用图像语义分割

+

MaixPy 内置了 YOLOv8-seg 和 YOLO11-seg,用来进行目标检测和图像分割。

+

MaixPy 默认提供了基于 coco 数据集的 80 类物体检测与分割模型。

+
+

使用 YOLOv8 需要 MaixPy 版本 >= 4.4.0
+使用 YOLO11 需要 MaixPy 版本 >= 4.7.0

+
+

代码如下,也可以在 MaixPy examples 中找到。

+ +
from maix import camera, display, image, nn, app, time
+
+detector = nn.YOLOv8(model="/root/models/yolov8n_seg.mud", dual_buff = True)
+# detector = nn.YOLO11(model="/root/models/yolo11n_seg.mud", dual_buff = True)
+
+cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
+dis = display.Display()
+
+while not app.need_exit():
+    img = cam.read()
+    objs = detector.detect(img, conf_th = 0.5, iou_th = 0.45)
+    for obj in objs:
+        # img.draw_image(obj.x, obj.y, obj.seg_mask)
+        detector.draw_seg_mask(img, obj.x, obj.y, obj.seg_mask, threshold=127)
+        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color = image.COLOR_RED)
+        msg = f'{detector.labels[obj.class_id]}: {obj.score:.2f}'
+        img.draw_string(obj.x, obj.y, msg, color = image.COLOR_RED)
+    dis.show(img)
+
+
+

这里切换 YOLOv8 和 YOLO11 只需要修改上面代码中注释的部分即可。

+
+

更多分辨率模型

+

默认是 320x224 输入分辨率的模型, 更多分辨率请到 MaixHub 模型库 下载:

+ +
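下载后将模型文件放到设备上(比如 /root/models),再把代码中的模型路径替换成对应文件即可。下面是一个示意(文件名仅为假设,请以实际下载到的文件为准):
from maix import nn

# 假设已下载更高分辨率的分割模型到 /root/models,文件名仅为示例
detector = nn.YOLOv8(model="/root/models/yolov8n_seg_640x480.mud", dual_buff=True)
print("模型输入分辨率:", detector.input_width(), detector.input_height())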

dual_buff 双缓冲区加速

+

你可能注意到这里模型初始化使用了dual_buff(默认值就是 True),使能 dual_buff 参数可以加快运行效率,提高帧率,具体原理和使用注意点见 dual_buff 介绍

+

自定义自己的物体分割模型

+

上面提供的是 coco 数据集 80 分类的模型,如果不满足你的要求,你也可以自己训练特定的物体检测和分割模型,按照 离线训练YOLOv8/YOLO11 所述使用 YOLOv8/YOLO11 官方的分割模型训练方法进行训练,然后转换成 MaixCAM 支持的模型格式即可。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/vision/self_learn_classifier.html b/maixpy/doc/zh/vision/self_learn_classifier.html new file mode 100644 index 00000000..f92ed5e3 --- /dev/null +++ b/maixpy/doc/zh/vision/self_learn_classifier.html @@ -0,0 +1,435 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 自学习分类器 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 自学习分类器

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

MaixPy 自学习分类器介绍

+

一般情况下,要识别新的类别需要在电脑端重新采集数据集并训练,步骤很麻烦,难度较高。这里提供一种不需要电脑端训练、直接在设备端就能秒级学习新物体的方法,适合场景不太复杂的应用。

+

比如眼前有饮料瓶和手机,用设备分别给它们拍一张照片作为两个分类的依据,然后再采集几张它们各个角度的照片,提取特征并保存;识别时将当前图像的特征值分别与保存的特征值进行对比,与哪个分类的特征更接近,就认为属于哪个分类。

+

MaixPy 中使用自学习分类器

+

默认镜像自带了自学习分类 APP,可以直接尝试使用,以熟悉使用流程。

+

+

步骤:

+
    +
  • 点击+ Class 按钮, 采集 n 张分类(class)图,采集图时物体需要在屏幕的白色框中。
  • +
  • 点击+ Sample按钮,采集 m 张样本图,每个分类都采集一些,顺序无所谓,张数也比较随意,最好是在各个角度拍一点,不要差距过大。
  • +
  • 点击Learn按钮,启动学习,会自动根据采集的分类图和样本图进行分类学习,得到分类的特征。
  • +
  • 将屏幕中央对准物体即可识别并输出结果,可以看到屏幕显示了所属的分类,以及与该分类的相似距离,距离越小表示越相似。
  • +
  • 此 APP 学习后的特征值会存到/root/my_classes.bin,所以退出应用或者重启了仍然会自动加载上一次的。
  • +
+

简洁版本代码,完整版本请看例程里面的完整代码。

+ +
from maix import nn, image
+
+classifier = nn.SelfLearnClassifier(model="/root/models/mobilenetv2.mud", dual_buff = True)
+
+img1 = image.load("/root/1.jpg")
+img2 = image.load("/root/2.jpg")
+img3 = image.load("/root/3.jpg")
+sample_1 = image.load("/root/sample_1.jpg")
+sample_2 = image.load("/root/sample_2.jpg")
+sample_3 = image.load("/root/sample_3.jpg")
+sample_4 = image.load("/root/sample_4.jpg")
+sample_5 = image.load("/root/sample_5.jpg")
+sample_6 = image.load("/root/sample_6.jpg")
+
+
+classifier.add_class(img1)
+classifier.add_class(img2)
+classifier.add_class(img3)
+classifier.add_sample(sample_1)
+classifier.add_sample(sample_2)
+classifier.add_sample(sample_3)
+classifier.add_sample(sample_4)
+classifier.add_sample(sample_5)
+classifier.add_sample(sample_6)
+
+classifier.learn()
+
+img = image.load("/root/test.jpg")
+max_idx, max_score = classifier.classify(img)
+print(max_idx, max_score)
+
+

储存和加载学习到的特征值

+

使用 save 函数进行储存,会得到一个二进制文件,里面存了物体的特征值。
+再使用时用load函数进行加载即可。

+ +
classifier.save("/root/my_classes.bin")
+classifier.load("/root/my_classes.bin")
+
+

如果你给每一个分类命名了,比如存到了labels变量,也可以使用:

+ +
classifier.save("/root/my_classes.bin", labels = labels)
+labels = classifier.load("/root/my_classes.bin")
+
+
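下面是一个结合标签使用的示意(labels 的内容仅为举例,img 为前面示例中加载或采集到的图像,且假设 classifier 已经完成学习):
labels = ["bottle", "phone", "box"]          # 分类名,顺序和 add_class 添加分类的顺序一致
classifier.save("/root/my_classes.bin", labels=labels)

# 下次运行时直接加载即可恢复特征和标签
labels = classifier.load("/root/my_classes.bin")
idx, score = classifier.classify(img)
print(labels[idx], score)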

dual_buff 双缓冲区加速

+

你可能注意到这里模型初始化使用了dual_buff(默认值就是 True),使能 dual_buff 参数可以加快运行效率,提高帧率,具体原理和使用注意点见 dual_buff 介绍

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/vision/self_learn_detector.html b/maixpy/doc/zh/vision/self_learn_detector.html new file mode 100644 index 00000000..cdda17c4 --- /dev/null +++ b/maixpy/doc/zh/vision/self_learn_detector.html @@ -0,0 +1,396 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixCAM MaixPy 自学习检测跟踪器 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixCAM MaixPy 自学习检测跟踪器

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

MaixPy 自学习检测跟踪器

+

和自学习分类器类似,不需要训练,直接框选目标物体即可实现检测并且跟踪物体,在简单检测场景下十分好用。
+和自学习分类器不同的是因为是检测器,会有物体的坐标和大小。

+

+

MaixPy 中使用自学习检测跟踪器

+

在 MaixPy 目前提供了一种单目标学习检测跟踪算法,即开始框选目标物体,后面会一直跟踪这个物体。
+这里使用的算法是NanoTrack,有兴趣了解原理的可以自行学习。

+

可以烧录最新的系统镜像(>=2024.9.5_v4.5.0)后直接使用内置的自学习跟踪应用看效果。

+

使用maix.nn.NanoTrack类即可,初始化对象后,先调用init方法指定要检测的目标,然后调用track方法连续跟踪目标,以下为简化的代码:

+ +
from maix import nn
+
+model_path = "/root/models/nanotrack.mud"
+tracker = nn.NanoTrack(model_path)
+tracker.init(img, x, y, w, h)
+pos = tracker.track(img)
+
+
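下面是一个可以直接参考的完整示意代码(初始框的位置为假设值,实际使用时可以参考例程用触摸屏框选;这里假设 track() 返回的结果带有 x、y、w、h 字段,具体以 API 文档和例程为准):
from maix import nn, camera, display, image, app

tracker = nn.NanoTrack("/root/models/nanotrack.mud")

cam = camera.Camera(320, 240)
disp = display.Display()

# 假设要跟踪画面中央 100x100 的区域,实际应用中一般由用户框选
x, y, w, h = 110, 70, 100, 100
img = cam.read()
tracker.init(img, x, y, w, h)

while not app.need_exit():
    img = cam.read()
    pos = tracker.track(img)
    img.draw_rect(pos.x, pos.y, pos.w, pos.h, image.COLOR_RED)
    disp.show(img)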

注意这里使用了内置的模型,在系统/root/models下已经内置了,你也可以在MaixHub 模型库下载到模型。

+

具体详细代码请看MaixPy/examples/vision/ai_vision/nn_self_learn_tracker.py

+

其它自学习跟踪算法

+

目前实现了 NanoTrack 算法,在简单场景非常稳定可靠,而且帧率足够高,缺点就是物体出视野再回来需要回到上次消失的附近才能检测到,以及只能检测一个目标。

+

如果有更好的算法,可以自行参考已有的 NanoTrack 实现方式进行实现,也欢迎讨论或者提交代码PR。

+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/vision/touchscreen.html b/maixpy/doc/zh/vision/touchscreen.html new file mode 100644 index 00000000..d9b8022c --- /dev/null +++ b/maixpy/doc/zh/vision/touchscreen.html @@ -0,0 +1,464 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy / MaixCAM 触摸屏使用方法 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy / MaixCAM 触摸屏使用方法

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

简介

+

MaixCAM 自带了一个触摸屏,写应用时配合触摸屏可以实现很多有趣的应用,我们可以通过 API 读取到触摸屏的点按操作。

+

MaixPy 读取触摸

+

MaixPy 提供了一个简单的maix.touchscreen.TouchScreen 类来读取,举例:

+ +
from maix import touchscreen, app, time
+
+ts = touchscreen.TouchScreen()
+
+pressed_already = False
+last_x = 0
+last_y = 0
+last_pressed = False
+while not app.need_exit():
+    x, y, pressed = ts.read()
+    if x != last_x or y != last_y or pressed != last_pressed:
+        print(x, y, pressed)
+        last_x = x
+        last_y = y
+        last_pressed = pressed
+    if pressed:
+        pressed_already = True
+    else:
+        if pressed_already:
+            print(f"clicked, x: {x}, y: {y}")
+            pressed_already = False
+    time.sleep_ms(1)  # sleep some time to free some CPU usage
+
+

配合屏幕实现交互

+

配合屏幕可以做出一些用户交互的内容,更多可以看MaixPy/examples/vision/touchscreen 目录下例程。

+

如前面的文章介绍的,我们要往屏幕显示内容,一般是得到一个maix.image.Image对象,然后调用disp.show(img)来显示这张图像。
+实现一个按钮的最原始和简单的方法就是在这个图像上画一个按钮,然后判断用户触摸到这个区域就算是触发了按下事件,注意图像的大小要和屏幕的大小保持一致:

+ +
from maix import touchscreen, app, time, display, image
+
+ts = touchscreen.TouchScreen()
+disp = display.Display()
+
+img = image.Image(disp.width(), disp.height())
+
+# draw exit button
+exit_label = "< Exit"
+size = image.string_size(exit_label)
+exit_btn_pos = [0, 0, 8*2 + size.width(), 12 * 2 + size.height()]
+img.draw_string(8, 12, exit_label, image.COLOR_WHITE)
+img.draw_rect(exit_btn_pos[0], exit_btn_pos[1], exit_btn_pos[2], exit_btn_pos[3],  image.COLOR_WHITE, 2)
+
+def is_in_button(x, y, btn_pos):
+    return x > btn_pos[0] and x < btn_pos[0] + btn_pos[2] and y > btn_pos[1] and y < btn_pos[1] + btn_pos[3]
+
+while not app.need_exit():
+    x, y, pressed = ts.read()
+    if is_in_button(x, y, exit_btn_pos):
+        app.set_exit_flag(True)
+    img.draw_circle(x, y, 1, image.Color.from_rgb(255, 255, 255), 2)
+    disp.show(img)
+
+

屏幕和图像大小不一样时如何处理

+

上面的例子中 img 大小和屏幕大小一致。如果你的 img 和屏幕大小不一样怎么办?比如屏幕是 640x480,而代码里用 img = image.Image(240, 240) 创建了 240x240 的图像。disp.show(img) 的默认行为是 image.Fit.FIT_CONTAIN,即保持宽高比把图片放大到 480x480,两边填充黑色。如果你在 240x240 的图上画了按钮,比如坐标为 (0, 0, 60, 40),那么按钮也会随图像一起被放大,所以触摸判断就不能直接用 (0, 0, 60, 40),而需要用 ((640 - 480) / 2, 0, 480/240*60, 480/240*40),即 (80, 0, 120, 80)。

+

为了在缩放图像时,方便快速地计算源图像上的点或矩形框在缩放后的目标图像上对应的位置和大小,这里提供了 image.resize_map_pos 函数来完成这个计算。

+ +
from maix import touchscreen, app, time, display, image
+
+ts = touchscreen.TouchScreen()
+disp = display.Display()
+
+img = image.Image(240, 240)
+img.draw_rect(0, 0, img.width(), img.height(), image.COLOR_WHITE)
+
+# draw exit button
+exit_label = "< Exit"
+size = image.string_size(exit_label)
+exit_btn_pos = [0, 0, 8*2 + size.width(), 12 * 2 + size.height()]
+img.draw_string(8, 12, exit_label, image.COLOR_WHITE)
+img.draw_rect(exit_btn_pos[0], exit_btn_pos[1], exit_btn_pos[2], exit_btn_pos[3],  image.COLOR_WHITE, 2)
+# 图像按键坐标映射到屏幕上的坐标
+exit_btn_disp_pos = image.resize_map_pos(img.width(), img.height(), disp.width(), disp.height(), image.Fit.FIT_CONTAIN, exit_btn_pos[0], exit_btn_pos[1], exit_btn_pos[2], exit_btn_pos[3])
+
+def is_in_button(x, y, btn_pos):
+    return x > btn_pos[0] and x < btn_pos[0] + btn_pos[2] and y > btn_pos[1] and y < btn_pos[1] + btn_pos[3]
+
+while not app.need_exit():
+    x, y, pressed = ts.read()
+    if is_in_button(x, y, exit_btn_disp_pos):
+        app.set_exit_flag(True)
+    # 屏幕的坐标映射回图像上对应的坐标,然后在图像上画点
+    x, y = image.resize_map_pos_reverse(img.width(), img.height(), disp.width(), disp.height(), image.Fit.FIT_CONTAIN, x, y)
+    img.draw_circle(x, y, 1, image.Color.from_rgb(255, 255, 255), 2)
+    disp.show(img, fit=image.Fit.FIT_CONTAIN)
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/doc/zh/vision/yolov5.html b/maixpy/doc/zh/vision/yolov5.html new file mode 100644 index 00000000..079105f1 --- /dev/null +++ b/maixpy/doc/zh/vision/yolov5.html @@ -0,0 +1,514 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MaixPy MaixCAM 使用 YOLOv5 / YOLOv8 / YOLO11 模型进行目标检测 - MaixPy + + + + + + + + + + +
+ +
+ +
+
+
+
+ +

MaixPy MaixCAM 使用 YOLOv5 / YOLOv8 / YOLO11 模型进行目标检测

+ +
+
+
    + +
+
+
+
+ + +
+
+ + + +
+
+
+
+ + +
+
+ +
+
+ +

目标检测概念

+

目标检测是指在图像或视频中检测出目标的位置和类别,比如在一张图中检测出苹果、飞机等物体,并且标出物体的位置。

+

和分类不同的是多了一个位置信息,所以目标检测的结果一般是一个矩形框,框出物体的位置。

+

MaixPy 中使用目标检测

+

MaixPy 默认提供了 YOLOv5、YOLOv8、YOLO11 模型,可以直接使用:

+
+

YOLOv8 需要 MaixPy >= 4.3.0。
+YOLO11 需要 MaixPy >= 4.7.0。

+
+ +
from maix import camera, display, image, nn, app
+
+detector = nn.YOLOv5(model="/root/models/yolov5s.mud", dual_buff=True)
+# detector = nn.YOLOv8(model="/root/models/yolov8n.mud", dual_buff=True)
+# detector = nn.YOLO11(model="/root/models/yolo11n.mud", dual_buff=True)
+
+cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
+dis = display.Display()
+
+while not app.need_exit():
+    img = cam.read()
+    objs = detector.detect(img, conf_th = 0.5, iou_th = 0.45)
+    for obj in objs:
+        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color = image.COLOR_RED)
+        msg = f'{detector.labels[obj.class_id]}: {obj.score:.2f}'
+        img.draw_string(obj.x, obj.y, msg, color = image.COLOR_RED)
+    dis.show(img)
+
+

效果视频:

+
+
+

这里使用了摄像头拍摄图像,然后传给 detector进行检测,得出结果后,将结果(分类名称和位置)显示在屏幕上。

+

将这里的 YOLO11 替换为 YOLOv5 或 YOLOv8 即可实现 YOLO11/v5/v8 之间的切换,注意模型文件路径也要相应修改。

+

模型支持的 80 种物体列表请看本文附录。

+

更多 API 使用参考 maix.nn 模块的文档。

+

dual_buff 双缓冲区加速

+

你可能注意到这里模型初始化使用了dual_buff(默认值就是 True),使能 dual_buff 参数可以加快运行效率,提高帧率,具体原理和使用注意点见 dual_buff 介绍

+
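如果不想使用双缓冲(例如希望减少内存占用,具体取舍请以 dual_buff 介绍文档为准),初始化时关闭即可,以上面 YOLOv5 的初始化为例(示意):
detector = nn.YOLOv5(model="/root/models/yolov5s.mud", dual_buff=False)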

更多输入分辨率

+

默认的模型输入是320x224分辨率,因为这个分辨率比例和默认提供的屏幕分辨率接近,你也可以手动下载其它分辨率的模型替换:

+

YOLOv5: https://maixhub.com/model/zoo/365
+YOLOv8: https://maixhub.com/model/zoo/400
+YOLO11: https://maixhub.com/model/zoo/453

+
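下载后把模型文件拷贝到设备(比如 /root/models),再把代码里的模型路径换成对应文件即可,示意如下(文件名仅为假设,以实际下载到的文件为准):
from maix import camera, nn

detector = nn.YOLOv5(model="/root/models/yolov5s_640x640.mud")  # 文件名为假设值
cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())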

分辨率越大精度越高,但是运行耗时越长,根据你的应用场景选择合适的即可。

+

YOLOv5 和 YOLOv8 和 YOLO11 用哪个?

+

这里提供了 YOLOv5s、YOLOv8n、YOLO11n 三种模型:YOLOv5s 模型更大,YOLOv8n 和 YOLO11n 速度快一点;按照官方数据,精度上 YOLO11n > YOLOv8n > YOLOv5s,建议实际测试后根据自己的情况选择。

+

另外你也可以尝试YOLOv8s或者YOLO11s,帧率会低一些(比如 yolov8s_320x224 比 yolov8n_320x224 慢 10ms),准确率会比前两个都高,模型可以在上面提到的模型库下载到或者自己从YOLO官方仓库导出模型。

+

摄像头分辨率和模型分辨率不同可以吗

+

上面使用detector.detect(img)函数进行检测时,如果 img 的分辨率和模型分辨率不同,这个函数内部会自动调用img.resize将图像缩放成和模型输入分辨率相同的,resize默认使用image.Fit.FIT_CONTAIN 方法,即保持宽高比缩放,周围填充黑色的方式,检测到的坐标也会自动映射到原img的坐标上。

+
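例如下面这样直接使用更高分辨率的摄像头也可以(示意代码),detect 内部会自动缩放,返回的坐标仍然对应摄像头原图:
from maix import camera, display, image, nn, app

detector = nn.YOLOv5(model="/root/models/yolov5s.mud")
# 摄像头分辨率(640x480)和模型输入分辨率(320x224)不同也没关系
cam = camera.Camera(640, 480)
dis = display.Display()

while not app.need_exit():
    img = cam.read()
    objs = detector.detect(img, conf_th=0.5, iou_th=0.45)  # 内部自动缩放到模型输入大小
    for obj in objs:
        # 返回的坐标已经自动映射回 640x480 的原图
        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
    dis.show(img)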

MaixHub 在线训练自己的目标检测模型

+

默认提供的 80 分类检测模型,如果你需要检测特定的物体,请到MaixHub 学习并训练目标检测模型,创建项目时选择目标检测模型即可,参考MaixHub 在线训练文档

+

或者到MaixHub 模型库 找社区成员分享的模型。

+

离线训练自己的目标检测模型

+

强烈建议先使用 MaixHub 在线训练模型,离线训练难度比较大,不建议新手一上来就尝试。
+离线训练默认你已经具备一些相关基础知识,文中不会一一说明,遇到问题请多上网搜索学习。
+请看 离线训练YOLOv5模型 或者 离线训练 YOLOv8/YOLO11 模型

+

附录:80分类

+

COCO 数据集的 80 种物体分别为:

+ +
person
+bicycle
+car
+motorcycle
+airplane
+bus
+train
+truck
+boat
+traffic light
+fire hydrant
+stop sign
+parking meter
+bench
+bird
+cat
+dog
+horse
+sheep
+cow
+elephant
+bear
+zebra
+giraffe
+backpack
+umbrella
+handbag
+tie
+suitcase
+frisbee
+skis
+snowboard
+sports ball
+kite
+baseball bat
+baseball glove
+skateboard
+surfboard
+tennis racket
+bottle
+wine glass
+cup
+fork
+knife
+spoon
+bowl
+banana
+apple
+sandwich
+orange
+broccoli
+carrot
+hot dog
+pizza
+donut
+cake
+chair
+couch
+potted plant
+bed
+dining table
+toilet
+tv
+laptop
+mouse
+remote
+keyboard
+cell phone
+microwave
+oven
+toaster
+sink
+refrigerator
+book
+clock
+vase
+scissors
+teddy bear
+hair drier
+toothbrush
+
+ + +
+
+ +
+
+
+
+
+ +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/en/config.json b/maixpy/en/config.json new file mode 100644 index 00000000..04d94f89 --- /dev/null +++ b/maixpy/en/config.json @@ -0,0 +1,5 @@ +{ + "import": "config_en", + "class": "md_page", + "name": "MaixPy Pages" +} diff --git a/maixpy/en/index.html b/maixpy/en/index.html new file mode 100644 index 00000000..9bc788ae --- /dev/null +++ b/maixpy/en/index.html @@ -0,0 +1,915 @@ + + + + + + + + + + + + + + + + + + MaixPy - MaixPy + + + + + + + + + + +
+
+
+ +
+ +
+ + +
+
+
+ MaixPy Banner +
+

MaixPy (v4)

+

Fast implementation of AI vision and auditory applications

+
+ +
+

GitHub Repo starsApache 2.0PyPIPyPI - DownloadsGitHub downloads Build MaixCAMTrigger wiki

+
+
+

English | 中文

+
+
+ +
+
+

For MaixPy-v1 (K210) usage, refer to the MaixPy-v1 documentation. MaixPy v4 does not support the Maix-I / Maix-II series hardware; please upgrade to the MaixCAM platform.

+

If you like MaixPy, please give a star ⭐️ to the MaixPy open source project to encourage us to develop more features.

+
+
+

Simple API Design, AI Image Recognition with Just 10 Lines of Code

+
+
+ +
from maix import camera, display, image, nn
+
+classifier = nn.Classifier(model="/root/models/mobilenetv2.mud")
+cam = camera.Camera(classifier.input_width(), classifier.input_height(), classifier.input_format())
+dis = display.Display()
+
+while 1:
+    img = cam.read()
+    res = classifier.classify(img)
+    max_idx, max_prob = res[0]
+    msg = f"{max_prob:5.2f}: {classifier.labels[max_idx]}"
+    img.draw_string(10, 10, msg, image.COLOR_RED)
+    dis.show(img)
+
+
+ +
+ +
+

Hardware Peripheral Control, No Big Deal

+
+
+

Serial Communication:

+ +
from maix import uart
+
+devices = uart.list_devices()
+
+serial = uart.UART(devices[0], 115200)
+serial.write_str("hello world")
+print("received:", serial.read(timeout = 2000))
+
+
+
+
+

I2C Communication:

+ +
from maix import i2c
+
+devices = i2c.list_devices()
+dev1 = i2c.I2C(devices[0], freq=100000)
+slaves = dev1.scan()
+print("find slaves:", slaves)
+dev1.writeto(0x12, b'hello')
+print("received:", dev1.readfrom(0x12, 5))
+
+
+
+
+ + +
+

Convenient MaixVision Workstation

+

Simplify the development environment to make development easier and faster

+

+ +

Online AI Training Platform MaixHub

+

No need for AI expertise or expensive training equipment, train models with one click, deploy to MaixCAM with one click.

+
+ +
+ +

High-performance MaixCAM Hardware Platform

+

MaixCAM

+
+
    +
  • CPU: 1GHz RISC-V (Linux) + 700MHz RISC-V (RTOS) + 25~300MHz 8051 (Low Power)
  • +
  • NPU: 1Tops@INT8 NPU, supports BF16, YOLO11, YOLOv8, YOLOv5, etc.
  • +
  • Memory: 256MB DDR3
  • +
  • Communication: USB2.0/WiFi6/BLE5.4
  • +
  • Peripherals: IIC/PWM/SPI/UART/WDT/GPIO/ADC
  • +
  • Multimedia: 4M camera, 2.4" 640x480 HD capacitive touchscreen, H.264/H.265/MJPEG 2K hardware codec.
  • +
  • Purchase: Various hardware versions are available, see Store (contact the store for availability)
  • +
  • More: See MaixCAM and MaixCAM-Pro hardware documentation
  • +
+ +
+

More Features

+
+

Here are some feature highlights, find more in the Community

+

You can create new features using the rich API provided by MaixPy.

+
+
+
+
+ +

OpenCV + OpenMV

+

Supports OpenCV, compatible with OpenMV

+
+
+
+
+
+
+ +

C++ Version

+

MaixCDK C++ version SDK, same API as MaixPy, commercial-friendly

+
+
+
+
+
+
+ +

As a Serial Module

+

Control other MCUs via serial commands

+
+
+
+
+
+
+ +

APP Store

+

Share your apps with the community and install them with one click via the APP Store.

+
+
+
+
+
+
+ +

MaixPy-v1 Compatible API

+

Quickly migrate from MaixPy-v1 (K210) to MaixPy-v4

+
+
+
+
+
+
+ +

AI Classification

+

Identify object categories

+
+
+
+
+
+
+ +

AI Object Detection

+

Identify object categories and coordinates

+
+
+
+
+
+
+ +

AI Face Recognition

+

Recognize different facial features

+
+
+
+
+
+
+ +

AI Body Keypoint Detection

+

Posture recognition, body-sensing games

+
+
+
+
+
+
+ +

AI Self-learning Classifier

+

Instantly learn any object on the device without PC training

+
+
+
+
+
+
+ +

AI Self-learning Detector

+

Instantly learn any object on the device without PC training

+
+
+
+
+
+
+ +

AI Object Tracking

+

Track objects, count traffic

+
+
+
+
+
+
+ +

AI Surveillance, Streaming

+

Security monitoring, streaming, even live stream to platforms like Bilibili.com

+
+
+
+
+
+
+ +

Color Detection

+

Detect color spots

+
+
+
+
+
+
+ +

Line Following

+

Line-following car, logistics transportation

+
+
+
+
+
+
+ +

QR Code and AprilTag

+

Recognize QR codes and AprilTag

+
+
+
+
+
+
+ +

OCR

+

Recognize characters in images, digitize old items

+
+
+
+
+
+
+ +

Voice Recognition

+

Real-time continuous voice recognition

+
+
+
+
+
+
+ +

Desktop Monitor

+

Monitor PC information such as CPU, memory, and network.

+
+
+
+
+
+
+ +

Weather Station

+

Monitor weather information such as temperature and humidity.

+
+
+
+
+
+
+ +

Thermal Infrared Camera

+

Optional camera, for temperature image acquisition/measurement

+
+
+
+
+
+
+ +

HDMI Video Capture

+

Optional feature, capture images via HDMI for server monitoring (KVM), remote control, external AI, streaming devices, etc.

+
+
+
+
+
+
+ +

Large Screen Video Playback

+

Multiple screen sizes (2.3", 2.4", 5", 7", etc.), hardware decoding support

+
+
+
+
+
+
+ +

Microscope

+

Pair with 1/8" large sensor + microscope lens = digital microscope

+
+
+
+
+
+
+ +

High-Speed Recognition

+

Pair with a global shutter camera to accurately recognize high-speed moving objects

+
+
+
+
+
+
+ +

Time-lapse Photography

+

Pair with a 1/8" large sensor for all-day time-lapse photography

+
+
+
+
+
+
+ +

Astronomical Photography

+

Pair with a 1/8" large sensor + high-power lens for astronomical photography, supports long exposure mode and RAW image output

+
+
+
+
+
+
+ +

Gyroscope Stabilization

+

Onboard gyroscope (MaixCAM-Pro only), supports exporting gyroflow stabilization format for DIY photography

+
+
+
+
+
+
+

Who Uses MaixPy?

+
+
    +
  • AI Algorithm Engineers: Easily deploy your AI models to embedded devices.
  • +
+
+

Easy-to-use API to access NPU, open-source quantization tools, detailed documentation on AI models.

+
+
    +
  • STEM: Teachers who want to teach students AI and embedded development.
  • +
+
+

Easy-to-use API, PC tools, online AI training services, allowing you to focus on teaching AI instead of hardware and complex software development.

+
+
    +
  • Makers: Want to create cool projects without spending too much time on complex hardware and software.
  • +
+
+

Rich, simple Python and C++ APIs, quick to get started, complete your DIY projects in just minutes.

+
+
    +
  • Engineers: Want to build projects but hope to have prototypes and solutions quickly.
  • +
+
+

Rich Python and C++ APIs, efficient, stable, and easy to use, helping you quickly create prototypes and implement projects directly.

+
+
    +
  • Students: Want to learn AI and embedded development.
  • +
+
+

Offers rich documentation, tutorials, and open-source code, helping you find learning paths and gradually grow, from simple Python programming to vision, AI, audio, Linux, RTOS, etc.

+
+
    +
  • Companies: Want to develop AI vision products but don’t have the time or engineers to develop complex embedded systems.
  • +
+
+

Use MaixPy, or even graphical programming, to save on staffing and development time. For example, add an AI QA system to the production line, or an AI security monitor to the office.

+
+
    +
  • Competitors: People who want to win competitions.
  • +
+
+

MaixPy integrates many features, is easy to use, speeds up the output of your work, and helps you win competitions in a short time. Many students use MaixPy to win common competitions in China.

+
+
+

Performance Comparison

+

Compared to the limited NPU operator support and memory constraints of the previous two generations of Maix series products (K210, V831), MaixCAM offers significant improvements in performance and experience while maintaining an excellent price-performance ratio.

+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
FeatureMaix-I K210Maix-II v831MaixCAM
CPU400MHz RISC-V x2800MHz ARM71GHz RISC-V(Linux)
700MHz RISC-V(RTOS)
25~300MHz 8051(Low Power)
Memory6MB SRAM64MB DDR2256MB DDR3
NPU0.25Tops@INT8
official says 1T but...
0.25Tops@INT81Tops@INT8
Encoder1080p@30fps2K@30fps
Screen2.4" 320x2401.3" 240x2402.3" 552x368(MaixCAM)
2.4" 640x480(MaixCAM-Pro)
5" 1280x720
7" 1280x800
10“ 1280x800
Touchscreen2.3" 552x368
Camera0.3MP2MP5MP
WiFi2.4G2.4GWiFi6 2.4G/5G
USBUSB2.0USB2.0
Ethernet100M(optional)100M(optional)
SD Card InterfaceSPISDIOSDIO
BLEBLE5.4
Operating SystemRTOSTina LinuxLinux + RTOS
Programming LanguageC / C++ / MicroPythonC / C++ / Python3C / C++ / Python3
SoftwareMaixPyMaixPy3MaixCDK + MaixPy v4 + OpenCV + Numpy + ...
PC SoftwareMaixPy IDEMaixPy3 IDEMaixVision Workstation
Documentation⭐️⭐️⭐️⭐️⭐️⭐️⭐️🌟🌟🌟🌟🌟
Online AI Training⭐️⭐️⭐️⭐️⭐️⭐️⭐️🌟🌟🌟🌟🌟
Official Apps⭐️⭐️⭐️⭐️🌟🌟🌟🌟🌟
AI Classification (224x224)MobileNetv1 50fps
MobileNetv2 ✖
Resnet ✖
MobileNet ✖
Resnet18 20fps
Resnet50 ✖
MobileNetv2 130fps
Resnet18 62fps
Resnet50 28fps
AI Detection (NPU inference part)YOLOv2(224x224) 15fpsYOLOv2(224x224) 15fpsYOLOv5s(224x224) 100fps
YOLOv5s(320x256) 70fps
YOLOv5s(640x640) 15fps
YOLOv8n(640x640) 23fps
YOLO11n(224x224)175fps
YOLO11n(320x224)120fps
YOLO11n(320x320)95fps
YOLO11n(640x640)23fps
Ease of Use⭐️⭐️⭐️⭐️⭐️⭐️⭐️🌟🌟🌟🌟🌟
+
+
+

MaixCAM-Pro Upgrades compared to MaixCAM:

+
    +
  1. Optimized case design for better aesthetics and heat dissipation
  2. +
  3. Screen upgraded to 2.4 inches with 640x480 resolution
  4. +
  5. Dual-channel PWM servo interface, standard PMOD interface, 6-pin terminal interface
  6. +
  7. Onboard AXP2101 PMU, supports lithium battery charging and discharging, power metering function
  8. +
  9. Onboard six-axis IMU, qmi8658, supports video stabilization
  10. +
  11. Built-in 1W small speaker
  12. +
  13. Added 1/4 inch standard thread mount for easy installation
  14. +
  15. Added auxiliary lighting LED
  16. +
  17. Added RTC chip BM8653 and RTC battery
  18. +
+
+
+

Maix Ecosystem

+ +

Community

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
CommunityAddress
DocumentationMaixPy Documentation
App Storemaixhub.com/app
Project Sharingmaixhub.com/share
BilibiliSearch for MaixCAM or MaixPy on Bilibili
Discussionmaixhub.com/discussion
MaixPy issuesgithub.com/sipeed/MaixPy/issues
Telegramt.me/maixpy
QQ Group862340358
+
+

What Are the Differences Between MaixPy v1, MaixPy3, and MaixPy v4?

+
+
    +
  • MaixPy v1 uses the MicroPython programming language and only supports the Sipeed Maix-I K210 series hardware with limited third-party packages.
  • +
  • MaixPy3 is specifically designed for Sipeed Maix-II-Dock v831 and is not a long-term support version.
  • +
  • MaixPy v4 uses the Python programming language, allowing direct use of many packages.
    MaixPy v4 supports Sipeed's new hardware platform and is a long-term support version. Future hardware platforms will support this version.
    MaixPy v4 has a MaixPy-v1 compatible API, so you can quickly migrate your MaixPy v1 projects to MaixPy v4.
  • +
+

(MaixPy v4 does not support the K210 series. It is recommended to upgrade your hardware platform to use this version for more features, better performance,
and a more convenient programming experience.)

+
+
+ + + +
+
+ + +
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/en/no_translate.html b/maixpy/en/no_translate.html new file mode 100644 index 00000000..746aaf6f --- /dev/null +++ b/maixpy/en/no_translate.html @@ -0,0 +1,185 @@ + + + + + + + + + + + + + + + + + + no translation - MaixPy + + + + + + + + + + +
+
+
+ +
+
This page not translated yet
+
+ Please visit + +
+
+ +
+ + +
+
+ + +
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/favicon.ico b/maixpy/favicon.ico new file mode 100644 index 00000000..3d8e88b1 Binary files /dev/null and b/maixpy/favicon.ico differ diff --git a/maixpy/index.html b/maixpy/index.html new file mode 100644 index 00000000..3f27effd --- /dev/null +++ b/maixpy/index.html @@ -0,0 +1,915 @@ + + + + + + + + + + + + + + + + + + MaixPy - MaixPy + + + + + + + + + + +
+
+
+ +
+ +
+ + +
+
+
+ MaixPy Banner +
+

MaixPy (v4)

+

极速落地 AI 视觉、听觉应用

+
+ +
+

GitHub Repo starsApache 2.0PyPIPyPI - DownloadsGitHub downloads Build MaixCAMTrigger wiki

+
+
+

English | 中文

+
+
+ +
+
+

MaixPy-v1 (K210) 用户请查看 MaixPy-v1 文档。 MaixPy v4 不支持 Maix-I Maix-II 系列硬件,请更新到 MaixCAM 硬件平台。

+

喜欢 MaixPy 请给 MaixPy 开源项目 点个 Star ⭐️ 以鼓励我们开发更多功能。

+
+
+

简易的 API 设计, 10 行代码进行 AI 图像识别

+
+
+ +
from maix import camera, display, image, nn
+
+classifier = nn.Classifier(model="/root/models/mobilenetv2.mud")
+cam = camera.Camera(classifier.input_width(), classifier.input_height(), classifier.input_format())
+dis = display.Display()
+
+while 1:
+    img = cam.read()
+    res = classifier.classify(img)
+    max_idx, max_prob = res[0]
+    msg = f"{max_prob:5.2f}: {classifier.labels[max_idx]}"
+    img.draw_string(10, 10, msg, image.COLOR_RED)
+    dis.show(img)
+
+
+ +
+ +
+

硬件外设控制,不在话下

+
+
+

串口收发:

+ +
from maix import uart
+
+devices = uart.list_devices()
+
+serial = uart.UART(devices[0], 115200)
+serial.write_str("hello world")
+print("received:", serial.read(timeout = 2000))
+
+
+
+
+

I2C 收发:

+ +
from maix import i2c
+
+devices = i2c.list_devices()
+dev1 = i2c.I2C(devices[0], freq=100000)
+slaves = dev1.scan()
+print("find slaves:", slaves)
+dev1.writeto(0x12, b'hello')
+print("received:", dev1.readfrom(0x12, 5))
+
+
+
+
+ + +
+

便捷的 MaixVision 工作站

+

简化开发环境,让开发更简单快速

+

+ +

在线 AI 训练平台 MaixHub

+

无需 AI 基础和昂贵的训练设备,一键训练模型,一键部署到 MaixCAM

+
+ +
+ +

性能强劲 MaixCAM 硬件平台

+

MaixCAM

+
+
    +
  • CPU: 1GHz RISC-V(Linux) + 700MHz RISC-V(RTOS) + 25~300MHz 8051(Low Power)
  • +
  • NPU: 1Tops@INT8 NPU, 支持 BF16,支持 YOLO11、 YOLOv8、 YOLOv5 等。
  • +
  • 内存: 256MB DDR3。
  • +
  • 通信: USB2.0/WiFi6/BLE5.4。
  • +
  • 外设: IIC/PWM/SPI/UART/WDT/GPIO/ADC
  • +
  • 多媒体:4M 摄像头,2.4" 640x480 高清电容触摸屏,H.264/H.265/MJPEG 2K 硬件编解码。
  • +
  • 购买: 有各种版本硬件提供, 详情查看商城 (缺货时咨询店家)
  • +
  • 更多: 请看 MaixCAMMaixCAM-Pro 硬件文档
  • +
+ +
+

更多特性

+
+

以下为部分功能简介,更多到社区找到更多

+

基于 MaixPy 提供的丰富 API 可以创造出更多新功能

+
+
+
+
+ +

OpenCV + OpenMV

+

支持 OpenCV, 兼容 OpenMV

+
+
+
+
+
+
+ +

C++版本

+

MaixCDK C++版本的SDK,与MaixPy的API相同, 商业友好

+
+
+
+
+
+
+ +

作为串口模块

+

其它 MCU 通过串口命令控制

+
+
+
+
+
+
+ +

APP商店

+

将您的APP分享给社区,并一键安装APPs

+
+
+
+
+
+
+ +

提供 MaixPy-v1 兼容 API

+

快速从MaixPy-v1(K210)迁移到MaixPy-v4

+
+
+
+
+
+
+ +

AI 分类

+

识别物体类别

+
+
+
+
+
+
+ +

AI 对象检测

+

识别物体类别和坐标

+
+
+
+
+
+
+ +

AI 人脸识别

+

识别不同人脸特征

+
+
+
+
+
+
+ +

AI 人体关键点检测

+

姿态识别、体感游戏

+
+
+
+
+
+
+ +

AI 自学习分类器

+

无需在PC上训练,在设备上瞬间学习任意物体

+
+
+
+
+
+
+ +

AI 自学习检测器

+

无需在PC上训练,在设备上瞬间学习任意物体

+
+
+
+
+
+
+ +

AI 物体轨迹跟踪

+

轨迹追踪,流量统计

+
+
+
+
+
+
+ +

AI 监控,串流

+

安防监控,可串流,甚至可以向直播平台 比如 Bilibili.com 直播

+
+
+
+
+
+
+ +

查找颜色

+

查找颜色斑点

+
+
+
+
+
+
+ +

巡线

+

小车巡线,物流搬运

+
+
+
+
+
+
+ +

QR码和AprilTag

+

识别QR码和AprilTag

+
+
+
+
+
+
+ +

OCR

+

识别图片中的字符,旧物数字化

+
+
+
+
+
+
+ +

语音识别

+

实时连续语音识别

+
+
+
+
+
+
+ +

桌面监视器

+

监视PC信息,如CPU,内存,网络等。

+
+
+
+
+
+
+ +

天气站

+

监视天气信息,如温度,湿度等。

+
+
+
+
+
+
+ +

热红外摄像头

+

选配摄像头,温度图像获取/测量

+
+
+
+
+
+
+ +

HDMI 捕获视频

+

选配,通过 HDMI 捕获图像,作为服务器监控(KVM)和远程控制、外接 AI、推流设备等

+
+
+
+
+
+
+ +

大屏视频播放

+

多种规格屏幕选择(2.3" 2.4" 5" 7"等), 硬件解码支持

+
+
+
+
+
+
+ +

显微镜

+

搭配1/8"大底传感器 + 显微镜头 = 数字显微镜

+
+
+
+
+
+
+ +

高速识别

+

搭配全局快门摄像头,高速运动物体也能准确识别

+
+
+
+
+
+
+ +

延时摄影

+

搭配1/8"大底传感器实现全天候延时摄影

+
+
+
+
+
+
+ +

天文摄影

+

搭配1/8"大底传感器+高倍镜头实现天文摄影,支持长曝光模式和RAW 图输出

+
+
+
+
+
+
+ +

陀螺仪增稳

+

板载陀螺仪(仅MaixCAM-Pro) 支持导出 gyroflow 防抖格式,DIY 摄影

+
+
+
+
+
+
+

谁在用 MaixPy?

+
+
    +
  • AI 算法工程师: 轻松将你的 AI 模型部署到嵌入式设备。
  • +
+
+

易用的 API 访问 NPU,开源量化工具,详细的 AI 模型的文档。

+
+
    +
  • STEM:想要教学生 AI 和嵌入式开发的老师。
  • +
+
+

易用的 API,PC 工具,在线 AI 训练服务等,让你专注于教授 AI,而不是硬件和复杂的软件开发。

+
+
    +
  • 创客: 想要制作一些酷炫的项目,但不想把时间浪费在太复杂的硬件和软件上。
  • +
+
+

Python 和 C++ 丰富简易 API,快速上手,甚至可以在几分钟内完成你的 DIY 项目。

+
+
    +
  • 工程师: 想要做一些项目,但希望尽快有原型和落地。
  • +
+
+

Python 和 C++ 丰富 API,高效稳定易使用,助力快速出原型及直接落地项目。

+
+
    +
  • 学生: 想要学习 AI,嵌入式开发。
  • +
+
+

提供丰富文档和教程和开源代码,帮助你找到学习路线,并逐步成长。从简单的 Python 编程到视觉,AI,音频,Linux,RTOS等。

+
+
    +
  • 企业: 想要开发 AI 视觉产品,但没有时间或工程师来开发复杂的嵌入式系统。
  • +
+
+

使用 MaixPy 甚至图形化编程,用更少的人力和时间完成开发。例如,为生产线添加 AI 质检(QA)系统,或为办公室添加一个 AI 安防监控。

+
+
    +
  • 竞赛者: 想要在比赛中获奖的人们。
  • +
+
+

MaixPy 集成了许多功能,易于使用,能加快作品产出速度,助力在有限时间内赢得比赛;国内常见比赛中已有很多同学使用 MaixPy 获奖。

+
+
+

性能对比

+

相比上两代 Maix 系列产品(K210, V831)有限的 NPU 算子支持和内存限制,MaixCAM 在保持超高性价比的同时,性能和体验有了很大的提升。

+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
特征Maix-I K210Maix-II v831MaixCAM
CPU400MHz RISC-V x2800MHz ARM71GHz RISC-V(Linux)
700MHz RISC-V(RTOS)
25~300MHz 8051(Low Power)
内存6MB SRAM64MB DDR2256MB DDR3
NPU0.25Tops@INT8
official says 1T but...
0.25Tops@INT81Tops@INT8
Encoder1080p@30fps2K@30fps
屏幕2.4" 320x2401.3" 240x2402.3" 552x368(MaixCAM)
2.4" 640x480(MaixCAM-Pro)
5" 1280x720
7" 1280x800
10“ 1280x800
触摸屏2.3" 552x368
摄像头30W200W500W
WiFi2.4G2.4GWiFi6 2.4G/5G
USBUSB2.0USB2.0
以太网100M(选配)100M(选配)
SD 卡接口SPISDIOSDIO
BLEBLE5.4
操作系统RTOSTina LinuxLinux + RTOS
编程语言C / C++ / MicroPythonC / C++ / Python3C / C++ / Python3
SoftwareMaixPyMaixPy3MaixCDK + MaixPy v4 + opencv + numpy + ...
PC 软件MaixPy IDEMaixPy3 IDEMaixVision Workstation
文档⭐️⭐️⭐️⭐️⭐️⭐️⭐️🌟🌟🌟🌟🌟
在线 AI 训练⭐️⭐️⭐️⭐️⭐️⭐️⭐️🌟🌟🌟🌟🌟
官方应用⭐️⭐️⭐️⭐️🌟🌟🌟🌟🌟
AI 分类(224x224)MobileNetv1 50fps
MobileNetv2 ✖
Resnet ✖
MobileNet ✖
Resnet18 20fps
Resnet50 ✖
MobileNetv2 130fps
Resnet18 62fps
Resnet50 28fps
AI 检测(NPU推理部分)YOLOv2(224x224) 15fpsYOLOv2(224x224) 15fpsYOLOv5s(224x224) 100fps
YOLOv5s(320x256) 70fps
YOLOv5s(640x640) 15fps
YOLOv8n(640x640) 23fps
YOLO11n(224x224)175fps
YOLO11n(320x224)120fps
YOLO11n(320x320)95fps
YOLO11n(640x640)23fps
易用性⭐️⭐️⭐️⭐️⭐️⭐️⭐️🌟🌟🌟🌟🌟
+
+
+

MaixCAM-Pro 相比 MaixCAM 的升级点:

+
    +
  1. 优化外壳设计,更美观,散热更好
  2. +
  3. 屏幕升级到2.4寸 640x480分辨率
  4. +
  5. 板载双路PWM舵机接口,标准PMOD接口,6pin端子接口
  6. +
  7. 板载AXP2101 PMU,支持锂电池充放电,电量计功能
  8. +
  9. 板载六轴IMU,qmi8658,可支持视频防抖
  10. +
  11. 内置1W小喇叭
  12. +
  13. 增加1/4英寸标准螺纹口,便于安装
  14. +
  15. 增加辅助照明LED
  16. +
  17. 增加RTC芯片 BM8653 和 RTC电池
  18. +
+
+
+

Maix 生态

+ +

社区

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
社区地址
文档MaixPy 文档
应用商店maixhub.com/app
项目分享maixhub.com/share
BilibiliB站搜索 MaixCAM 或者 MaixPy
讨论maixhub.com/discussion
MaixPy issuesgithub.com/sipeed/MaixPy/issues
Telegramt.me/maixpy
QQ 群862340358
+
+

MaixPy v1, MaixPy3 and MaixPy v4 有什么区别?

+
+
    +
  • MaixPy v1 使用 MicroPython 编程语言,仅支持 Sipeed Maix-I K210 系列硬件,有限的第三方包。
  • +
  • MaixPy3 专为 Sipeed Maix-II-Dock v831 设计,不是长期支持版本。
  • +
  • MaixPy v4 使用 Python 编程语言,因此我们可以直接使用许多包。
    MaixPy v4 支持 Sipeed 的新硬件平台,这是一个长期支持版本,未来的硬件平台将支持这个版本。
    MaixPy v4 有一个 MaixPy-v1 兼容的 API,所以你可以快速将你的 MaixPy v1 项目迁移到 MaixPy v4。
  • +
+

(MaixPy v4 不支持 K210 系列,建议升级硬件平台以使用此版本,以获得更多功能和更好的性能和更方便的编程体验。)

+
+
+ + + +
+
+ + +
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/maixpy/robots.txt b/maixpy/robots.txt new file mode 100644 index 00000000..fbd77f3f --- /dev/null +++ b/maixpy/robots.txt @@ -0,0 +1,2 @@ +User-agent: * +Sitemap: https://wiki.sipeed.com/sitemap.xml diff --git a/maixpy/sitemap.xml b/maixpy/sitemap.xml new file mode 100644 index 00000000..983bc1d8 --- /dev/null +++ b/maixpy/sitemap.xml @@ -0,0 +1,1281 @@ + + + + https://wiki.sipeed.com/maixpy/api/maix/peripheral/pinmap.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/ext_dev/bm8563.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/ext_dev/qmi8658.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/ext_dev/tmc2209.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/ext_dev/imu.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/nn/F.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/camera.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/index.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/rtsp.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/peripheral.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/tracker.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/util.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/tensor.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/touchscreen.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/example.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/time.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/sys.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/network/wifi.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/rtmp.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/video.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/nn.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/http.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/peripheral/pwm.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/peripheral/wdt.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/peripheral/uart.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/peripheral/adc.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/peripheral/hid.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/peripheral/gpio.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/peripheral/spi.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/peripheral/timer.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/peripheral/i2c.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/peripheral/key.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/audio.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/err.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/fs.html + 2024-10-24 + 
weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/network.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/thread.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/i18n.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/image.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/display.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/protocol.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/app.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/ext_dev.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/api/maix/comm.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/basic/maixvision.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/basic/view_src_code.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/basic/python.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/audio/recognize.html + 2024-10-08 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/audio/record.html + 2024-05-20 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/audio/synthesis.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/audio/digit.html + 2024-10-08 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/audio/keyword.html + 2024-10-08 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/audio/play.html + 2024-05-20 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/audio/ai_classify.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/peripheral/pwm.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/peripheral/wdt.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/peripheral/uart.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/peripheral/adc.html + 2024-06-11 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/peripheral/hid.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/peripheral/gpio.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/peripheral/spi.html + 2024-06-11 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/peripheral/i2c.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/peripheral/pinmap.html + 2024-06-11 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/pro/compile_os.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/ai_model_converter/maixcam.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/modules/rtc.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/modules/acc.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/modules/bm8653.html + 2024-08-27 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/modules/qmi8658.html + 2024-08-27 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/modules/tmc2209.html + 2024-08-21 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/modules/thermal_cam.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/modules/temp_humi.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/modules/tof.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/source_code/build.html + 2024-10-24 + 
weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/source_code/add_c_module.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/source_code/contribute.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/source_code/faq.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/source_code/maixcdk.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/README_no_screen.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/projects/line_tracking_robot.html + 2024-05-09 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/projects/index.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/projects/face_tracking.html + 2024-06-11 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/apriltag.html + 2024-04-03 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/maixhub_train.html + 2024-04-03 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/face_recognition.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/gui/i18n.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/index.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/network/socket.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/network/websocket.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/network/flask.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/network/network_settings.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/network/mqtt.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/network/http.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/video/record.html + 2024-05-20 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/video/rtsp_streaming.html + 2024-05-20 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/video/play.html + 2024-08-19 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/video/rtmp_streaming.html + 2024-05-20 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/video/jpeg_streaming.html + 2024-05-20 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/faq.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/basic/app_usage.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/basic/python_pkgs.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/basic/app.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/basic/os.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/basic/linux_basic.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/basic/maixpy_upgrade.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/basic/auto_start.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/yolov5.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/ai.html + 2024-04-03 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/display.html + 2024-03-31 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/ocr.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/face_detection.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/qrcode.html + 2024-04-03 + 
weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/segmentation.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/image_ops.html + 2024-07-08 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/touchscreen.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/customize_model_yolov5.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/line_tracking.html + 2024-05-09 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/dual_buff.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/find_blobs.html + 2024-04-03 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/self_learn_detector.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/object_track.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/customize_model_yolov8.html + 2024-10-10 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/classify.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/body_key_points.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/custmize_model.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/opencv.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/self_learn_classifier.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/zh/vision/camera.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/ai_model_converter/maixcam.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/modules/rtc.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/modules/acc.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/modules/bm8653.html + 2024-08-27 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/modules/qmi8658.html + 2024-08-27 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/modules/tmc2209.html + 2024-08-21 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/modules/thermal_cam.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/modules/temp_humi.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/modules/tof.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/source_code/build.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/source_code/add_c_module.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/source_code/contribute.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/source_code/faq.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/source_code/maixcdk.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/README_no_screen.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/projects/line_tracking_robot.html + 2024-05-09 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/projects/index.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/projects/face_tracking.html + 2024-06-11 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/apriltag.html + 2024-04-03 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/maixhub_train.html + 2024-04-03 + weekly + 1.0 + + + 
https://wiki.sipeed.com/maixpy/doc/en/vision/face_recognition.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/basic/maixvision.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/basic/view_src_code.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/basic/python.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/audio/recognize.html + 2024-10-08 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/audio/record.html + 2024-05-20 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/audio/synthesis.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/audio/digit.html + 2024-10-08 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/audio/keyword.html + 2024-10-08 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/audio/play.html + 2024-05-20 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/audio/ai_classify.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/peripheral/pwm.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/peripheral/wdt.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/peripheral/uart.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/peripheral/adc.html + 2024-06-11 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/peripheral/hid.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/peripheral/gpio.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/peripheral/spi.html + 2024-06-11 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/peripheral/i2c.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/peripheral/pinmap.html + 2024-06-11 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/pro/compile_os.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/gui/i18n.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/index.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/network/socket.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/network/websocket.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/network/flask.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/network/network_settings.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/network/mqtt.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/network/http.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/video/record.html + 2024-05-20 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/video/rtsp_streaming.html + 2024-05-20 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/video/play.html + 2024-08-19 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/video/rtmp_streaming.html + 2024-05-31 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/video/jpeg_streaming.html + 2024-05-20 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/faq.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/basic/app_usage.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/basic/python_pkgs.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/basic/app.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/basic/os.html 
+ 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/basic/linux_basic.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/basic/maixpy_upgrade.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/basic/auto_start.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/yolov5.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/ai.html + 2024-04-03 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/display.html + 2024-03-31 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/ocr.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/face_detection.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/qrcode.html + 2024-04-03 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/segmentation.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/image_ops.html + 2024-04-03 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/touchscreen.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/customize_model_yolov5.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/line_tracking.html + 2024-05-09 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/dual_buff.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/find_blobs.html + 2024-04-03 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/self_learn_detector.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/object_track.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/customize_model_yolov8.html + 2024-10-10 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/classify.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/body_key_points.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/custmize_model.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/opencv.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/self_learn_classifier.html + 2024-10-24 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/vision/camera.html + 2024-04-03 + weekly + 1.0 + + + https://wiki.sipeed.com/maixpy/doc/en/no_translate.html + 2024-10-24 + weekly + 1.0 + + diff --git a/maixpy/static/css/custom.css b/maixpy/static/css/custom.css new file mode 100644 index 00000000..db773d3b --- /dev/null +++ b/maixpy/static/css/custom.css @@ -0,0 +1,15 @@ + + +#home_page h1 { + color: #c33d45; +} +.dark #home_page h1{ + color: white; +} + +@media screen and (max-width: 900px) { + #home_page h1 { + color: #eb4848; + } +} + diff --git a/maixpy/static/css/search/style.css b/maixpy/static/css/search/style.css new file mode 100644 index 00000000..532536a9 --- /dev/null +++ b/maixpy/static/css/search/style.css @@ -0,0 +1,330 @@ +/** + teedoc search plugin css + @author neucrack + @copyright (c) neucrack CZD666666@gmail.com with MIT License + @changes 2021.2.1 add basic attrributes + */ +.blur { + -webkit-filter: blur(9px); + filter: blur(9px); +} +.pointer { + cursor: pointer; +} +.dark #search { + background-color: #2d2d2d; +} +#search { + border-radius: 2em; + background-color: #f1f1f1; + display: flex; + flex-direction: row; + justify-content: center; + align-items: center; + 
transition: 0.4s; +} + +#search .icon { + transition: transform 0.4s linear; + background: url("/maixpy/static/image/search/search.svg"); + background-size: contain; + background-repeat: no-repeat; + background-position: center; + align-self: center; + min-height: 1.8rem; + min-width: 1.8rem; + transition: 0.2s; +} +#search .placeholder { + padding: 0 1em; + color: #a5a5a5; +} +#search_hints { + display: none; +} + +#search_wrapper { + display: none; + position: fixed; + top: 0; + bottom: 0; + left: 0; + right: 0; + background-color: rgba(0, 0, 0, 0.73); + z-index: 100; +} +.dark #search_wrapper { + background-color: transparent; +} +#search_wrapper input:focus{ + outline: none; + border: 1px solid #58b195; +} +#search_wrapper .close { + background: url("/maixpy/static/image/search/close.svg"); + background-size: contain; + background-repeat: no-repeat; + background-position: center; + border-radius: 0.5em; + min-height: 2.5rem; + min-width: 2.5rem; + z-index: 100; + position: fixed; + top: 3em; + right: 3em; + cursor: pointer; +} +#search_title { + margin: 1em; +} +#search_wrapper #search_title > div { + display: flex; + flex-direction: row; + justify-content: center; +} +#search_wrapper input { + height: 60px; + width: 60%; + border: none; + border-radius: 0.5em; + margin: 0; + text-align: center; + color: #222222; + font-size: 1.2em; + display: inline-block; + box-shadow: 0 0 12px 0 #e8e8e8; +} +.dark #search_wrapper input { + box-shadow: 0 0 12px 0 #000000; + background-color: black; + color: white; +} +#search_wrapper > div { + display: flex; + height: 100%; +} +#search_wrapper #search_content { + display: flex; + flex-direction: column; + width: 75%; + height: 100%; + background-color: white; + margin: auto; + padding: 0; + border-radius: 0.5em; +} +.dark #search_wrapper #search_content { + background-color: #3c3c3c; +} +#search_result { + height: 100%; + display: flex; + flex-direction: row; + overflow: auto; +} +#search_result h1 { + font-size: 1.2em; +} +#search_result_name { + overflow-y: auto; + min-width: max-content; +} +#search_result_content { + overflow-y: auto; + flex-grow: 1; +} +#search_result ul { + padding-left: 0; + list-style: none; +} +#search_result li { + box-shadow: 0 0 10px #e0e0e0; + list-style: none; + padding: 1em; + margin: 1em; + border-radius: 0.5em; + transition: 0.4s; + background-color: white; +} +.dark #search_result li { + background-color: #2d2d2d; + box-shadow: 0 0 2px #000000; +} +#search_result #search_result_name li { + margin: 0.5em 1em 0.5em 0; + background: #4caf7d; + color: white; + border-radius: 0; +} +.dark #search_result #search_result_name li { + background: #1b4c33; +} +#search_result #search_result_name li:hover { + margin-right: 0; +} +#search_result li:hover { + box-shadow: 0px 5px 14px #868686; +} +.dark #search_result li:hover{ + box-shadow: 0px 5px 14px #1d1d1d; +} + +#search_result code { + background-color: #4caf7d; + color: white; + border-radius: 0.2em; + padding: 0.1em; +} +#search_result .loading_hint { + color: red; +} +#search_curr_result { + margin-top: 0; + padding-bottom: 3em; + border: 1px solid #4caf7d; + border-radius: 5px; +} +#search_others_result { + padding-bottom: 3em; + border: 1px solid #bdbdbd; + border-radius: 5px; +} +.dark #search_curr_result { + border: 1px solid #1b4c33; +} +.dark #search_others_result { + border: 1px solid #696969; +} +#search_curr_result:first-child, +#search_others_result:first-child { + border-radius: 5px; +} +#search_result .hint { + height: 2em; + color: white; + font-size: 1.5em; 
+ display: flex; + justify-content: center; + flex-direction: column; + text-align: center; + border-top: none; + box-shadow: 0 6px 7px rgba(76, 175, 125, 0.38); + background: #4caf7d; +} +.dark #search_result .hint { + background: #1b4c33; +} +#search_curr_result > .hint { + background-color: #4caf7d; +} +#search_curr_result .searching { + background-color: #ff9800; +} +.search_highlight { + background-color: #FFEB3B; + border-radius: 0.2em; + padding: 0.1em; + +} +.dark .search_highlight{ + color: #1b1b1b; +} +.selected_highlight { + background-color: #ff9823; +} + +#search_ctrl_btn { + position: fixed; + top: 2em; + right: 1em; + display: flex; + flex-direction: row; + z-index: 999; + user-select: none; +} +#search_ctrl_btn > div { + border-radius: 0.2em; + min-width: 5em; + min-height: 2.5em; + background-color: #fae94e; + margin: 0.2em; + display: flex; + flex-direction: row; + align-items: center; + justify-content: center; + padding: 0.2em; + cursor: pointer; + box-shadow: 0 0 12px 0 rgb(0, 0, 0, 0.06); + transition: 0.4s; + color: #8b7000; +} +#search_ctrl_btn > div:hover { + box-shadow: 0 0 12px 0 rgb(0, 0, 0, 0.2); +} +#search_ctrl_btn > .previous .icon { + background-image: url("/static/image/search/up.svg"); + -ms-transform: rotate(270deg); + -moz-transform: rotate(270deg); + -webkit-transform: rotate(270deg); + transform: rotate(270deg); + background-size: 2em; + background-repeat: no-repeat; + background-position: center; + min-height: 1.5em; + min-width: 1.5em; + height: 1.5em; + width: 1.5em; +} +#search_ctrl_btn > .next .icon { + background-image: url("/static/image/search/up.svg"); + -ms-transform: rotate(90deg); + -moz-transform: rotate(90deg); + -webkit-transform: rotate(90deg); + transform: rotate(90deg); + background-size: 2em; + background-repeat: no-repeat; + background-position: center; + min-height: 1.5em; + min-width: 1.5em; + height: 1.5em; + width: 1.5em; +} + +#remove_search > .icon { + background-image: url("/static/image/search/cancel.svg"); + background-size: 2em; + background-repeat: no-repeat; + background-position: center; + min-height: 1.5em; + min-width: 1.5em; + height: 1.5em; + width: 1.5em; +} + + +@media screen and (max-width: 900px) { + #search_wrapper #search_content { + width: 100%; + height: 100%; + border-radius: 0; + } + #search_wrapper input { + font-size: 0.8em; + } + #search_wrapper .close { + top: 1.5em; + right: 1em; + } + #search_result li { + margin: 0; + } + #search_result #search_result_name li { + font-size: 0.8em; + } + #search_curr_result > .hint { + font-size: 1.2em; + } + #search_result .hint { + font-size: 1.2em; + } +} diff --git a/maixpy/static/css/tailwind.css b/maixpy/static/css/tailwind.css new file mode 100644 index 00000000..d705193e --- /dev/null +++ b/maixpy/static/css/tailwind.css @@ -0,0 +1,63 @@ +(()=>{var Sb=Object.create;var li=Object.defineProperty;var Cb=Object.getOwnPropertyDescriptor;var Ab=Object.getOwnPropertyNames;var _b=Object.getPrototypeOf,Eb=Object.prototype.hasOwnProperty;var uu=i=>li(i,"__esModule",{value:!0});var fu=i=>{if(typeof require!="undefined")return require(i);throw new Error('Dynamic require of "'+i+'" is not supported')};var C=(i,e)=>()=>(i&&(e=i(i=0)),e);var v=(i,e)=>()=>(e||i((e={exports:{}}).exports,e),e.exports),Ae=(i,e)=>{uu(i);for(var t in e)li(i,t,{get:e[t],enumerable:!0})},Ob=(i,e,t)=>{if(e&&typeof e=="object"||typeof e=="function")for(let r of Ab(e))!Eb.call(i,r)&&r!=="default"&&li(i,r,{get:()=>e[r],enumerable:!(t=Cb(e,r))||t.enumerable});return 
i},K=i=>Ob(uu(li(i!=null?Sb(_b(i)):{},"default",i&&i.__esModule&&"default"in i?{get:()=>i.default,enumerable:!0}:{value:i,enumerable:!0})),i);var m,l=C(()=>{m={platform:"",env:{},versions:{node:"14.17.6"}}});var Tb,te,ze=C(()=>{l();Tb=0,te={readFileSync:i=>self[i]||"",statSync:()=>({mtimeMs:Tb++}),promises:{readFile:i=>Promise.resolve(self[i]||"")}}});var Xn=v((X5,pu)=>{l();"use strict";var cu=class{constructor(e={}){if(!(e.maxSize&&e.maxSize>0))throw new TypeError("`maxSize` must be a number greater than 0");if(typeof e.maxAge=="number"&&e.maxAge===0)throw new TypeError("`maxAge` must be a number greater than 0");this.maxSize=e.maxSize,this.maxAge=e.maxAge||1/0,this.onEviction=e.onEviction,this.cache=new Map,this.oldCache=new Map,this._size=0}_emitEvictions(e){if(typeof this.onEviction=="function")for(let[t,r]of e)this.onEviction(t,r.value)}_deleteIfExpired(e,t){return typeof t.expiry=="number"&&t.expiry<=Date.now()?(typeof this.onEviction=="function"&&this.onEviction(e,t.value),this.delete(e)):!1}_getOrDeleteIfExpired(e,t){if(this._deleteIfExpired(e,t)===!1)return t.value}_getItemValue(e,t){return t.expiry?this._getOrDeleteIfExpired(e,t):t.value}_peek(e,t){let r=t.get(e);return this._getItemValue(e,r)}_set(e,t){this.cache.set(e,t),this._size++,this._size>=this.maxSize&&(this._size=0,this._emitEvictions(this.oldCache),this.oldCache=this.cache,this.cache=new Map)}_moveToRecent(e,t){this.oldCache.delete(e),this._set(e,t)}*_entriesAscending(){for(let e of this.oldCache){let[t,r]=e;this.cache.has(t)||this._deleteIfExpired(t,r)===!1&&(yield e)}for(let e of this.cache){let[t,r]=e;this._deleteIfExpired(t,r)===!1&&(yield e)}}get(e){if(this.cache.has(e)){let t=this.cache.get(e);return this._getItemValue(e,t)}if(this.oldCache.has(e)){let t=this.oldCache.get(e);if(this._deleteIfExpired(e,t)===!1)return this._moveToRecent(e,t),t.value}}set(e,t,{maxAge:r=this.maxAge===1/0?void 0:Date.now()+this.maxAge}={}){this.cache.has(e)?this.cache.set(e,{value:t,maxAge:r}):this._set(e,{value:t,expiry:r})}has(e){return this.cache.has(e)?!this._deleteIfExpired(e,this.cache.get(e)):this.oldCache.has(e)?!this._deleteIfExpired(e,this.oldCache.get(e)):!1}peek(e){if(this.cache.has(e))return this._peek(e,this.cache);if(this.oldCache.has(e))return this._peek(e,this.oldCache)}delete(e){let t=this.cache.delete(e);return t&&this._size--,this.oldCache.delete(e)||t}clear(){this.cache.clear(),this.oldCache.clear(),this._size=0}resize(e){if(!(e&&e>0))throw new TypeError("`maxSize` must be a number greater than 0");let t=[...this._entriesAscending()],r=t.length-e;r<0?(this.cache=new Map(t),this.oldCache=new Map,this._size=t.length):(r>0&&this._emitEvictions(t.slice(0,r)),this.oldCache=new Map(t.slice(r)),this.cache=new Map,this._size=0),this.maxSize=e}*keys(){for(let[e]of this)yield e}*values(){for(let[,e]of this)yield e}*[Symbol.iterator](){for(let e of this.cache){let[t,r]=e;this._deleteIfExpired(t,r)===!1&&(yield[t,r.value])}for(let e of this.oldCache){let[t,r]=e;this.cache.has(t)||this._deleteIfExpired(t,r)===!1&&(yield[t,r.value])}}*entriesDescending(){let e=[...this.cache];for(let t=e.length-1;t>=0;--t){let r=e[t],[n,a]=r;this._deleteIfExpired(n,a)===!1&&(yield[n,a.value])}e=[...this.oldCache];for(let t=e.length-1;t>=0;--t){let r=e[t],[n,a]=r;this.cache.has(n)||this._deleteIfExpired(n,a)===!1&&(yield[n,a.value])}}*entriesAscending(){for(let[e,t]of this._entriesAscending())yield[e,t.value]}get size(){if(!this._size)return this.oldCache.size;let e=0;for(let t of this.oldCache.keys())this.cache.has(t)||e++;return 
Math.min(this._size+e,this.maxSize)}};pu.exports=cu});var du,hu=C(()=>{l();du=i=>i&&i._hash});function ui(i){return du(i,{ignoreUnknown:!0})}var mu=C(()=>{l();hu()});function Xe(i){if(i=`${i}`,i==="0")return"0";if(/^[+-]?(\d+|\d*\.\d+)(e[+-]?\d+)?(%|\w+)?$/.test(i))return i.replace(/^[+-]?/,t=>t==="-"?"":"-");let e=["var","calc","min","max","clamp"];for(let t of e)if(i.includes(`${t}(`))return`calc(${i} * -1)`}var fi=C(()=>{l()});var gu,yu=C(()=>{l();gu=["preflight","container","accessibility","pointerEvents","visibility","position","inset","isolation","zIndex","order","gridColumn","gridColumnStart","gridColumnEnd","gridRow","gridRowStart","gridRowEnd","float","clear","margin","boxSizing","lineClamp","display","aspectRatio","height","maxHeight","minHeight","width","minWidth","maxWidth","flex","flexShrink","flexGrow","flexBasis","tableLayout","captionSide","borderCollapse","borderSpacing","transformOrigin","translate","rotate","skew","scale","transform","animation","cursor","touchAction","userSelect","resize","scrollSnapType","scrollSnapAlign","scrollSnapStop","scrollMargin","scrollPadding","listStylePosition","listStyleType","listStyleImage","appearance","columns","breakBefore","breakInside","breakAfter","gridAutoColumns","gridAutoFlow","gridAutoRows","gridTemplateColumns","gridTemplateRows","flexDirection","flexWrap","placeContent","placeItems","alignContent","alignItems","justifyContent","justifyItems","gap","space","divideWidth","divideStyle","divideColor","divideOpacity","placeSelf","alignSelf","justifySelf","overflow","overscrollBehavior","scrollBehavior","textOverflow","hyphens","whitespace","wordBreak","borderRadius","borderWidth","borderStyle","borderColor","borderOpacity","backgroundColor","backgroundOpacity","backgroundImage","gradientColorStops","boxDecorationBreak","backgroundSize","backgroundAttachment","backgroundClip","backgroundPosition","backgroundRepeat","backgroundOrigin","fill","stroke","strokeWidth","objectFit","objectPosition","padding","textAlign","textIndent","verticalAlign","fontFamily","fontSize","fontWeight","textTransform","fontStyle","fontVariantNumeric","lineHeight","letterSpacing","textColor","textOpacity","textDecoration","textDecorationColor","textDecorationStyle","textDecorationThickness","textUnderlineOffset","fontSmoothing","placeholderColor","placeholderOpacity","caretColor","accentColor","opacity","backgroundBlendMode","mixBlendMode","boxShadow","boxShadowColor","outlineStyle","outlineWidth","outlineOffset","outlineColor","ringWidth","ringColor","ringOpacity","ringOffsetWidth","ringOffsetColor","blur","brightness","contrast","dropShadow","grayscale","hueRotate","invert","saturate","sepia","filter","backdropBlur","backdropBrightness","backdropContrast","backdropGrayscale","backdropHueRotate","backdropInvert","backdropOpacity","backdropSaturate","backdropSepia","backdropFilter","transitionProperty","transitionDelay","transitionDuration","transitionTimingFunction","willChange","content"]});function wu(i,e){return i===void 0?e:Array.isArray(i)?i:[...new Set(e.filter(r=>i!==!1&&i[r]!==!1).concat(Object.keys(i).filter(r=>i[r]!==!1)))]}var bu=C(()=>{l()});var vu={};Ae(vu,{default:()=>_e});var _e,ci=C(()=>{l();_e=new Proxy({},{get:()=>String})});function Kn(i,e,t){typeof m!="undefined"&&m.env.JEST_WORKER_ID||t&&xu.has(t)||(t&&xu.add(t),console.warn(""),e.forEach(r=>console.warn(i,"-",r)))}function Zn(i){return _e.dim(i)}var xu,F,Ee=C(()=>{l();ci();xu=new 
Set;F={info(i,e){Kn(_e.bold(_e.cyan("info")),...Array.isArray(i)?[i]:[e,i])},warn(i,e){["content-problems"].includes(i)||Kn(_e.bold(_e.yellow("warn")),...Array.isArray(i)?[i]:[e,i])},risk(i,e){Kn(_e.bold(_e.magenta("risk")),...Array.isArray(i)?[i]:[e,i])}}});var ku={};Ae(ku,{default:()=>es});function nr({version:i,from:e,to:t}){F.warn(`${e}-color-renamed`,[`As of Tailwind CSS ${i}, \`${e}\` has been renamed to \`${t}\`.`,"Update your configuration file to silence this warning."])}var es,ts=C(()=>{l();Ee();es={inherit:"inherit",current:"currentColor",transparent:"transparent",black:"#000",white:"#fff",slate:{50:"#f8fafc",100:"#f1f5f9",200:"#e2e8f0",300:"#cbd5e1",400:"#94a3b8",500:"#64748b",600:"#475569",700:"#334155",800:"#1e293b",900:"#0f172a",950:"#020617"},gray:{50:"#f9fafb",100:"#f3f4f6",200:"#e5e7eb",300:"#d1d5db",400:"#9ca3af",500:"#6b7280",600:"#4b5563",700:"#374151",800:"#1f2937",900:"#111827",950:"#030712"},zinc:{50:"#fafafa",100:"#f4f4f5",200:"#e4e4e7",300:"#d4d4d8",400:"#a1a1aa",500:"#71717a",600:"#52525b",700:"#3f3f46",800:"#27272a",900:"#18181b",950:"#09090b"},neutral:{50:"#fafafa",100:"#f5f5f5",200:"#e5e5e5",300:"#d4d4d4",400:"#a3a3a3",500:"#737373",600:"#525252",700:"#404040",800:"#262626",900:"#171717",950:"#0a0a0a"},stone:{50:"#fafaf9",100:"#f5f5f4",200:"#e7e5e4",300:"#d6d3d1",400:"#a8a29e",500:"#78716c",600:"#57534e",700:"#44403c",800:"#292524",900:"#1c1917",950:"#0c0a09"},red:{50:"#fef2f2",100:"#fee2e2",200:"#fecaca",300:"#fca5a5",400:"#f87171",500:"#ef4444",600:"#dc2626",700:"#b91c1c",800:"#991b1b",900:"#7f1d1d",950:"#450a0a"},orange:{50:"#fff7ed",100:"#ffedd5",200:"#fed7aa",300:"#fdba74",400:"#fb923c",500:"#f97316",600:"#ea580c",700:"#c2410c",800:"#9a3412",900:"#7c2d12",950:"#431407"},amber:{50:"#fffbeb",100:"#fef3c7",200:"#fde68a",300:"#fcd34d",400:"#fbbf24",500:"#f59e0b",600:"#d97706",700:"#b45309",800:"#92400e",900:"#78350f",950:"#451a03"},yellow:{50:"#fefce8",100:"#fef9c3",200:"#fef08a",300:"#fde047",400:"#facc15",500:"#eab308",600:"#ca8a04",700:"#a16207",800:"#854d0e",900:"#713f12",950:"#422006"},lime:{50:"#f7fee7",100:"#ecfccb",200:"#d9f99d",300:"#bef264",400:"#a3e635",500:"#84cc16",600:"#65a30d",700:"#4d7c0f",800:"#3f6212",900:"#365314",950:"#1a2e05"},green:{50:"#f0fdf4",100:"#dcfce7",200:"#bbf7d0",300:"#86efac",400:"#4ade80",500:"#22c55e",600:"#16a34a",700:"#15803d",800:"#166534",900:"#14532d",950:"#052e16"},emerald:{50:"#ecfdf5",100:"#d1fae5",200:"#a7f3d0",300:"#6ee7b7",400:"#34d399",500:"#10b981",600:"#059669",700:"#047857",800:"#065f46",900:"#064e3b",950:"#022c22"},teal:{50:"#f0fdfa",100:"#ccfbf1",200:"#99f6e4",300:"#5eead4",400:"#2dd4bf",500:"#14b8a6",600:"#0d9488",700:"#0f766e",800:"#115e59",900:"#134e4a",950:"#042f2e"},cyan:{50:"#ecfeff",100:"#cffafe",200:"#a5f3fc",300:"#67e8f9",400:"#22d3ee",500:"#06b6d4",600:"#0891b2",700:"#0e7490",800:"#155e75",900:"#164e63",950:"#083344"},sky:{50:"#f0f9ff",100:"#e0f2fe",200:"#bae6fd",300:"#7dd3fc",400:"#38bdf8",500:"#0ea5e9",600:"#0284c7",700:"#0369a1",800:"#075985",900:"#0c4a6e",950:"#082f49"},blue:{50:"#eff6ff",100:"#dbeafe",200:"#bfdbfe",300:"#93c5fd",400:"#60a5fa",500:"#3b82f6",600:"#2563eb",700:"#1d4ed8",800:"#1e40af",900:"#1e3a8a",950:"#172554"},indigo:{50:"#eef2ff",100:"#e0e7ff",200:"#c7d2fe",300:"#a5b4fc",400:"#818cf8",500:"#6366f1",600:"#4f46e5",700:"#4338ca",800:"#3730a3",900:"#312e81",950:"#1e1b4b"},violet:{50:"#f5f3ff",100:"#ede9fe",200:"#ddd6fe",300:"#c4b5fd",400:"#a78bfa",500:"#8b5cf6",600:"#7c3aed",700:"#6d28d9",800:"#5b21b6",900:"#4c1d95",950:"#2e1065"},purple:{50:"#faf5ff",100:"#f3e8ff",200:"#e9d5ff",30
0:"#d8b4fe",400:"#c084fc",500:"#a855f7",600:"#9333ea",700:"#7e22ce",800:"#6b21a8",900:"#581c87",950:"#3b0764"},fuchsia:{50:"#fdf4ff",100:"#fae8ff",200:"#f5d0fe",300:"#f0abfc",400:"#e879f9",500:"#d946ef",600:"#c026d3",700:"#a21caf",800:"#86198f",900:"#701a75",950:"#4a044e"},pink:{50:"#fdf2f8",100:"#fce7f3",200:"#fbcfe8",300:"#f9a8d4",400:"#f472b6",500:"#ec4899",600:"#db2777",700:"#be185d",800:"#9d174d",900:"#831843",950:"#500724"},rose:{50:"#fff1f2",100:"#ffe4e6",200:"#fecdd3",300:"#fda4af",400:"#fb7185",500:"#f43f5e",600:"#e11d48",700:"#be123c",800:"#9f1239",900:"#881337",950:"#4c0519"},get lightBlue(){return nr({version:"v2.2",from:"lightBlue",to:"sky"}),this.sky},get warmGray(){return nr({version:"v3.0",from:"warmGray",to:"stone"}),this.stone},get trueGray(){return nr({version:"v3.0",from:"trueGray",to:"neutral"}),this.neutral},get coolGray(){return nr({version:"v3.0",from:"coolGray",to:"gray"}),this.gray},get blueGray(){return nr({version:"v3.0",from:"blueGray",to:"slate"}),this.slate}}});function rs(i,...e){for(let t of e){for(let r in t)i?.hasOwnProperty?.(r)||(i[r]=t[r]);for(let r of Object.getOwnPropertySymbols(t))i?.hasOwnProperty?.(r)||(i[r]=t[r])}return i}var Su=C(()=>{l()});function Ke(i){if(Array.isArray(i))return i;let e=i.split("[").length-1,t=i.split("]").length-1;if(e!==t)throw new Error(`Path is invalid. Has unbalanced brackets: ${i}`);return i.split(/\.(?![^\[]*\])|[\[\]]/g).filter(Boolean)}var pi=C(()=>{l()});function J(i,e){return di.future.includes(e)?i.future==="all"||(i?.future?.[e]??Cu[e]??!1):di.experimental.includes(e)?i.experimental==="all"||(i?.experimental?.[e]??Cu[e]??!1):!1}function Au(i){return i.experimental==="all"?di.experimental:Object.keys(i?.experimental??{}).filter(e=>di.experimental.includes(e)&&i.experimental[e])}function _u(i){if(m.env.JEST_WORKER_ID===void 0&&Au(i).length>0){let e=Au(i).map(t=>_e.yellow(t)).join(", ");F.warn("experimental-flags-enabled",[`You have enabled experimental features: ${e}`,"Experimental features in Tailwind CSS are not covered by semver, may introduce breaking changes, and can change at any time."])}}var Cu,di,De=C(()=>{l();ci();Ee();Cu={optimizeUniversalDefaults:!1,generalizedModifiers:!0,get disableColorOpacityUtilitiesByDefault(){return!1},get relativeContentPathsByDefault(){return!1}},di={future:["hoverOnlyWhenSupported","respectDefaultRingColorOpacity","disableColorOpacityUtilitiesByDefault","relativeContentPathsByDefault"],experimental:["optimizeUniversalDefaults","generalizedModifiers"]}});function Eu(i){(()=>{if(i.purge||!i.content||!Array.isArray(i.content)&&!(typeof i.content=="object"&&i.content!==null))return!1;if(Array.isArray(i.content))return i.content.every(t=>typeof t=="string"?!0:!(typeof t?.raw!="string"||t?.extension&&typeof t?.extension!="string"));if(typeof i.content=="object"&&i.content!==null){if(Object.keys(i.content).some(t=>!["files","relative","extract","transform"].includes(t)))return!1;if(Array.isArray(i.content.files)){if(!i.content.files.every(t=>typeof t=="string"?!0:!(typeof t?.raw!="string"||t?.extension&&typeof t?.extension!="string")))return!1;if(typeof i.content.extract=="object"){for(let t of Object.values(i.content.extract))if(typeof t!="function")return!1}else if(!(i.content.extract===void 0||typeof i.content.extract=="function"))return!1;if(typeof i.content.transform=="object"){for(let t of Object.values(i.content.transform))if(typeof t!="function")return!1}else if(!(i.content.transform===void 0||typeof i.content.transform=="function"))return!1;if(typeof 
i.content.relative!="boolean"&&typeof i.content.relative!="undefined")return!1}return!0}return!1})()||F.warn("purge-deprecation",["The `purge`/`content` options have changed in Tailwind CSS v3.0.","Update your configuration file to eliminate this warning.","https://tailwindcss.com/docs/upgrade-guide#configure-content-sources"]),i.safelist=(()=>{let{content:t,purge:r,safelist:n}=i;return Array.isArray(n)?n:Array.isArray(t?.safelist)?t.safelist:Array.isArray(r?.safelist)?r.safelist:Array.isArray(r?.options?.safelist)?r.options.safelist:[]})(),i.blocklist=(()=>{let{blocklist:t}=i;if(Array.isArray(t)){if(t.every(r=>typeof r=="string"))return t;F.warn("blocklist-invalid",["The `blocklist` option must be an array of strings.","https://tailwindcss.com/docs/content-configuration#discarding-classes"])}return[]})(),typeof i.prefix=="function"?(F.warn("prefix-function",["As of Tailwind CSS v3.0, `prefix` cannot be a function.","Update `prefix` in your configuration to be a string to eliminate this warning.","https://tailwindcss.com/docs/upgrade-guide#prefix-cannot-be-a-function"]),i.prefix=""):i.prefix=i.prefix??"",i.content={relative:(()=>{let{content:t}=i;return t?.relative?t.relative:J(i,"relativeContentPathsByDefault")})(),files:(()=>{let{content:t,purge:r}=i;return Array.isArray(r)?r:Array.isArray(r?.content)?r.content:Array.isArray(t)?t:Array.isArray(t?.content)?t.content:Array.isArray(t?.files)?t.files:[]})(),extract:(()=>{let t=(()=>i.purge?.extract?i.purge.extract:i.content?.extract?i.content.extract:i.purge?.extract?.DEFAULT?i.purge.extract.DEFAULT:i.content?.extract?.DEFAULT?i.content.extract.DEFAULT:i.purge?.options?.extractors?i.purge.options.extractors:i.content?.options?.extractors?i.content.options.extractors:{})(),r={},n=(()=>{if(i.purge?.options?.defaultExtractor)return i.purge.options.defaultExtractor;if(i.content?.options?.defaultExtractor)return i.content.options.defaultExtractor})();if(n!==void 0&&(r.DEFAULT=n),typeof t=="function")r.DEFAULT=t;else if(Array.isArray(t))for(let{extensions:a,extractor:s}of t??[])for(let o of a)r[o]=s;else typeof t=="object"&&t!==null&&Object.assign(r,t);return r})(),transform:(()=>{let t=(()=>i.purge?.transform?i.purge.transform:i.content?.transform?i.content.transform:i.purge?.transform?.DEFAULT?i.purge.transform.DEFAULT:i.content?.transform?.DEFAULT?i.content.transform.DEFAULT:{})(),r={};return typeof t=="function"&&(r.DEFAULT=t),typeof t=="object"&&t!==null&&Object.assign(r,t),r})()};for(let t of i.content.files)if(typeof t=="string"&&/{([^,]*?)}/g.test(t)){F.warn("invalid-glob-braces",[`The glob pattern ${Zn(t)} in your Tailwind CSS configuration is invalid.`,`Update it to ${Zn(t.replace(/{([^,]*?)}/g,"$1"))} to silence this warning.`]);break}return i}var Ou=C(()=>{l();De();Ee()});function ie(i){if(Object.prototype.toString.call(i)!=="[object Object]")return!1;let e=Object.getPrototypeOf(i);return e===null||e===Object.prototype}var xt=C(()=>{l()});function Ze(i){return Array.isArray(i)?i.map(e=>Ze(e)):typeof i=="object"&&i!==null?Object.fromEntries(Object.entries(i).map(([e,t])=>[e,Ze(t)])):i}var hi=C(()=>{l()});function ht(i){return i.replace(/\\,/g,"\\2c ")}var mi=C(()=>{l()});var 
is,Tu=C(()=>{l();is={aliceblue:[240,248,255],antiquewhite:[250,235,215],aqua:[0,255,255],aquamarine:[127,255,212],azure:[240,255,255],beige:[245,245,220],bisque:[255,228,196],black:[0,0,0],blanchedalmond:[255,235,205],blue:[0,0,255],blueviolet:[138,43,226],brown:[165,42,42],burlywood:[222,184,135],cadetblue:[95,158,160],chartreuse:[127,255,0],chocolate:[210,105,30],coral:[255,127,80],cornflowerblue:[100,149,237],cornsilk:[255,248,220],crimson:[220,20,60],cyan:[0,255,255],darkblue:[0,0,139],darkcyan:[0,139,139],darkgoldenrod:[184,134,11],darkgray:[169,169,169],darkgreen:[0,100,0],darkgrey:[169,169,169],darkkhaki:[189,183,107],darkmagenta:[139,0,139],darkolivegreen:[85,107,47],darkorange:[255,140,0],darkorchid:[153,50,204],darkred:[139,0,0],darksalmon:[233,150,122],darkseagreen:[143,188,143],darkslateblue:[72,61,139],darkslategray:[47,79,79],darkslategrey:[47,79,79],darkturquoise:[0,206,209],darkviolet:[148,0,211],deeppink:[255,20,147],deepskyblue:[0,191,255],dimgray:[105,105,105],dimgrey:[105,105,105],dodgerblue:[30,144,255],firebrick:[178,34,34],floralwhite:[255,250,240],forestgreen:[34,139,34],fuchsia:[255,0,255],gainsboro:[220,220,220],ghostwhite:[248,248,255],gold:[255,215,0],goldenrod:[218,165,32],gray:[128,128,128],green:[0,128,0],greenyellow:[173,255,47],grey:[128,128,128],honeydew:[240,255,240],hotpink:[255,105,180],indianred:[205,92,92],indigo:[75,0,130],ivory:[255,255,240],khaki:[240,230,140],lavender:[230,230,250],lavenderblush:[255,240,245],lawngreen:[124,252,0],lemonchiffon:[255,250,205],lightblue:[173,216,230],lightcoral:[240,128,128],lightcyan:[224,255,255],lightgoldenrodyellow:[250,250,210],lightgray:[211,211,211],lightgreen:[144,238,144],lightgrey:[211,211,211],lightpink:[255,182,193],lightsalmon:[255,160,122],lightseagreen:[32,178,170],lightskyblue:[135,206,250],lightslategray:[119,136,153],lightslategrey:[119,136,153],lightsteelblue:[176,196,222],lightyellow:[255,255,224],lime:[0,255,0],limegreen:[50,205,50],linen:[250,240,230],magenta:[255,0,255],maroon:[128,0,0],mediumaquamarine:[102,205,170],mediumblue:[0,0,205],mediumorchid:[186,85,211],mediumpurple:[147,112,219],mediumseagreen:[60,179,113],mediumslateblue:[123,104,238],mediumspringgreen:[0,250,154],mediumturquoise:[72,209,204],mediumvioletred:[199,21,133],midnightblue:[25,25,112],mintcream:[245,255,250],mistyrose:[255,228,225],moccasin:[255,228,181],navajowhite:[255,222,173],navy:[0,0,128],oldlace:[253,245,230],olive:[128,128,0],olivedrab:[107,142,35],orange:[255,165,0],orangered:[255,69,0],orchid:[218,112,214],palegoldenrod:[238,232,170],palegreen:[152,251,152],paleturquoise:[175,238,238],palevioletred:[219,112,147],papayawhip:[255,239,213],peachpuff:[255,218,185],peru:[205,133,63],pink:[255,192,203],plum:[221,160,221],powderblue:[176,224,230],purple:[128,0,128],rebeccapurple:[102,51,153],red:[255,0,0],rosybrown:[188,143,143],royalblue:[65,105,225],saddlebrown:[139,69,19],salmon:[250,128,114],sandybrown:[244,164,96],seagreen:[46,139,87],seashell:[255,245,238],sienna:[160,82,45],silver:[192,192,192],skyblue:[135,206,235],slateblue:[106,90,205],slategray:[112,128,144],slategrey:[112,128,144],snow:[255,250,250],springgreen:[0,255,127],steelblue:[70,130,180],tan:[210,180,140],teal:[0,128,128],thistle:[216,191,216],tomato:[255,99,71],turquoise:[64,224,208],violet:[238,130,238],wheat:[245,222,179],white:[255,255,255],whitesmoke:[245,245,245],yellow:[255,255,0],yellowgreen:[154,205,50]}});function sr(i,{loose:e=!1}={}){if(typeof i!="string")return 
null;if(i=i.trim(),i==="transparent")return{mode:"rgb",color:["0","0","0"],alpha:"0"};if(i in is)return{mode:"rgb",color:is[i].map(a=>a.toString())};let t=i.replace(Db,(a,s,o,u,c)=>["#",s,s,o,o,u,u,c?c+c:""].join("")).match(Pb);if(t!==null)return{mode:"rgb",color:[parseInt(t[1],16),parseInt(t[2],16),parseInt(t[3],16)].map(a=>a.toString()),alpha:t[4]?(parseInt(t[4],16)/255).toString():void 0};let r=i.match(Ib)??i.match(qb);if(r===null)return null;let n=[r[2],r[3],r[4]].filter(Boolean).map(a=>a.toString());return n.length===2&&n[0].startsWith("var(")?{mode:r[1],color:[n[0]],alpha:n[1]}:!e&&n.length!==3||n.length<3&&!n.some(a=>/^var\(.*?\)$/.test(a))?null:{mode:r[1],color:n,alpha:r[5]?.toString?.()}}function ns({mode:i,color:e,alpha:t}){let r=t!==void 0;return i==="rgba"||i==="hsla"?`${i}(${e.join(", ")}${r?`, ${t}`:""})`:`${i}(${e.join(" ")}${r?` / ${t}`:""})`}var Pb,Db,et,gi,Pu,tt,Ib,qb,ss=C(()=>{l();Tu();Pb=/^#([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})?$/i,Db=/^#([a-f\d])([a-f\d])([a-f\d])([a-f\d])?$/i,et=/(?:\d+|\d*\.\d+)%?/,gi=/(?:\s*,\s*|\s+)/,Pu=/\s*[,/]\s*/,tt=/var\(--(?:[^ )]*?)\)/,Ib=new RegExp(`^(rgba?)\\(\\s*(${et.source}|${tt.source})(?:${gi.source}(${et.source}|${tt.source}))?(?:${gi.source}(${et.source}|${tt.source}))?(?:${Pu.source}(${et.source}|${tt.source}))?\\s*\\)$`),qb=new RegExp(`^(hsla?)\\(\\s*((?:${et.source})(?:deg|rad|grad|turn)?|${tt.source})(?:${gi.source}(${et.source}|${tt.source}))?(?:${gi.source}(${et.source}|${tt.source}))?(?:${Pu.source}(${et.source}|${tt.source}))?\\s*\\)$`)});function Ie(i,e,t){if(typeof i=="function")return i({opacityValue:e});let r=sr(i,{loose:!0});return r===null?t:ns({...r,alpha:e})}function se({color:i,property:e,variable:t}){let r=[].concat(e);if(typeof i=="function")return{[t]:"1",...Object.fromEntries(r.map(a=>[a,i({opacityVariable:t,opacityValue:`var(${t})`})]))};let n=sr(i);return n===null?Object.fromEntries(r.map(a=>[a,i])):n.alpha!==void 0?Object.fromEntries(r.map(a=>[a,i])):{[t]:"1",...Object.fromEntries(r.map(a=>[a,ns({...n,alpha:`var(${t})`})]))}}var ar=C(()=>{l();ss()});function le(i,e){let t=[],r=[],n=0,a=!1;for(let s=0;s{l()});function yi(i){return le(i,",").map(t=>{let r=t.trim(),n={raw:r},a=r.split(Mb),s=new Set;for(let o of a)Du.lastIndex=0,!s.has("KEYWORD")&&Rb.has(o)?(n.keyword=o,s.add("KEYWORD")):Du.test(o)?s.has("X")?s.has("Y")?s.has("BLUR")?s.has("SPREAD")||(n.spread=o,s.add("SPREAD")):(n.blur=o,s.add("BLUR")):(n.y=o,s.add("Y")):(n.x=o,s.add("X")):n.color?(n.unknown||(n.unknown=[]),n.unknown.push(o)):n.color=o;return n.valid=n.x!==void 0&&n.y!==void 0,n})}function Iu(i){return i.map(e=>e.valid?[e.keyword,e.x,e.y,e.blur,e.spread,e.color].filter(Boolean).join(" "):e.raw).join(", ")}var Rb,Mb,Du,as=C(()=>{l();or();Rb=new Set(["inset","inherit","initial","revert","unset"]),Mb=/\ +(?![^(]*\))/g,Du=/^-?(\d+|\.\d+)(.*?)$/g});function os(i){return Bb.some(e=>new RegExp(`^${e}\\(.*\\)`).test(i))}function U(i,e=!0){return i.startsWith("--")?`var(${i})`:i.includes("url(")?i.split(/(url\(.*?\))/g).filter(Boolean).map(t=>/^url\(.*?\)$/.test(t)?t:U(t,!1)).join(""):(i=i.replace(/([^\\])_+/g,(t,r)=>r+" ".repeat(t.length-1)).replace(/^_/g," ").replace(/\\_/g,"_"),e&&(i=i.trim()),i=Nb(i),i)}function Nb(i){return i.replace(/(calc|min|max|clamp)\(.+\)/g,e=>{let t=[];return e.replace(/var\((--.+?)[,)]/g,(r,n)=>(t.push(n),r.replace(n,qu))).replace(/(-?\d*\.?\d(?!\b-\d.+[,)](?![^+\-/*])\D)(?:%|[a-z]+)?|\))([+\-/*])/g,"$1 $2 ").replace(Fb,()=>t.shift())})}function ls(i){return i.startsWith("url(")}function 
us(i){return!isNaN(Number(i))||os(i)}function lr(i){return i.endsWith("%")&&us(i.slice(0,-1))||os(i)}function ur(i){return i==="0"||new RegExp(`^[+-]?[0-9]*.?[0-9]+(?:[eE][+-]?[0-9]+)?${$b}$`).test(i)||os(i)}function Ru(i){return jb.has(i)}function Mu(i){let e=yi(U(i));for(let t of e)if(!t.valid)return!1;return!0}function Bu(i){let e=0;return le(i,"_").every(r=>(r=U(r),r.startsWith("var(")?!0:sr(r,{loose:!0})!==null?(e++,!0):!1))?e>0:!1}function Fu(i){let e=0;return le(i,",").every(r=>(r=U(r),r.startsWith("var(")?!0:ls(r)||Vb(r)||["element(","image(","cross-fade(","image-set("].some(n=>r.startsWith(n))?(e++,!0):!1))?e>0:!1}function Vb(i){i=U(i);for(let e of zb)if(i.startsWith(`${e}(`))return!0;return!1}function Nu(i){let e=0;return le(i,"_").every(r=>(r=U(r),r.startsWith("var(")?!0:Ub.has(r)||ur(r)||lr(r)?(e++,!0):!1))?e>0:!1}function Lu(i){let e=0;return le(i,",").every(r=>(r=U(r),r.startsWith("var(")?!0:r.includes(" ")&&!/(['"])([^"']+)\1/g.test(r)||/^\d/g.test(r)?!1:(e++,!0)))?e>0:!1}function $u(i){return Wb.has(i)}function ju(i){return Gb.has(i)}function zu(i){return Hb.has(i)}var Bb,qu,Fb,Lb,$b,jb,zb,Ub,Wb,Gb,Hb,fr=C(()=>{l();ss();as();or();Bb=["min","max","clamp","calc"];qu="--tw-placeholder",Fb=new RegExp(qu,"g");Lb=["cm","mm","Q","in","pc","pt","px","em","ex","ch","rem","lh","rlh","vw","vh","vmin","vmax","vb","vi","svw","svh","lvw","lvh","dvw","dvh","cqw","cqh","cqi","cqb","cqmin","cqmax"],$b=`(?:${Lb.join("|")})`;jb=new Set(["thin","medium","thick"]);zb=new Set(["conic-gradient","linear-gradient","radial-gradient","repeating-conic-gradient","repeating-linear-gradient","repeating-radial-gradient"]);Ub=new Set(["center","top","right","bottom","left"]);Wb=new Set(["serif","sans-serif","monospace","cursive","fantasy","system-ui","ui-serif","ui-sans-serif","ui-monospace","ui-rounded","math","emoji","fangsong"]);Gb=new Set(["xx-small","x-small","small","medium","large","x-large","x-large","xxx-large"]);Hb=new Set(["larger","smaller"])});function Vu(i){let e=["cover","contain"];return le(i,",").every(t=>{let r=le(t,"_").filter(Boolean);return r.length===1&&e.includes(r[0])?!0:r.length!==1&&r.length!==2?!1:r.every(n=>ur(n)||lr(n)||n==="auto")})}var Uu=C(()=>{l();fr();or()});function Wu(i,e){i.walkClasses(t=>{t.value=e(t.value),t.raws&&t.raws.value&&(t.raws.value=ht(t.raws.value))})}function Gu(i,e){if(!rt(i))return;let t=i.slice(1,-1);if(!!e(t))return U(t)}function Yb(i,e={},t){let r=e[i];if(r!==void 0)return Xe(r);if(rt(i)){let n=Gu(i,t);return n===void 0?void 0:Xe(n)}}function wi(i,e={},{validate:t=()=>!0}={}){let r=e.values?.[i];return r!==void 0?r:e.supportsNegativeValues&&i.startsWith("-")?Yb(i.slice(1),e.values,t):Gu(i,t)}function rt(i){return i.startsWith("[")&&i.endsWith("]")}function Hu(i){let e=i.lastIndexOf("/");return e===-1||e===i.length-1?[i,void 0]:rt(i)&&!i.includes("]/[")?[i,void 0]:[i.slice(0,e),i.slice(e+1)]}function kt(i){if(typeof i=="string"&&i.includes("")){let e=i;return({opacityValue:t=1})=>e.replace("",t)}return i}function Yu(i){return U(i.slice(1,-1))}function Qb(i,e={},{tailwindConfig:t={}}={}){if(e.values?.[i]!==void 0)return kt(e.values?.[i]);let[r,n]=Hu(i);if(n!==void 0){let a=e.values?.[r]??(rt(r)?r.slice(1,-1):void 0);return a===void 0?void 0:(a=kt(a),rt(n)?Ie(a,Yu(n)):t.theme?.opacity?.[n]===void 0?void 0:Ie(a,t.theme.opacity[n]))}return wi(i,e,{validate:Bu})}function Jb(i,e={}){return e.values?.[i]}function he(i){return(e,t)=>wi(e,t,{validate:i})}function Xb(i,e){let t=i.indexOf(e);return t===-1?[void 0,i]:[i.slice(0,t),i.slice(t+1)]}function 
cs(i,e,t,r){if(t.values&&e in t.values)for(let{type:a}of i??[]){let s=fs[a](e,t,{tailwindConfig:r});if(s!==void 0)return[s,a,null]}if(rt(e)){let a=e.slice(1,-1),[s,o]=Xb(a,":");if(!/^[\w-_]+$/g.test(s))o=a;else if(s!==void 0&&!Qu.includes(s))return[];if(o.length>0&&Qu.includes(s))return[wi(`[${o}]`,t),s,null]}let n=ps(i,e,t,r);for(let a of n)return a;return[]}function*ps(i,e,t,r){let n=J(r,"generalizedModifiers"),[a,s]=Hu(e);if(n&&t.modifiers!=null&&(t.modifiers==="any"||typeof t.modifiers=="object"&&(s&&rt(s)||s in t.modifiers))||(a=e,s=void 0),s!==void 0&&a===""&&(a="DEFAULT"),s!==void 0&&typeof t.modifiers=="object"){let u=t.modifiers?.[s]??null;u!==null?s=u:rt(s)&&(s=Yu(s))}for(let{type:u}of i??[]){let c=fs[u](a,t,{tailwindConfig:r});c!==void 0&&(yield[c,u,s??null])}}var fs,Qu,cr=C(()=>{l();mi();ar();fr();fi();Uu();De();fs={any:wi,color:Qb,url:he(ls),image:he(Fu),length:he(ur),percentage:he(lr),position:he(Nu),lookup:Jb,"generic-name":he($u),"family-name":he(Lu),number:he(us),"line-width":he(Ru),"absolute-size":he(ju),"relative-size":he(zu),shadow:he(Mu),size:he(Vu)},Qu=Object.keys(fs)});function N(i){return typeof i=="function"?i({}):i}var ds=C(()=>{l()});function St(i){return typeof i=="function"}function pr(i,...e){let t=e.pop();for(let r of e)for(let n in r){let a=t(i[n],r[n]);a===void 0?ie(i[n])&&ie(r[n])?i[n]=pr({},i[n],r[n],t):i[n]=r[n]:i[n]=a}return i}function Kb(i,...e){return St(i)?i(...e):i}function Zb(i){return i.reduce((e,{extend:t})=>pr(e,t,(r,n)=>r===void 0?[n]:Array.isArray(r)?[n,...r]:[n,r]),{})}function e0(i){return{...i.reduce((e,t)=>rs(e,t),{}),extend:Zb(i)}}function Ju(i,e){if(Array.isArray(i)&&ie(i[0]))return i.concat(e);if(Array.isArray(e)&&ie(e[0])&&ie(i))return[i,...e];if(Array.isArray(e))return e}function t0({extend:i,...e}){return pr(e,i,(t,r)=>!St(t)&&!r.some(St)?pr({},t,...r,Ju):(n,a)=>pr({},...[t,...r].map(s=>Kb(s,n,a)),Ju))}function*r0(i){let e=Ke(i);if(e.length===0||(yield e,Array.isArray(i)))return;let t=/^(.*?)\s*\/\s*([^/]+)$/,r=i.match(t);if(r!==null){let[,n,a]=r,s=Ke(n);s.alpha=a,yield s}}function i0(i){let e=(t,r)=>{for(let n of r0(t)){let a=0,s=i;for(;s!=null&&a(t[r]=St(i[r])?i[r](e,hs):i[r],t),{})}function Xu(i){let e=[];return i.forEach(t=>{e=[...e,t];let r=t?.plugins??[];r.length!==0&&r.forEach(n=>{n.__isOptionsFunction&&(n=n()),e=[...e,...Xu([n?.config??{}])]})}),e}function n0(i){return[...i].reduceRight((t,r)=>St(r)?r({corePlugins:t}):wu(r,t),gu)}function s0(i){return[...i].reduceRight((t,r)=>[...t,...r],[])}function ms(i){let e=[...Xu(i),{prefix:"",important:!1,separator:":"}];return Eu(rs({theme:i0(t0(e0(e.map(t=>t?.theme??{})))),corePlugins:n0(e.map(t=>t.corePlugins)),plugins:s0(i.map(t=>t?.plugins??[]))},...e))}var hs,Ku=C(()=>{l();fi();yu();bu();ts();Su();pi();Ou();xt();hi();cr();ar();ds();hs={colors:es,negative(i){return Object.keys(i).filter(e=>i[e]!=="0").reduce((e,t)=>{let r=Xe(i[t]);return r!==void 0&&(e[`-${t}`]=r),e},{})},breakpoints(i){return Object.keys(i).filter(e=>typeof i[e]=="string").reduce((e,t)=>({...e,[`screen-${t}`]:i[t]}),{})}}});var bi=v((eT,Zu)=>{l();Zu.exports={content:[],presets:[],darkMode:"media",theme:{accentColor:({theme:i})=>({...i("colors"),auto:"auto"}),animation:{none:"none",spin:"spin 1s linear infinite",ping:"ping 1s cubic-bezier(0, 0, 0.2, 1) infinite",pulse:"pulse 2s cubic-bezier(0.4, 0, 0.6, 1) infinite",bounce:"bounce 1s 
infinite"},aria:{busy:'busy="true"',checked:'checked="true"',disabled:'disabled="true"',expanded:'expanded="true"',hidden:'hidden="true"',pressed:'pressed="true"',readonly:'readonly="true"',required:'required="true"',selected:'selected="true"'},aspectRatio:{auto:"auto",square:"1 / 1",video:"16 / 9"},backdropBlur:({theme:i})=>i("blur"),backdropBrightness:({theme:i})=>i("brightness"),backdropContrast:({theme:i})=>i("contrast"),backdropGrayscale:({theme:i})=>i("grayscale"),backdropHueRotate:({theme:i})=>i("hueRotate"),backdropInvert:({theme:i})=>i("invert"),backdropOpacity:({theme:i})=>i("opacity"),backdropSaturate:({theme:i})=>i("saturate"),backdropSepia:({theme:i})=>i("sepia"),backgroundColor:({theme:i})=>i("colors"),backgroundImage:{none:"none","gradient-to-t":"linear-gradient(to top, var(--tw-gradient-stops))","gradient-to-tr":"linear-gradient(to top right, var(--tw-gradient-stops))","gradient-to-r":"linear-gradient(to right, var(--tw-gradient-stops))","gradient-to-br":"linear-gradient(to bottom right, var(--tw-gradient-stops))","gradient-to-b":"linear-gradient(to bottom, var(--tw-gradient-stops))","gradient-to-bl":"linear-gradient(to bottom left, var(--tw-gradient-stops))","gradient-to-l":"linear-gradient(to left, var(--tw-gradient-stops))","gradient-to-tl":"linear-gradient(to top left, var(--tw-gradient-stops))"},backgroundOpacity:({theme:i})=>i("opacity"),backgroundPosition:{bottom:"bottom",center:"center",left:"left","left-bottom":"left bottom","left-top":"left top",right:"right","right-bottom":"right bottom","right-top":"right top",top:"top"},backgroundSize:{auto:"auto",cover:"cover",contain:"contain"},blur:{0:"0",none:"0",sm:"4px",DEFAULT:"8px",md:"12px",lg:"16px",xl:"24px","2xl":"40px","3xl":"64px"},borderColor:({theme:i})=>({...i("colors"),DEFAULT:i("colors.gray.200","currentColor")}),borderOpacity:({theme:i})=>i("opacity"),borderRadius:{none:"0px",sm:"0.125rem",DEFAULT:"0.25rem",md:"0.375rem",lg:"0.5rem",xl:"0.75rem","2xl":"1rem","3xl":"1.5rem",full:"9999px"},borderSpacing:({theme:i})=>({...i("spacing")}),borderWidth:{DEFAULT:"1px",0:"0px",2:"2px",4:"4px",8:"8px"},boxShadow:{sm:"0 1px 2px 0 rgb(0 0 0 / 0.05)",DEFAULT:"0 1px 3px 0 rgb(0 0 0 / 0.1), 0 1px 2px -1px rgb(0 0 0 / 0.1)",md:"0 4px 6px -1px rgb(0 0 0 / 0.1), 0 2px 4px -2px rgb(0 0 0 / 0.1)",lg:"0 10px 15px -3px rgb(0 0 0 / 0.1), 0 4px 6px -4px rgb(0 0 0 / 0.1)",xl:"0 20px 25px -5px rgb(0 0 0 / 0.1), 0 8px 10px -6px rgb(0 0 0 / 0.1)","2xl":"0 25px 50px -12px rgb(0 0 0 / 0.25)",inner:"inset 0 2px 4px 0 rgb(0 0 0 / 
0.05)",none:"none"},boxShadowColor:({theme:i})=>i("colors"),brightness:{0:"0",50:".5",75:".75",90:".9",95:".95",100:"1",105:"1.05",110:"1.1",125:"1.25",150:"1.5",200:"2"},caretColor:({theme:i})=>i("colors"),colors:({colors:i})=>({inherit:i.inherit,current:i.current,transparent:i.transparent,black:i.black,white:i.white,slate:i.slate,gray:i.gray,zinc:i.zinc,neutral:i.neutral,stone:i.stone,red:i.red,orange:i.orange,amber:i.amber,yellow:i.yellow,lime:i.lime,green:i.green,emerald:i.emerald,teal:i.teal,cyan:i.cyan,sky:i.sky,blue:i.blue,indigo:i.indigo,violet:i.violet,purple:i.purple,fuchsia:i.fuchsia,pink:i.pink,rose:i.rose}),columns:{auto:"auto",1:"1",2:"2",3:"3",4:"4",5:"5",6:"6",7:"7",8:"8",9:"9",10:"10",11:"11",12:"12","3xs":"16rem","2xs":"18rem",xs:"20rem",sm:"24rem",md:"28rem",lg:"32rem",xl:"36rem","2xl":"42rem","3xl":"48rem","4xl":"56rem","5xl":"64rem","6xl":"72rem","7xl":"80rem"},container:{},content:{none:"none"},contrast:{0:"0",50:".5",75:".75",100:"1",125:"1.25",150:"1.5",200:"2"},cursor:{auto:"auto",default:"default",pointer:"pointer",wait:"wait",text:"text",move:"move",help:"help","not-allowed":"not-allowed",none:"none","context-menu":"context-menu",progress:"progress",cell:"cell",crosshair:"crosshair","vertical-text":"vertical-text",alias:"alias",copy:"copy","no-drop":"no-drop",grab:"grab",grabbing:"grabbing","all-scroll":"all-scroll","col-resize":"col-resize","row-resize":"row-resize","n-resize":"n-resize","e-resize":"e-resize","s-resize":"s-resize","w-resize":"w-resize","ne-resize":"ne-resize","nw-resize":"nw-resize","se-resize":"se-resize","sw-resize":"sw-resize","ew-resize":"ew-resize","ns-resize":"ns-resize","nesw-resize":"nesw-resize","nwse-resize":"nwse-resize","zoom-in":"zoom-in","zoom-out":"zoom-out"},divideColor:({theme:i})=>i("borderColor"),divideOpacity:({theme:i})=>i("borderOpacity"),divideWidth:({theme:i})=>i("borderWidth"),dropShadow:{sm:"0 1px 1px rgb(0 0 0 / 0.05)",DEFAULT:["0 1px 2px rgb(0 0 0 / 0.1)","0 1px 1px rgb(0 0 0 / 0.06)"],md:["0 4px 3px rgb(0 0 0 / 0.07)","0 2px 2px rgb(0 0 0 / 0.06)"],lg:["0 10px 8px rgb(0 0 0 / 0.04)","0 4px 3px rgb(0 0 0 / 0.1)"],xl:["0 20px 13px rgb(0 0 0 / 0.03)","0 8px 5px rgb(0 0 0 / 0.08)"],"2xl":"0 25px 25px rgb(0 0 0 / 0.15)",none:"0 0 #0000"},fill:({theme:i})=>({none:"none",...i("colors")}),flex:{1:"1 1 0%",auto:"1 1 auto",initial:"0 1 auto",none:"none"},flexBasis:({theme:i})=>({auto:"auto",...i("spacing"),"1/2":"50%","1/3":"33.333333%","2/3":"66.666667%","1/4":"25%","2/4":"50%","3/4":"75%","1/5":"20%","2/5":"40%","3/5":"60%","4/5":"80%","1/6":"16.666667%","2/6":"33.333333%","3/6":"50%","4/6":"66.666667%","5/6":"83.333333%","1/12":"8.333333%","2/12":"16.666667%","3/12":"25%","4/12":"33.333333%","5/12":"41.666667%","6/12":"50%","7/12":"58.333333%","8/12":"66.666667%","9/12":"75%","10/12":"83.333333%","11/12":"91.666667%",full:"100%"}),flexGrow:{0:"0",DEFAULT:"1"},flexShrink:{0:"0",DEFAULT:"1"},fontFamily:{sans:["ui-sans-serif","system-ui","-apple-system","BlinkMacSystemFont",'"Segoe UI"',"Roboto",'"Helvetica Neue"',"Arial",'"Noto Sans"',"sans-serif",'"Apple Color Emoji"','"Segoe UI Emoji"','"Segoe UI Symbol"','"Noto Color Emoji"'],serif:["ui-serif","Georgia","Cambria",'"Times New Roman"',"Times","serif"],mono:["ui-monospace","SFMono-Regular","Menlo","Monaco","Consolas",'"Liberation Mono"','"Courier 
New"',"monospace"]},fontSize:{xs:["0.75rem",{lineHeight:"1rem"}],sm:["0.875rem",{lineHeight:"1.25rem"}],base:["1rem",{lineHeight:"1.5rem"}],lg:["1.125rem",{lineHeight:"1.75rem"}],xl:["1.25rem",{lineHeight:"1.75rem"}],"2xl":["1.5rem",{lineHeight:"2rem"}],"3xl":["1.875rem",{lineHeight:"2.25rem"}],"4xl":["2.25rem",{lineHeight:"2.5rem"}],"5xl":["3rem",{lineHeight:"1"}],"6xl":["3.75rem",{lineHeight:"1"}],"7xl":["4.5rem",{lineHeight:"1"}],"8xl":["6rem",{lineHeight:"1"}],"9xl":["8rem",{lineHeight:"1"}]},fontWeight:{thin:"100",extralight:"200",light:"300",normal:"400",medium:"500",semibold:"600",bold:"700",extrabold:"800",black:"900"},gap:({theme:i})=>i("spacing"),gradientColorStops:({theme:i})=>i("colors"),gradientColorStopPositions:{"0%":"0%","5%":"5%","10%":"10%","15%":"15%","20%":"20%","25%":"25%","30%":"30%","35%":"35%","40%":"40%","45%":"45%","50%":"50%","55%":"55%","60%":"60%","65%":"65%","70%":"70%","75%":"75%","80%":"80%","85%":"85%","90%":"90%","95%":"95%","100%":"100%"},grayscale:{0:"0",DEFAULT:"100%"},gridAutoColumns:{auto:"auto",min:"min-content",max:"max-content",fr:"minmax(0, 1fr)"},gridAutoRows:{auto:"auto",min:"min-content",max:"max-content",fr:"minmax(0, 1fr)"},gridColumn:{auto:"auto","span-1":"span 1 / span 1","span-2":"span 2 / span 2","span-3":"span 3 / span 3","span-4":"span 4 / span 4","span-5":"span 5 / span 5","span-6":"span 6 / span 6","span-7":"span 7 / span 7","span-8":"span 8 / span 8","span-9":"span 9 / span 9","span-10":"span 10 / span 10","span-11":"span 11 / span 11","span-12":"span 12 / span 12","span-full":"1 / -1"},gridColumnEnd:{auto:"auto",1:"1",2:"2",3:"3",4:"4",5:"5",6:"6",7:"7",8:"8",9:"9",10:"10",11:"11",12:"12",13:"13"},gridColumnStart:{auto:"auto",1:"1",2:"2",3:"3",4:"4",5:"5",6:"6",7:"7",8:"8",9:"9",10:"10",11:"11",12:"12",13:"13"},gridRow:{auto:"auto","span-1":"span 1 / span 1","span-2":"span 2 / span 2","span-3":"span 3 / span 3","span-4":"span 4 / span 4","span-5":"span 5 / span 5","span-6":"span 6 / span 6","span-full":"1 / -1"},gridRowEnd:{auto:"auto",1:"1",2:"2",3:"3",4:"4",5:"5",6:"6",7:"7"},gridRowStart:{auto:"auto",1:"1",2:"2",3:"3",4:"4",5:"5",6:"6",7:"7"},gridTemplateColumns:{none:"none",1:"repeat(1, minmax(0, 1fr))",2:"repeat(2, minmax(0, 1fr))",3:"repeat(3, minmax(0, 1fr))",4:"repeat(4, minmax(0, 1fr))",5:"repeat(5, minmax(0, 1fr))",6:"repeat(6, minmax(0, 1fr))",7:"repeat(7, minmax(0, 1fr))",8:"repeat(8, minmax(0, 1fr))",9:"repeat(9, minmax(0, 1fr))",10:"repeat(10, minmax(0, 1fr))",11:"repeat(11, minmax(0, 1fr))",12:"repeat(12, minmax(0, 1fr))"},gridTemplateRows:{none:"none",1:"repeat(1, minmax(0, 1fr))",2:"repeat(2, minmax(0, 1fr))",3:"repeat(3, minmax(0, 1fr))",4:"repeat(4, minmax(0, 1fr))",5:"repeat(5, minmax(0, 1fr))",6:"repeat(6, minmax(0, 1fr))"},height:({theme:i})=>({auto:"auto",...i("spacing"),"1/2":"50%","1/3":"33.333333%","2/3":"66.666667%","1/4":"25%","2/4":"50%","3/4":"75%","1/5":"20%","2/5":"40%","3/5":"60%","4/5":"80%","1/6":"16.666667%","2/6":"33.333333%","3/6":"50%","4/6":"66.666667%","5/6":"83.333333%",full:"100%",screen:"100vh",min:"min-content",max:"max-content",fit:"fit-content"}),hueRotate:{0:"0deg",15:"15deg",30:"30deg",60:"60deg",90:"90deg",180:"180deg"},inset:({theme:i})=>({auto:"auto",...i("spacing"),"1/2":"50%","1/3":"33.333333%","2/3":"66.666667%","1/4":"25%","2/4":"50%","3/4":"75%",full:"100%"}),invert:{0:"0",DEFAULT:"100%"},keyframes:{spin:{to:{transform:"rotate(360deg)"}},ping:{"75%, 100%":{transform:"scale(2)",opacity:"0"}},pulse:{"50%":{opacity:".5"}},bounce:{"0%, 
100%":{transform:"translateY(-25%)",animationTimingFunction:"cubic-bezier(0.8,0,1,1)"},"50%":{transform:"none",animationTimingFunction:"cubic-bezier(0,0,0.2,1)"}}},letterSpacing:{tighter:"-0.05em",tight:"-0.025em",normal:"0em",wide:"0.025em",wider:"0.05em",widest:"0.1em"},lineHeight:{none:"1",tight:"1.25",snug:"1.375",normal:"1.5",relaxed:"1.625",loose:"2",3:".75rem",4:"1rem",5:"1.25rem",6:"1.5rem",7:"1.75rem",8:"2rem",9:"2.25rem",10:"2.5rem"},listStyleType:{none:"none",disc:"disc",decimal:"decimal"},listStyleImage:{none:"none"},margin:({theme:i})=>({auto:"auto",...i("spacing")}),lineClamp:{1:"1",2:"2",3:"3",4:"4",5:"5",6:"6"},maxHeight:({theme:i})=>({...i("spacing"),none:"none",full:"100%",screen:"100vh",min:"min-content",max:"max-content",fit:"fit-content"}),maxWidth:({theme:i,breakpoints:e})=>({none:"none",0:"0rem",xs:"20rem",sm:"24rem",md:"28rem",lg:"32rem",xl:"36rem","2xl":"42rem","3xl":"48rem","4xl":"56rem","5xl":"64rem","6xl":"72rem","7xl":"80rem",full:"100%",min:"min-content",max:"max-content",fit:"fit-content",prose:"65ch",...e(i("screens"))}),minHeight:{0:"0px",full:"100%",screen:"100vh",min:"min-content",max:"max-content",fit:"fit-content"},minWidth:{0:"0px",full:"100%",min:"min-content",max:"max-content",fit:"fit-content"},objectPosition:{bottom:"bottom",center:"center",left:"left","left-bottom":"left bottom","left-top":"left top",right:"right","right-bottom":"right bottom","right-top":"right top",top:"top"},opacity:{0:"0",5:"0.05",10:"0.1",20:"0.2",25:"0.25",30:"0.3",40:"0.4",50:"0.5",60:"0.6",70:"0.7",75:"0.75",80:"0.8",90:"0.9",95:"0.95",100:"1"},order:{first:"-9999",last:"9999",none:"0",1:"1",2:"2",3:"3",4:"4",5:"5",6:"6",7:"7",8:"8",9:"9",10:"10",11:"11",12:"12"},outlineColor:({theme:i})=>i("colors"),outlineOffset:{0:"0px",1:"1px",2:"2px",4:"4px",8:"8px"},outlineWidth:{0:"0px",1:"1px",2:"2px",4:"4px",8:"8px"},padding:({theme:i})=>i("spacing"),placeholderColor:({theme:i})=>i("colors"),placeholderOpacity:({theme:i})=>i("opacity"),ringColor:({theme:i})=>({DEFAULT:i("colors.blue.500","#3b82f6"),...i("colors")}),ringOffsetColor:({theme:i})=>i("colors"),ringOffsetWidth:{0:"0px",1:"1px",2:"2px",4:"4px",8:"8px"},ringOpacity:({theme:i})=>({DEFAULT:"0.5",...i("opacity")}),ringWidth:{DEFAULT:"3px",0:"0px",1:"1px",2:"2px",4:"4px",8:"8px"},rotate:{0:"0deg",1:"1deg",2:"2deg",3:"3deg",6:"6deg",12:"12deg",45:"45deg",90:"90deg",180:"180deg"},saturate:{0:"0",50:".5",100:"1",150:"1.5",200:"2"},scale:{0:"0",50:".5",75:".75",90:".9",95:".95",100:"1",105:"1.05",110:"1.1",125:"1.25",150:"1.5"},screens:{sm:"640px",md:"768px",lg:"1024px",xl:"1280px","2xl":"1536px"},scrollMargin:({theme:i})=>({...i("spacing")}),scrollPadding:({theme:i})=>i("spacing"),sepia:{0:"0",DEFAULT:"100%"},skew:{0:"0deg",1:"1deg",2:"2deg",3:"3deg",6:"6deg",12:"12deg"},space:({theme:i})=>({...i("spacing")}),spacing:{px:"1px",0:"0px",.5:"0.125rem",1:"0.25rem",1.5:"0.375rem",2:"0.5rem",2.5:"0.625rem",3:"0.75rem",3.5:"0.875rem",4:"1rem",5:"1.25rem",6:"1.5rem",7:"1.75rem",8:"2rem",9:"2.25rem",10:"2.5rem",11:"2.75rem",12:"3rem",14:"3.5rem",16:"4rem",20:"5rem",24:"6rem",28:"7rem",32:"8rem",36:"9rem",40:"10rem",44:"11rem",48:"12rem",52:"13rem",56:"14rem",60:"15rem",64:"16rem",72:"18rem",80:"20rem",96:"24rem"},stroke:({theme:i})=>({none:"none",...i("colors")}),strokeWidth:{0:"0",1:"1",2:"2"},supports:{},data:{},textColor:({theme:i})=>i("colors"),textDecorationColor:({theme:i})=>i("colors"),textDecorationThickness:{auto:"auto","from-font":"from-font",0:"0px",1:"1px",2:"2px",4:"4px",8:"8px"},textIndent:({theme:i})=>({...i("spacing")}),te
xtOpacity:({theme:i})=>i("opacity"),textUnderlineOffset:{auto:"auto",0:"0px",1:"1px",2:"2px",4:"4px",8:"8px"},transformOrigin:{center:"center",top:"top","top-right":"top right",right:"right","bottom-right":"bottom right",bottom:"bottom","bottom-left":"bottom left",left:"left","top-left":"top left"},transitionDelay:{0:"0s",75:"75ms",100:"100ms",150:"150ms",200:"200ms",300:"300ms",500:"500ms",700:"700ms",1e3:"1000ms"},transitionDuration:{DEFAULT:"150ms",0:"0s",75:"75ms",100:"100ms",150:"150ms",200:"200ms",300:"300ms",500:"500ms",700:"700ms",1e3:"1000ms"},transitionProperty:{none:"none",all:"all",DEFAULT:"color, background-color, border-color, text-decoration-color, fill, stroke, opacity, box-shadow, transform, filter, backdrop-filter",colors:"color, background-color, border-color, text-decoration-color, fill, stroke",opacity:"opacity",shadow:"box-shadow",transform:"transform"},transitionTimingFunction:{DEFAULT:"cubic-bezier(0.4, 0, 0.2, 1)",linear:"linear",in:"cubic-bezier(0.4, 0, 1, 1)",out:"cubic-bezier(0, 0, 0.2, 1)","in-out":"cubic-bezier(0.4, 0, 0.2, 1)"},translate:({theme:i})=>({...i("spacing"),"1/2":"50%","1/3":"33.333333%","2/3":"66.666667%","1/4":"25%","2/4":"50%","3/4":"75%",full:"100%"}),width:({theme:i})=>({auto:"auto",...i("spacing"),"1/2":"50%","1/3":"33.333333%","2/3":"66.666667%","1/4":"25%","2/4":"50%","3/4":"75%","1/5":"20%","2/5":"40%","3/5":"60%","4/5":"80%","1/6":"16.666667%","2/6":"33.333333%","3/6":"50%","4/6":"66.666667%","5/6":"83.333333%","1/12":"8.333333%","2/12":"16.666667%","3/12":"25%","4/12":"33.333333%","5/12":"41.666667%","6/12":"50%","7/12":"58.333333%","8/12":"66.666667%","9/12":"75%","10/12":"83.333333%","11/12":"91.666667%",full:"100%",screen:"100vw",min:"min-content",max:"max-content",fit:"fit-content"}),willChange:{auto:"auto",scroll:"scroll-position",contents:"contents",transform:"transform"},zIndex:{auto:"auto",0:"0",10:"10",20:"20",30:"30",40:"40",50:"50"}},plugins:[]}});function vi(i){let e=(i?.presets??[ef.default]).slice().reverse().flatMap(n=>vi(n instanceof Function?n():n)),t={respectDefaultRingColorOpacity:{theme:{ringColor:({theme:n})=>({DEFAULT:"#3b82f67f",...n("colors")})}},disableColorOpacityUtilitiesByDefault:{corePlugins:{backgroundOpacity:!1,borderOpacity:!1,divideOpacity:!1,placeholderOpacity:!1,ringOpacity:!1,textOpacity:!1}}},r=Object.keys(t).filter(n=>J(i,n)).map(n=>t[n]);return[i,...r,...e]}var ef,tf=C(()=>{l();ef=K(bi());De()});var rf={};Ae(rf,{default:()=>dr});function dr(...i){let[,...e]=vi(i[0]);return ms([...i,...e])}var gs=C(()=>{l();Ku();tf()});var nf={};Ae(nf,{default:()=>Z});var Z,mt=C(()=>{l();Z={resolve:i=>i,extname:i=>"."+i.split(".").pop()}});function xi(i){return typeof i=="object"&&i!==null}function o0(i){return Object.keys(i).length===0}function sf(i){return typeof i=="string"||i instanceof String}function ys(i){return xi(i)&&i.config===void 0&&!o0(i)?null:xi(i)&&i.config!==void 0&&sf(i.config)?Z.resolve(i.config):xi(i)&&i.config!==void 0&&xi(i.config)?null:sf(i)?Z.resolve(i):l0()}function l0(){for(let i of a0)try{let e=Z.resolve(i);return te.accessSync(e),e}catch(e){}return null}var a0,af=C(()=>{l();ze();mt();a0=["./tailwind.config.js","./tailwind.config.cjs","./tailwind.config.mjs","./tailwind.config.ts"]});var of={};Ae(of,{default:()=>ws});var ws,bs=C(()=>{l();ws={parse:i=>({href:i})}});var vs=v(()=>{l()});var ki=v((fT,ff)=>{l();"use strict";var lf=(ci(),vu),uf=vs(),Ct=class extends 
Error{constructor(e,t,r,n,a,s){super(e);this.name="CssSyntaxError",this.reason=e,a&&(this.file=a),n&&(this.source=n),s&&(this.plugin=s),typeof t!="undefined"&&typeof r!="undefined"&&(typeof t=="number"?(this.line=t,this.column=r):(this.line=t.line,this.column=t.column,this.endLine=r.line,this.endColumn=r.column)),this.setMessage(),Error.captureStackTrace&&Error.captureStackTrace(this,Ct)}setMessage(){this.message=this.plugin?this.plugin+": ":"",this.message+=this.file?this.file:"",typeof this.line!="undefined"&&(this.message+=":"+this.line+":"+this.column),this.message+=": "+this.reason}showSourceCode(e){if(!this.source)return"";let t=this.source;e==null&&(e=lf.isColorSupported),uf&&e&&(t=uf(t));let r=t.split(/\r?\n/),n=Math.max(this.line-3,0),a=Math.min(this.line+2,r.length),s=String(a).length,o,u;if(e){let{bold:c,red:f,gray:p}=lf.createColors(!0);o=d=>c(f(d)),u=d=>p(d)}else o=u=c=>c;return r.slice(n,a).map((c,f)=>{let p=n+1+f,d=" "+(" "+p).slice(-s)+" | ";if(p===this.line){let h=u(d.replace(/\d/g," "))+c.slice(0,this.column-1).replace(/[^\t]/g," ");return o(">")+u(d)+c+` + `+h+o("^")}return" "+u(d)+c}).join(` +`)}toString(){let e=this.showSourceCode();return e&&(e=` + +`+e+` +`),this.name+": "+this.message+e}};ff.exports=Ct;Ct.default=Ct});var Si=v((cT,xs)=>{l();"use strict";xs.exports.isClean=Symbol("isClean");xs.exports.my=Symbol("my")});var ks=v((pT,pf)=>{l();"use strict";var cf={colon:": ",indent:" ",beforeDecl:` +`,beforeRule:` +`,beforeOpen:" ",beforeClose:` +`,beforeComment:` +`,after:` +`,emptyBody:"",commentLeft:" ",commentRight:" ",semicolon:!1};function u0(i){return i[0].toUpperCase()+i.slice(1)}var Ci=class{constructor(e){this.builder=e}stringify(e,t){if(!this[e.type])throw new Error("Unknown AST node type "+e.type+". Maybe you need to change PostCSS stringifier.");this[e.type](e,t)}document(e){this.body(e)}root(e){this.body(e),e.raws.after&&this.builder(e.raws.after)}comment(e){let t=this.raw(e,"left","commentLeft"),r=this.raw(e,"right","commentRight");this.builder("/*"+t+e.text+r+"*/",e)}decl(e,t){let r=this.raw(e,"between","colon"),n=e.prop+r+this.rawValue(e,"value");e.important&&(n+=e.raws.important||" !important"),t&&(n+=";"),this.builder(n,e)}rule(e){this.block(e,this.rawValue(e,"selector")),e.raws.ownSemicolon&&this.builder(e.raws.ownSemicolon,e,"end")}atrule(e,t){let r="@"+e.name,n=e.params?this.rawValue(e,"params"):"";if(typeof e.raws.afterName!="undefined"?r+=e.raws.afterName:n&&(r+=" "),e.nodes)this.block(e,r+n);else{let a=(e.raws.between||"")+(t?";":"");this.builder(r+n+a,e)}}body(e){let t=e.nodes.length-1;for(;t>0&&e.nodes[t].type==="comment";)t-=1;let r=this.raw(e,"semicolon");for(let n=0;n{if(n=u.raws[t],typeof n!="undefined")return!1})}return typeof n=="undefined"&&(n=cf[r]),s.rawCache[r]=n,n}rawSemicolon(e){let t;return e.walk(r=>{if(r.nodes&&r.nodes.length&&r.last.type==="decl"&&(t=r.raws.semicolon,typeof t!="undefined"))return!1}),t}rawEmptyBody(e){let t;return e.walk(r=>{if(r.nodes&&r.nodes.length===0&&(t=r.raws.after,typeof t!="undefined"))return!1}),t}rawIndent(e){if(e.raws.indent)return e.raws.indent;let t;return e.walk(r=>{let n=r.parent;if(n&&n!==e&&n.parent&&n.parent===e&&typeof r.raws.before!="undefined"){let a=r.raws.before.split(` +`);return t=a[a.length-1],t=t.replace(/\S/g,""),!1}}),t}rawBeforeComment(e,t){let r;return e.walkComments(n=>{if(typeof n.raws.before!="undefined")return r=n.raws.before,r.includes(` +`)&&(r=r.replace(/[^\n]+$/,"")),!1}),typeof 
r=="undefined"?r=this.raw(t,null,"beforeDecl"):r&&(r=r.replace(/\S/g,"")),r}rawBeforeDecl(e,t){let r;return e.walkDecls(n=>{if(typeof n.raws.before!="undefined")return r=n.raws.before,r.includes(` +`)&&(r=r.replace(/[^\n]+$/,"")),!1}),typeof r=="undefined"?r=this.raw(t,null,"beforeRule"):r&&(r=r.replace(/\S/g,"")),r}rawBeforeRule(e){let t;return e.walk(r=>{if(r.nodes&&(r.parent!==e||e.first!==r)&&typeof r.raws.before!="undefined")return t=r.raws.before,t.includes(` +`)&&(t=t.replace(/[^\n]+$/,"")),!1}),t&&(t=t.replace(/\S/g,"")),t}rawBeforeClose(e){let t;return e.walk(r=>{if(r.nodes&&r.nodes.length>0&&typeof r.raws.after!="undefined")return t=r.raws.after,t.includes(` +`)&&(t=t.replace(/[^\n]+$/,"")),!1}),t&&(t=t.replace(/\S/g,"")),t}rawBeforeOpen(e){let t;return e.walk(r=>{if(r.type!=="decl"&&(t=r.raws.between,typeof t!="undefined"))return!1}),t}rawColon(e){let t;return e.walkDecls(r=>{if(typeof r.raws.between!="undefined")return t=r.raws.between.replace(/[^\s:]/g,""),!1}),t}beforeAfter(e,t){let r;e.type==="decl"?r=this.raw(e,null,"beforeDecl"):e.type==="comment"?r=this.raw(e,null,"beforeComment"):t==="before"?r=this.raw(e,null,"beforeRule"):r=this.raw(e,null,"beforeClose");let n=e.parent,a=0;for(;n&&n.type!=="root";)a+=1,n=n.parent;if(r.includes(` +`)){let s=this.raw(e,null,"indent");if(s.length)for(let o=0;o{l();"use strict";var f0=ks();function Ss(i,e){new f0(e).stringify(i)}df.exports=Ss;Ss.default=Ss});var mr=v((hT,hf)=>{l();"use strict";var{isClean:Ai,my:c0}=Si(),p0=ki(),d0=ks(),h0=hr();function Cs(i,e){let t=new i.constructor;for(let r in i){if(!Object.prototype.hasOwnProperty.call(i,r)||r==="proxyCache")continue;let n=i[r],a=typeof n;r==="parent"&&a==="object"?e&&(t[r]=e):r==="source"?t[r]=n:Array.isArray(n)?t[r]=n.map(s=>Cs(s,t)):(a==="object"&&n!==null&&(n=Cs(n)),t[r]=n)}return t}var _i=class{constructor(e={}){this.raws={},this[Ai]=!1,this[c0]=!0;for(let t in e)if(t==="nodes"){this.nodes=[];for(let r of e[t])typeof r.clone=="function"?this.append(r.clone()):this.append(r)}else this[t]=e[t]}error(e,t={}){if(this.source){let{start:r,end:n}=this.rangeBy(t);return this.source.input.error(e,{line:r.line,column:r.column},{line:n.line,column:n.column},t)}return new p0(e)}warn(e,t,r){let n={node:this};for(let a in r)n[a]=r[a];return e.warn(t,n)}remove(){return this.parent&&this.parent.removeChild(this),this.parent=void 0,this}toString(e=h0){e.stringify&&(e=e.stringify);let t="";return e(this,r=>{t+=r}),t}assign(e={}){for(let t in e)this[t]=e[t];return this}clone(e={}){let t=Cs(this);for(let r in e)t[r]=e[r];return t}cloneBefore(e={}){let t=this.clone(e);return this.parent.insertBefore(this,t),t}cloneAfter(e={}){let t=this.clone(e);return this.parent.insertAfter(this,t),t}replaceWith(...e){if(this.parent){let t=this,r=!1;for(let n of e)n===this?r=!0:r?(this.parent.insertAfter(t,n),t=n):this.parent.insertBefore(t,n);r||this.remove()}return this}next(){if(!this.parent)return;let e=this.parent.index(this);return this.parent.nodes[e+1]}prev(){if(!this.parent)return;let e=this.parent.index(this);return this.parent.nodes[e-1]}before(e){return this.parent.insertBefore(this,e),this}after(e){return this.parent.insertAfter(this,e),this}root(){let e=this;for(;e.parent&&e.parent.type!=="document";)e=e.parent;return e}raw(e,t){return new d0().raw(this,e,t)}cleanRaws(e){delete this.raws.before,delete this.raws.after,e||delete this.raws.between}toJSON(e,t){let r={},n=t==null;t=t||new Map;let a=0;for(let s in 
this){if(!Object.prototype.hasOwnProperty.call(this,s)||s==="parent"||s==="proxyCache")continue;let o=this[s];if(Array.isArray(o))r[s]=o.map(u=>typeof u=="object"&&u.toJSON?u.toJSON(null,t):u);else if(typeof o=="object"&&o.toJSON)r[s]=o.toJSON(null,t);else if(s==="source"){let u=t.get(o.input);u==null&&(u=a,t.set(o.input,a),a++),r[s]={inputId:u,start:o.start,end:o.end}}else r[s]=o}return n&&(r.inputs=[...t.keys()].map(s=>s.toJSON())),r}positionInside(e){let t=this.toString(),r=this.source.start.column,n=this.source.start.line;for(let a=0;ae.root().toProxy():e[t]}}}toProxy(){return this.proxyCache||(this.proxyCache=new Proxy(this,this.getProxyProcessor())),this.proxyCache}addToError(e){if(e.postcssNode=this,e.stack&&this.source&&/\n\s{4}at /.test(e.stack)){let t=this.source;e.stack=e.stack.replace(/\n\s{4}at /,`$&${t.input.from}:${t.start.line}:${t.start.column}$&`)}return e}markDirty(){if(this[Ai]){this[Ai]=!1;let e=this;for(;e=e.parent;)e[Ai]=!1}}get proxyOf(){return this}};hf.exports=_i;_i.default=_i});var gr=v((mT,mf)=>{l();"use strict";var m0=mr(),Ei=class extends m0{constructor(e){e&&typeof e.value!="undefined"&&typeof e.value!="string"&&(e={...e,value:String(e.value)});super(e);this.type="decl"}get variable(){return this.prop.startsWith("--")||this.prop[0]==="$"}};mf.exports=Ei;Ei.default=Ei});var As=v((gT,gf)=>{l();gf.exports=function(i,e){return{generate:()=>{let t="";return i(e,r=>{t+=r}),[t]}}}});var yr=v((yT,yf)=>{l();"use strict";var g0=mr(),Oi=class extends g0{constructor(e){super(e);this.type="comment"}};yf.exports=Oi;Oi.default=Oi});var it=v((wT,_f)=>{l();"use strict";var{isClean:wf,my:bf}=Si(),vf=gr(),xf=yr(),y0=mr(),kf,_s,Es,Sf;function Cf(i){return i.map(e=>(e.nodes&&(e.nodes=Cf(e.nodes)),delete e.source,e))}function Af(i){if(i[wf]=!1,i.proxyOf.nodes)for(let e of i.proxyOf.nodes)Af(e)}var ye=class extends y0{push(e){return e.parent=this,this.proxyOf.nodes.push(e),this}each(e){if(!this.proxyOf.nodes)return;let t=this.getIterator(),r,n;for(;this.indexes[t]{let n;try{n=e(t,r)}catch(a){throw t.addToError(a)}return n!==!1&&t.walk&&(n=t.walk(e)),n})}walkDecls(e,t){return t?e instanceof RegExp?this.walk((r,n)=>{if(r.type==="decl"&&e.test(r.prop))return t(r,n)}):this.walk((r,n)=>{if(r.type==="decl"&&r.prop===e)return t(r,n)}):(t=e,this.walk((r,n)=>{if(r.type==="decl")return t(r,n)}))}walkRules(e,t){return t?e instanceof RegExp?this.walk((r,n)=>{if(r.type==="rule"&&e.test(r.selector))return t(r,n)}):this.walk((r,n)=>{if(r.type==="rule"&&r.selector===e)return t(r,n)}):(t=e,this.walk((r,n)=>{if(r.type==="rule")return t(r,n)}))}walkAtRules(e,t){return t?e instanceof RegExp?this.walk((r,n)=>{if(r.type==="atrule"&&e.test(r.name))return t(r,n)}):this.walk((r,n)=>{if(r.type==="atrule"&&r.name===e)return t(r,n)}):(t=e,this.walk((r,n)=>{if(r.type==="atrule")return t(r,n)}))}walkComments(e){return this.walk((t,r)=>{if(t.type==="comment")return e(t,r)})}append(...e){for(let t of e){let r=this.normalize(t,this.last);for(let n of r)this.proxyOf.nodes.push(n)}return this.markDirty(),this}prepend(...e){e=e.reverse();for(let t of e){let r=this.normalize(t,this.first,"prepend").reverse();for(let n of r)this.proxyOf.nodes.unshift(n);for(let n in this.indexes)this.indexes[n]=this.indexes[n]+r.length}return this.markDirty(),this}cleanRaws(e){if(super.cleanRaws(e),this.nodes)for(let t of this.nodes)t.cleanRaws(e)}insertBefore(e,t){let r=this.index(e),n=r===0?"prepend":!1,a=this.normalize(t,this.proxyOf.nodes[r],n).reverse();r=this.index(e);for(let o of a)this.proxyOf.nodes.splice(r,0,o);let s;for(let 
o in this.indexes)s=this.indexes[o],r<=s&&(this.indexes[o]=s+a.length);return this.markDirty(),this}insertAfter(e,t){let r=this.index(e),n=this.normalize(t,this.proxyOf.nodes[r]).reverse();r=this.index(e);for(let s of n)this.proxyOf.nodes.splice(r+1,0,s);let a;for(let s in this.indexes)a=this.indexes[s],r=e&&(this.indexes[r]=t-1);return this.markDirty(),this}removeAll(){for(let e of this.proxyOf.nodes)e.parent=void 0;return this.proxyOf.nodes=[],this.markDirty(),this}replaceValues(e,t,r){return r||(r=t,t={}),this.walkDecls(n=>{t.props&&!t.props.includes(n.prop)||t.fast&&!n.value.includes(t.fast)||(n.value=n.value.replace(e,r))}),this.markDirty(),this}every(e){return this.nodes.every(e)}some(e){return this.nodes.some(e)}index(e){return typeof e=="number"?e:(e.proxyOf&&(e=e.proxyOf),this.proxyOf.nodes.indexOf(e))}get first(){if(!!this.proxyOf.nodes)return this.proxyOf.nodes[0]}get last(){if(!!this.proxyOf.nodes)return this.proxyOf.nodes[this.proxyOf.nodes.length-1]}normalize(e,t){if(typeof e=="string")e=Cf(kf(e).nodes);else if(Array.isArray(e)){e=e.slice(0);for(let n of e)n.parent&&n.parent.removeChild(n,"ignore")}else if(e.type==="root"&&this.type!=="document"){e=e.nodes.slice(0);for(let n of e)n.parent&&n.parent.removeChild(n,"ignore")}else if(e.type)e=[e];else if(e.prop){if(typeof e.value=="undefined")throw new Error("Value field is missed in node creation");typeof e.value!="string"&&(e.value=String(e.value)),e=[new vf(e)]}else if(e.selector)e=[new _s(e)];else if(e.name)e=[new Es(e)];else if(e.text)e=[new xf(e)];else throw new Error("Unknown node type in node creation");return e.map(n=>(n[bf]||ye.rebuild(n),n=n.proxyOf,n.parent&&n.parent.removeChild(n),n[wf]&&Af(n),typeof n.raws.before=="undefined"&&t&&typeof t.raws.before!="undefined"&&(n.raws.before=t.raws.before.replace(/\S/g,"")),n.parent=this.proxyOf,n))}getProxyProcessor(){return{set(e,t,r){return e[t]===r||(e[t]=r,(t==="name"||t==="params"||t==="selector")&&e.markDirty()),!0},get(e,t){return t==="proxyOf"?e:e[t]?t==="each"||typeof t=="string"&&t.startsWith("walk")?(...r)=>e[t](...r.map(n=>typeof n=="function"?(a,s)=>n(a.toProxy(),s):n)):t==="every"||t==="some"?r=>e[t]((n,...a)=>r(n.toProxy(),...a)):t==="root"?()=>e.root().toProxy():t==="nodes"?e.nodes.map(r=>r.toProxy()):t==="first"||t==="last"?e[t].toProxy():e[t]:e[t]}}}getIterator(){this.lastEach||(this.lastEach=0),this.indexes||(this.indexes={}),this.lastEach+=1;let e=this.lastEach;return this.indexes[e]=0,e}};ye.registerParse=i=>{kf=i};ye.registerRule=i=>{_s=i};ye.registerAtRule=i=>{Es=i};ye.registerRoot=i=>{Sf=i};_f.exports=ye;ye.default=ye;ye.rebuild=i=>{i.type==="atrule"?Object.setPrototypeOf(i,Es.prototype):i.type==="rule"?Object.setPrototypeOf(i,_s.prototype):i.type==="decl"?Object.setPrototypeOf(i,vf.prototype):i.type==="comment"?Object.setPrototypeOf(i,xf.prototype):i.type==="root"&&Object.setPrototypeOf(i,Sf.prototype),i[bf]=!0,i.nodes&&i.nodes.forEach(e=>{ye.rebuild(e)})}});var Ti=v((bT,Tf)=>{l();"use strict";var w0=it(),Ef,Of,At=class extends w0{constructor(e){super({type:"document",...e});this.nodes||(this.nodes=[])}toResult(e={}){return new Ef(new Of,this,e).stringify()}};At.registerLazyResult=i=>{Ef=i};At.registerProcessor=i=>{Of=i};Tf.exports=At;At.default=At});var Os=v((vT,Df)=>{l();"use strict";var Pf={};Df.exports=function(e){Pf[e]||(Pf[e]=!0,typeof console!="undefined"&&console.warn&&console.warn(e))}});var Ts=v((xT,If)=>{l();"use strict";var Pi=class{constructor(e,t={}){if(this.type="warning",this.text=e,t.node&&t.node.source){let 
r=t.node.rangeBy(t);this.line=r.start.line,this.column=r.start.column,this.endLine=r.end.line,this.endColumn=r.end.column}for(let r in t)this[r]=t[r]}toString(){return this.node?this.node.error(this.text,{plugin:this.plugin,index:this.index,word:this.word}).message:this.plugin?this.plugin+": "+this.text:this.text}};If.exports=Pi;Pi.default=Pi});var Ii=v((kT,qf)=>{l();"use strict";var b0=Ts(),Di=class{constructor(e,t,r){this.processor=e,this.messages=[],this.root=t,this.opts=r,this.css=void 0,this.map=void 0}toString(){return this.css}warn(e,t={}){t.plugin||this.lastPlugin&&this.lastPlugin.postcssPlugin&&(t.plugin=this.lastPlugin.postcssPlugin);let r=new b0(e,t);return this.messages.push(r),r}warnings(){return this.messages.filter(e=>e.type==="warning")}get content(){return this.css}};qf.exports=Di;Di.default=Di});var Nf=v((ST,Ff)=>{l();"use strict";var Ps="'".charCodeAt(0),Rf='"'.charCodeAt(0),qi="\\".charCodeAt(0),Mf="/".charCodeAt(0),Ri=` +`.charCodeAt(0),wr=" ".charCodeAt(0),Mi="\f".charCodeAt(0),Bi=" ".charCodeAt(0),Fi="\r".charCodeAt(0),v0="[".charCodeAt(0),x0="]".charCodeAt(0),k0="(".charCodeAt(0),S0=")".charCodeAt(0),C0="{".charCodeAt(0),A0="}".charCodeAt(0),_0=";".charCodeAt(0),E0="*".charCodeAt(0),O0=":".charCodeAt(0),T0="@".charCodeAt(0),Ni=/[\t\n\f\r "#'()/;[\\\]{}]/g,Li=/[\t\n\f\r !"#'():;@[\\\]{}]|\/(?=\*)/g,P0=/.[\n"'(/\\]/,Bf=/[\da-f]/i;Ff.exports=function(e,t={}){let r=e.css.valueOf(),n=t.ignoreErrors,a,s,o,u,c,f,p,d,h,y,x=r.length,w=0,b=[],k=[];function S(){return w}function _(q){throw e.error("Unclosed "+q,w)}function E(){return k.length===0&&w>=x}function I(q){if(k.length)return k.pop();if(w>=x)return;let X=q?q.ignoreUnclosed:!1;switch(a=r.charCodeAt(w),a){case Ri:case wr:case Bi:case Fi:case Mi:{s=w;do s+=1,a=r.charCodeAt(s);while(a===wr||a===Ri||a===Bi||a===Fi||a===Mi);y=["space",r.slice(w,s)],w=s-1;break}case v0:case x0:case C0:case A0:case O0:case _0:case S0:{let ae=String.fromCharCode(a);y=[ae,ae,w];break}case k0:{if(d=b.length?b.pop()[1]:"",h=r.charCodeAt(w+1),d==="url"&&h!==Ps&&h!==Rf&&h!==wr&&h!==Ri&&h!==Bi&&h!==Mi&&h!==Fi){s=w;do{if(f=!1,s=r.indexOf(")",s+1),s===-1)if(n||X){s=w;break}else _("bracket");for(p=s;r.charCodeAt(p-1)===qi;)p-=1,f=!f}while(f);y=["brackets",r.slice(w,s+1),w,s],w=s}else s=r.indexOf(")",w+1),u=r.slice(w,s+1),s===-1||P0.test(u)?y=["(","(",w]:(y=["brackets",u,w,s],w=s);break}case Ps:case Rf:{o=a===Ps?"'":'"',s=w;do{if(f=!1,s=r.indexOf(o,s+1),s===-1)if(n||X){s=w+1;break}else _("string");for(p=s;r.charCodeAt(p-1)===qi;)p-=1,f=!f}while(f);y=["string",r.slice(w,s+1),w,s],w=s;break}case T0:{Ni.lastIndex=w+1,Ni.test(r),Ni.lastIndex===0?s=r.length-1:s=Ni.lastIndex-2,y=["at-word",r.slice(w,s+1),w,s],w=s;break}case qi:{for(s=w,c=!0;r.charCodeAt(s+1)===qi;)s+=1,c=!c;if(a=r.charCodeAt(s+1),c&&a!==Mf&&a!==wr&&a!==Ri&&a!==Bi&&a!==Fi&&a!==Mi&&(s+=1,Bf.test(r.charAt(s)))){for(;Bf.test(r.charAt(s+1));)s+=1;r.charCodeAt(s+1)===wr&&(s+=1)}y=["word",r.slice(w,s+1),w,s],w=s;break}default:{a===Mf&&r.charCodeAt(w+1)===E0?(s=r.indexOf("*/",w+2)+1,s===0&&(n||X?s=r.length:_("comment")),y=["comment",r.slice(w,s+1),w,s],w=s):(Li.lastIndex=w+1,Li.test(r),Li.lastIndex===0?s=r.length-1:s=Li.lastIndex-2,y=["word",r.slice(w,s+1),w,s],b.push(y),w=s);break}}return w++,y}function B(q){k.push(q)}return{back:B,nextToken:I,endOfFile:E,position:S}}});var $i=v((CT,$f)=>{l();"use strict";var Lf=it(),br=class extends Lf{constructor(e){super(e);this.type="atrule"}append(...e){return this.proxyOf.nodes||(this.nodes=[]),super.append(...e)}prepend(...e){return 
this.proxyOf.nodes||(this.nodes=[]),super.prepend(...e)}};$f.exports=br;br.default=br;Lf.registerAtRule(br)});var _t=v((AT,Uf)=>{l();"use strict";var jf=it(),zf,Vf,gt=class extends jf{constructor(e){super(e);this.type="root",this.nodes||(this.nodes=[])}removeChild(e,t){let r=this.index(e);return!t&&r===0&&this.nodes.length>1&&(this.nodes[1].raws.before=this.nodes[r].raws.before),super.removeChild(e)}normalize(e,t,r){let n=super.normalize(e);if(t){if(r==="prepend")this.nodes.length>1?t.raws.before=this.nodes[1].raws.before:delete t.raws.before;else if(this.first!==t)for(let a of n)a.raws.before=t.raws.before}return n}toResult(e={}){return new zf(new Vf,this,e).stringify()}};gt.registerLazyResult=i=>{zf=i};gt.registerProcessor=i=>{Vf=i};Uf.exports=gt;gt.default=gt;jf.registerRoot(gt)});var Ds=v((_T,Wf)=>{l();"use strict";var vr={split(i,e,t){let r=[],n="",a=!1,s=0,o=!1,u="",c=!1;for(let f of i)c?c=!1:f==="\\"?c=!0:o?f===u&&(o=!1):f==='"'||f==="'"?(o=!0,u=f):f==="("?s+=1:f===")"?s>0&&(s-=1):s===0&&e.includes(f)&&(a=!0),a?(n!==""&&r.push(n.trim()),n="",a=!1):n+=f;return(t||n!=="")&&r.push(n.trim()),r},space(i){let e=[" ",` +`," "];return vr.split(i,e)},comma(i){return vr.split(i,[","],!0)}};Wf.exports=vr;vr.default=vr});var ji=v((ET,Hf)=>{l();"use strict";var Gf=it(),D0=Ds(),xr=class extends Gf{constructor(e){super(e);this.type="rule",this.nodes||(this.nodes=[])}get selectors(){return D0.comma(this.selector)}set selectors(e){let t=this.selector?this.selector.match(/,\s*/):null,r=t?t[0]:","+this.raw("between","beforeOpen");this.selector=e.join(r)}};Hf.exports=xr;xr.default=xr;Gf.registerRule(xr)});var Kf=v((OT,Xf)=>{l();"use strict";var I0=gr(),q0=Nf(),R0=yr(),M0=$i(),B0=_t(),Yf=ji(),Qf={empty:!0,space:!0};function F0(i){for(let e=i.length-1;e>=0;e--){let t=i[e],r=t[3]||t[2];if(r)return r}}var Jf=class{constructor(e){this.input=e,this.root=new B0,this.current=this.root,this.spaces="",this.semicolon=!1,this.customProperty=!1,this.createTokenizer(),this.root.source={input:e,start:{offset:0,line:1,column:1}}}createTokenizer(){this.tokenizer=q0(this.input)}parse(){let e;for(;!this.tokenizer.endOfFile();)switch(e=this.tokenizer.nextToken(),e[0]){case"space":this.spaces+=e[1];break;case";":this.freeSemicolon(e);break;case"}":this.end(e);break;case"comment":this.comment(e);break;case"at-word":this.atrule(e);break;case"{":this.emptyRule(e);break;default:this.other(e);break}this.endFile()}comment(e){let t=new R0;this.init(t,e[2]),t.source.end=this.getPosition(e[3]||e[2]);let r=e[1].slice(2,-2);if(/^\s*$/.test(r))t.text="",t.raws.left=r,t.raws.right="";else{let n=r.match(/^(\s*)([^]*\S)(\s*)$/);t.text=n[2],t.raws.left=n[1],t.raws.right=n[3]}}emptyRule(e){let t=new Yf;this.init(t,e[2]),t.selector="",t.raws.between="",this.current=t}other(e){let t=!1,r=null,n=!1,a=null,s=[],o=e[1].startsWith("--"),u=[],c=e;for(;c;){if(r=c[0],u.push(c),r==="("||r==="[")a||(a=c),s.push(r==="("?")":"]");else if(o&&n&&r==="{")a||(a=c),s.push("}");else if(s.length===0)if(r===";")if(n){this.decl(u,o);return}else break;else if(r==="{"){this.rule(u);return}else if(r==="}"){this.tokenizer.back(u.pop()),t=!0;break}else r===":"&&(n=!0);else r===s[s.length-1]&&(s.pop(),s.length===0&&(a=null));c=this.tokenizer.nextToken()}if(this.tokenizer.endOfFile()&&(t=!0),s.length>0&&this.unclosedBracket(a),t&&n){if(!o)for(;u.length&&(c=u[u.length-1][0],!(c!=="space"&&c!=="comment"));)this.tokenizer.back(u.pop());this.decl(u,o)}else this.unknownWord(u)}rule(e){e.pop();let t=new 
Yf;this.init(t,e[0][2]),t.raws.between=this.spacesAndCommentsFromEnd(e),this.raw(t,"selector",e),this.current=t}decl(e,t){let r=new I0;this.init(r,e[0][2]);let n=e[e.length-1];for(n[0]===";"&&(this.semicolon=!0,e.pop()),r.source.end=this.getPosition(n[3]||n[2]||F0(e));e[0][0]!=="word";)e.length===1&&this.unknownWord(e),r.raws.before+=e.shift()[1];for(r.source.start=this.getPosition(e[0][2]),r.prop="";e.length;){let c=e[0][0];if(c===":"||c==="space"||c==="comment")break;r.prop+=e.shift()[1]}r.raws.between="";let a;for(;e.length;)if(a=e.shift(),a[0]===":"){r.raws.between+=a[1];break}else a[0]==="word"&&/\w/.test(a[1])&&this.unknownWord([a]),r.raws.between+=a[1];(r.prop[0]==="_"||r.prop[0]==="*")&&(r.raws.before+=r.prop[0],r.prop=r.prop.slice(1));let s=[],o;for(;e.length&&(o=e[0][0],!(o!=="space"&&o!=="comment"));)s.push(e.shift());this.precheckMissedSemicolon(e);for(let c=e.length-1;c>=0;c--){if(a=e[c],a[1].toLowerCase()==="!important"){r.important=!0;let f=this.stringFrom(e,c);f=this.spacesFromEnd(e)+f,f!==" !important"&&(r.raws.important=f);break}else if(a[1].toLowerCase()==="important"){let f=e.slice(0),p="";for(let d=c;d>0;d--){let h=f[d][0];if(p.trim().indexOf("!")===0&&h!=="space")break;p=f.pop()[1]+p}p.trim().indexOf("!")===0&&(r.important=!0,r.raws.important=p,e=f)}if(a[0]!=="space"&&a[0]!=="comment")break}e.some(c=>c[0]!=="space"&&c[0]!=="comment")&&(r.raws.between+=s.map(c=>c[1]).join(""),s=[]),this.raw(r,"value",s.concat(e),t),r.value.includes(":")&&!t&&this.checkMissedSemicolon(e)}atrule(e){let t=new M0;t.name=e[1].slice(1),t.name===""&&this.unnamedAtrule(t,e),this.init(t,e[2]);let r,n,a,s=!1,o=!1,u=[],c=[];for(;!this.tokenizer.endOfFile();){if(e=this.tokenizer.nextToken(),r=e[0],r==="("||r==="["?c.push(r==="("?")":"]"):r==="{"&&c.length>0?c.push("}"):r===c[c.length-1]&&c.pop(),c.length===0)if(r===";"){t.source.end=this.getPosition(e[2]),this.semicolon=!0;break}else if(r==="{"){o=!0;break}else if(r==="}"){if(u.length>0){for(a=u.length-1,n=u[a];n&&n[0]==="space";)n=u[--a];n&&(t.source.end=this.getPosition(n[3]||n[2]))}this.end(e);break}else u.push(e);else u.push(e);if(this.tokenizer.endOfFile()){s=!0;break}}t.raws.between=this.spacesAndCommentsFromEnd(u),u.length?(t.raws.afterName=this.spacesAndCommentsFromStart(u),this.raw(t,"params",u),s&&(e=u[u.length-1],t.source.end=this.getPosition(e[3]||e[2]),this.spaces=t.raws.between,t.raws.between="")):(t.raws.afterName="",t.params=""),o&&(t.nodes=[],this.current=t)}end(e){this.current.nodes&&this.current.nodes.length&&(this.current.raws.semicolon=this.semicolon),this.semicolon=!1,this.current.raws.after=(this.current.raws.after||"")+this.spaces,this.spaces="",this.current.parent?(this.current.source.end=this.getPosition(e[2]),this.current=this.current.parent):this.unexpectedClose(e)}endFile(){this.current.parent&&this.unclosedBlock(),this.current.nodes&&this.current.nodes.length&&(this.current.raws.semicolon=this.semicolon),this.current.raws.after=(this.current.raws.after||"")+this.spaces}freeSemicolon(e){if(this.spaces+=e[1],this.current.nodes){let t=this.current.nodes[this.current.nodes.length-1];t&&t.type==="rule"&&!t.raws.ownSemicolon&&(t.raws.ownSemicolon=this.spaces,this.spaces="")}}getPosition(e){let t=this.input.fromOffset(e);return{offset:e,line:t.line,column:t.col}}init(e,t){this.current.push(e),e.source={start:this.getPosition(t),input:this.input},e.raws.before=this.spaces,this.spaces="",e.type!=="comment"&&(this.semicolon=!1)}raw(e,t,r,n){let a,s,o=r.length,u="",c=!0,f,p;for(let 
d=0;dh+y[1],"");e.raws[t]={value:u,raw:d}}e[t]=u}spacesAndCommentsFromEnd(e){let t,r="";for(;e.length&&(t=e[e.length-1][0],!(t!=="space"&&t!=="comment"));)r=e.pop()[1]+r;return r}spacesAndCommentsFromStart(e){let t,r="";for(;e.length&&(t=e[0][0],!(t!=="space"&&t!=="comment"));)r+=e.shift()[1];return r}spacesFromEnd(e){let t,r="";for(;e.length&&(t=e[e.length-1][0],t==="space");)r=e.pop()[1]+r;return r}stringFrom(e,t){let r="";for(let n=t;n=0&&(n=e[a],!(n[0]!=="space"&&(r+=1,r===2)));a--);throw this.input.error("Missed semicolon",n[0]==="word"?n[3]+1:n[2])}};Xf.exports=Jf});var Zf=v(()=>{l()});var tc=v((DT,ec)=>{l();var N0="useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict",L0=(i,e=21)=>(t=e)=>{let r="",n=t;for(;n--;)r+=i[Math.random()*i.length|0];return r},$0=(i=21)=>{let e="",t=i;for(;t--;)e+=N0[Math.random()*64|0];return e};ec.exports={nanoid:$0,customAlphabet:L0}});var Is=v((IT,rc)=>{l();rc.exports={}});var Vi=v((qT,ac)=>{l();"use strict";var{SourceMapConsumer:j0,SourceMapGenerator:z0}=Zf(),{fileURLToPath:ic,pathToFileURL:zi}=(bs(),of),{resolve:qs,isAbsolute:Rs}=(mt(),nf),{nanoid:V0}=tc(),Ms=vs(),nc=ki(),U0=Is(),Bs=Symbol("fromOffsetCache"),W0=Boolean(j0&&z0),sc=Boolean(qs&&Rs),kr=class{constructor(e,t={}){if(e===null||typeof e=="undefined"||typeof e=="object"&&!e.toString)throw new Error(`PostCSS received ${e} instead of CSS string`);if(this.css=e.toString(),this.css[0]==="\uFEFF"||this.css[0]==="\uFFFE"?(this.hasBOM=!0,this.css=this.css.slice(1)):this.hasBOM=!1,t.from&&(!sc||/^\w+:\/\//.test(t.from)||Rs(t.from)?this.file=t.from:this.file=qs(t.from)),sc&&W0){let r=new U0(this.css,t);if(r.text){this.map=r;let n=r.consumer().file;!this.file&&n&&(this.file=this.mapResolve(n))}}this.file||(this.id=""),this.map&&(this.map.file=this.from)}fromOffset(e){let t,r;if(this[Bs])r=this[Bs];else{let a=this.css.split(` +`);r=new Array(a.length);let s=0;for(let o=0,u=a.length;o=t)n=r.length-1;else{let a=r.length-2,s;for(;n>1),e=r[s+1])n=s+1;else{n=s;break}}return{line:n+1,col:e-r[n]+1}}error(e,t,r,n={}){let a,s,o;if(t&&typeof t=="object"){let c=t,f=r;if(typeof c.offset=="number"){let p=this.fromOffset(c.offset);t=p.line,r=p.col}else t=c.line,r=c.column;if(typeof f.offset=="number"){let p=this.fromOffset(f.offset);s=p.line,o=p.col}else s=f.line,o=f.column}else if(!r){let c=this.fromOffset(t);t=c.line,r=c.col}let u=this.origin(t,r,s,o);return u?a=new nc(e,u.endLine===void 0?u.line:{line:u.line,column:u.column},u.endLine===void 0?u.column:{line:u.endLine,column:u.endColumn},u.source,u.file,n.plugin):a=new nc(e,s===void 0?t:{line:t,column:r},s===void 0?r:{line:s,column:o},this.css,this.file,n.plugin),a.input={line:t,column:r,endLine:s,endColumn:o,source:this.css},this.file&&(zi&&(a.input.url=zi(this.file).toString()),a.input.file=this.file),a}origin(e,t,r,n){if(!this.map)return!1;let a=this.map.consumer(),s=a.originalPositionFor({line:e,column:t});if(!s.source)return!1;let o;typeof r=="number"&&(o=a.originalPositionFor({line:r,column:n}));let u;Rs(s.source)?u=zi(s.source):u=new URL(s.source,this.map.consumer().sourceRoot||zi(this.map.mapFile));let c={url:u.toString(),line:s.line,column:s.column,endLine:o&&o.line,endColumn:o&&o.column};if(u.protocol==="file:")if(ic)c.file=ic(u);else throw new Error("file: protocol is not available in this PostCSS build");let f=a.sourceContentFor(s.source);return f&&(c.source=f),c}mapResolve(e){return/^\w+:\/\//.test(e)?e:qs(this.map.consumer().sourceRoot||this.map.root||".",e)}get from(){return this.file||this.id}toJSON(){let e={};for(let t 
of["hasBOM","css","file","id"])this[t]!=null&&(e[t]=this[t]);return this.map&&(e.map={...this.map},e.map.consumerCache&&(e.map.consumerCache=void 0)),e}};ac.exports=kr;kr.default=kr;Ms&&Ms.registerInput&&Ms.registerInput(kr)});var Wi=v((RT,oc)=>{l();"use strict";var G0=it(),H0=Kf(),Y0=Vi();function Ui(i,e){let t=new Y0(i,e),r=new H0(t);try{r.parse()}catch(n){throw n}return r.root}oc.exports=Ui;Ui.default=Ui;G0.registerParse(Ui)});var Ls=v((BT,cc)=>{l();"use strict";var{isClean:qe,my:Q0}=Si(),J0=As(),X0=hr(),K0=it(),Z0=Ti(),MT=Os(),lc=Ii(),ev=Wi(),tv=_t(),rv={document:"Document",root:"Root",atrule:"AtRule",rule:"Rule",decl:"Declaration",comment:"Comment"},iv={postcssPlugin:!0,prepare:!0,Once:!0,Document:!0,Root:!0,Declaration:!0,Rule:!0,AtRule:!0,Comment:!0,DeclarationExit:!0,RuleExit:!0,AtRuleExit:!0,CommentExit:!0,RootExit:!0,DocumentExit:!0,OnceExit:!0},nv={postcssPlugin:!0,prepare:!0,Once:!0},Et=0;function Sr(i){return typeof i=="object"&&typeof i.then=="function"}function uc(i){let e=!1,t=rv[i.type];return i.type==="decl"?e=i.prop.toLowerCase():i.type==="atrule"&&(e=i.name.toLowerCase()),e&&i.append?[t,t+"-"+e,Et,t+"Exit",t+"Exit-"+e]:e?[t,t+"-"+e,t+"Exit",t+"Exit-"+e]:i.append?[t,Et,t+"Exit"]:[t,t+"Exit"]}function fc(i){let e;return i.type==="document"?e=["Document",Et,"DocumentExit"]:i.type==="root"?e=["Root",Et,"RootExit"]:e=uc(i),{node:i,events:e,eventIndex:0,visitors:[],visitorIndex:0,iterator:0}}function Fs(i){return i[qe]=!1,i.nodes&&i.nodes.forEach(e=>Fs(e)),i}var Ns={},Ve=class{constructor(e,t,r){this.stringified=!1,this.processed=!1;let n;if(typeof t=="object"&&t!==null&&(t.type==="root"||t.type==="document"))n=Fs(t);else if(t instanceof Ve||t instanceof lc)n=Fs(t.root),t.map&&(typeof r.map=="undefined"&&(r.map={}),r.map.inline||(r.map.inline=!1),r.map.prev=t.map);else{let a=ev;r.syntax&&(a=r.syntax.parse),r.parser&&(a=r.parser),a.parse&&(a=a.parse);try{n=a(t,r)}catch(s){this.processed=!0,this.error=s}n&&!n[Q0]&&K0.rebuild(n)}this.result=new lc(e,n,r),this.helpers={...Ns,result:this.result,postcss:Ns},this.plugins=this.processor.plugins.map(a=>typeof a=="object"&&a.prepare?{...a,...a.prepare(this.result)}:a)}get[Symbol.toStringTag](){return"LazyResult"}get processor(){return this.result.processor}get opts(){return this.result.opts}get css(){return this.stringify().css}get content(){return this.stringify().content}get map(){return this.stringify().map}get root(){return this.sync().root}get messages(){return this.sync().messages}warnings(){return this.sync().warnings()}toString(){return this.css}then(e,t){return this.async().then(e,t)}catch(e){return this.async().catch(e)}finally(e){return this.async().then(e,e)}async(){return this.error?Promise.reject(this.error):this.processed?Promise.resolve(this.result):(this.processing||(this.processing=this.runAsync()),this.processing)}sync(){if(this.error)throw this.error;if(this.processed)return this.result;if(this.processed=!0,this.processing)throw this.getAsyncError();for(let e of this.plugins){let t=this.runOnRoot(e);if(Sr(t))throw this.getAsyncError()}if(this.prepareVisitors(),this.hasListener){let e=this.result.root;for(;!e[qe];)e[qe]=!0,this.walkSync(e);if(this.listeners.OnceExit)if(e.type==="document")for(let t of e.nodes)this.visitSync(this.listeners.OnceExit,t);else this.visitSync(this.listeners.OnceExit,e)}return this.result}stringify(){if(this.error)throw this.error;if(this.stringified)return this.result;this.stringified=!0,this.sync();let 
e=this.result.opts,t=X0;e.syntax&&(t=e.syntax.stringify),e.stringifier&&(t=e.stringifier),t.stringify&&(t=t.stringify);let n=new J0(t,this.result.root,this.result.opts).generate();return this.result.css=n[0],this.result.map=n[1],this.result}walkSync(e){e[qe]=!0;let t=uc(e);for(let r of t)if(r===Et)e.nodes&&e.each(n=>{n[qe]||this.walkSync(n)});else{let n=this.listeners[r];if(n&&this.visitSync(n,e.toProxy()))return}}visitSync(e,t){for(let[r,n]of e){this.result.lastPlugin=r;let a;try{a=n(t,this.helpers)}catch(s){throw this.handleError(s,t.proxyOf)}if(t.type!=="root"&&t.type!=="document"&&!t.parent)return!0;if(Sr(a))throw this.getAsyncError()}}runOnRoot(e){this.result.lastPlugin=e;try{if(typeof e=="object"&&e.Once){if(this.result.root.type==="document"){let t=this.result.root.nodes.map(r=>e.Once(r,this.helpers));return Sr(t[0])?Promise.all(t):t}return e.Once(this.result.root,this.helpers)}else if(typeof e=="function")return e(this.result.root,this.result)}catch(t){throw this.handleError(t)}}getAsyncError(){throw new Error("Use process(css).then(cb) to work with async plugins")}handleError(e,t){let r=this.result.lastPlugin;try{t&&t.addToError(e),this.error=e,e.name==="CssSyntaxError"&&!e.plugin?(e.plugin=r.postcssPlugin,e.setMessage()):r.postcssVersion}catch(n){console&&console.error&&console.error(n)}return e}async runAsync(){this.plugin=0;for(let e=0;e0;){let r=this.visitTick(t);if(Sr(r))try{await r}catch(n){let a=t[t.length-1].node;throw this.handleError(n,a)}}}if(this.listeners.OnceExit)for(let[t,r]of this.listeners.OnceExit){this.result.lastPlugin=t;try{if(e.type==="document"){let n=e.nodes.map(a=>r(a,this.helpers));await Promise.all(n)}else await r(e,this.helpers)}catch(n){throw this.handleError(n)}}}return this.processed=!0,this.stringify()}prepareVisitors(){this.listeners={};let e=(t,r,n)=>{this.listeners[r]||(this.listeners[r]=[]),this.listeners[r].push([t,n])};for(let t of this.plugins)if(typeof t=="object")for(let r in t){if(!iv[r]&&/^[A-Z]/.test(r))throw new Error(`Unknown event ${r} in ${t.postcssPlugin}. 
Try to update PostCSS (${this.processor.version} now).`);if(!nv[r])if(typeof t[r]=="object")for(let n in t[r])n==="*"?e(t,r,t[r][n]):e(t,r+"-"+n.toLowerCase(),t[r][n]);else typeof t[r]=="function"&&e(t,r,t[r])}this.hasListener=Object.keys(this.listeners).length>0}visitTick(e){let t=e[e.length-1],{node:r,visitors:n}=t;if(r.type!=="root"&&r.type!=="document"&&!r.parent){e.pop();return}if(n.length>0&&t.visitorIndex{Ns=i};cc.exports=Ve;Ve.default=Ve;tv.registerLazyResult(Ve);Z0.registerLazyResult(Ve)});var dc=v((NT,pc)=>{l();"use strict";var sv=As(),av=hr(),FT=Os(),ov=Wi(),lv=Ii(),Gi=class{constructor(e,t,r){t=t.toString(),this.stringified=!1,this._processor=e,this._css=t,this._opts=r,this._map=void 0;let n,a=av;this.result=new lv(this._processor,n,this._opts),this.result.css=t;let s=this;Object.defineProperty(this.result,"root",{get(){return s.root}});let o=new sv(a,n,this._opts,t);if(o.isMap()){let[u,c]=o.generate();u&&(this.result.css=u),c&&(this.result.map=c)}}get[Symbol.toStringTag](){return"NoWorkResult"}get processor(){return this.result.processor}get opts(){return this.result.opts}get css(){return this.result.css}get content(){return this.result.css}get map(){return this.result.map}get root(){if(this._root)return this._root;let e,t=ov;try{e=t(this._css,this._opts)}catch(r){this.error=r}if(this.error)throw this.error;return this._root=e,e}get messages(){return[]}warnings(){return[]}toString(){return this._css}then(e,t){return this.async().then(e,t)}catch(e){return this.async().catch(e)}finally(e){return this.async().then(e,e)}async(){return this.error?Promise.reject(this.error):Promise.resolve(this.result)}sync(){if(this.error)throw this.error;return this.result}};pc.exports=Gi;Gi.default=Gi});var mc=v((LT,hc)=>{l();"use strict";var uv=dc(),fv=Ls(),cv=Ti(),pv=_t(),Ot=class{constructor(e=[]){this.version="8.4.24",this.plugins=this.normalize(e)}use(e){return this.plugins=this.plugins.concat(this.normalize([e])),this}process(e,t={}){return this.plugins.length===0&&typeof t.parser=="undefined"&&typeof t.stringifier=="undefined"&&typeof t.syntax=="undefined"?new uv(this,e,t):new fv(this,e,t)}normalize(e){let t=[];for(let r of e)if(r.postcss===!0?r=r():r.postcss&&(r=r.postcss),typeof r=="object"&&Array.isArray(r.plugins))t=t.concat(r.plugins);else if(typeof r=="object"&&r.postcssPlugin)t.push(r);else if(typeof r=="function")t.push(r);else if(!(typeof r=="object"&&(r.parse||r.stringify)))throw new Error(r+" is not a PostCSS plugin");return t}};hc.exports=Ot;Ot.default=Ot;pv.registerProcessor(Ot);cv.registerProcessor(Ot)});var yc=v(($T,gc)=>{l();"use strict";var dv=gr(),hv=Is(),mv=yr(),gv=$i(),yv=Vi(),wv=_t(),bv=ji();function Cr(i,e){if(Array.isArray(i))return i.map(n=>Cr(n));let{inputs:t,...r}=i;if(t){e=[];for(let n of t){let a={...n,__proto__:yv.prototype};a.map&&(a.map={...a.map,__proto__:hv.prototype}),e.push(a)}}if(r.nodes&&(r.nodes=i.nodes.map(n=>Cr(n,e))),r.source){let{inputId:n,...a}=r.source;r.source=a,n!=null&&(r.source.input=e[n])}if(r.type==="root")return new wv(r);if(r.type==="decl")return new dv(r);if(r.type==="rule")return new bv(r);if(r.type==="comment")return new mv(r);if(r.type==="atrule")return new gv(r);throw new Error("Unknown node type: "+i.type)}gc.exports=Cr;Cr.default=Cr});var me=v((jT,Cc)=>{l();"use strict";var vv=ki(),wc=gr(),xv=Ls(),kv=it(),$s=mc(),Sv=hr(),Cv=yc(),bc=Ti(),Av=Ts(),vc=yr(),xc=$i(),_v=Ii(),Ev=Vi(),Ov=Wi(),Tv=Ds(),kc=ji(),Sc=_t(),Pv=mr();function j(...i){return i.length===1&&Array.isArray(i[0])&&(i=i[0]),new $s(i)}j.plugin=function(e,t){let r=!1;function 
n(...s){console&&console.warn&&!r&&(r=!0,console.warn(e+`: postcss.plugin was deprecated. Migration guide: +https://evilmartians.com/chronicles/postcss-8-plugin-migration`),m.env.LANG&&m.env.LANG.startsWith("cn")&&console.warn(e+`: \u91CC\u9762 postcss.plugin \u88AB\u5F03\u7528. \u8FC1\u79FB\u6307\u5357: +https://www.w3ctech.com/topic/2226`));let o=t(...s);return o.postcssPlugin=e,o.postcssVersion=new $s().version,o}let a;return Object.defineProperty(n,"postcss",{get(){return a||(a=n()),a}}),n.process=function(s,o,u){return j([n(u)]).process(s,o)},n};j.stringify=Sv;j.parse=Ov;j.fromJSON=Cv;j.list=Tv;j.comment=i=>new vc(i);j.atRule=i=>new xc(i);j.decl=i=>new wc(i);j.rule=i=>new kc(i);j.root=i=>new Sc(i);j.document=i=>new bc(i);j.CssSyntaxError=vv;j.Declaration=wc;j.Container=kv;j.Processor=$s;j.Document=bc;j.Comment=vc;j.Warning=Av;j.AtRule=xc;j.Result=_v;j.Input=Ev;j.Rule=kc;j.Root=Sc;j.Node=Pv;xv.registerPostcss(j);Cc.exports=j;j.default=j});var W,z,zT,VT,UT,WT,GT,HT,YT,QT,JT,XT,KT,ZT,eP,tP,rP,iP,nP,sP,aP,oP,lP,uP,fP,cP,nt=C(()=>{l();W=K(me()),z=W.default,zT=W.default.stringify,VT=W.default.fromJSON,UT=W.default.plugin,WT=W.default.parse,GT=W.default.list,HT=W.default.document,YT=W.default.comment,QT=W.default.atRule,JT=W.default.rule,XT=W.default.decl,KT=W.default.root,ZT=W.default.CssSyntaxError,eP=W.default.Declaration,tP=W.default.Container,rP=W.default.Processor,iP=W.default.Document,nP=W.default.Comment,sP=W.default.Warning,aP=W.default.AtRule,oP=W.default.Result,lP=W.default.Input,uP=W.default.Rule,fP=W.default.Root,cP=W.default.Node});var js=v((dP,Ac)=>{l();Ac.exports=function(i,e,t,r,n){for(e=e.split?e.split("."):e,r=0;r{l();"use strict";Hi.__esModule=!0;Hi.default=qv;function Dv(i){for(var e=i.toLowerCase(),t="",r=!1,n=0;n<6&&e[n]!==void 0;n++){var a=e.charCodeAt(n),s=a>=97&&a<=102||a>=48&&a<=57;if(r=a===32,!s)break;t+=e[n]}if(t.length!==0){var o=parseInt(t,16),u=o>=55296&&o<=57343;return u||o===0||o>1114111?["\uFFFD",t.length+(r?1:0)]:[String.fromCodePoint(o),t.length+(r?1:0)]}}var Iv=/\\/;function qv(i){var e=Iv.test(i);if(!e)return i;for(var t="",r=0;r{l();"use strict";Qi.__esModule=!0;Qi.default=Rv;function Rv(i){for(var e=arguments.length,t=new Array(e>1?e-1:0),r=1;r0;){var n=t.shift();if(!i[n])return;i=i[n]}return i}Ec.exports=Qi.default});var Pc=v((Ji,Tc)=>{l();"use strict";Ji.__esModule=!0;Ji.default=Mv;function Mv(i){for(var e=arguments.length,t=new Array(e>1?e-1:0),r=1;r0;){var n=t.shift();i[n]||(i[n]={}),i=i[n]}}Tc.exports=Ji.default});var Ic=v((Xi,Dc)=>{l();"use strict";Xi.__esModule=!0;Xi.default=Bv;function Bv(i){for(var e="",t=i.indexOf("/*"),r=0;t>=0;){e=e+i.slice(r,t);var n=i.indexOf("*/",t+2);if(n<0)return e;r=n+2,t=i.indexOf("/*",r)}return e=e+i.slice(r),e}Dc.exports=Xi.default});var Ar=v(Re=>{l();"use strict";Re.__esModule=!0;Re.unesc=Re.stripComments=Re.getProp=Re.ensureObject=void 0;var Fv=Ki(Yi());Re.unesc=Fv.default;var Nv=Ki(Oc());Re.getProp=Nv.default;var Lv=Ki(Pc());Re.ensureObject=Lv.default;var $v=Ki(Ic());Re.stripComments=$v.default;function Ki(i){return i&&i.__esModule?i:{default:i}}});var Ue=v((_r,Mc)=>{l();"use strict";_r.__esModule=!0;_r.default=void 0;var qc=Ar();function Rc(i,e){for(var t=0;tr||this.source.end.linen||this.source.end.line===r&&this.source.end.column{l();"use strict";G.__esModule=!0;G.UNIVERSAL=G.TAG=G.STRING=G.SELECTOR=G.ROOT=G.PSEUDO=G.NESTING=G.ID=G.COMMENT=G.COMBINATOR=G.CLASS=G.ATTRIBUTE=void 0;var Uv="tag";G.TAG=Uv;var Wv="string";G.STRING=Wv;var Gv="selector";G.SELECTOR=Gv;var Hv="root";G.ROOT=Hv;var 
Yv="pseudo";G.PSEUDO=Yv;var Qv="nesting";G.NESTING=Qv;var Jv="id";G.ID=Jv;var Xv="comment";G.COMMENT=Xv;var Kv="combinator";G.COMBINATOR=Kv;var Zv="class";G.CLASS=Zv;var ex="attribute";G.ATTRIBUTE=ex;var tx="universal";G.UNIVERSAL=tx});var Zi=v((Er,Lc)=>{l();"use strict";Er.__esModule=!0;Er.default=void 0;var rx=nx(Ue()),We=ix(ne());function Bc(i){if(typeof WeakMap!="function")return null;var e=new WeakMap,t=new WeakMap;return(Bc=function(n){return n?t:e})(i)}function ix(i,e){if(!e&&i&&i.__esModule)return i;if(i===null||typeof i!="object"&&typeof i!="function")return{default:i};var t=Bc(e);if(t&&t.has(i))return t.get(i);var r={},n=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var a in i)if(a!=="default"&&Object.prototype.hasOwnProperty.call(i,a)){var s=n?Object.getOwnPropertyDescriptor(i,a):null;s&&(s.get||s.set)?Object.defineProperty(r,a,s):r[a]=i[a]}return r.default=i,t&&t.set(i,r),r}function nx(i){return i&&i.__esModule?i:{default:i}}function sx(i,e){var t=typeof Symbol!="undefined"&&i[Symbol.iterator]||i["@@iterator"];if(t)return(t=t.call(i)).next.bind(t);if(Array.isArray(i)||(t=ax(i))||e&&i&&typeof i.length=="number"){t&&(i=t);var r=0;return function(){return r>=i.length?{done:!0}:{done:!1,value:i[r++]}}}throw new TypeError(`Invalid attempt to iterate non-iterable instance. +In order to be iterable, non-array objects must have a [Symbol.iterator]() method.`)}function ax(i,e){if(!!i){if(typeof i=="string")return Fc(i,e);var t=Object.prototype.toString.call(i).slice(8,-1);if(t==="Object"&&i.constructor&&(t=i.constructor.name),t==="Map"||t==="Set")return Array.from(i);if(t==="Arguments"||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(t))return Fc(i,e)}}function Fc(i,e){(e==null||e>i.length)&&(e=i.length);for(var t=0,r=new Array(e);t=n&&(this.indexes[s]=a-1);return this},t.removeAll=function(){for(var n=sx(this.nodes),a;!(a=n()).done;){var s=a.value;s.parent=void 0}return this.nodes=[],this},t.empty=function(){return this.removeAll()},t.insertAfter=function(n,a){a.parent=this;var s=this.index(n);this.nodes.splice(s+1,0,a),a.parent=this;var o;for(var u in this.indexes)o=this.indexes[u],s<=o&&(this.indexes[u]=o+1);return this},t.insertBefore=function(n,a){a.parent=this;var s=this.index(n);this.nodes.splice(s,0,a),a.parent=this;var o;for(var u in this.indexes)o=this.indexes[u],o<=s&&(this.indexes[u]=o+1);return this},t._findChildAtPosition=function(n,a){var s=void 0;return this.each(function(o){if(o.atPosition){var u=o.atPosition(n,a);if(u)return s=u,!1}else if(o.isAtPosition(n,a))return s=o,!1}),s},t.atPosition=function(n,a){if(this.isAtPosition(n,a))return this._findChildAtPosition(n,a)||this},t._inferEndPosition=function(){this.last&&this.last.source&&this.last.source.end&&(this.source=this.source||{},this.source.end=this.source.end||{},Object.assign(this.source.end,this.last.source.end))},t.each=function(n){this.lastEach||(this.lastEach=0),this.indexes||(this.indexes={}),this.lastEach++;var a=this.lastEach;if(this.indexes[a]=0,!!this.length){for(var s,o;this.indexes[a]{l();"use strict";Or.__esModule=!0;Or.default=void 0;var fx=px(Zi()),cx=ne();function px(i){return i&&i.__esModule?i:{default:i}}function $c(i,e){for(var t=0;t{l();"use strict";Tr.__esModule=!0;Tr.default=void 0;var gx=wx(Zi()),yx=ne();function wx(i){return i&&i.__esModule?i:{default:i}}function bx(i,e){i.prototype=Object.create(e.prototype),i.prototype.constructor=i,Ws(i,e)}function Ws(i,e){return Ws=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(r,n){return r.__proto__=n,r},Ws(i,e)}var 
vx=function(i){bx(e,i);function e(t){var r;return r=i.call(this,t)||this,r.type=yx.SELECTOR,r}return e}(gx.default);Tr.default=vx;zc.exports=Tr.default});var en=v((gP,Vc)=>{l();"use strict";var xx={},kx=xx.hasOwnProperty,Sx=function(e,t){if(!e)return t;var r={};for(var n in t)r[n]=kx.call(e,n)?e[n]:t[n];return r},Cx=/[ -,\.\/:-@\[-\^`\{-~]/,Ax=/[ -,\.\/:-@\[\]\^`\{-~]/,_x=/(^|\\+)?(\\[A-F0-9]{1,6})\x20(?![a-fA-F0-9\x20])/g,Hs=function i(e,t){t=Sx(t,i.options),t.quotes!="single"&&t.quotes!="double"&&(t.quotes="single");for(var r=t.quotes=="double"?'"':"'",n=t.isIdentifier,a=e.charAt(0),s="",o=0,u=e.length;o126){if(f>=55296&&f<=56319&&o{l();"use strict";Pr.__esModule=!0;Pr.default=void 0;var Ex=Uc(en()),Ox=Ar(),Tx=Uc(Ue()),Px=ne();function Uc(i){return i&&i.__esModule?i:{default:i}}function Wc(i,e){for(var t=0;t{l();"use strict";Dr.__esModule=!0;Dr.default=void 0;var Rx=Bx(Ue()),Mx=ne();function Bx(i){return i&&i.__esModule?i:{default:i}}function Fx(i,e){i.prototype=Object.create(e.prototype),i.prototype.constructor=i,Js(i,e)}function Js(i,e){return Js=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(r,n){return r.__proto__=n,r},Js(i,e)}var Nx=function(i){Fx(e,i);function e(t){var r;return r=i.call(this,t)||this,r.type=Mx.COMMENT,r}return e}(Rx.default);Dr.default=Nx;Hc.exports=Dr.default});var Zs=v((Ir,Yc)=>{l();"use strict";Ir.__esModule=!0;Ir.default=void 0;var Lx=jx(Ue()),$x=ne();function jx(i){return i&&i.__esModule?i:{default:i}}function zx(i,e){i.prototype=Object.create(e.prototype),i.prototype.constructor=i,Ks(i,e)}function Ks(i,e){return Ks=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(r,n){return r.__proto__=n,r},Ks(i,e)}var Vx=function(i){zx(e,i);function e(r){var n;return n=i.call(this,r)||this,n.type=$x.ID,n}var t=e.prototype;return t.valueToString=function(){return"#"+i.prototype.valueToString.call(this)},e}(Lx.default);Ir.default=Vx;Yc.exports=Ir.default});var tn=v((qr,Xc)=>{l();"use strict";qr.__esModule=!0;qr.default=void 0;var Ux=Qc(en()),Wx=Ar(),Gx=Qc(Ue());function Qc(i){return i&&i.__esModule?i:{default:i}}function Jc(i,e){for(var t=0;t{l();"use strict";Rr.__esModule=!0;Rr.default=void 0;var Jx=Kx(tn()),Xx=ne();function Kx(i){return i&&i.__esModule?i:{default:i}}function Zx(i,e){i.prototype=Object.create(e.prototype),i.prototype.constructor=i,ta(i,e)}function ta(i,e){return ta=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(r,n){return r.__proto__=n,r},ta(i,e)}var e1=function(i){Zx(e,i);function e(t){var r;return r=i.call(this,t)||this,r.type=Xx.TAG,r}return e}(Jx.default);Rr.default=e1;Kc.exports=Rr.default});var na=v((Mr,Zc)=>{l();"use strict";Mr.__esModule=!0;Mr.default=void 0;var t1=i1(Ue()),r1=ne();function i1(i){return i&&i.__esModule?i:{default:i}}function n1(i,e){i.prototype=Object.create(e.prototype),i.prototype.constructor=i,ia(i,e)}function ia(i,e){return ia=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(r,n){return r.__proto__=n,r},ia(i,e)}var s1=function(i){n1(e,i);function e(t){var r;return r=i.call(this,t)||this,r.type=r1.STRING,r}return e}(t1.default);Mr.default=s1;Zc.exports=Mr.default});var aa=v((Br,ep)=>{l();"use strict";Br.__esModule=!0;Br.default=void 0;var a1=l1(Zi()),o1=ne();function l1(i){return i&&i.__esModule?i:{default:i}}function u1(i,e){i.prototype=Object.create(e.prototype),i.prototype.constructor=i,sa(i,e)}function sa(i,e){return sa=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(r,n){return r.__proto__=n,r},sa(i,e)}var f1=function(i){u1(e,i);function e(r){var n;return 
n=i.call(this,r)||this,n.type=o1.PSEUDO,n}var t=e.prototype;return t.toString=function(){var n=this.length?"("+this.map(String).join(",")+")":"";return[this.rawSpaceBefore,this.stringifyProperty("value"),n,this.rawSpaceAfter].join("")},e}(a1.default);Br.default=f1;ep.exports=Br.default});var tp={};Ae(tp,{deprecate:()=>c1});function c1(i){return i}var rp=C(()=>{l()});var np=v((yP,ip)=>{l();ip.exports=(rp(),tp).deprecate});var pa=v(Lr=>{l();"use strict";Lr.__esModule=!0;Lr.default=void 0;Lr.unescapeValue=fa;var Fr=la(en()),p1=la(Yi()),d1=la(tn()),h1=ne(),oa;function la(i){return i&&i.__esModule?i:{default:i}}function sp(i,e){for(var t=0;t0&&!n.quoted&&o.before.length===0&&!(n.spaces.value&&n.spaces.value.after)&&(o.before=" "),ap(s,o)}))),a.push("]"),a.push(this.rawSpaceAfter),a.join("")},m1(e,[{key:"quoted",get:function(){var n=this.quoteMark;return n==="'"||n==='"'},set:function(n){b1()}},{key:"quoteMark",get:function(){return this._quoteMark},set:function(n){if(!this._constructed){this._quoteMark=n;return}this._quoteMark!==n&&(this._quoteMark=n,this._syncRawValue())}},{key:"qualifiedAttribute",get:function(){return this.qualifiedName(this.raws.attribute||this.attribute)}},{key:"insensitiveFlag",get:function(){return this.insensitive?"i":""}},{key:"value",get:function(){return this._value},set:function(n){if(this._constructed){var a=fa(n),s=a.deprecatedUsage,o=a.unescaped,u=a.quoteMark;if(s&&w1(),o===this._value&&u===this._quoteMark)return;this._value=o,this._quoteMark=u,this._syncRawValue()}else this._value=n}},{key:"insensitive",get:function(){return this._insensitive},set:function(n){n||(this._insensitive=!1,this.raws&&(this.raws.insensitiveFlag==="I"||this.raws.insensitiveFlag==="i")&&(this.raws.insensitiveFlag=void 0)),this._insensitive=n}},{key:"attribute",get:function(){return this._attribute},set:function(n){this._handleEscapes("attribute",n),this._attribute=n}}]),e}(d1.default);Lr.default=rn;rn.NO_QUOTE=null;rn.SINGLE_QUOTE="'";rn.DOUBLE_QUOTE='"';var ca=(oa={"'":{quotes:"single",wrap:!0},'"':{quotes:"double",wrap:!0}},oa[null]={isIdentifier:!0},oa);function ap(i,e){return""+e.before+i+e.after}});var ha=v(($r,op)=>{l();"use strict";$r.__esModule=!0;$r.default=void 0;var k1=C1(tn()),S1=ne();function C1(i){return i&&i.__esModule?i:{default:i}}function A1(i,e){i.prototype=Object.create(e.prototype),i.prototype.constructor=i,da(i,e)}function da(i,e){return da=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(r,n){return r.__proto__=n,r},da(i,e)}var _1=function(i){A1(e,i);function e(t){var r;return r=i.call(this,t)||this,r.type=S1.UNIVERSAL,r.value="*",r}return e}(k1.default);$r.default=_1;op.exports=$r.default});var ga=v((jr,lp)=>{l();"use strict";jr.__esModule=!0;jr.default=void 0;var E1=T1(Ue()),O1=ne();function T1(i){return i&&i.__esModule?i:{default:i}}function P1(i,e){i.prototype=Object.create(e.prototype),i.prototype.constructor=i,ma(i,e)}function ma(i,e){return ma=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(r,n){return r.__proto__=n,r},ma(i,e)}var D1=function(i){P1(e,i);function e(t){var r;return r=i.call(this,t)||this,r.type=O1.COMBINATOR,r}return e}(E1.default);jr.default=D1;lp.exports=jr.default});var wa=v((zr,up)=>{l();"use strict";zr.__esModule=!0;zr.default=void 0;var I1=R1(Ue()),q1=ne();function R1(i){return i&&i.__esModule?i:{default:i}}function M1(i,e){i.prototype=Object.create(e.prototype),i.prototype.constructor=i,ya(i,e)}function ya(i,e){return ya=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(r,n){return 
r.__proto__=n,r},ya(i,e)}var B1=function(i){M1(e,i);function e(t){var r;return r=i.call(this,t)||this,r.type=q1.NESTING,r.value="&",r}return e}(I1.default);zr.default=B1;up.exports=zr.default});var cp=v((nn,fp)=>{l();"use strict";nn.__esModule=!0;nn.default=F1;function F1(i){return i.sort(function(e,t){return e-t})}fp.exports=nn.default});var ba=v(D=>{l();"use strict";D.__esModule=!0;D.word=D.tilde=D.tab=D.str=D.space=D.slash=D.singleQuote=D.semicolon=D.plus=D.pipe=D.openSquare=D.openParenthesis=D.newline=D.greaterThan=D.feed=D.equals=D.doubleQuote=D.dollar=D.cr=D.comment=D.comma=D.combinator=D.colon=D.closeSquare=D.closeParenthesis=D.caret=D.bang=D.backslash=D.at=D.asterisk=D.ampersand=void 0;var N1=38;D.ampersand=N1;var L1=42;D.asterisk=L1;var $1=64;D.at=$1;var j1=44;D.comma=j1;var z1=58;D.colon=z1;var V1=59;D.semicolon=V1;var U1=40;D.openParenthesis=U1;var W1=41;D.closeParenthesis=W1;var G1=91;D.openSquare=G1;var H1=93;D.closeSquare=H1;var Y1=36;D.dollar=Y1;var Q1=126;D.tilde=Q1;var J1=94;D.caret=J1;var X1=43;D.plus=X1;var K1=61;D.equals=K1;var Z1=124;D.pipe=Z1;var ek=62;D.greaterThan=ek;var tk=32;D.space=tk;var pp=39;D.singleQuote=pp;var rk=34;D.doubleQuote=rk;var ik=47;D.slash=ik;var nk=33;D.bang=nk;var sk=92;D.backslash=sk;var ak=13;D.cr=ak;var ok=12;D.feed=ok;var lk=10;D.newline=lk;var uk=9;D.tab=uk;var fk=pp;D.str=fk;var ck=-1;D.comment=ck;var pk=-2;D.word=pk;var dk=-3;D.combinator=dk});var mp=v(Vr=>{l();"use strict";Vr.__esModule=!0;Vr.FIELDS=void 0;Vr.default=vk;var O=hk(ba()),Tt,V;function dp(i){if(typeof WeakMap!="function")return null;var e=new WeakMap,t=new WeakMap;return(dp=function(n){return n?t:e})(i)}function hk(i,e){if(!e&&i&&i.__esModule)return i;if(i===null||typeof i!="object"&&typeof i!="function")return{default:i};var t=dp(e);if(t&&t.has(i))return t.get(i);var r={},n=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var a in i)if(a!=="default"&&Object.prototype.hasOwnProperty.call(i,a)){var s=n?Object.getOwnPropertyDescriptor(i,a):null;s&&(s.get||s.set)?Object.defineProperty(r,a,s):r[a]=i[a]}return r.default=i,t&&t.set(i,r),r}var mk=(Tt={},Tt[O.tab]=!0,Tt[O.newline]=!0,Tt[O.cr]=!0,Tt[O.feed]=!0,Tt),gk=(V={},V[O.space]=!0,V[O.tab]=!0,V[O.newline]=!0,V[O.cr]=!0,V[O.feed]=!0,V[O.ampersand]=!0,V[O.asterisk]=!0,V[O.bang]=!0,V[O.comma]=!0,V[O.colon]=!0,V[O.semicolon]=!0,V[O.openParenthesis]=!0,V[O.closeParenthesis]=!0,V[O.openSquare]=!0,V[O.closeSquare]=!0,V[O.singleQuote]=!0,V[O.doubleQuote]=!0,V[O.plus]=!0,V[O.pipe]=!0,V[O.tilde]=!0,V[O.greaterThan]=!0,V[O.equals]=!0,V[O.dollar]=!0,V[O.caret]=!0,V[O.slash]=!0,V),va={},hp="0123456789abcdefABCDEF";for(sn=0;sn0?(k=s+x,S=b-w[x].length):(k=s,S=a),E=O.comment,s=k,d=k,p=b-S):c===O.slash?(b=o,E=c,d=s,p=o-a,u=b+1):(b=yk(t,o),E=O.word,d=s,p=b-a),u=b+1;break}e.push([E,s,o-a,d,p,o,u]),S&&(a=S,S=null),o=u}return e}});var Sp=v((Ur,kp)=>{l();"use strict";Ur.__esModule=!0;Ur.default=void 0;var xk=we(Us()),xa=we(Gs()),kk=we(Qs()),gp=we(Xs()),Sk=we(Zs()),Ck=we(ra()),ka=we(na()),Ak=we(aa()),yp=an(pa()),_k=we(ha()),Sa=we(ga()),Ek=we(wa()),Ok=we(cp()),A=an(mp()),T=an(ba()),Tk=an(ne()),Y=Ar(),yt,Ca;function wp(i){if(typeof WeakMap!="function")return null;var e=new WeakMap,t=new WeakMap;return(wp=function(n){return n?t:e})(i)}function an(i,e){if(!e&&i&&i.__esModule)return i;if(i===null||typeof i!="object"&&typeof i!="function")return{default:i};var t=wp(e);if(t&&t.has(i))return t.get(i);var r={},n=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var a in i)if(a!=="default"&&Object.prototype.hasOwnProperty.call(i,a)){var 
s=n?Object.getOwnPropertyDescriptor(i,a):null;s&&(s.get||s.set)?Object.defineProperty(r,a,s):r[a]=i[a]}return r.default=i,t&&t.set(i,r),r}function we(i){return i&&i.__esModule?i:{default:i}}function bp(i,e){for(var t=0;t0){var s=this.current.last;if(s){var o=this.convertWhitespaceNodesToSpace(a),u=o.space,c=o.rawSpace;c!==void 0&&(s.rawSpaceAfter+=c),s.spaces.after+=u}else a.forEach(function(E){return r.newNode(E)})}return}var f=this.currToken,p=void 0;n>this.position&&(p=this.parseWhitespaceEquivalentTokens(n));var d;if(this.isNamedCombinator()?d=this.namedCombinator():this.currToken[A.FIELDS.TYPE]===T.combinator?(d=new Sa.default({value:this.content(),source:Pt(this.currToken),sourceIndex:this.currToken[A.FIELDS.START_POS]}),this.position++):Aa[this.currToken[A.FIELDS.TYPE]]||p||this.unexpected(),d){if(p){var h=this.convertWhitespaceNodesToSpace(p),y=h.space,x=h.rawSpace;d.spaces.before=y,d.rawSpaceBefore=x}}else{var w=this.convertWhitespaceNodesToSpace(p,!0),b=w.space,k=w.rawSpace;k||(k=b);var S={},_={spaces:{}};b.endsWith(" ")&&k.endsWith(" ")?(S.before=b.slice(0,b.length-1),_.spaces.before=k.slice(0,k.length-1)):b.startsWith(" ")&&k.startsWith(" ")?(S.after=b.slice(1),_.spaces.after=k.slice(1)):_.value=k,d=new Sa.default({value:" ",source:_a(f,this.tokens[this.position-1]),sourceIndex:f[A.FIELDS.START_POS],spaces:S,raws:_})}return this.currToken&&this.currToken[A.FIELDS.TYPE]===T.space&&(d.spaces.after=this.optionalSpace(this.content()),this.position++),this.newNode(d)},e.comma=function(){if(this.position===this.tokens.length-1){this.root.trailingComma=!0,this.position++;return}this.current._inferEndPosition();var r=new xa.default({source:{start:vp(this.tokens[this.position+1])}});this.current.parent.append(r),this.current=r,this.position++},e.comment=function(){var r=this.currToken;this.newNode(new gp.default({value:this.content(),source:Pt(r),sourceIndex:r[A.FIELDS.START_POS]})),this.position++},e.error=function(r,n){throw this.root.error(r,n)},e.missingBackslash=function(){return this.error("Expected a backslash preceding the semicolon.",{index:this.currToken[A.FIELDS.START_POS]})},e.missingParenthesis=function(){return this.expected("opening parenthesis",this.currToken[A.FIELDS.START_POS])},e.missingSquareBracket=function(){return this.expected("opening square bracket",this.currToken[A.FIELDS.START_POS])},e.unexpected=function(){return this.error("Unexpected '"+this.content()+"'. 
Escaping special characters with \\ may help.",this.currToken[A.FIELDS.START_POS])},e.unexpectedPipe=function(){return this.error("Unexpected '|'.",this.currToken[A.FIELDS.START_POS])},e.namespace=function(){var r=this.prevToken&&this.content(this.prevToken)||!0;if(this.nextToken[A.FIELDS.TYPE]===T.word)return this.position++,this.word(r);if(this.nextToken[A.FIELDS.TYPE]===T.asterisk)return this.position++,this.universal(r);this.unexpectedPipe()},e.nesting=function(){if(this.nextToken){var r=this.content(this.nextToken);if(r==="|"){this.position++;return}}var n=this.currToken;this.newNode(new Ek.default({value:this.content(),source:Pt(n),sourceIndex:n[A.FIELDS.START_POS]})),this.position++},e.parentheses=function(){var r=this.current.last,n=1;if(this.position++,r&&r.type===Tk.PSEUDO){var a=new xa.default({source:{start:vp(this.tokens[this.position-1])}}),s=this.current;for(r.append(a),this.current=a;this.position1&&r.nextToken&&r.nextToken[A.FIELDS.TYPE]===T.openParenthesis&&r.error("Misplaced parenthesis.",{index:r.nextToken[A.FIELDS.START_POS]})});else return this.expected(["pseudo-class","pseudo-element"],this.currToken[A.FIELDS.START_POS])},e.space=function(){var r=this.content();this.position===0||this.prevToken[A.FIELDS.TYPE]===T.comma||this.prevToken[A.FIELDS.TYPE]===T.openParenthesis||this.current.nodes.every(function(n){return n.type==="comment"})?(this.spaces=this.optionalSpace(r),this.position++):this.position===this.tokens.length-1||this.nextToken[A.FIELDS.TYPE]===T.comma||this.nextToken[A.FIELDS.TYPE]===T.closeParenthesis?(this.current.last.spaces.after=this.optionalSpace(r),this.position++):this.combinator()},e.string=function(){var r=this.currToken;this.newNode(new ka.default({value:this.content(),source:Pt(r),sourceIndex:r[A.FIELDS.START_POS]})),this.position++},e.universal=function(r){var n=this.nextToken;if(n&&this.content(n)==="|")return this.position++,this.namespace();var a=this.currToken;this.newNode(new _k.default({value:this.content(),source:Pt(a),sourceIndex:a[A.FIELDS.START_POS]}),r),this.position++},e.splitWord=function(r,n){for(var a=this,s=this.nextToken,o=this.content();s&&~[T.dollar,T.caret,T.equals,T.word].indexOf(s[A.FIELDS.TYPE]);){this.position++;var u=this.content();if(o+=u,u.lastIndexOf("\\")===u.length-1){var c=this.nextToken;c&&c[A.FIELDS.TYPE]===T.space&&(o+=this.requiredSpace(this.content(c)),this.position++)}s=this.nextToken}var f=Ea(o,".").filter(function(y){var x=o[y-1]==="\\",w=/^\d+\.\d+%$/.test(o);return!x&&!w}),p=Ea(o,"#").filter(function(y){return o[y-1]!=="\\"}),d=Ea(o,"#{");d.length&&(p=p.filter(function(y){return!~d.indexOf(y)}));var h=(0,Ok.default)(Ik([0].concat(f,p)));h.forEach(function(y,x){var w=h[x+1]||o.length,b=o.slice(y,w);if(x===0&&n)return n.call(a,b,h.length);var k,S=a.currToken,_=S[A.FIELDS.START_POS]+h[x],E=wt(S[1],S[2]+y,S[3],S[2]+(w-1));if(~f.indexOf(y)){var I={value:b.slice(1),source:E,sourceIndex:_};k=new kk.default(Dt(I,"value"))}else if(~p.indexOf(y)){var B={value:b.slice(1),source:E,sourceIndex:_};k=new Sk.default(Dt(B,"value"))}else{var q={value:b,source:E,sourceIndex:_};Dt(q,"value"),k=new Ck.default(q)}a.newNode(k,r),r=null}),this.position++},e.word=function(r){var n=this.nextToken;return n&&this.content(n)==="|"?(this.position++,this.namespace()):this.splitWord(r)},e.loop=function(){for(;this.position{l();"use strict";Wr.__esModule=!0;Wr.default=void 0;var Rk=Mk(Sp());function Mk(i){return i&&i.__esModule?i:{default:i}}var Bk=function(){function i(t,r){this.func=t||function(){},this.funcRes=null,this.options=r}var 
e=i.prototype;return e._shouldUpdateSelector=function(r,n){n===void 0&&(n={});var a=Object.assign({},this.options,n);return a.updateSelector===!1?!1:typeof r!="string"},e._isLossy=function(r){r===void 0&&(r={});var n=Object.assign({},this.options,r);return n.lossless===!1},e._root=function(r,n){n===void 0&&(n={});var a=new Rk.default(r,this._parseOptions(n));return a.root},e._parseOptions=function(r){return{lossy:this._isLossy(r)}},e._run=function(r,n){var a=this;return n===void 0&&(n={}),new Promise(function(s,o){try{var u=a._root(r,n);Promise.resolve(a.func(u)).then(function(c){var f=void 0;return a._shouldUpdateSelector(r,n)&&(f=u.toString(),r.selector=f),{transform:c,root:u,string:f}}).then(s,o)}catch(c){o(c);return}})},e._runSync=function(r,n){n===void 0&&(n={});var a=this._root(r,n),s=this.func(a);if(s&&typeof s.then=="function")throw new Error("Selector processor returned a promise to a synchronous call.");var o=void 0;return n.updateSelector&&typeof r!="string"&&(o=a.toString(),r.selector=o),{transform:s,root:a,string:o}},e.ast=function(r,n){return this._run(r,n).then(function(a){return a.root})},e.astSync=function(r,n){return this._runSync(r,n).root},e.transform=function(r,n){return this._run(r,n).then(function(a){return a.transform})},e.transformSync=function(r,n){return this._runSync(r,n).transform},e.process=function(r,n){return this._run(r,n).then(function(a){return a.string||a.root.toString()})},e.processSync=function(r,n){var a=this._runSync(r,n);return a.string||a.root.toString()},i}();Wr.default=Bk;Cp.exports=Wr.default});var _p=v(H=>{l();"use strict";H.__esModule=!0;H.universal=H.tag=H.string=H.selector=H.root=H.pseudo=H.nesting=H.id=H.comment=H.combinator=H.className=H.attribute=void 0;var Fk=be(pa()),Nk=be(Qs()),Lk=be(ga()),$k=be(Xs()),jk=be(Zs()),zk=be(wa()),Vk=be(aa()),Uk=be(Us()),Wk=be(Gs()),Gk=be(na()),Hk=be(ra()),Yk=be(ha());function be(i){return i&&i.__esModule?i:{default:i}}var Qk=function(e){return new Fk.default(e)};H.attribute=Qk;var Jk=function(e){return new Nk.default(e)};H.className=Jk;var Xk=function(e){return new Lk.default(e)};H.combinator=Xk;var Kk=function(e){return new $k.default(e)};H.comment=Kk;var Zk=function(e){return new jk.default(e)};H.id=Zk;var eS=function(e){return new zk.default(e)};H.nesting=eS;var tS=function(e){return new Vk.default(e)};H.pseudo=tS;var rS=function(e){return new Uk.default(e)};H.root=rS;var iS=function(e){return new Wk.default(e)};H.selector=iS;var nS=function(e){return new Gk.default(e)};H.string=nS;var sS=function(e){return new Hk.default(e)};H.tag=sS;var aS=function(e){return new Yk.default(e)};H.universal=aS});var Pp=v(L=>{l();"use strict";L.__esModule=!0;L.isComment=L.isCombinator=L.isClassName=L.isAttribute=void 0;L.isContainer=wS;L.isIdentifier=void 0;L.isNamespace=bS;L.isNesting=void 0;L.isNode=Oa;L.isPseudo=void 0;L.isPseudoClass=yS;L.isPseudoElement=Tp;L.isUniversal=L.isTag=L.isString=L.isSelector=L.isRoot=void 0;var Q=ne(),fe,oS=(fe={},fe[Q.ATTRIBUTE]=!0,fe[Q.CLASS]=!0,fe[Q.COMBINATOR]=!0,fe[Q.COMMENT]=!0,fe[Q.ID]=!0,fe[Q.NESTING]=!0,fe[Q.PSEUDO]=!0,fe[Q.ROOT]=!0,fe[Q.SELECTOR]=!0,fe[Q.STRING]=!0,fe[Q.TAG]=!0,fe[Q.UNIVERSAL]=!0,fe);function Oa(i){return typeof i=="object"&&oS[i.type]}function ve(i,e){return Oa(e)&&e.type===i}var Ep=ve.bind(null,Q.ATTRIBUTE);L.isAttribute=Ep;var lS=ve.bind(null,Q.CLASS);L.isClassName=lS;var uS=ve.bind(null,Q.COMBINATOR);L.isCombinator=uS;var fS=ve.bind(null,Q.COMMENT);L.isComment=fS;var cS=ve.bind(null,Q.ID);L.isIdentifier=cS;var pS=ve.bind(null,Q.NESTING);L.isNesting=pS;var 
Ta=ve.bind(null,Q.PSEUDO);L.isPseudo=Ta;var dS=ve.bind(null,Q.ROOT);L.isRoot=dS;var hS=ve.bind(null,Q.SELECTOR);L.isSelector=hS;var mS=ve.bind(null,Q.STRING);L.isString=mS;var Op=ve.bind(null,Q.TAG);L.isTag=Op;var gS=ve.bind(null,Q.UNIVERSAL);L.isUniversal=gS;function Tp(i){return Ta(i)&&i.value&&(i.value.startsWith("::")||i.value.toLowerCase()===":before"||i.value.toLowerCase()===":after"||i.value.toLowerCase()===":first-letter"||i.value.toLowerCase()===":first-line")}function yS(i){return Ta(i)&&!Tp(i)}function wS(i){return!!(Oa(i)&&i.walk)}function bS(i){return Ep(i)||Op(i)}});var Dp=v(Oe=>{l();"use strict";Oe.__esModule=!0;var Pa=ne();Object.keys(Pa).forEach(function(i){i==="default"||i==="__esModule"||i in Oe&&Oe[i]===Pa[i]||(Oe[i]=Pa[i])});var Da=_p();Object.keys(Da).forEach(function(i){i==="default"||i==="__esModule"||i in Oe&&Oe[i]===Da[i]||(Oe[i]=Da[i])});var Ia=Pp();Object.keys(Ia).forEach(function(i){i==="default"||i==="__esModule"||i in Oe&&Oe[i]===Ia[i]||(Oe[i]=Ia[i])})});var Me=v((Gr,qp)=>{l();"use strict";Gr.__esModule=!0;Gr.default=void 0;var vS=SS(Ap()),xS=kS(Dp());function Ip(i){if(typeof WeakMap!="function")return null;var e=new WeakMap,t=new WeakMap;return(Ip=function(n){return n?t:e})(i)}function kS(i,e){if(!e&&i&&i.__esModule)return i;if(i===null||typeof i!="object"&&typeof i!="function")return{default:i};var t=Ip(e);if(t&&t.has(i))return t.get(i);var r={},n=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var a in i)if(a!=="default"&&Object.prototype.hasOwnProperty.call(i,a)){var s=n?Object.getOwnPropertyDescriptor(i,a):null;s&&(s.get||s.set)?Object.defineProperty(r,a,s):r[a]=i[a]}return r.default=i,t&&t.set(i,r),r}function SS(i){return i&&i.__esModule?i:{default:i}}var qa=function(e){return new vS.default(e)};Object.assign(qa,xS);delete qa.__esModule;var CS=qa;Gr.default=CS;qp.exports=Gr.default});function Ge(i){return["fontSize","outline"].includes(i)?e=>(typeof e=="function"&&(e=e({})),Array.isArray(e)&&(e=e[0]),e):i==="fontFamily"?e=>{typeof e=="function"&&(e=e({}));let t=Array.isArray(e)&&ie(e[1])?e[0]:e;return Array.isArray(t)?t.join(", "):t}:["boxShadow","transitionProperty","transitionDuration","transitionDelay","transitionTimingFunction","backgroundImage","backgroundSize","backgroundColor","cursor","animation"].includes(i)?e=>(typeof e=="function"&&(e=e({})),Array.isArray(e)&&(e=e.join(", ")),e):["gridTemplateColumns","gridTemplateRows","objectPosition"].includes(i)?e=>(typeof e=="function"&&(e=e({})),typeof e=="string"&&(e=z.list.comma(e).join(" ")),e):(e,t={})=>(typeof e=="function"&&(e=e(t)),e)}var Hr=C(()=>{l();nt();xt()});var $p=v((EP,Na)=>{l();var{Rule:Rp,AtRule:AS}=me(),Mp=Me();function Ra(i,e){let t;try{Mp(r=>{t=r}).processSync(i)}catch(r){throw i.includes(":")?e?e.error("Missed semicolon"):r:e?e.error(r.message):r}return t.at(0)}function Bp(i,e){let t=!1;return i.each(r=>{if(r.type==="nesting"){let n=e.clone({});r.value!=="&"?r.replaceWith(Ra(r.value.replace("&",n.toString()))):r.replaceWith(n),t=!0}else"nodes"in r&&r.nodes&&Bp(r,e)&&(t=!0)}),t}function Fp(i,e){let t=[];return i.selectors.forEach(r=>{let n=Ra(r,i);e.selectors.forEach(a=>{if(!a)return;let s=Ra(a,e);Bp(s,n)||(s.prepend(Mp.combinator({value:" "})),s.prepend(n.clone({}))),t.push(s.toString())})}),t}function on(i,e){let t=i.prev();for(e.after(i);t&&t.type==="comment";){let r=t.prev();e.after(t),t=r}return i}function _S(i){return function e(t,r,n,a=n){let 
s=[];if(r.each(o=>{o.type==="rule"&&n?a&&(o.selectors=Fp(t,o)):o.type==="atrule"&&o.nodes?i[o.name]?e(t,o,a):r[Ba]!==!1&&s.push(o):s.push(o)}),n&&s.length){let o=t.clone({nodes:[]});for(let u of s)o.append(u);r.prepend(o)}}}function Ma(i,e,t){let r=new Rp({selector:i,nodes:[]});return r.append(e),t.after(r),r}function Np(i,e){let t={};for(let r of i)t[r]=!0;if(e)for(let r of e)t[r.replace(/^@/,"")]=!0;return t}function ES(i){i=i.trim();let e=i.match(/^\((.*)\)$/);if(!e)return{type:"basic",selector:i};let t=e[1].match(/^(with(?:out)?):(.+)$/);if(t){let r=t[1]==="with",n=Object.fromEntries(t[2].trim().split(/\s+/).map(s=>[s,!0]));if(r&&n.all)return{type:"noop"};let a=s=>!!n[s];return n.all?a=()=>!0:r&&(a=s=>s==="all"?!1:!n[s]),{type:"withrules",escapes:a}}return{type:"unknown"}}function OS(i){let e=[],t=i.parent;for(;t&&t instanceof AS;)e.push(t),t=t.parent;return e}function TS(i){let e=i[Lp];if(!e)i.after(i.nodes);else{let t=i.nodes,r,n=-1,a,s,o,u=OS(i);if(u.forEach((c,f)=>{if(e(c.name))r=c,n=f,s=o;else{let p=o;o=c.clone({nodes:[]}),p&&o.append(p),a=a||o}}),r?s?(a.append(t),r.after(s)):r.after(t):i.after(t),i.next()&&r){let c;u.slice(0,n+1).forEach((f,p,d)=>{let h=c;c=f.clone({nodes:[]}),h&&c.append(h);let y=[],w=(d[p-1]||i).next();for(;w;)y.push(w),w=w.next();c.append(y)}),c&&(s||t[t.length-1]).after(c)}}i.remove()}var Ba=Symbol("rootRuleMergeSel"),Lp=Symbol("rootRuleEscapes");function PS(i){let{params:e}=i,{type:t,selector:r,escapes:n}=ES(e);if(t==="unknown")throw i.error(`Unknown @${i.name} parameter ${JSON.stringify(e)}`);if(t==="basic"&&r){let a=new Rp({selector:r,nodes:i.nodes});i.removeAll(),i.append(a)}i[Lp]=n,i[Ba]=n?!n("all"):t==="noop"}var Fa=Symbol("hasRootRule");Na.exports=(i={})=>{let e=Np(["media","supports","layer","container"],i.bubble),t=_S(e),r=Np(["document","font-face","keyframes","-webkit-keyframes","-moz-keyframes"],i.unwrap),n=(i.rootRuleName||"at-root").replace(/^@/,""),a=i.preserveEmpty;return{postcssPlugin:"postcss-nested",Once(s){s.walkAtRules(n,o=>{PS(o),s[Fa]=!0})},Rule(s){let o=!1,u=s,c=!1,f=[];s.each(p=>{p.type==="rule"?(f.length&&(u=Ma(s.selector,f,u),f=[]),c=!0,o=!0,p.selectors=Fp(s,p),u=on(p,u)):p.type==="atrule"?(f.length&&(u=Ma(s.selector,f,u),f=[]),p.name===n?(o=!0,t(s,p,!0,p[Ba]),u=on(p,u)):e[p.name]?(c=!0,o=!0,t(s,p,!0),u=on(p,u)):r[p.name]?(c=!0,o=!0,t(s,p,!1),u=on(p,u)):c&&f.push(p)):p.type==="decl"&&c&&f.push(p)}),f.length&&(u=Ma(s.selector,f,u)),o&&a!==!0&&(s.raws.semicolon=!0,s.nodes.length===0&&s.remove())},RootExit(s){s[Fa]&&(s.walkAtRules(n,TS),s[Fa]=!1)}}};Na.exports.postcss=!0});var Up=v((OP,Vp)=>{l();"use strict";var jp=/-(\w|$)/g,zp=(i,e)=>e.toUpperCase(),DS=i=>(i=i.toLowerCase(),i==="float"?"cssFloat":i.startsWith("-ms-")?i.substr(1).replace(jp,zp):i.replace(jp,zp));Vp.exports=DS});var ja=v((TP,Wp)=>{l();var IS=Up(),qS={boxFlex:!0,boxFlexGroup:!0,columnCount:!0,flex:!0,flexGrow:!0,flexPositive:!0,flexShrink:!0,flexNegative:!0,fontWeight:!0,lineClamp:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,tabSize:!0,widows:!0,zIndex:!0,zoom:!0,fillOpacity:!0,strokeDashoffset:!0,strokeOpacity:!0,strokeWidth:!0};function La(i){return typeof i.nodes=="undefined"?!0:$a(i)}function $a(i){let e,t={};return i.each(r=>{if(r.type==="atrule")e="@"+r.name,r.params&&(e+=" "+r.params),typeof t[e]=="undefined"?t[e]=La(r):Array.isArray(t[e])?t[e].push(La(r)):t[e]=[t[e],La(r)];else if(r.type==="rule"){let n=$a(r);if(t[r.selector])for(let a in n)t[r.selector][a]=n[a];else t[r.selector]=n}else 
if(r.type==="decl"){r.prop[0]==="-"&&r.prop[1]==="-"||r.parent&&r.parent.selector===":export"?e=r.prop:e=IS(r.prop);let n=r.value;!isNaN(r.value)&&qS[e]&&(n=parseFloat(r.value)),r.important&&(n+=" !important"),typeof t[e]=="undefined"?t[e]=n:Array.isArray(t[e])?t[e].push(n):t[e]=[t[e],n]}}),t}Wp.exports=$a});var ln=v((PP,Qp)=>{l();var Yr=me(),Gp=/\s*!important\s*$/i,RS={"box-flex":!0,"box-flex-group":!0,"column-count":!0,flex:!0,"flex-grow":!0,"flex-positive":!0,"flex-shrink":!0,"flex-negative":!0,"font-weight":!0,"line-clamp":!0,"line-height":!0,opacity:!0,order:!0,orphans:!0,"tab-size":!0,widows:!0,"z-index":!0,zoom:!0,"fill-opacity":!0,"stroke-dashoffset":!0,"stroke-opacity":!0,"stroke-width":!0};function MS(i){return i.replace(/([A-Z])/g,"-$1").replace(/^ms-/,"-ms-").toLowerCase()}function Hp(i,e,t){t===!1||t===null||(e.startsWith("--")||(e=MS(e)),typeof t=="number"&&(t===0||RS[e]?t=t.toString():t+="px"),e==="css-float"&&(e="float"),Gp.test(t)?(t=t.replace(Gp,""),i.push(Yr.decl({prop:e,value:t,important:!0}))):i.push(Yr.decl({prop:e,value:t})))}function Yp(i,e,t){let r=Yr.atRule({name:e[1],params:e[3]||""});typeof t=="object"&&(r.nodes=[],za(t,r)),i.push(r)}function za(i,e){let t,r,n;for(t in i)if(r=i[t],!(r===null||typeof r=="undefined"))if(t[0]==="@"){let a=t.match(/@(\S+)(\s+([\W\w]*)\s*)?/);if(Array.isArray(r))for(let s of r)Yp(e,a,s);else Yp(e,a,r)}else if(Array.isArray(r))for(let a of r)Hp(e,t,a);else typeof r=="object"?(n=Yr.rule({selector:t}),za(r,n),e.push(n)):Hp(e,t,r)}Qp.exports=function(i){let e=Yr.root();return za(i,e),e}});var Va=v((DP,Jp)=>{l();var BS=ja();Jp.exports=function(e){return console&&console.warn&&e.warnings().forEach(t=>{let r=t.plugin||"PostCSS";console.warn(r+": "+t.text)}),BS(e.root)}});var Kp=v((IP,Xp)=>{l();var FS=me(),NS=Va(),LS=ln();Xp.exports=function(e){let t=FS(e);return async r=>{let n=await t.process(r,{parser:LS,from:void 0});return NS(n)}}});var ed=v((qP,Zp)=>{l();var $S=me(),jS=Va(),zS=ln();Zp.exports=function(i){let e=$S(i);return t=>{let r=e.process(t,{parser:zS,from:void 0});return jS(r)}}});var rd=v((RP,td)=>{l();var VS=ja(),US=ln(),WS=Kp(),GS=ed();td.exports={objectify:VS,parse:US,async:WS,sync:GS}});var It,id,MP,BP,FP,NP,nd=C(()=>{l();It=K(rd()),id=It.default,MP=It.default.objectify,BP=It.default.parse,FP=It.default.async,NP=It.default.sync});function qt(i){return Array.isArray(i)?i.flatMap(e=>z([(0,sd.default)({bubble:["screen"]})]).process(e,{parser:id}).root.nodes):qt([i])}var sd,Ua=C(()=>{l();nt();sd=K($p());nd()});function Rt(i,e,t=!1){if(i==="")return e;let r=typeof e=="string"?(0,ad.default)().astSync(e):e;return r.walkClasses(n=>{let a=n.value,s=t&&a.startsWith("-");n.value=s?`-${i}${a.slice(1)}`:`${i}${a}`}),typeof e=="string"?r.toString():r}var ad,un=C(()=>{l();ad=K(Me())});function ce(i){let e=od.default.className();return e.value=i,ht(e?.raws?.value??e.value)}var od,Mt=C(()=>{l();od=K(Me());mi()});function Wa(i){return ht(`.${ce(i)}`)}function fn(i,e){return Wa(Qr(i,e))}function Qr(i,e){return e==="DEFAULT"?i:e==="-"||e==="-DEFAULT"?`-${i}`:e.startsWith("-")?`-${i}${e}`:e.startsWith("/")?`${i}${e}`:`${i}-${e}`}var Ga=C(()=>{l();Mt();mi()});function P(i,e=[[i,[i]]],{filterDefault:t=!1,...r}={}){let n=Ge(i);return function({matchUtilities:a,theme:s}){for(let o of e){let 
u=Array.isArray(o[0])?o:[o];a(u.reduce((c,[f,p])=>Object.assign(c,{[f]:d=>p.reduce((h,y)=>Array.isArray(y)?Object.assign(h,{[y[0]]:y[1]}):Object.assign(h,{[y]:n(d)}),{})}),{}),{...r,values:t?Object.fromEntries(Object.entries(s(i)??{}).filter(([c])=>c!=="DEFAULT")):s(i)})}}}var ld=C(()=>{l();Hr()});function st(i){return i=Array.isArray(i)?i:[i],i.map(e=>{let t=e.values.map(r=>r.raw!==void 0?r.raw:[r.min&&`(min-width: ${r.min})`,r.max&&`(max-width: ${r.max})`].filter(Boolean).join(" and "));return e.not?`not all and ${t}`:t}).join(", ")}var cn=C(()=>{l()});function Ha(i){return i.split(ZS).map(t=>{let r=t.trim(),n={value:r},a=r.split(eC),s=new Set;for(let o of a)!s.has("DIRECTIONS")&&HS.has(o)?(n.direction=o,s.add("DIRECTIONS")):!s.has("PLAY_STATES")&&YS.has(o)?(n.playState=o,s.add("PLAY_STATES")):!s.has("FILL_MODES")&&QS.has(o)?(n.fillMode=o,s.add("FILL_MODES")):!s.has("ITERATION_COUNTS")&&(JS.has(o)||tC.test(o))?(n.iterationCount=o,s.add("ITERATION_COUNTS")):!s.has("TIMING_FUNCTION")&&XS.has(o)||!s.has("TIMING_FUNCTION")&&KS.some(u=>o.startsWith(`${u}(`))?(n.timingFunction=o,s.add("TIMING_FUNCTION")):!s.has("DURATION")&&ud.test(o)?(n.duration=o,s.add("DURATION")):!s.has("DELAY")&&ud.test(o)?(n.delay=o,s.add("DELAY")):s.has("NAME")?(n.unknown||(n.unknown=[]),n.unknown.push(o)):(n.name=o,s.add("NAME"));return n})}var HS,YS,QS,JS,XS,KS,ZS,eC,ud,tC,fd=C(()=>{l();HS=new Set(["normal","reverse","alternate","alternate-reverse"]),YS=new Set(["running","paused"]),QS=new Set(["none","forwards","backwards","both"]),JS=new Set(["infinite"]),XS=new Set(["linear","ease","ease-in","ease-out","ease-in-out","step-start","step-end"]),KS=["cubic-bezier","steps"],ZS=/\,(?![^(]*\))/g,eC=/\ +(?![^(]*\))/g,ud=/^(-?[\d.]+m?s)$/,tC=/^(\d+)$/});var cd,re,pd=C(()=>{l();cd=i=>Object.assign({},...Object.entries(i??{}).flatMap(([e,t])=>typeof t=="object"?Object.entries(cd(t)).map(([r,n])=>({[e+(r==="DEFAULT"?"":`-${r}`)]:n})):[{[`${e}`]:t}])),re=cd});var rC,Qa,iC,nC,sC,aC,oC,lC,uC,fC,cC,pC,dC,hC,mC,gC,yC,wC,Ja,Ya=C(()=>{rC="tailwindcss",Qa="3.3.3",iC="A utility-first CSS framework for rapidly building custom user interfaces.",nC="MIT",sC="lib/index.js",aC="types/index.d.ts",oC="https://github.com/tailwindlabs/tailwindcss.git",lC="https://github.com/tailwindlabs/tailwindcss/issues",uC="https://tailwindcss.com",fC={tailwind:"lib/cli.js",tailwindcss:"lib/cli.js"},cC={engine:"stable"},pC={prebuild:"npm run generate && rimraf lib",build:`swc src --out-dir lib --copy-files --config jsc.transform.optimizer.globals.vars.__OXIDE__='"false"'`,postbuild:"esbuild lib/cli-peer-dependencies.js --bundle --platform=node --outfile=peers/index.js --define:process.env.CSS_TRANSFORMER_WASM=false","rebuild-fixtures":"npm run build && node -r @swc/register scripts/rebuildFixtures.js",style:"eslint .",pretest:"npm run generate",test:"jest","test:integrations":"npm run test --prefix ./integrations","install:integrations":"node scripts/install-integrations.js","generate:plugin-list":"node -r @swc/register scripts/create-plugin-list.js","generate:types":"node -r @swc/register scripts/generate-types.js",generate:"npm run generate:plugin-list && npm run generate:types","release-channel":"node ./scripts/release-channel.js","release-notes":"node ./scripts/release-notes.js",prepublishOnly:"npm install --force && npm run 
build"},dC=["src/*","cli/*","lib/*","peers/*","scripts/*.js","stubs/*","nesting/*","types/**/*","*.d.ts","*.css","*.js"],hC={"@swc/cli":"^0.1.62","@swc/core":"^1.3.55","@swc/jest":"^0.2.26","@swc/register":"^0.1.10",autoprefixer:"^10.4.14",browserslist:"^4.21.5",concurrently:"^8.0.1",cssnano:"^6.0.0",esbuild:"^0.17.18",eslint:"^8.39.0","eslint-config-prettier":"^8.8.0","eslint-plugin-prettier":"^4.2.1",jest:"^29.5.0","jest-diff":"^29.5.0",lightningcss:"1.18.0",prettier:"^2.8.8",rimraf:"^5.0.0","source-map-js":"^1.0.2",turbo:"^1.9.3"},mC={"@alloc/quick-lru":"^5.2.0",arg:"^5.0.2",chokidar:"^3.5.3",didyoumean:"^1.2.2",dlv:"^1.1.3","fast-glob":"^3.2.12","glob-parent":"^6.0.2","is-glob":"^4.0.3",jiti:"^1.18.2",lilconfig:"^2.1.0",micromatch:"^4.0.5","normalize-path":"^3.0.0","object-hash":"^3.0.0",picocolors:"^1.0.0",postcss:"^8.4.23","postcss-import":"^15.1.0","postcss-js":"^4.0.1","postcss-load-config":"^4.0.1","postcss-nested":"^6.0.1","postcss-selector-parser":"^6.0.11",resolve:"^1.22.2",sucrase:"^3.32.0"},gC=["> 1%","not edge <= 18","not ie 11","not op_mini all"],yC={testTimeout:3e4,setupFilesAfterEnv:["/jest/customMatchers.js"],testPathIgnorePatterns:["/node_modules/","/integrations/","/standalone-cli/","\\.test\\.skip\\.js$"],transformIgnorePatterns:["node_modules/(?!lightningcss)"],transform:{"\\.js$":"@swc/jest","\\.ts$":"@swc/jest"}},wC={node:">=14.0.0"},Ja={name:rC,version:Qa,description:iC,license:nC,main:sC,types:aC,repository:oC,bugs:lC,homepage:uC,bin:fC,tailwindcss:cC,scripts:pC,files:dC,devDependencies:hC,dependencies:mC,browserslist:gC,jest:yC,engines:wC}});function at(i,e=!0){return Array.isArray(i)?i.map(t=>{if(e&&Array.isArray(t))throw new Error("The tuple syntax is not supported for `screens`.");if(typeof t=="string")return{name:t.toString(),not:!1,values:[{min:t,max:void 0}]};let[r,n]=t;return r=r.toString(),typeof n=="string"?{name:r,not:!1,values:[{min:n,max:void 0}]}:Array.isArray(n)?{name:r,not:!1,values:n.map(a=>hd(a))}:{name:r,not:!1,values:[hd(n)]}}):at(Object.entries(i??{}),!1)}function pn(i){return i.values.length!==1?{result:!1,reason:"multiple-values"}:i.values[0].raw!==void 0?{result:!1,reason:"raw-values"}:i.values[0].min!==void 0&&i.values[0].max!==void 0?{result:!1,reason:"min-and-max"}:{result:!0,reason:null}}function dd(i,e,t){let r=dn(e,i),n=dn(t,i),a=pn(r),s=pn(n);if(a.reason==="multiple-values"||s.reason==="multiple-values")throw new Error("Attempted to sort a screen with multiple values. This should never happen. Please open a bug report.");if(a.reason==="raw-values"||s.reason==="raw-values")throw new Error("Attempted to sort a screen with raw values. This should never happen. Please open a bug report.");if(a.reason==="min-and-max"||s.reason==="min-and-max")throw new Error("Attempted to sort a screen with both min and max values. This should never happen. 
Please open a bug report.");let{min:o,max:u}=r.values[0],{min:c,max:f}=n.values[0];e.not&&([o,u]=[u,o]),t.not&&([c,f]=[f,c]),o=o===void 0?o:parseFloat(o),u=u===void 0?u:parseFloat(u),c=c===void 0?c:parseFloat(c),f=f===void 0?f:parseFloat(f);let[p,d]=i==="min"?[o,c]:[f,u];return p-d}function dn(i,e){return typeof i=="object"?i:{name:"arbitrary-screen",values:[{[e]:i}]}}function hd({"min-width":i,min:e=i,max:t,raw:r}={}){return{min:e,max:t,raw:r}}var hn=C(()=>{l()});function mn(i,e){i.walkDecls(t=>{if(e.includes(t.prop)){t.remove();return}for(let r of e)t.value.includes(`/ var(${r})`)&&(t.value=t.value.replace(`/ var(${r})`,""))})}var md=C(()=>{l()});var pe,Te,Be,Fe,gd,yd=C(()=>{l();ze();mt();nt();ld();cn();Mt();fd();pd();ar();ds();xt();Hr();Ya();Ee();hn();as();md();De();fr();Xr();pe={pseudoElementVariants:({addVariant:i})=>{i("first-letter","&::first-letter"),i("first-line","&::first-line"),i("marker",[({container:e})=>(mn(e,["--tw-text-opacity"]),"& *::marker"),({container:e})=>(mn(e,["--tw-text-opacity"]),"&::marker")]),i("selection",["& *::selection","&::selection"]),i("file","&::file-selector-button"),i("placeholder","&::placeholder"),i("backdrop","&::backdrop"),i("before",({container:e})=>(e.walkRules(t=>{let r=!1;t.walkDecls("content",()=>{r=!0}),r||t.prepend(z.decl({prop:"content",value:"var(--tw-content)"}))}),"&::before")),i("after",({container:e})=>(e.walkRules(t=>{let r=!1;t.walkDecls("content",()=>{r=!0}),r||t.prepend(z.decl({prop:"content",value:"var(--tw-content)"}))}),"&::after"))},pseudoClassVariants:({addVariant:i,matchVariant:e,config:t,prefix:r})=>{let n=[["first","&:first-child"],["last","&:last-child"],["only","&:only-child"],["odd","&:nth-child(odd)"],["even","&:nth-child(even)"],"first-of-type","last-of-type","only-of-type",["visited",({container:s})=>(mn(s,["--tw-text-opacity","--tw-border-opacity","--tw-bg-opacity"]),"&:visited")],"target",["open","&[open]"],"default","checked","indeterminate","placeholder-shown","autofill","optional","required","valid","invalid","in-range","out-of-range","read-only","empty","focus-within",["hover",J(t(),"hoverOnlyWhenSupported")?"@media (hover: hover) and (pointer: fine) { &:hover }":"&:hover"],"focus","focus-visible","active","enabled","disabled"].map(s=>Array.isArray(s)?s:[s,`&:${s}`]);for(let[s,o]of n)i(s,u=>typeof o=="function"?o(u):o);let a={group:(s,{modifier:o})=>o?[`:merge(${r(".group")}\\/${ce(o)})`," &"]:[`:merge(${r(".group")})`," &"],peer:(s,{modifier:o})=>o?[`:merge(${r(".peer")}\\/${ce(o)})`," ~ &"]:[`:merge(${r(".peer")})`," ~ &"]};for(let[s,o]of Object.entries(a))e(s,(u="",c)=>{let f=U(typeof u=="function"?u(c):u);f.includes("&")||(f="&"+f);let[p,d]=o("",c),h=null,y=null,x=0;for(let w=0;w{i("ltr",':is([dir="ltr"] &)'),i("rtl",':is([dir="rtl"] &)')},reducedMotionVariants:({addVariant:i})=>{i("motion-safe","@media (prefers-reduced-motion: no-preference)"),i("motion-reduce","@media (prefers-reduced-motion: reduce)")},darkVariants:({config:i,addVariant:e})=>{let[t,r=".dark"]=[].concat(i("darkMode","media"));t===!1&&(t="media",F.warn("darkmode-false",["The `darkMode` option in your Tailwind CSS configuration is set to `false`, which now behaves the same as `media`.","Change `darkMode` to `media` or remove it entirely.","https://tailwindcss.com/docs/upgrade-guide#remove-dark-mode-configuration"])),t==="class"?e("dark",`:is(${r} &)`):t==="media"&&e("dark","@media (prefers-color-scheme: dark)")},printVariant:({addVariant:i})=>{i("print","@media print")},screenVariants:({theme:i,addVariant:e,matchVariant:t})=>{let 
r=i("screens")??{},n=Object.values(r).every(b=>typeof b=="string"),a=at(i("screens")),s=new Set([]);function o(b){return b.match(/(\D+)$/)?.[1]??"(none)"}function u(b){b!==void 0&&s.add(o(b))}function c(b){return u(b),s.size===1}for(let b of a)for(let k of b.values)u(k.min),u(k.max);let f=s.size<=1;function p(b){return Object.fromEntries(a.filter(k=>pn(k).result).map(k=>{let{min:S,max:_}=k.values[0];if(b==="min"&&S!==void 0)return k;if(b==="min"&&_!==void 0)return{...k,not:!k.not};if(b==="max"&&_!==void 0)return k;if(b==="max"&&S!==void 0)return{...k,not:!k.not}}).map(k=>[k.name,k]))}function d(b){return(k,S)=>dd(b,k.value,S.value)}let h=d("max"),y=d("min");function x(b){return k=>{if(n)if(f){if(typeof k=="string"&&!c(k))return F.warn("minmax-have-mixed-units",["The `min-*` and `max-*` variants are not supported with a `screens` configuration containing mixed units."]),[]}else return F.warn("mixed-screen-units",["The `min-*` and `max-*` variants are not supported with a `screens` configuration containing mixed units."]),[];else return F.warn("complex-screen-config",["The `min-*` and `max-*` variants are not supported with a `screens` configuration containing objects."]),[];return[`@media ${st(dn(k,b))}`]}}t("max",x("max"),{sort:h,values:n?p("max"):{}});let w="min-screens";for(let b of a)e(b.name,`@media ${st(b)}`,{id:w,sort:n&&f?y:void 0,value:b});t("min",x("min"),{id:w,sort:y})},supportsVariants:({matchVariant:i,theme:e})=>{i("supports",(t="")=>{let r=U(t),n=/^\w*\s*\(/.test(r);return r=n?r.replace(/\b(and|or|not)\b/g," $1 "):r,n?`@supports ${r}`:(r.includes(":")||(r=`${r}: var(--tw)`),r.startsWith("(")&&r.endsWith(")")||(r=`(${r})`),`@supports ${r}`)},{values:e("supports")??{}})},ariaVariants:({matchVariant:i,theme:e})=>{i("aria",t=>`&[aria-${U(t)}]`,{values:e("aria")??{}}),i("group-aria",(t,{modifier:r})=>r?`:merge(.group\\/${r})[aria-${U(t)}] &`:`:merge(.group)[aria-${U(t)}] &`,{values:e("aria")??{}}),i("peer-aria",(t,{modifier:r})=>r?`:merge(.peer\\/${r})[aria-${U(t)}] ~ &`:`:merge(.peer)[aria-${U(t)}] ~ &`,{values:e("aria")??{}})},dataVariants:({matchVariant:i,theme:e})=>{i("data",t=>`&[data-${U(t)}]`,{values:e("data")??{}}),i("group-data",(t,{modifier:r})=>r?`:merge(.group\\/${r})[data-${U(t)}] &`:`:merge(.group)[data-${U(t)}] &`,{values:e("data")??{}}),i("peer-data",(t,{modifier:r})=>r?`:merge(.peer\\/${r})[data-${U(t)}] ~ &`:`:merge(.peer)[data-${U(t)}] ~ &`,{values:e("data")??{}})},orientationVariants:({addVariant:i})=>{i("portrait","@media (orientation: portrait)"),i("landscape","@media (orientation: landscape)")},prefersContrastVariants:({addVariant:i})=>{i("contrast-more","@media (prefers-contrast: more)"),i("contrast-less","@media (prefers-contrast: less)")}},Te=["translate(var(--tw-translate-x), var(--tw-translate-y))","rotate(var(--tw-rotate))","skewX(var(--tw-skew-x))","skewY(var(--tw-skew-y))","scaleX(var(--tw-scale-x))","scaleY(var(--tw-scale-y))"].join(" "),Be=["var(--tw-blur)","var(--tw-brightness)","var(--tw-contrast)","var(--tw-grayscale)","var(--tw-hue-rotate)","var(--tw-invert)","var(--tw-saturate)","var(--tw-sepia)","var(--tw-drop-shadow)"].join(" "),Fe=["var(--tw-backdrop-blur)","var(--tw-backdrop-brightness)","var(--tw-backdrop-contrast)","var(--tw-backdrop-grayscale)","var(--tw-backdrop-hue-rotate)","var(--tw-backdrop-invert)","var(--tw-backdrop-opacity)","var(--tw-backdrop-saturate)","var(--tw-backdrop-sepia)"].join(" "),gd={preflight:({addBase:i})=>{let 
e=z.parse(`*,::after,::before{box-sizing:border-box;border-width:0;border-style:solid;border-color:theme('borderColor.DEFAULT', currentColor)}::after,::before{--tw-content:''}html{line-height:1.5;-webkit-text-size-adjust:100%;-moz-tab-size:4;tab-size:4;font-family:theme('fontFamily.sans', ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji");font-feature-settings:theme('fontFamily.sans[1].fontFeatureSettings', normal);font-variation-settings:theme('fontFamily.sans[1].fontVariationSettings', normal)}body{margin:0;line-height:inherit}hr{height:0;color:inherit;border-top-width:1px}abbr:where([title]){text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,pre,samp{font-family:theme('fontFamily.mono', ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace);font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit;border-collapse:collapse}button,input,optgroup,select,textarea{font-family:inherit;font-feature-settings:inherit;font-variation-settings:inherit;font-size:100%;font-weight:inherit;line-height:inherit;color:inherit;margin:0;padding:0}button,select{text-transform:none}[type=button],[type=reset],[type=submit],button{-webkit-appearance:button;background-color:transparent;background-image:none}:-moz-focusring{outline:auto}:-moz-ui-invalid{box-shadow:none}progress{vertical-align:baseline}::-webkit-inner-spin-button,::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}summary{display:list-item}blockquote,dd,dl,figure,h1,h2,h3,h4,h5,h6,hr,p,pre{margin:0}fieldset{margin:0;padding:0}legend{padding:0}menu,ol,ul{list-style:none;margin:0;padding:0}dialog{padding:0}textarea{resize:vertical}input::placeholder,textarea::placeholder{opacity:1;color:theme('colors.gray.4', #9ca3af)}[role=button],button{cursor:pointer}:disabled{cursor:default}audio,canvas,embed,iframe,img,object,svg,video{display:block;vertical-align:middle}img,video{max-width:100%;height:auto}[hidden]{display:none}`);i([z.comment({text:`! 
tailwindcss v${Qa} | MIT License | https://tailwindcss.com`}),...e.nodes])},container:(()=>{function i(t=[]){return t.flatMap(r=>r.values.map(n=>n.min)).filter(r=>r!==void 0)}function e(t,r,n){if(typeof n=="undefined")return[];if(!(typeof n=="object"&&n!==null))return[{screen:"DEFAULT",minWidth:0,padding:n}];let a=[];n.DEFAULT&&a.push({screen:"DEFAULT",minWidth:0,padding:n.DEFAULT});for(let s of t)for(let o of r)for(let{min:u}of o.values)u===s&&a.push({minWidth:s,padding:n[o.name]});return a}return function({addComponents:t,theme:r}){let n=at(r("container.screens",r("screens"))),a=i(n),s=e(a,n,r("container.padding")),o=c=>{let f=s.find(p=>p.minWidth===c);return f?{paddingRight:f.padding,paddingLeft:f.padding}:{}},u=Array.from(new Set(a.slice().sort((c,f)=>parseInt(c)-parseInt(f)))).map(c=>({[`@media (min-width: ${c})`]:{".container":{"max-width":c,...o(c)}}}));t([{".container":Object.assign({width:"100%"},r("container.center",!1)?{marginRight:"auto",marginLeft:"auto"}:{},o(0))},...u])}})(),accessibility:({addUtilities:i})=>{i({".sr-only":{position:"absolute",width:"1px",height:"1px",padding:"0",margin:"-1px",overflow:"hidden",clip:"rect(0, 0, 0, 0)",whiteSpace:"nowrap",borderWidth:"0"},".not-sr-only":{position:"static",width:"auto",height:"auto",padding:"0",margin:"0",overflow:"visible",clip:"auto",whiteSpace:"normal"}})},pointerEvents:({addUtilities:i})=>{i({".pointer-events-none":{"pointer-events":"none"},".pointer-events-auto":{"pointer-events":"auto"}})},visibility:({addUtilities:i})=>{i({".visible":{visibility:"visible"},".invisible":{visibility:"hidden"},".collapse":{visibility:"collapse"}})},position:({addUtilities:i})=>{i({".static":{position:"static"},".fixed":{position:"fixed"},".absolute":{position:"absolute"},".relative":{position:"relative"},".sticky":{position:"sticky"}})},inset:P("inset",[["inset",["inset"]],[["inset-x",["left","right"]],["inset-y",["top","bottom"]]],[["start",["inset-inline-start"]],["end",["inset-inline-end"]],["top",["top"]],["right",["right"]],["bottom",["bottom"]],["left",["left"]]]],{supportsNegativeValues:!0}),isolation:({addUtilities:i})=>{i({".isolate":{isolation:"isolate"},".isolation-auto":{isolation:"auto"}})},zIndex:P("zIndex",[["z",["zIndex"]]],{supportsNegativeValues:!0}),order:P("order",void 
0,{supportsNegativeValues:!0}),gridColumn:P("gridColumn",[["col",["gridColumn"]]]),gridColumnStart:P("gridColumnStart",[["col-start",["gridColumnStart"]]]),gridColumnEnd:P("gridColumnEnd",[["col-end",["gridColumnEnd"]]]),gridRow:P("gridRow",[["row",["gridRow"]]]),gridRowStart:P("gridRowStart",[["row-start",["gridRowStart"]]]),gridRowEnd:P("gridRowEnd",[["row-end",["gridRowEnd"]]]),float:({addUtilities:i})=>{i({".float-right":{float:"right"},".float-left":{float:"left"},".float-none":{float:"none"}})},clear:({addUtilities:i})=>{i({".clear-left":{clear:"left"},".clear-right":{clear:"right"},".clear-both":{clear:"both"},".clear-none":{clear:"none"}})},margin:P("margin",[["m",["margin"]],[["mx",["margin-left","margin-right"]],["my",["margin-top","margin-bottom"]]],[["ms",["margin-inline-start"]],["me",["margin-inline-end"]],["mt",["margin-top"]],["mr",["margin-right"]],["mb",["margin-bottom"]],["ml",["margin-left"]]]],{supportsNegativeValues:!0}),boxSizing:({addUtilities:i})=>{i({".box-border":{"box-sizing":"border-box"},".box-content":{"box-sizing":"content-box"}})},lineClamp:({matchUtilities:i,addUtilities:e,theme:t})=>{i({"line-clamp":r=>({overflow:"hidden",display:"-webkit-box","-webkit-box-orient":"vertical","-webkit-line-clamp":`${r}`})},{values:t("lineClamp")}),e({".line-clamp-none":{overflow:"visible",display:"block","-webkit-box-orient":"horizontal","-webkit-line-clamp":"none"}})},display:({addUtilities:i})=>{i({".block":{display:"block"},".inline-block":{display:"inline-block"},".inline":{display:"inline"},".flex":{display:"flex"},".inline-flex":{display:"inline-flex"},".table":{display:"table"},".inline-table":{display:"inline-table"},".table-caption":{display:"table-caption"},".table-cell":{display:"table-cell"},".table-column":{display:"table-column"},".table-column-group":{display:"table-column-group"},".table-footer-group":{display:"table-footer-group"},".table-header-group":{display:"table-header-group"},".table-row-group":{display:"table-row-group"},".table-row":{display:"table-row"},".flow-root":{display:"flow-root"},".grid":{display:"grid"},".inline-grid":{display:"inline-grid"},".contents":{display:"contents"},".list-item":{display:"list-item"},".hidden":{display:"none"}})},aspectRatio:P("aspectRatio",[["aspect",["aspect-ratio"]]]),height:P("height",[["h",["height"]]]),maxHeight:P("maxHeight",[["max-h",["maxHeight"]]]),minHeight:P("minHeight",[["min-h",["minHeight"]]]),width:P("width",[["w",["width"]]]),minWidth:P("minWidth",[["min-w",["minWidth"]]]),maxWidth:P("maxWidth",[["max-w",["maxWidth"]]]),flex:P("flex"),flexShrink:P("flexShrink",[["flex-shrink",["flex-shrink"]],["shrink",["flex-shrink"]]]),flexGrow:P("flexGrow",[["flex-grow",["flex-grow"]],["grow",["flex-grow"]]]),flexBasis:P("flexBasis",[["basis",["flex-basis"]]]),tableLayout:({addUtilities:i})=>{i({".table-auto":{"table-layout":"auto"},".table-fixed":{"table-layout":"fixed"}})},captionSide:({addUtilities:i})=>{i({".caption-top":{"caption-side":"top"},".caption-bottom":{"caption-side":"bottom"}})},borderCollapse:({addUtilities:i})=>{i({".border-collapse":{"border-collapse":"collapse"},".border-separate":{"border-collapse":"separate"}})},borderSpacing:({addDefaults:i,matchUtilities:e,theme:t})=>{i("border-spacing",{"--tw-border-spacing-x":0,"--tw-border-spacing-y":0}),e({"border-spacing":r=>({"--tw-border-spacing-x":r,"--tw-border-spacing-y":r,"@defaults border-spacing":{},"border-spacing":"var(--tw-border-spacing-x) var(--tw-border-spacing-y)"}),"border-spacing-x":r=>({"--tw-border-spacing-x":r,"@defaults 
border-spacing":{},"border-spacing":"var(--tw-border-spacing-x) var(--tw-border-spacing-y)"}),"border-spacing-y":r=>({"--tw-border-spacing-y":r,"@defaults border-spacing":{},"border-spacing":"var(--tw-border-spacing-x) var(--tw-border-spacing-y)"})},{values:t("borderSpacing")})},transformOrigin:P("transformOrigin",[["origin",["transformOrigin"]]]),translate:P("translate",[[["translate-x",[["@defaults transform",{}],"--tw-translate-x",["transform",Te]]],["translate-y",[["@defaults transform",{}],"--tw-translate-y",["transform",Te]]]]],{supportsNegativeValues:!0}),rotate:P("rotate",[["rotate",[["@defaults transform",{}],"--tw-rotate",["transform",Te]]]],{supportsNegativeValues:!0}),skew:P("skew",[[["skew-x",[["@defaults transform",{}],"--tw-skew-x",["transform",Te]]],["skew-y",[["@defaults transform",{}],"--tw-skew-y",["transform",Te]]]]],{supportsNegativeValues:!0}),scale:P("scale",[["scale",[["@defaults transform",{}],"--tw-scale-x","--tw-scale-y",["transform",Te]]],[["scale-x",[["@defaults transform",{}],"--tw-scale-x",["transform",Te]]],["scale-y",[["@defaults transform",{}],"--tw-scale-y",["transform",Te]]]]],{supportsNegativeValues:!0}),transform:({addDefaults:i,addUtilities:e})=>{i("transform",{"--tw-translate-x":"0","--tw-translate-y":"0","--tw-rotate":"0","--tw-skew-x":"0","--tw-skew-y":"0","--tw-scale-x":"1","--tw-scale-y":"1"}),e({".transform":{"@defaults transform":{},transform:Te},".transform-cpu":{transform:Te},".transform-gpu":{transform:Te.replace("translate(var(--tw-translate-x), var(--tw-translate-y))","translate3d(var(--tw-translate-x), var(--tw-translate-y), 0)")},".transform-none":{transform:"none"}})},animation:({matchUtilities:i,theme:e,config:t})=>{let r=a=>ce(t("prefix")+a),n=Object.fromEntries(Object.entries(e("keyframes")??{}).map(([a,s])=>[a,{[`@keyframes ${r(a)}`]:s}]));i({animate:a=>{let s=Ha(a);return[...s.flatMap(o=>n[o.name]),{animation:s.map(({name:o,value:u})=>o===void 0||n[o]===void 0?u:u.replace(o,r(o))).join(", ")}]}},{values:e("animation")})},cursor:P("cursor"),touchAction:({addDefaults:i,addUtilities:e})=>{i("touch-action",{"--tw-pan-x":" ","--tw-pan-y":" ","--tw-pinch-zoom":" "});let t="var(--tw-pan-x) var(--tw-pan-y) var(--tw-pinch-zoom)";e({".touch-auto":{"touch-action":"auto"},".touch-none":{"touch-action":"none"},".touch-pan-x":{"@defaults touch-action":{},"--tw-pan-x":"pan-x","touch-action":t},".touch-pan-left":{"@defaults touch-action":{},"--tw-pan-x":"pan-left","touch-action":t},".touch-pan-right":{"@defaults touch-action":{},"--tw-pan-x":"pan-right","touch-action":t},".touch-pan-y":{"@defaults touch-action":{},"--tw-pan-y":"pan-y","touch-action":t},".touch-pan-up":{"@defaults touch-action":{},"--tw-pan-y":"pan-up","touch-action":t},".touch-pan-down":{"@defaults touch-action":{},"--tw-pan-y":"pan-down","touch-action":t},".touch-pinch-zoom":{"@defaults touch-action":{},"--tw-pinch-zoom":"pinch-zoom","touch-action":t},".touch-manipulation":{"touch-action":"manipulation"}})},userSelect:({addUtilities:i})=>{i({".select-none":{"user-select":"none"},".select-text":{"user-select":"text"},".select-all":{"user-select":"all"},".select-auto":{"user-select":"auto"}})},resize:({addUtilities:i})=>{i({".resize-none":{resize:"none"},".resize-y":{resize:"vertical"},".resize-x":{resize:"horizontal"},".resize":{resize:"both"}})},scrollSnapType:({addDefaults:i,addUtilities:e})=>{i("scroll-snap-type",{"--tw-scroll-snap-strictness":"proximity"}),e({".snap-none":{"scroll-snap-type":"none"},".snap-x":{"@defaults scroll-snap-type":{},"scroll-snap-type":"x 
var(--tw-scroll-snap-strictness)"},".snap-y":{"@defaults scroll-snap-type":{},"scroll-snap-type":"y var(--tw-scroll-snap-strictness)"},".snap-both":{"@defaults scroll-snap-type":{},"scroll-snap-type":"both var(--tw-scroll-snap-strictness)"},".snap-mandatory":{"--tw-scroll-snap-strictness":"mandatory"},".snap-proximity":{"--tw-scroll-snap-strictness":"proximity"}})},scrollSnapAlign:({addUtilities:i})=>{i({".snap-start":{"scroll-snap-align":"start"},".snap-end":{"scroll-snap-align":"end"},".snap-center":{"scroll-snap-align":"center"},".snap-align-none":{"scroll-snap-align":"none"}})},scrollSnapStop:({addUtilities:i})=>{i({".snap-normal":{"scroll-snap-stop":"normal"},".snap-always":{"scroll-snap-stop":"always"}})},scrollMargin:P("scrollMargin",[["scroll-m",["scroll-margin"]],[["scroll-mx",["scroll-margin-left","scroll-margin-right"]],["scroll-my",["scroll-margin-top","scroll-margin-bottom"]]],[["scroll-ms",["scroll-margin-inline-start"]],["scroll-me",["scroll-margin-inline-end"]],["scroll-mt",["scroll-margin-top"]],["scroll-mr",["scroll-margin-right"]],["scroll-mb",["scroll-margin-bottom"]],["scroll-ml",["scroll-margin-left"]]]],{supportsNegativeValues:!0}),scrollPadding:P("scrollPadding",[["scroll-p",["scroll-padding"]],[["scroll-px",["scroll-padding-left","scroll-padding-right"]],["scroll-py",["scroll-padding-top","scroll-padding-bottom"]]],[["scroll-ps",["scroll-padding-inline-start"]],["scroll-pe",["scroll-padding-inline-end"]],["scroll-pt",["scroll-padding-top"]],["scroll-pr",["scroll-padding-right"]],["scroll-pb",["scroll-padding-bottom"]],["scroll-pl",["scroll-padding-left"]]]]),listStylePosition:({addUtilities:i})=>{i({".list-inside":{"list-style-position":"inside"},".list-outside":{"list-style-position":"outside"}})},listStyleType:P("listStyleType",[["list",["listStyleType"]]]),listStyleImage:P("listStyleImage",[["list-image",["listStyleImage"]]]),appearance:({addUtilities:i})=>{i({".appearance-none":{appearance:"none"}})},columns:P("columns",[["columns",["columns"]]]),breakBefore:({addUtilities:i})=>{i({".break-before-auto":{"break-before":"auto"},".break-before-avoid":{"break-before":"avoid"},".break-before-all":{"break-before":"all"},".break-before-avoid-page":{"break-before":"avoid-page"},".break-before-page":{"break-before":"page"},".break-before-left":{"break-before":"left"},".break-before-right":{"break-before":"right"},".break-before-column":{"break-before":"column"}})},breakInside:({addUtilities:i})=>{i({".break-inside-auto":{"break-inside":"auto"},".break-inside-avoid":{"break-inside":"avoid"},".break-inside-avoid-page":{"break-inside":"avoid-page"},".break-inside-avoid-column":{"break-inside":"avoid-column"}})},breakAfter:({addUtilities:i})=>{i({".break-after-auto":{"break-after":"auto"},".break-after-avoid":{"break-after":"avoid"},".break-after-all":{"break-after":"all"},".break-after-avoid-page":{"break-after":"avoid-page"},".break-after-page":{"break-after":"page"},".break-after-left":{"break-after":"left"},".break-after-right":{"break-after":"right"},".break-after-column":{"break-after":"column"}})},gridAutoColumns:P("gridAutoColumns",[["auto-cols",["gridAutoColumns"]]]),gridAutoFlow:({addUtilities:i})=>{i({".grid-flow-row":{gridAutoFlow:"row"},".grid-flow-col":{gridAutoFlow:"column"},".grid-flow-dense":{gridAutoFlow:"dense"},".grid-flow-row-dense":{gridAutoFlow:"row dense"},".grid-flow-col-dense":{gridAutoFlow:"column 
dense"}})},gridAutoRows:P("gridAutoRows",[["auto-rows",["gridAutoRows"]]]),gridTemplateColumns:P("gridTemplateColumns",[["grid-cols",["gridTemplateColumns"]]]),gridTemplateRows:P("gridTemplateRows",[["grid-rows",["gridTemplateRows"]]]),flexDirection:({addUtilities:i})=>{i({".flex-row":{"flex-direction":"row"},".flex-row-reverse":{"flex-direction":"row-reverse"},".flex-col":{"flex-direction":"column"},".flex-col-reverse":{"flex-direction":"column-reverse"}})},flexWrap:({addUtilities:i})=>{i({".flex-wrap":{"flex-wrap":"wrap"},".flex-wrap-reverse":{"flex-wrap":"wrap-reverse"},".flex-nowrap":{"flex-wrap":"nowrap"}})},placeContent:({addUtilities:i})=>{i({".place-content-center":{"place-content":"center"},".place-content-start":{"place-content":"start"},".place-content-end":{"place-content":"end"},".place-content-between":{"place-content":"space-between"},".place-content-around":{"place-content":"space-around"},".place-content-evenly":{"place-content":"space-evenly"},".place-content-baseline":{"place-content":"baseline"},".place-content-stretch":{"place-content":"stretch"}})},placeItems:({addUtilities:i})=>{i({".place-items-start":{"place-items":"start"},".place-items-end":{"place-items":"end"},".place-items-center":{"place-items":"center"},".place-items-baseline":{"place-items":"baseline"},".place-items-stretch":{"place-items":"stretch"}})},alignContent:({addUtilities:i})=>{i({".content-normal":{"align-content":"normal"},".content-center":{"align-content":"center"},".content-start":{"align-content":"flex-start"},".content-end":{"align-content":"flex-end"},".content-between":{"align-content":"space-between"},".content-around":{"align-content":"space-around"},".content-evenly":{"align-content":"space-evenly"},".content-baseline":{"align-content":"baseline"},".content-stretch":{"align-content":"stretch"}})},alignItems:({addUtilities:i})=>{i({".items-start":{"align-items":"flex-start"},".items-end":{"align-items":"flex-end"},".items-center":{"align-items":"center"},".items-baseline":{"align-items":"baseline"},".items-stretch":{"align-items":"stretch"}})},justifyContent:({addUtilities:i})=>{i({".justify-normal":{"justify-content":"normal"},".justify-start":{"justify-content":"flex-start"},".justify-end":{"justify-content":"flex-end"},".justify-center":{"justify-content":"center"},".justify-between":{"justify-content":"space-between"},".justify-around":{"justify-content":"space-around"},".justify-evenly":{"justify-content":"space-evenly"},".justify-stretch":{"justify-content":"stretch"}})},justifyItems:({addUtilities:i})=>{i({".justify-items-start":{"justify-items":"start"},".justify-items-end":{"justify-items":"end"},".justify-items-center":{"justify-items":"center"},".justify-items-stretch":{"justify-items":"stretch"}})},gap:P("gap",[["gap",["gap"]],[["gap-x",["columnGap"]],["gap-y",["rowGap"]]]]),space:({matchUtilities:i,addUtilities:e,theme:t})=>{i({"space-x":r=>(r=r==="0"?"0px":r,{"& > :not([hidden]) ~ :not([hidden])":{"--tw-space-x-reverse":"0","margin-right":`calc(${r} * var(--tw-space-x-reverse))`,"margin-left":`calc(${r} * calc(1 - var(--tw-space-x-reverse)))`}}),"space-y":r=>(r=r==="0"?"0px":r,{"& > :not([hidden]) ~ :not([hidden])":{"--tw-space-y-reverse":"0","margin-top":`calc(${r} * calc(1 - var(--tw-space-y-reverse)))`,"margin-bottom":`calc(${r} * var(--tw-space-y-reverse))`}})},{values:t("space"),supportsNegativeValues:!0}),e({".space-y-reverse > :not([hidden]) ~ :not([hidden])":{"--tw-space-y-reverse":"1"},".space-x-reverse > :not([hidden]) ~ 
:not([hidden])":{"--tw-space-x-reverse":"1"}})},divideWidth:({matchUtilities:i,addUtilities:e,theme:t})=>{i({"divide-x":r=>(r=r==="0"?"0px":r,{"& > :not([hidden]) ~ :not([hidden])":{"@defaults border-width":{},"--tw-divide-x-reverse":"0","border-right-width":`calc(${r} * var(--tw-divide-x-reverse))`,"border-left-width":`calc(${r} * calc(1 - var(--tw-divide-x-reverse)))`}}),"divide-y":r=>(r=r==="0"?"0px":r,{"& > :not([hidden]) ~ :not([hidden])":{"@defaults border-width":{},"--tw-divide-y-reverse":"0","border-top-width":`calc(${r} * calc(1 - var(--tw-divide-y-reverse)))`,"border-bottom-width":`calc(${r} * var(--tw-divide-y-reverse))`}})},{values:t("divideWidth"),type:["line-width","length","any"]}),e({".divide-y-reverse > :not([hidden]) ~ :not([hidden])":{"@defaults border-width":{},"--tw-divide-y-reverse":"1"},".divide-x-reverse > :not([hidden]) ~ :not([hidden])":{"@defaults border-width":{},"--tw-divide-x-reverse":"1"}})},divideStyle:({addUtilities:i})=>{i({".divide-solid > :not([hidden]) ~ :not([hidden])":{"border-style":"solid"},".divide-dashed > :not([hidden]) ~ :not([hidden])":{"border-style":"dashed"},".divide-dotted > :not([hidden]) ~ :not([hidden])":{"border-style":"dotted"},".divide-double > :not([hidden]) ~ :not([hidden])":{"border-style":"double"},".divide-none > :not([hidden]) ~ :not([hidden])":{"border-style":"none"}})},divideColor:({matchUtilities:i,theme:e,corePlugins:t})=>{i({divide:r=>t("divideOpacity")?{["& > :not([hidden]) ~ :not([hidden])"]:se({color:r,property:"border-color",variable:"--tw-divide-opacity"})}:{["& > :not([hidden]) ~ :not([hidden])"]:{"border-color":N(r)}}},{values:(({DEFAULT:r,...n})=>n)(re(e("divideColor"))),type:["color","any"]})},divideOpacity:({matchUtilities:i,theme:e})=>{i({"divide-opacity":t=>({["& > :not([hidden]) ~ 
:not([hidden])"]:{"--tw-divide-opacity":t}})},{values:e("divideOpacity")})},placeSelf:({addUtilities:i})=>{i({".place-self-auto":{"place-self":"auto"},".place-self-start":{"place-self":"start"},".place-self-end":{"place-self":"end"},".place-self-center":{"place-self":"center"},".place-self-stretch":{"place-self":"stretch"}})},alignSelf:({addUtilities:i})=>{i({".self-auto":{"align-self":"auto"},".self-start":{"align-self":"flex-start"},".self-end":{"align-self":"flex-end"},".self-center":{"align-self":"center"},".self-stretch":{"align-self":"stretch"},".self-baseline":{"align-self":"baseline"}})},justifySelf:({addUtilities:i})=>{i({".justify-self-auto":{"justify-self":"auto"},".justify-self-start":{"justify-self":"start"},".justify-self-end":{"justify-self":"end"},".justify-self-center":{"justify-self":"center"},".justify-self-stretch":{"justify-self":"stretch"}})},overflow:({addUtilities:i})=>{i({".overflow-auto":{overflow:"auto"},".overflow-hidden":{overflow:"hidden"},".overflow-clip":{overflow:"clip"},".overflow-visible":{overflow:"visible"},".overflow-scroll":{overflow:"scroll"},".overflow-x-auto":{"overflow-x":"auto"},".overflow-y-auto":{"overflow-y":"auto"},".overflow-x-hidden":{"overflow-x":"hidden"},".overflow-y-hidden":{"overflow-y":"hidden"},".overflow-x-clip":{"overflow-x":"clip"},".overflow-y-clip":{"overflow-y":"clip"},".overflow-x-visible":{"overflow-x":"visible"},".overflow-y-visible":{"overflow-y":"visible"},".overflow-x-scroll":{"overflow-x":"scroll"},".overflow-y-scroll":{"overflow-y":"scroll"}})},overscrollBehavior:({addUtilities:i})=>{i({".overscroll-auto":{"overscroll-behavior":"auto"},".overscroll-contain":{"overscroll-behavior":"contain"},".overscroll-none":{"overscroll-behavior":"none"},".overscroll-y-auto":{"overscroll-behavior-y":"auto"},".overscroll-y-contain":{"overscroll-behavior-y":"contain"},".overscroll-y-none":{"overscroll-behavior-y":"none"},".overscroll-x-auto":{"overscroll-behavior-x":"auto"},".overscroll-x-contain":{"overscroll-behavior-x":"contain"},".overscroll-x-none":{"overscroll-behavior-x":"none"}})},scrollBehavior:({addUtilities:i})=>{i({".scroll-auto":{"scroll-behavior":"auto"},".scroll-smooth":{"scroll-behavior":"smooth"}})},textOverflow:({addUtilities:i})=>{i({".truncate":{overflow:"hidden","text-overflow":"ellipsis","white-space":"nowrap"},".overflow-ellipsis":{"text-overflow":"ellipsis"},".text-ellipsis":{"text-overflow":"ellipsis"},".text-clip":{"text-overflow":"clip"}})},hyphens:({addUtilities:i})=>{i({".hyphens-none":{hyphens:"none"},".hyphens-manual":{hyphens:"manual"},".hyphens-auto":{hyphens:"auto"}})},whitespace:({addUtilities:i})=>{i({".whitespace-normal":{"white-space":"normal"},".whitespace-nowrap":{"white-space":"nowrap"},".whitespace-pre":{"white-space":"pre"},".whitespace-pre-line":{"white-space":"pre-line"},".whitespace-pre-wrap":{"white-space":"pre-wrap"},".whitespace-break-spaces":{"white-space":"break-spaces"}})},wordBreak:({addUtilities:i})=>{i({".break-normal":{"overflow-wrap":"normal","word-break":"normal"},".break-words":{"overflow-wrap":"break-word"},".break-all":{"word-break":"break-all"},".break-keep":{"word-break":"keep-all"}})},borderRadius:P("borderRadius",[["rounded",["border-radius"]],[["rounded-s",["border-start-start-radius","border-end-start-radius"]],["rounded-e",["border-start-end-radius","border-end-end-radius"]],["rounded-t",["border-top-left-radius","border-top-right-radius"]],["rounded-r",["border-top-right-radius","border-bottom-right-radius"]],["rounded-b",["border-bottom-right-radius","border-bottom-lef
t-radius"]],["rounded-l",["border-top-left-radius","border-bottom-left-radius"]]],[["rounded-ss",["border-start-start-radius"]],["rounded-se",["border-start-end-radius"]],["rounded-ee",["border-end-end-radius"]],["rounded-es",["border-end-start-radius"]],["rounded-tl",["border-top-left-radius"]],["rounded-tr",["border-top-right-radius"]],["rounded-br",["border-bottom-right-radius"]],["rounded-bl",["border-bottom-left-radius"]]]]),borderWidth:P("borderWidth",[["border",[["@defaults border-width",{}],"border-width"]],[["border-x",[["@defaults border-width",{}],"border-left-width","border-right-width"]],["border-y",[["@defaults border-width",{}],"border-top-width","border-bottom-width"]]],[["border-s",[["@defaults border-width",{}],"border-inline-start-width"]],["border-e",[["@defaults border-width",{}],"border-inline-end-width"]],["border-t",[["@defaults border-width",{}],"border-top-width"]],["border-r",[["@defaults border-width",{}],"border-right-width"]],["border-b",[["@defaults border-width",{}],"border-bottom-width"]],["border-l",[["@defaults border-width",{}],"border-left-width"]]]],{type:["line-width","length"]}),borderStyle:({addUtilities:i})=>{i({".border-solid":{"border-style":"solid"},".border-dashed":{"border-style":"dashed"},".border-dotted":{"border-style":"dotted"},".border-double":{"border-style":"double"},".border-hidden":{"border-style":"hidden"},".border-none":{"border-style":"none"}})},borderColor:({matchUtilities:i,theme:e,corePlugins:t})=>{i({border:r=>t("borderOpacity")?se({color:r,property:"border-color",variable:"--tw-border-opacity"}):{"border-color":N(r)}},{values:(({DEFAULT:r,...n})=>n)(re(e("borderColor"))),type:["color","any"]}),i({"border-x":r=>t("borderOpacity")?se({color:r,property:["border-left-color","border-right-color"],variable:"--tw-border-opacity"}):{"border-left-color":N(r),"border-right-color":N(r)},"border-y":r=>t("borderOpacity")?se({color:r,property:["border-top-color","border-bottom-color"],variable:"--tw-border-opacity"}):{"border-top-color":N(r),"border-bottom-color":N(r)}},{values:(({DEFAULT:r,...n})=>n)(re(e("borderColor"))),type:["color","any"]}),i({"border-s":r=>t("borderOpacity")?se({color:r,property:"border-inline-start-color",variable:"--tw-border-opacity"}):{"border-inline-start-color":N(r)},"border-e":r=>t("borderOpacity")?se({color:r,property:"border-inline-end-color",variable:"--tw-border-opacity"}):{"border-inline-end-color":N(r)},"border-t":r=>t("borderOpacity")?se({color:r,property:"border-top-color",variable:"--tw-border-opacity"}):{"border-top-color":N(r)},"border-r":r=>t("borderOpacity")?se({color:r,property:"border-right-color",variable:"--tw-border-opacity"}):{"border-right-color":N(r)},"border-b":r=>t("borderOpacity")?se({color:r,property:"border-bottom-color",variable:"--tw-border-opacity"}):{"border-bottom-color":N(r)},"border-l":r=>t("borderOpacity")?se({color:r,property:"border-left-color",variable:"--tw-border-opacity"}):{"border-left-color":N(r)}},{values:(({DEFAULT:r,...n})=>n)(re(e("borderColor"))),type:["color","any"]})},borderOpacity:P("borderOpacity",[["border-opacity",["--tw-border-opacity"]]]),backgroundColor:({matchUtilities:i,theme:e,corePlugins:t})=>{i({bg:r=>t("backgroundOpacity")?se({color:r,property:"background-color",variable:"--tw-bg-opacity"}):{"background-color":N(r)}},{values:re(e("backgroundColor")),type:["color","any"]})},backgroundOpacity:P("backgroundOpacity",[["bg-opacity",["--tw-bg-opacity"]]]),backgroundImage:P("backgroundImage",[["bg",["background-image"]]],{type:["lookup","image","url"]}),grad
ientColorStops:(()=>{function i(e){return Ie(e,0,"rgb(255 255 255 / 0)")}return function({matchUtilities:e,theme:t,addDefaults:r}){r("gradient-color-stops",{"--tw-gradient-from-position":" ","--tw-gradient-via-position":" ","--tw-gradient-to-position":" "});let n={values:re(t("gradientColorStops")),type:["color","any"]},a={values:t("gradientColorStopPositions"),type:["length","percentage"]};e({from:s=>{let o=i(s);return{"@defaults gradient-color-stops":{},"--tw-gradient-from":`${N(s)} var(--tw-gradient-from-position)`,"--tw-gradient-to":`${o} var(--tw-gradient-to-position)`,"--tw-gradient-stops":"var(--tw-gradient-from), var(--tw-gradient-to)"}}},n),e({from:s=>({"--tw-gradient-from-position":s})},a),e({via:s=>{let o=i(s);return{"@defaults gradient-color-stops":{},"--tw-gradient-to":`${o} var(--tw-gradient-to-position)`,"--tw-gradient-stops":`var(--tw-gradient-from), ${N(s)} var(--tw-gradient-via-position), var(--tw-gradient-to)`}}},n),e({via:s=>({"--tw-gradient-via-position":s})},a),e({to:s=>({"@defaults gradient-color-stops":{},"--tw-gradient-to":`${N(s)} var(--tw-gradient-to-position)`})},n),e({to:s=>({"--tw-gradient-to-position":s})},a)}})(),boxDecorationBreak:({addUtilities:i})=>{i({".decoration-slice":{"box-decoration-break":"slice"},".decoration-clone":{"box-decoration-break":"clone"},".box-decoration-slice":{"box-decoration-break":"slice"},".box-decoration-clone":{"box-decoration-break":"clone"}})},backgroundSize:P("backgroundSize",[["bg",["background-size"]]],{type:["lookup","length","percentage","size"]}),backgroundAttachment:({addUtilities:i})=>{i({".bg-fixed":{"background-attachment":"fixed"},".bg-local":{"background-attachment":"local"},".bg-scroll":{"background-attachment":"scroll"}})},backgroundClip:({addUtilities:i})=>{i({".bg-clip-border":{"background-clip":"border-box"},".bg-clip-padding":{"background-clip":"padding-box"},".bg-clip-content":{"background-clip":"content-box"},".bg-clip-text":{"background-clip":"text"}})},backgroundPosition:P("backgroundPosition",[["bg",["background-position"]]],{type:["lookup",["position",{preferOnConflict:!0}]]}),backgroundRepeat:({addUtilities:i})=>{i({".bg-repeat":{"background-repeat":"repeat"},".bg-no-repeat":{"background-repeat":"no-repeat"},".bg-repeat-x":{"background-repeat":"repeat-x"},".bg-repeat-y":{"background-repeat":"repeat-y"},".bg-repeat-round":{"background-repeat":"round"},".bg-repeat-space":{"background-repeat":"space"}})},backgroundOrigin:({addUtilities:i})=>{i({".bg-origin-border":{"background-origin":"border-box"},".bg-origin-padding":{"background-origin":"padding-box"},".bg-origin-content":{"background-origin":"content-box"}})},fill:({matchUtilities:i,theme:e})=>{i({fill:t=>({fill:N(t)})},{values:re(e("fill")),type:["color","any"]})},stroke:({matchUtilities:i,theme:e})=>{i({stroke:t=>({stroke:N(t)})},{values:re(e("stroke")),type:["color","url","any"]})},strokeWidth:P("strokeWidth",[["stroke",["stroke-width"]]],{type:["length","number","percentage"]}),objectFit:({addUtilities:i})=>{i({".object-contain":{"object-fit":"contain"},".object-cover":{"object-fit":"cover"},".object-fill":{"object-fit":"fill"},".object-none":{"object-fit":"none"},".object-scale-down":{"object-fit":"scale-down"}})},objectPosition:P("objectPosition",[["object",["object-position"]]]),padding:P("padding",[["p",["padding"]],[["px",["padding-left","padding-right"]],["py",["padding-top","padding-bottom"]]],[["ps",["padding-inline-start"]],["pe",["padding-inline-end"]],["pt",["padding-top"]],["pr",["padding-right"]],["pb",["padding-bottom"]],["pl",["paddin
g-left"]]]]),textAlign:({addUtilities:i})=>{i({".text-left":{"text-align":"left"},".text-center":{"text-align":"center"},".text-right":{"text-align":"right"},".text-justify":{"text-align":"justify"},".text-start":{"text-align":"start"},".text-end":{"text-align":"end"}})},textIndent:P("textIndent",[["indent",["text-indent"]]],{supportsNegativeValues:!0}),verticalAlign:({addUtilities:i,matchUtilities:e})=>{i({".align-baseline":{"vertical-align":"baseline"},".align-top":{"vertical-align":"top"},".align-middle":{"vertical-align":"middle"},".align-bottom":{"vertical-align":"bottom"},".align-text-top":{"vertical-align":"text-top"},".align-text-bottom":{"vertical-align":"text-bottom"},".align-sub":{"vertical-align":"sub"},".align-super":{"vertical-align":"super"}}),e({align:t=>({"vertical-align":t})})},fontFamily:({matchUtilities:i,theme:e})=>{i({font:t=>{let[r,n={}]=Array.isArray(t)&&ie(t[1])?t:[t],{fontFeatureSettings:a,fontVariationSettings:s}=n;return{"font-family":Array.isArray(r)?r.join(", "):r,...a===void 0?{}:{"font-feature-settings":a},...s===void 0?{}:{"font-variation-settings":s}}}},{values:e("fontFamily"),type:["lookup","generic-name","family-name"]})},fontSize:({matchUtilities:i,theme:e})=>{i({text:(t,{modifier:r})=>{let[n,a]=Array.isArray(t)?t:[t];if(r)return{"font-size":n,"line-height":r};let{lineHeight:s,letterSpacing:o,fontWeight:u}=ie(a)?a:{lineHeight:a};return{"font-size":n,...s===void 0?{}:{"line-height":s},...o===void 0?{}:{"letter-spacing":o},...u===void 0?{}:{"font-weight":u}}}},{values:e("fontSize"),modifiers:e("lineHeight"),type:["absolute-size","relative-size","length","percentage"]})},fontWeight:P("fontWeight",[["font",["fontWeight"]]],{type:["lookup","number","any"]}),textTransform:({addUtilities:i})=>{i({".uppercase":{"text-transform":"uppercase"},".lowercase":{"text-transform":"lowercase"},".capitalize":{"text-transform":"capitalize"},".normal-case":{"text-transform":"none"}})},fontStyle:({addUtilities:i})=>{i({".italic":{"font-style":"italic"},".not-italic":{"font-style":"normal"}})},fontVariantNumeric:({addDefaults:i,addUtilities:e})=>{let t="var(--tw-ordinal) var(--tw-slashed-zero) var(--tw-numeric-figure) var(--tw-numeric-spacing) var(--tw-numeric-fraction)";i("font-variant-numeric",{"--tw-ordinal":" ","--tw-slashed-zero":" ","--tw-numeric-figure":" ","--tw-numeric-spacing":" ","--tw-numeric-fraction":" "}),e({".normal-nums":{"font-variant-numeric":"normal"},".ordinal":{"@defaults font-variant-numeric":{},"--tw-ordinal":"ordinal","font-variant-numeric":t},".slashed-zero":{"@defaults font-variant-numeric":{},"--tw-slashed-zero":"slashed-zero","font-variant-numeric":t},".lining-nums":{"@defaults font-variant-numeric":{},"--tw-numeric-figure":"lining-nums","font-variant-numeric":t},".oldstyle-nums":{"@defaults font-variant-numeric":{},"--tw-numeric-figure":"oldstyle-nums","font-variant-numeric":t},".proportional-nums":{"@defaults font-variant-numeric":{},"--tw-numeric-spacing":"proportional-nums","font-variant-numeric":t},".tabular-nums":{"@defaults font-variant-numeric":{},"--tw-numeric-spacing":"tabular-nums","font-variant-numeric":t},".diagonal-fractions":{"@defaults font-variant-numeric":{},"--tw-numeric-fraction":"diagonal-fractions","font-variant-numeric":t},".stacked-fractions":{"@defaults 
font-variant-numeric":{},"--tw-numeric-fraction":"stacked-fractions","font-variant-numeric":t}})},lineHeight:P("lineHeight",[["leading",["lineHeight"]]]),letterSpacing:P("letterSpacing",[["tracking",["letterSpacing"]]],{supportsNegativeValues:!0}),textColor:({matchUtilities:i,theme:e,corePlugins:t})=>{i({text:r=>t("textOpacity")?se({color:r,property:"color",variable:"--tw-text-opacity"}):{color:N(r)}},{values:re(e("textColor")),type:["color","any"]})},textOpacity:P("textOpacity",[["text-opacity",["--tw-text-opacity"]]]),textDecoration:({addUtilities:i})=>{i({".underline":{"text-decoration-line":"underline"},".overline":{"text-decoration-line":"overline"},".line-through":{"text-decoration-line":"line-through"},".no-underline":{"text-decoration-line":"none"}})},textDecorationColor:({matchUtilities:i,theme:e})=>{i({decoration:t=>({"text-decoration-color":N(t)})},{values:re(e("textDecorationColor")),type:["color","any"]})},textDecorationStyle:({addUtilities:i})=>{i({".decoration-solid":{"text-decoration-style":"solid"},".decoration-double":{"text-decoration-style":"double"},".decoration-dotted":{"text-decoration-style":"dotted"},".decoration-dashed":{"text-decoration-style":"dashed"},".decoration-wavy":{"text-decoration-style":"wavy"}})},textDecorationThickness:P("textDecorationThickness",[["decoration",["text-decoration-thickness"]]],{type:["length","percentage"]}),textUnderlineOffset:P("textUnderlineOffset",[["underline-offset",["text-underline-offset"]]],{type:["length","percentage","any"]}),fontSmoothing:({addUtilities:i})=>{i({".antialiased":{"-webkit-font-smoothing":"antialiased","-moz-osx-font-smoothing":"grayscale"},".subpixel-antialiased":{"-webkit-font-smoothing":"auto","-moz-osx-font-smoothing":"auto"}})},placeholderColor:({matchUtilities:i,theme:e,corePlugins:t})=>{i({placeholder:r=>t("placeholderOpacity")?{"&::placeholder":se({color:r,property:"color",variable:"--tw-placeholder-opacity"})}:{"&::placeholder":{color:N(r)}}},{values:re(e("placeholderColor")),type:["color","any"]})},placeholderOpacity:({matchUtilities:i,theme:e})=>{i({"placeholder-opacity":t=>({["&::placeholder"]:{"--tw-placeholder-opacity":t}})},{values:e("placeholderOpacity")})},caretColor:({matchUtilities:i,theme:e})=>{i({caret:t=>({"caret-color":N(t)})},{values:re(e("caretColor")),type:["color","any"]})},accentColor:({matchUtilities:i,theme:e})=>{i({accent:t=>({"accent-color":N(t)})},{values:re(e("accentColor")),type:["color","any"]})},opacity:P("opacity",[["opacity",["opacity"]]]),backgroundBlendMode:({addUtilities:i})=>{i({".bg-blend-normal":{"background-blend-mode":"normal"},".bg-blend-multiply":{"background-blend-mode":"multiply"},".bg-blend-screen":{"background-blend-mode":"screen"},".bg-blend-overlay":{"background-blend-mode":"overlay"},".bg-blend-darken":{"background-blend-mode":"darken"},".bg-blend-lighten":{"background-blend-mode":"lighten"},".bg-blend-color-dodge":{"background-blend-mode":"color-dodge"},".bg-blend-color-burn":{"background-blend-mode":"color-burn"},".bg-blend-hard-light":{"background-blend-mode":"hard-light"},".bg-blend-soft-light":{"background-blend-mode":"soft-light"},".bg-blend-difference":{"background-blend-mode":"difference"},".bg-blend-exclusion":{"background-blend-mode":"exclusion"},".bg-blend-hue":{"background-blend-mode":"hue"},".bg-blend-saturation":{"background-blend-mode":"saturation"},".bg-blend-color":{"background-blend-mode":"color"},".bg-blend-luminosity":{"background-blend-mode":"luminosity"}})},mixBlendMode:({addUtilities:i})=>{i({".mix-blend-normal":{"mix-blend-mode":"no
rmal"},".mix-blend-multiply":{"mix-blend-mode":"multiply"},".mix-blend-screen":{"mix-blend-mode":"screen"},".mix-blend-overlay":{"mix-blend-mode":"overlay"},".mix-blend-darken":{"mix-blend-mode":"darken"},".mix-blend-lighten":{"mix-blend-mode":"lighten"},".mix-blend-color-dodge":{"mix-blend-mode":"color-dodge"},".mix-blend-color-burn":{"mix-blend-mode":"color-burn"},".mix-blend-hard-light":{"mix-blend-mode":"hard-light"},".mix-blend-soft-light":{"mix-blend-mode":"soft-light"},".mix-blend-difference":{"mix-blend-mode":"difference"},".mix-blend-exclusion":{"mix-blend-mode":"exclusion"},".mix-blend-hue":{"mix-blend-mode":"hue"},".mix-blend-saturation":{"mix-blend-mode":"saturation"},".mix-blend-color":{"mix-blend-mode":"color"},".mix-blend-luminosity":{"mix-blend-mode":"luminosity"},".mix-blend-plus-lighter":{"mix-blend-mode":"plus-lighter"}})},boxShadow:(()=>{let i=Ge("boxShadow"),e=["var(--tw-ring-offset-shadow, 0 0 #0000)","var(--tw-ring-shadow, 0 0 #0000)","var(--tw-shadow)"].join(", ");return function({matchUtilities:t,addDefaults:r,theme:n}){r(" box-shadow",{"--tw-ring-offset-shadow":"0 0 #0000","--tw-ring-shadow":"0 0 #0000","--tw-shadow":"0 0 #0000","--tw-shadow-colored":"0 0 #0000"}),t({shadow:a=>{a=i(a);let s=yi(a);for(let o of s)!o.valid||(o.color="var(--tw-shadow-color)");return{"@defaults box-shadow":{},"--tw-shadow":a==="none"?"0 0 #0000":a,"--tw-shadow-colored":a==="none"?"0 0 #0000":Iu(s),"box-shadow":e}}},{values:n("boxShadow"),type:["shadow"]})}})(),boxShadowColor:({matchUtilities:i,theme:e})=>{i({shadow:t=>({"--tw-shadow-color":N(t),"--tw-shadow":"var(--tw-shadow-colored)"})},{values:re(e("boxShadowColor")),type:["color","any"]})},outlineStyle:({addUtilities:i})=>{i({".outline-none":{outline:"2px solid transparent","outline-offset":"2px"},".outline":{"outline-style":"solid"},".outline-dashed":{"outline-style":"dashed"},".outline-dotted":{"outline-style":"dotted"},".outline-double":{"outline-style":"double"}})},outlineWidth:P("outlineWidth",[["outline",["outline-width"]]],{type:["length","number","percentage"]}),outlineOffset:P("outlineOffset",[["outline-offset",["outline-offset"]]],{type:["length","number","percentage","any"],supportsNegativeValues:!0}),outlineColor:({matchUtilities:i,theme:e})=>{i({outline:t=>({"outline-color":N(t)})},{values:re(e("outlineColor")),type:["color","any"]})},ringWidth:({matchUtilities:i,addDefaults:e,addUtilities:t,theme:r,config:n})=>{let a=(()=>{if(J(n(),"respectDefaultRingColorOpacity"))return r("ringColor.DEFAULT");let s=r("ringOpacity.DEFAULT","0.5");return r("ringColor")?.DEFAULT?Ie(r("ringColor")?.DEFAULT,s,`rgb(147 197 253 / ${s})`):`rgb(147 197 253 / ${s})`})();e("ring-width",{"--tw-ring-inset":" ","--tw-ring-offset-width":r("ringOffsetWidth.DEFAULT","0px"),"--tw-ring-offset-color":r("ringOffsetColor.DEFAULT","#fff"),"--tw-ring-color":a,"--tw-ring-offset-shadow":"0 0 #0000","--tw-ring-shadow":"0 0 #0000","--tw-shadow":"0 0 #0000","--tw-shadow-colored":"0 0 #0000"}),i({ring:s=>({"@defaults ring-width":{},"--tw-ring-offset-shadow":"var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color)","--tw-ring-shadow":`var(--tw-ring-inset) 0 0 0 calc(${s} + var(--tw-ring-offset-width)) var(--tw-ring-color)`,"box-shadow":["var(--tw-ring-offset-shadow)","var(--tw-ring-shadow)","var(--tw-shadow, 0 0 #0000)"].join(", ")})},{values:r("ringWidth"),type:"length"}),t({".ring-inset":{"@defaults 
ring-width":{},"--tw-ring-inset":"inset"}})},ringColor:({matchUtilities:i,theme:e,corePlugins:t})=>{i({ring:r=>t("ringOpacity")?se({color:r,property:"--tw-ring-color",variable:"--tw-ring-opacity"}):{"--tw-ring-color":N(r)}},{values:Object.fromEntries(Object.entries(re(e("ringColor"))).filter(([r])=>r!=="DEFAULT")),type:["color","any"]})},ringOpacity:i=>{let{config:e}=i;return P("ringOpacity",[["ring-opacity",["--tw-ring-opacity"]]],{filterDefault:!J(e(),"respectDefaultRingColorOpacity")})(i)},ringOffsetWidth:P("ringOffsetWidth",[["ring-offset",["--tw-ring-offset-width"]]],{type:"length"}),ringOffsetColor:({matchUtilities:i,theme:e})=>{i({"ring-offset":t=>({"--tw-ring-offset-color":N(t)})},{values:re(e("ringOffsetColor")),type:["color","any"]})},blur:({matchUtilities:i,theme:e})=>{i({blur:t=>({"--tw-blur":`blur(${t})`,"@defaults filter":{},filter:Be})},{values:e("blur")})},brightness:({matchUtilities:i,theme:e})=>{i({brightness:t=>({"--tw-brightness":`brightness(${t})`,"@defaults filter":{},filter:Be})},{values:e("brightness")})},contrast:({matchUtilities:i,theme:e})=>{i({contrast:t=>({"--tw-contrast":`contrast(${t})`,"@defaults filter":{},filter:Be})},{values:e("contrast")})},dropShadow:({matchUtilities:i,theme:e})=>{i({"drop-shadow":t=>({"--tw-drop-shadow":Array.isArray(t)?t.map(r=>`drop-shadow(${r})`).join(" "):`drop-shadow(${t})`,"@defaults filter":{},filter:Be})},{values:e("dropShadow")})},grayscale:({matchUtilities:i,theme:e})=>{i({grayscale:t=>({"--tw-grayscale":`grayscale(${t})`,"@defaults filter":{},filter:Be})},{values:e("grayscale")})},hueRotate:({matchUtilities:i,theme:e})=>{i({"hue-rotate":t=>({"--tw-hue-rotate":`hue-rotate(${t})`,"@defaults filter":{},filter:Be})},{values:e("hueRotate"),supportsNegativeValues:!0})},invert:({matchUtilities:i,theme:e})=>{i({invert:t=>({"--tw-invert":`invert(${t})`,"@defaults filter":{},filter:Be})},{values:e("invert")})},saturate:({matchUtilities:i,theme:e})=>{i({saturate:t=>({"--tw-saturate":`saturate(${t})`,"@defaults filter":{},filter:Be})},{values:e("saturate")})},sepia:({matchUtilities:i,theme:e})=>{i({sepia:t=>({"--tw-sepia":`sepia(${t})`,"@defaults filter":{},filter:Be})},{values:e("sepia")})},filter:({addDefaults:i,addUtilities:e})=>{i("filter",{"--tw-blur":" ","--tw-brightness":" ","--tw-contrast":" ","--tw-grayscale":" ","--tw-hue-rotate":" ","--tw-invert":" ","--tw-saturate":" ","--tw-sepia":" ","--tw-drop-shadow":" "}),e({".filter":{"@defaults filter":{},filter:Be},".filter-none":{filter:"none"}})},backdropBlur:({matchUtilities:i,theme:e})=>{i({"backdrop-blur":t=>({"--tw-backdrop-blur":`blur(${t})`,"@defaults backdrop-filter":{},"backdrop-filter":Fe})},{values:e("backdropBlur")})},backdropBrightness:({matchUtilities:i,theme:e})=>{i({"backdrop-brightness":t=>({"--tw-backdrop-brightness":`brightness(${t})`,"@defaults backdrop-filter":{},"backdrop-filter":Fe})},{values:e("backdropBrightness")})},backdropContrast:({matchUtilities:i,theme:e})=>{i({"backdrop-contrast":t=>({"--tw-backdrop-contrast":`contrast(${t})`,"@defaults backdrop-filter":{},"backdrop-filter":Fe})},{values:e("backdropContrast")})},backdropGrayscale:({matchUtilities:i,theme:e})=>{i({"backdrop-grayscale":t=>({"--tw-backdrop-grayscale":`grayscale(${t})`,"@defaults backdrop-filter":{},"backdrop-filter":Fe})},{values:e("backdropGrayscale")})},backdropHueRotate:({matchUtilities:i,theme:e})=>{i({"backdrop-hue-rotate":t=>({"--tw-backdrop-hue-rotate":`hue-rotate(${t})`,"@defaults 
backdrop-filter":{},"backdrop-filter":Fe})},{values:e("backdropHueRotate"),supportsNegativeValues:!0})},backdropInvert:({matchUtilities:i,theme:e})=>{i({"backdrop-invert":t=>({"--tw-backdrop-invert":`invert(${t})`,"@defaults backdrop-filter":{},"backdrop-filter":Fe})},{values:e("backdropInvert")})},backdropOpacity:({matchUtilities:i,theme:e})=>{i({"backdrop-opacity":t=>({"--tw-backdrop-opacity":`opacity(${t})`,"@defaults backdrop-filter":{},"backdrop-filter":Fe})},{values:e("backdropOpacity")})},backdropSaturate:({matchUtilities:i,theme:e})=>{i({"backdrop-saturate":t=>({"--tw-backdrop-saturate":`saturate(${t})`,"@defaults backdrop-filter":{},"backdrop-filter":Fe})},{values:e("backdropSaturate")})},backdropSepia:({matchUtilities:i,theme:e})=>{i({"backdrop-sepia":t=>({"--tw-backdrop-sepia":`sepia(${t})`,"@defaults backdrop-filter":{},"backdrop-filter":Fe})},{values:e("backdropSepia")})},backdropFilter:({addDefaults:i,addUtilities:e})=>{i("backdrop-filter",{"--tw-backdrop-blur":" ","--tw-backdrop-brightness":" ","--tw-backdrop-contrast":" ","--tw-backdrop-grayscale":" ","--tw-backdrop-hue-rotate":" ","--tw-backdrop-invert":" ","--tw-backdrop-opacity":" ","--tw-backdrop-saturate":" ","--tw-backdrop-sepia":" "}),e({".backdrop-filter":{"@defaults backdrop-filter":{},"backdrop-filter":Fe},".backdrop-filter-none":{"backdrop-filter":"none"}})},transitionProperty:({matchUtilities:i,theme:e})=>{let t=e("transitionTimingFunction.DEFAULT"),r=e("transitionDuration.DEFAULT");i({transition:n=>({"transition-property":n,...n==="none"?{}:{"transition-timing-function":t,"transition-duration":r}})},{values:e("transitionProperty")})},transitionDelay:P("transitionDelay",[["delay",["transitionDelay"]]]),transitionDuration:P("transitionDuration",[["duration",["transitionDuration"]]],{filterDefault:!0}),transitionTimingFunction:P("transitionTimingFunction",[["ease",["transitionTimingFunction"]]],{filterDefault:!0}),willChange:P("willChange",[["will-change",["will-change"]]]),content:P("content",[["content",["--tw-content",["content","var(--tw-content)"]]]])}});function bC(i){if(i===void 0)return!1;if(i==="true"||i==="1")return!0;if(i==="false"||i==="0")return!1;if(i==="*")return!0;let e=i.split(",").map(t=>t.split(":")[0]);return e.includes("-tailwindcss")?!1:!!e.includes("tailwindcss")}var Pe,wd,bd,gn,Xa,He,Kr,ot=C(()=>{l();Ya();Pe=typeof m!="undefined"?{NODE_ENV:"production",DEBUG:bC(m.env.DEBUG),ENGINE:Ja.tailwindcss.engine}:{NODE_ENV:"production",DEBUG:!1,ENGINE:Ja.tailwindcss.engine},wd=new Map,bd=new Map,gn=new Map,Xa=new Map,He=new String("*"),Kr=Symbol("__NONE__")});function Bt(i){let e=[],t=!1;for(let r=0;r0)}var vd,xd,vC,Ka=C(()=>{l();vd=new Map([["{","}"],["[","]"],["(",")"]]),xd=new Map(Array.from(vd.entries()).map(([i,e])=>[e,i])),vC=new Set(['"',"'","`"])});function Ft(i){let[e]=kd(i);return e.forEach(([t,r])=>t.removeChild(r)),i.nodes.push(...e.map(([,t])=>t)),i}function kd(i){let e=[],t=null;for(let r of i.nodes)if(r.type==="combinator")e=e.filter(([,n])=>eo(n).includes("jumpable")),t=null;else if(r.type==="pseudo"){xC(r)?(t=r,e.push([i,r,null])):t&&kC(r,t)?e.push([i,r,t]):t=null;for(let n of r.nodes??[]){let[a,s]=kd(n);t=s||t,e.push(...a)}}return[e,t]}function Sd(i){return i.value.startsWith("::")||Za[i.value]!==void 0}function xC(i){return Sd(i)&&eo(i).includes("terminal")}function kC(i,e){return i.type!=="pseudo"||Sd(i)?!1:eo(e).includes("actionable")}function eo(i){return Za[i.value]??Za.__default__}var 
Za,yn=C(()=>{l();Za={"::after":["terminal","jumpable"],"::backdrop":["terminal","jumpable"],"::before":["terminal","jumpable"],"::cue":["terminal"],"::cue-region":["terminal"],"::first-letter":["terminal","jumpable"],"::first-line":["terminal","jumpable"],"::grammar-error":["terminal"],"::marker":["terminal","jumpable"],"::part":["terminal","actionable"],"::placeholder":["terminal","jumpable"],"::selection":["terminal","jumpable"],"::slotted":["terminal"],"::spelling-error":["terminal"],"::target-text":["terminal"],"::file-selector-button":["terminal","actionable"],"::deep":["actionable"],"::v-deep":["actionable"],"::ng-deep":["actionable"],":after":["terminal","jumpable"],":before":["terminal","jumpable"],":first-letter":["terminal","jumpable"],":first-line":["terminal","jumpable"],__default__:["terminal","actionable"]}});function Nt(i,{context:e,candidate:t}){let r=e?.tailwindConfig.prefix??"",n=i.map(s=>{let o=(0,Ne.default)().astSync(s.format);return{...s,ast:s.respectPrefix?Rt(r,o):o}}),a=Ne.default.root({nodes:[Ne.default.selector({nodes:[Ne.default.className({value:ce(t)})]})]});for(let{ast:s}of n)[a,s]=CC(a,s),s.walkNesting(o=>o.replaceWith(...a.nodes[0].nodes)),a=s;return a}function Ad(i){let e=[];for(;i.prev()&&i.prev().type!=="combinator";)i=i.prev();for(;i&&i.type!=="combinator";)e.push(i),i=i.next();return e}function SC(i){return i.sort((e,t)=>e.type==="tag"&&t.type==="class"?-1:e.type==="class"&&t.type==="tag"?1:e.type==="class"&&t.type==="pseudo"&&t.value.startsWith("::")?-1:e.type==="pseudo"&&e.value.startsWith("::")&&t.type==="class"?1:i.index(e)-i.index(t)),i}function ro(i,e){let t=!1;i.walk(r=>{if(r.type==="class"&&r.value===e)return t=!0,!1}),t||i.remove()}function wn(i,e,{context:t,candidate:r,base:n}){let a=t?.tailwindConfig?.separator??":";n=n??r.split(new RegExp(`\\${a}(?![^[]*\\])`)).pop();let s=(0,Ne.default)().astSync(i);s.walkClasses(f=>{f.raws&&f.value.includes(n)&&(f.raws.value=ce((0,Cd.default)(f.raws.value)))}),s.each(f=>ro(f,n));let o=Array.isArray(e)?Nt(e,{context:t,candidate:r}):e;if(o===null)return s.toString();let u=Ne.default.comment({value:"/*__simple__*/"}),c=Ne.default.comment({value:"/*__simple__*/"});return s.walkClasses(f=>{if(f.value!==n)return;let p=f.parent,d=o.nodes[0].nodes;if(p.nodes.length===1){f.replaceWith(...d);return}let h=Ad(f);p.insertBefore(h[0],u),p.insertAfter(h[h.length-1],c);for(let x of d)p.insertBefore(h[0],x.clone());f.remove(),h=Ad(u);let y=p.index(u);p.nodes.splice(y,h.length,...SC(Ne.default.selector({nodes:h})).nodes),u.remove(),c.remove()}),s.walkPseudos(f=>{f.value===to&&f.replaceWith(f.nodes)}),s.each(f=>Ft(f)),s.toString()}function CC(i,e){let t=[];return i.walkPseudos(r=>{r.value===to&&t.push({pseudo:r,value:r.nodes[0].toString()})}),e.walkPseudos(r=>{if(r.value!==to)return;let n=r.nodes[0].toString(),a=t.find(c=>c.value===n);if(!a)return;let s=[],o=r.next();for(;o&&o.type!=="combinator";)s.push(o),o=o.next();let u=o;a.pseudo.parent.insertAfter(a.pseudo,Ne.default.selector({nodes:s.map(c=>c.clone())})),r.remove(),s.forEach(c=>c.remove()),u&&u.type==="combinator"&&u.remove()}),[i,e]}var Ne,Cd,to,io=C(()=>{l();Ne=K(Me()),Cd=K(Yi());Mt();un();yn();to=":merge"});function bn(i,e){let t=(0,no.default)().astSync(i);return t.each(r=>{r.nodes[0].type==="pseudo"&&r.nodes[0].value===":is"&&r.nodes.every(a=>a.type!=="combinator")||(r.nodes=[no.default.pseudo({value:":is",nodes:[r.clone()]})]),Ft(r)}),`${e} ${t.toString()}`}var no,so=C(()=>{l();no=K(Me());yn()});function ao(i){return AC.transformSync(i)}function*_C(i){let 
e=1/0;for(;e>=0;){let t,r=!1;if(e===1/0&&i.endsWith("]")){let s=i.indexOf("[");i[s-1]==="-"?t=s-1:i[s-1]==="/"?(t=s-1,r=!0):t=-1}else e===1/0&&i.includes("/")?(t=i.lastIndexOf("/"),r=!0):t=i.lastIndexOf("-",e);if(t<0)break;let n=i.slice(0,t),a=i.slice(r?t:t+1);e=t-1,!(n===""||a==="/")&&(yield[n,a])}}function EC(i,e){if(i.length===0||e.tailwindConfig.prefix==="")return i;for(let t of i){let[r]=t;if(r.options.respectPrefix){let n=z.root({nodes:[t[1].clone()]}),a=t[1].raws.tailwind.classCandidate;n.walkRules(s=>{let o=a.startsWith("-");s.selector=Rt(e.tailwindConfig.prefix,s.selector,o)}),t[1]=n.nodes[0]}}return i}function OC(i,e){if(i.length===0)return i;let t=[];for(let[r,n]of i){let a=z.root({nodes:[n.clone()]});a.walkRules(s=>{let o=(0,vn.default)().astSync(s.selector);o.each(u=>ro(u,e)),Wu(o,u=>u===e?`!${u}`:u),s.selector=o.toString(),s.walkDecls(u=>u.important=!0)}),t.push([{...r,important:!0},a.nodes[0]])}return t}function TC(i,e,t){if(e.length===0)return e;let r={modifier:null,value:Kr};{let[n,...a]=le(i,"/");if(a.length>1&&(n=n+"/"+a.slice(0,-1).join("/"),a=a.slice(-1)),a.length&&!t.variantMap.has(i)&&(i=n,r.modifier=a[0],!J(t.tailwindConfig,"generalizedModifiers")))return[]}if(i.endsWith("]")&&!i.startsWith("[")){let n=/(.)(-?)\[(.*)\]/g.exec(i);if(n){let[,a,s,o]=n;if(a==="@"&&s==="-")return[];if(a!=="@"&&s==="")return[];i=i.replace(`${s}[${o}]`,""),r.value=o}}if(lo(i)&&!t.variantMap.has(i)){let n=t.offsets.recordVariant(i),a=U(i.slice(1,-1)),s=le(a,",");if(s.length>1)return[];if(!s.every(An))return[];let o=s.map((u,c)=>[t.offsets.applyParallelOffset(n,c),Zr(u.trim())]);t.variantMap.set(i,o)}if(t.variantMap.has(i)){let n=lo(i),a=t.variantOptions.get(i)?.[Jr]??{},s=t.variantMap.get(i).slice(),o=[],u=(()=>!(n||a.respectPrefix===!1))();for(let[c,f]of e){if(c.layer==="user")continue;let p=z.root({nodes:[f.clone()]});for(let[d,h,y]of s){let b=function(){x.raws.neededBackup||(x.raws.neededBackup=!0,x.walkRules(E=>E.raws.originalSelector=E.selector))},k=function(E){return b(),x.each(I=>{I.type==="rule"&&(I.selectors=I.selectors.map(B=>E({get className(){return ao(B)},selector:B})))}),x},x=(y??p).clone(),w=[],S=h({get container(){return b(),x},separator:t.tailwindConfig.separator,modifySelectors:k,wrap(E){let I=x.nodes;x.removeAll(),E.append(I),x.append(E)},format(E){w.push({format:E,respectPrefix:u})},args:r});if(Array.isArray(S)){for(let[E,I]of S.entries())s.push([t.offsets.applyParallelOffset(d,E),I,x.clone()]);continue}if(typeof S=="string"&&w.push({format:S,respectPrefix:u}),S===null)continue;x.raws.neededBackup&&(delete x.raws.neededBackup,x.walkRules(E=>{let I=E.raws.originalSelector;if(!I||(delete E.raws.originalSelector,I===E.selector))return;let B=E.selector,q=(0,vn.default)(X=>{X.walkClasses(ae=>{ae.value=`${i}${t.tailwindConfig.separator}${ae.value}`})}).processSync(I);w.push({format:B.replace(q,"&"),respectPrefix:u}),E.selector=I})),x.nodes[0].raws.tailwind={...x.nodes[0].raws.tailwind,parentLayer:c.layer};let _=[{...c,sort:t.offsets.applyVariantOffset(c.sort,d,Object.assign(r,t.variantOptions.get(i))),collectedFormats:(c.collectedFormats??[]).concat(w)},x.nodes[0]];o.push(_)}}return o}return[]}function oo(i,e,t={}){return!ie(i)&&!Array.isArray(i)?[[i],t]:Array.isArray(i)?oo(i[0],e,i[1]):(e.has(i)||e.set(i,qt(i)),[e.get(i),t])}function DC(i){return PC.test(i)}function IC(i){if(!i.includes("://"))return!1;try{let e=new URL(i);return e.scheme!==""&&e.host!==""}catch(e){return!1}}function _d(i){let e=!0;return i.walkDecls(t=>{if(!Ed(t.prop,t.value))return e=!1,!1}),e}function 
Ed(i,e){if(IC(`${i}:${e}`))return!1;try{return z.parse(`a{${i}:${e}}`).toResult(),!0}catch(t){return!1}}function qC(i,e){let[,t,r]=i.match(/^\[([a-zA-Z0-9-_]+):(\S+)\]$/)??[];if(r===void 0||!DC(t)||!Bt(r))return null;let n=U(r);return Ed(t,n)?[[{sort:e.offsets.arbitraryProperty(),layer:"utilities"},()=>({[Wa(i)]:{[t]:n}})]]:null}function*RC(i,e){e.candidateRuleMap.has(i)&&(yield[e.candidateRuleMap.get(i),"DEFAULT"]),yield*function*(o){o!==null&&(yield[o,"DEFAULT"])}(qC(i,e));let t=i,r=!1,n=e.tailwindConfig.prefix,a=n.length,s=t.startsWith(n)||t.startsWith(`-${n}`);t[a]==="-"&&s&&(r=!0,t=n+t.slice(a+1)),r&&e.candidateRuleMap.has(t)&&(yield[e.candidateRuleMap.get(t),"-DEFAULT"]);for(let[o,u]of _C(t))e.candidateRuleMap.has(o)&&(yield[e.candidateRuleMap.get(o),r?`-${u}`:u])}function MC(i,e){return i===He?[He]:le(i,e)}function*BC(i,e){for(let t of i)t[1].raws.tailwind={...t[1].raws.tailwind,classCandidate:e,preserveSource:t[0].options?.preserveSource??!1},yield t}function*xn(i,e,t=i){let r=e.tailwindConfig.separator,[n,...a]=MC(i,r).reverse(),s=!1;if(n.startsWith("!")&&(s=!0,n=n.slice(1)),J(e.tailwindConfig,"variantGrouping")&&n.startsWith("(")&&n.endsWith(")")){let o=a.slice().reverse().join(r);for(let u of le(n.slice(1,-1),","))yield*xn(o+r+u,e,t)}for(let o of RC(n,e)){let u=[],c=new Map,[f,p]=o,d=f.length===1;for(let[h,y]of f){let x=[];if(typeof y=="function")for(let w of[].concat(y(p,{isOnlyPlugin:d}))){let[b,k]=oo(w,e.postCssNodeCache);for(let S of b)x.push([{...h,options:{...h.options,...k}},S])}else if(p==="DEFAULT"||p==="-DEFAULT"){let w=y,[b,k]=oo(w,e.postCssNodeCache);for(let S of b)x.push([{...h,options:{...h.options,...k}},S])}if(x.length>0){let w=Array.from(ps(h.options?.types??[],p,h.options??{},e.tailwindConfig)).map(([b,k])=>k);w.length>0&&c.set(x,w),u.push(x)}}if(lo(p)){if(u.length>1){let x=function(b){return b.length===1?b[0]:b.find(k=>{let S=c.get(k);return k.some(([{options:_},E])=>_d(E)?_.types.some(({type:I,preferOnConflict:B})=>S.includes(I)&&B):!1)})},[h,y]=u.reduce((b,k)=>(k.some(([{options:_}])=>_.types.some(({type:E})=>E==="any"))?b[0].push(k):b[1].push(k),b),[[],[]]),w=x(y)??x(h);if(w)u=[w];else{let b=u.map(S=>new Set([...c.get(S)??[]]));for(let S of b)for(let _ of S){let E=!1;for(let I of b)S!==I&&I.has(_)&&(I.delete(_),E=!0);E&&S.delete(_)}let k=[];for(let[S,_]of b.entries())for(let E of _){let I=u[S].map(([,B])=>B).flat().map(B=>B.toString().split(` +`).slice(1,-1).map(q=>q.trim()).map(q=>` ${q}`).join(` +`)).join(` + +`);k.push(` Use \`${i.replace("[",`[${E}:`)}\` for \`${I.trim()}\``);break}F.warn([`The class \`${i}\` is ambiguous and matches multiple utilities.`,...k,`If this is content and not a class, replace it with \`${i.replace("[","[").replace("]","]")}\` to silence this warning.`]);continue}}u=u.map(h=>h.filter(y=>_d(y[1])))}u=u.flat(),u=Array.from(BC(u,n)),u=EC(u,e),s&&(u=OC(u,n));for(let h of a)u=TC(h,u,e);for(let h of u)h[1].raws.tailwind={...h[1].raws.tailwind,candidate:i},h=FC(h,{context:e,candidate:i,original:t}),h!==null&&(yield h)}}function FC(i,{context:e,candidate:t,original:r}){if(!i[0].collectedFormats)return i;let n=!0,a;try{a=Nt(i[0].collectedFormats,{context:e,candidate:t})}catch{return null}let s=z.root({nodes:[i[1].clone()]});return s.walkRules(o=>{if(!kn(o))try{o.selector=wn(o.selector,a,{candidate:r,context:e})}catch{return n=!1,!1}}),n?(i[1]=s.nodes[0],i):null}function kn(i){return i.parent&&i.parent.type==="atrule"&&i.parent.name==="keyframes"}function NC(i){if(i===!0)return 
e=>{kn(e)||e.walkDecls(t=>{t.parent.type==="rule"&&!kn(t.parent)&&(t.important=!0)})};if(typeof i=="string")return e=>{kn(e)||(e.selectors=e.selectors.map(t=>bn(t,i)))}}function Sn(i,e){let t=[],r=NC(e.tailwindConfig.important);for(let n of i){if(e.notClassCache.has(n))continue;if(e.candidateRuleCache.has(n)){t=t.concat(Array.from(e.candidateRuleCache.get(n)));continue}let a=Array.from(xn(n,e));if(a.length===0){e.notClassCache.add(n);continue}e.classCache.set(n,a);let s=e.candidateRuleCache.get(n)??new Set;e.candidateRuleCache.set(n,s);for(let o of a){let[{sort:u,options:c},f]=o;if(c.respectImportant&&r){let d=z.root({nodes:[f.clone()]});d.walkRules(r),f=d.nodes[0]}let p=[u,f];s.add(p),e.ruleCache.add(p),t.push(p)}}return t}function lo(i){return i.startsWith("[")&&i.endsWith("]")}var vn,AC,PC,Cn=C(()=>{l();nt();vn=K(Me());Ua();xt();un();cr();Ee();ot();io();Ga();fr();Xr();Ka();or();De();so();AC=(0,vn.default)(i=>i.first.filter(({type:e})=>e==="class").pop().value);PC=/^[a-z_-]/});var Od,Td=C(()=>{l();Od={}});function LC(i){try{return Od.createHash("md5").update(i,"utf-8").digest("binary")}catch(e){return""}}function Pd(i,e){let t=e.toString();if(!t.includes("@tailwind"))return!1;let r=Xa.get(i),n=LC(t),a=r!==n;return Xa.set(i,n),a}var Dd=C(()=>{l();Td();ot()});function _n(i){return(i>0n)-(i<0n)}var Id=C(()=>{l()});function qd(i,e){let t=0n,r=0n;for(let[n,a]of e)i&n&&(t=t|n,r=r|a);return i&~t|r}var Rd=C(()=>{l()});function Md(i){let e=null;for(let t of i)e=e??t,e=e>t?e:t;return e}function $C(i,e){let t=i.length,r=e.length,n=t{l();Id();Rd();uo=class{constructor(){this.offsets={defaults:0n,base:0n,components:0n,utilities:0n,variants:0n,user:0n},this.layerPositions={defaults:0n,base:1n,components:2n,utilities:3n,user:4n,variants:5n},this.reservedVariantBits=0n,this.variantOffsets=new Map}create(e){return{layer:e,parentLayer:e,arbitrary:0n,variants:0n,parallelIndex:0n,index:this.offsets[e]++,options:[]}}arbitraryProperty(){return{...this.create("utilities"),arbitrary:1n}}forVariant(e,t=0){let r=this.variantOffsets.get(e);if(r===void 0)throw new Error(`Cannot find offset for unknown variant ${e}`);return{...this.create("variants"),variants:r<n.startsWith("[")).sort(([n],[a])=>$C(n,a)),t=e.map(([,n])=>n).sort((n,a)=>_n(n-a));return e.map(([,n],a)=>[n,t[a]]).filter(([n,a])=>n!==a)}remapArbitraryVariantOffsets(e){let t=this.recalculateVariantOffsets();return t.length===0?e:e.map(r=>{let[n,a]=r;return n={...n,variants:qd(n.variants,t)},[n,a]})}sort(e){return e=this.remapArbitraryVariantOffsets(e),e.sort(([t],[r])=>_n(this.compare(t,r)))}}});function ho(i,e){let t=i.tailwindConfig.prefix;return typeof t=="function"?t(e):t+e}function Nd({type:i="any",...e}){let t=[].concat(i);return{...e,types:t.map(r=>Array.isArray(r)?{type:r[0],...r[1]}:{type:r,preferOnConflict:!1})}}function jC(i){let e=[],t="",r=0;for(let n=0;n0&&e.push(t.trim()),e=e.filter(n=>n!==""),e}function zC(i,e,{before:t=[]}={}){if(t=[].concat(t),t.length<=0){i.push(e);return}let r=i.length-1;for(let n of t){let a=i.indexOf(n);a!==-1&&(r=Math.min(r,a))}i.splice(r,0,e)}function Ld(i){return Array.isArray(i)?i.flatMap(e=>!Array.isArray(e)&&!ie(e)?e:qt(e)):Ld([i])}function $d(i,e){return(0,fo.default)(r=>{let n=[];return e&&e(r),r.walkClasses(a=>{n.push(a.value)}),n}).transformSync(i)}function VC(i,e={containsNonOnDemandable:!1},t=0){let r=[];if(i.type==="rule"){let n=function(a){a.walkPseudos(s=>{s.value===":not"&&s.remove()})};for(let a of i.selectors){let s=$d(a,n);s.length===0&&(e.containsNonOnDemandable=!0);for(let o of s)r.push(o)}}else 
i.type==="atrule"&&i.walkRules(n=>{for(let a of n.selectors.flatMap(s=>$d(s)))r.push(a)});return t===0?[e.containsNonOnDemandable||r.length===0,r]:r}function En(i){return Ld(i).flatMap(e=>{let t=new Map,[r,n]=VC(e);return r&&n.unshift(He),n.map(a=>(t.has(e)||t.set(e,e),[a,t.get(e)]))})}function An(i){return i.startsWith("@")||i.includes("&")}function Zr(i){i=i.replace(/\n+/g,"").replace(/\s{1,}/g," ").trim();let e=jC(i).map(t=>{if(!t.startsWith("@"))return({format:a})=>a(t);let[,r,n]=/@(\S*)( .+|[({].*)?/g.exec(t);return({wrap:a})=>a(z.atRule({name:r,params:n?.trim()??""}))}).reverse();return t=>{for(let r of e)r(t)}}function UC(i,e,{variantList:t,variantMap:r,offsets:n,classList:a}){function s(d,h){return d?(0,Fd.default)(i,d,h):i}function o(d){return Rt(i.prefix,d)}function u(d,h){return d===He?He:h.respectPrefix?e.tailwindConfig.prefix+d:d}function c(d,h,y={}){let x=Ke(d),w=s(["theme",...x],h);return Ge(x[0])(w,y)}let f=0,p={postcss:z,prefix:o,e:ce,config:s,theme:c,corePlugins:d=>Array.isArray(i.corePlugins)?i.corePlugins.includes(d):s(["corePlugins",d],!0),variants:()=>[],addBase(d){for(let[h,y]of En(d)){let x=u(h,{}),w=n.create("base");e.candidateRuleMap.has(x)||e.candidateRuleMap.set(x,[]),e.candidateRuleMap.get(x).push([{sort:w,layer:"base"},y])}},addDefaults(d,h){let y={[`@defaults ${d}`]:h};for(let[x,w]of En(y)){let b=u(x,{});e.candidateRuleMap.has(b)||e.candidateRuleMap.set(b,[]),e.candidateRuleMap.get(b).push([{sort:n.create("defaults"),layer:"defaults"},w])}},addComponents(d,h){h=Object.assign({},{preserveSource:!1,respectPrefix:!0,respectImportant:!1},Array.isArray(h)?{}:h);for(let[x,w]of En(d)){let b=u(x,h);a.add(b),e.candidateRuleMap.has(b)||e.candidateRuleMap.set(b,[]),e.candidateRuleMap.get(b).push([{sort:n.create("components"),layer:"components",options:h},w])}},addUtilities(d,h){h=Object.assign({},{preserveSource:!1,respectPrefix:!0,respectImportant:!0},Array.isArray(h)?{}:h);for(let[x,w]of En(d)){let b=u(x,h);a.add(b),e.candidateRuleMap.has(b)||e.candidateRuleMap.set(b,[]),e.candidateRuleMap.get(b).push([{sort:n.create("utilities"),layer:"utilities",options:h},w])}},matchUtilities:function(d,h){h=Nd({...{respectPrefix:!0,respectImportant:!0,modifiers:!1},...h});let x=n.create("utilities");for(let w in d){let S=function(E,{isOnlyPlugin:I}){let[B,q,X]=cs(h.types,E,h,i);if(B===void 0)return[];if(!h.types.some(({type:$})=>$===q))if(I)F.warn([`Unnecessary typehint \`${q}\` in \`${w}-${E}\`.`,`You can safely update it to \`${w}-${E.replace(q+":","")}\`.`]);else return[];if(!Bt(B))return[];let ae={get modifier(){return h.modifiers||F.warn(`modifier-used-without-options-for-${w}`,["Your plugin must set `modifiers: true` in its options to support modifiers."]),X}},ge=J(i,"generalizedModifiers");return[].concat(ge?k(B,ae):k(B)).filter(Boolean).map($=>({[fn(w,E)]:$}))},b=u(w,h),k=d[w];a.add([b,h]);let _=[{sort:x,layer:"utilities",options:h},S];e.candidateRuleMap.has(b)||e.candidateRuleMap.set(b,[]),e.candidateRuleMap.get(b).push(_)}},matchComponents:function(d,h){h=Nd({...{respectPrefix:!0,respectImportant:!1,modifiers:!1},...h});let x=n.create("components");for(let w in d){let S=function(E,{isOnlyPlugin:I}){let[B,q,X]=cs(h.types,E,h,i);if(B===void 0)return[];if(!h.types.some(({type:$})=>$===q))if(I)F.warn([`Unnecessary typehint \`${q}\` in \`${w}-${E}\`.`,`You can safely update it to \`${w}-${E.replace(q+":","")}\`.`]);else return[];if(!Bt(B))return[];let ae={get modifier(){return h.modifiers||F.warn(`modifier-used-without-options-for-${w}`,["Your plugin must set `modifiers: 
true` in its options to support modifiers."]),X}},ge=J(i,"generalizedModifiers");return[].concat(ge?k(B,ae):k(B)).filter(Boolean).map($=>({[fn(w,E)]:$}))},b=u(w,h),k=d[w];a.add([b,h]);let _=[{sort:x,layer:"components",options:h},S];e.candidateRuleMap.has(b)||e.candidateRuleMap.set(b,[]),e.candidateRuleMap.get(b).push(_)}},addVariant(d,h,y={}){h=[].concat(h).map(x=>{if(typeof x!="string")return(w={})=>{let{args:b,modifySelectors:k,container:S,separator:_,wrap:E,format:I}=w,B=x(Object.assign({modifySelectors:k,container:S,separator:_},y.type===co.MatchVariant&&{args:b,wrap:E,format:I}));if(typeof B=="string"&&!An(B))throw new Error(`Your custom variant \`${d}\` has an invalid format string. Make sure it's an at-rule or contains a \`&\` placeholder.`);return Array.isArray(B)?B.filter(q=>typeof q=="string").map(q=>Zr(q)):B&&typeof B=="string"&&Zr(B)(w)};if(!An(x))throw new Error(`Your custom variant \`${d}\` has an invalid format string. Make sure it's an at-rule or contains a \`&\` placeholder.`);return Zr(x)}),zC(t,d,y),r.set(d,h),e.variantOptions.set(d,y)},matchVariant(d,h,y){let x=y?.id??++f,w=d==="@",b=J(i,"generalizedModifiers");for(let[S,_]of Object.entries(y?.values??{}))S!=="DEFAULT"&&p.addVariant(w?`${d}${S}`:`${d}-${S}`,({args:E,container:I})=>h(_,b?{modifier:E?.modifier,container:I}:{container:I}),{...y,value:_,id:x,type:co.MatchVariant,variantInfo:po.Base});let k="DEFAULT"in(y?.values??{});p.addVariant(d,({args:S,container:_})=>S?.value===Kr&&!k?null:h(S?.value===Kr?y.values.DEFAULT:S?.value??(typeof S=="string"?S:""),b?{modifier:S?.modifier,container:_}:{container:_}),{...y,id:x,type:co.MatchVariant,variantInfo:po.Dynamic})}};return p}function On(i){return mo.has(i)||mo.set(i,new Map),mo.get(i)}function jd(i,e){let t=!1,r=new Map;for(let n of i){if(!n)continue;let a=ws.parse(n),s=a.hash?a.href.replace(a.hash,""):a.href;s=a.search?s.replace(a.search,""):s;let o=te.statSync(decodeURIComponent(s),{throwIfNoEntry:!1})?.mtimeMs;!o||((!e.has(n)||o>e.get(n))&&(t=!0),r.set(n,o))}return[t,r]}function zd(i){i.walkAtRules(e=>{["responsive","variants"].includes(e.name)&&(zd(e),e.before(e.nodes),e.remove())})}function WC(i){let e=[];return i.each(t=>{t.type==="atrule"&&["responsive","variants"].includes(t.name)&&(t.name="layer",t.params="utilities")}),i.walkAtRules("layer",t=>{if(zd(t),t.params==="base"){for(let r of t.nodes)e.push(function({addBase:n}){n(r,{respectPrefix:!1})});t.remove()}else if(t.params==="components"){for(let r of t.nodes)e.push(function({addComponents:n}){n(r,{respectPrefix:!1,preserveSource:!0})});t.remove()}else if(t.params==="utilities"){for(let r of t.nodes)e.push(function({addUtilities:n}){n(r,{respectPrefix:!1,preserveSource:!0})});t.remove()}}),e}function GC(i,e){let t=Object.entries({...pe,...gd}).map(([o,u])=>i.tailwindConfig.corePlugins.includes(o)?u:null).filter(Boolean),r=i.tailwindConfig.plugins.map(o=>(o.__isOptionsFunction&&(o=o()),typeof o=="function"?o:o.handler)),n=WC(e),a=[pe.pseudoElementVariants,pe.pseudoClassVariants,pe.ariaVariants,pe.dataVariants],s=[pe.supportsVariants,pe.directionVariants,pe.reducedMotionVariants,pe.prefersContrastVariants,pe.darkVariants,pe.printVariant,pe.screenVariants,pe.orientationVariants];return[...t,...a,...r,...s,...n]}function HC(i,e){let t=[],r=new Map;e.variantMap=r;let n=new uo;e.offsets=n;let a=new Set,s=UC(e.tailwindConfig,e,{variantList:t,variantMap:r,offsets:n,classList:a});for(let f of i)if(Array.isArray(f))for(let p of f)p(s);else f?.(s);n.recordVariants(t,f=>r.get(f).length);for(let[f,p]of 
r.entries())e.variantMap.set(f,p.map((d,h)=>[n.forVariant(f,h),d]));let o=(e.tailwindConfig.safelist??[]).filter(Boolean);if(o.length>0){let f=[];for(let p of o){if(typeof p=="string"){e.changedContent.push({content:p,extension:"html"});continue}if(p instanceof RegExp){F.warn("root-regex",["Regular expressions in `safelist` work differently in Tailwind CSS v3.0.","Update your `safelist` configuration to eliminate this warning.","https://tailwindcss.com/docs/content-configuration#safelisting-classes"]);continue}f.push(p)}if(f.length>0){let p=new Map,d=e.tailwindConfig.prefix.length,h=f.some(y=>y.pattern.source.includes("!"));for(let y of a){let x=Array.isArray(y)?(()=>{let[w,b]=y,S=Object.keys(b?.values??{}).map(_=>Qr(w,_));return b?.supportsNegativeValues&&(S=[...S,...S.map(_=>"-"+_)],S=[...S,...S.map(_=>_.slice(0,d)+"-"+_.slice(d))]),b.types.some(({type:_})=>_==="color")&&(S=[...S,...S.flatMap(_=>Object.keys(e.tailwindConfig.theme.opacity).map(E=>`${_}/${E}`))]),h&&b?.respectImportant&&(S=[...S,...S.map(_=>"!"+_)]),S})():[y];for(let w of x)for(let{pattern:b,variants:k=[]}of f)if(b.lastIndex=0,p.has(b)||p.set(b,0),!!b.test(w)){p.set(b,p.get(b)+1),e.changedContent.push({content:w,extension:"html"});for(let S of k)e.changedContent.push({content:S+e.tailwindConfig.separator+w,extension:"html"})}}for(let[y,x]of p.entries())x===0&&F.warn([`The safelist pattern \`${y}\` doesn't match any Tailwind CSS classes.`,"Fix this pattern or remove it from your `safelist` configuration.","https://tailwindcss.com/docs/content-configuration#safelisting-classes"])}}let u=[].concat(e.tailwindConfig.darkMode??"media")[1]??"dark",c=[ho(e,u),ho(e,"group"),ho(e,"peer")];e.getClassOrder=function(p){let d=[...p].sort((w,b)=>w===b?0:w[w,null])),y=Sn(new Set(d),e);y=e.offsets.sort(y);let x=BigInt(c.length);for(let[,w]of y){let b=w.raws.tailwind.candidate;h.set(b,h.get(b)??x++)}return p.map(w=>{let b=h.get(w)??null,k=c.indexOf(w);return b===null&&k!==-1&&(b=BigInt(k)),[w,b]})},e.getClassList=function(p={}){let d=[];for(let h of a)if(Array.isArray(h)){let[y,x]=h,w=[],b=Object.keys(x?.modifiers??{});x?.types?.some(({type:_})=>_==="color")&&b.push(...Object.keys(e.tailwindConfig.theme.opacity??{}));let k={modifiers:b},S=p.includeMetadata&&b.length>0;for(let[_,E]of Object.entries(x?.values??{})){if(E==null)continue;let I=Qr(y,_);if(d.push(S?[I,k]:I),x?.supportsNegativeValues&&Xe(E)){let B=Qr(y,`-${_}`);w.push(S?[B,k]:B)}}d.push(...w)}else d.push(h);return d},e.getVariants=function(){let p=[];for(let[d,h]of e.variantOptions.entries())h.variantInfo!==po.Base&&p.push({name:d,isArbitrary:h.type===Symbol.for("MATCH_VARIANT"),values:Object.keys(h.values??{}),hasDash:d!=="@",selectors({modifier:y,value:x}={}){let w="__TAILWIND_PLACEHOLDER__",b=z.rule({selector:`.${w}`}),k=z.root({nodes:[b.clone()]}),S=k.toString(),_=(e.variantMap.get(d)??[]).flatMap(([$,oe])=>oe),E=[];for(let $ of _){let oe=[],ai={args:{modifier:y,value:h.values?.[x]??x},separator:e.tailwindConfig.separator,modifySelectors(Ce){return k.each(Jn=>{Jn.type==="rule"&&(Jn.selectors=Jn.selectors.map(lu=>Ce({get className(){return ao(lu)},selector:lu})))}),k},format(Ce){oe.push(Ce)},wrap(Ce){oe.push(`@${Ce.name} ${Ce.params} { & }`)},container:k},oi=$(ai);if(oe.length>0&&E.push(oe),Array.isArray(oi))for(let Ce of oi)oe=[],Ce(ai),E.push(oe)}let I=[],B=k.toString();S!==B&&(k.walkRules($=>{let 
oe=$.selector,ai=(0,fo.default)(oi=>{oi.walkClasses(Ce=>{Ce.value=`${d}${e.tailwindConfig.separator}${Ce.value}`})}).processSync(oe);I.push(oe.replace(ai,"&").replace(w,"&"))}),k.walkAtRules($=>{I.push(`@${$.name} (${$.params}) { & }`)}));let q=!(x in(h.values??{})),X=h[Jr]??{},ae=(()=>!(q||X.respectPrefix===!1))();E=E.map($=>$.map(oe=>({format:oe,respectPrefix:ae}))),I=I.map($=>({format:$,respectPrefix:ae}));let ge={candidate:w,context:e},je=E.map($=>wn(`.${w}`,Nt($,ge),ge).replace(`.${w}`,"&").replace("{ & }","").trim());return I.length>0&&je.push(Nt(I,ge).toString().replace(`.${w}`,"&")),je}});return p}}function Vd(i,e){!i.classCache.has(e)||(i.notClassCache.add(e),i.classCache.delete(e),i.applyClassCache.delete(e),i.candidateRuleMap.delete(e),i.candidateRuleCache.delete(e),i.stylesheetCache=null)}function YC(i,e){let t=e.raws.tailwind.candidate;if(!!t){for(let r of i.ruleCache)r[1].raws.tailwind.candidate===t&&i.ruleCache.delete(r);Vd(i,t)}}function go(i,e=[],t=z.root()){let r={disposables:[],ruleCache:new Set,candidateRuleCache:new Map,classCache:new Map,applyClassCache:new Map,notClassCache:new Set(i.blocklist??[]),postCssNodeCache:new Map,candidateRuleMap:new Map,tailwindConfig:i,changedContent:e,variantMap:new Map,stylesheetCache:null,variantOptions:new Map,markInvalidUtilityCandidate:a=>Vd(r,a),markInvalidUtilityNode:a=>YC(r,a)},n=GC(r,t);return HC(n,r),r}function Ud(i,e,t,r,n,a){let s=e.opts.from,o=r!==null;Pe.DEBUG&&console.log("Source path:",s);let u;if(o&&Lt.has(s))u=Lt.get(s);else if(ei.has(n)){let d=ei.get(n);lt.get(d).add(s),Lt.set(s,d),u=d}let c=Pd(s,i);if(u){let[d,h]=jd([...a],On(u));if(!d&&!c)return[u,!1,h]}if(Lt.has(s)){let d=Lt.get(s);if(lt.has(d)&&(lt.get(d).delete(s),lt.get(d).size===0)){lt.delete(d);for(let[h,y]of ei)y===d&&ei.delete(h);for(let h of d.disposables.splice(0))h(d)}}Pe.DEBUG&&console.log("Setting up new context...");let f=go(t,[],i);Object.assign(f,{userConfigPath:r});let[,p]=jd([...a],On(f));return ei.set(n,f),Lt.set(s,f),lt.has(f)||lt.set(f,new Set),lt.get(f).add(s),[f,!0,p]}var Fd,fo,Jr,co,po,mo,Lt,ei,lt,Xr=C(()=>{l();ze();bs();nt();Fd=K(js()),fo=K(Me());Hr();Ua();un();xt();Mt();Ga();cr();yd();ot();ot();pi();Ee();fi();Ka();Cn();Dd();Bd();De();io();Jr=Symbol(),co={AddVariant:Symbol.for("ADD_VARIANT"),MatchVariant:Symbol.for("MATCH_VARIANT")},po={Base:1<<0,Dynamic:1<<1};mo=new WeakMap;Lt=wd,ei=bd,lt=gn});function yo(i){return i.ignore?[]:i.glob?m.env.ROLLUP_WATCH==="true"?[{type:"dependency",file:i.base}]:[{type:"dir-dependency",dir:i.base,glob:i.glob}]:[{type:"dependency",file:i.base}]}var Wd=C(()=>{l()});function Gd(i,e){return{handler:i,config:e}}var Hd,Yd=C(()=>{l();Gd.withOptions=function(i,e=()=>({})){let t=function(r){return{__options:r,handler:i(r),config:e(r)}};return t.__isOptionsFunction=!0,t.__pluginFunction=i,t.__configFunction=e,t};Hd=Gd});var wo={};Ae(wo,{default:()=>QC});var QC,bo=C(()=>{l();Yd();QC=Hd});var Jd=v((ID,Qd)=>{l();var JC=(bo(),wo).default,XC={overflow:"hidden",display:"-webkit-box","-webkit-box-orient":"vertical"},KC=JC(function({matchUtilities:i,addUtilities:e,theme:t,variants:r}){let n=t("lineClamp");i({"line-clamp":a=>({...XC,"-webkit-line-clamp":`${a}`})},{values:n}),e([{".line-clamp-none":{"-webkit-line-clamp":"unset"}}],r("lineClamp"))},{theme:{lineClamp:{1:"1",2:"2",3:"3",4:"4",5:"5",6:"6"}},variants:{lineClamp:["responsive"]}});Qd.exports=KC});function vo(i){i.content.files.length===0&&F.warn("content-problems",["The `content` option in your Tailwind CSS configuration is missing or empty.","Configure your content 
sources or your generated CSS will be missing styles.","https://tailwindcss.com/docs/content-configuration"]);try{let e=Jd();i.plugins.includes(e)&&(F.warn("line-clamp-in-core",["As of Tailwind CSS v3.3, the `@tailwindcss/line-clamp` plugin is now included by default.","Remove it from the `plugins` array in your configuration to eliminate this warning."]),i.plugins=i.plugins.filter(t=>t!==e))}catch{}return i}var Xd=C(()=>{l();Ee()});var Kd,Zd=C(()=>{l();Kd=()=>!1});var Tn,eh=C(()=>{l();Tn={sync:i=>[].concat(i),generateTasks:i=>[{dynamic:!1,base:".",negative:[],positive:[].concat(i),patterns:[].concat(i)}],escapePath:i=>i}});var xo,th=C(()=>{l();xo=i=>i});var rh,ih=C(()=>{l();rh=()=>""});function nh(i){let e=i,t=rh(i);return t!=="."&&(e=i.substr(t.length),e.charAt(0)==="/"&&(e=e.substr(1))),e.substr(0,2)==="./"&&(e=e.substr(2)),e.charAt(0)==="/"&&(e=e.substr(1)),{base:t,glob:e}}var sh=C(()=>{l();ih()});function ah(i,e){let t=e.content.files;t=t.filter(o=>typeof o=="string"),t=t.map(xo);let r=Tn.generateTasks(t),n=[],a=[];for(let o of r)n.push(...o.positive.map(u=>oh(u,!1))),a.push(...o.negative.map(u=>oh(u,!0)));let s=[...n,...a];return s=e2(i,s),s=s.flatMap(t2),s=s.map(ZC),s}function oh(i,e){let t={original:i,base:i,ignore:e,pattern:i,glob:null};return Kd(i)&&Object.assign(t,nh(i)),t}function ZC(i){let e=xo(i.base);return e=Tn.escapePath(e),i.pattern=i.glob?`${e}/${i.glob}`:e,i.pattern=i.ignore?`!${i.pattern}`:i.pattern,i}function e2(i,e){let t=[];return i.userConfigPath&&i.tailwindConfig.content.relative&&(t=[Z.dirname(i.userConfigPath)]),e.map(r=>(r.base=Z.resolve(...t,r.base),r))}function t2(i){let e=[i];try{let t=te.realpathSync(i.base);t!==i.base&&e.push({...i,base:t})}catch{}return e}function lh(i,e,t){let r=i.tailwindConfig.content.files.filter(s=>typeof s.raw=="string").map(({raw:s,extension:o="html"})=>({content:s,extension:o})),[n,a]=r2(e,t);for(let s of n){let o=Z.extname(s).slice(1);r.push({file:s,extension:o})}return[r,a]}function r2(i,e){let t=i.map(s=>s.pattern),r=new Map,n=new Set;Pe.DEBUG&&console.time("Finding changed files");let a=Tn.sync(t,{absolute:!0});for(let s of a){let o=e.get(s)||-1/0,u=te.statSync(s).mtimeMs;u>o&&(n.add(s),r.set(s,u))}return Pe.DEBUG&&console.timeEnd("Finding changed files"),[n,r]}var uh=C(()=>{l();ze();mt();Zd();eh();th();sh();ot()});function fh(){}var ch=C(()=>{l()});function a2(i,e){for(let t of e){let r=`${i}${t}`;if(te.existsSync(r)&&te.statSync(r).isFile())return r}for(let t of e){let r=`${i}/index${t}`;if(te.existsSync(r))return r}return null}function*ph(i,e,t,r=Z.extname(i)){let n=a2(Z.resolve(e,i),i2.includes(r)?n2:s2);if(n===null||t.has(n))return;t.add(n),yield n,e=Z.dirname(n),r=Z.extname(n);let a=te.readFileSync(n,"utf-8");for(let s of[...a.matchAll(/import[\s\S]*?['"](.{3,}?)['"]/gi),...a.matchAll(/import[\s\S]*from[\s\S]*?['"](.{3,}?)['"]/gi),...a.matchAll(/require\(['"`](.+)['"`]\)/gi)])!s[1].startsWith(".")||(yield*ph(s[1],e,t,r))}function ko(i){return i===null?new Set:new Set(ph(i,Z.dirname(i),new Set))}var i2,n2,s2,dh=C(()=>{l();ze();mt();i2=[".js",".cjs",".mjs"],n2=["",".js",".cjs",".mjs",".ts",".cts",".mts",".jsx",".tsx"],s2=["",".ts",".cts",".mts",".tsx",".js",".cjs",".mjs",".jsx"]});function o2(i,e){if(So.has(i))return So.get(i);let t=ah(i,e);return So.set(i,t).get(i)}function l2(i){let e=ys(i);if(e!==null){let[r,n,a,s]=mh.get(e)||[],o=ko(e),u=!1,c=new Map;for(let d of o){let h=te.statSync(d).mtimeMs;c.set(d,h),(!s||!s.has(d)||h>s.get(d))&&(u=!0)}if(!u)return[r,e,n,a];for(let d of o)delete fu.cache[d];let 
f=vo(dr(fh(e))),p=ui(f);return mh.set(e,[f,p,o,c]),[f,e,p,o]}let t=dr(i?.config??i??{});return t=vo(t),[t,null,ui(t),[]]}function Co(i){return({tailwindDirectives:e,registerDependency:t})=>(r,n)=>{let[a,s,o,u]=l2(i),c=new Set(u);if(e.size>0){c.add(n.opts.from);for(let y of n.messages)y.type==="dependency"&&c.add(y.file)}let[f,,p]=Ud(r,n,a,s,o,c),d=On(f),h=o2(f,a);if(e.size>0){for(let w of h)for(let b of yo(w))t(b);let[y,x]=lh(f,h,d);for(let w of y)f.changedContent.push(w);for(let[w,b]of x.entries())p.set(w,b)}for(let y of u)t({type:"dependency",file:y});for(let[y,x]of p.entries())d.set(y,x);return f}}var hh,mh,So,gh=C(()=>{l();ze();hh=K(Xn());mu();gs();af();Xr();Wd();Xd();uh();ch();dh();mh=new hh.default({maxSize:100}),So=new WeakMap});function Ao(i){let e=new Set,t=new Set,r=new Set;if(i.walkAtRules(n=>{n.name==="apply"&&r.add(n),n.name==="import"&&(n.params==='"tailwindcss/base"'||n.params==="'tailwindcss/base'"?(n.name="tailwind",n.params="base"):n.params==='"tailwindcss/components"'||n.params==="'tailwindcss/components'"?(n.name="tailwind",n.params="components"):n.params==='"tailwindcss/utilities"'||n.params==="'tailwindcss/utilities'"?(n.name="tailwind",n.params="utilities"):(n.params==='"tailwindcss/screens"'||n.params==="'tailwindcss/screens'"||n.params==='"tailwindcss/variants"'||n.params==="'tailwindcss/variants'")&&(n.name="tailwind",n.params="variants")),n.name==="tailwind"&&(n.params==="screens"&&(n.params="variants"),e.add(n.params)),["layer","responsive","variants"].includes(n.name)&&(["responsive","variants"].includes(n.name)&&F.warn(`${n.name}-at-rule-deprecated`,[`The \`@${n.name}\` directive has been deprecated in Tailwind CSS v3.0.`,"Use `@layer utilities` or `@layer components` instead.","https://tailwindcss.com/docs/upgrade-guide#replace-variants-with-layer"]),t.add(n))}),!e.has("base")||!e.has("components")||!e.has("utilities")){for(let n of t)if(n.name==="layer"&&["base","components","utilities"].includes(n.params)){if(!e.has(n.params))throw n.error(`\`@layer ${n.params}\` is used but no matching \`@tailwind ${n.params}\` directive is present.`)}else if(n.name==="responsive"){if(!e.has("utilities"))throw n.error("`@responsive` is used but `@tailwind utilities` is missing.")}else if(n.name==="variants"&&!e.has("utilities"))throw n.error("`@variants` is used but `@tailwind utilities` is missing.")}return{tailwindDirectives:e,applyDirectives:r}}var yh=C(()=>{l();Ee()});function bt(i,e=void 0,t=void 0){return i.map(r=>{let n=r.clone(),a=r.raws.tailwind?.preserveSource!==!0||!n.source;return e!==void 0&&a&&(n.source=e,"walk"in n&&n.walk(s=>{s.source=e})),t!==void 0&&(n.raws.tailwind={...n.raws.tailwind,...t}),n})}var wh=C(()=>{l()});function Pn(i){return i=Array.isArray(i)?i:[i],i=i.map(e=>e instanceof RegExp?e.source:e),i.join("")}function xe(i){return new RegExp(Pn(i),"g")}function $t(i){return`(?:${i.map(Pn).join("|")})`}function _o(i){return`(?:${Pn(i)})?`}function vh(i){return`(?:${Pn(i)})*`}function xh(i){return i&&u2.test(i)?i.replace(bh,"\\$&"):i||""}var bh,u2,kh=C(()=>{l();bh=/[\\^$.*+?()[\]{}|]/g,u2=RegExp(bh.source)});function Sh(i){let e=Array.from(f2(i));return t=>{let r=[];for(let n of e)r=[...r,...t.match(n)??[]];return r.filter(n=>n!==void 0).map(d2)}}function*f2(i){let 
e=i.tailwindConfig.separator,t=J(i.tailwindConfig,"variantGrouping"),r=i.tailwindConfig.prefix!==""?_o(xe([/-?/,xh(i.tailwindConfig.prefix)])):"",n=$t([/\[[^\s:'"`]+:[^\s\[\]]+\]/,/\[[^\s:'"`]+:[^\s]+?\[[^\s]+\][^\s]+?\]/,xe([/-?(?:\w+)/,_o($t([xe([/-(?:\w+-)*\[[^\s:]+\]/,/(?![{([]])/,/(?:\/[^\s'"`\\><$]*)?/]),xe([/-(?:\w+-)*\[[^\s]+\]/,/(?![{([]])/,/(?:\/[^\s'"`\\$]*)?/]),/[-\/][^\s'"`\\$={><]*/]))])]),a=[$t([xe([/@\[[^\s"'`]+\](\/[^\s"'`]+)?/,e]),xe([/([^\s"'`\[\\]+-)?\[[^\s"'`]+\]/,e]),xe([/[^\s"'`\[\\]+/,e])]),$t([xe([/([^\s"'`\[\\]+-)?\[[^\s`]+\]/,e]),xe([/[^\s`\[\\]+/,e])])];for(let s of a)yield xe(["((?=((",s,")+))\\2)?",/!?/,r,t?$t([xe([/\(/,n,vh([/,/,n]),/\)/]),n]):n]);yield/[^<>"'`\s.(){}[\]#=%$]*[^<>"'`\s.(){}[\]#=%:$]/g}function d2(i){if(!i.includes("-["))return i;let e=0,t=[],r=i.matchAll(c2);r=Array.from(r).flatMap(n=>{let[,...a]=n;return a.map((s,o)=>Object.assign([],n,{index:n.index+o,0:s}))});for(let n of r){let a=n[0],s=t[t.length-1];if(a===s?t.pop():(a==="'"||a==='"'||a==="`")&&t.push(a),!s){if(a==="["){e++;continue}else if(a==="]"){e--;continue}if(e<0)return i.substring(0,n.index-1);if(e===0&&!p2.test(a))return i.substring(0,n.index)}}return i}var c2,p2,Ch=C(()=>{l();De();kh();c2=/([\[\]'"`])([^\[\]'"`])?/g,p2=/[^"'`\s<>\]]+/});function h2(i,e){let t=i.tailwindConfig.content.extract;return t[e]||t.DEFAULT||_h[e]||_h.DEFAULT(i)}function m2(i,e){let t=i.content.transform;return t[e]||t.DEFAULT||Eh[e]||Eh.DEFAULT}function g2(i,e,t,r){ti.has(e)||ti.set(e,new Ah.default({maxSize:25e3}));for(let n of i.split(` +`))if(n=n.trim(),!r.has(n))if(r.add(n),ti.get(e).has(n))for(let a of ti.get(e).get(n))t.add(a);else{let a=e(n).filter(o=>o!=="!*"),s=new Set(a);for(let o of s)t.add(o);ti.get(e).set(n,s)}}function y2(i,e){let t=e.offsets.sort(i),r={base:new Set,defaults:new Set,components:new Set,utilities:new Set,variants:new Set};for(let[n,a]of t)r[n.layer].add(a);return r}function Eo(i){return async e=>{let t={base:null,components:null,utilities:null,variants:null};if(e.walkAtRules(y=>{y.name==="tailwind"&&Object.keys(t).includes(y.params)&&(t[y.params]=y)}),Object.values(t).every(y=>y===null))return e;let r=new Set([...i.candidates??[],He]),n=new Set;Ye.DEBUG&&console.time("Reading changed files"),await Promise.all(i.changedContent.map(async({file:y,content:x,extension:w})=>{let b=m2(i.tailwindConfig,w),k=h2(i,w);x=y?await te.promises.readFile(y,"utf8"):x,g2(b(x),k,r,n)})),Ye.DEBUG&&console.timeEnd("Reading changed files");let a=i.classCache.size;Ye.DEBUG&&console.time("Generate rules"),Ye.DEBUG&&console.time("Sorting candidates");let s=new Set([...r].sort((y,x)=>y===x?0:y{let x=y.raws.tailwind?.parentLayer;return x==="components"?t.components!==null:x==="utilities"?t.utilities!==null:!0});t.variants?(t.variants.before(bt(d,t.variants.source,{layer:"variants"})),t.variants.remove()):d.length>0&&e.append(bt(d,e.source,{layer:"variants"}));let h=d.some(y=>y.raws.tailwind?.parentLayer==="utilities");t.utilities&&f.size===0&&!h&&F.warn("content-problems",["No utility classes were detected in your source files. 
If this is unexpected, double-check the `content` option in your Tailwind CSS configuration.","https://tailwindcss.com/docs/content-configuration"]),Ye.DEBUG&&(console.log("Potential classes: ",r.size),console.log("Active contexts: ",gn.size)),i.changedContent=[],e.walkAtRules("layer",y=>{Object.keys(t).includes(y.params)&&y.remove()})}}var Ah,Ye,_h,Eh,ti,Oh=C(()=>{l();ze();Ah=K(Xn());ot();Cn();Ee();wh();Ch();Ye=Pe,_h={DEFAULT:Sh},Eh={DEFAULT:i=>i,svelte:i=>i.replace(/(?:^|\s)class:/g," ")};ti=new WeakMap});function In(i){let e=new Map;z.root({nodes:[i.clone()]}).walkRules(a=>{(0,Dn.default)(s=>{s.walkClasses(o=>{let u=o.parent.toString(),c=e.get(u);c||e.set(u,c=new Set),c.add(o.value)})}).processSync(a.selector)});let r=Array.from(e.values(),a=>Array.from(a)),n=r.flat();return Object.assign(n,{groups:r})}function Oo(i){return w2.astSync(i)}function Th(i,e){let t=new Set;for(let r of i)t.add(r.split(e).pop());return Array.from(t)}function Ph(i,e){let t=i.tailwindConfig.prefix;return typeof t=="function"?t(e):t+e}function*Dh(i){for(yield i;i.parent;)yield i.parent,i=i.parent}function b2(i,e={}){let t=i.nodes;i.nodes=[];let r=i.clone(e);return i.nodes=t,r}function v2(i){for(let e of Dh(i))if(i!==e){if(e.type==="root")break;i=b2(e,{nodes:[i]})}return i}function x2(i,e){let t=new Map;return i.walkRules(r=>{for(let s of Dh(r))if(s.raws.tailwind?.layer!==void 0)return;let n=v2(r),a=e.offsets.create("user");for(let s of In(r)){let o=t.get(s)||[];t.set(s,o),o.push([{layer:"user",sort:a,important:!1},n])}}),t}function k2(i,e){for(let t of i){if(e.notClassCache.has(t)||e.applyClassCache.has(t))continue;if(e.classCache.has(t)){e.applyClassCache.set(t,e.classCache.get(t).map(([n,a])=>[n,a.clone()]));continue}let r=Array.from(xn(t,e));if(r.length===0){e.notClassCache.add(t);continue}e.applyClassCache.set(t,r)}return e.applyClassCache}function S2(i){let e=null;return{get:t=>(e=e||i(),e.get(t)),has:t=>(e=e||i(),e.has(t))}}function C2(i){return{get:e=>i.flatMap(t=>t.get(e)||[]),has:e=>i.some(t=>t.has(e))}}function Ih(i){let e=i.split(/[\s\t\n]+/g);return e[e.length-1]==="!important"?[e.slice(0,-1),!0]:[e,!1]}function qh(i,e,t){let r=new Set,n=[];if(i.walkAtRules("apply",u=>{let[c]=Ih(u.params);for(let f of c)r.add(f);n.push(u)}),n.length===0)return;let a=C2([t,k2(r,e)]);function s(u,c,f){let p=Oo(u),d=Oo(c),y=Oo(`.${ce(f)}`).nodes[0].nodes[0];return p.each(x=>{let w=new Set;d.each(b=>{let k=!1;b=b.clone(),b.walkClasses(S=>{S.value===y.value&&(k||(S.replaceWith(...x.nodes.map(_=>_.clone())),w.add(b),k=!0))})});for(let b of w){let k=[[]];for(let S of b.nodes)S.type==="combinator"?(k.push(S),k.push([])):k[k.length-1].push(S);b.nodes=[];for(let S of k)Array.isArray(S)&&S.sort((_,E)=>_.type==="tag"&&E.type==="class"?-1:_.type==="class"&&E.type==="tag"?1:_.type==="class"&&E.type==="pseudo"&&E.value.startsWith("::")?-1:_.type==="pseudo"&&_.value.startsWith("::")&&E.type==="class"?1:0),b.nodes=b.nodes.concat(S)}x.replaceWith(...w)}),p.toString()}let o=new Map;for(let u of n){let[c]=o.get(u.parent)||[[],u.source];o.set(u.parent,[c,u.source]);let[f,p]=Ih(u.params);if(u.parent.type==="atrule"){if(u.parent.name==="screen"){let d=u.parent.params;throw u.error(`@apply is not supported within nested at-rules like @screen. We suggest you write this as @apply ${f.map(h=>`${d}:${h}`).join(" ")} instead.`)}throw u.error(`@apply is not supported within nested at-rules like @${u.parent.name}. 
You can fix this by un-nesting @${u.parent.name}.`)}for(let d of f){if([Ph(e,"group"),Ph(e,"peer")].includes(d))throw u.error(`@apply should not be used with the '${d}' utility`);if(!a.has(d))throw u.error(`The \`${d}\` class does not exist. If \`${d}\` is a custom class, make sure it is defined within a \`@layer\` directive.`);let h=a.get(d);c.push([d,p,h])}}for(let[u,[c,f]]of o){let p=[];for(let[h,y,x]of c){let w=[h,...Th([h],e.tailwindConfig.separator)];for(let[b,k]of x){let S=In(u),_=In(k);if(_=_.groups.filter(q=>q.some(X=>w.includes(X))).flat(),_=_.concat(Th(_,e.tailwindConfig.separator)),S.some(q=>_.includes(q)))throw k.error(`You cannot \`@apply\` the \`${h}\` utility here because it creates a circular dependency.`);let I=z.root({nodes:[k.clone()]});I.walk(q=>{q.source=f}),(k.type!=="atrule"||k.type==="atrule"&&k.name!=="keyframes")&&I.walkRules(q=>{if(!In(q).some($=>$===h)){q.remove();return}let X=typeof e.tailwindConfig.important=="string"?e.tailwindConfig.important:null,ge=u.raws.tailwind!==void 0&&X&&u.selector.indexOf(X)===0?u.selector.slice(X.length):u.selector;q.selector=s(ge,q.selector,h),X&&ge!==u.selector&&(q.selector=bn(q.selector,X)),q.walkDecls($=>{$.important=b.important||y});let je=(0,Dn.default)().astSync(q.selector);je.each($=>Ft($)),q.selector=je.toString()}),!!I.nodes[0]&&p.push([b.sort,I.nodes[0]])}}let d=e.offsets.sort(p).map(h=>h[1]);u.after(d)}for(let u of n)u.parent.nodes.length>1?u.remove():u.parent.remove();qh(i,e,t)}function To(i){return e=>{let t=S2(()=>x2(e,i));qh(e,i,t)}}var Dn,w2,Rh=C(()=>{l();nt();Dn=K(Me());Cn();Mt();so();yn();w2=(0,Dn.default)()});var Mh=v((PI,qn)=>{l();(function(){"use strict";function i(r,n,a){if(!r)return null;i.caseSensitive||(r=r.toLowerCase());var s=i.threshold===null?null:i.threshold*r.length,o=i.thresholdAbsolute,u;s!==null&&o!==null?u=Math.min(s,o):s!==null?u=s:o!==null?u=o:u=null;var c,f,p,d,h,y=n.length;for(h=0;ha)return a+1;var u=[],c,f,p,d,h;for(c=0;c<=o;c++)u[c]=[c];for(f=0;f<=s;f++)u[0][f]=f;for(c=1;c<=o;c++){for(p=e,d=1,c>a&&(d=c-a),h=o+1,h>a+c&&(h=a+c),f=1;f<=s;f++)fh?u[c][f]=a+1:n.charAt(c-1)===r.charAt(f-1)?u[c][f]=u[c-1][f-1]:u[c][f]=Math.min(u[c-1][f-1]+1,Math.min(u[c][f-1]+1,u[c-1][f]+1)),u[c][f]a)return a+1}return u[o][s]}})()});var Fh=v((DI,Bh)=>{l();var Po="(".charCodeAt(0),Do=")".charCodeAt(0),Rn="'".charCodeAt(0),Io='"'.charCodeAt(0),qo="\\".charCodeAt(0),jt="/".charCodeAt(0),Ro=",".charCodeAt(0),Mo=":".charCodeAt(0),Mn="*".charCodeAt(0),A2="u".charCodeAt(0),_2="U".charCodeAt(0),E2="+".charCodeAt(0),O2=/^[a-f0-9?-]+$/i;Bh.exports=function(i){for(var e=[],t=i,r,n,a,s,o,u,c,f,p=0,d=t.charCodeAt(p),h=t.length,y=[{nodes:e}],x=0,w,b="",k="",S="";p{l();Nh.exports=function i(e,t,r){var n,a,s,o;for(n=0,a=e.length;n{l();function $h(i,e){var t=i.type,r=i.value,n,a;return e&&(a=e(i))!==void 0?a:t==="word"||t==="space"?r:t==="string"?(n=i.quote||"",n+r+(i.unclosed?"":n)):t==="comment"?"/*"+r+(i.unclosed?"":"*/"):t==="div"?(i.before||"")+r+(i.after||""):Array.isArray(i.nodes)?(n=jh(i.nodes,e),t!=="function"?n:r+"("+(i.before||"")+n+(i.after||"")+(i.unclosed?"":")")):r}function jh(i,e){var t,r;if(Array.isArray(i)){for(t="",r=i.length-1;~r;r-=1)t=$h(i[r],e)+t;return t}return $h(i,e)}zh.exports=jh});var Wh=v((RI,Uh)=>{l();var Bn="-".charCodeAt(0),Fn="+".charCodeAt(0),Bo=".".charCodeAt(0),T2="e".charCodeAt(0),P2="E".charCodeAt(0);function D2(i){var e=i.charCodeAt(0),t;if(e===Fn||e===Bn){if(t=i.charCodeAt(1),t>=48&&t<=57)return!0;var r=i.charCodeAt(2);return t===Bo&&r>=48&&r<=57}return 
e===Bo?(t=i.charCodeAt(1),t>=48&&t<=57):e>=48&&e<=57}Uh.exports=function(i){var e=0,t=i.length,r,n,a;if(t===0||!D2(i))return!1;for(r=i.charCodeAt(e),(r===Fn||r===Bn)&&e++;e57));)e+=1;if(r=i.charCodeAt(e),n=i.charCodeAt(e+1),r===Bo&&n>=48&&n<=57)for(e+=2;e57));)e+=1;if(r=i.charCodeAt(e),n=i.charCodeAt(e+1),a=i.charCodeAt(e+2),(r===T2||r===P2)&&(n>=48&&n<=57||(n===Fn||n===Bn)&&a>=48&&a<=57))for(e+=n===Fn||n===Bn?3:2;e57));)e+=1;return{number:i.slice(0,e),unit:i.slice(e)}}});var Qh=v((MI,Yh)=>{l();var I2=Fh(),Gh=Lh(),Hh=Vh();function ut(i){return this instanceof ut?(this.nodes=I2(i),this):new ut(i)}ut.prototype.toString=function(){return Array.isArray(this.nodes)?Hh(this.nodes):""};ut.prototype.walk=function(i,e){return Gh(this.nodes,i,e),this};ut.unit=Wh();ut.walk=Gh;ut.stringify=Hh;Yh.exports=ut});function No(i){return typeof i=="object"&&i!==null}function q2(i,e){let t=Ke(e);do if(t.pop(),(0,ri.default)(i,t)!==void 0)break;while(t.length);return t.length?t:void 0}function zt(i){return typeof i=="string"?i:i.reduce((e,t,r)=>t.includes(".")?`${e}[${t}]`:r===0?t:`${e}.${t}`,"")}function Xh(i){return i.map(e=>`'${e}'`).join(", ")}function Kh(i){return Xh(Object.keys(i))}function Lo(i,e,t,r={}){let n=Array.isArray(e)?zt(e):e.replace(/^['"]+|['"]+$/g,""),a=Array.isArray(e)?e:Ke(n),s=(0,ri.default)(i.theme,a,t);if(s===void 0){let u=`'${n}' does not exist in your theme config.`,c=a.slice(0,-1),f=(0,ri.default)(i.theme,c);if(No(f)){let p=Object.keys(f).filter(h=>Lo(i,[...c,h]).isValid),d=(0,Jh.default)(a[a.length-1],p);d?u+=` Did you mean '${zt([...c,d])}'?`:p.length>0&&(u+=` '${zt(c)}' has the following valid keys: ${Xh(p)}`)}else{let p=q2(i.theme,n);if(p){let d=(0,ri.default)(i.theme,p);No(d)?u+=` '${zt(p)}' has the following keys: ${Kh(d)}`:u+=` '${zt(p)}' is not an object.`}else u+=` Your theme has the following top-level keys: ${Kh(i.theme)}`}return{isValid:!1,error:u}}if(!(typeof s=="string"||typeof s=="number"||typeof s=="function"||s instanceof String||s instanceof Number||Array.isArray(s))){let u=`'${n}' was found but does not resolve to a string.`;if(No(s)){let c=Object.keys(s).filter(f=>Lo(i,[...a,f]).isValid);c.length&&(u+=` Did you mean something like '${zt([...a,c[0]])}'?`)}return{isValid:!1,error:u}}let[o]=a;return{isValid:!0,value:Ge(o)(s,r)}}function R2(i,e,t){e=e.map(n=>Zh(i,n,t));let r=[""];for(let n of e)n.type==="div"&&n.value===","?r.push(""):r[r.length-1]+=Fo.default.stringify(n);return r}function Zh(i,e,t){if(e.type==="function"&&t[e.value]!==void 0){let r=R2(i,e.nodes,t);e.type="word",e.value=t[e.value](i,...r)}return e}function M2(i,e,t){return Object.keys(t).some(n=>e.includes(`${n}(`))?(0,Fo.default)(e).walk(n=>{Zh(i,n,t)}).toString():e}function*F2(i){i=i.replace(/^['"]+|['"]+$/g,"");let e=i.match(/^([^\s]+)(?![^\[]*\])(?:\s*\/\s*([^\/\s]+))$/),t;yield[i,void 0],e&&(i=e[1],t=e[2],yield[i,t])}function N2(i,e,t){let r=Array.from(F2(e)).map(([n,a])=>Object.assign(Lo(i,n,t,{opacityValue:a}),{resolvedPath:n,alpha:a}));return r.find(n=>n.isValid)??r[0]}function em(i){let e=i.tailwindConfig,t={theme:(r,n,...a)=>{let{isValid:s,value:o,error:u,alpha:c}=N2(e,n,a.length?a:void 0);if(!s){let d=r.parent,h=d?.raws.tailwind?.candidate;if(d&&h!==void 0){i.markInvalidUtilityNode(d),d.remove(),F.warn("invalid-theme-key-in-class",[`The utility \`${h}\` contains an invalid theme value and was not generated.`]);return}throw r.error(u)}let f=kt(o),p=f!==void 0&&typeof f=="function";return(c!==void 0||p)&&(c===void 
0&&(c=1),o=Ie(f,c,f)),o},screen:(r,n)=>{n=n.replace(/^['"]+/g,"").replace(/['"]+$/g,"");let s=at(e.theme.screens).find(({name:o})=>o===n);if(!s)throw r.error(`The '${n}' screen does not exist in your theme.`);return st(s)}};return r=>{r.walk(n=>{let a=B2[n.type];a!==void 0&&(n[a]=M2(n,n[a],t))})}}var ri,Jh,Fo,B2,tm=C(()=>{l();ri=K(js()),Jh=K(Mh());Hr();Fo=K(Qh());hn();cn();pi();ar();cr();Ee();B2={atrule:"params",decl:"value"}});function rm({tailwindConfig:{theme:i}}){return function(e){e.walkAtRules("screen",t=>{let r=t.params,a=at(i.screens).find(({name:s})=>s===r);if(!a)throw t.error(`No \`${r}\` screen found.`);t.name="media",t.params=st(a)})}}var im=C(()=>{l();hn();cn()});function L2(i){let e=i.filter(o=>o.type!=="pseudo"||o.nodes.length>0?!0:o.value.startsWith("::")||[":before",":after",":first-line",":first-letter"].includes(o.value)).reverse(),t=new Set(["tag","class","id","attribute"]),r=e.findIndex(o=>t.has(o.type));if(r===-1)return e.reverse().join("").trim();let n=e[r],a=nm[n.type]?nm[n.type](n):n;e=e.slice(0,r);let s=e.findIndex(o=>o.type==="combinator"&&o.value===">");return s!==-1&&(e.splice(0,s),e.unshift(Nn.default.universal())),[a,...e.reverse()].join("").trim()}function j2(i){return $o.has(i)||$o.set(i,$2.transformSync(i)),$o.get(i)}function jo({tailwindConfig:i}){return e=>{let t=new Map,r=new Set;if(e.walkAtRules("defaults",n=>{if(n.nodes&&n.nodes.length>0){r.add(n);return}let a=n.params;t.has(a)||t.set(a,new Set),t.get(a).add(n.parent),n.remove()}),J(i,"optimizeUniversalDefaults"))for(let n of r){let a=new Map,s=t.get(n.params)??[];for(let o of s)for(let u of j2(o.selector)){let c=u.includes(":-")||u.includes("::-")?u:"__DEFAULT__",f=a.get(c)??new Set;a.set(c,f),f.add(u)}if(J(i,"optimizeUniversalDefaults")){if(a.size===0){n.remove();continue}for(let[,o]of a){let u=z.rule({source:n.source});u.selectors=[...o],u.append(n.nodes.map(c=>c.clone())),n.before(u)}}n.remove()}else if(r.size){let n=z.rule({selectors:["*","::before","::after"]});for(let s of r)n.append(s.nodes),n.parent||s.before(n),n.source||(n.source=s.source),s.remove();let a=n.clone({selectors:["::backdrop"]});n.after(a)}}}var Nn,nm,$2,$o,sm=C(()=>{l();nt();Nn=K(Me());De();nm={id(i){return Nn.default.attribute({attribute:"id",operator:"=",value:i.value,quoteMark:'"'})}};$2=(0,Nn.default)(i=>i.map(e=>{let t=e.split(r=>r.type==="combinator"&&r.value===" ").pop();return L2(t)})),$o=new Map});function zo(){function i(e){let t=null;e.each(r=>{if(!z2.has(r.type)){t=null;return}if(t===null){t=r;return}let n=am[r.type];r.type==="atrule"&&r.name==="font-face"?t=r:n.every(a=>(r[a]??"").replace(/\s+/g," ")===(t[a]??"").replace(/\s+/g," "))?(r.nodes&&t.append(r.nodes),r.remove()):t=r}),e.each(r=>{r.type==="atrule"&&i(r)})}return e=>{i(e)}}var am,z2,om=C(()=>{l();am={atrule:["name","params"],rule:["selector"]},z2=new Set(Object.keys(am))});function Vo(){return i=>{i.walkRules(e=>{let t=new Map,r=new Set([]),n=new Map;e.walkDecls(a=>{if(a.parent===e){if(t.has(a.prop)){if(t.get(a.prop).value===a.value){r.add(t.get(a.prop)),t.set(a.prop,a);return}n.has(a.prop)||n.set(a.prop,new Set),n.get(a.prop).add(t.get(a.prop)),n.get(a.prop).add(a)}t.set(a.prop,a)}});for(let a of r)a.remove();for(let a of n.values()){let s=new Map;for(let o of a){let u=U2(o.value);u!==null&&(s.has(u)||s.set(u,new Set),s.get(u).add(o))}for(let o of s.values()){let u=Array.from(o).slice(0,-1);for(let c of u)c.remove()}}})}}function U2(i){let e=/^-?\d*.?\d+([\w%]+)?$/g.exec(i);return e?e[1]??V2:null}var 
V2,lm=C(()=>{l();V2=Symbol("unitless-number")});function W2(i){if(!i.walkAtRules)return;let e=new Set;if(i.walkAtRules("apply",t=>{e.add(t.parent)}),e.size!==0)for(let t of e){let r=[],n=[];for(let a of t.nodes)a.type==="atrule"&&a.name==="apply"?(n.length>0&&(r.push(n),n=[]),r.push([a])):n.push(a);if(n.length>0&&r.push(n),r.length!==1){for(let a of[...r].reverse()){let s=t.clone({nodes:[]});s.append(a),t.after(s)}t.remove()}}}function Ln(){return i=>{W2(i)}}var um=C(()=>{l()});function G2(i){return i.type==="root"}function H2(i){return i.type==="atrule"&&i.name==="layer"}function fm(i){return(e,t)=>{let r=!1;e.walkAtRules("tailwind",n=>{if(r)return!1;if(n.parent&&!(G2(n.parent)||H2(n.parent)))return r=!0,n.warn(t,["Nested @tailwind rules were detected, but are not supported.","Consider using a prefix to scope Tailwind's classes: https://tailwindcss.com/docs/configuration#prefix","Alternatively, use the important selector strategy: https://tailwindcss.com/docs/configuration#selector-strategy"].join(` +`)),!1}),e.walkRules(n=>{if(r)return!1;n.walkRules(a=>(r=!0,a.warn(t,["Nested CSS was detected, but CSS nesting has not been configured correctly.","Please enable a CSS nesting plugin *before* Tailwind in your configuration.","See how here: https://tailwindcss.com/docs/using-with-preprocessors#nesting"].join(` +`)),!1))})}}var cm=C(()=>{l()});function $n(i){return async function(e,t){let{tailwindDirectives:r,applyDirectives:n}=Ao(e);fm()(e,t),Ln()(e,t);let a=i({tailwindDirectives:r,applyDirectives:n,registerDependency(s){t.messages.push({plugin:"tailwindcss",parent:t.opts.from,...s})},createContext(s,o){return go(s,o,e)}})(e,t);if(a.tailwindConfig.separator==="-")throw new Error("The '-' character cannot be used as a custom separator in JIT mode due to parsing ambiguity. Please use another character like '_' instead.");_u(a.tailwindConfig),await Eo(a)(e,t),Ln()(e,t),To(a)(e,t),em(a)(e,t),rm(a)(e,t),jo(a)(e,t),zo(a)(e,t),Vo(a)(e,t)}}var pm=C(()=>{l();yh();Oh();Rh();tm();im();sm();om();lm();um();cm();Xr();De()});function dm(i,e){let t=null,r=null;return i.walkAtRules("config",n=>{if(r=n.source?.input.file??e.opts.from??null,r===null)throw n.error("The `@config` directive cannot be used without setting `from` in your PostCSS config.");if(t)throw n.error("Only one `@config` directive is allowed per file.");let a=n.params.match(/(['"])(.*?)\1/);if(!a)throw n.error("A path is required when using the `@config` directive.");let s=a[2];if(Z.isAbsolute(s))throw n.error("The `@config` directive cannot be used with an absolute path.");if(t=Z.resolve(Z.dirname(r),s),!te.existsSync(t))throw n.error(`The config file at "${s}" does not exist. 
Make sure the path is correct and the file exists.`);n.remove()}),t||null}var hm=C(()=>{l();ze();mt()});var mm=v((v4,Uo)=>{l();gh();pm();ot();hm();Uo.exports=function(e){return{postcssPlugin:"tailwindcss",plugins:[Pe.DEBUG&&function(t){return console.log(` +`),console.time("JIT TOTAL"),t},async function(t,r){e=dm(t,r)??e;let n=Co(e);if(t.type==="document"){let a=t.nodes.filter(s=>s.type==="root");for(let s of a)s.type==="root"&&await $n(n)(s,r);return}await $n(n)(t,r)},!1,Pe.DEBUG&&function(t){return console.timeEnd("JIT TOTAL"),console.log(` +`),t}].filter(Boolean)}};Uo.exports.postcss=!0});var ym=v((x4,gm)=>{l();gm.exports=mm()});var Wo=v((k4,wm)=>{l();wm.exports=()=>["and_chr 114","and_uc 15.5","chrome 114","chrome 113","chrome 109","edge 114","firefox 114","ios_saf 16.5","ios_saf 16.4","ios_saf 16.3","ios_saf 16.1","opera 99","safari 16.5","samsung 21"]});var jn={};Ae(jn,{agents:()=>Y2,feature:()=>Q2});function Q2(){return{status:"cr",title:"CSS Feature Queries",stats:{ie:{"6":"n","7":"n","8":"n","9":"n","10":"n","11":"n","5.5":"n"},edge:{"12":"y","13":"y","14":"y","15":"y","16":"y","17":"y","18":"y","79":"y","80":"y","81":"y","83":"y","84":"y","85":"y","86":"y","87":"y","88":"y","89":"y","90":"y","91":"y","92":"y","93":"y","94":"y","95":"y","96":"y","97":"y","98":"y","99":"y","100":"y","101":"y","102":"y","103":"y","104":"y","105":"y","106":"y","107":"y","108":"y","109":"y","110":"y","111":"y","112":"y","113":"y","114":"y"},firefox:{"2":"n","3":"n","4":"n","5":"n","6":"n","7":"n","8":"n","9":"n","10":"n","11":"n","12":"n","13":"n","14":"n","15":"n","16":"n","17":"n","18":"n","19":"n","20":"n","21":"n","22":"y","23":"y","24":"y","25":"y","26":"y","27":"y","28":"y","29":"y","30":"y","31":"y","32":"y","33":"y","34":"y","35":"y","36":"y","37":"y","38":"y","39":"y","40":"y","41":"y","42":"y","43":"y","44":"y","45":"y","46":"y","47":"y","48":"y","49":"y","50":"y","51":"y","52":"y","53":"y","54":"y","55":"y","56":"y","57":"y","58":"y","59":"y","60":"y","61":"y","62":"y","63":"y","64":"y","65":"y","66":"y","67":"y","68":"y","69":"y","70":"y","71":"y","72":"y","73":"y","74":"y","75":"y","76":"y","77":"y","78":"y","79":"y","80":"y","81":"y","82":"y","83":"y","84":"y","85":"y","86":"y","87":"y","88":"y","89":"y","90":"y","91":"y","92":"y","93":"y","94":"y","95":"y","96":"y","97":"y","98":"y","99":"y","100":"y","101":"y","102":"y","103":"y","104":"y","105":"y","106":"y","107":"y","108":"y","109":"y","110":"y","111":"y","112":"y","113":"y","114":"y","115":"y","116":"y","117":"y","3.5":"n","3.6":"n"},chrome:{"4":"n","5":"n","6":"n","7":"n","8":"n","9":"n","10":"n","11":"n","12":"n","13":"n","14":"n","15":"n","16":"n","17":"n","18":"n","19":"n","20":"n","21":"n","22":"n","23":"n","24":"n","25":"n","26":"n","27":"n","28":"y","29":"y","30":"y","31":"y","32":"y","33":"y","34":"y","35":"y","36":"y","37":"y","38":"y","39":"y","40":"y","41":"y","42":"y","43":"y","44":"y","45":"y","46":"y","47":"y","48":"y","49":"y","50":"y","51":"y","52":"y","53":"y","54":"y","55":"y","56":"y","57":"y","58":"y","59":"y","60":"y","61":"y","62":"y","63":"y","64":"y","65":"y","66":"y","67":"y","68":"y","69":"y","70":"y","71":"y","72":"y","73":"y","74":"y","75":"y","76":"y","77":"y","78":"y","79":"y","80":"y","81":"y","83":"y","84":"y","85":"y","86":"y","87":"y","88":"y","89":"y","90":"y","91":"y","92":"y","93":"y","94":"y","95":"y","96":"y","97":"y","98":"y","99":"y","100":"y","101":"y","102":"y","103":"y","104":"y","105":"y","106":"y","107":"y","108":"y","109":"y","110":"y","111":"y","112":"y","113":"y","114":"y","115":"y","
116":"y","117":"y"},safari:{"4":"n","5":"n","6":"n","7":"n","8":"n","9":"y","10":"y","11":"y","12":"y","13":"y","14":"y","15":"y","17":"y","9.1":"y","10.1":"y","11.1":"y","12.1":"y","13.1":"y","14.1":"y","15.1":"y","15.2-15.3":"y","15.4":"y","15.5":"y","15.6":"y","16.0":"y","16.1":"y","16.2":"y","16.3":"y","16.4":"y","16.5":"y","16.6":"y",TP:"y","3.1":"n","3.2":"n","5.1":"n","6.1":"n","7.1":"n"},opera:{"9":"n","11":"n","12":"n","15":"y","16":"y","17":"y","18":"y","19":"y","20":"y","21":"y","22":"y","23":"y","24":"y","25":"y","26":"y","27":"y","28":"y","29":"y","30":"y","31":"y","32":"y","33":"y","34":"y","35":"y","36":"y","37":"y","38":"y","39":"y","40":"y","41":"y","42":"y","43":"y","44":"y","45":"y","46":"y","47":"y","48":"y","49":"y","50":"y","51":"y","52":"y","53":"y","54":"y","55":"y","56":"y","57":"y","58":"y","60":"y","62":"y","63":"y","64":"y","65":"y","66":"y","67":"y","68":"y","69":"y","70":"y","71":"y","72":"y","73":"y","74":"y","75":"y","76":"y","77":"y","78":"y","79":"y","80":"y","81":"y","82":"y","83":"y","84":"y","85":"y","86":"y","87":"y","88":"y","89":"y","90":"y","91":"y","92":"y","93":"y","94":"y","95":"y","96":"y","97":"y","98":"y","99":"y","100":"y","12.1":"y","9.5-9.6":"n","10.0-10.1":"n","10.5":"n","10.6":"n","11.1":"n","11.5":"n","11.6":"n"},ios_saf:{"8":"n","17":"y","9.0-9.2":"y","9.3":"y","10.0-10.2":"y","10.3":"y","11.0-11.2":"y","11.3-11.4":"y","12.0-12.1":"y","12.2-12.5":"y","13.0-13.1":"y","13.2":"y","13.3":"y","13.4-13.7":"y","14.0-14.4":"y","14.5-14.8":"y","15.0-15.1":"y","15.2-15.3":"y","15.4":"y","15.5":"y","15.6":"y","16.0":"y","16.1":"y","16.2":"y","16.3":"y","16.4":"y","16.5":"y","16.6":"y","3.2":"n","4.0-4.1":"n","4.2-4.3":"n","5.0-5.1":"n","6.0-6.1":"n","7.0-7.1":"n","8.1-8.4":"n"},op_mini:{all:"y"},android:{"3":"n","4":"n","114":"y","4.4":"y","4.4.3-4.4.4":"y","2.1":"n","2.2":"n","2.3":"n","4.1":"n","4.2-4.3":"n"},bb:{"7":"n","10":"n"},op_mob:{"10":"n","11":"n","12":"n","73":"y","11.1":"n","11.5":"n","12.1":"n"},and_chr:{"114":"y"},and_ff:{"115":"y"},ie_mob:{"10":"n","11":"n"},and_uc:{"15.5":"y"},samsung:{"4":"y","20":"y","21":"y","5.0-5.4":"y","6.2-6.4":"y","7.2-7.4":"y","8.2":"y","9.2":"y","10.1":"y","11.1-11.2":"y","12.0":"y","13.0":"y","14.0":"y","15.0":"y","16.0":"y","17.0":"y","18.0":"y","19.0":"y"},and_qq:{"13.1":"y"},baidu:{"13.18":"y"},kaios:{"2.5":"y","3.0-3.1":"y"}}}}var Y2,zn=C(()=>{l();Y2={ie:{prefix:"ms"},edge:{prefix:"webkit",prefix_exceptions:{"12":"ms","13":"ms","14":"ms","15":"ms","16":"ms","17":"ms","18":"ms"}},firefox:{prefix:"moz"},chrome:{prefix:"webkit"},safari:{prefix:"webkit"},opera:{prefix:"webkit",prefix_exceptions:{"9":"o","11":"o","12":"o","9.5-9.6":"o","10.0-10.1":"o","10.5":"o","10.6":"o","11.1":"o","11.5":"o","11.6":"o","12.1":"o"}},ios_saf:{prefix:"webkit"},op_mini:{prefix:"o"},android:{prefix:"webkit"},bb:{prefix:"webkit"},op_mob:{prefix:"o",prefix_exceptions:{"73":"webkit"}},and_chr:{prefix:"webkit"},and_ff:{prefix:"moz"},ie_mob:{prefix:"ms"},and_uc:{prefix:"webkit",prefix_exceptions:{"15.5":"webkit"}},samsung:{prefix:"webkit"},and_qq:{prefix:"webkit"},baidu:{prefix:"webkit"},kaios:{prefix:"moz"}}});var bm=v(()=>{l()});var ue=v((A4,ft)=>{l();var{list:Go}=me();ft.exports.error=function(i){let e=new Error(i);throw e.autoprefixer=!0,e};ft.exports.uniq=function(i){return[...new Set(i)]};ft.exports.removeNote=function(i){return i.includes(" ")?i.split(" ")[0]:i};ft.exports.escapeRegexp=function(i){return i.replace(/[$()*+-.?[\\\]^{|}]/g,"\\$&")};ft.exports.regexp=function(i,e=!0){return e&&(i=this.escapeRegexp(i)),new 
RegExp(`(^|[\\s,(])(${i}($|[\\s(,]))`,"gi")};ft.exports.editList=function(i,e){let t=Go.comma(i),r=e(t,[]);if(t===r)return i;let n=i.match(/,\s*/);return n=n?n[0]:", ",r.join(n)};ft.exports.splitSelector=function(i){return Go.comma(i).map(e=>Go.space(e).map(t=>t.split(/(?=\.|#)/g)))}});var ct=v((_4,km)=>{l();var J2=Wo(),vm=(zn(),jn).agents,X2=ue(),xm=class{static prefixes(){if(this.prefixesCache)return this.prefixesCache;this.prefixesCache=[];for(let e in vm)this.prefixesCache.push(`-${vm[e].prefix}-`);return this.prefixesCache=X2.uniq(this.prefixesCache).sort((e,t)=>t.length-e.length),this.prefixesCache}static withPrefix(e){return this.prefixesRegexp||(this.prefixesRegexp=new RegExp(this.prefixes().join("|"))),this.prefixesRegexp.test(e)}constructor(e,t,r,n){this.data=e,this.options=r||{},this.browserslistOpts=n||{},this.selected=this.parse(t)}parse(e){let t={};for(let r in this.browserslistOpts)t[r]=this.browserslistOpts[r];return t.path=this.options.from,J2(e,t)}prefix(e){let[t,r]=e.split(" "),n=this.data[t],a=n.prefix_exceptions&&n.prefix_exceptions[r];return a||(a=n.prefix),`-${a}-`}isSelected(e){return this.selected.includes(e)}};km.exports=xm});var ii=v((E4,Sm)=>{l();Sm.exports={prefix(i){let e=i.match(/^(-\w+-)/);return e?e[0]:""},unprefixed(i){return i.replace(/^-\w+-/,"")}}});var Vt=v((O4,Am)=>{l();var K2=ct(),Cm=ii(),Z2=ue();function Ho(i,e){let t=new i.constructor;for(let r of Object.keys(i||{})){let n=i[r];r==="parent"&&typeof n=="object"?e&&(t[r]=e):r==="source"||r===null?t[r]=n:Array.isArray(n)?t[r]=n.map(a=>Ho(a,t)):r!=="_autoprefixerPrefix"&&r!=="_autoprefixerValues"&&r!=="proxyCache"&&(typeof n=="object"&&n!==null&&(n=Ho(n,t)),t[r]=n)}return t}var Vn=class{static hack(e){return this.hacks||(this.hacks={}),e.names.map(t=>(this.hacks[t]=e,this.hacks[t]))}static load(e,t,r){let n=this.hacks&&this.hacks[e];return n?new n(e,t,r):new this(e,t,r)}static clone(e,t){let r=Ho(e);for(let n in t)r[n]=t[n];return r}constructor(e,t,r){this.prefixes=t,this.name=e,this.all=r}parentPrefix(e){let t;return typeof e._autoprefixerPrefix!="undefined"?t=e._autoprefixerPrefix:e.type==="decl"&&e.prop[0]==="-"?t=Cm.prefix(e.prop):e.type==="root"?t=!1:e.type==="rule"&&e.selector.includes(":-")&&/:(-\w+-)/.test(e.selector)?t=e.selector.match(/:(-\w+-)/)[1]:e.type==="atrule"&&e.name[0]==="-"?t=Cm.prefix(e.name):t=this.parentPrefix(e.parent),K2.prefixes().includes(t)||(t=!1),e._autoprefixerPrefix=t,e._autoprefixerPrefix}process(e,t){if(!this.check(e))return;let r=this.parentPrefix(e),n=this.prefixes.filter(s=>!r||r===Z2.removeNote(s)),a=[];for(let s of n)this.add(e,s,a.concat([s]),t)&&a.push(s);return a}clone(e,t){return Vn.clone(e,t)}};Am.exports=Vn});var R=v((T4,Om)=>{l();var eA=Vt(),tA=ct(),_m=ue(),Em=class extends eA{check(){return!0}prefixed(e,t){return t+e}normalize(e){return e}otherPrefixes(e,t){for(let r of tA.prefixes())if(r!==t&&e.includes(r))return!0;return!1}set(e,t){return e.prop=this.prefixed(e.prop,t),e}needCascade(e){return e._autoprefixerCascade||(e._autoprefixerCascade=this.all.options.cascade!==!1&&e.raw("before").includes(` +`)),e._autoprefixerCascade}maxPrefixed(e,t){if(t._autoprefixerMax)return t._autoprefixerMax;let r=0;for(let n of e)n=_m.removeNote(n),n.length>r&&(r=n.length);return t._autoprefixerMax=r,t._autoprefixerMax}calcBefore(e,t,r=""){let a=this.maxPrefixed(e,t)-_m.removeNote(r).length,s=t.raw("before");return a>0&&(s+=Array(a).fill(" ").join("")),s}restoreBefore(e){let t=e.raw("before").split(` +`),r=t[t.length-1];this.all.group(e).up(n=>{let a=n.raw("before").split(` 
+`),s=a[a.length-1];s.lengths.prop===n.prop&&s.value===n.value)))return this.needCascade(e)&&(n.raws.before=this.calcBefore(r,e,t)),e.parent.insertBefore(e,n)}isAlready(e,t){let r=this.all.group(e).up(n=>n.prop===t);return r||(r=this.all.group(e).down(n=>n.prop===t)),r}add(e,t,r,n){let a=this.prefixed(e.prop,t);if(!(this.isAlready(e,a)||this.otherPrefixes(e.value,t)))return this.insert(e,t,r,n)}process(e,t){if(!this.needCascade(e)){super.process(e,t);return}let r=super.process(e,t);!r||!r.length||(this.restoreBefore(e),e.raws.before=this.calcBefore(r,e))}old(e,t){return[this.prefixed(e,t)]}};Om.exports=Em});var Pm=v((P4,Tm)=>{l();Tm.exports=function i(e){return{mul:t=>new i(e*t),div:t=>new i(e/t),simplify:()=>new i(e),toString:()=>e.toString()}}});var qm=v((D4,Im)=>{l();var rA=Pm(),iA=Vt(),Yo=ue(),nA=/(min|max)-resolution\s*:\s*\d*\.?\d+(dppx|dpcm|dpi|x)/gi,sA=/(min|max)-resolution(\s*:\s*)(\d*\.?\d+)(dppx|dpcm|dpi|x)/i,Dm=class extends iA{prefixName(e,t){return e==="-moz-"?t+"--moz-device-pixel-ratio":e+t+"-device-pixel-ratio"}prefixQuery(e,t,r,n,a){return n=new rA(n),a==="dpi"?n=n.div(96):a==="dpcm"&&(n=n.mul(2.54).div(96)),n=n.simplify(),e==="-o-"&&(n=n.n+"/"+n.d),this.prefixName(e,t)+r+n}clean(e){if(!this.bad){this.bad=[];for(let t of this.prefixes)this.bad.push(this.prefixName(t,"min")),this.bad.push(this.prefixName(t,"max"))}e.params=Yo.editList(e.params,t=>t.filter(r=>this.bad.every(n=>!r.includes(n))))}process(e){let t=this.parentPrefix(e),r=t?[t]:this.prefixes;e.params=Yo.editList(e.params,(n,a)=>{for(let s of n){if(!s.includes("min-resolution")&&!s.includes("max-resolution")){a.push(s);continue}for(let o of r){let u=s.replace(nA,c=>{let f=c.match(sA);return this.prefixQuery(o,f[1],f[2],f[3],f[4])});a.push(u)}a.push(s)}return Yo.uniq(a)})}};Im.exports=Dm});var Mm=v((I4,Rm)=>{l();var Qo="(".charCodeAt(0),Jo=")".charCodeAt(0),Un="'".charCodeAt(0),Xo='"'.charCodeAt(0),Ko="\\".charCodeAt(0),Ut="/".charCodeAt(0),Zo=",".charCodeAt(0),el=":".charCodeAt(0),Wn="*".charCodeAt(0),aA="u".charCodeAt(0),oA="U".charCodeAt(0),lA="+".charCodeAt(0),uA=/^[a-f0-9?-]+$/i;Rm.exports=function(i){for(var e=[],t=i,r,n,a,s,o,u,c,f,p=0,d=t.charCodeAt(p),h=t.length,y=[{nodes:e}],x=0,w,b="",k="",S="";p{l();Bm.exports=function i(e,t,r){var n,a,s,o;for(n=0,a=e.length;n{l();function Nm(i,e){var t=i.type,r=i.value,n,a;return e&&(a=e(i))!==void 0?a:t==="word"||t==="space"?r:t==="string"?(n=i.quote||"",n+r+(i.unclosed?"":n)):t==="comment"?"/*"+r+(i.unclosed?"":"*/"):t==="div"?(i.before||"")+r+(i.after||""):Array.isArray(i.nodes)?(n=Lm(i.nodes,e),t!=="function"?n:r+"("+(i.before||"")+n+(i.after||"")+(i.unclosed?"":")")):r}function Lm(i,e){var t,r;if(Array.isArray(i)){for(t="",r=i.length-1;~r;r-=1)t=Nm(i[r],e)+t;return t}return Nm(i,e)}$m.exports=Lm});var Vm=v((M4,zm)=>{l();var Gn="-".charCodeAt(0),Hn="+".charCodeAt(0),tl=".".charCodeAt(0),fA="e".charCodeAt(0),cA="E".charCodeAt(0);function pA(i){var e=i.charCodeAt(0),t;if(e===Hn||e===Gn){if(t=i.charCodeAt(1),t>=48&&t<=57)return!0;var r=i.charCodeAt(2);return t===tl&&r>=48&&r<=57}return e===tl?(t=i.charCodeAt(1),t>=48&&t<=57):e>=48&&e<=57}zm.exports=function(i){var e=0,t=i.length,r,n,a;if(t===0||!pA(i))return!1;for(r=i.charCodeAt(e),(r===Hn||r===Gn)&&e++;e57));)e+=1;if(r=i.charCodeAt(e),n=i.charCodeAt(e+1),r===tl&&n>=48&&n<=57)for(e+=2;e57));)e+=1;if(r=i.charCodeAt(e),n=i.charCodeAt(e+1),a=i.charCodeAt(e+2),(r===fA||r===cA)&&(n>=48&&n<=57||(n===Hn||n===Gn)&&a>=48&&a<=57))for(e+=n===Hn||n===Gn?3:2;e57));)e+=1;return{number:i.slice(0,e),unit:i.slice(e)}}});var 
Yn=v((B4,Gm)=>{l();var dA=Mm(),Um=Fm(),Wm=jm();function pt(i){return this instanceof pt?(this.nodes=dA(i),this):new pt(i)}pt.prototype.toString=function(){return Array.isArray(this.nodes)?Wm(this.nodes):""};pt.prototype.walk=function(i,e){return Um(this.nodes,i,e),this};pt.unit=Vm();pt.walk=Um;pt.stringify=Wm;Gm.exports=pt});var Xm=v((F4,Jm)=>{l();var{list:hA}=me(),Hm=Yn(),mA=ct(),Ym=ii(),Qm=class{constructor(e){this.props=["transition","transition-property"],this.prefixes=e}add(e,t){let r,n,a=this.prefixes.add[e.prop],s=this.ruleVendorPrefixes(e),o=s||a&&a.prefixes||[],u=this.parse(e.value),c=u.map(h=>this.findProp(h)),f=[];if(c.some(h=>h[0]==="-"))return;for(let h of u){if(n=this.findProp(h),n[0]==="-")continue;let y=this.prefixes.add[n];if(!(!y||!y.prefixes))for(r of y.prefixes){if(s&&!s.some(w=>r.includes(w)))continue;let x=this.prefixes.prefixed(n,r);x!=="-ms-transform"&&!c.includes(x)&&(this.disabled(n,r)||f.push(this.clone(n,x,h)))}}u=u.concat(f);let p=this.stringify(u),d=this.stringify(this.cleanFromUnprefixed(u,"-webkit-"));if(o.includes("-webkit-")&&this.cloneBefore(e,`-webkit-${e.prop}`,d),this.cloneBefore(e,e.prop,d),o.includes("-o-")){let h=this.stringify(this.cleanFromUnprefixed(u,"-o-"));this.cloneBefore(e,`-o-${e.prop}`,h)}for(r of o)if(r!=="-webkit-"&&r!=="-o-"){let h=this.stringify(this.cleanOtherPrefixes(u,r));this.cloneBefore(e,r+e.prop,h)}p!==e.value&&!this.already(e,e.prop,p)&&(this.checkForWarning(t,e),e.cloneBefore(),e.value=p)}findProp(e){let t=e[0].value;if(/^\d/.test(t)){for(let[r,n]of e.entries())if(r!==0&&n.type==="word")return n.value}return t}already(e,t,r){return e.parent.some(n=>n.prop===t&&n.value===r)}cloneBefore(e,t,r){this.already(e,t,r)||e.cloneBefore({prop:t,value:r})}checkForWarning(e,t){if(t.prop!=="transition-property")return;let r=!1,n=!1;t.parent.each(a=>{if(a.type!=="decl"||a.prop.indexOf("transition-")!==0)return;let s=hA.comma(a.value);if(a.prop==="transition-property"){s.forEach(o=>{let u=this.prefixes.add[o];u&&u.prefixes&&u.prefixes.length>0&&(r=!0)});return}return n=n||s.length>1,!1}),r&&n&&t.warn(e,"Replace transition-property to transition, because Autoprefixer could not support any cases of transition-property and other transition-*")}remove(e){let t=this.parse(e.value);t=t.filter(s=>{let o=this.prefixes.remove[this.findProp(s)];return!o||!o.remove});let r=this.stringify(t);if(e.value===r)return;if(t.length===0){e.remove();return}let n=e.parent.some(s=>s.prop===e.prop&&s.value===r),a=e.parent.some(s=>s!==e&&s.prop===e.prop&&s.value.length>r.length);if(n||a){e.remove();return}e.value=r}parse(e){let t=Hm(e),r=[],n=[];for(let a of t.nodes)n.push(a),a.type==="div"&&a.value===","&&(r.push(n),n=[]);return r.push(n),r.filter(a=>a.length>0)}stringify(e){if(e.length===0)return"";let t=[];for(let r of e)r[r.length-1].type!=="div"&&r.push(this.div(e)),t=t.concat(r);return t[0].type==="div"&&(t=t.slice(1)),t[t.length-1].type==="div"&&(t=t.slice(0,-2+1||void 0)),Hm.stringify({nodes:t})}clone(e,t,r){let n=[],a=!1;for(let s of r)!a&&s.type==="word"&&s.value===e?(n.push({type:"word",value:t}),a=!0):n.push(s);return n}div(e){for(let t of e)for(let r of t)if(r.type==="div"&&r.value===",")return r;return{type:"div",value:",",after:" "}}cleanOtherPrefixes(e,t){return e.filter(r=>{let n=Ym.prefix(this.findProp(r));return n===""||n===t})}cleanFromUnprefixed(e,t){let r=e.map(a=>this.findProp(a)).filter(a=>a.slice(0,t.length)===t).map(a=>this.prefixes.unprefixed(a)),n=[];for(let a of e){let 
s=this.findProp(a),o=Ym.prefix(s);!r.includes(s)&&(o===t||o==="")&&n.push(a)}return n}disabled(e,t){let r=["order","justify-content","align-self","align-content"];if(e.includes("flex")||r.includes(e)){if(this.prefixes.options.flexbox===!1)return!0;if(this.prefixes.options.flexbox==="no-2009")return t.includes("2009")}}ruleVendorPrefixes(e){let{parent:t}=e;if(t.type!=="rule")return!1;if(!t.selector.includes(":-"))return!1;let r=mA.prefixes().filter(n=>t.selector.includes(":"+n));return r.length>0?r:!1}};Jm.exports=Qm});var Wt=v((N4,Zm)=>{l();var gA=ue(),Km=class{constructor(e,t,r,n){this.unprefixed=e,this.prefixed=t,this.string=r||t,this.regexp=n||gA.regexp(t)}check(e){return e.includes(this.string)?!!e.match(this.regexp):!1}};Zm.exports=Km});var ke=v((L4,tg)=>{l();var yA=Vt(),wA=Wt(),bA=ii(),vA=ue(),eg=class extends yA{static save(e,t){let r=t.prop,n=[];for(let a in t._autoprefixerValues){let s=t._autoprefixerValues[a];if(s===t.value)continue;let o,u=bA.prefix(r);if(u==="-pie-")continue;if(u===a){o=t.value=s,n.push(o);continue}let c=e.prefixed(r,a),f=t.parent;if(!f.every(y=>y.prop!==c)){n.push(o);continue}let p=s.replace(/\s+/," ");if(f.some(y=>y.prop===t.prop&&y.value.replace(/\s+/," ")===p)){n.push(o);continue}let h=this.clone(t,{value:s});o=t.parent.insertBefore(t,h),n.push(o)}return n}check(e){let t=e.value;return t.includes(this.name)?!!t.match(this.regexp()):!1}regexp(){return this.regexpCache||(this.regexpCache=vA.regexp(this.name))}replace(e,t){return e.replace(this.regexp(),`$1${t}$2`)}value(e){return e.raws.value&&e.raws.value.value===e.value?e.raws.value.raw:e.value}add(e,t){e._autoprefixerValues||(e._autoprefixerValues={});let r=e._autoprefixerValues[t]||this.value(e),n;do if(n=r,r=this.replace(r,t),r===!1)return;while(r!==n);e._autoprefixerValues[t]=r}old(e){return new wA(this.name,e+this.name)}};tg.exports=eg});var dt=v(($4,rg)=>{l();rg.exports={}});var il=v((j4,sg)=>{l();var ig=Yn(),xA=ke(),kA=dt().insertAreas,SA=/(^|[^-])linear-gradient\(\s*(top|left|right|bottom)/i,CA=/(^|[^-])radial-gradient\(\s*\d+(\w*|%)\s+\d+(\w*|%)\s*,/i,AA=/(!\s*)?autoprefixer:\s*ignore\s+next/i,_A=/(!\s*)?autoprefixer\s*grid:\s*(on|off|(no-)?autoplace)/i,EA=["width","height","min-width","max-width","min-height","max-height","inline-size","min-inline-size","max-inline-size","block-size","min-block-size","max-block-size"];function rl(i){return i.parent.some(e=>e.prop==="grid-template"||e.prop==="grid-template-areas")}function OA(i){let e=i.parent.some(r=>r.prop==="grid-template-rows"),t=i.parent.some(r=>r.prop==="grid-template-columns");return e&&t}var ng=class{constructor(e){this.prefixes=e}add(e,t){let r=this.prefixes.add["@resolution"],n=this.prefixes.add["@keyframes"],a=this.prefixes.add["@viewport"],s=this.prefixes.add["@supports"];e.walkAtRules(f=>{if(f.name==="keyframes"){if(!this.disabled(f,t))return n&&n.process(f)}else if(f.name==="viewport"){if(!this.disabled(f,t))return a&&a.process(f)}else if(f.name==="supports"){if(this.prefixes.options.supports!==!1&&!this.disabled(f,t))return s.process(f)}else if(f.name==="media"&&f.params.includes("-resolution")&&!this.disabled(f,t))return r&&r.process(f)}),e.walkRules(f=>{if(!this.disabled(f,t))return this.prefixes.add.selectors.map(p=>p.process(f,t))});function o(f){return f.parent.nodes.some(p=>{if(p.type!=="decl")return!1;let d=p.prop==="display"&&/(inline-)?grid/.test(p.value),h=p.prop.startsWith("grid-template"),y=/^grid-([A-z]+-)?gap/.test(p.prop);return d||h||y})}function u(f){return 
f.parent.some(p=>p.prop==="display"&&/(inline-)?flex/.test(p.value))}let c=this.gridStatus(e,t)&&this.prefixes.add["grid-area"]&&this.prefixes.add["grid-area"].prefixes;return e.walkDecls(f=>{if(this.disabledDecl(f,t))return;let p=f.parent,d=f.prop,h=f.value;if(d==="grid-row-span"){t.warn("grid-row-span is not part of final Grid Layout. Use grid-row.",{node:f});return}else if(d==="grid-column-span"){t.warn("grid-column-span is not part of final Grid Layout. Use grid-column.",{node:f});return}else if(d==="display"&&h==="box"){t.warn("You should write display: flex by final spec instead of display: box",{node:f});return}else if(d==="text-emphasis-position")(h==="under"||h==="over")&&t.warn("You should use 2 values for text-emphasis-position For example, `under left` instead of just `under`.",{node:f});else if(/^(align|justify|place)-(items|content)$/.test(d)&&u(f))(h==="start"||h==="end")&&t.warn(`${h} value has mixed support, consider using flex-${h} instead`,{node:f});else if(d==="text-decoration-skip"&&h==="ink")t.warn("Replace text-decoration-skip: ink to text-decoration-skip-ink: auto, because spec had been changed",{node:f});else{if(c&&this.gridStatus(f,t))if(f.value==="subgrid"&&t.warn("IE does not support subgrid",{node:f}),/^(align|justify|place)-items$/.test(d)&&o(f)){let x=d.replace("-items","-self");t.warn(`IE does not support ${d} on grid containers. Try using ${x} on child elements instead: ${f.parent.selector} > * { ${x}: ${f.value} }`,{node:f})}else if(/^(align|justify|place)-content$/.test(d)&&o(f))t.warn(`IE does not support ${f.prop} on grid containers`,{node:f});else if(d==="display"&&f.value==="contents"){t.warn("Please do not use display: contents; if you have grid setting enabled",{node:f});return}else if(f.prop==="grid-gap"){let x=this.gridStatus(f,t);x==="autoplace"&&!OA(f)&&!rl(f)?t.warn("grid-gap only works if grid-template(-areas) is being used or both rows and columns have been declared and cells have not been manually placed inside the explicit grid",{node:f}):(x===!0||x==="no-autoplace")&&!rl(f)&&t.warn("grid-gap only works if grid-template(-areas) is being used",{node:f})}else if(d==="grid-auto-columns"){t.warn("grid-auto-columns is not supported by IE",{node:f});return}else if(d==="grid-auto-rows"){t.warn("grid-auto-rows is not supported by IE",{node:f});return}else if(d==="grid-auto-flow"){let x=p.some(b=>b.prop==="grid-template-rows"),w=p.some(b=>b.prop==="grid-template-columns");rl(f)?t.warn("grid-auto-flow is not supported by IE",{node:f}):h.includes("dense")?t.warn("grid-auto-flow: dense is not supported by IE",{node:f}):!x&&!w&&t.warn("grid-auto-flow works only if grid-template-rows and grid-template-columns are present in the same rule",{node:f});return}else if(h.includes("auto-fit")){t.warn("auto-fit value is not supported by IE",{node:f,word:"auto-fit"});return}else if(h.includes("auto-fill")){t.warn("auto-fill value is not supported by IE",{node:f,word:"auto-fill"});return}else d.startsWith("grid-template")&&h.includes("[")&&t.warn("Autoprefixer currently does not support line names. Try using grid-template-areas instead.",{node:f,word:"["});if(h.includes("radial-gradient"))if(CA.test(f.value))t.warn("Gradient has outdated direction syntax. New syntax is like `closest-side at 0 0` instead of `0 0, closest-side`.",{node:f});else{let x=ig(h);for(let w of x.nodes)if(w.type==="function"&&w.value==="radial-gradient")for(let b of w.nodes)b.type==="word"&&(b.value==="cover"?t.warn("Gradient has outdated direction syntax. 
Replace `cover` to `farthest-corner`.",{node:f}):b.value==="contain"&&t.warn("Gradient has outdated direction syntax. Replace `contain` to `closest-side`.",{node:f}))}h.includes("linear-gradient")&&SA.test(h)&&t.warn("Gradient has outdated direction syntax. New syntax is like `to left` instead of `right`.",{node:f})}EA.includes(f.prop)&&(f.value.includes("-fill-available")||(f.value.includes("fill-available")?t.warn("Replace fill-available to stretch, because spec had been changed",{node:f}):f.value.includes("fill")&&ig(h).nodes.some(w=>w.type==="word"&&w.value==="fill")&&t.warn("Replace fill to stretch, because spec had been changed",{node:f})));let y;if(f.prop==="transition"||f.prop==="transition-property")return this.prefixes.transition.add(f,t);if(f.prop==="align-self"){if(this.displayType(f)!=="grid"&&this.prefixes.options.flexbox!==!1&&(y=this.prefixes.add["align-self"],y&&y.prefixes&&y.process(f)),this.gridStatus(f,t)!==!1&&(y=this.prefixes.add["grid-row-align"],y&&y.prefixes))return y.process(f,t)}else if(f.prop==="justify-self"){if(this.gridStatus(f,t)!==!1&&(y=this.prefixes.add["grid-column-align"],y&&y.prefixes))return y.process(f,t)}else if(f.prop==="place-self"){if(y=this.prefixes.add["place-self"],y&&y.prefixes&&this.gridStatus(f,t)!==!1)return y.process(f,t)}else if(y=this.prefixes.add[f.prop],y&&y.prefixes)return y.process(f,t)}),this.gridStatus(e,t)&&kA(e,this.disabled),e.walkDecls(f=>{if(this.disabledValue(f,t))return;let p=this.prefixes.unprefixed(f.prop),d=this.prefixes.values("add",p);if(Array.isArray(d))for(let h of d)h.process&&h.process(f,t);xA.save(this.prefixes,f)})}remove(e,t){let r=this.prefixes.remove["@resolution"];e.walkAtRules((n,a)=>{this.prefixes.remove[`@${n.name}`]?this.disabled(n,t)||n.parent.removeChild(a):n.name==="media"&&n.params.includes("-resolution")&&r&&r.clean(n)});for(let n of this.prefixes.remove.selectors)e.walkRules((a,s)=>{n.check(a)&&(this.disabled(a,t)||a.parent.removeChild(s))});return e.walkDecls((n,a)=>{if(this.disabled(n,t))return;let s=n.parent,o=this.prefixes.unprefixed(n.prop);if((n.prop==="transition"||n.prop==="transition-property")&&this.prefixes.transition.remove(n),this.prefixes.remove[n.prop]&&this.prefixes.remove[n.prop].remove){let u=this.prefixes.group(n).down(c=>this.prefixes.normalize(c.prop)===o);if(o==="flex-flow"&&(u=!0),n.prop==="-webkit-box-orient"){let c={"flex-direction":!0,"flex-flow":!0};if(!n.parent.some(f=>c[f.prop]))return}if(u&&!this.withHackValue(n)){n.raw("before").includes(` +`)&&this.reduceSpaces(n),s.removeChild(a);return}}for(let u of this.prefixes.values("remove",o)){if(!u.check||!u.check(n.value))continue;if(o=u.unprefixed,this.prefixes.group(n).down(f=>f.value.includes(o))){s.removeChild(a);return}}})}withHackValue(e){return e.prop==="-webkit-background-clip"&&e.value==="text"}disabledValue(e,t){return this.gridStatus(e,t)===!1&&e.type==="decl"&&e.prop==="display"&&e.value.includes("grid")||this.prefixes.options.flexbox===!1&&e.type==="decl"&&e.prop==="display"&&e.value.includes("flex")||e.type==="decl"&&e.prop==="content"?!0:this.disabled(e,t)}disabledDecl(e,t){if(this.gridStatus(e,t)===!1&&e.type==="decl"&&(e.prop.includes("grid")||e.prop==="justify-items"))return!0;if(this.prefixes.options.flexbox===!1&&e.type==="decl"){let r=["order","justify-content","align-items","align-content"];if(e.prop.includes("flex")||r.includes(e.prop))return!0}return this.disabled(e,t)}disabled(e,t){if(!e)return!1;if(e._autoprefixerDisabled!==void 0)return e._autoprefixerDisabled;if(e.parent){let 
n=e.prev();if(n&&n.type==="comment"&&AA.test(n.text))return e._autoprefixerDisabled=!0,e._autoprefixerSelfDisabled=!0,!0}let r=null;if(e.nodes){let n;e.each(a=>{a.type==="comment"&&/(!\s*)?autoprefixer:\s*(off|on)/i.test(a.text)&&(typeof n!="undefined"?t.warn("Second Autoprefixer control comment was ignored. Autoprefixer applies control comment to whole block, not to next rules.",{node:a}):n=/on/i.test(a.text))}),n!==void 0&&(r=!n)}if(!e.nodes||r===null)if(e.parent){let n=this.disabled(e.parent,t);e.parent._autoprefixerSelfDisabled===!0?r=!1:r=n}else r=!1;return e._autoprefixerDisabled=r,r}reduceSpaces(e){let t=!1;if(this.prefixes.group(e).up(()=>(t=!0,!0)),t)return;let r=e.raw("before").split(` +`),n=r[r.length-1].length,a=!1;this.prefixes.group(e).down(s=>{r=s.raw("before").split(` +`);let o=r.length-1;r[o].length>n&&(a===!1&&(a=r[o].length-n),r[o]=r[o].slice(0,-a),s.raws.before=r.join(` +`))})}displayType(e){for(let t of e.parent.nodes)if(t.prop==="display"){if(t.value.includes("flex"))return"flex";if(t.value.includes("grid"))return"grid"}return!1}gridStatus(e,t){if(!e)return!1;if(e._autoprefixerGridStatus!==void 0)return e._autoprefixerGridStatus;let r=null;if(e.nodes){let n;e.each(a=>{if(a.type==="comment"&&_A.test(a.text)){let s=/:\s*autoplace/i.test(a.text),o=/no-autoplace/i.test(a.text);typeof n!="undefined"?t.warn("Second Autoprefixer grid control comment was ignored. Autoprefixer applies control comments to the whole block, not to the next rules.",{node:a}):s?n="autoplace":o?n=!0:n=/on/i.test(a.text)}}),n!==void 0&&(r=n)}if(e.type==="atrule"&&e.name==="supports"){let n=e.params;n.includes("grid")&&n.includes("auto")&&(r=!1)}if(!e.nodes||r===null)if(e.parent){let n=this.gridStatus(e.parent,t);e.parent._autoprefixerSelfDisabled===!0?r=!1:r=n}else typeof this.prefixes.options.grid!="undefined"?r=this.prefixes.options.grid:typeof m.env.AUTOPREFIXER_GRID!="undefined"?m.env.AUTOPREFIXER_GRID==="autoplace"?r="autoplace":r=!0:r=!1;return e._autoprefixerGridStatus=r,r}};sg.exports=ng});var og=v((z4,ag)=>{l();ag.exports={A:{A:{"2":"K E F G A B JC"},B:{"1":"C L M H N D O P Q R S T U V W X Y Z a b c d e f g h i j n o p q r s t u v w x y z I"},C:{"1":"2 3 4 5 6 7 8 9 AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB ZB aB bB cB 0B dB 1B eB fB gB hB iB jB kB lB mB nB oB m pB qB rB sB tB P Q R 2B S T U V W X Y Z a b c d e f g h i j n o p q r s t u v w x y z I uB 3B 4B","2":"0 1 KC zB J K E F G A B C L M H N D O k l LC MC"},D:{"1":"8 9 AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB ZB aB bB cB 0B dB 1B eB fB gB hB iB jB kB lB mB nB oB m pB qB rB sB tB P Q R S T U V W X Y Z a b c d e f g h i j n o p q r s t u v w x y z I uB 3B 4B","2":"0 1 2 3 4 5 6 7 J K E F G A B C L M H N D O k l"},E:{"1":"G A B C L M H D RC 6B vB wB 7B SC TC 8B 9B xB AC yB BC CC DC EC FC GC UC","2":"0 J K E F NC 5B OC PC QC"},F:{"1":"1 2 3 4 5 6 7 8 9 H N D O k l AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB m pB qB rB sB tB P Q R 2B S T U V W X Y Z a b c d e f g h i j wB","2":"G B C VC WC XC YC vB HC ZC"},G:{"1":"D fC gC hC iC jC kC lC mC nC oC pC qC rC sC tC 8B 9B xB AC yB BC CC DC EC FC GC","2":"F 5B aC IC bC cC dC eC"},H:{"1":"uC"},I:{"1":"I zC 0C","2":"zB J vC wC xC yC IC"},J:{"2":"E A"},K:{"1":"m","2":"A B C vB HC wB"},L:{"1":"I"},M:{"1":"uB"},N:{"2":"A B"},O:{"1":"xB"},P:{"1":"J k l 1C 2C 3C 4C 5C 6B 6C 7C 8C 9C AD yB BD CD DD"},Q:{"1":"7B"},R:{"1":"ED"},S:{"1":"FD GD"}},B:4,C:"CSS Feature 
Queries"}});var cg=v((V4,fg)=>{l();function lg(i){return i[i.length-1]}var ug={parse(i){let e=[""],t=[e];for(let r of i){if(r==="("){e=[""],lg(t).push(e),t.push(e);continue}if(r===")"){t.pop(),e=lg(t),e.push("");continue}e[e.length-1]+=r}return t[0]},stringify(i){let e="";for(let t of i){if(typeof t=="object"){e+=`(${ug.stringify(t)})`;continue}e+=t}return e}};fg.exports=ug});var gg=v((U4,mg)=>{l();var TA=og(),{feature:PA}=(zn(),jn),{parse:DA}=me(),IA=ct(),nl=cg(),qA=ke(),RA=ue(),pg=PA(TA),dg=[];for(let i in pg.stats){let e=pg.stats[i];for(let t in e){let r=e[t];/y/.test(r)&&dg.push(i+" "+t)}}var hg=class{constructor(e,t){this.Prefixes=e,this.all=t}prefixer(){if(this.prefixerCache)return this.prefixerCache;let e=this.all.browsers.selected.filter(r=>dg.includes(r)),t=new IA(this.all.browsers.data,e,this.all.options);return this.prefixerCache=new this.Prefixes(this.all.data,t,this.all.options),this.prefixerCache}parse(e){let t=e.split(":"),r=t[0],n=t[1];return n||(n=""),[r.trim(),n.trim()]}virtual(e){let[t,r]=this.parse(e),n=DA("a{}").first;return n.append({prop:t,value:r,raws:{before:""}}),n}prefixed(e){let t=this.virtual(e);if(this.disabled(t.first))return t.nodes;let r={warn:()=>null},n=this.prefixer().add[t.first.prop];n&&n.process&&n.process(t.first,r);for(let a of t.nodes){for(let s of this.prefixer().values("add",t.first.prop))s.process(a);qA.save(this.all,a)}return t.nodes}isNot(e){return typeof e=="string"&&/not\s*/i.test(e)}isOr(e){return typeof e=="string"&&/\s*or\s*/i.test(e)}isProp(e){return typeof e=="object"&&e.length===1&&typeof e[0]=="string"}isHack(e,t){return!new RegExp(`(\\(|\\s)${RA.escapeRegexp(t)}:`).test(e)}toRemove(e,t){let[r,n]=this.parse(e),a=this.all.unprefixed(r),s=this.all.cleaner();if(s.remove[r]&&s.remove[r].remove&&!this.isHack(t,a))return!0;for(let o of s.values("remove",a))if(o.check(n))return!0;return!1}remove(e,t){let r=0;for(;rtypeof t!="object"?t:t.length===1&&typeof t[0]=="object"?this.cleanBrackets(t[0]):this.cleanBrackets(t))}convert(e){let t=[""];for(let r of e)t.push([`${r.prop}: ${r.value}`]),t.push(" or ");return t[t.length-1]="",t}normalize(e){if(typeof e!="object")return e;if(e=e.filter(t=>t!==""),typeof e[0]=="string"){let t=e[0].trim();if(t.includes(":")||t==="selector"||t==="not selector")return[nl.stringify(e)]}return e.map(t=>this.normalize(t))}add(e,t){return e.map(r=>{if(this.isProp(r)){let n=this.prefixed(r[0]);return n.length>1?this.convert(n):r}return typeof r=="object"?this.add(r,t):r})}process(e){let t=nl.parse(e.params);t=this.normalize(t),t=this.remove(t,e.params),t=this.add(t,e.params),t=this.cleanBrackets(t),e.params=nl.stringify(t)}disabled(e){if(!this.all.options.grid&&(e.prop==="display"&&e.value.includes("grid")||e.prop.includes("grid")||e.prop==="justify-items"))return!0;if(this.all.options.flexbox===!1){if(e.prop==="display"&&e.value.includes("flex"))return!0;let t=["order","justify-content","align-items","align-content"];if(e.prop.includes("flex")||t.includes(e.prop))return!0}return!1}};mg.exports=hg});var bg=v((W4,wg)=>{l();var yg=class{constructor(e,t){this.prefix=t,this.prefixed=e.prefixed(this.prefix),this.regexp=e.regexp(this.prefix),this.prefixeds=e.possible().map(r=>[e.prefixed(r),e.regexp(r)]),this.unprefixed=e.name,this.nameRegexp=e.regexp()}isHack(e){let t=e.parent.index(e)+1,r=e.parent.nodes;for(;t{l();var{list:MA}=me(),BA=bg(),FA=Vt(),NA=ct(),LA=ue(),vg=class extends FA{constructor(e,t,r){super(e,t,r);this.regexpCache=new Map}check(e){return 
e.selector.includes(this.name)?!!e.selector.match(this.regexp()):!1}prefixed(e){return this.name.replace(/^(\W*)/,`$1${e}`)}regexp(e){if(!this.regexpCache.has(e)){let t=e?this.prefixed(e):this.name;this.regexpCache.set(e,new RegExp(`(^|[^:"'=])${LA.escapeRegexp(t)}`,"gi"))}return this.regexpCache.get(e)}possible(){return NA.prefixes()}prefixeds(e){if(e._autoprefixerPrefixeds){if(e._autoprefixerPrefixeds[this.name])return e._autoprefixerPrefixeds}else e._autoprefixerPrefixeds={};let t={};if(e.selector.includes(",")){let n=MA.comma(e.selector).filter(a=>a.includes(this.name));for(let a of this.possible())t[a]=n.map(s=>this.replace(s,a)).join(", ")}else for(let r of this.possible())t[r]=this.replace(e.selector,r);return e._autoprefixerPrefixeds[this.name]=t,e._autoprefixerPrefixeds}already(e,t,r){let n=e.parent.index(e)-1;for(;n>=0;){let a=e.parent.nodes[n];if(a.type!=="rule")return!1;let s=!1;for(let o in t[this.name]){let u=t[this.name][o];if(a.selector===u){if(r===o)return!0;s=!0;break}}if(!s)return!1;n-=1}return!1}replace(e,t){return e.replace(this.regexp(),`$1${this.prefixed(t)}`)}add(e,t){let r=this.prefixeds(e);if(this.already(e,r,t))return;let n=this.clone(e,{selector:r[this.name][t]});e.parent.insertBefore(e,n)}old(e){return new BA(this,e)}};xg.exports=vg});var Cg=v((H4,Sg)=>{l();var $A=Vt(),kg=class extends $A{add(e,t){let r=t+e.name;if(e.parent.some(s=>s.name===r&&s.params===e.params))return;let a=this.clone(e,{name:r});return e.parent.insertBefore(e,a)}process(e){let t=this.parentPrefix(e);for(let r of this.prefixes)(!t||t===r)&&this.add(e,r)}};Sg.exports=kg});var _g=v((Y4,Ag)=>{l();var jA=Gt(),sl=class extends jA{prefixed(e){return e==="-webkit-"?":-webkit-full-screen":e==="-moz-"?":-moz-full-screen":`:${e}fullscreen`}};sl.names=[":fullscreen"];Ag.exports=sl});var Og=v((Q4,Eg)=>{l();var zA=Gt(),al=class extends zA{possible(){return super.possible().concat(["-moz- old","-ms- old"])}prefixed(e){return e==="-webkit-"?"::-webkit-input-placeholder":e==="-ms-"?"::-ms-input-placeholder":e==="-ms- old"?":-ms-input-placeholder":e==="-moz- old"?":-moz-placeholder":`::${e}placeholder`}};al.names=["::placeholder"];Eg.exports=al});var Pg=v((J4,Tg)=>{l();var VA=Gt(),ol=class extends VA{prefixed(e){return e==="-ms-"?":-ms-input-placeholder":`:${e}placeholder-shown`}};ol.names=[":placeholder-shown"];Tg.exports=ol});var Ig=v((X4,Dg)=>{l();var UA=Gt(),WA=ue(),ll=class extends UA{constructor(e,t,r){super(e,t,r);this.prefixes&&(this.prefixes=WA.uniq(this.prefixes.map(n=>"-webkit-")))}prefixed(e){return e==="-webkit-"?"::-webkit-file-upload-button":`::${e}file-selector-button`}};ll.names=["::file-selector-button"];Dg.exports=ll});var de=v((K4,qg)=>{l();qg.exports=function(i){let e;return i==="-webkit- 2009"||i==="-moz-"?e=2009:i==="-ms-"?e=2012:i==="-webkit-"&&(e="final"),i==="-webkit- 2009"&&(i="-webkit-"),[e,i]}});var Fg=v((Z4,Bg)=>{l();var Rg=me().list,Mg=de(),GA=R(),Ht=class extends GA{prefixed(e,t){let r;return[r,t]=Mg(t),r===2009?t+"box-flex":super.prefixed(e,t)}normalize(){return"flex"}set(e,t){let r=Mg(t)[0];if(r===2009)return e.value=Rg.space(e.value)[0],e.value=Ht.oldValues[e.value]||e.value,super.set(e,t);if(r===2012){let n=Rg.space(e.value);n.length===3&&n[2]==="0"&&(e.value=n.slice(0,2).concat("0px").join(" "))}return super.set(e,t)}};Ht.names=["flex","box-flex"];Ht.oldValues={auto:"1",none:"0"};Bg.exports=Ht});var $g=v((eq,Lg)=>{l();var Ng=de(),HA=R(),ul=class extends HA{prefixed(e,t){let 
r;return[r,t]=Ng(t),r===2009?t+"box-ordinal-group":r===2012?t+"flex-order":super.prefixed(e,t)}normalize(){return"order"}set(e,t){return Ng(t)[0]===2009&&/\d/.test(e.value)?(e.value=(parseInt(e.value)+1).toString(),super.set(e,t)):super.set(e,t)}};ul.names=["order","flex-order","box-ordinal-group"];Lg.exports=ul});var zg=v((tq,jg)=>{l();var YA=R(),fl=class extends YA{check(e){let t=e.value;return!t.toLowerCase().includes("alpha(")&&!t.includes("DXImageTransform.Microsoft")&&!t.includes("data:image/svg+xml")}};fl.names=["filter"];jg.exports=fl});var Ug=v((rq,Vg)=>{l();var QA=R(),cl=class extends QA{insert(e,t,r,n){if(t!=="-ms-")return super.insert(e,t,r);let a=this.clone(e),s=e.prop.replace(/end$/,"start"),o=t+e.prop.replace(/end$/,"span");if(!e.parent.some(u=>u.prop===o)){if(a.prop=o,e.value.includes("span"))a.value=e.value.replace(/span\s/i,"");else{let u;if(e.parent.walkDecls(s,c=>{u=c}),u){let c=Number(e.value)-Number(u.value)+"";a.value=c}else e.warn(n,`Can not prefix ${e.prop} (${s} is not found)`)}e.cloneBefore(a)}}};cl.names=["grid-row-end","grid-column-end"];Vg.exports=cl});var Gg=v((iq,Wg)=>{l();var JA=R(),pl=class extends JA{check(e){return!e.value.split(/\s+/).some(t=>{let r=t.toLowerCase();return r==="reverse"||r==="alternate-reverse"})}};pl.names=["animation","animation-direction"];Wg.exports=pl});var Yg=v((nq,Hg)=>{l();var XA=de(),KA=R(),dl=class extends KA{insert(e,t,r){let n;if([n,t]=XA(t),n!==2009)return super.insert(e,t,r);let a=e.value.split(/\s+/).filter(p=>p!=="wrap"&&p!=="nowrap"&&"wrap-reverse");if(a.length===0||e.parent.some(p=>p.prop===t+"box-orient"||p.prop===t+"box-direction"))return;let o=a[0],u=o.includes("row")?"horizontal":"vertical",c=o.includes("reverse")?"reverse":"normal",f=this.clone(e);return f.prop=t+"box-orient",f.value=u,this.needCascade(e)&&(f.raws.before=this.calcBefore(r,e,t)),e.parent.insertBefore(e,f),f=this.clone(e),f.prop=t+"box-direction",f.value=c,this.needCascade(e)&&(f.raws.before=this.calcBefore(r,e,t)),e.parent.insertBefore(e,f)}};dl.names=["flex-flow","box-direction","box-orient"];Hg.exports=dl});var Jg=v((sq,Qg)=>{l();var ZA=de(),e_=R(),hl=class extends e_{normalize(){return"flex"}prefixed(e,t){let r;return[r,t]=ZA(t),r===2009?t+"box-flex":r===2012?t+"flex-positive":super.prefixed(e,t)}};hl.names=["flex-grow","flex-positive"];Qg.exports=hl});var Kg=v((aq,Xg)=>{l();var t_=de(),r_=R(),ml=class extends r_{set(e,t){if(t_(t)[0]!==2009)return super.set(e,t)}};ml.names=["flex-wrap"];Xg.exports=ml});var ey=v((oq,Zg)=>{l();var i_=R(),Yt=dt(),gl=class extends i_{insert(e,t,r,n){if(t!=="-ms-")return super.insert(e,t,r);let a=Yt.parse(e),[s,o]=Yt.translate(a,0,2),[u,c]=Yt.translate(a,1,3);[["grid-row",s],["grid-row-span",o],["grid-column",u],["grid-column-span",c]].forEach(([f,p])=>{Yt.insertDecl(e,f,p)}),Yt.warnTemplateSelectorNotFound(e,n),Yt.warnIfGridRowColumnExists(e,n)}};gl.names=["grid-area"];Zg.exports=gl});var ry=v((lq,ty)=>{l();var n_=R(),ni=dt(),yl=class extends n_{insert(e,t,r){if(t!=="-ms-")return super.insert(e,t,r);if(e.parent.some(s=>s.prop==="-ms-grid-row-align"))return;let[[n,a]]=ni.parse(e);a?(ni.insertDecl(e,"grid-row-align",n),ni.insertDecl(e,"grid-column-align",a)):(ni.insertDecl(e,"grid-row-align",n),ni.insertDecl(e,"grid-column-align",n))}};yl.names=["place-self"];ty.exports=yl});var ny=v((uq,iy)=>{l();var s_=R(),wl=class extends s_{check(e){let t=e.value;return!t.includes("/")||t.includes("span")}normalize(e){return e.replace("-start","")}prefixed(e,t){let r=super.prefixed(e,t);return 
t==="-ms-"&&(r=r.replace("-start","")),r}};wl.names=["grid-row-start","grid-column-start"];iy.exports=wl});var oy=v((fq,ay)=>{l();var sy=de(),a_=R(),Qt=class extends a_{check(e){return e.parent&&!e.parent.some(t=>t.prop&&t.prop.startsWith("grid-"))}prefixed(e,t){let r;return[r,t]=sy(t),r===2012?t+"flex-item-align":super.prefixed(e,t)}normalize(){return"align-self"}set(e,t){let r=sy(t)[0];if(r===2012)return e.value=Qt.oldValues[e.value]||e.value,super.set(e,t);if(r==="final")return super.set(e,t)}};Qt.names=["align-self","flex-item-align"];Qt.oldValues={"flex-end":"end","flex-start":"start"};ay.exports=Qt});var uy=v((cq,ly)=>{l();var o_=R(),l_=ue(),bl=class extends o_{constructor(e,t,r){super(e,t,r);this.prefixes&&(this.prefixes=l_.uniq(this.prefixes.map(n=>n==="-ms-"?"-webkit-":n)))}};bl.names=["appearance"];ly.exports=bl});var py=v((pq,cy)=>{l();var fy=de(),u_=R(),vl=class extends u_{normalize(){return"flex-basis"}prefixed(e,t){let r;return[r,t]=fy(t),r===2012?t+"flex-preferred-size":super.prefixed(e,t)}set(e,t){let r;if([r,t]=fy(t),r===2012||r==="final")return super.set(e,t)}};vl.names=["flex-basis","flex-preferred-size"];cy.exports=vl});var hy=v((dq,dy)=>{l();var f_=R(),xl=class extends f_{normalize(){return this.name.replace("box-image","border")}prefixed(e,t){let r=super.prefixed(e,t);return t==="-webkit-"&&(r=r.replace("border","box-image")),r}};xl.names=["mask-border","mask-border-source","mask-border-slice","mask-border-width","mask-border-outset","mask-border-repeat","mask-box-image","mask-box-image-source","mask-box-image-slice","mask-box-image-width","mask-box-image-outset","mask-box-image-repeat"];dy.exports=xl});var gy=v((hq,my)=>{l();var c_=R(),Le=class extends c_{insert(e,t,r){let n=e.prop==="mask-composite",a;n?a=e.value.split(","):a=e.value.match(Le.regexp)||[],a=a.map(c=>c.trim()).filter(c=>c);let s=a.length,o;if(s&&(o=this.clone(e),o.value=a.map(c=>Le.oldValues[c]||c).join(", "),a.includes("intersect")&&(o.value+=", xor"),o.prop=t+"mask-composite"),n)return s?(this.needCascade(e)&&(o.raws.before=this.calcBefore(r,e,t)),e.parent.insertBefore(e,o)):void 0;let u=this.clone(e);return u.prop=t+u.prop,s&&(u.value=u.value.replace(Le.regexp,"")),this.needCascade(e)&&(u.raws.before=this.calcBefore(r,e,t)),e.parent.insertBefore(e,u),s?(this.needCascade(e)&&(o.raws.before=this.calcBefore(r,e,t)),e.parent.insertBefore(e,o)):e}};Le.names=["mask","mask-composite"];Le.oldValues={add:"source-over",subtract:"source-out",intersect:"source-in",exclude:"xor"};Le.regexp=new RegExp(`\\s+(${Object.keys(Le.oldValues).join("|")})\\b(?!\\))\\s*(?=[,])`,"ig");my.exports=Le});var by=v((mq,wy)=>{l();var yy=de(),p_=R(),Jt=class extends p_{prefixed(e,t){let r;return[r,t]=yy(t),r===2009?t+"box-align":r===2012?t+"flex-align":super.prefixed(e,t)}normalize(){return"align-items"}set(e,t){let r=yy(t)[0];return(r===2009||r===2012)&&(e.value=Jt.oldValues[e.value]||e.value),super.set(e,t)}};Jt.names=["align-items","flex-align","box-align"];Jt.oldValues={"flex-end":"end","flex-start":"start"};wy.exports=Jt});var xy=v((gq,vy)=>{l();var d_=R(),kl=class extends d_{set(e,t){return t==="-ms-"&&e.value==="contain"&&(e.value="element"),super.set(e,t)}insert(e,t,r){if(!(e.value==="all"&&t==="-ms-"))return super.insert(e,t,r)}};kl.names=["user-select"];vy.exports=kl});var Cy=v((yq,Sy)=>{l();var ky=de(),h_=R(),Sl=class extends h_{normalize(){return"flex-shrink"}prefixed(e,t){let r;return[r,t]=ky(t),r===2012?t+"flex-negative":super.prefixed(e,t)}set(e,t){let r;if([r,t]=ky(t),r===2012||r==="final")return 
super.set(e,t)}};Sl.names=["flex-shrink","flex-negative"];Sy.exports=Sl});var _y=v((wq,Ay)=>{l();var m_=R(),Cl=class extends m_{prefixed(e,t){return`${t}column-${e}`}normalize(e){return e.includes("inside")?"break-inside":e.includes("before")?"break-before":"break-after"}set(e,t){return(e.prop==="break-inside"&&e.value==="avoid-column"||e.value==="avoid-page")&&(e.value="avoid"),super.set(e,t)}insert(e,t,r){if(e.prop!=="break-inside")return super.insert(e,t,r);if(!(/region/i.test(e.value)||/page/i.test(e.value)))return super.insert(e,t,r)}};Cl.names=["break-inside","page-break-inside","column-break-inside","break-before","page-break-before","column-break-before","break-after","page-break-after","column-break-after"];Ay.exports=Cl});var Oy=v((bq,Ey)=>{l();var g_=R(),Al=class extends g_{prefixed(e,t){return t+"print-color-adjust"}normalize(){return"color-adjust"}};Al.names=["color-adjust","print-color-adjust"];Ey.exports=Al});var Py=v((vq,Ty)=>{l();var y_=R(),Xt=class extends y_{insert(e,t,r){if(t==="-ms-"){let n=this.set(this.clone(e),t);this.needCascade(e)&&(n.raws.before=this.calcBefore(r,e,t));let a="ltr";return e.parent.nodes.forEach(s=>{s.prop==="direction"&&(s.value==="rtl"||s.value==="ltr")&&(a=s.value)}),n.value=Xt.msValues[a][e.value]||e.value,e.parent.insertBefore(e,n)}return super.insert(e,t,r)}};Xt.names=["writing-mode"];Xt.msValues={ltr:{"horizontal-tb":"lr-tb","vertical-rl":"tb-rl","vertical-lr":"tb-lr"},rtl:{"horizontal-tb":"rl-tb","vertical-rl":"bt-rl","vertical-lr":"bt-lr"}};Ty.exports=Xt});var Iy=v((xq,Dy)=>{l();var w_=R(),_l=class extends w_{set(e,t){return e.value=e.value.replace(/\s+fill(\s)/,"$1"),super.set(e,t)}};_l.names=["border-image"];Dy.exports=_l});var My=v((kq,Ry)=>{l();var qy=de(),b_=R(),Kt=class extends b_{prefixed(e,t){let r;return[r,t]=qy(t),r===2012?t+"flex-line-pack":super.prefixed(e,t)}normalize(){return"align-content"}set(e,t){let r=qy(t)[0];if(r===2012)return e.value=Kt.oldValues[e.value]||e.value,super.set(e,t);if(r==="final")return super.set(e,t)}};Kt.names=["align-content","flex-line-pack"];Kt.oldValues={"flex-end":"end","flex-start":"start","space-between":"justify","space-around":"distribute"};Ry.exports=Kt});var Fy=v((Sq,By)=>{l();var v_=R(),Se=class extends v_{prefixed(e,t){return t==="-moz-"?t+(Se.toMozilla[e]||e):super.prefixed(e,t)}normalize(e){return Se.toNormal[e]||e}};Se.names=["border-radius"];Se.toMozilla={};Se.toNormal={};for(let i of["top","bottom"])for(let e of["left","right"]){let t=`border-${i}-${e}-radius`,r=`border-radius-${i}${e}`;Se.names.push(t),Se.names.push(r),Se.toMozilla[t]=r,Se.toNormal[r]=t}By.exports=Se});var Ly=v((Cq,Ny)=>{l();var x_=R(),El=class extends x_{prefixed(e,t){return e.includes("-start")?t+e.replace("-block-start","-before"):t+e.replace("-block-end","-after")}normalize(e){return e.includes("-before")?e.replace("-before","-block-start"):e.replace("-after","-block-end")}};El.names=["border-block-start","border-block-end","margin-block-start","margin-block-end","padding-block-start","padding-block-end","border-before","border-after","margin-before","margin-after","padding-before","padding-after"];Ny.exports=El});var jy=v((Aq,$y)=>{l();var k_=R(),{parseTemplate:S_,warnMissedAreas:C_,getGridGap:A_,warnGridGap:__,inheritGridGap:E_}=dt(),Ol=class extends k_{insert(e,t,r,n){if(t!=="-ms-")return super.insert(e,t,r);if(e.parent.some(h=>h.prop==="-ms-grid-rows"))return;let a=A_(e),s=E_(e,a),{rows:o,columns:u,areas:c}=S_({decl:e,gap:s||a}),f=Object.keys(c).length>0,p=Boolean(o),d=Boolean(u);return 
__({gap:a,hasColumns:d,decl:e,result:n}),C_(c,e,n),(p&&d||f)&&e.cloneBefore({prop:"-ms-grid-rows",value:o,raws:{}}),d&&e.cloneBefore({prop:"-ms-grid-columns",value:u,raws:{}}),e}};Ol.names=["grid-template"];$y.exports=Ol});var Vy=v((_q,zy)=>{l();var O_=R(),Tl=class extends O_{prefixed(e,t){return t+e.replace("-inline","")}normalize(e){return e.replace(/(margin|padding|border)-(start|end)/,"$1-inline-$2")}};Tl.names=["border-inline-start","border-inline-end","margin-inline-start","margin-inline-end","padding-inline-start","padding-inline-end","border-start","border-end","margin-start","margin-end","padding-start","padding-end"];zy.exports=Tl});var Wy=v((Eq,Uy)=>{l();var T_=R(),Pl=class extends T_{check(e){return!e.value.includes("flex-")&&e.value!=="baseline"}prefixed(e,t){return t+"grid-row-align"}normalize(){return"align-self"}};Pl.names=["grid-row-align"];Uy.exports=Pl});var Hy=v((Oq,Gy)=>{l();var P_=R(),Zt=class extends P_{keyframeParents(e){let{parent:t}=e;for(;t;){if(t.type==="atrule"&&t.name==="keyframes")return!0;({parent:t}=t)}return!1}contain3d(e){if(e.prop==="transform-origin")return!1;for(let t of Zt.functions3d)if(e.value.includes(`${t}(`))return!0;return!1}set(e,t){return e=super.set(e,t),t==="-ms-"&&(e.value=e.value.replace(/rotatez/gi,"rotate")),e}insert(e,t,r){if(t==="-ms-"){if(!this.contain3d(e)&&!this.keyframeParents(e))return super.insert(e,t,r)}else if(t==="-o-"){if(!this.contain3d(e))return super.insert(e,t,r)}else return super.insert(e,t,r)}};Zt.names=["transform","transform-origin"];Zt.functions3d=["matrix3d","translate3d","translateZ","scale3d","scaleZ","rotate3d","rotateX","rotateY","perspective"];Gy.exports=Zt});var Jy=v((Tq,Qy)=>{l();var Yy=de(),D_=R(),Dl=class extends D_{normalize(){return"flex-direction"}insert(e,t,r){let n;if([n,t]=Yy(t),n!==2009)return super.insert(e,t,r);if(e.parent.some(f=>f.prop===t+"box-orient"||f.prop===t+"box-direction"))return;let s=e.value,o,u;s==="inherit"||s==="initial"||s==="unset"?(o=s,u=s):(o=s.includes("row")?"horizontal":"vertical",u=s.includes("reverse")?"reverse":"normal");let c=this.clone(e);return c.prop=t+"box-orient",c.value=o,this.needCascade(e)&&(c.raws.before=this.calcBefore(r,e,t)),e.parent.insertBefore(e,c),c=this.clone(e),c.prop=t+"box-direction",c.value=u,this.needCascade(e)&&(c.raws.before=this.calcBefore(r,e,t)),e.parent.insertBefore(e,c)}old(e,t){let r;return[r,t]=Yy(t),r===2009?[t+"box-orient",t+"box-direction"]:super.old(e,t)}};Dl.names=["flex-direction","box-direction","box-orient"];Qy.exports=Dl});var Ky=v((Pq,Xy)=>{l();var I_=R(),Il=class extends I_{check(e){return e.value==="pixelated"}prefixed(e,t){return t==="-ms-"?"-ms-interpolation-mode":super.prefixed(e,t)}set(e,t){return t!=="-ms-"?super.set(e,t):(e.prop="-ms-interpolation-mode",e.value="nearest-neighbor",e)}normalize(){return"image-rendering"}process(e,t){return super.process(e,t)}};Il.names=["image-rendering","interpolation-mode"];Xy.exports=Il});var ew=v((Dq,Zy)=>{l();var q_=R(),R_=ue(),ql=class extends q_{constructor(e,t,r){super(e,t,r);this.prefixes&&(this.prefixes=R_.uniq(this.prefixes.map(n=>n==="-ms-"?"-webkit-":n)))}};ql.names=["backdrop-filter"];Zy.exports=ql});var rw=v((Iq,tw)=>{l();var M_=R(),B_=ue(),Rl=class extends M_{constructor(e,t,r){super(e,t,r);this.prefixes&&(this.prefixes=B_.uniq(this.prefixes.map(n=>n==="-ms-"?"-webkit-":n)))}check(e){return e.value.toLowerCase()==="text"}};Rl.names=["background-clip"];tw.exports=Rl});var nw=v((qq,iw)=>{l();var 
F_=R(),N_=["none","underline","overline","line-through","blink","inherit","initial","unset"],Ml=class extends F_{check(e){return e.value.split(/\s+/).some(t=>!N_.includes(t))}};Ml.names=["text-decoration"];iw.exports=Ml});var ow=v((Rq,aw)=>{l();var sw=de(),L_=R(),er=class extends L_{prefixed(e,t){let r;return[r,t]=sw(t),r===2009?t+"box-pack":r===2012?t+"flex-pack":super.prefixed(e,t)}normalize(){return"justify-content"}set(e,t){let r=sw(t)[0];if(r===2009||r===2012){let n=er.oldValues[e.value]||e.value;if(e.value=n,r!==2009||n!=="distribute")return super.set(e,t)}else if(r==="final")return super.set(e,t)}};er.names=["justify-content","flex-pack","box-pack"];er.oldValues={"flex-end":"end","flex-start":"start","space-between":"justify","space-around":"distribute"};aw.exports=er});var uw=v((Mq,lw)=>{l();var $_=R(),Bl=class extends $_{set(e,t){let r=e.value.toLowerCase();return t==="-webkit-"&&!r.includes(" ")&&r!=="contain"&&r!=="cover"&&(e.value=e.value+" "+e.value),super.set(e,t)}};Bl.names=["background-size"];lw.exports=Bl});var cw=v((Bq,fw)=>{l();var j_=R(),Fl=dt(),Nl=class extends j_{insert(e,t,r){if(t!=="-ms-")return super.insert(e,t,r);let n=Fl.parse(e),[a,s]=Fl.translate(n,0,1);n[0]&&n[0].includes("span")&&(s=n[0].join("").replace(/\D/g,"")),[[e.prop,a],[`${e.prop}-span`,s]].forEach(([u,c])=>{Fl.insertDecl(e,u,c)})}};Nl.names=["grid-row","grid-column"];fw.exports=Nl});var hw=v((Fq,dw)=>{l();var z_=R(),{prefixTrackProp:pw,prefixTrackValue:V_,autoplaceGridItems:U_,getGridGap:W_,inheritGridGap:G_}=dt(),H_=il(),Ll=class extends z_{prefixed(e,t){return t==="-ms-"?pw({prop:e,prefix:t}):super.prefixed(e,t)}normalize(e){return e.replace(/^grid-(rows|columns)/,"grid-template-$1")}insert(e,t,r,n){if(t!=="-ms-")return super.insert(e,t,r);let{parent:a,prop:s,value:o}=e,u=s.includes("rows"),c=s.includes("columns"),f=a.some(k=>k.prop==="grid-template"||k.prop==="grid-template-areas");if(f&&u)return!1;let p=new H_({options:{}}),d=p.gridStatus(a,n),h=W_(e);h=G_(e,h)||h;let y=u?h.row:h.column;(d==="no-autoplace"||d===!0)&&!f&&(y=null);let x=V_({value:o,gap:y});e.cloneBefore({prop:pw({prop:s,prefix:t}),value:x});let w=a.nodes.find(k=>k.prop==="grid-auto-flow"),b="row";if(w&&!p.disabled(w,n)&&(b=w.value.trim()),d==="autoplace"){let k=a.nodes.find(_=>_.prop==="grid-template-rows");if(!k&&f)return;if(!k&&!f){e.warn(n,"Autoplacement does not work without grid-template-rows property");return}!a.nodes.find(_=>_.prop==="grid-template-columns")&&!f&&e.warn(n,"Autoplacement does not work without grid-template-columns property"),c&&!f&&U_(e,n,h,b)}}};Ll.names=["grid-template-rows","grid-template-columns","grid-rows","grid-columns"];dw.exports=Ll});var gw=v((Nq,mw)=>{l();var Y_=R(),$l=class extends Y_{check(e){return!e.value.includes("flex-")&&e.value!=="baseline"}prefixed(e,t){return t+"grid-column-align"}normalize(){return"justify-self"}};$l.names=["grid-column-align"];mw.exports=$l});var ww=v((Lq,yw)=>{l();var Q_=R(),jl=class extends Q_{prefixed(e,t){return t+"scroll-chaining"}normalize(){return"overscroll-behavior"}set(e,t){return e.value==="auto"?e.value="chained":(e.value==="none"||e.value==="contain")&&(e.value="none"),super.set(e,t)}};jl.names=["overscroll-behavior","scroll-chaining"];yw.exports=jl});var xw=v(($q,vw)=>{l();var J_=R(),{parseGridAreas:X_,warnMissedAreas:K_,prefixTrackProp:Z_,prefixTrackValue:bw,getGridGap:eE,warnGridGap:tE,inheritGridGap:rE}=dt();function iE(i){return i.trim().slice(1,-1).split(/["']\s*["']?/g)}var zl=class extends J_{insert(e,t,r,n){if(t!=="-ms-")return 
super.insert(e,t,r);let a=!1,s=!1,o=e.parent,u=eE(e);u=rE(e,u)||u,o.walkDecls(/-ms-grid-rows/,p=>p.remove()),o.walkDecls(/grid-template-(rows|columns)/,p=>{if(p.prop==="grid-template-rows"){s=!0;let{prop:d,value:h}=p;p.cloneBefore({prop:Z_({prop:d,prefix:t}),value:bw({value:h,gap:u.row})})}else a=!0});let c=iE(e.value);a&&!s&&u.row&&c.length>1&&e.cloneBefore({prop:"-ms-grid-rows",value:bw({value:`repeat(${c.length}, auto)`,gap:u.row}),raws:{}}),tE({gap:u,hasColumns:a,decl:e,result:n});let f=X_({rows:c,gap:u});return K_(f,e,n),e}};zl.names=["grid-template-areas"];vw.exports=zl});var Sw=v((jq,kw)=>{l();var nE=R(),Vl=class extends nE{set(e,t){return t==="-webkit-"&&(e.value=e.value.replace(/\s*(right|left)\s*/i,"")),super.set(e,t)}};Vl.names=["text-emphasis-position"];kw.exports=Vl});var Aw=v((zq,Cw)=>{l();var sE=R(),Ul=class extends sE{set(e,t){return e.prop==="text-decoration-skip-ink"&&e.value==="auto"?(e.prop=t+"text-decoration-skip",e.value="ink",e):super.set(e,t)}};Ul.names=["text-decoration-skip-ink","text-decoration-skip"];Cw.exports=Ul});var Dw=v((Vq,Pw)=>{l();"use strict";Pw.exports={wrap:_w,limit:Ew,validate:Ow,test:Wl,curry:aE,name:Tw};function _w(i,e,t){var r=e-i;return((t-i)%r+r)%r+i}function Ew(i,e,t){return Math.max(i,Math.min(e,t))}function Ow(i,e,t,r,n){if(!Wl(i,e,t,r,n))throw new Error(t+" is outside of range ["+i+","+e+")");return t}function Wl(i,e,t,r,n){return!(te||n&&t===e||r&&t===i)}function Tw(i,e,t,r){return(t?"(":"[")+i+","+e+(r?")":"]")}function aE(i,e,t,r){var n=Tw.bind(null,i,e,t,r);return{wrap:_w.bind(null,i,e),limit:Ew.bind(null,i,e),validate:function(a){return Ow(i,e,a,t,r)},test:function(a){return Wl(i,e,a,t,r)},toString:n,name:n}}});var Rw=v((Uq,qw)=>{l();var Gl=Yn(),oE=Dw(),lE=Wt(),uE=ke(),fE=ue(),Iw=/top|left|right|bottom/gi,Qe=class extends uE{replace(e,t){let r=Gl(e);for(let n of r.nodes)if(n.type==="function"&&n.value===this.name)if(n.nodes=this.newDirection(n.nodes),n.nodes=this.normalize(n.nodes),t==="-webkit- old"){if(!this.oldWebkit(n))return!1}else n.nodes=this.convertDirection(n.nodes),n.value=t+n.value;return r.toString()}replaceFirst(e,...t){return t.map(n=>n===" "?{type:"space",value:n}:{type:"word",value:n}).concat(e.slice(1))}normalizeUnit(e,t){return`${parseFloat(e)/t*360}deg`}normalize(e){if(!e[0])return e;if(/-?\d+(.\d+)?grad/.test(e[0].value))e[0].value=this.normalizeUnit(e[0].value,400);else if(/-?\d+(.\d+)?rad/.test(e[0].value))e[0].value=this.normalizeUnit(e[0].value,2*Math.PI);else if(/-?\d+(.\d+)?turn/.test(e[0].value))e[0].value=this.normalizeUnit(e[0].value,1);else if(e[0].value.includes("deg")){let t=parseFloat(e[0].value);t=oE.wrap(0,360,t),e[0].value=`${t}deg`}return e[0].value==="0deg"?e=this.replaceFirst(e,"to"," ","top"):e[0].value==="90deg"?e=this.replaceFirst(e,"to"," ","right"):e[0].value==="180deg"?e=this.replaceFirst(e,"to"," ","bottom"):e[0].value==="270deg"&&(e=this.replaceFirst(e,"to"," ","left")),e}newDirection(e){if(e[0].value==="to"||(Iw.lastIndex=0,!Iw.test(e[0].value)))return e;e.unshift({type:"word",value:"to"},{type:"space",value:" "});for(let t=2;t0&&(e[0].value==="to"?this.fixDirection(e):e[0].value.includes("deg")?this.fixAngle(e):this.isRadial(e)&&this.fixRadial(e)),e}fixDirection(e){e.splice(0,2);for(let t of e){if(t.type==="div")break;t.type==="word"&&(t.value=this.revertDirection(t.value))}}fixAngle(e){let t=e[0].value;t=parseFloat(t),t=Math.abs(450-t)%360,t=this.roundFloat(t,3),e[0].value=`${t}deg`}fixRadial(e){let t=[],r=[],n,a,s,o,u;for(o=0;o{l();var cE=Wt(),pE=ke();function Mw(i){return new 
RegExp(`(^|[\\s,(])(${i}($|[\\s),]))`,"gi")}var Hl=class extends pE{regexp(){return this.regexpCache||(this.regexpCache=Mw(this.name)),this.regexpCache}isStretch(){return this.name==="stretch"||this.name==="fill"||this.name==="fill-available"}replace(e,t){return t==="-moz-"&&this.isStretch()?e.replace(this.regexp(),"$1-moz-available$3"):t==="-webkit-"&&this.isStretch()?e.replace(this.regexp(),"$1-webkit-fill-available$3"):super.replace(e,t)}old(e){let t=e+this.name;return this.isStretch()&&(e==="-moz-"?t="-moz-available":e==="-webkit-"&&(t="-webkit-fill-available")),new cE(this.name,t,t,Mw(t))}add(e,t){if(!(e.prop.includes("grid")&&t!=="-webkit-"))return super.add(e,t)}};Hl.names=["max-content","min-content","fit-content","fill","fill-available","stretch"];Bw.exports=Hl});var $w=v((Gq,Lw)=>{l();var Nw=Wt(),dE=ke(),Yl=class extends dE{replace(e,t){return t==="-webkit-"?e.replace(this.regexp(),"$1-webkit-optimize-contrast"):t==="-moz-"?e.replace(this.regexp(),"$1-moz-crisp-edges"):super.replace(e,t)}old(e){return e==="-webkit-"?new Nw(this.name,"-webkit-optimize-contrast"):e==="-moz-"?new Nw(this.name,"-moz-crisp-edges"):super.old(e)}};Yl.names=["pixelated"];Lw.exports=Yl});var zw=v((Hq,jw)=>{l();var hE=ke(),Ql=class extends hE{replace(e,t){let r=super.replace(e,t);return t==="-webkit-"&&(r=r.replace(/("[^"]+"|'[^']+')(\s+\d+\w)/gi,"url($1)$2")),r}};Ql.names=["image-set"];jw.exports=Ql});var Uw=v((Yq,Vw)=>{l();var mE=me().list,gE=ke(),Jl=class extends gE{replace(e,t){return mE.space(e).map(r=>{if(r.slice(0,+this.name.length+1)!==this.name+"(")return r;let n=r.lastIndexOf(")"),a=r.slice(n+1),s=r.slice(this.name.length+1,n);if(t==="-webkit-"){let o=s.match(/\d*.?\d+%?/);o?(s=s.slice(o[0].length).trim(),s+=`, ${o[0]}`):s+=", 0.5"}return t+this.name+"("+s+")"+a}).join(" ")}};Jl.names=["cross-fade"];Vw.exports=Jl});var Gw=v((Qq,Ww)=>{l();var yE=de(),wE=Wt(),bE=ke(),Xl=class extends bE{constructor(e,t){super(e,t);e==="display-flex"&&(this.name="flex")}check(e){return e.prop==="display"&&e.value===this.name}prefixed(e){let t,r;return[t,e]=yE(e),t===2009?this.name==="flex"?r="box":r="inline-box":t===2012?this.name==="flex"?r="flexbox":r="inline-flexbox":t==="final"&&(r=this.name),e+r}replace(e,t){return this.prefixed(t)}old(e){let t=this.prefixed(e);if(!!t)return new wE(this.name,t)}};Xl.names=["display-flex","inline-flex"];Ww.exports=Xl});var Yw=v((Jq,Hw)=>{l();var vE=ke(),Kl=class extends vE{constructor(e,t){super(e,t);e==="display-grid"&&(this.name="grid")}check(e){return e.prop==="display"&&e.value===this.name}};Kl.names=["display-grid","inline-grid"];Hw.exports=Kl});var Jw=v((Xq,Qw)=>{l();var xE=ke(),Zl=class extends xE{constructor(e,t){super(e,t);e==="filter-function"&&(this.name="filter")}};Zl.names=["filter","filter-function"];Qw.exports=Zl});var eb=v((Kq,Zw)=>{l();var 
Xw=ii(),M=R(),Kw=qm(),kE=Xm(),SE=il(),CE=gg(),eu=ct(),tr=Gt(),AE=Cg(),$e=ke(),rr=ue(),_E=_g(),EE=Og(),OE=Pg(),TE=Ig(),PE=Fg(),DE=$g(),IE=zg(),qE=Ug(),RE=Gg(),ME=Yg(),BE=Jg(),FE=Kg(),NE=ey(),LE=ry(),$E=ny(),jE=oy(),zE=uy(),VE=py(),UE=hy(),WE=gy(),GE=by(),HE=xy(),YE=Cy(),QE=_y(),JE=Oy(),XE=Py(),KE=Iy(),ZE=My(),e5=Fy(),t5=Ly(),r5=jy(),i5=Vy(),n5=Wy(),s5=Hy(),a5=Jy(),o5=Ky(),l5=ew(),u5=rw(),f5=nw(),c5=ow(),p5=uw(),d5=cw(),h5=hw(),m5=gw(),g5=ww(),y5=xw(),w5=Sw(),b5=Aw(),v5=Rw(),x5=Fw(),k5=$w(),S5=zw(),C5=Uw(),A5=Gw(),_5=Yw(),E5=Jw();tr.hack(_E);tr.hack(EE);tr.hack(OE);tr.hack(TE);M.hack(PE);M.hack(DE);M.hack(IE);M.hack(qE);M.hack(RE);M.hack(ME);M.hack(BE);M.hack(FE);M.hack(NE);M.hack(LE);M.hack($E);M.hack(jE);M.hack(zE);M.hack(VE);M.hack(UE);M.hack(WE);M.hack(GE);M.hack(HE);M.hack(YE);M.hack(QE);M.hack(JE);M.hack(XE);M.hack(KE);M.hack(ZE);M.hack(e5);M.hack(t5);M.hack(r5);M.hack(i5);M.hack(n5);M.hack(s5);M.hack(a5);M.hack(o5);M.hack(l5);M.hack(u5);M.hack(f5);M.hack(c5);M.hack(p5);M.hack(d5);M.hack(h5);M.hack(m5);M.hack(g5);M.hack(y5);M.hack(w5);M.hack(b5);$e.hack(v5);$e.hack(x5);$e.hack(k5);$e.hack(S5);$e.hack(C5);$e.hack(A5);$e.hack(_5);$e.hack(E5);var tu=new Map,si=class{constructor(e,t,r={}){this.data=e,this.browsers=t,this.options=r,[this.add,this.remove]=this.preprocess(this.select(this.data)),this.transition=new kE(this),this.processor=new SE(this)}cleaner(){if(this.cleanerCache)return this.cleanerCache;if(this.browsers.selected.length){let e=new eu(this.browsers.data,[]);this.cleanerCache=new si(this.data,e,this.options)}else return this;return this.cleanerCache}select(e){let t={add:{},remove:{}};for(let r in e){let n=e[r],a=n.browsers.map(u=>{let c=u.split(" ");return{browser:`${c[0]} ${c[1]}`,note:c[2]}}),s=a.filter(u=>u.note).map(u=>`${this.browsers.prefix(u.browser)} ${u.note}`);s=rr.uniq(s),a=a.filter(u=>this.browsers.isSelected(u.browser)).map(u=>{let c=this.browsers.prefix(u.browser);return u.note?`${c} ${u.note}`:c}),a=this.sort(rr.uniq(a)),this.options.flexbox==="no-2009"&&(a=a.filter(u=>!u.includes("2009")));let o=n.browsers.map(u=>this.browsers.prefix(u));n.mistakes&&(o=o.concat(n.mistakes)),o=o.concat(s),o=rr.uniq(o),a.length?(t.add[r]=a,a.length!a.includes(u)))):t.remove[r]=o}return t}sort(e){return e.sort((t,r)=>{let n=rr.removeNote(t).length,a=rr.removeNote(r).length;return n===a?r.length-t.length:a-n})}preprocess(e){let t={selectors:[],"@supports":new CE(si,this)};for(let n in e.add){let a=e.add[n];if(n==="@keyframes"||n==="@viewport")t[n]=new AE(n,a,this);else if(n==="@resolution")t[n]=new Kw(n,a,this);else if(this.data[n].selector)t.selectors.push(tr.load(n,a,this));else{let s=this.data[n].props;if(s){let o=$e.load(n,a,this);for(let u of s)t[u]||(t[u]={values:[]}),t[u].values.push(o)}else{let o=t[n]&&t[n].values||[];t[n]=M.load(n,a,this),t[n].values=o}}}let r={selectors:[]};for(let n in e.remove){let a=e.remove[n];if(this.data[n].selector){let s=tr.load(n,a);for(let o of a)r.selectors.push(s.old(o))}else if(n==="@keyframes"||n==="@viewport")for(let s of a){let o=`@${s}${n.slice(1)}`;r[o]={remove:!0}}else if(n==="@resolution")r[n]=new Kw(n,a,this);else{let s=this.data[n].props;if(s){let o=$e.load(n,[],this);for(let u of a){let c=o.old(u);if(c)for(let f of s)r[f]||(r[f]={}),r[f].values||(r[f].values=[]),r[f].values.push(c)}}else for(let o of a){let u=this.decl(n).old(n,o);if(n==="align-self"){let c=t[n]&&t[n].prefixes;if(c){if(o==="-webkit- 2009"&&c.includes("-webkit-"))continue;if(o==="-webkit-"&&c.includes("-webkit- 2009"))continue}}for(let c of 
u)r[c]||(r[c]={}),r[c].remove=!0}}}return[t,r]}decl(e){return tu.has(e)||tu.set(e,M.load(e)),tu.get(e)}unprefixed(e){let t=this.normalize(Xw.unprefixed(e));return t==="flex-direction"&&(t="flex-flow"),t}normalize(e){return this.decl(e).normalize(e)}prefixed(e,t){return e=Xw.unprefixed(e),this.decl(e).prefixed(e,t)}values(e,t){let r=this[e],n=r["*"]&&r["*"].values,a=r[t]&&r[t].values;return n&&a?rr.uniq(n.concat(a)):n||a||[]}group(e){let t=e.parent,r=t.index(e),{length:n}=t.nodes,a=this.unprefixed(e.prop),s=(o,u)=>{for(r+=o;r>=0&&r{l();tb.exports={"backdrop-filter":{feature:"css-backdrop-filter",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5","safari 16.5"]},element:{props:["background","background-image","border-image","mask","list-style","list-style-image","content","mask-image"],feature:"css-element-function",browsers:["firefox 114"]},"user-select":{mistakes:["-khtml-"],feature:"user-select-none",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5","safari 16.5"]},"background-clip":{feature:"background-clip-text",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},hyphens:{feature:"css-hyphens",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5","safari 16.5"]},fill:{props:["width","min-width","max-width","height","min-height","max-height","inline-size","min-inline-size","max-inline-size","block-size","min-block-size","max-block-size","grid","grid-template","grid-template-rows","grid-template-columns","grid-auto-columns","grid-auto-rows"],feature:"intrinsic-width",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"fill-available":{props:["width","min-width","max-width","height","min-height","max-height","inline-size","min-inline-size","max-inline-size","block-size","min-block-size","max-block-size","grid","grid-template","grid-template-rows","grid-template-columns","grid-auto-columns","grid-auto-rows"],feature:"intrinsic-width",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},stretch:{props:["width","min-width","max-width","height","min-height","max-height","inline-size","min-inline-size","max-inline-size","block-size","min-block-size","max-block-size","grid","grid-template","grid-template-rows","grid-template-columns","grid-auto-columns","grid-auto-rows"],feature:"intrinsic-width",browsers:["firefox 114"]},"fit-content":{props:["width","min-width","max-width","height","min-height","max-height","inline-size","min-inline-size","max-inline-size","block-size","min-block-size","max-block-size","grid","grid-template","grid-template-rows","grid-template-columns","grid-auto-columns","grid-auto-rows"],feature:"intrinsic-width",browsers:["firefox 114"]},"text-decoration-style":{feature:"text-decoration",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5"]},"text-decoration-color":{feature:"text-decoration",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5"]},"text-decoration-line":{feature:"text-decoration",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5"]},"text-decoration":{feature:"text-decoration",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5"]},"text-decoration-skip":{feature:"text-decoration",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5"]},"text-decoration-skip-ink":{feature:"text-decoration",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 
16.4","ios_saf 16.5"]},"text-size-adjust":{feature:"text-size-adjust",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5"]},"mask-clip":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-composite":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-image":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-origin":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-repeat":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-border-repeat":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-border-source":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},mask:{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-position":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-size":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-border":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-border-outset":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-border-width":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-border-slice":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"clip-path":{feature:"css-clip-path",browsers:["samsung 21"]},"box-decoration-break":{feature:"css-boxdecorationbreak",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5","opera 99","safari 16.5","samsung 21"]},appearance:{feature:"css-appearance",browsers:["samsung 21"]},"image-set":{props:["background","background-image","border-image","cursor","mask","mask-image","list-style","list-style-image","content"],feature:"css-image-set",browsers:["and_uc 15.5","chrome 109","samsung 21"]},"cross-fade":{props:["background","background-image","border-image","mask","list-style","list-style-image","content","mask-image"],feature:"css-cross-fade",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},isolate:{props:["unicode-bidi"],feature:"css-unicode-bidi",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5","safari 16.5"]},"color-adjust":{feature:"css-color-adjust",browsers:["chrome 109","chrome 113","chrome 114","edge 114","opera 99"]}}});var nb=v((e6,ib)=>{l();ib.exports={}});var lb=v((t6,ob)=>{l();var O5=Wo(),{agents:T5}=(zn(),jn),ru=bm(),P5=ct(),D5=eb(),I5=rb(),q5=nb(),sb={browsers:T5,prefixes:I5},ab=` + Replace 
Autoprefixer \`browsers\` option to Browserslist config. + Use \`browserslist\` key in \`package.json\` or \`.browserslistrc\` file. + + Using \`browsers\` option can cause errors. Browserslist config can + be used for Babel, Autoprefixer, postcss-normalize and other tools. + + If you really need to use option, rename it to \`overrideBrowserslist\`. + + Learn more at: + https://github.com/browserslist/browserslist#readme + https://twitter.com/browserslist + +`;function R5(i){return Object.prototype.toString.apply(i)==="[object Object]"}var iu=new Map;function M5(i,e){e.browsers.selected.length!==0&&(e.add.selectors.length>0||Object.keys(e.add).length>2||i.warn(`Autoprefixer target browsers do not need any prefixes.You do not need Autoprefixer anymore. +Check your Browserslist config to be sure that your targets are set up correctly. + + Learn more at: + https://github.com/postcss/autoprefixer#readme + https://github.com/browserslist/browserslist#readme + +`))}ob.exports=ir;function ir(...i){let e;if(i.length===1&&R5(i[0])?(e=i[0],i=void 0):i.length===0||i.length===1&&!i[0]?i=void 0:i.length<=2&&(Array.isArray(i[0])||!i[0])?(e=i[1],i=i[0]):typeof i[i.length-1]=="object"&&(e=i.pop()),e||(e={}),e.browser)throw new Error("Change `browser` option to `overrideBrowserslist` in Autoprefixer");if(e.browserslist)throw new Error("Change `browserslist` option to `overrideBrowserslist` in Autoprefixer");e.overrideBrowserslist?i=e.overrideBrowserslist:e.browsers&&(typeof console!="undefined"&&console.warn&&(ru.red?console.warn(ru.red(ab.replace(/`[^`]+`/g,n=>ru.yellow(n.slice(1,-1))))):console.warn(ab)),i=e.browsers);let t={ignoreUnknownVersions:e.ignoreUnknownVersions,stats:e.stats,env:e.env};function r(n){let a=sb,s=new P5(a.browsers,i,n,t),o=s.selected.join(", ")+JSON.stringify(e);return iu.has(o)||iu.set(o,new D5(a.prefixes,s,e)),iu.get(o)}return{postcssPlugin:"autoprefixer",prepare(n){let a=r({from:n.opts.from,env:e.env});return{OnceExit(s){M5(n,a),e.remove!==!1&&a.processor.remove(s,n),e.add!==!1&&a.processor.add(s,n)}}},info(n){return n=n||{},n.from=n.from||m.cwd(),q5(r(n))},options:e,browsers:i}}ir.postcss=!0;ir.data=sb;ir.defaults=O5.defaults;ir.info=()=>ir().info()});var ub={};Ae(ub,{default:()=>B5});var B5,fb=C(()=>{l();B5=[]});var pb={};Ae(pb,{default:()=>F5});var cb,F5,db=C(()=>{l();hi();cb=K(bi()),F5=Ze(cb.default.theme)});var mb={};Ae(mb,{default:()=>N5});var hb,N5,gb=C(()=>{l();hi();hb=K(bi()),N5=Ze(hb.default)});l();"use strict";var L5=Je(ym()),$5=Je(me()),j5=Je(lb()),z5=Je((fb(),ub)),V5=Je((db(),pb)),U5=Je((gb(),mb)),W5=Je((ts(),ku)),G5=Je((bo(),wo)),H5=Je((gs(),rf));function Je(i){return i&&i.__esModule?i:{default:i}}console.warn("cdn.tailwindcss.com should not be used in production. 
To use Tailwind CSS in production, install it as a PostCSS plugin or use the Tailwind CLI: https://tailwindcss.com/docs/installation");var Qn="tailwind",nu="text/tailwindcss",yb="/template.html",vt,wb=!0,bb=0,su=new Set,au,vb="",xb=(i=!1)=>({get(e,t){return(!i||t==="config")&&typeof e[t]=="object"&&e[t]!==null?new Proxy(e[t],xb()):e[t]},set(e,t,r){return e[t]=r,(!i||t==="config")&&ou(!0),!0}});window[Qn]=new Proxy({config:{},defaultTheme:V5.default,defaultConfig:U5.default,colors:W5.default,plugin:G5.default,resolveConfig:H5.default},xb(!0));function kb(i){au.observe(i,{attributes:!0,attributeFilter:["type"],characterData:!0,subtree:!0,childList:!0})}new MutationObserver(async i=>{let e=!1;if(!au){au=new MutationObserver(async()=>await ou(!0));for(let t of document.querySelectorAll(`style[type="${nu}"]`))kb(t)}for(let t of i)for(let r of t.addedNodes)r.nodeType===1&&r.tagName==="STYLE"&&r.getAttribute("type")===nu&&(kb(r),e=!0);await ou(e)}).observe(document.documentElement,{attributes:!0,attributeFilter:["class"],childList:!0,subtree:!0});async function ou(i=!1){i&&(bb++,su.clear());let e="";for(let r of document.querySelectorAll(`style[type="${nu}"]`))e+=r.textContent;let t=new Set;for(let r of document.querySelectorAll("[class]"))for(let n of r.classList)su.has(n)||t.add(n);if(document.body&&(wb||t.size>0||e!==vb||!vt||!vt.isConnected)){for(let n of t)su.add(n);wb=!1,vb=e,self[yb]=Array.from(t).join(" ");let{css:r}=await(0,$5.default)([(0,L5.default)({...window[Qn].config,_hash:bb,content:[yb],plugins:[...z5.default,...Array.isArray(window[Qn].config.plugins)?window[Qn].config.plugins:[]]}),(0,j5.default)({remove:!1})]).process(`@tailwind base;@tailwind components;@tailwind utilities;${e}`);(!vt||!vt.isConnected)&&(vt=document.createElement("style"),document.head.append(vt)),vt.textContent=r}}})(); +/*! 
https://mths.be/cssesc v3.0.0 by @mathias */ \ No newline at end of file diff --git a/maixpy/static/css/theme_default/dark.css b/maixpy/static/css/theme_default/dark.css new file mode 100644 index 00000000..8b510256 --- /dev/null +++ b/maixpy/static/css/theme_default/dark.css @@ -0,0 +1,169 @@ +/** + teedoc light theme css + @author neucrack + @copyright (c) neucrack CZD666666@gmail.com with MIT License + @changes 2021.1.27 add basic attrributes + */ + + /* + use .dark class to cover light theme style + */ + +/* global template */ +.dark body { + color: #d1d1d1; + background-color: #1b1b1b; +} +.dark a { + color: #8a8a8a; +} +.dark a:visited { + color: #8a8a8a; +} +.dark code { + background-color: #2a2a2a; +} +.dark pre[class*="language-"].line-numbers > code { + background: none; + padding: 0; +} +.dark #navbar .sub_items ul { + box-shadow: 0 0 9px 0px #000000; + background-color: #232323; +} +.dark #sidebar .active > a, +.dark #navbar .active > a { + background-color: #2d2d2d; + color: #c33d45; +} +.dark #sidebar .active > a, +.dark #navbar .active > a, +.dark #navbar .active_parent > a { + background-color: #2d2d2d; +} +.dark #sidebar li > a:hover, +.dark #sidebar li.active_parent > a:hover, +.dark #navbar li > a:hover, +.dark #navbar .sub_items > a:hover { + background-color: #2d2d2d; +} +.dark #sidebar ul .active_parent > a { + background-color: #232323; +} +.dark .gutter.gutter-horizontal { + background-color: #484848; +} +.dark *::-webkit-scrollbar-track { + background: #484848; +} +.dark *::-webkit-scrollbar-thumb { + background-color: #6b6b6b; +} +.dark #article #toc { + background-color: #1b1b1b; +} +.dark #to_top { + background-color: #2d2d2d; + box-shadow: 8px 8px 20px #000000; +} +.dark #to_top:hover { + box-shadow: 8px 8px 28px #000000; +} +.dark #to_top:active { + box-shadow: 0px 0px 20px #000000; +} +.dark blockquote { + background-color: #2d2d2d; +} +.dark blockquote.spoiler { + border-left: 5px solid #FF9800; + background-color: #6e5200; + color: white; +} +.dark td { + background-color: #373737; + border: 2px solid #555555; +} +.dark th { + font-weight: 700; + background-color: #0f5943; + color: white; + border: 2px solid #006f4f; +} +.dark sup a { + background-color: #2d2d2d; +} +.dark a:hover { + background-color: #424242; +} +.dark #doc_footer { + background-color: #2d2d2d; + border-top: 1px solid #2d2d2d; +} +.dark #page_footer { + background-color: #2d2d2d; + border-top: 1px solid #2d2d2d; +} +.dark #footer a:hover { + background-color: #404040; +} +.dark #previous_next { + border-top: 1px solid #525252; +} +.dark #previous_next a { + background-color: #2d2d2d; +} + +/* google translate */ +.dark #navbar #google_translate_element .goog-te-gadget-simple { + background-color: #1b1b1b; +} +.dark #navbar #google_translate_element .goog-te-gadget-simple .goog-te-menu-value { + color: #8a8a8a; +} + +/* tabset */ +.dark .tabset { + border: 0.2em solid #4c4c4c; +} +.dark .tabset-text-container { + background-color: #212121; +} +.dark .tabset-tab-active { + background-color: #212121; +} + +/* details */ +.dark details { + border: 0.2em solid #4c4c4c; +} +.dark details > .details-content { + background-color: #212121; +} + +/* markdown */ +.dark #mermaid-1662893106119 .messageText { + fill: #6f6f6f; + stroke: #6f6f6f; +} +#mermaid-1662893106119 .loopText, #mermaid-1662893106119 .loopText>tspan { + fill: #a99b1a; + stroke: none; +} + +@media screen and (max-width: 900px) { + .dark #menu_wrapper.m_menu_fixed { + background-color: rgb(27, 27, 27, 0.9); + box-shadow: 0px 1px 10px 
0px rgb(0, 0, 0, 0.32); + } + .dark #sidebar_wrapper { + background-color: #1b1b1b; + } + .dark #navbar { + display: block; + border-bottom: 1px solid #383838; + z-index: 89; + } +} + + diff --git a/maixpy/static/css/theme_default/light.css b/maixpy/static/css/theme_default/light.css new file mode 100644 index 00000000..4d56f095 --- /dev/null +++ b/maixpy/static/css/theme_default/light.css @@ -0,0 +1,1349 @@ +/** + teedoc light theme css + @author neucrack + @copyright (c) neucrack CZD666666@gmail.com with MIT License + @changes 2021.1.26 add basic attrributes + */ + +/* global template */ +body { + color: #606975; + background-color: white; + transition: 0.4s; + margin: 0; + display: flex; + flex-direction: column; + justify-content: space-between; + min-height: 100vh; + letter-spacing: 0.03em; + font-family: "Microsoft YaHei",Helvetica,"Meiryo UI","Malgun Gothic","Segoe UI","Trebuchet MS",Monaco,monospace,Tahoma,STXihei,"华文细黑",STHeiti,"Helvetica Neue","Droid Sans","wenquanyi micro hei",FreeSans,Arimo,Arial,SimSun,"宋体",Heiti,"黑体",sans-serif; +} +a { + color: #606975; + text-decoration: none; +} +a:visited { + color: #606975; +} +a:hover { + background-color: #d8d8d8; + border-radius: 5px; + transition: 0.4s; +} +h1 { + text-align: center; + font-size: 2.2em; +} +h2 { + font-size: 2em; +} +h3 { + font-size: 1.5em; +} +h4 { + font-size: 1.17em; +} +h5 { + font-size: 1em; +} +h6 { + font-size: 0.83em; +} + +p { + /* text-align: justify; */ + line-height: 2em; +} +blockquote { + border-left: 4px solid #c33d45; + margin: 0 0 1em 2px; + padding: 1px; + padding-left: 1em; + background-color: #f1f1f1; + border-radius: 0 5px 5px 0; + transition: 0.4s; +} +blockquote.spoiler { + border-left: 5px solid #FF9800; + background-color: #ffd65b; +} +*::-webkit-scrollbar { + width : 0.8rem; + height: 0.8rem; + min-width: 0.8rem; + min-height: 0.6rem; +} +#sidebar ul::-webkit-scrollbar { + width : 0.45rem; + height: 0.8rem; + min-width: 0.45rem; + min-height: 0.6rem; +} +*::-webkit-scrollbar-thumb { + border-radius : 0.6rem; + background-color: #b8b8b8; +} +*::-webkit-scrollbar-track { + background : #ededed; + border-radius: 10px; +} +sup a { + font-size: 1.1em; + background-color: #f1f1f1; + padding: 4px; + border-left: 1px solid #c33d45; + border-right: 1px solid #c33d45; +} +dl > dt { + font-weight: bold; +} +table { + border-collapse: collapse; + border-spacing: 0; + display: block; + width: 100%; + overflow: auto; + word-break: keep-all; + border-color: transparent; +} +tr { + background-color: #f1f1f1; + border-top: 1px solid #ccc; +} +td, th { + padding: 6px 13px; + transition: 0.4s; +} +th { + font-weight: 700; + color: white; + filter: brightness(110%); + border: 2px solid #c33d45; + background-color: #c33d45; +} +td { + background-color: #fbfbfb; + border: 2px solid #f1f1f1; +} +code { + background-color: #f1f1f1; + border-radius: 0.2em; + transition: 0.4s; + font-family: Menlo, Consolas, "DejaVu Sans Mono", Bitstream Vera Sans Mono, Courier New, monospace, Monaco, 'Andale Mono', 'Ubuntu Mono', "Microsoft YaHei"; + font-size: 85%; + padding: 0.2em 0.4em; + color: #c33d45; +} +pre[class*="language-"].line-numbers > code { + background: none; + padding: 0; +} +.btn, +#page_content .btn, +#content_body .btn { + color: white; + border-radius: 5px; + transition: 0.2s; + padding: 1em; + cursor: pointer; + transition: 0.2s; + background: #c33d45; + box-shadow: 1px 1px 2px 0px #c33d45; +} +.btn:hover, +#page_content .btn:hover, +#content_body .btn:hover { + background: #c33d45; + box-shadow: 1px 1px 8px 
0px #c33d45; +} +.btn:visited, +#page_content .btn:visited, +#content_body .btn:visited { + color: white; +} + +/* wrapper */ +.type_doc #wrapper { + margin-top: 1.5em; +} +#wrapper { + display: flex; + flex-grow: 1; +} +#page_wrapper { + display: flex; + flex-grow: 1; + flex-direction: column; + justify-content: space-between; + text-align: center; +} + +/* page */ +#page_content { + width: 100%; + flex-grow: 1; + display: flex; + flex-direction: row; +} +#page_content > div { + flex-grow: 1; + align-self: center; +} +#page_content a { + color: #c33a3a; +} + +/* navbar */ +#navbar { + display: flex; + justify-content: start; + z-index: 100; +} +#navbar * { + display: flex; + align-items: center; +} +#navbar_menu_btn { + display: none; +} +#navbar #navbar_items { + display: flex; + flex-grow: 1; + justify-content: space-between; + padding-right: 1em; + word-break: keep-all; +} +#navbar h2 { + color: #c33d45; +} +#navbar a{ + margin: 5px; + padding: 10px 15px; + cursor: pointer; +} +#navbar a.site_title{ + padding: 0; +} +#navbar a.site_title:hover{ + background-color: transparent; +} +#navbar .site_logo { + max-height: 60px; +} +#navbar ul { + list-style: none; + padding-inline-start: 10px; +} +#navbar .sub_items { + position: relative; + display: block; +} +#navbar .sub_items > a { + margin: 5px; + padding: 10px 15px; +} +#navbar .sub_items > a:hover + ul { + visibility: visible; +} +#navbar .sub_items ul { + display: block; + position: absolute; + left: 0; + box-shadow: 0 0 9px 0px #dadada; + border-radius: 5px; + padding: 0; + width: max-content; + min-width: 100%; + background-color: white; + visibility: hidden; + margin-top: -5px; + z-index: 101; + transition: 0.2s; +} +#navbar .sub_items ul:hover { + visibility: visible; +} +#navbar .sub_items ul li { + display: block; +} + +#navbar .sub_items ul ul { + left: 100%; + top: 0; +} + + +/* sidebar */ +#sidebar_wrapper { + width: 300px; +} +/* sidebar splitter */ +.gutter_icon { + background-image: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAeCAYAAADkftS9AAAAIklEQVQoU2M4c+bMfxAGAgYYmwGrIIiDjrELjpo5aiZeMwF+yNnOs5KSvgAAAABJRU5ErkJggg==); + width: 3px; + height: 2em; + background-repeat: no-repeat; + position: fixed; + top: 50%; +} +.gutter { + background-color: #eee; + background-repeat: no-repeat; + background-position: 50%; + transition: 0.2s; + border-radius: 5px; + display: block; +} + +.gutter.gutter-horizontal { + /* background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAeCAYAAADkftS9AAAAIklEQVQoU2M4c+bMfxAGAgYYmwGrIIiDjrELjpo5aiZeMwF+yNnOs5KSvgAAAABJRU5ErkJggg=='); */ + cursor: col-resize; +} + +#sidebar { + /* width: 300px; + min-width: 200px; + position: sticky; + top: 0; + overflow-x: hidden; */ + /* min-width: 200px; */ + /* position: sticky; */ + /* top: 0; */ + /* overflow-x: hidden; */ + display: flex; + flex-direction: column; + height: 100vh; + position: -webkit-sticky; + position: sticky; + top: 0; + padding-top: var(--ifm-navbar-height); + /* width: var(--doc-sidebar-width); */ + transition: opacity 50ms; + padding-top: 0; + z-index: 1; +} +#sidebar ul { + padding-inline-start: 20px; + display: none; +} +#sidebar > ul { + padding-inline-start: 0; + padding: .5rem; + display: block; + padding-bottom: 3em; + margin: 0; + overflow-y: auto; + overflow-x: hidden; +} +#sidebar ul .show{ + /* display: block; */ /* js instead, just reserve this item*/ +} +#sidebar ul .active_parent { + display: block; +} +#sidebar ul .active_parent > ul{ + display: block; +} +#sidebar ul.collapsed, #sidebar ul 
.active_parent > ul.collapsed{ + /* display: none; */ /* js instead, just reserve this item*/ +} +#sidebar ul .active_parent > a{ + background-color: #f1f1f1; + border-radius: 5px; + transition: 0.4s; +} +#sidebar li { + list-style: none; + margin: 2px 0; + position: relative; +} +#sidebar li > a { + display: flex; + justify-content: space-between; + padding: 0; + font-size: 0.9em; +} +#sidebar a:hover { + cursor: pointer; +} +#sidebar .active > a, #navbar .active > a, #navbar .active_parent > a{ + background-color: #f1f1f1; + border-radius: 5px; + color: #c33d45; + transition: 0.4s; +} +#navbar .sub_items .active > a { + transition: 0s; +} +#sidebar .active > a { + transition: 0.4s; +} +#sidebar li > a > .label { + padding: 10px; + display: inline-block; + white-space: nowrap; + width: 100%; + overflow: hidden; + text-overflow: ellipsis; +} + +#sidebar li > a:hover, +#sidebar li.active_parent > a:hover, +#navbar li > a:hover, +#navbar .sub_items > a:hover{ + background-color: #d8d8d8; + border-radius: 5px; + transition: 0.4s; +} +#sidebar .sub_indicator { + transition: transform 0.4s linear; + background: url("/maixpy/static/image/theme_default/indicator.svg"); + background-size: contain; + background-repeat: no-repeat; + background-position: center; + align-self: center; + height: 1.25rem; + width: 1.25rem; + -ms-transform: rotate(-45deg); + -moz-transform: rotate(-45deg); + -webkit-transform: rotate(-45deg); + transform: rotate(-45deg); + transition: 0.2s; +} + +#sidebar .sub_indicator:hover, +#sidebar li.no_link > a:hover > .sub_indicator, +#sidebar li.no_link > a.sub_indicator:hover, +#sidebar li.no_link > a.sub_indicator{ + height: 1.6em; + width: 1.6em; +} +#sidebar .sub_indicator_collapsed { + -ms-transform: rotate(0deg); + -moz-transform: rotate(0deg); + -webkit-transform: rotate(0deg); + transform: rotate(0deg); +} +#sidebar li.sidebar_category { + font-size: 0.9em; + color: #9e9e9e; + margin: 1em 0 0.1em 0; +} +#sidebar .tip { + position: fixed; + color: white; + left: 300px; + top: 18%; + box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.19); + border-radius: 0.2em; + padding: 1em; + display: flex; + flex-direction: column; + width: max-content; + transition: 0.4s; + visibility: hidden; + opacity: 0; + background: #c33d45; +} +#sidebar li > a:hover+.tip, #sidebar .tip:hover { + visibility: visible; + opacity: 1; +} +.blog_info > span { + margin: 0.4em; +} +.blog_tags { + text-align: center; + margin-top: 1em; +} +.blog_tags > span { + border-radius: 0.2em; + padding: 0.2em; + margin: 0.4em; + filter: brightness(120%); + border: 1px solid #c33d45; +} +#blog_list > ul { + padding-left: 0; +} +#blog_list > ul > li { + list-style: none; + box-shadow: 0px 0px 7px #00000033; + margin: 2em 1em; + padding: 2em; + border-radius: 0.4em; +} +#blog_list > ul > li a, #blog_list > ul > li a:visited { + padding: 0; + color: #c33d45; +} +html[lang^=zh] #article_content #blog_list h2:before { + content: ""; +} +#blog_list .blog_tags { + text-align: left; +} +#blog_list .blog_info { + color: #ababab; +} +#blog_list .blog_brief { + color: #828282; +} +.blog_cover { + text-align: center; + margin-top: 1em; +} +.blog_cover > img { + width: 100%; +} +#menu_wrapper { + width: 2%; +} +#menu { + background: url("/maixpy/static/image/theme_default/menu.svg"); + background-size: contain; + background-repeat: no-repeat; + background-position: center; + border-radius: 5px; + min-height: 2rem; + min-width: 2rem; + transition: 0.2s; + cursor: pointer; + position: sticky; + top: 1em; + z-index: 99; + margin-left: 
0.2em; +} + +/* article */ +#article { + display: flex; + width: 98%; + margin: auto; + justify-content: space-evenly; +} +#toc_wrapper { + width: 25%; +} +#article #toc { + background-color: white; + position: sticky; + transition: 0.4s; + top: 15%; + max-height: calc(100vh - 15vh); + overflow-y: auto; +} +#article #toc a{ + padding: 0.2em 0.0em; + display: block; + font-size: 0.9em; +} +#article #toc ul, #article #toc ol { + list-style: none; + padding-left: 1.5em; +} +#article #toc > div > ul, #article #toc > div > ol { + padding-left: 1em; +} + +/* add numbers for toc */ +#toc {counter-reset: toc_l1;} +#toc #toc_content {counter-reset: toc_l2;} +#toc a.node-name--H1 { + counter-reset: toc_l2; +} +#toc a.node-name--H2 { + counter-reset: toc_l3; +} +#toc a.node-name--H1:before { + counter-increment: toc_l1; +} +#toc a.node-name--H2:before { + counter-increment: toc_l2; + content: counter(toc_l2, decimal) ".\00a0"; +} +html[lang^=zh] #toc a.node-name--H2:before { + counter-increment: toc_l2; + content: counter(toc_l2, simp-chinese-informal) "、"; +} +#toc a.node-name--H3:before { + counter-increment: toc_l3; + content: counter(toc_l2, decimal) "." counter(toc_l3, decimal) ".\00a0"; +} +.heading_no_counter #toc a.node-name--H2:before { + content: ""; +} +.heading_no_counter #toc a.node-name--H3:before { + content: ""; +} +html[lang^=zh].heading_no_counter #toc a.node-name--H2:before { + content: ""; +} +/* add numbers for titles toc*/ + + +/* article content */ +#content_wrapper { + flex-grow: 1; + justify-content: space-between; + flex-direction: column; + display: flex; + width: 65%; +} +#content_body { + max-width: 50em; + width: 90%; + display: flex; + flex-direction: column; + justify-content: space-between; + line-height: 2em; + margin: auto; +} +#content_body h1 { + line-height: initial; +} +#content_body a, #content_body a:visited { + color: #c33a3a; + padding: 0.2em; +} +#content_body a:hover { + background-color: #fde1e1; + padding: 0.2em; + border-radius: 0.2em; +} +#content_body img { + max-width: 100%; + object-fit: contain; + cursor: zoom-in; +} + +/* add numbers for titles */ +#article_content { + /* counter-reset: h2section h3section h4section; */ + min-height: 50vh; + word-break: break-word; +} +#article_content h1 { + /* counter-reset: h2section h3section h4section; */ + line-height: 1.3em; +} +#article_content h2 { + /* counter-reset: h3section h4section; */ + line-height: 1.3em; +} +/* #article_content h3 { + counter-reset: h4section; +} +#article_content h2:before +{ + counter-increment: h2section; + content: counter(h2section, upper-roman) "、"; +} +#article_content h3:before +{ + counter-increment: h3section; + content: counter(h2section, decimal) "." counter(h3section, decimal) "、"; +} +#article_content h4:before +{ + counter-increment: h4section; + content: counter(h2section, decimal) "." counter(h3section, decimal) "." counter(h4section, decimal) "、"; +} +html[lang^=zh] #article_content h2:before { + content: counter(h2section, simp-chinese-informal) "、"; +} +html[lang^=zh] #article_content h3:before { + content: counter(h2section, decimal) "." counter(h3section, decimal) "、"; +} +html[lang^=zh] #article_content h4:before { + content: counter(h2section, decimal) "." counter(h3section, decimal) "." 
counter(h4section, decimal) "、"; +} +.heading_no_counter #article_content h2:before { + content: ""; +} +.heading_no_counter #article_content h3:before { + content: ""; +} +.heading_no_counter #article_content h4:before { + content: ""; +} +html[lang^=zh].heading_no_counter #article_content h2:before { + content: ""; +} +html[lang^=zh].heading_no_counter #article_content h3:before { + content: ""; +} +html[lang^=zh].heading_no_counter #article_content h4:before { + content: ""; +} */ +/* add numbers for titles end*/ + +#article_head { + margin-bottom: 2em; +} +#article_tags ul { + list-style: none; + display: flex; + align-items: baseline; + justify-content: flex-end; + padding: 0 +} +#article_tags ul li{ + margin: 5px; + padding: 8px; + border-radius: 5px; + color: #ffffffcc; + font-size: 0.9em; + background-color: #c33d45; +} +#article_info { + display: flex; + flex-direction: row; + justify-content: space-between; +} +#article_info > div { + align-self: center; + display: flex; + flex-direction: row; + align-items: center; +} +#article_info > div > div { + margin: 2px; +} +#article_info #print_page { + height: 1em; + width: 1em; + cursor: pointer; + background-repeat: no-repeat; + background-size: contain; + background-image: url("/maixpy/static/image/theme_default/print.svg"); +} +#article_info>div>span { + padding: 0.2em; + color: #c33d45; +} +#source_link { + display: flex; + flex-direction: row; + justify-content: flex-end; +} + +/* cover prism.css */ + +:not(pre) > code[class*="language-"], pre[class*="language-"] { + background: #2d2d2d; + border-radius: 5px; +} +div.code-toolbar > .toolbar a, div.code-toolbar > .toolbar button, div.code-toolbar > .toolbar span { + color: #fff; + font-size: .8em; + padding: 1em; + background: #f5f2f0; + background: rgba(224, 224, 224, 0.2); + box-shadow: 0 2px 0 0 rgba(0,0,0,0.2); + border-radius: .5em; + cursor: pointer; +} +div.code-toolbar > .toolbar a:hover, div.code-toolbar > .toolbar a:focus, div.code-toolbar > .toolbar button:hover, div.code-toolbar > .toolbar button:focus, div.code-toolbar > .toolbar span:hover, div.code-toolbar > .toolbar span:focus { + color: #a9ffe0; + text-decoration: none; +} + +/* back to top button */ +/* #to_top_wrapper { +} */ +#to_top { + background: url("/maixpy/static/image/theme_default/to-top.svg"); + background-size: 2em; + background-repeat: no-repeat; + background-position: center; + align-self: flex-end; + position: sticky; + bottom: 6em; + right: 3em; + margin: 1em; + height: 3rem; + width: 3rem; + cursor: pointer; + transition: 0.4s; + z-index: 97; + border-radius: 100%; + padding: 2px; + background-color: #f1f1f1; + box-shadow: 8px 8px 20px rgba(0,0,0,0.13); +} +#to_top:hover { + background-size: 2.5em; + box-shadow: 8px 8px 20px rgba(0,0,0,0.24); +} +#to_top:active { + box-shadow: 0px 0px 20px rgba(0,0,0,0.05); +} + + +/* footer article */ +#previous_next { + min-height: 3em; + margin-top: 2em; + display: flex; + border-top: 1px solid #f1f1f1; + padding: 1em; + transition: 0.4s; + display: flex; + justify-content: space-between; +} +#previous_next > div { + margin: 1em; +} +#previous_next a{ + padding: 1em; + border-radius: 5px; + display: flex; + align-items: center; + background-color: #f1f1f1; + justify-content: flex-start; +} +#previous_next a:hover { + background-color: #dedede; +} +#previous_next #previous a > .label{ + flex-grow: 1; + text-align: center; + padding-right: 2.5em; +} +#previous_next #previous a > .icon{ + transition: transform 0.4s linear; + background: 
url("/maixpy/static/image/theme_default/to-top.svg"); + background-size: 2em; + background-repeat: no-repeat; + background-position: center; + min-height: 2.5rem; + min-width: 2.5rem; + -ms-transform: rotate(-90deg); + -moz-transform: rotate(-90deg); + -webkit-transform: rotate(-90deg); + transform: rotate(-90deg); + padding-right: 2em; + padding: 0; +} +#previous_next #next a { + justify-content: flex-end; +} +#previous_next #next a > .label{ + flex-grow: 1; + text-align: center; + padding-left: 2.5em; +} +#previous_next #next a > .icon{ + transition: transform 0.4s linear; + background: url("/maixpy/static/image/theme_default/to-top.svg"); + background-size: 2em; + background-repeat: no-repeat; + background-position: center; + min-height: 2.5rem; + min-width: 2.5rem; + -ms-transform: rotate(90deg); + -moz-transform: rotate(90deg); + -webkit-transform: rotate(90deg); + transform: rotate(90deg); + padding-left: 2em; + padding: 0; +} +.footnotes { + margin-top: 5em; +} +#doc_footer { + min-height: 2em; + display: flex; + background-color: #2d2d2d; + border-top: 1px solid #2d2d2d; + padding: 1em; + transition: 0.4s; +} +#page_footer { + min-height: 2em; + /* margin-top: 5em; */ + display: flex; + padding: 1em; + transition: 0.4s; + background-color: #292929; +} +#footer { + display: flex; + flex-direction: column; + align-items: center; + width: 100%; + line-height: 1em; +} +#footer ul { + list-style: none; + padding: 0; + margin: 0; +} +#footer a, #footer a:visited{ + color: #848fa0; + display: flex; + padding: 0.2em; + font-size: 0.9em; +} +#footer a:hover { + background-color: #e6e6e6; + border-radius: 0.1em; + padding: 0.2em; +} +#footer_top { + width: 100%; +} +#footer_top > ul { + display: flex; + justify-content: space-around; + flex-direction: row; +} +#footer_top > ul > li > a { + color: #cecece; + font-weight: 700; + margin: 1em 0 0.5em 0; +} +#footer_top > ul > li > a:hover { + background-color: transparent; +} +#footer_bottom { + margin-top: 2em; +} +#footer_bottom a { + justify-content: center; +} + +/* TOC */ +.anchor { + opacity: 0; + transition: 0.4s; +} +h2:hover > .anchor, h3:hover > .anchor, h4:hover > .anchor, h5:hover > .anchor { + opacity: 1; +} +.anchor:hover { + opacity: 1; +} +.anchor-link { + display: none; +} +#toc_content>.toc-list { + overflow: hidden; + position: relative +} + +#toc_content>.toc-list li { + list-style: none +} + +.toc-list { + margin: 0; + padding-left: 10px +} +#toc_content > .toc-list > .toc-list-item { + border-left: 2px solid #f1f1f1; + padding: 0.1em 0.1em; + line-height: 1.2em; +} + +a.toc-link { + color: currentColor; + height: 100% +} + +.is-collapsible { + /* max-height: 1000px; */ + overflow: hidden; + transition: all 300ms ease-in-out +} + +.is-collapsed { + max-height: 0 +} + +.is-position-fixed { + position: fixed !important; + top: 0 +} + +.is-active-link { + font-weight: 700 +} + +.toc-link::before { + content: ' '; + display: inline-block; + height: inherit; + left: 0; + margin-top: -1px; + padding-left: 6px; + margin-left: -2px; +} + +.is-active-link::before { + border-left: 2px solid #c33d45; +} + +/* class template */ +.md_page #page_content > div { + width: 90%; + max-width: 50em; + margin: auto; + line-height: 2em; +} + +/* jupyter notebook parser */ +.jp-InputArea { + display: flex; + flex-direction: row; +} +.jp-InputPrompt { + word-break: keep-all; + margin-right: 0.2em; + font-size: 0.8em; +} +.jp-CodeMirrorEditor { + flex-grow: 1; + overflow: auto; +} +.jp-OutputArea-child { + display: flex; + flex-direction: row; +} 
+.jp-OutputPrompt { + word-break: keep-all; + margin-right: 0.2em; + font-size: 0.8em; + min-width: 2.7em; +} +.jp-OutputArea-output { + flex-grow: 1; + overflow: auto; + background: #2d2d2d; +} + +/* google translate */ +#navbar #google_translate_element { + padding: 0; +} +#navbar #google_translate_element .goog-te-gadget-simple { + border-radius: 5px; + transition: 0.4s; +} +#navbar #google_translate_element .goog-te-gadget-simple .goog-te-menu-value { + transition: 0.4s; +} + +/* tabset */ +.tabset { + display: flex; + flex-direction: column; + align-items: normal; + border: 0.2em solid #ebedf0; + border-radius: 0.5em; + margin: 0.5em 0; +} +.tabset-title { + font-size: medium; + font-weight: 500; + padding: 0.5em 1em; +} +.tabset-content { + display: flex; + flex-direction: column; + align-items: normal; +} +.tabset-tab { + display: flex; + flex-direction: row; + padding: 0 1em; +} +.tabset-tab-label { + cursor: pointer; + font-size: large; + font-weight: 700; + padding: 0.5em 1em; + border-top-left-radius: 0.2em; + border-top-right-radius: 0.2em; +} +.tabset-tab-label:hover { + background-color: #0000000d; +} +.tabset-tab-active { + background-color: #0000000d; + border-bottom: 0.2em solid #c33d45; + color: #c33d45; +} +.tabset-text-container { + padding: 1em; + background-color: #0000000d; +} +.tabset-text-container > div { + display: none; +} +.tabset-text-container > div.tabset-text-active { + display: block; +} + +/* details */ +details { + display: flex; + flex-direction: column; + align-items: normal; + border: 0.2em solid #ebedf0; + border-radius: 0.5em; + margin: 0.5em 0; + transition: 0.4s; +} +details > summary { + font-size: medium; + font-weight: 500; + padding: 0.5em 1em; + cursor: pointer; +} +details > summary { + list-style-type:none; + position: relative; + padding-left: 2em; +} +details > summary:before { + display:inline-block; + content: url("/maixpy/static/image/theme_default/array.svg"); + transform:rotate(90deg); + transition: 0.4s; + left: 0.5em; + position: absolute; + top: 0.45rem; +} +details[open] > summary:before { + transform:rotate(180deg); + top: 0.35rem; +} +details[open] summary ~ * { + animation: sweep .4s ease-in-out; + } +@keyframes sweep { +0% {opacity: 0; margin-left: -10px} +100% {opacity: 1; margin-left: 0px} +} +details > .details-content, details > div { + padding: 1em; + background-color: #0000000d; +} + +#update_history { + overflow-x: auto; +} +#update_history details > div { + padding: 0; +} +#update_history details { + width: fit-content; +} + +/* mobile phone */ +@media screen and (max-width: 900px) { + #navbar { + display: block; + border-bottom: 1px solid #f1f1f1; + z-index: 89; + } + #navbar * { + display: block; + } + #navbar a.site_title { + display: flex; + } + #navbar_menu { + display: flex; + justify-content: space-between; + } + #navbar ul { + padding-left: 0; + } + #navbar_menu_btn { + background: url("/maixpy/static/image/theme_default/menu.svg"); + background-size: contain; + background-repeat: no-repeat; + background-position: center; + border-radius: 5px; + cursor: pointer; + width: 1em; + height: 1em; + display: block; + } + #navbar #navbar_items { + display: none; + padding-right: 0; + } + #navbar .sub_items ul { + left: 0; + right: auto; + } + #navbar .sub_items ul ul { + left: 0; + top: 3em; + } + #sidebar_wrapper { + position: fixed; + top: 0; + left: 0; + bottom: 0; + background-color: white; + box-shadow: 0 0 20px 0px #bbbbbb; + width: 100%; + z-index: 98; + display: none; + } + #sidebar { + position: relative; + 
width: 100%; + } + .gutter { + display: none; + } + #sidebar > ul { + padding-top: 4em; + } + #menu_wrapper { + z-index: 99; + } + #menu_wrapper.m_menu_fixed { + position: fixed; + left: 0; + top: 0; + width: 100%; + height: 4em; + background-color: rgb(255, 255, 255, 0.9); + box-shadow: 0px 1px 10px 0px rgb(0, 0, 0, 0.06); + } + #menu_wrapper.m_menu_fixed > #menu { + position: fixed; + left: 0.2em; + top: 1em; + } + #menu.close { + background: url("/maixpy/static/image/theme_default/back.svg"); + background-size: contain; + background-repeat: no-repeat; + background-position: center; + border-radius: 5px; + min-height: 2em; + min-width: 2em; + } + #content_body { + width: 90%; + } + #toc_wrapper { + display: none; + position: fixed; + width: 100vh; + height: 100vh; + top: 0; + left: 0; + background-color: rgb(0, 0, 0, 0.5); + z-index: 999; + transition: 0.4s; + } + #toc_wrapper.show { + display: block; + } + #article #toc{ + position: fixed; + top: 4em; + right: 0; + z-index: 200; + width: 90%; + padding: 1em 0 1em 0; + border-radius: 0.3em 0 0 0.3em; + } + #previous_next { + flex-direction: column; + } + + .m_hide { + display: none; + } + #footer_top > ul { + flex-direction: column; + } + #article_tools { + width: 100%; + display: flex; + flex-direction: column; + align-items: flex-end; + position: -webkit-sticky; + position: sticky; + top: 1em; + z-index: 97; + } + #toc_btn { + background: url("/maixpy/static/image/theme_default/anchor.svg"); + background-size: contain; + background-repeat: no-repeat; + background-position: center; + border-radius: 5px; + min-height: 2em; + min-width: 2em; + -ms-transform: rotate(12deg); + -moz-transform: rotate(12deg); + -webkit-transform: rotate(12deg); + transform: rotate(12deg); + } +} + + +/* special */ + +#themes{ + padding: 2px 5px; + cursor: pointer; +} +#navbar .light, #navbar .dark:hover { + background: url("/maixpy/static/image/theme_default/light_mode.svg"); + background-size: contain; + background-repeat: no-repeat; + background-position: center; + min-height: 1rem; + min-width: 1rem; +} +#navbar .dark, #navbar .light:hover { + background: url("/maixpy/static/image/theme_default/dark_mode.svg"); + background-size: contain; + background-repeat: no-repeat; + background-position: center; + min-height: 1rem; + min-width: 1rem; +} + + +@media print { + code[class*="language-"], pre[class*="language-"] { + white-space: pre-wrap; + } + pre[class*="language-"].line-numbers { + border: 1px solid #2d2d2d; + } + #navbar, + #sidebar_wrapper, .gutter, #menu_wrapper, + #toc, #to_top, #doc_footer, + #previous_next, + #source_link, #print_page, + #comments-container { + display: none; + } + #article { + width: 100%; + } + #content_body { + max-width: 100%; + width: 100%; + } + /* .line-numbers-rows { + display: none; + } */ + .gutter { + display: none; + } +} diff --git a/maixpy/static/css/theme_default/prism.min.css b/maixpy/static/css/theme_default/prism.min.css new file mode 100644 index 00000000..c087ba96 --- /dev/null +++ b/maixpy/static/css/theme_default/prism.min.css @@ -0,0 +1,261 @@ +/* PrismJS 1.23.0 +https://prismjs.com/download.html#themes=prism-tomorrow&languages=markup+css+clike+javascript+bash+c+cpp+cmake+coffeescript+docker+go+ini+java+json+json5+kotlin+latex+less+lua+makefile+markdown+markup-templating+objectivec+php+powershell+python+jsx+tsx+ruby+rust+sass+scss+shell-session+sql+swift+textile+typescript+yaml&plugins=line-numbers+highlight-keywords+toolbar+copy-to-clipboard+match-braces */ +/** + * prism.js tomorrow night eighties for 
JavaScript, CoffeeScript, CSS and HTML + * Based on https://github.com/chriskempson/tomorrow-theme + * @author Rose Pritchard + */ + +code[class*="language-"], +pre[class*="language-"] { + color: #ccc; + background: none; + font-family: Consolas, Monaco, 'Andale Mono', 'Ubuntu Mono', monospace; + font-size: 1em; + text-align: left; + white-space: pre; + word-spacing: normal; + word-break: normal; + word-wrap: normal; + line-height: 1.5; + + -moz-tab-size: 4; + -o-tab-size: 4; + tab-size: 4; + + -webkit-hyphens: none; + -moz-hyphens: none; + -ms-hyphens: none; + hyphens: none; + +} + +/* Code blocks */ +pre[class*="language-"] { + padding: 1em; + margin: .5em 0; + overflow: auto; +} + +:not(pre) > code[class*="language-"], +pre[class*="language-"] { + background: #2d2d2d; +} + +/* Inline code */ +:not(pre) > code[class*="language-"] { + padding: .1em; + border-radius: .3em; + white-space: normal; +} + +.token.comment, +.token.block-comment, +.token.prolog, +.token.doctype, +.token.cdata { + color: #999; +} + +.token.punctuation { + color: #ccc; +} + +.token.tag, +.token.attr-name, +.token.namespace, +.token.deleted { + color: #e2777a; +} + +.token.function-name { + color: #6196cc; +} + +.token.boolean, +.token.number, +.token.function { + color: #f08d49; +} + +.token.property, +.token.class-name, +.token.constant, +.token.symbol { + color: #f8c555; +} + +.token.selector, +.token.important, +.token.atrule, +.token.keyword, +.token.builtin { + color: #cc99cd; +} + +.token.string, +.token.char, +.token.attr-value, +.token.regex, +.token.variable { + color: #7ec699; +} + +.token.operator, +.token.entity, +.token.url { + color: #67cdcc; +} + +.token.important, +.token.bold { + font-weight: bold; +} +.token.italic { + font-style: italic; +} + +.token.entity { + cursor: help; +} + +.token.inserted { + color: green; +} + +pre[class*="language-"].line-numbers { + position: relative; + padding-left: 3.8em; + counter-reset: linenumber; +} + +pre[class*="language-"].line-numbers > code { + position: relative; + white-space: inherit; +} + +.line-numbers .line-numbers-rows { + position: absolute; + pointer-events: none; + top: 0; + font-size: 100%; + left: -3.8em; + width: 3em; /* works for line-numbers below 1000 lines */ + letter-spacing: -1px; + border-right: 1px solid #999; + + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; + +} + + .line-numbers-rows > span { + display: block; + counter-increment: linenumber; + } + + .line-numbers-rows > span:before { + content: counter(linenumber); + color: #999; + display: block; + padding-right: 0.8em; + text-align: right; + } + +div.code-toolbar { + position: relative; +} + +div.code-toolbar > .toolbar { + position: absolute; + top: .3em; + right: .2em; + transition: opacity 0.3s ease-in-out; + opacity: 0; +} + +div.code-toolbar:hover > .toolbar { + opacity: 1; +} + +/* Separate line b/c rules are thrown out if selector is invalid. + IE11 and old Edge versions don't support :focus-within. 
*/ +div.code-toolbar:focus-within > .toolbar { + opacity: 1; +} + +div.code-toolbar > .toolbar .toolbar-item { + display: inline-block; +} + +div.code-toolbar > .toolbar a { + cursor: pointer; +} + +div.code-toolbar > .toolbar button { + background: none; + border: 0; + color: inherit; + font: inherit; + line-height: normal; + overflow: visible; + padding: 0; + -webkit-user-select: none; /* for button */ + -moz-user-select: none; + -ms-user-select: none; +} + +div.code-toolbar > .toolbar a, +div.code-toolbar > .toolbar button, +div.code-toolbar > .toolbar span { + color: #bbb; + font-size: .8em; + padding: 0 .5em; + background: #f5f2f0; + background: rgba(224, 224, 224, 0.2); + box-shadow: 0 2px 0 0 rgba(0,0,0,0.2); + border-radius: .5em; +} + +div.code-toolbar > .toolbar a:hover, +div.code-toolbar > .toolbar a:focus, +div.code-toolbar > .toolbar button:hover, +div.code-toolbar > .toolbar button:focus, +div.code-toolbar > .toolbar span:hover, +div.code-toolbar > .toolbar span:focus { + color: inherit; + text-decoration: none; +} + +.token.punctuation.brace-hover, +.token.punctuation.brace-selected { + outline: solid 1px; +} + +.rainbow-braces .token.punctuation.brace-level-1, +.rainbow-braces .token.punctuation.brace-level-5, +.rainbow-braces .token.punctuation.brace-level-9 { + color: #E50; + opacity: 1; +} +.rainbow-braces .token.punctuation.brace-level-2, +.rainbow-braces .token.punctuation.brace-level-6, +.rainbow-braces .token.punctuation.brace-level-10 { + color: #0B3; + opacity: 1; +} +.rainbow-braces .token.punctuation.brace-level-3, +.rainbow-braces .token.punctuation.brace-level-7, +.rainbow-braces .token.punctuation.brace-level-11 { + color: #26F; + opacity: 1; +} +.rainbow-braces .token.punctuation.brace-level-4, +.rainbow-braces .token.punctuation.brace-level-8, +.rainbow-braces .token.punctuation.brace-level-12 { + color: #E0E; + opacity: 1; +} + diff --git a/maixpy/static/css/theme_default/prism.min.js b/maixpy/static/css/theme_default/prism.min.js new file mode 100644 index 00000000..debc9322 --- /dev/null +++ b/maixpy/static/css/theme_default/prism.min.js @@ -0,0 +1,46 @@ +/* PrismJS 1.23.0 +https://prismjs.com/download.html#themes=prism-tomorrow&languages=markup+css+clike+javascript+bash+c+cpp+cmake+coffeescript+docker+go+ini+java+json+json5+kotlin+latex+less+lua+makefile+markdown+markup-templating+objectivec+php+powershell+python+jsx+tsx+ruby+rust+sass+scss+shell-session+sql+swift+textile+typescript+yaml&plugins=line-numbers+highlight-keywords+toolbar+copy-to-clipboard+match-braces */ +var _self="undefined"!=typeof window?window:"undefined"!=typeof WorkerGlobalScope&&self instanceof WorkerGlobalScope?self:{},Prism=function(u){var c=/\blang(?:uage)?-([\w-]+)\b/i,n=0,M={manual:u.Prism&&u.Prism.manual,disableWorkerMessageHandler:u.Prism&&u.Prism.disableWorkerMessageHandler,util:{encode:function e(n){return n instanceof W?new W(n.type,e(n.content),n.alias):Array.isArray(n)?n.map(e):n.replace(/&/g,"&").replace(/=l.reach);y+=m.value.length,m=m.next){var k=m.value;if(r.length>n.length)return;if(!(k instanceof W)){var b,x=1;if(h){if(!(b=z(p,y,n,f)))break;var w=b.index,A=b.index+b[0].length,P=y;for(P+=m.value.length;P<=w;)m=m.next,P+=m.value.length;if(P-=m.value.length,y=P,m.value instanceof W)continue;for(var S=m;S!==r.tail&&(Pl.reach&&(l.reach=N);var j=m.prev;O&&(j=I(r,j,O),y+=O.length),q(r,j,x);var C=new W(o,g?M.tokenize(E,g):E,d,E);if(m=I(r,j,C),L&&I(r,m,L),1l.reach&&(l.reach=_.reach)}}}}}}(e,a,n,a.head,0),function(e){var 
n=[],r=e.head.next;for(;r!==e.tail;)n.push(r.value),r=r.next;return n}(a)},hooks:{all:{},add:function(e,n){var r=M.hooks.all;r[e]=r[e]||[],r[e].push(n)},run:function(e,n){var r=M.hooks.all[e];if(r&&r.length)for(var t,a=0;t=r[a++];)t(n)}},Token:W};function W(e,n,r,t){this.type=e,this.content=n,this.alias=r,this.length=0|(t||"").length}function z(e,n,r,t){e.lastIndex=n;var a=e.exec(r);if(a&&t&&a[1]){var i=a[1].length;a.index+=i,a[0]=a[0].slice(i)}return a}function i(){var e={value:null,prev:null,next:null},n={value:null,prev:e,next:null};e.next=n,this.head=e,this.tail=n,this.length=0}function I(e,n,r){var t=n.next,a={value:r,prev:n,next:t};return n.next=a,t.prev=a,e.length++,a}function q(e,n,r){for(var t=n.next,a=0;a"+a.content+""},!u.document)return u.addEventListener&&(M.disableWorkerMessageHandler||u.addEventListener("message",function(e){var n=JSON.parse(e.data),r=n.language,t=n.code,a=n.immediateClose;u.postMessage(M.highlight(t,M.languages[r],r)),a&&u.close()},!1)),M;var e=M.util.currentScript();function r(){M.manual||M.highlightAll()}if(e&&(M.filename=e.src,e.hasAttribute("data-manual")&&(M.manual=!0)),!M.manual){var t=document.readyState;"loading"===t||"interactive"===t&&e&&e.defer?document.addEventListener("DOMContentLoaded",r):window.requestAnimationFrame?window.requestAnimationFrame(r):window.setTimeout(r,16)}return M}(_self);"undefined"!=typeof module&&module.exports&&(module.exports=Prism),"undefined"!=typeof global&&(global.Prism=Prism); +Prism.languages.markup={comment://,prolog:/<\?[\s\S]+?\?>/,doctype:{pattern:/"'[\]]|"[^"]*"|'[^']*')+(?:\[(?:[^<"'\]]|"[^"]*"|'[^']*'|<(?!!--)|)*\]\s*)?>/i,greedy:!0,inside:{"internal-subset":{pattern:/(\[)[\s\S]+(?=\]>$)/,lookbehind:!0,greedy:!0,inside:null},string:{pattern:/"[^"]*"|'[^']*'/,greedy:!0},punctuation:/^$|[[\]]/,"doctype-tag":/^DOCTYPE/,name:/[^\s<>'"]+/}},cdata://i,tag:{pattern:/<\/?(?!\d)[^\s>\/=$<%]+(?:\s(?:\s*[^\s>\/=]+(?:\s*=\s*(?:"[^"]*"|'[^']*'|[^\s'">=]+(?=[\s>]))|(?=[\s/>])))+)?\s*\/?>/,greedy:!0,inside:{tag:{pattern:/^<\/?[^\s>\/]+/,inside:{punctuation:/^<\/?/,namespace:/^[^\s>\/:]+:/}},"attr-value":{pattern:/=\s*(?:"[^"]*"|'[^']*'|[^\s'">=]+)/,inside:{punctuation:[{pattern:/^=/,alias:"attr-equals"},/"|'/]}},punctuation:/\/?>/,"attr-name":{pattern:/[^\s>\/]+/,inside:{namespace:/^[^\s>\/:]+:/}}}},entity:[{pattern:/&[\da-z]{1,8};/i,alias:"named-entity"},/&#x?[\da-f]{1,8};/i]},Prism.languages.markup.tag.inside["attr-value"].inside.entity=Prism.languages.markup.entity,Prism.languages.markup.doctype.inside["internal-subset"].inside=Prism.languages.markup,Prism.hooks.add("wrap",function(a){"entity"===a.type&&(a.attributes.title=a.content.replace(/&/,"&"))}),Object.defineProperty(Prism.languages.markup.tag,"addInlined",{value:function(a,e){var s={};s["language-"+e]={pattern:/(^$)/i,lookbehind:!0,inside:Prism.languages[e]},s.cdata=/^$/i;var n={"included-cdata":{pattern://i,inside:s}};n["language-"+e]={pattern:/[\s\S]+/,inside:Prism.languages[e]};var t={};t[a]={pattern:RegExp("(<__[^>]*>)(?:))*\\]\\]>|(?!)".replace(/__/g,function(){return a}),"i"),lookbehind:!0,greedy:!0,inside:n},Prism.languages.insertBefore("markup","cdata",t)}}),Prism.languages.html=Prism.languages.markup,Prism.languages.mathml=Prism.languages.markup,Prism.languages.svg=Prism.languages.markup,Prism.languages.xml=Prism.languages.extend("markup",{}),Prism.languages.ssml=Prism.languages.xml,Prism.languages.atom=Prism.languages.xml,Prism.languages.rss=Prism.languages.xml; +!function(s){var 
e=/("|')(?:\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/;s.languages.css={comment:/\/\*[\s\S]*?\*\//,atrule:{pattern:/@[\w-](?:[^;{\s]|\s+(?![\s{]))*(?:;|(?=\s*\{))/,inside:{rule:/^@[\w-]+/,"selector-function-argument":{pattern:/(\bselector\s*\(\s*(?![\s)]))(?:[^()\s]|\s+(?![\s)])|\((?:[^()]|\([^()]*\))*\))+(?=\s*\))/,lookbehind:!0,alias:"selector"},keyword:{pattern:/(^|[^\w-])(?:and|not|only|or)(?![\w-])/,lookbehind:!0}}},url:{pattern:RegExp("\\burl\\((?:"+e.source+"|(?:[^\\\\\r\n()\"']|\\\\[^])*)\\)","i"),greedy:!0,inside:{function:/^url/i,punctuation:/^\(|\)$/,string:{pattern:RegExp("^"+e.source+"$"),alias:"url"}}},selector:RegExp("[^{}\\s](?:[^{};\"'\\s]|\\s+(?![\\s{])|"+e.source+")*(?=\\s*\\{)"),string:{pattern:e,greedy:!0},property:/(?!\s)[-_a-z\xA0-\uFFFF](?:(?!\s)[-\w\xA0-\uFFFF])*(?=\s*:)/i,important:/!important\b/i,function:/[-a-z0-9]+(?=\()/i,punctuation:/[(){};:,]/},s.languages.css.atrule.inside.rest=s.languages.css;var t=s.languages.markup;t&&(t.tag.addInlined("style","css"),s.languages.insertBefore("inside","attr-value",{"style-attr":{pattern:/(^|["'\s])style\s*=\s*(?:"[^"]*"|'[^']*')/i,lookbehind:!0,inside:{"attr-value":{pattern:/=\s*(?:"[^"]*"|'[^']*'|[^\s'">=]+)/,inside:{style:{pattern:/(["'])[\s\S]+(?=["']$)/,lookbehind:!0,alias:"language-css",inside:s.languages.css},punctuation:[{pattern:/^=/,alias:"attr-equals"},/"|'/]}},"attr-name":/^style/i}}},t.tag))}(Prism); +Prism.languages.clike={comment:[{pattern:/(^|[^\\])\/\*[\s\S]*?(?:\*\/|$)/,lookbehind:!0,greedy:!0},{pattern:/(^|[^\\:])\/\/.*/,lookbehind:!0,greedy:!0}],string:{pattern:/(["'])(?:\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/,greedy:!0},"class-name":{pattern:/(\b(?:class|interface|extends|implements|trait|instanceof|new)\s+|\bcatch\s+\()[\w.\\]+/i,lookbehind:!0,inside:{punctuation:/[.\\]/}},keyword:/\b(?:if|else|while|do|for|return|in|instanceof|function|new|try|throw|catch|finally|null|break|continue)\b/,boolean:/\b(?:true|false)\b/,function:/\w+(?=\()/,number:/\b0x[\da-f]+\b|(?:\b\d+(?:\.\d*)?|\B\.\d+)(?:e[+-]?\d+)?/i,operator:/[<>]=?|[!=]=?=?|--?|\+\+?|&&?|\|\|?|[?*/~^%]/,punctuation:/[{}[\];(),.:]/}; 
+Prism.languages.javascript=Prism.languages.extend("clike",{"class-name":[Prism.languages.clike["class-name"],{pattern:/(^|[^$\w\xA0-\uFFFF])(?!\s)[_$A-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\.(?:prototype|constructor))/,lookbehind:!0}],keyword:[{pattern:/((?:^|})\s*)catch\b/,lookbehind:!0},{pattern:/(^|[^.]|\.\.\.\s*)\b(?:as|async(?=\s*(?:function\b|\(|[$\w\xA0-\uFFFF]|$))|await|break|case|class|const|continue|debugger|default|delete|do|else|enum|export|extends|finally(?=\s*(?:\{|$))|for|from(?=\s*(?:['"]|$))|function|(?:get|set)(?=\s*(?:[\[$\w\xA0-\uFFFF]|$))|if|implements|import|in|instanceof|interface|let|new|null|of|package|private|protected|public|return|static|super|switch|this|throw|try|typeof|undefined|var|void|while|with|yield)\b/,lookbehind:!0}],function:/#?(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\s*(?:\.\s*(?:apply|bind|call)\s*)?\()/,number:/\b(?:(?:0[xX](?:[\dA-Fa-f](?:_[\dA-Fa-f])?)+|0[bB](?:[01](?:_[01])?)+|0[oO](?:[0-7](?:_[0-7])?)+)n?|(?:\d(?:_\d)?)+n|NaN|Infinity)\b|(?:\b(?:\d(?:_\d)?)+\.?(?:\d(?:_\d)?)*|\B\.(?:\d(?:_\d)?)+)(?:[Ee][+-]?(?:\d(?:_\d)?)+)?/,operator:/--|\+\+|\*\*=?|=>|&&=?|\|\|=?|[!=]==|<<=?|>>>?=?|[-+*/%&|^!=<>]=?|\.{3}|\?\?=?|\?\.?|[~:]/}),Prism.languages.javascript["class-name"][0].pattern=/(\b(?:class|interface|extends|implements|instanceof|new)\s+)[\w.\\]+/,Prism.languages.insertBefore("javascript","keyword",{regex:{pattern:/((?:^|[^$\w\xA0-\uFFFF."'\])\s]|\b(?:return|yield))\s*)\/(?:\[(?:[^\]\\\r\n]|\\.)*]|\\.|[^/\\\[\r\n])+\/[gimyus]{0,6}(?=(?:\s|\/\*(?:[^*]|\*(?!\/))*\*\/)*(?:$|[\r\n,.;:})\]]|\/\/))/,lookbehind:!0,greedy:!0,inside:{"regex-source":{pattern:/^(\/)[\s\S]+(?=\/[a-z]*$)/,lookbehind:!0,alias:"language-regex",inside:Prism.languages.regex},"regex-flags":/[a-z]+$/,"regex-delimiter":/^\/|\/$/}},"function-variable":{pattern:/#?(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\s*[=:]\s*(?:async\s*)?(?:\bfunction\b|(?:\((?:[^()]|\([^()]*\))*\)|(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*)\s*=>))/,alias:"function"},parameter:[{pattern:/(function(?:\s+(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*)?\s*\(\s*)(?!\s)(?:[^()\s]|\s+(?![\s)])|\([^()]*\))+(?=\s*\))/,lookbehind:!0,inside:Prism.languages.javascript},{pattern:/(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\s*=>)/i,inside:Prism.languages.javascript},{pattern:/(\(\s*)(?!\s)(?:[^()\s]|\s+(?![\s)])|\([^()]*\))+(?=\s*\)\s*=>)/,lookbehind:!0,inside:Prism.languages.javascript},{pattern:/((?:\b|\s|^)(?!(?:as|async|await|break|case|catch|class|const|continue|debugger|default|delete|do|else|enum|export|extends|finally|for|from|function|get|if|implements|import|in|instanceof|interface|let|new|null|of|package|private|protected|public|return|set|static|super|switch|this|throw|try|typeof|undefined|var|void|while|with|yield)(?![$\w\xA0-\uFFFF]))(?:(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*\s*)\(\s*|\]\s*\(\s*)(?!\s)(?:[^()\s]|\s+(?![\s)])|\([^()]*\))+(?=\s*\)\s*\{)/,lookbehind:!0,inside:Prism.languages.javascript}],constant:/\b[A-Z](?:[A-Z_]|\dx?)*\b/}),Prism.languages.insertBefore("javascript","string",{"template-string":{pattern:/`(?:\\[\s\S]|\${(?:[^{}]|{(?:[^{}]|{[^}]*})*})+}|(?!\${)[^\\`])*`/,greedy:!0,inside:{"template-punctuation":{pattern:/^`|`$/,alias:"string"},interpolation:{pattern:/((?:^|[^\\])(?:\\{2})*)\${(?:[^{}]|{(?:[^{}]|{[^}]*})*})+}/,lookbehind:!0,inside:{"interpolation-punctuation":{pattern:/^\${|}$/,alias:"punctuation"},rest:Prism.languages.javascript}},string:/[\s\S]+/}}}),Prism.languages.markup&&Prism.languages.mar
kup.tag.addInlined("script","javascript"),Prism.languages.js=Prism.languages.javascript; +!function(e){var t="\\b(?:BASH|BASHOPTS|BASH_ALIASES|BASH_ARGC|BASH_ARGV|BASH_CMDS|BASH_COMPLETION_COMPAT_DIR|BASH_LINENO|BASH_REMATCH|BASH_SOURCE|BASH_VERSINFO|BASH_VERSION|COLORTERM|COLUMNS|COMP_WORDBREAKS|DBUS_SESSION_BUS_ADDRESS|DEFAULTS_PATH|DESKTOP_SESSION|DIRSTACK|DISPLAY|EUID|GDMSESSION|GDM_LANG|GNOME_KEYRING_CONTROL|GNOME_KEYRING_PID|GPG_AGENT_INFO|GROUPS|HISTCONTROL|HISTFILE|HISTFILESIZE|HISTSIZE|HOME|HOSTNAME|HOSTTYPE|IFS|INSTANCE|JOB|LANG|LANGUAGE|LC_ADDRESS|LC_ALL|LC_IDENTIFICATION|LC_MEASUREMENT|LC_MONETARY|LC_NAME|LC_NUMERIC|LC_PAPER|LC_TELEPHONE|LC_TIME|LESSCLOSE|LESSOPEN|LINES|LOGNAME|LS_COLORS|MACHTYPE|MAILCHECK|MANDATORY_PATH|NO_AT_BRIDGE|OLDPWD|OPTERR|OPTIND|ORBIT_SOCKETDIR|OSTYPE|PAPERSIZE|PATH|PIPESTATUS|PPID|PS1|PS2|PS3|PS4|PWD|RANDOM|REPLY|SECONDS|SELINUX_INIT|SESSION|SESSIONTYPE|SESSION_MANAGER|SHELL|SHELLOPTS|SHLVL|SSH_AUTH_SOCK|TERM|UID|UPSTART_EVENTS|UPSTART_INSTANCE|UPSTART_JOB|UPSTART_SESSION|USER|WINDOWID|XAUTHORITY|XDG_CONFIG_DIRS|XDG_CURRENT_DESKTOP|XDG_DATA_DIRS|XDG_GREETER_DATA_DIR|XDG_MENU_PREFIX|XDG_RUNTIME_DIR|XDG_SEAT|XDG_SEAT_PATH|XDG_SESSION_DESKTOP|XDG_SESSION_ID|XDG_SESSION_PATH|XDG_SESSION_TYPE|XDG_VTNR|XMODIFIERS)\\b",n={pattern:/(^(["']?)\w+\2)[ \t]+\S.*/,lookbehind:!0,alias:"punctuation",inside:null},a={bash:n,environment:{pattern:RegExp("\\$"+t),alias:"constant"},variable:[{pattern:/\$?\(\([\s\S]+?\)\)/,greedy:!0,inside:{variable:[{pattern:/(^\$\(\([\s\S]+)\)\)/,lookbehind:!0},/^\$\(\(/],number:/\b0x[\dA-Fa-f]+\b|(?:\b\d+(?:\.\d*)?|\B\.\d+)(?:[Ee]-?\d+)?/,operator:/--?|-=|\+\+?|\+=|!=?|~|\*\*?|\*=|\/=?|%=?|<<=?|>>=?|<=?|>=?|==?|&&?|&=|\^=?|\|\|?|\|=|\?|:/,punctuation:/\(\(?|\)\)?|,|;/}},{pattern:/\$\((?:\([^)]+\)|[^()])+\)|`[^`]+`/,greedy:!0,inside:{variable:/^\$\(|^`|\)$|`$/}},{pattern:/\$\{[^}]+\}/,greedy:!0,inside:{operator:/:[-=?+]?|[!\/]|##?|%%?|\^\^?|,,?/,punctuation:/[\[\]]/,environment:{pattern:RegExp("(\\{)"+t),lookbehind:!0,alias:"constant"}}},/\$(?:\w+|[#?*!@$])/],entity:/\\(?:[abceEfnrtv\\"]|O?[0-7]{1,3}|x[0-9a-fA-F]{1,2}|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8})/};e.languages.bash={shebang:{pattern:/^#!\s*\/.*/,alias:"important"},comment:{pattern:/(^|[^"{\\$])#.*/,lookbehind:!0},"function-name":[{pattern:/(\bfunction\s+)\w+(?=(?:\s*\(?:\s*\))?\s*\{)/,lookbehind:!0,alias:"function"},{pattern:/\b\w+(?=\s*\(\s*\)\s*\{)/,alias:"function"}],"for-or-select":{pattern:/(\b(?:for|select)\s+)\w+(?=\s+in\s)/,alias:"variable",lookbehind:!0},"assign-left":{pattern:/(^|[\s;|&]|[<>]\()\w+(?=\+?=)/,inside:{environment:{pattern:RegExp("(^|[\\s;|&]|[<>]\\()"+t),lookbehind:!0,alias:"constant"}},alias:"variable",lookbehind:!0},string:[{pattern:/((?:^|[^<])<<-?\s*)(\w+?)\s[\s\S]*?(?:\r?\n|\r)\2/,lookbehind:!0,greedy:!0,inside:a},{pattern:/((?:^|[^<])<<-?\s*)(["'])(\w+)\2\s[\s\S]*?(?:\r?\n|\r)\3/,lookbehind:!0,greedy:!0,inside:{bash:n}},{pattern:/(^|[^\\](?:\\\\)*)(["'])(?:\\[\s\S]|\$\([^)]+\)|\$(?!\()|`[^`]+`|(?!\2)[^\\`$])*\2/,lookbehind:!0,greedy:!0,inside:a}],environment:{pattern:RegExp("\\$?"+t),alias:"constant"},variable:a.variable,function:{pattern:/(^|[\s;|&]|[<>]\()(?:add|apropos|apt|aptitude|apt-cache|apt-get|aspell|automysqlbackup|awk|basename|bash|bc|bconsole|bg|bzip2|cal|cat|cfdisk|chgrp|chkconfig|chmod|chown|chroot|cksum|clear|cmp|column|comm|composer|cp|cron|crontab|csplit|curl|cut|date|dc|dd|ddrescue|debootstrap|df|diff|diff3|dig|dir|dircolors|dirname|dirs|dmesg|du|egrep|eject|env|ethtool|expand|expect|expr|fdformat|fdisk|fg|fgrep|file|find|fmt|fold|format|
free|fsck|ftp|fuser|gawk|git|gparted|grep|groupadd|groupdel|groupmod|groups|grub-mkconfig|gzip|halt|head|hg|history|host|hostname|htop|iconv|id|ifconfig|ifdown|ifup|import|install|ip|jobs|join|kill|killall|less|link|ln|locate|logname|logrotate|look|lpc|lpr|lprint|lprintd|lprintq|lprm|ls|lsof|lynx|make|man|mc|mdadm|mkconfig|mkdir|mke2fs|mkfifo|mkfs|mkisofs|mknod|mkswap|mmv|more|most|mount|mtools|mtr|mutt|mv|nano|nc|netstat|nice|nl|nohup|notify-send|npm|nslookup|op|open|parted|passwd|paste|pathchk|ping|pkill|pnpm|popd|pr|printcap|printenv|ps|pushd|pv|quota|quotacheck|quotactl|ram|rar|rcp|reboot|remsync|rename|renice|rev|rm|rmdir|rpm|rsync|scp|screen|sdiff|sed|sendmail|seq|service|sftp|sh|shellcheck|shuf|shutdown|sleep|slocate|sort|split|ssh|stat|strace|su|sudo|sum|suspend|swapon|sync|tac|tail|tar|tee|time|timeout|top|touch|tr|traceroute|tsort|tty|umount|uname|unexpand|uniq|units|unrar|unshar|unzip|update-grub|uptime|useradd|userdel|usermod|users|uudecode|uuencode|v|vdir|vi|vim|virsh|vmstat|wait|watch|wc|wget|whereis|which|who|whoami|write|xargs|xdg-open|yarn|yes|zenity|zip|zsh|zypper)(?=$|[)\s;|&])/,lookbehind:!0},keyword:{pattern:/(^|[\s;|&]|[<>]\()(?:if|then|else|elif|fi|for|while|in|case|esac|function|select|do|done|until)(?=$|[)\s;|&])/,lookbehind:!0},builtin:{pattern:/(^|[\s;|&]|[<>]\()(?:\.|:|break|cd|continue|eval|exec|exit|export|getopts|hash|pwd|readonly|return|shift|test|times|trap|umask|unset|alias|bind|builtin|caller|command|declare|echo|enable|help|let|local|logout|mapfile|printf|read|readarray|source|type|typeset|ulimit|unalias|set|shopt)(?=$|[)\s;|&])/,lookbehind:!0,alias:"class-name"},boolean:{pattern:/(^|[\s;|&]|[<>]\()(?:true|false)(?=$|[)\s;|&])/,lookbehind:!0},"file-descriptor":{pattern:/\B&\d\b/,alias:"important"},operator:{pattern:/\d?<>|>\||\+=|==?|!=?|=~|<<[<-]?|[&\d]?>>|\d?[<>]&?|&[>&]?|\|[&|]?|<=?|>=?/,inside:{"file-descriptor":{pattern:/^\d/,alias:"important"}}},punctuation:/\$?\(\(?|\)\)?|\.\.|[{}[\];\\]/,number:{pattern:/(^|\s)(?:[1-9]\d*|0)(?:[.,]\d+)?\b/,lookbehind:!0}},n.inside=e.languages.bash;for(var s=["comment","function-name","for-or-select","assign-left","string","environment","function","keyword","builtin","boolean","file-descriptor","operator","punctuation","number"],i=a.variable[1].inside,o=0;o>=?|<<=?|->|([-+&|:])\1|[?:~]|[-+*/%&|^!=<>]=?/}),Prism.languages.insertBefore("c","string",{macro:{pattern:/(^\s*)#\s*[a-z](?:[^\r\n\\/]|\/(?!\*)|\/\*(?:[^*]|\*(?!\/))*\*\/|\\(?:\r\n|[\s\S]))*/im,lookbehind:!0,greedy:!0,alias:"property",inside:{string:[{pattern:/^(#\s*include\s*)<[^>]+>/,lookbehind:!0},Prism.languages.c.string],comment:Prism.languages.c.comment,"macro-name":[{pattern:/(^#\s*define\s+)\w+\b(?!\()/i,lookbehind:!0},{pattern:/(^#\s*define\s+)\w+\b(?=\()/i,lookbehind:!0,alias:"function"}],directive:{pattern:/^(#\s*)[a-z]+/,lookbehind:!0,alias:"keyword"},"directive-hash":/^#/,punctuation:/##|\\(?=[\r\n])/,expression:{pattern:/\S[\s\S]*/,inside:Prism.languages.c}}},constant:/\b(?:__FILE__|__LINE__|__DATE__|__TIME__|__TIMESTAMP__|__func__|EOF|NULL|SEEK_CUR|SEEK_END|SEEK_SET|stdin|stdout|stderr)\b/}),delete Prism.languages.c.boolean; +!function(e){var 
t=/\b(?:alignas|alignof|asm|auto|bool|break|case|catch|char|char8_t|char16_t|char32_t|class|compl|concept|const|consteval|constexpr|constinit|const_cast|continue|co_await|co_return|co_yield|decltype|default|delete|do|double|dynamic_cast|else|enum|explicit|export|extern|final|float|for|friend|goto|if|import|inline|int|int8_t|int16_t|int32_t|int64_t|uint8_t|uint16_t|uint32_t|uint64_t|long|module|mutable|namespace|new|noexcept|nullptr|operator|override|private|protected|public|register|reinterpret_cast|requires|return|short|signed|sizeof|static|static_assert|static_cast|struct|switch|template|this|thread_local|throw|try|typedef|typeid|typename|union|unsigned|using|virtual|void|volatile|wchar_t|while)\b/,n="\\b(?!)\\w+(?:\\s*\\.\\s*\\w+)*\\b".replace(//g,function(){return t.source});e.languages.cpp=e.languages.extend("c",{"class-name":[{pattern:RegExp("(\\b(?:class|concept|enum|struct|typename)\\s+)(?!)\\w+".replace(//g,function(){return t.source})),lookbehind:!0},/\b[A-Z]\w*(?=\s*::\s*\w+\s*\()/,/\b[A-Z_]\w*(?=\s*::\s*~\w+\s*\()/i,/\w+(?=\s*<(?:[^<>]|<(?:[^<>]|<[^<>]*>)*>)*>\s*::\s*\w+\s*\()/],keyword:t,number:{pattern:/(?:\b0b[01']+|\b0x(?:[\da-f']+(?:\.[\da-f']*)?|\.[\da-f']+)(?:p[+-]?[\d']+)?|(?:\b[\d']+(?:\.[\d']*)?|\B\.[\d']+)(?:e[+-]?[\d']+)?)[ful]{0,4}/i,greedy:!0},operator:/>>=?|<<=?|->|([-+&|:])\1|[?:~]|<=>|[-+*/%&|^!=<>]=?|\b(?:and|and_eq|bitand|bitor|not|not_eq|or|or_eq|xor|xor_eq)\b/,boolean:/\b(?:true|false)\b/}),e.languages.insertBefore("cpp","string",{module:{pattern:RegExp('(\\b(?:module|import)\\s+)(?:"(?:\\\\(?:\r\n|[^])|[^"\\\\\r\n])*"|<[^<>\r\n]*>|'+"(?:\\s*:\\s*)?|:\\s*".replace(//g,function(){return n})+")"),lookbehind:!0,greedy:!0,inside:{string:/^[<"][\s\S]+/,operator:/:/,punctuation:/\./}},"raw-string":{pattern:/R"([^()\\ ]{0,16})\([\s\S]*?\)\1"/,alias:"string",greedy:!0}}),e.languages.insertBefore("cpp","class-name",{"base-clause":{pattern:/(\b(?:class|struct)\s+\w+\s*:\s*)[^;{}"'\s]+(?:\s+[^;{}"'\s]+)*(?=\s*[;{])/,lookbehind:!0,greedy:!0,inside:e.languages.extend("cpp",{})}}),e.languages.insertBefore("inside","operator",{"class-name":/\b[a-z_]\w*\b(?!\s*::)/i},e.languages.cpp["base-clause"])}(Prism); 
+Prism.languages.cmake={comment:/#.*/,string:{pattern:/"(?:[^\\"]|\\.)*"/,greedy:!0,inside:{interpolation:{pattern:/\${(?:[^{}$]|\${[^{}$]*})*}/,inside:{punctuation:/\${|}/,variable:/\w+/}}}},variable:/\b(?:CMAKE_\w+|\w+_(?:VERSION(?:_MAJOR|_MINOR|_PATCH|_TWEAK)?|(?:BINARY|SOURCE)_DIR|DESCRIPTION|HOMEPAGE_URL|ROOT)|(?:CTEST_CUSTOM_(?:MAXIMUM_(?:(?:FAIL|PASS)ED_TEST_OUTPUT_SIZE|NUMBER_OF_(?:ERROR|WARNING)S)|ERROR_(?:P(?:OST|RE)_CONTEXT|EXCEPTION|MATCH)|P(?:OST|RE)_MEMCHECK|WARNING_(?:EXCEPTION|MATCH)|(?:MEMCHECK|TESTS)_IGNORE|P(?:OST|RE)_TEST|COVERAGE_EXCLUDE)|ANDROID|APPLE|BORLAND|BUILD_SHARED_LIBS|CACHE|CPACK_(?:ABSOLUTE_DESTINATION_FILES|COMPONENT_INCLUDE_TOPLEVEL_DIRECTORY|ERROR_ON_ABSOLUTE_INSTALL_DESTINATION|INCLUDE_TOPLEVEL_DIRECTORY|INSTALL_DEFAULT_DIRECTORY_PERMISSIONS|INSTALL_SCRIPT|PACKAGING_INSTALL_PREFIX|SET_DESTDIR|WARN_ON_ABSOLUTE_INSTALL_DESTINATION)|CTEST_(?:BINARY_DIRECTORY|BUILD_COMMAND|BUILD_NAME|BZR_COMMAND|BZR_UPDATE_OPTIONS|CHANGE_ID|CHECKOUT_COMMAND|CONFIGURATION_TYPE|CONFIGURE_COMMAND|COVERAGE_COMMAND|COVERAGE_EXTRA_FLAGS|CURL_OPTIONS|CUSTOM_(?:COVERAGE_EXCLUDE|ERROR_EXCEPTION|ERROR_MATCH|ERROR_POST_CONTEXT|ERROR_PRE_CONTEXT|MAXIMUM_FAILED_TEST_OUTPUT_SIZE|MAXIMUM_NUMBER_OF_(?:ERRORS|WARNINGS)|MAXIMUM_PASSED_TEST_OUTPUT_SIZE|MEMCHECK_IGNORE|POST_MEMCHECK|POST_TEST|PRE_MEMCHECK|PRE_TEST|TESTS_IGNORE|WARNING_EXCEPTION|WARNING_MATCH)|CVS_CHECKOUT|CVS_COMMAND|CVS_UPDATE_OPTIONS|DROP_LOCATION|DROP_METHOD|DROP_SITE|DROP_SITE_CDASH|DROP_SITE_PASSWORD|DROP_SITE_USER|EXTRA_COVERAGE_GLOB|GIT_COMMAND|GIT_INIT_SUBMODULES|GIT_UPDATE_CUSTOM|GIT_UPDATE_OPTIONS|HG_COMMAND|HG_UPDATE_OPTIONS|LABELS_FOR_SUBPROJECTS|MEMORYCHECK_(?:COMMAND|COMMAND_OPTIONS|SANITIZER_OPTIONS|SUPPRESSIONS_FILE|TYPE)|NIGHTLY_START_TIME|P4_CLIENT|P4_COMMAND|P4_OPTIONS|P4_UPDATE_OPTIONS|RUN_CURRENT_SCRIPT|SCP_COMMAND|SITE|SOURCE_DIRECTORY|SUBMIT_URL|SVN_COMMAND|SVN_OPTIONS|SVN_UPDATE_OPTIONS|TEST_LOAD|TEST_TIMEOUT|TRIGGER_SITE|UPDATE_COMMAND|UPDATE_OPTIONS|UPDATE_VERSION_ONLY|USE_LAUNCHERS)|CYGWIN|ENV|EXECUTABLE_OUTPUT_PATH|GHS-MULTI|IOS|LIBRARY_OUTPUT_PATH|MINGW|MSVC(?:10|11|12|14|60|70|71|80|90|_IDE|_TOOLSET_VERSION|_VERSION)?|MSYS|PROJECT_(?:BINARY_DIR|DESCRIPTION|HOMEPAGE_URL|NAME|SOURCE_DIR|VERSION|VERSION_(?:MAJOR|MINOR|PATCH|TWEAK))|UNIX|WIN32|WINCE|WINDOWS_PHONE|WINDOWS_STORE|XCODE|XCODE_VERSION))\b/,property:/\b(?:cxx_\w+|(?:ARCHIVE_OUTPUT_(?:DIRECTORY|NAME)|COMPILE_DEFINITIONS|COMPILE_PDB_NAME|COMPILE_PDB_OUTPUT_DIRECTORY|EXCLUDE_FROM_DEFAULT_BUILD|IMPORTED_(?:IMPLIB|LIBNAME|LINK_DEPENDENT_LIBRARIES|LINK_INTERFACE_LANGUAGES|LINK_INTERFACE_LIBRARIES|LINK_INTERFACE_MULTIPLICITY|LOCATION|NO_SONAME|OBJECTS|SONAME)|INTERPROCEDURAL_OPTIMIZATION|LIBRARY_OUTPUT_DIRECTORY|LIBRARY_OUTPUT_NAME|LINK_FLAGS|LINK_INTERFACE_LIBRARIES|LINK_INTERFACE_MULTIPLICITY|LOCATION|MAP_IMPORTED_CONFIG|OSX_ARCHITECTURES|OUTPUT_NAME|PDB_NAME|PDB_OUTPUT_DIRECTORY|RUNTIME_OUTPUT_DIRECTORY|RUNTIME_OUTPUT_NAME|STATIC_LIBRARY_FLAGS|VS_CSHARP|VS_DOTNET_REFERENCEPROP|VS_DOTNET_REFERENCE|VS_GLOBAL_SECTION_POST|VS_GLOBAL_SECTION_PRE|VS_GLOBAL|XCODE_ATTRIBUTE)_\w+|\w+_(?:CLANG_TIDY|COMPILER_LAUNCHER|CPPCHECK|CPPLINT|INCLUDE_WHAT_YOU_USE|OUTPUT_NAME|POSTFIX|VISIBILITY_PRESET)|ABSTRACT|ADDITIONAL_MAKE_CLEAN_FILES|ADVANCED|ALIASED_TARGET|ALLOW_DUPLICATE_CUSTOM_TARGETS|ANDROID_(?:ANT_ADDITIONAL_OPTIONS|API|API_MIN|ARCH|ASSETS_DIRECTORIES|GUI|JAR_DEPENDENCIES|NATIVE_LIB_DEPENDENCIES|NATIVE_LIB_DIRECTORIES|PROCESS_MAX|PROGUARD|PROGUARD_CONFIG_PATH|SECURE_PROPS_PATH|SKIP_ANT_STEP|STL_TYPE)|ARCHIVE_OUTPUT_DIRECTORY|ARCHIVE_OUTPUT_NAME|ATTACHED_FIL
ES|ATTACHED_FILES_ON_FAIL|AUTOGEN_(?:BUILD_DIR|ORIGIN_DEPENDS|PARALLEL|SOURCE_GROUP|TARGETS_FOLDER|TARGET_DEPENDS)|AUTOMOC|AUTOMOC_(?:COMPILER_PREDEFINES|DEPEND_FILTERS|EXECUTABLE|MACRO_NAMES|MOC_OPTIONS|SOURCE_GROUP|TARGETS_FOLDER)|AUTORCC|AUTORCC_EXECUTABLE|AUTORCC_OPTIONS|AUTORCC_SOURCE_GROUP|AUTOUIC|AUTOUIC_EXECUTABLE|AUTOUIC_OPTIONS|AUTOUIC_SEARCH_PATHS|BINARY_DIR|BUILDSYSTEM_TARGETS|BUILD_RPATH|BUILD_RPATH_USE_ORIGIN|BUILD_WITH_INSTALL_NAME_DIR|BUILD_WITH_INSTALL_RPATH|BUNDLE|BUNDLE_EXTENSION|CACHE_VARIABLES|CLEAN_NO_CUSTOM|COMMON_LANGUAGE_RUNTIME|COMPATIBLE_INTERFACE_(?:BOOL|NUMBER_MAX|NUMBER_MIN|STRING)|COMPILE_(?:DEFINITIONS|FEATURES|FLAGS|OPTIONS|PDB_NAME|PDB_OUTPUT_DIRECTORY)|COST|CPACK_DESKTOP_SHORTCUTS|CPACK_NEVER_OVERWRITE|CPACK_PERMANENT|CPACK_STARTUP_SHORTCUTS|CPACK_START_MENU_SHORTCUTS|CPACK_WIX_ACL|CROSSCOMPILING_EMULATOR|CUDA_EXTENSIONS|CUDA_PTX_COMPILATION|CUDA_RESOLVE_DEVICE_SYMBOLS|CUDA_SEPARABLE_COMPILATION|CUDA_STANDARD|CUDA_STANDARD_REQUIRED|CXX_EXTENSIONS|CXX_STANDARD|CXX_STANDARD_REQUIRED|C_EXTENSIONS|C_STANDARD|C_STANDARD_REQUIRED|DEBUG_CONFIGURATIONS|DEBUG_POSTFIX|DEFINE_SYMBOL|DEFINITIONS|DEPENDS|DEPLOYMENT_ADDITIONAL_FILES|DEPLOYMENT_REMOTE_DIRECTORY|DISABLED|DISABLED_FEATURES|ECLIPSE_EXTRA_CPROJECT_CONTENTS|ECLIPSE_EXTRA_NATURES|ENABLED_FEATURES|ENABLED_LANGUAGES|ENABLE_EXPORTS|ENVIRONMENT|EXCLUDE_FROM_ALL|EXCLUDE_FROM_DEFAULT_BUILD|EXPORT_NAME|EXPORT_PROPERTIES|EXTERNAL_OBJECT|EchoString|FAIL_REGULAR_EXPRESSION|FIND_LIBRARY_USE_LIB32_PATHS|FIND_LIBRARY_USE_LIB64_PATHS|FIND_LIBRARY_USE_LIBX32_PATHS|FIND_LIBRARY_USE_OPENBSD_VERSIONING|FIXTURES_CLEANUP|FIXTURES_REQUIRED|FIXTURES_SETUP|FOLDER|FRAMEWORK|Fortran_FORMAT|Fortran_MODULE_DIRECTORY|GENERATED|GENERATOR_FILE_NAME|GENERATOR_IS_MULTI_CONFIG|GHS_INTEGRITY_APP|GHS_NO_SOURCE_GROUP_FILE|GLOBAL_DEPENDS_DEBUG_MODE|GLOBAL_DEPENDS_NO_CYCLES|GNUtoMS|HAS_CXX|HEADER_FILE_ONLY|HELPSTRING|IMPLICIT_DEPENDS_INCLUDE_TRANSFORM|IMPORTED|IMPORTED_(?:COMMON_LANGUAGE_RUNTIME|CONFIGURATIONS|GLOBAL|IMPLIB|LIBNAME|LINK_DEPENDENT_LIBRARIES|LINK_INTERFACE_(?:LANGUAGES|LIBRARIES|MULTIPLICITY)|LOCATION|NO_SONAME|OBJECTS|SONAME)|IMPORT_PREFIX|IMPORT_SUFFIX|INCLUDE_DIRECTORIES|INCLUDE_REGULAR_EXPRESSION|INSTALL_NAME_DIR|INSTALL_RPATH|INSTALL_RPATH_USE_LINK_PATH|INTERFACE_(?:AUTOUIC_OPTIONS|COMPILE_DEFINITIONS|COMPILE_FEATURES|COMPILE_OPTIONS|INCLUDE_DIRECTORIES|LINK_DEPENDS|LINK_DIRECTORIES|LINK_LIBRARIES|LINK_OPTIONS|POSITION_INDEPENDENT_CODE|SOURCES|SYSTEM_INCLUDE_DIRECTORIES)|INTERPROCEDURAL_OPTIMIZATION|IN_TRY_COMPILE|IOS_INSTALL_COMBINED|JOB_POOLS|JOB_POOL_COMPILE|JOB_POOL_LINK|KEEP_EXTENSION|LABELS|LANGUAGE|LIBRARY_OUTPUT_DIRECTORY|LIBRARY_OUTPUT_NAME|LINKER_LANGUAGE|LINK_(?:DEPENDS|DEPENDS_NO_SHARED|DIRECTORIES|FLAGS|INTERFACE_LIBRARIES|INTERFACE_MULTIPLICITY|LIBRARIES|OPTIONS|SEARCH_END_STATIC|SEARCH_START_STATIC|WHAT_YOU_USE)|LISTFILE_STACK|LOCATION|MACOSX_BUNDLE|MACOSX_BUNDLE_INFO_PLIST|MACOSX_FRAMEWORK_INFO_PLIST|MACOSX_PACKAGE_LOCATION|MACOSX_RPATH|MACROS|MANUALLY_ADDED_DEPENDENCIES|MEASUREMENT|MODIFIED|NAME|NO_SONAME|NO_SYSTEM_FROM_IMPORTED|OBJECT_DEPENDS|OBJECT_OUTPUTS|OSX_ARCHITECTURES|OUTPUT_NAME|PACKAGES_FOUND|PACKAGES_NOT_FOUND|PARENT_DIRECTORY|PASS_REGULAR_EXPRESSION|PDB_NAME|PDB_OUTPUT_DIRECTORY|POSITION_INDEPENDENT_CODE|POST_INSTALL_SCRIPT|PREDEFINED_TARGETS_FOLDER|PREFIX|PRE_INSTALL_SCRIPT|PRIVATE_HEADER|PROCESSORS|PROCESSOR_AFFINITY|PROJECT_LABEL|PUBLIC_HEADER|REPORT_UNDEFINED_PROPERTIES|REQUIRED_FILES|RESOURCE|RESOURCE_LOCK|RULE_LAUNCH_COMPILE|RULE_LAUNCH_CUSTOM|RULE_LAUNCH_LINK|RULE_MESSAGES|RUNTIME_OUTPUT_DIRE
CTORY|RUNTIME_OUTPUT_NAME|RUN_SERIAL|SKIP_AUTOGEN|SKIP_AUTOMOC|SKIP_AUTORCC|SKIP_AUTOUIC|SKIP_BUILD_RPATH|SKIP_RETURN_CODE|SOURCES|SOURCE_DIR|SOVERSION|STATIC_LIBRARY_FLAGS|STATIC_LIBRARY_OPTIONS|STRINGS|SUBDIRECTORIES|SUFFIX|SYMBOLIC|TARGET_ARCHIVES_MAY_BE_SHARED_LIBS|TARGET_MESSAGES|TARGET_SUPPORTS_SHARED_LIBS|TESTS|TEST_INCLUDE_FILE|TEST_INCLUDE_FILES|TIMEOUT|TIMEOUT_AFTER_MATCH|TYPE|USE_FOLDERS|VALUE|VARIABLES|VERSION|VISIBILITY_INLINES_HIDDEN|VS_(?:CONFIGURATION_TYPE|COPY_TO_OUT_DIR|DEBUGGER_(?:COMMAND|COMMAND_ARGUMENTS|ENVIRONMENT|WORKING_DIRECTORY)|DEPLOYMENT_CONTENT|DEPLOYMENT_LOCATION|DOTNET_REFERENCES|DOTNET_REFERENCES_COPY_LOCAL|GLOBAL_KEYWORD|GLOBAL_PROJECT_TYPES|GLOBAL_ROOTNAMESPACE|INCLUDE_IN_VSIX|IOT_STARTUP_TASK|KEYWORD|RESOURCE_GENERATOR|SCC_AUXPATH|SCC_LOCALPATH|SCC_PROJECTNAME|SCC_PROVIDER|SDK_REFERENCES|SHADER_(?:DISABLE_OPTIMIZATIONS|ENABLE_DEBUG|ENTRYPOINT|FLAGS|MODEL|OBJECT_FILE_NAME|OUTPUT_HEADER_FILE|TYPE|VARIABLE_NAME)|STARTUP_PROJECT|TOOL_OVERRIDE|USER_PROPS|WINRT_COMPONENT|WINRT_EXTENSIONS|WINRT_REFERENCES|XAML_TYPE)|WILL_FAIL|WIN32_EXECUTABLE|WINDOWS_EXPORT_ALL_SYMBOLS|WORKING_DIRECTORY|WRAP_EXCLUDE|XCODE_(?:EMIT_EFFECTIVE_PLATFORM_NAME|EXPLICIT_FILE_TYPE|FILE_ATTRIBUTES|LAST_KNOWN_FILE_TYPE|PRODUCT_TYPE|SCHEME_(?:ADDRESS_SANITIZER|ADDRESS_SANITIZER_USE_AFTER_RETURN|ARGUMENTS|DISABLE_MAIN_THREAD_CHECKER|DYNAMIC_LIBRARY_LOADS|DYNAMIC_LINKER_API_USAGE|ENVIRONMENT|EXECUTABLE|GUARD_MALLOC|MAIN_THREAD_CHECKER_STOP|MALLOC_GUARD_EDGES|MALLOC_SCRIBBLE|MALLOC_STACK|THREAD_SANITIZER(?:_STOP)?|UNDEFINED_BEHAVIOUR_SANITIZER(?:_STOP)?|ZOMBIE_OBJECTS))|XCTEST)\b/,keyword:/\b(?:add_compile_definitions|add_compile_options|add_custom_command|add_custom_target|add_definitions|add_dependencies|add_executable|add_library|add_link_options|add_subdirectory|add_test|aux_source_directory|break|build_command|build_name|cmake_host_system_information|cmake_minimum_required|cmake_parse_arguments|cmake_policy|configure_file|continue|create_test_sourcelist|ctest_build|ctest_configure|ctest_coverage|ctest_empty_binary_directory|ctest_memcheck|ctest_read_custom_files|ctest_run_script|ctest_sleep|ctest_start|ctest_submit|ctest_test|ctest_update|ctest_upload|define_property|else|elseif|enable_language|enable_testing|endforeach|endfunction|endif|endmacro|endwhile|exec_program|execute_process|export|export_library_dependencies|file|find_file|find_library|find_package|find_path|find_program|fltk_wrap_ui|foreach|function|get_cmake_property|get_directory_property|get_filename_component|get_property|get_source_file_property|get_target_property|get_test_property|if|include|include_directories|include_external_msproject|include_guard|include_regular_expression|install|install_files|install_programs|install_targets|link_directories|link_libraries|list|load_cache|load_command|macro|make_directory|mark_as_advanced|math|message|option|output_required_files|project|qt_wrap_cpp|qt_wrap_ui|remove|remove_definitions|return|separate_arguments|set|set_directory_properties|set_property|set_source_files_properties|set_target_properties|set_tests_properties|site_name|source_group|string|subdir_depends|subdirs|target_compile_definitions|target_compile_features|target_compile_options|target_include_directories|target_link_directories|target_link_libraries|target_link_options|target_sources|try_compile|try_run|unset|use_mangled_mesa|utility_source|variable_requires|variable_watch|while|write_file)(?=\s*\()\b/,boolean:/\b(?:ON|OFF|TRUE|FALSE)\b/,namespace:/\b(?:PROPERTIES|SHARED|PRIVATE|STATIC|PUBLIC|INTERFACE|TARGET_OB
JECTS)\b/,operator:/\b(?:NOT|AND|OR|MATCHES|LESS|GREATER|EQUAL|STRLESS|STRGREATER|STREQUAL|VERSION_LESS|VERSION_EQUAL|VERSION_GREATER|DEFINED)\b/,inserted:{pattern:/\b\w+::\w+\b/,alias:"class-name"},number:/\b\d+(?:\.\d+)*\b/,function:/\b[a-z_]\w*(?=\s*\()\b/i,punctuation:/[()>}]|\$[<{]/}; +!function(e){var t=/#(?!\{).+/,n={pattern:/#\{[^}]+\}/,alias:"variable"};e.languages.coffeescript=e.languages.extend("javascript",{comment:t,string:[{pattern:/'(?:\\[\s\S]|[^\\'])*'/,greedy:!0},{pattern:/"(?:\\[\s\S]|[^\\"])*"/,greedy:!0,inside:{interpolation:n}}],keyword:/\b(?:and|break|by|catch|class|continue|debugger|delete|do|each|else|extend|extends|false|finally|for|if|in|instanceof|is|isnt|let|loop|namespace|new|no|not|null|of|off|on|or|own|return|super|switch|then|this|throw|true|try|typeof|undefined|unless|until|when|while|window|with|yes|yield)\b/,"class-member":{pattern:/@(?!\d)\w+/,alias:"variable"}}),e.languages.insertBefore("coffeescript","comment",{"multiline-comment":{pattern:/###[\s\S]+?###/,alias:"comment"},"block-regex":{pattern:/\/{3}[\s\S]*?\/{3}/,alias:"regex",inside:{comment:t,interpolation:n}}}),e.languages.insertBefore("coffeescript","string",{"inline-javascript":{pattern:/`(?:\\[\s\S]|[^\\`])*`/,inside:{delimiter:{pattern:/^`|`$/,alias:"punctuation"},script:{pattern:/[\s\S]+/,alias:"language-javascript",inside:e.languages.javascript}}},"multiline-string":[{pattern:/'''[\s\S]*?'''/,greedy:!0,alias:"string"},{pattern:/"""[\s\S]*?"""/,greedy:!0,alias:"string",inside:{interpolation:n}}]}),e.languages.insertBefore("coffeescript","keyword",{property:/(?!\d)\w+(?=\s*:(?!:))/}),delete e.languages.coffeescript["template-string"],e.languages.coffee=e.languages.coffeescript}(Prism); +!function(e){var r="(?:[ \t]+(?![ \t])(?:)?|)".replace(//g,function(){return"\\\\[\r\n](?:\\s|\\\\[\r\n]|#.*(?!.))*(?![\\s#]|\\\\[\r\n])"}),n="\"(?:[^\"\\\\\r\n]|\\\\(?:\r\n|[^]))*\"|'(?:[^'\\\\\r\n]|\\\\(?:\r\n|[^]))*'",t="--[\\w-]+=(?:|(?![\"'])(?:[^\\s\\\\]|\\\\.)+)".replace(//g,function(){return n}),o={pattern:RegExp(n),greedy:!0},i={pattern:/(^[ \t]*)#.*/m,lookbehind:!0,greedy:!0};function a(e,n){return e=e.replace(//g,function(){return t}).replace(//g,function(){return r}),RegExp(e,n)}e.languages.docker={instruction:{pattern:/(^[ \t]*)(?:ADD|ARG|CMD|COPY|ENTRYPOINT|ENV|EXPOSE|FROM|HEALTHCHECK|LABEL|MAINTAINER|ONBUILD|RUN|SHELL|STOPSIGNAL|USER|VOLUME|WORKDIR)(?=\s)(?:\\.|[^\r\n\\])*(?:\\$(?:\s|#.*$)*(?![\s#])(?:\\.|[^\r\n\\])*)*/im,lookbehind:!0,greedy:!0,inside:{options:{pattern:a("(^(?:ONBUILD)?\\w+)(?:)*","i"),lookbehind:!0,greedy:!0,inside:{property:{pattern:/(^|\s)--[\w-]+/,lookbehind:!0},string:[o,{pattern:/(=)(?!["'])(?:[^\s\\]|\\.)+/,lookbehind:!0}],operator:/\\$/m,punctuation:/=/}},keyword:[{pattern:a("(^(?:ONBUILD)?HEALTHCHECK(?:)*)(?:CMD|NONE)\\b","i"),lookbehind:!0,greedy:!0},{pattern:a("(^(?:ONBUILD)?FROM(?:)*(?!--)[^ \t\\\\]+)AS","i"),lookbehind:!0,greedy:!0},{pattern:a("(^ONBUILD)\\w+","i"),lookbehind:!0,greedy:!0},{pattern:/^\w+/,greedy:!0}],comment:i,string:o,variable:/\$(?:\w+|\{[^{}"'\\]*\})/,operator:/\\$/m}},comment:i},e.languages.dockerfile=e.languages.docker}(Prism); 
+Prism.languages.go=Prism.languages.extend("clike",{string:{pattern:/(["'`])(?:\\[\s\S]|(?!\1)[^\\])*\1/,greedy:!0},keyword:/\b(?:break|case|chan|const|continue|default|defer|else|fallthrough|for|func|go(?:to)?|if|import|interface|map|package|range|return|select|struct|switch|type|var)\b/,boolean:/\b(?:_|iota|nil|true|false)\b/,number:/(?:\b0x[a-f\d]+|(?:\b\d+(?:\.\d*)?|\B\.\d+)(?:e[-+]?\d+)?)i?/i,operator:/[*\/%^!=]=?|\+[=+]?|-[=-]?|\|[=|]?|&(?:=|&|\^=?)?|>(?:>=?|=)?|<(?:<=?|=|-)?|:=|\.\.\./,builtin:/\b(?:bool|byte|complex(?:64|128)|error|float(?:32|64)|rune|string|u?int(?:8|16|32|64)?|uintptr|append|cap|close|complex|copy|delete|imag|len|make|new|panic|print(?:ln)?|real|recover)\b/}),delete Prism.languages.go["class-name"]; +Prism.languages.ini={comment:/^[ \t]*[;#].*$/m,selector:/^[ \t]*\[.*?\]/m,constant:/^[ \t]*[^\s=]+?(?=[ \t]*=)/m,"attr-value":{pattern:/=.*/,inside:{punctuation:/^[=]/}}}; +!function(e){var t=/\b(?:abstract|assert|boolean|break|byte|case|catch|char|class|const|continue|default|do|double|else|enum|exports|extends|final|finally|float|for|goto|if|implements|import|instanceof|int|interface|long|module|native|new|non-sealed|null|open|opens|package|permits|private|protected|provides|public|record|requires|return|sealed|short|static|strictfp|super|switch|synchronized|this|throw|throws|to|transient|transitive|try|uses|var|void|volatile|while|with|yield)\b/,n="(^|[^\\w.])(?:[a-z]\\w*\\s*\\.\\s*)*(?:[A-Z]\\w*\\s*\\.\\s*)*",a={pattern:RegExp(n+"[A-Z](?:[\\d_A-Z]*[a-z]\\w*)?\\b"),lookbehind:!0,inside:{namespace:{pattern:/^[a-z]\w*(?:\s*\.\s*[a-z]\w*)*(?:\s*\.)?/,inside:{punctuation:/\./}},punctuation:/\./}};e.languages.java=e.languages.extend("clike",{"class-name":[a,{pattern:RegExp(n+"[A-Z]\\w*(?=\\s+\\w+\\s*[;,=())])"),lookbehind:!0,inside:a.inside}],keyword:t,function:[e.languages.clike.function,{pattern:/(\:\:\s*)[a-z_]\w*/,lookbehind:!0}],number:/\b0b[01][01_]*L?\b|\b0x(?:\.[\da-f_p+-]+|[\da-f_]+(?:\.[\da-f_p+-]+)?)\b|(?:\b\d[\d_]*(?:\.[\d_]*)?|\B\.\d[\d_]*)(?:e[+-]?\d[\d_]*)?[dfl]?/i,operator:{pattern:/(^|[^.])(?:<<=?|>>>?=?|->|--|\+\+|&&|\|\||::|[?:~]|[-+*/%&|^!=<>]=?)/m,lookbehind:!0}}),e.languages.insertBefore("java","string",{"triple-quoted-string":{pattern:/"""[ \t]*[\r\n](?:(?:"|"")?(?:\\.|[^"\\]))*"""/,greedy:!0,alias:"string"}}),e.languages.insertBefore("java","class-name",{annotation:{pattern:/(^|[^.])@\w+(?:\s*\.\s*\w+)*/,lookbehind:!0,alias:"punctuation"},generics:{pattern:/<(?:[\w\s,.&?]|<(?:[\w\s,.&?]|<(?:[\w\s,.&?]|<[\w\s,.&?]*>)*>)*>)*>/,inside:{"class-name":a,keyword:t,punctuation:/[<>(),.:]/,operator:/[?&|]/}},namespace:{pattern:RegExp("(\\b(?:exports|import(?:\\s+static)?|module|open|opens|package|provides|requires|to|transitive|uses|with)\\s+)(?!)[a-z]\\w*(?:\\.[a-z]\\w*)*\\.?".replace(//g,function(){return t.source})),lookbehind:!0,inside:{punctuation:/\./}}})}(Prism); +Prism.languages.json={property:{pattern:/(^|[^\\])"(?:\\.|[^\\"\r\n])*"(?=\s*:)/,lookbehind:!0,greedy:!0},string:{pattern:/(^|[^\\])"(?:\\.|[^\\"\r\n])*"(?!\s*:)/,lookbehind:!0,greedy:!0},comment:{pattern:/\/\/.*|\/\*[\s\S]*?(?:\*\/|$)/,greedy:!0},number:/-?\b\d+(?:\.\d+)?(?:e[+-]?\d+)?\b/i,punctuation:/[{}[\],]/,operator:/:/,boolean:/\b(?:true|false)\b/,null:{pattern:/\bnull\b/,alias:"keyword"}},Prism.languages.webmanifest=Prism.languages.json; +!function(n){var 
e=/("|')(?:\\(?:\r\n?|\n|.)|(?!\1)[^\\\r\n])*\1/;n.languages.json5=n.languages.extend("json",{property:[{pattern:RegExp(e.source+"(?=\\s*:)"),greedy:!0},{pattern:/(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\s*:)/,alias:"unquoted"}],string:{pattern:e,greedy:!0},number:/[+-]?\b(?:NaN|Infinity|0x[a-fA-F\d]+)\b|[+-]?(?:\b\d+(?:\.\d*)?|\B\.\d+)(?:[eE][+-]?\d+\b)?/})}(Prism); +!function(e){e.languages.kotlin=e.languages.extend("clike",{keyword:{pattern:/(^|[^.])\b(?:abstract|actual|annotation|as|break|by|catch|class|companion|const|constructor|continue|crossinline|data|do|dynamic|else|enum|expect|external|final|finally|for|fun|get|if|import|in|infix|init|inline|inner|interface|internal|is|lateinit|noinline|null|object|open|operator|out|override|package|private|protected|public|reified|return|sealed|set|super|suspend|tailrec|this|throw|to|try|typealias|val|var|vararg|when|where|while)\b/,lookbehind:!0},function:[{pattern:/(?:`[^\r\n`]+`|\w+)(?=\s*\()/,greedy:!0},{pattern:/(\.)(?:`[^\r\n`]+`|\w+)(?=\s*\{)/,lookbehind:!0,greedy:!0}],number:/\b(?:0[xX][\da-fA-F]+(?:_[\da-fA-F]+)*|0[bB][01]+(?:_[01]+)*|\d+(?:_\d+)*(?:\.\d+(?:_\d+)*)?(?:[eE][+-]?\d+(?:_\d+)*)?[fFL]?)\b/,operator:/\+[+=]?|-[-=>]?|==?=?|!(?:!|==?)?|[\/*%<>]=?|[?:]:?|\.\.|&&|\|\||\b(?:and|inv|or|shl|shr|ushr|xor)\b/}),delete e.languages.kotlin["class-name"],e.languages.insertBefore("kotlin","string",{"raw-string":{pattern:/("""|''')[\s\S]*?\1/,alias:"string"}}),e.languages.insertBefore("kotlin","keyword",{annotation:{pattern:/\B@(?:\w+:)?(?:[A-Z]\w*|\[[^\]]+\])/,alias:"builtin"}}),e.languages.insertBefore("kotlin","function",{label:{pattern:/\w+@|@\w+/,alias:"symbol"}});var n=[{pattern:/\$\{[^}]+\}/,inside:{delimiter:{pattern:/^\$\{|\}$/,alias:"variable"},rest:e.languages.kotlin}},{pattern:/\$\w+/,alias:"variable"}];e.languages.kotlin.string.inside=e.languages.kotlin["raw-string"].inside={interpolation:n},e.languages.kt=e.languages.kotlin,e.languages.kts=e.languages.kotlin}(Prism); +!function(a){var e=/\\(?:[^a-z()[\]]|[a-z*]+)/i,n={"equation-command":{pattern:e,alias:"regex"}};a.languages.latex={comment:/%.*/m,cdata:{pattern:/(\\begin\{((?:verbatim|lstlisting)\*?)\})[\s\S]*?(?=\\end\{\2\})/,lookbehind:!0},equation:[{pattern:/\$\$(?:\\[\s\S]|[^\\$])+\$\$|\$(?:\\[\s\S]|[^\\$])+\$|\\\([\s\S]*?\\\)|\\\[[\s\S]*?\\\]/,inside:n,alias:"string"},{pattern:/(\\begin\{((?:equation|math|eqnarray|align|multline|gather)\*?)\})[\s\S]*?(?=\\end\{\2\})/,lookbehind:!0,inside:n,alias:"string"}],keyword:{pattern:/(\\(?:begin|end|ref|cite|label|usepackage|documentclass)(?:\[[^\]]+\])?\{)[^}]+(?=\})/,lookbehind:!0},url:{pattern:/(\\url\{)[^}]+(?=\})/,lookbehind:!0},headline:{pattern:/(\\(?:part|chapter|section|subsection|frametitle|subsubsection|paragraph|subparagraph|subsubparagraph|subsubsubparagraph)\*?(?:\[[^\]]+\])?\{)[^}]+(?=\}(?:\[[^\]]+\])?)/,lookbehind:!0,alias:"class-name"},function:{pattern:e,alias:"selector"},punctuation:/[[\]{}&]/},a.languages.tex=a.languages.latex,a.languages.context=a.languages.latex}(Prism); 
+Prism.languages.less=Prism.languages.extend("css",{comment:[/\/\*[\s\S]*?\*\//,{pattern:/(^|[^\\])\/\/.*/,lookbehind:!0}],atrule:{pattern:/@[\w-](?:\((?:[^(){}]|\([^(){}]*\))*\)|[^(){};\s]|\s+(?!\s))*?(?=\s*\{)/,inside:{punctuation:/[:()]/}},selector:{pattern:/(?:@\{[\w-]+\}|[^{};\s@])(?:@\{[\w-]+\}|\((?:[^(){}]|\([^(){}]*\))*\)|[^(){};@\s]|\s+(?!\s))*?(?=\s*\{)/,inside:{variable:/@+[\w-]+/}},property:/(?:@\{[\w-]+\}|[\w-])+(?:\+_?)?(?=\s*:)/i,operator:/[+\-*\/]/}),Prism.languages.insertBefore("less","property",{variable:[{pattern:/@[\w-]+\s*:/,inside:{punctuation:/:/}},/@@?[\w-]+/],"mixin-usage":{pattern:/([{;]\s*)[.#](?!\d)[\w-].*?(?=[(;])/,lookbehind:!0,alias:"function"}}); +Prism.languages.lua={comment:/^#!.+|--(?:\[(=*)\[[\s\S]*?\]\1\]|.*)/m,string:{pattern:/(["'])(?:(?!\1)[^\\\r\n]|\\z(?:\r\n|\s)|\\(?:\r\n|[^z]))*\1|\[(=*)\[[\s\S]*?\]\2\]/,greedy:!0},number:/\b0x[a-f\d]+(?:\.[a-f\d]*)?(?:p[+-]?\d+)?\b|\b\d+(?:\.\B|(?:\.\d*)?(?:e[+-]?\d+)?\b)|\B\.\d+(?:e[+-]?\d+)?\b/i,keyword:/\b(?:and|break|do|else|elseif|end|false|for|function|goto|if|in|local|nil|not|or|repeat|return|then|true|until|while)\b/,function:/(?!\d)\w+(?=\s*(?:[({]))/,operator:[/[-+*%^&|#]|\/\/?|<[<=]?|>[>=]?|[=~]=?/,{pattern:/(^|[^.])\.\.(?!\.)/,lookbehind:!0}],punctuation:/[\[\](){},;]|\.+|:+/}; +Prism.languages.makefile={comment:{pattern:/(^|[^\\])#(?:\\(?:\r\n|[\s\S])|[^\\\r\n])*/,lookbehind:!0},string:{pattern:/(["'])(?:\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/,greedy:!0},builtin:/\.[A-Z][^:#=\s]+(?=\s*:(?!=))/,symbol:{pattern:/^(?:[^:=\s]|[ \t]+(?![\s:]))+(?=\s*:(?!=))/m,inside:{variable:/\$+(?:(?!\$)[^(){}:#=\s]+|(?=[({]))/}},variable:/\$+(?:(?!\$)[^(){}:#=\s]+|\([@*%<^+?][DF]\)|(?=[({]))/,keyword:[/-include\b|\b(?:define|else|endef|endif|export|ifn?def|ifn?eq|include|override|private|sinclude|undefine|unexport|vpath)\b/,{pattern:/(\()(?:addsuffix|abspath|and|basename|call|dir|error|eval|file|filter(?:-out)?|findstring|firstword|flavor|foreach|guile|if|info|join|lastword|load|notdir|or|origin|patsubst|realpath|shell|sort|strip|subst|suffix|value|warning|wildcard|word(?:s|list)?)(?=[ \t])/,lookbehind:!0}],operator:/(?:::|[?:+!])?=|[|@]/,punctuation:/[:;(){}]/}; +!function(u){function n(n){return n=n.replace(//g,function(){return"(?:\\\\.|[^\\\\\n\r]|(?:\n|\r\n?)(?!\n|\r\n?))"}),RegExp("((?:^|[^\\\\])(?:\\\\{2})*)(?:"+n+")")}var e="(?:\\\\.|``(?:[^`\r\n]|`(?!`))+``|`[^`\r\n]+`|[^\\\\|\r\n`])+",t="\\|?__(?:\\|__)+\\|?(?:(?:\n|\r\n?)|(?![^]))".replace(/__/g,function(){return e}),a="\\|?[ \t]*:?-{3,}:?[ \t]*(?:\\|[ \t]*:?-{3,}:?[ \t]*)+\\|?(?:\n|\r\n?)";u.languages.markdown=u.languages.extend("markup",{}),u.languages.insertBefore("markdown","prolog",{"front-matter-block":{pattern:/(^(?:\s*[\r\n])?)---(?!.)[\s\S]*?[\r\n]---(?!.)/,lookbehind:!0,greedy:!0,inside:{punctuation:/^---|---$/,"font-matter":{pattern:/\S+(?:\s+\S+)*/,alias:["yaml","language-yaml"],inside:u.languages.yaml}}},blockquote:{pattern:/^>(?:[\t ]*>)*/m,alias:"punctuation"},table:{pattern:RegExp("^"+t+a+"(?:"+t+")*","m"),inside:{"table-data-rows":{pattern:RegExp("^("+t+a+")(?:"+t+")*$"),lookbehind:!0,inside:{"table-data":{pattern:RegExp(e),inside:u.languages.markdown},punctuation:/\|/}},"table-line":{pattern:RegExp("^("+t+")"+a+"$"),lookbehind:!0,inside:{punctuation:/\||:?-{3,}:?/}},"table-header-row":{pattern:RegExp("^"+t+"$"),inside:{"table-header":{pattern:RegExp(e),alias:"important",inside:u.languages.markdown},punctuation:/\|/}}}},code:[{pattern:/((?:^|\n)[ \t]*\n|(?:^|\r\n?)[ \t]*\r\n?)(?: {4}|\t).+(?:(?:\n|\r\n?)(?: 
{4}|\t).+)*/,lookbehind:!0,alias:"keyword"},{pattern:/``.+?``|`[^`\r\n]+`/,alias:"keyword"},{pattern:/^```[\s\S]*?^```$/m,greedy:!0,inside:{"code-block":{pattern:/^(```.*(?:\n|\r\n?))[\s\S]+?(?=(?:\n|\r\n?)^```$)/m,lookbehind:!0},"code-language":{pattern:/^(```).+/,lookbehind:!0},punctuation:/```/}}],title:[{pattern:/\S.*(?:\n|\r\n?)(?:==+|--+)(?=[ \t]*$)/m,alias:"important",inside:{punctuation:/==+$|--+$/}},{pattern:/(^\s*)#.+/m,lookbehind:!0,alias:"important",inside:{punctuation:/^#+|#+$/}}],hr:{pattern:/(^\s*)([*-])(?:[\t ]*\2){2,}(?=\s*$)/m,lookbehind:!0,alias:"punctuation"},list:{pattern:/(^\s*)(?:[*+-]|\d+\.)(?=[\t ].)/m,lookbehind:!0,alias:"punctuation"},"url-reference":{pattern:/!?\[[^\]]+\]:[\t ]+(?:\S+|<(?:\\.|[^>\\])+>)(?:[\t ]+(?:"(?:\\.|[^"\\])*"|'(?:\\.|[^'\\])*'|\((?:\\.|[^)\\])*\)))?/,inside:{variable:{pattern:/^(!?\[)[^\]]+/,lookbehind:!0},string:/(?:"(?:\\.|[^"\\])*"|'(?:\\.|[^'\\])*'|\((?:\\.|[^)\\])*\))$/,punctuation:/^[\[\]!:]|[<>]/},alias:"url"},bold:{pattern:n("\\b__(?:(?!_)|_(?:(?!_))+_)+__\\b|\\*\\*(?:(?!\\*)|\\*(?:(?!\\*))+\\*)+\\*\\*"),lookbehind:!0,greedy:!0,inside:{content:{pattern:/(^..)[\s\S]+(?=..$)/,lookbehind:!0,inside:{}},punctuation:/\*\*|__/}},italic:{pattern:n("\\b_(?:(?!_)|__(?:(?!_))+__)+_\\b|\\*(?:(?!\\*)|\\*\\*(?:(?!\\*))+\\*\\*)+\\*"),lookbehind:!0,greedy:!0,inside:{content:{pattern:/(^.)[\s\S]+(?=.$)/,lookbehind:!0,inside:{}},punctuation:/[*_]/}},strike:{pattern:n("(~~?)(?:(?!~))+?\\2"),lookbehind:!0,greedy:!0,inside:{content:{pattern:/(^~~?)[\s\S]+(?=\1$)/,lookbehind:!0,inside:{}},punctuation:/~~?/}},url:{pattern:n('!?\\[(?:(?!\\]))+\\](?:\\([^\\s)]+(?:[\t ]+"(?:\\\\.|[^"\\\\])*")?\\)|[ \t]?\\[(?:(?!\\]))+\\])'),lookbehind:!0,greedy:!0,inside:{operator:/^!/,content:{pattern:/(^\[)[^\]]+(?=\])/,lookbehind:!0,inside:{}},variable:{pattern:/(^\][ \t]?\[)[^\]]+(?=\]$)/,lookbehind:!0},url:{pattern:/(^\]\()[^\s)]+/,lookbehind:!0},string:{pattern:/(^[ \t]+)"(?:\\.|[^"\\])*"(?=\)$)/,lookbehind:!0}}}}),["url","bold","italic","strike"].forEach(function(e){["url","bold","italic","strike"].forEach(function(n){e!==n&&(u.languages.markdown[e].inside.content.inside[n]=u.languages.markdown[n])})}),u.hooks.add("after-tokenize",function(n){"markdown"!==n.language&&"md"!==n.language||!function n(e){if(e&&"string"!=typeof e)for(var t=0,a=e.length;t=d.length);t++){var a=n[t];if("string"==typeof a||a.content&&"string"==typeof a.content){var r=d[m],o=p.tokenStack[r],c="string"==typeof a?a:a.content,i=v(k,r),u=c.indexOf(i);if(-1]?|\+\+?|!=?|<>?=?|==?|&&?|\|\|?|[~^%?*\/@]/}),delete Prism.languages.objectivec["class-name"],Prism.languages.objc=Prism.languages.objectivec; +!function(a){var 
[Elided: minified third-party assets added by this diff. This span held the remainder of the bundled Prism.js syntax-highlighting components (language definitions for PHP, PowerShell, Python, JSX, TypeScript/TSX, Ruby, Rust, Sass, SCSS, shell-session, SQL, Swift, Textile and YAML, plus the line-numbers plugin) and most of a minified image-viewer stylesheet (.viewer-* rules). The minified text was corrupted during extraction (angle-bracket sequences were stripped) and cannot be reconstructed faithfully, so it is not reproduced here; only the stylesheet's final media query and its end-of-file marker remain on the next line.]
(max-width:1199px){.viewer-hide-md-down{display:none}} \ No newline at end of file diff --git a/maixpy/static/image/bilibili_check_live_link.png b/maixpy/static/image/bilibili_check_live_link.png new file mode 100644 index 00000000..662bf378 Binary files /dev/null and b/maixpy/static/image/bilibili_check_live_link.png differ diff --git a/maixpy/static/image/bilibili_check_rtmp_url.png b/maixpy/static/image/bilibili_check_rtmp_url.png new file mode 100644 index 00000000..9fdb47b1 Binary files /dev/null and b/maixpy/static/image/bilibili_check_rtmp_url.png differ diff --git a/maixpy/static/image/bilibili_click_live.png b/maixpy/static/image/bilibili_click_live.png new file mode 100644 index 00000000..af02be88 Binary files /dev/null and b/maixpy/static/image/bilibili_click_live.png differ diff --git a/maixpy/static/image/bilibili_click_live_setting.png b/maixpy/static/image/bilibili_click_live_setting.png new file mode 100644 index 00000000..e58fe9b7 Binary files /dev/null and b/maixpy/static/image/bilibili_click_live_setting.png differ diff --git a/maixpy/static/image/bilibili_live_start.png b/maixpy/static/image/bilibili_live_start.png new file mode 100644 index 00000000..e4a7e9d9 Binary files /dev/null and b/maixpy/static/image/bilibili_live_start.png differ diff --git a/maixpy/static/image/body_keypoint.jpg b/maixpy/static/image/body_keypoint.jpg new file mode 100644 index 00000000..5e0e14e0 Binary files /dev/null and b/maixpy/static/image/body_keypoint.jpg differ diff --git a/maixpy/static/image/camera.png b/maixpy/static/image/camera.png new file mode 100644 index 00000000..0fa70af0 Binary files /dev/null and b/maixpy/static/image/camera.png differ diff --git a/maixpy/static/image/capture_sky.jpg b/maixpy/static/image/capture_sky.jpg new file mode 100644 index 00000000..c96abeda Binary files /dev/null and b/maixpy/static/image/capture_sky.jpg differ diff --git a/maixpy/static/image/code.svg b/maixpy/static/image/code.svg new file mode 100644 index 00000000..661fcd2e --- /dev/null +++ b/maixpy/static/image/code.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/maixpy/static/image/face_recognize.jpg b/maixpy/static/image/face_recognize.jpg new file mode 100644 index 00000000..c04f89e2 Binary files /dev/null and b/maixpy/static/image/face_recognize.jpg differ diff --git a/maixpy/static/image/find_blobs_app.jpg b/maixpy/static/image/find_blobs_app.jpg new file mode 100644 index 00000000..f01d0932 Binary files /dev/null and b/maixpy/static/image/find_blobs_app.jpg differ diff --git a/maixpy/static/image/github-fill.svg b/maixpy/static/image/github-fill.svg new file mode 100644 index 00000000..d3fd1f6d --- /dev/null +++ b/maixpy/static/image/github-fill.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/maixpy/static/image/global_shutter.jpg b/maixpy/static/image/global_shutter.jpg new file mode 100644 index 00000000..9f0ee32b Binary files /dev/null and b/maixpy/static/image/global_shutter.jpg differ diff --git a/maixpy/static/image/hdmi_capture.jpg b/maixpy/static/image/hdmi_capture.jpg new file mode 100644 index 00000000..10cede04 Binary files /dev/null and b/maixpy/static/image/hdmi_capture.jpg differ diff --git a/maixpy/static/image/language.svg b/maixpy/static/image/language.svg new file mode 100644 index 00000000..b1903d01 --- /dev/null +++ b/maixpy/static/image/language.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/maixpy/static/image/line_tracking_demo.jpg b/maixpy/static/image/line_tracking_demo.jpg new file mode 100644 index 00000000..3bd95870 
Binary files /dev/null and b/maixpy/static/image/line_tracking_demo.jpg differ diff --git a/maixpy/static/image/maix-ecosystem.excalidraw b/maixpy/static/image/maix-ecosystem.excalidraw new file mode 100644 index 00000000..794d2bd5 --- /dev/null +++ b/maixpy/static/image/maix-ecosystem.excalidraw @@ -0,0 +1,974 @@ +{ + "type": "excalidraw", + "version": 2, + "source": "https://excalidraw.com", + "elements": [ + { + "type": "rectangle", + "version": 149, + "versionNonce": 170295430, + "isDeleted": false, + "id": "lgDrdWXT9bbyk2trMN6Vw", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 542, + "y": 545, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 602.8055468764685, + "height": 133, + "seed": 1518581314, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [], + "updated": 1705903395747, + "link": null, + "locked": false + }, + { + "type": "rectangle", + "version": 171, + "versionNonce": 1910780634, + "isDeleted": false, + "id": "yN5xUSW8oMw5XX3EP_QN5", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 543.8055468764685, + "y": 480.0277343823424, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 602.388906247063, + "height": 64, + "seed": 1257741826, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [], + "updated": 1705903395747, + "link": null, + "locked": false + }, + { + "type": "rectangle", + "version": 196, + "versionNonce": 869302214, + "isDeleted": false, + "id": "pZsOD-erMl2GhwgdB3len", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 544.3303130110232, + "y": 415.05640610609106, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 602.388906247063, + "height": 64, + "seed": 471371202, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [], + "updated": 1705903395747, + "link": null, + "locked": false + }, + { + "type": "rectangle", + "version": 110, + "versionNonce": 256690074, + "isDeleted": false, + "id": "-bPjeuUMFo6K5trDKlsX1", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 541.0280468294778, + "y": 263.3917182712827, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 604.1601573513434, + "height": 150.63726589960157, + "seed": 1124435330, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [], + "updated": 1705903395747, + "link": null, + "locked": false + }, + { + "type": "rectangle", + "version": 140, + "versionNonce": 1853396742, + "isDeleted": false, + "id": "lfgD3TSEibNF4UEOT65O6", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 822.1639067169694, + "y": 263.3917182712827, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 323.0242974638518, + "height": 116.80429708792639, + "seed": 956138818, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [ + { + "type": "text", + "id": "jFsobJvvk3oLsXpIMZZGR" + } + ], + "updated": 1705903395747, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 53, + "versionNonce": 1145357402, + "isDeleted": false, + 
"id": "jFsobJvvk3oLsXpIMZZGR", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 876.139831083661, + "y": 291.7938668152459, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 215.07244873046875, + "height": 60, + "seed": 1960559838, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1705903395747, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "MaixPy\nPython API\n(Auto Sync with MaixCDK)", + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "lfgD3TSEibNF4UEOT65O6", + "originalText": "MaixPy\nPython API\n(Auto Sync with MaixCDK)", + "lineHeight": 1.25, + "baseline": 55 + }, + { + "type": "rectangle", + "version": 148, + "versionNonce": 580634182, + "isDeleted": false, + "id": "4EvFUYMCaR0VTnX-qZHxz", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 957.7240489190394, + "y": -159.09379322492532, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 186.8868753406823, + "height": 420.0688708668026, + "seed": 2002750722, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [ + { + "type": "text", + "id": "87m8Zy2thgUpCwI3aLGLU" + } + ], + "updated": 1705903395747, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 88, + "versionNonce": 983509274, + "isDeleted": false, + "id": "87m8Zy2thgUpCwI3aLGLU", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 962.9272827319587, + "y": 0.9406422084759924, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 176.48040771484375, + "height": 100, + "seed": 322530910, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1705903395747, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "MaixVision Workstation\n\npython coding, camera\npreview, graphic coding,\ncamera monitor...", + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "4EvFUYMCaR0VTnX-qZHxz", + "originalText": "MaixVision Workstation\n\npython coding, camera preview, graphic coding, camera monitor...", + "lineHeight": 1.25, + "baseline": 95 + }, + { + "type": "rectangle", + "version": 304, + "versionNonce": 555158918, + "isDeleted": false, + "id": "EcrNFNI6yL92AoSlIWVVO", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 544.3798973239758, + "y": -160.84555157963533, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 413.2455476283189, + "height": 297.55670053920767, + "seed": 1940561090, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [], + "updated": 1705903395747, + "link": null, + "locked": false + }, + { + "type": "rectangle", + "version": 411, + "versionNonce": 1775171034, + "isDeleted": false, + "id": "rRCFAQjwm1vnYBnizXNgV", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "dotted", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 645.073256882533, + "y": -152.95838497869576, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 152.7585529775803, + "height": 137.38091739682395, + "seed": 461223042, + "groupIds": [], + "frameId": null, + "roundness": { + 
"type": 3 + }, + "boundElements": [ + { + "type": "text", + "id": "IljY5UxdaUGFHT9sh3TVh" + } + ], + "updated": 1705903395747, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 206, + "versionNonce": 388459718, + "isDeleted": false, + "id": "IljY5UxdaUGFHT9sh3TVh", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 659.1804157565771, + "y": -104.26792628028379, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 124.54423522949219, + "height": 40, + "seed": 1150609118, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1705903395747, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "AI model online \ntraining", + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "rRCFAQjwm1vnYBnizXNgV", + "originalText": "AI model online training", + "lineHeight": 1.25, + "baseline": 35 + }, + { + "type": "rectangle", + "version": 429, + "versionNonce": 1659316890, + "isDeleted": false, + "id": "-s8ub4VAaSqSgcxn_ClB6", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "dotted", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 806.195982956134, + "y": -152.1965882262943, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 143.40089934047262, + "height": 135.41277270964167, + "seed": 1352127554, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [ + { + "type": "text", + "id": "HiKrFzJa43Cm5SmFjQ47Z" + } + ], + "updated": 1705903395747, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 209, + "versionNonce": 40494086, + "isDeleted": false, + "id": "HiKrFzJa43Cm5SmFjQ47Z", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 813.1922929168977, + "y": -94.49020187147346, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 129.4082794189453, + "height": 20, + "seed": 1527204318, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1705903395747, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "AI model sharing", + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "-s8ub4VAaSqSgcxn_ClB6", + "originalText": "AI model sharing", + "lineHeight": 1.25, + "baseline": 15 + }, + { + "type": "rectangle", + "version": 471, + "versionNonce": 479905626, + "isDeleted": false, + "id": "4LZjGT8mwEbDX3LZWxl6R", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "dotted", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 645.8346979642795, + "y": -8.461871689459826, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 151.86479451166826, + "height": 139.42752107389416, + "seed": 1691876354, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [ + { + "type": "text", + "id": "K0Wr4IrqdbZ8pYoLS5o9U" + } + ], + "updated": 1705903395747, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 213, + "versionNonce": 618361670, + "isDeleted": false, + "id": "K0Wr4IrqdbZ8pYoLS5o9U", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 656.4469429373987, + "y": 41.25188884748725, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 
130.6403045654297, + "height": 40, + "seed": 1106354562, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1705903395747, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "Project sharing, \nreward", + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "4LZjGT8mwEbDX3LZWxl6R", + "originalText": "Project sharing, reward", + "lineHeight": 1.25, + "baseline": 35 + }, + { + "type": "rectangle", + "version": 477, + "versionNonce": 1060949018, + "isDeleted": false, + "id": "fawTMpzGHBvMo4rpWSaai", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "dotted", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 806.6026053001843, + "y": -8.271965644919078, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 143.81518326443873, + "height": 139.21006740019186, + "seed": 373107650, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [ + { + "type": "text", + "id": "znJrxwJJU0EAYq6Tw-t9R" + } + ], + "updated": 1705903395747, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 249, + "versionNonce": 2027494022, + "isDeleted": false, + "id": "znJrxwJJU0EAYq6Tw-t9R", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 837.0541117273256, + "y": 51.33306805517685, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 82.91217041015625, + "height": 20, + "seed": 1660884994, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1705903395747, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "APP Store", + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "fawTMpzGHBvMo4rpWSaai", + "originalText": "APP Store", + "lineHeight": 1.25, + "baseline": 15 + }, + { + "type": "rectangle", + "version": 103, + "versionNonce": 1891645658, + "isDeleted": false, + "id": "AD9gq4p6sVlK5uCSSdF79", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 541.7062882439832, + "y": 137.47954767793942, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 414.3818235520964, + "height": 125.37029693455148, + "seed": 182870914, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [], + "updated": 1705903395747, + "link": null, + "locked": false + }, + { + "type": "rectangle", + "version": 129, + "versionNonce": 622844358, + "isDeleted": false, + "id": "rBVBpnjw-mw4TFaxBVqob", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "dotted", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 546.9896785754999, + "y": 143.80647502552387, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 156.74454974378438, + "height": 114.3227462361117, + "seed": 1293067074, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [ + { + "type": "text", + "id": "N9U-XXCh_1lmG6TFdyWZf" + } + ], + "updated": 1705903395747, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 59, + "versionNonce": 548933018, + "isDeleted": false, + "id": "N9U-XXCh_1lmG6TFdyWZf", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 583.3218686085248, + "y": 190.96784814357972, + "strokeColor": 
"#343a40", + "backgroundColor": "transparent", + "width": 84.08016967773438, + "height": 20, + "seed": 1628866946, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1705903395747, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "User APPs", + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "rBVBpnjw-mw4TFaxBVqob", + "originalText": "User APPs", + "lineHeight": 1.25, + "baseline": 15 + }, + { + "type": "rectangle", + "version": 227, + "versionNonce": 1892462854, + "isDeleted": false, + "id": "taBSuFoogLth7z8TAIGfA", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "dotted", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 711.7353054438793, + "y": 143.5381217253858, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 238.5851579676612, + "height": 113.2492266066432, + "seed": 1193339650, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [ + { + "type": "text", + "id": "zbtofl7zMyang1nAhL_EV" + } + ], + "updated": 1705903395747, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 154, + "versionNonce": 1415006810, + "isDeleted": false, + "id": "zbtofl7zMyang1nAhL_EV", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 717.9876241127685, + "y": 150.1627350287074, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 226.0805206298828, + "height": 100, + "seed": 756599262, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1705903395747, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "Official APPs:\nLauncher, APP store, \nSettings,Camera, Monitor, AI\ndetector, line tracer, QR \ncode finder etc.", + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "taBSuFoogLth7z8TAIGfA", + "originalText": "Official APPs:\nLauncher, APP store, Settings,Camera, Monitor, AI detector, line tracer, QR code finder etc.", + "lineHeight": 1.25, + "baseline": 95 + }, + { + "type": "text", + "version": 49, + "versionNonce": 928817222, + "isDeleted": false, + "id": "sFPgg4fqvH6SkVMAcU2Ic", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 609.1081303162628, + "y": 321.1711479479845, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 86.400146484375, + "height": 40, + "seed": 1066416834, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 2 + }, + "boundElements": [], + "updated": 1705903395747, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "MaixCDK\nC/C++ API", + "textAlign": "left", + "verticalAlign": "top", + "containerId": null, + "originalText": "MaixCDK\nC/C++ API", + "lineHeight": 1.25, + "baseline": 34 + }, + { + "type": "text", + "version": 60, + "versionNonce": 1920003866, + "isDeleted": false, + "id": "mdOMOmkUcecRL2pFLwKdr", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 797.665729771511, + "y": 435.4203338248098, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 55.5521240234375, + "height": 20, + "seed": 1426035330, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 2 + }, + "boundElements": [], + "updated": 1705903395747, + "link": null, + 
"locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "Drivers", + "textAlign": "left", + "verticalAlign": "top", + "containerId": null, + "originalText": "Drivers", + "lineHeight": 1.25, + "baseline": 14 + }, + { + "type": "text", + "version": 79, + "versionNonce": 1554818950, + "isDeleted": false, + "id": "HEf5jPBhfnk4ha0pwN7__", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 778.159771207175, + "y": 502.2979060453904, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 104.52821350097656, + "height": 20, + "seed": 2049831490, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 2 + }, + "boundElements": [], + "updated": 1705903395747, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "System(Linux)", + "textAlign": "left", + "verticalAlign": "top", + "containerId": null, + "originalText": "System(Linux)", + "lineHeight": 1.25, + "baseline": 14 + }, + { + "type": "text", + "version": 101, + "versionNonce": 259521498, + "isDeleted": false, + "id": "huzNuYRp7_Zxwnq1MvOpN", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 798.594584941241, + "y": 584.0371609816557, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 66.52815246582031, + "height": 20, + "seed": 1912232450, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 2 + }, + "boundElements": [], + "updated": 1705903395747, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "MaixCam", + "textAlign": "left", + "verticalAlign": "top", + "containerId": null, + "originalText": "MaixCam", + "lineHeight": 1.25, + "baseline": 14 + }, + { + "type": "text", + "version": 78, + "versionNonce": 1090111174, + "isDeleted": false, + "id": "BNhQWU8ZdE9UUh9PGz8WY", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 729.8593023811998, + "y": 614.0371609816557, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 248.54452514648438, + "height": 20, + "seed": 1264830914, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 2 + }, + "boundElements": [], + "updated": 1705903395747, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "For STEM, Industry, Personal...", + "textAlign": "left", + "verticalAlign": "top", + "containerId": null, + "originalText": "For STEM, Industry, Personal...", + "lineHeight": 1.25, + "baseline": 14 + }, + { + "type": "text", + "version": 75, + "versionNonce": 964653210, + "isDeleted": false, + "id": "DsZ8iDV3d86Jb4hh_bEIW", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 567.7901931261797, + "y": -20.78670585983116, + "strokeColor": "#343a40", + "backgroundColor": "transparent", + "width": 61.456146240234375, + "height": 20, + "seed": 1569276290, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 2 + }, + "boundElements": [], + "updated": 1705903395747, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "MaixHub", + "textAlign": "left", + "verticalAlign": "top", + "containerId": null, + "originalText": "MaixHub", + "lineHeight": 1.25, + "baseline": 14 + } + ], + "appState": { + "gridSize": null, + "viewBackgroundColor": "#ffffff" + }, + "files": {} +} \ No newline at 
end of file diff --git a/maixpy/static/image/maix_ecosystem.png b/maixpy/static/image/maix_ecosystem.png new file mode 100644 index 00000000..dff03994 Binary files /dev/null and b/maixpy/static/image/maix_ecosystem.png differ diff --git a/maixpy/static/image/maixcam.png b/maixpy/static/image/maixcam.png new file mode 100644 index 00000000..976fa5fa Binary files /dev/null and b/maixpy/static/image/maixcam.png differ diff --git a/maixpy/static/image/maixcam_back.png b/maixpy/static/image/maixcam_back.png new file mode 100644 index 00000000..44c5ed0d Binary files /dev/null and b/maixpy/static/image/maixcam_back.png differ diff --git a/maixpy/static/image/maixcam_font.png b/maixpy/static/image/maixcam_font.png new file mode 100644 index 00000000..268ea6e6 Binary files /dev/null and b/maixpy/static/image/maixcam_font.png differ diff --git a/maixpy/static/image/maixcam_hardware_back.png b/maixpy/static/image/maixcam_hardware_back.png new file mode 100644 index 00000000..8c4b43e0 Binary files /dev/null and b/maixpy/static/image/maixcam_hardware_back.png differ diff --git a/maixpy/static/image/maixcam_pro.png b/maixpy/static/image/maixcam_pro.png new file mode 100644 index 00000000..28dffd72 Binary files /dev/null and b/maixpy/static/image/maixcam_pro.png differ diff --git a/maixpy/static/image/maixcam_pro_io.png b/maixpy/static/image/maixcam_pro_io.png new file mode 100644 index 00000000..66aef0db Binary files /dev/null and b/maixpy/static/image/maixcam_pro_io.png differ diff --git a/maixpy/static/image/maixcdk.png b/maixpy/static/image/maixcdk.png new file mode 100644 index 00000000..0d4262ed Binary files /dev/null and b/maixpy/static/image/maixcdk.png differ diff --git a/maixpy/static/image/maixhub.jpg b/maixpy/static/image/maixhub.jpg new file mode 100644 index 00000000..eddf004a Binary files /dev/null and b/maixpy/static/image/maixhub.jpg differ diff --git a/maixpy/static/image/maixpy-v1-square.png b/maixpy/static/image/maixpy-v1-square.png new file mode 100644 index 00000000..c6d54c57 Binary files /dev/null and b/maixpy/static/image/maixpy-v1-square.png differ diff --git a/maixpy/static/image/maixpy-v1.png b/maixpy/static/image/maixpy-v1.png new file mode 100644 index 00000000..2df0900a Binary files /dev/null and b/maixpy/static/image/maixpy-v1.png differ diff --git a/maixpy/static/image/maixpy_banner.png b/maixpy/static/image/maixpy_banner.png new file mode 100644 index 00000000..3b290144 Binary files /dev/null and b/maixpy/static/image/maixpy_banner.png differ diff --git a/maixpy/static/image/maixvision.jpg b/maixpy/static/image/maixvision.jpg new file mode 100644 index 00000000..e6f6e33d Binary files /dev/null and b/maixpy/static/image/maixvision.jpg differ diff --git a/maixpy/static/image/monitor.jpg b/maixpy/static/image/monitor.jpg new file mode 100644 index 00000000..8bc3f1a1 Binary files /dev/null and b/maixpy/static/image/monitor.jpg differ diff --git a/maixpy/static/image/object_track.jpg b/maixpy/static/image/object_track.jpg new file mode 100644 index 00000000..d12036f0 Binary files /dev/null and b/maixpy/static/image/object_track.jpg differ diff --git a/maixpy/static/image/ocr.jpg b/maixpy/static/image/ocr.jpg new file mode 100644 index 00000000..cebf2344 Binary files /dev/null and b/maixpy/static/image/ocr.jpg differ diff --git a/maixpy/static/image/opencv_openmv.jpg b/maixpy/static/image/opencv_openmv.jpg new file mode 100644 index 00000000..01674361 Binary files /dev/null and b/maixpy/static/image/opencv_openmv.jpg differ diff --git a/maixpy/static/image/rndis_windows.jpg 
b/maixpy/static/image/rndis_windows.jpg new file mode 100644 index 00000000..58cb5e5b Binary files /dev/null and b/maixpy/static/image/rndis_windows.jpg differ diff --git a/maixpy/static/image/search/cancel.svg b/maixpy/static/image/search/cancel.svg new file mode 100644 index 00000000..46102a4c --- /dev/null +++ b/maixpy/static/image/search/cancel.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/maixpy/static/image/search/close.svg b/maixpy/static/image/search/close.svg new file mode 100644 index 00000000..87c50f5d --- /dev/null +++ b/maixpy/static/image/search/close.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/maixpy/static/image/search/search.svg b/maixpy/static/image/search/search.svg new file mode 100644 index 00000000..3fafc630 --- /dev/null +++ b/maixpy/static/image/search/search.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/maixpy/static/image/search/up.svg b/maixpy/static/image/search/up.svg new file mode 100644 index 00000000..1ed67152 --- /dev/null +++ b/maixpy/static/image/search/up.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/maixpy/static/image/self_learn_classifier.jpg b/maixpy/static/image/self_learn_classifier.jpg new file mode 100644 index 00000000..0ce807b1 Binary files /dev/null and b/maixpy/static/image/self_learn_classifier.jpg differ diff --git a/maixpy/static/image/self_learn_detector.jpg b/maixpy/static/image/self_learn_detector.jpg new file mode 100644 index 00000000..a4e5710a Binary files /dev/null and b/maixpy/static/image/self_learn_detector.jpg differ diff --git a/maixpy/static/image/serial_module.png b/maixpy/static/image/serial_module.png new file mode 100644 index 00000000..588e3375 Binary files /dev/null and b/maixpy/static/image/serial_module.png differ diff --git a/maixpy/static/image/sipeed_splash.jpeg b/maixpy/static/image/sipeed_splash.jpeg new file mode 100644 index 00000000..e5d0980a Binary files /dev/null and b/maixpy/static/image/sipeed_splash.jpeg differ diff --git a/maixpy/static/image/theme_default/anchor.svg b/maixpy/static/image/theme_default/anchor.svg new file mode 100644 index 00000000..4cbdda12 --- /dev/null +++ b/maixpy/static/image/theme_default/anchor.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/maixpy/static/image/theme_default/array.svg b/maixpy/static/image/theme_default/array.svg new file mode 100644 index 00000000..5c5c5148 --- /dev/null +++ b/maixpy/static/image/theme_default/array.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/maixpy/static/image/theme_default/back.svg b/maixpy/static/image/theme_default/back.svg new file mode 100644 index 00000000..e2729940 --- /dev/null +++ b/maixpy/static/image/theme_default/back.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/maixpy/static/image/theme_default/dark_mode.svg b/maixpy/static/image/theme_default/dark_mode.svg new file mode 100644 index 00000000..15523d8e --- /dev/null +++ b/maixpy/static/image/theme_default/dark_mode.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/maixpy/static/image/theme_default/ext_link.svg b/maixpy/static/image/theme_default/ext_link.svg new file mode 100644 index 00000000..ded48739 --- /dev/null +++ b/maixpy/static/image/theme_default/ext_link.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/maixpy/static/image/theme_default/indicator.svg b/maixpy/static/image/theme_default/indicator.svg new file mode 100644 index 00000000..7170d305 --- /dev/null +++ b/maixpy/static/image/theme_default/indicator.svg @@ -0,0 +1 @@ + \ No newline at 
end of file diff --git a/maixpy/static/image/theme_default/light_mode.svg b/maixpy/static/image/theme_default/light_mode.svg new file mode 100644 index 00000000..0be13395 --- /dev/null +++ b/maixpy/static/image/theme_default/light_mode.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/maixpy/static/image/theme_default/menu.svg b/maixpy/static/image/theme_default/menu.svg new file mode 100644 index 00000000..5f9cdda6 --- /dev/null +++ b/maixpy/static/image/theme_default/menu.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/maixpy/static/image/theme_default/print.svg b/maixpy/static/image/theme_default/print.svg new file mode 100644 index 00000000..3b343178 --- /dev/null +++ b/maixpy/static/image/theme_default/print.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/maixpy/static/image/theme_default/to-top.svg b/maixpy/static/image/theme_default/to-top.svg new file mode 100644 index 00000000..59f0a172 --- /dev/null +++ b/maixpy/static/image/theme_default/to-top.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/maixpy/static/image/thermal.jpg b/maixpy/static/image/thermal.jpg new file mode 100644 index 00000000..7e27b56e Binary files /dev/null and b/maixpy/static/image/thermal.jpg differ diff --git a/maixpy/static/image/voice_recognize.jpg b/maixpy/static/image/voice_recognize.jpg new file mode 100644 index 00000000..c02f8b4a Binary files /dev/null and b/maixpy/static/image/voice_recognize.jpg differ diff --git a/maixpy/static/image/weather_station.jpg b/maixpy/static/image/weather_station.jpg new file mode 100644 index 00000000..e03e0df7 Binary files /dev/null and b/maixpy/static/image/weather_station.jpg differ diff --git a/maixpy/static/image/wechat.png b/maixpy/static/image/wechat.png new file mode 100644 index 00000000..13978a67 Binary files /dev/null and b/maixpy/static/image/wechat.png differ diff --git a/maixpy/static/image/windows_ncm_install.jpg b/maixpy/static/image/windows_ncm_install.jpg new file mode 100644 index 00000000..03c67a81 Binary files /dev/null and b/maixpy/static/image/windows_ncm_install.jpg differ diff --git a/maixpy/static/image/windows_ncm_not_ok.png b/maixpy/static/image/windows_ncm_not_ok.png new file mode 100644 index 00000000..070c0ac6 Binary files /dev/null and b/maixpy/static/image/windows_ncm_not_ok.png differ diff --git a/maixpy/static/image/windows_ncm_ok.png b/maixpy/static/image/windows_ncm_ok.png new file mode 100644 index 00000000..d9fae5c6 Binary files /dev/null and b/maixpy/static/image/windows_ncm_ok.png differ diff --git a/maixpy/static/image/windows_rndis_ok.png b/maixpy/static/image/windows_rndis_ok.png new file mode 100644 index 00000000..2fe75e0a Binary files /dev/null and b/maixpy/static/image/windows_rndis_ok.png differ diff --git a/maixpy/static/images/thumbs_up/up.svg b/maixpy/static/images/thumbs_up/up.svg new file mode 100644 index 00000000..fda49330 --- /dev/null +++ b/maixpy/static/images/thumbs_up/up.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/maixpy/static/images/thumbs_up/upped.svg b/maixpy/static/images/thumbs_up/upped.svg new file mode 100644 index 00000000..f5c9305d --- /dev/null +++ b/maixpy/static/images/thumbs_up/upped.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/maixpy/static/js/custom.js b/maixpy/static/js/custom.js new file mode 100644 index 00000000..e69de29b diff --git a/maixpy/static/js/search/search_main.js b/maixpy/static/js/search/search_main.js new file mode 100644 index 00000000..fdd3d7c6 --- /dev/null +++ 
b/maixpy/static/js/search/search_main.js @@ -0,0 +1,351 @@ + +jQuery.fn.highlight = function (pat) { + function innerHighlight(node, pat) { + var skip = 0; + if (node.nodeType == 3) { + var pos = node.data.toUpperCase().indexOf(pat); + if (pos >= 0) { + var spannode = document.createElement('span'); + spannode.className = 'search_highlight'; + var middlebit = node.splitText(pos); + var endbit = middlebit.splitText(pat.length); + var middleclone = middlebit.cloneNode(true); + spannode.appendChild(middleclone); + middlebit.parentNode.replaceChild(spannode, middlebit); + skip = 1; + } + } + else if (node.nodeType == 1 && node.childNodes && !/(script|style)/i.test(node.tagName)) { + for (var i = 0; i < node.childNodes.length; ++i) { + i += innerHighlight(node.childNodes[i], pat); + } + } + return skip; + } + return this.each(function () { + innerHighlight(this, pat.toUpperCase()); + }); +}; + +window.onload = function(){ +} + +$(document).ready(function(){ + var waiting_search = false; + var search_index = null; + var search_content = { + "curr": null, + "others":{} + } + function onDownloadOk(data, arg1, arg2){ + search_index = data; + var pathname = window.location.pathname; + var curr_url = null; + var others_url = []; + for(var url in search_index){ + if(pathname.indexOf(url) != -1){ + if(!curr_url){ + curr_url = url; + }else{ // already have a matched item, e.g. `/get_started/zh/install/index.html /get_started/zh` + // and now `/get_started/zh/install/index.html /` + // choose the longer one + if(url.length > curr_url.length){ + others_url.push(curr_url); + curr_url = url; + }else{ + others_url.push(url); + } + } + }else{ + others_url.push(url); + } + } + if(search_index[curr_url]){ + downloadJson(search_index[curr_url][1], onIndexDownloadOk, curr_url, true, search_index[curr_url][0]); + } + for(var i in others_url){ + url = others_url[i]; + downloadJson(search_index[url][1], onIndexDownloadOk, url, false, search_index[url][0]); + } + } + function onIndexDownloadOk(data, url, is_curr, doc_name){ + if(is_curr){ + search_content["curr"] = [url, doc_name, data]; + }else{ + search_content["others"][url] = [url, doc_name, data]; + } + if(waiting_search == true){ + waiting_search = false; + onSearch(); + } + } + downloadJson("/maixpy/static/search_index/index.json", onDownloadOk); + var input_hint = $("#search_input_hint").html(); + var loading_hint = $("#search_loading_hint").html(); + var download_err_hint = $("#search_download_err_hint").html(); + var other_docs_result_hint = $("#search_other_docs_result_hint").html(); + var curr_doc_result_hint = $("#search_curr_doc_result_hint").html(); + $("body").append('
\ +
\ +
\ +
\ +
\ + \ +
\ +
\ +
\ +
\ +
\ +
\ +
\ +
'); + $("#search").bind("click", function(e){ + $("body").css("overflow-y", "hidden"); + $("#search_wrapper").show(); + $("#search_input").focus(); + $("#wrapper").addClass("blur"); + $("#navbar").addClass("blur"); + }); + $("#search_wrapper .close").bind("click", function(e){ + $("body").css("overflow-y", "auto"); + $("#search_wrapper").hide(); + $("#wrapper").removeClass("blur"); + $("#navbar").removeClass("blur"); + }); + $("#search_input").bind("input propertychange", function(){ + setTimeout(() => { + onSearch(); + }, 1000); + }); + function onSearch(){ + $("#search_result_name").empty(); + $("#search_result_content").empty(); + $("#search_result_content").append('
    '+ curr_doc_result_hint +'
'); + $("#search_result_content").append('
    '+ other_docs_result_hint +'
'); + if(!search_index){ + $("#search_result_content").append('
'+ loading_hint +'
'); + waiting_search = true; + return; + } + if(!search_content["curr"] && search_content["others"].length == 0){ + $("#search_result_content").append('
'+ loading_hint +'
'); + waiting_search = true; + return; + } + $("#search_curr_result > .hint").addClass("searching"); + var search_keywords = $("#search_input").val(); + search_doc(search_content["curr"], "#search_curr_result"); + var doc_id = 0; + for(var url in search_content["others"]){ + search_doc(search_content["others"][url], "#search_others_result", doc_id); + doc_id += 1; + } + addSearchResultClickListener(); + function search_doc(data, containerId, doc_id="curr"){ + var doc_id_str = 'result_wrapper_' + doc_id; + var findFlag = false; + var items = data[2]; + for(var url in items){ + var content = items[url]; + search_keywords = search_keywords.trim(); + if(search_keywords.length <= 0){ + return; + } + var keywords = search_keywords.split(" "); + var find = false; + var find_strs = ""; + for(var i in keywords){ + var keyword = keywords[i]; + if(content["title"] && content["title"].indexOf(keyword) >= 0){ + find = true; + } + } + if(content["content"] && content["content"].length > 0){ + find_strs = search(keywords, content["content"]); + if(find_strs.length > 0){ + find = true; + } + } + if(find){ + if(!findFlag){ + $("#search_result_name").append('
  • '+ data[1] +'
  • '); + $(containerId).append('
    '+data[1]+'
    '); + findFlag = true; + } + $("#"+doc_id_str).append('
  • '+ (content["title"]?content["title"]:url) + + '

    ' + find_strs + '
  • '); + } + } + } + $("#search_curr_result > .hint").removeClass("searching"); + } + function downloadJson(url, callback, arg1=null, arg2=null, arg3=null){ + $.ajax({ + type: "GET", + url: url, + contentType: "application/json", + dataType: "json", + success: function(data){ + callback(data, arg1, arg2, arg3); + }, + error: function(){ + $("#search_result_content").empty(); + $("#search_result_content").append('
    '+ download_err_hint + ': '+ url +'
    '); + } + }); + } + highlightKeywords(); +}); + +function focusItems(id, contrainerId, offset=0, classname=null){ + var elementTop = 0; + if(classname){ + elementTop = $("."+classname)[0].offsetTop - offset; + }else{ + elementTop = $("#"+id)[0].offsetTop - offset; + } + + $("#"+contrainerId).animate({scrollTop: elementTop},500); +} + + +function addSearchResultClickListener(){ + $("#search_result_name > li").on("click", function(e){ + var targetId = e.target.attributes.result_id.value; + focusItems(targetId, "search_result_content", $("#search_title").height() + $("#search_result .hint").height()); + }); +} + +function highlightKeywords(){ + var highlight_keywords = getQueryVariable("highlight"); + if(highlight_keywords){ + // add search result btn + var html = document.getElementsByTagName("html")[0]; + var lang = html.lang.split("-")[0].toLowerCase() + let strs = { + "zh": { + "Previous": "上一个", + "Next": "下一个" + } + } + if(lang in strs){ + var pre_name = strs[lang]["Previous"]; + var next_name = strs[lang]["Next"]; + }else{ + var pre_name = "Previous"; + var next_name = "Next"; + } + $("body").append('
    ' + + '' + + '' + + '' + + '
    '); + var highlight_keywords = decodeURI(highlight_keywords); + highlight_keywords = highlight_keywords.split(" "); + for(var i=0; i .previous").on("click", function(){ + let old = currSearchIdx; + currSearchIdx -= 1; + if (currSearchIdx < 0){ + currSearchIdx = $(".search_highlight").length - 1; + } + window.scrollTo({ + top: $(".search_highlight")[currSearchIdx].offsetTop - window.screen.height / 3, + behavior: "smooth" + }); + $($(".search_highlight")[old]).removeClass("selected_highlight") + $($(".search_highlight")[currSearchIdx]).addClass("selected_highlight") + }); + $("#search_ctrl_btn > .next").on("click", function(){ + let old = currSearchIdx; + currSearchIdx += 1; + if (currSearchIdx >= $(".search_highlight").length){ + currSearchIdx = 0; + } + window.scrollTo({ + top: $(".search_highlight")[currSearchIdx].offsetTop - window.screen.height / 3, + behavior: "smooth" + }); + $($(".search_highlight")[old]).removeClass("selected_highlight") + $($(".search_highlight")[currSearchIdx]).addClass("selected_highlight") + }); + } +} +function getQueryVariable(variable) +{ + var query = window.location.search.substring(1); + var vars = query.split("&"); + for (var i=0;i= 0){ + idxs.push({ + "idx": idx + idx_rel, + "len": keyword.length + }); + _idxs = _search([keyword], content.substr(idx + keyword.length), idx_rel + idx + keyword.length); + idxs = idxs.concat(_idxs); + } + } + return idxs + } + var find_strs = ""; + idxs = _search(keywords, content); + idxs = idxs.sort((a, b)=> a.idx-b.idx); + var idx_last = -1; + var len_last = 0; + for(var i=0; i= 0 && (idx - idx_last -len_last) < show_length){ // last keyword too close + find_strs += content.substr(idx_last + len_last, idx - (idx_last + len_last)) + ''+ content.substr(idx, len) +'' + }else{ + var start_idx = (idx - show_length < 0) ? 0 : (idx - show_length); + find_strs += '...' + content.substr(start_idx, idx - start_idx) + + '' + content.substr(idx, len) + + ''; + } + var idx_next = -1; + if(i < idxs.length -1){ + idx_next = idxs[i + 1]['idx']; + } + if(idx_next >= 0 && ((idx_next - idx - len) < show_length) ){ // next keywor too close + }else{ + find_strs += content.substr(idx + len, show_length) + '...'; + } + idx_last = idx; + len_last = len; + } + return find_strs +} + diff --git a/maixpy/static/js/theme_default/jquery.min.js b/maixpy/static/js/theme_default/jquery.min.js new file mode 100644 index 00000000..b0614034 --- /dev/null +++ b/maixpy/static/js/theme_default/jquery.min.js @@ -0,0 +1,2 @@ +/*! 
jQuery v3.5.1 | (c) JS Foundation and other contributors | jquery.org/license */ +!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(C,e){"use strict";var t=[],r=Object.getPrototypeOf,s=t.slice,g=t.flat?function(e){return t.flat.call(e)}:function(e){return t.concat.apply([],e)},u=t.push,i=t.indexOf,n={},o=n.toString,v=n.hasOwnProperty,a=v.toString,l=a.call(Object),y={},m=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType},x=function(e){return null!=e&&e===e.window},E=C.document,c={type:!0,src:!0,nonce:!0,noModule:!0};function b(e,t,n){var r,i,o=(n=n||E).createElement("script");if(o.text=e,t)for(r in c)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function w(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[o.call(e)]||"object":typeof e}var f="3.5.1",S=function(e,t){return new S.fn.init(e,t)};function p(e){var t=!!e&&"length"in e&&e.length,n=w(e);return!m(e)&&!x(e)&&("array"===n||0===t||"number"==typeof t&&0+~]|"+M+")"+M+"*"),U=new RegExp(M+"|>"),X=new RegExp(F),V=new RegExp("^"+I+"$"),G={ID:new RegExp("^#("+I+")"),CLASS:new RegExp("^\\.("+I+")"),TAG:new RegExp("^("+I+"|[*])"),ATTR:new RegExp("^"+W),PSEUDO:new RegExp("^"+F),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+R+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/HTML$/i,Q=/^(?:input|select|textarea|button)$/i,J=/^h\d$/i,K=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ee=/[+~]/,te=new RegExp("\\\\[\\da-fA-F]{1,6}"+M+"?|\\\\([^\\r\\n\\f])","g"),ne=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},re=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ie=function(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},oe=function(){T()},ae=be(function(e){return!0===e.disabled&&"fieldset"===e.nodeName.toLowerCase()},{dir:"parentNode",next:"legend"});try{H.apply(t=O.call(p.childNodes),p.childNodes),t[p.childNodes.length].nodeType}catch(e){H={apply:t.length?function(e,t){L.apply(e,O.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function se(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&(T(e),e=e||C,E)){if(11!==p&&(u=Z.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return n.push(a),n}else if(f&&(a=f.getElementById(i))&&y(e,a)&&a.id===i)return n.push(a),n}else{if(u[2])return H.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&d.getElementsByClassName&&e.getElementsByClassName)return H.apply(n,e.getElementsByClassName(i)),n}if(d.qsa&&!N[t+" "]&&(!v||!v.test(t))&&(1!==p||"object"!==e.nodeName.toLowerCase())){if(c=t,f=e,1===p&&(U.test(t)||z.test(t))){(f=ee.test(t)&&ye(e.parentNode)||e)===e&&d.scope||((s=e.getAttribute("id"))?s=s.replace(re,ie):e.setAttribute("id",s=S)),o=(l=h(t)).length;while(o--)l[o]=(s?"#"+s:":scope")+" "+xe(l[o]);c=l.join(",")}try{return 
H.apply(n,f.querySelectorAll(c)),n}catch(e){N(t,!0)}finally{s===S&&e.removeAttribute("id")}}}return g(t.replace($,"$1"),e,n,r)}function ue(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function le(e){return e[S]=!0,e}function ce(e){var t=C.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function fe(e,t){var n=e.split("|"),r=n.length;while(r--)b.attrHandle[n[r]]=t}function pe(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function de(t){return function(e){return"input"===e.nodeName.toLowerCase()&&e.type===t}}function he(n){return function(e){var t=e.nodeName.toLowerCase();return("input"===t||"button"===t)&&e.type===n}}function ge(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&ae(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function ve(a){return le(function(o){return o=+o,le(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function ye(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}for(e in d=se.support={},i=se.isXML=function(e){var t=e.namespaceURI,n=(e.ownerDocument||e).documentElement;return!Y.test(t||n&&n.nodeName||"HTML")},T=se.setDocument=function(e){var t,n,r=e?e.ownerDocument||e:p;return r!=C&&9===r.nodeType&&r.documentElement&&(a=(C=r).documentElement,E=!i(C),p!=C&&(n=C.defaultView)&&n.top!==n&&(n.addEventListener?n.addEventListener("unload",oe,!1):n.attachEvent&&n.attachEvent("onunload",oe)),d.scope=ce(function(e){return a.appendChild(e).appendChild(C.createElement("div")),"undefined"!=typeof e.querySelectorAll&&!e.querySelectorAll(":scope fieldset div").length}),d.attributes=ce(function(e){return e.className="i",!e.getAttribute("className")}),d.getElementsByTagName=ce(function(e){return e.appendChild(C.createComment("")),!e.getElementsByTagName("*").length}),d.getElementsByClassName=K.test(C.getElementsByClassName),d.getById=ce(function(e){return a.appendChild(e).id=S,!C.getElementsByName||!C.getElementsByName(S).length}),d.getById?(b.filter.ID=function(e){var t=e.replace(te,ne);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(te,ne);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=d.getElementsByTagName?function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):d.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},b.find.CLASS=d.getElementsByClassName&&function(e,t){if("undefined"!=typeof t.getElementsByClassName&&E)return t.getElementsByClassName(e)},s=[],v=[],(d.qsa=K.test(C.querySelectorAll))&&(ce(function(e){var 
t;a.appendChild(e).innerHTML="",e.querySelectorAll("[msallowcapture^='']").length&&v.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||v.push("\\["+M+"*(?:value|"+R+")"),e.querySelectorAll("[id~="+S+"-]").length||v.push("~="),(t=C.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||v.push("\\["+M+"*name"+M+"*="+M+"*(?:''|\"\")"),e.querySelectorAll(":checked").length||v.push(":checked"),e.querySelectorAll("a#"+S+"+*").length||v.push(".#.+[+~]"),e.querySelectorAll("\\\f"),v.push("[\\r\\n\\f]")}),ce(function(e){e.innerHTML="";var t=C.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&v.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&v.push(":enabled",":disabled"),a.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&v.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),v.push(",.*:")})),(d.matchesSelector=K.test(c=a.matches||a.webkitMatchesSelector||a.mozMatchesSelector||a.oMatchesSelector||a.msMatchesSelector))&&ce(function(e){d.disconnectedMatch=c.call(e,"*"),c.call(e,"[s!='']:x"),s.push("!=",F)}),v=v.length&&new RegExp(v.join("|")),s=s.length&&new RegExp(s.join("|")),t=K.test(a.compareDocumentPosition),y=t||K.test(a.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},D=t?function(e,t){if(e===t)return l=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!d.sortDetached&&t.compareDocumentPosition(e)===n?e==C||e.ownerDocument==p&&y(p,e)?-1:t==C||t.ownerDocument==p&&y(p,t)?1:u?P(u,e)-P(u,t):0:4&n?-1:1)}:function(e,t){if(e===t)return l=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e==C?-1:t==C?1:i?-1:o?1:u?P(u,e)-P(u,t):0;if(i===o)return pe(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?pe(a[r],s[r]):a[r]==p?-1:s[r]==p?1:0}),C},se.matches=function(e,t){return se(e,null,null,t)},se.matchesSelector=function(e,t){if(T(e),d.matchesSelector&&E&&!N[t+" "]&&(!s||!s.test(t))&&(!v||!v.test(t)))try{var n=c.call(e,t);if(n||d.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){N(t,!0)}return 0":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(te,ne),e[3]=(e[3]||e[4]||e[5]||"").replace(te,ne),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||se.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&se.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return G.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=h(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(te,ne).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=m[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&m(e,function(e){return t.test("string"==typeof 
e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=se.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function D(e,n,r){return m(n)?S.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?S.grep(e,function(e){return e===n!==r}):"string"!=typeof n?S.grep(e,function(e){return-1)[^>]*|#([\w-]+))$/;(S.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||j,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:q.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof S?t[0]:t,S.merge(this,S.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:E,!0)),N.test(r[1])&&S.isPlainObject(t))for(r in t)m(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=E.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):m(e)?void 0!==n.ready?n.ready(e):e(S):S.makeArray(e,this)}).prototype=S.fn,j=S(E);var L=/^(?:parents|prev(?:Until|All))/,H={children:!0,contents:!0,next:!0,prev:!0};function O(e,t){while((e=e[t])&&1!==e.nodeType);return e}S.fn.extend({has:function(e){var t=S(e,this),n=t.length;return this.filter(function(){for(var e=0;e\x20\t\r\n\f]*)/i,he=/^$|^module$|\/(?:java|ecma)script/i;ce=E.createDocumentFragment().appendChild(E.createElement("div")),(fe=E.createElement("input")).setAttribute("type","radio"),fe.setAttribute("checked","checked"),fe.setAttribute("name","t"),ce.appendChild(fe),y.checkClone=ce.cloneNode(!0).cloneNode(!0).lastChild.checked,ce.innerHTML="",y.noCloneChecked=!!ce.cloneNode(!0).lastChild.defaultValue,ce.innerHTML="",y.option=!!ce.lastChild;var ge={thead:[1,"","
    "],col:[2,"","
    "],tr:[2,"","
    "],td:[3,"","
    "],_default:[0,"",""]};function ve(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&A(e,t)?S.merge([e],n):n}function ye(e,t){for(var n=0,r=e.length;n",""]);var me=/<|&#?\w+;/;function xe(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d\s*$/g;function qe(e,t){return A(e,"table")&&A(11!==t.nodeType?t:t.firstChild,"tr")&&S(e).children("tbody")[0]||e}function Le(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function He(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Oe(e,t){var n,r,i,o,a,s;if(1===t.nodeType){if(Y.hasData(e)&&(s=Y.get(e).events))for(i in Y.remove(t,"handle events"),s)for(n=0,r=s[i].length;n").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),E.head.appendChild(r[0])},abort:function(){i&&i()}}});var Ut,Xt=[],Vt=/(=)\?(?=&|$)|\?\?/;S.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Xt.pop()||S.expando+"_"+Ct.guid++;return this[e]=!0,e}}),S.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Vt.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Vt.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=m(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Vt,"$1"+r):!1!==e.jsonp&&(e.url+=(Et.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||S.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=C[r],C[r]=function(){o=arguments},n.always(function(){void 0===i?S(C).removeProp(r):C[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,Xt.push(r)),o&&m(i)&&i(o[0]),o=i=void 0}),"script"}),y.createHTMLDocument=((Ut=E.implementation.createHTMLDocument("").body).innerHTML="
    ",2===Ut.childNodes.length),S.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(y.createHTMLDocument?((r=(t=E.implementation.createHTMLDocument("")).createElement("base")).href=E.location.href,t.head.appendChild(r)):t=E),o=!n&&[],(i=N.exec(e))?[t.createElement(i[1])]:(i=xe([e],t,o),o&&o.length&&S(o).remove(),S.merge([],i.childNodes)));var r,i,o},S.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1").append(S.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},S.expr.pseudos.animated=function(t){return S.grep(S.timers,function(e){return t===e.elem}).length},S.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=S.css(e,"position"),c=S(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=S.css(e,"top"),u=S.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),m(t)&&(t=t.call(e,n,S.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):("number"==typeof f.top&&(f.top+="px"),"number"==typeof f.left&&(f.left+="px"),c.css(f))}},S.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){S.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var e,t,n,r=this[0],i={top:0,left:0};if("fixed"===S.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===S.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=S(e).offset()).top+=S.css(e,"borderTopWidth",!0),i.left+=S.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-S.css(r,"marginTop",!0),left:t.left-i.left-S.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===S.css(e,"position"))e=e.offsetParent;return e||re})}}),S.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;S.fn[t]=function(e){return $(this,function(e,t,n){var r;if(x(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),S.each(["top","left"],function(e,n){S.cssHooks[n]=$e(y.pixelPosition,function(e,t){if(t)return t=Be(e,n),Me.test(t)?S(e).position()[n]+"px":t})}),S.each({Height:"height",Width:"width"},function(a,s){S.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){S.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return $(this,function(e,t,n){var r;return x(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?S.css(e,t,i):S.style(e,t,n,i)},s,n?e:void 0,n)}})}),S.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){S.fn[t]=function(e){return this.on(t,e)}}),S.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 
1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return this.mouseenter(e).mouseleave(t||e)}}),S.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){S.fn[n]=function(e,t){return 0 a").bind("click", function(e){ + var is_click_indicator = $(e.target).hasClass("sub_indicator"); + var a_obj = $(this); + if(a_obj.attr("href") == window.location.pathname){ + show_collapse_item(a_obj); + return false; + } + show_collapse_item(a_obj); + if(is_click_indicator){ // click indicator, only collapse, not jump to link + return false; + } + var screenW = $(window).width(); + if(screenW > 900){ + return; + } + link_href = $(this).attr("href").split(location.host); + if(link_href.length > 1){ + link_href = link_href[1]; + }else{ + link_href = link_href[0]; + } + url_href = location.href.split(location.host)[1] + let link_url = link_href.split("#")[0]; + let sub = $(this).next(); + var haveSub = false; + if(sub && sub.prop("nodeName")){ + haveSub = sub.prop("nodeName").toLowerCase() == "ul"; + } + if((link_href != decodeURIComponent(url_href) || !haveSub) && location.pathname == link_url){ // current page, and jump to header, close sidebar + location.href = link_href; + menu_toggle(); + } + }); +} + +function hello(){ + console.log('\n\n\ + _ _ \n\ + | | | | \n\ + | |_ ___ ___ __| | ___ ___ \n\ + | __/ _ \\/ _ \\/ _` |/ _ \\ / __|\n\ + | || __/ __/ (_| | (_) | (__ \n\ + \\__\\___|\\___|\\__,_|\\___/ \\___|\n\ + \n\ + generated by teedoc: \n\ + \n\ + https://github.com/teedoc/teedoc\n\ + \n\n\n\ +'); +} + + +function addTOC(){ + if(!document.getElementById("toc_content")) + return; + tocbot.init({ + // Where to render the table of contents. + tocSelector: '#toc_content', + // Where to grab the headings to build the table of contents. + contentSelector: '#article_content', + // Which headings to grab inside of the contentSelector element. + headingSelector: 'h1, h2, h3, h4', + // For headings inside relative or absolute positioned containers within content. 
+ hasInnerContainers: true, + }); +} + +function toChineseNumber(n) { + if (!Number.isInteger(n) && n < 0) { + throw Error('请输入自然数'); + } + + const digits = ['零', '一', '二', '三', '四', '五', '六', '七', '八', '九']; + const positions = ['', '十', '百', '千', '万', '十万', '百万', '千万', '亿', '十亿', '百亿', '千亿']; + const charArray = String(n).split(''); + let result = ''; + let prevIsZero = false; + //处理0 deal zero + for (let i = 0; i < charArray.length; i++) { + const ch = charArray[i]; + if (ch !== '0' && !prevIsZero) { + result += digits[parseInt(ch)] + positions[charArray.length - i - 1]; + } else if (ch === '0') { + prevIsZero = true; + } else if (ch !== '0' && prevIsZero) { + result += '零' + digits[parseInt(ch)] + positions[charArray.length - i - 1]; + } + } + //处理十 deal ten + if (n < 100) { + result = result.replace('一十', '十'); + } + return result; + } + +function addSequence(){ + if(!tocbot._parseContent){ + return; + } + var headings = tocbot._parseContent.selectHeadings(document.getElementById("article_content"), tocbot.options.headingSelector); + var counth2=0, counth3=0, counth4=0; + var html = document.getElementsByTagName("html")[0]; + var isZh = html.lang.substring(0, 2).toLowerCase() == "zh"; + for(var i=0; i' + seq + ''); + } +} + + +function getSplitter(){ + var sizes = localStorage.getItem("splitter_w"); + if(sizes){ + try + { + sizes = JSON.parse(sizes); + } + catch(err) + { + sizes = false; + } + } + if(!sizes){ + var screenW = $(window).width(); + var split_w = 0; + if(!sidebar_width_is_percent){ + split_w = parseInt(sidebar_width/screenW*100); + }else{ + split_w = sidebar_width; + } + sizes = [split_w, 100-split_w]; + setSplitter(sizes); + } + return sizes; +} +function setSplitter(sizes){ + localStorage.setItem("splitter_w", JSON.stringify(sizes)); +} + +var hasSplitter = false; + +function createSplitter(){ + var split = Split(["#sidebar_wrapper", "#article"],{ + gutterSize: 3, + gutterAlign: 'start', + minSize: 200, + elementStyle: function (dimension, size, gutterSize) { + return { + 'width': 'calc(' + size + '% - ' + gutterSize + 'px)', + } + }, + onDragEnd: function (sizes) { + setSplitter(sizes) + }, + }); + hasSplitter = true; + var screenW = $(window).width(); + var sizes = getSplitter(); + split_w = parseInt(sizes[0]); + if(isNaN(split_w) || (split_w + 20) >= screenW){ + if(!sidebar_width_is_percent){ + split_w = parseInt(sidebar_width/screenW*100); + }else{ + split_w = sidebar_width; + } + } + split.setSizes([split_w, 100 - split_w]); + $(".gutter").append('
    '); + $(".gutter").hover(function(){ + $(".gutter").css("width", "10px"); + $(".gutter_icon").css("width", "10px"); + },function(){ + $(".gutter").css("width", "3px"); + $(".gutter_icon").css("width", "3px"); + }); +} + +function addSplitter(){ + var screenW = $(window).width(); + if(screenW > 900) + { + createSplitter(); + } +} + +function registerOnWindowResize(has_sidebar){ + window.onresize = function(){ + var screenW = $(window).width(); + if(!has_sidebar){ + return; + } + if(screenW < 900){ + $("#sidebar_wrapper").removeAttr("style"); + if($("#menu").hasClass("close")){ + $("#sidebar_wrapper").css("display", "block"); + } + $(".gutter").css("display", "none"); + $("#article").css("width", "100%"); + }else{ + if(!hasSplitter){ + createSplitter(); + } + if($("#sidebar_wrapper").css("display") != "none"){ + $(".gutter").css("display", "block"); + } + } + } +} + +function focusSidebar(){ + var windowH = window.innerHeight; + var active = $("#sidebar .active")[0]; + if(!active) + return; + var offset = active.offsetTop; + if(offset > windowH/2){ + $("#sidebar .show").scrollTop(offset); + } +} + +function imageViewer(){ + var content_e = document.getElementById("content_body"); + if(!content_e){ + content_e = document.getElementById("page_wrapper"); + } + const gallery = new Viewer(content_e); +} + +function addAnchor(){ + $("#content_body h2, #content_body h3, #content_body h4, #content_body h5").each(function(){ + if($(this).attr("id")){ + $(this).append('#'); + } + }); +} + +function rerender(){ + Prism.highlightAll(); +} + +function addPrintPage(){ + if(!$("#article_info_right")){ + return; + } + $("#article_info_right").append(''); + + var beforePrint = function(){ + // update style changed by js: + $("#article").css("width", "100%"); + // rerender for proper output + rerender(); + } + var afterPrint = function() { + // location.reload(); + } + if (window.matchMedia) { + var mediaQueryList = window.matchMedia('print'); + mediaQueryList.addListener(function(mql) { + if (mql.matches) { + beforePrint(); + } else { + afterPrint(); + } + }); + } + window.onbeforeprint = beforePrint; + window.onafterprint = afterPrint; + $("#print_page").click(function(){ + window.print(); + }); +} + +function addTocMobileListener(){ + $("#toc_btn").click(function(){ + if($("#toc_wrapper").hasClass("show")){ + $("#toc_wrapper").removeClass("show"); + }else{ + $("#toc_wrapper").addClass("show"); + } + }); + $("#toc_wrapper").click(function(){ + if($("#toc_btn").is(":visible")){ + $("#toc_wrapper").removeClass("show"); + } + }); +} + +function addTabsetListener(){ + $(".tabset-tab-label").on("click", function(){ + let this_obj = $(this); + // already active, do nothing + if(this_obj.hasClass("tabset-tab-active")){ + return; + } + // remove all active tabset-tab-active and tabset-text-active class from all have class that startswith tabset-id-, + // then add active class to the same idx tab-label and tab-text + let tabset_id = null; + let same_id_tabsets = []; + let old_idx = this_obj.parent().find(".tabset-tab-active").attr("idx"); + let new_idx = this_obj.attr("idx"); + let tabset_obj = this_obj.parent().parent().parent(); + tabset_obj.attr("class").split(' ').forEach(function(item){ + if(item.startsWith("tabset-id-")){ + tabset_id = item; + } + }); + if(!tabset_id){ + same_id_tabsets = [tabset_obj[0]]; // to DOM element + }else{ + same_id_tabsets = document.getElementsByClassName(tabset_id); + } + for (let tabset of same_id_tabsets) { + console.log(tabset); + let tab_labels = 
tabset.getElementsByClassName("tabset-tab-label"); + tab_labels[old_idx].classList.remove("tabset-tab-active"); + tab_labels[new_idx].classList.add("tabset-tab-active"); + let tab_texts = tabset.getElementsByClassName("tabset-text"); + tab_texts[old_idx].classList.remove("tabset-text-active"); + tab_texts[new_idx].classList.add("tabset-text-active"); + } + }); +} diff --git a/maixpy/static/js/theme_default/pre_main.js b/maixpy/static/js/theme_default/pre_main.js new file mode 100644 index 00000000..1cb38adc --- /dev/null +++ b/maixpy/static/js/theme_default/pre_main.js @@ -0,0 +1,54 @@ +(function(){ + var theme = getTheme(); + setTheme(theme); +}()); + +function addCss(filename) { + var head = document.getElementsByTagName('head')[0]; + var link = document.createElement('link'); + link.rel = 'stylesheet'; + link.type = 'text/css'; + link.href = filename; + head.appendChild(link); +} +function removejscssfile(filename, filetype) { + var targetelement = (filetype == "js") ? "script" : (filetype == "css") ? "link" : "none" + var targetattr = (filetype == "js") ? "src" : (filetype == "css") ? "href" : "none" + var allsuspects = document.getElementsByTagName(targetelement) + for (var i = allsuspects.length; i >= 0; i--) { + if (allsuspects[i] && allsuspects[i].getAttribute(targetattr) != null && allsuspects[i].getAttribute(targetattr).indexOf(filename) != -1) + allsuspects[i].parentNode.removeChild(allsuspects[i]) + } +} + + +function getTheme(){ + var t = localStorage.getItem("theme"); + if(!t){ + t = "light"; + setTheme(t); + } + return t; +} +function setTheme(theme){ + var obj = document.getElementById("themes"); + if(theme=="dark"){ + if(obj){ + obj.classList.remove("light"); + obj.classList.add("dark"); + } + document.getElementsByTagName("html")[0].classList.add("dark"); + // load dark and light together, distinguish by .dark class instead of using a single css file + // removejscssfile("/maixpy/static/css/theme_default/light.css", "css"); + // addCss("/maixpy/static/css/theme_default/dark.css"); + }else{ + if(obj){ + obj.classList.remove("dark"); + obj.classList.add("light"); + } + document.getElementsByTagName("html")[0].classList.remove("dark"); + // removejscssfile("/maixpy/static/css/theme_default/dark.css", "css"); + // addCss("/maixpy/static/css/theme_default/light.css"); + } + localStorage.setItem("theme", theme); +} diff --git a/maixpy/static/js/theme_default/split.js b/maixpy/static/js/theme_default/split.js new file mode 100644 index 00000000..79f2131a --- /dev/null +++ b/maixpy/static/js/theme_default/split.js @@ -0,0 +1,3 @@ +/*! 
Split.js - v1.6.4 */ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e="undefined"!=typeof globalThis?globalThis:e||self).Split=t()}(this,(function(){"use strict";var e="undefined"!=typeof window?window:null,t=null===e,n=t?void 0:e.document,i=function(){return!1},r=t?"calc":["","-webkit-","-moz-","-o-"].filter((function(e){var t=n.createElement("div");return t.style.cssText="width:"+e+"calc(9px)",!!t.style.length})).shift()+"calc",s=function(e){return"string"==typeof e||e instanceof String},o=function(e){if(s(e)){var t=n.querySelector(e);if(!t)throw new Error("Selector "+e+" did not match a DOM element");return t}return e},a=function(e,t,n){var i=e[t];return void 0!==i?i:n},u=function(e,t,n,i){if(t){if("end"===i)return 0;if("center"===i)return e/2}else if(n){if("start"===i)return 0;if("center"===i)return e/2}return e},l=function(e,t){var i=n.createElement("div");return i.className="gutter gutter-"+t,i},c=function(e,t,n){var i={};return s(t)?i[e]=t:i[e]=r+"("+t+"% - "+n+"px)",i},h=function(e,t){var n;return(n={})[e]=t+"px",n};return function(r,s){if(void 0===s&&(s={}),t)return{};var d,f,v,m,g,p,y=r;Array.from&&(y=Array.from(y));var z=o(y[0]).parentNode,S=getComputedStyle?getComputedStyle(z):null,b=S?S.flexDirection:null,E=a(s,"sizes")||y.map((function(){return 100/y.length})),_=a(s,"minSize",100),L=Array.isArray(_)?_:y.map((function(){return _})),w=a(s,"maxSize",1/0),x=Array.isArray(w)?w:y.map((function(){return w})),k=a(s,"expandToMin",!1),C=a(s,"gutterSize",10),M=a(s,"gutterAlign","center"),U=a(s,"snapOffset",30),A=a(s,"dragInterval",1),O=a(s,"direction","horizontal"),D=a(s,"cursor","horizontal"===O?"col-resize":"row-resize"),B=a(s,"gutter",l),T=a(s,"elementStyle",c),j=a(s,"gutterStyle",h);function F(e,t,n,i){var r=T(d,t,n,i);Object.keys(r).forEach((function(t){e.style[t]=r[t]}))}function R(){return p.map((function(e){return e.size}))}function N(e){return"touches"in e?e.touches[0][f]:e[f]}function q(e){var t=p[this.a],n=p[this.b],i=t.size+n.size;t.size=e/this.size*i,n.size=i-e/this.size*i,F(t.element,t.size,this._b,t.i),F(n.element,n.size,this._c,n.i)}function H(e){var t,n=p[this.a],r=p[this.b];this.dragging&&(t=N(e)-this.start+(this._b-this.dragOffset),A>1&&(t=Math.round(t/A)*A),t<=n.minSize+U+this._b?t=n.minSize+this._b:t>=this.size-(r.minSize+U+this._c)&&(t=this.size-(r.minSize+this._c)),t>=n.maxSize-U+this._b?t=n.maxSize+this._b:t<=this.size-(r.maxSize-U+this._c)&&(t=this.size-(r.maxSize+this._c)),q.call(this,t),a(s,"onDrag",i)(R()))}function I(){var e=p[this.a].element,t=p[this.b].element,n=e.getBoundingClientRect(),i=t.getBoundingClientRect();this.size=n[d]+i[d]+this._b+this._c,this.start=n[v],this.end=n[m]}function W(e){var t=function(e){if(!getComputedStyle)return null;var t=getComputedStyle(e);if(!t)return null;var n=e[g];return 0===n?null:n-="horizontal"===O?parseFloat(t.paddingLeft)+parseFloat(t.paddingRight):parseFloat(t.paddingTop)+parseFloat(t.paddingBottom)}(z);if(null===t)return e;if(L.reduce((function(e,t){return e+t}),0)>t)return e;var n=0,i=[],r=e.map((function(r,s){var o=t*r/100,a=u(C,0===s,s===e.length-1,M),l=L[s]+a;return o0&&i[r]-n>0){var o=Math.min(n,i[r]-n);n-=o,s=e-o}return s/t*100}))}function X(){var 
t=p[this.a].element,r=p[this.b].element;this.dragging&&a(s,"onDragEnd",i)(R()),this.dragging=!1,e.removeEventListener("mouseup",this.stop),e.removeEventListener("touchend",this.stop),e.removeEventListener("touchcancel",this.stop),e.removeEventListener("mousemove",this.move),e.removeEventListener("touchmove",this.move),this.stop=null,this.move=null,t.removeEventListener("selectstart",i),t.removeEventListener("dragstart",i),r.removeEventListener("selectstart",i),r.removeEventListener("dragstart",i),t.style.userSelect="",t.style.webkitUserSelect="",t.style.MozUserSelect="",t.style.pointerEvents="",r.style.userSelect="",r.style.webkitUserSelect="",r.style.MozUserSelect="",r.style.pointerEvents="",this.gutter.style.cursor="",this.parent.style.cursor="",n.body.style.cursor=""}function Y(t){if(!("button"in t)||0===t.button){var r=p[this.a].element,o=p[this.b].element;this.dragging||a(s,"onDragStart",i)(R()),t.preventDefault(),this.dragging=!0,this.move=H.bind(this),this.stop=X.bind(this),e.addEventListener("mouseup",this.stop),e.addEventListener("touchend",this.stop),e.addEventListener("touchcancel",this.stop),e.addEventListener("mousemove",this.move),e.addEventListener("touchmove",this.move),r.addEventListener("selectstart",i),r.addEventListener("dragstart",i),o.addEventListener("selectstart",i),o.addEventListener("dragstart",i),r.style.userSelect="none",r.style.webkitUserSelect="none",r.style.MozUserSelect="none",r.style.pointerEvents="none",o.style.userSelect="none",o.style.webkitUserSelect="none",o.style.MozUserSelect="none",o.style.pointerEvents="none",this.gutter.style.cursor=D,this.parent.style.cursor=D,n.body.style.cursor=D,I.call(this),this.dragOffset=N(t)-this.end}}"horizontal"===O?(d="width",f="clientX",v="left",m="right",g="clientWidth"):"vertical"===O&&(d="height",f="clientY",v="top",m="bottom",g="clientHeight"),E=W(E);var G=[];function J(e){var t=e.i===G.length,n=t?G[e.i-1]:G[e.i];I.call(n);var i=t?n.size-e.minSize-n._c:e.minSize+n._b;q.call(n,i)}return(p=y.map((function(e,t){var n,i={element:o(e),size:E[t],minSize:L[t],maxSize:x[t],i:t};if(t>0&&((n={a:t-1,b:t,dragging:!1,direction:O,parent:z})._b=u(C,t-1==0,!1,M),n._c=u(C,!1,t===y.length-1,M),"row-reverse"===b||"column-reverse"===b)){var r=n.a;n.a=n.b,n.b=r}if(t>0){var s=B(t,O,i.element);!function(e,t,n){var i=j(d,t,n);Object.keys(i).forEach((function(t){e.style[t]=i[t]}))}(s,C,t),n._a=Y.bind(n),s.addEventListener("mousedown",n._a),s.addEventListener("touchstart",n._a),z.insertBefore(s,i.element),n.gutter=s}return F(i.element,i.size,u(C,0===t,t===y.length-1,M),t),t>0&&G.push(n),i}))).forEach((function(e){var t=e.element.getBoundingClientRect()[d];t0){var i=G[n-1],r=p[i.a],s=p[i.b];r.size=t[n-1],s.size=e,F(r.element,r.size,i._b,r.i),F(s.element,s.size,i._c,s.i)}}))},getSizes:R,collapse:function(e){J(p[e])},destroy:function(e,t){G.forEach((function(n){if(!0!==t?n.parent.removeChild(n.gutter):(n.gutter.removeEventListener("mousedown",n._a),n.gutter.removeEventListener("touchstart",n._a)),!0!==e){var i=T(d,n.a.size,n._b);Object.keys(i).forEach((function(e){p[n.a].element.style[e]="",p[n.b].element.style[e]=""}))}}))},parent:z,pairs:G}}})); +//# sourceMappingURL=split.min.js.map diff --git a/maixpy/static/js/theme_default/tocbot.min.js b/maixpy/static/js/theme_default/tocbot.min.js new file mode 100644 index 00000000..64f46c79 --- /dev/null +++ b/maixpy/static/js/theme_default/tocbot.min.js @@ -0,0 +1 @@ +(()=>{var e={163:e=>{e.exports=function(e){var t=[].forEach,n=[].some,o=document.body,l=!0,r=" ";function i(n,o){var 
l,c,a,u=o.appendChild((l=n,c=document.createElement("li"),a=document.createElement("a"),e.listItemClass&&c.setAttribute("class",e.listItemClass),e.onClick&&(a.onclick=e.onClick),e.includeTitleTags&&a.setAttribute("title",l.textContent),e.includeHtml&&l.childNodes.length?t.call(l.childNodes,(function(e){a.appendChild(e.cloneNode(!0))})):a.textContent=l.textContent,a.setAttribute("href",e.basePath+"#"+l.id),a.setAttribute("class",e.linkClass+r+"node-name--"+l.nodeName+r+e.extraLinkClasses),c.appendChild(a),c));if(n.children.length){var d=s(n.isCollapsed);n.children.forEach((function(e){i(e,d)})),u.appendChild(d)}}function s(t){var n=e.orderedList?"ol":"ul",o=document.createElement(n),l=e.listClass+r+e.extraListClasses;return t&&(l+=r+e.collapsibleClass,l+=r+e.isCollapsedClass),o.setAttribute("class",l),o}function c(t){var n=0;return t!==document.querySelector(e.contentSelector&&null!=t)&&(n=t.offsetTop,e.hasInnerContainers&&(n+=c(t.offsetParent))),n}function a(t){return t&&-1!==t.className.indexOf(e.collapsibleClass)&&-1!==t.className.indexOf(e.isCollapsedClass)?(t.className=t.className.split(r+e.isCollapsedClass).join(""),a(t.parentNode.parentNode)):t}return{enableTocAnimation:function(){l=!0},disableTocAnimation:function(t){var n=t.target||t.srcElement;"string"==typeof n.className&&-1!==n.className.indexOf(e.linkClass)&&(l=!1)},render:function(e,t){var n=s(!1);if(t.forEach((function(e){i(e,n)})),null!==e)return e.firstChild&&e.removeChild(e.firstChild),0===t.length?e:e.appendChild(n)},updateToc:function(i){var s;s=e.scrollContainer&&document.querySelector(e.scrollContainer)?document.querySelector(e.scrollContainer).scrollTop:document.documentElement.scrollTop||o.scrollTop,e.positionFixedSelector&&function(){var t;t=e.scrollContainer&&document.querySelector(e.scrollContainer)?document.querySelector(e.scrollContainer).scrollTop:document.documentElement.scrollTop||o.scrollTop;var n=document.querySelector(e.positionFixedSelector);"auto"===e.fixedSidebarOffset&&(e.fixedSidebarOffset=document.querySelector(e.tocSelector).offsetTop),t>e.fixedSidebarOffset?-1===n.className.indexOf(e.positionFixedClass)&&(n.className+=r+e.positionFixedClass):n.className=n.className.split(r+e.positionFixedClass).join("")}();var u,d=i;if(l&&null!==document.querySelector(e.tocSelector)&&d.length>0){n.call(d,(function(t,n){return c(t)>s+e.headingsOffset+10?(u=d[0===n?n:n-1],!0):n===d.length-1?(u=d[d.length-1],!0):void 0}));var f=document.querySelector(e.tocSelector).querySelectorAll("."+e.linkClass);t.call(f,(function(t){t.className=t.className.split(r+e.activeLinkClass).join("")}));var m=document.querySelector(e.tocSelector).querySelectorAll("."+e.listItemClass);t.call(m,(function(t){t.className=t.className.split(r+e.activeListItemClass).join("")}));var h=document.querySelector(e.tocSelector).querySelector("."+e.linkClass+".node-name--"+u.nodeName+'[href="'+e.basePath+"#"+u.id.replace(/([ #;&,.+*~':"!^$[\]()=>|/@])/g,"\\$1")+'"]');h&&-1===h.className.indexOf(e.activeLinkClass)&&(h.className+=r+e.activeLinkClass);var p=h&&h.parentNode;p&&-1===p.className.indexOf(e.activeListItemClass)&&(p.className+=r+e.activeListItemClass);var 
C=document.querySelector(e.tocSelector).querySelectorAll("."+e.listClass+"."+e.collapsibleClass);t.call(C,(function(t){-1===t.className.indexOf(e.isCollapsedClass)&&(t.className+=r+e.isCollapsedClass)})),h&&h.nextSibling&&-1!==h.nextSibling.className.indexOf(e.isCollapsedClass)&&(h.nextSibling.className=h.nextSibling.className.split(r+e.isCollapsedClass).join("")),a(h&&h.parentNode.parentNode)}}}}},547:e=>{e.exports={tocSelector:".js-toc",contentSelector:".js-toc-content",headingSelector:"h1, h2, h3",ignoreSelector:".js-toc-ignore",hasInnerContainers:!1,linkClass:"toc-link",extraLinkClasses:"",activeLinkClass:"is-active-link",listClass:"toc-list",extraListClasses:"",isCollapsedClass:"is-collapsed",collapsibleClass:"is-collapsible",listItemClass:"toc-list-item",activeListItemClass:"is-active-li",collapseDepth:0,scrollSmooth:!0,scrollSmoothDuration:420,scrollSmoothOffset:0,scrollEndCallback:function(e){},headingsOffset:1,throttleTimeout:50,positionFixedSelector:null,positionFixedClass:"is-position-fixed",fixedSidebarOffset:"auto",includeHtml:!1,includeTitleTags:!1,onClick:function(e){},orderedList:!0,scrollContainer:null,skipRendering:!1,headingLabelCallback:!1,ignoreHiddenElements:!1,headingObjectCallback:null,basePath:"",disableTocScrollSync:!1}},971:function(e,t,n){var o,l,r;l=[],o=function(e){"use strict";var t,o,l,r=n(547),i={},s={},c=n(163),a=n(279),u=n(938),d=!!(e&&e.document&&e.document.querySelector&&e.addEventListener);if("undefined"!=typeof window||d){var f=Object.prototype.hasOwnProperty;return s.destroy=function(){var e=h(i);null!==e&&(i.skipRendering||e&&(e.innerHTML=""),i.scrollContainer&&document.querySelector(i.scrollContainer)?(document.querySelector(i.scrollContainer).removeEventListener("scroll",this._scrollListener,!1),document.querySelector(i.scrollContainer).removeEventListener("resize",this._scrollListener,!1),t&&document.querySelector(i.scrollContainer).removeEventListener("click",this._clickListener,!1)):(document.removeEventListener("scroll",this._scrollListener,!1),document.removeEventListener("resize",this._scrollListener,!1),t&&document.removeEventListener("click",this._clickListener,!1)))},s.init=function(e){if(d){i=function(){for(var e={},t=0;t{e.exports=function(e){var t=[].reduce;function n(e){return e[e.length-1]}function o(e){return+e.nodeName.toUpperCase().replace("H","")}function l(t){if(!(t instanceof window.HTMLElement))return t;if(e.ignoreHiddenElements&&(!t.offsetHeight||!t.offsetParent))return null;const n=t.getAttribute("data-heading-label")||(e.headingLabelCallback?String(e.headingLabelCallback(t.textContent)):t.textContent.trim());var l={id:t.id,children:[],nodeName:t.nodeName,headingLevel:o(t),textContent:n};return e.includeHtml&&(l.childNodes=t.childNodes),e.headingObjectCallback?e.headingObjectCallback(l,t):l}return{nestHeadingsArray:function(o){return t.call(o,(function(t,o){var r=l(o);return r&&function(t,o){for(var r=l(t),i=r.headingLevel,s=o,c=n(s),a=i-(c?c.headingLevel:0);a>0&&(!(c=n(s))||i!==c.headingLevel);)c&&void 0!==c.children&&(s=c.children),a--;i>=e.collapseDepth&&(r.isCollapsed=!0),s.push(r)}(r,t.nest),t}),{nest:[]})},selectHeadings:function(t,n){var o=n;e.ignoreSelector&&(o=n.split(",").map((function(t){return t.trim()+":not("+e.ignoreSelector+")"})));try{return t.querySelectorAll(o)}catch(e){return console.warn("Headers not found with selector: "+o),null}}}}},374:(e,t)=>{t.initSmoothScrolling=function(e){var t=e.duration,n=e.offset,o=location.hash?l(location.href):location.href;function l(e){return 
e.slice(0,e.lastIndexOf("#"))}document.body.addEventListener("click",(function(r){var i;"a"!==(i=r.target).tagName.toLowerCase()||!(i.hash.length>0||"#"===i.href.charAt(i.href.length-1))||l(i.href)!==o&&l(i.href)+"#"!==o||r.target.className.indexOf("no-smooth-scroll")>-1||"#"===r.target.href.charAt(r.target.href.length-2)&&"!"===r.target.href.charAt(r.target.href.length-1)||-1===r.target.className.indexOf(e.linkClass)||function(e,t){var n,o,l=window.pageYOffset,r={duration:t.duration,offset:t.offset||0,callback:t.callback,easing:t.easing||function(e,t,n,o){return(e/=o/2)<1?n/2*e*e+t:-n/2*(--e*(e-2)-1)+t}},i=document.querySelector('[id="'+decodeURI(e).split("#").join("")+'"]')||document.querySelector('[id="'+e.split("#").join("")+'"]'),s="string"==typeof e?r.offset+(e?i&&i.getBoundingClientRect().top||0:-(document.documentElement.scrollTop||document.body.scrollTop)):e,c="function"==typeof r.duration?r.duration(s):r.duration;function a(e){o=e-n,window.scrollTo(0,r.easing(o,l,s,c)),o{e.exports=function(e){var t=document.querySelector(e.tocSelector);if(t&&t.scrollHeight>t.clientHeight){var n=t.querySelector("."+e.activeListItemClass);n&&(t.scrollTop=n.offsetTop)}}}},t={};function n(o){var l=t[o];if(void 0!==l)return l.exports;var r=t[o]={exports:{}};return e[o].call(r.exports,r,r.exports,n),r.exports}n.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),n(971)})(); \ No newline at end of file diff --git a/maixpy/static/js/theme_default/viewer.min.js b/maixpy/static/js/theme_default/viewer.min.js new file mode 100644 index 00000000..c5376e39 --- /dev/null +++ b/maixpy/static/js/theme_default/viewer.min.js @@ -0,0 +1,10 @@ +/*! + * Viewer.js v1.10.0 + * https://fengyuanchen.github.io/viewerjs + * + * Copyright 2015-present Chen Fengyuan + * Released under the MIT license + * + * Date: 2021-06-12T07:57:10.970Z + */ +!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):(t="undefined"!=typeof globalThis?globalThis:t||self).Viewer=e()}(this,function(){"use strict";function e(e,t){var i,n=Object.keys(e);return Object.getOwnPropertySymbols&&(i=Object.getOwnPropertySymbols(e),t&&(i=i.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,i)),n}function l(n){for(var t=1;t=this.length||this.viewed&&t===this.index)return this;if(!this.isShown)return this.index=t,this.show();this.viewing&&this.viewing.abort();var e=this.element,n=this.options,o=this.title,s=this.canvas,a=this.items[t],r=a.querySelector("img"),h=gt(r,"originalUrl"),l=r.getAttribute("alt"),c=document.createElement("img");if(ot(n.inheritedAttributes,function(t){var e=r.getAttribute(t);null!==e&&c.setAttribute(t,e)}),c.src=h,c.alt=l,nt(n.view)&&yt(e,"view",n.view,{once:!0}),!1===xt(e,"view",{originalImage:this.images[t],index:t,image:c})||!this.isShown||this.hiding||this.played)return this;h=this.items[this.index];ct(h,m),h.removeAttribute("aria-selected"),lt(a,m),a.setAttribute("aria-selected",!0),n.focus&&a.focus(),this.image=c,this.viewed=!1,this.index=t,this.imageData={},lt(c,D),n.loading&<(s,T),s.innerHTML="",s.appendChild(c),this.renderList(),o.innerHTML="";function u(){var t=i.imageData,e=Array.isArray(n.title)?n.title[1]:n.title;o.innerHTML=_(t=nt(e)?e.call(i,c,t):"".concat(l," (").concat(t.naturalWidth," × 
").concat(t.naturalHeight,")"))?t.replace(/&(?!amp;|quot;|#39;|lt;|gt;)/g,"&").replace(/"/g,""").replace(/'/g,"'").replace(//g,">"):t}var d;return yt(e,V,u,{once:!0}),this.viewing={abort:function(){wt(e,V,u),c.complete?i.imageRendering?i.imageRendering.abort():i.imageInitializing&&i.imageInitializing.abort():(c.src="",wt(c,N,d),i.timeout&&clearTimeout(i.timeout))}},c.complete?this.load():(yt(c,N,d=this.load.bind(this),{once:!0}),this.timeout&&clearTimeout(this.timeout),this.timeout=setTimeout(function(){ct(c,D),i.timeout=!1},1e3)),this},prev:function(){var t=this.index-1;return t<0&&(t=0Math.abs(r)&&(this.pointers={},1
    ',n=(i=d.querySelector(".".concat(g,"-container"))).querySelector(".".concat(g,"-title")),o=i.querySelector(".".concat(g,"-toolbar")),a=i.querySelector(".".concat(g,"-navbar")),m=i.querySelector(".".concat(g,"-button")),d=i.querySelector(".".concat(g,"-canvas")),this.parent=e,this.viewer=i,this.title=n,this.toolbar=o,this.navbar=a,this.button=m,this.canvas=d,this.footer=i.querySelector(".".concat(g,"-footer")),this.tooltipBox=i.querySelector(".".concat(g,"-tooltip")),this.player=i.querySelector(".".concat(g,"-player")),this.list=i.querySelector(".".concat(g,"-list")),i.id="".concat(g).concat(this.id),n.id="".concat(g,"Title").concat(this.id),lt(n,s.title?Tt(Array.isArray(s.title)?s.title[0]:s.title):w),lt(a,s.navbar?Tt(s.navbar):w),ut(m,w,!s.button),s.keyboard&&m.setAttribute("tabindex",0),s.backdrop&&(lt(i,"".concat(g,"-backdrop")),s.inline||"static"===s.backdrop||ft(d,K,"hide")),_(s.className)&&s.className&&s.className.split(Z).forEach(function(t){lt(i,t)}),s.toolbar?(r=document.createElement("ul"),h=it(s.toolbar),l=$.slice(0,3),c=$.slice(7,9),u=$.slice(9),h||lt(o,Tt(s.toolbar)),ot(h?s.toolbar:$,function(t,e){var i=h&&it(t),n=h?mt(e):t,o=i&&!Q(t.show)?t.show:t;!o||!s.zoomable&&-1!==l.indexOf(n)||!s.rotatable&&-1!==c.indexOf(n)||!s.scalable&&-1!==u.indexOf(n)||(e=i&&!Q(t.size)?t.size:t,i=i&&!Q(t.click)?t.click:t,t=document.createElement("li"),s.keyboard&&t.setAttribute("tabindex",0),t.setAttribute("role","button"),lt(t,"".concat(g,"-").concat(n)),nt(i)||ft(t,K,n),J(o)&<(t,Tt(o)),-1!==["small","large"].indexOf(e)?lt(t,"".concat(g,"-").concat(e)):"play"===n&<(t,"".concat(g,"-large")),nt(i)&&yt(t,O,i),r.appendChild(t))}),o.appendChild(r)):lt(o,w),s.rotatable||(lt(d=o.querySelectorAll('li[class*="rotate"]'),D),ot(d,function(t){o.appendChild(t)})),s.inline?(lt(m,b),rt(i,{zIndex:s.zIndexInline}),"static"===window.getComputedStyle(e).position&&rt(e,{position:"relative"}),e.insertBefore(i,t.nextSibling)):(lt(m,f),lt(i,p),lt(i,v),lt(i,w),rt(i,{zIndex:s.zIndex}),(m=(m=_(m=s.container)?t.ownerDocument.querySelector(m):m)||this.body).appendChild(i)),s.inline&&(this.render(),this.bind(),this.isShown=!0),this.ready=!0,nt(s.ready)&&yt(t,j,s.ready,{once:!0}),!1!==xt(t,j)?this.ready&&s.inline&&this.view(this.index):this.ready=!1)}}])&&o(t.prototype,e),n&&o(t,n),i}();return st(n.prototype,It,r,t,St,Ot),n}); \ No newline at end of file diff --git a/maixpy/static/js/thumbs_up/main.js b/maixpy/static/js/thumbs_up/main.js new file mode 100644 index 00000000..b463ed74 --- /dev/null +++ b/maixpy/static/js/thumbs_up/main.js @@ -0,0 +1,215 @@ + + +var conf=js_vars["teedoc-plugin-thumbs-up"]; +(function(){ + document.addEventListener("DOMContentLoaded", function(event) { + var contentBody = document.getElementById("content_body"); + if(!contentBody){ + return; + } + // add message show element + var messageEL = document.createElement("div"); + messageEL.className = "thumbs-message"; + let msgContentEl = document.createElement("div"); + msgContentEl.classList = ["thumbs-message-content"]; + messageEL.appendChild(msgContentEl); + contentBody.appendChild(messageEL); + // create a div to hold the thumbs up button, and add it to the content body + var thumbsUpDiv = document.createElement("div"); + thumbsUpDiv.id = "thumbs_up_container"; + contentBody.appendChild(thumbsUpDiv); + // create the thumbs up button + var thumbsUpButton = document.createElement("button"); + thumbsUpButton.classList = ["thumbs-up"]; + thumbsUpDiv.appendChild(thumbsUpButton); + var thumbsUpIcon = document.createElement("img"); + 
thumbsUpIcon.src = conf.icon; + thumbsUpIcon.classList = ["thumbs-up-icon"]; + thumbsUpButton.appendChild(thumbsUpIcon); + var thumbsUpLabel = document.createElement("span"); + thumbsUpLabel.classList = ["thumbs-up-label"]; + thumbsUpLabel.innerHTML = conf.label_up; + thumbsUpButton.appendChild(thumbsUpLabel); + var thumbsUpCount = document.createElement("span"); + thumbsUpCount.classList = ["thumbs-up-count"]; + thumbsUpCount.innerHTML = "(0)"; + thumbsUpButton.appendChild(thumbsUpCount); + // create the thumbs down button + var thumbsDownButton = document.createElement("button"); + thumbsDownButton.classList = ["thumbs-down"]; + thumbsUpDiv.appendChild(thumbsDownButton); + var thumbsDownIcon = document.createElement("img"); + thumbsDownIcon.src = conf.icon; + thumbsDownIcon.classList = ["thumbs-down-icon"]; + thumbsDownButton.appendChild(thumbsDownIcon); + var thumbsDownLabel = document.createElement("span"); + thumbsDownLabel.classList = ["thumbs-down-label"]; + thumbsDownLabel.innerHTML = conf.label_down; + thumbsDownButton.appendChild(thumbsDownLabel); + var thumbsDownCount = document.createElement("span"); + thumbsDownCount.classList = ["thumbs-down-count"]; + thumbsDownCount.innerHTML = ""; + thumbsDownButton.appendChild(thumbsDownCount); + // add click listeners to the buttons + var thumbs_up = document.getElementsByClassName("thumbs-up"); + for (var i = 0; i < thumbs_up.length; i++) { + thumbs_up[i].addEventListener("click", function(e) { + onClick(true); + }); + } + var thumbs_down = document.getElementsByClassName("thumbs-down"); + for (var i = 0; i < thumbs_down.length; i++) { + thumbs_down[i].addEventListener("click", function(e) { + onClick(false); + }); + } + setIcon(); + getCount(); + }); +})(); + +function showMsg(msg){ + let msgEl = document.getElementsByClassName("thumbs-message")[0]; + let msgContentEl = document.getElementsByClassName("thumbs-message-content")[0]; + msgContentEl.innerHTML = msg; + if (jQuery) { + $(msgEl).fadeIn(500, function(){ + setTimeout(function(){ + $(msgEl).fadeOut(500); + }, 3000); + }); + }else{ + console.log("no jquery"); + msgEl.style.display = "flex"; + setTimeout(function(){ + msgEl.style.display = "none"; + }, 5000); + } +} + +function onClick(up){ + let path = check_path(location.pathname); + let did = localStorage.getItem("thumbs_" + (up?"up":"down") + "_" + path); + if (did){ + showMsg(conf.msg_already_voted); + return; + } + var url = conf.url; + if(up){ + url = url + "/api/thumbs_up"; + }else{ + url = url + "/api/thumbs_down"; + } + var page = location.pathname + var data = { + "type": up ? 
"up" : "down", + "path": page, + "url": location.protocol + "//" + location.host + location.pathname + }; + if(!up){ + data["msg"] = prompt(conf.msg_down_prompt); + if(!data["msg"]){ + return; + } + if(data["msg"].length < 10){ + showMsg(conf.msg_down_prompt_error); + return; + } + } + var xhr = new XMLHttpRequest(); + xhr.open("POST", url, true); + xhr.setRequestHeader("Content-Type", "application/json"); + xhr.onreadystatechange = function() { + if (xhr.readyState === 4 && xhr.status === 200) { + var response = JSON.parse(xhr.responseText); + setUpCount(response["up_count"], up); + setDownCount(response["down_count"], !up); + showMsg(conf.msg_thanks); + }else if (xhr.status != 200){ + showMsg(conf.msg_error); + } + }; + xhr.send(JSON.stringify(data)); +} + +function getCount(){ + let path = check_path(location.pathname); + let url = conf.url + "/api/thumbs_count"; + var data = { + "path": path + }; + var xhr = new XMLHttpRequest(); + xhr.open("POST", url, true); + xhr.setRequestHeader("Content-Type", "application/json"); + xhr.onreadystatechange = function() { + if (xhr.readyState === 4 && xhr.status === 200) { + var response = JSON.parse(xhr.responseText); + setUpCount(response["up_count"]); + setDownCount(response["down_count"]); + }else if (xhr.status != 200){ + showMsg(conf.msg_error); + } + }; + xhr.send(JSON.stringify(data)); +} + +function check_path(path){ + if(!path){ + return path; + } + if (path[path.length - 1] == "/"){ + path = path + "index.html"; + }else{ + let temp = path.split("/"); + if(temp[temp.length - 1].indexOf(".") == -1){ + path = path + ".html"; + } + } + return path; +} + +function setUpCount(count, add=false){ + if(add){ + let path = check_path(location.pathname); + localStorage.setItem("thumbs_up_" + path, true); + setIcon(); + } + if(!conf.show_up_count){ + return; + } + var selector = ".thumbs-up-count"; + var thumbs_up = document.querySelector(selector); + thumbs_up.innerHTML = "(" + count + ")"; +} + +function setDownCount(count, add=false){ + if(add){ + let path = check_path(location.pathname); + localStorage.setItem("thumbs_down_" + path, true); + setIcon(); + } + if(!conf.show_down_count){ + return; + } + var selector = ".thumbs-down-count"; + var thumbs_down = document.querySelector(selector); + thumbs_down.innerHTML = "(" + count + ")"; +} + +function setIcon(){ + let path = check_path(location.pathname); + let upIcon = document.getElementsByClassName("thumbs-up-icon")[0]; + let downIcon = document.getElementsByClassName("thumbs-down-icon")[0]; + let did = localStorage.getItem("thumbs_up_" + path); + if (did){ + upIcon.src = conf.icon_clicked; + }else{ + upIcon.src = conf.icon; + } + did = localStorage.getItem("thumbs_down_" + path); + if (did){ + downIcon.src = conf.icon_clicked; + }else{ + downIcon.src = conf.icon; + } +} diff --git a/maixpy/static/js/thumbs_up/style.css b/maixpy/static/js/thumbs_up/style.css new file mode 100644 index 00000000..89b5dd01 --- /dev/null +++ b/maixpy/static/js/thumbs_up/style.css @@ -0,0 +1,66 @@ +#thumbs_up_container{ + margin-top: 2em; + display: inline-flex; + border-top: 0.1em solid #f1f1f1; +} + +#thumbs_up_container button { + padding: 1em; + border-radius: 0.6em; + background-color: #f1f1f1; + color: #606975; + border: none; + cursor: pointer; + margin: 0.5em 1em; + transition: 0.4s; + display: flex; + align-items: center; +} +#thumbs_up_container button:hover { + scale: 1.2; +} +#thumbs_up_container button > img { + margin-right: 1em; + cursor: pointer; +} +.thumbs-down-icon { + transform: rotate(180deg); +} 
+.thumbs-up-count { + padding: 0 0.5em; + color: #26a69a; +} +.thumbs-message { + display: none; + position: fixed; + top: 3em; + right: 1em; + background-color: #4caf50; + color: white; + border-radius: 0.6em; + align-items: center; + padding: 1em; + z-index: 999999; +} +.thumbs-message-content { + display: flex; + align-items: center; + height: 4em; + min-width: 6em; +} + +/* dark mode */ + +.dark #thumbs_up_container{ + border-top: 0.1em solid #2d2d2d; +} +.dark #thumbs_up_container button { + background-color: #2d2d2d; + color: #a2a2a2; +} + +@media print { + #thumbs_up_container { + display: none; + } +} diff --git a/maixpy/static/search_index/index.json b/maixpy/static/search_index/index.json new file mode 100644 index 00000000..3442e24d --- /dev/null +++ b/maixpy/static/search_index/index.json @@ -0,0 +1 @@ +{"/api/":["MaixPy API Reference","/maixpy/static/search_index/index_0.json"],"/doc/zh/":["MaixPy 中文文档","/maixpy/static/search_index/index_1.json"],"/doc/en/":["MaixPy English Documentation","/maixpy/static/search_index/index_2.json"],"/":["MaixPy 页面","/maixpy/static/search_index/index_3.json"],"/en/":["MaixPy Pages","/maixpy/static/search_index/index_4.json"]} \ No newline at end of file diff --git a/maixpy/static/search_index/index_0.json b/maixpy/static/search_index/index_0.json new file mode 100644 index 00000000..dca352d4 --- /dev/null +++ b/maixpy/static/search_index/index_0.json @@ -0,0 +1 @@ +{"/maixpy/api/maix/peripheral/pinmap.html":{"title":"maix.peripheral.pinmap","content":" title: maix.peripheral.pinmap maix.peripheral.pinmap module > You can use `maix.peripheral.pinmap` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ### get\\_pins ```python def get_pins() > list[str] ``` Get all pins of devices item description **return** pin name list, string type. > C++ defination code: > ```cpp > std::vector get_pins() > ``` ### get\\_pin\\_functions ```python def get_pin_functions(pin: str) > list[str] ``` Get all function of a pin item description **param** **pin**: pin name, string type.
    **return** function list, function name is string type. **throw** If the pin name is wrong, an err.Err.ERR_ARGS error will be thrown. > C++ defination code: > ```cpp > std::vector get_pin_functions(const std::string &pin) > ``` ### set\\_pin\\_function ```python def set_pin_function(pin: str, func: str) > maix.err.Err ``` Set function of a pin item description **param** **pin**: pin name, string type.
    **func**: which function should this pin use.
    **return** if set ok, will return err.Err.ERR_NONE, else error occurs. > C++ defination code: > ```cpp > err::Err set_pin_function(const std::string &pin, const std::string &func) > ``` ## Class"},"/maixpy/api/maix/ext_dev/bm8563.html":{"title":"maix.ext_dev.bm8563","content":" title: maix.ext_dev.bm8563 maix.ext_dev.bm8563 module > You can use `maix.ext_dev.bm8563` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ## Class ### BM8563 Peripheral BM8563 class > C++ defination code: > ```cpp > class BM8563 > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, i2c_bus: int 1) > None ``` BM8563 constructor item description **type** func **param** **i2c_bus**: i2c bus number.
    **static** False > C++ defination code: > ```cpp > BM8563(int i2c_bus 1) > ``` #### datetime ```python def datetime(self, timetuple: list[int] []) > list[int] ``` Get or set the date and time of the BM8563. item description **type** func **param** **timetuple**: time tuple, like (year, month, day[, hour[, minute[, second]]])
    **return** time tuple, like (year, month, day[, hour[, minute[, second]]]) **static** False > C++ defination code: > ```cpp > std::vector datetime(std::vector timetuple std::vector()) > ``` #### init ```python def init(self, timetuple: list[int]) > maix.err.Err ``` Initialise the BM8563. item description **type** func **param** **timetuple**: time tuple, like (year, month, day[, hour[, minute[, second]]])
    **return** err::Err type, if init success, return err::ERR_NONE **static** False > C++ defination code: > ```cpp > err::Err init(std::vector timetuple) > ``` #### now ```python def now(self) > list[int] ``` Get get the current datetime. item description **type** func **return** time tuple, like (year, month, day[, hour[, minute[, second]]]) **static** False > C++ defination code: > ```cpp > std::vector now() > ``` #### deinit ```python def deinit(self) > maix.err.Err ``` Deinit the BM8563. item description **type** func **return** err::Err err::Err type, if deinit success, return err::ERR_NONE **static** False > C++ defination code: > ```cpp > err::Err deinit() > ``` #### hctosys ```python def hctosys(self) > maix.err.Err ``` Set the system time from the BM8563 item description **type** func **return** err::Err type **static** False > C++ defination code: > ```cpp > err::Err hctosys() > ``` #### systohc ```python def systohc(self) > maix.err.Err ``` Set the BM8563 from the system time item description **type** func **return** err::Err type **static** False > C++ defination code: > ```cpp > err::Err systohc() > ```"},"/maixpy/api/maix/ext_dev/qmi8658.html":{"title":"maix.ext_dev.qmi8658","content":" title: maix.ext_dev.qmi8658 maix.ext_dev.qmi8658 module > You can use `maix.ext_dev.qmi8658` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ## Class ### QMI8658 QMI8656 driver class > C++ defination code: > ```cpp > class QMI8658 > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, i2c_bus: int 1, addr: int 107, freq: int 400000, mode: maix.ext_dev.imu.Mode ..., acc_scale: maix.ext_dev.imu.AccScale ..., acc_odr: maix.ext_dev.imu.AccOdr ..., gyro_scale: maix.ext_dev.imu.GyroScale ..., gyro_odr: maix.ext_dev.imu.GyroOdr ..., block: bool True) > None ``` Construct a new QMI8658 object, will open QMI8658 item description **type** func **param** **i2c_bus**: i2c bus number. Automatically selects the on board qmi8658 when 1 is passed in.
    **addr**: QMI8658 i2c addr.
    **freq**: QMI8658 freq
    **mode**: QMI8658 Mode: ACC_ONLY/GYRO_ONLY/DUAL
    **acc_scale**: acc scale, see @qmi8658::AccScale
    **acc_odr**: acc output data rate, see @qmi8658::AccOdr
    **gyro_scale**: gyro scale, see @qmi8658::GyroScale
    **gyro_odr**: gyro output data rate, see @qmi8658::GyroOdr
    **block**: block or non-block, default is true
    **static** False > C++ defination code: > ```cpp > QMI8658(int i2c_bus 1, int addr 0x6B, int freq 400000, > maix::ext_dev::imu::Mode mode maix::ext_dev::imu::Mode::DUAL, > maix::ext_dev::imu::AccScale acc_scale maix::ext_dev::imu::AccScale::ACC_SCALE_2G, > maix::ext_dev::imu::AccOdr acc_odr maix::ext_dev::imu::AccOdr::ACC_ODR_8000, > maix::ext_dev::imu::GyroScale gyro_scale maix::ext_dev::imu::GyroScale::GYRO_SCALE_16DPS, > maix::ext_dev::imu::GyroOdr gyro_odr maix::ext_dev::imu::GyroOdr::GYRO_ODR_8000, > bool block true) > ``` #### read ```python def read(self) > list[float] ``` Read data from QMI8658. item description **type** func **return** list type. If only one of the outputs is initialized, only [x,y,z] of that output will be returned.
    If all outputs are initialized, [acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z] is returned. **static** False > C++ defination code: > ```cpp > std::vector read() > ```"},"/maixpy/api/maix/ext_dev/tmc2209.html":{"title":"maix.ext_dev.tmc2209","content":" title: maix.ext_dev.tmc2209 maix.ext_dev.tmc2209 module > You can use `maix.ext_dev.tmc2209` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ### slide\\_scan ```python def slide_scan(port: str, addr: int, baud: int, step_angle: float, micro_step: int, round_mm: float, speed_mm_s: float, dir: bool True, use_internal_sense_resistors: bool True, run_current_per: int 100, hold_current_per: int 100, conf_save_path: str './slide_conf.bin', force_update: bool True) > None ``` Scan and initialize the slide with the given parameters item description **param** **port**: UART port, string type.
    **addr**: TMC2209 UART address, range 0x00~0x03, integer type.
    **baud**: UART baud rate, integer type.
    **step_angle**: Motor step angle, float type.
    **micro_step**: Motor micro step, options: 1/2/4/8/16/32/64/128/256, integer type.
    **round_mm**: Round distance in mm, float type.
    **speed_mm_s**: Speed of the slide in mm/s, float type.
    **dir**: Direction of movement, boolean type. Default is true.
    **use_internal_sense_resistors**: Enable internal sense resistors if true, disable if false, boolean type. Default is true.
    **run_current_per**: Motor run current percentage, range 0~100(%), integer type. Default is 100%.
    **hold_current_per**: Motor hold current percentage, range 0~100(%), integer type. Default is 100%.
    **conf_save_path**: Configuration save path, string type. Default is \"./slide_conf.bin\".
    **force_update**: Force update the configuration if true, boolean type. Default is true.
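Example (not part of the generated reference; a minimal usage sketch in which the UART port, baud rate and motor parameters are illustrative assumptions and must match the actual hardware):

```python
from maix.ext_dev import tmc2209

# Illustrative values only: port, baud rate, step angle, micro step and
# travel-per-revolution are assumptions, not defaults from the docs.
tmc2209.slide_scan(
    '/dev/ttyS1',   # UART port (assumed)
    0x00,           # TMC2209 UART address, range 0x00~0x03
    115200,         # UART baud rate (assumed)
    1.8,            # motor step angle in degrees (assumed)
    16,             # micro step: one of 1/2/4/8/16/32/64/128/256
    60.0,           # distance per revolution in mm (assumed)
    10.0            # scan speed in mm/s (assumed)
)
# The scanned configuration is saved to ./slide_conf.bin by default.
```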
    > C++ defination code: > ```cpp > void slide_scan(const char* port, uint8_t addr, long baud, /* Uart init param */ > float step_angle, uint16_t micro_step, float round_mm, /* Motor init param */ > float speed_mm_s, bool dir true, bool use_internal_sense_resistors true, uint8_t run_current_per 100, > uint8_t hold_current_per 100, const std::string conf_save_path \"./slide_conf.bin\", > bool force_update true /* Driver init param */) > ``` ### slide\\_test ```python def slide_test(port: str, addr: int, baud: int, step_angle: float, micro_step: int, round_mm: float, speed_mm_s: float, dir: bool True, use_internal_sense_resistors: bool True, run_current_per: int 100, hold_current_per: int 100, conf_save_path: str './slide_conf.bin') > None ``` Test the slide with the given parameters\\nThis function tests the slide by moving it in the specified direction until a stall condition is detected, as defined in the configuration file. item description **param** **port**: UART port, string type.
    **addr**: TMC2209 UART address, range 0x00~0x03, integer type.
    **baud**: UART baud rate, integer type.
    **step_angle**: Motor step angle, float type.
    **micro_step**: Motor micro step, options: 1/2/4/8/16/32/64/128/256, integer type.
    **round_mm**: Round distance in mm, float type.
    **speed_mm_s**: Speed of the slide in mm/s, float type.
    **dir**: Direction of movement, boolean type. Default is true.
    **use_internal_sense_resistors**: Enable internal sense resistors if true, disable if false, boolean type. Default is true.
    **run_current_per**: Motor run current percentage, range 0~100(%), integer type. Default is 100%.
    **hold_current_per**: Motor hold current percentage, range 0~100(%), integer type. Default is 100%.
    **conf_save_path**: Configuration save path, string type. Default is \"./slide_conf.bin\".
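Example (illustrative sketch, not from the original reference; assumes the same hypothetical hardware as the slide_scan example above and the configuration file it produced):

```python
from maix.ext_dev import tmc2209

# Run after slide_scan has written ./slide_conf.bin; all values are assumptions.
tmc2209.slide_test('/dev/ttyS1', 0x00, 115200, 1.8, 16, 60.0, 10.0)
```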
    > C++ defination code: > ```cpp > void slide_test(const char* port, uint8_t addr, long baud, /* Uart init param */ > float step_angle, uint16_t micro_step, float round_mm, /* Motor init param */ > float speed_mm_s, bool dir true, bool use_internal_sense_resistors true, uint8_t run_current_per 100, > uint8_t hold_current_per 100, const std::string conf_save_path \"./slide_conf.bin\"/* Driver init param */) > ``` ## Class ### Slide Slide Class > C++ defination code: > ```cpp > class Slide > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, port: str, addr: int, baud: int, step_angle: float, micro_step: int, round_mm: float, speed_mm_s: float 1, use_internal_sense_resistors: bool True, run_current_per: int 100, hold_current_per: int 100, cfg_file_path: str '') > None ``` Constructor for Slide\\nInitializes the Slide object with the specified parameters. item description **type** func **param** **port**: UART port, string type.
    **addr**: TMC2209 UART address, range 0x00~0x03, integer type.
    **baud**: UART baud rate, integer type.
    **step_angle**: Motor step angle, float type.
    **micro_step**: Motor micro step, options: 1/2/4/8/16/32/64/128/256, integer type.
    **round_mm**: Round distance in mm, float type.
    **speed_mm_s**: Speed of the slide in mm/s, float type. Default is 1, indicating the use of a default speed factor.
    **use_internal_sense_resistors**: Enable internal sense resistors if TRUE, disable if FALSE, boolean type. Default is TRUE.
    **run_current_per**: Motor run current percentage, range 0~100(%), integer type. Default is 100%.
    **hold_current_per**: Motor hold current percentage, range 0~100(%), integer type. Default is 100%.
    **cfg_file_path**: Configuration file path, string type. Default is an empty string, indicating no configuration file.
    **static** False > C++ defination code: > ```cpp > Slide(const char* port, uint8_t addr, long baud, /* Uart init param */ > float step_angle, uint16_t micro_step, float round_mm, /* Motor init param */ > float speed_mm_s 1, bool use_internal_sense_resistors true, uint8_t run_current_per 100, > uint8_t hold_current_per 100, std::string cfg_file_path \"\" /* Driver init param */) > ``` #### load\\_conf ```python def load_conf(self, path: str) > None ``` Load configuration from a file\\nLoads the configuration settings for the slide from the specified file path. item description **type** func **param** **path**: Path to the configuration file, string type.
    **static** False > C++ defination code: > ```cpp > void load_conf(std::string path) > ``` #### move ```python def move(self, oft: float, speed_mm_s: int 1, check: bool True) > None ``` Move the slide by a specified length\\nMoves the slide by the specified length at the given speed. Optionally checks for stall conditions. item description **type** func **param** **oft**: Length to move, float type.
    **speed_mm_s**: Speed in mm/s. Default is 1, indicating the use of the default speed set during initialization.
    **check**: Enable movement check if true, boolean type. Default is true.
    **static** False > C++ defination code: > ```cpp > void move(float oft, int speed_mm_s 1, bool check true) > ``` #### reset ```python def reset(self, dir: bool False, speed_mm_s: int 1) > None ``` Reset the slide position\\nResets the slide position in the specified direction at the given speed. item description **type** func **param** **dir**: Direction of reset, boolean type. Default is false.
    **speed_mm_s**: Speed in mm/s. Default is 1, indicating the use of the speed set during initialization.
    **static** False > C++ defination code: > ```cpp > void reset(bool dir false, int speed_mm_s 1) > ``` #### stop\\_default\\_per Get or set the stop default percentage\\nRetrieves or sets the stop default percentage. If the parameter is 1, it returns the current setting. item description **type** func **param** **per**: Stop default percentage, range 0~100(%), integer type. Default is 1, indicating no change.
    **return** int Current stop default percentage if per is 1, otherwise the new set percentage. **static** False > C++ defination code: > ```cpp > int stop_default_per(int per 1) > ``` #### run\\_current\\_per ```python def run_current_per(self, per: int 1) > int ``` Get or set the run current percentage\\nRetrieves or sets the run current percentage. If the parameter is 1, it returns the current setting. item description **type** func **param** **per**: Run current percentage, range 0~100(%), integer type. Default is 1, indicating no change.
    **return** int Current run current percentage if per is 1, otherwise the new set percentage. **static** False > C++ defination code: > ```cpp > int run_current_per(int per 1) > ``` #### hold\\_current\\_per ```python def hold_current_per(self, per: int 1) > int ``` Get or set the hold current percentage\\nRetrieves or sets the hold current percentage. If the parameter is 1, it returns the current setting. item description **type** func **param** **per**: Hold current percentage, range 0~100(%), integer type. Default is 1, indicating no change.
    **return** int Current hold current percentage if per is 1, otherwise the new set percentage. **static** False > C++ defination code: > ```cpp > int hold_current_per(int per 1) > ``` #### use\\_internal\\_sense\\_resistors ```python def use_internal_sense_resistors(self, b: bool True) > None ``` Enable or disable internal sense resistors\\nEnables or disables the internal sense resistors based on the provided boolean value. item description **type** func **param** **b**: Boolean value to enable (true) or disable (false) internal sense resistors. Default is true.
    **static** False > C++ defination code: > ```cpp > void use_internal_sense_resistors(bool b true) > ``` ### ScrewSlide ScrewSlide Class > C++ defination code: > ```cpp > class ScrewSlide > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, port: str, addr: int, baud: int, step_angle: float, micro_step: int, screw_pitch: float, speed_mm_s: float 1, use_internal_sense_resistors: bool True, run_current_per: int 100, hold_current_per: int 100) > None ``` Constructor for ScrewSlide item description **type** func **param** **port**: UART port, string type.
    **addr**: TMC2209 UART address, range 0x00~0x03, integer type.
    **baud**: UART baud rate, integer type.
    **step_angle**: Motor step angle, float type.
    **micro_step**: Motor micro step, options: 1/2/4/8/16/32/64/128/256, integer type.
    **screw_pitch**: Screw pitch of the slide, integer type.
    **speed_mm_s**: Speed of the slide in mm/s, 10 means 10mm/s, float type.
    Default is 1, indicating the use of a default speed factor.
    **use_internal_sense_resistors**: Enable internal sense resistors if TRUE,
    disable if FALSE, boolean type. Default is TRUE.
    **run_current_per**: Motor run current percentage, range 0~100(%), integer type. Default is 100%.
    **hold_current_per**: Motor hold current percentage, range 0~100(%), integer type. Default is 100%.
    **static** False > C++ defination code: > ```cpp > ScrewSlide(const char* port, uint8_t addr, long baud, /* Uart init param */ > float step_angle, uint16_t micro_step, float screw_pitch, /* Motor init param */ > float speed_mm_s 1, bool use_internal_sense_resistors true, uint8_t run_current_per 100, > uint8_t hold_current_per 100) > ``` #### move ```python def move(self, oft: float, speed_mm_s: int 1, callback: typing.Callable[[float], bool] None) > None ``` Move the slide by a specified length item description **type** func **param** **oft**: Length to move, 10 means 10mm, float type.
    Positive values move the slide in the positive direction, negative values move it in the opposite direction.
    **speed_mm_s**: Speed in mm/s. Default is 1, indicating the use of the default speed set during initialization.
    **callback**: Callback function to be called during movement.
    The callback function receives the current progress percentage (0~100%) of the movement.
    If the callback returns true, the move operation will be terminated immediately. Default is nullptr.
    **static** False > C++ defination code: > ```cpp > void move(float oft, int speed_mm_s 1, std::function callback nullptr) > ``` #### reset ```python def reset(self, callback: typing.Callable[[], bool], dir: bool False, speed_mm_s: int 1) > None ``` Reset the slide position item description **type** func **param** **callback**: Callback function to be called during the reset loop.
    The reset operation will only terminate if the callback returns true.
    **dir**: Direction of reset. Default is false.
    **speed_mm_s**: Speed in mm/s. Default is 1, indicating the use of the speed set during initialization.
    **static** False > C++ defination code: > ```cpp > void reset(std::function callback, bool dir false, int speed_mm_s 1) > ``` #### run\\_current\\_per ```python def run_current_per(self, per: int 1) > int ``` Get or set the run current percentage item description **type** func **param** **per**: Run current percentage, range 0~100(%).
    Default is 1, indicating no change and returning the current run current percentage.
    **return** int Current run current percentage if per is 1, otherwise the new set percentage. **static** False > C++ defination code: > ```cpp > int run_current_per(int per 1) > ``` #### hold\\_current\\_per ```python def hold_current_per(self, per: int 1) > int ``` Get or set the hold current percentage item description **type** func **param** **per**: Hold current percentage, range 0~100(%). Default is 1, indicating no change and returning the current hold current percentage.
    **return** int Current hold current percentage if per is 1, otherwise the new set percentage. **static** False > C++ defination code: > ```cpp > int hold_current_per(int per 1) > ``` #### use\\_internal\\_sense\\_resistors ```python def use_internal_sense_resistors(self, b: bool True) > None ``` Enable or disable internal sense resistors item description **type** func **param** **b**: Boolean value to enable (true) or disable (false) internal sense resistors. Default is true.
    **static** False > C++ defination code: > ```cpp > void use_internal_sense_resistors(bool b true) > ```"},"/maixpy/api/maix/ext_dev/imu.html":{"title":"maix.ext_dev.imu","content":" title: maix.ext_dev.imu maix.ext_dev.imu module > You can use `maix.ext_dev.imu` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ### Mode imu mode item describe **values** **ACC_ONLY**:
    **GYRO_ONLY**:
    **DUAL**:
    > C++ defination code: > ```cpp > enum class Mode { > ACC_ONLY 0, > GYRO_ONLY, > DUAL > } > ``` ### AccScale imu acc scale item describe **values** **ACC_SCALE_2G**:
    **ACC_SCALE_4G**:
    **ACC_SCALE_8G**:
    **ACC_SCALE_16G**:
    > C++ defination code: > ```cpp > enum class AccScale { > ACC_SCALE_2G 0, > ACC_SCALE_4G, > ACC_SCALE_8G, > ACC_SCALE_16G > } > ``` ### AccOdr imu acc output data rate item describe **values** **ACC_ODR_8000**: Accelerometer ODR set to 8000 Hz.
    **ACC_ODR_4000**: Accelerometer ODR set to 4000 Hz.
    **ACC_ODR_2000**: Accelerometer ODR set to 2000 Hz.
    **ACC_ODR_1000**: Accelerometer ODR set to 1000 Hz.
    **ACC_ODR_500**: Accelerometer ODR set to 500 Hz.
    **ACC_ODR_250**: Accelerometer ODR set to 250 Hz.
    **ACC_ODR_125**: Accelerometer ODR set to 125 Hz.
    **ACC_ODR_62_5**: Accelerometer ODR set to 62.5 Hz.
    **ACC_ODR_31_25**: Accelerometer ODR set to 31.25 Hz.
    **ACC_ODR_128**: Accelerometer ODR set to 128 Hz.
    **ACC_ODR_21**: Accelerometer ODR set to 21 Hz.
    **ACC_ODR_11**: Accelerometer ODR set to 11 Hz.
    **ACC_ODR_3**: Accelerometer ODR set to 3 Hz.
    > C++ defination code: > ```cpp > enum class AccOdr { > ACC_ODR_8000, // Accelerometer ODR set to 8000 Hz. > ACC_ODR_4000, // Accelerometer ODR set to 4000 Hz. > ACC_ODR_2000, // Accelerometer ODR set to 2000 Hz. > ACC_ODR_1000, // Accelerometer ODR set to 1000 Hz. > ACC_ODR_500, // Accelerometer ODR set to 500 Hz. > ACC_ODR_250, // Accelerometer ODR set to 250 Hz. > ACC_ODR_125, // Accelerometer ODR set to 125 Hz. > ACC_ODR_62_5, // Accelerometer ODR set to 62.5 Hz. > ACC_ODR_31_25, // Accelerometer ODR set to 31.25 Hz. > ACC_ODR_128 12, // Accelerometer ODR set to 128 Hz. > ACC_ODR_21, // Accelerometer ODR set to 21 Hz. > ACC_ODR_11, // Accelerometer ODR set to 11 Hz. > ACC_ODR_3, // Accelerometer ODR set to 3 Hz. > } > ``` ### GyroScale imu gyro scale item describe **values** **GYRO_SCALE_16DPS**: Gyroscope scale set to ±16 degrees per second.
    **GYRO_SCALE_32DPS**: Gyroscope scale set to ±32 degrees per second.
    **GYRO_SCALE_64DPS**: Gyroscope scale set to ±64 degrees per second.
    **GYRO_SCALE_128DPS**: Gyroscope scale set to ±128 degrees per second.
    **GYRO_SCALE_256DPS**: Gyroscope scale set to ±256 degrees per second.
    **GYRO_SCALE_512DPS**: Gyroscope scale set to ±512 degrees per second.
    **GYRO_SCALE_1024DPS**: Gyroscope scale set to ±1024 degrees per second.
    **GYRO_SCALE_2048DPS**: Gyroscope scale set to ±2048 degrees per second.
    > C++ defination code: > ```cpp > enum class GyroScale { > GYRO_SCALE_16DPS 0, // Gyroscope scale set to ±16 degrees per second. > GYRO_SCALE_32DPS, // Gyroscope scale set to ±32 degrees per second. > GYRO_SCALE_64DPS, // Gyroscope scale set to ±64 degrees per second. > GYRO_SCALE_128DPS, // Gyroscope scale set to ±128 degrees per second. > GYRO_SCALE_256DPS, // Gyroscope scale set to ±256 degrees per second. > GYRO_SCALE_512DPS, // Gyroscope scale set to ±512 degrees per second. > GYRO_SCALE_1024DPS, // Gyroscope scale set to ±1024 degrees per second. > GYRO_SCALE_2048DPS, // Gyroscope scale set to ±2048 degrees per second. > } > ``` ### GyroOdr imu gyro output data rate item describe **values** **GYRO_ODR_8000**: Gyroscope ODR set to 8000 Hz.
    **GYRO_ODR_4000**: Gyroscope ODR set to 4000 Hz.
    **GYRO_ODR_2000**: Gyroscope ODR set to 2000 Hz.
    **GYRO_ODR_1000**: Gyroscope ODR set to 1000 Hz.
    **GYRO_ODR_500**: Gyroscope ODR set to 500 Hz.
    **GYRO_ODR_250**: Gyroscope ODR set to 250 Hz.
    **GYRO_ODR_125**: Gyroscope ODR set to 125 Hz.
    **GYRO_ODR_62_5**: Gyroscope ODR set to 62.5 Hz.
    **GYRO_ODR_31_25**: Gyroscope ODR set to 31.25 Hz.
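The Mode/AccScale/AccOdr/GyroScale/GyroOdr values above are the options accepted by the QMI8658 and IMU constructors in this module set. A minimal read-loop sketch (illustrative, assuming the on-board QMI8658 and the documented defaults):

```python
import time
from maix.ext_dev import qmi8658

# Default arguments select the on-board QMI8658 in DUAL mode
# (ACC_SCALE_2G / ACC_ODR_8000 / GYRO_SCALE_16DPS / GYRO_ODR_8000).
dev = qmi8658.QMI8658()

for _ in range(10):
    # In DUAL mode read() returns [acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z]
    print(dev.read())
    time.sleep(0.1)
```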
    > C++ defination code: > ```cpp > enum class GyroOdr { > GYRO_ODR_8000, // Gyroscope ODR set to 8000 Hz. > GYRO_ODR_4000, // Gyroscope ODR set to 4000 Hz. > GYRO_ODR_2000, // Gyroscope ODR set to 2000 Hz. > GYRO_ODR_1000, // Gyroscope ODR set to 1000 Hz. > GYRO_ODR_500, // Gyroscope ODR set to 500 Hz. > GYRO_ODR_250, // Gyroscope ODR set to 250 Hz. > GYRO_ODR_125, // Gyroscope ODR set to 125 Hz. > GYRO_ODR_62_5, // Gyroscope ODR set to 62.5 Hz. > GYRO_ODR_31_25, // Gyroscope ODR set to 31.25 Hz. > } > ``` ## Variable ## Function ## Class ### IMU QMI8656 driver class > C++ defination code: > ```cpp > class IMU > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, driver: str, i2c_bus: int 1, addr: int 107, freq: int 400000, mode: Mode ..., acc_scale: AccScale ..., acc_odr: AccOdr ..., gyro_scale: GyroScale ..., gyro_odr: GyroOdr ..., block: bool True) > None ``` Construct a new IMU object, will open IMU item description **type** func **param** **driver**: driver name, only support \"qmi8656\"
    **i2c_bus**: i2c bus number. Automatically selects the on board imu when 1 is passed in.
    **addr**: IMU i2c addr.
    **freq**: IMU freq
    **mode**: IMU Mode: ACC_ONLY/GYRO_ONLY/DUAL
    **acc_scale**: acc scale, see @imu::AccScale
    **acc_odr**: acc output data rate, see @imu::AccOdr
    **gyro_scale**: gyro scale, see @imu::GyroScale
    **gyro_odr**: gyro output data rate, see @imu::GyroOdr
    **block**: block or non-block, default is true
    **static** False > C++ defination code: > ```cpp > IMU(std::string driver, int i2c_bus 1, int addr 0x6B, int freq 400000, > maix::ext_dev::imu::Mode mode maix::ext_dev::imu::Mode::DUAL, > maix::ext_dev::imu::AccScale acc_scale maix::ext_dev::imu::AccScale::ACC_SCALE_2G, > maix::ext_dev::imu::AccOdr acc_odr maix::ext_dev::imu::AccOdr::ACC_ODR_8000, > maix::ext_dev::imu::GyroScale gyro_scale maix::ext_dev::imu::GyroScale::GYRO_SCALE_16DPS, > maix::ext_dev::imu::GyroOdr gyro_odr maix::ext_dev::imu::GyroOdr::GYRO_ODR_8000, > bool block true) > ``` #### read ```python def read(self) > list[float] ``` Read data from IMU. item description **type** func **return** list type. If only one of the outputs is initialized, only [x,y,z] of that output will be returned.
    If all outputs are initialized, [acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z] is returned. **static** False > C++ defination code: > ```cpp > std::vector read() > ``` #### calculate\\_calibration ```python def calculate_calibration(self, time_ms: int 30000) > maix.err.Err ``` Calculate calibration, save calibration data to /maixapp/shart/imu_calibration item description **type** func **param** **time_ms**: maximum calculation time, unit:ms
    **return** err::Err **static** False > C++ defination code: > ```cpp > err::Err calculate_calibration(uint64_t time_ms 30 * 1000) > ``` #### get\\_calibration ```python def get_calibration(self) > list[float] ``` Get calibration data item description **type** func **return** return an array, format is [acc_x_bias, acc_y_bias, acc_z_bias, gyro_x_bias, gyro_y_bias, gyro_z_bias]
    If the calibration file cannot be found, an empty array will be returned. **static** False > C++ defination code: > ```cpp > std::vector get_calibration() > ``` ### Gcsv Gcsv class > C++ defination code: > ```cpp > class Gcsv > ``` #### \\_\\_init\\_\\_ ```python def __init__(self) > None ``` Construct a new IMU object item description **type** func **static** False > C++ defination code: > ```cpp > Gcsv() > ``` #### open ```python def open(self, path: str, tscale: float 0.001, gscale: float 1, ascale: float 1, mscale: float 1, version: str '1.3', id: str 'imu', orientation: str 'YxZ') > maix.err.Err ``` Open a file item description **type** func **param** **path**: the path where data will be saved
    **tscale**: time scale, default is 0.001
    **gscale**: gyroscope scale factor, default is 1, unit:g
    **ascale**: accelerometer scale factor, default is 1, unit:radians/second
    **mscale**: magnetometer scale factor, default is 1(unused)
    **version**: version number, default is \"1.3\"
    **id**: identifier for the IMU, default is \"imu\"
    **orientation**: sensor orientation, default is \"YxZ\"
    **return** error code **static** False > C++ defination code: > ```cpp > err::Err open(std::string path, double tscale 0.001, double gscale 1, double ascale 1, double mscale 1, std::string version \"1.3\", std::string id \"imu\", std::string orientation \"YxZ\") > ``` #### close ```python def close(self) > maix.err.Err ``` Close file item description **type** func **return** error code **static** False > C++ defination code: > ```cpp > err::Err close() > ``` #### is\\_opened ```python def is_opened(self) > bool ``` Check if the object is already open item description **type** func **return** true, opened; false, not opened **static** False > C++ defination code: > ```cpp > bool is_opened() > ``` #### write ```python def write(self, timestamp: float, gyro: list[float], acc: list[float], mag: list[float] []) > maix.err.Err ``` Write imu data to gcsv file item description **type** func **param** **t**: Timestamp of the current data. The actual value is equal to t * tscale. unit:s
    **gyro**: Gyroscope data must be an array consisting of x, y, and z axis data. The actual value is equal to gyro * gscale. unit:g
    **acc**: Acceleration data must be an array consisting of x, y, and z axis data. The actual value is equal to acc * ascale.unit:radians/second
    **mag**: Magnetic data must be an array consisting of x, y, and z axis data. Currently not supported.
    **static** False > C++ defination code: > ```cpp > err::Err write(double timestamp, std::vector gyro, std::vector acc, std::vector mag std::vector()) > ```"},"/maixpy/api/maix/nn/F.html":{"title":"maix.nn.F","content":" title: maix.nn.F maix.nn.F module > You can use `maix.nn.F` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ### softmax ```python def softmax(tensor: maix.tensor.Tensor, replace: bool) > maix.tensor.Tensor ``` Softmax, only support 1D tensor, multi dimension tensor will be treated as 1D tensor item description **param** **tensor**: input tensor
    **replace**: change input tensor data directly, if not, will create a new tensor
    **throw** If arg error, will raise err.Exception error **return** output tensor, if arg replace is true, return the arg tensor's address.
    If not replace, return a new object, so In C++, you should delete it manually in this case! > C++ defination code: > ```cpp > tensor::Tensor *softmax(tensor::Tensor *tensor, bool replace) > ``` ## Class"},"/maixpy/api/maix/camera.html":{"title":"maix.camera","content":" title: maix.camera maix.camera module, access camera device and get image from it > You can use `maix.camera` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ### list\\_devices ```python def list_devices() > list[str] ``` List all supported camera devices. item description **return** Returns the path to the camera device. > C++ defination code: > ```cpp > std::vector list_devices() > ``` ### set\\_regs\\_enable ```python def set_regs_enable(enable: bool True) > None ``` Enable set camera registers, default is false, if set to true, will not set camera registers, you can manually set registers by write_reg API. item description **param** **enable**: enable/disable set camera registers
    > C++ defination code: > ```cpp > void set_regs_enable(bool enable true) > ``` ## Class ### Camera Camera class > C++ defination code: > ```cpp > class Camera > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, width: int 1, height: int 1, format: maix.image.Format ..., device: str None, fps: float 1, buff_num: int 3, open: bool True, raw: bool False) > None ``` Construct a new Camera object.\\nMaximum resolution support 2560x1440. item description **type** func **param** **width**: camera width, default is 1, means auto, mostly means max width of camera support
    **height**: camera height, default is 1, means auto, mostly means max height of camera support
    **format**: camera output format, default is image.Format.FMT_RGB888
    **device**: camera device path, you can get devices by list_devices method, by default(value is NULL(None in MaixPy)) means the first device
    **fps**: camera fps, default is 1, means auto, mostly means max fps of camera support
    **buff_num**: camera buffer number, default is 3, meaning 3 buffers: one used by the user, one used to cache the next frame;
    using more than one buffer accelerates image reading but costs more memory.
    **open**: If true, camera will automatically call open() after creation. default is true.
    **raw**: If true, you can use read_raw() to capture the raw image output from the sensor.
    **static** False > C++ defination code: > ```cpp > Camera(int width 1, int height 1, image::Format format image::FMT_RGB888, const char *device nullptr, double fps 1, int buff_num 3, bool open true, bool raw false) > ``` #### get\\_ch\\_nums ```python def get_ch_nums(self) > int ``` Get the number of channels supported by the camera. item description **type** func **return** Returns the maximum number of channels. **static** False > C++ defination code: > ```cpp > int get_ch_nums() > ``` #### open ```python def open(self, width: int 1, height: int 1, format: maix.image.Format ..., fps: float 1, buff_num: int 1) > maix.err.Err ``` Open camera and run item description **type** func **param** **width**: camera width, default is 1, means auto, mostly means max width of camera support
    **height**: camera height, default is 1, means auto, mostly means max height of camera support
    **format**: camera output format, default same as the constructor's format argument
    **fps**: camera fps, default is 1, means auto, mostly means max fps of camera support
    **buff_num**: camera buffer number, default is 3, meaning 3 buffers: one used by the user, one used to cache the next frame;
    using more than one buffer accelerates image reading but costs more memory.
    **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err open(int width 1, int height 1, image::Format format image::FMT_INVALID, double fps 1, int buff_num 1) > ``` #### read ```python def read(self, buff: capsule None, buff_size: int 0, block: bool True, block_ms: int 1) > maix.image.Image ``` Get one frame image from camera buffer, must call open method before read.\\nIf open method not called, will call it automatically, if open failed, will throw exception!\\nSo call open method before read is recommended. item description **type** func **param** **buff**: buffer to store image data, if buff is nullptr, will alloc memory automatically.
    In MaixPy, default to None, you can create a image.Image object, then pass img.data() to buff.
    **block**: block read, default is true, meaning block until an image is read successfully;
    if set to false, nullptr is returned when there is no image in the buffer
    **block_ms**: block read timeout
    **return** image::Image object, if failed, return nullptr, you should delete if manually in C++ **static** False > C++ defination code: > ```cpp > image::Image *read(void *buff nullptr, size_t buff_size 0, bool block true, int block_ms 1) > ``` #### read\\_raw ```python def read_raw(self) > maix.image.Image ``` Read the raw image and obtain the width, height, and format of the raw image through the returned Image object. item description **type** func **note** The raw image is in a Bayer format, and its width and height are affected by the driver. Modifying the size and format is generally not allowed. **return** image::Image object, if failed, return nullptr, you should delete if manually in C++ **static** False > C++ defination code: > ```cpp > image::Image *read_raw() > ``` #### clear\\_buff ```python def clear_buff(self) > None ``` Clear buff to ensure the next read image is the latest image item description **type** func **static** False > C++ defination code: > ```cpp > void clear_buff() > ``` #### skip\\_frames ```python def skip_frames(self, num: int) > None ``` Read some frames and drop, this is usually used avoid read not stable image when camera just opened. item description **type** func **param** **num**: number of frames to read and drop
    **static** False > C++ defination code: > ```cpp > void skip_frames(int num) > ``` #### close ```python def close(self) > None ``` Close camera item description **type** func **static** False > C++ defination code: > ```cpp > void close() > ``` #### add\\_channel ```python def add_channel(self, width: int 1, height: int 1, format: maix.image.Format ..., fps: float 1, buff_num: int 3, open: bool True) > Camera ``` Add a new channel and return a new Camera object, you can use close() to close this channel. item description **type** func **param** **width**: camera width, default is 1, means auto, mostly means max width of camera support
    **height**: camera height, default is 1, means auto, mostly means max height of camera support
    **format**: camera output format, default is RGB888
    **fps**: camera fps, default is 1, means auto, mostly means max fps of camera support
    **buff_num**: camera buffer number, default is 3, meaning 3 buffers: one used by the user, one used to cache the next frame;
    using more than one buffer accelerates image reading but costs more memory.
    **open**: If true, camera will automatically call open() after creation. default is true.
    **return** new Camera object **static** False > C++ defination code: > ```cpp > camera::Camera *add_channel(int width 1, int height 1, image::Format format image::FMT_RGB888, double fps 1, int buff_num 3, bool open true) > ``` #### is\\_opened ```python def is_opened(self) > bool ``` Check if camera is opened item description **type** func **return** true if camera is opened, false if not **static** False > C++ defination code: > ```cpp > bool is_opened() > ``` #### is\\_closed ```python def is_closed(self) > bool ``` check camera device is closed or not item description **type** func **return** closed or not, bool type **static** False > C++ defination code: > ```cpp > bool is_closed() > ``` #### width ```python def width(self) > int ``` Get camera width item description **type** func **return** camera width **static** False > C++ defination code: > ```cpp > int width() > ``` #### height ```python def height(self) > int ``` Get camera height item description **type** func **return** camera height **static** False > C++ defination code: > ```cpp > int height() > ``` #### fps ```python def fps(self) > float ``` Get camera fps item description **type** func **return** camera fps **static** False > C++ defination code: > ```cpp > double fps() > ``` #### format ```python def format(self) > maix.image.Format ``` Get camera output format item description **type** func **return** camera output format, image::Format object **static** False > C++ defination code: > ```cpp > image::Format format() > ``` #### buff\\_num ```python def buff_num(self) > int ``` Get camera buffer number item description **type** func **return** camera buffer number **static** False > C++ defination code: > ```cpp > int buff_num() > ``` #### hmirror ```python def hmirror(self, value: int 1) > int ``` Set/Get camera horizontal mirror item description **type** func **return** camera horizontal mirror **static** False > C++ defination code: > ```cpp > int hmirror(int value 1) > ``` #### vflip ```python def vflip(self, value: int 1) > int ``` Set/Get camera vertical flip item description **type** func **return** camera vertical flip **static** False > C++ defination code: > ```cpp > int vflip(int value 1) > ``` #### device ```python def device(self) > str ``` Get camera device path item description **type** func **return** camera device path **static** False > C++ defination code: > ```cpp > std::string device() > ``` #### write\\_reg ```python def write_reg(self, addr: int, data: int, bit_width: int 8) > maix.err.Err ``` Write camera register item description **type** func **param** **addr**: register address
    **data**: register data
    **bit_width**: register data bit width, default is 8
    **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err write_reg(int addr, int data, int bit_width 8) > ``` #### read\\_reg ```python def read_reg(self, addr: int, bit_width: int 8) > int ``` Read camera register item description **type** func **param** **addr**: register address
    **bit_width**: register data bit width, default is 8
    **return** register data, 1 means failed **static** False > C++ defination code: > ```cpp > int read_reg(int addr, int bit_width 8) > ``` #### show\\_colorbar ```python def show_colorbar(self, enable: bool) > maix.err.Err ``` Camera output color bar image for test item description **type** func **param** **enable**: enable/disable color bar
    **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err show_colorbar(bool enable) > ``` #### get\\_channel ```python def get_channel(self) > int ``` Get channel of camera item description **type** func **return** channel number **static** False > C++ defination code: > ```cpp > int get_channel() > ``` #### set\\_resolution ```python def set_resolution(self, width: int, height: int) > maix.err.Err ``` Set camera resolution item description **type** func **param** **width**: new width
    **height**: new height
    **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err set_resolution(int width, int height) > ``` #### set\\_fps ```python def set_fps(self, fps: float) > maix.err.Err ``` Set camera fps item description **type** func **param** **fps**: new fps
    **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err set_fps(double fps) > ``` #### exposure ```python def exposure(self, value: int 1) > int ``` Set/Get camera exposure item description **type** func **attention** This method will affect the isp and thus the image, so please be careful with it. **param** **value**: exposure time. unit: us
If value == -1, return exposure time.
If value != 0, set and return exposure time.
    **return** camera exposure time **static** False > C++ defination code: > ```cpp > int exposure(int value 1) > ``` #### gain ```python def gain(self, value: int 1) > int ``` Set/Get camera gain item description **type** func **attention** This method will affect the isp and thus the image, so please be careful with it. **param** **value**: camera gain.
If value == -1, returns camera gain.
If value != 0, set and return camera gain.
    **return** camera gain **static** False > C++ defination code: > ```cpp > int gain(int value 1) > ``` #### luma ```python def luma(self, value: int 1) > int ``` Set/Get camera luma item description **type** func **attention** This method will affect the isp and thus the image, so please be careful with it. **param** **value**: luma value, range is [0, 100]
If value == -1, returns luma value.
If value != 0, set and return luma value.
    **return** returns luma value **static** False > C++ defination code: > ```cpp > int luma(int value 1) > ``` #### constrast ```python def constrast(self, value: int 1) > int ``` Set/Get camera constrast item description **type** func **attention** This method will affect the isp and thus the image, so please be careful with it. **param** **value**: constrast value, range is [0, 100]
If value == -1, returns constrast value.
If value != 0, set and return constrast value.
    **return** returns constrast value **static** False > C++ defination code: > ```cpp > int constrast(int value 1) > ``` #### saturation ```python def saturation(self, value: int 1) > int ``` Set/Get camera saturation item description **type** func **attention** This method will affect the isp and thus the image, so please be careful with it. **param** **value**: saturation value, range is [0, 100]
If value == -1, returns saturation value.
If value != 0, set and return saturation value.
    **return** returns saturation value **static** False > C++ defination code: > ```cpp > int saturation(int value 1) > ``` #### awb\\_mode ```python def awb_mode(self, value: int 1) > int ``` Set/Get white balance mode (deprecated interface) item description **type** func **attention** This method will affect the isp and thus the image, so please be careful with it.
This interface may be deprecated in the future, and there may be incompatibilities in the definition of the parameters of the new interface **param** **value**: value = 0 means set white balance to auto mode, value = 1 means set white balance to manual mode, default is auto mode.
    **return** returns awb mode **static** False > C++ defination code: > ```cpp > int awb_mode(int value 1) > ``` #### set\\_awb ```python def set_awb(self, mode: int 1) > int ``` Set/Get white balance mode item description **type** func **attention** This method will affect the isp and thus the image, so please be careful with it. **param** **value**: value 0, means set white balance to manual mode, value 1, means set white balance to auto mode, default is auto mode.
    **return** returns awb mode **static** False > C++ defination code: > ```cpp > int set_awb(int mode 1) > ``` #### exp\\_mode ```python def exp_mode(self, value: int 1) > int ``` Set/Get exposure mode (deprecated interface) item description **type** func **attention** This method will affect the isp and thus the image, so please be careful with it.
This interface may be deprecated in the future, and there may be incompatibilities in the definition of the parameters of the new interface **param** **value**: value = 0 means set exposure to auto mode, value = 1 means set exposure to manual mode, default is auto mode.
    **return** returns exposure mode **static** False > C++ defination code: > ```cpp > int exp_mode(int value 1) > ``` #### set\\_windowing ```python def set_windowing(self, roi: list[int]) > maix.err.Err ``` Set window size of camera item description **type** func **param** **roi**: Support two input formats, [x,y,w,h] set the coordinates and size of the window;
[w,h] set only the size of the window, in which case the window is centred. See the usage sketch below.
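A minimal usage sketch for the windowing and ISP controls documented above (the resolution, window size and control values below are arbitrary assumptions, not recommended settings):

```python
from maix import camera, display

cam = camera.Camera(640, 480)    # resolution is an arbitrary choice for this sketch
disp = display.Display()

cam.set_windowing([224, 224])    # [w,h] form: a centred 224x224 window
cam.exposure(10000)              # manual exposure, unit: us (assumed value)
cam.gain(100)                    # manual gain (assumed value)
cam.luma(50)                     # luma/contrast/saturation take values in [0, 100]
cam.saturation(60)

while not cam.is_closed():
    img = cam.read()             # grab one frame as a maix.image.Image
    disp.show(img)
```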
    **return** error code **static** False > C++ defination code: > ```cpp > err::Err set_windowing(std::vector roi) > ```"},"/maixpy/api/index.html":{"title":"MaixPy API -- Maix AI machine vision platform Python API","content":" title: MaixPy API Maix AI machine vision platform Python API **You can read API doc at [MaixPy API on Sipeed Wiki](https://wiki.sipeed.com/maixpy/api/index.html)** If you want to preview API doc offline, build MaixPy, and API doc will be generated in `MaixPy/docs/api/` directory. > For MaixPy developer: This API documentation is generated from the source code, DO NOT edit this file manually! MaixPy API documentation, modules: module brief [maix.err](./maix/err.html) maix.err module [maix.tensor](./maix/tensor.html) maix.tensor module [maix.image](./maix/image.html) maix.image module, image related definition and functions [maix.camera](./maix/camera.html) maix.camera module, access camera device and get image from it [maix.display](./maix/display.html) maix.display module, control display device and show image on it [maix.ext_dev](./maix/ext_dev.html) maix.ext_dev module [maix.audio](./maix/audio.html) maix.audio module [maix.tracker](./maix/tracker.html) maix.tracker module [maix.http](./maix/http.html) maix.http module [maix.rtsp](./maix/rtsp.html) maix.rtsp module [maix.rtmp](./maix/rtmp.html) maix.rtmp module [maix.touchscreen](./maix/touchscreen.html) maix.touchscreen module [maix.video](./maix/video.html) maix.video module [maix.network](./maix/network.html) maix.network module [maix.comm](./maix/comm.html) maix.comm module [maix.fs](./maix/fs.html) maix.fs module [maix.app](./maix/app.html) maix.app module [maix.protocol](./maix/protocol.html) maix.protocol module [maix.time](./maix/time.html) maix.time module [maix.example](./maix/example.html) example module, this will be maix.example module in MaixPy, maix::example namespace in MaixCDK [maix.util](./maix/util.html) maix.util module [maix.thread](./maix/thread.html) maix.thread module [maix.sys](./maix/sys.html) maix.sys module [maix.i18n](./maix/i18n.html) maix.i18n module [maix.peripheral](./maix/peripheral.html) Chip's peripheral driver [maix.nn](./maix/nn.html) maix.nn module "},"/maixpy/api/maix/rtsp.html":{"title":"maix.rtsp","content":" title: maix.rtsp maix.rtsp module > You can use `maix.rtsp` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ### RtspStreamType The stream type of rtsp item describe **values** **RTSP_STREAM_NONE**: format invalid
    **RTSP_STREAM_H265**:
    > C++ defination code: > ```cpp > enum RtspStreamType > { > RTSP_STREAM_NONE 0, // format invalid > RTSP_STREAM_H265, > } > ``` ## Variable ## Function ## Class ### Region Region class > C++ defination code: > ```cpp > class Region > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, x: int, y: int, width: int, height: int, format: maix.image.Format, camera: maix.camera.Camera) > None ``` Construct a new Region object item description **type** func **param** **x**: region coordinate x
    **y**: region coordinate y
    **width**: region width
    **height**: region height
    **format**: region format
    **camera**: bind region to camera
    **static** False > C++ defination code: > ```cpp > Region(int x, int y, int width, int height, image::Format format, camera::Camera *camera) > ``` #### get\\_canvas ```python def get_canvas(self) > maix.image.Image ``` Return an image object from region item description **type** func **return** image object **static** False > C++ defination code: > ```cpp > image::Image *get_canvas() > ``` #### update\\_canvas ```python def update_canvas(self) > maix.err.Err ``` Update canvas item description **type** func **return** error code **static** False > C++ defination code: > ```cpp > err::Err update_canvas() > ``` ### Rtsp Rtsp class > C++ defination code: > ```cpp > class Rtsp > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, ip: str '', port: int 8554, fps: int 30, stream_type: RtspStreamType ...) > None ``` Construct a new Video object item description **type** func **param** **ip**: rtsp ip
    **port**: rtsp port
    **fps**: rtsp fps
    **stream_type**: rtsp stream type
    **static** False > C++ defination code: > ```cpp > Rtsp(std::string ip std::string(), int port 8554, int fps 30, rtsp::RtspStreamType stream_type rtsp::RtspStreamType::RTSP_STREAM_H265) > ``` #### start ```python def start(self) > maix.err.Err ``` start rtsp item description **type** func **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err start() > ``` #### start (overload 1) stop rtsp item description **type** func **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err stop() > ``` #### bind\\_camera ```python def bind_camera(self, camera: maix.camera.Camera) > maix.err.Err ``` Bind camera item description **type** func **param** **camera**: camera object
    **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err bind_camera(camera::Camera *camera) > ``` #### write ```python def write(self, frame: ...) > maix.err.Err ``` Write data to rtsp item description **type** func **param** **frame**: video frame data
    **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err write(video::Frame &frame) > ``` #### get\\_url ```python def get_url(self) > str ``` Get url of rtsp item description **type** func **return** url of rtsp **static** False > C++ defination code: > ```cpp > std::string get_url() > ``` #### get\\_urls ```python def get_urls(self) > list[str] ``` Get url list of rtsp item description **type** func **return** url list of rtsp **static** False > C++ defination code: > ```cpp > std::vector get_urls() > ``` #### to\\_camera ```python def to_camera(self) > maix.camera.Camera ``` Get camera object from rtsp item description **type** func **return** camera object **static** False > C++ defination code: > ```cpp > camera::Camera *to_camera() > ``` #### rtsp\\_is\\_start ```python def rtsp_is_start(self) > bool ``` return rtsp start status item description **type** func **return** true means rtsp is start, false means rtsp is stop. **static** False > C++ defination code: > ```cpp > bool rtsp_is_start() > ``` #### add\\_region ```python def add_region(self, x: int, y: int, width: int, height: int, format: maix.image.Format ...) > Region ``` return a region object, you can draw image on the region. item description **type** func **param** **x**: region coordinate x
    **y**: region coordinate y
    **width**: region width
    **height**: region height
    **format**: region format, support Format::FMT_BGRA8888 only
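A usage sketch that ties the Rtsp class above to a camera and starts streaming (the camera resolution and pixel format are assumptions; check what your board's hardware encoder expects):

```python
from maix import camera, image, rtsp, time

# resolution/format are assumptions; YVU420SP is commonly used for hardware H.265 encoding
cam = camera.Camera(2560, 1440, image.Format.FMT_YVU420SP)
server = rtsp.Rtsp()              # defaults documented above: port 8554, 30 fps, H.265
server.bind_camera(cam)
server.start()
print('rtsp url:', server.get_url())

while True:
    time.sleep_ms(100)            # keep the process alive while the server streams
```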
    **return** the reigon object **static** False > C++ defination code: > ```cpp > rtsp::Region *add_region(int x, int y, int width, int height, image::Format format image::Format::FMT_BGRA8888) > ``` #### update\\_region ```python def update_region(self, region: Region) > maix.err.Err ``` update and show region item description **type** func **return** error code **static** False > C++ defination code: > ```cpp > err::Err update_region(rtsp::Region ®ion) > ``` #### del\\_region ```python def del_region(self, region: Region) > maix.err.Err ``` del region item description **type** func **return** error code **static** False > C++ defination code: > ```cpp > err::Err del_region(rtsp::Region *region) > ``` #### draw\\_rect ```python def draw_rect(self, id: int, x: int, y: int, width: int, height: int, color: maix.image.Color, thickness: int 1) > maix.err.Err ``` Draw a rectangle on the canvas item description **type** func **param** **id**: region id
    **x**: rectangle coordinate x
    **y**: rectangle coordinate y
    **width**: rectangle width
    **height**: rectangle height
    **color**: rectangle color
**thickness**: rectangle thickness. If you set it to -1, the rectangle will be filled.
    **return** error code **static** False > C++ defination code: > ```cpp > err::Err draw_rect(int id, int x, int y, int width, int height, image::Color color, int thickness 1) > ``` #### draw\\_string ```python def draw_string(self, id: int, x: int, y: int, str: str, color: maix.image.Color, size: int 16, thickness: int 1) > maix.err.Err ``` Draw a string on the canvas item description **type** func **param** **id**: region id
    **x**: string coordinate x
    **y**: string coordinate y
    **str**: string
    **color**: string color
    **size**: string size
    **thickness**: string thickness
    **return** error code **static** False > C++ defination code: > ```cpp > err::Err draw_string(int id, int x, int y, const char *str, image::Color color, int size 16, int thickness 1) > ```"},"/maixpy/api/maix/peripheral.html":{"title":"maix.peripheral","content":" title: maix.peripheral Chip's peripheral driver > You can use `maix.peripheral` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module module brief [key](./peripheral/key.html) maix.peripheral.key module [i2c](./peripheral/i2c.html) maix.peripheral.i2c module [spi](./peripheral/spi.html) maix.peripheral.spi module [pwm](./peripheral/pwm.html) maix.peripheral.pwm module [wdt](./peripheral/wdt.html) maix.peripheral.wdt module [adc](./peripheral/adc.html) maix.peripheral.adc module [pinmap](./peripheral/pinmap.html) maix.peripheral.pinmap module [uart](./peripheral/uart.html) maix uart peripheral driver [gpio](./peripheral/gpio.html) maix.peripheral.gpio module [hid](./peripheral/hid.html) maix.peripheral.hid module [timer](./peripheral/timer.html) maix.peripheral.timer module ## Enum ## Variable ## Function ## Class"},"/maixpy/api/maix/tracker.html":{"title":"maix.tracker","content":" title: maix.tracker maix.tracker module > You can use `maix.tracker` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ## Class ### Object tracker.Object class > C++ defination code: > ```cpp > class Object > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, x: int, y: int, w: int, h: int, class_id: int, score: float) > None ``` tracker.Object class constructor item description **type** func **static** False > C++ defination code: > ```cpp > Object(const int &x, const int &y, const int &w, const int &h, const int &class_id, const float &score) > ``` #### x position x attribute. item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int x > ``` #### y position y attribute. item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int y > ``` #### w position rectangle width. item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int w > ``` #### h position rectangle height. item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int h > ``` #### class\\_id object class id, int type. item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int class_id > ``` #### score object score(prob). item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > float score > ``` ### Track tracker.Track class > C++ defination code: > ```cpp > class Track > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, id: int, score: float, lost: bool, start_frame_id: int, frame_id: int) > None ``` tracker.Track class constructor item description **type** func **static** False > C++ defination code: > ```cpp > Track(const size_t &id, const float &score, const bool &lost, const size_t &start_frame_id, const size_t &frame_id) > ``` #### id track id. item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > size_t id > ``` #### score track score(prob). 
item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > float score > ``` #### lost whether this track lost. item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > bool lost > ``` #### start\\_frame\\_id track start frame id. item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > size_t start_frame_id > ``` #### frame\\_id track current frame id. item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > size_t frame_id > ``` #### history track position history, the last one is latest position. item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::deque history > ``` ### ByteTracker tracker.ByteTracker class > C++ defination code: > ```cpp > class ByteTracker > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, max_lost_buff_num: int 60, track_thresh: float 0.5, high_thresh: float 0.6, match_thresh: float 0.8, max_history: int 20) > None ``` tracker.ByteTracker class constructor item description **type** func **param** **max_lost_buff_num**: the frames for keep lost tracks.
    **track_thresh**: tracking confidence threshold.
    **high_thresh**: threshold to add to new track.
**match_thresh**: matching threshold for tracking, e.g. if one object's IoU across two frames is < match_thresh, we consider the two detections to be the same object.
**max_history**: max length of a track's position history. See the usage sketch below.
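A sketch of feeding detector output through the ByteTracker documented above; the boxes and scores are made-up values (in practice they come from a detector), and the items of `history` are assumed to expose x/y/w/h like tracker.Object:

```python
from maix import tracker

# thresholds below are simply the documented defaults
bt = tracker.ByteTracker(60, 0.5, 0.6, 0.8, 20)

# wrap each detection of the current frame as tracker.Object(x, y, w, h, class_id, score)
detections = [
    tracker.Object(120, 80, 60, 120, 0, 0.92),
    tracker.Object(300, 150, 50, 100, 0, 0.75),
]

tracks = bt.update(detections)    # call once per frame with that frame's detections
for t in tracks:
    last = t.history[-1]          # latest position of this track
    print('track', t.id, 'lost' if t.lost else 'active', last.x, last.y, last.w, last.h)
```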
    **static** False > C++ defination code: > ```cpp > ByteTracker(const int &max_lost_buff_num 60, > const float &track_thresh 0.5, > const float &high_thresh 0.6, > const float &match_thresh 0.8, > const int &max_history 20) > ``` #### update ```python def update(self, objs: list[Object]) > list[Track] ``` update tracks according to current detected objects. item description **type** func **static** False > C++ defination code: > ```cpp > std::vector update(const std::vector &objs) > ```"},"/maixpy/api/maix/util.html":{"title":"maix.util","content":" title: maix.util maix.util module > You can use `maix.util` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ### do\\_exit\\_function ```python def do_exit_function() > None ``` exec all of exit function > C++ defination code: > ```cpp > void do_exit_function() > ``` ### register\\_atexit ```python def register_atexit() > None ``` Registering default processes that need to be executed on exit > C++ defination code: > ```cpp > void register_atexit() > ``` ## Class"},"/maixpy/api/maix/tensor.html":{"title":"maix.tensor","content":" title: maix.tensor maix.tensor module > You can use `maix.tensor` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ### DType Tensor data types item describe **values** **UINT8**:
    **INT8**:
    **UINT16**:
    **INT16**:
    **UINT32**:
    **INT32**:
    **FLOAT16**:
    **FLOAT32**:
    **FLOAT64**:
    **BOOL**:
    **DTYPE_MAX**:
    > C++ defination code: > ```cpp > enum DType > { > UINT8 0, > INT8, > UINT16, > INT16, > UINT32, > INT32, > FLOAT16, > FLOAT32, > FLOAT64, > BOOL, > // STRING, > // OBJECT, > DTYPE_MAX > } > ``` ## Variable ### dtype\\_size Tensor data type size in bytes item description **attention** It's a copy of this variable in MaixPy,
so changing it in C++ (e.g. updating the var in the hello function) will not take effect on the var in MaixPy.
    So we add const for this var to avoid this mistake. **value** **{
    1, // UINT8
    1, // INT8
    2, // UINT16
    2, // INT16
    4, // UINT32
    4, // INT32
    2, // FLOAT16
    4, // FLOAT32
    8, // FLOAT64
    1, // BOOL
    // 1, // STRING
    // 1, // OBJECT
    0
    }** **readonly** True > C++ defination code: > ```cpp > const std::vector dtype_size { > 1, // UINT8 > 1, // INT8 > 2, // UINT16 > 2, // INT16 > 4, // UINT32 > 4, // INT32 > 2, // FLOAT16 > 4, // FLOAT32 > 8, // FLOAT64 > 1, // BOOL > // 1, // STRING > // 1, // OBJECT > 0 > } > ``` ### dtype\\_name Tensor data type name item description **value** **{
    \"uint8\",
    \"int8\",
    \"uint16\",
    \"int16\",
    \"uint32\",
    \"int32\",
    \"float16\",
    \"float32\",
    \"float64\",
    \"bool\",
    // \"string\",
    // \"object\",
    \"invalid\"
    }** **readonly** True > C++ defination code: > ```cpp > const std::vector dtype_name { > \"uint8\", > \"int8\", > \"uint16\", > \"int16\", > \"uint32\", > \"int32\", > \"float16\", > \"float32\", > \"float64\", > \"bool\", > // \"string\", > // \"object\", > \"invalid\" > } > ``` ## Function ### tensor\\_from\\_numpy\\_float32 ```python def tensor_from_numpy_float32(array: numpy.ndarray[numpy.float32], copy: bool True) > Tensor ``` float32 type numpy ndarray object to tensor.Tensor object. item description **param** **array**: numpy array object.
    **copy**: if true, will alloc new buffer and copy data, else will directly use array's data buffer, default true.
Use this arg carefully: when set to false, the array MUST be kept alive until the returned tensor is no longer used, otherwise the program will crash.
    **return** tensor.Tensor object. > C++ defination code: > ```cpp > tensor::Tensor *tensor_from_numpy_float32(py::array_t array, bool copy true) > ``` ### tensor\\_from\\_numpy\\_uint8 ```python def tensor_from_numpy_uint8(array: numpy.ndarray[numpy.uint8], copy: bool True) > Tensor ``` uint8 type numpy ndarray object to tensor.Tensor object. item description **param** **array**: numpy array object.
    **copy**: if true, will alloc new buffer and copy data, else will directly use array's data buffer, default true.
Use this arg carefully: when set to false, the array MUST be kept alive until the returned tensor is no longer used, otherwise the program will crash.
    **return** tensor.Tensor object. > C++ defination code: > ```cpp > tensor::Tensor *tensor_from_numpy_uint8(py::array_t array, bool copy true) > ``` ### tensor\\_from\\_numpy\\_int8 ```python def tensor_from_numpy_int8(array: numpy.ndarray[numpy.int8], copy: bool True) > Tensor ``` int8 type numpy ndarray object to tensor.Tensor object. item description **param** **array**: numpy array object.
    **copy**: if true, will alloc new buffer and copy data, else will directly use array's data buffer, default true.
Use this arg carefully: when set to false, the array MUST be kept alive until the returned tensor is no longer used, otherwise the program will crash.
    **return** tensor.Tensor object. > C++ defination code: > ```cpp > tensor::Tensor *tensor_from_numpy_int8(py::array_t array, bool copy true) > ``` ### tensor\\_to\\_numpy\\_float32 ```python def tensor_to_numpy_float32(t: Tensor, copy: bool True) > numpy.ndarray[numpy.float32] ``` tensor.Tensor object to float32 type numpy ndarray object. item description **param** **t**: tensor.Tensor object.
    **copy**: Whether alloc new Tensor and copy data or not,
if not, the array object will directly use the arg's data buffer, which is faster, but changing the array will affect the arg's data. Default is true.
    **return** numpy array object > C++ defination code: > ```cpp > py::array_t tensor_to_numpy_float32(tensor::Tensor *t, bool copy true) > ``` ### tensor\\_to\\_numpy\\_uint8 ```python def tensor_to_numpy_uint8(t: Tensor, copy: bool True) > numpy.ndarray[numpy.uint8] ``` tensor.Tensor object to int8 type numpy ndarray object. item description **param** **t**: tensor.Tensor object.
    **copy**: Whether alloc new Tensor and copy data or not,
if not, the array object will directly use the arg's data buffer, which is faster, but changing the array will affect the arg's data. Default is true.
    **return** numpy array object > C++ defination code: > ```cpp > py::array_t tensor_to_numpy_uint8(tensor::Tensor *t, bool copy true) > ``` ### tensor\\_to\\_numpy\\_int8 ```python def tensor_to_numpy_int8(t: Tensor, copy: bool True) > numpy.ndarray[numpy.int8] ``` tensor.Tensor object to int8 type numpy ndarray object. item description **param** **t**: tensor.Tensor object.
    **copy**: Whether alloc new Tensor and copy data or not,
if not, the array object will directly use the arg's data buffer, which is faster, but changing the array will affect the arg's data. Default is true. See the conversion sketch below.
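A round-trip sketch for the numpy conversion helpers above (the array shape is arbitrary; passing copy=True keeps the tensor's buffer independent of the numpy array):

```python
import numpy as np
from maix import tensor

arr = np.arange(12, dtype=np.float32).reshape(3, 4)   # any float32 array

t = tensor.tensor_from_numpy_float32(arr, True)       # copy=True: tensor owns its own buffer
print(t.shape(), t.dtype())                           # expect [3, 4] and the FLOAT32 dtype

back = tensor.tensor_to_numpy_float32(t, True)        # copy back into a new numpy array
assert np.allclose(arr, back)
```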
    **return** numpy array object > C++ defination code: > ```cpp > py::array_t tensor_to_numpy_int8(tensor::Tensor *t, bool copy true) > ``` ## Class ### Tensor Tensor class > C++ defination code: > ```cpp > class Tensor > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, shape: list[int], dtype: DType) > None ``` Tensor constructor item description **type** func **param** **shape**: tensor shape, a int list
    **dtype**: tensor element data type, see DType of this module
    **static** False > C++ defination code: > ```cpp > Tensor(std::vector shape, tensor::DType dtype) > ``` #### to\\_str ```python def to_str(self) > str ``` To string item description **type** func **static** False > C++ defination code: > ```cpp > std::string to_str() > ``` #### \\_\\_str\\_\\_ ```python def __str__(self) > str ``` To string item description **type** func **static** False > C++ defination code: > ```cpp > std::string __str__() > ``` #### shape ```python def shape(self) > list[int] ``` get tensor shape item description **type** func **return** tensor shape, a int list **static** False > C++ defination code: > ```cpp > std::vector shape() > ``` #### expand\\_dims ```python def expand_dims(self, axis: int) > None ``` expand tensor shape item description **type** func **param** **axis**: axis to expand
    **static** False > C++ defination code: > ```cpp > void expand_dims(int axis) > ``` #### reshape ```python def reshape(self, shape: list[int]) > None ``` reshape tensor shape, if size not match, it will throw an err::Exception item description **type** func **param** **shape**: new shape
    **static** False > C++ defination code: > ```cpp > void reshape(std::vector shape) > ``` #### flatten ```python def flatten(self) > None ``` Flatten tensor shape to 1D item description **type** func **static** False > C++ defination code: > ```cpp > void flatten() > ``` #### dtype ```python def dtype(self) > DType ``` get tensor data type item description **type** func **return** tensor data type, see DType of this module **static** False > C++ defination code: > ```cpp > tensor::DType dtype() > ``` #### to\\_float\\_list ```python def to_float_list(self) > list[float] ``` get tensor data and return a list item description **type** func **return** list type data **static** False > C++ defination code: > ```cpp > std::valarray* to_float_list() > ``` #### argmax ```python def argmax(self, axis: int 65535) > Tensor ``` argmax of tensor item description **type** func **param** **axis**: By default, the index is into the flattened array, otherwise along the specified axis., wrong axis will throw an err::Exception
    **return** argmax result, you need to delete it after use in C++. **static** False > C++ defination code: > ```cpp > tensor::Tensor *argmax(int axis 0xffff) > ``` #### argmax1 ```python def argmax1(self) > int ``` argmax1, flattened data max index item description **type** func **return** argmax result, int type **static** False > C++ defination code: > ```cpp > int argmax1() > ``` ### Tensors Tensors > C++ defination code: > ```cpp > class Tensors > ``` #### \\_\\_init\\_\\_ ```python def __init__(self) > None ``` Constructor of Tensors item description **type** func **static** False > C++ defination code: > ```cpp > Tensors() > ``` #### add\\_tensor ```python def add_tensor(self, key: str, tensor: Tensor, copy: bool, auto_delete: bool) > None ``` Add tensor item description **type** func **static** False > C++ defination code: > ```cpp > void add_tensor(const std::string &key, tensor::Tensor *tensor, bool copy, bool auto_delete) > ``` #### rm\\_tensor ```python def rm_tensor(self, key: str) > None ``` Remove tensor item description **type** func **static** False > C++ defination code: > ```cpp > void rm_tensor(const std::string &key) > ``` #### clear ```python def clear(self) > None ``` Clear tensors item description **type** func **static** False > C++ defination code: > ```cpp > void clear() > ``` #### get\\_tensor ```python def get_tensor(self, key: str) > Tensor ``` Get tensor by key item description **type** func **static** False > C++ defination code: > ```cpp > tensor::Tensor &get_tensor(const std::string &key) > ``` #### \\_\\_getitem\\_\\_ ```python def __getitem__(self, key: str) > Tensor ``` Operator [] item description **type** func **static** False > C++ defination code: > ```cpp > tensor::Tensor &operator[](const std::string &key) > ``` #### \\_\\_len\\_\\_ ```python def __len__(self) > int ``` Size item description **type** func **static** False > C++ defination code: > ```cpp > size_t size() > ``` #### keys ```python def keys(self) > list[str] ``` Get names item description **type** func **static** False > C++ defination code: > ```cpp > std::vector keys() > ``` #### tensors Tensors data, dict type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::map tensors > ```"},"/maixpy/api/maix/touchscreen.html":{"title":"maix.touchscreen","content":" title: maix.touchscreen maix.touchscreen module > You can use `maix.touchscreen` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ## Class ### TouchScreen TouchScreen class > C++ defination code: > ```cpp > class TouchScreen > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, device: str '', open: bool True) > None ``` Construct a new TouchScreen object item description **type** func **param** **device**: touchscreen device path, you can get devices by list_devices method, by default(value is NULL(None in MaixPy)) means the first device
    **open**: If true, touchscreen will automatically call open() after creation. default is true.
    **static** False > C++ defination code: > ```cpp > TouchScreen(const std::string &device \"\", bool open true) > ``` #### open ```python def open(self) > maix.err.Err ``` open touchscreen device item description **type** func **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err open() > ``` #### close ```python def close(self) > maix.err.Err ``` close touchscreen device item description **type** func **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err close() > ``` #### read ```python def read(self) > list[int] ``` read touchscreen device item description **type** func **attention** This method will discard same event in buffer, that is:
if there are too many move events in the buffer when this method is called, only the last one will be returned,
    and if read pressed or released event, it will return immediately. **return** Returns a list include x, y, pressed state **static** False > C++ defination code: > ```cpp > std::vector read() > ``` #### read (overload 1) read touchscreen device item description **type** func **attention** This method will return immediately if have event, so it's better to use available() to check if have more event in buffer,
otherwise, if your program calls read() at too long an interval, too many events will pile up in the buffer and slow your program down. **return** Returns a list including x, y, pressed state **static** False > C++ defination code: > ```cpp > std::vector read0() > ``` #### available ```python def available(self, timeout: int 0) > bool ``` Check whether we need to read from the touchscreen; for an event driven touchscreen this means whether there is an event or not item description **type** func **param** **timeout**: -1 means block, 0 means no block, >0 means timeout, default is 0, unit is ms. See the usage sketch below.
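A polling sketch for the read()/available() pair documented above (the 10 ms sleep is an arbitrary choice):

```python
from maix import touchscreen, time

ts = touchscreen.TouchScreen()      # opens the default touchscreen device
while True:
    if ts.available():              # non-blocking check for a pending event
        x, y, pressed = ts.read()   # read() returns [x, y, pressed]
        print('touch at', x, y, 'pressed' if pressed else 'released')
    time.sleep_ms(10)               # avoid busy-looping
```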
    **return** true if need to read(have event), false if not **static** False > C++ defination code: > ```cpp > bool available(int timeout 0) > ``` #### is\\_opened ```python def is_opened(self) > bool ``` Check if touchscreen is opened item description **type** func **return** true if touchscreen is opened, false if not **static** False > C++ defination code: > ```cpp > bool is_opened() > ```"},"/maixpy/api/maix/example.html":{"title":"maix.example","content":" title: maix.example example module, this will be maix.example module in MaixPy, maix::example namespace in MaixCDK > You can use `maix.example` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ### Kind Example enum(not recommend! See Kind2) item describe **values** **KIND_NONE**: Kind none, value always 0, other enum value will auto increase
    **KIND_DOG**: Kind dog
**KIND_CAT**: Kind cat, value is auto generated according to KIND_DOG
    **KIND_BIRD**:
    **KIND_MAX**: Max Kind quantity
You can get max Kind value by KIND_MAX - 1
    > C++ defination code: > ```cpp > enum Kind > { > KIND_NONE 0, /** Kind none, value always 0, other enum value will auto increase */ > KIND_DOG, /** Kind dog*/ > KIND_CAT, // Kind cat, value is auto generated according to KING_DOG > KIND_BIRD, > KIND_MAX /* Max Kind quantity, > You can get max Kind value by KIND_MAX 1 > */ > } > ``` ### Kind2 Example enum class(recommend!) item describe **values** **NONE**: Kind none, value always 0, other enum value will auto increase
    **DOG**: Kind dog
**CAT**: Kind cat, value is auto generated according to KIND_DOG
    **BIRD**:
    **MAX**: Max Kind quantity
You can get max Kind value by KIND_MAX - 1
    > C++ defination code: > ```cpp > enum class Kind2 > { > NONE 0, /** Kind none, value always 0, other enum value will auto increase */ > DOG, /** Kind dog*/ > CAT, // Kind cat, value is auto generated according to KING_DOG > BIRD, > MAX /* Max Kind quantity, > You can get max Kind value by KIND_MAX 1 > */ > } > ``` ## Variable ### var1 Example module variable item description **attention** It's a copy of this variable in MaixPy,
so changing it in C++ (e.g. updating the var in the hello function) will not take effect on the var in MaixPy.
    So we add const for this var to avoid this mistake. **value** **\"Sipeed\"** **readonly** True > C++ defination code: > ```cpp > const std::string var1 \"Sipeed\" > ``` ### list\\_var Tensor data type size in bytes item description **attention** **1**. DO NOT use C/C++ array directly for python API, the python wrapper not support it.
    Use std::vector instead.
    **2**. It's a copy of this variable in MaixPy,
so changing it in C++ (e.g. updating the var in the hello function) will not take effect on the var in MaixPy.
    So we add const for this var to avoid this mistake.
    **value** **{
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9}** **readonly** True > C++ defination code: > ```cpp > const std::vector list_var { > 0, 1, 2, 3, 4, 5, 6, 7, 8, 9} > ``` ### test\\_var Example module variable test_var item description **attention** It's a copy of this variable in MaixPy, so if you change it in C++, it will not take effect in MaixPy.
And changing it in MaixPy will not take effect in C++ either!!!
    If you want to use vars shared between C++ and MaixPy, you can create a class and use its member. **value** **100** **readonly** False > C++ defination code: > ```cpp > int test_var 100 > ``` ## Function ### hello ```python def hello(name: str) > str ``` say hello to someone item description **param** **name**: direction [in], name of someone, string type
    **return** string type, content is hello + name > C++ defination code: > ```cpp > std::string hello(std::string name) > ``` ### change\\_arg\\_name ```python def change_arg_name(e: Example) > Example ``` Change arg name example item description **param** **e**: Example object
    **return** same as arg > C++ defination code: > ```cpp > example::Example *change_arg_name(example::Example *e) > ``` ### change\\_arg\\_name2 ```python def change_arg_name2(e: Example) > None ``` Change arg name example item description **param** **e**: Example object
    > C++ defination code: > ```cpp > void change_arg_name2(example::Example &e) > ``` ## Class ### Test Test class > C++ defination code: > ```cpp > class Test > ``` #### \\_\\_init\\_\\_ ```python def __init__(self) > None ``` Test constructor item description **type** func **static** False > C++ defination code: > ```cpp > Test() > ``` ### Example Example class\\nthis class will be export to MaixPy as maix.example.Example > C++ defination code: > ```cpp > class Example > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, name: str, age: int 18, pet: Kind ...) > None ``` Example constructor\\nthis constructor will be export to MaixPy as maix.example.Example.__init__ item description **type** func **param** **name**: direction [in], name of Example, string type
    **age**: direction [in], age of Example, int type, default is 18, value range is [0, 100]
**attention** to make the auto generated code work, the param Kind should use the full namespace name `example::Kind` instead of `Kind`,
    namespace `maix` can be ignored. **static** False > C++ defination code: > ```cpp > Example(std::string &name, int age 18, example::Kind pet example::KIND_NONE) > ``` #### get\\_name ```python def get_name(self) > str ``` get name of Example\\nyou can also get name by property `name`. item description **type** func **return** name of Example, string type **static** False > C++ defination code: > ```cpp > std::string get_name() > ``` #### get\\_age ```python def get_age(self) > int ``` get age of Example item description **type** func **return** age of Example, int type, value range is [0, 100] **static** False > C++ defination code: > ```cpp > int get_age() > ``` #### set\\_name ```python def set_name(self, name: str) > None ``` set name of Example item description **type** func **param** **name**: name of Example, string type
    **static** False > C++ defination code: > ```cpp > void set_name(std::string name) > ``` #### set\\_age ```python def set_age(self, age: int) > None ``` set age of Example item description **type** func **param** **age**: age of Example, int type, value range is [0, 100]
    **static** False > C++ defination code: > ```cpp > void set_age(int age) > ``` #### set\\_pet ```python def set_pet(self, pet: Kind) > None ``` Example enum member item description **type** func **attention** **static** False > C++ defination code: > ```cpp > void set_pet(example::Kind pet) > ``` #### get\\_pet ```python def get_pet(self) > Kind ``` Example enum member item description **type** func **static** False > C++ defination code: > ```cpp > example::Kind get_pet() > ``` #### get\\_list ```python def get_list(self, in: list[int]) > list[int] ``` get list example item description **type** func **param** **in**: direction [in], input list, items are int type.
    In MaixPy, you can pass list or tuple to this API
    **return** list, items are int type, content is [1, 2, 3] + in. Alloc item, del in MaixPy will auto free memory. **static** False > C++ defination code: > ```cpp > std::vector *get_list(std::vector in) > ``` #### get\\_dict ```python def get_dict(self, in: dict[str, int]) > dict[str, int] ``` Example dict API item description **type** func **param** **in**: direction [in], input dict, key is string type, value is int type.
    In MaixPy, you can pass `dict` to this API
    **return** dict, key is string type, value is int type, content is {\"a\": 1} + in
    In MaixPy, return type is `dict` object **static** False > C++ defination code: > ```cpp > std::map get_dict(std::map &in) > ``` #### hello ```python def hello(name: str) > str ``` say hello to someone item description **type** func **param** **name**: name of someone, string type
    **return** string type, content is Example::hello_str + name **static** True > C++ defination code: > ```cpp > static std::string hello(std::string name) > ``` #### hello\\_bytes ```python def hello_bytes(*args, **kwargs) ``` param is bytes example item description **type** func **param** **bytes**: bytes type param
    **return** bytes type, return value is bytes changed value **static** True > C++ defination code: > ```cpp > static Bytes *hello_bytes(Bytes &bytes) > ``` #### callback ```python def callback(cb: typing.Callable[[int, int], int]) > int ``` Callback example item description **type** func **param** **cb**: callback function, param is two int type, return is int type
    **return** int type, return value is cb's return value. **static** True > C++ defination code: > ```cpp > static int callback(std::function cb) > ``` #### callback2 ```python def callback2(cb: typing.Callable[[list[int], int], int]) > int ``` Callback example item description **type** func **param** **cb**: callback function, param is a int list type and int type, return is int type
    **return** int type, return value is cb's return value. **static** True > C++ defination code: > ```cpp > static int callback2(std::function, int)> cb) > ``` #### hello\\_dict ```python def hello_dict(dict: dict[str, int]) > dict[str, int] ``` Dict param example item description **type** func **param** **dict**: dict type param, key is string type, value is int type
    **static** True > C++ defination code: > ```cpp > static std::map *hello_dict(std::map *dict) > ``` #### name name member of Example item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::string name > ``` #### age age member of Example, value range should be [0, 100] item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int age > ``` #### hello\\_str hello_str member of Example, default value is \\\"hello \\\" item description **type** var **static** True **readonly** False > C++ defination code: > ```cpp > static std::string hello_str > ``` #### var1 Example module readonly variable item description **type** var **static** False **readonly** True > C++ defination code: > ```cpp > const std::string var1 \"Example.var1\" > ``` #### var2 Example module readonly variable item description **type** var **static** False **readonly** True > C++ defination code: > ```cpp > std::string var2 \"Example.var2\" > ``` #### dict\\_test ```python def dict_test() > dict[str, Test] ``` dict_test, return dict type, and element is pointer type(alloc in C++).\\nHere when the returned Tensor object will auto delete by Python GC. item description **type** func **static** True > C++ defination code: > ```cpp > static std::map *dict_test() > ```"},"/maixpy/api/maix/time.html":{"title":"maix.time","content":" title: maix.time maix.time module > You can use `maix.time` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ### time ```python def time() > float ``` Get current time in s item description **return** current time in s, double type **attention** If board have no RTC battery, when bootup and connect to network,
the system will automatically sync the time via NTP, which will cause time() to change greatly,
e.g. before NTP: 10(s), after: 1718590639.5149617(s).
    If you want to calculate time interval, please use ticks_s(). > C++ defination code: > ```cpp > double time() > ``` ### time\\_ms ```python def time_ms() > int ``` Get current time in ms item description **return** current time in ms, uint64_t type **attention** If board have no RTC battery, when bootup and connect to network,
the system will automatically sync the time via NTP, which will cause time() to change greatly,
e.g. before NTP: 10000(ms), after: 1718590639000(ms)
    If you want to calculate time interval, please use ticks_ms(). > C++ defination code: > ```cpp > uint64_t time_ms() > ``` ### time\\_s ```python def time_s() > int ``` Get current time in s item description **return** current time in s, uint64_t type **attention** If board have no RTC battery, when bootup and connect to network,
the system will automatically sync the time via NTP, which will cause time() to change greatly,
    e.g. before NTP: 10(s), after: 1718590639(s) > C++ defination code: > ```cpp > uint64_t time_s() > ``` ### time\\_us ```python def time_us() > int ``` Get current time in us item description **return** current time in us, uint64_t type **attention** If board have no RTC battery, when bootup and connect to network,
the system will automatically sync the time via NTP, which will cause time() to change greatly,
e.g. before NTP: 10000000(us), after: 1718590639000000(us)
    If you want to calculate time interval, please use ticks_us(). > C++ defination code: > ```cpp > uint64_t time_us() > ``` ### time\\_diff ```python def time_diff(last: float, now: float 1) > float ``` Calculate time difference in s. item description **param** **last**: last time
**now**: current time, can be -1 to use the current time
**return** time difference **attention** If the board has no RTC battery, when it boots up and connects to the network,
the system will automatically sync the time via NTP, which will cause time() to change greatly and lead to a huge difference value.
e.g. before NTP: 1(s), after: 1718590500(s)
    If you want to calculate time interval, please use ticks_diff(). > C++ defination code: > ```cpp > double time_diff(double last, double now 1) > ``` ### ticks\\_s ```python def ticks_s() > float ``` Get current time in s since bootup item description **return** current time in s, double type > C++ defination code: > ```cpp > double ticks_s() > ``` ### ticks\\_ms ```python def ticks_ms() > int ``` Get current time in ms since bootup item description **return** current time in ms, uint64_t type > C++ defination code: > ```cpp > uint64_t ticks_ms() > ``` ### ticks\\_us ```python def ticks_us() > int ``` Get current time in us since bootup item description **return** current time in us, uint64_t type > C++ defination code: > ```cpp > uint64_t ticks_us() > ``` ### ticks\\_diff ```python def ticks_diff(last: float, now: float 1) > float ``` Calculate time difference in s. item description **param** **last**: last time
**now**: current time, can be -1 to use the current time
    **return** time difference > C++ defination code: > ```cpp > double ticks_diff(double last, double now 1) > ``` ### sleep Sleep seconds item description **param** **s**: seconds, double type
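A small sketch that measures an interval with ticks_s()/ticks_diff(), as the attention notes above recommend instead of time(), which can jump after an NTP sync (the 500 ms sleep stands in for real work):

```python
from maix import time

t0 = time.ticks_s()                            # seconds since bootup, unaffected by NTP
time.sleep_ms(500)                             # the work being timed
print('elapsed:', time.ticks_diff(t0), 's')    # now defaults to -1, i.e. use the current time
```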
    > C++ defination code: > ```cpp > void sleep(double s) > ``` ### sleep\\_ms Sleep milliseconds item description **param** **ms**: milliseconds, uint64_t type
    > C++ defination code: > ```cpp > void sleep_ms(uint64_t ms) > ``` ### sleep\\_us Sleep microseconds item description **param** **us**: microseconds, uint64_t type
    > C++ defination code: > ```cpp > void sleep_us(uint64_t us) > ``` ### fps ```python def fps() > float ``` Calculate FPS since last call this method.\\nAttention, this method is not multi thread safe, only call this method in one threads.\\nIf you want to use in multi threads, please use time.FPS class.\\nFPS is average value of recent n(buff_len) times, and you can call fps_set_buff_len(10) to change buffer length, default is 20.\\nMultiple invoke this function will calculate fps between two invoke, and you can also call fps_start() fisrt to manually assign fps calulate start point. item description **return** float type, current fps since last call this method > C++ defination code: > ```cpp > float fps() > ``` ### fps\\_start ```python def fps_start() > None ``` Manually set fps calculation start point, then you can call fps() function to calculate fps between fps_start() and fps(). > C++ defination code: > ```cpp > void fps_start() > ``` ### fps\\_set\\_buff\\_len ```python def fps_set_buff_len(len: int) > None ``` Set fps method buffer length, by default the buffer length is 10. item description **param** **len**: Buffer length to store recent fps value.
    > C++ defination code: > ```cpp > void fps_set_buff_len(int len) > ``` ### now ```python def now() > DateTime ``` Get current UTC date and time item description **return** current date and time, DateTime type > C++ defination code: > ```cpp > time::DateTime *now() > ``` ### localtime ```python def localtime() > DateTime ``` Get local time item description **return** local time, DateTime type > C++ defination code: > ```cpp > time::DateTime *localtime() > ``` ### strptime ```python def strptime(str: str, format: str) > DateTime ``` DateTime from string item description **param** **str**: date time string
    **format**: date time format
    **return** DateTime > C++ defination code: > ```cpp > time::DateTime *strptime(const std::string &str, const std::string &format) > ``` ### gmtime ```python def gmtime(timestamp: float) > DateTime ``` timestamp to DateTime(time zone is UTC (value 0)) item description **param** **timestamp**: double timestamp
**return** DateTime > C++ defination code: > ```cpp > time::DateTime *gmtime(double timestamp) > ``` ### timezone ```python def timezone(timezone: str '') > str ``` Set or get timezone item description **param** **timezone**: string type, can be empty and default to empty, if empty, only return current timezone, a \"region/city\" string, e.g. Asia/Shanghai, Etc/UTC, you can get all by list_timezones function.
    **return** string type, return current timezone setting. **attention** when set new timezone, time setting not take effect in this process for some API, so you need to restart program. > C++ defination code: > ```cpp > std::string timezone(const std::string &timezone \"\") > ``` ### timezone (overload 1) Set or get timezone item description **param** **region**: string type, which region to set, can be empty means only get current, default empty.
    **city**: string type, which city to set, can be empty means only get current, default empty.
    **return** list type, return current timezone setting, first is region, second is city. **attention** when set new timezone, time setting not take effect in this process for some API, so you need to restart program. > C++ defination code: > ```cpp > std::vector timezone2(const std::string ®ion \"\", const std::string &city \"\") > ``` ### list\\_timezones ```python def list_timezones() > dict[str, list[str]] ``` List all timezone info item description **return** A dict with key are regions, and value are region's cities. > C++ defination code: > ```cpp > std::map> list_timezones() > ``` ### ntp\\_timetuple ```python def ntp_timetuple(host: str, port: int 1, retry: int 3, timeout_ms: int 0) > list[int] ``` Retrieves time from an NTP server\\nThis function fetches the current time from the specified NTP server and port,\\nreturning a tuple containing the time details. item description **param** **host**: The hostname or IP address of the NTP server.
**port**: The port number of the NTP server. Use -1 for the default port 123.
    **retry**: The number of retry attempts. Must be at least 1.
**timeout_ms**: The timeout duration in milliseconds. Must be non-negative.
    **return** A list of 6 elements: [year, month, day, hour, minute, second] > C++ defination code: > ```cpp > std::vector ntp_timetuple(std::string host, int port 1, uint8_t retry 3, int timeout_ms 0) > ``` ### ntp\\_timetuple\\_with\\_config ```python def ntp_timetuple_with_config(path: str) > list[int] ``` Retrieves time from an NTP server using a configuration file\\nThis function reads the configuration from a YAML file to fetch the current time\\nfrom a list of specified NTP servers, returning a tuple containing the time details. item description **param** **path**: The path to the YAML configuration file, which should include:
    Config:
    retry: Number of retry attempts (must be at least 1)
total_timeout_ms: Total timeout duration in milliseconds (must be non-negative)
    NtpServers:
    host: Hostname or IP address of the NTP server
    port: Port number of the NTP server (use 123 for default)
    Example YAML configuration:
    Config:
    retry: 3
    total_timeout_ms: 10000
    NtpServers:
    host: \"pool.ntp.org\"
    port: 123
    host: \"time.nist.gov\"
    port: 123
    host: \"time.windows.com\"
    port: 123
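Assuming the YAML above is saved to a file, fetching the time with it might look like the sketch below (the file path is hypothetical):

```python
from maix import time

# hypothetical path to a YAML file with the Config / NtpServers layout shown above
tm = time.ntp_timetuple_with_config('/root/ntp_config.yaml')
if tm:
    year, month, day, hour, minute, second = tm
    print('NTP time: %04d-%02d-%02d %02d:%02d:%02d' % (year, month, day, hour, minute, second))
```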
    **return** A list of 6 elements: [year, month, day, hour, minute, second] > C++ defination code: > ```cpp > std::vector ntp_timetuple_with_config(std::string path) > ``` ### ntp\\_sync\\_sys\\_time ```python def ntp_sync_sys_time(host: str, port: int 1, retry: int 3, timeout_ms: int 0) > list[int] ``` Retrieves time from an NTP server and synchronizes the system time\\nThis function fetches the current time from the specified NTP server and port,\\nthen synchronizes the system time with the retrieved time. item description **param** **host**: The hostname or IP address of the NTP server.
    **port**: The port number of the NTP server. Use 123 for the default port.
    **retry**: The number of retry attempts. Must be at least 1.
**timeout_ms**: The timeout duration in milliseconds. Must be non-negative.
    **return** A list of 6 elements: [year, month, day, hour, minute, second] > C++ defination code: > ```cpp > std::vector ntp_sync_sys_time(std::string host, int port 1, uint8_t retry 3, int timeout_ms 0) > ``` ### ntp\\_sync\\_sys\\_time\\_with\\_config ```python def ntp_sync_sys_time_with_config(path: str) > list[int] ``` Retrieves time from an NTP server using a configuration file and synchronizes the system time\\nThis function reads the configuration from a YAML file to fetch the current time\\nfrom a list of specified NTP servers, then synchronizes the system time with the retrieved time. item description **param** **path**: The path to the YAML configuration file, which should include:
    Config:
    retry: Number of retry attempts (must be at least 1)
total_timeout_ms: Total timeout duration in milliseconds (must be non-negative)
    NtpServers:
    host: Hostname or IP address of the NTP server
    port: Port number of the NTP server (use 123 for default)
    Example YAML configuration:
    Config:
    retry: 3
    total_timeout_ms: 10000
    NtpServers:
    host: \"pool.ntp.org\"
    port: 123
    host: \"time.nist.gov\"
    port: 123
    host: \"time.windows.com\"
    port: 123
    **return** A vector of integers containing the time details: [year, month, day, hour, minute, second] > C++ defination code: > ```cpp > std::vector ntp_sync_sys_time_with_config(std::string path) > ``` ## Class ### FPS FPS class to use average filter to calculate FPS. > C++ defination code: > ```cpp > class FPS > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, buff_len: int 20) > None ``` FPS class constructor item description **type** func **param** **buff_len**: Average buffer length, default 20, that is, fps() function will return the average fps in recent buff_len times fps.
    **static** False > C++ defination code: > ```cpp > FPS(int buff_len 20) > ``` #### start ```python def start(self) > None ``` Manually set fps calculation start point, then you can call fps() function to calculate fps between start() and fps(). item description **type** func **static** False > C++ defination code: > ```cpp > void start() > ``` #### fps ```python def fps(self) > float ``` The same as end function. item description **type** func **return** float type, current fps since last call this method **static** False > C++ defination code: > ```cpp > float fps() > ``` #### fps (overload 1) Calculate FPS since last call this method.\\nFPS is average value of recent n(buff_len) times, and you can call fps_set_buff_len(10) to change buffer length, default is 20.\\nMultiple invoke this function will calculate fps between two invoke, and you can also call fps_start() fisrt to manually assign fps calulate start point. item description **type** func **return** float type, current fps since last call this method **static** False > C++ defination code: > ```cpp > inline float end() > ``` #### set\\_buff\\_len ```python def set_buff_len(self, len: int) > None ``` Set fps method buffer length, by default the buffer length is 10. item description **type** func **param** **len**: Buffer length to store recent fps value.
    **static** False > C++ defination code: > ```cpp > void set_buff_len(int len) > ``` ### DateTime Date and time class > C++ defination code: > ```cpp > class DateTime > ``` #### year Year item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int year > ``` #### month Month, 1~12 item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int month > ``` #### day Day item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int day > ``` #### hour Hour item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int hour > ``` #### minute Minute item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int minute > ``` #### second Second item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int second > ``` #### microsecond Microsecond item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int microsecond > ``` #### yearday Year day item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int yearday > ``` #### weekday Weekday, 0 is Monday, 6 is Sunday item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int weekday > ``` #### zone Time zone item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > float zone > ``` #### zone\\_name Time zone name item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::string zone_name > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, year: int 0, month: int 0, day: int 0, hour: int 0, minute: int 0, second: int 0, microsecond: int 0, yearday: int 0, weekday: int 0, zone: int 0) > None ``` Constructor item description **type** func **param** **year**: year
    **month**: month
    **day**: day
    **hour**: hour
    **minute**: minute
    **second**: second
    **microsecond**: microsecond
    **yearday**: year day
    **weekday**: weekday
    **zone**: time zone
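A small illustrative sketch of constructing a DateTime and using the strftime and timestamp methods documented just below; the field values are arbitrary examples:
```python
from maix import time

# Build a DateTime by hand (fields not given default to 0).
dt = time.DateTime(2024, 8, 13, 12, 30, 0)
print(dt.year, dt.month, dt.day)           # fields are plain attributes
print(dt.strftime('%Y-%m-%d %H:%M:%S'))    # format as a string
print(dt.timestamp())                      # float timestamp in seconds
```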
    **static** False > C++ defination code: > ```cpp > DateTime(int year 0, int month 0, int day 0, int hour 0, int minute 0, int second 0, int microsecond 0, int yearday 0, int weekday 0, int zone 0) > ``` #### strftime ```python def strftime(self, format: str) > str ``` Convert to string item description **type** func **return** date time string **static** False > C++ defination code: > ```cpp > std::string strftime(const std::string &format) > ``` #### timestamp ```python def timestamp(self) > float ``` Convert to float timestamp item description **type** func **return** float timestamp **static** False > C++ defination code: > ```cpp > double timestamp() > ```"},"/maixpy/api/maix/sys.html":{"title":"maix.sys","content":" title: maix.sys maix.sys module > You can use `maix.sys` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ### os\\_version ```python def os_version() > str ``` Get system version item description **return** version string, e.g. \"maixcam 2024 08 13 maixpy v4.4.20\" > C++ defination code: > ```cpp > std::string os_version() > ``` ### maixpy\\_version ```python def maixpy_version() > str ``` Get MaixPy version, if get failed will return empty string. item description **return** version string, e.g. \"4.4.21\" > C++ defination code: > ```cpp > std::string maixpy_version() > ``` ### device\\_name ```python def device_name() > str ``` Get device name item description **return** device name, e.g. \"MaixCAM\" > C++ defination code: > ```cpp > std::string device_name() > ``` ### host\\_name ```python def host_name() > str ``` Get host name item description **return** host name, e.g. \"maixcam 2f9f\" > C++ defination code: > ```cpp > std::string host_name() > ``` ### host\\_domain ```python def host_domain() > str ``` Get host domain item description **return** host domain, e.g. \"maixcam 2f9f.local\" > C++ defination code: > ```cpp > std::string host_domain() > ``` ### ip\\_address ```python def ip_address() > dict[str, str] ``` Get ip address item description **return** ip address, dict type, e.g. {\"eth0\": \"192.168.0.195\", \"wlan0\": \"192.168.0.123\", \"usb0\": \"10.47.159.1\"} > C++ defination code: > ```cpp > std::map ip_address() > ``` ### mac\\_address ```python def mac_address() > dict[str, str] ``` Get mac address item description **return** mac address, dict type, e.g. {\"eth0\": \"00:0c:29:2f:9f:00\", \"wlan0\": \"00:0c:29:2f:9f:01\", \"usb0\": \"00:0c:29:2f:9f:02\"} > C++ defination code: > ```cpp > std::map mac_address() > ``` ### device\\_key ```python def device_key() > str ``` Get device key, can be unique id of device item description **return** device key, 32 bytes hex string, e.g. \"1234567890abcdef1234567890abcdef\" > C++ defination code: > ```cpp > std::string device_key() > ``` ### memory\\_info ```python def memory_info() > dict[str, int] ``` Get memory info item description **return** memory info, dict type, e.g. {\"total\": 1024, \"used\": 512, \"hw_total\": 256*1024*1024}
    total: total memory size in Byte.
    used: used memory size in Byte.
hw_total: total memory size in Byte of hardware; total < hw_total because the
    OS kernel may reserve some memory for some hardware like camera, npu, display etc. > C++ defination code: > ```cpp > std::map memory_info() > ``` ### bytes\\_to\\_human ```python def bytes_to_human(bytes: int, precision: int 2, base: int 1024, unit: str 'B', sep: str ' ') > str ``` Bytes to human readable string item description **param** **bytes:**: bytes size,e.g. 1234B 1234/1024 1.205 KB
    **precision:**: decimal precision, default 2
    **base:**: base number, default 1024
    **unit:**: unit string, e.g. \"B\"
    **sep:**: separator string, e.g. \" \"
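As a quick illustration of memory_info and bytes_to_human above, a sketch that prints memory usage in a human-readable form (here `sys` is `maix.sys`, not the standard library module):
```python
from maix import sys  # maix.sys, shadows the standard library sys in this script

info = sys.memory_info()            # {'total': ..., 'used': ..., 'hw_total': ...} in bytes
print('used :', sys.bytes_to_human(info['used']))
print('total:', sys.bytes_to_human(info['total']))
print('hw   :', sys.bytes_to_human(info['hw_total']))
```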
    **return** human readable string, e.g. \"1.21 KB\" > C++ defination code: > ```cpp > std::string bytes_to_human(unsigned long long bytes, int precision 2, int base 1024, const std::string &unit \"B\", const std::string &sep \" \") > ``` ### cpu\\_freq ```python def cpu_freq() > dict[str, int] ``` Get CPU frequency item description **return** CPU frequency, dict type, e.g. {\"cpu0\": 1000000000, \"cpu1\": 1000000000} > C++ defination code: > ```cpp > std::map cpu_freq() > ``` ### cpu\\_temp ```python def cpu_temp() > dict[str, float] ``` Get CPU temperature item description **return** CPU temperature, unit dgree, dict type, e.g. {\"cpu\": 50.0, \"cpu0\": 50, \"cpu1\": 50} > C++ defination code: > ```cpp > std::map cpu_temp() > ``` ### cpu\\_usage ```python def cpu_usage() > dict[str, float] ``` Get CPU usage item description **return** CPU usage, dict type, e.g. {\"cpu\": 50.0, \"cpu0\": 50, \"cpu1\": 50} > C++ defination code: > ```cpp > std::map cpu_usage() > ``` ### npu\\_freq ```python def npu_freq() > dict[str, int] ``` Get NPU frequency item description **return** NPU frequency, dict type, e.g. {\"npu0\": 500000000} > C++ defination code: > ```cpp > std::map npu_freq() > ``` ### disk\\_usage ```python def disk_usage(path: str '/') > dict[str, int] ``` Get disk usage item description **param** **path:**: disk path, default \"/\"
**return** disk usage, dict type, e.g. {\"total\": 1024, \"used\": 512} > C++ defination code: > ```cpp > std::map disk_usage(const std::string &path \"/\") > ``` ### disk\\_partitions ```python def disk_partitions(only_disk: bool True) > list[dict[str, str]] ``` Get disk partition and mount point info item description **param** **only_disk**: only return real disks; tmpfs, sysfs, etc. are not returned. Default true.
    **return** disk partition and mount point info, list type, e.g. [{\"device\": \"/dev/mmcblk0p1\", \"mountpoint\": \"/mnt/sdcard\", \"fstype\": \"vfat\"}] > C++ defination code: > ```cpp > std::vector> disk_partitions(bool only_disk true) > ``` ### register\\_default\\_signal\\_handle register default signal handle > C++ defination code: > ```cpp > void register_default_signal_handle() > ``` ### poweroff ```python def poweroff() > None ``` Power off device > C++ defination code: > ```cpp > void poweroff() > ``` ### reboot ```python def reboot() > None ``` Power off device and power on > C++ defination code: > ```cpp > void reboot() > ``` ## Class"},"/maixpy/api/maix/network/wifi.html":{"title":"maix.network.wifi","content":" title: maix.network.wifi maix.network.wifi module > You can use `maix.network.wifi` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ### list\\_devices ```python def list_devices() > list[str] ``` List WiFi interfaces item description **return** WiFi interface list, string type > C++ defination code: > ```cpp > std::vector list_devices() > ``` ## Class ### AP\\_Info WiFi AP info > C++ defination code: > ```cpp > class AP_Info > ``` #### ssid WiFi AP info SSID item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector ssid > ``` #### bssid WiFi AP info BSSID item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::string bssid > ``` #### security WiFi AP info security item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::string security > ``` #### channel WiFi AP info channel item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int channel > ``` #### frequency WiFi AP info frequency item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int frequency > ``` #### rssi WiFi AP info rssi item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int rssi > ``` #### ssid\\_str ```python def ssid_str(self) > str ``` WiFi AP info ssid_str item description **type** func **static** False > C++ defination code: > ```cpp > std::string ssid_str() > ``` ### Wifi Wifi class > C++ defination code: > ```cpp > class Wifi > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, iface: str 'wlan0') > None ``` Wifi class item description **type** func **param** **iface**: wifi interface name, default is wlan0
    **static** False > C++ defination code: > ```cpp > Wifi(std::string iface \"wlan0\") > ``` #### get\\_ip ```python def get_ip(self) > str ``` Get current WiFi ip item description **type** func **return** ip, string type, if network not connected, will return empty string. **static** False > C++ defination code: > ```cpp > std::string get_ip() > ``` #### get\\_mac ```python def get_mac(self) > str ``` Get current WiFi MAC address item description **type** func **return** ip, string type. **static** False > C++ defination code: > ```cpp > std::string get_mac() > ``` #### get\\_ssid ```python def get_ssid(self, from_cache: bool True) > str ``` Get current WiFi SSID item description **type** func **param** **from_cache**: if true, will not read config from file, direct use ssid in cache.
Note: the first call to this method will automatically read the config from file, and calling the connect method will update the cache.
    **return** SSID, string type. **static** False > C++ defination code: > ```cpp > std::string get_ssid(bool from_cache true) > ``` #### get\\_gateway ```python def get_gateway(self) > str ``` Get current WiFi ip item description **type** func **return** ip, string type, if network not connected, will return empty string. **static** False > C++ defination code: > ```cpp > std::string get_gateway() > ``` #### start\\_scan ```python def start_scan(self) > maix.err.Err ``` WiFi start scan AP info around in background. item description **type** func **return** If success, return err.Err.ERR_NONE, else means failed. **static** False > C++ defination code: > ```cpp > err::Err start_scan() > ``` #### get\\_scan\\_result ```python def get_scan_result(self) > list[AP_Info] ``` Get WiFi scan AP info. item description **type** func **return** wifi.AP_Info list. **static** False > C++ defination code: > ```cpp > std::vector get_scan_result() > ``` #### stop\\_scan ```python def stop_scan(self) > None ``` Stop WiFi scan AP info. item description **type** func **static** False > C++ defination code: > ```cpp > void stop_scan() > ``` #### connect ```python def connect(self, ssid: str, password: str, wait: bool True, timeout: int 60) > maix.err.Err ``` Connect to WiFi AP. item description **type** func **param** **ssid**: SSID of AP
    **password**: password of AP, if no password, leave it empty.
    **wait**: wait for got IP or failed or timeout.
**timeout**: connect timeout interval, unit: second.
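A scan-and-connect sketch using the Wifi methods in this section; the SSID and password are placeholders, and the short sleep just gives the background scan time to find APs:
```python
import time                      # standard library sleep
from maix import err
from maix.network import wifi

w = wifi.Wifi('wlan0')
w.start_scan()
time.sleep(3)                    # let the background scan collect results
for ap in w.get_scan_result():
    print(ap.ssid_str(), ap.rssi, ap.channel)
w.stop_scan()

# Placeholders: replace with your own credentials.
e = w.connect('my_ssid', 'my_password', True, 60)
if e == err.Err.ERR_NONE and w.is_connected():
    print('connected, ip:', w.get_ip())
```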
    **return** If success, return err.Err.ERR_NONE, else means failed. **static** False > C++ defination code: > ```cpp > err::Err connect(const std::string &ssid, const std::string &password, bool wait true, int timeout 60) > ``` #### disconnect ```python def disconnect(self) > maix.err.Err ``` Disconnect from WiFi AP. item description **type** func **return** If success, return err.Err.ERR_NONE, else means failed. **static** False > C++ defination code: > ```cpp > err::Err disconnect() > ``` #### is\\_connected ```python def is_connected(self) > bool ``` See if WiFi is connected to AP. item description **type** func **return** If connected return true, else false. **static** False > C++ defination code: > ```cpp > bool is_connected() > ``` #### start\\_ap ```python def start_ap(self, ssid: str, password: str, mode: str 'g', channel: int 0, ip: str '192.168.66.1', netmask: str '255.255.255.0', hidden: bool False) > maix.err.Err ``` Start WiFi AP. item description **type** func **param** **ssid**: SSID of AP.
    **password**: password of AP, if no password, leave it empty.
**ip**: ip address of hostap; the default empty string means an auto-generated one according to hardware.
**netmask**: netmask, default 255.255.255.0; currently only 255.255.255.0 is supported.
**mode**: WiFi mode, default g (IEEE 802.11g, 2.4 GHz); a means IEEE 802.11a (5 GHz), b means IEEE 802.11b (2.4 GHz).
**channel**: WiFi channel number, 0 means auto select. MaixCAM does not support auto selection and will default to channel 1.
    **hidden**: hidden SSID or not.
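A corresponding AP-mode sketch with a placeholder SSID and password; other parameters keep the defaults documented above, and is_ap_mode/stop_ap are documented just below:
```python
from maix import err
from maix.network import wifi

w = wifi.Wifi('wlan0')
# Placeholders: choose your own SSID and a password of at least 8 characters.
e = w.start_ap('maixcam-ap', '12345678')
if e == err.Err.ERR_NONE:
    print('AP mode:', w.is_ap_mode())
    # ... later, stop the AP:
    w.stop_ap()
```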
    **return** If success, return err.Err.ERR_NONE, else means failed. **static** False > C++ defination code: > ```cpp > err::Err start_ap(const std::string &ssid, const std::string &password, > std::string mode \"g\", int channel 0, > const std::string &ip \"192.168.66.1\", const std::string &netmask \"255.255.255.0\", > bool hidden false) > ``` #### stop\\_ap ```python def stop_ap(self) > maix.err.Err ``` Stop WiFi AP. item description **type** func **return** If success, return err.Err.ERR_NONE, else means failed. **static** False > C++ defination code: > ```cpp > err::Err stop_ap() > ``` #### is\\_ap\\_mode ```python def is_ap_mode(self) > bool ``` Whether WiFi is AP mode item description **type** func **return** True if AP mode now, or False. **static** False > C++ defination code: > ```cpp > bool is_ap_mode() > ```"},"/maixpy/api/maix/rtmp.html":{"title":"maix.rtmp","content":" title: maix.rtmp maix.rtmp module > You can use `maix.rtmp` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ### TagType Video type item describe **values** **TAG_NONE**:
    **TAG_VIDEO**:
    **TAG_AUDIO**:
    **TAG_SCRIPT**:
> C++ defination code: > ```cpp > enum TagType > { > TAG_NONE, > TAG_VIDEO, > TAG_AUDIO, > TAG_SCRIPT, > } > ``` ## Variable ## Function ## Class ### Rtmp Rtmp class > C++ defination code: > ```cpp > class Rtmp > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, host: str 'localhost', port: int 1935, app: str '', stream: str '', bitrate: int 1000000) > None ``` Construct a new Rtmp object item description **type** func **note** Rtmp url : rtmp://host:port/app/stream
    example:
    r Rtmp(\"localhost\", 1935, \"live\", \"stream\")
    means rtmp url is rtmp://localhost:1935/live/stream **param** **host**: rtmp ip
    **port**: rtmp port, default is 1935.
    **app**: rtmp app name
    **stream**: rtmp stream name
    **bitrate**: rtmp bitrate, default is 1000 * 1000
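A push-stream sketch based on the constructor above and the bind_camera/start/stop methods documented below; the server address is a placeholder and the no-argument camera.Camera() constructor is an assumption here:
```python
from maix import camera, rtmp

cam = camera.Camera()                      # assumed default-constructed camera
# Placeholder server: pushes to rtmp://192.168.0.100:1935/live/stream
r = rtmp.Rtmp('192.168.0.100', 1935, 'live', 'stream', 1000000)
r.bind_camera(cam)
r.start()                                  # push camera images until stop()
# ... stream for a while ...
r.stop()
```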
    **static** False > C++ defination code: > ```cpp > Rtmp(std::string host \"localhost\", int port 1935, std::string app std::string(), std::string stream std::string(), int bitrate 1000 * 1000) > ``` #### push\\_video ```python def push_video(self) > int ``` Get bitrate item description **type** func **return** bitrate **static** False > C++ defination code: > ```cpp > int bitrate() > ``` #### bind\\_camera ```python def bind_camera(self, cam: maix.camera.Camera) > maix.err.Err ``` Bind camera item description **type** func **note** If the cam object is bound, the cam object cannot be used elsewhere. **param** **cam**: camera object
**return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err bind_camera(camera::Camera *cam) > ``` #### get\\_camera ```python def get_camera(self) > maix.camera.Camera ``` If you bind a camera, return the camera object. item description **type** func **return** Camera object **static** False > C++ defination code: > ```cpp > camera::Camera *get_camera() > ``` #### start ```python def start(self, path: str '') > maix.err.Err ``` Start push stream item description **type** func **note** only supports flv files now **param** **path**: File path. If you pass a file path, the file is pushed cyclically; otherwise, if you bound a camera, the camera image is pushed.
    **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err start(std::string path std::string()) > ``` #### stop ```python def stop(self) > maix.err.Err ``` Stop push stream item description **type** func **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err stop() > ``` #### get\\_path ```python def get_path(self) > str ``` Get the file path of the push stream item description **type** func **return** file path **static** False > C++ defination code: > ```cpp > std::string get_path() > ``` #### get\\_path (overload 1) Check whether push streaming has started item description **type** func **return** If rtmp thread is running, returns true **static** False > C++ defination code: > ```cpp > bool is_started() > ```"},"/maixpy/api/maix/video.html":{"title":"maix.video","content":" title: maix.video maix.video module > You can use `maix.video` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ### VideoType Video type item describe **values** **VIDEO_NONE**: format invalid
    **VIDEO_ENC_H265_CBR**: Deprecated
    **VIDEO_ENC_MP4_CBR**: Deprecated
    **VIDEO_DEC_H265_CBR**: Deprecated
    **VIDEO_DEC_MP4_CBR**: Deprecated
    **VIDEO_H264_CBR**: Deprecated
    **VIDEO_H265_CBR**: Deprecated
    **VIDEO_H264_CBR_MP4**: Deprecated
    **VIDEO_H265_CBR_MP4**: Deprecated
    **VIDEO_H264**:
    **VIDEO_H264_MP4**:
    **VIDEO_H264_FLV**:
    **VIDEO_H265**:
    **VIDEO_H265_MP4**:
    > C++ defination code: > ```cpp > enum VideoType > { > VIDEO_NONE 0, // format invalid > VIDEO_ENC_H265_CBR, // Deprecated > VIDEO_ENC_MP4_CBR, // Deprecated > VIDEO_DEC_H265_CBR, // Deprecated > VIDEO_DEC_MP4_CBR, // Deprecated > VIDEO_H264_CBR, // Deprecated > VIDEO_H265_CBR, // Deprecated > VIDEO_H264_CBR_MP4, // Deprecated > VIDEO_H265_CBR_MP4, // Deprecated > > VIDEO_H264, > VIDEO_H264_MP4, > VIDEO_H264_FLV, > VIDEO_H265, > VIDEO_H265_MP4, > } > ``` ### MediaType Video type item describe **values** **MEDIA_TYPE_UNKNOWN**: Represents an unknown media type, which is usually treated as AVMEDIA_TYPE_DATA.
    **MEDIA_TYPE_VIDEO**: Represents a video stream, such as video content encoded in H.264, MPEG 4, etc.
    **MEDIA_TYPE_AUDIO**: Represents an audio stream, such as audio content encoded in AAC, MP3, etc.
    **MEDIA_TYPE_DATA**: Represents opaque data streams that are usually continuous. This type of stream is not necessarily audio or video and may be used for other data purposes.
    **MEDIA_TYPE_SUBTITLE**: Represents a subtitle stream used for displaying text or subtitle information, such as SRT, ASS, etc.
    **MEDIA_TYPE_ATTACHMENT**: Represents attachment streams that are usually sparse. Attachment streams can include images, fonts, or other files that need to be bundled with the media.
    **MEDIA_TYPE_NB**: Represents the number of media types (count) and indicates the total number of media types defined in this enumeration. It is not a media type itself but is used for counting enumeration items.
    > C++ defination code: > ```cpp > enum MediaType > { > MEDIA_TYPE_UNKNOWN 1, // Represents an unknown media type, which is usually treated as AVMEDIA_TYPE_DATA. > MEDIA_TYPE_VIDEO, // Represents a video stream, such as video content encoded in H.264, MPEG 4, etc. > MEDIA_TYPE_AUDIO, // Represents an audio stream, such as audio content encoded in AAC, MP3, etc. > MEDIA_TYPE_DATA, // Represents opaque data streams that are usually continuous. This type of stream is not necessarily audio or video and may be used for other data purposes. > MEDIA_TYPE_SUBTITLE, // Represents a subtitle stream used for displaying text or subtitle information, such as SRT, ASS, etc. > MEDIA_TYPE_ATTACHMENT, // Represents attachment streams that are usually sparse. Attachment streams can include images, fonts, or other files that need to be bundled with the media. > MEDIA_TYPE_NB // Represents the number of media types (count) and indicates the total number of media types defined in this enumeration. It is not a media type itself but is used for counting enumeration items. > } > ``` ## Variable ## Function ### timebase\\_to\\_us ```python def timebase_to_us(timebase: list[int], value: int) > float ``` Convert a value in timebase units to microseconds. value * 1000000 / (timebase[1] / timebase[0]) item description **param** **timebse**: Time base, used as the unit for calculating playback time. It must be an array containing two parameters,
    in the format [num, den], where the first parameter is the numerator of the time base, and the second parameter is the denominator of the time base.
    **value**: Input value
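For example, with a time base of [1, 30] (one tick = 1/30 s), 90 ticks correspond to 3 seconds; timebase_to_ms, documented just below, returns the same result in milliseconds:
```python
from maix import video

us = video.timebase_to_us([1, 30], 90)   # 90 * 1000000 / (30 / 1) = 3000000.0
ms = video.timebase_to_ms([1, 30], 90)   # 3000.0
print(us, ms)
```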
**return** Return the result in microseconds. > C++ defination code: > ```cpp > double timebase_to_us(std::vector timebase, uint64_t value) > ``` ### timebase\\_to\\_ms ```python def timebase_to_ms(timebase: list[int], value: int) > float ``` Convert a value in timebase units to milliseconds. item description **param** **timebase**: Time base, used as the unit for calculating playback time. It must be an array containing two parameters,
    in the format [num, den], where the first parameter is the numerator of the time base, and the second parameter is the denominator of the time base.
    **value**: Input value
    **return** Return the result in milliseconds. > C++ defination code: > ```cpp > double timebase_to_ms(std::vector timebase, uint64_t value) > ``` ## Class ### Context Context class > C++ defination code: > ```cpp > class Context > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, media_type: MediaType, timebase: list[int]) > None ``` Construct a new Context object item description **type** func **param** **media_type**: enable capture, if true, you can use capture() function to get an image object
    **timebase**: Time base, used as the unit for calculating playback time. It must be an array containing two parameters,
    in the format [num, den], where the first parameter is the numerator of the time base, and the second parameter is the denominator of the time base.
    **static** False > C++ defination code: > ```cpp > Context(video::MediaType media_type, std::vector timebase) > ``` #### audio\\_sample\\_rate ```python def audio_sample_rate(self) > int ``` Get sample rate of audio (only valid in the context of audio) item description **type** func **return** sample rate **static** False > C++ defination code: > ```cpp > int audio_sample_rate() > ``` #### audio\\_sample\\_rate (overload 1) Get sample rate of audio (only valid in the context of audio) item description **type** func **return** sample rate **static** False > C++ defination code: > ```cpp > int audio_sample_rate() > ``` #### audio\\_channels ```python def audio_channels(self) > int ``` Get channels of audio (only valid in the context of audio) item description **type** func **return** channels **static** False > C++ defination code: > ```cpp > int audio_channels() > ``` #### audio\\_channels (overload 1) Get channels of audio (only valid in the context of audio) item description **type** func **return** channels **static** False > C++ defination code: > ```cpp > int audio_channels() > ``` #### audio\\_format ```python def audio_format(self) > maix.audio.Format ``` Get format of audio (only valid in the context of audio) item description **type** func **return** audio format. @see audio::Format **static** False > C++ defination code: > ```cpp > audio::Format audio_format() > ``` #### audio\\_format (overload 1) Get format of audio (only valid in the context of audio) item description **type** func **return** audio format. @see audio::Format **static** False > C++ defination code: > ```cpp > audio::Format audio_format() > ``` #### set\\_pcm ```python def set_pcm(self, data: maix.Bytes(bytes), duration: int 0, pts: int 0, copy: bool True) > maix.err.Err ``` Set pcm data (only valid in the context of audio) item description **type** func **param** **duration**: Duration of the current pcm. unit: timebase
    **pts**: The start time of this pcm playback. If it is 0, it means this parameter is not supported. unit: timebase
    **return** err::Err **static** False > C++ defination code: > ```cpp > err::Err set_pcm(maix::Bytes *data, int duration 0, uint64_t pts 0, bool copy true) > ``` #### get\\_pcm ```python def get_pcm(*args, **kwargs) ``` Get pcm data (only valid in the context of audio) item description **type** func **attention** Note that if you call this interface, you are responsible for releasing the memory of the data, and this interface cannot be called again. **return** Bytes **static** False > C++ defination code: > ```cpp > Bytes *get_pcm() > ``` #### image ```python def image(self) > maix.image.Image ``` Retrieve the image data to be played. item description **type** func **attention** Note that if you call this interface, you are responsible for releasing the memory of the image, and this interface cannot be called again. **static** False > C++ defination code: > ```cpp > image::Image *image() > ``` #### media\\_type ```python def media_type(self) > MediaType ``` Get the media type to determine whether it is video, audio, or another media type. item description **type** func **static** False > C++ defination code: > ```cpp > video::MediaType media_type() > ``` #### pts ```python def pts(self) > int ``` Get the start time of the current playback., in units of time base. item description **type** func **static** False > C++ defination code: > ```cpp > uint64_t pts() > ``` #### last\\_pts ```python def last_pts(self) > int ``` Get the start time of the previous playback, in units of time base. item description **type** func **static** False > C++ defination code: > ```cpp > uint64_t last_pts() > ``` #### timebase ```python def timebase(self) > list[int] ``` Get the time base. item description **type** func **static** False > C++ defination code: > ```cpp > std::vector timebase() > ``` #### duration ```python def duration(self) > int ``` Duration of the current frame. unit: timebase item description **type** func **static** False > C++ defination code: > ```cpp > int duration() > ``` #### duration\\_us ```python def duration_us(self) > int ``` Duration of the current frame. unit: us item description **type** func **static** False > C++ defination code: > ```cpp > uint64_t duration_us() > ``` ### Frame Frame class > C++ defination code: > ```cpp > class Frame > ``` #### to\\_bytes ```python def to_bytes(*args, **kwargs) ``` Get raw data of packet item description **type** func **param** **copy**: if true, will alloc memory and copy data to new buffer
    **return** raw data **static** False > C++ defination code: > ```cpp > Bytes *to_bytes(bool copy false) > ``` #### size ```python def size(self) > int ``` Get raw data size of packet item description **type** func **return** size of raw data **static** False > C++ defination code: > ```cpp > size_t size() > ``` #### is\\_valid ```python def is_valid(self) > bool ``` Check packet is valid item description **type** func **return** true, packet is valid; false, packet is invalid **static** False > C++ defination code: > ```cpp > bool is_valid() > ``` #### set\\_pts ```python def set_pts(self, pts: int) > None ``` Set pts item description **type** func **param** **pts**: presentation time stamp. unit: time_base
    **static** False > C++ defination code: > ```cpp > void set_pts(uint64_t pts) > ``` #### set\\_dts ```python def set_dts(self, dts: int) > None ``` Set dts item description **type** func **param** **dts**: decoding time stamp. unit: time_base
    **static** False > C++ defination code: > ```cpp > void set_dts(uint64_t dts) > ``` #### set\\_duration ```python def set_duration(self, duration: int) > None ``` Set duration item description **type** func **param** **duration**: packet display time. unit: time_base
    **static** False > C++ defination code: > ```cpp > void set_duration(uint64_t duration) > ``` #### get\\_pts ```python def get_pts(self) > int ``` Set pts item description **type** func **param** **pts**: presentation time stamp. unit: time_base
    **return** pts value **static** False > C++ defination code: > ```cpp > uint64_t get_pts() > ``` #### get\\_dts ```python def get_dts(self) > int ``` Set dts item description **type** func **param** **dts**: decoding time stamp. unit: time_base
    **return** dts value **static** False > C++ defination code: > ```cpp > uint64_t get_dts() > ``` #### get\\_duration ```python def get_duration(self) > int ``` Get duration item description **type** func **return** duration value **static** False > C++ defination code: > ```cpp > uint64_t get_duration() > ``` #### type ```python def type(self) > VideoType ``` Get frame type item description **type** func **return** video type. @see video::VideoType **static** False > C++ defination code: > ```cpp > video::VideoType type() > ``` ### Packet Packet class > C++ defination code: > ```cpp > class Packet > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, data: int, len: int, pts: int 1, dts: int 1, duration: int 0) > None ``` Packet number (pair of numerator and denominator). item description **type** func **param** **data**: src data pointer, use pointers directly without copying.
    Note: this object will try to free this memory
    **len**: data len
    **pts**: presentation time stamp. unit: time_base
    **dts**: decoding time stamp. unit: time_base
    **duration**: packet display time. unit: time_base
    **static** False > C++ defination code: > ```cpp > Packet(uint8_t *data, int len, uint64_t pts 1, uint64_t dts 1, int64_t duration 0) > ``` #### get ```python def get(self) > list[int] ``` Get raw data of packet item description **type** func **return** raw data **static** False > C++ defination code: > ```cpp > std::vector get() > ``` #### data ```python def data(self) > int ``` Get raw data of packet item description **type** func **return** raw data **static** False > C++ defination code: > ```cpp > uint8_t *data() > ``` #### data\\_size ```python def data_size(self) > int ``` Get raw data size of packet item description **type** func **return** size of raw data **static** False > C++ defination code: > ```cpp > size_t data_size() > ``` #### is\\_valid ```python def is_valid(self) > bool ``` Check packet is valid item description **type** func **return** true, packet is valid; false, packet is invalid **static** False > C++ defination code: > ```cpp > bool is_valid() > ``` #### set\\_pts ```python def set_pts(self, pts: int) > None ``` Set pts item description **type** func **param** **pts**: presentation time stamp. unit: time_base
    **return** true, packet is valid; false, packet is invalid **static** False > C++ defination code: > ```cpp > void set_pts(uint64_t pts) > ``` #### set\\_dts ```python def set_dts(self, dts: int) > None ``` Set dts item description **type** func **param** **dts**: decoding time stamp. unit: time_base
    **return** true, packet is valid; false, packet is invalid **static** False > C++ defination code: > ```cpp > void set_dts(uint64_t dts) > ``` #### set\\_duration ```python def set_duration(self, duration: int) > None ``` Set duration item description **type** func **param** **duration**: packet display time. unit: time_base
    **return** true, packet is valid; false, packet is invalid **static** False > C++ defination code: > ```cpp > void set_duration(uint64_t duration) > ``` ### Encoder Encode class > C++ defination code: > ```cpp > class Encoder > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, path: str '', width: int 2560, height: int 1440, format: maix.image.Format ..., type: VideoType ..., framerate: int 30, gop: int 50, bitrate: int 3000000, time_base: int 1000, capture: bool False, block: bool True) > None ``` Construct a new Video object item description **type** func **param** **width**: picture width. this value may be set automatically. default is 2560.
    **height**: picture height. this value may be set automatically. default is 1440.
    **format**: picture format. default is image::Format::FMT_YVU420SP. @see image::Format
    **type**: video encode/decode type. default is ENC_H265_CBR. @see EncodeType
    **framerate**: frame rate. framerate default is 30, means 30 frames per second
    for video. 1/time_base is not the average frame rate if the frame rate is not constant.
    **gop**: for h264/h265 encoding, the interval between two I frames, default is 50.
    **bitrate**: for h264/h265 encoding, used to limit the bandwidth used by compressed data, default is 3000kbps
    **time_base**: frame time base. time_base default is 1000, means 1/1000 ms (not used)
    **capture**: enable capture, if true, you can use capture() function to get an image object
    **block**: This parameter determines whether encoding should block until it is complete.
    If set to true, it will wait until encoding is finished before returning.
    If set to false, it will return the current encoding result on the next call.
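A minimal encode-loop sketch using the parameters above and the bind_camera/encode methods documented below; the camera constructor arguments, the frame count, and the Python enum spellings (which follow the C++ names) are assumptions, and the raw H.264 frames are only measured here:
```python
from maix import camera, image, video

cam = camera.Camera(640, 480)              # assumed Camera(width, height) signature
enc = video.Encoder('', 640, 480, image.Format.FMT_YVU420SP,
                    video.VideoType.VIDEO_H264)
enc.bind_camera(cam)

for _ in range(100):                       # encode 100 frames as a demo
    frame = enc.encode()                   # no img: pulls from the bound camera
    data = frame.to_bytes(True)            # copy the raw H.264 data
    print('encoded frame:', len(data), 'bytes')
```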
    **static** False > C++ defination code: > ```cpp > Encoder(std::string path \"\", int width 2560, int height 1440, image::Format format image::Format::FMT_YVU420SP, video::VideoType type video::VideoType::VIDEO_H264, int framerate 30, int gop 50, int bitrate 3000 * 1000, int time_base 1000, bool capture false, bool block true) > ``` #### bind\\_camera ```python def bind_camera(self, camera: maix.camera.Camera) > maix.err.Err ``` Bind camera item description **type** func **param** **camera**: camera object
    **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err bind_camera(camera::Camera *camera) > ``` #### encode ```python def encode(self, img: maix.image.Image ..., pcm: maix.Bytes(bytes) b'') > Frame ``` Encode image. item description **type** func **param** **img**: the image will be encode.
if img is NULL, this function will try to get an image from the camera; you must use the bind_camera() function to bind the camera first.
**pcm**: the pcm data to be encoded.
    **return** encode result **static** False > C++ defination code: > ```cpp > video::Frame *encode(image::Image *img maix::video::Encoder::NoneImage, Bytes *pcm maix::video::Encoder::NoneBytes) > ``` #### capture ```python def capture(self) > maix.image.Image ``` Capture image item description **type** func **attention** Each time encode is called, the last captured image will be released. **return** error code **static** False > C++ defination code: > ```cpp > image::Image *capture() > ``` #### width ```python def width(self) > int ``` Get video width item description **type** func **return** video width **static** False > C++ defination code: > ```cpp > int width() > ``` #### height ```python def height(self) > int ``` Get video height item description **type** func **return** video height **static** False > C++ defination code: > ```cpp > int height() > ``` #### type ```python def type(self) > VideoType ``` Get video encode type item description **type** func **return** VideoType **static** False > C++ defination code: > ```cpp > video::VideoType type() > ``` #### framerate ```python def framerate(self) > int ``` Get video encode framerate item description **type** func **return** frame rate **static** False > C++ defination code: > ```cpp > int framerate() > ``` #### gop ```python def gop(self) > int ``` Get video encode gop item description **type** func **return** gop value **static** False > C++ defination code: > ```cpp > int gop() > ``` #### bitrate ```python def bitrate(self) > int ``` Get video encode bitrate item description **type** func **return** bitrate value **static** False > C++ defination code: > ```cpp > int bitrate() > ``` #### time\\_base ```python def time_base(self) > int ``` Get video encode time base item description **type** func **return** time base value **static** False > C++ defination code: > ```cpp > int time_base() > ``` ### Decoder Decoder class > C++ defination code: > ```cpp > class Decoder > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, path: str, format: maix.image.Format ...) > None ``` Construct a new decoder object item description **type** func **param** **path**: Path to the file to be decoded. Supports files with .264 and .mp4 extensions. Note that only mp4 files containing h.264 streams are supported.
    **format**: Decoded output format, currently only support YUV420SP
    **static** False > C++ defination code: > ```cpp > Decoder(std::string path, image::Format format image::Format::FMT_YVU420SP) > ``` #### decode\\_video ```python def decode_video(self, block: bool True) > Context ``` Decode the video stream, returning the image of the next frame each time. item description **type** func **param** **block**: Whether it blocks or not. If true, it will wait for the decoding to complete and return the current frame.
    If false, it will return the result of the previous frame's decoding. If the previous frame's decoding result is empty,
    it will return an unknown Context, and you can use the media_type method of the Context to determine if a valid result exists.
    default is true.
    **return** Decoded context information. **static** False > C++ defination code: > ```cpp > video::Context * decode_video(bool block true) > ``` #### decode\\_audio ```python def decode_audio(self) > Context ``` Decode the video stream, returning the image of the next frame each time. item description **type** func **return** Decoded context information. **static** False > C++ defination code: > ```cpp > video::Context * decode_audio() > ``` #### decode ```python def decode(self, block: bool True) > Context ``` Decode the video and audio stream item description **type** func **param** **block**: Whether it blocks or not. If true, it will wait for the decoding to complete and return the current frame.
    If false, it will return the result of the previous frame's decoding. If the previous frame's decoding result is empty,
    it will return an unknown Context, and you can use the media_type method of the Context to determine if a valid result exists.
    default is true.
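A decode sketch: open an mp4 file (placeholder path), pull contexts with decode(), and use media_type() to tell video from audio as the description above suggests; the enum spellings follow the C++ names and the None check is only defensive:
```python
from maix import video

dec = video.Decoder('/root/test.mp4')       # placeholder path, mp4 with an H.264 stream
print('size:', dec.width(), 'x', dec.height(), 'fps:', dec.fps())

for _ in range(100):                        # read a bounded number of frames for the demo
    ctx = dec.decode()                      # blocking decode
    if ctx is None:                         # defensive: stop if nothing is returned
        break
    if ctx.media_type() == video.MediaType.MEDIA_TYPE_VIDEO:
        img = ctx.image()                   # decoded frame (YUV420SP)
        print('video frame, pts:', ctx.pts())
    elif ctx.media_type() == video.MediaType.MEDIA_TYPE_AUDIO:
        pcm = ctx.get_pcm()
        print('audio chunk, pts:', ctx.pts())
```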
    **return** Decoded context information. **static** False > C++ defination code: > ```cpp > video::Context * decode(bool block true) > ``` #### width ```python def width(self) > int ``` Get the video width item description **type** func **return** video width **static** False > C++ defination code: > ```cpp > int width() > ``` #### height ```python def height(self) > int ``` Get the video height item description **type** func **return** video height **static** False > C++ defination code: > ```cpp > int height() > ``` #### bitrate ```python def bitrate(self) > int ``` Get the video bitrate item description **type** func **return** bitrate value **static** False > C++ defination code: > ```cpp > int bitrate() > ``` #### fps ```python def fps(self) > int ``` Get the video fps item description **type** func **return** fps value **static** False > C++ defination code: > ```cpp > int fps() > ``` #### seek ```python def seek(self, time: float 1) > float ``` Seek to the required playback position item description **type** func **param** **time**: timestamp value, unit: s
    **return** return the current position, unit: s **static** False > C++ defination code: > ```cpp > double seek(double time 1) > ``` #### duration ```python def duration(self) > float ``` Get the maximum duration of the video. If it returns 0, it means it cannot be predicted. item description **type** func **return** duration value, unit: s **static** False > C++ defination code: > ```cpp > double duration() > ``` #### timebase ```python def timebase(self) > list[int] ``` Get the time base. item description **type** func **static** False > C++ defination code: > ```cpp > std::vector timebase() > ``` #### has\\_audio ```python def has_audio(self) > bool ``` If find audio data, return true item description **type** func **static** False > C++ defination code: > ```cpp > bool has_audio() > ``` #### has\\_video ```python def has_video(self) > bool ``` If find video data, return true item description **type** func **static** False > C++ defination code: > ```cpp > bool has_video() > ``` ### Video Video class > C++ defination code: > ```cpp > class Video > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, path: str '', width: int 2560, height: int 1440, format: maix.image.Format ..., time_base: int 30, framerate: int 30, capture: bool False, open: bool True) > None ``` Construct a new Video object item description **type** func **param** **path**: video path. the path determines the location where you load or save the file, if path is none, the video module will not save or load file.
    xxx.h265 means video format is H265, xxx.mp4 means video format is MP4
    **width**: picture width. this value may be set automatically. default is 2560.
    **height**: picture height. this value may be set automatically. default is 1440.
    **format**: picture pixel format. this value may be set automatically. default is FMT_YVU420SP.
    **time_base**: frame time base. time_base default is 30, means 1/30 ms
    **framerate**: frame rate. framerate default is 30, means 30 frames per second
    for video. 1/time_base is not the average frame rate if the frame rate is not constant.
    **capture**: enable capture, if true, you can use capture() function to get an image object
    **open**: If true, video will automatically call open() after creation. default is true.
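A short record-to-file sketch for this class, using the bind_camera/encode/finish methods documented below; the output path and the camera constructor arguments are placeholders/assumptions:
```python
from maix import camera, video

cam = camera.Camera(640, 480)                   # assumed Camera(width, height) signature
v = video.Video('/root/output.mp4', 640, 480)   # .mp4 suffix selects the MP4 format
v.bind_camera(cam)

for _ in range(300):                            # roughly 10 s at 30 fps
    v.encode()                                  # no img: encode frames from the bound camera
v.finish()                                      # finish encoding
```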
    **static** False > C++ defination code: > ```cpp > Video(std::string path std::string(), int width 2560, int height 1440, image::Format format image::Format::FMT_YVU420SP, int time_base 30, int framerate 30, bool capture false, bool open true) > ``` #### open ```python def open(self, path: str '', fps: float 30.0) > maix.err.Err ``` Open video and run item description **type** func **param** **path**: video path. the path determines the location where you load or save the file, if path is none, the video module will not save or load file.
    xxx.h265 means video format is H265, xxx.mp4 means video format is MP4
    **fps**: video fps
    **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err open(std::string path std::string(), double fps 30.0) > ``` #### close ```python def close(self) > None ``` Close video item description **type** func **static** False > C++ defination code: > ```cpp > void close() > ``` #### bind\\_camera ```python def bind_camera(self, camera: maix.camera.Camera) > maix.err.Err ``` Bind camera item description **type** func **param** **camera**: camera object
    **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err bind_camera(camera::Camera *camera) > ``` #### encode ```python def encode(self, img: maix.image.Image ...) > Packet ``` Encode image. item description **type** func **param** **img**: the image will be encode.
if img is NULL, this function will try to get an image from the camera; you must use the bind_camera() function to bind the camera first.
    **return** encode result **static** False > C++ defination code: > ```cpp > video::Packet *encode(image::Image *img maix::video::Video::NoneImage) > ``` #### decode ```python def decode(self, frame: Frame None) > maix.image.Image ``` Decode frame item description **type** func **param** **frame**: the frame will be decode
    **return** decode result **static** False > C++ defination code: > ```cpp > image::Image *decode(video::Frame *frame nullptr) > ``` #### finish ```python def finish(self) > maix.err.Err ``` Encode or decode finish item description **type** func **return** error code **static** False > C++ defination code: > ```cpp > err::Err finish() > ``` #### capture ```python def capture(self) > maix.image.Image ``` Capture image item description **type** func **attention** Each time encode is called, the last captured image will be released. **return** error code **static** False > C++ defination code: > ```cpp > image::Image *capture() > ``` #### is\\_recording ```python def is_recording(self) > bool ``` Check if video is recording item description **type** func **return** true if video is recording, false if not **static** False > C++ defination code: > ```cpp > bool is_recording() > ``` #### is\\_opened ```python def is_opened(self) > bool ``` Check if video is opened item description **type** func **return** true if video is opened, false if not **static** False > C++ defination code: > ```cpp > bool is_opened() > ``` #### is\\_closed ```python def is_closed(self) > bool ``` check video device is closed or not item description **type** func **return** closed or not, bool type **static** False > C++ defination code: > ```cpp > bool is_closed() > ``` #### width ```python def width(self) > int ``` Get video width item description **type** func **return** video width **static** False > C++ defination code: > ```cpp > int width() > ``` #### height ```python def height(self) > int ``` Get video height item description **type** func **return** video height **static** False > C++ defination code: > ```cpp > int height() > ``` ### VideoRecorder Video Recorder class. This module is not fully supported and may be deprecated in the future. > C++ defination code: > ```cpp > class VideoRecorder > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, open: bool True) > None ``` Construct a new VideoRecorder object. This is an object that integrates recording, video capturing, and display functions, which can be used to achieve high resolution video input when needed. item description **type** func **param** **open**: If true, video will automatically call open() after creation. default is true.
    **static** False > C++ defination code: > ```cpp > VideoRecorder(bool open true) > ``` #### lock ```python def lock(self, timeout: int 1) > maix.err.Err ``` lock video item description **type** func **param** **timeout**: timeout in ms. unit:ms
    **return** error code **static** False > C++ defination code: > ```cpp > err::Err lock(int64_t timeout 1) > ``` #### unlock ```python def unlock(self) > maix.err.Err ``` unlock video item description **type** func **return** error code **static** False > C++ defination code: > ```cpp > err::Err unlock() > ``` #### open ```python def open(self) > maix.err.Err ``` Start a thread to handle the input function. item description **type** func **return** error code **static** False > C++ defination code: > ```cpp > err::Err open() > ``` #### close ```python def close(self) > maix.err.Err ``` Stop the thread, and reset the object. item description **type** func **return** error code **static** False > C++ defination code: > ```cpp > err::Err close() > ``` #### is\\_opened ```python def is_opened(self) > bool ``` Check whether the object is opened. item description **type** func **static** False > C++ defination code: > ```cpp > bool is_opened() > ``` #### bind\\_display ```python def bind_display(self, display: maix.display.Display, fit: maix.image.Fit ...) > maix.err.Err ``` Bind a Display object. if this object is not bound, it will not be displayed. item description **type** func **param** **display**: display object
    **fit**: fit mode. It is recommended to fill in FIT_COVER or FIT_FILL. For maixcam, using FIT_CONTAIN may affect the
    functionality of the second layer created by add_channel() in the Display. default is FIT_COVER.
    **return** error code **static** False > C++ defination code: > ```cpp > err::Err bind_display(display::Display *display, image::Fit fit image::FIT_COVER) > ``` #### bind\\_camera ```python def bind_camera(self, camera: maix.camera.Camera) > maix.err.Err ``` Bind a Camera object. if this object is not bound, images cannot be captured. item description **type** func **param** **camera**: camera object
    **return** error code **static** False > C++ defination code: > ```cpp > err::Err bind_camera(camera::Camera *camera) > ``` #### bind\\_audio ```python def bind_audio(self, audio: maix.audio.Recorder) > maix.err.Err ``` Bind a AudioRecorder object. if this object is not bound, audio cannot be captured. item description **type** func **param** **audio**: audio recorder object
    **return** error code **static** False > C++ defination code: > ```cpp > err::Err bind_audio(audio::Recorder *audio) > ``` #### bind\\_imu ```python def bind_imu(self, imu: capsule) > maix.err.Err ``` Bind a IMU object. if this object is not bound, imu data cannot be captured. item description **type** func **param** **imu**: imu object
    **return** error code **static** False > C++ defination code: > ```cpp > err::Err bind_imu(void *imu) > ``` #### reset ```python def reset(self) > maix.err.Err ``` Reset the video recorder. item description **type** func **note** It will not reset the bound object; if you have already bound the display using bind_display(), there is no need to rebind the display after calling reset(). **return** error code **static** False > C++ defination code: > ```cpp > err::Err reset() > ``` #### config\\_path ```python def config_path(self, path: str) > maix.err.Err ``` The recorded video will be saved to this path, and this API cannot be called during runtime. item description **type** func **param** **path**: The path of the video file to be saved
    **return** error code **static** False > C++ defination code: > ```cpp > err::Err config_path(std::string path) > ``` #### get\\_path ```python def get_path(self) > str ``` Get the path of the video file to be saved item description **type** func **return** path **static** False > C++ defination code: > ```cpp > std::string get_path() > ``` #### config\\_snapshot ```python def config_snapshot(self, enable: bool, resolution: list[int] [], format: maix.image.Format ...) > maix.err.Err ``` Set the snapshot parameters item description **type** func **note** Enabling snapshot functionality may result in some performance loss. **param** **enable**: enable or disable snapshot
    **resolution**: image resolution of snapshot
    **format**: image format of snapshot
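Putting the VideoRecorder configuration calls in this section together, a recording sketch; the camera/display constructors and the output path are assumptions or placeholders:
```python
import time                                 # standard library sleep
from maix import camera, display, video

cam = camera.Camera()                       # assumed default constructors
disp = display.Display()
vr = video.VideoRecorder()

vr.bind_camera(cam)
vr.bind_display(disp)                       # optional: preview while recording
vr.config_path('/root/record.mp4')          # placeholder output path
vr.config_resolution([640, 480])            # width must be divisible by 32
vr.config_fps(30)
vr.config_bitrate(3 * 1000 * 1000)

vr.record_start()
time.sleep(10)                              # record for about 10 seconds
vr.record_finish()                          # stop recording and save the video
```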
    **return** error code **static** False > C++ defination code: > ```cpp > err::Err config_snapshot(bool enable, std::vector resolution std::vector(), image::Format format image::Format::FMT_YVU420SP) > ``` #### config\\_resolution ```python def config_resolution(self, resolution: list[int]) > maix.err.Err ``` Set the resolution of the video, and this API cannot be called during runtime. item description **type** func **note** You must bind the camera first, and this interface will modify the camera's resolution. The width must be divisible by 32. **param** **resolution**: The resolution of the video
    **return** error code **static** False > C++ defination code: > ```cpp > err::Err config_resolution(std::vector resolution) > ``` #### get\\_resolution ```python def get_resolution(self) > list[int] ``` Get the resolution of the video item description **type** func **return** the resolution of the video **static** False > C++ defination code: > ```cpp > std::vector get_resolution() > ``` #### config\\_fps ```python def config_fps(self, fps: int) > maix.err.Err ``` Set the fps of the video, and this API cannot be called during runtime. item description **type** func **note** This interface only affect the fps of the encoded file. **return** error code **static** False > C++ defination code: > ```cpp > err::Err config_fps(int fps) > ``` #### get\\_fps ```python def get_fps(self) > int ``` Get the fps of the video. item description **type** func **return** fps value **static** False > C++ defination code: > ```cpp > int get_fps() > ``` #### config\\_bitrate ```python def config_bitrate(self, bitrate: int) > maix.err.Err ``` Set the bitrate of the video, and this API cannot be called during runtime. item description **type** func **return** error code **static** False > C++ defination code: > ```cpp > err::Err config_bitrate(int bitrate) > ``` #### get\\_bitrate ```python def get_bitrate(self) > int ``` Get the bitrate of the video. item description **type** func **return** bitrate value **static** False > C++ defination code: > ```cpp > int get_bitrate() > ``` #### mute ```python def mute(self, data: int 1) > int ``` Set/Get the mute of the video item description **type** func **param** **data**: If the parameter is true, mute; if false, unmute; if no parameter is provided, return the mute status.
    **return** error code **static** False > C++ defination code: > ```cpp > int mute(int data 1) > ``` #### volume ```python def volume(self, data: int 1) > int ``` Set/Get the volume of the video item description **type** func **param** **data**: The volume of the video, the range is 0 100. if no parameter is provided, return the volume.
    **return** error code **static** False > C++ defination code: > ```cpp > int volume(int data 1) > ``` #### seek ```python def seek(self) > int ``` Get the current position of the video item description **type** func **return** current position, unit: ms **static** False > C++ defination code: > ```cpp > int64_t seek() > ``` #### record\\_start ```python def record_start(self) > maix.err.Err ``` Start recording item description **type** func **note** You must bind the camera at a minimum during input. Additionally,
    if you bind a display, the input image will be shown,
if you bind an audio recorder, audio will be recorded,
    if you bind a IMU, IMU data will be logged. **return** error code **static** False > C++ defination code: > ```cpp > err::Err record_start() > ``` #### snapshot ```python def snapshot(self) > maix.image.Image ``` Take a snapshot item description **type** func **return** image::Image **static** False > C++ defination code: > ```cpp > image::Image *snapshot() > ``` #### record\\_finish ```python def record_finish(self) > maix.err.Err ``` Stop recording and save the video item description **type** func **return** error code **static** False > C++ defination code: > ```cpp > err::Err record_finish() > ``` #### draw\\_rect ```python def draw_rect(self, id: int, x: int, y: int, w: int, h: int, color: maix.image.Color ..., thickness: int 1, hidden: bool False) > maix.err.Err ``` Draw a rect on the video item description **type** func **param** **id**: id of the rect, range is [0, 15]
    **x**: x coordinate
    **y**: y coordinate
    **w**: width
    **h**: height
    **color**: color
**thickness**: The line width of the rectangular box; if set to 1, it indicates that the rectangular box will be filled.
    **hidden**: Hide or show the rectangular box
    **return** error code **static** False > C++ defination code: > ```cpp > err::Err draw_rect(int id, int x, int y, int w, int h, image::Color color image::COLOR_WHITE, int thickness 1, bool hidden false) > ```"},"/maixpy/api/maix/nn.html":{"title":"maix.nn","content":" title: maix.nn maix.nn module > You can use `maix.nn` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module module brief [F](./nn/F.html) maix.nn.F module ## Enum ### SpeechDevice speech device item describe **values** **DEVICE_NONE**:
    **DEVICE_PCM**:
    **DEVICE_MIC**:
    **DEVICE_WAV**:
    > C++ defination code: > ```cpp > enum SpeechDevice { > DEVICE_NONE 1, > DEVICE_PCM, > DEVICE_MIC, > DEVICE_WAV, > } > ``` ### SpeechDecoder speech decoder type item describe **values** **DECODER_RAW**:
    **DECODER_DIG**:
    **DECODER_LVCSR**:
    **DECODER_KWS**:
    **DECODER_ALL**:
    > C++ defination code: > ```cpp > enum SpeechDecoder { > DECODER_RAW 1, > DECODER_DIG 2, > DECODER_LVCSR 4, > DECODER_KWS 8, > DECODER_ALL 65535, > } > ``` ## Variable ## Function ## Class ### NanoTrack NanoTrack class > C++ defination code: > ```cpp > class NanoTrack > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, model: str '') > None ``` Constructor of NanoTrack class item description **type** func **param** **model**: model path, default empty, you can load model later by load function.
    **throw** If model arg is not empty and load failed, will throw err::Exception. **static** False > C++ defination code: > ```cpp > NanoTrack(const string &model \"\") > ``` #### load ```python def load(self, model: str) > maix.err.Err ``` Load model from file item description **type** func **param** **model**: Model path want to load
    **return** err::Err **static** False > C++ defination code: > ```cpp > err::Err load(const string &model) > ``` #### init ```python def init(self, img: maix.image.Image, x: int, y: int, w: int, h: int) > None ``` Init tracker, give tacker first target image and target position. item description **type** func **param** **img**: Image want to detect, target should be in this image.
    **x**: the target position left top coordinate x.
    **y**: the target position left top coordinate y.
    **w**: the target width.
    **h**: the target height.
    **throw** If image format not match model input format, will throw err::Exception. **static** False > C++ defination code: > ```cpp > void init(image::Image &img, int x, int y, int w, int h) > ``` #### track ```python def track(self, img: maix.image.Image, threshold: float 0.9) > ... ``` Track object acoording to last object position and the init function learned target feature. item description **type** func **param** **img**: image to detect object and track, can be any resolution, before detect it will crop a area according to last time target's position.
    **threshold**: If score < threshold, the new detection is considered invalid, but it is still returned; default 0.9.
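A minimal MaixPy tracking-loop sketch for init and track. The model path and the camera usage come from other modules and are illustrative assumptions:

```python
from maix import nn, camera, image

tracker = nn.NanoTrack("/root/models/nanotrack.mud")  # hypothetical model path
cam = camera.Camera(320, 240)                         # assumption: camera module usage

# learn the target from the first frame: left-top (100, 60), size 50x50
img = cam.read()
tracker.init(img, 100, 60, 50, 50)

while True:
    img = cam.read()
    obj = tracker.track(img, threshold=0.9)
    # obj carries the tracked position and score
    img.draw_rect(obj.x, obj.y, obj.w, obj.h, image.COLOR_RED)
```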
    **return** object, position and score, and detect area in points's first 4 element(x, y, w, h, center_x, center_y, input_size, target_size) **static** False > C++ defination code: > ```cpp > nn::Object track(image::Image &img, float threshold 0.9) > ``` #### input\\_size ```python def input_size(self) > maix.image.Size ``` Get model input size item description **type** func **return** model input size **static** False > C++ defination code: > ```cpp > image::Size input_size() > ``` #### input\\_width ```python def input_width(self) > int ``` Get model input width item description **type** func **return** model input size of width **static** False > C++ defination code: > ```cpp > int input_width() > ``` #### input\\_height ```python def input_height(self) > int ``` Get model input height item description **type** func **return** model input size of height **static** False > C++ defination code: > ```cpp > int input_height() > ``` #### input\\_format ```python def input_format(self) > maix.image.Format ``` Get input image format item description **type** func **return** input image format, image::Format type. **static** False > C++ defination code: > ```cpp > image::Format input_format() > ``` #### mean Get mean value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector mean > ``` #### scale Get scale value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector scale > ``` ### OCR\\_Box Object for OCR detect box > C++ defination code: > ```cpp > class OCR_Box > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, x1: int 0, y1: int 0, x2: int 0, y2: int 0, x3: int 0, y3: int 0, x4: int 0, y4: int 0) > None ``` OCR_Box constructor item description **type** func **static** False > C++ defination code: > ```cpp > OCR_Box(int x1 0, int y1 0, int x2 0, int y2 0, int x3 0, int y3 0, int x4 0, int y4 0) > ``` #### x1 left top point of box item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int x1 > ``` #### y1 left top point of box item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int y1 > ``` #### x2 right top point of box item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int x2 > ``` #### y2 right top point of box item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int y2 > ``` #### x3 right bottom point of box item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int x3 > ``` #### y3 right bottom point of box item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int y3 > ``` #### x4 left bottom point of box item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int x4 > ``` #### y4 left bottom point of box item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int y4 > ``` #### to\\_list ```python def to_list(self) > list[int] ``` convert box point to a list type. item description **type** func **return** list type, element is int type, value [x1, y1, x2, y2, x3, y3, x4, y4]. 
**static** False > C++ defination code: > ```cpp > std::vector to_list() > ``` ### OCR\\_Object Object for OCR detect result > C++ defination code: > ```cpp > class OCR_Object > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, box: OCR_Box, idx_list: list[int], char_list: list[str], score: float 0, char_pos: list[int] []) > None ``` Constructor of Object for OCR detect result item description **type** func **param** **score**: score
    **static** False > C++ defination code: > ```cpp > OCR_Object(const nn::OCR_Box &box, const std::vector &idx_list, const std::vector &char_list, float score 0, const std::vector &char_pos std::vector()) > ``` #### box OCR_Object box, 4 points box, first point at the left top, clock wise. item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > nn::OCR_Box box > ``` #### score Object score item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > float score > ``` #### idx\\_list chars' idx list, element is int type. item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector idx_list > ``` #### char\\_pos Chars' position relative to left item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector char_pos > ``` #### char\\_str ```python def char_str(self) > str ``` Get OCR_Object's charactors, return a string type. item description **type** func **return** All charactors in string type. **static** False > C++ defination code: > ```cpp > const std::string &char_str() > ``` #### char\\_list ```python def char_list(self) > list[str] ``` Get OCR_Object's charactors, return a list type. item description **type** func **return** All charactors in list type. **static** False > C++ defination code: > ```cpp > const std::vector &char_list() > ``` #### update\\_chars ```python def update_chars(self, char_list: list[str]) > None ``` Set OCR_Object's charactors item description **type** func **param** **char_list**: All charactors in list type.
    **static** False > C++ defination code: > ```cpp > void update_chars(const std::vector &char_list) > ``` #### \\_\\_str\\_\\_ ```python def __str__(self) > str ``` OCR_Object info to string item description **type** func **return** OCR_Object info string **static** False > C++ defination code: > ```cpp > std::string to_str() > ``` ### OCR\\_Objects OCR_Objects Class for detect result > C++ defination code: > ```cpp > class OCR_Objects > ``` #### \\_\\_init\\_\\_ ```python def __init__(self) > None ``` Constructor of OCR_Objects class item description **type** func **static** False > C++ defination code: > ```cpp > OCR_Objects() > ``` #### add ```python def add(self, box: OCR_Box, idx_list: list[int], char_list: list[str], score: float 0, char_pos: list[int] []) > OCR_Object ``` Add object to objects item description **type** func **throw** Throw exception if no memory **static** False > C++ defination code: > ```cpp > nn::OCR_Object &add(const nn::OCR_Box &box, const std::vector &idx_list, const std::vector &char_list, float score 0, const std::vector &char_pos std::vector()) > ``` #### remove ```python def remove(self, idx: int) > maix.err.Err ``` Remove object form objects item description **type** func **static** False > C++ defination code: > ```cpp > err::Err remove(int idx) > ``` #### at ```python def at(self, idx: int) > OCR_Object ``` Get object item item description **type** func **static** False > C++ defination code: > ```cpp > nn::OCR_Object &at(int idx) > ``` #### \\_\\_item\\_\\_ ```python def __item__(self, idx: int) > OCR_Object ``` Get object item item description **type** func **static** False > C++ defination code: > ```cpp > nn::OCR_Object &operator[](int idx) > ``` #### \\_\\_len\\_\\_ ```python def __len__(self) > int ``` Get size item description **type** func **static** False > C++ defination code: > ```cpp > size_t size() > ``` #### \\_\\_iter\\_\\_ ```python def __iter__(self) > typing.Iterator ``` Begin item description **type** func **static** False > C++ defination code: > ```cpp > std::vector::iterator begin() > ``` ### Speech Speech > C++ defination code: > ```cpp > class Speech > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, model: str '') > None ``` Construct a new Speech object item description **type** func **param** **model**: model path, default empty, you can load model later by load function.
    **throw** If model arg is not empty and load failed, will throw err::Exception. **static** False > C++ defination code: > ```cpp > Speech(const string &model \"\") > ``` #### \\_\\_init\\_\\_ (overload 1) Construct a new Speech object item description **type** func **param** **model**: model path, default empty, you can load model later by load function.
    **throw** If model arg is not empty and load failed, will throw err::Exception. **static** False > C++ defination code: > ```cpp > Speech(const string &model \"\") > ``` #### load ```python def load(self, model: str) > maix.err.Err ``` Load model from file item description **type** func **param** **model**: Model path want to load
    **return** err::Err **static** False > C++ defination code: > ```cpp > err::Err load(const string &model) > ``` #### load (overload 1) Load model from file item description **type** func **param** **model**: Model path want to load
    **return** err::Err **static** False > C++ defination code: > ```cpp > err::Err load(const string &model) > ``` #### init ```python def init(self, dev_type: SpeechDevice, device_name: str) > maix.err.Err ``` Init the ASR library and select the type and name of the audio device. item description **type** func **param** **dev_type**: device type want to detect, can choose between WAV, PCM, or MIC.
    **device_name**: device name to use; can be a WAV file path, a PCM file path, or a MIC device name.
    **throw** **1**. If the AM (acoustic model) is not loaded, will throw err::ERR_NOT_IMPL.
    **2**. If device is not supported, will throw err::ERR_NOT_IMPL.
    **return** err::Err type, if init success, return err::ERR_NONE **static** False > C++ defination code: > ```cpp > err::Err init(nn::SpeechDevice dev_type, const string &device_name) > ``` #### init (overload 1) Init the ASR library and select the type and name of the audio device. item description **type** func **param** **dev_type**: device type want to detect, can choose between WAV, PCM, or MIC.
    **device_name**: device name to use; can be a WAV file path, a PCM file path, or a MIC device name.
    **throw** **1**. If the AM (acoustic model) is not loaded, will throw err::ERR_NOT_IMPL.
    **2**. If device is not supported, will throw err::ERR_NOT_IMPL.
    **return** err::Err type, if init success, return err::ERR_NONE **static** False > C++ defination code: > ```cpp > err::Err init(nn::SpeechDevice dev_type, const string &device_name) > ``` #### devive ```python def devive(self, dev_type: SpeechDevice, device_name: str) > maix.err.Err ``` Reset the device, usually used for PCM/WAV recognition,\\nsuch as identifying the next WAV file. item description **type** func **param** **dev_type**: device type want to detect, can choose between WAV, PCM, or MIC.
    **device_name**: device name to use; can be a WAV file path, a PCM file path, or a MIC device name.
    **throw** If device is not supported, will throw err::ERR_NOT_IMPL. **return** err::Err type, if init success, return err::ERR_NONE **static** False > C++ defination code: > ```cpp > err::Err devive(nn::SpeechDevice dev_type, const string &device_name) > ``` #### devive (overload 1) Reset the device, usually used for PCM/WAV recognition,\\nsuch as identifying the next WAV file. item description **type** func **param** **dev_type**: device type want to detect, can choose between WAV, PCM, or MIC.
    **device_name**: device name to use; can be a WAV file path, a PCM file path, or a MIC device name.
    **throw** If device is not supported, will throw err::ERR_NOT_IMPL. **return** err::Err type, if init success, return err::ERR_NONE **static** False > C++ defination code: > ```cpp > err::Err devive(nn::SpeechDevice dev_type, const string &device_name) > ``` #### deinit ```python def deinit(self) > None ``` Deinit the ASR library. item description **type** func **static** False > C++ defination code: > ```cpp > void deinit() > ``` #### deinit (overload 1) Deinit the ASR library. item description **type** func **static** False > C++ defination code: > ```cpp > void deinit() > ``` #### dec\\_deinit ```python def dec_deinit(self, decoder: SpeechDecoder) > None ``` Deinit the decoder. item description **type** func **param** **decoder**: decoder type want to deinit
    can choose between DECODER_RAW, DECODER_DIG, DECODER_LVCSR, DECODER_KWS or DECODER_ALL.
    **throw** If device is not supported, will throw err::ERR_NOT_IMPL. **static** False > C++ defination code: > ```cpp > void dec_deinit(nn::SpeechDecoder decoder) > ``` #### dec\\_deinit (overload 1) Deinit the decoder. item description **type** func **param** **decoder**: decoder type want to deinit
    can choose between DECODER_RAW, DECODER_DIG, DECODER_LVCSR, DECODER_KWS or DECODER_ALL.
    **throw** If device is not supported, will throw err::ERR_NOT_IMPL. **static** False > C++ defination code: > ```cpp > void dec_deinit(nn::SpeechDecoder decoder) > ``` #### raw ```python def raw(self, callback: typing.Callable[[list[pnyp_t], int], None]) > maix.err.Err ``` Init raw decoder, it will output the prediction results of the original AM. item description **type** func **param** **callback**: raw decoder user callback.
    **return** err::Err type, if init success, return err::ERR_NONE **static** False > C++ defination code: > ```cpp > err::Err raw(std::function, int)> callback) > ``` #### raw (overload 1) Get raw decoder status item description **type** func **return** bool, raw decoder status **static** False > C++ defination code: > ```cpp > bool raw() > ``` #### raw (overload 2) Init raw decoder, it will output the prediction results of the original AM. item description **type** func **param** **callback**: raw decoder user callback.
    **return** err::Err type, if init success, return err::ERR_NONE **static** False > C++ defination code: > ```cpp > err::Err raw(std::function, int)> callback) > ``` #### raw (overload 3) Get raw decoder status item description **type** func **return** bool, raw decoder status **static** False > C++ defination code: > ```cpp > bool raw() > ``` #### digit ```python def digit(self, blank: int, callback: typing.Callable[[str, int], None]) > maix.err.Err ``` Init digit decoder, it will output the Chinese digit recognition results within the last 4 seconds. item description **type** func **param** **blank**: If it exceeds this value, insert a '_' in the output result to indicate idle mute.
    **callback**: digit decoder user callback.
    **return** err::Err type, if init success, return err::ERR_NONE **static** False > C++ defination code: > ```cpp > err::Err digit(int blank, std::function callback) > ``` #### digit (overload 1) Get digit decoder status item description **type** func **return** bool, digit decoder status **static** False > C++ defination code: > ```cpp > bool digit() > ``` #### digit (overload 2) Init digit decoder, it will output the Chinese digit recognition results within the last 4 seconds. item description **type** func **param** **blank**: If it exceeds this value, insert a '_' in the output result to indicate idle mute.
    **callback**: digit decoder user callback.
    **return** err::Err type, if init success, return err::ERR_NONE **static** False > C++ defination code: > ```cpp > err::Err digit(int blank, std::function callback) > ``` #### digit (overload 3) Get digit decoder status item description **type** func **return** bool, digit decoder status **static** False > C++ defination code: > ```cpp > bool digit() > ``` #### kws ```python def kws(self, kw_tbl: list[str], kw_gate: list[float], callback: typing.Callable[[list[float], int], None], auto_similar: bool True) > maix.err.Err ``` Init kws decoder, it will output a probability list of all registered keywords in the latest frame,\\nusers can set their own thresholds for wake up. item description **type** func **param** **kw_tbl**: Keyword list, filled in with spaces separated by pinyin, for example: xiao3 ai4 tong2 xue2
    **kw_gate**: keyword probability gate (threshold) table; the number of entries should match kw_tbl
    **auto_similar**: Whether to perform automatic homophone processing,
    setting it to true will automatically calculate the probability by using pinyin with different tones as homophones
    **callback**: kws decoder user callback.
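A hedged keyword-spotting sketch for the kws decoder. The model path and the MIC device name are placeholders, and the wake-up threshold is a user choice:

```python
from maix import nn

speech = nn.Speech("/root/models/asr_am.mud")       # hypothetical acoustic model path
speech.init(nn.SpeechDevice.DEVICE_MIC, "")         # placeholder device name

kw_tbl = ["xiao3 ai4 tong2 xue2"]                   # keywords as space-separated pinyin
kw_gate = [0.1]                                     # one probability gate per keyword

def on_kws(probs, _):
    # probs: probability of each registered keyword in the latest frame
    for kw, p in zip(kw_tbl, probs):
        if p > 0.5:                                 # user-chosen wake-up threshold
            print("keyword detected:", kw, p)

speech.kws(kw_tbl, kw_gate, on_kws, auto_similar=True)

while True:
    frames = speech.run(1)                          # run one frame at a time
    if frames < 1:
        break
```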
    **return** err::Err type, if init success, return err::ERR_NONE **static** False > C++ defination code: > ```cpp > err::Err kws(std::vector kw_tbl, std::vector kw_gate, std::function, int)> callback, bool auto_similar true) > ``` #### kws (overload 1) Get kws decoder status item description **type** func **return** bool, kws decoder status **static** False > C++ defination code: > ```cpp > bool kws() > ``` #### kws (overload 2) Init kws decoder, it will output a probability list of all registered keywords in the latest frame,\\nusers can set their own thresholds for wake up. item description **type** func **param** **kw_tbl**: Keyword list, filled in with spaces separated by pinyin, for example: xiao3 ai4 tong2 xue2
    **kw_gate**: keyword probability gate (threshold) table; the number of entries should match kw_tbl
    **auto_similar**: Whether to perform automatic homophone processing,
    setting it to true will automatically calculate the probability by using pinyin with different tones as homophones
    **callback**: kws decoder user callback.
    **return** err::Err type, if init success, return err::ERR_NONE **static** False > C++ defination code: > ```cpp > err::Err kws(std::vector kw_tbl, std::vector kw_gate, std::function, int)> callback, bool auto_similar true) > ``` #### kws (overload 3) Get kws decoder status item description **type** func **return** bool, kws decoder status **static** False > C++ defination code: > ```cpp > bool kws() > ``` #### lvcsr ```python def lvcsr(self, sfst_name: str, sym_name: str, phones_txt: str, words_txt: str, callback: typing.Callable[[tuple[str, str], int], None], beam: float 8, bg_prob: float 10, scale: float 0.5, mmap: bool False) > maix.err.Err ``` Init lvcsr decoder, it will output continuous speech recognition results (less than 1024 Chinese characters). item description **type** func **param** **sfst_name**: Sfst file path.
    **sym_name**: Sym file path (output symbol table).
    **phones_txt**: Path to phones.bin (pinyin table).
    **words_txt**: Path to words.bin (dictionary table).
    **callback**: lvcsr decoder user callback.
    **beam**: The beam size for WFST search is set to 8 by default, and it is recommended to be between 3 and 9.
    The larger the size, the larger the search space, and the more accurate but slower the search.
    **bg_prob**: The absolute value of the natural logarithm of the default probability value for background pinyin
    outside of BEAM CNT is set to 10 by default.
    **scale**: acoustics_cost = log(pny_prob) * scale.
    **mmap**: use mmap to load the WFST decoding image,
    If set to true, the beam should be less than 5.
    **return** err::Err type, if init success, return err::ERR_NONE **static** False > C++ defination code: > ```cpp > err::Err lvcsr(const string &sfst_name, const string &sym_name, > const string &phones_txt, const string &words_txt, > std::function, int)> callback, > float beam 8, float bg_prob 10, float scale 0.5, bool mmap false) > ``` #### lvcsr (overload 1) Get lvcsr decoder status item description **type** func **return** bool, lvcsr decoder status **static** False > C++ defination code: > ```cpp > bool lvcsr() > ``` #### lvcsr (overload 2) Init lvcsr decoder, it will output continuous speech recognition results (less than 1024 Chinese characters). item description **type** func **param** **sfst_name**: Sfst file path.
    **sym_name**: Sym file path (output symbol table).
    **phones_txt**: Path to phones.bin (pinyin table).
    **words_txt**: Path to words.bin (dictionary table).
    **callback**: lvcsr decoder user callback.
    **beam**: The beam size for WFST search is set to 8 by default, and it is recommended to be between 3 and 9.
    The larger the size, the larger the search space, and the more accurate but slower the search.
    **bg_prob**: The absolute value of the natural logarithm of the default probability value for background pinyin
    outside of BEAM CNT is set to 10 by default.
    **scale**: acoustics_cost = log(pny_prob) * scale.
    **mmap**: use mmap to load the WFST decoding image,
    If set to true, the beam should be less than 5.
    **return** err::Err type, if init success, return err::ERR_NONE **static** False > C++ defination code: > ```cpp > err::Err lvcsr(const string &sfst_name, const string &sym_name, > const string &phones_txt, const string &words_txt, > std::function, int)> callback, > float beam 8, float bg_prob 10, float scale 0.5, bool mmap false) > ``` #### lvcsr (overload 3) Get lvcsr decoder status item description **type** func **return** bool, lvcsr decoder status **static** False > C++ defination code: > ```cpp > bool lvcsr() > ``` #### run ```python def run(self, frame: int) > int ``` Run speech recognition, user can run 1 frame at a time and do other processing after running,\\nor it can run continuously within a thread and be stopped by an external thread. item description **type** func **param** **frame**: The number of frames per run.
    **return** int type, return actual number of frames in the run. **static** False > C++ defination code: > ```cpp > int run(int frame) > ``` #### run (overload 1) Run speech recognition, user can run 1 frame at a time and do other processing after running,\\nor it can run continuously within a thread and be stopped by an external thread. item description **type** func **param** **frame**: The number of frames per run.
    **return** int type, return actual number of frames in the run. **static** False > C++ defination code: > ```cpp > int run(int frame) > ``` #### clear ```python def clear(self) > None ``` Reset internal cache operation item description **type** func **static** False > C++ defination code: > ```cpp > void clear() > ``` #### clear (overload 1) Reset internal cache operation item description **type** func **static** False > C++ defination code: > ```cpp > void clear() > ``` #### frame\\_time ```python def frame_time(self) > int ``` Get the time of one frame. item description **type** func **return** int type, return the time of one frame. **static** False > C++ defination code: > ```cpp > int frame_time() > ``` #### frame\\_time (overload 1) Get the time of one frame. item description **type** func **return** int type, return the time of one frame. **static** False > C++ defination code: > ```cpp > int frame_time() > ``` #### vocab ```python def vocab(self) > tuple[str, int] ``` Get the acoustic model dictionary. item description **type** func **return** std::pair type, return the dictionary and length. **static** False > C++ defination code: > ```cpp > std::pair vocab() > ``` #### vocab (overload 1) Get the acoustic model dictionary. item description **type** func **return** std::pair type, return the dictionary and length. **static** False > C++ defination code: > ```cpp > std::pair vocab() > ``` #### similar ```python def similar(self, pny: str, similar_pnys: list[str]) > maix.err.Err ``` Manually register mute words, and each pinyin can register up to 10 homophones,\\nplease note that using this interface to register homophones will overwrite,\\nthe homophone table automatically generated in the \\\"automatic homophone processing\\\" feature. item description **type** func **param** **dev_type**: device type want to detect, can choose between WAV, PCM, or MIC.
    **device_name**: device name want to detect, can choose a WAV file, a PCM file, or a MIC device name.
    **return** err::Err type, if init success, return err::ERR_NONE **static** False > C++ defination code: > ```cpp > err::Err similar(const string &pny, std::vector similar_pnys) > ``` #### similar (overload 1) Manually register mute words, and each pinyin can register up to 10 homophones,\\nplease note that using this interface to register homophones will overwrite,\\nthe homophone table automatically generated in the \\\"automatic homophone processing\\\" feature. item description **type** func **param** **dev_type**: device type want to detect, can choose between WAV, PCM, or MIC.
    **device_name**: device name want to detect, can choose a WAV file, a PCM file, or a MIC device name.
    **return** err::Err type, if init success, return err::ERR_NONE **static** False > C++ defination code: > ```cpp > err::Err similar(const string &pny, std::vector similar_pnys) > ``` #### mean Get mean value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector mean > ``` #### scale Get scale value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector scale > ``` #### dev\\_type ```python def dev_type(self) > SpeechDevice ``` get device type item description **type** func **return** nn::SpeechDevice type, see SpeechDevice of this module **static** False > C++ defination code: > ```cpp > nn::SpeechDevice dev_type() > ``` #### dev\\_type (overload 1) get device type item description **type** func **return** nn::SpeechDevice type, see SpeechDevice of this module **static** False > C++ defination code: > ```cpp > nn::SpeechDevice dev_type() > ``` ### YOLOv8 YOLOv8 class > C++ defination code: > ```cpp > class YOLOv8 > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, model: str '', dual_buff: bool True) > None ``` Constructor of YOLOv8 class item description **type** func **param** **model**: model path, default empty, you can load model later by load function.
    **dual_buff**: direction [in], prepare dual input/output buffers to accelerate forward; that is, while the NPU is forwarding we do not wait and instead prepare the next input buffer.
    If you need every forward call to return the result of the input you just provided, set this arg to false.
    Default true to ensure speed.
    **throw** If model arg is not empty and load failed, will throw err::Exception. **static** False > C++ defination code: > ```cpp > YOLOv8(const string &model \"\", bool dual_buff true) > ``` #### load ```python def load(self, model: str) > maix.err.Err ``` Load model from file item description **type** func **param** **model**: Model path want to load
    **return** err::Err **static** False > C++ defination code: > ```cpp > err::Err load(const string &model) > ``` #### detect ```python def detect(self, img: maix.image.Image, conf_th: float 0.5, iou_th: float 0.45, fit: maix.image.Fit ..., keypoint_th: float 0.5) > ... ``` Detect objects from image item description **type** func **param** **img**: Image want to detect, if image's size not match model input's, will auto resize with fit method.
    **conf_th**: Confidence threshold, default 0.5.
    **iou_th**: IoU threshold, default 0.45.
    **fit**: Resize method, default image.Fit.FIT_CONTAIN.
    **keypoint_th**: keypoint threshold, default 0.5, only for yolov8 pose model.
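A minimal MaixPy detection-loop sketch for YOLOv8.detect; the model path and the camera/display usage (from other modules) are illustrative assumptions:

```python
from maix import nn, camera, display, image

detector = nn.YOLOv8("/root/models/yolov8n.mud")    # hypothetical model path
cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
disp = display.Display()                            # assumption: display module usage

while True:
    img = cam.read()
    objs = detector.detect(img, conf_th=0.5, iou_th=0.45)
    for obj in objs:
        img.draw_rect(obj.x, obj.y, obj.w, obj.h, image.COLOR_RED)
        img.draw_string(obj.x, obj.y, detector.labels[obj.class_id], image.COLOR_RED)
    disp.show(img)
```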
    **throw** If image format not match model input format, will throw err::Exception. **return** Object list. In C++, you should delete it after use.
    If model is yolov8 pose, object's points have value, and if points' value < 0 means that point is invalid(conf < keypoint_th). **static** False > C++ defination code: > ```cpp > nn::Objects *detect(image::Image &img, float conf_th 0.5, float iou_th 0.45, maix::image::Fit fit maix::image::FIT_CONTAIN, float keypoint_th 0.5) > ``` #### input\\_size ```python def input_size(self) > maix.image.Size ``` Get model input size item description **type** func **return** model input size **static** False > C++ defination code: > ```cpp > image::Size input_size() > ``` #### input\\_width ```python def input_width(self) > int ``` Get model input width item description **type** func **return** model input size of width **static** False > C++ defination code: > ```cpp > int input_width() > ``` #### input\\_height ```python def input_height(self) > int ``` Get model input height item description **type** func **return** model input size of height **static** False > C++ defination code: > ```cpp > int input_height() > ``` #### input\\_format ```python def input_format(self) > maix.image.Format ``` Get input image format item description **type** func **return** input image format, image::Format type. **static** False > C++ defination code: > ```cpp > image::Format input_format() > ``` #### draw\\_pose ```python def draw_pose(self, img: maix.image.Image, points: list[int], radius: int 4, color: maix.image.Color ..., body: bool True) > None ``` Draw pose keypoints on image item description **type** func **param** **img**: image object, maix.image.Image type.
    **points**: keypoints, int list type, [x, y, x, y ...]
    **radius**: radius of points.
    **color**: color of points.
    **body**: if points' length is 17*2 and body is true, lines will be drawn as a human body; if set to false, no lines are drawn; default true.
    **static** False > C++ defination code: > ```cpp > void draw_pose(image::Image &img, std::vector points, int radius 4, image::Color color image::COLOR_RED, bool body true) > ``` #### draw\\_seg\\_mask ```python def draw_seg_mask(self, img: maix.image.Image, x: int, y: int, seg_mask: maix.image.Image, threshold: int 127) > None ``` Draw segmentation on image item description **type** func **param** **img**: image object, maix.image.Image type.
    **seg_mask**: segmentation mask image returned by the detect method, a grayscale image
    **threshold**: only mask values > threshold will be drawn on the image, value from 0 to 255.
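A hedged sketch of the two drawing helpers above for pose and segmentation models; the model path is hypothetical and the checks on points/seg_mask assume the fields described in the Object class below:

```python
from maix import nn, camera, image

detector = nn.YOLOv8("/root/models/yolov8n_pose.mud")  # hypothetical pose/seg model path
cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
img = cam.read()

objs = detector.detect(img, conf_th=0.5, iou_th=0.45, keypoint_th=0.5)
for obj in objs:
    if len(obj.points) > 0:        # pose model: keypoints present (invalid points are < 0)
        detector.draw_pose(img, obj.points, 4, image.COLOR_RED, body=True)
    if obj.seg_mask is not None:   # seg model: per-object grayscale mask
        detector.draw_seg_mask(img, obj.x, obj.y, obj.seg_mask, threshold=127)
```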
    **static** False > C++ defination code: > ```cpp > void draw_seg_mask(image::Image &img, int x, int y, image::Image &seg_mask, int threshold 127) > ``` #### labels Labels list item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector labels > ``` #### label\\_path Label file path item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::string label_path > ``` #### mean Get mean value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector mean > ``` #### scale Get scale value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector scale > ``` ### Object Object for detect result > C++ defination code: > ```cpp > class Object > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, x: int 0, y: int 0, w: int 0, h: int 0, class_id: int 0, score: float 0, points: list[int] []) > None ``` Constructor of Object for detect result item description **type** func **param** **x**: left top x
    **y**: left top y
    **w**: width
    **h**: height
    **class_id**: class id
    **score**: score
    **static** False > C++ defination code: > ```cpp > Object(int x 0, int y 0, int w 0, int h 0, int class_id 0, float score 0, std::vector points std::vector()) > ``` #### \\_\\_str\\_\\_ ```python def __str__(self) > str ``` Object info to string item description **type** func **return** Object info string **static** False > C++ defination code: > ```cpp > std::string to_str() > ``` #### x Object left top coordinate x item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int x > ``` #### y Object left top coordinate y item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int y > ``` #### w Object width item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int w > ``` #### h Object height item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int h > ``` #### class\\_id Object class id item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int class_id > ``` #### score Object score item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > float score > ``` #### points keypoints item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector points > ``` #### seg\\_mask segmentation mask, uint8 list type, shape is h * w but flattened to one dimension, value fron 0 to 255. item description **type** var **attention** For efficiency, it's a pointer in C++, use this carefully! **static** False **readonly** False > C++ defination code: > ```cpp > image::Image *seg_mask > ``` ### ObjectFloat Object for detect result > C++ defination code: > ```cpp > class ObjectFloat > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, x: float 0, y: float 0, w: float 0, h: float 0, class_id: float 0, score: float 0, points: list[float] []) > None ``` Constructor of Object for detect result item description **type** func **param** **x**: left top x
    **y**: left top y
    **w**: width
    **h**: height
    **class_id**: class id
    **score**: score
    **static** False > C++ defination code: > ```cpp > ObjectFloat(float x 0, float y 0, float w 0, float h 0, float class_id 0, float score 0, std::vector points std::vector()) > ``` #### \\_\\_str\\_\\_ ```python def __str__(self) > str ``` Object info to string item description **type** func **return** Object info string **static** False > C++ defination code: > ```cpp > std::string to_str() > ``` #### x Object left top coordinate x item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > float x > ``` #### y Object left top coordinate y item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > float y > ``` #### w Object width item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > float w > ``` #### h Object height item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > float h > ``` #### class\\_id Object class id item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > float class_id > ``` #### score Object score item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > float score > ``` #### points keypoints item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector points > ``` ### Objects Objects Class for detect result > C++ defination code: > ```cpp > class Objects > ``` #### \\_\\_init\\_\\_ ```python def __init__(self) > None ``` Constructor of Objects class item description **type** func **static** False > C++ defination code: > ```cpp > Objects() > ``` #### add ```python def add(self, x: int 0, y: int 0, w: int 0, h: int 0, class_id: int 0, score: float 0, points: list[int] []) > Object ``` Add object to objects item description **type** func **throw** Throw exception if no memory **static** False > C++ defination code: > ```cpp > nn::Object &add(int x 0, int y 0, int w 0, int h 0, int class_id 0, float score 0, std::vector points std::vector()) > ``` #### remove ```python def remove(self, idx: int) > maix.err.Err ``` Remove object form objects item description **type** func **static** False > C++ defination code: > ```cpp > err::Err remove(int idx) > ``` #### at ```python def at(self, idx: int) > Object ``` Get object item item description **type** func **static** False > C++ defination code: > ```cpp > nn::Object &at(int idx) > ``` #### \\_\\_item\\_\\_ ```python def __item__(self, idx: int) > Object ``` Get object item item description **type** func **static** False > C++ defination code: > ```cpp > nn::Object &operator[](int idx) > ``` #### \\_\\_len\\_\\_ ```python def __len__(self) > int ``` Get size item description **type** func **static** False > C++ defination code: > ```cpp > size_t size() > ``` #### \\_\\_iter\\_\\_ ```python def __iter__(self) > typing.Iterator ``` Begin item description **type** func **static** False > C++ defination code: > ```cpp > std::vector::iterator begin() > ``` ### MUD MUD(model universal describe file) class > C++ defination code: > ```cpp > class MUD > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, model_path: str None) > None ``` MUD constructor item description **type** func **param** **model_path**: direction [in], model file path, model format can be MUD(model universal describe file) file.
    If model_path is set, the model will be loaded from the file; if loading fails, err.Exception will be raised.
    If model_path is not set, you can load the model later with the load function.
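A tiny sketch of inspecting a MUD file with this class; the file path is a placeholder:

```python
from maix import nn

mud = nn.MUD("/root/models/model.mud")   # hypothetical MUD file path
print("model type:", mud.type)           # model type string parsed from the file
print("config items:", mud.items)        # per-section key/value items from the MUD file
```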
    **static** False > C++ defination code: > ```cpp > MUD(const char *model_path nullptr) > ``` #### load ```python def load(self, model_path: str) > maix.err.Err ``` Load model from file item description **type** func **param** **model_path**: direction [in], model file path, model format can be MUD(model universal describe file) file.
    **return** error code, if load success, return err::ERR_NONE **static** False > C++ defination code: > ```cpp > err::Err load(const std::string &model_path) > ``` #### type Model type, string type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::string type > ``` #### items Model config items, different model type has different config items item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::map> items > ``` ### LayerInfo NN model layer info > C++ defination code: > ```cpp > class LayerInfo > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, name: str '', dtype: maix.tensor.DType ..., shape: list[int] []) > None ``` LayerInfo constructor item description **type** func **param** **name**: direction [in], layer name
    **dtype**: direction [in], layer data type
    **shape**: direction [in], layer shape
    **static** False > C++ defination code: > ```cpp > LayerInfo(const std::string &name \"\", tensor::DType dtype tensor::DType::FLOAT32, std::vector shape std::vector()) > ``` #### name Layer name item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::string name > ``` #### dtype Layer data type item description **type** var **attention** If model is quantized, this is the real quantized data type like int8 float16,
    in most scene, inputs and outputs we actually use float32 in API like forward. **static** False **readonly** False > C++ defination code: > ```cpp > tensor::DType dtype > ``` #### shape Layer shape item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector shape > ``` #### shape\\_int ```python def shape_int(self) > int ``` Shape as one int type, multiply all dims of shape item description **type** func **static** False > C++ defination code: > ```cpp > int shape_int() > ``` #### to\\_str ```python def to_str(self) > str ``` To string item description **type** func **static** False > C++ defination code: > ```cpp > std::string to_str() > ``` #### \\_\\_str\\_\\_ ```python def __str__(self) > str ``` To string item description **type** func **static** False > C++ defination code: > ```cpp > std::string __str__() > ``` ### NN Neural network class > C++ defination code: > ```cpp > class NN > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, model: str '', dual_buff: bool True) > None ``` Neural network constructor item description **type** func **param** **model**: direction [in], model file path, model format can be MUD(model universal describe file) file.
    If model_path is set, the model will be loaded from the file; if loading fails, err.Exception will be raised.
    If model_path is not set, you can load the model later with the load function.
    **dual_buff**: direction [in], prepare dual input/output buffers to accelerate forward; that is, while the NPU is forwarding we do not wait and instead prepare the next input buffer.
    If you need every forward call to return the result of the input you just provided, set this arg to false.
    Default true to ensure speed.
    **static** False > C++ defination code: > ```cpp > NN(const std::string &model \"\", bool dual_buff true) > ``` #### load ```python def load(self, model: str) > maix.err.Err ``` Load model from file item description **type** func **param** **model**: direction [in], model file path, model format can be MUD(model universal describe file) file.
    **return** error code, if load success, return err::ERR_NONE **static** False > C++ defination code: > ```cpp > err::Err load(const std::string &model) > ``` #### loaded ```python def loaded(self) > bool ``` Is model loaded item description **type** func **return** true if model loaded, else false **static** False > C++ defination code: > ```cpp > bool loaded() > ``` #### set\\_dual\\_buff ```python def set_dual_buff(self, enable: bool) > None ``` Enable dual buff or disable dual buff item description **type** func **param** **enable**: true to enable, false to disable
    **static** False > C++ defination code: > ```cpp > void set_dual_buff(bool enable) > ``` #### inputs\\_info ```python def inputs_info(self) > list[LayerInfo] ``` Get model input layer info item description **type** func **return** input layer info **static** False > C++ defination code: > ```cpp > std::vector inputs_info() > ``` #### outputs\\_info ```python def outputs_info(self) > list[LayerInfo] ``` Get model output layer info item description **type** func **return** output layer info **static** False > C++ defination code: > ```cpp > std::vector outputs_info() > ``` #### extra\\_info ```python def extra_info(self) > dict[str, str] ``` Get model extra info define in MUD file item description **type** func **return** extra info, dict type, key value object, attention: key and value are all string type. **static** False > C++ defination code: > ```cpp > std::map extra_info() > ``` #### forward ```python def forward(self, inputs: maix.tensor.Tensors, copy_result: bool True, dual_buff_wait: bool False) > maix.tensor.Tensors ``` forward run model, get output of model,\\nthis is specially for MaixPy, not efficient, but easy to use in MaixPy item description **type** func **param** **input**: direction [in], input tensor
    **copy_result**: If set to true, the result will be copied to a new variable; otherwise an internal buffer is used, which is only valid until the next forward.
    Default true to avoid problems; you can set it to false manually for higher speed.
    **dual_buff_wait**: bool type, only for dual_buff mode; if true, will run inference on this input and wait for the result, default false.
    **return** output tensors. In C++, you should manually delete both the tensors inside the returned object and the returned object itself.
    If dual_buff mode, it can be NULL(None in MaixPy) means not ready. **throw** if error ocurrs like no memory or arg error, will raise err.Exception. **static** False > C++ defination code: > ```cpp > tensor::Tensors *forward(tensor::Tensors &inputs, bool copy_result true, bool dual_buff_wait false) > ``` #### forward\\_image ```python def forward_image(self, img: maix.image.Image, mean: list[float] [], scale: list[float] [], fit: maix.image.Fit ..., copy_result: bool True, dual_buff_wait: bool False) > maix.tensor.Tensors ``` forward model, param is image item description **type** func **param** **img**: input image
    **mean**: mean value, a list type, e.g. [0.485, 0.456, 0.406], default is empty list means not normalize.
    **scale**: scale value, a list type, e.g. [1/0.229, 1/0.224, 1/0.225], default is empty list means not normalize.
    **fit**: fit mode; if the input image size does not equal the model's input size, the image will be auto-resized using this fit method,
    default is image.Fit.FIT_FILL for easy coordinate calculation, but for a more accurate result, image.Fit.FIT_CONTAIN is better.
    **copy_result**: If set to true, the result will be copied to a new variable; otherwise an internal buffer is used, which is only valid until the next forward.
    Default true to avoid problems; you can set it to false manually for higher speed.
    **dual_buff_wait**: bool type, only for dual_buff mode; if true, will run inference on this image and wait for the result, default false.
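A hedged sketch of running a model with forward_image; the model path, the 224x224 input size, and the mean/scale values are illustrative assumptions:

```python
from maix import nn, image

net = nn.NN("/root/models/model.mud")    # hypothetical MUD model path
for layer in net.inputs_info():
    print("input:", layer)
for layer in net.outputs_info():
    print("output:", layer)

img = image.Image(224, 224)              # assumption: a 224x224 RGB input for illustration
outs = net.forward_image(img,
                         mean=[123.675, 116.28, 103.53],   # illustrative normalization values
                         scale=[0.0171, 0.0175, 0.0174],
                         fit=image.Fit.FIT_CONTAIN)
print(outs)                              # a tensor.Tensors object holding the model outputs
```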
    **return** output tensors. In C++, you should manually delete both the tensors inside the returned object and the returned object itself.
    If dual_buff mode, it can be NULL(None in MaixPy) means not ready. **throw** If error occurs, like arg error or alloc memory failed, will raise err.Exception. **static** False > C++ defination code: > ```cpp > tensor::Tensors *forward_image(image::Image &img, std::vector mean std::vector(), std::vector scale std::vector(), image::Fit fit image::Fit::FIT_FILL, bool copy_result true, bool dual_buff_wait false) > ``` ### FaceObject Face object > C++ defination code: > ```cpp > class FaceObject > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, x: int 0, y: int 0, w: int 0, h: int 0, class_id: int 0, score: float 0, points: list[int] [], feature: list[float] [], face: maix.image.Image ...) > None ``` Constructor item description **type** func **static** False > C++ defination code: > ```cpp > FaceObject(int x 0, int y 0, int w 0, int h 0, int class_id 0, float score 0, std::vector points std::vector(), std::vector feature std::vector(), image::Image face image::Image()) > ``` #### \\_\\_str\\_\\_ ```python def __str__(self) > str ``` FaceObject info to string item description **type** func **return** FaceObject info string **static** False > C++ defination code: > ```cpp > std::string to_str() > ``` #### x FaceObject left top coordinate x item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int x > ``` #### y FaceObject left top coordinate y item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int y > ``` #### w FaceObject width item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int w > ``` #### h FaceObject height item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int h > ``` #### class\\_id FaceObject class id item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > int class_id > ``` #### score FaceObject score item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > float score > ``` #### points keypoints item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector points > ``` #### feature feature, float list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector feature > ``` #### face face image item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > image::Image face > ``` ### FaceRecognizer FaceRecognizer class > C++ defination code: > ```cpp > class FaceRecognizer > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, detect_model: str '', feature_model: str '', dual_buff: bool True) > None ``` Constructor of FaceRecognizer class item description **type** func **param** **detect_model**: face detect model path, default empty, you can load model later by load function.
    **feature_model**: feature extract model
    **dual_buff**: direction [in], prepare dual input/output buffers to accelerate forward; that is, while the NPU is forwarding we do not wait and instead prepare the next input buffer.
    If you need every forward call to return the result of the input you just provided, set this arg to false.
    Default true to ensure speed.
    **throw** If model arg is not empty and load failed, will throw err::Exception. **static** False > C++ defination code: > ```cpp > FaceRecognizer(const string &detect_model \"\", const string &feature_model \"\", bool dual_buff true) > ``` #### load ```python def load(self, detect_model: str, feature_model: str) > maix.err.Err ``` Load model from file item description **type** func **param** **detect_model**: face detect model path, default empty, you can load model later by load function.
    **feature_model**: feature extract model
    **return** err::Err **static** False > C++ defination code: > ```cpp > err::Err load(const string &detect_model, const string &feature_model) > ``` #### recognize ```python def recognize(self, img: maix.image.Image, conf_th: float 0.5, iou_th: float 0.45, compare_th: float 0.8, get_feature: bool False, get_face: bool False, fit: maix.image.Fit ...) > list[FaceObject] ``` Detect objects from image item description **type** func **param** **img**: Image want to detect, if image's size not match model input's, will auto resize with fit method.
    **conf_th**: Detect confidence threshold, default 0.5.
    **iou_th**: Detect IoU threshold, default 0.45.
    **compare_th**: Face comparison score threshold, default 0.8; if two faces' similarity score < this value, the face is treated as unknown.
    **get_feature**: return feature or not, if true will copy features to result, if false will not copy feature to result to save time and memory.
    **get_face**: return the face image or not; if true, the result object's face attribute will be valid, otherwise the face attribute is empty. Getting the face image allocates memory and copies the image, so it is slower.
    **fit**: Resize method, default image.Fit.FIT_CONTAIN.
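A hedged sketch of the face-recognition flow (recognize, add_face, save_faces); model paths, labels, and camera usage are illustrative assumptions:

```python
from maix import nn, camera, image

recognizer = nn.FaceRecognizer("/root/models/face_detect.mud",
                               "/root/models/face_feature.mud")  # hypothetical model paths
cam = camera.Camera(recognizer.input_width(), recognizer.input_height())

img = cam.read()
faces = recognizer.recognize(img, conf_th=0.5, iou_th=0.45, compare_th=0.8)
for face in faces:
    name = recognizer.labels[face.class_id]        # labels[0] is "unknown"
    img.draw_rect(face.x, face.y, face.w, face.h, image.COLOR_RED)
    img.draw_string(face.x, face.y, name, image.COLOR_RED)

# register the first detected face under a new label, then persist the library
if len(faces) > 0:
    recognizer.add_face(faces[0], "alice")         # hypothetical label
    recognizer.save_faces("/root/faces.bin")       # hypothetical path
```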
    **throw** If image format not match model input format, will throw err::Exception. **return** FaceObject list. In C++, you should delete it after use. **static** False > C++ defination code: > ```cpp > std::vector *recognize(image::Image &img, float conf_th 0.5, float iou_th 0.45, float compare_th 0.8, bool get_feature false, bool get_face false, maix::image::Fit fit maix::image::FIT_CONTAIN) > ``` #### add\\_face ```python def add_face(self, face: FaceObject, label: str) > maix.err.Err ``` Add face to lib item description **type** func **param** **face**: face object, find by recognize
    **label**: face label(name)
    **static** False > C++ defination code: > ```cpp > err::Err add_face(nn::FaceObject *face, const std::string &label) > ``` #### remove\\_face ```python def remove_face(self, idx: int 1, label: str '') > maix.err.Err ``` remove face from lib item description **type** func **param** **idx**: index of face in lib, default 1 means use label, idx and label must have one, idx have high priotiry.
    **label**: which face to remove, default empty string means use idx; idx and label must have at least one set, and idx has higher priority.
    **static** False > C++ defination code: > ```cpp > err::Err remove_face(int idx 1, const std::string &label \"\") > ``` #### save\\_faces ```python def save_faces(self, path: str) > maix.err.Err ``` Save faces info to a file item description **type** func **param** **path**: where to save, string type.
    **return** err.Err type **static** False > C++ defination code: > ```cpp > err::Err save_faces(const std::string &path) > ``` #### load\\_faces ```python def load_faces(self, path: str) > maix.err.Err ``` Load faces info from a file item description **type** func **param** **path**: from where to load, string type.
    **return** err::Err type **static** False > C++ defination code: > ```cpp > err::Err load_faces(const std::string &path) > ``` #### input\\_size ```python def input_size(self) > maix.image.Size ``` Get model input size item description **type** func **return** model input size **static** False > C++ defination code: > ```cpp > image::Size input_size() > ``` #### input\\_width ```python def input_width(self) > int ``` Get model input width item description **type** func **return** model input size of width **static** False > C++ defination code: > ```cpp > int input_width() > ``` #### input\\_height ```python def input_height(self) > int ``` Get model input height item description **type** func **return** model input size of height **static** False > C++ defination code: > ```cpp > int input_height() > ``` #### input\\_format ```python def input_format(self) > maix.image.Format ``` Get input image format item description **type** func **return** input image format, image::Format type. **static** False > C++ defination code: > ```cpp > image::Format input_format() > ``` #### mean\\_detector Get mean value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector mean_detector > ``` #### scale\\_detector Get scale value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector scale_detector > ``` #### mean\\_feature Get mean value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector mean_feature > ``` #### scale\\_feature Get scale value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector scale_feature > ``` #### labels labels, list type, first is \\\"unknown\\\" item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector labels > ``` #### features features item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector> features > ``` ### SelfLearnClassifier SelfLearnClassifier > C++ defination code: > ```cpp > class SelfLearnClassifier > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, model: str '', dual_buff: bool True) > None ``` Construct a new SelfLearnClassifier object item description **type** func **param** **model**: MUD model path, if empty, will not load model, you can call load_model() later.
    if not empty, will load model and will raise err::Exception if load failed.
    **dual_buff**: direction [in], prepare dual input/output buffers to accelerate forward; that is, while the NPU is forwarding we do not wait and instead prepare the next input buffer.
    If you need every forward call to return the result of the input you just provided, set this arg to false.
    Default true to ensure speed.
    **static** False > C++ defination code: > ```cpp > SelfLearnClassifier(const std::string &model \"\", bool dual_buff true) > ``` #### load\\_model ```python def load_model(self, model: str) > maix.err.Err ``` Load model from file, model format is .mud,\\nMUD file should contain [extra] section, have key values:\\n model_type: classifier_no_top\\n input_type: rgb or bgr\\n mean: 123.675, 116.28, 103.53\\n scale: 0.017124753831663668, 0.01750700280112045, 0.017429193899782137 item description **type** func **param** **model**: MUD model path
    **return** error code, if load failed, return error code **static** False > C++ defination code: > ```cpp > err::Err load_model(const string &model) > ``` #### classify ```python def classify(self, img: maix.image.Image, fit: maix.image.Fit ...) > list[tuple[int, float]] ``` Classify image item description **type** func **param** **img**: image, format should match model input_type, or will raise err.Exception
    **fit**: image resize fit mode, default Fit.FIT_COVER, see image.Fit.
    **throw** If error occurred, will raise err::Exception, you can find reason in log, mostly caused by args error or hardware error. **return** result, a list of (idx, distance), smaller distance means more similar. In C++, you need to delete it after use. **static** False > C++ defination code: > ```cpp > std::vector> *classify(image::Image &img, image::Fit fit image::FIT_COVER) > ``` #### add\\_class ```python def add_class(self, img: maix.image.Image, fit: maix.image.Fit ...) > None ``` Add a class to recognize item description **type** func **param** **img**: Add a image as a new class
    **fit**: image resize fit mode, default Fit.FIT_COVER, see image.Fit.
    **static** False > C++ defination code: > ```cpp > void add_class(image::Image &img, image::Fit fit image::FIT_COVER) > ``` #### class\\_num ```python def class_num(self) > int ``` Get class number item description **type** func **static** False > C++ defination code: > ```cpp > int class_num() > ``` #### rm\\_class ```python def rm_class(self, idx: int) > maix.err.Err ``` Remove a class item description **type** func **param** **idx**: index, value from 0 to class_num();
    **static** False > C++ defination code: > ```cpp > err::Err rm_class(int idx) > ``` #### add\\_sample ```python def add_sample(self, img: maix.image.Image, fit: maix.image.Fit ...) > None ``` Add sample, you should call learn method after add some samples to learn classes.\\nSample image can be any of classes we already added. item description **type** func **param** **img**: Add a image as a new sample.
    **static** False > C++ defination code: > ```cpp > void add_sample(image::Image &img, image::Fit fit image::FIT_COVER) > ``` #### rm\\_sample ```python def rm_sample(self, idx: int) > maix.err.Err ``` Remove a sample item description **type** func **param** **idx**: index, value from 0 to sample_num();
    **static** False > C++ defination code: > ```cpp > err::Err rm_sample(int idx) > ``` #### sample\\_num ```python def sample_num(self) > int ``` Get sample number item description **type** func **static** False > C++ defination code: > ```cpp > int sample_num() > ``` #### learn ```python def learn(self) > int ``` Start auto learn class features from classes image and samples.\\nYou should call this method after you add some samples. item description **type** func **return** learn epoch(times), 0 means learn nothing. **static** False > C++ defination code: > ```cpp > int learn() > ``` #### clear ```python def clear(self) > None ``` Clear all class and samples item description **type** func **static** False > C++ defination code: > ```cpp > void clear() > ``` #### input\\_size ```python def input_size(self) > maix.image.Size ``` Get model input size, only for image input item description **type** func **return** model input size **static** False > C++ defination code: > ```cpp > image::Size input_size() > ``` #### input\\_width ```python def input_width(self) > int ``` Get model input width, only for image input item description **type** func **return** model input size of width **static** False > C++ defination code: > ```cpp > int input_width() > ``` #### input\\_height ```python def input_height(self) > int ``` Get model input height, only for image input item description **type** func **return** model input size of height **static** False > C++ defination code: > ```cpp > int input_height() > ``` #### input\\_format ```python def input_format(self) > maix.image.Format ``` Get input image format, only for image input item description **type** func **return** input image format, image::Format type. **static** False > C++ defination code: > ```cpp > image::Format input_format() > ``` #### input\\_shape ```python def input_shape(self) > list[int] ``` Get input shape, if have multiple input, only return first input shape item description **type** func **return** input shape, list type **static** False > C++ defination code: > ```cpp > std::vector input_shape() > ``` #### save ```python def save(self, path: str, labels: list[str] []) > maix.err.Err ``` Save features and labels to a binary file item description **type** func **param** **path**: file path to save, e.g. /root/my_classes.bin
    **labels**: class labels, can be None; otherwise its length must equal the class num, or err::Err will be returned
    **return** maix.err.Err if labels exists but length not equal to class num, or save file failed, or class num is 0. **static** False > C++ defination code: > ```cpp > err::Err save(const std::string &path, const std::vector &labels std::vector()) > ``` #### load ```python def load(self, path: str) > list[str] ``` Load features info from binary file item description **type** func **param** **path**: feature info binary file path, e.g. /root/my_classes.bin
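A minimal MaixPy sketch of the self-learning workflow described above (add classes, add samples, learn, classify, save). The model path, class count and label names are placeholders, not values shipped with the firmware:

```python
from maix import nn, camera

classifier = nn.SelfLearnClassifier('/root/models/classifier_no_top.mud')
cam = camera.Camera(classifier.input_width(), classifier.input_height(), classifier.input_format())

# one image per class to recognize
classifier.add_class(cam.read())
classifier.add_class(cam.read())
# a few extra samples belonging to the classes above
for _ in range(4):
    classifier.add_sample(cam.read())

print('learn epochs:', classifier.learn())

# classify a new frame: results are (index, distance) pairs, smaller distance means more similar
results = classifier.classify(cam.read())
best = min(results, key=lambda r: r[1])
print(best)

classifier.save('/root/my_classes.bin', ['class_0', 'class_1'])
```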
    **static** False > C++ defination code: > ```cpp > std::vector load(const std::string &path) > ``` #### labels Labels list item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector labels > ``` #### label\\_path Label file path item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::string label_path > ``` #### mean Get mean value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector mean > ``` #### scale Get scale value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector scale > ``` ### YOLOv5 YOLOv5 class > C++ defination code: > ```cpp > class YOLOv5 > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, model: str '', dual_buff: bool True) > None ``` Constructor of YOLOv5 class item description **type** func **param** **model**: model path, default empty, you can load model later by load function.
    **dual_buff**: direction [in], prepare dual input/output buffers to accelerate forward, that is, while the NPU is forwarding we do not wait but prepare the next input buffer.
    If you need every forward call to return the result of its own input, set this arg to false.
    Default is true for maximum speed.
    **throw** If model arg is not empty and load failed, will throw err::Exception. **static** False > C++ defination code: > ```cpp > YOLOv5(const string &model \"\", bool dual_buff true) > ``` #### load ```python def load(self, model: str) > maix.err.Err ``` Load model from file item description **type** func **param** **model**: Model path want to load
    **return** err::Err **static** False > C++ defination code: > ```cpp > err::Err load(const string &model) > ``` #### detect ```python def detect(self, img: maix.image.Image, conf_th: float 0.5, iou_th: float 0.45, fit: maix.image.Fit ...) > list[Object] ``` Detect objects from image item description **type** func **param** **img**: Image want to detect, if image's size not match model input's, will auto resize with fit method.
    **conf_th**: Confidence threshold, default 0.5.
    **iou_th**: IoU threshold, default 0.45.
    **fit**: Resize method, default image.Fit.FIT_CONTAIN.
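A short MaixPy sketch of this detect loop; the MUD model path is a placeholder and the thresholds simply repeat the defaults documented above:

```python
from maix import camera, display, image, nn

detector = nn.YOLOv5('/root/models/yolov5s.mud')
cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
disp = display.Display()

while True:
    img = cam.read()
    objs = detector.detect(img, conf_th=0.5, iou_th=0.45)
    for obj in objs:
        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
        msg = detector.labels[obj.class_id] + ' ' + str(round(obj.score, 2))
        img.draw_string(obj.x, obj.y, msg, color=image.COLOR_RED)
    disp.show(img)
```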
    **throw** If image format not match model input format, will throw err::Exception. **return** Object list. In C++, you should delete it after use. **static** False > C++ defination code: > ```cpp > std::vector *detect(image::Image &img, float conf_th 0.5, float iou_th 0.45, maix::image::Fit fit maix::image::FIT_CONTAIN) > ``` #### input\\_size ```python def input_size(self) > maix.image.Size ``` Get model input size item description **type** func **return** model input size **static** False > C++ defination code: > ```cpp > image::Size input_size() > ``` #### input\\_width ```python def input_width(self) > int ``` Get model input width item description **type** func **return** model input size of width **static** False > C++ defination code: > ```cpp > int input_width() > ``` #### input\\_height ```python def input_height(self) > int ``` Get model input height item description **type** func **return** model input size of height **static** False > C++ defination code: > ```cpp > int input_height() > ``` #### input\\_format ```python def input_format(self) > maix.image.Format ``` Get input image format item description **type** func **return** input image format, image::Format type. **static** False > C++ defination code: > ```cpp > image::Format input_format() > ``` #### labels Labels list item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector labels > ``` #### label\\_path Label file path item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::string label_path > ``` #### mean Get mean value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector mean > ``` #### scale Get scale value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector scale > ``` #### anchors Get anchors item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector anchors > ``` ### YOLO11 YOLO11 class > C++ defination code: > ```cpp > class YOLO11 > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, model: str '', dual_buff: bool True) > None ``` Constructor of YOLO11 class item description **type** func **param** **model**: model path, default empty, you can load model later by load function.
    **dual_buff**: direction [in], prepare dual input/output buffers to accelerate forward, that is, while the NPU is forwarding we do not wait but prepare the next input buffer.
    If you need every forward call to return the result of its own input, set this arg to false.
    Default is true for maximum speed.
    **throw** If model arg is not empty and load failed, will throw err::Exception. **static** False > C++ defination code: > ```cpp > YOLO11(const string &model \"\", bool dual_buff true) > ``` #### load ```python def load(self, model: str) > maix.err.Err ``` Load model from file item description **type** func **param** **model**: Model path want to load
    **return** err::Err **static** False > C++ defination code: > ```cpp > err::Err load(const string &model) > ``` #### detect ```python def detect(self, img: maix.image.Image, conf_th: float 0.5, iou_th: float 0.45, fit: maix.image.Fit ..., keypoint_th: float 0.5) > Objects ``` Detect objects from image item description **type** func **param** **img**: Image want to detect, if image's size not match model input's, will auto resize with fit method.
    **conf_th**: Confidence threshold, default 0.5.
    **iou_th**: IoU threshold, default 0.45.
    **fit**: Resize method, default image.Fit.FIT_CONTAIN.
    **keypoint_th**: keypoint threshold, default 0.5, only for yolo11 pose model.
    **throw** If the image format does not match the model input format, err::Exception will be thrown. **return** Object list. In C++, you should delete it after use.
    If model is yolo11 pose, object's points have value, and if points' value < 0 means that point is invalid(conf < keypoint_th). **static** False > C++ defination code: > ```cpp > nn::Objects *detect(image::Image &img, float conf_th 0.5, float iou_th 0.45, maix::image::Fit fit maix::image::FIT_CONTAIN, float keypoint_th 0.5) > ``` #### input\\_size ```python def input_size(self) > maix.image.Size ``` Get model input size item description **type** func **return** model input size **static** False > C++ defination code: > ```cpp > image::Size input_size() > ``` #### input\\_width ```python def input_width(self) > int ``` Get model input width item description **type** func **return** model input size of width **static** False > C++ defination code: > ```cpp > int input_width() > ``` #### input\\_height ```python def input_height(self) > int ``` Get model input height item description **type** func **return** model input size of height **static** False > C++ defination code: > ```cpp > int input_height() > ``` #### input\\_format ```python def input_format(self) > maix.image.Format ``` Get input image format item description **type** func **return** input image format, image::Format type. **static** False > C++ defination code: > ```cpp > image::Format input_format() > ``` #### draw\\_pose ```python def draw_pose(self, img: maix.image.Image, points: list[int], radius: int 4, color: maix.image.Color ..., body: bool True) > None ``` Draw pose keypoints on image item description **type** func **param** **img**: image object, maix.image.Image type.
    **points**: keypoints, int list type, [x, y, x, y ...]
    **radius**: radius of points.
    **color**: color of points.
    **body**: if true and the points' length is 17*2, lines will be drawn to connect the keypoints as a human body; if false, no lines are drawn. Default is true.
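A hedged MaixPy sketch combining detect and draw_pose for a YOLO11 pose model; the model path is a placeholder:

```python
from maix import camera, display, image, nn

detector = nn.YOLO11('/root/models/yolo11n_pose.mud')
cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
disp = display.Display()

while True:
    img = cam.read()
    objs = detector.detect(img, conf_th=0.5, iou_th=0.45, keypoint_th=0.5)
    for obj in objs:
        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
        # for pose models obj.points holds [x, y, x, y, ...]; invalid points are < 0
        detector.draw_pose(img, obj.points, radius=4, color=image.COLOR_RED)
    disp.show(img)
```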
    **static** False > C++ defination code: > ```cpp > void draw_pose(image::Image &img, std::vector points, int radius 4, image::Color color image::COLOR_RED, bool body true) > ``` #### draw\\_seg\\_mask ```python def draw_seg_mask(self, img: maix.image.Image, x: int, y: int, seg_mask: maix.image.Image, threshold: int 127) > None ``` Draw segmentation on image item description **type** func **param** **img**: image object, maix.image.Image type.
    **seg_mask**: segmentation mask image produced by the detect method, a grayscale image
    **threshold**: only mask values greater than threshold will be drawn on the image, value from 0 to 255.
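A small sketch of how draw_seg_mask is typically used, assuming a YOLO11 segmentation model whose detect results expose a seg_mask image on each object (the model path and the seg_mask attribute are assumptions; verify against your MaixPy version):

```python
from maix import camera, image, nn

detector = nn.YOLO11('/root/models/yolo11n_seg.mud')   # placeholder path, seg variant assumed
cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())

img = cam.read()
for obj in detector.detect(img, conf_th=0.5, iou_th=0.45):
    # seg_mask is assumed to be a grayscale mask filled only by segmentation models
    if obj.seg_mask:
        detector.draw_seg_mask(img, obj.x, obj.y, obj.seg_mask, threshold=127)
    img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
```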
    **static** False > C++ defination code: > ```cpp > void draw_seg_mask(image::Image &img, int x, int y, image::Image &seg_mask, int threshold 127) > ``` #### labels Labels list item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector labels > ``` #### label\\_path Label file path item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::string label_path > ``` #### mean Get mean value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector mean > ``` #### scale Get scale value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector scale > ``` ### Classifier Classifier > C++ defination code: > ```cpp > class Classifier > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, model: str '', dual_buff: bool True) > None ``` Construct a new Classifier object item description **type** func **param** **model**: MUD model path, if empty, will not load model, you can call load() later.
    if not empty, the model will be loaded, and err::Exception will be raised if loading fails.
    **dual_buff**: direction [in], prepare dual input/output buffers to accelerate forward, that is, while the NPU is forwarding we do not wait but prepare the next input buffer.
    If you need every forward call to return the result of its own input, set this arg to false.
    Default is true for maximum speed.
    **static** False > C++ defination code: > ```cpp > Classifier(const string &model \"\", bool dual_buff true) > ``` #### load ```python def load(self, model: str) > maix.err.Err ``` Load model from file, model format is .mud,\\nMUD file should contain [extra] section, have key values:\\n model_type: classifier\\n input_type: rgb or bgr\\n mean: 123.675, 116.28, 103.53\\n scale: 0.017124753831663668, 0.01750700280112045, 0.017429193899782137\\n labels: imagenet_classes.txt item description **type** func **param** **model**: MUD model path
    **return** error code, if load failed, return error code **static** False > C++ defination code: > ```cpp > err::Err load(const string &model) > ``` #### classify ```python def classify(self, img: maix.image.Image, softmax: bool True, fit: maix.image.Fit ...) > list[tuple[int, float]] ``` Forward image to model, get result. Only for image input, use classify_raw for tensor input. item description **type** func **param** **img**: image, format should match model input_type, or will raise err.Exception
    **softmax**: if true, softmax is applied to the result; otherwise raw values are returned
    **fit**: image resize fit mode, default Fit.FIT_COVER, see image.Fit.
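A minimal MaixPy classification sketch; the MUD model path is a placeholder:

```python
from maix import camera, nn

classifier = nn.Classifier('/root/models/classifier.mud')
cam = camera.Camera(classifier.input_width(), classifier.input_height(), classifier.input_format())

img = cam.read()
results = classifier.classify(img, softmax=True)   # list of (label_index, score) pairs
idx, score = max(results, key=lambda r: r[1])
print(classifier.labels[idx], score)
```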
    **throw** If error occurred, will raise err::Exception, you can find reason in log, mostly caused by args error or hardware error. **return** result, a list of (label, score). If in dual_buff mode, value can be one element list and score is zero when not ready. In C++, you need to delete it after use. **static** False > C++ defination code: > ```cpp > std::vector> *classify(image::Image &img, bool softmax true, image::Fit fit image::FIT_COVER) > ``` #### classify\\_raw ```python def classify_raw(self, data: maix.tensor.Tensor, softmax: bool True) > list[tuple[int, float]] ``` Forward tensor data to model, get result item description **type** func **param** **data**: tensor data, format should match model input_type, or will raise err.Excetion
    **softmax**: if true, softmax is applied to the result; otherwise raw values are returned
    **throw** If error occurred, will raise err::Exception, you can find reason in log, mostly caused by args error or hardware error. **return** result, a list of (label, score). In C++, you need to delete it after use. **static** False > C++ defination code: > ```cpp > std::vector> *classify_raw(tensor::Tensor &data, bool softmax true) > ``` #### input\\_size ```python def input_size(self) > maix.image.Size ``` Get model input size, only for image input item description **type** func **return** model input size **static** False > C++ defination code: > ```cpp > image::Size input_size() > ``` #### input\\_width ```python def input_width(self) > int ``` Get model input width, only for image input item description **type** func **return** model input size of width **static** False > C++ defination code: > ```cpp > int input_width() > ``` #### input\\_height ```python def input_height(self) > int ``` Get model input height, only for image input item description **type** func **return** model input size of height **static** False > C++ defination code: > ```cpp > int input_height() > ``` #### input\\_format ```python def input_format(self) > maix.image.Format ``` Get input image format, only for image input item description **type** func **return** input image format, image::Format type. **static** False > C++ defination code: > ```cpp > image::Format input_format() > ``` #### input\\_shape ```python def input_shape(self) > list[int] ``` Get input shape, if have multiple input, only return first input shape item description **type** func **return** input shape, list type **static** False > C++ defination code: > ```cpp > std::vector input_shape() > ``` #### labels Labels list item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector labels > ``` #### label\\_path Label file path item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::string label_path > ``` #### mean Get mean value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector mean > ``` #### scale Get scale value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector scale > ``` ### Retinaface Retinaface class > C++ defination code: > ```cpp > class Retinaface > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, model: str '', dual_buff: bool True) > None ``` Constructor of Retinaface class item description **type** func **param** **model**: model path, default empty, you can load model later by load function.
    **dual_buff**: direction [in], prepare dual input/output buffers to accelerate forward, that is, while the NPU is forwarding we do not wait but prepare the next input buffer.
    If you need every forward call to return the result of its own input, set this arg to false.
    Default is true for maximum speed.
    **throw** If model arg is not empty and load failed, will throw err::Exception. **static** False > C++ defination code: > ```cpp > Retinaface(const string &model \"\", bool dual_buff true) > ``` #### load ```python def load(self, model: str) > maix.err.Err ``` Load model from file item description **type** func **param** **model**: Model path want to load
    **return** err::Err **static** False > C++ defination code: > ```cpp > err::Err load(const string &model) > ``` #### detect ```python def detect(self, img: maix.image.Image, conf_th: float 0.4, iou_th: float 0.45, fit: maix.image.Fit ...) > list[Object] ``` Detect objects from image item description **type** func **param** **img**: Image want to detect, if image's size not match model input's, will auto resize with fit method.
    **conf_th**: Confidence threshold, default 0.4.
    **iou_th**: IoU threshold, default 0.45.
    **fit**: Resize method, default image.Fit.FIT_CONTAIN.
    **throw** If image format not match model input format, will throw err::Exception. **return** Object list. In C++, you should delete it after use. **static** False > C++ defination code: > ```cpp > std::vector *detect(image::Image &img, float conf_th 0.4, float iou_th 0.45, maix::image::Fit fit maix::image::FIT_CONTAIN) > ``` #### input\\_size ```python def input_size(self) > maix.image.Size ``` Get model input size item description **type** func **return** model input size **static** False > C++ defination code: > ```cpp > image::Size input_size() > ``` #### input\\_width ```python def input_width(self) > int ``` Get model input width item description **type** func **return** model input size of width **static** False > C++ defination code: > ```cpp > int input_width() > ``` #### input\\_height ```python def input_height(self) > int ``` Get model input height item description **type** func **return** model input size of height **static** False > C++ defination code: > ```cpp > int input_height() > ``` #### input\\_format ```python def input_format(self) > maix.image.Format ``` Get input image format item description **type** func **return** input image format, image::Format type. **static** False > C++ defination code: > ```cpp > image::Format input_format() > ``` #### mean Get mean value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector mean > ``` #### scale Get scale value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector scale > ``` ### FaceDetector FaceDetector class > C++ defination code: > ```cpp > class FaceDetector > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, model: str '', dual_buff: bool True) > None ``` Constructor of FaceDetector class item description **type** func **param** **model**: model path, default empty, you can load model later by load function.
    **dual_buff**: direction [in], prepare dual input/output buffers to accelerate forward, that is, while the NPU is forwarding we do not wait but prepare the next input buffer.
    If you need every forward call to return the result of its own input, set this arg to false.
    Default is true for maximum speed.
    **throw** If model arg is not empty and load failed, will throw err::Exception. **static** False > C++ defination code: > ```cpp > FaceDetector(const string &model \"\", bool dual_buff true) > ``` #### load ```python def load(self, model: str) > maix.err.Err ``` Load model from file item description **type** func **param** **model**: Model path want to load
    **return** err::Err **static** False > C++ defination code: > ```cpp > err::Err load(const string &model) > ``` #### detect ```python def detect(self, img: maix.image.Image, conf_th: float 0.5, iou_th: float 0.45, fit: maix.image.Fit ...) > list[Object] ``` Detect objects from image item description **type** func **param** **img**: Image want to detect, if image's size not match model input's, will auto resize with fit method.
    **conf_th**: Confidence threshold, default 0.5.
    **iou_th**: IoU threshold, default 0.45.
    **fit**: Resize method, default image.Fit.FIT_CONTAIN.
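A short MaixPy sketch of the face detection loop; the model path is a placeholder:

```python
from maix import camera, display, image, nn

detector = nn.FaceDetector('/root/models/face_detector.mud')
cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
disp = display.Display()

while True:
    img = cam.read()
    for obj in detector.detect(img, conf_th=0.5, iou_th=0.45):
        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
    disp.show(img)
```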
    **throw** If image format not match model input format, will throw err::Exception. **return** Object list. In C++, you should delete it after use. **static** False > C++ defination code: > ```cpp > std::vector *detect(image::Image &img, float conf_th 0.5, float iou_th 0.45, maix::image::Fit fit maix::image::FIT_CONTAIN) > ``` #### input\\_size ```python def input_size(self) > maix.image.Size ``` Get model input size item description **type** func **return** model input size **static** False > C++ defination code: > ```cpp > image::Size input_size() > ``` #### input\\_width ```python def input_width(self) > int ``` Get model input width item description **type** func **return** model input size of width **static** False > C++ defination code: > ```cpp > int input_width() > ``` #### input\\_height ```python def input_height(self) > int ``` Get model input height item description **type** func **return** model input size of height **static** False > C++ defination code: > ```cpp > int input_height() > ``` #### input\\_format ```python def input_format(self) > maix.image.Format ``` Get input image format item description **type** func **return** input image format, image::Format type. **static** False > C++ defination code: > ```cpp > image::Format input_format() > ``` #### mean Get mean value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector mean > ``` #### scale Get scale value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector scale > ``` ### PP\\_OCR PP_OCR class > C++ defination code: > ```cpp > class PP_OCR > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, model: str '') > None ``` Constructor of PP_OCR class item description **type** func **param** **model**: model path, default empty, you can load model later by load function.
    **throw** If model arg is not empty and load failed, will throw err::Exception. **static** False > C++ defination code: > ```cpp > PP_OCR(const string &model \"\") > ``` #### load ```python def load(self, model: str) > maix.err.Err ``` Load model from file item description **type** func **param** **model**: Model path want to load
    **return** err::Err **static** False > C++ defination code: > ```cpp > err::Err load(const string &model) > ``` #### detect ```python def detect(self, img: maix.image.Image, thresh: float 0.3, box_thresh: float 0.6, fit: maix.image.Fit ..., char_box: bool False) > OCR_Objects ``` Detect objects from image item description **type** func **param** **img**: Image want to detect, if image's size not match model input's, will auto resize with fit method.
    **thresh**: Confidence threshold for pixels that contain a character, default 0.3.
    **box_thresh**: Box threshold; boxes with probability higher than this value are considered valid, default 0.6.
    **fit**: Resize method, default image.Fit.FIT_CONTAIN.
    **char_box**: Calculate every character's box, default false; if true, you can get each character's box from nn.OCR_Object's char_boxes attribute.
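A hedged MaixPy sketch of OCR detection; the model path is a placeholder, and the char_str() accessor on nn.OCR_Object is taken from typical MaixPy OCR examples, so check it against your firmware version:

```python
from maix import camera, nn

ocr = nn.PP_OCR('/root/models/pp_ocr.mud')
cam = camera.Camera(ocr.input_width(), ocr.input_height(), ocr.input_format())

img = cam.read()
objs = ocr.detect(img, thresh=0.3, box_thresh=0.6)
for obj in objs:
    # char_str() is assumed to return the recognized text of the region
    print(obj.char_str())
```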
    **throw** If image format not match model input format or no memory, will throw err::Exception. **return** nn.OCR_Objects type. In C++, you should delete it after use. **static** False > C++ defination code: > ```cpp > nn::OCR_Objects *detect(image::Image &img, float thresh 0.3, float box_thresh 0.6, maix::image::Fit fit maix::image::FIT_CONTAIN, bool char_box false) > ``` #### recognize ```python def recognize(self, img: maix.image.Image, box_points: list[int] []) > OCR_Object ``` Only recognize, not detect item description **type** func **param** **img**: image to recognize chractors, can be a stanrd cropped charactors image,
    if the cropped image is not standard, you can use box_points to assign where the characters' 4 corners are.
    **box_points**: list type, length must be 8 or 0, default empty means the image is not transformed to a standard image.
    4 points' positions, format: [x1, y1, x2, y2, x3, y3, x4, y4], point 1 at the top left, point 2 at the top right...
    **char_box**: Calculate every character's box, default false; if true, you can get each character's box from nn.OCR_Object's char_boxes attribute.
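And a sketch of recognize-only use on a pre-cropped region; the model and image paths are placeholders and char_str() is assumed as noted above:

```python
from maix import image, nn

ocr = nn.PP_OCR('/root/models/pp_ocr.mud')   # placeholder path
crop = image.load('/root/word.jpg')          # placeholder image path
# an empty box_points list means the crop is already a standard rectified text region;
# otherwise pass [x1, y1, x2, y2, x3, y3, x4, y4] for its 4 corners
obj = ocr.recognize(crop, [])
print(obj.char_str())                        # accessor name assumed, see the note above
```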
    **static** False > C++ defination code: > ```cpp > nn::OCR_Object *recognize(image::Image &img, const std::vector &box_points std::vector()) > ``` #### draw\\_seg\\_mask ```python def draw_seg_mask(self, img: maix.image.Image, x: int, y: int, seg_mask: maix.image.Image, threshold: int 127) > None ``` Draw segmentation on image item description **type** func **param** **img**: image object, maix.image.Image type.
    **seg_mask**: segmentation mask image produced by the detect method, a grayscale image
    **threshold**: only mask values greater than threshold will be drawn on the image, value from 0 to 255.
    **static** False > C++ defination code: > ```cpp > void draw_seg_mask(image::Image &img, int x, int y, image::Image &seg_mask, int threshold 127) > ``` #### input\\_size ```python def input_size(self) > maix.image.Size ``` Get model input size item description **type** func **return** model input size **static** False > C++ defination code: > ```cpp > image::Size input_size() > ``` #### input\\_width ```python def input_width(self) > int ``` Get model input width item description **type** func **return** model input size of width **static** False > C++ defination code: > ```cpp > int input_width() > ``` #### input\\_height ```python def input_height(self) > int ``` Get model input height item description **type** func **return** model input size of height **static** False > C++ defination code: > ```cpp > int input_height() > ``` #### input\\_format ```python def input_format(self) > maix.image.Format ``` Get input image format item description **type** func **return** input image format, image::Format type. **static** False > C++ defination code: > ```cpp > image::Format input_format() > ``` #### mean Get mean value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector mean > ``` #### scale Get scale value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector scale > ``` #### rec\\_mean Get mean value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector rec_mean > ``` #### rec\\_scale Get scale value, list type item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector rec_scale > ``` #### labels labels (charactors) item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > std::vector labels > ``` #### det model have detect model item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > bool det > ``` #### rec model have recognize model item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > bool rec > ```"},"/maixpy/api/maix/http.html":{"title":"maix.http","content":" title: maix.http maix.http module > You can use `maix.http` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ## Class ### JpegStreamer JpegStreamer class > C++ defination code: > ```cpp > class JpegStreamer > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, host: str '', port: int 8000, client_number: int 16) > None ``` Construct a new jpeg streamer object item description **type** func **note** You can get the picture stream through http://host:port/stream, you can also get it through http://ip:port, and you can add personal style through set_html() at this time **param** **host**: http host
    **port**: http port, default is 8000
    **client_number**: the max number of clients
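A minimal MaixPy sketch of streaming camera frames over HTTP with JpegStreamer; the port and resolution are placeholders:

```python
from maix import camera, http

streamer = http.JpegStreamer(port=8000)
streamer.start()
cam = camera.Camera(640, 480)

while True:
    # open http://<device-ip>:8000 in a browser to view the stream
    streamer.write(cam.read())
```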
    **static** False > C++ defination code: > ```cpp > JpegStreamer(std::string host std::string(), int port 8000, int client_number 16) > ``` #### start ```python def start(self) > maix.err.Err ``` start jpeg streame item description **type** func **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err start() > ``` #### start (overload 1) stop http item description **type** func **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err stop() > ``` #### write ```python def write(self, img: maix.image.Image) > maix.err.Err ``` Write data to http item description **type** func **param** **img**: image object
    **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err write(image::Image *img) > ``` #### set\\_html ```python def set_html(self, data: str) > maix.err.Err ``` add your style in this api\\ndefault is:\\n\\n\\n

    JPG Stream

    \\n\\n\\n item description **type** func **param** **data**: html code
    **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err set_html(std::string data) > ``` #### host ```python def host(self) > str ``` Get host item description **type** func **return** host name **static** False > C++ defination code: > ```cpp > std::string host() > ``` #### port ```python def port(self) > int ``` Get port item description **type** func **return** port **static** False > C++ defination code: > ```cpp > int port() > ```"},"/maixpy/api/maix/peripheral/pwm.html":{"title":"maix.peripheral.pwm","content":" title: maix.peripheral.pwm maix.peripheral.pwm module > You can use `maix.peripheral.pwm` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ## Class ### PWM Peripheral pwm class > C++ defination code: > ```cpp > class PWM > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, id: int, freq: int 1000, duty: float 0, enable: bool True, duty_val: int 1) > None ``` PWM constructor item description **type** func **param** **pin**: direction [in], pwm id, int type, like 0, 1, 2 etc.
    **freq**: direction [in], pwm frequency, unit: Hz. int type. default is 1000
    **duty**: direction [in], pwm duty. double type. range is [0, 100], default is 0.
    **enable**: direction [in], enable pwm output right now. bool type. default is true, if false, you need to call enable() to enable pwm output.
    **duty_val**: direction [in], pwm duty value, int type. default -1 means not set, in which case it is auto-calculated from freq and duty.
    This arg directly sets the pwm duty value; if set, the duty arg is ignored.
    duty_val = duty / 100 * T_ns, where T_ns = 1 / freq * 1000000000.
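A small MaixPy sketch of driving a servo-style signal with PWM. The PWM id is a placeholder: which id maps to which pin depends on the board, and on some boards the pin must first be switched to its PWM function (e.g. via maix.peripheral.pinmap) before this works:

```python
from maix.peripheral import pwm

out = pwm.PWM(7, freq=50, duty=7.5, enable=True)   # 50 Hz, 7.5% duty
out.duty(10)        # change the duty cycle to 10%
print(out.freq())   # calling freq() with no argument only reads the current frequency
out.disable()
```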
    **throw** If args error or init pwm failed, will throw err::Exception **static** False > C++ defination code: > ```cpp > PWM(int id, int freq 1000, double duty 0, bool enable true, int duty_val 1) > ``` #### duty ```python def duty(self, duty: float 1) > float ``` get or set pwm duty item description **type** func **param** **duty**: direction [in], pwm duty, double type, value in [0, 100], default 1 means only read.
    **return** current duty, float type, if set and set failed will return err::Err **static** False > C++ defination code: > ```cpp > double duty(double duty 1) > ``` #### duty\\_val ```python def duty_val(self, duty_val: int 1) > int ``` set pwm duty value item description **type** func **param** **duty_val**: direction [in], pwm duty value. int type. default is 1
    duty_val > 0 means set duty_val
    duty_val == -1 or not set, return current duty_val
    **return** int type
    when get duty_val, return current duty_val, else return err::Err code. **static** False > C++ defination code: > ```cpp > int duty_val(int duty_val 1) > ``` #### freq ```python def freq(self, freq: int 1) > int ``` get or set pwm frequency item description **type** func **param** **freq**: direction [in], pwm frequency. int type. default is 1
    freq > 0, set freq
    freq == -1 or not set, return current freq
    **return** int type, current freq, if set and set failed will return err::Err **static** False > C++ defination code: > ```cpp > int freq(int freq 1) > ``` #### enable ```python def enable(self) > maix.err.Err ``` set pwm enable item description **type** func **return** err::Err type, err.Err.ERR_NONE means success **static** False > C++ defination code: > ```cpp > err::Err enable() > ``` #### disable ```python def disable(self) > maix.err.Err ``` set pwm disable item description **type** func **return** err::Err type, err.Err.ERR_NONE means success **static** False > C++ defination code: > ```cpp > err::Err disable() > ``` #### is\\_enabled ```python def is_enabled(self) > bool ``` get pwm enable status item description **type** func **return** bool type, true means enable, false means disable **static** False > C++ defination code: > ```cpp > bool is_enabled() > ```"},"/maixpy/api/maix/peripheral/wdt.html":{"title":"maix.peripheral.wdt","content":" title: maix.peripheral.wdt maix.peripheral.wdt module > You can use `maix.peripheral.wdt` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ## Class ### WDT Peripheral wdt class > C++ defination code: > ```cpp > class WDT > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, id: int, feed_ms: int) > None ``` WDT constructor, after construct, the wdt will auto start. item description **type** func **param** **id**: direction [in], id of wdt, int type
    **feed_ms**: direction [in], feed interval, int type, unit is ms, you must feed wdt in this interval, or system will restart.
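A minimal MaixPy watchdog sketch; the wdt id 0 and the 1000 ms interval are placeholders:

```python
import time
from maix.peripheral import wdt

dog = wdt.WDT(0, 1000)      # must be fed at least every 1000 ms or the system restarts
for _ in range(10):
    dog.feed()
    time.sleep(0.5)         # feed well within the interval
dog.stop()
```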
    **static** False > C++ defination code: > ```cpp > WDT(int id, int feed_ms) > ``` #### feed ```python def feed(self) > int ``` feed wdt item description **type** func **return** error code, if feed success, return err::ERR_NONE **static** False > C++ defination code: > ```cpp > int feed() > ``` #### stop ```python def stop(self) > int ``` stop wdt item description **type** func **static** False > C++ defination code: > ```cpp > int stop() > ``` #### restart ```python def restart(self) > int ``` restart wdt, stop and start watchdog timer. item description **type** func **static** False > C++ defination code: > ```cpp > int restart() > ```"},"/maixpy/api/maix/peripheral/uart.html":{"title":"maix.peripheral.uart","content":" title: maix.peripheral.uart maix uart peripheral driver > You can use `maix.peripheral.uart` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ### PARITY uart parity enum item describe **values** **PARITY_NONE**: no parity
    **PARITY_ODD**: odd parity
    **PARITY_EVEN**: even parity
    **PARITY_MAX**:
    > C++ defination code: > ```cpp > enum PARITY > { > PARITY_NONE 0x00, // no parity > PARITY_ODD 0x01, // odd parity > PARITY_EVEN 0x02, // even parity > PARITY_MAX > } > ``` ### STOP uart stop bits item describe **values** **STOP_1**: 1 stop bit
    **STOP_2**: 2 stop bits
    **STOP_1_5**: 1.5 stop bits
    **STOP_MAX**:
    > C++ defination code: > ```cpp > enum STOP > { > STOP_1 0x01, // 1 stop bit > STOP_2 0x02, // 2 stop bits > STOP_1_5 0x03, // 1.5 stop bits > STOP_MAX > } > ``` ### BITS uart data bits item describe **values** **BITS_5**: 5 data bits
    **BITS_6**: 6 data bits
    **BITS_7**: 7 data bits
    **BITS_8**: 8 data bits
    **BITS_MAX**:
    > C++ defination code: > ```cpp > enum BITS > { > BITS_5 5, // 5 data bits > BITS_6 6, // 6 data bits > BITS_7 7, // 7 data bits > BITS_8 8, // 8 data bits > BITS_MAX > } > ``` ### FLOW\\_CTRL uart flow control item describe **values** **FLOW_CTRL_NONE**: no flow control
    **FLOW_CTRL_HW**: hardware flow control
    **FLOW_CTRL_MAX**:
    > C++ defination code: > ```cpp > enum FLOW_CTRL > { > FLOW_CTRL_NONE 0, // no flow control > FLOW_CTRL_HW 1, // hardware flow control > FLOW_CTRL_MAX > } > ``` ## Variable ## Function ### list\\_devices ```python def list_devices() > list[str] ``` Get supported uart ports. item description **return** uart ports list, string type. > C++ defination code: > ```cpp > std::vector list_devices() > ``` ## Class ### UART maix uart peripheral driver > C++ defination code: > ```cpp > class UART : public comm::CommBase > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, port: str '', baudrate: int 115200, databits: BITS ..., parity: PARITY ..., stopbits: STOP ..., flow_ctrl: FLOW_CTRL ...) > None ``` UART constructor. You need to call open() to open the device. item description **type** func **param** **port**: uart port. string type, can get it by uart.list_devices().
    If empty, will not open device in constructor, default empty.
    if not empty, the device will be opened automatically in the constructor, and err.Exception will be thrown if opening fails.
    **baudrate**: baudrate of uart. int type, default 115200.
    **databits**: databits, values @see uart.BITS
    **parity**: parity, values @see uart.PARITY
    **stopbits**: stopbits, values @see uart.STOP
    **flow_ctrl**: flow control, values @see uart.FLOW_CTRL
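A basic MaixPy sketch of opening the first serial port and doing a write/read; picking devices[0] is a placeholder choice:

```python
from maix.peripheral import uart

devices = uart.list_devices()
serial = uart.UART(devices[0], 115200)
serial.write_str('hello uart')
data = serial.read(-1, 500)     # read whatever arrives within 500 ms
if data:
    print(data)
```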
    **static** False > C++ defination code: > ```cpp > UART(const std::string &port \"\", int baudrate 115200, uart::BITS databits uart::BITS_8, > uart::PARITY parity uart::PARITY_NONE, uart::STOP stopbits uart::STOP_1, > uart::FLOW_CTRL flow_ctrl uart::FLOW_CTRL_NONE) > ``` #### set\\_port ```python def set_port(self, port: str) > maix.err.Err ``` Set port item description **type** func **param** **port**: uart port. string type, can get it by uart.list_devices().
    **return** set port error code, err.Err type. **static** False > C++ defination code: > ```cpp > err::Err set_port(const std::string &port) > ``` #### get\\_port ```python def get_port(self) > str ``` Get port item description **type** func **return** uart port, string type. **static** False > C++ defination code: > ```cpp > std::string get_port() > ``` #### set\\_baudrate ```python def set_baudrate(self, baudrate: int) > maix.err.Err ``` Set baud rate item description **type** func **param** **baudrate**: baudrate of uart. int type, default 115200.
    **return** set baud rate error code, err.Err type. **static** False > C++ defination code: > ```cpp > err::Err set_baudrate(int baudrate) > ``` #### get\\_baudrate ```python def get_baudrate(self) > int ``` Get baud rate item description **type** func **return** baud rate, int type. **static** False > C++ defination code: > ```cpp > int get_baudrate() > ``` #### open ```python def open(self) > maix.err.Err ``` Open uart device, before open, port must be set in constructor or by set_port().\\nIf already opened, do nothing and return err.ERR_NONE. item description **type** func **return** open device error code, err.Err type. **static** False > C++ defination code: > ```cpp > err::Err open() > ``` #### is\\_open ```python def is_open(self) > bool ``` Check if device is opened. item description **type** func **return** true if opened, false if not opened. **static** False > C++ defination code: > ```cpp > bool is_open() > ``` #### close ```python def close(self) > maix.err.Err ``` Close uart device, if already closed, do nothing and return err.ERR_NONE. item description **type** func **return** close device error code, err.Err type. **static** False > C++ defination code: > ```cpp > err::Err close() > ``` #### set\\_received\\_callback ```python def set_received_callback(self, callback: typing.Callable[[UART, maix.Bytes(bytes)], None]) > None ``` Set received callback function item description **type** func **param** **callback**: function to call when received data
    **static** False > C++ defination code: > ```cpp > void set_received_callback(std::function callback) > ``` #### write\\_str ```python def write_str(self, str: str) > int ``` Send string data item description **type** func **param** **str**: string data
    **return** sent data length, < 0 means error, value is err.Err. **static** False > C++ defination code: > ```cpp > int write_str(const std::string &str) > ``` #### write ```python def write(self, data: maix.Bytes(bytes)) > int ``` Send data to uart item description **type** func **param** **data**: direction [in], data to send, bytes type. If you want to send str type, use str.encode() to convert.
    **return** sent length, int type, if < 0 means error, value is err.Err. **static** False > C++ defination code: > ```cpp > int write(Bytes &data) > ``` #### available ```python def available(self, timeout: int 0) > int ``` Check if data available or wait data available. item description **type** func **param** **timeout**: unit ms, timeout to wait data, default 0.
    0 means check data available and return immediately,
    > 0 means wait until data available or timeout.
    -1 means wait until data available.
    **return** available data number, 0 if timeout or no data, <0 if error, value is err.Err, can be err::ERR_IO, err::ERR_CANCEL, err::ERR_NOT_OPEN. **throw** err.Exception if fatal error. **static** False > C++ defination code: > ```cpp > int available(int timeout 0) > ``` #### read ```python def read(*args, **kwargs) ``` Recv data from uart item description **type** func **param** **len**: max data length want to receive, default 1.
    -1 means read all data currently in the uart receive buffer.
    >0 means read up to len bytes.
    other values are invalid.
    **timeout**: unit ms, timeout to receive data, default 0.
    0 means read whatever is already in the uart receive buffer and return immediately,
    -1 means block until len bytes have been read,
    >0 means block until len bytes have been read or timeout.
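A self-contained polling sketch built on available() and read(), using the same placeholder port choice as above:

```python
from maix.peripheral import uart

serial = uart.UART(uart.list_devices()[0], 115200)
while True:
    n = serial.available(50)      # wait up to 50 ms for incoming data
    if n > 0:
        print(serial.read(n, 0))  # read exactly the bytes that are already buffered
```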
    **return** received data, bytes type.
    Attention, you need to delete the returned object yourself in C++. **throw** Read failed will raise err.Exception error. **static** False > C++ defination code: > ```cpp > Bytes *read(int len 1, int timeout 0) > ``` #### readline ```python def readline(*args, **kwargs) ``` Read line from uart, that is read until '\\n' or '\\r\\n'. item description **type** func **param** **timeout**: unit ms, timeout to receive data, default 1 means block until read '\\n' or '\\r\\n'.
    > 0 means block until read '\\n' or '\\r\\n' or timeout.
    **return** received data, bytes type. If a timeout occurs, the data received so far is returned even though '\\n' or '\\r\\n' was not read.
    e.g. If we want to read b'123\\n', but when we only read b'12', timeout, then return b'12'. **static** False > C++ defination code: > ```cpp > Bytes *readline(int timeout 1) > ```"},"/maixpy/api/maix/peripheral/adc.html":{"title":"maix.peripheral.adc","content":" title: maix.peripheral.adc maix.peripheral.adc module > You can use `maix.peripheral.adc` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ### RES\\_BIT\\_8 8 bit resolution, supported by the actual hardware item description **value** **8** **readonly** True > C++ defination code: > ```cpp > const int RES_BIT_8 8 > ``` ### RES\\_BIT\\_10 10 bit resolution, supported by the actual hardware item description **value** **10** **readonly** True > C++ defination code: > ```cpp > const int RES_BIT_10 10 > ``` ### RES\\_BIT\\_12 12 bit resolution, supported by the actual hardware item description **value** **12** **readonly** True > C++ defination code: > ```cpp > const int RES_BIT_12 12 > ``` ### RES\\_BIT\\_16 16 bit resolution, supported by the actual hardware item description **value** **16** **readonly** True > C++ defination code: > ```cpp > const int RES_BIT_16 16 > ``` ## Function ## Class ### ADC Peripheral adc class > C++ defination code: > ```cpp > class ADC > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, pin: int, resolution: int, vref: float 1) > None ``` ADC constructor item description **type** func **param** **pin**: direction [in], adc pin, int type
    **resolution**: direction [in], adc resolution. default is -1, which means use the default resolution
    option:
    resolution = adc.RES_BIT_8, means 8 bit resolution
    resolution = adc.RES_BIT_10, means 10 bit resolution
    resolution = adc.RES_BIT_12, means 12 bit resolution
    resolution = adc.RES_BIT_16, means 16 bit resolution
    the default resolution is determined by the actual hardware.
    **vref**: direction [in], adc reference voltage. default is -1, which means use the default reference voltage.
    the default vref is determined by actual hardware. range: [0.0, 10.0]
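A minimal MaixPy ADC sketch; pin 0 and 12-bit resolution are placeholders, check the board's ADC pinout and supported resolutions:

```python
from maix.peripheral import adc

a = adc.ADC(0, adc.RES_BIT_12)
raw = a.read()         # integer in [0, 4095] at 12-bit resolution
volts = a.read_vol()   # converted to volts using the configured reference voltage
print(raw, volts)
```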
    **static** False > C++ defination code: > ```cpp > ADC(int pin, int resolution, float vref 1) > ``` #### read ```python def read(self) > int ``` read adc value item description **type** func **return** adc data, int type
    if resolution is 8 bit, return value range is [0, 255]
    if resolution is 10 bit, return value range is [0, 1023]
    if resolution is 12 bit, return value range is [0, 4095]
    if resolution is 16 bit, return value range is [0, 65535] **static** False > C++ defination code: > ```cpp > int read() > ``` #### read\\_vol ```python def read_vol(self) > float ``` read adc voltage item description **type** func **return** adc voltage, float type。the range is [0.0, vref] **static** False > C++ defination code: > ```cpp > float read_vol() > ```"},"/maixpy/api/maix/peripheral/hid.html":{"title":"maix.peripheral.hid","content":" title: maix.peripheral.hid maix.peripheral.hid module > You can use `maix.peripheral.hid` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ### DeviceType Device enum of hid item describe **values** **DEVICE_MOUSE**:
    **DEVICE_KEYBOARD**:
    **DEVICE_TOUCHPAD**:
    > C++ defination code: > ```cpp > enum DeviceType { > DEVICE_MOUSE 0, > DEVICE_KEYBOARD, > DEVICE_TOUCHPAD > } > ``` ## Variable ## Function ## Class ### Hid Hid class > C++ defination code: > ```cpp > class Hid > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, device_type: DeviceType, open: bool True) > None ``` Hid Device constructor item description **type** func **param** **device_type**: Device type, used to select mouse, keyboard, or touchpad.
    **open**: auto open device in constructor; if false, you need to call open() to open the device
    **static** False > C++ defination code: > ```cpp > Hid(hid::DeviceType device_type, bool open true) > ``` #### open ```python def open(self) > maix.err.Err ``` Open hid device item description **type** func **return** err::Err **static** False > C++ defination code: > ```cpp > err::Err open() > ``` #### close ```python def close(self) > maix.err.Err ``` Close hid device item description **type** func **return** err::Err **static** False > C++ defination code: > ```cpp > err::Err close() > ``` #### write ```python def write(self, data: list[int]) > maix.err.Err ``` Write data to hid device item description **type** func **param** **data**: data to write
    For the keyboard, 8 bytes of data need to be written, with the format as follows:
    data = [0x00, # Modifier keys (Ctrl/Shift/Alt/GUI bits)
    0x00, # Reserved
    0x00, # Key value. Refer to the \"Universal Serial Bus HID Usage Tables\" section of the official documentation(https://www.usb.org).
    0x00, # Key value
    0x00, # Key value
    0x00, # Key value
    0x00, # Key value
    0x00] # Key value
    For the mouse, 4 bytes of data need to be written, with the format as follows:
    data [0x00, # Button state
    0x00: no button pressed
    0x01: press left button
    0x02: press right button
    0x04: press middle button
    x, # X axis relative coordinates. Signed number, positive values for x indicate movement to the right
    y, # Y axis relative coordinates. Signed number, positive values for y indicate movement downward
    0x00] # Wheel movement. Signed number, positive values indicate downward movement.
    For the touchpad, 6 bytes of data need to be written, with the format as follows:
    data = [0x00, # Button state (0: no button pressed, 0x01: press left button, 0x10: press right button)
    x & 0xFF, (x >> 8) & 0xFF, # X axis absolute coordinate, 0 means unused.
    Note: You must map the target position to the range [0x1, 0x7FFF]. This means x value = x * 0x7FFF / screen width.
    y & 0xFF, (y >> 8) & 0xFF, # Y axis absolute coordinate, 0 means unused.
    Note: You must map the target position to the range [0x1, 0x7FFF]. This means y value = y * 0x7FFF / screen height.
    0x00, # Wheel movement. Signed number, positive values indicate downward movement.
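A small MaixPy sketch of the 4-byte mouse report described above; the USB HID device usually has to be enabled in the device settings before this works:

```python
import time
from maix.peripheral import hid

mouse = hid.Hid(hid.DeviceType.DEVICE_MOUSE)
for _ in range(20):
    mouse.write([0x00, 5, 0, 0])   # no buttons, move 5 to the right, no vertical move, no wheel
    time.sleep(0.05)
mouse.close()
```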
    **return** err::Err **static** False > C++ defination code: > ```cpp > err::Err write(std::vector &data) > ``` #### is\\_opened ```python def is_opened(self) > bool ``` Check if hid device is opened item description **type** func **return** bool **static** False > C++ defination code: > ```cpp > bool is_opened() > ```"},"/maixpy/api/maix/peripheral/gpio.html":{"title":"maix.peripheral.gpio","content":" title: maix.peripheral.gpio maix.peripheral.gpio module > You can use `maix.peripheral.gpio` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ### Mode GPIO mode item describe **values** **IN**: input mode
    **OUT**: output mode
    **OUT_OD**: output open drain mode
    **MODE_MAX**:
    > C++ defination code: > ```cpp > enum Mode > { > IN 0x01, // input mode > OUT 0x02, // output mode > OUT_OD 0x03, // output open drain mode > MODE_MAX > } > ``` ### Pull GPIO pull mode item describe **values** **PULL_NONE**: pull none mode
    **PULL_UP**: pull up mode
    **PULL_DOWN**: pull down mode
    **PULL_MAX**:
    > C++ defination code: > ```cpp > enum Pull > { > PULL_NONE 0x00, // pull none mode > PULL_UP 0x01, // pull up mode > PULL_DOWN 0x02, // pull down mode > PULL_MAX > } > ``` ## Variable ## Function ## Class ### GPIO Peripheral gpio class > C++ defination code: > ```cpp > class GPIO > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, pin: str, mode: Mode ..., pull: Pull ...) > None ``` GPIO constructor item description **type** func **param** **pin**: direction [in], gpio pin name, string type the same as board's pin name, e.g. \"B14\" or \"GPIOB14\", or number string like \"10\" if board no gpiochipe name.
    **mode**: direction [in], gpio mode. gpio.Mode type, default is gpio.Mode.IN (input) mode.
    **pull**: direction [in], gpio pull. gpio.Pull type, default is gpio.Pull.PULL_NONE (pull none) mode.
    For input mode, this will set gpio default status(value), if set to gpio.Pull.PULL_NONE, gpio value will be floating.
    For output mode, this will set gpio default status(value), if set to gpio.Pull.PULL_UP, gpio value will be 1, else 0.
    **throw** err::Exception if open gpio device failed. **static** False > C++ defination code: > ```cpp > GPIO(std::string pin, gpio::Mode mode gpio::Mode::IN, gpio::Pull pull gpio::Pull::PULL_NONE) > ``` #### value ```python def value(self, value: int 1) > int ``` set and get gpio value item description **type** func **param** **value**: direction [in], gpio value. int type.
    0, means write gpio to low level
    1, means write gpio to high level
    -1, means read gpio value, not set
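A minimal MaixPy GPIO blink sketch; 'B14' is just the pin-name example given above, and on some boards the pin must first be mapped to GPIO (e.g. via maix.peripheral.pinmap) before it can be driven:

```python
import time
from maix.peripheral import gpio

led = gpio.GPIO('B14', gpio.Mode.OUT)
for _ in range(10):
    led.toggle()
    time.sleep(0.5)
print(led.value())   # calling value() with no argument only reads the current level
```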
    **return** int type, return gpio value, can be 0 or 1 **static** False > C++ defination code: > ```cpp > int value(int value 1) > ``` #### high ```python def high(self) > None ``` set gpio high (value to 1) item description **type** func **static** False > C++ defination code: > ```cpp > void high() > ``` #### low ```python def low(self) > None ``` set gpio low (value to 0) item description **type** func **static** False > C++ defination code: > ```cpp > void low() > ``` #### toggle ```python def toggle(self) > None ``` gpio toggle item description **type** func **static** False > C++ defination code: > ```cpp > void toggle() > ``` #### get\\_mode ```python def get_mode(self) > Mode ``` gpio get mode item description **type** func **static** False > C++ defination code: > ```cpp > gpio::Mode get_mode() > ``` #### get\\_pull ```python def get_pull(self) > Pull ``` get gpio pull item description **type** func **return** gpio::Pull type **static** False > C++ defination code: > ```cpp > gpio::Pull get_pull() > ``` #### reset ```python def reset(self, mode: Mode, pull: Pull) > maix.err.Err ``` reset gpio item description **type** func **param** **mode**: direction [in], gpio mode. gpio.Mode type
    **pull**: direction [in], gpio pull. gpio.Pull type
For input mode, this sets the gpio's default status (value); if set to gpio.Pull.PULL_NONE, the gpio value will be floating.
For output mode, this sets the gpio's default status (value); if set to gpio.Pull.PULL_UP, the gpio value will be 1, else 0.
    **return** err::Err type **static** False > C++ defination code: > ```cpp > err::Err reset(gpio::Mode mode, gpio::Pull pull) > ```"},"/maixpy/api/maix/peripheral/spi.html":{"title":"maix.peripheral.spi","content":" title: maix.peripheral.spi maix.peripheral.spi module > You can use `maix.peripheral.spi` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ### Mode SPI mode enum item describe **values** **MASTER**: spi master mode
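A minimal MaixPy sketch of the GPIO class documented above. The pad name "A14"/"GPIOA14" and the pinmap routing call are assumptions about the board wiring, so substitute a pin that actually exists on your hardware.

```python
from maix import gpio, pinmap, time

# Assumption: pad "A14" exists and can be routed to function "GPIOA14"; adjust for your board.
pinmap.set_pin_function("A14", "GPIOA14")
led = gpio.GPIO("GPIOA14", gpio.Mode.OUT)   # output mode, pull defaults to PULL_NONE

led.high()            # drive the pin to 1
time.sleep_ms(500)
led.toggle()          # now 0
print("mode:", led.get_mode(), "value:", led.value())   # value() with no argument just reads
```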
    **SLAVE**: spi slave mode
    > C++ defination code: > ```cpp > enum Mode > { > MASTER 0x0, // spi master mode > SLAVE 0x1, // spi slave mode > } > ``` ## Variable ## Function ## Class ### SPI Peripheral spi class > C++ defination code: > ```cpp > class SPI > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, id: int, mode: Mode, freq: int, polarity: int 0, phase: int 0, bits: int 8, cs_enable: int 0, soft_cs: bool False, cs: str 'GPIOA19') > None ``` SPI constructor item description **type** func **param** **id**: direction [in], spi bus id, int type
    **mode**: direction [in], mode of spi, spi.Mode type, spi.Mode.MASTER or spi.Mode.SLAVE.
    **freq**: direction [in], freq of spi, int type
    **polarity**: direction [in], polarity of spi, 0 means idle level of clock is low, 1 means high, int type, default is 0.
    **phase**: direction [in], phase of spi, 0 means data is captured on the first edge of the SPI clock cycle, 1 means second, int type, default is 0.
    **bits**: direction [in], bits of spi, int type, default is 8.
    **cs_enable**: direction [in], cs pin active level, default is 0(low)
**soft_cs**: direction [in], do not use the hardware cs, bool type; if set to true, you can operate the cs pin manually via gpio.
**cs**: direction [in], soft cs pin name, std::string type, default is \"GPIOA19\"; if the SPI supports multiple hardware cs pins, you can set it to another value.
    **static** False > C++ defination code: > ```cpp > SPI(int id, spi::Mode mode, int freq, int polarity 0, int phase 0, > int bits 8, unsigned char cs_enable 0, bool soft_cs false, std::string cs \"GPIOA19\") > ``` #### read ```python def read(*args, **kwargs) ``` read data from spi item description **type** func **param** **length**: direction [in], read length, int type
    **return** bytes data, Bytes type in C++, bytes type in MaixPy. You need to delete it manually after use in C++. **static** False > C++ defination code: > ```cpp > Bytes *read(int length) > ``` #### write ```python def write(self, data: maix.Bytes(bytes)) > int ``` write data to spi item description **type** func **param** **data**: direction [in], data to write, Bytes type in C++, bytes type in MaixPy
    **return** write length, int type, if write failed, return err::Err code. **static** False > C++ defination code: > ```cpp > int write(Bytes *data) > ``` #### write\\_read ```python def write_read(*args, **kwargs) ``` write data to spi and read data from spi at the same time. item description **type** func **param** **data**: direction [in], data to write, Bytes type in C++, bytes type in MaixPy
**read_len**: direction [in], read length, int type, should be > 0.
    **return** read data, Bytes type in C++, bytes type in MaixPy. You need to delete it manually after use in C++. **static** False > C++ defination code: > ```cpp > Bytes *write_read(Bytes *data, int read_len) > ``` #### is\\_busy ```python def is_busy(self) > bool ``` get busy status of spi item description **type** func **return** busy status, bool type **static** False > C++ defination code: > ```cpp > bool is_busy() > ```"},"/maixpy/api/maix/peripheral/timer.html":{"title":"maix.peripheral.timer","content":" title: maix.peripheral.timer maix.peripheral.timer module > You can use `maix.peripheral.timer` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ## Class ### TIMER Peripheral timer class > C++ defination code: > ```cpp > class TIMER > ``` #### \\_\\_init\\_\\_ ```python def __init__(self) > None ``` TIMER constructor item description **type** func **static** False > C++ defination code: > ```cpp > TIMER() > ```"},"/maixpy/api/maix/peripheral/i2c.html":{"title":"maix.peripheral.i2c","content":" title: maix.peripheral.i2c maix.peripheral.i2c module > You can use `maix.peripheral.i2c` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ### AddrSize Address size enum item describe **values** **SEVEN_BIT**: 7 bit address mode
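A rough sketch of the SPI master API above. Bus id 4, the 1.25 MHz clock and the payload bytes are only placeholders; check which SPI bus is actually routed out on your board.

```python
from maix import spi

# Assumption: SPI bus 4 is available in master mode on this board.
dev = spi.SPI(4, spi.Mode.MASTER, 1250000)

tx = bytes([0x9F, 0x00, 0x00])        # example payload only
rx = dev.write_read(tx, len(tx))      # full duplex: send tx and read back the same length
print("busy:", dev.is_busy(), "rx:", rx)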
    **TEN_BIT**: 10 bit address mode
    > C++ defination code: > ```cpp > enum AddrSize > { > SEVEN_BIT 7, // 7 bit address mode > TEN_BIT 10 // 10 bit address mode > } > ``` ### Mode I2C mode enum item describe **values** **MASTER**: master mode
    **SLAVE**: slave mode
    > C++ defination code: > ```cpp > enum Mode > { > MASTER 0x00, // master mode > SLAVE 0x01 // slave mode > } > ``` ## Variable ## Function ### list\\_devices ```python def list_devices() > list[int] ``` Get supported i2c bus devices. item description **return** i2c bus devices list, int type, is the i2c bus id. > C++ defination code: > ```cpp > std::vector list_devices() > ``` ## Class ### I2C Peripheral i2c class > C++ defination code: > ```cpp > class I2C > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, id: int, mode: Mode, freq: int 100000, addr_size: AddrSize ...) > None ``` I2C Device constructor\\nthis constructor will be export to MaixPy as _maix.example.Example.__init__ item description **type** func **param** **id**: direction [in], i2c bus id, int type, e.g. 0, 1, 2
    **freq**: direction [in], i2c clock, int type, default is 100000(100kbit/s), will auto set fast mode if freq > 100000.
    **mode**: direction [in], mode of i2c, i2c.Mode.SLAVE or i2c.Mode.MASTER.
    **addr_size**: direction [in], address length of i2c, i2c.AddrSize.SEVEN_BIT or i2c.AddrSize.TEN_BIT.
**throw** err::Exception if open i2c device failed. **static** False > C++ defination code: > ```cpp > I2C(int id, i2c::Mode mode, int freq 100000, i2c::AddrSize addr_size i2c::AddrSize::SEVEN_BIT) > ``` #### scan ```python def scan(self, addr: int 1) > list[int] ``` scan all i2c slave addresses on the bus item description **type** func **param** **addr**: If 1, only scan this addr, or scan from 0x08~0x77, default 1.
    **return** the list of i2c slave address, int list type. **static** False > C++ defination code: > ```cpp > std::vector scan(int addr 1) > ``` #### writeto ```python def writeto(self, addr: int, data: maix.Bytes(bytes)) > int ``` write data to i2c slave item description **type** func **param** **addr**: direction [in], i2c slave address, int type
    **data**: direction [in], data to write, bytes type.
    Note: The range of value should be in [0,255].
    **return** if success, return the length of written data, error occurred will return err::Err. **static** False > C++ defination code: > ```cpp > int writeto(int addr, const Bytes &data) > ``` #### readfrom ```python def readfrom(*args, **kwargs) ``` read data from i2c slave item description **type** func **param** **addr**: direction [in], i2c slave address, int type
    **len**: direction [in], data length to read, int type
    **return** the list of data read from i2c slave, bytes type, you should delete it after use in C++.
    If read failed, return nullptr in C++, None in MaixPy. **static** False > C++ defination code: > ```cpp > Bytes* readfrom(int addr, int len) > ``` #### writeto\\_mem ```python def writeto_mem(self, addr: int, mem_addr: int, data: maix.Bytes(bytes), mem_addr_size: int 8, mem_addr_le: bool False) > int ``` write data to i2c slave's memory address item description **type** func **param** **addr**: direction [in], i2c slave address, int type
    **mem_addr**: direction [in], memory address want to write, int type.
    **data**: direction [in], data to write, bytes type.
    **mem_addr_size**: direction [in], memory address size, default is 8.
**mem_addr_le**: direction [in], memory address little endian, default is false, that is, the high byte is sent first.
    **return** data length written if success, error occurred will return err::Err. **static** False > C++ defination code: > ```cpp > int writeto_mem(int addr, int mem_addr, const Bytes &data, int mem_addr_size 8, bool mem_addr_le false) > ``` #### readfrom\\_mem ```python def readfrom_mem(*args, **kwargs) ``` read data from i2c slave item description **type** func **param** **addr**: direction [in], i2c slave address, int type
    **mem_addr**: direction [in], memory address want to read, int type.
    **len**: direction [in], data length to read, int type
    **mem_addr_size**: direction [in], memory address size, default is 8.
**mem_addr_le**: direction [in], memory address little endian, default is false, that is, the high byte is sent first.
    **return** the list of data read from i2c slave, bytes type, you should delete it after use in C++.
    If read failed, return nullptr in C++, None in MaixPy. **static** False > C++ defination code: > ```cpp > Bytes* readfrom_mem(int addr, int mem_addr, int len, int mem_addr_size 8, bool mem_addr_le false) > ```"},"/maixpy/api/maix/peripheral/key.html":{"title":"maix.peripheral.key","content":" title: maix.peripheral.key maix.peripheral.key module > You can use `maix.peripheral.key` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ### Keys Keys enum, id the same as linux input.h(input event codes.h) item describe **values** **KEY_NONE**:
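A short sketch of the I2C master calls above. Bus 4, slave address 0x34 and register 0x00 are placeholders for whatever device you actually have wired up.

```python
from maix import i2c

bus = i2c.I2C(4, i2c.Mode.MASTER)             # assumption: bus 4 exists, see i2c.list_devices()
print("devices on bus:", [hex(a) for a in bus.scan()])

bus.writeto_mem(0x34, 0x00, bytes([0x12]))    # write 0x12 to register 0x00 of device 0x34
data = bus.readfrom_mem(0x34, 0x00, 1)        # read it back; returns None on failure
print("reg 0x00 =", hex(data[0]) if data else None)
```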
    **KEY_ESC**:
    **KEY_OK**:
    **KEY_OPTION**:
    **KEY_NEXT**:
    **KEY_PREV**:
    > C++ defination code: > ```cpp > enum Keys{ > KEY_NONE 0x000, > KEY_ESC 0x001, > KEY_OK 0x160, > KEY_OPTION 0x165, > KEY_NEXT 0x197, > KEY_PREV 0x19c > } > ``` ### State Key state enum item describe **values** **KEY_RELEASED**:
    **KEY_PRESSED**:
    > C++ defination code: > ```cpp > enum State{ > KEY_RELEASED 0, > KEY_PRESSED 1, > } > ``` ## Variable ## Function ### add\\_default\\_listener Add default listener, if you want to exit app when press ok button, you can just call this function.\\nThis function is auto called in MaixPy' startup code, so you don't need to call it in MaixPy.\\nCreate Key object will auto call rm_default_listener() to cancel the default ok button function.\\nWhen ok button pressed, a SIGINT signal will be raise and call app.set_exit_flag(True). > C++ defination code: > ```cpp > void add_default_listener() > ``` ### rm\\_default\\_listener Remove default listener, if you want to cancel the default ok button function(exit app), you can just call this function. > C++ defination code: > ```cpp > void rm_default_listener() > ``` ## Class ### Key Key input class > C++ defination code: > ```cpp > class Key > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, callback: typing.Callable[[int, int], None] None, open: bool True) > None ``` Key Device constructor item description **type** func **param** **callback**: When key triggered and callback is not empty(empty In MaixPy is None, in C++ is nullptr),
    callback will be called with args key(key.Keys) and value(key.State).
If set to None (nullptr in C++), you can get the key value via the read() function.
This callback is called in a standalone thread, so you can block for a while inside it, but be careful when operating on shared data.
**open**: auto open the device in the constructor; if false, you need to call open() to open the device
    **static** False > C++ defination code: > ```cpp > Key(std::function callback nullptr, bool open true) > ``` #### open ```python def open(self) > maix.err.Err ``` Open(Initialize) key device, if already opened, will close first and then open. item description **type** func **return** err::Err type, err.Err.ERR_NONE means success **static** False > C++ defination code: > ```cpp > err::Err open() > ``` #### close ```python def close(self) > maix.err.Err ``` Close key device item description **type** func **return** err::Err type, err.Err.ERR_NONE means success **static** False > C++ defination code: > ```cpp > err::Err close() > ``` #### is\\_opened ```python def is_opened(self) > bool ``` Check key device is opened item description **type** func **return** bool type, true means opened, false means closed **static** False > C++ defination code: > ```cpp > bool is_opened() > ``` #### read ```python def read(self) > tuple[int, int] ``` Read key input, and return key and value, if callback is set, DO NOT call this function manually. item description **type** func **return** list type, first is key(maix.key.Keys), second is value(maix.key.State), if no key input, return [0, 0] **throw** If read failed, will throw maix.err.Exception. **static** False > C++ defination code: > ```cpp > std::pair read() > ```"},"/maixpy/api/maix/audio.html":{"title":"maix.audio","content":" title: maix.audio maix.audio module > You can use `maix.audio` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ### Format Audio type item describe **values** **FMT_NONE**: format invalid
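A minimal sketch of the Key class above using a callback; note that creating a Key object removes the default "OK exits the app" listener, as described earlier.

```python
from maix import key, app, time

def on_key(k, state):
    # k is a key.Keys value, state is key.State.KEY_PRESSED or KEY_RELEASED
    print("key:", k, "state:", state)

k = key.Key(on_key)          # callback runs in its own thread
while not app.need_exit():
    time.sleep_ms(100)
```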
**FMT_S8**: signed 8 bits
    **FMT_S16_LE**: signed 16 bits, little endian
    **FMT_S32_LE**: signed 32 bits, little endian
    **FMT_S16_BE**: signed 16 bits, big endian
    **FMT_S32_BE**: signed 32 bits, big endian
    **FMT_U8**: unsigned 8 bits
    **FMT_U16_LE**: unsigned 16 bits, little endian
    **FMT_U32_LE**: unsigned 32 bits, little endian
    **FMT_U16_BE**: unsigned 16 bits, big endian
    **FMT_U32_BE**: unsigned 32 bits, big endian
    > C++ defination code: > ```cpp > enum Format > { > FMT_NONE 0, // format invalid > FMT_S8, // unsigned 8 bits > FMT_S16_LE, // signed 16 bits, little endian > FMT_S32_LE, // signed 32 bits, little endian > FMT_S16_BE, // signed 16 bits, big endian > FMT_S32_BE, // signed 32 bits, big endian > FMT_U8, // unsigned 8 bits > FMT_U16_LE, // unsigned 16 bits, little endian > FMT_U32_LE, // unsigned 32 bits, little endian > FMT_U16_BE, // unsigned 16 bits, big endian > FMT_U32_BE, // unsigned 32 bits, big endian > } > ``` ## Variable ## Function ## Class ### Recorder Recorder class > C++ defination code: > ```cpp > class Recorder > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, path: str '', sample_rate: int 48000, format: Format ..., channel: int 1) > None ``` Construct a new Recorder object. currectly only pcm and wav formats supported. item description **type** func **param** **path**: record path. the path determines the location where you save the file, if path is none, the audio module will not save file.
    **sample_rate**: record sample rate, default is 48000(48KHz), means 48000 samples per second.
    **format**: record sample format, default is audio::Format::FMT_S16_LE, means sampling 16 bits at a time and save as signed 16 bits, little endian. see @audio::Format
    **channel**: record sample channel, default is 1, means 1 channel sampling at the same time
    **static** False > C++ defination code: > ```cpp > Recorder(std::string path std::string(), int sample_rate 48000, audio::Format format audio::Format::FMT_S16_LE, int channel 1) > ``` #### volume ```python def volume(self, value: int 1) > int ``` Set/Get record volume item description **type** func **param** **value**: volume value, If you use this parameter, audio will set the value to volume,
    if you don't, it will return the current volume. range is [0, 100].
    **return** the current volume **static** False > C++ defination code: > ```cpp > int volume(int value 1) > ``` #### mute ```python def mute(self, data: int 1) > bool ``` Mute item description **type** func **param** **data**: mute data, If you set this parameter to true, audio will set the value to mute,
    if you don't, it will return the current mute status.
    **return** Returns whether mute is currently enabled. **static** False > C++ defination code: > ```cpp > bool mute(int data 1) > ``` #### record ```python def record(*args, **kwargs) ``` Record, Read all cached data in buffer and return. If there is no audio data in the buffer, may return empty data. item description **type** func **param** **record_ms**: Block and record audio data lasting `record_ms` milliseconds and save it to a file, the return value does not return audio data. Only valid if the initialisation `path` is set.
    **return** pcm data. datatype @see Bytes. If you pass in record_ms parameter, the return value is an empty Bytes object. **static** False > C++ defination code: > ```cpp > maix::Bytes *record(int record_ms 1) > ``` #### finish ```python def finish(self) > maix.err.Err ``` Finish the record, if you have passed in the path, this api will save the audio data to file. item description **type** func **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err finish() > ``` #### sample\\_rate ```python def sample_rate(self) > int ``` Get sample rate item description **type** func **return** returns sample rate **static** False > C++ defination code: > ```cpp > int sample_rate() > ``` #### format ```python def format(self) > Format ``` Get sample format item description **type** func **return** returns sample format **static** False > C++ defination code: > ```cpp > audio::Format format() > ``` #### channel ```python def channel(self) > int ``` Get sample channel item description **type** func **return** returns sample channel **static** False > C++ defination code: > ```cpp > int channel() > ``` ### Player Player class > C++ defination code: > ```cpp > class Player > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, path: str '', sample_rate: int 48000, format: Format ..., channel: int 1) > None ``` Construct a new Player object item description **type** func **param** **path**: player path. the path determines the location where you save the file, if path is none, the audio module will not save file.
    **sample_rate**: player sample rate, default is 48000(48KHz), means 48000 samples per second.
    **format**: player sample format, default is audio::Format::FMT_S16_LE, means sampling 16 bits at a time and save as signed 16 bits, little endian. see @audio::Format
    **channel**: player sample channel, default is 1, means 1 channel sampling at the same time
    **static** False > C++ defination code: > ```cpp > Player(std::string path std::string(), int sample_rate 48000, audio::Format format audio::Format::FMT_S16_LE, int channel 1) > ``` #### volume ```python def volume(self, value: int 1) > int ``` Set/Get player volume(Not support now) item description **type** func **param** **value**: volume value, If you use this parameter, audio will set the value to volume,
    if you don't, it will return the current volume.
    **return** the current volume **static** False > C++ defination code: > ```cpp > int volume(int value 1) > ``` #### play ```python def play(self, data: maix.Bytes(bytes) b'') > maix.err.Err ``` Play item description **type** func **param** **data**: audio data, must be raw data
    **return** error code, err::ERR_NONE means success, others means failed **static** False > C++ defination code: > ```cpp > err::Err play(maix::Bytes *data maix::audio::Player::NoneBytes) > ``` #### sample\\_rate ```python def sample_rate(self) > int ``` Get sample rate item description **type** func **return** returns sample rate **static** False > C++ defination code: > ```cpp > int sample_rate() > ``` #### format ```python def format(self) > Format ``` Get sample format item description **type** func **return** returns sample format **static** False > C++ defination code: > ```cpp > audio::Format format() > ``` #### channel ```python def channel(self) > int ``` Get sample channel item description **type** func **return** returns sample channel **static** False > C++ defination code: > ```cpp > int channel() > ```"},"/maixpy/api/maix/err.html":{"title":"maix.err","content":" title: maix.err maix.err module > You can use `maix.err` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ### Err Maix Error code item describe **values** **ERR_NONE**: No error
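A small Recorder/Player sketch based on the API above. The /root/test.wav path and the 3-second duration are just examples, and playing the file by calling play() with no data assumes the path-based behaviour described for the constructors.

```python
from maix import audio

r = audio.Recorder("/root/test.wav")   # wav/pcm only; defaults: 48 kHz, FMT_S16_LE, 1 channel
r.volume(80)                           # record volume, range [0, 100]
r.record(3000)                         # block for 3000 ms and write the samples to the file
r.finish()                             # flush and close the wav file

p = audio.Player("/root/test.wav")
p.play()                               # with a path set, play the recorded file back
```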
    **ERR_ARGS**: Invalid arguments
    **ERR_NO_MEM**: No memory
    **ERR_NOT_IMPL**: Not implemented
    **ERR_NOT_READY**: Not ready
    **ERR_NOT_INIT**: Not initialized
    **ERR_NOT_OPEN**: Not opened
    **ERR_NOT_PERMIT**: Not permitted
    **ERR_REOPEN**: Re open
    **ERR_BUSY**: Busy
    **ERR_READ**: Read error
    **ERR_WRITE**: Write error
    **ERR_TIMEOUT**: Timeout
    **ERR_RUNTIME**: Runtime error
    **ERR_IO**: IO error
    **ERR_NOT_FOUND**: Not found
    **ERR_ALREAY_EXIST**: Already exist
    **ERR_BUFF_FULL**: Buffer full
    **ERR_BUFF_EMPTY**: Buffer empty
    **ERR_CANCEL**: Cancel
    **ERR_OVERFLOW**: Overflow
    **ERR_MAX**:
    > C++ defination code: > ```cpp > enum Err > { > // !!! fixed error code, DO NOT change number already defined, only append new error code > ERR_NONE 0, // No error > ERR_ARGS , // Invalid arguments > ERR_NO_MEM , // No memory > ERR_NOT_IMPL , // Not implemented > ERR_NOT_READY , // Not ready > ERR_NOT_INIT , // Not initialized > ERR_NOT_OPEN , // Not opened > ERR_NOT_PERMIT , // Not permitted > ERR_REOPEN , // Re open > ERR_BUSY , // Busy > ERR_READ , // Read error > ERR_WRITE , // Write error > ERR_TIMEOUT , // Timeout > ERR_RUNTIME , // Runtime error > ERR_IO , // IO error > ERR_NOT_FOUND , // Not found > ERR_ALREAY_EXIST , // Already exist > ERR_BUFF_FULL , // Buffer full > ERR_BUFF_EMPTY , // Buffer empty > ERR_CANCEL , // Cancel > ERR_OVERFLOW , // Overflow > ERR_MAX, > } > ``` ## Variable ## Function ### to\\_str ```python def to_str(e: Err) > str ``` Error code to string item description **param** **e**: direction [in], error code, err::Err type
    **return** error string > C++ defination code: > ```cpp > std::string to_str(err::Err e) > ``` ### get\\_error ```python def get_error() > str ``` get last error string item description **return** error string > C++ defination code: > ```cpp > std::string& get_error() > ``` ### set\\_error ```python def set_error(str: str) > None ``` set last error string item description **param** **str**: direction [in], error string
    > C++ defination code: > ```cpp > void set_error(const std::string &str) > ``` ### check\\_raise ```python def check_raise(e: Err, msg: str '') > None ``` Check error code, if not ERR_NONE, raise err.Exception item description **param** **e**: direction [in], error code, err::Err type
    **msg**: direction [in], error message
    > C++ defination code: > ```cpp > void check_raise(err::Err e, const std::string &msg \"\") > ``` ### check\\_bool\\_raise ```python def check_bool_raise(ok: bool, msg: str '') > None ``` Check condition, if false, raise err.Exception item description **param** **ok**: direction [in], condition, if true, do nothing, if false, raise err.Exception
    **msg**: direction [in], error message
    > C++ defination code: > ```cpp > void check_bool_raise(bool ok, const std::string &msg \"\") > ``` ### check\\_null\\_raise ```python def check_null_raise(ptr: capsule, msg: str '') > None ``` Check NULL pointer, if NULL, raise exception item description **param** **ptr**: direction [in], pointer
    **msg**: direction [in], error message
    > C++ defination code: > ```cpp > void check_null_raise(void *ptr, const std::string &msg \"\") > ``` ## Class ### Exception Maix Exception > C++ defination code: > ```cpp > class Exception : public std::exception > ```"},"/maixpy/api/maix/fs.html":{"title":"maix.fs","content":" title: maix.fs maix.fs module > You can use `maix.fs` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ### SEEK SEEK enums item describe **values** **SEEK_SET**: Seek from beginning of file.
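A tiny sketch of the err helpers above: convert an error code to a string, then let check_raise turn a non-ERR_NONE code into an err.Exception.

```python
from maix import err

e = err.Err.ERR_NOT_FOUND
print(err.to_str(e))                        # human-readable error string

try:
    err.check_raise(e, "resource missing")  # raises because e != ERR_NONE
except err.Exception as ex:
    print("caught:", ex)
```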
    **SEEK_CUR**: Seek from current position.
    **SEEK_END**: Seek from end of file.
    > C++ defination code: > ```cpp > enum SEEK > { > SEEK_SET 0, // Seek from beginning of file. > SEEK_CUR 1, // Seek from current position. > SEEK_END 2, // Seek from end of file. > } > ``` ## Variable ## Function ### isabs ```python def isabs(path: str) > bool ``` Check if the path is absolute path item description **param** **path**: path to check
    **return** true if path is absolute path > C++ defination code: > ```cpp > bool isabs(const std::string &path) > ``` ### isdir ```python def isdir(path: str) > bool ``` Check if the path is a directory, if not exist, throw exception item description **param** **path**: path to check
    **return** true if path is a directory > C++ defination code: > ```cpp > bool isdir(const std::string &path) > ``` ### isfile ```python def isfile(path: str) > bool ``` Check if the path is a file, if not exist, throw exception item description **param** **path**: path to check
    **return** true if path is a file > C++ defination code: > ```cpp > bool isfile(const std::string &path) > ``` ### islink ```python def islink(path: str) > bool ``` Check if the path is a link, if not exist, throw exception item description **param** **path**: path to check
    **return** true if path is a link > C++ defination code: > ```cpp > bool islink(const std::string &path) > ``` ### symlink ```python def symlink(src: str, link: str, force: bool False) > maix.err.Err ``` Create soft link item description **param** **src**: real file path
    **link**: link file path
**force**: force link; if the link file already exists, it will be deleted first and then created.
    > C++ defination code: > ```cpp > err::Err symlink(const std::string &src, const std::string &link, bool force false) > ``` ### exists ```python def exists(path: str) > bool ``` Check if the path exists item description **param** **path**: path to check
    **return** true if path exists > C++ defination code: > ```cpp > bool exists(const std::string &path) > ``` ### mkdir ```python def mkdir(path: str, exist_ok: bool True, recursive: bool True) > maix.err.Err ``` Create a directory recursively item description **param** **path**: path to create
    **exist_ok**: if true, also return true if directory already exists
    **recursive**: if true, create directory recursively, otherwise, only create one directory, default is true
    **return** err::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed > C++ defination code: > ```cpp > err::Err mkdir(const std::string &path, bool exist_ok true, bool recursive true) > ``` ### rmdir ```python def rmdir(path: str, recursive: bool False) > maix.err.Err ``` Remove a directory item description **param** **path**: path to remove
    **recursive**: if true, remove directory recursively, otherwise, only remove empty directory, default is false
    **return** err::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed > C++ defination code: > ```cpp > err::Err rmdir(const std::string &path, bool recursive false) > ``` ### remove ```python def remove(path: str) > maix.err.Err ``` Remove a file item description **param** **path**: path to remove
    **return** err::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed > C++ defination code: > ```cpp > err::Err remove(const std::string &path) > ``` ### rename ```python def rename(src: str, dst: str) > maix.err.Err ``` Rename a file or directory item description **param** **src**: source path
**dst**: destination path; if the destination directories do not exist, they will be created automatically
**return** err::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed > C++ defination code: > ```cpp > err::Err rename(const std::string &src, const std::string &dst) > ``` ### sync ```python def sync() > None ``` Sync files, ensure they're written to disk from RAM > C++ defination code: > ```cpp > void sync() > ``` ### getsize ```python def getsize(path: str) > int ``` Get file size item description **param** **path**: path to get size
    **return** file size if success, err::Err code if failed > C++ defination code: > ```cpp > int getsize(const std::string &path) > ``` ### dirname ```python def dirname(path: str) > str ``` Get directory name of path item description **param** **path**: path to get dirname
    **return** dirname if success, empty string if failed > C++ defination code: > ```cpp > std::string dirname(const std::string &path) > ``` ### basename ```python def basename(path: str) > str ``` Get base name of path item description **param** **path**: path to get basename
    **return** basename if success, empty string if failed > C++ defination code: > ```cpp > std::string basename(const std::string &path) > ``` ### abspath ```python def abspath(path: str) > str ``` Get absolute path item description **param** **path**: path to get absolute path
    **return** absolute path if success, empty string if failed > C++ defination code: > ```cpp > std::string abspath(const std::string &path) > ``` ### getcwd ```python def getcwd() > str ``` Get current working directory item description **return** current working directory absolute path > C++ defination code: > ```cpp > std::string getcwd() > ``` ### realpath ```python def realpath(path: str) > str ``` Get realpath of path item description **param** **path**: path to get realpath
    **return** realpath if success, empty string if failed > C++ defination code: > ```cpp > std::string realpath(const std::string &path) > ``` ### splitext ```python def splitext(path: str) > list[str] ``` Get file extension item description **param** **path**: path to get extension
    **return** prefix_path and extension list if success, empty string if failed > C++ defination code: > ```cpp > std::vector splitext(const std::string &path) > ``` ### listdir ```python def listdir(path: str, recursive: bool False, full_path: bool False) > list[str] ``` List files in directory item description **param** **path**: path to list
    **recursive**: if true, list recursively, otherwise, only list current directory, default is false
    **full_path**: if true, return full path, otherwise, only return basename, default is false
    **return** files list if success, nullptr if failed, you should manually delete it in C++. > C++ defination code: > ```cpp > std::vector *listdir(const std::string &path, bool recursive false, bool full_path false) > ``` ### open ```python def open(path: str, mode: str) > File ``` Open a file, and return a File object item description **param** **path**: path to open
    **mode**: open mode, support \"r\", \"w\", \"a\", \"r+\", \"w+\", \"a+\", \"rb\", \"wb\", \"ab\", \"rb+\", \"wb+\", \"ab+\"
    **return** File object if success(need to delete object manually in C/C++), nullptr if failed > C++ defination code: > ```cpp > fs::File *open(const std::string &path, const std::string &mode) > ``` ### tempdir ```python def tempdir() > str ``` Get temp files directory item description **return** temp files directory > C++ defination code: > ```cpp > std::string tempdir() > ``` ## Class ### File File read write ops > C++ defination code: > ```cpp > class File > ``` #### \\_\\_init\\_\\_ ```python def __init__(self) > None ``` Construct File object item description **type** func **static** False > C++ defination code: > ```cpp > File() > ``` #### open ```python def open(self, path: str, mode: str) > maix.err.Err ``` Open a file item description **type** func **param** **path**: path to open
    **mode**: open mode, support \"r\", \"w\", \"a\", \"r+\", \"w+\", \"a+\", \"rb\", \"wb\", \"ab\", \"rb+\", \"wb+\", \"ab+\"
    **return** err::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed **static** False > C++ defination code: > ```cpp > err::Err open(const std::string &path, const std::string &mode) > ``` #### close ```python def close(self) > None ``` Close a file item description **type** func **static** False > C++ defination code: > ```cpp > void close() > ``` #### read ```python def read(self, size: int) > list[int] ``` Read data from file API2 item description **type** func **param** **size**: max read size
    **return** bytes data if success(need delete manually in C/C++), nullptr if failed **static** False > C++ defination code: > ```cpp > std::vector *read(int size) > ``` #### readline ```python def readline(self) > str ``` Read line from file item description **type** func **return** line if success, empty string if failed. You need to delete the returned object manually in C/C++. **static** False > C++ defination code: > ```cpp > std::string *readline() > ``` #### eof ```python def eof(self) > int ``` End of file or not item description **type** func **return** 0 if not reach end of file, else eof. **static** False > C++ defination code: > ```cpp > int eof() > ``` #### write ```python def write(self, buf: list[int]) > int ``` Write data to file API2 item description **type** func **param** **buf**: buffer to write
    **return** write size if success, err::Err code if failed **static** False > C++ defination code: > ```cpp > int write(const std::vector &buf) > ``` #### seek ```python def seek(self, offset: int, whence: int) > int ``` Seek file position item description **type** func **param** **offset**: offset to seek
    **whence**: @see maix.fs.SEEK
    **return** new position if success, err::Err code if failed **static** False > C++ defination code: > ```cpp > int seek(int offset, int whence) > ``` #### tell ```python def tell(self) > int ``` Get file position item description **type** func **return** file position if success, err::Err code if failed **static** False > C++ defination code: > ```cpp > int tell() > ``` #### flush ```python def flush(self) > maix.err.Err ``` Flush file item description **type** func **return** err::ERR_NONE(err.Err.ERR_NONE in MaixPy) if success, other error code if failed **static** False > C++ defination code: > ```cpp > err::Err flush() > ```"},"/maixpy/api/maix/network.html":{"title":"maix.network","content":" title: maix.network maix.network module > You can use `maix.network` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module module brief [wifi](./network/wifi.html) maix.network.wifi module ## Enum ## Variable ## Function ### have\\_network ```python def have_network() > bool ``` Return if device have network(WiFi/Eth etc.) item description **return** True if have network, else False. > C++ defination code: > ```cpp > bool have_network() > ``` ## Class"},"/maixpy/api/maix/thread.html":{"title":"maix.thread","content":" title: maix.thread maix.thread module > You can use `maix.thread` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ## Class ### Thread thread class > C++ defination code: > ```cpp > class Thread > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, func: typing.Callable[[capsule], None], args: capsule None) > None ``` create thread item description **type** func **param** **func**: direction [in], thread function, one `args` parameter, void* type, no return value
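A quick tour of the fs helpers above; /root/fs_demo is only an example path on the device.

```python
from maix import fs

fs.mkdir("/root/fs_demo")                       # recursive by default, ok if it already exists
f = fs.open("/root/fs_demo/hello.txt", "w")
f.write(list(b"hello maix\n"))                  # File.write takes a list of ints
f.close()

print(fs.listdir("/root/fs_demo"), fs.getsize("/root/fs_demo/hello.txt"))
fs.rmdir("/root/fs_demo", True)                 # recursive remove
```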
    **args**: direction [in], thread function parameter
    **static** False > C++ defination code: > ```cpp > Thread(std::function func, void *args nullptr) > ``` #### join ```python def join(self) > None ``` wait thread exit item description **type** func **static** False > C++ defination code: > ```cpp > void join() > ``` #### detach ```python def detach(self) > None ``` detach thread, detach will auto start thread and you can't use join anymore. item description **type** func **static** False > C++ defination code: > ```cpp > void detach() > ``` #### joinable ```python def joinable(self) > bool ``` Check if thread is joinable item description **type** func **return** true if thread is joinable **static** False > C++ defination code: > ```cpp > bool joinable() > ```"},"/maixpy/api/maix/i18n.html":{"title":"maix.i18n","content":" title: maix.i18n maix.i18n module > You can use `maix.i18n` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ### locales i18n locales list item description **value** **{
    \"en\",
    \"zh\",
    \"zh tw\",
    \"ja\"}** **readonly** False > C++ defination code: > ```cpp > static std::vector locales { > \"en\", > \"zh\", > \"zh tw\", > \"ja\"} > ``` ### names i18n language names list item description **value** **{
    \"English\",
    \"简体中文\",
    \"繁體中文\",
    \"日本語\"}** **readonly** True > C++ defination code: > ```cpp > const static std::vector names { > \"English\", > \"简体中文\", > \"繁體中文\", > \"日本語\"} > ``` ## Function ### get\\_locale ```python def get_locale() > str ``` Get system config of locale. item description **return** language locale, e.g. en, zh, zh_CN, zh_TW, etc. > C++ defination code: > ```cpp > string get_locale() > ``` ### get\\_language\\_name ```python def get_language_name() > str ``` Get system config of language name. item description **return** language name, e.g. English, 简体中文, 繁體中文, etc. > C++ defination code: > ```cpp > string get_language_name() > ``` ### load\\_trans\\_yaml ```python def load_trans_yaml(locales_dir: str) > dict[str, dict[str, str]] ``` Load translations from yaml files. item description **param** **locales_dir**: translation yaml files directory.
    **return** A dict contains all translations, e.g. {\"zh\":{\"hello\": \"你好\"}, \"en\":{\"hello\": \"hello\"}}, you should delete it after use in C++. > C++ defination code: > ```cpp > const std::map> *load_trans_yaml(const std::string &locales_dir) > ``` ## Class ### Trans Translate helper class. > C++ defination code: > ```cpp > class Trans > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, locales_dict: dict[str, dict[str, str]] {}) > None ``` Translate helper class constructor.\\nBy default locale is get by `i18n.get_locale()` function which set by system settings.\\nBut you can also manually set by `set_locale` function temporarily. item description **type** func **param** **locales_dict**: locales dict, e.g. {\"zh\": {\"Confirm\": \"确认\", \"OK\": \"好的\"}, \"en\": {\"Confirm\": \"Confirm\", \"OK\": \"OK\"}}
    **static** False > C++ defination code: > ```cpp > Trans(const std::map> &locales_dict std::map>()) > ``` #### load ```python def load(self, locales_dir: str) > maix.err.Err ``` Load translation from yaml files generated by `maixtool i18n` command. item description **type** func **param** **locales_dir**: the translation files directory.
    **return** err.Err type, no error will return err.Err.ERR_NONE. **static** False > C++ defination code: > ```cpp > err::Err load(const std::string &locales_dir) > ``` #### update\\_dict ```python def update_dict(self, dict: dict[str, dict[str, str]]) > maix.err.Err ``` Update translation dict. item description **type** func **param** **dict**: the new translation dict.
    **return** err.Err type, no error will return err.Err.ERR_NONE. **static** False > C++ defination code: > ```cpp > err::Err update_dict(const std::map> &dict) > ``` #### tr ```python def tr(self, key: str, locale: str '') > str ``` Translate string by key. item description **type** func **param** **key**: string key, e.g. \"Confirm\"
**locale**: locale name; if not assigned, the default locale set by system settings or the set_locale function is used.
    **return** translated string, if find translation, return it, or return key, e.g. \"确认\", \"Confirm\", etc. **static** False > C++ defination code: > ```cpp > string tr(const string &key, const string locale \"\") > ``` #### set\\_locale ```python def set_locale(self, locale: str) > None ``` Set locale temporarily, will not affect system settings. item description **type** func **param** **locale**: locale name, e.g. \"zh\", \"en\", etc. @see maix.i18n.locales
    **static** False > C++ defination code: > ```cpp > void set_locale(const string &locale) > ``` #### get\\_locale ```python def get_locale(self) > str ``` Get current locale. item description **type** func **return** locale name, e.g. \"zh\", \"en\", etc. @see maix.i18n.locales **static** False > C++ defination code: > ```cpp > string get_locale() > ```"},"/maixpy/api/maix/image.html":{"title":"maix.image","content":" title: maix.image maix.image module, image related definition and functions > You can use `maix.image` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ### Format Image formats item describe **attention** for MaixPy firmware developers, update this enum will also need to update the fmt_size and fmt_names too !!! **values** **FMT_RGB888**: RGBRGB...RGB, R at the lowest address
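A minimal Trans sketch using an inline dict (loading yaml generated by `maixtool i18n` via load() or load_trans_yaml() works the same way); the translations here are only examples.

```python
from maix import i18n

trans = i18n.Trans({
    "zh": {"Confirm": "确认", "OK": "好的"},
    "en": {"Confirm": "Confirm", "OK": "OK"},
})

print(i18n.get_locale(), i18n.get_language_name())
print(trans.tr("Confirm"))      # uses the system locale by default
trans.set_locale("zh")          # temporary override, system settings untouched
print(trans.tr("OK"))           # -> "好的"
```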
    **FMT_BGR888**: BGRBGR...BGR, B at the lowest address
    **FMT_RGBA8888**: RGBARGBA...RGBA, R at the lowest address
    **FMT_BGRA8888**: BGRABGRA...BGRA, B at the lowest address
    **FMT_RGB565**:
    **FMT_BGR565**:
    **FMT_YUV422SP**: YYY...UVUVUV...UVUV
    **FMT_YUV422P**: YYY...UUU...VVV
    **FMT_YVU420SP**: YYY...VUVUVU...VUVU, NV21
    **FMT_YUV420SP**: YYY...UVUVUV...UVUV, NV12
    **FMT_YVU420P**: YYY...VVV...UUU
    **FMT_YUV420P**: YYY...UUU...VVV
    **FMT_GRAYSCALE**:
    **FMT_BGGR6**: 6 bit Bayer format with a BGGR pattern.
    **FMT_GBRG6**: 6 bit Bayer format with a GBRG pattern.
    **FMT_GRBG6**: 6 bit Bayer format with a GRBG pattern.
    **FMT_RGGB6**: 6 bit Bayer format with a RGGB pattern.
    **FMT_BGGR8**: 8 bit Bayer format with a BGGR pattern.
    **FMT_GBRG8**: 8 bit Bayer format with a GBRG pattern.
    **FMT_GRBG8**: 8 bit Bayer format with a GRBG pattern.
    **FMT_RGGB8**: 8 bit Bayer format with a RGGB pattern.
    **FMT_BGGR10**: 10 bit Bayer format with a BGGR pattern.
    **FMT_GBRG10**: 10 bit Bayer format with a GBRG pattern.
    **FMT_GRBG10**: 10 bit Bayer format with a GRBG pattern.
    **FMT_RGGB10**: 10 bit Bayer format with a RGGB pattern.
    **FMT_BGGR12**: 12 bit Bayer format with a BGGR pattern.
    **FMT_GBRG12**: 12 bit Bayer format with a GBRG pattern.
    **FMT_GRBG12**: 12 bit Bayer format with a GRBG pattern.
    **FMT_RGGB12**: 12 bit Bayer format with a RGGB pattern.
    **FMT_UNCOMPRESSED_MAX**:
    **FMT_COMPRESSED_MIN**:
    **FMT_JPEG**:
    **FMT_PNG**:
    **FMT_COMPRESSED_MAX**:
    **FMT_INVALID**: format not valid
    > C++ defination code: > ```cpp > enum Format > { > FMT_RGB888 0, // RGBRGB...RGB, R at the lowest address > FMT_BGR888, // BGRBGR...BGR, B at the lowest address > FMT_RGBA8888, // RGBARGBA...RGBA, R at the lowest address > FMT_BGRA8888, // BGRABGRA...BGRA, B at the lowest address > FMT_RGB565, > FMT_BGR565, > FMT_YUV422SP, // YYY...UVUVUV...UVUV > FMT_YUV422P, // YYY...UUU...VVV > FMT_YVU420SP, // YYY...VUVUVU...VUVU, NV21 > FMT_YUV420SP, // YYY...UVUVUV...UVUV, NV12 > FMT_YVU420P, // YYY...VVV...UUU > FMT_YUV420P, // YYY...UUU...VVV > FMT_GRAYSCALE, > FMT_BGGR6, // 6 bit Bayer format with a BGGR pattern. > FMT_GBRG6, // 6 bit Bayer format with a GBRG pattern. > FMT_GRBG6, // 6 bit Bayer format with a GRBG pattern. > FMT_RGGB6, // 6 bit Bayer format with a RGGB pattern. > FMT_BGGR8, // 8 bit Bayer format with a BGGR pattern. > FMT_GBRG8, // 8 bit Bayer format with a GBRG pattern. > FMT_GRBG8, // 8 bit Bayer format with a GRBG pattern. > FMT_RGGB8, // 8 bit Bayer format with a RGGB pattern. > FMT_BGGR10, // 10 bit Bayer format with a BGGR pattern. > FMT_GBRG10, // 10 bit Bayer format with a GBRG pattern. > FMT_GRBG10, // 10 bit Bayer format with a GRBG pattern. > FMT_RGGB10, // 10 bit Bayer format with a RGGB pattern. > FMT_BGGR12, // 12 bit Bayer format with a BGGR pattern. > FMT_GBRG12, // 12 bit Bayer format with a GBRG pattern. > FMT_GRBG12, // 12 bit Bayer format with a GRBG pattern. > FMT_RGGB12, // 12 bit Bayer format with a RGGB pattern. > FMT_UNCOMPRESSED_MAX, > > // compressed format below, not compressed should define upper > FMT_COMPRESSED_MIN, > FMT_JPEG, > FMT_PNG, > FMT_COMPRESSED_MAX, > > FMT_INVALID 0xFF // format not valid > } > ``` ### Fit Object fit method item describe **values** **FIT_NONE**: no object fit, keep original
    **FIT_FILL**: width to new width, height to new height, may be stretch
    **FIT_CONTAIN**: keep aspect ratio, fill blank area with black color
    **FIT_COVER**: keep aspect ratio, crop image to fit new size
    **FIT_MAX**:
    > C++ defination code: > ```cpp > enum Fit > { > FIT_NONE 1, // no object fit, keep original > FIT_FILL 0, // width to new width, height to new height, may be stretch > FIT_CONTAIN, // keep aspect ratio, fill blank area with black color > FIT_COVER, // keep aspect ratio, crop image to fit new size > FIT_MAX > } > ``` ### ResizeMethod Resize method item describe **values** **NEAREST**:
    **BILINEAR**:
    **BICUBIC**:
    **AREA**:
    **LANCZOS**:
    **HAMMING**:
    **RESIZE_METHOD_MAX**:
    > C++ defination code: > ```cpp > enum ResizeMethod > { > NEAREST 0, > BILINEAR, > BICUBIC, > AREA, > LANCZOS, > HAMMING, > RESIZE_METHOD_MAX > } > ``` ### ApriltagFamilies Family of apriltag item describe **values** **TAG16H5**:
    **TAG25H7**:
    **TAG25H9**:
    **TAG36H10**:
    **TAG36H11**:
    **ARTOOLKIT**:
    > C++ defination code: > ```cpp > enum ApriltagFamilies > { > TAG16H5 1, > TAG25H7 2, > TAG25H9 4, > TAG36H10 8, > TAG36H11 16, > ARTOOLKIT 32 > } > ``` ### TemplateMatch Template match method item describe **values** **SEARCH_EX**: Exhaustive search
    **SEARCH_DS**: Diamond search
    > C++ defination code: > ```cpp > enum TemplateMatch > { > SEARCH_EX, // Exhaustive search > SEARCH_DS, // Diamond search > } > ``` ### CornerDetector CornerDetector class item describe **values** **CORNER_FAST**:
    **CORNER_AGAST**:
    > C++ defination code: > ```cpp > enum CornerDetector > { > CORNER_FAST, > CORNER_AGAST > } > ``` ### EdgeDetector EdgeDetector class item describe **values** **EDGE_CANNY**:
    **EDGE_SIMPLE**:
    > C++ defination code: > ```cpp > enum EdgeDetector > { > EDGE_CANNY, > EDGE_SIMPLE, > } > ``` ## Variable ### fmt\\_size Image format size in bytes item description **attention** It's a copy of this variable in MaixPy,
so changing it in C++ (e.g. updating the var in the hello function) will not affect the var in MaixPy.
    So we add const for this var to avoid this mistake. **value** **{
    3,
    3,
    4,
    4,
    2,
    2,
    2,
    2,
    1.5,
    1.5,
    1.5,
    1.5,
    1, // grayscale
    0.75, // 6 bit Bayer format
    0.75, // 6 bit Bayer format
    0.75, // 6 bit Bayer format
    0.75, // 6 bit Bayer format
    1, // 8 bit Bayer format
    1, // 8 bit Bayer format
    1, // 8 bit Bayer format
    1, // 8 bit Bayer format
    1.25, // 10 bit Bayer format
    1.25, // 10 bit Bayer format
    1.25, // 10 bit Bayer format
    1.25, // 10 bit Bayer format
    1.5, // 12 bit Bayer format
    1.5, // 12 bit Bayer format
    1.5, // 12 bit Bayer format
    1.5, // 12 bit Bayer format
    0, // uncompereed_max
    0, // compressed_min
    1, // jpeg
    1, // png
    0, // compressed_max
    0 // invalid
    }** **readonly** True > C++ defination code: > ```cpp > const std::vector fmt_size { > 3, > 3, > 4, > 4, > 2, > 2, > 2, > 2, > 1.5, > 1.5, > 1.5, > 1.5, > 1, // grayscale > 0.75, // 6 bit Bayer format > 0.75, // 6 bit Bayer format > 0.75, // 6 bit Bayer format > 0.75, // 6 bit Bayer format > 1, // 8 bit Bayer format > 1, // 8 bit Bayer format > 1, // 8 bit Bayer format > 1, // 8 bit Bayer format > 1.25, // 10 bit Bayer format > 1.25, // 10 bit Bayer format > 1.25, // 10 bit Bayer format > 1.25, // 10 bit Bayer format > 1.5, // 12 bit Bayer format > 1.5, // 12 bit Bayer format > 1.5, // 12 bit Bayer format > 1.5, // 12 bit Bayer format > 0, // uncompereed_max > 0, // compressed_min > 1, // jpeg > 1, // png > 0, // compressed_max > 0 // invalid > } > ``` ### fmt\\_names Image format string item description **value** **{
    \"RGB888\",
    \"BGR888\",
    \"RGBA8888\",
    \"BGRA8888\",
    \"RGB565\",
    \"BGR565\",
    \"YUV422SP\",
    \"YUV422P\",
    \"YVU420SP\",
    \"YUV420SP\",
    \"YVU420P\",
    \"YUV420P\",
    \"GRAYSCALE\",
    \"BGGR6\",
    \"GBRG6\",
    \"GRBG6\",
    \"RG6B6\",
    \"BGGR8\",
    \"GBRG8\",
    \"GRBG8\",
    \"RG6B8\",
    \"BGGR10\",
    \"GBRG10\",
    \"GRBG10\",
    \"RG6B10\",
    \"BGGR12\",
    \"GBRG12\",
    \"GRBG12\",
    \"RG6B12\",
    \"UNCOMPRESSED_MAX\",
    \"COMPRESSED_MIN\",
    \"JPEG\",
    \"PNG\",
    \"COMPRESSED_MAX\",
    \"INVALID\"
    }** **readonly** True > C++ defination code: > ```cpp > const std::vector fmt_names { > \"RGB888\", > \"BGR888\", > \"RGBA8888\", > \"BGRA8888\", > \"RGB565\", > \"BGR565\", > \"YUV422SP\", > \"YUV422P\", > \"YVU420SP\", > \"YUV420SP\", > \"YVU420P\", > \"YUV420P\", > \"GRAYSCALE\", > \"BGGR6\", > \"GBRG6\", > \"GRBG6\", > \"RG6B6\", > \"BGGR8\", > \"GBRG8\", > \"GRBG8\", > \"RG6B8\", > \"BGGR10\", > \"GBRG10\", > \"GRBG10\", > \"RG6B10\", > \"BGGR12\", > \"GBRG12\", > \"GRBG12\", > \"RG6B12\", > \"UNCOMPRESSED_MAX\", > \"COMPRESSED_MIN\", > \"JPEG\", > \"PNG\", > \"COMPRESSED_MAX\", > \"INVALID\" > } > ``` ### COLOR\\_WHITE Predefined color white item description **value** **image::Color::from_rgb(255, 255, 255)** **readonly** True > C++ defination code: > ```cpp > const image::Color COLOR_WHITE image::Color::from_rgb(255, 255, 255) > ``` ### COLOR\\_BLACK Predefined color black item description **value** **image::Color::from_rgb(0, 0, 0)** **readonly** True > C++ defination code: > ```cpp > const image::Color COLOR_BLACK image::Color::from_rgb(0, 0, 0) > ``` ### COLOR\\_RED Predefined color red item description **value** **image::Color::from_rgb(255, 0, 0)** **readonly** True > C++ defination code: > ```cpp > const image::Color COLOR_RED image::Color::from_rgb(255, 0, 0) > ``` ### COLOR\\_GREEN Predefined color green item description **value** **image::Color::from_rgb(0, 255, 0)** **readonly** True > C++ defination code: > ```cpp > const image::Color COLOR_GREEN image::Color::from_rgb(0, 255, 0) > ``` ### COLOR\\_BLUE Predefined color blue item description **value** **image::Color::from_rgb(0, 0, 255)** **readonly** True > C++ defination code: > ```cpp > const image::Color COLOR_BLUE image::Color::from_rgb(0, 0, 255) > ``` ### COLOR\\_YELLOW Predefined color yellow item description **value** **image::Color::from_rgb(255, 255, 0)** **readonly** True > C++ defination code: > ```cpp > const image::Color COLOR_YELLOW image::Color::from_rgb(255, 255, 0) > ``` ### COLOR\\_PURPLE Predefined color purple item description **value** **image::Color::from_rgb(143, 0, 255)** **readonly** True > C++ defination code: > ```cpp > const image::Color COLOR_PURPLE image::Color::from_rgb(143, 0, 255) > ``` ### COLOR\\_ORANGE Predefined color orange item description **value** **image::Color::from_rgb(255, 127, 0)** **readonly** True > C++ defination code: > ```cpp > const image::Color COLOR_ORANGE image::Color::from_rgb(255, 127, 0) > ``` ### COLOR\\_GRAY Predefined color gray item description **value** **image::Color::from_rgb(127, 127, 127)** **readonly** True > C++ defination code: > ```cpp > const image::Color COLOR_GRAY image::Color::from_rgb(127, 127, 127) > ``` ## Function ### resize\\_map\\_pos ```python def resize_map_pos(w_in: int, h_in: int, w_out: int, h_out: int, fit: Fit, x: int, y: int, w: int 1, h: int 1) > list[int] ``` map point position or rectangle position from one image size to another image size(resize) item description **param** **int**: h_out target image height
    **fit**: resize method, see maix.image.Fit
**x**: original point x, or rectangle left top point's x
**y**: original point y, or rectangle left top point's y
**w**: original rectangle width, can be -1 if this arg is not used, default -1.
**h**: original rectangle height, can be -1 if this arg is not used, default -1.
    **return** list type, [x, y] if map point, [x, y, w, h] if resize rectangle. > C++ defination code: > ```cpp > std::vector resize_map_pos(int w_in, int h_in, int w_out, int h_out, image::Fit fit, int x, int y, int w 1, int h 1) > ``` ### resize\\_map\\_pos (overload 1) map point position or rectangle position from this image size to another image size(resize) item description **param** **int**: h_out target image height
    **fit**: resize method, see maix.image.Fit
**x**: original point x, or rectangle left top point's x
**y**: original point y, or rectangle left top point's y
**w**: original rectangle width, can be -1 if this arg is not used, default -1.
**h**: original rectangle height, can be -1 if this arg is not used, default -1.
**return** list type, [x, y] if map point, [x, y, w, h] if resize rectangle. > C++ defination code: > ```cpp > std::vector resize_map_pos(int w_out, int h_out, image::Fit fit, int x, int y, int w 1, int h 1) > ``` ### resize\\_map\\_pos\\_reverse ```python def resize_map_pos_reverse(w_in: int, h_in: int, w_out: int, h_out: int, fit: Fit, x: int, y: int, w: int 1, h: int 1) > list[int] ``` reverse of the resize_map_pos method: when we call the image.resize method to resize image 'a' to image 'b', we want to know the original position on 'a' given a known point on 'b' item description **param** **int**: h_out image height after resized
    **fit**: resize method, see maix.image.Fit
**x**: point on resized image x, or rectangle left top point's x
**y**: original point y, or rectangle left top point's y
**w**: original rectangle width, can be -1 if this arg is not used, default -1.
**h**: original rectangle height, can be -1 if this arg is not used, default -1.
    **return** list type, [x, y] if map point, [x, y, w, h] if resize rectangle. > C++ defination code: > ```cpp > std::vector resize_map_pos_reverse(int w_in, int h_in, int w_out, int h_out, image::Fit fit, int x, int y, int w 1, int h 1) > ``` ### load ```python def load(path: str, format: Format ...) > Image ``` Load image from file, and convert to Image object item description **param** **path**: image file path
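A small sketch of the coordinate mapping helpers above, assuming they behave as documented: map a point from a 640x480 source to a 320x240 target with FIT_CONTAIN, then map it back.

```python
from maix import image

# 640x480 -> 320x240 with FIT_CONTAIN is a plain 0.5x scale, so (100, 100) should map to (50, 50).
x, y = image.resize_map_pos(640, 480, 320, 240, image.Fit.FIT_CONTAIN, 100, 100)
print("on resized image:", x, y)

x0, y0 = image.resize_map_pos_reverse(640, 480, 320, 240, image.Fit.FIT_CONTAIN, x, y)
print("back on original image:", x0, y0)
```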
**format**: read as this format; if the file's format does not match, it will be converted to this format, default is RGB888
    **return** Image object, if load failed, will return None(nullptr in C++), so you should care about it. > C++ defination code: > ```cpp > image::Image *load(const char *path, image::Format format image::Format::FMT_RGB888) > ``` ### from\\_bytes ```python def from_bytes(width: int, height: int, format: Format, data: maix.Bytes(bytes), copy: bool True) > Image ``` Create image from bytes item description **param** **width**: image width
    **height**: image height
    **format**: image format
    **data**: image data, if data is None, will malloc memory for image data
    If the image is in jpeg format, data must be filled in.
    **copy**: if true and data is not None, will copy data to new buffer, else will use data directly. default is true to avoid memory leak.
    Use it carefully!!!
    **return** Image object > C++ defination code: > ```cpp > image::Image *from_bytes(int width, int height, image::Format format, Bytes *data, bool copy true) > ``` ### load\\_font ```python def load_font(name: str, path: str, size: int 16) > maix.err.Err ``` Load font from file item description **param** **name**: font name, used to identify font
    **path**: font file path, support ttf, ttc, otf
    **size**: font size, font height, by default is 16
    **return** error code, err::ERR_NONE is ok, other is error > C++ defination code: > ```cpp > err::Err load_font(const std::string &name, const char *path, int size 16) > ``` ### set\\_default\\_font Set default font, if not call this method, default is hershey_plain item description **param** **name**: font name, supported names can be get by fonts()
    **return** error code, err::ERR_NONE is ok, other is error > C++ defination code: > ```cpp > err::Err set_default_font(const std::string &name) > ``` ### fonts ```python def fonts() > list[str] ``` Get all loaded fonts item description **return** all loaded fonts, string list type > C++ defination code: > ```cpp > std::vector *fonts() > ``` ### string\\_size ```python def string_size(string: str, scale: float 1, thickness: int 1, font: str '') > Size ``` Get text rendered width and height item description **param** **string**: text content
    **scale**: font scale, default is 1
    **thickness**: text thickness (line width), default is 1
    **return** text rendered width and height, [width, height] (see the usage sketch below) > C++ definition code: > ```cpp > image::Size string_size(std::string string, float scale = 1, int thickness = 1, const std::string &font = \"\") > ``` ### cv2image ```python def cv2image(array: numpy.ndarray[numpy.uint8], bgr: bool = True, copy: bool = True) -> Image ``` OpenCV Mat(numpy array object) to Image object item description **param** **array**: numpy array object, must be a 3-dim or 2-dim continuous array with shape hwc or hw
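A sketch tying together `load_font`, `set_default_font`, `fonts` and `string_size` described above. The font path is hypothetical and the 320x240 screen size is only an example:

```python
from maix import image, err

# Load a TTF font and make it the default for later text drawing.
e = image.load_font("sans", "/maixapp/share/font/sans.ttf", size=24)  # hypothetical path
if e != err.Err.ERR_NONE:
    raise RuntimeError("load font failed")
image.set_default_font("sans")
print("loaded fonts:", image.fonts())

# Measure a string before drawing it, e.g. to center it on a 320x240 screen.
size = image.string_size("Hello MaixPy", scale=1.5, thickness=2)
x = (320 - size.width()) // 2
y = (240 - size.height()) // 2
print("draw at:", x, y)
```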
    **bgr**: if set bgr, the return image will be marked as BGR888 or BGRA8888 format, grayscale will ignore this arg.
    **copy**: if true, will alloc new buffer and copy data, else will directly use array's data buffer, default true.
    Use this arg carefully: when set to false, the array MUST stay alive for as long as the returned img of this func is in use, or the program will crash.
    **return** Image object > C++ defination code: > ```cpp > image::Image *cv2image(py::array_t array, bool bgr true, bool copy true) > ``` ### image2cv ```python def image2cv(img: Image, ensure_bgr: bool True, copy: bool True) > numpy.ndarray[numpy.uint8] ``` Image object to OpenCV Mat(numpy array object) item description **param** **img**: Image object, maix.image.Image type.
    **ensure_bgr**: auto convert to BGR888 or BGRA8888 if img format is not BGR or BGRA, if set to false, will not auto convert and directly use img's data, default true.
    **copy**: whether to alloc a new buffer and copy data or not; if ensure_bgr is true and img is not in BGR or BGRA format, data is always copied.
    If copy is false, the array object directly uses img's data buffer, which is faster, but changing the array will affect img's data. Default is true.
    **attention** take care of ensure_bgr and copy param. **return** numpy array object > C++ defination code: > ```cpp > py::array_t image2cv(image::Image *img, bool ensure_bgr true, bool copy true) > ``` ## Class ### Size Image size type > C++ defination code: > ```cpp > class Size > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, width: int 0, height: int 0) > None ``` Construct a new Size object item description **type** func **param** **width**: image width
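A round-trip sketch for `cv2image` and `image2cv` described above, assuming numpy is available on the device and that keyword arguments match the Python signatures shown:

```python
import numpy as np
from maix import image

# numpy (OpenCV-style HWC, BGR) array -> maix Image object
bgr = np.zeros((240, 320, 3), dtype=np.uint8)
img = image.cv2image(bgr, bgr=True, copy=True)

# maix Image -> numpy array, converted to BGR888 so OpenCV code can consume it.
# With copy=False the array would share img's buffer: keep img alive while using it.
mat = image.image2cv(img, ensure_bgr=True, copy=True)
print(mat.shape)  # (240, 320, 3)
```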
    **height**: image height
    **static** False > C++ defination code: > ```cpp > Size(int width 0, int height 0) > ``` #### width ```python def width(self, width: int 1) > int ``` width of size item description **type** func **param** **width**: set new width, if not set, only return current width
    **static** False > C++ defination code: > ```cpp > int width(int width 1) > ``` #### height ```python def height(self, height: int 1) > int ``` height of size item description **type** func **param** **height**: set new height, if not set, only return current height
    **static** False > C++ defination code: > ```cpp > int height(int height 1) > ``` #### \\_\\_getitem\\_\\_ ```python def __getitem__(self, index: int) > int ``` Subscript operator item description **type** func **param** **index**: 0 for width, 1 for height
    **return** int& width or height **static** False > C++ defination code: > ```cpp > int &operator[](int index) > ``` #### \\_\\_str\\_\\_ ```python def __str__(self) > str ``` to string item description **type** func **static** False > C++ defination code: > ```cpp > std::string __str__() > ``` ### Line Line class > C++ defination code: > ```cpp > class Line > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, x1: int, y1: int, x2: int, y2: int, magnitude: int 0, theta: int 0, rho: int 0) > None ``` Line constructor item description **type** func **param** **x1**: coordinate x1 of the straight line
    **y1**: coordinate y1 of the straight line
    **x2**: coordinate x2 of the straight line
    **y2**: coordinate y2 of the straight line
    **magnitude**: magnitude of the straight line after Hough transformation
    **theta**: angle of the straight line after Hough transformation
    **rho**: p value of the straight line after Hough transformation
    **static** False > C++ defination code: > ```cpp > Line(int x1, int y1, int x2, int y2, int magnitude 0, int theta 0, int rho 0) > ``` #### \\_\\_getitem\\_\\_ ```python def __getitem__(self, index: int) > int ``` Subscript operator item description **type** func **param** **index**: [0] get x1 of line
    [1] get y1 of line
    [2] get x2 of line
    [3] get y2 of line
    [4] get length of line
    [5] get magnitude of the straight line after Hough transformation
    [6] get angle of the straight line after Hough transformation (0-179 degrees)
    [7] get p value of the straight line after Hough transformation
    **return** int& **static** False > C++ defination code: > ```cpp > int &__getitem__(int index) > ``` #### x1 ```python def x1(self) > int ``` get x1 of line item description **type** func **return** return x1 of the line, type is int **static** False > C++ defination code: > ```cpp > int x1() > ``` #### y1 ```python def y1(self) > int ``` get y1 of line item description **type** func **return** return y1 of the line, type is int **static** False > C++ defination code: > ```cpp > int y1() > ``` #### x2 ```python def x2(self) > int ``` get x2 of line item description **type** func **return** return x2 of the line, type is int **static** False > C++ defination code: > ```cpp > int x2() > ``` #### y2 ```python def y2(self) > int ``` get y2 of line item description **type** func **return** return y2 of the line, type is int **static** False > C++ defination code: > ```cpp > int y2() > ``` #### length ```python def length(self) > int ``` get length of line item description **type** func **return** return length of the line, type is int **static** False > C++ defination code: > ```cpp > int length() > ``` #### magnitude ```python def magnitude(self) > int ``` get magnitude of the straight line after Hough transformation item description **type** func **return** return magnitude, type is int **static** False > C++ defination code: > ```cpp > int magnitude() > ``` #### theta ```python def theta(self) > int ``` get angle of the straight line after Hough transformation (0 179 degrees) item description **type** func **return** return angle, type is int **static** False > C++ defination code: > ```cpp > int theta() > ``` #### rho ```python def rho(self) > int ``` get p value of the straight line after Hough transformation item description **type** func **return** return p value, type is int **static** False > C++ defination code: > ```cpp > int rho() > ``` ### Rect Rect class > C++ defination code: > ```cpp > class Rect > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, corners: list[list[int]], x: int, y: int, w: int, h: int, magnitude: int 0) > None ``` Rect constructor item description **type** func **param** **corners**: corners of rect
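A usage sketch for the `Line` accessors above. `camera.Camera`, `Camera.read()` and `Image.find_lines()` (including its `threshold` argument) are assumed from other parts of this API and are not defined in this section:

```python
from maix import camera

cam = camera.Camera(320, 240)
img = cam.read()

# Each result of find_lines() is a Line object as documented above.
for l in img.find_lines(threshold=1000):
    print("p1:", l.x1(), l.y1(), "p2:", l.x2(), l.y2())
    print("length:", l.length(), "theta:", l.theta(), "rho:", l.rho())
    # Subscript access mirrors the accessors, e.g. l[0] == l.x1() and l[4] == l.length().
```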
    **x**: coordinate x of the rectangle
    **y**: coordinate y of the rectangle
    **w**: width w of the rectangle
    **h**: height h of the rectangle
    **magnitude**: magnitude of the straight line after Hough transformation
    **static** False > C++ defination code: > ```cpp > Rect(std::vector> &corners, int x, int y, int w, int h, int magnitude 0) > ``` #### \\_\\_getitem\\_\\_ ```python def __getitem__(self, index: int) > int ``` Subscript operator item description **type** func **param** **index**: [0] get x of rect
    [1] get y of rect
    [2] get w of rect
    [3] get h of rect
    [4] get magnitude of the straight line after Hough transformation
    **return** int& **static** False > C++ defination code: > ```cpp > int &__getitem__(int index) > ``` #### corners ```python def corners(self) > list[list[int]] ``` get corners of rect item description **type** func **return** return the coordinate of the rect. **static** False > C++ defination code: > ```cpp > std::vector> corners() > ``` #### rect ```python def rect(self) > list[int] ``` get rectangle of rect item description **type** func **return** return the rectangle of the rect. format is {x, y, w, h}, type is std::vector **static** False > C++ defination code: > ```cpp > std::vector rect() > ``` #### x ```python def x(self) > int ``` get x of rect item description **type** func **return** return x of the rect, type is int **static** False > C++ defination code: > ```cpp > int x() > ``` #### y ```python def y(self) > int ``` get y of rect item description **type** func **return** return y of the rect, type is int **static** False > C++ defination code: > ```cpp > int y() > ``` #### w ```python def w(self) > int ``` get w of rect item description **type** func **return** return w of the rect, type is int **static** False > C++ defination code: > ```cpp > int w() > ``` #### h ```python def h(self) > int ``` get h of rect item description **type** func **return** return h of the rect, type is int **static** False > C++ defination code: > ```cpp > int h() > ``` #### magnitude ```python def magnitude(self) > int ``` get magnitude of the straight line after Hough transformation item description **type** func **return** return magnitude, type is int **static** False > C++ defination code: > ```cpp > int magnitude() > ``` ### Circle circle class > C++ defination code: > ```cpp > class Circle > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, x: int, y: int, r: int, magnitude: int) > None ``` Circle constructor item description **type** func **param** **x**: coordinate x of the circle
    **y**: coordinate y of the circle
    **r**: coordinate r of the circle
    **magnitude**: magnitude of the circle after Hough transformation
    **static** False > C++ defination code: > ```cpp > Circle(int x, int y, int r, int magnitude) > ``` #### \\_\\_getitem\\_\\_ ```python def __getitem__(self, index: int) > int ``` Subscript operator item description **type** func **param** **index**: [0] get x of circle
    [1] get y of circle
    [2] get r of circle
    [3] get magnitude of the circle after Hough transformation
    **return** int& **static** False > C++ defination code: > ```cpp > int &__getitem__(int index) > ``` #### x ```python def x(self) > int ``` get x of circle item description **type** func **return** return x of the circle, type is int **static** False > C++ defination code: > ```cpp > int x() > ``` #### y ```python def y(self) > int ``` get y of circle item description **type** func **return** return y of the circle, type is int **static** False > C++ defination code: > ```cpp > int y() > ``` #### r ```python def r(self) > int ``` get r of circle item description **type** func **return** return r of the circle, type is int **static** False > C++ defination code: > ```cpp > int r() > ``` #### magnitude ```python def magnitude(self) > int ``` get magnitude of the circle after Hough transformation item description **type** func **return** return magnitude, type is int **static** False > C++ defination code: > ```cpp > int magnitude() > ``` ### Blob Blob class > C++ defination code: > ```cpp > class Blob > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, rect: list[int], corners: list[list[int]], mini_corners: list[list[int]], cx: float, cy: float, pixels: int, rotation: float, code: int, count: int, perimeter: int, roundness: float, x_hist_bins: list[int], y_hist_bins: list[int]) > None ``` Blob constructor item description **type** func **param** **rect**: blob rect, type is std::vector
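A usage sketch for the `Circle` accessors above. `camera.Camera`, `Image.find_circles()`, `Image.draw_circle()` and `image.COLOR_RED` are assumed from other parts of this API:

```python
from maix import image, camera

cam = camera.Camera(320, 240)
img = cam.read()

# Each result of find_circles() is a Circle object as documented above.
for c in img.find_circles(threshold=2000):
    print("center:", c.x(), c.y(), "radius:", c.r(), "magnitude:", c.magnitude())
    img.draw_circle(c.x(), c.y(), c.r(), image.COLOR_RED)  # drawing API assumed
```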
    **corners**: blob corners, type is std::vector>
    **mini_corners**: blob mini_corners, type is std::vector>
    **cx**: blob center x, type is float
    **cy**: blob center y, type is float
    **pixels**: blob pixels, type is int
    **rotation**: blob rotation, type is float
    **code**: blob code, type is int
    **count**: blob count, type is int
    **perimeter**: blob perimeter, type is int
    **roundness**: blob roundness, type is float
    **x_hist_bins**: blob x_hist_bins, type is std::vector
    **y_hist_bins**: blob y_hist_bins, type is std::vector
    **static** False > C++ defination code: > ```cpp > Blob(std::vector &rect, std::vector> &corners, std::vector> &mini_corners,float cx, float cy, int pixels, float rotation, int code, int count, int perimeter, float roundness, std::vector &x_hist_bins, std::vector &y_hist_bins) > ``` #### \\_\\_getitem\\_\\_ ```python def __getitem__(self, index: int) > int ``` Subscript operator item description **type** func **param** **index**: [0] Returns the blob’s bounding box x coordinate
    [1] Returns the blob’s bounding box y coordinate
    [2] Returns the blob’s bounding box w coordinate
    [3] Returns the blob’s bounding box h coordinate
    [4] Returns the number of pixels that are part of this blob
    [5] Returns the centroid x position of the blob
    [6] Returns the centroid y position of the blob
    **return** int& width or height **static** False > C++ defination code: > ```cpp > int &__getitem__(int index) > ``` #### corners ```python def corners(self) > list[list[int]] ``` get blob corners item description **type** func **return** Returns a list of 4 (x,y) tuples of the 4 corners of the object.
    (x0, y0) is the top-left corner, (x1, y1) the top-right, (x2, y2) the bottom-right and (x3, y3) the bottom-left corner.
    note: the order of corners may change **static** False > C++ defination code: > ```cpp > std::vector> corners() > ``` #### mini\\_corners ```python def mini_corners(self) > list[list[int]] ``` get blob mini corners item description **type** func **return** Returns a list of 4 (x,y) tuples of the 4 corners than bound the min area rectangle of the blob.
    (x0, y0) is the top-left corner, (x1, y1) the top-right, (x2, y2) the bottom-right and (x3, y3) the bottom-left corner.
    note: the order of corners may change **static** False > C++ defination code: > ```cpp > std::vector> mini_corners() > ``` #### rect ```python def rect(self) > list[int] ``` get blob rect item description **type** func **return** Returns the center coordinates and width and height of the rectangle. format is (x, y, w, h)
    (x, y) is the top-left corner of the bounding rectangle, w is its width and h is its height.
    ___________ **static** False > C++ defination code: > ```cpp > std::vector rect() > ``` #### x ```python def x(self) > int ``` get blob x of the upper left coordinate item description **type** func **return** Returns the x coordinate of the upper left corner of the rectangle. **static** False > C++ defination code: > ```cpp > int x() > ``` #### y ```python def y(self) > int ``` get blob y of the upper left coordinate item description **type** func **return** Returns the y coordinate of the upper left corner of the rectangle. **static** False > C++ defination code: > ```cpp > int y() > ``` #### w ```python def w(self) > int ``` get blob width item description **type** func **return** Returns the blob’s bounding box w coordinate **static** False > C++ defination code: > ```cpp > int w() > ``` #### h ```python def h(self) > int ``` get blob height item description **type** func **return** Returns the blob’s bounding box h coordinate **static** False > C++ defination code: > ```cpp > int h() > ``` #### pixels ```python def pixels(self) > int ``` get blob pixels item description **type** func **return** Returns the number of pixels that are part of this blob. **static** False > C++ defination code: > ```cpp > int pixels() > ``` #### cx ```python def cx(self) > int ``` get blob center x item description **type** func **return** Returns the centroid x position of the blob **static** False > C++ defination code: > ```cpp > int cx() > ``` #### cy ```python def cy(self) > int ``` get blob center y item description **type** func **return** Returns the centroid y position of the blob **static** False > C++ defination code: > ```cpp > int cy() > ``` #### cxf ```python def cxf(self) > float ``` get blob center x item description **type** func **return** Returns the centroid x position of the blob **static** False > C++ defination code: > ```cpp > float cxf() > ``` #### cyf ```python def cyf(self) > float ``` get blob center y item description **type** func **return** Returns the centroid y position of the blob **static** False > C++ defination code: > ```cpp > float cyf() > ``` #### rotation ```python def rotation(self) > float ``` get blob rotation item description **type** func **return** Returns the rotation of the blob in radians (float). If the blob is like a pencil or pen this value will be unique for 0 180 degrees. **static** False > C++ defination code: > ```cpp > float rotation() > ``` #### rotation\\_rad ```python def rotation_rad(self) > float ``` get blob rotation_rad item description **type** func **return** Returns the rotation of the blob in radians **static** False > C++ defination code: > ```cpp > float rotation_rad() > ``` #### rotation\\_deg ```python def rotation_deg(self) > int ``` get blob rotation_deg item description **type** func **return** Returns the rotation of the blob in degrees. **static** False > C++ defination code: > ```cpp > int rotation_deg() > ``` #### code ```python def code(self) > int ``` get blob code item description **type** func **return** Returns a 32 bit binary number with a bit set in it for each color threshold that’s part of this blob **static** False > C++ defination code: > ```cpp > int code() > ``` #### count ```python def count(self) > int ``` get blob count item description **type** func **return** Returns the number of blobs merged into this blob. 
**static** False > C++ defination code: > ```cpp > int count() > ``` #### perimeter ```python def perimeter(self) > int ``` get blob merge_cnt item description **type** func **return** Returns the number of pixels on this blob’s perimeter. **static** False > C++ defination code: > ```cpp > int perimeter() > ``` #### roundness ```python def roundness(self) > float ``` get blob roundness item description **type** func **return** Returns a value between 0 and 1 representing how round the object is **static** False > C++ defination code: > ```cpp > float roundness() > ``` #### elongation ```python def elongation(self) > float ``` get blob elongation item description **type** func **returnReturns** a value between 0 and 1 representing how long (not round) the object is **static** False > C++ defination code: > ```cpp > float elongation() > ``` #### area ```python def area(self) > int ``` get blob area item description **type** func **return** Returns the area of the bounding box around the blob **static** False > C++ defination code: > ```cpp > int area() > ``` #### density ```python def density(self) > float ``` get blob density item description **type** func **return** Returns the density ratio of the blob **static** False > C++ defination code: > ```cpp > float density() > ``` #### extent ```python def extent(self) > float ``` Alias for blob.density() item description **type** func **return** Returns the density ratio of the blob **static** False > C++ defination code: > ```cpp > float extent() > ``` #### compactness ```python def compactness(self) > float ``` get blob compactness item description **type** func **return** Returns the compactness ratio of the blob **static** False > C++ defination code: > ```cpp > float compactness() > ``` #### solidity ```python def solidity(self) > float ``` get blob solidity item description **type** func **return** Returns the solidity ratio of the blob **static** False > C++ defination code: > ```cpp > float solidity() > ``` #### convexity ```python def convexity(self) > float ``` get blob convexity item description **type** func **return** Returns a value between 0 and 1 representing how convex the object is **static** False > C++ defination code: > ```cpp > float convexity() > ``` #### x\\_hist\\_bins ```python def x_hist_bins(self) > list[int] ``` get blob x_hist_bins item description **type** func **return** Returns the x_hist_bins of the blob **static** False > C++ defination code: > ```cpp > std::vector x_hist_bins() > ``` #### y\\_hist\\_bins ```python def y_hist_bins(self) > list[int] ``` get blob y_hist_bins item description **type** func **return** Returns the y_hist_bins of the blob **static** False > C++ defination code: > ```cpp > std::vector y_hist_bins() > ``` #### major\\_axis\\_line ```python def major_axis_line(self) > list[int] ``` get blob major_axis_line item description **type** func **return** Returns a line tuple (x1, y1, x2, y2) of the minor axis of the blob. **static** False > C++ defination code: > ```cpp > std::vector major_axis_line() > ``` #### minor\\_axis\\_line ```python def minor_axis_line(self) > list[int] ``` get blob minor_axis_line item description **type** func **return** Returns a line tuple (x1, y1, x2, y2) of the minor axis of the blob. 
**static** False > C++ defination code: > ```cpp > std::vector minor_axis_line() > ``` #### enclosing\\_circle ```python def enclosing_circle(self) > list[int] ``` get blob enclosing_circle item description **type** func **return** Returns a circle tuple (x, y, r) of the circle that encloses the min area rectangle of a blob. **static** False > C++ defination code: > ```cpp > std::vector enclosing_circle() > ``` #### enclosed\\_ellipse ```python def enclosed_ellipse(self) > list[int] ``` get blob enclosed_ellipse item description **type** func **return** Returns an ellipse tuple (x, y, rx, ry, rotation) of the ellipse that fits inside of the min area rectangle of a blob. **static** False > C++ defination code: > ```cpp > std::vector enclosed_ellipse() > ``` ### QRCode QRCode class > C++ defination code: > ```cpp > class QRCode > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, rect: list[int], corners: list[list[int]], payload: str, version: int, ecc_level: int, mask: int, data_type: int, eci: int) > None ``` QRCode constructor item description **type** func **param** **rect**: rect of corners, type is std::vector
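A usage sketch for the `Blob` accessors above. `camera.Camera`, `Image.find_blobs()` with its keyword arguments, `Image.draw_rect()` and `image.COLOR_GREEN` are assumed from other parts of this API, and the LAB threshold is only a placeholder:

```python
from maix import image, camera

cam = camera.Camera(320, 240)
img = cam.read()

# Placeholder LAB threshold ([l_min, l_max, a_min, a_max, b_min, b_max]) for a red-ish color.
thresholds = [[0, 80, 40, 80, 10, 80]]
for b in img.find_blobs(thresholds, pixels_threshold=200):
    img.draw_rect(b.x(), b.y(), b.w(), b.h(), image.COLOR_GREEN)  # drawing API assumed
    print("center:", b.cx(), b.cy(), "pixels:", b.pixels(), "roundness:", b.roundness())
```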
    **corners**: corners of QRCode
    **payload**: payload of the QRCode
    **version**: version of the QRCode
    **ecc_level**: ecc_level of the QRCode
    **mask**: mask of the QRCode
    **data_type**: data_type of the QRCode
    **eci**: eci of the QRCode
    **static** False > C++ defination code: > ```cpp > QRCode(std::vector &rect, std::vector> &corners, std::string &payload, int version, int ecc_level, int mask, int data_type, int eci) > ``` #### \\_\\_getitem\\_\\_ ```python def __getitem__(self, index: int) > int ``` Subscript operator item description **type** func **param** **index**: [0] Returns the qrcode’s bounding box x coordinate
    [1] Returns the qrcode’s bounding box y coordinate
    [2] Returns the qrcode’s bounding box w coordinate
    [3] Returns the qrcode’s bounding box h coordinate
    [4] Not support this index, try to use payload() method
    [5] Returns the version of qrcode
    [6] Returns the error correction level of qrcode
    [7] Returns the mask of qrcode
    [8] Returns the datatype of qrcode
    [9] Returns the eci of qrcode
    **return** int& **static** False > C++ defination code: > ```cpp > int &__getitem__(int index) > ``` #### corners ```python def corners(self) > list[list[int]] ``` get coordinate of QRCode item description **type** func **return** return the coordinate of the QRCode. **static** False > C++ defination code: > ```cpp > std::vector> corners() > ``` #### rect ```python def rect(self) > list[int] ``` get rectangle of QRCode item description **type** func **return** return the rectangle of the QRCode. format is {x, y, w, h}, type is std::vector **static** False > C++ defination code: > ```cpp > std::vector rect() > ``` #### x ```python def x(self) > int ``` get x of QRCode item description **type** func **return** return x of the QRCode, type is int **static** False > C++ defination code: > ```cpp > int x() > ``` #### y ```python def y(self) > int ``` get y of QRCode item description **type** func **return** return y of the QRCode, type is int **static** False > C++ defination code: > ```cpp > int y() > ``` #### w ```python def w(self) > int ``` get w of QRCode item description **type** func **return** return w of the QRCode, type is int **static** False > C++ defination code: > ```cpp > int w() > ``` #### h ```python def h(self) > int ``` get h of QRCode item description **type** func **return** return h of the QRCode, type is int **static** False > C++ defination code: > ```cpp > int h() > ``` #### payload ```python def payload(self) > str ``` get QRCode payload item description **type** func **return** return area of the QRCode **static** False > C++ defination code: > ```cpp > std::string payload() > ``` #### version ```python def version(self) > int ``` get QRCode version item description **type** func **return** return version of the QRCode **static** False > C++ defination code: > ```cpp > int version() > ``` #### ecc\\_level ```python def ecc_level(self) > int ``` get QRCode error correction level item description **type** func **return** return error correction level of the QRCode **static** False > C++ defination code: > ```cpp > int ecc_level() > ``` #### mask ```python def mask(self) > int ``` get QRCode mask item description **type** func **return** return mask of the QRCode **static** False > C++ defination code: > ```cpp > int mask() > ``` #### data\\_type ```python def data_type(self) > int ``` get QRCode dataType item description **type** func **return** return mask of the QRCode **static** False > C++ defination code: > ```cpp > int data_type() > ``` #### eci ```python def eci(self) > int ``` get QRCode eci item description **type** func **return** return data of the QRCode **static** False > C++ defination code: > ```cpp > int eci() > ``` #### is\\_numeric ```python def is_numeric(self) > bool ``` check QRCode is numeric item description **type** func **return** return true if the result type of the QRCode is numeric **static** False > C++ defination code: > ```cpp > bool is_numeric() > ``` #### is\\_alphanumeric ```python def is_alphanumeric(self) > bool ``` check QRCode is alphanumeric item description **type** func **return** return true if the result type of the QRCode is alphanumeric **static** False > C++ defination code: > ```cpp > bool is_alphanumeric() > ``` #### is\\_binary ```python def is_binary(self) > bool ``` check QRCode is binary item description **type** func **return** return true if the result type of the QRCode is binary **static** False > C++ defination code: > ```cpp > bool is_binary() > ``` #### is\\_kanji ```python def is_kanji(self) > bool ``` 
check QRCode is kanji item description **type** func **return** return true if the result type of the QRCode is kanji **static** False > C++ defination code: > ```cpp > bool is_kanji() > ``` ### AprilTag AprilTag class > C++ defination code: > ```cpp > class AprilTag > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, rect: list[int], corners: list[list[int]], id: int, famliy: int, centroid_x: float, centroid_y: float, rotation: float, decision_margin: float, hamming: int, goodness: float, x_translation: float, y_translation: float, z_translation: float, x_rotation: float, y_rotation: float, z_rotation: float) > None ``` AprilTag constructor item description **type** func **param** **rect**: Inlucdes the top left corner and the width and height of the rectangle. format is {x, y, w, h}, type is std::vector
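A usage sketch for the `QRCode` accessors above. `camera.Camera`, `Image.find_qrcodes()`, `Image.draw_rect()` and `image.COLOR_BLUE` are assumed from other parts of this API:

```python
from maix import image, camera

cam = camera.Camera(320, 240)
img = cam.read()

# Each result of find_qrcodes() is a QRCode object as documented above.
for qr in img.find_qrcodes():
    print("payload:", qr.payload(), "version:", qr.version(), "ecc level:", qr.ecc_level())
    img.draw_rect(qr.x(), qr.y(), qr.w(), qr.h(), image.COLOR_BLUE)  # drawing API assumed
```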
    **corners**: Includes the four corners of the rectangle. format is {{x0, y0}, {x1, y1}, {x2, y2}, {x3, y3}}, type is std::vector>
    **id**: The id of the AprilTag
    **famliy**: The family of the AprilTag
    **centroid_x**: The x coordinate of the center of the AprilTag
    **centroid_y**: The y coordinate of the center of the AprilTag
    **rotation**: The rotation of the AprilTag
    **decision_margin**: The decision_margin of the AprilTag
    **hamming**: The hamming of the AprilTag
    **goodness**: The goodness of the AprilTag
    **x_translation**: The x_translation of the AprilTag
    **y_translation**: The y_translation of the AprilTag
    **z_translation**: The z_translation of the AprilTag
    **x_rotation**: The x_rotation of the AprilTag
    **y_rotation**: The y_rotation of the AprilTag
    **z_rotation**: The z_rotation of the AprilTag
    **static** False > C++ defination code: > ```cpp > AprilTag(std::vector &rect, std::vector> &corners, int id, int famliy, float centroid_x, float centroid_y, float rotation, float decision_margin, int hamming, float goodness, float x_translation, float y_translation, float z_translation, float x_rotation, float y_rotation, float z_rotation) > ``` #### \\_\\_getitem\\_\\_ ```python def __getitem__(self, index: int) > int ``` Subscript operator item description **type** func **param** **index**: [0] Returns the apriltag’s bounding box x coordinate
    [1] Returns the apriltag’s bounding box y coordinate
    [2] Returns the apriltag’s bounding box w coordinate
    [3] Returns the apriltag’s bounding box h coordinate
    [4] Returns the apriltag’s id
    [5] Returns the apriltag’s family
    [6] Not support
    [7] Not support
    [8] Not support
    [9] Not support
    [10] Returns the apriltag’s hamming
    [11] Not support
    [12] Not support
    [13] Not support
    [14] Not support
    [15] Not support
    [16] Not support
    [17] Not support
    **return** int& **static** False > C++ defination code: > ```cpp > int &__getitem__(int index) > ``` #### corners ```python def corners(self) > list[list[int]] ``` get coordinate of AprilTag item description **type** func **return** return the coordinate of the AprilTag. **static** False > C++ defination code: > ```cpp > std::vector> corners() > ``` #### rect ```python def rect(self) > list[int] ``` get rectangle of AprilTag item description **type** func **return** return the rectangle of the AprilTag. format is {x, y, w, h}, type is std::vector **static** False > C++ defination code: > ```cpp > std::vector rect() > ``` #### x ```python def x(self) > int ``` get x of AprilTag item description **type** func **return** return x of the AprilTag, type is int **static** False > C++ defination code: > ```cpp > int x() > ``` #### y ```python def y(self) > int ``` get y of AprilTag item description **type** func **return** return y of the AprilTag, type is int **static** False > C++ defination code: > ```cpp > int y() > ``` #### w ```python def w(self) > int ``` get w of AprilTag item description **type** func **return** return w of the AprilTag, type is int **static** False > C++ defination code: > ```cpp > int w() > ``` #### h ```python def h(self) > int ``` get h of AprilTag item description **type** func **return** return h of the AprilTag, type is int **static** False > C++ defination code: > ```cpp > int h() > ``` #### id ```python def id(self) > int ``` get id of AprilTag item description **type** func **return** return id of the AprilTag, type is int **static** False > C++ defination code: > ```cpp > int id() > ``` #### family ```python def family(self) > int ``` get family of AprilTag item description **type** func **return** return family of the AprilTag, type is int **static** False > C++ defination code: > ```cpp > int family() > ``` #### cx ```python def cx(self) > int ``` get cx of AprilTag item description **type** func **return** return cx of the AprilTag, type is int **static** False > C++ defination code: > ```cpp > int cx() > ``` #### cxf ```python def cxf(self) > float ``` get cxf of AprilTag item description **type** func **return** return cxf of the AprilTag, type is float **static** False > C++ defination code: > ```cpp > float cxf() > ``` #### cy ```python def cy(self) > int ``` get cy of AprilTag item description **type** func **return** return cy of the AprilTag, type is int **static** False > C++ defination code: > ```cpp > int cy() > ``` #### cyf ```python def cyf(self) > float ``` get cyf of AprilTag item description **type** func **return** return cyf of the AprilTag, type is float **static** False > C++ defination code: > ```cpp > float cyf() > ``` #### rotation ```python def rotation(self) > float ``` get rotation of AprilTag item description **type** func **return** return rotation of the AprilTag, type is float **static** False > C++ defination code: > ```cpp > float rotation() > ``` #### decision\\_margin ```python def decision_margin(self) > float ``` Get decision_margin of AprilTag item description **type** func **return** Returns the quality of the apriltag match (0.0 1.0) where 1.0 is the best. **static** False > C++ defination code: > ```cpp > float decision_margin() > ``` #### hamming ```python def hamming(self) > int ``` get hamming of AprilTag item description **type** func **return** Returns the number of accepted bit errors for this tag.
    return 0, means 0 bit errors will be accepted.
    1 is TAG25H7, means up to 1 bit error may be accepted
    2 is TAG25H9, means up to 3 bit errors may be accepted
    3 is TAG36H10, means up to 3 bit errors may be accepted
    4 is TAG36H11, means up to 4 bit errors may be accepted
    5 is ARTOOLKIT, means 0 bit errors will be accepted **static** False > C++ defination code: > ```cpp > int hamming() > ``` #### goodness ```python def goodness(self) > float ``` get goodness of AprilTag item description **type** func **return** return goodness of the AprilTag, type is float
    Note: This value is always 0.0 for now. **static** False > C++ defination code: > ```cpp > float goodness() > ``` #### x\\_translation ```python def x_translation(self) > float ``` get x_translation of AprilTag item description **type** func **return** return x_translation of the AprilTag, type is float **static** False > C++ defination code: > ```cpp > float x_translation() > ``` #### y\\_translation ```python def y_translation(self) > float ``` get y_translation of AprilTag item description **type** func **return** return y_translation of the AprilTag, type is float **static** False > C++ defination code: > ```cpp > float y_translation() > ``` #### z\\_translation ```python def z_translation(self) > float ``` get z_translation of AprilTag item description **type** func **return** return z_translation of the AprilTag, type is float **static** False > C++ defination code: > ```cpp > float z_translation() > ``` #### x\\_rotation ```python def x_rotation(self) > float ``` get x_rotation of AprilTag item description **type** func **return** return x_rotation of the AprilTag, type is float **static** False > C++ defination code: > ```cpp > float x_rotation() > ``` #### y\\_rotation ```python def y_rotation(self) > float ``` get y_rotation of AprilTag item description **type** func **return** return y_rotation of the AprilTag, type is float **static** False > C++ defination code: > ```cpp > float y_rotation() > ``` #### z\\_rotation ```python def z_rotation(self) > float ``` get z_rotation of AprilTag item description **type** func **return** return z_rotation of the AprilTag, type is float **static** False > C++ defination code: > ```cpp > float z_rotation() > ``` ### DataMatrix DataMatrix class > C++ defination code: > ```cpp > class DataMatrix > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, rect: list[int], corners: list[list[int]], payload: str, rotation: float, rows: int, columns: int, capacity: int, padding: int) > None ``` DataMatrix constructor item description **type** func **param** **rect**: Inlucdes the top left corner and the width and height of the rectangle. format is {x, y, w, h}, type is std::vector
    **corners**: Includes the four corners of the rectangle. format is {{x0, y0}, {x1, y1}, {x2, y2}, {x3, y3}}, type is std::vector>
    **payload**: The payload of the DataMatrix
    **rotation**: The rotation of the DataMatrix
    **rows**: The rows of the DataMatrix
    **columns**: The columns of the DataMatrix
    **capacity**: The capacity of the DataMatrix
    **padding**: The padding of the DataMatrix
    **static** False > C++ defination code: > ```cpp > DataMatrix(std::vector &rect, std::vector> &corners, std::string &payload, float rotation, int rows, int columns, int capacity, int padding) > ``` #### \\_\\_getitem\\_\\_ ```python def __getitem__(self, index: int) > int ``` Subscript operator item description **type** func **param** **index**: [0] get x of DataMatrix
    [1] get y of DataMatrix
    [2] get w of DataMatrix
    [3] get h of DataMatrix
    [4] Not support this index, try to use payload() method
    [5] Not support this index, try to use rotation() method
    [6] get rows of DataMatrix
    [7] get columns of DataMatrix
    [8] get capacity of DataMatrix
    [9] get padding of DataMatrix
    **return** int& **static** False > C++ defination code: > ```cpp > int &__getitem__(int index) > ``` #### corners ```python def corners(self) > list[list[int]] ``` get coordinate of DataMatrix item description **type** func **return** return the coordinate of the DataMatrix. **static** False > C++ defination code: > ```cpp > std::vector> corners() > ``` #### rect ```python def rect(self) > list[int] ``` get rectangle of DataMatrix item description **type** func **return** return the rectangle of the DataMatrix. format is {x, y, w, h}, type is std::vector **static** False > C++ defination code: > ```cpp > std::vector rect() > ``` #### x ```python def x(self) > int ``` get x of DataMatrix item description **type** func **return** return x of the DataMatrix, type is int **static** False > C++ defination code: > ```cpp > int x() > ``` #### y ```python def y(self) > int ``` get y of DataMatrix item description **type** func **return** return y of the DataMatrix, type is int **static** False > C++ defination code: > ```cpp > int y() > ``` #### w ```python def w(self) > int ``` get w of DataMatrix item description **type** func **return** return w of the DataMatrix, type is int **static** False > C++ defination code: > ```cpp > int w() > ``` #### h ```python def h(self) > int ``` get h of DataMatrix item description **type** func **return** return h of the DataMatrix, type is int **static** False > C++ defination code: > ```cpp > int h() > ``` #### payload ```python def payload(self) > str ``` get payload of DataMatrix item description **type** func **return** return payload of the DataMatrix, type is std::string **static** False > C++ defination code: > ```cpp > std::string payload() > ``` #### rotation ```python def rotation(self) > float ``` get rotation of DataMatrix item description **type** func **return** return rotation of the DataMatrix, type is float **static** False > C++ defination code: > ```cpp > float rotation() > ``` #### rows ```python def rows(self) > int ``` get rows of DataMatrix item description **type** func **return** return rows of the DataMatrix, type is int **static** False > C++ defination code: > ```cpp > int rows() > ``` #### columns ```python def columns(self) > int ``` get columns of DataMatrix item description **type** func **return** return columns of the DataMatrix, type is int **static** False > C++ defination code: > ```cpp > int columns() > ``` #### capacity ```python def capacity(self) > int ``` get capacity of DataMatrix item description **type** func **return** returns how many characters could fit in this data matrix, type is int **static** False > C++ defination code: > ```cpp > int capacity() > ``` #### padding ```python def padding(self) > int ``` get padding of DataMatrix item description **type** func **return** returns how many unused characters are in this data matrix, type is int **static** False > C++ defination code: > ```cpp > int padding() > ``` ### BarCode BarCode class > C++ defination code: > ```cpp > class BarCode > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, rect: list[int], corners: list[list[int]], payload: str, type: int, rotation: float, quality: int) > None ``` BarCode constructor item description **type** func **param** **rect**: Inlucdes the top left corner and the width and height of the rectangle. format is {x, y, w, h}, type is std::vector
    **corners**: Includes the four corners of the rectangle. format is {{x0, y0}, {x1, y1}, {x2, y2}, {x3, y3}}, type is std::vector>
    **payload**: The payload of the BarCode
    **type**: The type of the BarCode
    **rotation**: The rotation of the BarCode
    **quality**: The quality of the BarCode
    **static** False > C++ defination code: > ```cpp > BarCode(std::vector &rect, std::vector> &corners, std::string &payload, int type, float rotation, int quality) > ``` #### \\_\\_getitem\\_\\_ ```python def __getitem__(self, index: int) > int ``` Subscript operator item description **type** func **param** **index**: [0] get x of BarCode
    [1] get y of BarCode
    [2] get w of BarCode
    [3] get h of BarCode
    [4] Not support this index, try to use payload() method
    [5] get type of BarCode
    [6] Not support this index, try to use rotation() method
    [7] get quality of BarCode
    **return** int& **static** False > C++ defination code: > ```cpp > int &__getitem__(int index) > ``` #### corners ```python def corners(self) > list[list[int]] ``` get coordinate of BarCode item description **type** func **return** return the coordinate of the BarCode. **static** False > C++ defination code: > ```cpp > std::vector> corners() > ``` #### rect ```python def rect(self) > list[int] ``` get rectangle of BarCode item description **type** func **return** return the rectangle of the BarCode. format is {x, y, w, h}, type is std::vector **static** False > C++ defination code: > ```cpp > std::vector rect() > ``` #### x ```python def x(self) > int ``` get x of BarCode item description **type** func **return** return x of the BarCode, type is int **static** False > C++ defination code: > ```cpp > int x() > ``` #### y ```python def y(self) > int ``` get y of BarCode item description **type** func **return** return y of the BarCode, type is int **static** False > C++ defination code: > ```cpp > int y() > ``` #### w ```python def w(self) > int ``` get w of BarCode item description **type** func **return** return w of the BarCode, type is int **static** False > C++ defination code: > ```cpp > int w() > ``` #### h ```python def h(self) > int ``` get h of BarCode item description **type** func **return** return h of the BarCode, type is int **static** False > C++ defination code: > ```cpp > int h() > ``` #### payload ```python def payload(self) > str ``` get payload of BarCode item description **type** func **return** return payload of the BarCode, type is std::string **static** False > C++ defination code: > ```cpp > std::string payload() > ``` #### type ```python def type(self) > int ``` get type of BarCode item description **type** func **return** return type of the BarCode, type is int **static** False > C++ defination code: > ```cpp > int type() > ``` #### rotation ```python def rotation(self) > float ``` get rotation of BarCode item description **type** func **return** return rotation of the BarCode, type is float. FIXME: always return 0.0 **static** False > C++ defination code: > ```cpp > float rotation() > ``` #### quality ```python def quality(self) > int ``` get quality of BarCode item description **type** func **return** return quality of the BarCode, type is int **static** False > C++ defination code: > ```cpp > int quality() > ``` ### Statistics Statistics class > C++ defination code: > ```cpp > class Statistics > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, format: Format, l_statistics: list[int], a_statistics: list[int], b_statistics: list[int]) > None ``` Statistics constructor item description **type** func **param** **format**: The statistics source image format
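A usage sketch for the `BarCode` accessors above. `camera.Camera` and `Image.find_barcodes()` are assumed from other parts of this API:

```python
from maix import camera

cam = camera.Camera(320, 240)
img = cam.read()

# Each result of find_barcodes() is a BarCode object as documented above.
for code in img.find_barcodes():
    print("payload:", code.payload(), "type:", code.type(), "quality:", code.quality())
```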
    **l_statistics**: The statistics of the L channel. format is {mean, median, mode, std_dev, min, max, lq, uq}, type is std::vector
    **a_statistics**: The statistics of the A channel. format is {mean, median, mode, std_dev, min, max, lq, uq}, type is std::vector
    **b_statistics**: The statistics of the B channel. format is {mean, median, mode, std_dev, min, max, lq, uq}, type is std::vector
    **static** False > C++ defination code: > ```cpp > Statistics(image::Format format, std::vector &l_statistics, std::vector &a_statistics, std::vector &b_statistics) > ``` #### \\_\\_getitem\\_\\_ ```python def __getitem__(self, index: int) > int ``` Subscript operator item description **type** func **param** **index**: array index
    **return** int& **static** False > C++ defination code: > ```cpp > int &__getitem__(int index) > ``` #### format ```python def format(self) > Format ``` get format of Statistics source image item description **type** func **return** return format of the Statistics source image, type is image::Format **static** False > C++ defination code: > ```cpp > image::Format format() > ``` #### l\\_mean ```python def l_mean(self) > int ``` get L channel mean item description **type** func **return** return L channel mean, type is int **static** False > C++ defination code: > ```cpp > int l_mean() > ``` #### l\\_median ```python def l_median(self) > int ``` get L channel median item description **type** func **return** return L channel median, type is int **static** False > C++ defination code: > ```cpp > int l_median() > ``` #### l\\_mode ```python def l_mode(self) > int ``` get L channel mode item description **type** func **return** return L channel mode, type is int **static** False > C++ defination code: > ```cpp > int l_mode() > ``` #### l\\_std\\_dev ```python def l_std_dev(self) > int ``` get L channel std_dev item description **type** func **return** return L channel std_dev, type is int **static** False > C++ defination code: > ```cpp > int l_std_dev() > ``` #### l\\_min ```python def l_min(self) > int ``` get L channel min item description **type** func **return** return L channel min, type is int **static** False > C++ defination code: > ```cpp > int l_min() > ``` #### l\\_max ```python def l_max(self) > int ``` get L channel max item description **type** func **return** return L channel max, type is int **static** False > C++ defination code: > ```cpp > int l_max() > ``` #### l\\_lq ```python def l_lq(self) > int ``` get L channel lq item description **type** func **return** return L channel lq, type is int **static** False > C++ defination code: > ```cpp > int l_lq() > ``` #### l\\_uq ```python def l_uq(self) > int ``` get L channel uq item description **type** func **return** return L channel uq, type is int **static** False > C++ defination code: > ```cpp > int l_uq() > ``` #### a\\_mean ```python def a_mean(self) > int ``` get A channel mean item description **type** func **return** return A channel mean, type is int **static** False > C++ defination code: > ```cpp > int a_mean() > ``` #### a\\_median ```python def a_median(self) > int ``` get A channea median item description **type** func **return** return A channel median, type is int **static** False > C++ defination code: > ```cpp > int a_median() > ``` #### a\\_mode ```python def a_mode(self) > int ``` get A channel mode item description **type** func **return** return A channel mode, type is int **static** False > C++ defination code: > ```cpp > int a_mode() > ``` #### a\\_std\\_dev ```python def a_std_dev(self) > int ``` get A channel std_dev item description **type** func **return** return A channel std_dev, type is int **static** False > C++ defination code: > ```cpp > int a_std_dev() > ``` #### a\\_min ```python def a_min(self) > int ``` get A channel min item description **type** func **return** return A channel min, type is int **static** False > C++ defination code: > ```cpp > int a_min() > ``` #### a\\_max ```python def a_max(self) > int ``` get A channel max item description **type** func **return** return A channel max, type is int **static** False > C++ defination code: > ```cpp > int a_max() > ``` #### a\\_lq ```python def a_lq(self) > int ``` get A channel lq item description **type** func **return** return A 
channel lq, type is int **static** False > C++ defination code: > ```cpp > int a_lq() > ``` #### a\\_uq ```python def a_uq(self) > int ``` get A channel uq item description **type** func **return** return A channel uq, type is int **static** False > C++ defination code: > ```cpp > int a_uq() > ``` #### b\\_mean ```python def b_mean(self) > int ``` get B channel mean item description **type** func **return** return B channel mean, type is int **static** False > C++ defination code: > ```cpp > int b_mean() > ``` #### b\\_median ```python def b_median(self) > int ``` get B channea median item description **type** func **return** return B channel median, type is int **static** False > C++ defination code: > ```cpp > int b_median() > ``` #### b\\_mode ```python def b_mode(self) > int ``` get B channel mode item description **type** func **return** return B channel mode, type is int **static** False > C++ defination code: > ```cpp > int b_mode() > ``` #### b\\_std\\_dev ```python def b_std_dev(self) > int ``` get B channel std_dev item description **type** func **return** return B channel std_dev, type is int **static** False > C++ defination code: > ```cpp > int b_std_dev() > ``` #### b\\_min ```python def b_min(self) > int ``` get B channel min item description **type** func **return** return B channel min, type is int **static** False > C++ defination code: > ```cpp > int b_min() > ``` #### b\\_max ```python def b_max(self) > int ``` get B channel max item description **type** func **return** return B channel max, type is int **static** False > C++ defination code: > ```cpp > int b_max() > ``` #### b\\_lq ```python def b_lq(self) > int ``` get B channel lq item description **type** func **return** return B channel lq, type is int **static** False > C++ defination code: > ```cpp > int b_lq() > ``` #### b\\_uq ```python def b_uq(self) > int ``` get B channel uq item description **type** func **return** return B channel uq, type is int **static** False > C++ defination code: > ```cpp > int b_uq() > ``` ### Displacement Displacement class > C++ defination code: > ```cpp > class Displacement > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, x_translation: float, y_translation: float, rotation: float, scale: float, response: float) > None ``` Displacement constructor item description **type** func **param** **x_translation**: The x_translation of the Displacement
    **y_translation**: The y_translation of the Displacement
    **rotation**: The rotation of the Displacement
    **scale**: The scale of the Displacement
    **response**: The response of the Displacement
    **static** False > C++ defination code: > ```cpp > Displacement(float x_translation, float y_translation, float rotation, float scale, float response) > ``` #### \\_\\_getitem\\_\\_ ```python def __getitem__(self, index: int) > int ``` Subscript operator item description **type** func **param** **index**: array index
    **return** int& **static** False > C++ defination code: > ```cpp > int &__getitem__(int index) > ``` #### x\\_translation ```python def x_translation(self) > float ``` get x_translation of Displacement item description **type** func **return** return x_translation of the Displacement, type is float **static** False > C++ defination code: > ```cpp > float x_translation() > ``` #### y\\_translation ```python def y_translation(self) > float ``` get y_translation of Displacement item description **type** func **return** return y_translation of the Displacement, type is float **static** False > C++ defination code: > ```cpp > float y_translation() > ``` #### rotation ```python def rotation(self) > float ``` get rotation of Displacement item description **type** func **return** return rotation of the Displacement, type is float **static** False > C++ defination code: > ```cpp > float rotation() > ``` #### scale ```python def scale(self) > float ``` get scale of Displacement item description **type** func **return** return scale of the Displacement, type is float **static** False > C++ defination code: > ```cpp > float scale() > ``` #### response ```python def response(self) > float ``` get response of Displacement item description **type** func **return** return response of the Displacement, type is float **static** False > C++ defination code: > ```cpp > float response() > ``` ### Percentile Percentile class > C++ defination code: > ```cpp > class Percentile > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, l_value: int, a_value: int 0, b_value: int 0) > None ``` Percentile constructor item description **type** func **param** **l_value**: for grayscale image, it is grayscale percentile value (between 0 and 255).
    for rgb888 image, it is l channel percentile value of lab (between 0 and 100).
    **a_value**: for rgb888 image, it is a channel percentile value of lab format (between -128 and 127).
    **b_value**: for rgb888 image, it is b channel percentile value of lab format (between -128 and 127).
    **static** False > C++ defination code: > ```cpp > Percentile(int l_value, int a_value 0, int b_value 0) > ``` #### \\_\\_getitem\\_\\_ ```python def __getitem__(self, index: int) > int ``` Subscript operator item description **type** func **static** False > C++ defination code: > ```cpp > int &__getitem__(int index) > ``` #### value ```python def value(self) > int ``` Return the grayscale percentile value (between 0 and 255). item description **type** func **return** returns grayscale percentile value **static** False > C++ defination code: > ```cpp > int value() > ``` #### l\\_value ```python def l_value(self) > int ``` Return the l channel percentile value of lab format (between 0 and 100). item description **type** func **return** returns l channel percentile value **static** False > C++ defination code: > ```cpp > int l_value() > ``` #### a\\_value ```python def a_value(self) > int ``` Return the a channel percentile value of lab format (between 128 and 127). item description **type** func **return** returns a channel percentile value **static** False > C++ defination code: > ```cpp > int a_value() > ``` #### b\\_value ```python def b_value(self) > int ``` Return the b channel percentile value of lab format (between 128 and 127). item description **type** func **return** returns b channel percentile value **static** False > C++ defination code: > ```cpp > int b_value() > ``` ### Threshold Threshold class > C++ defination code: > ```cpp > class Threshold > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, l_value: int, a_value: int 0, b_value: int 0) > None ``` Threshold constructor item description **type** func **param** **l_value**: for grayscale image, it is grayscale threshold value (between 0 and 255).
    for rgb888 image, it is l channel threshold value of lab (between 0 and 100).
    **a_value**: for rgb888 image, it is a channel threshold value of lab format (between -128 and 127).
    **b_value**: for rgb888 image, it is b channel threshold value of lab format (between -128 and 127).
    **static** False > C++ defination code: > ```cpp > Threshold(int l_value, int a_value 0, int b_value 0) > ``` #### \\_\\_getitem\\_\\_ ```python def __getitem__(self, index: int) > int ``` Subscript operator item description **type** func **static** False > C++ defination code: > ```cpp > int &__getitem__(int index) > ``` #### value ```python def value(self) > int ``` Return the grayscale threshold value (between 0 and 255). item description **type** func **return** returns grayscale threshold value **static** False > C++ defination code: > ```cpp > int value() > ``` #### l\\_value ```python def l_value(self) > int ``` Return the l channel threshold value of lab format (between 0 and 100). item description **type** func **return** returns l channel percentile value **static** False > C++ defination code: > ```cpp > int l_value() > ``` #### a\\_value ```python def a_value(self) > int ``` Return the a channel threshold value of lab format (between 128 and 127). item description **type** func **return** returns a channel percentile value **static** False > C++ defination code: > ```cpp > int a_value() > ``` #### b\\_value ```python def b_value(self) > int ``` Return the b channel threshold value of lab format (between 128 and 127). item description **type** func **return** returns b channel percentile value **static** False > C++ defination code: > ```cpp > int b_value() > ``` ### Histogram Histogram class > C++ defination code: > ```cpp > class Histogram > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, l_bin: list[float], a_bin: list[float], b_bin: list[float], format: Format ...) > None ``` Histogram constructor item description **type** func **param** **l_value**: for grayscale image, it is grayscale threshold value list (the range of element values in the list is 0 and 255).
    for rgb888 image, it is l channel threshold value list of lab (the range of element values in the list is 0 and 100).
    **a_value**: for rgb888 image, it is a channel threshold value list of lab format (the range of element values in the list is -128 and 127).
    **b_value**: for rgb888 image, it is b channel threshold value list of lab format (the range of element values in the list is -128 and 127).
    **format**: format of the source image
    **static** False > C++ defination code: > ```cpp > Histogram(std::vector l_bin, std::vector a_bin, std::vector b_bin, image::Format format image::Format::FMT_RGB888) > ``` #### \\_\\_getitem\\_\\_ ```python def __getitem__(self, index: int) > int ``` Subscript operator item description **type** func **static** False > C++ defination code: > ```cpp > int &__getitem__(int index) > ``` #### bins ```python def bins(self) > list[float] ``` Returns a list of floats for the grayscale histogram. item description **type** func **static** False > C++ defination code: > ```cpp > std::vector bins() > ``` #### l\\_bins ```python def l_bins(self) > list[float] ``` Returns a list of floats for the RGB565 histogram LAB L channel. item description **type** func **static** False > C++ defination code: > ```cpp > std::vector l_bins() > ``` #### a\\_bins ```python def a_bins(self) > list[float] ``` Returns a list of floats for the RGB565 histogram LAB A channel. item description **type** func **static** False > C++ defination code: > ```cpp > std::vector a_bins() > ``` #### b\\_bins ```python def b_bins(self) > list[float] ``` Returns a list of floats for the RGB565 histogram LAB B channel. item description **type** func **static** False > C++ defination code: > ```cpp > std::vector b_bins() > ``` #### get\\_percentile ```python def get_percentile(self, percentile: float) > Percentile ``` Computes the CDF of the histogram channels and returns a image::Percentile object item description **type** func **param** **percentile**: the values of the histogram at the passed in percentile (0.0 1.0) (float).
    So, if you pass in 0.1 this method will tell you (going from left to right in the histogram)
    what bin when summed into an accumulator caused the accumulator to cross 0.1. This is useful
    to determine min (with 0.1) and max (with 0.9) of a color distribution without outlier effects
    ruining your results for adaptive color tracking.
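Example (an illustrative sketch of the 0.1/0.9 percentile technique described above; `from maix import image` and `Image.get_histogram()` are assumed from the rest of this module and are not defined in this section):

```python
from maix import image

img = image.Image(320, 240)  # FMT_RGB888 by default

# Assumption: get_histogram() is documented elsewhere in this module and
# returns an image.Histogram computed over the whole image.
hist = img.get_histogram()

# The 10th and 90th percentiles give outlier-resistant bounds of the color
# distribution, useful for adaptive color tracking.
lo = hist.get_percentile(0.1)
hi = hist.get_percentile(0.9)
print(lo.l_value(), hi.l_value())
print(lo.a_value(), hi.a_value())
print(lo.b_value(), hi.b_value())
```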
    **return** image::Percentile object **static** False > C++ defination code: > ```cpp > image::Percentile get_percentile(float percentile) > ``` #### get\\_threshold ```python def get_threshold(self) > Threshold ``` Uses Otsu’s Method to compute the optimal threshold values that split the histogram into two halves for each channel of the histogram and returns a image::Threshold object. item description **type** func **return** image::Threshold object **static** False > C++ defination code: > ```cpp > image::Threshold get_threshold() > ``` #### get\\_statistics ```python def get_statistics(self) > Statistics ``` Computes the mean, median, mode, standard deviation, min, max, lower quartile, and upper quartile of each color channel in the histogram and returns a image::Statistics object. item description **type** func **return** image::Statistics object **static** False > C++ defination code: > ```cpp > image::Statistics get_statistics() > ``` ### LBPKeyPoint LBPKeyPoint class > C++ defination code: > ```cpp > class LBPKeyPoint > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, data: list[int]) > None ``` LBPKeyPoint constructor item description **type** func **param** **data**: The data of the LBPKeyPoint
    **static** False > C++ defination code: > ```cpp > LBPKeyPoint(std::valarray &data) > ``` ### KeyPoint KeyPoint class > C++ defination code: > ```cpp > class KeyPoint > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, x: int, y: int, score: int, octave: int, angle: int, matched: int, desc: list[int]) > None ``` KeyPoint constructor item description **type** func **param** **x**: The x of the KeyPoint
    **y**: The y of the KeyPoint
    **score**: The score of the KeyPoint
    **octave**: The octave of the KeyPoint
    **angle**: The angle of the KeyPoint
    **matched**: The matched of the KeyPoint
    **desc**: The desc of the KeyPoint
    **static** False > C++ defination code: > ```cpp > KeyPoint(uint16_t x, uint16_t y, uint16_t score, uint16_t octave, uint16_t angle, uint16_t matched, std::vector &desc) > ``` ### KPTMatch KPTMatch class > C++ defination code: > ```cpp > class KPTMatch > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, cx: int, cy: int, x: int, y: int, w: int, h: int, score: int, theta: int, match: int) > None ``` KPTMatch constructor item description **type** func **param** **cx**: The cx of the KPTMatch
    **cy**: The cy of the KPTMatch
    **x**: The x of the KPTMatch
    **y**: The y of the KPTMatch
    **w**: The w of the KPTMatch
    **h**: The h of the KPTMatch
    **score**: The score of the KPTMatch
    **theta**: The theta of the KPTMatch
    **match**: The match of the KPTMatch
    **static** False > C++ defination code: > ```cpp > KPTMatch(int cx, int cy, int x, int y, int w, int h, int score, int theta, int match) > ``` ### ORBKeyPoint ORBKeyPoint class > C++ defination code: > ```cpp > class ORBKeyPoint > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, data: list[KeyPoint], threshold: int, normalized: bool) > None ``` ORBKeyPoint constructor item description **type** func **param** **data**: The data of the ORBKeyPoint
    **threshold**: The threshold of the ORBKeyPoint
    **normalized**: The normalized of the ORBKeyPoint
    **static** False > C++ defination code: > ```cpp > ORBKeyPoint(std::vector &data, int threshold, bool normalized) > ``` #### get\\_data ```python def get_data(self) > list[KeyPoint] ``` get data of ORBKeyPoint item description **type** func **return** return data of the ORBKeyPoint, type is std::vector **static** False > C++ defination code: > ```cpp > std::vector get_data() > ``` ### HaarCascade HaarCascade class > C++ defination code: > ```cpp > class HaarCascade > ``` #### \\_\\_init\\_\\_ ```python def __init__(self) > None ``` HaarCascade constructor item description **type** func **param** **data**: The data of the HaarCascade
    **threshold**: The threshold of the HaarCascade
    **normalized**: The normalized of the HaarCascade
    **static** False > C++ defination code: > ```cpp > HaarCascade() > ``` ### Color Color class > C++ defination code: > ```cpp > class Color > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, ch1: int, ch2: int 0, ch3: int 0, alpha: float 0, format: Format ...) > None ``` Color constructor item description **type** func **param** **alpha**: alpha channel, value range: 0 ~ 1
    **static** False > C++ defination code: > ```cpp > Color(uint8_t ch1, uint8_t ch2 0, uint8_t ch3 0, float alpha 0, image::Format format image::FMT_GRAYSCALE) > ``` #### r Color red channel item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > uint8_t r > ``` #### g Color green channel item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > uint8_t g > ``` #### b Color blue channel item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > uint8_t b > ``` #### alpha Color alpha channel, value from 0.0 to 1.0, float value item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > float alpha > ``` #### gray Color gray channel item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > uint8_t gray > ``` #### format Color format item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > image::Format format > ``` #### hex ```python def hex(self) > int ``` Get color's hex value item description **type** func **static** False > C++ defination code: > ```cpp > uint32_t hex() > ``` #### from\\_rgb ```python def from_rgb(r: int, g: int, b: int) > Color ``` Create Color object from RGB channels item description **type** func **static** True > C++ defination code: > ```cpp > static image::Color from_rgb(uint8_t r, uint8_t g, uint8_t b) > ``` #### from\\_bgr ```python def from_bgr(b: int, g: int, r: int) > Color ``` Create Color object from BGR channels item description **type** func **static** True > C++ defination code: > ```cpp > static image::Color from_bgr(uint8_t b, uint8_t g, uint8_t r) > ``` #### from\\_gray ```python def from_gray(gray: int) > Color ``` Create Color object from gray channel item description **type** func **static** True > C++ defination code: > ```cpp > static image::Color from_gray(uint8_t gray) > ``` #### from\\_rgba ```python def from_rgba(r: int, g: int, b: int, alpha: float) > Color ``` Create Color object from RGBA channels item description **type** func **param** **alpha**: alpha channel, float value, value range: 0 ~ 1
    **static** True > C++ defination code: > ```cpp > static image::Color from_rgba(uint8_t r, uint8_t g, uint8_t b, float alpha) > ``` #### from\\_bgra ```python def from_bgra(b: int, g: int, r: int, alpha: float) > Color ``` Create Color object from BGRA channels item description **type** func **param** **alpha**: alpha channel, float value, value range: 0 ~ 1
**static** True > C++ defination code: > ```cpp > static image::Color from_bgra(uint8_t b, uint8_t g, uint8_t r, float alpha) > ``` #### from\\_hex ```python def from_hex(hex: int, format: Format) > Color ``` Create Color object from hex value item description **type** func **param** **hex**: hex value, e.g. 0x0000FF00, lower address is the first channel
    **format**: color format, @see image::Format
    **static** True > C++ defination code: > ```cpp > static image::Color from_hex(uint32_t hex, image::Format &format) > ``` #### to\\_format ```python def to_format(self, format: Format) > None ``` Convert Color format item description **type** func **param** **format**: format want to convert to, @see image::Format, only support RGB888, BGR888, RGBA8888, BGRA8888, GRAYSCALE.
    **static** False > C++ defination code: > ```cpp > void to_format(const image::Format &format) > ``` #### to\\_format2 ```python def to_format2(self, format: Format) > Color ``` Convert color format and return a new Color object item description **type** func **param** **format**: format want to convert to, @see image::Format, only support RGB888, BGR888, RGBA8888, BGRA8888, GRAYSCALE.
    **return** new Color object, you need to delete it manually in C++. **static** False > C++ defination code: > ```cpp > image::Color *to_format2(const image::Format &format) > ``` ### Image Image class > C++ defination code: > ```cpp > class Image > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, width: int, height: int, format: Format ...) > None ``` Image constructor item description **type** func **param** **width**: image width, should > 0
    **height**: image height, should > 0
    **format**: image format @see image::Format
    **static** False > C++ defination code: > ```cpp > Image(int width, int height, image::Format format image::Format::FMT_RGB888) > ``` #### format ```python def format(self) > Format ``` Get image's format item description **type** func **see** image.Format **static** False > C++ defination code: > ```cpp > image::Format format() > ``` #### size ```python def size(self) > Size ``` Get image's size, [width, height] item description **type** func **static** False > C++ defination code: > ```cpp > image::Size size() > ``` #### data\\_size ```python def data_size(self) > int ``` Get image's data size item description **type** func **static** False > C++ defination code: > ```cpp > int data_size() > ``` #### width ```python def width(self) > int ``` Get image's width item description **type** func **static** False > C++ defination code: > ```cpp > int width() > ``` #### height ```python def height(self) > int ``` Get image's height item description **type** func **static** False > C++ defination code: > ```cpp > int height() > ``` #### data ```python def data(self) > capsule ``` Get image's data pointer.\\nIn MaixPy is capsule object. item description **type** func **static** False > C++ defination code: > ```cpp > void *data() > ``` #### \\_\\_str\\_\\_ ```python def __str__(self) > str ``` To string method item description **type** func **static** False > C++ defination code: > ```cpp > std::string __str__() > ``` #### to\\_str ```python def to_str(self) > str ``` To string method item description **type** func **static** False > C++ defination code: > ```cpp > std::string to_str() > ``` #### get\\_pixel ```python def get_pixel(self, x: int, y: int, rgbtuple: bool False) > list[int] ``` Get pixel of image item description **type** func **param** **x**: pixel's coordinate x. x must less than image's width
    **y**: pixel's coordinate y. y must less than image's height
    **rgbtuple**: switch return value method. rgbtuple decides whether to split the return or not. default is false.
    **return** pixel value,
    According to image format and rgbtuple, return different value:
    format is FMT_RGB888, rgbtuple is true, return [R, G, B]; rgbtuple is false, return [RGB]
format is FMT_BGR888, rgbtuple is true, return [B, G, R]; rgbtuple is false, return [BGR]
    format is FMT_GRAYSCALE, return [GRAY]; **static** False > C++ defination code: > ```cpp > std::vector get_pixel(int x, int y, bool rgbtuple false) > ``` #### set\\_pixel ```python def set_pixel(self, x: int, y: int, pixel: list[int]) > maix.err.Err ``` Set pixel of image item description **type** func **param** **x**: pixel's coordinate x. x must less than image's width
    **y**: pixel's coordinate y. y must less than image's height
    **pixel**: pixel value, according to image format and size of pixel, has different operation:
    format is FMT_RGB888, pixel size must be 1 or 3, if size is 1, will split pixel[0] to [R, G, B]; if size is 3, will use pixel directly
    format is FMT_BGR888, pixel size must be 1 or 3, if size is 1, will split pixel[0] to [B, G, R]; if size is 3, will use pixel directly
    format is FMT_GRAYSCALE, pixel size must be 1, will use pixel directly
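Example (a minimal sketch of get_pixel/set_pixel with the pixel formats described above; the `from maix import image, err` import path and the `err.Err.ERR_NONE` spelling are assumptions):

```python
from maix import image, err

img = image.Image(64, 48)  # FMT_RGB888 by default

# A 3-element pixel list is used directly as [R, G, B]
e = img.set_pixel(10, 20, [255, 0, 0])
assert e == err.Err.ERR_NONE  # assumed error-enum spelling

# A 1-element pixel list is split from the packed value into [R, G, B]
img.set_pixel(11, 20, [0x00FF00])

# get_pixel: rgbtuple=True splits the channels, otherwise one packed value
r, g, b = img.get_pixel(10, 20, True)
packed = img.get_pixel(11, 20)[0]
print(r, g, b, hex(packed))
```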
    **return** error code, Err::ERR_NONE is ok, other is error **static** False > C++ defination code: > ```cpp > err::Err set_pixel(int x, int y, std::vector pixel) > ``` #### to\\_tensor ```python def to_tensor(self, chw: bool False, copy: bool True) > maix.tensor.Tensor ``` Convert Image object to tensor::Tensor object item description **type** func **param** **chw**: if true, the shape of tensor is [C, H, W], else [H, W, C]
    **copy**: if true, will alloc memory for tensor data, else will use the memory of Image object
    **return** tensor::Tensor object pointer, an allocated tensor object **static** False > C++ defination code: > ```cpp > tensor::Tensor *to_tensor(bool chw false, bool copy true) > ``` #### to\\_bytes ```python def to_bytes(*args, **kwargs) ``` Get image's data and convert to array bytes item description **type** func **param** **copy**: if true, will alloc memory and copy data to new buffer,
    else will use the memory of Image object, delete bytes object will not affect Image object,
    but delete Image object will make bytes object invalid, it may cause program crash !!!!
    So use this param carefully.
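Example (sketch only; that the returned object supports `len()` like Python `bytes` is an assumption):

```python
from maix import image

img = image.Image(320, 240)

# copy=True allocates a new buffer, so the returned data stays valid even
# after the Image object is gone; copy=False shares the Image's memory and
# must not outlive it (see the warning above).
raw = img.to_bytes(True)
print(len(raw), img.data_size())
```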
    **return** image's data bytes, need be delete by caller in C++. **static** False > C++ defination code: > ```cpp > Bytes *to_bytes(bool copy true) > ``` #### to\\_format ```python def to_format(self, format: Format) > Image ``` Convert image to specific format item description **type** func **param** **format**: format want to convert to, @see image::Format, only support RGB888, BGR888, RGBA8888, BGRA8888, GRAYSCALE, JPEG.
    **return** new image object. Need be delete by caller in C++. **throw** err.Exception, if two images' format not support, **or already the format**, will raise exception **static** False > C++ defination code: > ```cpp > image::Image *to_format(const image::Format &format) > ``` #### to\\_jpeg ```python def to_jpeg(self, quality: int 95) > Image ``` Convert image to jpeg item description **type** func **param** **quality**: the quality of jpg, default is 95. For MaixCAM supported range is (50, 100], if < 50 will be fixed to 51.
    **return** new image object. Need be delete by caller in C++. **throw** err.Exception, if two images' format not support, **or already the format**, will raise exception **static** False > C++ defination code: > ```cpp > image::Image *to_jpeg(int quality 95) > ``` #### draw\\_image ```python def draw_image(self, x: int, y: int, img: Image) > Image ``` Draw image on this image item description **type** func **param** **x**: left top corner of image point's coordinate x
    **y**: left top corner of image point's coordinate y
**img**: image object to draw, the caller's channel count must be >= the drawn image's channel count,
    e.g. caller is RGB888, args is RGBA8888, will throw exception, but caller is RGBA8888, args is RGB888 or RGBA8888 is ok
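Example (illustrative sketch of the channel rule above):

```python
from maix import image

base = image.Image(320, 240)  # FMT_RGB888 by default
logo = image.Image(64, 64)    # also RGB888

# Same channel count is allowed. Drawing an RGBA8888 image onto this RGB888
# base would raise, while an RGBA8888 base could accept an RGB888 overlay.
base.draw_image(10, 10, logo)
```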
    **return** this image object self **static** False > C++ defination code: > ```cpp > image::Image *draw_image(int x, int y, image::Image &img) > ``` #### draw\\_rect ```python def draw_rect(self, x: int, y: int, w: int, h: int, color: Color, thickness: int 1) > Image ``` Fill rectangle color to image item description **type** func **param** **x**: left top corner of rectangle point's coordinate x
    **y**: left top corner of rectangle point's coordinate y
    **w**: rectangle width
    **h**: rectangle height
    **color**: rectangle color
**thickness**: rectangle thickness(line width), by default(value is 1), -1 means fill rectangle
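Example (sketch; assumes a thickness of -1 fills the rectangle, as noted above):

```python
from maix import image

img = image.Image(320, 240)
red = image.Color.from_rgb(255, 0, 0)

img.draw_rect(10, 10, 100, 60, red, 2)    # 2 px outline
img.draw_rect(150, 10, 100, 60, red, -1)  # -1: filled rectangle
```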
    **return** this image object self **static** False > C++ defination code: > ```cpp > image::Image *draw_rect(int x, int y, int w, int h, const image::Color &color, int thickness 1) > ``` #### draw\\_line ```python def draw_line(self, x1: int, y1: int, x2: int, y2: int, color: Color, thickness: int 1) > Image ``` Draw line on image item description **type** func **param** **x1**: start point's coordinate x
    **y1**: start point's coordinate y
    **x2**: end point's coordinate x
    **y2**: end point's coordinate y
    **color**: line color @see image::Color
    **thickness**: line thickness(line width), by default(value is 1)
    **return** this image object self **static** False > C++ defination code: > ```cpp > image::Image *draw_line(int x1, int y1, int x2, int y2, const image::Color &color, int thickness 1) > ``` #### draw\\_circle ```python def draw_circle(self, x: int, y: int, radius: int, color: Color, thickness: int 1) > Image ``` Draw circle on image item description **type** func **param** **x**: circle center point's coordinate x
    **y**: circle center point's coordinate y
    **radius**: circle radius
    **color**: circle color @see image::Color
**thickness**: circle thickness(line width), default is 1, -1 means fill circle
    **return** this image object self **static** False > C++ defination code: > ```cpp > image::Image *draw_circle(int x, int y, int radius, const image::Color &color, int thickness 1) > ``` #### draw\\_ellipse ```python def draw_ellipse(self, x: int, y: int, a: int, b: int, angle: float, start_angle: float, end_angle: float, color: Color, thickness: int 1) > Image ``` Draw ellipse on image item description **type** func **param** **x**: ellipse center point's coordinate x
    **y**: ellipse center point's coordinate y
    **a**: ellipse major axis length
    **b**: ellipse minor axis length
    **angle**: ellipse rotation angle
    **start_angle**: ellipse start angle
    **end_angle**: ellipse end angle
    **color**: ellipse color @see image::Color
**thickness**: ellipse thickness(line width), by default(value is 1), -1 means fill ellipse
    **return** this image object self **static** False > C++ defination code: > ```cpp > image::Image *draw_ellipse(int x, int y, int a, int b, float angle, float start_angle, float end_angle, const image::Color &color, int thickness 1) > ``` #### draw\\_string ```python def draw_string(self, x: int, y: int, textstring: str, color: Color ..., scale: float 1, thickness: int 1, wrap: bool True, wrap_space: int 4, font: str '') > Image ``` Draw text on image item description **type** func **param** **x**: text left top point's coordinate x
    **y**: text left top point's coordinate y
    **string**: text content
    **color**: text color @see image::Color, default is white
    **scale**: font scale, by default(value is 1)
    **thickness**: text thickness(line width), if negative, the glyph is filled, by default(value is 1)
    **wrap**: if true, will auto wrap text to next line if text width > image width, by default(value is true)
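Example (sketch; the default font and exact wrapping behavior depend on the firmware):

```python
from maix import image

img = image.Image(320, 240)
white = image.Color.from_rgb(255, 255, 255)

img.draw_string(4, 4, "Hello MaixPy", white, 1.5)  # scale 1.5 enlarges the font
# wrap is True by default, so long text continues on the next line
img.draw_string(4, 40, "a fairly long line of text that may exceed the image width", white)
```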
    **return** this image object self **static** False > C++ defination code: > ```cpp > image::Image *draw_string(int x, int y, const std::string &textstring, const image::Color &color image::COLOR_WHITE, float scale 1, int thickness 1, > bool wrap true, int wrap_space 4, const std::string &font \"\") > ``` #### draw\\_cross ```python def draw_cross(self, x: int, y: int, color: Color, size: int 5, thickness: int 1) > Image ``` Draw cross on image item description **type** func **param** **x**: cross center point's coordinate x
    **y**: cross center point's coordinate y
    **color**: cross color @see image::Color
    **size**: how long the lines of the cross extend, by default(value is 5). So the line length is `2 * size + thickness`
    **thickness**: cross thickness(line width), by default(value is 1)
    **static** False > C++ defination code: > ```cpp > image::Image *draw_cross(int x, int y, const image::Color &color, int size 5, int thickness 1) > ``` #### draw\\_arrow ```python def draw_arrow(self, x0: int, y0: int, x1: int, y1: int, color: Color, thickness: int 1) > Image ``` Draw arrow on image item description **type** func **param** **x0**: start coordinate of the arrow x0
    **y0**: start coordinate of the arrow y0
    **x1**: end coordinate of the arrow x1
    **y1**: end coordinate of the arrow y1
**color**: arrow color @see image::Color
**thickness**: arrow thickness(line width), by default(value is 1)
    **return** this image object self **static** False > C++ defination code: > ```cpp > image::Image *draw_arrow(int x0, int y0, int x1, int y1, const image::Color &color, int thickness 1) > ``` #### draw\\_edges ```python def draw_edges(self, corners: list[list[int]], color: Color, size: int 0, thickness: int 1, fill: bool False) > Image ``` Draw edges on image item description **type** func **param** **corners**: edges, [[x0, y0], [x1, y1], [x2, y2], [x3, y3]]
    **color**: edges color @see image::Color
**size**: radius of the circle to draw at each corner. TODO: support in the future
    **thickness**: edges thickness(line width), by default(value is 1)
    **fill**: if true, will fill edges, by default(value is false)
**return** this image object self **static** False > C++ defination code: > ```cpp > image::Image *draw_edges(std::vector> corners, const image::Color &color, int size 0, int thickness 1, bool fill false) > ``` #### draw\\_keypoints ```python def draw_keypoints(self, keypoints: list[int], color: Color, size: int 4, thickness: int 1, line_thickness: int 0) > Image ``` Draw keypoints on image item description **type** func **param** **keypoints**: keypoints, [x1, y1, x2, y2...] or [x, y, rotation_angle_in_degrees, x2, y2, rotation_angle_in_degrees2](TODO: rotation_angle_in_degrees support in the future)
    **color**: keypoints color @see image::Color
    **size**: size of keypoints(radius)
**thickness**: keypoints thickness(line width), by default(value is -1 means fill circle)
    **line_thickness**: line thickness, default 0 means not draw lines, > 0 will draw lines connect points.
**return** this image object self **static** False > C++ defination code: > ```cpp > image::Image *draw_keypoints(const std::vector &keypoints, const image::Color &color, int size 4, int thickness 1, int line_thickness 0) > ``` #### resize ```python def resize(self, width: int, height: int, object_fit: Fit ..., method: ResizeMethod ...) > Image ``` Resize image, will create a new resized image object item description **type** func **param** **width**: new width, if value is -1, will use height to calculate aspect ratio
**height**: new height, if value is -1, will use width to calculate aspect ratio
    **object_fit**: fill, contain, cover, by default is fill
    **method**: resize method, by default is bilinear
**return** Always returns a new resized image object even if the size does not change, so in C++ you should take care of the return value to avoid a memory leak.
    And it's better to judge whether the size has changed before calling this function to make the program more efficient.
    e.g.
if img->width() != width || img->height() != height:
img = img->resize(width, height); **static** False > C++ defination code: > ```cpp > image::Image *resize(int width, int height, image::Fit object_fit image::Fit::FIT_FILL, image::ResizeMethod method image::ResizeMethod::NEAREST) > ``` #### affine ```python def affine(self, src_points: list[int], dst_points: list[int], width: int 1, height: int 1, method: ResizeMethod ...) > Image ``` Affine transform image, will create a new transformed image object item description **type** func **param** **src_points**: three source points, [x1, y1, x2, y2, x3, y3]
    **dst_points**: three destination points, [x1, y1, x2, y2, x3, y3]
**width**: new width, if value is -1, will use height to calculate aspect ratio
**height**: new height, if value is -1, will use width to calculate aspect ratio
    **method**: resize method, by default is bilinear
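Example (illustrative sketch; the point values are arbitrary):

```python
from maix import image

img = image.Image(320, 240)

# Three source points and their destinations, flattened as [x1, y1, x2, y2, x3, y3].
# This particular mapping just shifts the content 20 px right and 10 px down.
src = [0, 0, 100, 0, 0, 100]
dst = [20, 10, 120, 10, 20, 110]
warped = img.affine(src, dst, 320, 240)
print(warped.width(), warped.height())
```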
    **return** new transformed image object **static** False > C++ defination code: > ```cpp > image::Image *affine(std::vector src_points, std::vector dst_points, int width 1, int height 1, image::ResizeMethod method image::ResizeMethod::BILINEAR) > ``` #### copy ```python def copy(self) > Image ``` Copy image, will create a new copied image object item description **type** func **return** new copied image object **static** False > C++ defination code: > ```cpp > image::Image *copy() > ``` #### crop ```python def crop(self, x: int, y: int, w: int, h: int) > Image ``` Crop image, will create a new cropped image object item description **type** func **param** **x**: left top corner of crop rectangle point's coordinate x
    **y**: left top corner of crop rectangle point's coordinate y
    **w**: crop rectangle width
    **h**: crop rectangle height
**return** new cropped image object **static** False > C++ defination code: > ```cpp > image::Image *crop(int x, int y, int w, int h) > ``` #### rotate ```python def rotate(self, angle: float, width: int 1, height: int 1, method: ResizeMethod ...) > Image ``` Rotate image, will create a new rotated image object item description **type** func **param** **angle**: anti-clockwise rotation angle, if angle is 90 or 270, and width or height is -1, will swap width and height, or will throw exception
**width**: new width, if value is -1, will use height to calculate aspect ratio
**height**: new height, if value is -1, will use width to calculate aspect ratio
    **method**: resize method, by default is bilinear
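Example (sketch; assumes -1 is the default "auto" width/height value as noted above):

```python
from maix import image

img = image.Image(320, 240)

# A 90-degree rotation with width/height left at their defaults swaps the
# dimensions automatically.
rotated = img.rotate(90)
print(rotated.width(), rotated.height())  # expected: 240 320
```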
    **return** new rotated image object **static** False > C++ defination code: > ```cpp > image::Image *rotate(float angle, int width 1, int height 1, image::ResizeMethod method image::ResizeMethod::BILINEAR) > ``` #### mean\\_pool ```python def mean_pool(self, x_div: int, y_div: int, copy: bool False) > Image ``` Finds the mean of x_div * y_div squares in the image and returns the modified image composed of the mean of each square. item description **type** func **param** **x_div**: The width of the squares.
    **y_div**: The height of the squares.
    **copy**: Select whether to return a new image or modify the original image. default is false.
    If true, returns a new image composed of the mean of each square; If false, returns the modified image composed of the mean of each square.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *mean_pool(int x_div, int y_div, bool copy false) > ``` #### midpoint\\_pool ```python def midpoint_pool(self, x_div: int, y_div: int, bias: float 0.5, copy: bool False) > Image ``` Finds the midpoint of x_div * y_div squares in the image and returns the modified image composed of the mean of each square. item description **type** func **param** **x_div**: The width of the squares.
    **y_div**: The height of the squares.
    **bias**: The bias of the midpoint. default is 0.5.
midpoint value is equal to (max * bias + min * (1 - bias))
    **copy**: Select whether to return a new image or modify the original image. default is false.
    If true, returns a new image composed of the midpoint of each square; If false, returns the modified image composed of the midpoint of each square.
**return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *midpoint_pool(int x_div, int y_div, double bias 0.5, bool copy false) > ``` #### compress ```python def compress(self, quality: int 95) > Image ``` JPEG compresses the image in place, the same as the to_jpeg function, it's recommended to use to_jpeg instead. item description **type** func **param** **quality**: The quality of the compressed image. default is 95.
    **return** Returns the compressed JPEG image **static** False > C++ defination code: > ```cpp > image::Image *compress(int quality 95) > ``` #### clear ```python def clear(self, mask: Image None) > Image ``` Sets all pixels in the image to zero item description **type** func **param** **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *clear(image::Image *mask nullptr) > ``` #### mask\\_rectange ```python def mask_rectange(self, x: int 1, y: int 1, w: int 1, h: int 1) > Image ``` Zeros a rectangular part of the image. If no arguments are supplied this method zeros the center of the image. item description **type** func **param** **x**: The x coordinate of the top left corner of the rectangle.
    **y**: The y coordinate of the top left corner of the rectangle.
    **w**: The width of the rectangle.
    **h**: The height of the rectangle.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *mask_rectange(int x 1, int y 1, int w 1, int h 1) > ``` #### mask\\_circle ```python def mask_circle(self, x: int 1, y: int 1, radius: int 1) > Image ``` Zeros a circular part of the image. If no arguments are supplied this method zeros the center of the image. item description **type** func **param** **x**: The x coordinate of the center of the circle.
    **y**: The y coordinate of the center of the circle.
    **radius**: The radius of the circle.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *mask_circle(int x 1, int y 1, int radius 1) > ``` #### mask\\_ellipse ```python def mask_ellipse(self, x: int 1, y: int 1, radius_x: int 1, radius_y: int 1, rotation_angle_in_degrees: float 0) > Image ``` Zeros a ellipse part of the image. If no arguments are supplied this method zeros the center of the image. item description **type** func **param** **x**: The x coordinate of the center of the ellipse.
    **y**: The y coordinate of the center of the ellipse.
    **radius_x**: The radius of the ellipse in the x direction.
    **radius_y**: The radius of the ellipse in the y direction.
    **rotation_angle_in_degrees**: The rotation angle of the ellipse in degrees.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *mask_ellipse(int x 1, int y 1, int radius_x 1, int radius_y 1, float rotation_angle_in_degrees 0) > ``` #### binary ```python def binary(self, thresholds: list[list[int]] [], invert: bool False, zero: bool False, mask: Image None, to_bitmap: bool False, copy: bool False) > Image ``` Sets all pixels in the image to black or white depending on if the pixel is inside of a threshold in the threshold list thresholds or not. item description **type** func **note** For GRAYSCALE format, Lmin and Lmax range is [0, 255]. For RGB888 format, Lmin and Lmax range is [0, 100]. **param** **thresholds**: You can define multiple thresholds.
    For GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.
    For RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.
    Where the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.
    **invert**: If true, the thresholds will be inverted before the operation. default is false.
    **zero**: If zero is true, the image will be set the pixels within the threshold to 0, other pixels remain unchanged. If zero is false, the image will be set to black or white. default is false.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
**to_bitmap**: If true, the image will be converted to a bitmap image before thresholding. default is false. TODO: support in the future
    **copy**: Select whether to return a new image or modify the original image. default is false.
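Example (sketch; the LAB threshold values are illustrative only):

```python
from maix import image

img = image.Image(320, 240)  # RGB888, so thresholds use L, A, B ranges

# One threshold [Lmin, Lmax, Amin, Amax, Bmin, Bmax], roughly selecting
# strongly red pixels: they become white, everything else black (in place).
red_thr = [[0, 80, 20, 127, 0, 70]]
img.binary(red_thr)
# img.binary(red_thr, True)  # invert=True inverts the thresholds first
```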
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *binary(std::vector> thresholds std::vector>(), bool invert false, bool zero false, image::Image *mask nullptr, bool to_bitmap false, bool copy false) > ``` #### invert ```python def invert(self) > Image ``` Inverts the image in place. item description **type** func **return** Returns the image after the operation is completed **static** False > C++ defination code: > ```cpp > image::Image *invert() > ``` #### b\\_and ```python def b_and(self, other: Image, mask: Image None) > Image ``` Performs a bitwise and operation between the image and the other image. item description **type** func **param** **other**: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *b_and(image::Image *other, image::Image *mask nullptr) > ``` #### b\\_nand ```python def b_nand(self, other: Image, mask: Image None) > Image ``` Performs a bitwise nand operation between the image and the other image. item description **type** func **param** **other**: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *b_nand(image::Image *other, image::Image *mask nullptr) > ``` #### b\\_or ```python def b_or(self, other: Image, mask: Image None) > Image ``` Performs a bitwise or operation between the image and the other image. item description **type** func **param** **other**: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *b_or(image::Image *other, image::Image *mask nullptr) > ``` #### b\\_nor ```python def b_nor(self, other: Image, mask: Image None) > Image ``` Performs a bitwise nor operation between the image and the other image. item description **type** func **param** **other**: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *b_nor(image::Image *other, image::Image *mask nullptr) > ``` #### b\\_xor ```python def b_xor(self, other: Image, mask: Image None) > Image ``` Performs a bitwise xor operation between the image and the other image. item description **type** func **param** **other**: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *b_xor(image::Image *other, image::Image *mask nullptr) > ``` #### b\\_xnor ```python def b_xnor(self, other: Image, mask: Image None) > Image ``` Performs a bitwise xnor operation between the image and the other image. item description **type** func **param** **other**: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
**return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *b_xnor(image::Image *other, image::Image *mask nullptr) > ``` #### awb ```python def awb(self, max: bool False) > Image ``` Performs an auto white balance operation on the image. TODO: support in the future item description **type** func **param** **max**: if True uses the white patch algorithm instead. default is false.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *awb(bool max false) > ``` #### ccm ```python def ccm(self, matrix: list[float]) > Image ``` Multiples the passed (3x3) or (4x3) floating point color correction matrix with the image.\\nnote: Grayscale format is not support. item description **type** func **param** **matrix**: The color correction matrix to use. 3x3 or 4x3 matrix.
    Weights may either be positive or negative, and the sum of each column in the 3x3 matrix should generally be 1.
    example:
    {
    1, 0, 0,
    0, 1, 0,
    0, 0, 1,
    }
    Where the last row of the 4x3 matrix is an offset per color channel. If you add an offset you may wish to make the
    weights sum to less than 1 to account for the offset.
    example:
    {
    1, 0, 0,
    0, 1, 0,
    0, 0, 1,
    0, 0, 0,
    }
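Example (sketch; the identity matrix below is the same no-op example as above):

```python
from maix import image

img = image.Image(320, 240)

# 3x3 identity matrix leaves colors unchanged; adjust the weights to mix
# channels (each column should generally sum to about 1, per the note above).
identity = [1.0, 0.0, 0.0,
            0.0, 1.0, 0.0,
            0.0, 0.0, 1.0]
img.ccm(identity)

# A 4x3 matrix appends a per-channel offset row, e.g. all-zero offsets:
# img.ccm(identity + [0.0, 0.0, 0.0])
```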
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *ccm(std::vector &matrix) > ``` #### gamma ```python def gamma(self, gamma: float 1.0, contrast: float 1.0, brightness: float 0.0) > Image ``` Quickly changes the image gamma, contrast, and brightness. Create a array whose size is usually 255,\\nand use the parameters gamma, contrast, and brightness to calculate the value of the array, and then map the\\nimage pixel value through the value of the array.\\nThe calculation method for array is: array[array_idx] (powf((array_idx / 255.0), (1 / gamma)) * contrast + brightness) * scale,\\n`powf` is a function used to calculate floating point power.\\n`array` is the array used for mapping.\\n`array_idx` is the index of the array, the maximum value is determined according to the image format, usually 255.\\n`scale` is a constant, the value is determined by the image format, usually 255.\\nMapping method:\\nAssume that a pixel value in the image is 128, then map the pixel value to the value of array[128]\\nUsers can adjust the value of the array through the gamma, contrast, and brightness parameters. item description **type** func **param** **gamma**: The contrast gamma greater than 1.0 makes the image darker in a non linear manner while less than 1.0 makes the image brighter. default is 1.0.
    **contrast**: The contrast value greater than 1.0 makes the image brighter in a linear manner while less than 1.0 makes the image darker. default is 1.0.
    **brightness**: The brightness value greater than 0.0 makes the image brighter in a constant manner while less than 0.0 makes the image darker. default is 0.0.
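Example (sketch; the parameter values are illustrative):

```python
from maix import image

img = image.Image(320, 240)

# gamma < 1.0 brightens non-linearly, contrast > 1.0 brightens linearly,
# brightness > 0.0 adds a constant offset.
img.gamma(0.7, 1.1, 0.05)
```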
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *gamma(double gamma 1.0, double contrast 1.0, double brightness 0.0) > ``` #### gamma\\_corr ```python def gamma_corr(self, gamma: float, contrast: float 1.0, brightness: float 0.0) > Image ``` Alias for Image.gamma. item description **type** func **param** **gamma**: The contrast gamma greater than 1.0 makes the image darker in a non linear manner while less than 1.0 makes the image brighter. default is 1.0.
    **contrast**: The contrast value greater than 1.0 makes the image brighter in a linear manner while less than 1.0 makes the image darker. default is 1.0.
    **brightness**: The brightness value greater than 0.0 makes the image brighter in a constant manner while less than 0.0 makes the image darker. default is 0.0.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *gamma_corr(double gamma, double contrast 1.0, double brightness 0.0) > ``` #### negate ```python def negate(self) > Image ``` Flips (numerically inverts) all pixels values in an image item description **type** func **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *negate() > ``` #### replace ```python def replace(self, other: Image None, hmirror: bool False, vflip: bool False, transpose: bool False, mask: Image None) > Image ``` Replaces all pixels in the image with the corresponding pixels in the other image. item description **type** func **param** **other**: The other image should be an image and should be the same size as the image being operated on.
    **hmirror**: If true, the image will be horizontally mirrored before the operation. default is false.
    **vflip**: If true, the image will be vertically flipped before the operation. default is false.
    **transpose**: If true, the image can be used to rotate 90 degrees or 270 degrees.
hmirror=false, vflip=false, transpose=false, the image will not be rotated.
hmirror=false, vflip=true, transpose=true, the image will be rotated 90 degrees.
hmirror=true, vflip=true, transpose=false, the image will be rotated 180 degrees.
hmirror=true, vflip=false, transpose=true, the image will be rotated 270 degrees.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
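Example (sketch; it assumes passing None for `other` applies the flips/transpose to the image itself, which the default value suggests but this section does not state explicitly):

```python
from maix import image

img = image.Image(320, 240)

# Per the table above: vflip + transpose -> 90 degrees,
# hmirror + vflip -> 180 degrees, hmirror + transpose -> 270 degrees.
img.replace(None, False, True, True)    # assumed in-place 90-degree rotation
# img.replace(None, True, True, False)  # assumed in-place 180-degree rotation
```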
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *replace(image::Image *other nullptr, bool hmirror false, bool vflip false, bool transpose false, image::Image *mask nullptr) > ``` #### set ```python def set(self, other: Image, hmirror: bool False, vflip: bool False, transpose: bool False, mask: Image None) > Image ``` Alias for Image::replace. item description **type** func **param** **other**: The other image should be an image and should be the same size as the image being operated on.
    **hmirror**: If true, the image will be horizontally mirrored before the operation. default is false.
    **vflip**: If true, the image will be vertically flipped before the operation. default is false.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *set(image::Image *other, bool hmirror false, bool vflip false, bool transpose false, image::Image *mask nullptr) > ``` #### add ```python def add(self, other: Image, mask: Image None) > Image ``` Adds the other image to the image. item description **type** func **param** **other**: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *add(image::Image *other, image::Image *mask nullptr) > ``` #### sub ```python def sub(self, other: Image, reverse: bool False, mask: Image None) > Image ``` Subtracts the other image from the image. item description **type** func **param** **other**: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
    **reverse**: If true, the image will be reversed before the operation. default is false.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *sub(image::Image *other, bool reverse false, image::Image *mask nullptr) > ``` #### mul ```python def mul(self, other: Image, invert: bool False, mask: Image None) > Image ``` Multiplies the image by the other image.\\nNote: This method is meant for image blending and cannot multiply the pixels in the image by a scalar like 2. item description **type** func **param** **other**: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
**invert**: If true, will change the multiplication operation from a*b to 1/((1/a)*(1/b)).
    In particular, this lightens the image instead of darkening it (e.g. multiply versus burn operations). default is false.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *mul(image::Image *other, bool invert false, image::Image *mask nullptr) > ``` #### div ```python def div(self, other: Image, invert: bool False, mod: bool False, mask: Image None) > Image ``` Divides the image by the other image.\\nThis method is meant for image blending and cannot divide the pixels in the image by a scalar like 2. item description **type** func **param** **other**: The other image should be an image and should be the same size as the image being operated on. TODO: support path?
**invert**: If true, will change the division direction from a/b to b/a. default is false.
**mod**: If true, will change the division operation to the modulus operation. default is false.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
**return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *div(image::Image *other, bool invert false, bool mod false, image::Image *mask nullptr) > ``` #### min ```python def min(self, other: Image, mask: Image None) > Image ``` Calculate the minimum of each pixel in the image and the other image. item description **type** func **param** **other**: The other image should be an image and should be the same size as the image being operated on.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
**return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *min(image::Image *other, image::Image *mask nullptr) > ``` #### max ```python def max(self, other: Image, mask: Image None) > Image ``` Calculate the maximum of each pixel in the image and the other image. item description **type** func **param** **other**: The other image should be an image and should be the same size as the image being operated on.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
**return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *max(image::Image *other, image::Image *mask nullptr) > ``` #### difference ```python def difference(self, other: Image, mask: Image None) > Image ``` Calculate the absolute value of the difference between each pixel in the image and the other image. item description **type** func **param** **other**: The other image should be an image and should be the same size as the image being operated on.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
**return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *difference(image::Image *other, image::Image *mask nullptr) > ``` #### blend ```python def blend(self, other: Image, alpha: int 128, mask: Image None) > Image ``` Blends the image with the other image.\\nres = alpha * this_img / 256 + (256 - alpha) * other_img / 256 item description **type** func **param** **other**: The other image should be an image and should be the same size as the image being operated on.
**alpha**: The alpha value of the blend, the value range is [0, 256], default is 128.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
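Example (sketch):

```python
from maix import image

a = image.Image(320, 240)
b = image.Image(320, 240)

# alpha=128 gives an even mix: res = alpha * a / 256 + (256 - alpha) * b / 256
a.blend(b, 128)
```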
**return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *blend(image::Image *other, int alpha 128, image::Image *mask nullptr) > ``` #### histeq ```python def histeq(self, adaptive: bool False, clip_limit: int 1, mask: Image None) > Image ``` Runs the histogram equalization algorithm on the image. item description **type** func **param** **adaptive**: If true, an adaptive histogram equalization method will be run on the image instead, which has generally better results than non-adaptive histogram equalization but a longer run time. default is false.
**clip_limit**: Provides a way to limit the contrast of the adaptive histogram equalization. Use a small value for this, like 10, to produce good histogram-equalized, contrast-limited images. default is 1.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *histeq(bool adaptive false, int clip_limit 1, image::Image *mask nullptr) > ``` #### mean ```python def mean(self, size: int, threshold: bool False, offset: int 0, invert: bool False, mask: Image None) > Image ``` Standard mean blurring filter using a box filter.\\nThe parameters offset and invert are valid when threshold is True. item description **type** func **param** **size**: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
**threshold**: If true, enables adaptive thresholding of the image, which sets pixels to white or black based on a pixel’s brightness in relation to the brightness of the kernel of pixels around them.
    default is false.
    **offset**: The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.
    **invert**: If true, the image will be inverted before the operation. default is false.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *mean(int size, bool threshold false, int offset 0, bool invert false, image::Image *mask nullptr) > ``` #### median ```python def median(self, size: int, percentile: float 0.5, threshold: bool False, offset: int 0, invert: bool False, mask: Image None) > Image ``` Runs the median filter on the image. The median filter is the best filter for smoothing surfaces while preserving edges but it is very slow. item description **type** func **param** **size**: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
    **percentile**: This parameter controls the percentile of the value used in the kernel. You can set this to 0 for a min filter, 0.25 for a lower quartile filter, 0.75 for an upper quartile filter, and 1.0 for a max filter. default is 0.5.
**threshold**: If true, enables adaptive thresholding of the image, which sets pixels to white or black based on a pixel’s brightness in relation to the brightness of the kernel of pixels around them.
    default is false.
    **offset**: The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.
    **invert**: If true, the image will be inverted before the operation. default is false.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *median(int size, double percentile 0.5, bool threshold false, int offset 0, bool invert false, image::Image *mask nullptr) > ``` #### mode ```python def mode(self, size: int, threshold: bool False, offset: int 0, invert: bool False, mask: Image None) > Image ``` Runs the mode filter on the image by replacing each pixel with the mode of their neighbors. item description **type** func **param** **size**: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
**threshold**: If true, adaptive thresholding is enabled, which sets pixels to white or black based on each pixel's brightness relative to the brightness of the kernel of pixels around it. default is false.
    **offset**: The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.
    **invert**: If true, the image will be inverted before the operation. default is false.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
**return** Returns the image after the operation is completed. **static** False > C++ definition code: > ```cpp > image::Image *mode(int size, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr) > ``` #### midpoint ```python def midpoint(self, size: int, bias: float = 0.5, threshold: bool = False, offset: int = 0, invert: bool = False, mask: Image = None) -> Image ``` Runs the midpoint filter on the image. This filter finds the midpoint (max * bias + min * (1 - bias)) of each pixel neighborhood in the image. item description **type** func **param** **size**: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
    **bias**: The bias of the midpoint. default is 0.5.
**threshold**: If true, adaptive thresholding is enabled, which sets pixels to white or black based on each pixel's brightness relative to the brightness of the kernel of pixels around it. default is false.
    **offset**: The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.
    **invert**: If true, the image will be inverted before the operation. default is false.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *midpoint(int size, double bias 0.5, bool threshold false, int offset 0, bool invert false, image::Image *mask nullptr) > ``` #### morph ```python def morph(self, size: int, kernel: list[int], mul: float 1, add: float 0.0, threshold: bool False, offset: int 0, invert: bool False, mask: Image None) > Image ``` Convolves the image by a filter kernel. This allows you to do general purpose convolutions on an image. item description **type** func **param** **size**: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
    **kernel**: The kernel used for convolution. The kernel should be a list of lists of numbers. The kernel should be the same size as the actual kernel size.
    **mul**: This parameter is used to multiply the convolved pixel results. default is auto.
    **add**: This parameter is the value to be added to each convolution pixel result. default is 0.0.
**threshold**: If true, adaptive thresholding is enabled, which sets pixels to white or black based on each pixel's brightness relative to the brightness of the kernel of pixels around it. default is false.
    **offset**: The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.
    **invert**: If true, the image will be inverted before the operation. default is false.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
**return** Returns the image after the operation is completed. **static** False > C++ definition code: > ```cpp > image::Image *morph(int size, std::vector<int> kernel, float mul = 1, float add = 0.0, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr) > ``` #### gaussian ```python def gaussian(self, size: int, unsharp: bool = False, mul: float = 1, add: float = 0.0, threshold: bool = False, offset: int = 0, invert: bool = False, mask: Image = None) -> Image ``` Convolves the image by a smoothing Gaussian kernel. item description **type** func **param** **size**: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
**unsharp**: If true, this method performs an unsharp-mask operation instead of a Gaussian filtering operation, which improves the clarity of image edges. default is false.
    **mul**: This parameter is used to multiply the convolved pixel results. default is auto.
    **add**: This parameter is the value to be added to each convolution pixel result. default is 0.0.
**threshold**: If true, adaptive thresholding is enabled, which sets pixels to white or black based on each pixel's brightness relative to the brightness of the kernel of pixels around it. default is false.
    **offset**: The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.
    **invert**: If true, the image will be inverted before the operation. default is false.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
**return** Returns the image after the operation is completed. **static** False > C++ definition code: > ```cpp > image::Image *gaussian(int size, bool unsharp = false, float mul = 1, float add = 0.0, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr) > ``` #### laplacian ```python def laplacian(self, size: int, sharpen: bool = False, mul: float = 1, add: float = 0.0, threshold: bool = False, offset: int = 0, invert: bool = False, mask: Image = None) -> Image ``` Convolves the image by an edge-detecting Laplacian kernel. item description **type** func **param** **size**: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
**sharpen**: If true, this method sharpens the image instead of producing an unthresholded edge-detection image. Increase the kernel size to improve image clarity. default is false.
    **mul**: This parameter is used to multiply the convolved pixel results. default is auto.
    **add**: This parameter is the value to be added to each convolution pixel result. default is 0.0.
**threshold**: If true, adaptive thresholding is enabled, which sets pixels to white or black based on each pixel's brightness relative to the brightness of the kernel of pixels around it. default is false.
    **offset**: The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.
    **invert**: If true, the image will be inverted before the operation. default is false.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *laplacian(int size, bool sharpen false, float mul 1, float add 0.0, bool threshold false, int offset 0, bool invert false, image::Image *mask nullptr) > ``` #### bilateral ```python def bilateral(self, size: int, color_sigma: float 0.1, space_sigma: float 1, threshold: bool False, offset: int 0, invert: bool False, mask: Image None) > Image ``` Convolves the image by a bilateral filter. item description **type** func **param** **size**: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
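To make the `gaussian()` parameters above concrete, a minimal sketch follows; it assumes a camera is attached, and the values and output path are arbitrary.

```python
from maix import camera, image

cam = camera.Camera(320, 240)
img = cam.read()
img = img.gaussian(1)                # 3x3 Gaussian smoothing (size=1)
img = img.gaussian(1, unsharp=True)  # unsharp mask: sharpen edges instead of blurring
img.save("/root/gaussian_demo.jpg")  # hypothetical output path
```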
    **color_sigma**: Controls how closely colors are matched using the bilateral filter. default is 0.1.
    **space_sigma**: Controls how closely pixels space wise are blurred with each other. default is 1.
**threshold**: If true, adaptive thresholding is enabled, which sets pixels to white or black based on each pixel's brightness relative to the brightness of the kernel of pixels around it. default is false.
    **offset**: The larger the offset value, the lower brightness pixels on the original image will be set to white. default is 0.
    **invert**: If true, the image will be inverted before the operation. default is false.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
**return** Returns the image after the operation is completed. **static** False > C++ definition code: > ```cpp > image::Image *bilateral(int size, double color_sigma = 0.1, double space_sigma = 1, bool threshold = false, int offset = 0, bool invert = false, image::Image *mask = nullptr) > ``` #### linpolar ```python def linpolar(self, reverse: bool = False) -> Image ``` Re-projects an image from Cartesian coordinates to linear polar coordinates. item description **type** func **param** **reverse**: If true, the image will be reverse polar transformed. default is false.
**return** Returns the image after the operation is completed. **static** False > C++ definition code: > ```cpp > image::Image *linpolar(bool reverse = false) > ``` #### logpolar ```python def logpolar(self, reverse: bool = False) -> Image ``` Re-projects an image from Cartesian coordinates to log polar coordinates. item description **type** func **param** **reverse**: If true, the image will be reverse polar transformed. default is false.
**return** Returns the image after the operation is completed. **static** False > C++ definition code: > ```cpp > image::Image *logpolar(bool reverse = false) > ``` #### lens\\_corr ```python def lens_corr(self, strength: float = 1.8, zoom: float = 1.0, x_corr: float = 0.0, y_corr: float = 0.0) -> Image ``` Performs a lens correction operation on the image. TODO: support in the future. item description **type** func **param** **strength**: The strength of the lens correction. default is 1.8.
    **zoom**: The zoom of the lens correction. default is 1.0.
    **x_corr**: The x correction of the lens correction. default is 0.0.
    **y_corr**: The y correction of the lens correction. default is 0.0.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *lens_corr(double strength 1.8, double zoom 1.0, double x_corr 0.0, double y_corr 0.0) > ``` #### rotation\\_corr ```python def rotation_corr(self, x_rotation: float 0.0, y_rotation: float 0.0, z_rotation: float 0.0, x_translation: float 0.0, y_translation: float 0.0, zoom: float 1.0, fov: float 60.0, corners: list[float] []) > Image ``` Performs a rotation correction operation on the image. TODO: support in the feature item description **type** func **param** **x_rotation**: The x rotation of the rotation correction. default is 0.0.
    **y_rotation**: The y rotation of the rotation correction. default is 0.0.
    **z_rotation**: The z rotation of the rotation correction. default is 0.0.
    **x_translation**: The x translation of the rotation correction. default is 0.0.
    **y_translation**: The y translation of the rotation correction. default is 0.0.
    **zoom**: The zoom of the rotation correction. default is 1.0.
    **fov**: The fov of the rotation correction. default is 60.0.
    **corners**: The corners of the rotation correction. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *rotation_corr(double x_rotation 0.0, double y_rotation 0.0, double z_rotation 0.0, double x_translation 0.0, double y_translation 0.0, double zoom 1.0, double fov 60.0, std::vector corners std::vector()) > ``` #### get\\_histogram ```python def get_histogram(self, thresholds: list[list[int]] [], invert: bool False, roi: list[int] [], bins: int 1, l_bins: int 100, a_bins: int 256, b_bins: int 256, difference: Image None) > Histogram ``` Computes the normalized histogram on all color channels and returns a image::Histogram object. item description **type** func **note** For GRAYSCALE format, Lmin and Lmax range is [0, 255]. For RGB888 format, Lmin and Lmax range is [0, 100]. **param** **thresholds**: You can define multiple thresholds.
    For GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.
    For RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.
    Where the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.
    **invert**: If true, the thresholds will be inverted before the operation. default is false.
    **roi**: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
    default is None, means whole image.
    **bins**: The number of bins to use for the histogram.
    In GRAYSCALE format, setting range is [2, 256], default is 100.
    In RGB888 format, setting range is [2, 100], default is 100.
    **l_bins**: The number of bins to use for the l channel of the histogram. Only valid in RGB888 format.
    If an invalid value is set, bins will be used instead. The setting range is [2, 100], default is 100.
    **a_bins**: The number of bins to use for the a channel of the histogram.
    Only valid in RGB888 format.The setting range is [2, 256], default is 256.
    **b_bins**: The number of bins to use for the b channel of the histogram.
    Only valid in RGB888 format. The setting range is [2, 256], default is 256.
    **difference**: difference may be set to an image object to cause this method to operate on the difference image between the current image and the difference image object.
    default is None.
    **return** Returns image::Histogram object **static** False > C++ defination code: > ```cpp > image::Histogram get_histogram(std::vector> thresholds std::vector>(), bool invert false, std::vector roi std::vector(), int bins 1, int l_bins 100, int a_bins 256, int b_bins 256, image::Image *difference nullptr) > ``` #### get\\_statistics ```python def get_statistics(self, thresholds: list[list[int]] [], invert: bool False, roi: list[int] [], bins: int 1, l_bins: int 1, a_bins: int 1, b_bins: int 1, difference: Image None) > Statistics ``` Gets the statistics of the image. TODO: support in the feature item description **type** func **note** For GRAYSCALE format, Lmin and Lmax range is [0, 255]. For RGB888 format, Lmin and Lmax range is [0, 100]. **param** **thresholds**: You can define multiple thresholds.
    For GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.
    For RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.
    Where the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.
    **invert**: If true, the image will be inverted before the operation. default is false.
    **roi**: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
    default is None, means whole image.
    **bins**: The number of bins to use for the statistics. default is 1.
    **l_bins**: The number of bins to use for the l channel of the statistics. default is 1.
    **a_bins**: The number of bins to use for the a channel of the statistics. default is 1.
    **b_bins**: The number of bins to use for the b channel of the statistics. default is 1.
    **difference**: The difference image to use for the statistics. default is None.
    **return** Returns the statistics of the image **static** False > C++ defination code: > ```cpp > image::Statistics get_statistics(std::vector> thresholds std::vector>(), bool invert false, std::vector roi std::vector(), int bins 1, int l_bins 1, int a_bins 1, int b_bins 1, image::Image *difference nullptr) > ``` #### get\\_regression ```python def get_regression(self, thresholds: list[list[int]] [], invert: bool False, roi: list[int] [], x_stride: int 2, y_stride: int 1, area_threshold: int 10, pixels_threshold: int 10, robust: bool False) > list[Line] ``` Gets the regression of the image. item description **type** func **note** For GRAYSCALE format, Lmin and Lmax range is [0, 255]. For RGB888 format, Lmin and Lmax range is [0, 100]. **param** **thresholds**: You can define multiple thresholds.
    For GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.
    For RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.
    Where the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.
    **invert**: If true, the image will be inverted before the operation. default is false.
    **roi**: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
    default is None, means whole image.
    **x_stride**: The x stride to use for the regression. default is 2.
    **y_stride**: The y stride to use for the regression. default is 1.
    **area_threshold**: The area threshold to use for the regression. default is 10.
    **pixels_threshold**: The pixels threshold to use for the regression. default is 10.
    **robust**: If true, the regression will be robust. default is false.
    **return** Returns the regression of the image **static** False > C++ defination code: > ```cpp > std::vector get_regression(std::vector> thresholds std::vector>(), bool invert false, std::vector roi std::vector(), int x_stride 2, int y_stride 1, int area_threshold 10, int pixels_threshold 10, bool robust false) > ``` #### save ```python def save(self, path: str, quality: int 95) > maix.err.Err ``` Save image to file item description **type** func **param** **path**: file path
    **quality**: image quality, by default(value is 95), support jpeg and png format
    **return** error code, err::ERR_NONE is ok, other is error **static** False > C++ defination code: > ```cpp > err::Err save(const char *path, int quality 95) > ``` #### flood\\_fill ```python def flood_fill(self, x: int, y: int, seed_threshold: float 0.05, floating_threshold: float 0.05, color: Color ..., invert: bool False, clear_background: bool False, mask: Image None) > Image ``` Flood fills a region of the image starting from location x, y. item description **type** func **param** **x**: The x coordinate of the seed point.
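A short sketch of `save()`; the paths are hypothetical, and the quality value only applies to formats that support it (JPEG/PNG), as noted above.

```python
from maix import image, err

img = image.load("/root/photo.jpg")              # hypothetical input path
e = img.save("/root/photo_q80.jpg", quality=80)  # re-encode with lower JPEG quality
if e != err.Err.ERR_NONE:
    print("save failed:", e)
```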
    **y**: The y coordinate of the seed point.
    **seed_threshold**: The seed_threshold value controls how different any pixel in the fill area may be from the original starting pixel. default is 0.05.
    **floating_threshold**: The floating_threshold value controls how different any pixel in the fill area may be from any neighbor pixels. default is 0.05.
    **color**: The color to fill the region with. default is white.
    **invert**: If true, the image will be inverted before the operation. default is false.
    **clear_background**: If true, the background will be cleared before the operation. default is false.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None. FIXME: the mask image works abnormally
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *flood_fill(int x, int y, float seed_threshold 0.05, float floating_threshold 0.05, image::Color color image::COLOR_WHITE, bool invert false, bool clear_background false, image::Image *mask nullptr) > ``` #### erode ```python def erode(self, size: int, threshold: int 1, mask: Image None) > Image ``` Erodes the image in place. item description **type** func **param** **size**: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
**threshold**: The number of pixels in the kernel that are not 0. If this count is less than or equal to the threshold, the center pixel is set to black. default is (kernel_size - 1).
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *erode(int size, int threshold 1, image::Image *mask nullptr) > ``` #### dilate ```python def dilate(self, size: int, threshold: int 0, mask: Image None) > Image ``` Dilates the image in place. item description **type** func **param** **size**: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
    **threshold**: The number of pixels in the kernel that are not 0. If it is greater than or equal to the threshold, set the center pixel to white. default is 0.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *dilate(int size, int threshold 0, image::Image *mask nullptr) > ``` #### open ```python def open(self, size: int, threshold: int 0, mask: Image None) > Image ``` Performs erosion and dilation on an image in order. item description **type** func **param** **size**: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
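The sketch below strings `erode()` and `dilate()` together to clean up a black-and-white mask image; the paths and kernel size are placeholders.

```python
from maix import image

img = image.load("/root/mask.png")  # assume this is already a black-and-white mask (hypothetical path)
img = img.erode(1)                  # 3x3 kernel: shrink white regions, removing speckle noise
img = img.dilate(1)                 # 3x3 kernel: grow the remaining regions back
img.save("/root/mask_clean.png")
```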
**threshold**: The threshold used for the erosion and dilation steps; the actual threshold for erosion is (kernel_size - 1 - threshold) and the actual threshold for dilation is threshold. default is 0.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *open(int size, int threshold 0, image::Image *mask nullptr) > ``` #### close ```python def close(self, size: int, threshold: int 0, mask: Image None) > Image ``` Performs dilation and erosion on an image in order. item description **type** func **param** **size**: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
**threshold**: The threshold used for the dilation and erosion steps; the actual threshold for erosion is (kernel_size - 1 - threshold) and the actual threshold for dilation is threshold. default is 0.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *close(int size, int threshold 0, image::Image *mask nullptr) > ``` #### top\\_hat ```python def top_hat(self, size: int, threshold: int 0, mask: Image None) > Image ``` Returns the image difference of the image and Image.open()’ed image. item description **type** func **param** **size**: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
    **threshold**: As the threshold for open method. default is 0.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
    **return** Returns the image after the operation is completed. **static** False > C++ defination code: > ```cpp > image::Image *top_hat(int size, int threshold 0, image::Image *mask nullptr) > ``` #### black\\_hat ```python def black_hat(self, size: int, threshold: int 0, mask: Image None) > Image ``` Returns the image difference of the image and Image.close()’ed image. item description **type** func **param** **size**: Kernel size. The actual kernel size is ((size * 2) + 1) * ((size * 2) + 1). Use 1(3x3 kernel), 2(5x5 kernel).
    **threshold**: As the threshold for close method. default is 0.
    **mask**: Mask is another image to use as a pixel level mask for the operation. The mask should be an image with just black or white pixels and should be the same size as the image being operated on.
    Only pixels set in the mask are modified. default is None.
**return** Returns the image after the operation is completed. **static** False > C++ definition code: > ```cpp > image::Image *black_hat(int size, int threshold = 0, image::Image *mask = nullptr) > ``` #### find\\_blobs ```python def find_blobs(self, thresholds: list[list[int]] = [], invert: bool = False, roi: list[int] = [], x_stride: int = 2, y_stride: int = 1, area_threshold: int = 10, pixels_threshold: int = 10, merge: bool = False, margin: int = 0, x_hist_bins_max: int = 0, y_hist_bins_max: int = 0) -> list[Blob] ``` Finds all blobs in the image and returns a list of image.Blob objects which describe each blob.\\nPlease see the image.Blob object for more information. item description **type** func **note** For GRAYSCALE format, Lmin and Lmax range is [0, 255]. For RGB888 format, Lmin and Lmax range is [0, 100]. **param** **thresholds**: You can define multiple thresholds.
    For GRAYSCALE format, you can use {{Lmin, Lmax}, ...} to define one or more thresholds.
    For RGB888 format, you can use {{Lmin, Lmax, Amin, Amax, Bmin, Bmax}, ...} to define one or more thresholds.
    Where the upper case L,A,B represent the L,A,B channels of the LAB image format, and min, max represent the minimum and maximum values of the corresponding channels.
**invert**: if true, the thresholds will be inverted before finding blobs. default is false
    **roi**: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
    default is None, means whole image.
**x_stride**: x_stride is the number of x pixels to skip when searching for blobs. default is 2
**y_stride**: y_stride is the number of y pixels to skip when searching for blobs. default is 1
    **area_threshold**: area threshold, if the blob area is smaller than area_threshold, the blob is not returned, default is 10
**pixels_threshold**: pixels threshold, if the blob's pixel count is smaller than pixels_threshold, the blob is not returned. default is 10.
when x_stride and y_stride are equal to 1, pixels_threshold is equivalent to area_threshold
**merge**: if true, merges all blobs that were not filtered out and whose bounding rectangles intersect each other. default is false
    **margin**: margin can be used to increase or decrease the size of the bounding rectangles for blobs during the intersection test.
For example, with a margin of 1, blobs whose bounding rectangles are 1 pixel away from each other will be merged. default is 0
**x_hist_bins_max**: if set to a non-zero value, populates a histogram buffer in each blob object with an x_histogram projection of all columns in the object. This value then sets the number of bins for that projection.
**y_hist_bins_max**: if set to a non-zero value, populates a histogram buffer in each blob object with a y_histogram projection of all rows in the object. This value then sets the number of bins for that projection.
    **return** Return the blob when found blobs, format is (blob1, blob2, ...), you can use blob class methods to do more operations. **static** False > C++ defination code: > ```cpp > std::vector find_blobs(std::vector> thresholds std::vector>(), bool invert false, std::vector roi std::vector(), int x_stride 2, int y_stride 1, int area_threshold 10, int pixels_threshold 10, bool merge false, int margin 0, int x_hist_bins_max 0, int y_hist_bins_max 0) > ``` #### find\\_lines ```python def find_lines(self, roi: list[int] [], x_stride: int 2, y_stride: int 1, threshold: float 1000, theta_margin: float 25, rho_margin: float 25) > list[Line] ``` Find lines in image item description **type** func **param** **roi**: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
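A typical color-tracking loop built on `find_blobs()` might look like the sketch below; the LAB threshold values are placeholders that would need tuning for a real target.

```python
from maix import camera, display, image, app

cam = camera.Camera(320, 240)
disp = display.Display()

# One LAB threshold: [L_min, L_max, A_min, A_max, B_min, B_max] (placeholder values)
thresholds = [[0, 80, 40, 80, 10, 80]]

while not app.need_exit():
    img = cam.read()
    for b in img.find_blobs(thresholds, pixels_threshold=500):
        img.draw_rect(b.x(), b.y(), b.w(), b.h(), image.COLOR_GREEN)  # outline each blob
    disp.show(img)
```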
    default is None, means whole image.
    **x_stride**: x stride is the number of x pixels to skip when doing the hough transform. default is 2
    **y_stride**: y_stride is the number of y pixels to skip when doing the hough transform. default is 1
**threshold**: Controls which lines are detected by the Hough transform. Only lines with a magnitude greater than or equal to threshold are returned.
    The right value of threshold for your application is image dependent. default is 1000.
    **theta_margin**: theta_margin controls the merging of detected lines. default is 25.
    **rho_margin**: rho_margin controls the merging of detected lines. default is 25.
    **return** Return the line when found lines, format is (line1, line2, ...), you can use line class methods to do more operations **static** False > C++ defination code: > ```cpp > std::vector find_lines(std::vector roi std::vector(), int x_stride 2, int y_stride 1, double threshold 1000, double theta_margin 25, double rho_margin 25) > ``` #### find\\_line\\_segments ```python def find_line_segments(self, roi: list[int] [], merge_distance: int 0, max_theta_difference: int 15) > list[Line] ``` Finds all line segments in the image. item description **type** func **param** **roi**: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
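For `find_lines()`, here is a minimal line-detection loop; the threshold of 1000 is simply the documented default and usually needs tuning per scene.

```python
from maix import camera, display, image, app

cam = camera.Camera(320, 240)
disp = display.Display()

while not app.need_exit():
    img = cam.read()
    for l in img.find_lines(threshold=1000):
        img.draw_line(l.x1(), l.y1(), l.x2(), l.y2(), image.COLOR_RED, 2)  # draw each detected line
    disp.show(img)
```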
    default is None, means whole image.
    **merge_distance**: The maximum distance between two lines to merge them. default is 0.
    **max_theta_difference**: The maximum difference between two lines to merge them. default is 15.
    **return** Return the line when found lines, format is (line1, line2, ...), you can use line class methods to do more operations **static** False > C++ defination code: > ```cpp > std::vector find_line_segments(std::vector roi std::vector(), int merge_distance 0, int max_theta_difference 15) > ``` #### find\\_circles ```python def find_circles(self, roi: list[int] [], x_stride: int 2, y_stride: int 1, threshold: int 2000, x_margin: int 10, y_margin: int 10, r_margin: int 10, r_min: int 2, r_max: int 1, r_step: int 2) > list[Circle] ``` Find circles in image item description **type** func **param** **roi**: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
    default is None, means whole image.
    **x_stride**: x stride is the number of x pixels to skip when doing the hough transform. default is 2
    **y_stride**: y_stride is the number of y pixels to skip when doing the hough transform. default is 1
**threshold**: Controls which circles are detected by the Hough transform. Only circles with a magnitude greater than or equal to threshold are returned.
    The right value of threshold for your application is image dependent.
    **x_margin**: x_margin controls the merging of detected circles. Circles which are x_margin, y_margin, and r_margin pixels apart are merged. default is 10
    **y_margin**: y_margin controls the merging of detected circles. Circles which are x_margin, y_margin, and r_margin pixels apart are merged. default is 10
    **r_margin**: r_margin controls the merging of detected circles. Circles which are x_margin, y_margin, and r_margin pixels apart are merged. default is 10
    **r_min**: r_min controls the minimum circle radius detected. Increase this to speed up the algorithm. default is 2
    **r_max**: r_max controls the maximum circle radius detected. Decrease this to speed up the algorithm. default is min(roi.w / 2, roi.h / 2)
    **r_step**: r_step controls how to step the radius detection by. default is 2.
    **return** Return the circle when found circles, format is (circle1, circle2, ...), you can use circle class methods to do more operations **static** False > C++ defination code: > ```cpp > std::vector find_circles(std::vector roi std::vector(), int x_stride 2, int y_stride 1, int threshold 2000, int x_margin 10, int y_margin 10, int r_margin 10, int r_min 2, int r_max 1, int r_step 2) > ``` #### find\\_rects ```python def find_rects(self, roi: list[int] [], threshold: int 10000) > list[Rect] ``` Finds all rects in the image. item description **type** func **param** **roi**: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
    default is None, means whole image.
    **threshold**: The threshold to use for the rects. default is 10000.
    **return** Returns the rects of the image **static** False > C++ defination code: > ```cpp > std::vector find_rects(std::vector roi std::vector(), int threshold 10000) > ``` #### find\\_qrcodes ```python def find_qrcodes(self, roi: list[int] []) > list[QRCode] ``` Finds all qrcodes in the image. item description **type** func **param** **roi**: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
    default is None, means whole image.
    **return** Returns the qrcodes of the image **static** False > C++ defination code: > ```cpp > std::vector find_qrcodes(std::vector roi std::vector()) > ``` #### find\\_apriltags ```python def find_apriltags(self, roi: list[int] [], families: ApriltagFamilies ..., fx: float 1, fy: float 1, cx: int 1, cy: int 1) > list[AprilTag] ``` Finds all apriltags in the image. item description **type** func **param** **roi**: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
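A sketch of a QR-code scanning loop using `find_qrcodes()`; it assumes the returned QRCode objects expose `corners()`, `x()`, `y()` and `payload()` accessors as in the image.QRCode class.

```python
from maix import camera, display, image, app

cam = camera.Camera(320, 240)
disp = display.Display()

while not app.need_exit():
    img = cam.read()
    for qr in img.find_qrcodes():
        corners = qr.corners()
        for i in range(4):  # outline the code
            img.draw_line(corners[i][0], corners[i][1],
                          corners[(i + 1) % 4][0], corners[(i + 1) % 4][1], image.COLOR_RED)
        img.draw_string(qr.x(), qr.y() - 15, qr.payload(), image.COLOR_RED)
    disp.show(img)
```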
    default is None, means whole image.
    **families**: The families to use for the apriltags. default is TAG36H11.
    **fx**: The camera X focal length in pixels, default is 1.
    **fy**: The camera Y focal length in pixels, default is 1.
    **cx**: The camera X center in pixels, default is image.width / 2.
    **cy**: The camera Y center in pixels, default is image.height / 2.
    **return** Returns the apriltags of the image **static** False > C++ defination code: > ```cpp > std::vector find_apriltags(std::vector roi std::vector(), image::ApriltagFamilies families image::ApriltagFamilies::TAG36H11, float fx 1, float fy 1, int cx 1, int cy 1) > ``` #### find\\_datamatrices ```python def find_datamatrices(self, roi: list[int] [], effort: int 200) > list[DataMatrix] ``` Finds all datamatrices in the image. item description **type** func **param** **roi**: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
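Similarly, a rough sketch of `find_apriltags()` with the default TAG36H11 family; the AprilTag accessors (`x()`, `y()`, `w()`, `h()`, `id()`) are assumed from the image.AprilTag class.

```python
from maix import camera, display, image, app

cam = camera.Camera(320, 240)
disp = display.Display()

while not app.need_exit():
    img = cam.read()
    for t in img.find_apriltags(families=image.ApriltagFamilies.TAG36H11):
        img.draw_rect(t.x(), t.y(), t.w(), t.h(), image.COLOR_GREEN)
        img.draw_string(t.x(), t.y() - 15, "id: " + str(t.id()), image.COLOR_GREEN)
    disp.show(img)
```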
    default is None, means whole image.
    **effort**: Controls how much time to spend trying to find data matrix matches. default is 200.
    **return** Returns the datamatrices of the image **static** False > C++ defination code: > ```cpp > std::vector find_datamatrices(std::vector roi std::vector(), int effort 200) > ``` #### find\\_barcodes ```python def find_barcodes(self, roi: list[int] []) > list[BarCode] ``` Finds all barcodes in the image. item description **type** func **param** **roi**: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
    default is None, means whole image.
    **return** Returns the barcodes of the image **static** False > C++ defination code: > ```cpp > std::vector find_barcodes(std::vector roi std::vector()) > ``` #### find\\_displacement ```python def find_displacement(self, template_image: Image, roi: list[int] [], template_roi: list[int] [], logpolar: bool False) > Displacement ``` Finds the displacement between the image and the template. TODO: support in the feature\\nnote: this method must be used on power of 2 image sizes item description **type** func **param** **template_image**: The template image.
    **roi**: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
    default is None, means whole image.
    **template_roi**: The region of interest rectangle (x, y, w, h) to work in. If not specified, it is equal to the image rectangle.
    **logpolar**: If true, it will instead find rotation and scale changes between the two images. default is false.
    **return** Returns the displacement of the image **static** False > C++ defination code: > ```cpp > image::Displacement find_displacement(image::Image &template_image, std::vector roi std::vector(), std::vector template_roi std::vector(), bool logpolar false) > ``` #### find\\_template ```python def find_template(self, template_image: Image, threshold: float, roi: list[int] [], step: int 2, search: TemplateMatch ...) > list[int] ``` Finds the template in the image. item description **type** func **param** **template_image**: The template image.
**threshold**: Threshold is a floating point number (0.0 - 1.0); a higher threshold prevents false positives at the cost of a lower detection rate, while a lower threshold does the opposite.
    **roi**: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
    default is None, means whole image. Only valid in SEARCH_EX mode.
    **step**: The step size to use for the template. default is 2. Only valid in SEARCH_EX mode
    **search**: The search method to use for the template. default is SEARCH_EX.
    **return** Returns a bounding box tuple (x, y, w, h) for the matching location otherwise None. **static** False > C++ defination code: > ```cpp > std::vector find_template(image::Image &template_image, float threshold, std::vector roi std::vector(), int step 2, image::TemplateMatch search image::TemplateMatch::SEARCH_EX) > ``` #### find\\_features ```python def find_features(self, cascade: int, threshold: float 0.5, scale: float 1.5, roi: list[int] []) > list[int] ``` Finds the features in the image. TODO: support in the feature item description **type** func **param** **cascade**: The cascade to use for the features. default is CASCADE_FRONTALFACE_ALT.
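A small sketch of `find_template()`; both paths are hypothetical, grayscale input is assumed (template matching is typically done on grayscale images), and 0.7 is just a starting threshold.

```python
from maix import image

img = image.load("/root/scene.jpg", image.Format.FMT_GRAYSCALE)        # hypothetical path
template = image.load("/root/template.jpg", image.Format.FMT_GRAYSCALE)

r = img.find_template(template, 0.7)  # returns [x, y, w, h] on a match, empty/None otherwise
if r:
    x, y, w, h = r
    img.draw_rect(x, y, w, h, image.COLOR_RED)
    img.save("/root/match.jpg")
```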
    **threshold**: The threshold to use for the features. default is 0.5.
    **scale**: The scale to use for the features. default is 1.5.
    **roi**: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
    default is None, means whole image.
    **return** Returns the features of the image **static** False > C++ defination code: > ```cpp > std::vector find_features(int cascade, float threshold 0.5, float scale 1.5, std::vector roi std::vector()) > ``` #### find\\_lbp ```python def find_lbp(self, roi: list[int] []) > LBPKeyPoint ``` Finds the lbp in the image. TODO: support in the feature. item description **type** func **param** **roi**: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
    default is None, means whole image.
    **return** Returns the lbp of the image **static** False > C++ defination code: > ```cpp > image::LBPKeyPoint find_lbp(std::vector roi std::vector()) > ``` #### find\\_keypoints ```python def find_keypoints(self, roi: list[int] [], threshold: int 20, normalized: bool False, scale_factor: float 1.5, max_keypoints: int 100, corner_detector: CornerDetector ...) > ORBKeyPoint ``` Finds the keypoints in the image. TODO: support in the feature. item description **type** func **param** **roi**: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
    default is None, means whole image.
    **threshold**: The threshold to use for the keypoints. default is 20.
    **normalized**: If true, the image will be normalized before the operation. default is false.
    **scale_factor**: The scale factor to use for the keypoints. default is 1.5.
    **max_keypoints**: The maximum number of keypoints to use for the keypoints. default is 100.
    **corner_detector**: The corner detector to use for the keypoints. default is CORNER_AGAST.
    **return** Returns the keypoints of the image **static** False > C++ defination code: > ```cpp > image::ORBKeyPoint find_keypoints(std::vector roi std::vector(), int threshold 20, bool normalized false, float scale_factor 1.5, int max_keypoints 100, image::CornerDetector corner_detector image::CornerDetector::CORNER_AGAST) > ``` #### find\\_edges ```python def find_edges(self, edge_type: EdgeDetector, roi: list[int] [], threshold: list[int] [100, 200]) > Image ``` Finds the edges in the image. item description **type** func **param** **edge_type**: The edge type to use for the edges. default is EDGE_CANNY.
    **roi**: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
    default is None, means whole image.
**threshold**: The thresholds to use for edge detection, in the form [low, high]. default is [100, 200].
    **return** Returns the edges of the image **static** False > C++ defination code: > ```cpp > image::Image* find_edges(image::EdgeDetector edge_type, std::vector roi std::vector(), std::vector threshold std::vector({100, 200})) > ``` #### find\\_hog ```python def find_hog(self, roi: list[int] [], size: int 8) > Image ``` Finds the hog in the image. TODO: support in the feature item description **type** func **param** **roi**: The region of interest, input in the format of (x, y, w, h), x and y are the coordinates of the upper left corner, w and h are the width and height of roi.
    default is None, means whole image.
    **size**: The size to use for the hog. default is 8.
    **return** Returns the hog of the image **static** False > C++ defination code: > ```cpp > image::Image* find_hog(std::vector roi std::vector(), int size 8) > ``` #### match\\_lbp\\_descriptor ```python def match_lbp_descriptor(self, desc1: LBPKeyPoint, desc2: LBPKeyPoint) > int ``` Matches the lbp descriptor of the image. TODO: support in the feature item description **type** func **param** **desc1**: The descriptor to use for the match.
    **desc2**: The descriptor to use for the match.
    **return** Returns the match of the image **static** False > C++ defination code: > ```cpp > int match_lbp_descriptor(image::LBPKeyPoint &desc1, image::LBPKeyPoint &desc2) > ``` #### match\\_orb\\_descriptor ```python def match_orb_descriptor(self, desc1: ORBKeyPoint, desc2: ORBKeyPoint, threshold: int 95, filter_outliers: bool False) > KPTMatch ``` Matches the orb descriptor of the image. TODO: support in the feature item description **type** func **param** **desc1**: The descriptor to use for the match.
    **desc2**: The descriptor to use for the match.
    **threshold**: The threshold to use for the match. default is 95.
    **filter_outliers**: If true, the image will be filter_outliers before the operation. default is false.
    **return** Returns the match of the image **static** False > C++ defination code: > ```cpp > image::KPTMatch match_orb_descriptor(image::ORBKeyPoint &desc1, image::ORBKeyPoint &desc2, int threshold 95, bool filter_outliers false) > ```"},"/maixpy/api/maix/display.html":{"title":"maix.display","content":" title: maix.display maix.display module, control display device and show image on it > You can use `maix.display` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ### send\\_to\\_maixvision ```python def send_to_maixvision(img: maix.image.Image) > None ``` Send image to MaixVision work station if connected.\\nIf you want to debug your program an don't want to initialize display, use this method. item description **param** **img**: image to send, image.Image object
    > C++ defination code: > ```cpp > void send_to_maixvision(image::Image &img) > ``` ## Class ### Display Display class > C++ defination code: > ```cpp > class Display > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, width: int 1, height: int 1, format: maix.image.Format ..., device: str None, open: bool True) > None ``` Construct a new Display object item description **type** func **param** **width**: display width, by default(value is 1) means auto detect,
    if width > max device supported width, will auto set to max device supported width
    **height**: display height, by default(value is 1) means auto detect,
    if height > max device supported height, will auto set to max device supported height
    **device**: display device name, you can get devices by list_devices method, by default(value is NULL(None in MaixPy)) means the first device
    **open**: If true, display will automatically call open() after creation. default is true.
    **static** False > C++ defination code: > ```cpp > Display(int width 1, int height 1, image::Format format image::FMT_RGB888, const char *device nullptr, bool open true) > ``` #### width ```python def width(self) > int ``` Get display width item description **type** func **return** width **static** False > C++ defination code: > ```cpp > int width() > ``` #### height ```python def height(self) > int ``` Get display height item description **type** func **param** **ch**: channel to get, by default(value is 0) means the first channel
    **return** height **static** False > C++ defination code: > ```cpp > int height() > ``` #### size ```python def size(self) > list[int] ``` Get display size item description **type** func **param** **ch**: channel to get, by default(value is 0) means the first channel
    **return** size A list type in MaixPy, [width, height] **static** False > C++ defination code: > ```cpp > std::vector size() > ``` #### format ```python def format(self) > maix.image.Format ``` Get display format item description **type** func **return** format **static** False > C++ defination code: > ```cpp > image::Format format() > ``` #### open ```python def open(self, width: int 1, height: int 1, format: maix.image.Format ...) > maix.err.Err ``` open display device, if already opened, will return err.ERR_NONE. item description **type** func **param** **width**: display width, default is 1, means auto, mostly means max width of display support
    **height**: display height, default is 1, means auto, mostly means max height of display support
    **format**: display output format, default is RGB888
    **return** error code **static** False > C++ defination code: > ```cpp > err::Err open(int width 1, int height 1, image::Format format image::FMT_INVALID) > ``` #### close ```python def close(self) > maix.err.Err ``` close display device item description **type** func **return** error code **static** False > C++ defination code: > ```cpp > err::Err close() > ``` #### add\\_channel ```python def add_channel(self, width: int 1, height: int 1, format: maix.image.Format ..., open: bool True) > Display ``` Add a new channel and return a new Display object, you can use close() to close this channel. item description **type** func **attention** If a new disp channel is created, it is recommended to set fit image::FIT_COVER or fit image::FIT_FILL when running show for the main channel,
    otherwise the display of the new disp channel may be abnormal. **param** **width**: display width, default is 1, means auto, mostly means max width of display support. Maximum width must not exceed the main channel.
    **height**: display height, default is 1, means auto, mostly means max height of display support. Maximum height must not exceed the main channel.
    **format**: display output format, default is FMT_BGRA8888
    **open**: If true, display will automatically call open() after creation. default is true.
    **return** new Display object **static** False > C++ defination code: > ```cpp > display::Display *add_channel(int width 1, int height 1, image::Format format image::FMT_BGRA8888, bool open true) > ``` #### is\\_opened ```python def is_opened(self) > bool ``` check display device is opened or not item description **type** func **return** opened or not, bool type **static** False > C++ defination code: > ```cpp > bool is_opened() > ``` #### is\\_closed ```python def is_closed(self) > bool ``` check display device is closed or not item description **type** func **return** closed or not, bool type **static** False > C++ defination code: > ```cpp > bool is_closed() > ``` #### show ```python def show(self, img: maix.image.Image, fit: maix.image.Fit ...) > maix.err.Err ``` show image on display device, and will also send to MaixVision work station if connected. item description **type** func **param** **img**: image to show, image.Image object,
if the image is smaller than the display size, it will be shown centered on the display;
if the image is bigger than the display size, it will be automatically resized to the display size while keeping the aspect ratio, with the blank area filled with black.
    **fit**: image in screen fit mode, by default(value is image.FIT_CONTAIN), @see image.Fit for more details
    e.g. image.FIT_CONTAIN means resize image to fit display size and keep ratio, fill blank with black color.
    **return** error code **static** False > C++ defination code: > ```cpp > err::Err show(image::Image &img, image::Fit fit image::FIT_CONTAIN) > ``` #### device ```python def device(self) > str ``` Get display device path item description **type** func **return** display device path **static** False > C++ defination code: > ```cpp > std::string device() > ``` #### set\\_backlight ```python def set_backlight(self, value: float) > None ``` Set display backlight item description **type** func **param** **value**: backlight value, float type, range is [0, 100]
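The canonical use of `show()` is the camera-to-screen loop sketched below; a minimal example, assuming a connected camera and the default FIT_CONTAIN behaviour described above.

```python
from maix import camera, display, app

cam = camera.Camera(640, 480)
disp = display.Display()   # default size: auto-detect the panel resolution

while not app.need_exit():
    img = cam.read()
    disp.show(img)         # letterboxes with black if the aspect ratios differ (FIT_CONTAIN)
```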
    **static** False > C++ defination code: > ```cpp > void set_backlight(float value) > ``` #### get\\_backlight ```python def get_backlight(self) > float ``` Get display backlight item description **type** func **return** value backlight value, float type, range is [0, 100] **static** False > C++ defination code: > ```cpp > float get_backlight() > ``` #### set\\_hmirror ```python def set_hmirror(self, en: bool) > maix.err.Err ``` Set display mirror item description **type** func **param** **en**: enable/disable mirror
    **static** False > C++ defination code: > ```cpp > err::Err set_hmirror(bool en) > ``` #### set\\_vflip ```python def set_vflip(self, en: bool) > maix.err.Err ``` Set display flip item description **type** func **param** **en**: enable/disable flip
    **static** False > C++ defination code: > ```cpp > err::Err set_vflip(bool en) > ```"},"/maixpy/api/maix/protocol.html":{"title":"maix.protocol","content":" title: maix.protocol maix.protocol module > You can use `maix.protocol` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ### CMD protocol cmd, more doc see MaixCDK document's convention doc item describe **note** max app custom CMD value should < CMD_APP_MAX **values** **CMD_APP_MAX**: 200, max app custom CMD value should < CMD_APP_MAX
    **CMD_SET_REPORT**: set auto upload data mode
    **CMD_APP_LIST**:
    **CMD_START_APP**:
    **CMD_EXIT_APP**:
    **CMD_CUR_APP_INFO**:
    **CMD_APP_INFO**:
    **CMD_KEY**:
    **CMD_TOUCH**:
    > C++ defination code: > ```cpp > enum CMD > { > CMD_APP_MAX 0xC8, // 200, max app custom CMD value should < CMD_APP_MAX > > CMD_SET_REPORT 0xF8, // set auto upload data mode > CMD_APP_LIST 0xF9, > CMD_START_APP 0xFA, > CMD_EXIT_APP 0xFB, > CMD_CUR_APP_INFO 0xFC, > CMD_APP_INFO 0xFD, > CMD_KEY 0xFE, > CMD_TOUCH 0xFF, > } > ``` ### FLAGS protocol flags, more doc see MaixCDK document's convention doc item describe **values** **FLAG_REQ**:
    **FLAG_RESP**:
    **FLAG_IS_RESP_MASK**:
    **FLAG_RESP_OK**:
    **FLAG_RESP_ERR**:
    **FLAG_RESP_OK_MASK**:
    **FLAG_REPORT**:
    **FLAG_REPORT_MASK**:
    **FLAG_VERSION_MASK**:
    > C++ defination code: > ```cpp > enum FLAGS > { > FLAG_REQ 0x00, > FLAG_RESP 0x80, > FLAG_IS_RESP_MASK 0x80, > > FLAG_RESP_OK 0x40, > FLAG_RESP_ERR 0x00, > FLAG_RESP_OK_MASK 0x40, > > FLAG_REPORT 0x20, > FLAG_REPORT_MASK 0x20, > > FLAG_VERSION_MASK 0x03 > } > ``` ## Variable ### VERSION protocol version item description **value** **1** **readonly** True > C++ defination code: > ```cpp > const uint8_t VERSION 1 > ``` ### HEADER protocol header item description **readonly** False > C++ defination code: > ```cpp > extern uint32_t HEADER > ``` ## Function ### crc16\\_IBM ```python def crc16_IBM(data: maix.Bytes(bytes)) > int ``` CRC16 IBM item description **param** **data**: data, bytes type.
    **return** CRC16 IBM value, uint16_t type. > C++ defination code: > ```cpp > uint16_t crc16_IBM(const Bytes *data) > ``` ## Class ### MSG protocol msg > C++ defination code: > ```cpp > class MSG > ``` #### version protocol version item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > uint8_t version > ``` #### resp\\_ok Indicate response message type, true means CMD valid and the CMD processed correctly, (only for response msg) item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > uint8_t resp_ok > ``` #### has\\_been\\_replied Flag whether CMD has been processed and responded to CMD sender.\\nE.g. CMD CMD_START_APP will be automatically processed in CommProtocol.get_msg function,\\nso the return msg will set this flag to true. item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > bool has_been_replied{false} > ``` #### cmd CMD value item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > uint8_t cmd > ``` #### is\\_resp message is response or not, contrast with is_req item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > bool is_resp > ``` #### body\\_len Message body length, read only, use set_body() to update item description **type** var **attention** DO NOT manually change this value **static** False **readonly** False > C++ defination code: > ```cpp > int body_len > ``` #### encode\\_resp\\_ok ```python def encode_resp_ok(*args, **kwargs) ``` Encode response ok(success) message item description **type** func **param** **body**: response body, can be null
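A one-line check of `crc16_IBM()`; the payload bytes are arbitrary.

```python
from maix import protocol

data = b"hello"
crc = protocol.crc16_IBM(data)          # CRC16-IBM over the raw bytes
print("CRC16-IBM(%r) = 0x%04X" % (data, crc))
```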
    **return** encoded data, if nullptr, means error, and the error code is err.Err **static** False > C++ definition code: > ```cpp > Bytes *encode_resp_ok(Bytes *body = nullptr) > ``` #### encode\\_report ```python def encode_report(*args, **kwargs) ``` Encode proactively report message item description **type** func **param** **body**: report body, can be null
    **return** encoded data, if nullptr, means error, and the error code is err.Err **static** False > C++ definition code: > ```cpp > Bytes *encode_report(Bytes *body = nullptr) > ``` #### encode\\_resp\\_err ```python def encode_resp_err(*args, **kwargs) ``` Encode response error message item description **type** func **param** **code**: error code
    **msg**: error message
    **return** encoded data, if nullptr, means error, and the error code is err.Err **static** False > C++ definition code: > ```cpp > Bytes *encode_resp_err(err::Err code, const std::string &msg) > ``` #### set\\_body ```python def set_body(self, body_new: maix.Bytes(bytes)) -> None ``` Update message body item description **type** func **param** **body_new**: new body data
    **static** False > C++ definition code: > ```cpp > void set_body(Bytes *body_new) > ``` #### get\\_body ```python def get_body(*args, **kwargs) ``` Get message body item description **type** func **return** message body, bytes type **static** False > C++ definition code: > ```cpp > Bytes *get_body() > ``` ### Protocol Communication protocol > C++ definition code: > ```cpp > class Protocol > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, buff_size: int = 1024, header: int = 3148663466) -> None ``` Construct a new Protocol object item description **type** func **param** **buff_size**: Data queue buffer size
    **static** False > C++ definition code: > ```cpp > Protocol(int buff_size = 1024, uint32_t header = maix::protocol::HEADER) > ``` #### buff\\_size ```python def buff_size(self) -> int ``` Data queue buffer size item description **type** func **static** False > C++ definition code: > ```cpp > int buff_size() > ``` #### push\\_data ```python def push_data(self, new_data: maix.Bytes(bytes)) -> maix.err.Err ``` Add data to data queue item description **type** func **param** **new_data**: new data
    **return** error code, maybe err.Err.ERR_BUFF_FULL **static** False > C++ definition code: > ```cpp > err::Err push_data(const Bytes *new_data) > ``` #### decode ```python def decode(self, new_data: maix.Bytes(bytes) = None) -> MSG ``` Decode data in data queue and return a message item description **type** func **param** **new_data**: new data added to the data queue, if null, only decode.
    **return** decoded message, if nullptr, means no message decoded. **static** False > C++ definition code: > ```cpp > protocol::MSG *decode(const Bytes *new_data = nullptr) > ``` #### encode\\_resp\\_ok ```python def encode_resp_ok(*args, **kwargs) ``` Encode response ok(success) message to buffer item description **type** func **param** **cmd**: CMD value
    **body**: response body, can be null
    **return** encoded data, if nullptr, means error, and the error code is err.Err **static** False > C++ definition code: > ```cpp > Bytes *encode_resp_ok(uint8_t cmd, Bytes *body = nullptr) > ``` #### encode\\_report ```python def encode_report(*args, **kwargs) ``` Encode proactively report message to buffer item description **type** func **param** **cmd**: CMD value
    **body**: report body, can be null
    **return** encoded data, if nullptr, means error, and the error code is err.Err **static** False > C++ definition code: > ```cpp > Bytes *encode_report(uint8_t cmd, Bytes *body = nullptr) > ``` #### encode\\_resp\\_err ```python def encode_resp_err(*args, **kwargs) ``` Encode response error message to buffer item description **type** func **param** **cmd**: CMD value
    **code**: error code
    **msg**: error message
    **return** encoded data, if nullptr, means error, and the error code is err.Err **static** False > C++ definition code: > ```cpp > Bytes *encode_resp_err(uint8_t cmd, err::Err code, const std::string &msg) > ```"},"/maixpy/api/maix/app.html":{"title":"maix.app","content":" title: maix.app maix.app module > You can use `maix.app` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ### app\\_id ```python def app_id() -> str ``` Get current APP ID. item description **return** APP ID. > C++ definition code: > ```cpp > string app_id() > ``` ### set\\_app\\_id ```python def set_app_id(app_id: str) -> str ``` Set current APP ID. item description **param** **app_id**: APP ID.
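Before moving on to `maix.app`, here is a hedged round-trip sketch of the `maix.protocol.Protocol` class documented above; the command value and body are made-up illustrations (a custom CMD must stay below `CMD_APP_MAX`, i.e. 0xC8):

```python
from maix import protocol

CMD_EXAMPLE = 0x01                        # hypothetical application command, must be < CMD_APP_MAX (0xC8)

p = protocol.Protocol(buff_size=1024)     # uses the default HEADER
frame = p.encode_report(CMD_EXAMPLE, b"\x01\x02")  # build a report frame (header, flags, CRC added internally)

# Pretend "frame" just arrived from a UART/TCP peer: feed it to the decoder.
p.push_data(frame)
msg = p.decode()
if msg is not None:
    print("cmd:", msg.cmd, "is_resp:", msg.is_resp, "body:", msg.get_body())
```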
    > C++ definition code: > ```cpp > string set_app_id(const string &app_id) > ``` ### get\\_apps\\_info\\_path ```python def get_apps_info_path() -> str ``` Get APP info file path. > C++ definition code: > ```cpp > string get_apps_info_path() > ``` ### get\\_apps\\_info ```python def get_apps_info(ignore_launcher: bool = False, ignore_app_store: bool = False) -> list[APP_Info] ``` Get APP info list. item description **param** **ignore_launcher**: if true, ignore launcher APP. default false.
    **ignore_app_store**: if true, ignore app store APP. default false.
    **return** APP info list. APP_Info object list. > C++ definition code: > ```cpp > vector<APP_Info> &get_apps_info(bool ignore_launcher = false, bool ignore_app_store = false) > ``` ### get\\_app\\_info ```python def get_app_info(app_id: str) -> APP_Info ``` Get app info by app id. item description **return** app.APP_Info type. > C++ definition code: > ```cpp > app::APP_Info get_app_info(const std::string &app_id) > ``` ### get\\_app\\_data\\_path ```python def get_app_data_path() -> str ``` Get APP data path, APP can store private data in this directory. item description **return** APP data path \"./data\", just returns the data folder in the current path because the APP is executed in the app install path or project path.
    So, you must execute your program in your project path to use the project/data folder when you debug your APP. > C++ definition code: > ```cpp > string get_app_data_path() > ``` ### get\\_app\\_path ```python def get_app_path(app_id: str = '') -> str ``` Get APP path. item description **param** **app_id**: APP ID, if empty, return current APP path, else return the APP path by app_id.
    **return** APP path, just returns the current path because the APP is executed in the app install path or project path.
    So, you must execute your program in your project path to use the project/data folder when you debug your APP. > C++ definition code: > ```cpp > string get_app_path(const string &app_id = \"\") > ``` ### get\\_tmp\\_path ```python def get_tmp_path() -> str ``` Get global temporary data path, APPs can use this path as temporary data directory. item description **return** temporary data path. > C++ definition code: > ```cpp > string get_tmp_path() > ``` ### get\\_share\\_path ```python def get_share_path() -> str ``` Get data path of share, shared data like picture and video will be put in this directory item description **return** share data path. > C++ definition code: > ```cpp > string get_share_path() > ``` ### get\\_picture\\_path ```python def get_picture_path() -> str ``` Get picture path of share, shared pictures will be put in this directory item description **return** share picture path. > C++ definition code: > ```cpp > string get_picture_path() > ``` ### get\\_video\\_path ```python def get_video_path() -> str ``` Get video path of share, shared videos will be put in this directory item description **return** share video path. > C++ definition code: > ```cpp > string get_video_path() > ``` ### get\\_font\\_path ```python def get_font_path() -> str ``` Get font path of share, shared fonts will be put in this directory item description **return** share font path. > C++ definition code: > ```cpp > string get_font_path() > ``` ### get\\_icon\\_path ```python def get_icon_path() -> str ``` Get icon path of share, shared icons will be put in this directory item description **return** share icon path. > C++ definition code: > ```cpp > string get_icon_path() > ``` ### get\\_sys\\_config\\_kv ```python def get_sys_config_kv(item: str, key: str, value: str = '', from_cache: bool = True) -> str ``` Get system config item value. item description **param** **item**: name of setting item, e.g. wifi, language. for more see the settings APP.
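As a small illustration of the path helpers above (the file name is hypothetical, and these calls only make sense on the device):

```python
import os
from maix import app

data_dir = app.get_app_data_path()       # usually "./data" relative to the APP/project directory
tmp_dir = app.get_tmp_path()             # global temporary directory shared by APPs
cfg_file = os.path.join(data_dir, "my_settings.ini")   # hypothetical private data file
print("data dir:", data_dir)
print("tmp dir:", tmp_dir)
print("private config would go to:", cfg_file)
```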
    **key**: config key, e.g. for wifi, key can be ssid, for language, key can be locale.
    **value**: default value, if not found, return this value.
    **from_cache**: if true, read from cache, if false, read from file.
    **return** config value, always string type, if not found, return empty string. > C++ definition code: > ```cpp > string get_sys_config_kv(const string &item, const string &key, const string &value = \"\", bool from_cache = true) > ``` ### get\\_app\\_config\\_kv ```python def get_app_config_kv(item: str, key: str, value: str = '', from_cache: bool = True) -> str ``` Get APP config item value. item description **param** **item**: name of setting item, e.g. user_info
    **key**: config key, e.g. for user_info, key can be name, age etc.
    **value**: default value, if not found, return this value.
    **from_cache**: if true, read from cache, if false, read from file.
    **return** config value, always string type, if not found, return empty string. > C++ definition code: > ```cpp > string get_app_config_kv(const string &item, const string &key, const string &value = \"\", bool from_cache = true) > ``` ### set\\_app\\_config\\_kv ```python def set_app_config_kv(item: str, key: str, value: str, write_file: bool = True) -> maix.err.Err ``` Set APP config item value. item description **param** **item**: name of setting item, e.g. user_info
    **key**: config key, e.g. for user_info, key can be name, age etc.
    **value**: config value, always string type.
    **write_file**: if true, write to file, if false, just write to cache.
    **return** err::Err > C++ definition code: > ```cpp > err::Err set_app_config_kv(const string &item, const string &key, const string &value, bool write_file = true) > ``` ### get\\_app\\_config\\_path ```python def get_app_config_path() -> str ``` Get APP config path, ini format, so you can use your own ini parser to parse it like `configparser` in Python.\\nAll APP config info is recommended to be stored in this file. item description **return** APP config path (ini format). > C++ definition code: > ```cpp > string get_app_config_path() > ``` ### set\\_exit\\_msg ```python def set_exit_msg(code: maix.err.Err, msg: str) -> maix.err.Err ``` Set APP exit code and exit message.\\nIf code != 0, the launcher will show a dialog to the user, and display the msg. item description **param** **code**: exit code, 0 means success, other means error, if code is 0, do nothing.
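A hedged sketch combining the config helpers above; the `wifi`/`ssid` pair comes from the parameter description, while the `user_info`/`name` item is a made-up example:

```python
from maix import app

ssid = app.get_sys_config_kv("wifi", "ssid")          # system setting, empty string if not found
app.set_app_config_kv("user_info", "name", "maix")    # hypothetical APP-private key, persisted to the ini file
name = app.get_app_config_kv("user_info", "name", "unknown")
print("wifi ssid:", ssid)
print("stored name:", name)
print("APP config file:", app.get_app_config_path())
```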
    **msg**: exit message, if code is 0, msg is not used.
    **return** exit code, the same as arg @code. > C++ definition code: > ```cpp > err::Err set_exit_msg(err::Err code, const string &msg) > ``` ### get\\_exit\\_msg ```python def get_exit_msg(cache: bool = False) -> tuple[str, maix.err.Err, str] ``` Get APP exit code and exit message. item description **param** **cache**: if true, read from cache, if false, read from file. default false.
    **return** app_id, exit code and exit message. > C++ definition code: > ```cpp > tuple<string, err::Err, string> get_exit_msg(bool cache = false) > ``` ### have\\_exit\\_msg ```python def have_exit_msg(cache: bool = False) -> bool ``` Check if there is an exit msg item description **param** **cache**: if true, just check from cache, if false, check from file. default false.
    **return** true if there is an exit msg, false if not. > C++ definition code: > ```cpp > bool have_exit_msg(bool cache = false) > ``` ### switch\\_app ```python def switch_app(app_id: str, idx: int = -1, start_param: str = '') -> None ``` Exit this APP and start another APP (by launcher).\\nCalling this API will call set_exit_flag(true), you should check app::need_exit() in your code,\\nand exit this APP if app::need_exit() returns true. item description **param** **app_id**: APP ID which will be started. At least one of app_id and idx must be valid.
    **idx**: APP index. At least one of app_id and idx must be valid.
    **start_param**: string type, will be sent to the app; the app can get this param by `app.get_start_param()`
    **attention** If app id or idx is the same as the current app, do nothing. > C++ definition code: > ```cpp > void switch_app(const string &app_id, int idx = -1, const std::string &start_param = \"\") > ``` ### get\\_start\\_param ```python def get_start_param() -> str ``` Get start param set by caller item description **return** param, string type > C++ definition code: > ```cpp > const std::string get_start_param() > ``` ### need\\_exit ```python def need_exit() -> bool ``` Should this APP exit? item description **return** true if this APP should exit, false if not. **attention** This API is a function, not a variable. > C++ definition code: > ```cpp > bool need_exit() > ``` ### running ```python def running() -> bool ``` Should the APP keep running? The same as !app::need_exit() (not app::need_exit() in MaixPy). item description **return** true if this APP should keep running, false if not. **attention** This API is a function, not a variable. > C++ definition code: > ```cpp > bool running() > ``` ### set\\_exit\\_flag ```python def set_exit_flag(exit: bool) -> None ``` Set exit flag. You can get the exit flag by app.need_exit(). item description **param** **exit**: true if this APP should exit, false if not.
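The exit-flag functions above are what the standard MaixPy main loop is built on; a minimal sketch (the sleep stands in for real work):

```python
from maix import app, time

# Typical MaixPy application main loop: run until the launcher, switch_app()
# or set_exit_flag() asks this APP to exit.
while not app.need_exit():      # equivalent to app.running()
    time.sleep_ms(100)          # placeholder for the APP's real work
print("exit flag set, cleaning up and quitting")
```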
    > C++ defination code: > ```cpp > void set_exit_flag(bool exit) > ``` ## Class ### Version APP version > C++ defination code: > ```cpp > class Version > ``` #### \\_\\_str\\_\\_ ```python def __str__(self) > str ``` Convert to string, e.g. 1.0.0 item description **type** func **static** False > C++ defination code: > ```cpp > std::string __str__() > ``` #### from\\_str ```python def from_str(version_str: str) > Version ``` Convert from string, e.g. \\\"1.0.0\\\" item description **type** func **static** True > C++ defination code: > ```cpp > static app::Version from_str(const string &version_str) > ``` ### APP\\_Info APP info > C++ defination code: > ```cpp > class APP_Info > ``` #### id APP id item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > string id > ``` #### name APP name item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > string name > ``` #### icon APP icon item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > string icon > ``` #### version APP version item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > Version version > ``` #### exec APP exec item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > string exec > ``` #### author APP author item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > string author > ``` #### desc APP desc item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > string desc > ``` #### names APP names item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > map names > ``` #### descs APP descs item description **type** var **static** False **readonly** False > C++ defination code: > ```cpp > map descs > ```"},"/maixpy/api/maix/ext_dev.html":{"title":"maix.ext_dev","content":" title: maix.ext_dev maix.ext_dev module > You can use `maix.ext_dev` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module module brief [imu](./ext_dev/imu.html) maix.ext_dev.imu module [qmi8658](./ext_dev/qmi8658.html) maix.ext_dev.qmi8658 module [tmc2209](./ext_dev/tmc2209.html) maix.ext_dev.tmc2209 module [bm8563](./ext_dev/bm8563.html) maix.ext_dev.bm8563 module ## Enum ## Variable ## Function ## Class"},"/maixpy/api/maix/comm.html":{"title":"maix.comm","content":" title: maix.comm maix.comm module > You can use `maix.comm` to access this module with MaixPy > This module is generated from [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK) ## Module No module ## Enum ## Variable ## Function ### add\\_default\\_comm\\_listener Add default CommProtocol listener.\\nWhen the application uses this port, the listening thread will immediately\\nrelease the port resources and exit. If you need to start the default listening thread again,\\nplease release the default port resources and then call this function. > C++ defination code: > ```cpp > void add_default_comm_listener() > ``` ### rm\\_default\\_comm\\_listener Remove default CommProtocol listener. item description **return** bool type. 
> C++ definition code: > ```cpp > bool rm_default_comm_listener() > ``` ## Class ### CommProtocol Class for communication protocol > C++ definition code: > ```cpp > class CommProtocol > ``` #### \\_\\_init\\_\\_ ```python def __init__(self, buff_size: int = 1024, header: int = 3148663466) -> None ``` Construct a new CommProtocol object item description **type** func **param** **buff_size**: buffer size, default to 1024 bytes
    **static** False > C++ definition code: > ```cpp > CommProtocol(int buff_size = 1024, uint32_t header = maix::protocol::HEADER) > ``` #### get\\_msg ```python def get_msg(self, timeout: int = 0) -> ... ``` Read data to buffer, and try to decode it as a maix.protocol.MSG object item description **type** func **param** **timeout**: unit ms, 0 means return immediately, -1 means block until there is a msg, >0 means block until there is a msg or timeout.
    **return** decoded data, if nullptr, means no valid frame found.
    Attention, delete it after use in C++. **static** False > C++ definition code: > ```cpp > protocol::MSG *get_msg(int timeout = 0) > ``` #### resp\\_ok ```python def resp_ok(self, cmd: int, body: maix.Bytes(bytes) = None) -> maix.err.Err ``` Send response ok(success) message item description **type** func **param** **cmd**: CMD value
    **body**: response body, can be null
    **return** encoded data, if nullptr, means error, and the error code is err.Err.
    Attention, delete it after use in C++. **static** False > C++ definition code: > ```cpp > err::Err resp_ok(uint8_t cmd, Bytes *body = nullptr) > ``` #### report ```python def report(self, cmd: int, body: maix.Bytes(bytes) = None) -> maix.err.Err ``` Send report message item description **type** func **param** **cmd**: CMD value
    **body**: report body, can be null
    **return** encoded data, if nullptr, means error, and the error code is err.Err.
    Attention, delete it after use in C++. **static** False > C++ definition code: > ```cpp > err::Err report(uint8_t cmd, Bytes *body = nullptr) > ``` #### resp\\_err ```python def resp_err(self, cmd: int, code: maix.err.Err, msg: str) -> maix.err.Err ``` Encode response error message to buffer item description **type** func **param** **cmd**: CMD value
    **code**: error code
    **msg**: error message
    **return** encoded data, if nullptr, means error, and the error code is err.Err.
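Tying the CommProtocol methods above together, a hedged echo-server sketch; the command value is a made-up application command (must be below CMD_APP_MAX), and `err.Err.ERR_ARGS` is assumed to exist in `maix.err`:

```python
from maix import comm, app, err

APP_CMD_ECHO = 0x02                      # hypothetical custom command, must be < CMD_APP_MAX (0xC8)

p = comm.CommProtocol(buff_size=1024)
while not app.need_exit():
    msg = p.get_msg(timeout=100)         # wait up to 100 ms for a decoded frame
    if msg is None or msg.is_resp:       # nothing received, or it is a response frame: ignore
        continue
    if msg.cmd == APP_CMD_ECHO:
        p.resp_ok(msg.cmd, msg.get_body())                      # echo the request body back
    else:
        p.resp_err(msg.cmd, err.Err.ERR_ARGS, "unknown cmd")    # assumed error code, see maix.err
```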
    Attentioin, delete it after use in C++. **static** False > C++ defination code: > ```cpp > err::Err resp_err(uint8_t cmd, err::Err code, const std::string &msg) > ```"}} \ No newline at end of file diff --git a/maixpy/static/search_index/index_1.json b/maixpy/static/search_index/index_1.json new file mode 100644 index 00000000..73176c66 --- /dev/null +++ b/maixpy/static/search_index/index_1.json @@ -0,0 +1 @@ +{"/maixpy/doc/zh/basic/maixvision.html":{"title":"MaixVision -- MaixCAM MaixPy 编程 IDE + 图形化积木编程","content":" title: MaixVision MaixCAM MaixPy 编程 IDE + 图形化积木编程 ## 简介 [MaixVision](https://wiki.sipeed.com/maixvision) 是专为 Maix 生态打造的一款开发者编程工具,支持 MaixPy 编程和图形化积木编程,同时支持在线运行和调试,以及实时预览图像,可以同步设备显示屏的图像,方便调试和开发。 以及支持打包应用和安装应用到设备,方便用户一键生成、安装应用。 同时还集成一些方便开发的小工具,比如文件管理,阈值编辑器,二维码生成等等。 ## 下载 访问 [MaixVision 主页](https://wiki.sipeed.com/maixvision) 下载。 ## 使用 MaixPy 编程和在线运行 按照[快速开始](../index.html)的步骤连接设备,我们可以很方便地使用 MaixPy 编程和在线运行。 ## 实时预览图像 MaixPy 提供`display`模块,可以将图像显示到屏幕上,同时,在调用`display`模块的`show`方法时,会将图像发送到 MaixVision 显示,比如代码: ```python from maix import display, camera cam camera.Camera(640, 480) disp display.Display() while 1: disp.show(cam.read()) ``` 这里我们用摄像头读取了图像,然后通过`disp.show()`方法将图像显示到屏幕上,同时也会发送到 MaixVision 显示。 当我们点击了右上角的`暂停`按钮,就会停止发送图像到 MaixVision 显示。 ## 代码自动补全 代码提示依赖电脑本地的 Python 包,为了实现代码提示,我们需要在电脑中安装 Python,并且安装需要提示的 Python 包。 * 安装 Python 请访问 [Python 官网](https://python.org/)安装。 * 安装需要提示的包,比如对于 MaixPy, 你需要在电脑也安装一份 MaixPy 包,在电脑使用`pip install MaixPy`即可安装好,如果`MaixPy`更新了,你也需要在电脑和设备更新到`MaixPy`,电脑手动在终端执行`pip install MaixPy U`即可,设备更新直接在`设置`应用中更新即可。 > 中国国内用户可以使用国内镜像`pip install i https://pypi.tuna.tsinghua.edu.cn/simple MaixPy`。 * 重启 MaixVision 就能够看到代码提示了。 > 如果仍然不能提示,可以手动在设置中设置 python 可执行文件的路径后重启。 >! 注意在电脑安装 Python 包这里只是为了用作代码提示,实际代码运行还是在设备(开发板)上,设备上也要有对应的包才能正常运行。 > 另外,虽然你在电脑上安装了 MaixPy 包,但是由于我们精力有限,我们不确保你能直接在电脑的 Python 导入 maix 包进行使用,请在支持的设备上运行。 ## 计算图像的直方图 在上一步中我们可以在 MaixVision 中实时看到图像,我们用鼠标框选一个区域,图像下方就能看到这个区域的直方图了,选择不同的颜色表示方法,可以看到不同的颜色通道的直方图。 这个功能方便我们在做某些图像处理算法时找到一些合适的参数。 ## 区分`设备文件系统`和`电脑文件系统` 这里我们有一个比较重要的概念需要掌握:**分清楚`设备文件系统`和`电脑文件系统`**。 * **电脑文件系统**:运行在电脑上,在 MaixVision 中打开文件或者工程都是打开的电脑里面的文件,保存也是自动保存到电脑的文件系统。 * **设备文件系统**:程序运行时会将程序发送到设备上运行,所以代码里面使用的文件都是从设备文件系统读取。 所以常见的问题是有同学在电脑上保存了文件`D:\\data\\a.jpg`,然后在设备上使用这个文件`img image.load(\"D:\\data\\a.jpg\")`,这样当然是找不到文件的,因为设备上没有`D:\\data\\a.jpg`这个文件。 具体如何将电脑的文件发送到设备上,参考下面的章节。 ## 传输文件到设备 先连接设备,然后点击浏览设备文件系统的按钮,有两个入口,如下图,然后就能上传文件到设备,或者从设备下载文件到电脑了。 ![maixvision_browser2](../../assets/maixvision_browser2.jpg) ![maixvision_browser](../../assets/maixvision_browser.jpg) .. 
details::也可以用其它工具代替,点击展开 先知道设备的 ip 地址或者设备名称,MaixVision 就可以搜索到, 或者在设备`设置 >系统信息`中看到,比如类似 `maixcam xxxx.local` 或者 `192.168.0.123`。 用户名和密码都是 `root`, 使用 `SFTP` 协议传输文件,端口号是 `22`。 然后不同系统下都有很多好用的软件: ### Windows 下 使用 [WinSCP](https://winscp.net/eng/index.php) 或者 [FileZilla](https://filezilla project.org/) 等工具连接设备,将文件传输到设备上,选择 `SFTP` 协议填写设备和账号信息连接即可。 具体不懂的可以自行搜索。 ### Linux 下 终端使用 `scp` 命令传输文件到设备上,比如: ```bash scp /path/to/your/file.py root@maixcam xxxx.local:/root ``` ### Mac 下 * **方法一**:终端使用 `scp` 命令传输文件到设备上,比如: ```bash scp /path/to/your/file.py root@maixcam xxxx.local:/root ``` * **方法二**:使用 [FileZilla](https://filezilla project.org/) 等工具连接设备,将文件传输到设备上,选择 `SFTP` 协议填写设备和账号信息连接即可。 ## 使用图形化积木编程 开发中,敬请期待。"},"/maixpy/doc/zh/basic/view_src_code.html":{"title":"MaixCAM MaixPy 如何找到 MaixPy API 对应的源码","content":" title: MaixCAM MaixPy 如何找到 MaixPy API 对应的源码 ## 简介 MaixPy 是基于 Python 实现,有部分函数是用 Python 编写,大多数底层代码都是使用 C/C++ 编写,这样可以保证运行效率。 如果我们在使用一个函数遇到疑问,我们可以查询本文档,以及 API 文档。 如果仍然不能解决你的疑惑,那么可以直接按照本文的方法找到底层实现的源码找出答案,**也欢迎一起贡献文档或代码,成为 MaixPy 开发者的一员**! ## 先看文档 一定要先看文档: [https://wiki.sipeed.com/maixpy/](https://wiki.sipeed.com/maixpy/), 然后看 API 文档:[https://wiki.sipeed.com/maixpy/api/index.html](https://wiki.sipeed.com/maixpy/api/index.html) API 文档只有英文,原因是 API 文档是从代码的注释生成而来,代码中一律使用英文,看不懂英文可以使用翻译。 ## 如何找到 API 对应的源码 首先有两个开源仓库,分别是 [MaixPy](https://github.com/sipeed/MaixPy) 和 [MaixCDK](https://github.com/sipeed/MaixCDK)。 MaixPy 是工程仓库,里面包含了 MaixPy 的部分源码,所有文档、例程;MaixCDK 包含了大多数 MaixPy API 的底层 C/C++ 实现。 我们可以把这两份代码下载下来,也可以直接在网页查看。 **顺便记得给它们点一个 star 让更多人看到哦~** ### 找到 C/C++ 编写的 API 现在假设我们要找到 `maix.image.Image.find_blobs` 函数为例, 首先我们尝试手动去找: * 因为这是属于视觉相关的 API, 我们在 [MaixCDK](https://github.com/sipeed/MaixCDK) 的`components/vision/include` 下面可以看到有一个 `maix_image.hpp`的头文件,猜测大概在这里面。 * 在`maix_image.hpp` 搜索 `find_blobs`,马上就发现了函数声明: ```c++ std::vector find_blobs(std::vector> thresholds std::vector>(), bool invert false, std::vector roi std::vector(), int x_stride 2, int y_stride 1, int area_threshold 10, int pixels_threshold 10, bool merge false, int margin 0, int x_hist_bins_max 0, int y_hist_bins_max 0); ``` * 同时我们发现函数声明前面有注释,API 文档即从这份注释自动生成而来,如果你仔细对比 API 文档和这个注释会发现他们一模一样的,改动这个注释编译后会产生 API 文档。 * 这只是函数声明,我们找到`components/vision/src/maix_image.cpp`,发现里面没有这个函数,仔细一看有个`components/vision/src/maix_image_find_blobs.cpp`,原来是将函数单独写了一个`cpp`,在里面我们就能看到函数的源代码了。 ### 找到使用 Pybind11 编写的 API 如果 MaixCDK 里面找不到,那就可以到 [MaixPy/components](https://github.com/sipeed/MaixPy/tree/main/components)里面寻找。 > 上面的代码你会发现,我们在使用`find_blobs`时第一个参数是`[[...]]`这样的参数即`list`类型,C/C++ 定义第一个参数是`std::vector>`类型,原因是我们使用了`pybind11`自动将 `std::vector` 类型转换为了`list`类型。 而有一些类型在`MaixCDK`里面不方便定义,比如`numpy`的`array`类型,但是`pybind11`里面有相关的定义方便我们直接使用,但是又不想 MaixCDK 里面有 pybind11 相关的代码,所以我们在[MaixPy/components](https://github.com/sipeed/MaixPy/tree/main/components) 里面来写使用了 `pybind11` 相关的代码,比如`maix.image.image2cv`方法。 ## 如何修改代码 在找到代码后,直接修改,然后按照[编译文档](../source_code/build.html)编译出固件即可。 ## 如何增加代码 照抄其它 API,写一个函数,然后添加完整的注释,注释中额外添加一个`@maixpy maix.xxx.xxx`,这里`xxx`即你想添加到的模块和`API`名,然后编译出固件即可。 可以参考[MaixCDK/components/basic/includemaix_api_example.hpp](https://github.com/sipeed/MaixCDK/blob/master/components/basic/include/maix_api_example.hpp)。 API 参数和返回值用基础的`C++` 类型会自动转换为`Python`的类型,是不是十分简单. 具体的类型转换参考[pybind11 类型自动转换列表](https://pybind11.readthedocs.io/en/stable/advanced/cast/overview.html#conversion table) 比如我们希望增加一个`maix.my_module.my_func`,在`MaixCDK`中合适的地方(最好符合现在的文件夹分类)创建一个头文件,然后添加代码: ```cpp namespace maix::my_module { /** * My function, add two integer. 
* @param a arg a, int type * @param b arg b, int type * @return int type, will a + b * @maixpy maix.my_module.my_func */ int my_func(int a, int b); } ``` 然后增加一个`cpp`文件: ```cpp int my_func(int a, int b) { return a + b; } ``` 然后编译 MaixPy 生成`whl`文件,安装到设备即可使用`maix.my_module.my_func`函数。 ## 如何贡献代码 如果你发现 MaixPy 有未完成的 API, 或者有 bug, 欢迎修改后提交 PR(Pull Request)到 MaixPy 仓库,具体提交方法看 [贡献文档和代码](../source_code/contribute.html)"},"/maixpy/doc/zh/basic/python.html":{"title":"Python 基础知识","content":" title: Python 基础知识 MaixPy 的教程文档里面就不涉及具体的 Python 语法教程了,因为 Python 的教程实在是太多了,都做得很好,这里只介绍需要学什么,方向和线路指导即可。 ## Python 简介 Python 是一门解释性、面向对象、动态类型的高级编程语言。 * 解释性:不需要编译,直接运行,优点是开发快速,缺点是因为每次运行都要解释一遍代码,运行速度慢一点点,但是往往瓶颈还是开发者写的代码而不是语言本身。 * 面向对象:支持面向对象编程,可以定义类和对象,相比面向过程语言,更容易组织代码。更多自行搜索。 * 动态类型:变量不需要声明类型,可以直接赋值,类型会根据赋值自动确定,这样可以减少代码量,但是也容易出现类型错误,需要开发者自己注意。 总之,对于没有接触过 Python 的开发者来说,Python 非常容易上手,有大量现成的库,开发者群体巨大,开发应用周期短,非常值得学习! ## Python 环境安装 你可以按照你学习的 Python 教程在电脑上安装 Python; 也可以在 MaixVisioin 上连接设备后使用 MaixVision 编程然后在开发板运行。 ## 使用 MaixPy 需要的 Python 基础有哪些? * Python 的基本概念。 * 面向对象编程的基本概念。 * Python 的基本语法,包括: * tab 缩进对齐语法 * 变量、函数、类、对象、注释等 * 控制语句比如 if、for、while 等等 * 模块和导入模块 * 基本数据类型比如 int、float、str、list、dict、tuple 等等 * bytes 和 str 的区别和转换 * 异常处理,try except * 常用的内置函数,比如 print、open、len、range 等等 * 常用的内置模块,比如 os、sys、time、random、math 等等 掌握以上的基础知识就可以顺畅使用 MaixPy 编程了,配合后面的教程和例程,在不懂的时候查询搜索引擎或者官方文档,或者问 ChatGPT 就能顺利完成开发。 ## 对于已经有一门面向对象编程语言经验的开发者 如果你已经会一门面向对象语言比如 C++/Java/C# 等等,那只需要快速浏览一下 Python 的语法,就可以开始使用了。 比如 [菜鸟教程](https://www.runoob.com/python3/python3 tutorial.html) 或者 [Python 官方教程](https://docs.python.org/3/tutorial/index.html)。 或者个人开发者的博客,比如 [哇!是 Python](https://neucrack.com/p/59)。 ## 对于没有面向对象编程经验但是有 C 语言经验的开发者 如果只学了 C,缺乏对面向对象的理解,那么可以先学习一下面向对象的概念,然后再学习 Python,也是比较快的,可以自行搜索视频教程入门。 跟着视频教程入门之后可以看看文档教程,比如 [菜鸟教程](https://www.runoob.com/python3/python3 tutorial.html) 或者 [Python 官方教程](https://docs.python.org/3/tutorial/index.html) 就可以开动了! 在学了入门知识后,就可以按照 MaixPy 的文档和例程开始使用 MaixPy 编程了。 ## 对于编程新手 如果你从未接触过编程,那么你需要重头开始学习 Python,Python 作为入门语言也是比较合适的,具体可以搜一搜视频教程。 在学会了基础语法后,就能按照例程使用 MaixPy 编程了。"},"/maixpy/doc/zh/audio/recognize.html":{"title":"MaixCAM MaixPy 语音实时识别","content":" title: MaixCAM MaixPy 语音实时识别 update: date: 2024 10 08 author: 916BGAI version: 1.0.0 content: 初版文档 ## 简介 `MaixCAM` 移植了 `Maix Speech` 离线语音库,实现了连续中文数字识别、关键词识别以及大词汇量语音识别功能。支持 `PCM` 和 `WAV` 格式的音频识别,且可通过板载麦克风进行输入识别。 ## Maix Speech [`Maix Speech`](https://github.com/sipeed/Maix Speech) 是专为嵌入式环境设计的离线语音库,其针对语音识别算法进行了深度优化,在内存占用上达到了数量级上的领先,并且保持了优良的WER。如果想了解原理可查看该开源项目。 ## 连续大词汇量语音识别 ```python from maix import app, nn speech nn.Speech(\"/root/models/am_3332_192_int8.mud\") speech.init(nn.SpeechDevice.DEVICE_MIC, \"hw:0,0\") def callback(data: tuple[str, str], len: int): print(data) lmS_path \"/root/models/lmS/\" speech.lvcsr(lmS_path + \"lg_6m.sfst\", lmS_path + \"lg_6m.sym\", \\ lmS_path + \"phones.bin\", lmS_path + \"words_utf.bin\", \\ callback) while not app.need_exit(): frames speech.run(1) if frames < 1: print(\"run out\\n\") speech.deinit() break ``` ### 使用方法 1. 导入 `app` 和 `nn` 模块 ```python from maix import app, nn ``` 2. 加载声学模型 ```python speech nn.Speech(\"/root/models/am_3332_192_int8.mud\") ``` 也可以加载 `am_7332` 声学模型,模型越大精度越高但是消耗的资源也越大 3. 
选择对应的音频设备 ```python speech.init(nn.SpeechDevice.DEVICE_MIC, \"hw:0,0\") ``` 这里使用的是板载的麦克风,也选择 `WAV` 和 `PCM` 音频作为输入设备 ```python speech.init(nn.SpeechDevice.DEVICE_WAV, \"path/audio.wav\") # 使用 WAV 音频输入 ``` ```python speech.init(nn.SpeechDevice.DEVICE_PCM, \"path/audio.pcm\") # 使用 PCM 音频输入 ``` 注意 `WAV` 需要是 `16KHz` 采样,`S16_LE` 存储格式,可以使用 `arecord` 工具转换 ```shell arecord d 5 r 16000 c 1 f S16_LE audio.wav ``` 在 `PCM/WAV` 识别时,如果想要重新设置数据源,例如进行下一个WAV文件的识别可以使用 `speech.devive` 方法,内部会自动进行缓存清除操作: ```python speech.devive(nn.SpeechDevice.DEVICE_WAV, \"path/next.wav\") ``` 4. 设置解码器 ```python def callback(data: tuple[str, str], len: int): print(data) lmS_path \"/root/models/lmS/\" speech.lvcsr(lmS_path + \"lg_6m.sfst\", lmS_path + \"lg_6m.sym\", \\ lmS_path + \"phones.bin\", lmS_path + \"words_utf.bin\", \\ callback) ``` 用户可以注册若干个解码器(也可以不注册),解码器的作用是解码声学模型的结果,并执行对应的用户回调。这里注册了一个 `lvcsr` 解码器用于输出连续语音识别结果(小于1024个汉字结果)。对于其他解码器的使用可以查看连续中文数字识别和关键词识别部分 设置 `lvcsr` 解码器时需要设置 `sfst` 文件路径,`sym` 文件路径(输出符号表),`phones.bin` 的路径(拼音表),和 `words.bin` 的路径(词典表)。最后还要设置一个回调函数用于处理解码出的数据。 在注册完解码器后需要使用 `speech.deinit()` 方法清除初始化 5. 识别 ```python while not app.need_exit(): frames speech.run(1) if frames < 1: print(\"run out\\n\") speech.deinit() break ``` 使用 `speech.run` 方法运行语音识别,传入的参数为每次运行的帧数,返回实际运行的帧数。用户可以选择每次运行1帧后进行其他处理,或在一个线程中持续运行,使用外部线程进行停止。 ### 识别结果 如果上述程序运行正常,对板载麦克风说话,会得到实时语言识别结果,如: ```shell ### SIL to clear decoder! ('今天天气 怎么样 ', 'jin1 tian1 tian1 qi4 zen3 me yang4 ') ```"},"/maixpy/doc/zh/audio/record.html":{"title":"MaixCAM MaixPy 录音","content":" title: MaixCAM MaixPy 录音 update: date: 2024 05 20 author: lxowalle version: 1.0.0 content: 初版文档 ## 简介 本文档提供录音的使用方法,支持录入`PCM`和`WAV`格式的音频。 `MaixCAM`板载了麦克风,所以你可以直接使用录音功能。 ### 使用方法 #### 获取`PCM`数据 当构造`Recorder`对象时不传入`path`, 则只会录入音频后不会保存到文件中,当然你可以手动保存到文件。 ```python from maix import audio, time, app r audio.Recorder() r.volume(12) print(\"sample_rate:{} format:{} channel:{}\".format(r.sample_rate(), r.format(), r.channel())) while not app.need_exit(): data r.record() print(\"data size\", len(data)) time.sleep_ms(10) print(\"record finish!\") ``` 步骤: 1. 导入audio、time和app模块 ```python from maix import audio, time, app ``` 2. 初始化录制器 ```python r audio.Recorder() r.volume(12) ``` 注意默认的采样率是48k,采样格式为小端格式 有符号16位,采样通道为1。你也可以像这样自定义参数`p audio.Recorder(sample_rate 48000, format audio.Format.FMT_S16_LE, channel 1)`。目前只测试过采样率48000,`FMT_S16_LE`格式,和采样通道数为1 `r.volume(12)`用来设置音量,音量范围为[0,100] 3. 开始录制 ```python data r.record() ``` `data`是`PCM`格式的`bytes`类型数据,保存了当前录入的音频。`PCM`格式在初始化`Recorder`对象时设置,见步骤2。注意如果录制太快,音频缓冲区没有数据, 则有可能返回一个空的`bytes`数据。 4. 
完成,做自己的应用时可以对`r.record()`返回的`PCM`数据做语音处理。 #### 录制音频并保存为`WAV`格式 当构造`Recorder`对象时传入了`path`, 则录入的音频将会保存到`path`文件中,并且你也可以通过`record`方法获取当前录入的`PCM`数据。`path`只支持`.pcm`和`.wav`后缀的路径,并且当录入`.wav`时,`record`方法不会返回`WAV`头部信息,只会返回`PCM`数据。 ```python from maix import audio, time, app r audio.Recorder(\"/root/output.wav\") r.volume(12) print(\"sample_rate:{} format:{} channel:{}\".format(r.sample_rate(), r.format(), r.channel())) while not app.need_exit(): data r.record() print(\"data size\", len(data)) time.sleep_ms(10) print(\"record finish!\") ``` 代码含义基本同上。 #### 录制音频并保存为`WAV`格式(阻塞) 录入时如果设置了`record_ms`参数,录入音频会阻塞直到到达`record_ms`设置的时间,单位ms。 ```python from maix import audio, time, app r audio.Recorder(\"/root/output.wav\") r.volume(12) print(\"sample_rate:{} format:{} channel:{}\".format(r.sample_rate(), r.format(), r.channel())) r.record(5000) print(\"record finish!\") ``` 上面示例将会持续录入`5000`ms,并保存为`WAV`格式,录入期间将会阻塞在`record`方法中,注意当`record`设置了`record_ms`后不会返回`PCM`数据。 ### 其他 `Player`和`Recorder`模块有些`bug`待解决,请保证它们在其他模块(`Camera`模块,`Display`模块等)之前创建。例如: ```python # 先创建Player和Recorder p audio.Player() r audio.Recorder() # 再创建Camera c camera.Camera() ```"},"/maixpy/doc/zh/audio/synthesis.html":{"title":"MaixCAM MaixPy 语音合成","content":" title: MaixCAM MaixPy 语音合成 TODO: 正在赶来"},"/maixpy/doc/zh/audio/digit.html":{"title":"MaixCAM MaixPy 连续中文数字识别","content":" title: MaixCAM MaixPy 连续中文数字识别 update: date: 2024 10 08 author: 916BGAI version: 1.0.0 content: 初版文档 ## 简介 `MaixCAM` 移植了 `Maix Speech` 离线语音库,实现了连续中文数字识别、关键词识别以及大词汇量语音识别功能。支持 `PCM` 和 `WAV` 格式的音频识别,且可通过板载麦克风进行输入识别。 ## Maix Speech [`Maix Speech`](https://github.com/sipeed/Maix Speech) 是专为嵌入式环境设计的离线语音库,其针对语音识别算法进行了深度优化,在内存占用上达到了数量级上的领先,并且保持了优良的WER。如果想了解原理可查看该开源项目。 ## 连续中文数字识别 ```python from maix import app, nn speech nn.Speech(\"/root/models/am_3332_192_int8.mud\") speech.init(nn.SpeechDevice.DEVICE_MIC, \"hw:0,0\") def callback(data: str, len: int): print(data) speech.digit(640, callback) while not app.need_exit(): frames speech.run(1) if frames < 1: print(\"run out\\n\") speech.deinit() break ``` ### 使用方法 1. 导入 `app` 和 `nn` 模块 ```python from maix import app, nn ``` 2. 加载声学模型 ```python speech nn.Speech(\"/root/models/am_3332_192_int8.mud\") ``` 也可以加载 `am_7332` 声学模型,模型越大精度越高但是消耗的资源也越大 3. 选择对应的音频设备 ```python speech.init(nn.SpeechDevice.DEVICE_MIC, \"hw:0,0\") ``` 这里使用的是板载的麦克风,也选择 `WAV` 和 `PCM` 音频作为输入设备 ```python speech.init(nn.SpeechDevice.DEVICE_WAV, \"path/audio.wav\") # 使用 WAV 音频输入 ``` ```python speech.init(nn.SpeechDevice.DEVICE_PCM, \"path/audio.pcm\") # 使用 PCM 音频输入 ``` 注意 `WAV` 需要是 `16KHz` 采样,`S16_LE` 存储格式,可以使用 `arecord` 工具转换 ```shell arecord d 5 r 16000 c 1 f S16_LE audio.wav ``` 在 `PCM/WAV` 识别时,如果想要重新设置数据源,例如进行下一个WAV文件的识别可以使用 `speech.devive` 方法,内部会自动进行缓存清除操作: ```python speech.devive(nn.SpeechDevice.DEVICE_WAV, \"path/next.wav\") ``` 4. 设置解码器 ```python def callback(data: str, len: int): print(data) speech.digit(640, callback) ``` 用户可以注册若干个解码器(也可以不注册),解码器的作用是解码声学模型的结果,并执行对应的用户回调。这里注册了一个 `digit` 解码器用于输出最近4s内的中文数字识别结果。返回的识别结果为字符串形式,支持 `0123456789 .(点) S(十) B(百) Q(千) W(万)`。对于其他解码器的使用可以查看语音实时识别和关键词识别部分 设置 `digit` 解码器时需要设置 `blank` 值,超过该值(ms)则在输出结果里插入一个 `_` 表示空闲静音 在注册完解码器后需要使用 `speech.deinit()` 方法清除初始化 5. 
识别 ```python while not app.need_exit(): frames speech.run(1) if frames < 1: print(\"run out\\n\") speech.deinit() break ``` 使用 `speech.run` 方法运行语音识别,传入的参数为每次运行的帧数,返回实际运行的帧数。用户可以选择每次运行1帧后进行其他处理,或在一个线程中持续运行,使用外部线程进行停止。 ### 识别结果 如果上述程序运行正常,对板载麦克风说话,会得到连续中文数字识别结果,如: ```shell _0123456789 ```"},"/maixpy/doc/zh/audio/keyword.html":{"title":"MaixCAM MaixPy 关键词识别","content":" title: MaixCAM MaixPy 关键词识别 update: date: 2024 10 08 author: 916BGAI version: 1.0.0 content: 初版文档 ## 简介 `MaixCAM` 移植了 `Maix Speech` 离线语音库,实现了连续中文数字识别、关键词识别以及大词汇量语音识别功能。支持 `PCM` 和 `WAV` 格式的音频识别,且可通过板载麦克风进行输入识别。 ## Maix Speech [`Maix Speech`](https://github.com/sipeed/Maix Speech) 是专为嵌入式环境设计的离线语音库,其针对语音识别算法进行了深度优化,在内存占用上达到了数量级上的领先,并且保持了优良的WER。如果想了解原理可查看该开源项目。 ## 关键词识别 ```python from maix import app, nn speech nn.Speech(\"/root/models/am_3332_192_int8.mud\") speech.init(nn.SpeechDevice.DEVICE_MIC, \"hw:0,0\") kw_tbl ['xiao3 ai4 tong2 xue2', 'ni3 hao3', 'tian1 qi4 zen3 me yang4'] kw_gate [0.1, 0.1, 0.1] def callback(data:list[float], len: int): for i in range(len): print(f\"\\tkw{i}: {data[i]:.3f};\", end ' ') print(\"\\n\") speech.kws(kw_tbl, kw_gate, callback, True) while not app.need_exit(): frames speech.run(1) if frames < 1: print(\"run out\\n\") speech.deinit() break ``` ### 使用方法 1. 导入 `app` 和 `nn` 模块 ```python from maix import app, nn ``` 2. 加载声学模型 ```python speech nn.Speech(\"/root/models/am_3332_192_int8.mud\") ``` 也可以加载 `am_7332` 声学模型,模型越大精度越高但是消耗的资源也越大 3. 选择对应的音频设备 ```python speech.init(nn.SpeechDevice.DEVICE_MIC, \"hw:0,0\") ``` 这里使用的是板载的麦克风,也选择 `WAV` 和 `PCM` 音频作为输入设备 ```python speech.init(nn.SpeechDevice.DEVICE_WAV, \"path/audio.wav\") # 使用 WAV 音频输入 ``` ```python speech.init(nn.SpeechDevice.DEVICE_PCM, \"path/audio.pcm\") # 使用 PCM 音频输入 ``` 注意 `WAV` 需要是 `16KHz` 采样,`S16_LE` 存储格式,可以使用 `arecord` 工具转换 ```shell arecord d 5 r 16000 c 1 f S16_LE audio.wav ``` 在 `PCM/WAV` 识别时,如果想要重新设置数据源,例如进行下一个WAV文件的识别可以使用 `speech.devive` 方法,内部会自动进行缓存清除操作: ```python speech.devive(nn.SpeechDevice.DEVICE_WAV, \"path/next.wav\") ``` 4. 设置解码器 ```python kw_tbl ['xiao3 ai4 tong2 xue2', 'ni3 hao3', 'tian1 qi4 zen3 me yang4'] kw_gate [0.1, 0.1, 0.1] def callback(data:list[float], len: int): for i in range(len): print(f\"\\tkw{i}: {data[i]:.3f};\", end ' ') print(\"\\n\") speech.kws(kw_tbl, kw_gate, callback, True) ``` 用户可以注册若干个解码器(也可以不注册),解码器的作用是解码声学模型的结果,并执行对应的用户回调。这里注册了一个 `kws` 解码器用于输出最近一帧所有注册的关键词的概率列表,用户可以观察概率值,自行设定阈值进行唤醒。对于其他解码器的使用可以查看语音实时识别和连续中文数字识别部分 设置 `kws` 解码器时需要设置 `关键词列表`,以拼音间隔空格填写,`关键词概率门限表`,按顺序排列输入即可,是否进行 `自动近音处理`,设置为 `True` 则会自动将不同声调的拼音作为近音词来合计概率。最后还要设置一个回调函数用于处理解码出的数据。 用户还可以使用 `speech.similar` 方法手工注册近音词,每个拼音可以注册最多 `10` 个近音词。(注意,使用该接口注册近音词会覆盖使能 `自动近音处理` 里自动生成的近音表) ```python similar_char ['zhen3', 'zheng3'] speech.similar('zen3', similar_char) ``` 在注册完解码器后需要使用 `speech.deinit()` 方法清除初始化 5. 
识别 ```python while not app.need_exit(): frames speech.run(1) if frames < 1: print(\"run out\\n\") speech.deinit() break ``` 使用 `speech.run` 方法运行语音识别,传入的参数为每次运行的帧数,返回实际运行的帧数。用户可以选择每次运行1帧后进行其他处理,或在一个线程中持续运行,使用外部线程进行停止。 ### 识别结果 如果上述程序运行正常,对板载麦克风说话,会得到关键词识别结果,如: ```shell kws log 2.048s, len 24 decoder_kws_init get 3 kws 00, xiao3 ai4 tong2 xue2 01, ni3 hao3 02, tian1 qi4 zen3 me yang4 find shared memory(491520), saved:491520 kw0: 0.959; \tkw1: 0.000; \tkw2: 0.000; # 小爱同学 kw0: 0.000; \tkw1: 0.930; \tkw2: 0.000; # 你好 kw0: 0.000; \tkw1: 0.000; \tkw2: 0.961; # 天气怎么样 ```"},"/maixpy/doc/zh/audio/play.html":{"title":"MaixCAM MaixPy 播放音频","content":" title: MaixCAM MaixPy 播放音频 update: date: 2024 05 20 author: lxowalle version: 1.0.0 content: 初版文档 ## 简介 本文档提供播放音频的使用方法 ## 使用方法 ### 硬件操作 ![image 20240520134637905](../../../static/image/maixcam_hardware_back.png) `MaixCAM`没有内置喇叭,因此需要自行焊接一个功率在`1W`内的喇叭。喇叭焊接的引脚见上图的Speaker对应的`VOP`和`VON`脚。 注:如果`MaixCAM`在这两个脚上连接了铜柱,则可以直接焊接在铜柱上,为了美观也可以焊接在板子的另一面。 ### 编写代码 #### 播放一个`WAV`文件 ```python from maix import audio, time, app p audio.Player(\"/root/output.wav\") p.play() while not app.need_exit(): time.sleep_ms(10) print(\"play finish!\") ``` 步骤: 1. 导入audio、time和app模块 ```python from maix import audio, time, app ``` 2. 初始化播放器 ```python p audio.Player(\"/root/output.wav\") ``` 默认的采样率是48k,采样格式为小端格式 有符号16位,采样通道为1。你也可以像这样自定义参数`p audio.Player(sample_rate 48000, format audio.Format.FMT_S16_LE, channel 1)`。目前只测试过采样率48000,`FMT_S16_LE`格式,和采样通道数为1。 如果是`.wav`文件,则会自动获取采样率、采样格式和采样通道。 3. 播放音频 ```python p.play() ``` 该将会阻塞直到写入所有音频数据,但不会阻塞到实际播放完所有音频数据。如果调用`play()`后退出了程序,则部分待播放的音频数据可能会丢失。 4. 完成 #### 用`PCM`数据播放 ```python from maix import audio, time, app p audio.Player() with open('/root/output.pcm', 'rb') as f: ctx f.read() p.play(bytes(ctx)) while not app.need_exit(): time.sleep_ms(10) print(\"play finish!\") ``` 步骤: 1. 导入audio、time和app模块 ```python from maix import audio, time, app ``` 2. 初始化播放器 ```python p audio.Player() ``` 注意默认的采样率是48k,采样格式为小端格式 有符号16位,采样通道为1。你也可以像这样自定义参数`p audio.Player(sample_rate 48000, format audio.Format.FMT_S16_LE, channel 1)`。目前只测试过采样率48000,`FMT_S16_LE`格式,和采样通道数为1 3. 打开并播放一个PCM文件 ```python with open('/root/output.pcm', 'rb') as f: ctx f.read() p.play(bytes(ctx)) while not app.need_exit(): time.sleep_ms(10) ``` `with open('xxx','rb') as f:`打开文件`xxx`, 并获取文件对象`f` `ctx f.read()`将读取文件的内容到`ctx`中 `p.play(bytes(ctx))`播放音频,`p`是已打开的播放器对象, `ctx`是转换为bytes类型的`PCM`数据 `time.sleep_ms(10)`这里有一个循环来等待播放完成,因为播放操作是异步执行的,如果提前退出了程序,那么可能导致音频不会完全播放。 4. 
完成 ### 其他 `Player`和`Recorder`模块有些`bug`待解决,请保证它们在其他模块(`Camera`模块,`Display`模块等)之前创建。例如: ```python # 先创建Player和Recorder p audio.Player() r audio.Recorder() # 再创建Camera c camera.Camera()\t\t\t\t\t\t ```"},"/maixpy/doc/zh/audio/ai_classify.html":{"title":"MaixCAM MaixPy AI 声音分类","content":" title: MaixCAM MaixPy AI 声音分类 TODO: 待完成,如果你急需,可以先自行移植模型,或者先将声音用 FFT 处理成瀑布图,再以图片的方式进行训练 AI 分类识别。"},"/maixpy/doc/zh/peripheral/pwm.html":{"title":"MaixCAM MaixPy 使用 PWM","content":" title: MaixCAM MaixPy 使用 PWM ## 简介 在 MaixPy (v4) 中使用 `PWM`,先使用`pinmap`设置引脚的功能为 `PWM`,在使用。 以及每个 `PWM` 有对应的引脚,根据 MaixCAM 的引脚图可以看到: ![](https://wiki.sipeed.com/hardware/zh/lichee/assets/RV_Nano/intro/RV_Nano_3.jpg) ![maixcam_pro_io](/static/image/maixcam_pro_io.png) 这里我们推荐使用`PWM6` 和 `PWM7`。 对于 `MaixCAM` 因为`WiFi` 使用了`SDIO1`的所有引脚,所以`PWM4~9`只能和`WiFi`二选一使用。 > TODO: 提供禁用 WiFi 的方法(需要系统里面禁用掉 WiFi 驱动,比较复杂) ## MaixPy 使用 PWM 控制舵机 这里我们以控制舵机为例, 使用`MaixCAM`的`PWM7`和`A19`引脚: ```python from maix import pwm, time, pinmap SERVO_PERIOD 50 # 50Hz 20ms SERVO_MIN_DUTY 2.5 # 2.5% > 0.5ms SERVO_MAX_DUTY 12.5 # 12.5% > 2.5ms # Use PWM7 pwm_id 7 # !! set pinmap to use PWM7 pinmap.set_pin_function(\"A19\", \"PWM7\") def angle_to_duty(percent): return (SERVO_MAX_DUTY SERVO_MIN_DUTY) * percent / 100.0 + SERVO_MIN_DUTY out pwm.PWM(pwm_id, freq SERVO_PERIOD, duty angle_to_duty(0), enable True) for i in range(100): out.duty(angle_to_duty(i)) time.sleep_ms(100) for i in range(100): out.duty(angle_to_duty(100 i)) time.sleep_ms(100) ``` 这里的功能是控制舵机从最小角度旋转到最大角度再旋转回最小角度。"},"/maixpy/doc/zh/peripheral/wdt.html":{"title":"MaixCAM MaixPy 使用看门狗定时器","content":" title: MaixCAM MaixPy 使用看门狗定时器 ## 简介 为了防止程序出现问题,常常会用到看门狗定时器(WDT), 在程序出问题时自动重启系统。 原理就是有一个倒计时计数器,我们需要在程序的逻辑中定期地去设置这个倒计时时间(也叫喂狗),如果我们的程序在哪儿卡住了导致没有定期去设置倒计时,倒计时到 0 后硬件就会出发系统重启。 ## MaixPy 中使用 WDT ```python from maix import wdt, app, time w wdt.WDT(0, 1000) while not app.need_exit(): w.feed() # here sleep op is our operation # 200 ms is normal, if > 1000ms will cause system reset time.sleep_ms(200) ```"},"/maixpy/doc/zh/peripheral/uart.html":{"title":"MaixCAM MaixPy UART 串口使用介绍","content":" title: MaixCAM MaixPy UART 串口使用介绍 ## 串口简介 串口是一种通信方式,包含了硬件和通信协议的定义。 * 硬件包括: * 3 个引脚: `GND`, `RX`, `TX`,通信双发**交叉连接** `RX` `TX`, 即一方 `TX` 发送到另一方的 `RX`, 双方 `GND` 连接到一起。 * 控制器,一般在芯片内部,也叫 `UART` 外设,一般一个芯片有一个或者多个 `UART` 控制器,每个控制器有相对应的引脚。 * 串口通信协议: 为了让双方能顺利通信,规定了一套协议,即以什么样的时序通信,具体可以自行学习,常见的参数有 波特率 校验位等,波特率是我们用得最多的参数。 通过板子的串口,可以和其它单片机或者 SOC 进行数据通信,比如可以在 MaixCAM 上实现人体检测功能,检测到坐标后通过串口发送给 STM32/Arduino 单片机。 ## MaixPy 中使用串口 对于 MaixCAM 默认从 USB 口引出了一个串口,可以插上配套的 Type C 转接小板,就能直接使用上面的串口引脚, 也可以不用转接板,直接使用板子上的 `A16(TX)` 和 `A17(RX)`引脚, 和 USB 口引出的是同样的引脚,是等效的,具体看接口图: ![](https://wiki.sipeed.com/hardware/zh/lichee/assets/RV_Nano/intro/RV_Nano_3.jpg) ![maixcam_pro_io](/static/image/maixcam_pro_io.png) 对于 MaixCAM 使用 USB 引出的串口时需要**注意**,Typc C 正插和反插,转接小板上的 `RX` 和 `TX`会交换(默认 **Type C 母口朝前**和丝印符合),所以当你发现无法通信时,有可能就是 RX TX 反了,可以尝试将 Type C 翻转一面插再看看通信是否正常。这个算是设计缺陷,不过一般也不会经常拔插所以适应一下也能接受。 将两个通信的板子双方连接好后(通信双发交叉连接 RX TX, 即一方 TX 发送到另一方的 RX, 双方 GND 连接到一起),就可以使用软件了。 通过 MaixPy 使用串口很简单: ```python from maix import uart device \"/dev/ttyS0\" # ports uart.list_devices() # 列出当前可用的串口 serial uart.UART(device, 115200) serial.write_str(\"hello world\") print(\"received:\", serial.read(timeout 2000)) ``` 这里使用了第一个串口`/dev/ttyS0`,也就是上面说的 `Type C` 出 引出的串口。 更多串口的 API 在 [UART API 文档](../../../api/maix/peripheral/uart.html)。 ## MaixCAM 串口使用注意点 ### TX 引脚注意点 MaixCAM 的 `TX`(`UART0`) 引脚在开机时**不能是被拉低**的状态,不然会导致无法开机,是芯片的特性,如果你在做 `3.3v` 转 `5v` 的电平转换电路要十分注意不要默认拉低请保持浮空(可以考虑使用电平转换芯片)。 以及如果你发现无法开机,也可以先检查一下 `TX` 
是否被拉低了。 ## 串口连接电脑 有开发者可能会问:为什么插上 USB 电脑没出现串口设备? 答: 因为设备的 USB 默认是 虚拟 USB 网卡,没有串口功能,如果要访问设备的终端,请使用 ssh 连接。 对于 MaixCAM, 从 Type C 转接板引出的`串口0`直连到 `A16(TX)`和 `A17(RX)`引脚,可以直接接到其它设备比如单片机的串口引脚; 如果要和电脑通信,需要使用 USB 转串口小板(比如[这个](https://item.taobao.com/item.htm?spm a1z10.5 c s.w4002 24984936573.13.73cc59d6AkB9bS&id 610365562537))连接到电脑。 ## 开机日志输出 需要注意的是, **MaixCAM 的`串口0` 在开机时会打印一部分开机日志**, 启动完毕后会打印`serial ready`字样,如果和单片机通信需要注意丢弃这部分信息,如果出现系统启动出现问题也可以通过查看`串口0`的开机打印来诊断问题。 ## 发送数据 主要有两个函数`write_str`和`write`函数。 `write_str`函数来发送字符串,`write`用来发送字节流,即`str`和`bytes`类型,两者可以互相转换,比如: * `\"A\"` 调用`encode()`方法变成`b\"A\"`,反过来`b\"A\"`调用`decode()`方法变成`\"A\"`。 * `str` 没法显示一些不可见字符比如 ASCII 码中的值`0`,在字符串中也是`\\0`一般作为结束符,在`bytes`类型中就可以用`b\"\\x00\"`来储存。 * 对于非 ASCII 编码的字符串更有用,比如`UTF 8`编码中中文`好`是由三个字节`\\xe5\\xa5\\xbd`来表示的,我们可以通过`\"好\".encode(\"utf 8\")`得到`b\"\\xe5\\xa5\\xbd\"`,也可以通过`b'\\xe5\\xa5\\xbd'.decode(\"utf 8)`得到`\"好\"`。 所以如果我们需要发送字节数据,则用`write()`方法发送即可, 比如: ```python bytes_content b'\\x01\\x02\\x03' serial.write(bytes_content) ``` 所以对于 `str` 类型,也可以不用`write_str`,而是使用`serial.write(str_content.encode())` 来发送。 如果你有其它类型的数据,想将它们变成一个**字符串发送**,可以使用`Python 字符串格式化`来创建一个字符串,比如: 想发送`I have xxx apple`,这里`xxx` 想用一个整型变量,则: ```python num 10 content \"I have {} apple\".format(num) content2 f\"I have {num} apple\" content3 \"I have {:04d} apple\".format(num) content4 f\"I have {num:d} apple\" print(content) print(content2) print(content3) print(content4) print(type(content)) serial.write_str(content) ``` 另外你也可以把数据编码成**二进制流数据发送**,比如前 4 个字节是十六进制的 `AABBCCDD`,中间发送一个 `int` 类型的数值,最后再加一个`0xFF`结尾,使用`struct.pack`来进行编码(看不懂可以看后文的介绍): ```python from struct import pack num 10 bytes_content b'\\xAA\\xBB\\xCC\\xDD' bytes_content + pack(\" 这里只举例使用`i`编码`int`类型的数据,还有其它类型比如`B`表示`unsigned char`等等,更多的`struct.pack`格式化用法可以自行搜索`python struct pack`。 这样最终发送的就是`AA BB CC DD 0A 00 00 00 FF`二进制数据了。 ## 接收 使用`read`方法进行读取数据,直接: ```python while not app.need_exit(): data serial.read() if data: print(data) time.sleep_ms(1) ``` 同样,`read`方法获得的数据也是`bytes`类型,这里`read`会读取对方一次性发送的一串数据,如果没有数据就是`b''`即空字节。 这里用了`time.sleep_ms(1)`进行睡眠了`1ms`,用来释放 CPU,不让这个线程占用所有 CPU 资源,而且`1ms`不影响我们程序的效率,特别是在多线程时有用。 另外`read`函数有两个参数: * `len`:代表想接收的最大长度,默认` 1`代表缓冲区有多少就返回多少,传`>0`的值则代表最多返回这个长度的数据。 * `timeout`: * 默认 `0` 代表从缓冲区读取数据立马返回数据,如果`len`为 ` 1`则返回所有数据,如果指定了`len`则返回长度不超过`len` 的数据。 * `<0` 代表一直等待直到接收到了数据才返回,如果`len`为 ` 1`则等待到接收到数据才返回(一串连续接收到的数据,即阻塞式读取所有数据),如果指定了`len`则等待接收数量达到`len`才返回。 * `>0` 代表无论有没有接收到数据,超过这个时间就会返回。 看起来有点复杂,常见的参数组合: * `read()`: 即`read( 1, 0)`,从缓冲区读取收到的数据,通常是对方一次性发来的一串数据,等到对方没有发送(一个字符的发送时间内没有再发)就立刻返回。 * `read(len 1, timeout 1)`: 阻塞式读取一串数据,等到对方发送了数据并且一个字符的发送时间内没有再发才返回。 * `read(len 10, timeout 1000)`: 阻塞式读取 10 个字符,读取到 10 个字符或者 超过 1000ms 还没收到就返回已经收到的数据。 ## 设置接收回调函数 在 MCU 开发中,串口收到数据通常会有中断事件发生, MaixPy 已经在底层处理好了中断,开发者无需再处理中断。 如果你想在接收到数据时调用一个回调函数,可以用`set_received_callback`设置回调函数: ```python from maix import uart, app, time def on_received(serial : uart.UART, data : bytes): print(\"received:\", data) # send back serial.write(data) device \"/dev/ttyS0\" serial uart.UART(device, 115200) serial.set_received_callback(on_received) serial0.write_str(\"hello\\r\\n\") print(\"sent hello\") print(\"wait data\") while not app.need_exit(): time.sleep_ms(100) # sleep to make CPU free ``` 在接收到数据后会在**另外一个线程**里调用设置的回调函数,因为是在另外的线程里调用的,所以不像中断函数要尽快退出函数,你可以在回调函数里处理一些事务再退出也是可以的,注意多线程常见问题。 使用回调函数的方式接收数据请不要再使用`read`函数读取,否则会读取出错。 ## 使用其它串口 每个引脚可能可以对应不同的外设功能,这也叫引脚复用,如下图,每个引脚对应了不同功能,比如`A17`引脚(板子的丝引标识)对应了`GPIOA17` `UART0_RX` `PWM5` 这三种功能,默认是`UART0_RX`。 
![](https://wiki.sipeed.com/hardware/zh/lichee/assets/RV_Nano/intro/RV_Nano_3.jpg) ![maixcam_pro_io](/static/image/maixcam_pro_io.png) 默认我们就能像上面直接使用`UART0`,对于其它串口的引脚默认都不是串口外设功能,所以要使用其它串口,需要先设置一下映射,使用`pinmap.set_pin_function`来设置。 这里以使用`UART1` 为例,先设置引脚映射选择引脚功能为串口,然后设备编号使用`/dev/ttyS1`,注意`uart.list_devices()` 默认不会返回需要手动映射的串口,所以直接手动传参就可以了: ```python from maix import app, uart, pinmap, time pinmap.set_pin_function(\"A18\", \"UART1_RX\") pinmap.set_pin_function(\"A19\", \"UART1_TX\") device \"/dev/ttyS1\" serial1 uart.UART(device, 115200) ``` ## 应用层通信协议 ### 概念和字符协议 串口只是规定了保证硬件通信的时序,为了让接收方知道发送方发送的字符流的含义,我们一般会规定一个应用通信协议。 比如发送放需要发送一个坐标,包含了`x, y`两个整型值,为了让接收方能理解我们发送的字节流的含义,我们规定: * **帧头**:当我开始发送`$`符号时,就代表我要开始发送有效的数据啦。 > **内容**:设计一个开头符号的原因是串口是流式传输,比如发送两次`12345`有可能在某个时刻接收到了`12345123`这样的数据,第二帧的`45`还没有接收到,我们可以根据起始和结尾符号来判断一个完整的数据帧。 * x, y 的取值范围是 0~65535, 即两个字节的无符号短整型(`unsinged short`),我会先发 x 再发 y,用逗号隔开,比如`10,20`。 * **帧尾**:最后我会再发一个`*`标记来代表我这次数据发送完成了。 这样发送一次数据就类似`$10,20*`这样一个字符串,对方如果用 C 语言接收和解析: ```c // 1. 接收数据 // 2. 根据帧头帧尾判断是否接收完毕了,并将完整的一帧数据存到 buff 数组里面 // 3. 解析一帧数据 uint16_t x, y; sscanf(buff, \"$%d,%d*\", &x, &y); ``` 这样我们就制定了最简单的字符通信协议,具有一定的可靠性。 但是由于我们串口一般用的参数是`115200 8 N 1`,这里的`N`就是无奇偶校验,我们可以在自己的协议里面加一个**校验值**放在末尾,比如: * 这里我们规定 x,y 后还有一个校验值,取值范围是 0 到 255,它的值为前面所有字符加起来的和对 255 取余。 * 这里以 `$10,20`举例,在`Python`只需要使用`sum`函数就可以`sum(b'$10,20') % 255 > 20`,最终发送`$10,20,20*`。 * 接收放接收到数据后读取到校验值`20`,然后自己也同样的方式计算一遍`$10,20`的校验值,如果也是`20`说明数据传输没有发生错误,如果不相同我们则可以认为数据传输过程中发生了错误,可以丢弃等下一个数据包。 比如在 MaixPy 中,我们需要编码一个字符协议,直接使用 `Python 的字符串格式化`功能即可: ```python x 10 y 20 content \"${},{}*\".format(x, y) print(content) ``` ### 二进制通信协议 上面的字符协议有个很明显的特征,我们都是用可见字符的方式在传输数据,传输数据时有点就是简单,人眼能直接看懂; 缺点就是占用字符数量不固定,数据量比较大,比如`$10,20*`和`$1000,2000*`,同样的格式,数值不同长度不同,这里`1000`用了 4 个字符也就是4个字节,我们都知道一个无符号短整型(`uint16`)类型的数据只需要两个字节就能表示`0~65535`的取值范围,用这种表示方法可以少传输数据。 我们也知道可见字符可以通过`ASCII`码表转换成二进制表示形式,比如`$1000`查找`ASCII码表`换成二进制表示就是`0x24 0x31 0x30 0x30 0x30`一共 5 个字节,也就是我们实际传输数据的时候传输的二进制内容,如果现在我们用二进制的方式直接编码`1000`,即`0x03E8`,就可以直接发送`0x24 0x03 0xE8`,最终只需要发送 3 个字节,减少了通信开销。 另外这里`0x03E8`两个字节低位是`0xE8`,先发送低位`0xE8`我们称之为小端编码,反之则是大端编码,两个皆可,双方规定一致即可。 在 MaixPy 中,要将一个数值转换成 bytes 类型也很简单,使用`struct.pack`函数即可,比如这里的`0x03E8`也就是十进制的`1000`,我们用 ```python from struct import pack b pack(\"通信协议`里面设置。 系统设置里面可能还有其它通信方式比如`tcp`,默认是`uart`,你也可以通过`maix.app.get_sys_config_kv(\"comm\", \"method\")`来获取到当前设置的是不是`uart`。 ```python from maix import comm, protocol, app from maix.err import Err import struct def encode_objs(objs): ''' encode objs info to bytes body for protocol 2B x(LE) + 2B y(LE) + 2B w(LE) + 2B h(LE) + 2B idx ... ''' body b\"\" for obj in objs: body + struct.pack(\" 0: body encode_objs(objs) p.report(APP_CMD_DETECT_RES, body) # ... 
``` 这里通过`encode_objs`函数将所有检测到的物体信息打包成`bytes`类型的数据,然后用`p.report`函数将结果发送出去。 这里我们对`body`内容进行了一个简单的定义,即`2B x(LE) + 2B y(LE) + 2B w(LE) + 2B h(LE) + 2B idx ...`, 含义是: * 这张图中检测到多个物体,在`body`中按顺序排列,每个目标占用 `2+2+2+2+2 10` 个字节的长度,一共有`body_len / 10`个物体。 * 第1、2个字节代表识别到的物体的左上角的 `x` 坐标,单位是像素,因为 yolov5 的结果这个坐标值有可能为负数,所以我们用一个`short`类型的值来表示,这里使用了小端编码(LE)。 > 这里小端即数值的低字节在前,比如坐标 `x` 为 `100`, 十六进制为 `0x64`,我们用两个字节的`short`来表示就是`0x0064`,这里小端编码成 `bytes` 就是`0x64`在前, 结果就是`b'\\x64\\x00'`。 * 同理,将后面的数据都依次编码,一个物体得到一个`10`字节长的`bytes`类型数据。 * 循环将所有物体信息编码并拼接成一个`bytes`。 在调用`report`函数时,底层会自动按照协议拼接上协议头、校验和等等,这是在另一端就能收到一帧完整的数据了。 在另一端收到信息后也要按照协议进行解码,如果接收端也是用 MaixPy 可以直接: ```python while not app.need_exit(): msg p.get_msg() if msg and msg.is_report and msg.cmd APP_CMD_DETECT_RES: print(\"receive objs:\", decode_objs(msg.get_body())) p.resp_ok(msg.cmd, b'1') ``` 如果是其它设备比如`STM32`或者`Arduino`则可以参考 [Maix 串口通信协议标准](https://github.com/sipeed/MaixCDK/blob/master/docs/doc/convention/protocol.md) 附录中的 C 语言函数进行编解码。 ## 其它教程 * [【MaixPy/MaixCAM】视觉利器 MaixCAM 入门教程二](https://www.bilibili.com/video/BV1vcvweCEEe/?spm_id_from 333.337.search card.all.click) 看串口讲解部分 * [视觉模块和STM32如何进行串口通信](https://www.bilibili.com/video/BV175vWe5EfV/?spm_id_from 333.337.search card.all.click&vd_source 6c974e13f53439d17d6a092a499df304) * [[MaixCam]使用心得二:UART串口通信](https://blog.csdn.net/ButterflyBoy0/article/details/140577441) * 更多请自行互联网搜索"},"/maixpy/doc/zh/peripheral/adc.html":{"title":"MaixCAM MaixPy ADC 使用介绍","content":" title: MaixCAM MaixPy ADC 使用介绍 update: date: 2024 06 11 author: iawak9lkm version: 1.0.0 content: 初版文档 ## ADC 简介 ADC,即模拟信号数字转换器,将一个输入电压信号转换为一个输出的数字信号。由于数字信号本身不具有实际意义,仅仅表示一个相对大小。故任何一个模数转换器都需要一个参考模拟量作为转换的标准,参考标准一般为最大的可转换信号大小。而输出的数字量则表示输入信号相对于参考信号的大小。 ADC 外设一般有两个主要参数:分辨率和参考电压。 * 分辨率:ADC 的分辨率以输出二进制(或十进制)数的位数来表示。它说明 A/D 转换器对输入信号的分辨能力。一般来说,n 位输出的 A/D 转换器能区分 2^n 个不同等级的输入模拟电压,能区分输入电压的最小值为满量程输入的 1/(2^n)。在最大输入电压一定时,输出位数愈多,分辨率愈高。 * 参考电压:ADC 参考电压是在 AD 转换过程中与已知电压进行比较来找到未知电压的值的电压。参考电压可以认为是最高上限电压,当信号电压较低时,可以降低参考电压来提高分辨率。 通过板子的 ADC,可以采集外部的电压,并让板子检验电压是否达标,或是在检测到特定的电压时执行特定的任务(例如 ADC 检测多个按钮)。 ## MaixPy 中使用 ADC 通过 MaixPy 使用 ADC 很简单: ```python from maix.peripheral import adc from maix import time a adc.ADC(0, adc.RES_BIT_12) raw_data a.read() print(f\"ADC raw data:{raw_data}\") time.sleep_ms(50) vol a.read_vol() print(f\"ADC vol:{vol}\") ``` 使用 ADC0,从中读取原始的转换数据,或是直接从中读取电压数据。 有关 ADC API 的详细说明请看 [ADC API 文档](../../../api/maix/peripheral/adc.html) ## 关于 MaixCAM ADC 外设的一些说明 MaixCAM 引出一个连接 ADC 的 IO,为 GPIO B3,如下图所示(对于MaixCAM Pro 由于 B3 已经连接到了闪光灯, ADC 无法直接使用): ![](https://wiki.sipeed.com/hardware/zh/lichee/assets/RV_Nano/intro/RV_Nano_3.jpg) 该 IO 默认为 ADC, 无需额外进行配置。 MaixCAM ADC 外设采样精度为 12bit,也就是说采样输出范围为 0~4095。采样精度为参考电压的 1/4096。 MaixCAM ADC 外设的扫描频率不能高于 320K/s,也就是上述示例中增加延时的原因。 MaixCAM ADC 外设内部参考电压Vref为 1.5V,实际使用时会有些许偏差。因为内部参考电压典型值为 1.5V,所以 Soc 的 ADC 量程为 0~1.5V。该量程的 ADC 应用范围较小,故 MaixCAM 额外为 ADC 外设设计了分压电路来增大 ADC 的应用范围,该分压电路如下图所示。由于电路中电阻阻值存在误差、ADC 外设有阻抗、内部参考电压有些许偏差,该分压电路的参考电压 Vin_max 约为 4.6~5.0V。API 中已经选择一个精度较高的默认值,一般情况下无需传递该参数。 ![](https://wiki.sipeed.com/hardware/zh/lichee/assets/RV_Nano/peripheral/adc.png) 若需要较高的精度,可以通过以下步骤计算出该分压电路的参考电压: * 先测得 ADC_PIN 的实际输入电压 Vin。 * 然后测得 ADC1 处的实际输入电压 Vadc,电阻R10的位置可参考这个 [BOM](https://cn.dl.sipeed.com/fileList/LICHEE/LicheeRV_Nano/03_Designator_drawing/LicheeRV_Nano 70405_iBOM.rar) 表。 * 保持第一步的电压输入,在shell中执行以下命令: ```shell echo 1 > /sys/class/cvi saradc/cvi saradc0/device/cv_saradc cat /sys/class/cvi saradc/cvi saradc0/device/cv_saradc ``` 此时你将获得 ADC 原始数据 adc_data。 * 接地电阻为 R10,另一个电阻为 R6, 记录它们的阻值。通常,MaixCAM 的 R6 阻值为 10KΩ(10 000Ω),R10 阻值为 5.1KΩ(5 100Ω)。 * 
将上述参数传递给以下 python 代码,即可得出 ADC_PIN 端的量程 [0, Vin_max] (闭区间)。 ```python def maixcam_get_vin_max(Vin:float, Vadc:float, adc_data:int, r6:int, r10:int, adc_max:int 4095): Vref (Vadc/adc_data)*(adc_max+1) r3 Vadc*r6/(Vin Vadc) Vin_max (Vref/r3)*(r6+r3) return Vin_max Vin 3.3\t\t# step 1 Vadc 1.06\t\t# step 2 adc_data 2700\t# step 3 r6 10000\t\t# step 4 r10 5100\t\t# step 4 if __name__ '__main__': print(maixcam_get_vin_max(Vin, Vadc, adc_data, r6, r10)) ``` 现在将结果传递给 `adc.ADC()` 的第三个参数,你将获得一个高精度的 ADC。"},"/maixpy/doc/zh/peripheral/hid.html":{"title":"MaixCAM MaixPy 使用 HID 设备","content":" title: MaixCAM MaixPy 使用 HID 设备 ## 简介 HID(Human Interface Device)设备是一类计算机外围设备,用于向计算机传输输入数据,或从计算机接收输出数据。HID 设备最常见的例子包括键盘、鼠标、游戏控制器、触摸屏、和手写板等。HID 协议是一种用于人机交互设备的通信协议,它允许这些设备通过 USB、蓝牙或其他连接方式与主机进行数据交换。MaixPy目前支持作为键盘、鼠标和触摸屏来使用,下面将会介入如何使用MaixPy通过HID来控制你的个人电脑~ ## 一定要操作的前期准备 > MaixPy 固件版本应该 > 4.5.1 在操作HID前一定要先使能HID设备,有两种方法: 1. 打开MaixCAM自带的`Settings`应用,依次点击`USB Settings` >勾选需要的HID设备,如`Keyboard`、`Mouse`、`Touchscreen`,然后点击`Confirm`后重启MaixCAM 2. 通过MaixVision中的`Examples/tools/maixcam_switch_usb_mode.py`示例,修改代码`device_list`中需要开启的HID设备,运行后重启MaixCAM 注意:由于最多只支持4个USB设备,因此在`ncm`,`rndis`,`keyboard`,`mouse`,`touchpad`之中只能同时启动4个设备,根据实际需求选择,其中`ncm`和`rndis`是USB网络协议设备,如果不需要可以关掉,默认是打开的。 ## 用MaixPy编写一个键盘 需要使能了`HID Keyboard`后才能运行。 下面示例中,通过键盘发送`rstuv`四个字符,然后松开按键。 ```python from maix import hid, time keyboard hid.Hid(hid.DeviceType.DEVICE_KEYBOARD) # 按键编号参考[USB HID文档](https://www.usb.org))的\"Universal Serial Bus HID Usage Tables\"部分 keys [21, 22, 23, 24, 25, 0] # 表示[r, s, t, u, v, 0], 0表示松开按键 for key in keys: keyboard.write([0, 0, key, 0, 0, 0, 0, 0]) ``` ## 用MaixPy编写一个鼠标 需要使能了`HID Mouse`后才能运行。 下面示例中,每隔100ms移动鼠标5个像素。 ```python from maix import hid, time mouse hid.Hid(hid.DeviceType.DEVICE_MOUSE) button 0 # 按键状态,0表示松开,1表示按下左键,2表示按下右键,4表示按下滚轮键 x_oft 0 # 相对当前位置的偏移量,数值范围是 127~127 y_oft 0 # 相对当前位置的偏移量,数值范围是 127~127 wheel_move 0 # 滚轮移动距离,数值范围是 127~127 count 0 while True: x_oft + 5 y_oft + 5 mouse.write([button, x_oft, y_oft, wheel_move]) time.sleep_ms(100) count + 1 if count > 50: break ``` ## 用MaixPy编写一个触摸屏 需要使能了`HID Touchpad`后才能运行。 下面示例中,每隔100ms移动触摸屏150个单位,注意触摸屏的坐标系是绝对坐标,而不是相对坐标,另外需要将屏幕实际尺寸映射到[1, 0x7FFF]区间,坐标(1,1)表示左上角,坐标(0x7FFF,0x7FFF)表示右下角。 ```python from maix import hid, time touchpad hid.Hid(hid.DeviceType.DEVICE_TOUCHPAD) def touchpad_set(button, x_oft, y_oft, wheel_move): touchpad.write([button, # 按键状态,0表示松开,1表示按下左键,2表示按下右键,4表示按下滚轮键 x_oft & 0xff, (x_oft >> 8) & 0xff, # 绝对位置,最左为1, 最右为0x7fff,0表示不操作,数值范围是0~0x7fff y_oft & 0xff, (y_oft >> 8) & 0xff, # 绝对位置,最上为1, 最下为0x7fff,0表示不操作,数值范围是0~0x7fff wheel_move]) # 滚轮移动距离,数值范围是 127~127 button 0 x_oft 0 y_oft 0 wheel_move 0 count 0 while True: x_oft + 150 y_oft + 150 touchpad_set(button, x_oft, y_oft, wheel_move) time.sleep_ms(100) count + 1 if count > 50: break ```"},"/maixpy/doc/zh/peripheral/gpio.html":{"title":"MaixCAM MaixPy 使用 GPIO","content":" title: MaixCAM MaixPy 使用 GPIO ## 简介 使用 GPIO 可以控制引脚输入或者输出高低电平,用来读取信号或者输出控制信号,十分常用。 **注意** `MaixCAM` 的引脚是 `3.3V` 耐受,请勿输入 `5V` 电压。 ## MaixPy 中使用 GPIO 首先我们需要知道设备有哪些引脚和 GPIO,对于 MaixCAM 每个引脚都对应了一个 GPIO 控制器,如图: ![](https://wiki.sipeed.com/hardware/zh/lichee/assets/RV_Nano/intro/RV_Nano_3.jpg) ![maixcam_pro_io](/static/image/maixcam_pro_io.png) 需要注意的是,引脚除了作为 GPIO 使用,还能用作其它功能比如 PWM 使用,使用前我们需要设置一下引脚的功能为 GPIO。 比如在 MaixCAM 上**有些引脚默认已经被其它功能占用,比如 UART0, WiFi(SDIO1 + A26), 不建议使用它们。** 其它的可以使用,另外 A14 引脚连接到了板载的 LED,默认是作为系统的负载提示灯,如果初始化它会自动取消系统提示灯功能作为普通 GPIO 被使用(注意`A14`只能作为输出),这样你就能控制这颗 LED 的亮灭了。 LED 的电路图如图所示,所以我们只需要给 A14 引脚一个高电平 LED 就会导通并亮起来: ![](../../assets/gpio_led.png) 
```python from maix import gpio, pinmap, time pinmap.set_pin_function(\"A14\", \"GPIOA14\") led gpio.GPIO(\"GPIOA14\", gpio.Mode.OUT) led.value(0) while 1: led.toggle() time.sleep_ms(500) ``` 这里先使用`pinmap`设置了`A14`引脚的功能为`GPIO`,当然,对于`A14`因为只有`GPIO`功能,可以不设置,为了程序通用起见,其它引脚可能需要设置,所以这里例程设置了。 更多 API 请看 [GPIO API 文档](https://wiki.sipeed.com/maixpy/api/maix/peripheral/gpio.html) ## GPIO 作为输入模式 ```python from maix import gpio, pinmap, time pinmap.set_pin_function(\"A19\", \"GPIOA19\") led gpio.GPIO(\"GPIOA19\", gpio.Mode.IN) while 1: print(led.value()) time.sleep_ms(1) # sleep to make cpu free ``` ## MaixCAM Pro 使用照明 LED MaixCAM 和 MaixCAM Pro 都有一个 LED 小灯,即接到了引脚 `A14`,另外 MaixCAM Pro 还板载了一个照明 LED,连接到了 `B3` 引脚,也是高电平开启低电平关闭: ```python from maix import gpio, pinmap, time pinmap.set_pin_function(\"B3\", \"GPIOB3\") led gpio.GPIO(\"GPIOB3\", gpio.Mode.OUT) led.value(0) while 1: led.toggle() time.sleep_ms(500) ```"},"/maixpy/doc/zh/peripheral/spi.html":{"title":"MaixCAM MaixPy SPI 串行外设接口使用介绍","content":" title: MaixCAM MaixPy SPI 串行外设接口使用介绍 update: date: 2024 06 11 author: iawak9lkm version: 1.0.0 content: 初版文档 ## SPI 简介 SPI (Serial Peripheral Interface,即串行外设接口),是一种同步外设接口,它可以使 SoC 与各种外围设备以串行方式进行通信以交换信息。常见的外围设备有 Flash RAM、网络控制器、LCD显示驱动器和A/D转换器等。 SPI 采用主从模式(Master—Slave)架构,支持一个或多个Slave设备。 在硬件电路上,SPI 通常由 4 根线组成,它们分别是: * `MISO`:即主设备输入从设备输出(Master Output Slave Input),该引脚在从模式下发送数据,在主模式下接收数据。 * `MOSI`:即主设备输出从设备输入(Master Input Slave Output),该引脚在主模式下发送数据,在从模式下接收数据。 * `SCK`:串行总线时钟,由主设备输出,从设备输入。 * `NSS`/`CS`:从设备选择。它作为片选引脚,让主设备可以单独地与特定从设备通信,避免数据线上的冲突。 在通信协议上,SPI 行为一般如下: * SPI 支持一主多从,主设备通过片选引脚来选择需要进行通信的从设备,一般情况下,从设备 SPI 接口只需一根片选引脚,而主设备的片选引脚数量等同于设备数量。主设备使能某个从设备的片选信号期间,该从设备会响应主设备的所有请求,其余从设备会忽略总线上的所有数据。 * SPI 有四种模式,取决于极性(CPOL)和相位(CPHA)的配置。 极性,影响 SPI 总线空闲时的时钟信号电平。 1. CPOL 1:表示空闲时是高电平 2. CPOL 0:表示空闲时是低电平 相位,决定 SPI 总线采集数据的跳变沿。 1. CPHA 0:表示从第一个跳变沿开始采样 2. CPHA 1:表示从第二个跳变沿开始采样 极性与相位组合成了 SPI 的四种模式: Mode CPOL CPHA 0 0 0 1 0 1 2 1 0 3 1 1 * SPI 通常支持全双工和半双工通信。 * SPI 不规定最大传输速率,没有地址方案;SPI 也没规定通信应答机制,没有规定流控制规则。 SPI 是非常常见的通信接口,通过 SPI 接口,SoC 能控制各式各样的的外围设备。 ## MaixPy 中使用 SPI MaixCAM 的引脚分布如下: ![](https://wiki.sipeed.com/hardware/zh/lichee/assets/RV_Nano/intro/RV_Nano_3.jpg) ![maixcam_pro_io](/static/image/maixcam_pro_io.png) 使用前需要 `maix.peripheral.pinmap` 完成对 SPI 的管脚映射。 **注意:MaixCAM 由于其 SPI 外设的限制,只能作为 SPI 主设备使用。MaixCAM 的 SPI 暂时不支持修改硬件 CS 引脚有效电平,所有 SPI 硬件 CS 的有效电平为低电平。如需要使用其他的 CS 有效电平,请在 SPI API 中配置软件 CS 引脚及其有效电平。SPI4 为软件模拟的 SPI,实测最大速率为 1.25MHz,使用方法与硬件 SPI 无异。** 通过 MaixPy 使用 SPI 很简单: ```python from maix import spi, pinmap pin_function { \"A24\": \"SPI4_CS\", \"A23\": \"SPI4_MISO\", \"A25\": \"SPI4_MOSI\", \"A22\": \"SPI4_SCK\" } for pin, func in pin_function.items(): if 0 ! pinmap.set_pin_function(pin, func): print(f\"Failed: pin{pin}, func{func}\") exit( 1) spidev spi.SPI(4, spi.Mode.MASTER, 1250000) ### Example of full parameter passing. # spidev spi.SPI(id 4, # SPI ID # mode spi.Mode.MASTER, # SPI mode # freq 1250000, # SPI speed # polarity 0, # CPOL 0/1, default is 0 # phase 0, # CPHA 0/1, default is 0 # bits 8, # Bits of SPI, default is 8 # cs_enable True, # Use soft CS pin? 
True/False, default is False # cs 'GPIOA19') # Soft cs pin number, default is 'GPIOA19' b bytes(range(0, 8)) res spidev.write_read(b, len(b)) if res b: print(\"loopback test succeed\") else: print(\"loopback test failed\") print(f\"send:{b}\\nread:{res}\") ``` 请先连接该 SPI 的 `MOSI` 和 `MISO`。 先通过 `pinmap` 配置所需的引脚,然后启用全双工通信,返回值将等于发送值。 更多 SPI API 的详细说明请看 [SPI API 文档](../../../api/maix/peripheral/spi.html)"},"/maixpy/doc/zh/peripheral/i2c.html":{"title":"MaixCAM MaixPy 使用 I2C","content":" title: MaixCAM MaixPy 使用 I2C > 注意需要 MaixPy 镜像和固件 > 4.2.1 `MaixCAM` 的 `I2C` 及对应的 引脚 看图: ![](https://wiki.sipeed.com/hardware/zh/lichee/assets/RV_Nano/intro/RV_Nano_3.jpg) ![maixcam_pro_io](/static/image/maixcam_pro_io.png) 对于 MaixCAM,由于引脚资源比较紧张,引出的 `I2C1` `I2C3` 引脚和 WiFi 模块(SDIO1)重合了,所以 WiFi 和硬件 I2C 只能二选一使用。 另外还有一个`I2C5`,是底层驱动软件模拟的,建议使用它,底层已经做好了驱动,使用时和使用硬件`I2C`一样。 默认`I2C5`的引脚是`GPIO`,所以使用`i2c`模块前先用`pinmap`设置以下引脚功能为`I2C5`: ```python from maix import i2c, pinmap pinmap.set_pin_function(\"A15\", \"I2C5_SCL\") pinmap.set_pin_function(\"A27\", \"I2C5_SDA\") bus1 i2c.I2C(5, i2c.Mode.MASTER) slaves bus1.scan() print(\"find slaves:\", slaves) ``` 更多 API 看 [i2c API 文档](https://wiki.sipeed.com/maixpy/api/maix/peripheral/i2c.html) 如上面所说, 对于 `MaixCAM` 硬件 `I2C` 和 `WiFi` 只能二选一,如果一定要用,需要禁用`WiFi`,使用`pinmap`模块设置引脚功能为 `I2C`,再使用`maix.i2c`模块操作。 > TODO: 提供禁用 WiFi 的方法(需要系统里面禁用掉 WiFi 驱动,比较复杂) ```python from maix import i2c, pinmap pinmap.set_pin_function(\"P18\", \"I2C1_SCL\") pinmap.set_pin_function(\"P21\", \"I2C1_SDA\") bus1 i2c.I2C(1, i2c.Mode.MASTER) slaves bus1.scan() print(\"find slaves:\", slaves) ```"},"/maixpy/doc/zh/peripheral/pinmap.html":{"title":"MaixCAM MaixPy Pinmap 使用介绍","content":" title: MaixCAM MaixPy Pinmap 使用介绍 update: date: 2024 06 11 author: iawak9lkm version: 1.0.0 content: 初版文档 ## 管脚映射简介 在系统级芯片(System on Chip, SoC)设计中,一个管脚通常具有多个功能,这种设计方法被称为引脚复用。其原因主要有以下几个方面: * 节省引脚数量:SoC 集成了大量的功能模块,如 CPU、GPU、内存控制器、I/O 接口、通信模块等。如果每个功能都分配独立的引脚,会导致需要的引脚数量非常庞大,增加封装的复杂性和成本。通过引脚复用,一个引脚可以在不同的模式下支持不同的功能,从而显著减少引脚的总数。 * 降低芯片封装和制造成本:减少引脚数量可以选择更小的封装尺寸,从而降低封装和制造成本。小封装不仅降低了材料成本,还减少了芯片在电路板上的占用空间,有利于设计更紧凑的电子产品。 * 提高设计灵活性:引脚复用提供了更大的设计灵活性。不同的应用场景可能需要不同的功能组合,通过软件配置可以根据具体需求启用不同的引脚功能。例如,同一个引脚在一个实际应用中可以作为 UART 通信接口,而在另一个实际应用中可以作为 SPI 总线接口。 * 简化 PCB 布局:减少引脚数量可以简化印刷电路板(PCB)的布局设计。更少的引脚意味着更少的布线层数和过孔,从而简化了 PCB 设计,降低了生产难度和成本。 * 优化性能:在某些情况下,通过复用引脚可以优化信号路径和性能。例如,通过选择适当的引脚功能组合,可以减少信号传输路径上的干扰和噪声,提高系统的整体性能和可靠性。 而 Pinmap 展示和管理芯片各个引脚配置,这些配置通常包括每个引脚的名称及其功能(通常有多个功能)。 以 MaixCAM GPIO A28为例子: * `A28` 为引脚名称。 * `GPIOA28`/`UART2_TX`/`JTAG_TDI` 为引脚功能(可从 SoC 手册查询),同一时间该引脚只能是这三个功能中的其中一个。 通过 Pinmap,可以设定指定的芯片引脚为指定的功能。 ## MaixPy 中使用Pinmap 对于 MaixCAM 板子上各个引脚的编号及其功能,可以参考下图: ![](https://wiki.sipeed.com/hardware/zh/lichee/assets/RV_Nano/intro/RV_Nano_3.jpg) ![maixcam_pro_io](/static/image/maixcam_pro_io.png) 或是阅读 [SG2002芯片手册](https://cn.dl.sipeed.com/fileList/LICHEE/LicheeRV_Nano/07_Datasheet/SG2002_Preliminary_Datasheet_V1.0 alpha_CN.pdf) Pinmux 章节了解剩余的引脚的编号及功能。 介绍了那么多,其实通过 MaixPy 使用 Pinmap 来管理引脚功能很简单: ```python from maix import pinmap print(pinmap.get_pins()) f pinmap.get_pin_functions(\"A28\") print(f\"GPIO A28 pin functions:{f}\") print(f\"Set GPIO A28 to {f[0]} function\") pinmap.set_pin_function(\"A28\", f[0]) ``` 先列出了可供管脚映射的所有引脚,然后查询 `GPIO A28` 可供选择的引脚功能,最后将该引脚设置为该引脚的第一个功能(作为GPIO)。 更详细的 Pinmap 的 API 说明请看 [Pinmap API 文档](../../../api/maix/peripheral/pinmap.html)"},"/maixpy/doc/zh/pro/compile_os.html":{"title":"为 MaixCAM 编译系统","content":" title: 为 MaixCAM 编译系统 ## 为什么需要定制系统 正常情况下你只需要从 [https://github.com/sipeed/MaixPy/releases](https://github.com/sipeed/MaixPy/releases) 下载到适合 
MaixCAM 的最新系统使用即可。 有些情况需要定制系统,比如: * 比如你要量产 1k 个产品,都想放一个自己的应用,并且自动开机启动,不想一个一个配置,就可以改一下`builtin_files`,然后打包一个系统,所有板子一烧录就自带了自定义的文件,不要开及后再拷贝一次。 * 现在官方的系统没有你想要的软件包或者驱动,可以自己编译系统,选择自己想要的软件包编译定制的系统。 ## 基础系统获取 原理是系统来自 [https://github.com/sipeed/LicheeRV Nano Build/releases](https://github.com/sipeed/LicheeRV Nano Build/releases) 作为基础系统(不能直接给 MaixCAM 烧录使用,否则有烧坏屏幕风险),然后将 MaixCAM 定制的相关文件拷贝到基础系统重新打包成 MaixCAM 能用的系统。 如果不需要对基础系统进行自定义,直接从 [https://github.com/sipeed/LicheeRV Nano Build/releases](https://github.com/sipeed/LicheeRV Nano Build/releases) 下载最新的系统镜像包即可。 如果基础系统无法满足你的要求,比如你需要自定义增删一些软件包和驱动等,按照 [https://github.com/sipeed/LicheeRV Nano Build](https://github.com/sipeed/LicheeRV Nano Build) README 文档进行编译, 尽量使用 docker 编译以避免遇到编译环境问题,以及使用`bash`,不要使用`zsh`。 注意编译出来的系统不能直接给 MaixCAM 烧录使用,否则有烧坏屏幕风险。 ## 为 MaixCAM 拷贝文件 准备以下内容: * 基础系统,是一个 `.img` 或者 `.img.xz` 文件。 * 对于 MaixCAM 还需要放一些额外的文件进去,到[MaixPy release](https://github.com/sipeed/MaixPy/releases) 下载最新的 `builtin_files.tar.xz` 文件。 > 如果你需要放一些自定义的文件进系统,可以解压后往目录里面填加,比如你想系统烧录后 `/root` 目录下就会有一个 `cat.jpg`, 那么就往这里 `root` 目录下放一个 `cat.jpg`。 * 下载或克隆 MaixPy 源码到本地。 * 编译 MaixPy 获得 `.whl` 安装包文件,你也可以到 [MaixPy release](https://github.com/sipeed/MaixPy/releases) 下载最新的安装包。 到`MaixPy/tools/os`目录下,执行 ```shell ./gen_os.sh [skip_build_apps]\" ``` 这里参数说明: * **base_os_filepath**: 基础系统路径, img 或者 img.xz 格式。 * **maixpy_whl_filepath**: MaixPy 软件包, whl 格式。 * **builtin_files_dir_path**: MaixCAM 自定义文件, 可以在 MaixPy release 下载到最新的。 * **os_version_str**: 系统版本,格式要满足类似 `maixcam 2024 08 16 maixpy v4.4.21` 的规范。 * **skip_build_apps**: 跳过编译内置应用,可选参数,传 1 则会跳过,不传这个参数会将 MaixCDK 和 MaixPy 中的应用都编译并拷贝到系统中。 举例: ```shell ./gen_os.sh '/home/xxx/.../LicheeRV Nano Build/install/soc_sg2002_licheervnano_sd/images/2024 08 13 14 43 0de38f.img' ../../dist/MaixPy 4.4.21 py3 none any.whl '/home/xxx/.../sys_builtin_files' maixcam 2024 08 15 maixpy v4.4.21 ``` 等待编译内置应用以及拷贝完成,在 `MaixPy/tools/os/tmp` 目录下机会有一个`maixcam 2024 08 15 maixpy v4.4.21.img.xz`系统镜像了。"},"/maixpy/doc/zh/ai_model_converter/maixcam.html":{"title":"将 ONNX 模型转换为 MaixCAM MaixPy 可以使用的模型(MUD)","content":" title: 将 ONNX 模型转换为 MaixCAM MaixPy 可以使用的模型(MUD) ## 简介 电脑上训练的模型不能直接给 MaixCAM 使用,因为 MaixCAM 的硬件性能有限,一般我们需要将模型进行`INT8`量化以减少计算量,并且转换为 MaixCAM 支持的模型格式。 本文介绍如何将 ONNX 模型转换为 MaixCAM 能使用的模型(MUD模型)。 ## MaixCAM 支持的模型文件格式 MUD(模型统一描述文件, model universal description file)是 MaixPy 支持的一种模型描述文件,用来统一不同平台的模型文件,方便 MaixPy 代码跨平台,本身是一个 `ini`格式的文本文件,可以使用文本编辑器编辑。 一般 MUD 文件会伴随一个或者多个实际的模型文件,比如对于 MaixCAM, 实际的模型文件是`.cvimodel`格式, MUD 文件则是对它做了一些描述说明。 这里以 `YOLOv8` 模型文件举例,一共两个文件`yolov8n.mud`和`yolov8n.cvimodel`,前者内容: ```ini [basic] type cvimodel model yolov8n.cvimodel [extra] model_type yolov8 input_type rgb mean 0, 0, 0 scale 0.00392156862745098, 0.00392156862745098, 0.00392156862745098 labels person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair drier, toothbrush ``` 可以看到, 指定了模型类别为`cvimodel`, 模型路径为相对`mud`文件的路径下的`yolov8n.cvimodel`文件; 
以及一些需要用到的信息,比如预处理`mean`和`scale`,这里需要和训练的时候对模型输入的数据的预处理方法一致,`labels`则是检测对象的 80 种分类。 实际用这个模型的时候将两个文件放在同一个目录下即可。 ## 准备 ONNX 模型 准备好你的 onnx 模型, 然后在[https://netron.app/](https://netron.app/) 查看你的模型,确保你的模型使用的算子在转换工具的支持列表中,转换工具的支持列表可以在[算能 TPU SDK](https://developer.sophgo.com/thread/473.html)的 **CVITEK_TPU_SDK开发指南.pdf** 中看到列表。 ## 找出合适的量化输出节点 一般模型都有后处理节点,这部分是 CPU 进行运算的,我们将它们剥离出来,它们会影响到量化效果,可能会导致量化失败。 这里以`YOLOv5 举例`, ![](../../assets/yolov5s_onnx.jpg) 可以看到这里有三个`conv`,后面的计算均由 CPU 进行,我们量化时就采取这几个`conv`的输出作为模型的最后输出,在这里输出名分别叫`/model.24/m.0/Conv_output_0,/model.24/m.1/Conv_output_0,/model.24/m.2/Conv_output_0`。 ## 安装模型转换环境 模型转换使用算能的[https://github.com/sophgo/tpu mlir](https://github.com/sophgo/tpu mlir),要安装它我们直接在 docker 环境中安装,防止我们电脑的环境不匹配,如果你没用过 docker,可以简单理解成它类似虚拟机。 ### 安装 docker 参考[docker 安装官方文档](https://docs.docker.com/engine/install/ubuntu/)安装即可。 比如: ```shell # 安装docker依赖的基础软件 sudo apt get update sudo apt get install apt transport https ca certificates curl gnupg agent software properties common # 添加官方来源 curl fsSL https://download.docker.com/linux/ubuntu/gpg sudo apt key add sudo add apt repository \"deb [arch amd64] https://download.docker.com/linux/ubuntu $(lsb_release cs) stable\" # 安装 docker sudo apt get update sudo apt get install docker ce docker ce cli containerd.io ``` ### 拉取 docker 镜像 ```shell docker pull sophgo/tpuc_dev:latest ``` 如果docker拉取失败,可以通过以下方式进行下载: ```shell wget https://sophon file.sophon.cn/sophon prod s3/drive/24/06/14/12/sophgo tpuc_dev v3.2_191a433358ad.tar.gz docker load i sophgo tpuc_dev v3.2_191a433358ad.tar.gz ``` 这个方法参考[tpu mlir官方docker环境配置](https://github.com/sophgo/tpu mlir/blob/master/README_cn.md)。 此外你也可以设置国内的镜像,可自行搜索或者参考[docker 设置代理,以及国内加速镜像设置](https://neucrack.com/p/286)。 ### 运行容器 ```shell docker run privileged name tpu env v /home/$USER/data:/home/$USER/data it sophgo/tpuc_dev ``` 这就起了一个容器,名叫`tpu env`,并且把本机的`~/data`目录挂载到了容器的`~/data`,这样就实现了文件共享,并且和宿主机路径一致。 下次启动容器用`docker start tpu env && docker attach tpu env`即可。 ### 安装 tpu mlir 先到[github](https://github.com/sophgo/tpu mlir/releases)下载 `whl` 文件,放到`~/data`目录下。 在容器中执行命令安装: ```shell pip install tpu_mlir*.whl # 这里就是下载文件的名字 ``` 执行`model_transform.py` 会有打印帮助信息就算是安装成功了。 ## 编写转换脚本 转换模型主要就两个命令,`model_transform.py` 和 `model_deploy.py`,主要麻烦的是参数,所以我们写一个脚本`convert_yolov5_to_cvimodel.sh`存下来方便修改。 ```shell #!/bin/bash set e net_name yolov5s input_w 640 input_h 640 # mean: 0, 0, 0 # std: 255, 255, 255 # mean # 1/std # mean: 0, 0, 0 # scale: 0.00392156862745098, 0.00392156862745098, 0.00392156862745098 mkdir p workspace cd workspace # convert to mlir model_transform.py \\ model_name ${net_name} \\ model_def ../${net_name}.onnx \\ input_shapes [[1,3,${input_h},${input_w}]] \\ mean \"0,0,0\" \\ scale \"0.00392156862745098,0.00392156862745098,0.00392156862745098\" \\ keep_aspect_ratio \\ pixel_format rgb \\ channel_format nchw \\ output_names \"/model.24/m.0/Conv_output_0,/model.24/m.1/Conv_output_0,/model.24/m.2/Conv_output_0\" \\ test_input ../dog.jpg \\ test_result ${net_name}_top_outputs.npz \\ tolerance 0.99,0.99 \\ mlir ${net_name}.mlir # export bf16 model # not use quant_input, use float32 for easy coding model_deploy.py \\ mlir ${net_name}.mlir \\ quantize BF16 \\ processor cv181x \\ test_input ${net_name}_in_f32.npz \\ test_reference ${net_name}_top_outputs.npz \\ model ${net_name}_bf16.cvimodel echo \"calibrate for int8 model\" # export int8 model run_calibration.py ${net_name}.mlir \\ dataset ../images \\ input_num 200 \\ o ${net_name}_cali_table echo \"convert to int8 model\" # export int8 model # add quant_input, use int8 for 
faster processing in maix.nn.NN.forward_image model_deploy.py \\ mlir ${net_name}.mlir \\ quantize INT8 \\ quant_input \\ calibration_table ${net_name}_cali_table \\ processor cv181x \\ test_input ${net_name}_in_f32.npz \\ test_reference ${net_name}_top_outputs.npz \\ tolerance 0.9,0.6 \\ model ${net_name}_int8.cvimodel ``` 可以看到,这里有几个比较重要的参数: * `output_names` 就是我们前面说到的输出节点的输出名。 * `mean, scale` 就是训练时使用的预处理方法,比如 `YOLOv5` 官方代码的预处理是把图像 RGB 3个通道分别 ` mean`再除以`std`,并且默认`mean` 为`0`, `std`为`255`,即将图像的值归一,这里`scale`就是`1/std`。你的模型需要根据实际的预处理方法修改。 * `test_input` 就是转换时用来测试的图像,这里是`../dog.jpg`,所以实际模型转换时我们需要在此脚本所在同目录放一张`dog.jpg`的图,你的模型根据你的实际情况替换图像。 * `tolerance` 就是量化前后允许的误差,如果转换模型时报错提示值小于设置的这个值,说明转出来的模型可能相比 onnx 模型误差较大,如果你能够容忍,可以适当调小这个阈值让模型转换通过,不过大多数时候都是因为模型结构导致的,需要优化模型,以及仔细看后处理,把能去除的后处理去除了。 * `quantize` 即量化的数据类型,在 MaixCAM 上我们一般用 INT8 模型,这里我们虽然也顺便转换了一个 BF16 模型,BF16 模型的好处时精度高,不过运行速率比较慢,能转成 INT8 就推荐先用 INT8,实在不能转换的或者精度要求高速度要求不高的再考虑 BF16。 * `dataset` 表示用来量化的数据集,也是放在转换脚本同目录下,比如这里是`images`文件夹,里面放数据即可,对于 YOLOv5 来说就是图片,从 coco 数据集中复制一部分典型场景的图片过来即可。 用` input_num` 可以指定实际使用图片的数量(小于等于 images 目录下实际的图片)。 ## 执行转换脚本 直接执行`chmod +x convert_yolov5_to_cvimodelsh && ./convert_yolov5_to_cvimodel.sh` 等待转换完成。 如果出错了,请仔细看上一步的说明,是不是参数有问题,或者输出层选择得不合理等。 然后就能在`workspace`文件夹下看到有`**_int8.cvimodel` 文件了。 ## 编写`mud`文件 根据你的模型情况修改`mud`文件,对于 YOLOv5 就如下,修改成你训练的`labels`就好了。 ```ini [basic] type cvimodel model yolov5s.cvimodel [extra] model_type yolov5 input_type rgb mean 0, 0, 0 scale 0.00392156862745098, 0.00392156862745098, 0.00392156862745098 anchors 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326 labels person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair drier, toothbrush ``` 这里`basic`部分指定了模型文件类别和模型文件路径,是必要的参数,有了这个参数就能用`MaixPy`或者`MaixCDK`中的`maix.nn.NN`类来加载并运行模型了。 `extra`则根据不同模型的需求设计不同参数。 比如这里对`YOLOv5`设计了这些参数,主要是 预处理、后处理、标签等参数。 对于 `MaixPy` 已经支持了的模型可以直接下载其模型复制修改。 也可以看具体的代码,比如[YOLOv5 的源码](https://github.com/sipeed/MaixCDK/blob/71d5b3980788e6b35514434bd84cd6eeee80d085/components/nn/include/maix_nn_yolov5.hpp#L73 L223),可以看到源码使用了哪些参数。 比如你用`YOLOv5`训练了检测数字`0~9`的模型,那么需要将`labels`改成`0,1,2,3,4,5,6,7,8,9`,其它参数如果你没改训练代码保持即可。 如果你需要移植 `MaixPy` 没有支持的模型,则可以根据模型的预处理和后处理情况定义 `extra`, 然后编写对应的解码类。如果你不想用C++修改 MaixPy 源码,你也可以用MaixPy 的`maix.nn.NN`类加载模型,然后用 `forward` 或者 `forward_image` 方法或者原始输出,在 Python 层面写后处理也可以,只是运行效率比较低不太推荐。 ## 编写后处理代码 如上一步所说,如果是按照已经支持的模型的`mud`文件修改好,那直接调用`MaixPy`或者`MaixCDK`对应的代码加载即可。 如果支持新模型,设计好 `mud` 文件后,你需要实际编写预处理和后处理,有两种方法: * 一:MaixPy 用 `maix.nn.NN`加载模型,然后`forward`或者`forward_image`函数运行模型,获得输出,然后用 Python 函数编写后处理得到最终结果。 * 二:在`MaixCDK`中,可以参考[YOLOv5 的源码](https://github.com/sipeed/MaixCDK/blob/71d5b3980788e6b35514434bd84cd6eeee80d085/components/nn/include/maix_nn_yolov5.hpp), 新增一个`hpp`文件,增加一个处理你的模型的类,并且修改所有函数和类的`@maixpy`注释,编写好了编译`MaixPy`项目,即可在`MaixPy`中调用新增的类来运行模型了。 支持了新模型后还可以将源码提交(Pull Request)到主`MaixPy`仓库中,成为`MaixPy`项目的一员,为社区做贡献,也可以到 [MaixHub 分享](https://maixhub.com/share) 分享你新支持的模型,根据质量可以获得最少 `30元` 最高 `2000元` 
的打赏!"},"/maixpy/doc/zh/modules/rtc.html":{"title":"MaixCAM MaixPy 使用 RTC 模块","content":" title: MaixCAM MaixPy 使用 RTC 模块 MaixCAM Pro 板载了一个 RTC 模块,默认上电会自动同步系统时间,以及从网络同步时间,网络状态变化后也会自动同步。 所以一般情况不需要手动操作 RTC,直接使用系统的时间 API 获取时间即可。 如果一定要手动操作 RTC,请看[bm8653 RTC 模块使用](./bm8653.html)(手动操作前可以在系统 `/etc/init.d`目录下把 RTC 和 NTP 相关服务删掉以禁用自动同步。 > MaixCAM 无板载 RTC。"},"/maixpy/doc/zh/modules/acc.html":{"title":"MaixCAM MaixPy 读取加速度计和姿态解算","content":" title: MaixCAM MaixPy 读取加速度计和姿态解算 ## IMU 简介 对于 MaixCAM Pro,板载了一款集成了三轴陀螺仪和三轴加速度计的QMI8658芯片, 它能够提供高精度的姿态、运动和位置数据,适用于各种需要精确运动检测的应用场景,如无人机、机器人、游戏控制器和虚拟现实设备等。QMI8658具有低功耗、高稳定性和高灵敏度的特点, 下面将介绍使用IMU模块来获取姿态数据。 > MaixCAM 无板载加速度计,可自行外接使用 iic 驱动。 ## MaixPy 中使用 IMU 使用 IMU 模块从 QMI8658 读取数据. 示例代码: ```python from maix.ext_dev import imu i imu.IMU(\"qmi8658\", mode imu.Mode.DUAL, acc_scale imu.AccScale.ACC_SCALE_2G, acc_odr imu.AccOdr.ACC_ODR_8000, gyro_scale imu.GyroScale.GYRO_SCALE_16DPS, gyro_odr imu.GyroOdr.GYRO_ODR_8000) while True: data i.read() print(\"\\n \") print(f\"acc x: {data[0]}\") print(f\"acc y: {data[1]}\") print(f\"acc z: {data[2]}\") print(f\"gyro x: {data[3]}\") print(f\"gyro y: {data[4]}\") print(f\"gyro z: {data[5]}\") print(f\"temp: {data[6]}\") print(\" \\n\") ``` 按照您的需求初始化 IMU 对象, 然后调用 `read()` 即可. `read()` 返回的是从 IMU 中读出的原始数据. **如果 `mode` 参数选择 `DUAL`, 则 `read()`返回的数据为 `[acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z, temp]`, 如果 `mode` 只选择 ACC/GYRO 中的一个, 只会返回对应的 `[x, y, z, temp]`, 例如选择 ACC, `read()` 会返回 `[acc_x, acc_y, acc_z, temp]`.** 有关 IMU API 的详细说明请看 [IMU API 文档](../../../api/maix/ext_dev/imu.html)"},"/maixpy/doc/zh/modules/bm8653.html":{"title":"MaixPy bm8653驱动说明","content":" title: MaixPy bm8653驱动说明 update: date: 2024 08 27 author: iawak9lkm version: 1.0.0 content: 初版文档 ## BM8653 简介 BM8653是一款实时时钟(RTC)芯片,广泛应用于各种电子设备中,用于提供精确的时间和日期信息。它具有低功耗、高精度的特点,能够在设备断电的情况下通过备用电池继续运行,确保时间的连续性和准确性。 ## MaixPy 中使用 BM8653 在 MaixPy 中使用 BM8653 很简单, 您只需要知道您平台上的 BM8653 挂载在哪个 I2C 总线上. MaixCAM Pro 板载的 BM8563 挂载在 I2C 4 上. 示例代码: ```python from maix import ext_dev, pinmap, err, time ### Enable I2C # ret pinmap.set_pin_function(\"PIN_NAME\", \"I2Cx_SCL\") # if ret ! err.Err.ERR_NONE: # print(\"Failed in function pinmap...\") # exit( 1) # ret pinmap.set_pin_function(\"PIN_NAME\", \"I2Cx_SDA\") # if ret ! err.Err.ERR_NONE: # print(\"Failed in function pinmap...\") # exit( 1) BM8653_I2CBUS_NUM 4 rtc ext_dev.bm8563.BM8563(BM8653_I2CBUS_NUM) ### 2020 12 31 23:59:45 t [2020, 12, 31, 23, 59, 45] # Set time rtc.datetime(t) while True: rtc_now rtc.datetime() print(f\"{rtc_now[0]} {rtc_now[1]} {rtc_now[2]} {rtc_now[3]}:{rtc_now[4]}:{rtc_now[5]}\") time.sleep(1) ``` 如果您使用的是 MaixCAM Pro 板载的 BM8653, 无需使能 I2C 4. 示例中读写 BM8653, 设置或是读取当前时间. 您也可以通过以下示例将当前 BM8653 内的时间设置为系统时间, 或是将当前系统时间设置为 BM8653 内的时间. ```python from maix import ext_dev, pinmap, err, time ### Enable I2C # ret pinmap.set_pin_function(\"PIN_NAME\", \"I2Cx_SCL\") # if ret ! err.Err.ERR_NONE: # print(\"Failed in function pinmap...\") # exit( 1) # ret pinmap.set_pin_function(\"PIN_NAME\", \"I2Cx_SDA\") # if ret ! err.Err.ERR_NONE: # print(\"Failed in function pinmap...\") # exit( 1) BM8653_I2CBUS_NUM 4 rtc ext_dev.bm8563.BM8563(BM8653_I2CBUS_NUM) ### Update RTC time from system rtc.systohc() ### Update system time from RTC # rtc.hctosys() while True: rtc_now rtc.datetime() print(f\"{rtc_now[0]} {rtc_now[1]} {rtc_now[2]} {rtc_now[3]}:{rtc_now[4]}:{rtc_now[5]}\") time.sleep(1) ``` **BM8653 的底层实现类似于单例模式, 本 API 可以保证对单个 BM8653 的读写是线程安全的. 
也就意味着您可以随意的创建 BM8653 对象, 在任意地方读写 BM8653 均不会产生数据竞争.** 传给 BM8653 对象的 timetuple 遵循 (year, month, day[, hour[, minute[, second]]]), 即必须要有前三个参数, 后续参数缺失部分代表的时间不会进行修改. BM8653 保证返回的 timetuple 为空时表示错误, 不为空时必定是含有6个元素的 List[], 其内容为(year, month, day, hour, minute, second). 有关 BM8653 API 的详细说明请看 [BM8653 API 文档](../../../api/maix/ext_dev/bm8563.html)"},"/maixpy/doc/zh/modules/qmi8658.html":{"title":"MaixPy qmi8658驱动说明","content":" title: MaixPy qmi8658驱动说明 update: date: 2024 08 27 author: iawak9lkm version: 1.0.0 content: 初版文档 ## QMI8658 简介 QMI8658是一款集成了三轴陀螺仪和三轴加速度计的惯性测量单元(IMU)芯片. 它能够提供高精度的姿态、运动和位置数据,适用于各种需要精确运动检测的应用场景,如无人机、机器人、游戏控制器和虚拟现实设备等。QMI8658具有低功耗、高稳定性和高灵敏度的特点. ## MaixPy 中使用 QMI8658 在 MaixPy 中使用 QMI8658 很简单, 您只需要知道您使用的平台上 QMI8658 挂载在哪个 I2C 总线上. MaixCAM Pro 板载的 QMI8658 挂载在 I2C 4 上. 示例代码: ```python from maix import ext_dev, pinmap, err, time ### Enable I2C # ret pinmap.set_pin_function(\"PIN_NAME\", \"I2Cx_SCL\") # if ret ! err.Err.ERR_NONE: # print(\"Failed in function pinmap...\") # exit( 1) # ret pinmap.set_pin_function(\"PIN_NAME\", \"I2Cx_SDA\") # if ret ! err.Err.ERR_NONE: # print(\"Failed in function pinmap...\") # exit( 1) QMI8658_I2CBUS_NUM 4 imu ext_dev.qmi8658.QMI8658(QMI8658_I2CBUS_NUM, mode ext_dev.qmi8658.Mode.DUAL, acc_scale ext_dev.qmi8658.AccScale.ACC_SCALE_2G, acc_odr ext_dev.qmi8658.AccOdr.ACC_ODR_8000, gyro_scale ext_dev.qmi8658.GyroScale.GYRO_SCALE_16DPS, gyro_odr ext_dev.qmi8658.GyroOdr.GYRO_ODR_8000) while True: data imu.read() print(\"\\n \") print(f\"acc x: {data[0]}\") print(f\"acc y: {data[1]}\") print(f\"acc z: {data[2]}\") print(f\"gyro x: {data[3]}\") print(f\"gyro y: {data[4]}\") print(f\"gyro z: {data[5]}\") print(f\"temp: {data[6]}\") print(\" \\n\") ``` 按照您的需求初始化 QMI8658 对象, 然后调用 `read()` 即可. `read()` 返回的是从 QMI8658 中读出的原始数据. **如果 `mode` 参数选择 `DUAL`, 则 `read()`返回的数据为 `[acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z, temp]`, 如果 `mode` 只选择 ACC/GYRO 中的一个, 只会返回对应的 `[x, y, z, temp]`, 例如选择 ACC, `read()` 会返回 `[acc_x, acc_y, acc_z, temp]`.** 有关 QMI8658 API 的详细说明请看 [QMI8658 API 文档](../../../api/maix/ext_dev/qmi8658.html)"},"/maixpy/doc/zh/modules/tmc2209.html":{"title":"MaixPy tmc2209 单串口驱动使用介绍","content":" title: MaixPy tmc2209 单串口驱动使用介绍 update: date: 2024 08 21 author: iawak9lkm version: 1.0.0 content: 初版文档 ## TMC2209 简介 TMC2209是一款由德国Trinamic公司生产的步进电机驱动芯片。它专为2相步进电机设计,具有低功耗、高效率和良好的噪声抑制能力。TMC2209支持高达2.8A的电流,适用于各种步进电机应用,如3D打印机、CNC机床、机器人等。 ## MaixPy 中使用 tmc2209 驱动步进电机 * 请确保您的步进电机为两相四线步进电机, 然后确认您的电机步进角度(step_angle), 需要使用的微步数(micro_step), 以及该电机旋转一圈时, 负载移动的距离(screw_pitch或round_mm). 以便我们后续配置驱动参数. * 一般来说, 市面上的 TMC2209 的驱动板有以下这些引脚(如果您嫌麻烦, 可以采购我司在售的 TMC2209 驱动板, 链接[暂未上架,敬请期待]): ``` EN VM MS1 GND MS2 2B RX 2A TX 1A NC 1B STEP VDD DIR GND ``` `EN`: EN 为使能脚, 将该引脚接到 `GND` 以硬件使能 TMC2209. `MS1`: MS1 为微步进选择引脚之一,与 MS2 引脚配合使用,用于设置步进电机的微步进模式。 `MS2`: MS2 为微步进选择引脚之一,与 MS1 引脚配合使用,用于设置步进电机的微步进模式。 **This driver program only supports the UART mode of TMC2209. In UART mode, the original microstep selection pins `MS1` and `MS2` are redefined as `AD0` and `AD1`, respectively. The combination of the logic levels of these two pins determines the UART address of the TMC2209, with a value range from 0x00 to 0x03. This means that a single UART port can connect up to 4 TMC2209 drivers with different addresses. For example, when `MS1` is at a low level (0) and `MS2` is at a high level (1), the UART address is binary 0b10, which is hexadecimal 0x02.** `TX`: TX 为串行通信发送引脚,用于与外部微控制器进行串口通信。 `RX`: RX 为串行通信接收引脚,用于与外部微控制器进行串口通信。 在 TMC2209 上, 同时使用 `RX` 和 `TX` 时, 请确保 TMC2209 驱动板 `RX` 与主控芯片 `TX` 间存在 1K 欧姆的电阻. 否则会出现通信数据异常. 
`NC`: NC 为未连接引脚,表示该引脚在正常使用中不需要连接。 `STEP`: STEP 为步进信号输入引脚,每接收到一个脉冲信号,步进电机前进一个步进角度。因为本驱动为纯 UART 方式驱动,故该引脚不需要连接, 悬空即可. `DIR`: DIR 为方向信号输入引脚,用于控制步进电机的旋转方向。当 DIR 为高电平时,电机顺时针旋转;当 DIR 为低电平时,电机逆时针旋转。因为本驱动为纯 UART 方式驱动,故该引脚不需要连接, 悬空即可. `VM`: VM 为电源输入引脚,连接到步进电机的电源正极。 `GND`: GND 为接地引脚,连接到电源的负极。 `2B`, `2A`, `1B`, `1A`: 这些引脚为步进电机的相位输出引脚,分别连接到步进电机的两相线圈。 `VDD`: VDD 为逻辑电源输入引脚,为芯片内部的逻辑电路提供电源。 * 使用 MaixPy 中的 TMC2209 驱动 以一个步进角度为18,微步数为256,螺距为3mm的丝杆步进电机为例: ```python from maix import pinmap, ext_dev, err, time port \"/dev/ttyS1\" uart_addr 0x00 uart_baudrate 115200 step_angle 18 micro_step 256 screw_pitch 3 speed 6 use_internal_sense_resistors True run_current_per 100 hold_current_per 100 if port \"/dev/ttyS1\": ret pinmap.set_pin_function(\"A19\", \"UART1_TX\") if ret ! err.Err.ERR_NONE: print(\"Failed in function pinmap...\") exit( 1) ret pinmap.set_pin_function(\"A18\", \"UART1_RX\") if ret ! err.Err.ERR_NONE: print(\"Failed in function pinmap...\") exit( 1) slide ext_dev.tmc2209.ScrewSlide(port, uart_addr, uart_baudrate, step_angle, micro_step, screw_pitch, speed, use_internal_sense_resistors, run_current_per, hold_current_per) def reset_callback() > bool: if 2 > 1: # An event occurs (e.g., a sensor is triggered), # indicating that the slide has moved to the boundary and the motor needs to stop. print(\"Reset finish...\") return True # Not occurred, no need to stop the motor. return False def move_callback(per:float) > bool: # per is the percentage of the current distance moved by move() # out of the total distance required for the current move(), ranging from 0 to 100. print(f\"Slide moving... {per}\") if per > 50: # Example: Stop moving when 50% of the total distance for the current move() has been covered. print(f\"{per} > 50%, stop.\") return True return False slide.reset(reset_callback) slide.move(screw_pitch*2, 1, move_callback) slide.move( screw_pitch) while True: slide.move(screw_pitch*2) slide.move( (screw_pitch*2)) time.sleep_ms(100) ``` 程序中需要先使用 `pinmap` 确保 UART1 被启用. 然后创建一个 `ScrewSlide` 对象, 默认使用内部参考电阻, 默认使用 100% 的电机运行电流和 100% 的电机保持电流. 这些参数可能需要根据您的电机进行调整. 然后例程声明了一个reset回调函数和一个move回调函数并分别传入reset()函数和move()函数中. reset() 和 move() 会每隔一段时间调用回调函数以确认是否需要立即停止电机(当回调函数返回True). move() 和 reset() 函数均为阻塞函数, 只有在回调函数返回True时(move还能在运动完指定长度时)停止电机并返回. ## MaixPy 中使用 tmc2209 驱动恒定负载的步进电机 **!!!丝杆步进电机携带恒定负载也不能视为带恒定负载的步进电机, 因为丝杆步进电机有限位装置以保证负载在杠上的运动方向是可知的, 丝杆步进电机运行时会与限位装置经常碰撞导致电机负载并不是恒定的. 其他情况举一反三即可知是否为恒定负载步进电机.** 某些应用场景中, 步进电机全程的负载恒定, 只有在接触到边缘堵转时负载变高. 那么可以使用 `Slide` 类代替 `ScrewSlide` 类, 在这种情况下 `Slide` 具备堵转检测功能. 使用 `ScrewSlide` 也是可行的, 不具备堵转检测但是更加灵活. 请结合使用场景来选择这两个类, 本节只讲 `Slide` 类. * 实现原理 TMC2209 内部存在一个寄存器 `SG_RESULT`, 该寄存器保存的数据与驱动电机剩余力矩成正比. 如果电机负载恒定, 该寄存器值变化幅度很小, 在堵转时, 该寄存器值将会快速减小并维持一个较低的值. 找到该恒定负载电机这个寄存器的运行平均值和堵转平均值, 即可衡量该电机在某时刻是否堵转. * 获取 `SG_RESULT` 寄存器的平均值 `maix.ext_dev.tmc2209` 中提供了获取并保存该平均值的函数 `maix.ext_dev.tmc2209.slide_scan`. example: ```python from maix import ext_dev, pinmap, err port \"/dev/ttyS1\" uart_addr 0x00 uart_baudrate 115200 step_angle 1.8 micro_step 256 round_mm 60 speed 60 use_internal_sense_resistors True run_current_per 100 hold_current_per 100 if port \"/dev/ttyS1\": ret pinmap.set_pin_function(\"A19\", \"UART1_TX\") if ret ! err.Err.ERR_NONE: print(\"Failed in function pinmap...\") exit( 1) ret pinmap.set_pin_function(\"A18\", \"UART1_RX\") if ret ! 
err.Err.ERR_NONE: print(\"Failed in function pinmap...\") exit( 1) ext_dev.tmc2209.slide_scan(port, uart_addr, uart_baudrate, step_angle, micro_step, round_mm, speed, True, True, run_current_per, hold_current_per, conf_save_path './slide_scan_example.bin', force_update False) ``` 配置好串口和驱动器参数, 然后调用 `slide_scan`. `slide_scan` 的最后一个参数 `force_update` 决定了在该配置文件已经存在时的行为: > 如果 `force_update` 为 True, 将会用新的配置覆盖旧的配置 > > 如果 `force_update` 为 False, 运行平均值将会更新为新旧值的平均值, 堵转平均值将会更新成新旧堵转平均值中较大的那一个值(例如一个滑胎有左右边界, 左边界堵转平均值小于右边界堵转平均值, 也就是说右边界比左边界更容易堵转, 保存最容易堵转的平均值). 该程序执行后, 步进电机会一直保持正向旋转, 当遇到堵转时, 稍等300ms左右, 停止该程序. 程序会记录运行时的 `SG_RESULT` 寄存器平均值和堵转时的寄存器平均值到 `conf_save_path` 中. 后续 `Slide` 类可以加载该配置文件实现堵转时停止电机. * 验证配置文件的值 或许您会好奇这个配置到底能不能用. `maix.ext_dev.tmc2209` 提供了测试该配置文件的函数 `slide_test`. 先保证电机微处于堵转状态, 然后修改参数以匹配您调用 `slide_scan` 的参数, 执行以下代码. example ```python from maix import ext_dev, pinmap, err port \"/dev/ttyS1\" uart_addr 0x00 uart_baudrate 115200 step_angle 1.8 micro_step 256 round_mm 60 speed 60 use_internal_sense_resistors True run_current_per 100 hold_current_per 100 if port \"/dev/ttyS1\": ret pinmap.set_pin_function(\"A19\", \"UART1_TX\") if ret ! err.Err.ERR_NONE: print(\"Failed in function pinmap...\") exit( 1) ret pinmap.set_pin_function(\"A18\", \"UART1_RX\") if ret ! err.Err.ERR_NONE: print(\"Failed in function pinmap...\") exit( 1) ext_dev.tmc2209.slide_test(port, uart_addr, uart_baudrate, step_angle, micro_step, round_mm, speed, True, True, run_current_per, hold_current_per, conf_save_path './slide_scan_example.bin') ``` 电机将会在堵转瞬间停止转动, 程序也会随之结束. `Slide.move()` 和 `Slide.reset()` 堵转停止逻辑也是如此. * 使用 `Slide` 使用 `Slide` 的思路与 `ScrewSlide` 基本无异, 只是 `Slide` 取消了回调函数并增加了堵转停止逻辑. 如果使用 `Slide` 时未传入配置文件, `Slide`也是可以使用的. 堵转检测阈值为电机运行开始时的平均数*`Slide.stop_default_per()`/100. 电机运行近期平均数低于该值时电机停止. 可以通过 `Slide.stop_default_per()` 获取和修改该值. ```python from maix import pinmap, ext_dev, err, time port \"/dev/ttyS1\" uart_addr 0x00 uart_baudrate 115200 step_angle 1.8 micro_step 256 round_mm 60 speed 60 use_internal_sense_resistors True run_current_per 100 hold_current_per 100 if port \"/dev/ttyS1\": ret pinmap.set_pin_function(\"A19\", \"UART1_TX\") if ret ! err.Err.ERR_NONE: print(\"Failed in function pinmap...\") exit( 1) ret pinmap.set_pin_function(\"A18\", \"UART1_RX\") if ret ! err.Err.ERR_NONE: print(\"Failed in function pinmap...\") exit( 1) slide ext_dev.tmc2209.Slide(port, uart_addr, uart_baudrate, step_angle, micro_step, round_mm, speed, cfg_file_path \"./slide_conf.bin\") slide.reset() slide.move(60) slide.move( 60) ``` ## 注意事项 **本驱动程序由纯串口实现, 优点是占有引脚占用引脚较少即可实现至多4个较高精度的电机驱动. 缺点是不适用于高精度要求的应用场景.** 已知问题: * 请勿使用 MaixCAM 的 UART0 作为驱动串口, 会导致 MaixCAM 无法正常开机等问题. 
**!!!如有BUG, 非常欢迎您提交 PR 反馈.** ## 免责声明 本电机驱动程序(以下简称“程序”)是由[Sipeed]基于BSD 3开源协议的仓库 [janelia arduino/TMC2209](https://github.com/janelia arduino/TMC2209) 进行修改和使用的。本程序仅供学习和研究使用,不保证在所有环境和条件下都能正常工作。使用本程序的风险由用户自行承担。 [Sipeed]不对因使用或无法使用本程序而导致的任何损失或损害承担责任,包括但不限于直接损失、间接损失、偶然损失、特殊损失、惩罚性损失或后果性损失。 用户在实际应用中使用本程序前,应自行进行充分的测试和验证,确保程序符合其特定需求和环境。[Sipeed]不对程序的准确性、可靠性、完整性或适用性做出任何明示或暗示的保证。 用户在使用本程序时应遵守所有适用的法律法规,并确保不侵犯任何第三方的合法权益。[Sipeed]不对因用户违反法律法规或侵犯第三方权益而导致的任何后果承担责任。 本免责声明的解释权归[Sipeed]所有,并保留随时修改本免责声明的权利。"},"/maixpy/doc/zh/modules/thermal_cam.html":{"title":"MaixCAM MaixPy 使用热红外图像传感器","content":" title: MaixCAM MaixPy 使用热红外图像传感器 目前官方还未上架硬件产品,如果只是小分辨率需求,可以自己淘宝买一个串口或者 IIC 模块进行驱动,后面官方上线高分辨率模块会更新本文档。 热红外摄像头模块比如可以参考[K210+MLX90640红外热像仪](https://neucrack.com/p/189), [热红外 heimann (海曼) HTPA 32x32d](https://neucrack.com/p/199)。"},"/maixpy/doc/zh/modules/temp_humi.html":{"title":"MaixCAM MaixPy 读取温湿度传感器","content":" title: MaixCAM MaixPy 读取温湿度传感器 ## 简介 通过给 MaixCAM 外挂一个温湿度传感器模块,可以轻松读取到环境温度和湿度,这里以 `Si7021` 这款传感器为例,通过 `I2C` 可以驱动它。 ## 使用 完整的代码在[MaixPy/examples/ext_dev/sensors/temp_humi_si7021.py](https://github.com/sipeed/MaixPy/blob/main/examples/sensors/temp_humi_si7021.py) 注意系统镜像需要 `> 2024.6.3_maixpy_v4.2.1` 版本。"},"/maixpy/doc/zh/modules/tof.html":{"title":"","content":" titile: MaixCAM MaixPy 使用 TOF 模块测距和地形检测 Sipeed 官方有另外[两款 TOF 模块](https://wiki.sipeed.com/hardware/zh/maixsense/index.html) 可以用来测距,可以购买使用串口通信使用。"},"/maixpy/doc/zh/source_code/build.html":{"title":"MaixCAM MaixPy 开发源代码指南","content":" title: MaixCAM MaixPy 开发源代码指南 ## 获取源代码 ```shell mkdir p ~/maix cd ~/maix git clone https://github.com/sipeed/MaixPy ``` ## 获取 MaixCDK 源码 MaixPy 项目依赖于 MaixCDK,需要先克隆它,放到电脑的某个目录(勿放在 MaixPy 目录下) ```shell cd ~/maix git clone https://github.com/sipeed/MaixCDK ``` 然后需要设置环境变量 `MAIXCDK_PATH` 指定 MaixCDK 的路径,可以在 `~/.bashrc` 或者`~/.zshrc`(根据你使用的shell决定)添加: ```shell export MAIXCDK_PATH ~/maix/MaixCDK ``` 只有在成功设置环境变量后, MaixPy 才能找到 MaixCDK 源码。 ## 构建并打包成 wheel 文件 ```shell cd ~/maix/MaixPy python setup.py bdist_wheel maixcam ``` `maixcam` 可以被替换为其他板卡配置, 请查看 `MaixPy/platforms` 目录。 构建成功后, 你会在 `dist` 目录中找到 wheel 文件, 传输到设备(开发板),在设备终端中使用 `pip install U MaixPy****.whl` 在你的设备上安装或升级。 > `python setup.py bdist_wheel maixcam skip build` 不会执行构建命令, 只会打包 wheel 文件, 因此你可以先使用 `maixcdk menuconfig` 和 `maixcdk build` 来自定义构建。 > 另外如果你是在调试 API,需要频繁安装,使用 pip 安装会比较慢,可以直接编译后拷贝 `maix` 目录到设备的 `/usr/lib/python3.11/site packages`目录下覆盖旧的文件即可。 ## 手动构建 ```shell maixcdk build ``` ## 修改源代码后运行测试 * 首先, 构建源代码 ```shell maixcdk build ``` * 如果为 PC 自身构建(平台 `linux`): 然后执行 `./run.sh your_test_file_name.py` 来运行 Python 脚本。 ```shell cd test ./run.sh examples/hello_maix.py ``` * 如果为板卡交叉编译: * 最快的方式是将 `maix` 目录复制到设备的 `/usr/lib/python3.11/site packages/` 目录, 然后在设备上运行脚本。 * 或者打包 wheel 文件并在设备上使用 `pip install U MaixPy****.whl` 安装, 然后在设备上运行脚本。 ## 本地预览文档 文档位于 [docs](https://github.com/sipeed/MaixPy/tree/main/docs) 目录, 使用 `Markdown` 格式, 你可以使用 [teedoc](https://github.com/teedoc/teedoc) 来生成网页版本的文档。 API 文档会在构建 MaixPy 固件时生成, **如果你没有构建 MaixPy, API 文档将会是空的**。 ```shell pip install teedoc U cd docs teedoc install i https://pypi.tuna.tsinghua.edu.cn/simple teedoc serve ``` 然后访问 `http://127.0.0.1:2333` 在网页浏览器中预览文档。 ## 对于想要贡献的开发者 请查看 [MaixPy 开发源代码指南](./contribute.html) 如果在使用源代码时遇到任何问题, 请先参考 [FAQ](./faq.html)。"},"/maixpy/doc/zh/source_code/add_c_module.html":{"title":"给 MaixCAM MaixPy 添加一个 C/C++ 模块","content":" title: 给 MaixCAM MaixPy 添加一个 C/C++ 模块 ## 简介 有时候需要高效地执行某个函数, Python 的速度无法满足时,就可以使用 C/C++ 或者其它编译型语言来实现。 ## 通用函数封装 如果你想封装的函数实现的功能不依赖 MaixPy 的其它功能,直接使用 Python 使用 C/C++ 添加模块的通用方法,具体方法可以自行百度,比如 ffi, ctype 等 > 欢迎 PR 
添加方法 ## 如果你的模块还想依赖 MaixPy 的其它基础 API ### 方法一 直接修改 MaixPy 固件,然后编译过即可,参考 [查看 MaixPy API 源码](../basic/view_src_code.html),这种方法最简单快捷,如果代码封装好了还能合并到官方仓库(提交 PR)。 * 按照[编译 MaixPy 源码](./build.html) 通过即可获得`dist/***.whl`安装包。 * 将`dist`目录下的`.whl`包发送到设备,然后使用运行代码`import os;os.system(\"pip install /root/xxxxx.whl\")`即可(替换路径)。 * 如果调试的时候觉得安装 `.whl` 包太慢了,可以使用`maixcdk build` 编译,然后使用`scp r maix_xxx root@10.228.104.1:/usr/lib/python3.11/site packages`直接拷贝到设备系统种覆盖包,这里需要根据你的包名和设备 ip 替换一下。 * 当你调试好后如果觉得自己填加的功能不错,可以考虑合并到官方的仓库,具体方法可以搜索引擎搜索\"github 提交 PR\"相关关键词学习。 修改代码: 正如 [查看 MaixPy API 源码](../basic/view_src_code.html) 问种所描述的查看和修改源码的方式,增加 C++ 函数,并且填加注释,然后编译后 MaixPy 中就能调用了,非常简单。 比如: ```cpp namespace maix::test { /** * My function, add two integer. * @param a arg a, int type * @param b arg b, int type * @return int type, will a + b * @maixpy maix.test.add */ int add(int a, int b); } ``` 没错,直接写一个 C++ 语法的函数,注意这里加了一个`@maixpy` 的注释,编译时会自动生成 Python 函数,就是这么简单! 然后就能通过`maix.test.add(1, 2)` 来调用函数了。 ### 方法二 基于工程模板创建一个 MaixPy 模块工程,这种方法适用于不想改动 MaixPy 源码,希望单独加一个包,并且还能用上 MaixPy(MaixCDK)的 API 的情况。方法如下: * 首先[编译 MaixPy 源码](./build.html) 通过,保证我们的编译环境没问题。 * 复制一份 [MaixPy/tools/maix_module](https://github.com/sipeed/MaixPy/tree/main/tools/maix_module) 工程模板到一个新的目录,可以和`MaixPy`放在同一个目录。比如将所有文件和目录复制到了`maix_xxx` 目录下。 * 在`maix_xxx`目录下,终端执行`python init_files.py`来初始化项目文件。 * 修改项目名:修改`module_name.txt` 文件,改成你要的模块名称,必须以`maix_`开头,这样方便其它用户能在 [pypi.org](https://pypi.org) 或者 [github.com](https://github.com) 搜索到你的项目。 * 和 MaixPy 一样执行`python setup.py bdist_wheel linux` 就可以开始为电脑构建。 * 构建完成后可以直接在项目根目录执行`python c \"import maix_xxx;maix_xxx.basic.print('Li Hua')\"`就能运行你的模块函数了。 * 执行`python setup.py bdist_wheel maixcam` 就可以为`MaixCAM` 构建软件包了。需要注意的是,构建过程种的代码提示文件(pyi文件)只能在给`linux` 平台构建的时候生成,所以在正式发布的时候需要先执行上一步的`linux`平台构建生成代码提示文件,然后再执行本步的命令生成`MaixCAM`平台的软件包。 * 将`dist`目录下的`.whl`包发送到设备,然后使用运行代码`import os;os.system(\"pip install /root/xxxxx.whl\")`即可(替换路径)。 * 如果调试的时候觉得安装 `.whl` 包太慢了,可以使用`maixcdk build` 编译,然后使用`scp r maix_xxx root@10.228.104.1:/usr/lib/python3.11/site packages`直接拷贝到设备系统种覆盖包,这里需要根据你的包名和设备 ip 替换一下。 * 当你调试好代码后,可以考虑将代码开源到[github.com](https://github.com),并且上传到[pypi.org](https://pypi.org)(具体上传方法可以看官方文档或者搜索教程,大概就是`pip install twine`然后 `twine upload dist/maix_xxx***.whl`就可以了。),写好后欢迎到[maixhub.com/share](https://maixhub.com/share)来分享告诉大家你的成果! 修改代码: 正如 [查看 MaixPy API 源码](../basic/view_src_code.html) 问种所描述的查看和修改源码的方式,在`components/maix/include` 和 `components/maix/src` 下增加源文件,增加 C++ 函数,并且填加注释,然后编译后就直接能调用了,非常简单。 比如: ```cpp namespace maix_xxx::test { /** * My function, add two integer. * @param a arg a, int type * @param b arg b, int type * @return int type, will a + b * @maix_xxx maix_xxx.test.add */ int add(int a, int b); } ``` 没错,直接写一个 C++ 语法的函数,注意这里加了一个`@maix_xxx` 的注释,编译时会自动生成 Python 函数,就是这么简单! 
然后就能通过`maix_xxx.test.add(1, 2)` 来调用函数了。"},"/maixpy/doc/zh/source_code/contribute.html":{"title":"参与 MaixCAM MaixPy 文档修改和贡献代码","content":" title: 参与 MaixCAM MaixPy 文档修改和贡献代码 ## 参与 MaixPy 文档修改 * 点击要修改的文档右上角的`编辑本页`按钮,进入 github 源文档页面。 * 保证已经登录了 GitHub 账号。 * 在 github 预案文档页面点击右上角铅笔按钮修改文档内容。 * github 会提示需要 fork 一份到自己的仓库,点击 fork 按钮。 > 这一步就是将 MaixPy 源码仓库复刻一份到你自己的账号下,这样你就可以自由修改了。 * 修改文档内容,然后在页面底部填写修改说明,点击提交修改。 * 然后在你的仓库中找到 Pull requests 按钮,点击创建一个 Pull requests。 * 然后在弹出的页面中填写修改说明,点击提交 Pull requests,其它人和管理员就可以在[Pull requests 页面](https://github.com/sipeed/MaixPy/pulls)看到你的修改了。 * 等待管理员审核通过后,你的修改就会合并到 MaixPy 源码仓库中了。 * 合并成功后,文档会自动更新到 [MaixPy 官方文档](https://wiki.sipeed.com/maixpy)。 > 文档经过 CDN 缓存了的,可能需要等待一段时间才能看到更新,紧急更新可以联系管理员手动刷新。 > 也可以访问 [en.wiki.sipeed.com/maixpy](https://en.wiki.sipeed.com/maixpy) 查看 github pages 服务版本,这个是没有缓存实时更新的。 ## 参与 MaixPy 代码贡献 * 访问 MaixPy 代码仓库地址:[github.com/sipeed/MaixPy](https://github.com/sipeed/MaixPy) * 在修改代码前最好先创建一个 [issue](https://github.com/sipeed/MaixPy/issues) ,描述你要修改的内容让大家知道你的想法和计划,这样大家可以参与修改讨论,以免重复劳动。 * 点击右上角的 fork 按钮,将 MaixPy 代码仓库复刻一份到你自己的账号下。 * 然后在你的账号下 clone 一份代码到本地。 * 修改代码后提交到你的仓库中。 * 然后在你的仓库中找到 Pull requests 按钮,点击创建一个 Pull requests。 * 然后在弹出的页面中填写修改说明,点击提交 Pull requests,其它人和管理员就可以在[Pull requests 页面](https://github.com/sipeed/MaixPy/pulls)看到你的修改了。 * 等待管理员审核通过后,你的修改就会合并到 MaixPy 源码仓库中了。 > 需要注意的是 MaixPy 的代码大多数是从 [MaixCDK](https://github.com/sipeed/MaixCDK) 自动生成的,所以如果你修改 C/C++ 源码,很有可能你需要先修改这个仓库。"},"/maixpy/doc/zh/source_code/faq.html":{"title":"MaixCAM MaixPy 源代码常见问题","content":"MaixCAM MaixPy 源代码常见问题 ## subprocess.CalledProcessError: Command '('lsb_release', ' a')' returned non zero exit status 1. 以 root 身份编辑 `/usr/bin/lsb_release`,将第一行从 `#!/usr/bin/python3` 更改为 `python3`。 然后重新编译,应该就可以工作了。 ## ImportError: arg(): could not convert default argument 'format: maix::image::Format' in method '.**init**' into a Python object (type not registered yet?) 
Pybind11 需要你先注册 `image::Format`,然后才能在 `camera::Camera` 中使用它,所以我们必须先在生成的 `build/maixpy_wrapper.cpp` 源文件中定义 `image::Format`。 要实现这一点,请编辑 `components/maix/headers_priority.txt`,被依赖的应该放在依赖它的前面。 例如: ``` maix_image.hpp maix_camera.hpp ``` ## /usr/bin/ld: /lib/libgdal.so.30: undefined reference to `std::condition_variable::wait(std::unique_lock&)@GLIBCXX_3.4.30' collect2: error: ld returned 1 exit status 一般在为 Linux 构建时并且使用 conda 环境时容易出现,conda 环境中的一些库编译参数问题,解决方法就是不用 conda 即可, 或者单独找到 conda 中的那个库,替换成系统的或者直接删掉(会从系统找)"},"/maixpy/doc/zh/source_code/maixcdk.html":{"title":"MaixCAM 切换到 MaixCDK 使用 C/C++ 开发应用","content":" title: MaixCAM 切换到 MaixCDK 使用 C/C++ 开发应用 除了使用 MaixPy 开发,还有对应的 C/C++ SDK 可以使用,项目名称为 [MaixCDK](https://github.com/sipeed/MaixCDK)。 ## MaixCDK 介绍 MaixPy 基于 MaixCDK 构建,MaixPy 的大多数 API 都是基于 MaixCDK 的 API 自动生成的,所以 MaixPy 有的功能 MaixCDK 都包含。 如果你更熟悉 C/C++ 编程,或者需要更高的性能,可以使用 MaixCDK 进行开发。 ## MaixCDK 使用 MaixCDK 代码仓库地址:[github.com/sipeed/MaixCDK](https://github.com/sipeed/MaixCDK), 你可以在这里找到 MaixCDK 的代码和文档。"},"/maixpy/doc/zh/README_no_screen.html":{"title":"MaixCAM MaixPy 无屏幕版快速开始","content":" title: MaixCAM MaixPy 无屏幕版快速开始 ## 关于本页文档 正如[快速开始所述](./index.html),开发时**强烈推荐**购买带屏幕版本,会有更好的开发体验,包括使用内置的 APP,以及使用 MaixHub 应用商店的 APP,以及方便调试(比如常用设置可以直接触摸点击界面完成,可以实时在屏幕看到图像等)。 当然,如果你实在没有条件购买带屏幕的,或者你在量产时需要无屏幕的版本,请看本文。 ## 获得 MaixCAM 设备 * **MaixCAM**:在 [Sipeed 淘宝](https://item.taobao.com/item.htm?id 784724795837) 或者 [Sipeed 速卖通](https://www.aliexpress.com/store/911876460) 店铺购买 MaixCAM 。 ## 上手配置 ### 准备 TF 镜像卡和插入到设备 如果你买的套餐里面有 TF 卡,里面已经有出厂镜像了,如果出厂时 TF 卡没有安装到设备,需要先小心打开外壳(注意里面有排线连接不要扯断了),然后插入 TF 卡。另外因为出厂的固件可能比较老旧,**务必**按照[升级和烧录系统](https://wiki.sipeed.com/maixpy/doc/zh/basic/os.html)先将系统升级到最新版本,否则可能会遇到某些应用 和 API 无法使用的问题。 如果没买 TF 卡,则需要将系统烧录进自备的 TF 卡中,烧录方法请看[升级和烧录系统](./basic/os.html),然后再安装到板子。 ### 上电开机 使用 `Type C` 数据线连接 `MaixCAM` 设备给设备供电,等待设备开机。 **首先**:保证 USB 线材质量足够好,以及电脑 USB 端口质量够好(供电 > 5v 500mA,抗干扰能力正常)。 第一次等待 20 秒左右,然后电脑会出现一个或者两个虚拟网卡设备(可以在电脑的网络管理器看到)。 如果虚拟网卡设备: * 请确认购买了配套的 TF 卡,如果确认有 TF 卡,并且已经插入到设备,可以**尝试[更新到最新的系统](./basic/os.html)**。 * 如果你没有购买 TF 卡套餐,你需要按照[升级和烧录系统](./basic/os.html)的方法烧录最新的系统到 TF 卡。 * 请确认 USB 有没有松动,以及 USB 线材质量,可以换一根质量好点的线尝试。 * 请确认 USB 口供电足够,可以换一个 USB 口,或者有条件在其它电脑试试。 ## 准备连接电脑和设备 为了后面电脑(PC)能和 设备(MaixCAM)通信,我们要让它们在同一个局域网内,提供了两种方式,我们首先使用方法一: * **方法一**:有线连接, 设备通过 USB 线连接到电脑,设备会虚拟成一个 USB 网卡,这样和电脑就通过 USB 在同一局域网了,遇到问题也可以在 [FAQ](./faq.html) 中找常见问题。 .. 
details::方法二在不同电脑系统中驱动安装方法: :open: true 默认会有两种 USB 虚拟网卡驱动(NCM 和 RNDIS驱动),以满足不同系统的需求: * **Windows**: windows 所有系统会自动安装 RNDIS 驱动, 仅 Win11 会自动安装 NCM 驱动,两种驱动有**一个能用就行**(NCM 速度比 RNDIS 速度快)。 * 打开任务管理器 > 性能,可以看到一个虚拟的以太网,并且可以看到 ip 比如 `10.131.167.100` 是电脑的 ip, 设备的 ip 是最后一位改为`1` 即 `10.131.167.1`。如果是 Win11 则会看到两个虚拟网卡,随便选择一个 IP 使用即可。 * 另外也可以打开电脑的 `设备管理器`(搜索栏搜索`设备管理器`), RNDIS 和 NCM 驱动被正确安装的效果,**一个能用就行**: ![RNDIS ok](../../static/image/rndis_windows.jpg) ![NCM ok](../../static/image/windows_ncm_ok.png) * **Linux**: 无需额外设置,插上 USB 线即可。 使用 `ifconfig` 或者 `ip addr` 查看到 `usb0` 和 `usb1` 网卡,两个 IP 都可以使用,**注意** 这里看到的 ip 比如 `10.131.167.100` 是电脑的 ip, 设备的 ip 是最后一位改为`1` 即 `10.131.167.1`。 * **MacOS**: 在`系统设置` >`网络`里面查看到 `usb` 网卡,**注意** 这里看到的 ip 比如 `10.131.167.100` 是电脑的 ip, 设备的 ip 是最后一位改为`1` 即 `10.131.167.1`。 * **方法二**:无线连接, 设备使用 WiFi 连接到电脑连接的同一个路由器或者 WiFi 热点下(WiFi 如果出现画面卡顿或者延迟高的问题可以使用有线连接。),连接无线热点方式有两种: * 修改 TF 的 boot 分区中的 `wifi.ssid` 和 `wifi.pass` 文件,重启即可连接。修改方法: * 如果你已经了解 SSH, 可以通过 ssh 连接到设备(如果有线连接可用)修改`/boot`目录下文件。 * 也可以按照前面升级系统的方式进入升级模式后电脑会出现一个 U 盘,然后修改里面的文件即可,注意修改完要先 弹出U盘 再重启。 * 也可以直接用 读卡器,电脑会出现一个U盘,修改其中的`wifi.ssid` 和 `wifi.pass`文件即可,注意修改完要先 弹出U盘 再重启。 * 如果你有线已经可以使用,按照下一步已经可以使用 MaixVision 运行代码了,可以修改例程 `tools/wifi_connect.py` 中的 SSID 和 PASSWORD 然后运行即可。 ## 开发环境准备 * 首先保证上一步电脑和设备已经在同一个局域网中了。 * 下载 [MaixVision](https://wiki.sipeed.com/maixvision) 并安装。 * 使用 Type C 连接设备和电脑,打开 MaixVision,点击左下角的`“连接”`按钮,会自动搜索设备,稍等一下就能看到设备,点击设备有点的连接按钮以连接设备。 如果**没有扫描到设备**, 也可以在 [FAQ](./faq.html) 中找到解决方法。 这里有 MaixVision 的使用示例视频: ### 联网 首次运行需要连接网络,以激活设备安装运行库。 如果没有路由器可以用手机开一个热点。 MaixVision 修改例程 `tools/wifi_connect.py` 中的 SSID 和 PASSWORD 然后运行即可。其它连接 WiFi 的方法看前面的介绍。 ### 升级运行库 **这一步很重要 !!!** 这一步如果不做好,其它应用和功能可能无法运行(比如闪退等)。 * 首先保证上一步连接 WiFi 已经完成,并且获取到 IP 地址能访问公网。 * 运行 MaixVision 例程里面的 `tools/install_runtime.py` 来安装最新的运行库。 如果显示`Request failed` 或者`请求失败`,请先检查网络是否已经连接,需要能连接到互联网,如果还不行,请拍照联系客服处理即可。 ## 运行例程 点击 MaixVision 左侧的`示例代码`,选择一个例程,点击左下角`运行`按钮将代码发送到设备上运行。 比如: * `hello_maix.py`,点击`运行`按钮,就能看到 MaixVision 终端有来自设备打印的消息,以及右上角出现了图像。 * `camera_display.py`,这个例程会打开摄像头并在屏幕上显示摄像头的画面。 ```python from maix import camera, display, app disp display.Display() # 构造一个显示对象,并初始化屏幕 cam camera.Camera(640, 480) # 构造一个摄像头对象,手动设置了分辨率为 640x480, 并初始化摄像头 while not app.need_exit(): # 一直循环,直到程序退出(可以通过按下设备的功能按键退出或者 MaixVision 点击停止按钮退出) img cam.read() # 读取摄像头画面保存到 img 变量,可以通过 print(img) 来打印 img 的详情 disp.show(img) # 将 img 显示到屏幕上 ``` * `yolov5.py` 会检测摄像头画面中的物体框出来并显示到屏幕上,支持 80 种物体的检测,具体请看[YOLOv5/YOLOv8/YOLO11 物体检测](./vision/yolov5.html)。 其它例程可以自行尝试。 > 如果你使用相机例程遇到了图像显示卡顿,可能是网络不通畅,或者 USB 线质量或者主机 USB 质量太差造成,可以更换连接方式或者更换线缆、主机 USB 口或者电脑等。 ## 安装应用到设备 上面是在设备中运行代码,`MaixVision` 断开后代码就会停止运行,如果想让代码出现在开机菜单中,可以打包成应用安装到设备上。 点击 `MaixVision` 左下侧的安装应用按钮,填写应用信息,会将应用安装到设备上,然后在设备上就能看到应用了。 也可以选择打包应用,将你的应用分享到[MaixHub 应用商店](https://maixhub.com/app)。 > 默认例程没有显式编写退出功能,进入应用后按下设备的功能按键即可退出应用。(对于 MaixCAM 是 user 键) 如果想让程序开机自启动,可以修改并运行例程`tools/set_autostart.py`即可。 ## 下一步 看到这里,如果你觉得不错,**请务必来 [github](https://github.com/sipeed/MaixPy) 给 MaixPy 开源项目点一个 star(需要先登录 github), 你的 star 和认同是我们不断维护和添加新功能的动力!** 到这里你已经体验了一遍使用和开发流程了,接下来可以学习 `MaixPy` 语法和功能相关的内容,请按照左边的目录进行学习,如果遇到 `API` 使用问题,可以在[API 文档](/api/)中查找。 学习前最好带着自己学习的目的学,比如做一个有趣的小项目,这样学习效果会更好,项目和经验都可以分享到[MaixHub 分享广场](https://maixhub.com/share),会获得现金奖励哦! 
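作为补充,前面“运行例程”一节中提到的 `camera_display.py`,其完整可运行代码大致如下(仅为参考示意,其中 640x480 分辨率为文中示例值,可按需修改):

```python
from maix import camera, display, app

disp = display.Display()       # 构造一个显示对象,并初始化屏幕
cam = camera.Camera(640, 480)  # 构造一个摄像头对象,分辨率设置为 640x480

while not app.need_exit():     # 一直循环,直到程序退出(按设备功能键或在 MaixVision 点击停止)
    img = cam.read()           # 读取一帧摄像头画面
    disp.show(img)             # 将画面显示到屏幕上
```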
## 常见问题 FAQ 遇到问题可以优先在 [FAQ](./faq.html) 里面找,找不到再在下面的论坛或者群询问,或者在 [MaixPy issue](https://github.com/sipeed/MaixPy/issues) 提交源码问题。 ## 分享交流 * **[MaixHub 项目和经验分享](https://maixhub.com/share)** :分享你的项目和经验,获得现金打赏,获得官方打赏的基本要求: * **可复现型**:较为完整的项目制作复现过程。 * **炫耀型**:无详细的项目复现过程,但是项目展示效果吸引人。 * Bug 解决经验型:解决了某个难题的过程和具体解决方法分享。 * [MaixPy 官方论坛](https://maixhub.com/discussion/maixpy)(提问和交流) * QQ 群: (建议在 QQ 群提问前先发个帖,方便群友快速了解你需要了什么问题,复现过程是怎样的) * MaixPy (v4) AI 视觉交流大群: 862340358 * Telegram: [MaixPy](https://t.me/maixpy) * MaixPy 源码问题: [MaixPy issue](https://github.com/sipeed/MaixPy/issues) * 商业合作或批量购买请联系 support@sipeed.com 。"},"/maixpy/doc/zh/projects/line_tracking_robot.html":{"title":"MaixCAM MaixPy 小车巡线","content":" title: MaixCAM MaixPy 小车巡线 update: date: 2024 05 09 author: lxowalle version: 1.0.0 content: 初版文档 阅读本文前,确保已经知晓如何开发MaixCAM,详情请阅读[快速开始](../index.html) ## 简介 本文将介绍如何使用MaixPy实现寻线小车 ## 如何使用MaixPy实现寻线小车 1. 准备MaixCAM与小车 2. 实现寻线功能 3. 实现小车控制功能 ### 准备MaixCAM与小车 TODO ### 实现寻线功能 使用`image`模块的`get_regression`可以快速寻找到直线,详情见[寻找直线](./line_tracking.html) 代码实现: ```python from maix import camera, display, image cam camera.Camera(320, 240) disp display.Display() # thresholds [[0, 80, 40, 80, 10, 80]] # red thresholds [[0, 80, 120, 10, 0, 30]] # green # thresholds [[0, 80, 30, 100, 120, 60]] # blue while 1: img cam.read() lines img.get_regression(thresholds, area_threshold 100) for a in lines: img.draw_line(a.x1(), a.y1(), a.x2(), a.y2(), image.COLOR_GREEN, 2) theta a.theta() rho a.rho() if theta > 90: theta 270 theta else: theta 90 theta img.draw_string(0, 0, \"theta: \" + str(theta) + \", rho: \" + str(rho), image.COLOR_BLUE) disp.show(img) ``` 上述代码实现了寻线功能, 上述参数中需注意: 设置合适的thresholds值来寻找到对应的直线 设置合适的area_threshold值来过滤环境干扰,可以过滤一些面积小的直线 使用`a.theta()`获取直线的角度 使用`a.rho()`获取直线与原点(原点在左上角)的距离 根据实际环境调试好寻线参数后, 就可以利用`a.theta()`和`a.rho()`控制小车方向了。 ### 实现小车控制功能 TODO"},"/maixpy/doc/zh/projects/index.html":{"title":"MaixCAM MaixPy 项目实战 介绍和汇总","content":" title: MaixCAM MaixPy 项目实战 介绍和汇总 ## 简介 这里提供一些常见的项目实战示例,方便社区成员可以参考复现使用,也方便激发大家的灵感做出更多更好的应用和项目出来。 要找到用 MaixPy 实现的相关的项目,有几种方式: ## MaixPy 官方文档 也就是本文档左边目录可以找到的项目实战,比如`小车巡线`。 如果你有好的项目,或者好的项目推荐,也可以贡献文档添加进来。 ## MaixHub 项目分享广场 在[MaixHub 项目分享](https://maixhub.com/share?type project) 栏目可以找到项目分享。 有高质量的分享也会被链接到 MaixPy 官方文档。 你也可以分享你的项目制作方法,会获得官方(必获得)以及社区成员的现金打赏(通常高质量能解决急需需求的更容易被打赏)。 推荐项目: * maixcam部署yolov5s 自定义模型: https://maixhub.com/share/23 ## MaixHub 应用分享 除了项目分享以外,还可以在[MaixHub 应用商店](https://maixhub.com/app) 找到可以直接运行的应用,有部分应用可能是用 MaixPy 编写的,如果作者提供了源码或者写了详细的教程也都可以参考。 推荐项目: * 简易HTTP串流服务器: https://maixhub.com/app/19 * 桌面电脑性能监视器: https://maixhub.com/app/13 * 安全帽检测模型应用: https://maixhub.com/app/10"},"/maixpy/doc/zh/projects/face_tracking.html":{"title":"MaixCAM MaixPy 2轴舵机云台人脸追踪","content":" title: MaixCAM MaixPy 2轴舵机云台人脸追踪 update: date: 2024 06 11 author: iawak9lkm version: 1.0.0 content: 初版文档 阅读本文前,确保已经知晓如何开发MaixCAM,详情请阅读[快速开始](../index.html) [源码地址](https://github.com/sipeed/MaixPy/blob/main/projects/app_face_tracking) [APP下载地址](https://maixhub.com/app/31) ## 简介 基于 MaixCAM 和云台的人脸追踪程序。实际效果如下图所示: ![](../../assets/face_tracking1.jpg) ![](../../assets/face_tracking2.jpg) ## 如何使用例程 * 组装好您的云台和MaixCAM。 * 修改 `main.py` 中的参数。 修改每个舵机使用的 MaixCAM 引脚,指定的引脚必须具备 PWM 功能。`servos.Servos` 会自行将该引脚配置为 PWM 功能。 ```python ROLL_PWM_PIN_NAME \"A17\" PITCH_PWM_PIN_NAME \"A16\" ``` 修改两个舵机的初始位置。 ```python init_pitch 80 # init position, value: [0, 100], means minimum angle to maxmum angle of servo init_roll 50 # 50 means middle ``` 修改两个舵机各自的活动范围的最小最大 PWM 占空比。请注意,某些云台结构中的舵机超出物理限制的最大活动范围时可能会造成不可预期的后果,请务必保证以下设定值对应的舵机运动范围内无阻碍。 ```python 
PITCH_DUTY_MIN 3.5 # The minimum duty cycle corresponding to the range of motion of the y axis servo. PITCH_DUTY_MAX 9.5 # Maximum duty cycle corresponding to the y axis servo motion range. ROLL_DUTY_MIN 2.5 # Minimum duty cycle for x axis servos. ROLL_DUTY_MAX 12.5 # Maxmum duty cycle for x axis servos. ``` 选择舵机的运动方向。 ```python pitch_reverse False # reverse out value direction roll_reverse True # reverse out value direction ``` * 最后执行代码即可。 如果您是从 MaixHub 上安装的应用,在启动器中点击 face_tracking 即可执行本程序。 如果您是从 Github 上获取的源码, 您可以在 [MaixVision](https://wiki.sipeed.com/maixvision) 中导入该工程的文件夹执行整个工程即可。 MaixVision详情请参考 [MaixVision说明](https://wiki.sipeed.com/maixpy/doc/zh/basic/maixvision.html)。 当然您也可以将整个工程文件夹通过您喜欢的方式拷贝到我们的 MaixCAM 上, 然后用 python 执行。 * 想要退出程序按左上角的按钮即可。 ![](../../../../projects/app_face_tracking/assets/exit.jpg) ## 常见问题 * 人脸跟踪效果不理想。 不同的云台使用的 PID 参数不尽相同,您可以通过调节 PID 值来使得追踪效果更丝滑。 ```python pitch_pid [0.3, 0.0001, 0.0018, 0] # [P I D I_max] roll_pid [0.3, 0.0001, 0.0018, 0] # [P I D I_max] ``` * 在完成跟踪后,云台对着不动的人脸小幅度左右抖动一段时间。 通常可以通过调节 PID 来使得该影响尽可能小;但是无法避免云台物理结构带来的抖动。可以尝试调节死区来减小抖动。 ```python target_ignore_limit 0.08 # when target error < target_err_range*target_ignore_limit , set target error to 0 ``` * 显示屏显示或终端打印 `PIN: XXX does not exist`。 这是因为 MaixCAM 板子上引出的引脚中并不存在该引脚,请在 MaixCAM 上选择一个带 PWM 功能的引脚。 * 显示屏显示或终端打印 `Pin XXX doesn't have PWM function`。 这是因为 MaixCAM 板子上引出的这个引脚没有 PWM 功能,请选择一个带 PWM 功能的引脚。 ## 如何追踪其他物体 * 在 `main.py` 中存在一个类 `Target`,该类用于自定义需要追踪的目标。 * 在 `__init__` 中,请初始化您需要用到的对象,比如摄像头等。 * 在 `__get_target()` 中,您需要计算出被追踪物体的中心点,如果帧中不存在被追踪物体,请返回 1, 1 以确保程序在未找到目标时暂时不做动作。同时,您也需要在返回坐标点之前调用 `self.__exit_listener(img)` 和 `self.disp.show(img)` 确保程序能够与您正常的完成交互。"},"/maixpy/doc/zh/vision/apriltag.html":{"title":"MaixCAM MaixPy 识别 Apriltag 标签","content":" title: MaixCAM MaixPy 识别 Apriltag 标签 update: date: 2024 04 03 author: lxowalle version: 1.0.0 content: 初版文档 阅读本文前,确保已经知晓如何开发MaixCAM,详情请阅读[快速开始](../index.html) ## 简介 本文介绍如何使用MaixPy来识别Apriltag标签 ## 使用 MaixPy 识别Apriltag标签 MaixPy的 `maix.image.Image`中提供了`find_apriltags`方法,可以可以识别apriltag标签。 ### 如何识别Apriltag标签 一个简单的示例,实现识别apriltag标签并画框 ```python from maix import image, camera, display cam camera.Camera() disp display.Display() families image.ApriltagFamilies.TAG36H11 x_scale cam.width() / 160 y_scale cam.height() / 120 while 1: img cam.read() new_img img.resize(160, 120) apriltags new_img.find_apriltags(families families) for a in apriltags: corners a.corners() for i in range(4): corners[i][0] int(corners[i][0] * x_scale) corners[i][1] int(corners[i][1] * y_scale) x int(a.x() * x_scale) y int(a.y() * y_scale) w int(a.w() * x_scale) h int(a.h() * y_scale) for i in range(4): img.draw_line(corners[i][0], corners[i][1], corners[(i + 1) % 4][0], corners[(i + 1) % 4][1], image.COLOR_RED) img.draw_string(x + w, y, \"id: \" + str(a.id()), image.COLOR_RED) img.draw_string(x + w, y + 15, \"family: \" + str(a.family()), image.COLOR_RED) disp.show(img) ``` 步骤: 1. 导入image、camera、display模块 ```python from maix import image, camera, display ``` 2. 初始化摄像头和显示 ```python cam camera.Camera() disp display.Display() ``` 3. 从摄像头获取图片并显示 ```python while 1: img cam.read() disp.show(img) ``` 4. 调用`find_apriltags`方法识别摄像头图片中的apriltag标签 ```python new_img img.resize(160, 120) apriltags new_img.find_apriltags(families families) ``` `img`是通过`cam.read()`读取到的摄像头图像 `img.resize(160, 120)`是用来将图像缩放得更小,用更小的图像来让算法计算得更快 `new_img.find_apriltags(families families)`用来寻找apriltag标签,并将查询结果保存到`apriltags`,以供后续处理。其中families用来选择apriltag族,默认为`image.ApriltagFamilies.TAG36H11` 5. 
处理识别标签的结果并显示到屏幕上
```python
for a in apriltags:
    # 获取位置信息(并映射坐标到原图)
    x = int(a.x() * x_scale)
    y = int(a.y() * y_scale)
    w = int(a.w() * x_scale)
    corners = a.corners()
    for i in range(4):
        corners[i][0] = int(corners[i][0] * x_scale)
        corners[i][1] = int(corners[i][1] * y_scale)
    # 显示
    for i in range(4):
        img.draw_line(corners[i][0], corners[i][1], corners[(i + 1) % 4][0], corners[(i + 1) % 4][1], image.COLOR_RED)
    img.draw_string(x + w, y, "id: " + str(a.id()), image.COLOR_RED)
    img.draw_string(x + w, y + 15, "family: " + str(a.family()), image.COLOR_RED)
    img.draw_string(x + w, y + 30, "rotation : " + str(180 * a.rotation() // 3.1415), image.COLOR_RED)
``` 遍历`apriltags`的成员,`apriltags`是通过`img.find_apriltags()`扫描apriltag标签的结果,如果找不到标签则`apriltags`的成员为空 `x_scale`和`y_scale`用来映射坐标,由于`new_img`是缩放后的图像,计算apriltag的坐标时需要经过映射后才能正常的画在原图`img`上 `a.corners()`用来获取已扫描到的标签的四个顶点坐标,`img.draw_line()`利用这四个顶点坐标画出标签的形状 `img.draw_string`用来显示标签的内容,其中`a.x()`和`a.y()`用来获取标签左上角坐标x和坐标y,`a.id()`用来获取标签的id,`a.family()`用来获取标签族类型,`a.rotation()`用来获取标签的旋转角度。 ### 常用参数说明 列举常用参数说明,如果没有找到可以实现应用的参数,则需要考虑是否使用其他算法实现,或者基于目前算法的结果扩展所需的功能 参数 说明 示例 roi 设置算法计算的矩形区域,roi=[x, y, w, h],x,y表示矩形区域左上角坐标,w,h表示矩形区域的宽度和高度,默认为整张图片 计算坐标为(50,50),宽和高为100的区域
    ```img.find_apriltags(roi=[50, 50, 100, 100])``` families apriltag标签家族类型 扫描TAG36H11家族的标签
    ```img.find_apriltags(families image.ApriltagFamilies.TAG36H11)``` 本文介绍常用方法,更多 API 请看 API 文档的 [image](../../../api/maix/image.html) 部分。"},"/maixpy/doc/zh/vision/maixhub_train.html":{"title":"MaixCAM MaixPy 使用 MaixHub 在线训练 AI 模型给 MaixPy 使用","content":" title: MaixCAM MaixPy 使用 MaixHub 在线训练 AI 模型给 MaixPy 使用 update: date: 2024 04 03 author: neucrack version: 1.0.0 content: 初版文档 ## 简介 MaixHub 提供了在线训练 AI 模型的功能,可以直接在浏览器中训练模型,不需要购买昂贵的机器,不需要搭建复杂的开发环境,也不需要写代码,非常适合入门,也适合懒得翻代码的老手。 ## 使用 MaixHub 训练模型的基本步骤 ### 确认要识别的数据类型和模型类型 要训练一个 AI 模型,需要先确定是什么数据和模型,目前 MaixHub(2024.4)提供了图像数据的`物体分类模型`和`物体检测模型`,都是图像识别模型, `物体分类模型` 比 `物体检测模型` 更简单,因为物体检测需要标注物体在图中的位置,会比较麻烦,物体分类则只需要给出图像中是什么,不需要坐标,所以更简单, 如果是初学者建议先从物体分类开始。 ### 采集数据 如前面的 AI 基础所说,要训练模型,必须准备训练用的数据集让 AI 学习,对于图像训练,我们需要创建一个数据集,并且上传图片到数据集。 保证设备已经连接网络(WiFi)。 打开设备上的 MaixHub 应用选择 采集数据 来拍照并一键上传到 MaixHub。需要先在 MaixHub 创建数据集,然后点击 设备 上传数据,会出现一个 二维码,设备扫描二维码来与MaixHub 建立连接。 注意要分清训练集和验证集的区别,要想实机运行的效果和训练效果相当,验证集的数据一定要和实机运行拍摄的图像质量一样,训练集也建议用设备拍摄的,如果要用网上的图片,一定只能用在训练集,不要用在验证集,因为数据量小,数据集与实机运行越接近越好。 ### 标注数据 对于分类模型,在上传的时候就顺便已经标注好了,即上传时选择好了图片属于那个分类。 对于目标检测模型,上传完成后需要进行手动标注,即在每一张图中框出要被识别物体的坐标大小和分类。 这个标注过程你也可以选择自己在自己的电脑中离线用比如 labelimg 这样的软件标注完毕后使用数据集中的导入功能导入到 MaixHub。 标注时善用快捷键标注起来会更快,后面MaixHub 也会增加更多辅助标注和自动标注工具(目前在上传视频处有自动标注工具也可以尝试使用)。 ### 训练模型 选择训练参数训练,选择对应的设备平台,选择 maixcam,等待排队训练,可以实时看到训练进度,等待完成即可。 ### 部署模型 训练完成后,可以设备的 MaixHub 应用中选择 部署 功能,扫码进行部署。 设备开会自动下载模型并且运行起来,模型会被存在本地,后面也能选择再次运行。 如果你觉得识别效果很不错,可以一键分享到模型库让更多人使用。 ## 使用方法 请到 [MaixHub](https://maixhub.com) 注册账号,然后登录,主页有视频教程,学习即可。 注意教程如果是使用了 M2dock 这个开发板,和 MaixCAM也是类似的,只是设备(板子)上使用的 MaixHub 应用可能稍微有点区别,大体上是相同的,请注意举一反三。"},"/maixpy/doc/zh/vision/face_recognition.html":{"title":"MaixCAM MaixPy 人脸识别","content":" title: MaixCAM MaixPy 人脸识别 ## 人脸识别简介 ![face_recognize](../../assets/face_recognize.jpg) 人脸识别就是识别当前画面中的人脸的位置以及是谁。 所以人脸识别除了要检测到人脸,一般会有一个库来保存认识的人和不认识的人。 ## 识别原理 * 使用 AI 模型检测人脸,获得坐标和五官的坐标。 * 利用五官的坐标仿射变换将图中的脸拉正对其到标准脸的样子,方便模型提取脸的特征。 * 使用特征提取模型提取脸的特征值。 * 与库中记录的人脸特征值进行对比(计算保存的和当前画面中的脸的特征值的余弦距离,得出最小的距离的库中的人脸,小于设定的阈值就认为当前画面中就是这个库中的人) ## MaixPy 使用 MaixPy maix.nn 模块中提供了人脸识别的 API, 可以直接使用,模型也内置了,也可以到 [MaixHub 模型库](https://maixhub.com/model/zoo) 下载(筛选选则对应的硬件平台,比如 maixcam)。 识别: ```python from maix import nn, camera, display, image import os import math recognizer nn.FaceRecognizer(detect_model \"/root/models/retinaface.mud\", feature_model \"/root/models/face_feature.mud\", dual_buff True) if os.path.exists(\"/root/faces.bin\"): recognizer.load_faces(\"/root/faces.bin\") cam camera.Camera(recognizer.input_width(), recognizer.input_height(), recognizer.input_format()) dis display.Display() while 1: img cam.read() faces recognizer.recognize(img, 0.5, 0.45, 0.8) for obj in faces: img.draw_rect(obj.x, obj.y, obj.w, obj.h, color image.COLOR_RED) radius math.ceil(obj.w / 10) img.draw_keypoints(obj.points, image.COLOR_RED, size radius if radius < 5 else 4) msg f'{recognizer.labels[obj.class_id]}: {obj.score:.2f}' img.draw_string(obj.x, obj.y, msg, color image.COLOR_RED) dis.show(img) ``` 第一次运行这个代码会发现能检测到人脸,但是都不认识,需要我们进入添加人脸模式学习人脸才行。 > 这里 `recognizer.labels[0]` 默认就是`unknown`,后面每添加一个人脸就会自动给 `labels` 增加一个。 比如可以在用户按下按键的时候学习人脸: ```python faces recognizer.recognize(img, 0.5, 0.45, True) for face in faces: print(face) # 这里考虑到了一个画面中有多个人脸的情况, obj.class_id 为 0 代表是没有录入的人脸 # 这里写你自己的逻辑 # 比如可以在这里根据 face 的 class_id 和坐标决定要不要添加到库里面,以及可以做用户交互逻辑,比如按下按钮才录入等 recognizer.add_face(face, label) # label 是要给人脸取的标签(名字) recognizer.save_faces(\"/root/faces.bin\") ``` ## 完整例程 这里提供一个按键录入未知人脸,以及人脸识别的例程,可以在[MaixPy 的 example 目录](https://github.com/sipeed/MaixPy/tree/main/examples) 
找到`nn_face_recognize.py`。 ## dual_buff 双缓冲区加速 你可能注意到这里模型初始化使用了`dual_buff`(默认值就是 `True`),使能 `dual_buff` 参数可以加快运行效率,提高帧率,具体原理和使用注意点见 [dual_buff 介绍](./dual_buff.html)。 ## 更换其它默认识别模型 这里识别模型(区分不同人)用了 mobilenetv2 模型,如果不满足精度要求,可以更换成其它模型,比如[insight face resnet50](https://maixhub.com/model/zoo/462) 模型,当然你也可以自己训练或者找其它训练好的模型转换成 MaixCAM 支持的模型即可,转换方法看[MaixCAM 模型转换文档](../ai_model_converter/maixcam.html), mud 文件参考以有的文件写即可。"},"/maixpy/doc/zh/gui/i18n.html":{"title":"MaixPy MaixCAM i18n(国际化) 多语言实现","content":" title: MaixPy MaixCAM i18n(国际化) 多语言实现 ## i18n (国际化)简介 i18n 是国际化单词(internationalization)的简称,目的在与根据用户的地域或者喜好切换语言。 我们常用的 中文 和 英文 这个就是语言,语言有对应的地域编码( LCID),比如中文的地域编码为`zh`,英文为`en`,日文为`ja`,另外还有二级地域编码,比如简体中文对应`zh cn`,一般我们实现`zh`即可。 地域编号可以参考[Windows的地域编码表](https://www.science.co.il/language/Locale codes.php) 或者看 [wikipedia](https://en.wikipedia.org/wiki/Language_localisation)。 ## MaixPy MaixCAM 中使用 i18n 用户使用大致流程如下: * 首先用户使用时,在系统设置中可以选择系统语言,比如出厂默认是`en`即英文。 * 然后程序通过`maix.i18n.get_locale()`可以获得当前系统设置的地域。 * 程序根据系统设置的地域显示对应语言的字符串。 对于应用程序来说,比较麻烦的地方就在这里的第三步,即根据地域设置查表获取对应的字符串,下面提供两种方法,根据自己的需求选择: ### 不使用翻译文件,直接使用字典 如果你的程序只有几个字符串,可以直接手动指定翻译字典: ```python from maix import i18n trans_dict { \"zh\": { \"hello\": \"你好\" }, \"en\": { } } trans i18n.Trans(trans_dict) tr trans.tr trans.set_locale(\"zh\") print(tr(\"hello\")) print(tr(\"my friend\")) ``` 这里用`trans.set_locale(\"zh\")`临时设置语言为中文了, 运行就会打印`你好` 和 `my friend` 了, 因为没给`my friend`填加翻译,所以原封不动地返回了。 ### 自动扫描生成字典,并且从翻译文件加载 这种方法比较适合有大量需要翻译的字符串的场景。 前面的方法我们手动指定了字符串翻译,在简单场景很方便,但是如果字符串太多了,手动改字典很容易漏掉,所以我们需要程序自动帮我们找到需要翻译的字符串并生成翻译文件,我们只需要翻译一下文件就好了。 在 MaixPy 中,提供了`maix.i18n.Trans` 这个类,可以用来加载多种语言的翻译文件,调用其成员函数`tr()`,传入想要翻译的文字即可获得翻译,举例: ```python from maix import i18n, err trans i18n.Trans() tr trans.tr e trans.load(\"locales\") err.check_raise(e, \"load translation yamls failed\") print(tr(\"hello\")) ``` 这里从当前目录下的`locales`文件夹加载了翻译文件,然后根据系统的语言设置打印`hello`,比如中文就会打印`你好`。 **翻译文件**: 既然这里加载用到了翻译文件,这些翻译文件怎么制作呢? 首先我们需要知道我们需要翻译那些文字,显而易见,就是上面我们用函数`tr`调用的字符串,所以我们只需要搜索源码中所有用到了`tr`函数的字符串即可认为是我们需要翻译的所有字符串了。 所以使用流程如下: * 建立一个项目文件夹,里面存放代码入口`main.py`,可以使用 `MaixVision`` 打开这个项目文件夹方便运行。 * 编写`main.py`,让需要翻译的字符串都用上述的`tr`函数调用。 * MaixPy 提供了一个扫描工具,首先确保安装了`maixtool`(电脑通过系统终端 `pip install maixtool U` 命令来安装升级)。 * 然后在目录下仍然使用电脑终端执行`maixtool i18n d . 
r`来扫描需要翻译的字符串,并且生成一个`locales`目录,里面包含了中英文两种语言的翻译文件,如果要更多语言,执行`maixtool i18n h`查看帮助。 * 生成的文件是键值对组成的,比如`zh.yaml`中的`hello: hello` 的意思就是字符串`hello`中文翻译是`hello`,这显然不对,需要我们手动翻译一下,改成`hello: 你好`即可。注意编辑文件一定要用支持 `UTF 8` 编码的编辑器,特别是在`Windows`下不要将文件改为`GBK`编码了,不然会出错,可以用 MaixVision 或者 VsCode 编辑。 * 然后运行项目,或者打包项目为安装包都可以,记得把 `locales` 目录也一起打包进去。 * 如果后面又更新了源码,需要再次执行`maixtool`命令更新文件,更新会对之前已经翻译了的文件更新,如果你担心程序不小心将之前的错误覆盖,可以先自行备份一份,确认无误后再删除备份。 这样你的程序就会根据系统语言设置更改语言了,如果你调试程序也可以手动调用`trans.set_locale(\"zh\")`来手动临时切换语言。 ## 显示翻译到界面 前面的例子都是在调用`print`函数打印,如果想显示到界面上,还有一步要做,就是需要字库支持,对于英文来说默认都支持了,可是对于中文这种字库庞大的语言,默认是不支持的。 比如: ```python from maix import i18n, image, display, app, time trans_dict { \"zh\": { \"hello\": \"你好\" }, \"en\": { } } trans i18n.Trans(trans_dict) tr trans.tr trans.set_locale(\"zh\") disp display.Display() img image.Image(disp.width(), disp.height()) img.draw_string(10, 10, tr(\"hello\"), image.COLOR_WHITE, scale 2) disp.show(img) while not app.need_exit(): time.sleep_ms(100) ``` 运行会发现显示了一堆`?`,因为没有中文字库,对于`image`模块,可以加载字库,系统内置了一个中文字库,你也可以用你自己的字库: ```python from maix import i18n, image, display, app, time trans_dict { \"zh\": { \"hello\": \"你好\" }, \"en\": { } } trans i18n.Trans(trans_dict) tr trans.tr trans.set_locale(\"zh\") disp display.Display() image.load_font(\"sourcehansans\", \"/maixapp/share/font/SourceHanSansCN Regular.otf\", size 24) image.set_default_font(\"sourcehansans\") img image.Image(disp.width(), disp.height()) img.draw_string(10, 10, tr(\"hello\"), image.COLOR_WHITE, scale 2) disp.show(img) while not app.need_exit(): time.sleep_ms(100) ```"},"/maixpy/doc/zh/index.html":{"title":"MaixCAM MaixPy 快速开始","content":" title: MaixCAM MaixPy 快速开始
| 资源汇总 | 链接 |
| --- | --- |
| MaixPy 教程文档 📖 | [wiki.sipeed.com/maixpy](https://wiki.sipeed.com/maixpy) |
| MaixPy 例程和源码 | [github.com/sipeed/MaixPy](https://github.com/sipeed/MaixPy) |
| MaixCAM 硬件资料 📷 | [wiki.sipeed.com/maixcam](https://wiki.sipeed.com/maixcam) / [wiki.sipeed.com/maixcam-pro](https://wiki.sipeed.com/maixcam-pro) |
| MaixPy API 文档 📚 | [wiki.sipeed.com/maixpy/api/](https://wiki.sipeed.com/maixpy/api/index.html) |
| MaixPy 视频和教程 💿 | [B站搜 MaixCAM 或 MaixPy](https://search.bilibili.com/all?keyword=maixcam&from_source=webtop_search&spm_id_from=333.1007&search_source=5) |
| MaixHub 应用商店 📦 | [maixhub.com/app](https://maixhub.com/app) |
| MaixHub 分享广场 🎲 | [maixhub.com/share](https://maixhub.com/share) |

    > 关于 MaixPy 介绍请看 [MaixPy 官网首页](../../index.html) > 喜欢 MaixPy 请给 [ MaixPy 项目](https://github.com/sipeed/MaixPy) 点个 Star ⭐️ 以鼓励我们开发更多功能。 ## 写在前面 * 请**仔细**阅读按照下面文档的步骤,不要遗漏内容,对比进行操作。 * **左边目录**请仔细查看,基础部分一定要耐心阅读完。 * **提问前**先在左边目录仔细查找文档,以及看[FAQ](./faq.html)。 * 本文档是`MaixPy v4 教程文档`,注意与 [MaixPy v1](https://wiki.sipeed.com/soft/maixpy/zh/index.html)(k210系列)区别开,勿错看文档。 * 也可以参考下面的视频上手教程,注意视频内容有更正在**评论区和弹幕会补充,以最新的文档为准**,更多视频教程可以到 B 站搜索 MaixCAM。 ## 获得 MaixCAM 设备 ![maixcam_pro](../../static/image/maixcam_pro.png) * **MaixCAM 主体**,目前有几个版本,根据自己的需求买: * **MaixCAM Pro**(推荐): 在 [Sipeed 淘宝](https://item.taobao.com/item.htm?id 846226367137) 或者 [Sipeed 速卖通](https://www.aliexpress.com/store/911876460) 店铺购买 MaixCAM Pro。 * **MaixCAM**:在 [Sipeed 淘宝](https://item.taobao.com/item.htm?id 784724795837) 或者 [Sipeed 速卖通](https://www.aliexpress.com/store/911876460) 店铺购买 MaixCAM。 * **MaixCAM Lite**(不推荐): 无屏幕和外壳版本,价格更便宜,学习开发不建议购买,量产可以考虑购买。 * **TF 卡**: 系统安装在 TF 卡,没有 TF 无法启动。 * **摄像头**: 视觉相关应用需要摄像头,可以根据自己的使用场景和财力选择合适的摄像头型号。比如 OS01A10 成像质量比 GC4653 高。 * **触摸屏**: 方便交互,官方默认集成的应用都需要触摸屏交互,可以大大提升交互体验和开发难度。 * **电源**: 一个稳定的供电方式,MaixCAM 需要 `5v 500mA` 的稳定供电,如果供电不足可能会导致无法开机,或者运行过程中死机等情况。特别是有些电脑的 USB 口供电可能不稳定。 * **TF 读卡器**: 用来烧录系统,必备。 * **USB转串口模块**: 如果你想要电脑和 MaixCAM 之间串口通信,需要备一个,淘宝随便买一个就行,也可以直接在 Sipeed 店里一起买,比如这个[双串口转USB模块](https://item.taobao.com/item.htm?id 610365562537)。 >! 注意,目前只支持 MaixCAM 系列开发板,其它同型号芯片的开发板均不支持,包括 Sipeed 的同型号芯片开发板,请注意不要买错造成不必要的时间和金钱浪费。 ## 使用无屏幕版本 如果你使用的是无屏幕版本,请看[快速开始(无屏幕版本)](./README_no_screen.html)文档。 ## 上手配置 ### 准备 TF 镜像卡和插入到设备 如果你买的套餐里面有 TF 卡,里面已经有出厂镜像了,如果出厂时 TF 卡没有安装到设备,需要先小心打开外壳(注意里面有排线连接不要扯断了),然后插入 TF 卡。另外因为出厂的固件可能比较老旧,**务必**按照[升级和烧录系统](./basic/os.html)先将系统升级到最新版本,否则可能会遇到某些应用 和 API 无法使用的问题。 如果没买 TF 卡,则需要将系统烧录进自备的 TF 卡中,烧录方法请看[升级和烧录系统](./basic/os.html),然后再安装到板子。 ### 上电开机 使用 `Type C` 数据线连接 `MaixCAM` 设备给设备供电,等待设备开机,开机会进入功能选择界面。 ![maixcam_font](../../static/image/maixcam_font.png) 如果屏幕没有显示 * 请确认购买了配套的 TF 卡,如果确认有 TF 卡,并且已经插入到设备,可以**尝试[更新到最新的系统](./basic/os.html)**。 * 如果你没有购买 TF 卡套餐,你需要按照[升级和烧录系统](./basic/os.html)的方法烧录最新的系统到 TF 卡。 * 另外请确认屏幕和摄像头的排线没有松动,屏幕的排线在拆开外壳时很容易脱落,需要注意。 ### 联网 首次运行需要连接网络,后面会激活设备和使用 IDE 会用到。 如果没有路由器可以用手机开一个热点。 设备上点击 `设置`(`Settings`),选择`WiFi`,有两种方法连接 `WiFi` 热点: * 扫描 WiFi 分享码: * 使用手机分享`WiFi`热点二维码,或者到[maixhub.com/wifi](https://maixhub.com/wifi) 生成一个二维码。 * 点击`扫描二维码`按钮,会出现摄像头的画面,扫描前面生成的二维码进行连接。 * 搜索热点: * 点击 `扫描` 按钮开始扫描周围 `WiFi`, 可以多次点击刷新列表。 * 找到你的 WiFi 热点。 * 输入密码点击`连接`按钮进行连接。 然后等待获取到 `IP` 地址,这可能需要 `10` 到 `30` 秒,如果界面没有刷新可以退出`WiFi`功能重新进入查看,或者在`设置` > `设备信息` 中也可以看到 `IP` 信息。 ### 升级运行库 **这一步很重要 !!!** 这一步如果不做好,其它应用和功能可能无法运行(比如闪退等)。 * 首先保证上一步连接 WiFi 已经完成,并且获取到 IP 地址能访问公网。 * 设备上点击 `设置`(`Settings`),选择`安装运行库`。 * 安装完成后可以看到更新到了最新版本,然后退出即可。 如果显示`Request failed` 或者`请求失败`,请先检查网络是否已经连接,需要能连接到互联网,如果还不行,请拍照联系客服处理即可。 ### 使用内置应用 内置了很多应用,比如 找色块,AI 检测器,巡线等等,自学习检测举例: 其它的请自行摸索,以后还会更新更多应用,使用文档以及应用更新请看 [MaixHub 应用商店](https://maixhub.com/app) 。 **注意:应用只包含了 MaixPy 能实现的一部分功能,使用 MaixPy 能创造更多功能**。 ## 作为串口模块使用 > 如果是想把设备当成主控使用(或者你还不懂什么是串口模块)可以跳过这一步。 内置的各种应用可以直接当成串口模块使用,比如`找色块`、`找人脸`、`找二维码`等等, 注意这里串口仅能直接和其它单片机连接,**如果要和电脑串口通信请自备一个 USB 转串口模块**。 使用方法: * 硬件连接: 可以给设备接上`Type C一转二小板`(对于 MaixCAM Pro 是 6Pin 接口),这样我们就能将设备通过串口连接到你的主控上了,比如`Arduino`、`树莓派`、`STM32`等等。 * 打开你想用的应用,比如二维码识别,当设备扫描到二维码就会通过串口把结果发送给你的主控了。 > 发送的串口波特率是 `115200`,数据格式是 `8N1`,协议遵循 [Maix 串口通信协议标准](https://github.com/sipeed/MaixCDK/blob/master/docs/doc/convention/protocol.md),可以在[MaixHub APP](https://maixhub.com/app) 找到对应的应用介绍查看协议。 > 如果应用没有做串口输出结果,你也可以自己基于对应功能的例程,自行按照[串口使用文档](./peripheral/uart.html)添加串口输出结果。 ## 准备连接电脑和设备 为了后面电脑(PC)能和 设备(MaixCAM)通信,我们要让它们在同一个局域网内,提供了两种方式: * **方法一 
(强烈推荐)**:无线连接, 设备使用 WiFi 连接到电脑连接的同一个路由器或者 WiFi 热点下: 在设备的`设置 > WiFi 设置`中连接到你的 WiFi 即可。(WiFi 如果出现**画面卡顿或者延迟**的问题可以尝试下面的方法二使用有线连接。) * **方法二**:有线连接, 设备通过 USB 线连接到电脑,设备会虚拟成一个 USB 网卡,这样和电脑就通过 USB 在同一局域网了。推荐先用 WiFi 开始是因为有线虽然传输稳定但是可能会遇到线缆不良,接触不良,驱动等问题,遇到问题也可以在 [FAQ](./faq.html) 中找常见问题。 .. details::方法二在不同电脑系统中驱动安装方法: :open: true 默认会有两种 USB 虚拟网卡驱动(NCM 和 RNDIS驱动),以满足不同系统的需求,你也可以在设备端`设置`应用 > `USB设置` 里面关掉不用的虚拟网卡: * **Windows**: windows 所有系统会自动安装 RNDIS 驱动, 仅 Win11 会自动安装 NCM 驱动,两种驱动**有一个能用就行**。 * 打开任务管理器 > 性能,可以看到一个虚拟的以太网,并且可以看到 ip 比如 `10.131.167.100` 是电脑的 ip, 设备的 ip 是最后一位改为`1` 即 `10.131.167.1`。如果是 Win11 则会看到两个虚拟网卡,随便选择一个 IP 使用即可。 * 另外也可以打开电脑的 `设备管理器`(搜索栏搜索`设备管理器`), RNDIS 和 NCM 驱动被正确安装的效果: ![RNDIS ok](../../static/image/windows_rndis_ok.png) ![NCM ok](../../static/image/windows_ncm_ok.png) * **Linux**: 无需额外设置,插上 USB 线即可。 使用 `ifconfig` 或者 `ip addr` 查看到 `usb0` 和 `usb1` 网卡,两个 IP 都可以使用,**注意** 这里看到的 ip 比如 `10.131.167.100` 是电脑的 ip, 设备的 ip 是最后一位改为`1` 即 `10.131.167.1`。 * **MacOS**: 在`系统设置` >`网络`里面查看到 `usb` 网卡,**注意** 这里看到的 ip 比如 `10.131.167.100` 是电脑的 ip, 设备的 ip 是最后一位改为`1` 即 `10.131.167.1`。 ## 开发环境准备 * 首先保证上一步电脑和设备已经在同一个局域网中了。 * 下载 [MaixVision](https://wiki.sipeed.com/maixvision) 并安装。 * 使用 Type C 连接设备和电脑,打开 MaixVision,点击左下角的`“连接”`按钮,会自动搜索设备,稍等一下就能看到设备,点击设备有点的连接按钮以连接设备。 如果**没有扫描到设备**,你也可以在**设备**的 `设置 > 设备信息` 中查看设备的 IP 地址手动输入, 也可以在 [FAQ](./faq.html) 中找到解决方法。 **连接成功后,设备的功能选择界面会消失,屏幕会黑屏,释放了所有硬件资源,如果仍然有画面显示,可以断开连接重连。** 这里有 MaixVision 的使用示例视频: ## 运行例程 点击 MaixVision 左侧的`示例代码`,选择一个例程,点击左下角`运行`按钮将代码发送到设备上运行。 比如: * `hello_maix.py`,点击`运行`按钮,就能看到 MaixVision 终端有来自设备打印的消息,以及右上角出现了图像。 * `camera_display.py`,这个例程会打开摄像头并在屏幕上显示摄像头的画面。 ```python from maix import camera, display, app disp display.Display() # 构造一个显示对象,并初始化屏幕 cam camera.Camera(640, 480) # 构造一个摄像头对象,手动设置了分辨率为 640x480, 并初始化摄像头 while not app.need_exit(): # 一直循环,直到程序退出(可以通过按下设备的功能按键退出或者 MaixVision 点击停止按钮退出) img cam.read() # 读取摄像头画面保存到 img 变量,可以通过 print(img) 来打印 img 的详情 disp.show(img) # 将 img 显示到屏幕上 ``` * `yolov5.py` 会检测摄像头画面中的物体框出来并显示到屏幕上,支持 80 种物体的检测,具体请看[YOLOv5/YOLOv8/YOLO11 物体检测](./vision/yolov5.html)。 其它例程可以自行尝试。 > 如果你使用相机例程遇到了图像显示卡顿,可能是网络不通畅,或者 USB 线质量或者主机 USB 质量太差造成,可以更换连接方式或者更换线缆、主机 USB 口或者电脑等。 ## 安装应用到设备 上面是在设备中运行代码,`MaixVision` 断开后代码就会停止运行,如果想让代码出现在开机菜单中,可以打包成应用安装到设备上。 点击 `MaixVision` 左下侧的安装应用按钮,填写应用信息,会将应用安装到设备上,然后在设备上就能看到应用了。 也可以选择打包应用,将你的应用分享到[MaixHub 应用商店](https://maixhub.com/app)。 > 默认例程没有显式编写退出功能,进入应用后按下设备的功能按键即可退出应用。(对于 MaixCAM 是 user 键) 如果想让程序开机自启动,可以在 `设置 > 开机启动` 中设置。 更多 MaixVision 使用请看 [MaixVision 文档](./basic/maixvision.html)。 ## 下一步 看到这里,如果你觉得不错,**请务必来 [github](https://github.com/sipeed/MaixPy) 给 MaixPy 开源项目点一个 star(需要先登录 github), 你的 star 和认同是我们不断维护和添加新功能的动力!** 到这里你已经体验了一遍使用和开发流程了,接下来可以学习 `MaixPy` 语法和功能相关的内容,请按照左边的目录进行学习,如果遇到 `API` 使用问题,可以在[API 文档](/api/)中查找。 学习前最好带着自己学习的目的学,比如做一个有趣的小项目,这样学习效果会更好,项目和经验都可以分享到[MaixHub 分享广场](https://maixhub.com/share),会获得现金奖励哦! 
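For reference, the `camera_display.py` example quoted above lost its assignment operators ("=") when this page text was extracted. Below is a minimal, runnable reconstruction of that same example; it assumes only the `maix.camera`, `maix.display` and `maix.app` APIs already used in this documentation and introduces nothing new.

```python
# Minimal reconstruction of the camera_display.py example above;
# only the stripped "=" operators are restored, no new APIs are introduced.
from maix import camera, display, app

disp = display.Display()          # construct a display object and initialize the screen
cam = camera.Camera(640, 480)     # construct a camera object with a 640x480 resolution

while not app.need_exit():        # loop until the function key / MaixVision stop button exits
    img = cam.read()              # read one frame from the camera
    disp.show(img)                # show the frame on the screen (also previewed in MaixVision)
```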
## 常见问题 FAQ 遇到问题可以优先在 [FAQ](./faq.html) 里面找,找不到再在下面的论坛或者群询问,或者在 [MaixPy issue](https://github.com/sipeed/MaixPy/issues) 提交源码问题。 ## 分享交流 * **[MaixHub 项目和经验分享](https://maixhub.com/share)** :分享你的项目和经验,获得现金打赏,获得官方打赏的基本要求: * **可复现型**:较为完整的项目制作复现过程。 * **炫耀型**:无详细的项目复现过程,但是项目展示效果吸引人。 * Bug 解决经验型:解决了某个难题的过程和具体解决方法分享。 * [MaixPy 官方论坛](https://maixhub.com/discussion/maixpy)(提问和交流) * QQ 群: (建议在 QQ 群提问前先发个帖,方便群友快速了解你需要了什么问题,复现过程是怎样的) * MaixPy (v4) AI 视觉交流大群: 862340358 * Telegram: [MaixPy](https://t.me/maixpy) * MaixPy 源码问题: [MaixPy issue](https://github.com/sipeed/MaixPy/issues) * 商业合作或批量购买请联系 support@sipeed.com 。"},"/maixpy/doc/zh/network/socket.html":{"title":"MaixPy MaixCAM 使用 socket 进行 TCP/UDP 通信","content":" title: MaixPy MaixCAM 使用 socket 进行 TCP/UDP 通信 ## socket 简介 socket 就是 TCP/UDP 通信在软件上的封装,通过 socket 接口,我们可以进行 TCP/UDP 通信。 MaixPy 由于基于 Python,我们可以直接使用内置的`socket`库进行通信,更多文档和使用教程可以自行搜索学习。 这里介绍简单的使用方法,通过这些示例代码,你可以在 MaixPy MaixCAM 上进行基本的 TCP 和 UDP 通信。 记得根据实际情况修改 IP 地址和端口号。 ## socket TCP 客户端 这里请求 TCP 服务器,发送了一句消息并等待回应,然后关闭连接。 ```python import socket def tcp_client(ip, port): client_socket socket.socket(socket.AF_INET, socket.SOCK_STREAM) server_address (ip, port) client_socket.connect(server_address) try: # 发送数据到服务器 message 'Hello, Server!' print(\"send:\", message) client_socket.sendall(message.encode('utf 8')) # 接收服务器的响应 data client_socket.recv(1024) print('Received:', data.decode('utf 8')) finally: # 关闭连接 client_socket.close() if __name__ \"__main__\": tcp_client(\"10.228.104.1\", 8080) ``` ## socket TCP 服务端 这里创建一个 socket 服务器,并且不停等待客户端连接,客户端连接后创建一个线程用以和客户端通信,读取客户端的信息并原样发送回去。 ```python import socket import threading local_ip \"0.0.0.0\" local_port 8080 def receiveThread(conn, addr): while True: print('read...') client_data conn.recv(1024) if not client_data: break print(client_data) conn.sendall(client_data) print(f\"client {addr} disconnected\") ip_port (local_ip,local_port) sk socket.socket(socket.AF_INET, socket.SOCK_STREAM) sk.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1) sk.bind(ip_port) sk.listen(50) print(\"accept now,wait for client\") while True: conn, addr sk.accept() print(f\"client {addr} connected\") # create new thread to communicate for this client t threading.Thread(target receiveThread,args (conn, addr)) t.daemon True t.start() ``` ## socket UDP 客户端 ```python import socket def udp_send(ip, port): # 创建 socket 对象 udp_socket socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # 定义服务器的 IP 地址和端口号 server_address (ip, port) try: # 发送数据到服务器 message 'Hello, Server!' 
udp_socket.sendto(message.encode('utf 8'), server_address) finally: # 关闭连接 udp_socket.close() # 调用函数 udp_send(\"10.228.104.1\", 8080) ``` ## socket UDP 服务器 ```python import socket def udp_receive(ip, port): # 创建 socket 对象 udp_socket socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # 定义服务器的 IP 地址和端口号 server_address (ip, port) # 绑定端口 udp_socket.bind(server_address) print('Waiting for a message...') while True: data, address udp_socket.recvfrom(1024) print('Received:', data.decode('utf 8')) print('From:', address) # 关闭连接 udp_socket.close() # 调用函数 udp_receive('0.0.0.0', 8080) ```"},"/maixpy/doc/zh/network/websocket.html":{"title":"MaixPy MaixCAM 使用 websocket","content":" title: MaixPy MaixCAM 使用 websocket ## 简介 类似 socket,使用 websocket 可以实现长链接通信,同时还支持和 web 页面通信。 因为 MaixPy 基于 Python,所以使用 Python 通用的 `websockets` 和 `asyncio` 模块即可,更多内容可以自行搜索学习。 ## websocket 客户端 连接服务器发送 10 次消息就结束: ```python import asyncio import websockets import time async def send_msg(websocket): count 1 while count < 10: msg f\"hello {count}\" await websocket.send(msg) recv_text await websocket.recv() print(f\"receive: {recv_text}\", end \"\\n\") count + 1 time.sleep(1) await websocket.close(reason \"client exit\") async def main_logic(ip, port): async with websockets.connect(f'ws://{ip}:{port}') as websocket: await send_msg(websocket) ip \"10.228.104.100\" port 5678 asyncio.get_event_loop().run_until_complete(main_logic(ip, port)) ``` ## websocket 服务端 接受客户端的连接并且客户端发送过来消息后,返回`ack for msg:` + 发送过来的消息。 ```python import asyncio import websockets import functools async def recv_msg(websocket): print(\"new client connected, recv_msg start\") while True: try: recv_text await websocket.recv() except Exception as e: print(\"receive failed\") break print(\"received:\", recv_text) response_text f\"ack for msg: {recv_text}\" await websocket.send(response_text) print(\"recv_msg end\") async def main_logic(websocket, path, other_param): await recv_msg(websocket) ip \"0.0.0.0\" port 5678 start_server websockets.serve(functools.partial(main_logic, other_param \"test_value\"), ip, port) print(\"start server\") asyncio.get_event_loop().run_until_complete(start_server) print(\"start server loop\") asyncio.get_event_loop().run_forever() ```"},"/maixpy/doc/zh/network/flask.html":{"title":"MaixPy MaixCAM 使用 Flask 建立 HTTP 网页服务器","content":" title: MaixPy MaixCAM 使用 Flask 建立 HTTP 网页服务器 ## 简介 MaixPy 基于 Python, 所以你可以使用 Python 库 Flask,通过它可以快速实现一个 Web 网页服务器,因为是 Python 通用的,具体的用处和使用方法可以自行搜索,这里不过多阐述。 如果你只是想做一个显示摄像头图像的页面,也可以参考[JPEG 串流](../video/jpeg_streaming.html) 中的 HTTP 图像服务器的方法。 ## 简单的 HTTP 服务例程 运行下面的程序后,电脑浏览器访问 `http://设备ip:8000` 就会显示 `hello world` 字符和一张图片了。 ```python from flask import Flask, request, send_file import maix # we not use it but we import it to listening key event to exit this program app Flask(__name__) @app.route(\"/\", methods [\"GET\", \"POST\"]) def root(): print(\" \") print(request.remote_addr) print(f'headers:\\n{request.headers}') print(f'data: {request.data}') print(\" \") return 'hello world
    ' @app.route(\"/\") def hello(path): print(path) print(f'headers:\\n{request.headers}') print(f'data: {request.data}') print(\" \\n\\n\") return f\"hello from {path}\" @app.route(\"/img\") def img(): return send_file(\"/maixapp/share/icon/detector.png\") if __name__ \"__main__\": app.run(host \"0.0.0.0\", port 8000) ```"},"/maixpy/doc/zh/network/network_settings.html":{"title":"MaixPy MaixCAM 网络设置 WiFi 设置","content":" title: MaixPy MaixCAM 网络设置 WiFi 设置 ## 简介 要让 MaixCAM 能够使用网络,首先需要使用 WiFi 连接到网络。 MaixCAM 提供了几种方法连接 WiFi 热点。 ## 使用内置设置应用连接 开及后进入`设置`应用,选择`WiFi`功能,可以通过手机分享`WiFi 二维码`或者再[maixhub.com/wifi](https://maixhub.com/wifi) 生成二维码,然后扫码连接。 也可以手动扫描`WiFi`热点,然后输入密码进行连接。 连接成功等待 DHCP 获得 IP 后界面会显示 IP。 ## 通过 MaixPy 连接 ```python from maix import network, err w network.wifi.Wifi() print(\"ip:\", w.get_ip()) SSID \"Sipeed_Guest\" PASSWORD \"qwert123\" print(\"connect to\", SSID) e w.connect(SSID, PASSWORD, wait True, timeout 60) err.check_raise(e, \"connect wifi failed\") print(\"ip:\", w.get_ip()) ``` ## DNS 服务器设置 实际使用时发现有些用户的路由器 DNS 解析可能解析不到某些域名,所以默认系统中在`/boot/resolv.conf`文件设置了 DNS 服务器 ```shell nameserver 114.114.114.114 # China nameserver 223.5.5.5 # aliyun China nameserver 8.8.4.4 # google nameserver 8.8.8.8 # google nameserver 223.6.6.6 # aliyun China ``` 一般不需要修改,如果你的 DNS 解析遇到了问题可以修改这个文件。 实际系统用的配置文件路径是`/etc/resolv.conf`, 这个文件在开机时会被自动拷贝到`/etc/resolv.conf`,所以修改后直接重启最简单。 不想重启的话需要同时修改这两个文件。"},"/maixpy/doc/zh/network/mqtt.html":{"title":"MaixPy MaixCAM 使用 MQTT 订阅发布消息","content":" title: MaixPy MaixCAM 使用 MQTT 订阅发布消息 ## MQTT 简介 使用 MQTT 可以快速简单地使用 订阅 发布 模型来进行实时通信。 系统组成: * MQTT 服务器(broker),负责转发消息。 * MQTT 客户端,从服务器订阅主题,并且接收消息,以及像服务器特定的主题发布消息。 通信过程: * 客户端连接 MQTT 服务器。 * 客户端订阅自己感兴趣的主题,比如`topic1`。 * 有其它客户端或者服务器发布`topic1`这个主题的信息时,会被实时推送到客户端。 * 客户端也可以主动向特定的主题推送消息,其它订阅了这个主题的客户端都会收到,比如向自己订阅了的`topic1`推送消息自己也会收到。 ## MaixPy MaixCAM 中使用 MQTT 使用 `paho mqtt` 这个模块即可,具体用法可以自行搜索`paho mqtt`的用法,也可以参考[MaixPy/examples](https://github.com/sipeed/MaixPy/tree/main/examples/network)中的例程。 如果你使用了早期的系统,可能需要手动安装一下`paho mqtt`这个包,安装方法见[添加额外的 Python 软件包](../basic/python_pkgs.html)。"},"/maixpy/doc/zh/network/http.html":{"title":"MaixPy MaixCAM 使用 http 网络通信","content":" title: MaixPy MaixCAM 使用 http 网络通信 ## 简介 HTTP 是一个应用层网络协议,底层基于 TCP,通过它我们可以向网络服务器发送和接受信息,比如从网页服务器获取网页内容等。 更多介绍可以自行搜索 HTTP。 ## 在 MaixPy 使用 HTTP 请求 因为 MaixPy 基于 Python, 所以直接使用自带的 `requests` 库即可,`requests` 库是一个非常健全易用的库,这里就不进行过多的介绍,请自行搜索相关文档和教程使用。 这里举个例子,获取`https://example.com` 的首页内容。 ```python import requests url 'https://example.com' response requests.get(url) print(\"Response:\") print(\" status code:\", response.status_code) print(\"\") print(\" headers:\", response.headers) print(\"\") print(\" content:\", response.content) print(\"\") print(\" text:\", response.text) print(\"\") ```"},"/maixpy/doc/zh/video/record.html":{"title":"MaixCAM MaixPy 录像","content":" title: MaixCAM MaixPy 录像 update: date: 2024 05 20 author: lxowalle version: 1.0.0 content: 初版文档 ## 简介 本文档提供录像功能的使用方法 ## 示例一 一个录入`h265`格式视频的示例 ```python from maix import video, image, camera, app, time cam camera.Camera(640, 480, image.Format.FMT_YVU420SP) e video.Encoder() f open('/root/output.h265', 'wb') record_ms 2000 start_ms time.ticks_ms() while not app.need_exit(): img cam.read() frame e.encode(img) print(frame.size()) f.write(frame.to_bytes()) if time.ticks_ms() start_ms > record_ms: app.set_exit_flag(True) ``` 步骤: 1. 
导入模块并初始化摄像头 ```python from maix import video, image, camera, app, time cam camera.Camera(640, 480, image.Format.FMT_YVU420SP) ``` `camera.Camera()`用来初始化摄像头, 这里初始化摄像头分辨率为`640x480`,注意目前`Encoder`只支持`NV21`格式,因此设置图像格式为`image.Format.FMT_YVU420SP`。 2. 初始化`Encoder`模块 ```python e video.Encoder() ``` `video.Encoder()`模块目前只支持处理`image.Format.FMT_YVU420SP`格式图像,支持`h265`和`h264`编码, 默认为`h265`编码。如果你想使用`h264`编码,则可以修改初始化参数为` video.Encoder(type video.VideoType.VIDEO_H264_CBR)` 注意,同时只能存在一个编码器 3. 编码摄像头的图像 ```python img cam.read() frame e.encode(img) ``` `img cam.read()`读取摄像头图像并保存到`img` `frame e.encode(img)`对`img`编码并保存结果到`frame` 4. 保存编码结果到文件 ```python f open('/root/output.h265', 'wb') f.write(frame.to_bytes(False)) ``` `f open(xxx)`打开并创建一个文件 `f.write(frame.to_bytes(False))`将编码结果`frame`转换为`bytes`类型,然后调用`f.write()`将数据写入文件中 5. 定时2s退出 ```python record_ms 2000 start_ms time.ticks_ms() while not app.need_exit(): if time.ticks_ms() start_ms > record_ms: app.set_exit_flag(True) ``` 这里是定时退出的应用逻辑,自己看看吧 6. 完成 ## 示例二 一个录入`h265`格式视频的示例 ```python from maix import video, time, image, camera, app cam camera.Camera(640, 480, image.Format.FMT_YVU420SP) e video.Encoder(capture True) e.bind_camera(cam) f open('/root/output.h265', 'wb') record_ms 2000 start_ms time.ticks_ms() while not app.need_exit(): frame e.encode() img e.capture() print(frame.size()) f.write(frame.to_bytes(True)) if time.ticks_ms() start_ms > record_ms: app.set_exit_flag(True) ``` 与示例一类似,区别在于调用了`Encoder`对象的`bind_camera`方法,`Encoder`主动取图,这样的优点是可以充分利用硬件特性,增加编码速率 ``` e video.Encoder(capture True) e.bind_camera(cam) frame e.encode() img e.capture() ``` `e video.Encoder(capture True)`使能了`capture`参数,让编码时可以抓取编码的图像 `e.bind_camera(cam)`将摄像头绑定到`Encoder`对象 `frame e.encode()`编码时不需要再传入`img`,而是内部从摄像头取图 `img e.capture()`从`Encoder`对象中抓取编码的图像 ## 转换为MP4格式 如果想要录制`mp4`格式视频,可以先录制好`H265`视频,再使用系统内的`ffmpeg`工具转换为`mp4`格式 ```python import os # Pack h265 to mp4 # /root/output.h265 是h265文件路径 # /root/output.mp4 是mp4文件路径 os.system('ffmpeg loglevel quiet i /root/output.h265 c:v copy c:a copy /root/output.mp4 y') ```"},"/maixpy/doc/zh/video/rtsp_streaming.html":{"title":"MaixCAM MaixPy 视频流 RTSP 推流","content":" title: MaixCAM MaixPy 视频流 RTSP 推流 update: date: 2024 05 20 author: lxowalle version: 1.0.0 content: 初版文档 ## 简介 本文档提供通过RTSP推流摄像头画面的方法 ## 使用方法 ```python from maix import time, rtsp, camera, image cam camera.Camera(2560, 1440, image.Format.FMT_YVU420SP) server rtsp.Rtsp() server.bind_camera(cam) server.start() print(server.get_url()) while True: time.sleep(1) ``` 步骤: 1. 导入time、rtsp、camera和image模块 ```python from maix import time, rtsp, camera, image ``` 2. 初始化摄像头 ```python cam camera.Camera(2560, 1440, image.Format.FMT_YVU420SP) # 初始化摄像头,输出分辨率2560x1440 NV21格式 ``` 注意RTSP模块目前只支持NV21格式, 因此摄像头需要配置为NV21格式输出 3. 初始化并启动Rtsp对象 ```python server rtsp.Rtsp() server.bind_camera(cam) server.start() ``` `server rtsp.Rtsp()`用来创建一个`Rtsp`对象 `server.bind_camera(cam)`用来绑定一个`Camera`对象, 绑定后原`Camera`对象将不能再使用 `server.start()`用来启动`rtsp`推流 4. 打印当前RTSP流的URL ```python print(server.get_url()) ``` `server.get_url()`用来获取`RTSP`的`播放地址`。 6. 完成,运行上须代码后, 你可以通过[VLC](https://www.videolan.org/vlc/)软件播放视频流, 已测试的`VLC`版本是`3.0.20`. 
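The RTSP example above was likewise affected by the stripped "=" operators, so here is a restored sketch of the same code for convenience; it assumes nothing beyond the `maix.rtsp` and `maix.camera` usage described in the steps above.

```python
# Restored RTSP push example (assignment operators re-added).
from maix import time, rtsp, camera, image

# The RTSP module currently only accepts NV21 input, so request FMT_YVU420SP.
cam = camera.Camera(2560, 1440, image.Format.FMT_YVU420SP)

server = rtsp.Rtsp()
server.bind_camera(cam)   # after binding, the original Camera object must not be used directly
server.start()

print(server.get_url())   # prints the playback URL for VLC or other RTSP players

while True:
    time.sleep(1)
```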
默认播放地址为`rtsp://设备的ip:8554/live` ## OSD 通过OSD来实现画线与画框 TODO"},"/maixpy/doc/zh/video/play.html":{"title":"MaixPy 播放视频","content":" title: MaixPy 播放视频 update: date: 2024 08 19 author: lxowalle version: 1.0.0 content: 初版文档 ## 简介 本文档提供播放视频功能的使用方法。 `MaixPy`支持播放`h264`、`mp4`、`flv`格式的视频,需要注意目前只支持`avc`编码的`mp4`和`flv`文件 ## 播放`MP4`视频 一个播放`mp4`视频的示例,视频文件路径为`/root/output.mp4` ```python from maix import video, display, app disp display.Display() d video.Decoder('/root/output.mp4') print(f'resolution: {d.width()}x{d.height()} bitrate: {d.bitrate()} fps: {d.fps()}') d.seek(0) while not app.need_exit(): ctx d.decode_video() if not ctx: d.seek(0) continue img ctx.image() disp.show(img) print(f'need wait : {ctx.duration_us()} us') ``` 步骤: 1. 导入模块并初始化摄像头 ```python from maix import video, display, app disp display.Display() ``` `disp display.Display()`用来初始化显示屏,用于显示解码的图像 2. 初始化`Decoder`模块 ```python d video.Decoder('/root/output.mp4') ``` `d video.Decoder('/root/output.mp4')`用来初始化解码器,并设置需要播放的视频文件路径。如果你需要播放`flv`文件,则可以填写`flv`为后缀的文件路径,例如`{your_file_path}.flv`,如果你需要播放`h264`文件,则可以填写`h264`为后缀的文件路径,例如`{your_file_path}.h264` 3. 设置解码的位置 ```python d.seek(0) ``` 可以用来设置播放视频的位置,单位是秒 4. 获取解码后的图像 ```python ctx d.decode_video() img ctx.image() ``` 每次调用都会返回一帧图像的上下文`ctx`,通过`ctx.image()`获取`img`。目前解码后只能支持输出`NV21`格式的图像 5. 显示解码后的图像 ```python disp.show(img) ``` 显示图像时使用`ctx.duration_us()`可以获取每帧图像的时长,单位是微秒 6. 完成,更多`Decoder`的用法请看[API文档](https://wiki.sipeed.com/maixpy/api/maix/video.html)"},"/maixpy/doc/zh/video/rtmp_streaming.html":{"title":"MaixCAM MaixPy 视频流 RTMP 推流","content":" title: MaixCAM MaixPy 视频流 RTMP 推流 update: date: 2024 05 20 author: lxowalle version: 1.0.0 content: 初版文档 ## 简介 本文档提供通过RTMP推送H264视频流的方法 ## 使用方法 ```python from maix import camera, time, rtmp, image cam camera.Camera(640, 480, image.Format.FMT_YVU420SP) # rtmp://192.168.0.30:1935/live/stream host '192.168.0.30' port 1935 app 'live' stream 'stream' bitrate 1000_000 r rtmp.Rtmp(host, port, app, stream, bitrate) r.bind_camera(cam) r.start() while True: time.sleep(1) ``` 步骤: 1. 导入camera, time, rtmp和image模块 ```python from maix import camera, time, rtmp, image ``` 2. 初始化摄像头 ```python cam camera.Camera(640, 480, image.Format.FMT_YVU420SP) # 初始化摄像头,输出分辨率640x480 NV21格式 ``` 注意RTMP模块目前只支持NV21格式, 因此摄像头需要配置为NV21格式输出 3. 初始化并启动Rtmp对象 ```python r rtmp.Rtmp(host, port, app, stream, bitrate) r.bind_camera(cam) r.start() ``` `r rtmp.Rtmp(host, port, app, stream, bitrate)`用来创建一个`Rtmp`对象,其中`host`指rtmp服务器的ip地址或者域名,`app`指rtmp服务器开放的应用名,`stream`指rtmp流的名称,也可以作为本次推流的密钥 `r.bind_camera(cam)`用来绑定一个`Camera`对象, 绑定后原`Camera`对象将不能再使用 `r.start()`用来启动`rtmp`推流 4. 完成 ## 向Bilibili推流测试 ### 启动bilibili直播 1. 点击直播 ![](../../../static/image/bilibili_click_live.png) 2. 点击开播设置 ![](../../../static/image/bilibili_click_live_setting.png) 3. 通过`我的直播间链接`找到直播地址 ![](../../../static/image/bilibili_check_live_link.png) 4. 往下翻,选择一个`分类`,再点击开始直播 ![](../../../static/image/bilibili_live_start.png) 5. 
执行步骤4后,可以看到 ![](../../../static/image/bilibili_check_rtmp_url.png) 直播服务器的地址为:`rtmp://live push.bilivideo.com/live bvc` 串流密钥为:`?streamname live_xxxx&key 1fbfxxxxxxxxxxxxxffe0&schedule rtmp&pflag 1` 组合起来的`rtmp`推流地址就是:`rtmp://live push.bilivideo.com/live bvc/?streamname live_xxxx&key 1fbfxxxxxxxxxxxxxffe0&schedule rtmp&pflag 1` ### 运行RTMP客户端 ```python from maix import camera, time, rtmp, image cam camera.Camera(640, 480, image.Format.FMT_YVU420SP) # rtmp://live push.bilivideo.com/live bvc/?streamname live_xxxx&key 1fbfxxxxxxxxxxxxxffe0&schedule rtmp&pflag 1 host 'live push.bilivideo.com' port 1935 app 'live bvc' stream '?streamname live_xxxx&key 1fbfxxxxxxxxxxxxxffe0&schedule rtmp&pflag 1' bitrate 1000_000 r rtmp.Rtmp(host, port, app, stream, bitrate) r.bind_camera(cam) r.start() while True: time.sleep(1) ``` 上面拿到bilibili的推流地址为`rtmp://live push.bilivideo.com/live bvc/?streamname live_xxxx&key 1fbfxxxxxxxxxxxxxffe0&schedule rtmp&pflag 1` 可以拆出 1. 服务器地址为`live push.bilivideo.com` 2. 端口号为`1935`,即没有端口号则默认为`1935` 3. 应用名为`live bvc` 4. 流名称为`?streamname live_xxxx&key 1fbfxxxxxxxxxxxxxffe0&schedule rtmp&pflag 1` 运行代码,就能在直播间看到`maixcam`的画面了,如果发现直播没有显示,可以尝试先关闭直播间,再重新打开直播间,再运行代码。 动手试试吧~"},"/maixpy/doc/zh/video/jpeg_streaming.html":{"title":"MaixCAM MaixPy 视频流 JPEG 推流 / 发送图片到服务器","content":" title: MaixCAM MaixPy 视频流 JPEG 推流 / 发送图片到服务器 update: date: 2024 04 03 author: neucrack version: 1.0.0 content: 初版文档 date: 2024 05 20 author: lxowalle version: 1.0.1 content: 更新JPEG HTTP用法 ## 简介 有时需要将图像发送到服务器,或者将摄像头的视频推送到服务器,这里提供两种方法: 一个最简单的方法,即压缩成 `JPEG` 图片,然后一张一张地发送到服务器。注意,这是一种最简单的方法,不算很正规的视频推流方法,也不适合高分辨率高帧率的视频流,因为这只是一张一张发送图片,如果要高效推送视频流,请使用后文的 `RTSP` 或者 `RTMP` 模块。 建立一个HTTP服务器, 让PC端可以通过浏览器直接访问 ## 作为客户端推流的方法 ```python from maix import image import requests # create image img image.Image(640, 480, image.Format.FMT_RGB) # draw something img.draw_rect(60, 60, 80, 80, image.Color.from_rgb(255, 0, 0)) # convert to jpeg jpeg img.to_format(image.Format.FMT_JPEG) # image.Format.FMT_PNG # get jpeg bytes jpeg_bytes jpeg.to_bytes() # faster way, borrow memory from jpeg object, # but be carefully, when jpeg object is deleted, jpeg_bytes object MUST NOT be used, or program will crash # jpeg_bytes jpeg.to_bytes(copy False) # send image binary bytes to server url \"http://192.168.0.123:8080/upload\" res requests.post(url, data jpeg_bytes) print(res.status_code) print(res.text) ``` 可以看到,先将图片转换成了 `JPEG` 格式,然后将 `JPEG` 图片的二进制数据通过`TCP`发送到服务器。 ## 作为服务器推流的方法 ```python from maix import camera, time, app, http html \"\"\" JPG Stream

    MaixPy JPG Stream

    \"\"\" cam camera.Camera(320, 240) stream http.JpegStreamer() stream.set_html(html) stream.start() print(\"http://{}:{}\".format(stream.host(), stream.port())) while not app.need_exit(): t time.ticks_ms() img cam.read() jpg img.to_jpeg() stream.write(jpg) print(f\"time: {time.ticks_ms() t}ms, fps: {1000 / (time.ticks_ms() t)}\") ``` 步骤: 1. 导入image、camera和http模块 ```python from maix import image, camera, http ``` 2. 初始化摄像头 ```python cam camera.Camera(320, 240) # 初始化摄像头,输出分辨率320x240 RGB格式 ``` 3. 初始化Stream对象 ```python stream http.JpegStreamer() stream.start() ``` `http.JpegStreamer()`用来创建一个`JpegStreamer`对象,这个对象将会启动一个`http服务器`,用来向客户端发布`jpeg`图像流 `stream.start()`用来启动`http服务器` 4. 自定义html样式(可选) ```python html \"\"\" JPG Stream

    MaixPy JPG Stream

    \"\"\" stream.set_html(html) ``` `html xxx`是`html`代码,可以用来定制自己的网页风格。注意核心代码是``,一定不要漏了这行代码。 `stream.set_html(html)`用来设置自定义的`html`代码,这一步是可选的。默认浏览地址是`http://设备的ip:8000`。 5. 从摄像头获取图片并推流 ```python while 1: img cam.read() jpg img.to_jpeg() stream.write(jpg) ``` `img cam.read()`从摄像头获取一张图像,当初始化的方式为`cam camera.Camera(320, 240)`时,`img`对象是一张分辨率为320x240的RGB图。 `jpg img.to_jpeg()`将图像转换为`jpeg`格式 `stream.write(jpg)`向服务器写入图像格式,`http`服务器将会把这个图像发送到`http`客户端。 6. 完成,运行上须代码后, 你可以通过浏览器直接看到视频流, 默认地址为`http://设备的ip:8000`。打开你的浏览器看看吧!"},"/maixpy/doc/zh/faq.html":{"title":"MaixCAM MaixPy FAQ(常见问题)","content":" title: MaixCAM MaixPy FAQ(常见问题) >! 此页面列出了 MaixPy 相关的常见问题和解决方案,如果你遇到了问题,请先在这里找寻答案。 > 另外还有其它途径: > * [MaixHub 讨论版块](https://maixhub.com/discussion): 交流讨论,支持红包打赏。 > * [MaixPy issue](https://github.com/sipeed/MaixPy/issues?q ): 源码相关问题。 > * [MaixCAM 硬件 FAQ](https://wiki.sipeed.com/hardware/zh/maixcam/faq.html): MaixCAM 硬件常见问题。 ## MaixVision 无法搜索到设备? 先确认连接方式是 WiFi 还是 USB 线, **WiFi**: * 确认 WiFi 是否正确连接上并且获取到 IP 地址, 可以在 `设置 >设备信息` 或者`设置 >WiFi` 里面看到 `ip`。 **USB线**: * 确保设备通过 Type C 数据线连接到电脑,设备处于开机状态并且进入了功能选择界面。 * 确保设备驱动已经安装: * Windows 下可以在`设备管理器`中查看是否有 USB 虚拟网卡设备,如果有感叹号则是去动没有安装好,按照[快速开始](./index.html) 中的方法安装驱动即可。 * Linux 下可以通过`ifconfig`或者`ip addr`查看是否有`usb0`设备或者`lsusb`查看所有 USB 设备。 Linux 已经自带去动,所以识别不到检查硬件连接,设备系统是否是最新,以及设备是否已经正常启动即可。 * Mac OS 同 Linux 方法,或者在`系统设置` > `网络` 里面看有没有 usb 网卡。 * 另外 检查 USB 线缆的质量,换一个高质量的线缆。 * 另外 检查电脑 USB 口的质量,比如实测某些小主机 USB 口 EMI 设计太糟糕,外接一个质量好点的 USB HUB 反而可以使用了,也可以换 USB 口 或者直接换台电脑。 ## MaixVision 运行摄像头例程显示图像卡顿 默认配的 GC4653 摄像头最高帧率为 30 帧,摄像头例程正常情况下 MaixVision 的显示肉眼不会有卡顿感,如果卡顿,首先考虑传输问题: * 检查网络连接质量,比如 WiFi。 * 如果用的 USB 连接,检查 USB 线质量, 电脑 USB 口质量,可以尝试换台电脑或者 USB 口 或者 USB 线缆尝试对比。 ## 此产品适合量产吗 答案:适合。 * 软件上使用 Python 即可稳定运行,方便开发也可靠。 * 软件上另外支持和 MaixPy 相同 API 的 C++ SDK(MaixCDK),满足高效率和稳定要求。 * 硬件上提供各种形式的 PCB 和外壳,核心板和整板都有,芯片供货稳定,如果有量产需求可以联系 support@sipeed.com 咨询。 * 量大价更优。 ## MaixPy v4 和 v1 v3 有什么区别? * MaixPy v4 使用 Python 语言,是吸取了 v1 v3 经验的集大成之作,有更好的配套软件和生态,更多的功能,更简单的使用方式和更完善的文档;硬件有很大提升的同时加个和另外两者的硬件价格想当甚至更便宜;另外也做了兼容 K210 的使用体验和 API,方便用户从 v1 快速迁移到 v4。 * v1 使用了 Micropython 语言,有很多局限性,比如第三方库支持有限;同时受限于 Maix I (K210) 的硬件性能,内存不够用,AI 模型支持有限,很多编解码不支持硬件加速等缺点。 * v3 也是使用了 Python 语言,基于 Maix II Dock (v831) 硬件,硬件 AI 模型支持有限,而且全志的基础生态不够开放,API 也不够完善,此版本仅作为 Maix II Dock (v831)上面使用,不会继续更新。 ## MaixPy 目前只支持 MaixCAM 吗,用其它同款芯片的板子行不行? MaixPy 目前仅支持 MaixCAM 系列板子,其它同款芯片的板子也不支持(包括 Sipeed 的同款芯片板子 比如 LicheeRV Nano),强烈不建议尝试,导致设备损坏(比如冒烟烧屏等)后果自负。 未来 Sipeed 出的 Maix 系列的产品都将继续得到 MaixPy 支持,目前如果 MaixCAM 有什么无法满足的需求,可以到 [MaixHub 讨论版块](https://maixhub.com/discussion) 提出需求或者发送邮件到 support@sipeed.com. ## 可以用除了官方搭配的摄像头或者屏幕以外的自己的摄像头或者屏幕吗? 不建议这样操作,除非你有够丰富的软硬件知识和经验,否则可能导致设备损坏。 官方搭配的配件对应的软硬件是调教过的,表现效果是最好的,上手即可使用,其它配件可能接口不同,驱动不同,软件不同,需要自己去调教,这是一个非常复杂的过程,不建议尝试。 当然,如果你是大佬,我们也欢迎你提交 PR! ## 运行模型报错 cvimodel built for xxxcv181x CANNOT run on platform cv181x 解析模型文件失败了,一般情况是模型文件损坏造成的,确保你的模型文件是没有损坏的。 比如: * 用编辑器编辑了二进制文件导致文件损坏。比如用 maixvision 打开了 cvimodel 文件,由于 maixvision 的自动保存功能会破坏二进制文件,所以不要用 maixvision 等文本编辑器打开二进制文件并保存(后面 MaixVision 会修复这个问题,即去掉 maixvision 的自动保存功能)。 * 如果是从网上下载的,保证下载没有出问题,一般网上的文件提供 sha256sum/md5 校验值,下载下来后可以对比一下,具体方法请自行搜索或者问 ChatGPT。 * 如果是来自压缩包,请确认解压过程没有出错,可以从压缩包重新解压一遍保证中间没有出错。 * 保证传输到设备的过程没有造成文件损坏,可以对比一下设备中的文件和电脑中的文件 sha256sum 值,具体方法请自性搜索或者问 ChatGPT。 ## 上电启动黑屏,屏幕无显示 请看 [MaixCAM FAQ](https://wiki.sipeed.com/hardware/zh/maixcam/faq.html) ## 通过 USB 连接了电脑和 MaixCAM 为什么电脑没有出现串口? MaixCAM 的 USB 口是芯片的 USB2.0 接口,不是 USB 转串口接口,所以插上电脑不会出现串口,这是正常现象。 没有 USB 转串口怎么通信呢? 
默认 USB 会模拟出 USB 网卡,所以当你将 USB 插上电脑时会出现虚拟网卡,按照 [快速开始](./index.html) 中的说明可以使用 MaixVision 与 MaixCAM 通信实现代码运行、图像预览、文件管理等功能。 另外,因为 USB 模拟了网卡,所以你也可以用通用的 SSH 软件连接 MaixCAM,实现通信。 或者你也可以连接 WiFi 和电脑在同一个局域网下通信。 如果你要使用串口,分为两种情况: 1. 串口和电脑通信:需要自行购买任意一款 USB 转串口模块来连接电脑的 USB 和板子的串口(对于MaixCAM 是 UART0 的 A16(TX) 和 A17(RX) 引脚,或者连接 MaixCAM 套餐送的 USB 转接板引出的两个 TX RX 引脚,也是 A16 A17 引脚,是等效的) 2. 串口和其它 MCU/SOC 通信: 直接连接 MaixCAM 的 A16(TX)和 A17(RX) 到 单片机的 RX 和 TX 引脚即可。 ## 红色屏幕,提示初始化显示失败,请查看FAQ 从子面意思可以看到是显示驱动初始化失败了。 MaixCAM 的底层的显示驱动目前(2024.7)是和摄像头驱动绑定在一起初始化的,所以遇到这个问题多半是摄像头驱动初始化失败了。 解决方法: * 尝试更新到最新的系统,安装最新的运行库(重要!!!)因为运行库需要和系统里面的驱动配合工作,版本不一致可能会出错,所以更新到最新的镜像安装最新运行库即可一般就能解决。 * 有可能是多个进程一起企图占用驱动,最简单粗暴的方法就是重启。 * 硬件上摄像头连接有问题,检查摄像头硬件连接,以及摄像头是否损坏。 ## Runtime、MaixPy、系统镜像有什么区别,我应该升级哪个? * **Runtime** 是运行时环境,系统很多功能依赖这个,包括 MaixPy 也依赖此环境,遇到无法运行程序的问题首先联网检查更新这个。 * 系统镜像包含了基本的操作系统、硬件驱动、内置应用,以及 MaixPy 固件等,是基础环境,最好是保持最新, 特别是在[Release](https://github.com/sipeed/MaixPy/releases)页面中版本更新中提到了系统有更新,则强烈建议更新系统,因为有些 MaixPy 功能可能依赖系统里面的驱动。 > 更新系统会格式化所有之前的数据,更新前请备份好设备系统中有用的数据。 * **MaixPy** 是运行 MaixPy 程序的依赖库,如果不需要更新系统功能,以及更新日志中没有提到系统有重要更新比如驱动,那可以单独更新 MaixPy 即可,不过以防有驱动变化,最好是直接重新烧录系统。 ## 加载 MUD 模型文件报错 *****.cvimodel not exists, load model failed * 检查设备中(注意不是电脑里面,需要传到设备里面去)是否真的存在你加载的 .mud 文件。 * 检查你写的模型路径写错没有。 * 如果你改过文件名,需要注意: MUD 文件是一个模型描述文件,可以用文本编辑器编辑,实际的模型文件是 .cvimodel 文件(对于MaixCAM),.mud 文件中指定了 .cvimodel 的文件名和路径,所以如果你改动了 `.cvimodel`的文件名,那么也要修改`.mud`文件中的`model`路径,比如这里 Yolov5 模型的 mud: ```ini [basic] type cvimodel model yolov5s_224_int8.cvimodel [extra] model_type yolov5 input_type rgb mean 0, 0, 0 scale 0.00392156862745098, 0.00392156862745098, 0.00392156862745098 anchors 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326 labels person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair drier, toothbrush ``` 这里制定了 `model` 为相对这个`.mud`文件目录的 `yolov5s_224_int8.cvimodel` 文件为模型文件,如果你改了`yolov5s_224_int8.cvimodel` 为其它名,那也需要改这里。 ## MaixVision import maix 显示红色波浪线 这是 MaixVision 的代码提示功能报错找不到 maix 模块。 这里需要搞清楚一个概念: MaixVision 的代码提示依赖的是电脑本地的 Python 包,代码运行依赖的设备端的 Python 包,所以要让 MaixVision 能够提示就要在电脑上也安装 Python 和 `MaixPy` 包。具体请看[MaixVision 使用文档](./basic/maixvision.html)。 ## MaixCAM 启动非常缓慢,甚至超过了 1 分钟,或者屏幕在闪动 多半是由于供电不足造成的, MaixCAM 需要 5v 150mA~500mA 左右的电压和点流,如果你遇到了这种现象,可以使用 USB 转 TTL 模块连接 MaixCAM 的串口到电脑,可以看到`Card did not respond to voltage select! : 110` 这样的字样,说明供电不足,换一个更加的稳定的供电设备即可。 对于 MaixCAM,在开机会有 400mA 的电流,待机且屏幕有显示需要 250mA,全速运行 AI 模型需要 400mA~500mA 的电流,所以保证电源的稳定性十分重要! ## MaixCAM 黑屏无法启动,或者卡在 LOGO 界面 参考[MaixCAM FAQ](https://wiki.sipeed.com/hardware/zh/maixcam/faq.html) ## MaixVision 启动程序一直“卡在“ start running ... MaixVision 的日志输出窗口在开始启动程序是会打印一句`start running ...`代表程序开始发送到设备并开始执行, 后面再打印(输出)什么取决于你的程序,比如你调用了`print(\"hello\")` 则会打印`hello`,如果你的程序没有任何打印那就不会有任何日志。。。 所以实际上不是卡住了,而是你的程序就没有输出过任何东西,自然也就不会显示任何日志了,可以尝试在自己的程序中加`print(\"xxx\")`来打印,这也是我们调试程序最简单的方法。 ## 为什么硬件有 256MB 内存,在系统里只能用 128MB 内存呢? 
因为其它内存给底层驱动和内核预留了,用于摄像头、屏幕、硬件编解码、NPU 等驱动使用,可以通过 `cat /sys/kernel/debug/ion/cvi_carveout_heap_dump/summary` 看到驱动使用的内存(算能特有,叫 ION 内存),以及其它内存可以通过`cat /proc/meminfo`看到,如果你想调整内存分配,需要自己编译系统,修改系统的`LicheeRV Nano Buildbuild/boards/sg200x/sg2002_licheervnano_sd/memmap.py` 文件中的 `ION_SIZE` 来调整(看[定制系统文档](./pro/compile_os.html))。 ## 为什么无法安装运行库,提示错误 请求失败! * 请保证设备已经成功连接到互联网,可以换一个手机热点试试。 * 确保系统镜像是烧录的最新的。 * 如果提示 DNS 解析失败,可能时网络 DNS 设置问题,可以换一个手机热点试试,或者手动修改 `/boot/resolv.conf`(只修改这个文件需要重启) 和 `/etc/resolv.conf`(修改了这个文件不用重启,重启就是把前者拷贝覆盖到这个文件)中的 DNS 服务器设置。 * 确保你是从 Sipeed 购买的正版 MaixCAM。 * 咨询客服,带上系统版本可以 device_key (可以连接上 MaixVision 点击断开连接按钮后看到,有屏幕的也可以在`系统设置 >系统信息`中看到) ## 编译报错: type not registered yet? ``` from ._maix.peripheral.key import add_default_listener ImportError: arg(): could not convert default argument into a Python object (type not registered yet?). #define ``` 显示有对象没有定义成 python 对象,在 MaixPy 中一般是由于自动扫描API生成时的顺序问题造成的,比如在`a.hpp`中有一个`@maixpy`声明的`API`, 在`b.hpp` 中有另一个`API`而且参数使用了`a.hpp`中的定义,那么可以说`b.hpp`需要依赖`a.hpp`,但目前`MaixPy`的编译脚本不会做依赖关系扫描,所以需要在`MaixPy`项目中的`components/maix/headers_priority.txt`文件中手动指定一下,`a.hpp`在`b.hpp`前面扫描就可以了。"},"/maixpy/doc/zh/basic/app_usage.html":{"title":"MaixCAM MaixPy 应用使用说明","content":" title: MaixCAM MaixPy 应用使用说明 layout: redirect redirect_url: ./app.html "},"/maixpy/doc/zh/basic/python_pkgs.html":{"title":"MaixCAM MaixPy 添加额外的 Python 软件包","content":" title: MaixCAM MaixPy 添加额外的 Python 软件包 ## 简介 MaixPy 基于 Python 语言,提供了大量方便嵌入式应用开发的功能和 API,除此之外,你也可以使用其它的 Python 包来扩展功能。 ## 安装额外的 Python 包 > 注意可能不是所有 Python 包都支持,一般只支持纯 Python 包,不支持 C 扩展包, C 扩展包可能需要你手动在电脑交叉编译(比较复杂,这里就不介绍了)。 ### 方法一: 使用 Python 代码来安装 在 MaixVision 中使用 Python 代码来安装你需要的包,比如: ```python import os os.system(\"pip install 包名\") ``` 要更新一个包,可以使用: ```python import os os.system(\"pip install upgrade 包名\") ``` ### 方法二: 终端使用 pip 命令安装 使用[Linux 基础](./linux_basic.html)中介绍的终端使用方法,使用 `pip install 包名` 安装你需要的包。"},"/maixpy/doc/zh/basic/app.html":{"title":"MaixCAM MaixPy 应用开发和应用商店","content":" title: MaixCAM MaixPy 应用开发和应用商店 ## 哪里找应用 开机后会自动进入应用选择界面,内置各种应用均发布在 [MaixHub 应用商店](https://maixhub.com/app), 可以在这里找到对应应用的介绍和使用说明。 ## 哪里找源码 源码可以在应用商店应用页面看到源码链接(如果有)。 官方集成的应用源码都在 [MaixPy/projects](https://github.com/sipeed/MaixPy/tree/main/projects) 目录 或者 [MaixCDK/projects](https://github.com/sipeed/MaixCDK/tree/main/projects) 。 ## 安装应用 可以先设置语言 `设置 > 语言`, 以及 `设置 > WiFi`。 `应用商店`应用可以用来升级和安装应用,连接上可以连接互联网的 WiFi 后即可在[MaixHub 应用商店](https://maixhub.com/app)扫码安装应用。 ## 应用生态简介 为了让开发板做到开箱即用,以及方便用户无门槛地使用,以及方便开发者分享自己的有趣应用,并且能有有效的渠道获取到反馈甚至是收益,我们推出了一个简易的应用框架,包括: * **[应用商店](https://maixhub.com/app)**: 开发者上传分享应用,用户无需开发直接下载使用,开发者可以获取到一定的现金收益(来自 MaixHub 官方以及用户打赏)。 * **出厂内置大量应用**: 官方提供了一些常用的应用,比如找色块、AI 物体检测追踪、找二维码、人脸识别等等,用户可以直接使用,也可以作为串口模块直接使用。 * **MaixPy + MaixCDK** 软件开发包:使用 [MaixPy](https://github.com/sipeed/maixpy) 或者 [MaixCDK](https://github.com/sipeed/MaixCDK) 可以用 Python 或者 C/C++ 语言快速开发嵌入式 AI 视觉听觉应用,超高效率实现你的有趣想法。 * **MaixVision** 配套电脑端开发工具: 全新的电脑端代码开发工具,快速上手、调试、运行、上传代码、安装应用到设备,一键式开发,甚至支持图像化积木式编程,小学生也能轻松上手。 大家可以多多关注应用商店,也可以在应用商店中分享自己的应用,大家一起共建活跃的社区。 ## 打包应用 使用 MaixPy + MaixVison 可以方便地开发、打包、安装应用: * 在 MaixVision 中使用 MaixPy 开发应用程序,可以是单个文件,也可以是一个工程目录。 * 连接设备。 * 点点击 MaixVision 左下角的 安装 按钮,会弹出一个界面填写应用的基本信息,id 是用来判别应用的 id,一个设备不能同时安装相同 id 的不同应用,所以 id 应该与 MaixHub 上面已经有的应用 id 不同,应用名字可以重复。以及图标等。 * 点击打包应用,会将应用打包成一个安装包,如果你要上传到 [MaixHub 应用商店](https://maixhub./com/app),用这个打包好的文件即可。 * 点击 安装应用,这会将打包好的应用安装到设备。 * 断开与设备的连接,就能看到设备功能选择界面多了一个你的应用,直接点进去就能运行。 > 如果你用 MaixCDK 开发,使用 `maixcdk relrease` 就能打包出来一个应用,具体看 MaixCDK 的文档。 ## 退出应用 如果你只是写了比较简单的应用,没有做界面和返回按钮,默认可以按设备上的功能按键(一般是 USER 或者 FUNC 或者 OK 
按钮)或者返回按钮(如果有这个按键,MaixCAM 默认没有这个按键)来退出应用。 ## 安装应用 * **方法一**: 设备使用`应用商店`应用,从[应用商店](https://maixhub.com/app)找到应用,设备联网后,扫码安装。 * **方法二**: 使用安装包本地安装,将安装包传输到设备文件系统,比如`/root/my_app_v1.0.0.zip`,然后执行代码,注意修改`pkg_path`变量的路径,你也可以在`MaixPy`的 `examples/tools/install_app.py`找到本代码: ```python import os def install_app(pkg_path): if not os.path.exists(pkg_path): raise Exception(f\"package {pkg_path} not found\") cmd f\"/maixapp/apps/app_store/app_store install {pkg_path}\" err_code os.system(cmd) if err_code ! 0: print(\"[ERROR] Install failed, error code:\", err_code) else: print(f\"Install {pkg_path} success\") pkg_path \"/root/my_app_v1.0.0.zip\" install_app(pkg_path) ``` * **方法三**: * 如果是使用`MaixPy`开发的应用,在项目根目录(包含`app.yaml`和`main.py`)执行`maixtool deploy`会弹出一个二维码,保持设备和电脑在同一局域网,设备使用应用商店扫描对应的局域网地址二维码就能在线安装。 * 如果是使用`MaixCDK`开发的应用,在项目根目录执行`maixcdk deploy`也会出现二维码,保持设备和电脑在同一局域网,设备使用应用商店扫描对应的局域网地址二维码就能在线安装。 ## 应用开发基本准则 * 因为默认都配了触摸屏幕,推荐都写一个简单的界面显示,最好有触摸交互。实现方法可以在例子里面找找参考。 * 界面和按钮不要太小,因为 MaixCAM 默认的屏幕是 2.3寸 552x368分辨率,PPI 比较高屏幕比较小,要让手指能很容易戳到并且不会点错。 * 每个应用实现的主要功能实现一个简单的串口交互,基于[串口协议](https://github.com/sipeed/MaixCDK/blob/master/docs/doc/convention/protocol.md) ([例程](https://github.com/sipeed/MaixPy/tree/main/examples/communication/protocol)),这样用户可以直接当成串口模块使用,比如人脸检测应用,可以在检测到人脸后通过串口输出坐标。"},"/maixpy/doc/zh/basic/os.html":{"title":"MaixCAM MaixPy 升级和烧录系统","content":" title: MaixCAM MaixPy 升级和烧录系统 ## 介绍 如果你购买了官方(Sipeed)的带 TF 卡的套餐,一般来说出厂已经烧录好了系统,可以跳过这一步直接使用。 但是为了防止出厂烧录的系统版本过旧,**强烈建议** 先按照教程 **升级到最新** 的系统。 ## 获得最新系统 在 [MaixPy 发布页面](https://github.com/sipeed/MaixPy/releases) 找到最新的系统镜像文件,比如`maixcam_os_20240401_maixpy_v4.1.0.xz`。 > 中国国内用户下载速度慢可以用迅雷下载,速度可能会快一些。 > 或者使用例如 [github.abskoop.workers.dev](https://github.abskoop.workers.dev/) 这种代理网站下载。 备用地址:[Sourceforge](https://sourceforge.net/projects/maixpy/files/) (同步可能不及时,建议优先上面的方式) ## 如何确认系统是否需要升级 * 在开机后的功能选择界面,点击`设置`,然后点击`设备信息`,可以看到系统的版本号。 * 到[MaixPy 发布历史页面](https://github.com/sipeed/MaixPy/releases)查看更新日志,里面有 MaixPy 固件和系统镜像的更新说明,如果在你的版本后有重要更新,建议升级。 > 如果最新系统和当前系统对比只是 MaixPy 固件的常规更新,也可以不升级,在 `设置` 中的 `更新 MaixPy` 中单独更新 `MaixPy`,不过一般 **不推荐** 这样做。 ## 烧录系统到 MaixCAM 参考 硬件文档中的 [MaixCAM 系统烧录](https://wiki.sipeed.com/hardware/zh/maixcam/os.html) 教程,注意里面能满足 `USB 烧录`的条件则推荐使用 `USB 烧录`方式,USB 烧录方式不用拔 TF 卡。"},"/maixpy/doc/zh/basic/linux_basic.html":{"title":"Linux 基础知识","content":" title: Linux 基础知识 ## 简介 本章内容对于刚入门的同学来说,可以先跳过此章节,在学会 MaixPy 基础开发后再来学习也是可以的。 最新的 MaixPy 支持的 MaixCAM 硬件支持跑 Linux 系统,所以 MaixPy 底层都是基于 Linux 系统进行开发的。 虽然 Sipeed 开发的 MaixPy 已经为开发者们做了很多工作,即使不知道 Linux 系统知识也能愉快使用,但是以防在某些情况下需要一些底层操作,以及方便未接触过 Linux 的开发者学习,这里写一些 Linux 基础知识。 ## 为什么需要 Linux 系统 具体的原因大家可以自行查阅,这里用通俗的看起来不太专业的话语简单举几个例子方便初学者理解: * 在单片机中,我们的程序是一个死循环程序,用上 Linux 后我们可以同时跑很多程序,每个程序看起来都独立在同时运行,每个程序具体怎么执行的由操作系统实现。 * 基于 Linux 的开发者众多,需要功能和驱动可以很方便地找到,不需要自己再实现一遍。 * 基于 Linux 配套的软件工具丰富,可以很方便地进行开发和调试,比如在本教程没有提到的一些 Linux 通用工具理论上也是可以使用的。 ## 文件系统 什么是文件系统? * 就像电脑的文件系统一样,Linux 上会将硬件磁盘用文件系统进行管理,这样我们可以很方便地向磁盘读写数据。 * 对于学过单片机没有接触过文件系统开发的同学来讲,可以理解为我们有一个 Flash 或者 TF 卡,我们可以通过 API 读写 Flash 存取数据,断电后也能保存数据,但是 Flash 具有读写寿命,我们往往需要写一套程序去保证 Flash 读写寿命,而文件系统就可以理解成这样一套成熟的程序,文件系统帮我们完成了具体如何管理 Flash 空间和读写,我们只需调用文件系统的 API 即可,大大减少了我们的开发工作量并且用成熟的程序保证了稳定性和安全性。 ## 在电脑和设备(开发板)之间传输文件 既然设备有 Linux 和文件系统,那我们怎么发送文件到设备呢? 
对于 MaixPy 我们配套了 MaixVision, 在后面的版本也会支持文件管理功能,在此之前可以用下面的方法: 这里我们主要介绍通过网络传输的方式,其它方式可自行探索`传输文件到 Linux`: * 确保设备和电脑连接到了同一个局域网,比如: * MaixCAM 的 USB 口连接到电脑会创建一个虚拟网卡,在电脑端的设备管理器就能看到,设备的 IP 可以在设备的`设置 >设备信息`中看到设备名和 IP。 * 也可以在设备`设置 >WiFi`中连接到和电脑相同的局域网。 * 电脑使用 SCP 或者 SFTP 协议传输文件到设备,具体的软件有很多,具体的软件和使用方法可以自行搜索,比如: * 在 Windows 上可以使用 WinSCP 或者 FileZilla,或者 scp 命令等。 * 在 Linux 上可以使用 FileZilla 或者 scp 命令 等。 * 在 Mac 上可以使用 FileZilla 或者 scp 命令 等。 ## 终端和命令行 终端就是通过`终端`这个软件与 Linux 系统进行通信和操作的工具,类似于 Windows 的`cmd`或者`PowerShell`。 比如我们可以在电脑的 Window 系统中的 powershell 或者 Linux系统中的 终端 工具中输入`ssh root@maixcam xxxx.local` 这里具体的名字在设备的`设置 >设备信息`中可以看到,这样我们就可以通过终端连接到设备了(用户名和密码都是`root`)。 然后我们通过输入命令来操作设备,比如`ls`命令可以列出设备文件系统中当前目录下的文件, `cd` 用来切换当前所在的目录(就像电脑文件管理中点击文件夹切换目录一样), ```shell cd / # 切换到根目录 ls # 显示当前目录(根目录)下的所有文件 ``` 然后会显示类似下面的内容: ```shell bin lib media root tmp boot lib64 mnt run usr dev linuxrc opt sbin var etc lost+found proc sys ``` 更多命令学习请自行搜索`Linux 命令行使用教程`,这里只是为了让初学者知道基本概念,这样有开发者提到时可以知道是什么意思。"},"/maixpy/doc/zh/basic/maixpy_upgrade.html":{"title":"MaixCAM 更新 MaixPy","content":" title: MaixCAM 更新 MaixPy 有两种方法,如果第一次上手使用,为了降低难度,可以直接使用出厂 TF 卡自带的 MaixPy 固件尝试,以后再考虑更新。 不过因为不知道你拿到手的是什么时候出厂的 TF 卡,所以建议都更新一下系统。 ## 直接更新系统(强烈推荐) 按照 [升级和烧录系统](./os.html) 中的操作升级到最新的系统,里面就包含了最新的 MaixPy 固件。 ## 只更新 MaixPy 固件 在 [MaixPy 仓库 release 页面](https://github.com/sipeed/MaixPy/releases) 看到最新的版本信息和更新日志,其中包含了 MaixPy 固件信息,以及对应版本使用的系统信息。 如果不想更新系统(因为一般系统变动不大,可以看 MaixPy 更新日志中是否有系统改动相关,再决定是否更新系统),则可以只更新 MaixPy 固件。 * 在设置中设置 WiFi, 让系统联网。 * 点击设置应用中的 `更新 MaixPy` 进行更新。 也可以执行 Python 代码调用系统命令来更新: ```python import os os.system(\"pip install MaixPy U\") ``` 由于默认从`pypi.org`下载,中国国内速度可能比较慢,可以设置国内的镜像站点,修改下面代码的 `server` 变量来选择,此脚本在`MaixPy` 的 `examples/tools` 目录下也有,可以直接在`MaixVision`中运行。 ```python import os def install_maixpy(server): cmd f\"pip install maixpy U i {server}\" print(\"Start install now, wait patiently ...\") err os.system(cmd) if err ! 0: print(\"[ERROR] execute failed, code:\", err) else: print(\"Install complete\") servers { \"pypi\": \"https://pypi.org/simple\", \"aliyun\": \"https://mirrors.aliyun.com/pypi/simple\", \"ustc\": \"https://pypi.mirrors.ustc.edu.cn/simple\", \"163\": \"https://mirrors.163.com/pypi/simple\", \"douban\": \"https://pypi.douban.com/simple\", \"tuna\": \"https://pypi.tuna.tsinghua.edu.cn/simple\" } # Select server based on your network server servers[\"tuna\"] install_maixpy(server) ``` > 如果你会使用终端, 也可以直接在终端中使用 `pip install MaixPy U` 来更新 MaixPy。 另外你也可以手动下载`wheel` 文件(`.whl`格式)传输到设备(传输方法见后文[MaixVision 使用](./maixvision.html))后通过 `pip install ******.whl` 命令来安装。"},"/maixpy/doc/zh/basic/auto_start.html":{"title":"MaixCAM MaixPy 应用开机自启","content":" title: MaixCAM MaixPy 应用开机自启 打包安装好的应用可以设置开机自动启动,这样开机就不会显示应用菜单,直接进入指定的应用。 ## 设置应用开机自启方法一 先打包安装好应用,然后在设备`设置 > 开机自启` 设置中选择需要自动启动的应用即可,取消开机自启也是在这里设置。 ## 设置应用开机自启方法二 运行 Python 脚本设置,修改脚本中的`new_autostart_app_id` 变量为你想设置的 `app_id`, 所有已经安装了的`app_id`会在执行脚本时打印出来,可以先执行一遍找到你想设置的`app_id`,修改变量再执行一遍即可,取消自动启动设置为`None`即可。 此脚本也可以在`MaixPy`的`examples/tools`中找到`set_autostart.py`: ```python import configparser, os def parse_apps_info(): info_path \"/maixapp/apps/app.info\" conf configparser.ConfigParser() conf.read(info_path) version conf[\"basic\"][\"version\"] apps {} for id in list(conf.keys()): if id in [\"basic\", \"DEFAULT\"]: continue apps[id] conf[id] return apps def list_apps(): apps parse_apps_info() print(f\"APP num: {len(apps)}\") for i, (id, info) in enumerate(apps.items()): name_zh info.get(\"name[zh]\", \"\") print(f\"{i + 1}. 
[{info['name']}] {name_zh}:\") print(f\" id: {id}\") print(f\" exec: {info['exec']}\") print(f\" author: {info['author']}\") print(f\" desc: {info['desc']}\") print(f\" desc_zh: {info.get('desc', 'None')}\") print(\"\") def get_curr_autostart_app(): path \"/maixapp/auto_start.txt\" if os.path.exists(path): with open(path, \"r\") as f: app_id f.readline().strip() return app_id return None def set_autostart_app(app_id): path \"/maixapp/auto_start.txt\" if not app_id: if os.path.exists(path): os.remove(path) return with open(path, \"w\") as f: f.write(app_id) if __name__ \"__main__\": # new_autostart_app_id \"settings\" # change to app_id you want to set new_autostart_app_id None # remove autostart list_apps() print(\"Before set autostart appid:\", get_curr_autostart_app()) set_autostart_app(new_autostart_app_id) print(\"Current autostart appid:\", get_curr_autostart_app()) ``` ## 设置应用开机自启方法三 你也可以通过修改设备中的 `/maixapp/auto_start.txt` 文件来设置,和传输文件的方法请看前面的文档。 * 首先知道你需要设置的应用的 `id` 是什么。在你打包应用的时候设置的;如果不是你自己打包的应用,可以先安装到设备,查看设备`/maixapp/apps/` 目录下的文件夹名就是应用名,(也可以下载查看设备的`/maixapp/apps/app.info` 文件,`[]`中括号部分就是应用`id`)。 * 然后写入 `id` 到 `/maixapp/auto_start.txt` 文件即可。(可以在电脑本地创建文件,然后 `MaixVision` 传输到设备。) * 如果要取消,删除设备上的 `/maixapp/auto_start.txt` 文件即可。 ## 其它方法 对于 MaixCAM, 因为底层是 Linux, 如果你熟悉 Linux, 编辑`/etc/rc.local` 或者 `/etc/init.d` 下的启动脚本也可以。 但是需要注意的是,这种方式会让 MaixVision 在连接的时候无法停止这个应用,从而造成资源占用(比如屏幕和摄像头) MaixVision 可能无法正常跑程序,而前两种方法 MaixVision 连接设备时是可以正常让程序退出以供 MaixVsion 跑程序的。 所以这种方法比较适合开机跑一些不会占用屏幕和摄像头等资源的后台进程,一般情况下如果你不熟悉 Linux 不建议这样操作。"},"/maixpy/doc/zh/vision/yolov5.html":{"title":"MaixPy MaixCAM 使用 YOLOv5 / YOLOv8 / YOLO11 模型进行目标检测","content":" title: MaixPy MaixCAM 使用 YOLOv5 / YOLOv8 / YOLO11 模型进行目标检测 ## 目标检测概念 目标检测是指在图像或视频中检测出目标的位置和类别,比如在一张图中检测出苹果、飞机等物体,并且标出物体的位置。 和分类不同的是多了一个位置信息,所以目标检测的结果一般是一个矩形框,框出物体的位置。 ## MaixPy 中使用目标检测 MaixPy 默认提供了 `YOLOv5` 和 `YOLOv8` 和 `YOLO11` 模型,可以直接使用: > YOLOv8 需要 MaixPy > 4.3.0。 > YOLO11 需要 MaixPy > 4.7.0。 ```python from maix import camera, display, image, nn, app detector nn.YOLOv5(model \"/root/models/yolov5s.mud\", dual_buff True) # detector nn.YOLOv8(model \"/root/models/yolov8n.mud\", dual_buff True) # detector nn.YOLO11(model \"/root/models/yolo11n.mud\", dual_buff True) cam camera.Camera(detector.input_width(), detector.input_height(), detector.input_format()) dis display.Display() while not app.need_exit(): img cam.read() objs detector.detect(img, conf_th 0.5, iou_th 0.45) for obj in objs: img.draw_rect(obj.x, obj.y, obj.w, obj.h, color image.COLOR_RED) msg f'{detector.labels[obj.class_id]}: {obj.score:.2f}' img.draw_string(obj.x, obj.y, msg, color image.COLOR_RED) dis.show(img) ``` 效果视频:
    这里使用了摄像头拍摄图像,然后传给 `detector`进行检测,得出结果后,将结果(分类名称和位置)显示在屏幕上。 以及这里 替换`YOLO11` 和 `YOLOv5` 和`YOLOv8`即可实现`YOLO11/v5/v8/`切换,注意模型文件路径也要修改。 模型支持的 80 种物体列表请看本文附录。 更多 API 使用参考 [maix.nn](/api/maix/nn.html) 模块的文档。 ## dual_buff 双缓冲区加速 你可能注意到这里模型初始化使用了`dual_buff`(默认值就是 `True`),使能 `dual_buff` 参数可以加快运行效率,提高帧率,具体原理和使用注意点见 [dual_buff 介绍](./dual_buff.html)。 ## 更多输入分辨率 默认的模型输入是`320x224`分辨率,因为这个分辨率比例和默认提供的屏幕分辨率接近,你也可以手动下载其它分辨率的模型替换: YOLOv5: [https://maixhub.com/model/zoo/365](https://maixhub.com/model/zoo/365) YOLOv8: [https://maixhub.com/model/zoo/400](https://maixhub.com/model/zoo/400) YOLO11: [https://maixhub.com/model/zoo/453](https://maixhub.com/model/zoo/453) 分辨率越大精度越高,但是运行耗时越长,根据你的应用场景选择合适的即可。 ## YOLOv5 和 YOLOv8 和 YOLO11 用哪个? 这里提供的 `YOLOv5s` 和 `YOLOv8n` 和 `YOLO11n` 三种模型,`YOLOv5s`模型更大,`YOLOv8n YOLO11n`速度快一点点, 精度按照官方数据来说`YOLO11n > YOLOv8n > YOLOv5s`,可以实际测试根据自己的实际情况选择。 另外你也可以尝试`YOLOv8s`或者`YOLO11s`,帧率会低一些(比如 yolov8s_320x224 比 yolov8n_320x224 慢 10ms),准确率会比前两个都高,模型可以在上面提到的模型库下载到或者自己从`YOLO`官方仓库导出模型。 ## 摄像头分辨率和模型分辨率不同可以吗 上面使用`detector.detect(img)`函数进行检测时,如果 `img` 的分辨率和模型分辨率不同,这个函数内部会自动调用`img.resize`将图像缩放成和模型输入分辨率相同的,`resize`默认使用`image.Fit.FIT_CONTAIN` 方法,即保持宽高比缩放,周围填充黑色的方式,检测到的坐标也会自动映射到原`img`的坐标上。 ## MaixHub 在线训练自己的目标检测模型 默认提供的 80 分类检测模型,如果你需要检测特定的物体,请到[MaixHub](https://maixhub.com) 学习并训练目标检测模型,创建项目时选择`目标检测模型`即可,参考[MaixHub 在线训练文档](./maixhub_train.html)。 或者到[MaixHub 模型库](https://maixhub.com/model/zoo?platform maixcam) 找社区成员分享的模型。 ## 离线训练自己的目标检测模型 强烈建议先使用 MaixHub 在线训练模型,此种方式难度比较大,不建议新手一来就碰这个方式。 此种方式有些许默认你知道的知识文中不会提,遇到问题多上网搜索学习。 请看 [离线训练YOLOv5模型](./customize_model_yolov5.html) 或者 [离线训练 YOLOv8/YOLO11 模型](./customize_model_yolov8.html) ## 附录:80分类 COCO 数据集的 8 种物体分别为: ```txt person bicycle car motorcycle airplane bus train truck boat traffic light fire hydrant stop sign parking meter bench bird cat dog horse sheep cow elephant bear zebra giraffe backpack umbrella handbag tie suitcase frisbee skis snowboard sports ball kite baseball bat baseball glove skateboard surfboard tennis racket bottle wine glass cup fork knife spoon bowl banana apple sandwich orange broccoli carrot hot dog pizza donut cake chair couch potted plant bed dining table toilet tv laptop mouse remote keyboard cell phone microwave oven toaster sink refrigerator book clock vase scissors teddy bear hair drier toothbrush ```"},"/maixpy/doc/zh/vision/ai.html":{"title":"MaixCAM MaixPy AI 视觉基本知识","content":" title: MaixCAM MaixPy AI 视觉基本知识 update: date: 2024 04 03 author: neucrack version: 1.0.0 content: 初版文档 ## 简介 如果没有 AI 基础,在学习 AI 前可以先看[什么是人工智能(AI)和机器学习](https://wiki.sipeed.com/ai/zh/basic/what_is_ai.html) 了解一下 AI 的基本概念。 然后我们使用的视觉 AI 一般都是基于`深度神经网络学习`这个方法,有兴趣可以看看[深度神经网络(DNN)基础知识](https://wiki.sipeed.com/ai/zh/basic/dnn_basic.html) ## MaixPy 中使用视觉 AI 在 MaixPy 中使用视觉 AI 很简单,默认提供了常用的 AI 模型,不需要自己训练模型就可以直接使用,在[MaixHub 模型库](https://maixhub.com/model/zoo) 中选择`maixcam` 就可以找到。 并且在底层已经封装好的 API,只需要简单的调用就可以实现。 如果你想训练自己的模型,也可以先从[MaixHub 在线训练](https://maixhub.com/model/training/project) 开始,在线平台只需要点点点就能训练出模型,不需要购买昂贵的机器,不需要搭建复杂的开发环境,也不需要写代码,非常适合入门,也适合懒得翻代码的老手。 一般训练得到了模型文件,直接传输到设备上,调用 MaixPy 的 API 就可以使用了,具体的调用方法看后文。"},"/maixpy/doc/zh/vision/display.html":{"title":"MaixCAM MaixPy 屏幕使用","content":" title: MaixCAM MaixPy 屏幕使用 update: date: 2024 03 31 author: neucrack version: 1.0.0 content: 初版文档 ## 简介 MaixPy 提供了`display`模块,可以将图像显示到屏幕上,同时,也可以将图像发送到 MaixVision 显示,方便调试和开发。 ## API 文档 本文介绍常用方法,更多 API 请看 API 文档的 [display](/api/maix/display.html) 部分。 ## 使用屏幕 * 导入`display`模块: ```python from maix import display ``` * 创建一个`Display`对象: ```python disp display.Display() ``` * 显示图像: 
```python disp.show(img) ``` 这里`img`对象是`maix.image.Image`对象,可以通过`camera`模块的`read`方法获取,也可以通过`image`模块的`load`方法加载文件系统中的图像,也可以通过`image`模块的`Image`类创建一个空白图像。 比如: ```python from maix import image, display disp display.Display() img image.load(\"/root/dog.jpg\") disp.show(img) ``` 这里需要先把`dog.jpg`文件传到设备的`/root`目录下。 显示文字: ```python from maix import image, display disp display.Display() img image.Image(320, 240) img.draw_rect(0, 0, disp.width(), disp.height(), color image.Color.from_rgb(255, 0, 0), thickness 1) img.draw_rect(10, 10, 100, 100, color image.Color.from_rgb(255, 0, 0)) img.draw_string(10, 10, \"Hello MaixPy!\", color image.Color.from_rgb(255, 255, 255)) disp.show(img) ``` 从摄像头读取图像并显示: ```python from maix import camera, display, app disp display.Display() cam camera.Camera(320, 240) while not app.need_exit(): img cam.read() disp.show(img) ``` > 这里用了一个`while not app.need_exit():` 是方便程序在其它地方调用`app.set_exit_flag()`方法后退出循环。 ## 调整背光亮度 在系统的`设置`应用中可以手动调整背光亮度,如果你想在程序中调整背光亮度,可以使用`set_backlight`方法,参数就是亮度百分比,取值范围是 0 100: ```python disp.set_backlight(50) ``` 注意,程序退出回到应用选择界面后会自动恢复到系统设置的背光亮度。 ## 显示到 MaixVision 在使用 MaixVision 运行代码时,能够将图像显示到 MaixVision 上,方便调试和开发。 在调用`show`方法时,会自动压缩图像并发送到 MaixVision 显示。 当然,如果你没有屏幕,或者为了节省内存不想初始化屏幕,也可以直接调用`maix.dispaly`对象的`send_to_maixvision`方法发送图像到 MaixVision 显示。 ```python from maix import image,display img image.Image(320, 240) disp display.Display() img.draw_rect(0, 0, img.width(), img.height(), color image.Color.from_rgb(255, 0, 0), thickness 1) img.draw_rect(10, 10, 100, 100, color image.Color.from_rgb(255, 0, 0)) img.draw_string(10, 10, \"Hello MaixPy!\", color image.Color.from_rgb(255, 255, 255)) display.send_to_maixvision(img) ``` ## 更换其它型号屏幕 如果想换不同尺寸的屏幕,可以到[商城](https://wiki.sipeed.com/store)咨询购买。 对于 MaixCAM,目前支持 4 款屏幕: * 2.3寸 552x368 分辨率电容触摸屏: MaixCAM 带的屏幕。 * 2.4寸 640x480 分辨率电容触摸屏: MaixCAM Pro 带的屏幕。 * 5寸 854x480 分辨率无触摸屏: 注意无触摸,类似手机屏幕大小。 * 7寸 1280x800 分辨率电容触摸屏:7寸大屏,适合更多需要固定屏幕观看场景。 不同屏幕的刷新图像时间差别在1~5毫秒,差别不是很大,主要的区别在于图像分辨率大了图像处理时间的差别。 更换屏幕需要同时**修改配置文件**,否则可能刷新时序不同会**导致烧屏**(屏幕留下显示过的影子),所以需要注意,最好严格按照下面的步骤操作,如果出现了烧屏的问题也不要紧张,断电放置一晚上一般会恢复。 * 按照烧录系统的文档烧录系统,烧录完成后会有 U 盘出现。 * 打开 U 盘内容,看到有一个 `uEnv.txt` 文件。 * 编辑`uEnv.txt`文件,修改`pannel`键值,取值如下: * 2.3寸(MaixCAM 自带屏幕):`st7701_hd228001c31`。 * 2.4寸(MaixCAM Pro 自带屏幕): `st7701_lct024bsi20`。 * 5寸:`st7701_dxq5d0019_V0` 早期(2023年)测试屏幕`st7701_dxq5d0019b480854`。 * 7寸:`mtd700920b`,早期(2023年)测试屏幕用 `zct2133v1`。 * 保存`uEnv.txt`,并且**点击弹出 U 盘**,不要只接断电,否则可能文件丢失。 * 按下板子的`reset`按键,或者重新上电启动。 以上的方式最保险,保证上电前已经设置好了屏幕型号,如果你已经烧录好系统了,也可以修改系统的`/boot/uEnv.txt`文件然后重启。"},"/maixpy/doc/zh/vision/ocr.html":{"title":"MaixCAM MaixPy 实现 OCR 图片文字识别","content":" title: MaixCAM MaixPy 实现 OCR 图片文字识别 ## OCR 简介 OCR(Optical Character Recognition,光学字符识别)即用视觉的方式识别图像中的文字。 在很多场景会用到,比如: * 识别文字/数字卡片 * 提取卡片上的文字,比如身份证 * 电子化纸质文档 * 数显数字读取,可用于抄表、旧仪器数据电子化等 * 车牌文字识别 ## MaixPy 中使用 OCR MaixPy 移植了 [PaddleOCR](https://github.com/PaddlePaddle/PaddleOCR), 是由百度开源的一款 OCR 算法,想了解原理可以看这个开源项目。 ![OCR](../../assets/ocr.jpg) **首先保证 MaixPy 版本 > 4.6**。 然后执行代码:(完整的最新的代码在[MaixPy 仓库](https://github.com/sipeed/MaixPy/blob/main/examples/vision/ai_vision/nn_pp_ocr.py),以源码为准) ```python from maix import camera, display, image, nn, app model \"/root/models/pp_ocr.mud\" ocr nn.PP_OCR(model) cam camera.Camera(ocr.input_width(), ocr.input_height(), ocr.input_format()) dis display.Display() image.load_font(\"ppocr\", \"/maixapp/share/font/ppocr_keys_v1.ttf\", size 20) image.set_default_font(\"ppocr\") while not app.need_exit(): img cam.read() objs ocr.detect(img) for obj in objs: points obj.box.to_list() 
img.draw_keypoints(points, image.COLOR_RED, 4, 1, 1) img.draw_string(obj.box.x4, obj.box.y4, obj.char_str(), image.COLOR_RED) dis.show(img) ``` 可以看到用了`ocr nn.PP_OCR(model)` 加载模型,然后用`ocr.detect(img)` 检测并且识别文字得到结果画在了屏幕上。 ## 更多模型选择 到[MaixHub 模型下载](https://maixhub.com/model/zoo/449) 可以下载到更完整的模型,不同输入分辨率,不同语言,不同的版本(MaixPy 目前默认pp_ocr.mud 模型为 PPOCRv3 检测+v4识别)。 ## 只识别不检测 如果你已经有处理好了的文字图,即一张图中已知文字的四个角坐标,可以不调用`detect`函数,二是只调用`recognize`函数,这样就不会检测而是仅仅识别图片中的文字。 ## 自定义模型 默认模型提供了中文和英文文字检测识别模型,如果你有特殊的需求,比如其它语言,或者只想检测特定的图形不想检测所有类型的文字, 可以到[PaddleOCR 官方模型库](https://paddlepaddle.github.io/PaddleOCR/ppocr/model_list.html) 下载对应的模型然后转换成 MaixCAM 支持的模型格式即可。 这里最复杂的就是将模型转换成 MaixCAM 可用的模型,过程**比较复杂**,需要有基本的 Linux 使用基础以及灵活变通的能力。 * 首先自己使用 PaddleOCR 源码进行训练模型或者下载官方提供的模型,检测模型请选择 PP OCRv3, 因为效果不错的同时速度比 v4 快,识别模型请下载 v4 模型,实测 v3 在 MaixCAM 上量化后效果不理想。 * 然后将模型转成 onnx: ```shell model_path ./models/ch_PP OCRv3_rec_infer paddle2onnx model_dir ${model_path} model_filename inference.pdmodel params_filename inference.pdiparams save_file ${model_path}/inference.onnx opset_version 14 enable_onnx_checker True ``` * 然后按照[onnx转MUD格式的模型文档](../ai_model_converter/maixcam.html) 安装好环境再转换模型,这里在附录提供示例转换脚本。 * 使用 MaixPy 加载运行即可。 ## 附录:模型转换脚本 检测: ```shell #!/bin/bash set e net_name ch_PP_OCRv3_det input_w 320 input_h 224 output_name sigmoid_0.tmp_0 # scale 1/255.0 # \"mean\": [0.485, 0.456, 0.406], # \"std\": [0.229, 0.224, 0.225], # mean: mean * 255 # scale: 1/(std*255) # mean: 123.675, 116.28, 103.53 # scale: 0.01712475, 0.017507, 0.01742919 mkdir p workspace cd workspace # convert to mlir model_transform.py \\ model_name ${net_name} \\ model_def ../${net_name}.onnx \\ input_shapes [[1,3,${input_h},${input_w}]] \\ mean \"123.675,116.28,103.53\" \\ scale \"0.01712475,0.017507,0.01742919\" \\ keep_aspect_ratio \\ pixel_format bgr \\ channel_format nchw \\ output_names \"${output_name}\" \\ test_input ../test_images/test3.jpg \\ test_result ${net_name}_top_outputs.npz \\ tolerance 0.99,0.99 \\ mlir ${net_name}.mlir # export bf16 model # not use quant_input, use float32 for easy coding model_deploy.py \\ mlir ${net_name}.mlir \\ quantize BF16 \\ processor cv181x \\ test_input ${net_name}_in_f32.npz \\ test_reference ${net_name}_top_outputs.npz \\ model ${net_name}_bf16.cvimodel echo \"calibrate for int8 model\" # export int8 model run_calibration.py ${net_name}.mlir \\ dataset ../images \\ input_num 200 \\ o ${net_name}_cali_table echo \"convert to int8 model\" # export int8 model # add quant_input, use int8 for faster processing in maix.nn.NN.forward_image model_deploy.py \\ mlir ${net_name}.mlir \\ quantize INT8 \\ quant_input \\ calibration_table ${net_name}_cali_table \\ processor cv181x \\ test_input ${net_name}_in_f32.npz \\ test_reference ${net_name}_top_outputs.npz \\ tolerance 0.9,0.5 \\ model ${net_name}_int8.cvimodel ``` 识别: ```shell #!/bin/bash set e # net_name ch_PP_OCRv4_rec # output_name softmax_11.tmp_0 net_name ch_PP_OCRv3_rec_infer_sophgo output_name softmax_5.tmp_0 input_w 320 input_h 48 cali_images ../images_crop_320 # scale 1/255.0 # \"mean\": [0.5, 0.5, 0.5], # \"std\": [0.5, 0.5, 0.5], # mean: mean * 255 # scale: 1/(std*255) # mean: 127.5,127.5,127.5 # scale: 0.00784313725490196,0.00784313725490196,0.00784313725490196 mkdir p workspace cd workspace # convert to mlir model_transform.py \\ model_name ${net_name} \\ model_def ../${net_name}.onnx \\ input_shapes [[1,3,${input_h},${input_w}]] \\ mean \"127.5,127.5,127.5\" \\ scale \"0.00784313725490196,0.00784313725490196,0.00784313725490196\" \\ keep_aspect_ratio \\ pixel_format bgr 
\\ channel_format nchw \\ output_names \"${output_name}\" \\ test_input ../test_images/test3.jpg \\ test_result ${net_name}_top_outputs.npz \\ tolerance 0.99,0.99 \\ mlir ${net_name}.mlir # export bf16 model # not use quant_input, use float32 for easy coding model_deploy.py \\ mlir ${net_name}.mlir \\ quantize BF16 \\ processor cv181x \\ test_input ${net_name}_in_f32.npz \\ test_reference ${net_name}_top_outputs.npz \\ model ${net_name}_bf16.cvimodel echo \"calibrate for int8 model\" # export int8 model run_calibration.py ${net_name}.mlir \\ dataset $cali_images \\ input_num 200 \\ o ${net_name}_cali_table echo \"convert to int8 model\" # export int8 model # add quant_input, use int8 for faster processing in maix.nn.NN.forward_image model_deploy.py \\ mlir ${net_name}.mlir \\ quantize INT8 \\ quant_input \\ calibration_table ${net_name}_cali_table \\ processor cv181x \\ test_input ${net_name}_in_f32.npz \\ test_reference ${net_name}_top_outputs.npz \\ tolerance 0.9,0.5 \\ model ${net_name}_int8.cvimodel ```"},"/maixpy/doc/zh/vision/face_detection.html":{"title":"MaixCAM MaixPy 人脸检测和关键点检测","content":" title: MaixCAM MaixPy 人脸检测和关键点检测 ## 简介 人脸检测在很多地方都能用到,比如是为人脸识别提供人脸检测这一步骤,或者是人脸跟踪相关的应用等等。 这里提供的人脸检测不光可以检测到人脸,还能检测到 5 个关键点,包括两个眼睛,一个鼻子,一张嘴巴的两个嘴角。 ![face detection](../../assets/face_detection.jpg) ## MaixPy 中使用人脸检测 MaixPy 官方提供了三种人脸检测模型,分别来自开源项目 [face detector 1MB with landmark](https://github.com/biubug6/Face Detector 1MB with landmark) 和 [Retinafate](https://github.com/biubug6/Pytorch_Retinaface) 以及 [YOLOv8 face](https://github.com/derronqi/yolov8 face)。 这三种模型都可以用,`YOLOv8 face` 效果比较好但是速度略微慢一些,可以自己实际测试选择使用。 使用`YOLOv8 face`:(需要 MaixPy 版本 > 4.3.8) ```python from maix import camera, display, image, nn, app detector nn.YOLOv8(model \"/root/models/yolov8n_face.mud\", dual_buff True) cam camera.Camera(detector.input_width(), detector.input_height(), detector.input_format()) dis display.Display() while not app.need_exit(): img cam.read() objs detector.detect(img, conf_th 0.5, iou_th 0.45, keypoint_th 0.5) for obj in objs: img.draw_rect(obj.x, obj.y, obj.w, obj.h, color image.COLOR_RED) msg f'{detector.labels[obj.class_id]}: {obj.score:.2f}' img.draw_string(obj.x, obj.y, msg, color image.COLOR_RED) detector.draw_pose(img, obj.points, 2, image.COLOR_RED) dis.show(img) ``` 另外两种模型使用方法: 这里有一行被注释了代码是加载`Retinafae`模型,根据你下载的模型选择使用哪一行代码 ```python from maix import camera, display, image, nn, app import math detector nn.Retinaface(model \"/root/models/retinaface.mud\") # detector nn.FaceDetector(model \"/root/models/face_detector.mud\") cam camera.Camera(detector.input_width(), detector.input_height(), detector.input_format()) dis display.Display() while not app.need_exit(): img cam.read() objs detector.detect(img, conf_th 0.4, iou_th 0.45) for obj in objs: img.draw_rect(obj.x, obj.y, obj.w, obj.h, color image.COLOR_RED) radius math.ceil(obj.w / 10) img.draw_keypoints(obj.points, image.COLOR_RED, size radius if radius < 5 else 4) dis.show(img) ``` ## 模型下载和其它分辨率模型 下载模型,下载的压缩包里面有多个分辨率可以选择,分辨率越高越精准但耗时更长: * [face detector 1MB with landmark](https://maixhub.com/model/zoo/377) * [Retinafate](https://maixhub.com/model/zoo/378) * [YOLOv8 face](https://maixhub.com/model/zoo/407) ## dual_buff 双缓冲区加速 你可能注意到这里模型初始化使用了`dual_buff`(默认值就是 `True`),使能 `dual_buff` 参数可以加快运行效率,提高帧率,具体原理和使用注意点见 [dual_buff 介绍](./dual_buff.html)。"},"/maixpy/doc/zh/vision/qrcode.html":{"title":"MaixCAM MaixPy 二维码识别","content":" title: MaixCAM MaixPy 二维码识别 update: date: 2024 04 03 author: lxowalle version: 1.0.0 content: 初版文档 
阅读本文前,确保已经知晓如何开发MaixCAM,详情请阅读[快速开始](../index.html) ## 简介 本文介绍如何使用MaixPy来识别二维码 ## 使用 MaixPy 识别二维码 MaixPy的 `maix.image.Image`中提供了`find_qrcodes`方法,用来识别二维码。 ### 如何识别二维码 一个简单的示例,实现识别二维码并画框 ```python from maix import image, camera, display cam camera.Camera(320, 240) disp display.Display() while 1: img cam.read() qrcodes img.find_qrcodes() for qr in qrcodes: corners qr.corners() for i in range(4): img.draw_line(corners[i][0], corners[i][1], corners[(i + 1) % 4][0], corners[(i + 1) % 4][1], image.COLOR_RED) img.draw_string(qr.x(), qr.y() 15, qr.payload(), image.COLOR_RED) disp.show(img) ``` 步骤: 1. 导入image、camera、display模块 ```python from maix import image, camera, display ``` 2. 初始化摄像头和显示 ```python cam camera.Camera(320, 240) # 初始化摄像头,输出分辨率320x240 RGB格式 disp display.Display() ``` 3. 从摄像头获取图片并显示 ```python while 1: img cam.read() disp.show(img) ``` 4. 调用`find_qrcodes`方法识别摄像头中的二维码 ```python qrcodes img.find_qrcodes() ``` `img`是通过`cam.read()`读取到的摄像头图像,当初始化的方式为`cam camera.Camera(320, 240)`时,`img`对象是一张分辨率为320x240的RGB图。 `img.find_qrcodes`用来寻找二维码,并将查询结果保存到`qrocdes`,以供后续处理 5. 处理识别二维码的结果并显示到屏幕上 ```python for qr in qrcodes: corners qr.corners() for i in range(4): img.draw_line(corners[i][0], corners[i][1], corners[(i + 1) % 4][0], corners[(i + 1) % 4][1], image.COLOR_RED) img.draw_string(qr.x(), qr.y() 15, qr.payload(), image.COLOR_RED) ``` `qrcodes`是通过`img.find_qrcodes()`查询二维码的结果,如果找不到二维码则`qrcodes`内部为空 `qr.corners()`用来获取已扫描到的二维码的四个顶点坐标,`img.draw_line()`利用这四个顶点坐标画出二维码的形状 `img.draw_string`用来显示二维码的内容和位置等信息,其中`qr.x()`和`qr.y()`用来获取二维码左上角坐标x和坐标y,`qr.payload()`用来获取二维码的内容 ### 常用参数说明 列举常用参数说明,如果没有找到可以实现应用的参数,则需要考虑是否使用其他算法实现,或者基于目前算法的结果扩展所需的功能 参数 说明 示例 roi 设置算法计算的矩形区域,roi [x, y, w, h],x,y表示矩形区域左上角坐标,w,h表示矩形区域的宽度和高度,默认为整张图片 计算坐标为(50,50),宽和高为100的区域
    ```img.find_qrcodes(roi [50, 50, 100, 100])``` 本文介绍常用方法,更多 API 请看 API 文档的 [image](../../../api/maix/image.html) 部分。"},"/maixpy/doc/zh/vision/segmentation.html":{"title":"MaixCAM MaixPy 图像语义分割","content":" title: MaixCAM MaixPy 图像语义分割 ## 简介 图像语义分割,就是识别图中特定的物体,并且讲物体部分的像素识别出来,比如下图识别到了人体和狗的身体部分,可以拿来做碰撞检测、汽车自动导航、面积测算等等。 ![](../../assets/yolov8_seg.jpg) ## MaixPy 使用图像语义分割 MaixPy 内置了 `YOLOv8 seg` 和 `YOLO11 seg` 来进行对象检测和图像分割。 MaixPy 默认提供了 coco 数据集 80 种物体分类模型。 > 使用 YOLOv8 MaixPy 版本必须 > 4.4.0 > 使用 YOLO11 MaixPy 版本必须 > 4.7.0 代码如下,也可以在 [MaixPy examples](https://github.com/sipeed/maixpy/tree/main/examples/) 中找到。 ```python from maix import camera, display, image, nn, app, time detector nn.YOLOv8(model \"/root/models/yolov8n_seg.mud\", dual_buff True) # detector nn.YOLO11(model \"/root/models/yolo11n_seg.mud\", dual_buff True) cam camera.Camera(detector.input_width(), detector.input_height(), detector.input_format()) dis display.Display() while not app.need_exit(): img cam.read() objs detector.detect(img, conf_th 0.5, iou_th 0.45) for obj in objs: # img.draw_image(obj.x, obj.y, obj.seg_mask) detector.draw_seg_mask(img, obj.x, obj.y, obj.seg_mask, threshold 127) img.draw_rect(obj.x, obj.y, obj.w, obj.h, color image.COLOR_RED) msg f'{detector.labels[obj.class_id]}: {obj.score:.2f}' img.draw_string(obj.x, obj.y, msg, color image.COLOR_RED) dis.show(img) ``` > 这里切换 YOLOv8 和 YOLO11 只需要修改上面代码种注释的部分即可。 ## 更多分辨率模型 默认是 320x224 输入分辨率的模型, 更多分辨率请到 MaixHub 模型库 下载: * YOLOv8 seg: [[MaixHub 模型库](https://maixhub.com/model/zoo/413)](https://maixhub.com/model/zoo/413) * YOLO11 seg: [[MaixHub 模型库](https://maixhub.com/model/zoo/455)](https://maixhub.com/model/zoo/455) ## dual_buff 双缓冲区加速 你可能注意到这里模型初始化使用了`dual_buff`(默认值就是 `True`),使能 `dual_buff` 参数可以加快运行效率,提高帧率,具体原理和使用注意点见 [dual_buff 介绍](./dual_buff.html)。 ## 自定义自己的物体分割模型 上面提供的是 coco 数据集 80 分类的模型,如果不满足你的要求,你也可以自己训练特定的物体检测和分割模型,按照 [离线训练YOLOv8/YOLO11](./customize_model_yolov8.html) 所述使用 YOLOv8/YOLO11 官方的分格模型训练方法进行训练,然后转换成 MaixCAM 支持的模型格式即可。"},"/maixpy/doc/zh/vision/image_ops.html":{"title":"MaixCAM MaixPy 图像基础操作","content":" title: MaixCAM MaixPy 图像基础操作 update: date: 2024 04 03 author: neucrack version: 1.0.0 content: 初版文档 date: 2024 07 08 author: neucrack version: 1.1.0 content: 优化 cv 和 image 转换文档 ## 简介 视觉应用中图像占据非常重要的位置,不管是图片还是视频,因为视频本质上就是一帧帧的图像,所以图像处理是视觉应用的基础。 ## API 文档 本文介绍常用方法, 更多 API 参考 [maix.image](/api/maix/image.html) 模块的文档。 ## 图像格式 MaixPy 提供基础图像模块`image`,里面最重要的就是`image.Image`类,用于图像的创建以及各种图像基础操作,以及图像加载和保存等。 图像格式有很多,一般我们用`image.Format.FMT_RGB888` 或者 `image.Format.FMT_RGBA8888` 或者 `image.Format.FMT_GRAYSCALE`或者`image.Format.FMT_BGR888`等。 大家知道 `RGB` 三色可以合成任意颜色,所以一般情况下我们使用 `image.Format.FMT_RGB888`就足够, `RGB888` 在内存中是 `RGB packed` 排列,即在内存中的排列: `像素1_红色, 像素1_绿色, 像素1_蓝色, 像素2_红色, 像素2_绿色, 像素2_蓝色, ...` 依次排列。 ## 创建图像 创建图像很简单,只需要指定图像的宽度和高度以及图像格式即可: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) print(img) print(img.width(), img.height(), img.format()) ``` `320` 是图像的宽度,`240` 是图像的高度,`image.Format.FMT_RGB888` 是图像的格式,格式参数可以省略,默认是`image.Format.FMT_RGB888`。 这里通过`img.width()`、`img.height()`、`img.format()`可以获取图像的宽度、高度和格式。 ## 显示到屏幕 MaixPy 提供了`maix.display.Display`类,可以方便的显示图像: ```python from maix import image, display disp display.Display() img image.Image(320, 240, image.Format.FMT_RGB888) disp.show(img) ``` 注意这里因为没有图像数据,所以显示的是黑色的图像,修改画面看后文。 ## 从文件系统读取图像 MaixPy 提供了`maix.image.load`方法,可以从文件系统读取图像: ```python from maix import image img image.load(\"/root/image.jpg\") if img is None: raise Exception(f\"load image failed\") print(img) ``` 
注意这里`/root/image.jpg` 是提前传输到了板子上的,方法可以看前面的教程。 可以支持 `jpg` 和 `png` 格式的图像。 ## 保存图像到文件系统 MaixPy 的`maix.image.Image`提供了`save`方法,可以保存图像到文件系统: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) # do something with img img.save(\"/root/image.jpg\") ``` ## 画框 `image.Image`提供了`draw_rect`方法,可以在图像上画框: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) img.draw_rect(10, 10, 100, 100, image.Color.from_rgb(255, 0, 0)) ``` 这里的参数依次是:`x`, `y`, `w`, `h`, `color`,`x` 和 `y` 是框的左上角坐标,`w` 和 `h` 是框的宽度和高度,`color` 是框的颜色,可以使用`image.Color.from_rgb`方法创建颜色。 可以用`thickness`指定框的线宽,默认是`1`, 也可以画实心框,传参 `thickness 1` 即可: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) img.draw_rect(10, 10, 100, 100, (255, 0, 0), thickness 1) ``` ## 写字符串 `image.Image`提供了`draw_string`方法,可以在图像上写字: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) img.draw_string(10, 10, \"Hello MaixPy\", image.Color.from_rgb(255, 0, 0)) ``` 这里的参数依次是:`x`, `y`, `text`, `color`,`x` 和 `y` 是文字的左上角坐标,`text` 是要写的文字,`color` 是文字的颜色,可以使用`image.Color.from_rgb`方法创建颜色。 还可以放大字体,传参 `scale` 即可: ```python img.draw_string(10, 10, \"Hello MaixPy\", image.Color.from_rgb(255, 0, 0), scale 2) ``` 获取字体的宽度和高度: ```python w, h image.string_size(\"Hello MaixPy\", scale 2) print(w, h) ``` **注意**这里`scale`是放大倍数,默认是`1`,和`draw_string`应该保持一致。 ## 中文支持和自定义字体 `image` 模块支持加载`ttf/otf`字体,默认字体只支持英文,如果要显示中文或者自定义字体可以先下载字体文件到设备上,然后加载字体。 系统也内置了几个字体,在`/maixapp/share/font`目录下面,代码示例: ```python from maix import image, display, app, time image.load_font(\"sourcehansans\", \"/maixapp/share/font/SourceHanSansCN Regular.otf\", size 32) print(\"fonts:\", image.fonts()) image.set_default_font(\"sourcehansans\") disp display.Display() img image.Image(disp.width(), disp.height()) img.draw_string(2, 2, \"你好!Hello, world!\", image.Color.from_rgba(255, 0, 0, 0.8)) disp.show(img) while not app.need_exit(): time.sleep(1) ``` 加载字体文件,然后设置默认的字体,也可以不设置默认的字体,在写字的函数参数设置: ```python img.draw_string(2, 2, \"你好!Hello, world!\", image.Color.from_rgba(255, 0, 0, 0.8), font \"sourcehansans\") ``` 注意 `string_size`方法也会使用设置的默认字体计算大小,也可以通过`font`参数单独设置要计算大小的字体。 ## 画线 `image.Image`提供了`draw_line`方法,可以在图像上画线: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) img.draw_line(10, 10, 100, 100, image.Color.from_rgb(255, 0, 0)) ``` 这里的参数依次是:`x1`, `y1`, `x2`, `y2`, `color`,`x1` 和 `y1` 是线的起点坐标,`x2` 和 `y2` 是线的终点坐标,`color` 是线的颜色,可以使用`image.Color.from_rgb`方法创建颜色。 ## 画圆 `image.Image`提供了`draw_circle`方法,可以在图像上画圆: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) img.draw_circle(100, 100, 50, image.Color.from_rgb(255, 0, 0)) ``` 这里的参数依次是:`x`, `y`, `r`, `color`,`x` 和 `y` 是圆心坐标,`r` 是半径,`color` 是圆的颜色,可以使用`image.Color.from_rgb`方法创建颜色。 ## 缩放图像 `image.Image`提供了`resize`方法,可以缩放图像: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) img_new img.resize(160, 120) print(img, img_new) ``` 注意这里`resize`方法返回一个新的图像对象,原图像不变。 ## 剪裁图像 `image.Image`提供了`crop`方法,可以剪裁图像: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) img_new img.crop(10, 10, 100, 100) print(img, img_new) ``` 注意这里`crop`方法返回一个新的图像对象,原图像不变。 ## 旋转图像 `image.Image`提供了`rotate`方法,可以旋转图像: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) img_new img.rotate(90) print(img, img_new) ``` 注意这里`rotate`方法返回一个新的图像对象,原图像不变。 ## 拷贝图像 `image.Image`提供了`copy`方法,可以拷贝一份独立的图像: ```python from maix import image img image.Image(320, 240, 
image.Format.FMT_RGB888) img_new img.copy() print(img, img_new) ``` ## 仿射变换 `image.Image`提供了`affine`方法,可以进行仿射变换,即提供当前图中三个及以上的点坐标,以及目标图中对应的点坐标,可以自动进行图像的旋转、缩放、平移等操作变换到目标图像: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) img_new img.affine([(10, 10), (100, 10), (10, 100)], [(10, 10), (100, 20), (20, 100)]) print(img, img_new) ``` 更多参数和用法请参考 API 文档。 ## 画关键点 `image.Image`提供了`draw_keypoints`方法,可以在图像上画关键点: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) keypoints [10, 10, 100, 10, 10, 100] img.draw_keypoints(keypoints, image.Color.from_rgb(255, 0, 0), size 10, thickness 1, fill False) ``` 在坐标`(10, 10)`、`(100, 10)`、`(10, 100)`画三个红色的关键点,关键点的大小是`10`,线宽是`1`,不填充。 ## 画十字 `image.Image`提供了`draw_cross`方法,可以在图像上画十字: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) img.draw_cross(100, 100, image.Color.from_rgb(255, 0, 0), size 5, thickness 1) ``` 在坐标`(100, 100)`画一个红色的十字,十字的延长大小是`5`,所以线段长度为`2 * size + thickness`, 线宽是`1`。 ## 画箭头 `image.Image`提供了`draw_arrow`方法,可以在图像上画箭头: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) img.draw_arrow(10, 10, 100, 100, image.Color.from_rgb(255, 0, 0), thickness 1) ``` 在坐标`(10, 10)`画一个红色的箭头,箭头的终点是`(100, 100)`,线宽是`1`。 ## 画图 `image.Image`提供了`draw_image`方法,可以在图像上画图: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) img2 image.Image(100, 100, image.Format.FMT_RGB888) img2.draw_rect(10, 10, 90, 90, image.Color.from_rgb(255, 0, 0)) img.draw_image(10, 10, img2) ``` ## 转换格式 `image.Image`提供了`to_format`方法,可以转换图像格式: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) img_new img.to_format(image.Format.FMT_BGR888) print(img, img_new) img_jpg img.to_format(image.Format.FMT_JPEG) print(img, img_new) ``` 注意这里`to_format`方法返回一个新的图像对象,原图像不变。 ## maix.image.Image 对象和 Numpy/OpenCV 格式互相转换 见[MaixPy 使用 OpenCV 文档](./opencv.html) ## 和 bytes 数据互相转换 `image.Image`提供了`to_bytes`方法,可以转换图像为`bytes`数据: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) data img.to_bytes() print(type(data), len(data), img.data_size()) img_jpeg image.from_bytes(320, 240, image.Format.FMT_RGB888, data) print(img_jpeg) img img_jpeg.to_format(image.Format.FMT_RGB888) print(img) ``` 这里`to_bytes`获得一个新的`bytes`对象,是独立的内存,不会影响原图。 `image.Image`构造函数中传入`data`参数可以直接从`bytes`数据构造图像对象,注意新的图像也是独立的内存,不会影响到`data`。 因为涉及到内存拷贝,所以这个方法比较耗时,不建议频繁使用。 > 如果你想用不拷贝的方式优化程序(不建议轻易使用,写不好代码会导致程序容易崩溃,),请看 API 文档。 ## 更多基础 API 使用方法 更多 API 使用方法请参考 [maix.image](/api/maix/image.html) 模块的文档。"},"/maixpy/doc/zh/vision/touchscreen.html":{"title":"MaixPy / MaixCAM 触摸屏使用方法","content":" title: MaixPy / MaixCAM 触摸屏使用方法 ## 简介 对于 MaixCAM 自带了一个触摸屏,写应用时配合触摸屏可以实现很多有趣应用,我们可以通过 API 读取到触摸屏的点按操作。 ## MaixPy 读取触摸 MaixPy 提供了一个简单的`maix.touchscreen.TouchScreen` 类来读取,举例: ```python from maix import touchscreen, app, time ts touchscreen.TouchScreen() pressed_already False last_x 0 last_y 0 last_pressed False while not app.need_exit(): x, y, pressed ts.read() if x ! last_x or y ! last_y or pressed ! 
last_pressed: print(x, y, pressed) last_x x last_y y last_pressed pressed if pressed: pressed_already True else: if pressed_already: print(f\"clicked, x: {x}, y: {y}\") pressed_already False time.sleep_ms(1) # sleep some time to free some CPU usage ``` ## 配合屏幕实现交互 配合屏幕可以做出一些用户交互的内容,更多可以看[MaixPy/examples/vision/touchscreen](https://github.com/sipeed/MaixPy) 目录下例程。 如前面的文章介绍的,我们要往屏幕显示内容,一般是得到一个`maix.image.Image`对象,然后调用`disp.show(img)`来显示这张图像。 实现一个按钮的最原始和简单的方法就是在这个图像上画一个按钮,然后判断用户触摸到这个区域就算是触发了按下事件,注意图像的大小要和屏幕的大小保持一致: ```python from maix import touchscreen, app, time, display, image ts touchscreen.TouchScreen() disp display.Display() img image.Image(disp.width(), disp.height()) # draw exit button exit_label \"< Exit\" size image.string_size(exit_label) exit_btn_pos [0, 0, 8*2 + size.width(), 12 * 2 + size.height()] img.draw_string(8, 12, exit_label, image.COLOR_WHITE) img.draw_rect(exit_btn_pos[0], exit_btn_pos[1], exit_btn_pos[2], exit_btn_pos[3], image.COLOR_WHITE, 2) def is_in_button(x, y, btn_pos): return x > btn_pos[0] and x < btn_pos[0] + btn_pos[2] and y > btn_pos[1] and y < btn_pos[1] + btn_pos[3] while not app.need_exit(): x, y, pressed ts.read() if is_in_button(x, y, exit_btn_pos): app.set_exit_flag(True) img.draw_circle(x, y, 1, image.Color.from_rgb(255, 255, 255), 2) disp.show(img) ``` ## 屏幕和图像大小不一样时如何处理 上面的例子可以看到 `img` 大小和屏幕大小一样,如果你的`img`和屏幕大小不一样怎么办(比如上面使用`img image.Image(240, 240)`,比如屏幕是`640x480`, 图像是`240x240`,`disp.show(img)`的默认行为是`image.Fit.FIT_CONTAIN`, 即把图片放大到`480x480`然后边上填充黑色,如果你在`240x240`的图上画了按钮,比如坐标`(0, 0, 60, 40)`,那么按钮也会自动被放大,所以触摸判断的坐标就不能用`(0, 0, 60, 40)`,需要用`((640 480) / 2, 0, 480/240*60, 480/240*40)`, 即`(80, 0, 120, 80)`。 这里为了方便缩放图像时,快速计算源图像的点或者矩形框 在 缩放后的目标图像的位置和大小,提供了`image.resize_map_pos`函数来进行此计算过程。 ```python from maix import touchscreen, app, time, display, image ts touchscreen.TouchScreen() disp display.Display() img image.Image(240, 240) img.draw_rect(0, 0, img.width(), img.height(), image.COLOR_WHITE) # draw exit button exit_label \"< Exit\" size image.string_size(exit_label) exit_btn_pos [0, 0, 8*2 + size.width(), 12 * 2 + size.height()] img.draw_string(8, 12, exit_label, image.COLOR_WHITE) img.draw_rect(exit_btn_pos[0], exit_btn_pos[1], exit_btn_pos[2], exit_btn_pos[3], image.COLOR_WHITE, 2) # 图像按键坐标映射到屏幕上的坐标 exit_btn_disp_pos image.resize_map_pos(img.width(), img.height(), disp.width(), disp.height(), image.Fit.FIT_CONTAIN, exit_btn_pos[0], exit_btn_pos[1], exit_btn_pos[2], exit_btn_pos[3]) def is_in_button(x, y, btn_pos): return x > btn_pos[0] and x < btn_pos[0] + btn_pos[2] and y > btn_pos[1] and y < btn_pos[1] + btn_pos[3] while not app.need_exit(): x, y, pressed ts.read() if is_in_button(x, y, exit_btn_disp_pos): app.set_exit_flag(True) # 屏幕的坐标映射回图像上对应的坐标,然后在图像上画点 x, y image.resize_map_pos_reverse(img.width(), img.height(), disp.width(), disp.height(), image.Fit.FIT_CONTAIN, x, y) img.draw_circle(x, y, 1, image.Color.from_rgb(255, 255, 255), 2) disp.show(img, fit image.Fit.FIT_CONTAIN) ```"},"/maixpy/doc/zh/vision/customize_model_yolov5.html":{"title":"为 MaixCAM MaixPy 离线训练 YOLOv5 模型,自定义检测物体","content":" title: 为 MaixCAM MaixPy 离线训练 YOLOv5 模型,自定义检测物体 update: date: 2024 6 20 version: v1.0 author: neucrack content: 编写文档 ## 简介 默认官方提供了 80 种物体检测,如果不满足你的需求,可以自己训练检测的物体,两种方式: * 使用 [MaixHub 在线训练](./maixhub_train.html),方便快捷,无需购买服务器也无需搭建环境,点几下鼠标就完成。 * 在自己的电脑或者服务器搭建训练环境训练。 前者好处是简单快速,后者是使用自己电脑,训练图片数量不受限制,但是后者难度会大非常多。 **注意:** 本文讲了如何自定义训练,但是有一些基础知识默认你已经拥有,如果没有请自行学习: * 本文不会讲解如何安装训练环境,请自行搜索安装(Pytorch 环境安装)测试。 * 本文不会讲解机器学习的基本概念、linux相关基础使用知识。 如果你觉得本文哪里需要改进,欢迎点击右上角`编辑本文`贡献并提交 
文档 PR。 ## 流程和本文目标 要想我们的模型能在 MaixPy (MaixCAM)上使用,需要经历以下过程: * 搭建训练环境,本文略过,请自行搜索 pytorch 训练环境搭建。 * 拉取 [yolov5](https://github.com/ultralytics/yolov5) 源码到本地。 * 准备数据集,并做成 yolov5 项目需要的格式。 * 训练模型,得到一个 `onnx` 模型文件,也是本文的最终输出文件。 * 将`onnx`模型转换成 MaixPy 支持的 `MUD` 文件,这个过程在[MaixCAM 模型转换](../ai_model_converter/maixcam.html) 一文种有详细介绍。 * 使用 MaixPy 加载模型运行。 ## 参考文章 因为是比较通用的操作过程,本文只给一个流程介绍,具体细节可以自行看 **[YOLOv5 官方代码和文档](https://github.com/ultralytics/yolov5)**(**推荐**),以及搜索其训练教程,最终导出 onnx 文件即可。 这里有 MaixHub 的社区的几篇文章: * [maixcam部署yolov5s 自定义模型](https://maixhub.com/share/23) * [【流程分享】YOLOv5训练自定义数据集并部署在Maixcam上](https://maixhub.com/share/32) * [yolov5猫狗识别模型——免费云训练(新手也可复现)](https://maixhub.com/share/25) 如果你有觉得讲得不错的文章欢迎修改本文并提交 PR。 ## YOLOv5 导出 ONNX 模型文件 YOLOv5 提供了导出选项,直接在`yolov5`目录下执行 ```shell python export.py weights ../yolov5s.pt include onnx img 224 320 ``` 这里加载 pt 参数文件,转换成 onnx, 同时指定分辨率,注意这里 高在前,宽在后。 模型训练的时候用的`640x640`,我们重新指定了分辨率方便提升运行速度,这里使用`320x224`的原因是和 MaixCAM 的屏幕比例比较相近方便显示,具体可以根据你的需求设置就好了。 ## MaixCAM MUD 文件 将 onnx 转换为 `mud` 格式的模型文件时,参照 [MaixCAM 模型转换](../ai_model_converter/maixcam.html) 即可,最终会得到一个`mud`文件和`cvimodel`文件,其中 `mud` 文件内容: ```ini [basic] type cvimodel model yolov8n.cvimodel [extra] model_type yolov8 input_type rgb mean 0, 0, 0 scale 0.00392156862745098, 0.00392156862745098, 0.00392156862745098 labels person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair drier, toothbrush ``` 根据你训练的内容替换参数即可,比如你训练检测`0~9`数字,那么只需要替换`labels 0,1,2,3,4,5,6,7,8,9` 即可, 然后运行模型时将两个文件放在同一个目录下加载`mud`文件即可。 ## 上传分享到 MaixHub 到 [MaixHub 模型库](https://maixhub.com/model/zoo?platform maixcam) 上传并分享你的模型,可以多提供几个分辨率供大家选择。"},"/maixpy/doc/zh/vision/line_tracking.html":{"title":"MaixCAM MaixPy 寻找直线","content":" title: MaixCAM MaixPy 寻找直线 update: date: 2024 05 09 author: lxowalle version: 1.0.0 content: 初版文档 阅读本文前,确保已经知晓如何开发MaixCAM,详情请阅读[快速开始](../index.html) ## 简介 在视觉应用中,在巡迹小车、巡线机器人等应用中经常需要寻找线条的功能。本文将介绍: 如何使用MaixPy来实现巡线功能 如何使用MaixCam的默认应用程序巡线 ## 如何使用MaixPy来寻找直线 MaixPy的 `maix.image.Image`中提供了`get_regression`方法来寻找直线 ### 代码示例 一个简单的示例,实现寻找并画出直线 ```python from maix import camera, display, image cam camera.Camera(320, 240) disp display.Display() # thresholds [[0, 80, 40, 80, 10, 80]] # red thresholds [[0, 80, 120, 10, 0, 30]] # green # thresholds [[0, 80, 30, 100, 120, 60]] # blue while 1: img cam.read() lines img.get_regression(thresholds, area_threshold 100) for a in lines: img.draw_line(a.x1(), a.y1(), a.x2(), a.y2(), image.COLOR_GREEN, 2) theta a.theta() rho a.rho() if theta > 90: theta 270 theta else: theta 90 theta img.draw_string(0, 0, \"theta: \" + str(theta) + \", rho: \" + str(rho), image.COLOR_BLUE) disp.show(img) ``` 步骤: 1. 导入image、camera、display模块 ```python from maix import image, camera, display ``` 2. 初始化摄像头和显示 ```python cam camera.Camera(320, 240)\t# 初始化摄像头,输出分辨率320x240 RGB格式 disp display.Display() ``` 3. 从摄像头获取图片并显示 ```python while 1: img cam.read() disp.show(img) ``` 4. 
调用`get_regression`方法寻找摄像头图片中的直线,并画到屏幕上 ```python lines img.get_regression(thresholds, area_threshold 100) for a in lines: img.draw_line(a.x1(), a.y1(), a.x2(), a.y2(), image.COLOR_GREEN, 2) theta a.theta() rho a.rho() if theta > 90: theta 270 theta else: theta 90 theta img.draw_string(0, 0, \"theta: \" + str(theta) + \", rho: \" + str(rho), image.COLOR_BLUE) ``` `img`是通过`cam.read()`读取到的摄像头图像,当初始化的方式为`cam camera.Camera(320, 240)`时,`img`对象是一张分辨率为320x240的RGB图。 `img.get_regression`用来寻找直线, `thresholds` 是一个颜色阈值列表,每个元素是一个颜色阈值,同时找到多个阈值就传入多个,每个颜色阈值的格式为 `[L_MIN, L_MAX, A_MIN, A_MAX, B_MIN, B_MAX]`,这里的 `L`、`A`、`B` 是`LAB`颜色空间的三个通道,`L` 通道是亮度,`A` 通道是红绿通道,`B` 通道是蓝黄通道。`pixels_threshold`是一个像素面积的阈值,用来过滤一些不需要直线。 `for a in lines`用来遍历返回的`Line`对象, 其中`a`就是当前的`Line`对象。通常`get_regression`函数只会返回一个`Line`对象,如果需要寻找多条直线,可以尝试使用`find_line`方法 使用`img.draw_line`来画出找到的线条,`a.x1(), a.y1(), a.x2(), a.y2()`分别代表直线两端的坐标 使用`img.draw_string`在左上角显示直线与x轴的夹角, `a.theta()`是直线与y轴的夹角, 这里为了方便理解转换成直线与x轴的夹角`theta`,`a.rho()`是原点与直线的垂线的长度. 5. 通过maixvision运行代码,就可以寻线啦,看看效果吧 ![image 20240509110204007](../../../static/image/line_tracking_demo.jpg) ### 常用参数说明 列举常用参数说明,如果没有找到可以实现应用的参数,则需要考虑是否使用其他算法实现,或者基于目前算法的结果扩展所需的功能 参数 说明 示例 thresholds 基于lab颜色空间的阈值,threshold [[l_min, l_max, a_min, a_max, b_min, b_max]],分别表示:
    亮度范围为[l_min, l_max]
    绿色到红色的分量范围为[a_min, a_max]
    蓝色到黄色的分量范围为[b_min, b_max]
    可同时设置多个阈值 设置两个阈值来检测红色和绿色
    ```img.find_blobs(threshold=[[0, 80, 40, 80, 10, 80], [0, 80, 120, 10, 0, 30]])```
    红色阈值为[0, 80, 40, 80, 10, 80]
    绿色阈值为[0, 80, 120, 10, 0, 30] invert 使能阈值反转,使能后传入阈值与实际阈值相反,默认为False 使能阈值反转
    ```img.find_blobs(invert=True)``` roi 设置算法计算的矩形区域,roi=[x, y, w, h],x,y表示矩形区域左上角坐标,w,h表示矩形区域的宽度和高度,默认为整张图片 计算坐标为(50,50),宽和高为100的区域
    ```img.find_blobs(roi=[50, 50, 100, 100])``` area_threshold 过滤像素面积小于area_threshold的直线,单位为像素点,默认为10。该参数可用于过滤一些无用的小直线 过滤面积小于1000的直线
    ```img.find_blobs(area_threshold=1000)``` pixels_threshold 过滤有效像素点小于pixels_threshold的直线,默认为10。该参数可用于过滤一些无用的小直线 过滤有效像素点小于1000的直线
    ```img.find_blobs(pixels_threshold 1000)``` 本文介绍常用方法,更多 API 请看 API 文档的 [image](../../../api/maix/image.html) 部分。 ### 提升巡线的速度 这里提供几个提升巡线速度的方法 1. 选择合适的分辨率 越大的分辨率计算速度越慢,可以根据识别距离和精度的要求来选择更合适的分辨率 2. 使用灰度图识别 使用灰度图识别时,算法只会处理一个通道,有更快的识别速度,在颜色单一的环境会很有用。注意此时向`get_regression`传入`thresholds`时,只有`l_min`和`l_max`有效。 获取灰度图的方法: ```python # 方法1 cam camera.Camera(320, 240, image.Format.FMT_GRAYSCALE) # MaixPy v4.2.1后支持 gray_img cam.read()\t\t\t\t\t\t\t\t\t\t# 获取灰度图 # 方法2 cam camera.Camera(320, 240) img cam.read() gray_img img.to_format(image.Format.FMT_GRAYSCALE)\t\t\t# 获取灰度图 ``` ## 如何使用MaixCam的默认应用程序寻找直线 为了快速验证寻找直线的功能,可以先使用MaixCam提供的`line_tracking`应用程序来体验寻找直线的效果。 ### 使用方法 1. 选择并打开`Line tracking`应用 2. 点击屏幕中需要识别的直线,左侧会显示该直线的颜色 3. 点击左侧(界面中`L A B`下方的颜色)需要检测的颜色 4. 此时就可以识别到对应的直线了,同时串口也会输出直线的坐标和角度信息。 ### 演示 ### 进阶操作 #### 手动设置LAB阈值寻找直线 APP提供手动设置LAB阈值来精确的寻找直线 操作方法: 1. `点击`左下角`选项图标`,进入配置模式 2. 将`摄像头对准`需要`寻找的物体`,`点击`屏幕上的`目标直线`,此时界面中`L A B`下方会显示该物体对应颜色的`矩形框`,并显示该物体颜色的`LAB值`。 3. 点击下方选项`L Min,L Max,A Min,A Max,B Min,B Max`,点击后右侧会出现滑动条来设置该选项值。这些值分别对应LAB颜色格式的L通道、A通道和B通道的最小值和最大值 4. 参考步骤2计算的物体颜色的`LAB值`,将`L Min,L Max,A Min,A Max,B Min,B Max`调整到合适的值,即可识别到对应的直线。 例如`LAB (20, 50, 80)`,由于`L 20`,为了适配一定范围让`L Min 10`,`L Max 30`;同理,由于`A 50`,让`A Min 40`,`A Max 60`; 由于`B 80`,让`B Min 70`,`B Max 90`。 #### 通过串口协议获取检测数据 寻找直线应用支持通过串口(默认波特率为115200)上报检测到的直线信息。 由于上报信息只有一条,这里直接用示例来说明上报信息的内容。 例如上报信息为: ```shell AA CA AC BB 0E 00 00 00 E1 09 FC 01 01 00 E9 01 6F 01 57 00 C1 C6 ``` `AA CA AC BB`:协议头部,内容固定 `0E 00 00 00`:数据长度,除了协议头部和数据长度外的总长度,这里表示长度为14 `E1`:标志位,用来标识串口消息标志 `09`:命令类型,对于寻找直线APP应用该值固定为0x09 `FC 01 01 00 E9 01 6F 01 57 00`:直线的两端坐标和角度信息,每个值用小端格式的2字节表示。`FC 01`和`01 00`表示第一个端点坐标为(508, 1),`E9 01`和`6F 01`表示第二个端点坐标为(489, 367),`57 00`表示直线与x轴的角度为87度 ` C1 C6`:CRC 校验值,用以校验帧数据在传输过程中是否出错"},"/maixpy/doc/zh/vision/dual_buff.html":{"title":"MaixCAM MaixPy MaixCAM 模型运行 dual_buff 模式介绍","content":" title: MaixCAM MaixPy MaixCAM 模型运行 dual_buff 模式介绍 ## 简介 细心的你可能注意到模型运行相关的的代码初始化时有一个参数`dual_buff True`。 比如 `YOLOv5`: ```python from maix import camera, display, image, nn, app detector nn.YOLOv5(model \"/root/models/yolov5s.mud\", dual_buff True) # detector nn.YOLOv8(model \"/root/models/yolov8n.mud\", dual_buff True) cam camera.Camera(detector.input_width(), detector.input_height(), detector.input_format()) dis display.Display() while not app.need_exit(): img cam.read() objs detector.detect(img, conf_th 0.5, iou_th 0.45) for obj in objs: img.draw_rect(obj.x, obj.y, obj.w, obj.h, color image.COLOR_RED) msg f'{detector.labels[obj.class_id]}: {obj.score:.2f}' img.draw_string(obj.x, obj.y, msg, color image.COLOR_RED) dis.show(img) ``` 一般来说这个参数默认就是`True`,除非手动设置`dual_buff False`才会关闭 `dual_buff`功能。 使能这个功能后运行的效率会提升,即帧率会提升(以上代码假设摄像头的帧率没有限制的情况下,在 MaixCAM 上会减少循环一半的时间即帧率翻倍)。 但是也有缺点,`detect`函数返回的结果是上一次调用`detect`函数的图的结果,所以结果和输入会有一帧的时间差,如果你希望`detect`出来的结果就是输入的`img`的结果而不是上一帧的结果,请禁用这个功能;另外由于准备了双份缓冲区,也会加大内存的使用,如果使用时发现内存不足,也需要禁用这个功能。 ## 原理 模型检测物体分为了几步: * 获取图像 * 图像预处理 * 模型运行 * 结果后处理 其中只有 模型运行这一步是硬件NPU 上运行的,其它步骤都在 CPU 运行。 如果`dual_buff`设置为`False`,在`detect`的时候,CPU 先预处理(此时 NPU 空闲), 然后给 NPU 运算(此时 CPU 空闲等待 NPU 运算结束),然后 CPU 后处理(NPU 空闲), 整过过程是线性的,比较简单。 但是这里发现了问题,就是 CPU 和 NPU 两者总有一个空闲着的,当加了`dual_buff True`, CPU 预处理后交给 NPU 运算,此时 CPU 不再等待 NPU 出结果,二是直接退出`detect`函数进行下一次摄像头读取和预处理,等 NPU 运算完成后, CPU 已经准备好了下一次的数据直接交给 NPU 继续运算,不给 NPU 喘息的机会,这样就充分利用了 CPU 和 NPU 高效地同时进行运算。 不过这里也需要注意,摄像头帧率如果不够高也会限制整体帧率。"},"/maixpy/doc/zh/vision/find_blobs.html":{"title":"MaixCAM MaixPy 寻找色块","content":" title: MaixCAM MaixPy 寻找色块 update: date: 2024 04 03 author: neucrack version: 1.0.0 content: 初版文档 date: 2024 04 03 author: 
lxowalle version: 1.0.1 content: 添加寻找色块的详细用法 阅读本文前,确保已经知晓如何开发MaixCAM,详情请阅读[快速开始](../index.html) ## 简介 本文将介绍如何使用MaixPy来寻找色块,以及如何使用MaixCam的默认应用程序寻找色块。 在视觉应用中,寻找色块是一个非常常见的需求,比如机器人找色块,自动化生产线找色块等等,即需要识别画面中的特定的颜色区域,获取这个区域的位置和大小等信息。 ## 使用 MaixPy 寻找色块 MaixPy的 `maix.image.Image`中提供了`find_blobs`方法,可以方便的找色块。 ### 如何寻找色块 一个简单的示例,实现寻找色块并画框 ```python from maix import image, camera, display cam camera.Camera(320, 240) disp display.Display() # 根据色块颜色选择对应配置 thresholds [[0, 80, 40, 80, 10, 80]] # red # thresholds [[0, 80, 120, 10, 0, 30]] # green # thresholds [[0, 80, 30, 100, 120, 60]] # blue while 1: img cam.read() blobs img.find_blobs(thresholds, pixels_threshold 500) for blob in blobs: img.draw_rect(blob[0], blob[1], blob[2], blob[3], image.COLOR_GREEN) disp.show(img) ``` 步骤: 1. 导入image、camera、display模块 ```python from maix import image, camera, display ``` 2. 初始化摄像头和显示 ```python cam camera.Camera(320, 240)\t# 初始化摄像头,输出分辨率320x240 RGB格式 disp display.Display() ``` 3. 从摄像头获取图片并显示 ```python while 1: img cam.read() disp.show(img) ``` 4. 调用`find_blobs`方法寻找摄像头图片中的色块,并画到屏幕上 ```python blobs img.find_blobs(thresholds, pixels_threshold 500) for blob in blobs: img.draw_rect(blob[0], blob[1], blob[2], blob[3], image.COLOR_GREEN) ``` `img`是通过`cam.read()`读取到的摄像头图像,当初始化的方式为`cam camera.Camera(320, 240)`时,`img`对象是一张分辨率为320x240的RGB图。 `img.find_blobs`用来寻找色块, `thresholds` 是一个颜色阈值列表,每个元素是一个颜色阈值,同时找到多个阈值就传入多个,每个颜色阈值的格式为 `[L_MIN, L_MAX, A_MIN, A_MAX, B_MIN, B_MAX]`,这里的 `L`、`A`、`B` 是`LAB`颜色空间的三个通道,`L` 通道是亮度,`A` 通道是红绿通道,`B` 通道是蓝黄通道。`pixels_threshold`是一个像素点数量的阈值,用来过滤一些不需要的小色块。 `img.draw_rect`用来画色块框,`blob[0]`、`blob[1]`、`blob[1]`、`blob[1]`分别代表色块左上角坐标x,色块左上角坐标y,色块宽度w和色块高度h ### 常用参数说明 列举常用参数说明,如果没有找到可以实现应用的参数,则需要考虑是否使用其他算法实现,或者基于目前算法的结果扩展所需的功能 参数 说明 示例 thresholds 基于lab颜色空间的阈值,threshold [[l_min, l_max, a_min, a_max, b_min, b_max]],分别表示:
    亮度范围为[l_min, l_max]
    绿色到红色的分量范围为[a_min, a_max]
    蓝色到黄色的分量范围为[b_min, b_max]
    可同时设置多个阈值 设置两个阈值来检测红色和绿色
    ```img.find_blobs(threshold=[[0, 80, 40, 80, 10, 80], [0, 80, 120, 10, 0, 30]])```
    红色阈值为[0, 80, 40, 80, 10, 80]
    绿色阈值为[0, 80, 120, 10, 0, 30] invert 使能阈值反转,使能后传入阈值与实际阈值相反,默认为False 使能阈值反转
    ```img.find_blobs(invert=True)``` roi 设置算法计算的矩形区域,roi=[x, y, w, h],x,y表示矩形区域左上角坐标,w,h表示矩形区域的宽度和高度,默认为整张图片 计算坐标为(50,50),宽和高为100的区域
    ```img.find_blobs(roi=[50, 50, 100, 100])``` area_threshold 过滤像素面积小于area_threshold的色块,单位为像素点,默认为10。该参数可用于过滤一些无用的小色块 过滤面积小于1000的色块
    ```img.find_blobs(area_threshold=1000)``` pixels_threshold 过滤有效像素点小于pixels_threshold的色块,默认为10。该参数可用于过滤一些无用的小色块 过滤有效像素点小于1000的色块
    ```img.find_blobs(pixels_threshold 1000)``` 本文介绍常用方法,更多 API 请看 API 文档的 [image](../../../api/maix/image.html) 部分。 ## 离线设置阈值 为了快速验证寻找色块的功能,可以先使用MaixCam提供的寻找色块应用程序来体验寻找色块的效果。 ### 演示 打开设备,选择`找色块`应用,然后在下方选择要识别的颜色,或者自定义颜色,即可以识别到对应的颜色了,在下方`设置栏`会显示当前设置的`阈值范围`,同时串口也会输出识别到的坐标和颜色信息。 [源码地址](https://github.com/sipeed/MaixCDK/tree/main/projects/app_find_blobs) ### 快速使用 #### 使用默认阈值 寻找色块APP提供了`red`、`green`、`blue`、`user`四种配置,其中`red`、`green`和`blue`用来寻找`红色`、`绿色`和`蓝色`的色块,`user`自定义的阈值在程序退出时会保存下来,下次打开应用时会加载上一次调试的阈值。快速体验时通过`点击`界面下方`按钮`即可切换到对应配置,APP界面参考如下: ![](../../../static/image/find_blobs_app.jpg) #### 快速调试阈值 操作方法: 1. 将`摄像头对准`需要`寻找的物体`,`点击`屏幕上的`目标物体`,此时`左侧`会显示该物体对应颜色的`矩形框`,并显示该物体颜色的LAB值。 2. 点击出现的`矩形框`,系统将会`自动设置`LAB阈值,此时画面将会画出该物体边缘。 这个方法优点是方便,快捷,可以很快的设置阈值并找到对应的色块。缺点是还不够精确,可以在下一步中手动微调。 #### 手动微调阈值 操作方法: 1. `点击`左下角`选项图标`,进入配置模式 2. 将`摄像头对准`需要`寻找的物体`,`点击`屏幕上的`目标物体`,此时`左侧`会显示该物体对应颜色的`矩形框`,并显示该物体颜色的`LAB值`。 3. 点击下方选项`L Min,L Max,A Min,A Max,B Min,B Max`,点击后右侧会出现滑动条来设置该选项值。这些值分别对应LAB颜色格式的L通道、A通道和B通道的最小值和最大值 4. 参考步骤2计算的物体颜色的`LAB值`,将`L Min,L Max,A Min,A Max,B Min,B Max`调整到合适的值,即可识别到对应的色块。 例如`LAB (20, 50, 80)`,由于`L 20`,为了适配一定范围让`L Min 10`,`L Max 30`;同理,由于`A 50`,让`A Min 40`,`A Max 60`; 由于`B 80`,让`B Min 70`,`B Max 90`。 这个方法可以更精确的找到合适的阈值,搭配`快速调试阈值`的方法,轻松找到想要的阈值。 #### 通过串口协议获取识别结果 寻找色块APP支持通过串口(默认波特率为115200)上报检测到的色块信息。 由于上报信息只有一条,这里直接用示例来说明上报信息的内容。 例如上报信息为: ```shell AA CA AC BB 14 00 00 00 E1 08 EE 00 37 00 15 01 F7 FF 4E 01 19 00 27 01 5A 00 A7 20 ``` `AA CA AC BB`:协议头部,内容固定 `14 00 00 00`:数据长度,除了协议头部和数据长度外的总长度 `E1`:标志位,用来标识串口消息标志 `08`:命令类型,对于寻找色块APP应用该值固定为0x08 `EE 00 37 00 15 01 F7 FF 4E 01 19 00 27 01 5A 00`:已找到色块的四个顶点坐标,每个值用小端格式的2字节表示。`EE 00`和`37 00`表示第一个顶点坐标为(238, 55),`15 01`和`F7 FF`表示第二个顶点坐标为(277, 9),`4E 01`和`19 00`表示第三个顶点坐标为(334, 25),`27 01`和`5A 00`表示第四个顶点坐标为(295, 90)。 `A7 20`:CRC 校验值,用以校验帧数据在传输过程中是否出错 ## 关于LAB颜色空间 LAB颜色空间和RGB颜色空间一样是一种表示颜色的方法,LAB可以表示人眼能看到的所有颜色。如果需要了解LAB可以去网络上搜索相关文章,那样更详细,而对于你应该只需要了解为什么选用LAB对于MaixPy的优势。 LAB对于MaixPy的优势: 1. LAB颜色空间的色域比RGB都要大,因此完全可以替换RGB。 2. LAB颜色空间下,由于L通道是亮度通道,我们常常设置到较大的范围即可(常用[0,80]),而编写代码时主要关注是A通道和B通道,这样可以减少大量的时间在纠结颜色阈值如何选择的问题上。 3. 
LAB颜色空间的颜色感知更均匀,更容易用代码调试。例如,对于只需要寻找红色色块,可以固定L通道和B通道值,只需要调整A通道的值即可(这是在颜色精度要求不高的情况下);如果是RGB通道则基本需要R、G、B三个通道同时变动才能找到合适的阈值。"},"/maixpy/doc/zh/vision/self_learn_detector.html":{"title":"MaixCAM MaixPy 自学习检测跟踪器","content":" title: MaixCAM MaixPy 自学习检测跟踪器 ## MaixPy 自学习检测跟踪器 和自学习分类器类似,不需要训练,直接框选目标物体即可实现检测并且跟踪物体,在简单检测场景下十分好用。 和自学习分类器不同的是因为是检测器,会有物体的坐标和大小。 ## MaixPy 中使用自学习检测跟踪器 在 MaixPy 目前提供了一种单目标学习检测跟踪算法,即开始框选目标物体,后面会一直跟踪这个物体。 这里使用的算法是[NanoTrack](https://github.com/HonglinChu/SiamTrackers/tree/master/NanoTrack),有兴趣了解原理的可以自行学习。 可以烧录最新的系统镜像(> 2024.9.5_v4.5.0)后直接使用内置的自学习跟踪应用看效果。 使用`maix.nn.NanoTrack`类即可,初始化对象后,先调用`init`方法指定要检测的目标,然后调用`track`方法连续跟踪目标,以下为简化的代码: ```python from maix import nn model_path \"/root/models/nanotrack.mud\" tracker nn.NanoTrack(model_path) tracker.init(img, x, y, w, h) pos tracker.track(img) ``` 注意这里使用了内置的模型,在系统`/root/models`下已经内置了,你也可以在[MaixHub 模型库](https://maixhub.com/model/zoo/437)下载到模型。 具体详细代码请看[MaixPy/examples/vision/ai_vision/nn_self_learn_tracker.py](https://github.com/sipeed/MaixPy/blob/main/examples/vision/ai_vision/nn_self_learn_tracker.py) ## 其它自学习跟踪算法 目前实现了 NanoTrack 算法,在简单场景非常稳定可靠,而且帧率足够高,缺点就是物体出视野再回来需要回到上次消失的附近才能检测到,以及只能检测一个目标。 如果有更好的算法,可以自行参考已有的 NanoTrack 实现方式进行实现,也欢迎讨论或者提交代码PR。"},"/maixpy/doc/zh/vision/object_track.html":{"title":"MaixCAM MaixPy 物体轨迹追踪和计数(如人流计数)","content":" title: MaixCAM MaixPy 物体轨迹追踪和计数(如人流计数) ## 轨迹追踪简介 前面我们使用 YOLOv5 YOLOv8 甚至是 find_blobs 都可以检测到物体,但是如果画面中同时存在多个物体,当我们需要区分每一个物体,就需要物体追踪功能了。 比如画面中同时有 5 个人在移动,我们需要给每个人编号,知道他们的行动轨迹。 应用: * 人流计数,比如通过某个地段的人数量。 * 工件计数,比如流水线对生产的产品进行计数。 * 物体移动轨迹记录和识别。 ## MaixCAM/MaixPy 物体追踪和人流计数效果 效果如下视频,可以跟踪每个人,以及对从上往下跨越黄色区域的人进行计数(左下角): ## MaixCAM / MaixPy 使用 物体追踪和人流计数 可以参考直接安装[应用](https://maixhub.com/app/61) 体验。 可以看[examples/vision/tracker 下的例程](https://github.com/sipeed/MaixPy/tree/main/examples/vision/tracker)。 其中`tracker_bytetrack.py` 例程是基本的物体跟踪例程,分为几个步骤: * 使用 YOLOv5 或者 YOLOv8 检测物体,这样你就可以根据你自己要检测的物体更换模型即可检测不同物体。 * 使用`maix.tracker.ByteTracker` 这个算法进行物体追踪,只需要调用一个`update`函数即可得到结果(画面中的每个轨迹),十分简单。 其中有几个参数根据自己的实际场景进行调整,具体参数以例程代码和 API 参数说明为准: ```python # configs conf_threshold 0.3 # detect threshold iou_threshold 0.45 # detect iou threshold max_lost_buff_time 120 # the frames for keep lost tracks. track_thresh 0.4 # tracking confidence threshold. high_thresh 0.6 # threshold to add to new track. match_thresh 0.8 # matching threshold for tracking, e.g. one object in two frame iou < match_thresh we think they are the same obj. max_history_num 5 # max tack's position history length. 
show_detect False # show detect valid_class_id [0] # we used classes index in detect model。 ``` `tracker_bytetrack_count.py` 例程则增加了人流计数例程,这里为了让例程更加简单,只简单地写了一个判断人从上往下走的计数,即当人处在黄色区域以下,同时轨迹在黄色区域内就认为是从上往下跨越了黄色区域。 实际在你的应用场景可以自己编写相关逻辑。"},"/maixpy/doc/zh/vision/customize_model_yolov8.html":{"title":"为 MaixCAM MaixPy 离线训练 YOLO11/YOLOv8 模型,自定义检测物体、关键点检测","content":" title: 为 MaixCAM MaixPy 离线训练 YOLO11/YOLOv8 模型,自定义检测物体、关键点检测 update: date: 2024 06 21 version: v1.0 author: neucrack content: 编写文档 date: 2024 10 10 version: v2.0 author: neucrack content: 增加 YOLO11 支持 ## 简介 默认官方提供了 80 种物体检测,如果不满足你的需求,可以自己训练检测的物体,可以在自己的电脑或者服务器搭建训练环境训练。 YOLOv8 / YOLO11 不光支持检测物体,还有 yolov8 pose / YOLO11 pose 支持关键点检测,出了官方的人体关键点,你还可以制作你自己的关键点数据集来训练检测指定的物体和关键点 因为 YOLOv8 和 YOLO11 主要是修改了内部网络,预处理和后处理都是一样的,所以 YOLOv8 和 YOLO11 的训练转换步骤相同,只是输出节点的名称不一样。 **注意:** 本文讲了如何自定义训练,但是有一些基础知识默认你已经拥有,如果没有请自行学习: * 本文不会讲解如何安装训练环境,请自行搜索安装(Pytorch 环境安装)测试。 * 本文不会讲解机器学习的基本概念、linux相关基础使用知识。 如果你觉得本文哪里需要改进,欢迎点击右上角`编辑本文`贡献并提交 文档 PR。 ## 流程和本文目标 要想我们的模型能在 MaixPy (MaixCAM)上使用,需要经历以下过程: * 搭建训练环境,本文略过,请自行搜索 pytorch 训练环境搭建。 * 拉取 [YOLO11/YOLOv8](https://github.com/ultralytics/ultralytics) 源码到本地。 * 准备数据集,并做成 YOLO11 / YOLOv8 项目需要的格式。 * 训练模型,得到一个 `onnx` 模型文件,也是本文的最终输出文件。 * 将`onnx`模型转换成 MaixPy 支持的 `MUD` 文件,这个过程在[MaixCAM 模型转换](../ai_model_converter/maixcam.html) 一文种有详细介绍。 * 使用 MaixPy 加载模型运行。 ## 参考文章 因为是比较通用的操作过程,本文只给一个流程介绍,具体细节可以自行看 **[YOLO11 / YOLOv8 官方代码和文档](https://github.com/ultralytics/ultralytics)**(**推荐**),以及搜索其训练教程,最终导出 onnx 文件即可。 如果你有觉得讲得不错的文章欢迎修改本文并提交 PR。 ## YOLO11 / YOLOv8 导出 onnx 模型 在 `ultralytics` 目录下创建一个`export_onnx.py` 文件 ```python from ultralytics import YOLO import sys print(sys.path) net_name sys.argv[1] # yolov8n.pt yolov8n pose.pt # https://docs.ultralytics.com/models/yolov8/#supported tasks and modes input_width int(sys.argv[2]) input_height int(sys.argv[3]) # Load a model model YOLO(net_name) # load an official model # model YOLO(\"path/to/best.pt\") # load a custom model # Predict with the model results model(\"https://ultralytics.com/images/bus.jpg\") # predict on an image path model.export(format \"onnx\", imgsz [input_height, input_width]) # export the model to ONNX format print(path) ``` 然后执行`python export_onnx.py yolov8n.pt 320 224` 就能导出 `onnx` 模型了,这里重新指定了输入分辨率,模型训练的时候用的`640x640`,我们重新指定了分辨率方便提升运行速度,这里使用`320x224`的原因是和 MaixCAM 的屏幕比例比较相近方便显示,具体可以根据你的需求设置就好了。 ## 转换为 MaixCAM 支持的模型以及 mud 文件 MaixPy/MaixCDK 目前支持了 YOLOv8 / YOLO11 检测 以及 YOLOv8 pose / YOLO11 pose 关键点检测 以及 YOLOv8 seg / YOLO11 seg 三种模型(2024.10.10)。 按照[MaixCAM 模型转换](../ai_model_converter/maixcam.html) 进行模型转换。 注意模型输出节点的选择: * 检测模型: * YOLOv8 提取 onnx 的 `/model.22/dfl/conv/Conv_output_0,/model.22/Sigmoid_output_0` 这两个输出。 * YOLO11 提取`/model.23/dfl/conv/Conv_output_0,/model.23/Sigmoid_output_0`输出。 * 关键点检测: * YOLOv8 pose 提取`/model.22/dfl/conv/Conv_output_0,/model.22/Sigmoid_output_0,/model.22/Concat_output_0`这三个输出。 * YOLO11 pose 提取`/model.23/dfl/conv/Conv_output_0,/model.23/Sigmoid_output_0,/model.23/Concat_output_0`这三个输出。 * 图像分割: * YOLOv8 seg 提取 `/model.22/dfl/conv/Conv_output_0,/model.22/Sigmoid_output_0,/model.22/Concat_output_0,output1` * YOLO11 seg 提取 `/model.23/dfl/conv/Conv_output_0,/model.23/Sigmoid_output_0,/model.23/Concat_output_0,output1`四个输出。 ![](../../assets/yolov8_out1.jpg) ![](../../assets/yolov8_out2.jpg) 对于物体检测,mud 文件为(YOLO11 model_type 改为 yolo11) ```ini [basic] type cvimodel model yolov8n.cvimodel [extra] model_type yolov8 input_type rgb mean 0, 0, 0 scale 0.00392156862745098, 0.00392156862745098, 0.00392156862745098 labels person, bicycle, car, motorcycle, airplane, bus, 
train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair drier, toothbrush ``` 根据你训练的对象替换`labels`即可。 对于关键点检测(yolov8 pose), mud 文件为(YOLO11 model_type 改为 yolo11): ```ini [basic] type cvimodel model yolov8n_pose.cvimodel [extra] model_type yolov8 type pose input_type rgb mean 0, 0, 0 scale 0.00392156862745098, 0.00392156862745098, 0.00392156862745098 labels person ``` 官方默认的时人体姿态关键点检测,所以`labels`只有一个 `person`,根据你检测的物体替换即可。 对于图像分割(yolov8 seg), mud 文件(YOLO11 model_type 改为 yolo11): ```ini [basic] type cvimodel model yolo11n seg_320x224_int8.cvimodel [extra] model_type yolov8 input_type rgb type seg mean 0, 0, 0 scale 0.00392156862745098, 0.00392156862745098, 0.00392156862745098 labels person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair drier, toothbrush ``` ## 上传分享到 MaixHub 到 [MaixHub 模型库](https://maixhub.com/model/zoo?platform maixcam) 上传并分享你的模型,可以多提供几个分辨率供大家选择。"},"/maixpy/doc/zh/vision/classify.html":{"title":"MaixCAM MaixPy 使用 AI 模型进行物体分类","content":" title: MaixCAM MaixPy 使用 AI 模型进行物体分类 ## 物体分类概念 比如眼前有两张图片,一张图里面是苹果,另一张是飞机,物体分类的任务就是把两张图分别依次输入给 AI 模型,模型会依次输出两个结果,一个是苹果,一个是飞机。 ## MaixPy 中使用物体分类 MaixPy 默认提供了 `imagenet` 数据集训练得到的 `1000`分类模型,可以直接使用: ```python from maix import camera, display, image, nn classifier nn.Classifier(model \"/root/models/mobilenetv2.mud\", dual_buff True) cam camera.Camera(classifier.input_width(), classifier.input_height(), classifier.input_format()) dis display.Display() while 1: img cam.read() res classifier.classify(img) max_idx, max_prob res[0] msg f\"{max_prob:5.2f}: {classifier.labels[max_idx]}\" img.draw_string(10, 10, msg, image.COLOR_RED) dis.show(img) ``` 效果视频: 这里使用了摄像头拍摄图像,然后传给 `classifier`进行识别,得出结果后,将结果显示在屏幕上。 更多 API 使用参考 [maix.nn](/api/maix/nn.html) 模块的文档。 ## dual_buff 双缓冲区加速 你可能注意到这里模型初始化使用了`dual_buff`(默认值就是 `True`),使能 `dual_buff` 参数可以加快运行效率,提高帧率,具体原理和使用注意点见 [dual_buff 介绍](./dual_buff.html)。 ## 使用 MaixHub 训练自己的分类模型 如果你想训练特定图像的分类模型,请到[MaixHub](https://maixhub.com) 学习并训练分类模型,创建项目时选择`分类模型`,然后上传图片训练即可,无需搭建训练环境也无需花钱购买昂贵的GPU,快速一键训练。 ## 离线训练自己的分类模型 离线训练需要自己搭建环境,请自行搜索 `PyTorch 分类模型训练` `Mobilenet`等相关关键字进行参考。 训练好模型后导出 onnx 格式的模型,然后参考 [MaixCAM 模型转换文档](../ai_model_converter/maixcam.html) 转换为 MaixCAM 支持的模型格式,最后使用上面的`nn.Classifier`类加载模型即可。 这里分类模型可以是 mobilenet 也可以是 其它模型比如 Resnet 等,模型转换时最好提取 `softmax`前一层作为最后的输出层,因为`classifier.classify(img, softmax True)` 
识别函数的`softmax`参数默认为`True`,即会对结果计算一次`softmax`,所以模型就不用`softmax`这一层了,当然如果模型包含了`softmax`层,也可以指定不再执行一遍`softmax`: `classifier.classify(img, softmax False)`。"},"/maixpy/doc/zh/vision/body_key_points.html":{"title":"MaixCAM MaixPy 检测人体关键点姿态检测","content":" title: MaixCAM MaixPy 检测人体关键点姿态检测 ## 简介 使用 MaixPy 可以轻松检测人体关节的关键点的坐标,用在姿态检测比如坐姿检测,体感游戏输入等。 MaixPy 实现了基于 [YOLOv8 Pose / YOLO11 Pose](https://github.com/ultralytics/ultralytics) 的人体姿态检测,可以检测到人体`17`个关键点。 ![](../../assets/body_keypoints.jpg) ## 使用 使用 MaixPy 的 `maix.nn.YOLOv8` 或者 `maix.nn.YOLO11` 类可以轻松实现: ```python from maix import camera, display, image, nn, app detector nn.YOLOv8(model \"/root/models/yolov8n_pose.mud\", dual_buff True) # detector nn.YOLO11(model \"/root/models/yolo11n_pose.mud\", dual_buff True) cam camera.Camera(detector.input_width(), detector.input_height(), detector.input_format()) dis display.Display() while not app.need_exit(): img cam.read() objs detector.detect(img, conf_th 0.5, iou_th 0.45, keypoint_th 0.5) for obj in objs: img.draw_rect(obj.x, obj.y, obj.w, obj.h, color image.COLOR_RED) msg f'{detector.labels[obj.class_id]}: {obj.score:.2f}' img.draw_string(obj.x, obj.y, msg, color image.COLOR_RED) detector.draw_pose(img, obj.points, 8 if detector.input_width() > 480 else 4, image.COLOR_RED) dis.show(img) ``` 另外代码也在[MaixPy/examples/vision](https://github.com/sipeed/MaixPy/tree/main/examples/vision/ai_vision)目录下可以找到。 可以看到因为用了`YOLOv8 Pose` 所以这里直接用了`YOLOv8`这个类,和`YOLOv8`物体检测模型只是模型文件不同, `YOLO11`同理,然后就是`detect`函数返回的结果多了`points`值,是一个`int`类型的`list`列表,一共`17`个点,按次序依次排列,比如第一个值是鼻子的 x 坐标, 第二个值是鼻子的 y 坐标,依次为: ```python 1. 鼻子(Nose) 2. 左眼(Left Eye) 3. 右眼(Right Eye) 4. 左耳(Left Ear) 5. 右耳(Right Ear) 6. 左肩(Left Shoulder) 7. 右肩(Right Shoulder) 8. 左肘(Left Elbow) 9. 右肘(Right Elbow) 10. 左手腕(Left Wrist) 11. 右手腕(Right Wrist) 12. 左髋(Left Hip) 13. 右髋(Right Hip) 14. 左膝(Left Knee) 15. 右膝(Right Knee) 16. 左脚踝(Left Ankle) 17. 
右脚踝(Right Ankle) ``` 如果某些部位被遮挡,那么值为` 1`。 ## 更多输入分辨率模型 默认的模型是输入是`320x224`分辨率,如果你希望使用更大分辨率的模型,可以到 MaixHub 模型库下载并传输到设备使用: * YOLOv8 Pose: [https://maixhub.com/model/zoo/401](https://maixhub.com/model/zoo/401) * YOLO11 Pose: [https://maixhub.com/model/zoo/454](https://maixhub.com/model/zoo/454) 分辨率越大理论上精度越高但是运行速度更低,根据你的使用场景选择,另外如果提供的分辨率不满足你的要求你也可以自己到 [YOLOv8 Pose / YOLO11 Pose](https://github.com/ultralytics/ultralytics) 使用摸新训练源码导出自己的onnx模型,然后转换为 MaixCAM 支持的模型(方法见后面的文章)。 ## dual_buff 双缓冲区加速 你可能注意到这里模型初始化使用了`dual_buff`(默认值就是 `True`),使能 `dual_buff` 参数可以加快运行效率,提高帧率,具体原理和使用注意点见 [dual_buff 介绍](./dual_buff.html)。"},"/maixpy/doc/zh/vision/custmize_model.html":{"title":"","content":"请看 [MaixCAM 模型转换](../ai_model_converter/maixcam.html),以及在左边目录找到你要转换的模型文档,比如[自定义 yolov5 模型](./customize_model_yolov5.html)。"},"/maixpy/doc/zh/vision/opencv.html":{"title":"MaixCAM MaixPy 使用 OpenCV","content":" title: MaixCAM MaixPy 使用 OpenCV ## 简介 对于 MaixCAM,因为使用了 Linux, 并且性能基本能够支撑使用`Python`版本的`OpenCV`,所以除了使用`maix`模块,你也可以直接使用`cv2`模块。 本文例程以及更多可以在[MaixPy/examples/vision/opencv](https://github.com/sipeed/MaixPy/tree/main/examples/vision/opencv) 中找到。 **注意 OpenCV 的函数基本都是 CPU 计算的,能使用 maix 的模块尽量不使用 OpenCV,因为 maix 有很多函数都是经过硬件加速过的。** ## maix.image.Image 对象和 Numpy/OpenCV 格式互相转换 `maix.image.Image`对象可以转换成`numpy`数组,这样就能给`numpy`和`opencv`等库使用: ```python from maix import image, time, display, app disp display.Display() while not app.need_exit(): img image.Image(320, 240, image.Format.FMT_RGB888) img.draw_rect(0, 0, 100, 100, image.COLOR_RED, thickness 1) t time.ticks_ms() img_bgr image.image2cv(img, ensure_bgr True, copy True) img2 image.cv2image(img_bgr, bgr True, copy True) print(\"time:\", time.ticks_ms() t) print(type(img_bgr), img_bgr.shape) print(type(img2), img2) print(\"\") disp.show(img2) ``` 前面的程序因为每次转换都要拷贝一次内存,所以速度会比较慢,下面为优化速度版本,如果不是极限追求速度不建议使用,容易出错: ```python from maix import image, time, display, app disp display.Display() while not app.need_exit(): img image.Image(320, 240, image.Format.FMT_RGB888) img.draw_rect(0, 0, 100, 100, image.COLOR_RED, thickness 1) t time.ticks_ms() img_rgb image.image2cv(img, ensure_bgr False, copy False) img2 image.cv2image(img_rgb, bgr False, copy False) print(\"time:\", time.ticks_ms() t) print(type(img_rgb), img_rgb.shape) print(type(img2), img2) disp.show(img2) ``` * `img_rgb image.image2cv(img, ensure_bgr False, copy False)`中`img_rgb` 会直接使用 `img` 的数据,不会产生内存拷贝,注意此时得到的`img_rgb` 是 `RGB` 图,`opencv`的 API 都是认为图是 `BGR` 的,所以用`opencv`的 API 操作图像时要注意,如果你无法掌控请设置`ensure_bgr`为`True`。 * `img2 image.cv2image(img_rgb, bgr False, copy False)`中设置了`copy`为`False`,即直接使用`img_rgb`的内存,不会新拷贝一份内存,所以速度更快了,但是需要小心,在 `img2` 使用结束前`img_bgr`不能被销毁,否则程序会崩溃。 * 注意因为借用了内存,所以更改转换后的图像也会影响到转换前的图像。 ## 加载一张图片 ```python import cv2 file_path \"/maixapp/share/icon/detector.png\" img cv2.imread(file_path) print(img) ``` 因为`cv2`模块比较臃肿,`import cv2`可能会需要一点时间。 ## 显示图像到屏幕 但是由于直接使用了官方的 OpenCV,没有对接显示,所以要显示到屏幕上需要转换成`maix.image.Image`对象后再用`display`来显示: ```python from maix import display, image, time import cv2 disp display.Display() file_path \"/maixapp/share/icon/detector.png\" img cv2.imread(file_path) img_show image.cv2image(img) disp.show(img_show) while not app.need_exit(): time.sleep(1) ``` ## 使用 OpenCV 函数 以边缘检测为例: 基于上面的代码,使用`cv2.Canny`函数即可: ```python from maix import image, display, app, time import cv2 file_path \"/maixapp/share/icon/detector.png\" img0 cv2.imread(file_path) disp display.Display() while not app.need_exit(): img img0.copy() # canny method t time.ticks_ms() edged cv2.Canny(img, 180, 60) t2 time.ticks_ms() t # show by 
maix.display t time.ticks_ms() img_show image.cv2image(edged) print(f\"edge time: {t2}ms, convert time: {time.ticks_ms() t}ms\") disp.show(img_show) ``` ## 使用摄像头 在 PC 上, 我们使用 `OpenCV` 的`VideoCapture`类来读取摄像头,对于 `MaixCAM`, `OpenCV` 没有适配,我们可以用`maix.camera` 模块来读取摄像头,然后给`OpenCV`使用。 通过`image.image2cv`函数将`maix.image.Image`对象转为`numpy.ndarray`对象给`OpenCV`使用: ```python from maix import image, display, app, time, camera import cv2 disp display.Display() cam camera.Camera(320, 240, image.Format.FMT_BGR888) while not app.need_exit(): img cam.read() # convert maix.image.Image object to numpy.ndarray object t time.ticks_ms() img image.image2cv(img, ensure_bgr False, copy False) print(\"time: \", time.ticks_ms() t) # canny method edged cv2.Canny(img, 180, 60) # show by maix.display img_show image.cv2image(edged, bgr True, copy False) disp.show(img_show) ``` ## 读取 USB 摄像头 先在开发板设置里面`USB设置`中选择`USB 模式`为`HOST`模式。如果没有屏幕,可以用`examples/tools/maixcam_switch_usb_mode.py`脚本进行设置。 ```python from maix import image, display, app import cv2 import sys cap cv2.VideoCapture(0) cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640) cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 480) # cap.set(cv2.CAP_PROP_CONVERT_RGB, 0) disp display.Display() if not cap.isOpened(): print(\"无法打开摄像头\") sys.exit(1) print(\"开始读取\") while not app.need_exit(): ret, frame cap.read() if not ret: print(\"无法读取帧\") break img image.cv2image(frame, bgr True, copy False) disp.show(img) ```"},"/maixpy/doc/zh/vision/self_learn_classifier.html":{"title":"MaixCAM MaixPy 自学习分类器","content":" title: MaixCAM MaixPy 自学习分类器 ## MaixPy 自学习分类器介绍 一般情况下我们要识别新的类别,需要在电脑端重新采集数据集并训练,步骤很麻烦,难度较高,这里提供一种不需要电脑端训练,而是直接在设备端就能秒学习新的物体,适合场景不太复杂的使用场景。 比如眼前有饮料瓶和手机,使用设备分别拍一张它们的照片作为两个分类的依据,然后再采集几张他们各个角度的照片,提取它们的特征保存,然后识别时根据图像的特征值分别和保存的特征值进行对比,和保存的哪个更相近就认为是对应的分类。 ## MaixPy 中使用自学习分类器 默认镜像自带了 [自学习分类 APP](https://maixhub.com/app/30),可以直接尝试使用熟悉使用流程。 ![](../../assets/self_learn_classifier.jpg) 步骤: * 点击`+ Class` 按钮, 采集 n 张分类(class)图,采集图时物体需要在屏幕的白色框中。 * 点击`+ Sample`按钮,采集 m 张样本图,每个分类都采集一些,顺序无所谓,张数也比较随意,最好是在各个角度拍一点,不要差距过大。 * 点击`Learn`按钮,启动学习,会自动根据采集的分类图和样本图进行分类学习,得到分类的特征。 * 屏幕中央对准物体,识别图像输出结果,可以看到屏幕显示了所属的分类,以及和这个分类的相似距离,相似距离越近则越相似。 * 此 APP 学习后的特征值会存到`/root/my_classes.bin`,所以退出应用或者重启了仍然会自动加载上一次的。 简洁版本代码,完整版本请看[例程](https://github.com/sipeed/maixpy/tree/main/examples/vision/ai_vision)里面的完整代码。 ```python from maix import nn, image classifier nn.SelfLearnClassifier(model \"/root/models/mobilenetv2.mud\", dual_buff True) img1 image.load(\"/root/1.jpg\") img2 image.load(\"/root/2.jpg\") img3 image.load(\"/root/3.jpg\") sample_1 image.load(\"/root/sample_1.jpg\") sample_2 image.load(\"/root/sample_2.jpg\") sample_3 image.load(\"/root/sample_3.jpg\") sample_4 image.load(\"/root/sample_4.jpg\") sample_5 image.load(\"/root/sample_5.jpg\") sample_6 image.load(\"/root/sample_6.jpg\") classifier.add_class(img1) classifier.add_class(img2) classifier.add_class(img3) classifier.add_sample(sample_1) classifier.add_sample(sample_2) classifier.add_sample(sample_3) classifier.add_sample(sample_4) classifier.add_sample(sample_5) classifier.add_sample(sample_6) classifier.learn() img image.load(\"/root/test.jpg\") max_idx, max_score classifier.classify(img) print(maix_idx, max_score) ``` ## 储存和加载学习到的特征值 使用 `save` 函数进行储存,会得到一个二进制文件,里面存了物体的特征值。 再使用时用`load`函数进行加载即可。 ```python classifier.save(\"/root/my_classes.bin\") classifier.load(\"/root/my_classes.bin\") ``` 如果你给每一个分类命名了,比如存到了`labels`变量,也可以使用: ```python classifier.save(\"/root/my_classes.bin\", labels labels) labels classifier.load(\"/root/my_classes.bin\") ``` ## dual_buff 双缓冲区加速 
你可能注意到这里模型初始化使用了`dual_buff`(默认值就是 `True`),使能 `dual_buff` 参数可以加快运行效率,提高帧率,具体原理和使用注意点见 [dual_buff 介绍](./dual_buff.html)。"},"/maixpy/doc/zh/vision/camera.html":{"title":"MaixCAM MaixPy 摄像头使用","content":" title: MaixCAM MaixPy 摄像头使用 update: date: 2024 04 03 author: neucrack version: 1.0.0 content: 初版文档 date: 2024 08 21 author: YWJ version: 1.0.1 content: 修正文档部分bug,增加部分内容 date: 2024 10 24 author: neucrack version: 1.1.0 content: 增加 USB 摄像头支持说明 ## 简介 对于 MaixCAM 默认搭载了 GC4653 摄像头,或者可选的 OS04A10 摄像头或者全局快门摄像头,甚至是 HDMI 转 MIPI 模块,都可以直接用简单的 API 调用。 ## API 文档 本文介绍常用方法,更多 API 使用参考 [maix.camera](/api/maix/camera.html) 模块的文档。 ## 摄像头切换 目前支持的摄像头: * **GC4653**:M12 通用镜头, 1/3\" 传感器,画质清晰, 4M 像素。 * **OS04A10**:M12 通用镜头,1/1.8\" 大底传感器,画质超清, 4M像素。 * **OV2685**:不支持镜头更换,1/5\"传感器,2M 像素,画质最差,成本最低,一般不建议使用。 * **SC035HGS**:黑白全局快门摄像头,30W黑白像素,适合拍摄高速物体。 系统会自动切换,只接硬件换上即可使用。 ## 获取摄像头的图像信息 使用 MaixPy 轻松获取: ```python from maix import camera cam camera.Camera(640, 480) while 1: img cam.read() print(img) ``` 这里我们从`maix`模块导入`camera`模块,然后创建一个`Camera`对象,指定图像的宽度和高度。然后在一个循环中不断读取图像, 默认出的图为`RGB`格式,如果需要`BGR`格式,其它格式请看 API 文档。 你还可以获取灰度图像 ```python from maix import camera, image cam camera.Camera(640, 480, image.Format.FMT_GRAYSCALE)\t# 设置输出灰度图像 ``` 还可以获取NV21图像 ```python from maix import camera, image cam camera.Camera(640, 480, image.Format.FMT_YVU420SP)\t# 设置输出NV21图像 ``` 注意:如果设置了很高的分辨率(例如`2560x1440`)时需要关闭MaixVision的在线浏览功能,否则可能会因为内存不足导致代码运行异常。 ## 设置摄像头的帧率 目前摄像头支持`30fps`、`60fps`和`80fps`三种配置,由创建`Camera`对象时传入的`width`,`height`,`fps`参数来选择帧率,目前`60/80fps`下最大支持分辨率`1280x720`, `30fps`下最大支持分辨率`2560x1440`。 ### 设置帧率为30帧 ```python from maix import camera cam camera.Camera(640, 480, fps 30)\t\t\t# 设置帧率为30帧 # or cam camera.Camera(1920, 1280) # 分辨率高于1280x720时帧率会设置为30帧 ``` ### 设置帧率为60帧 ```python from maix import camera cam camera.Camera(640, 480, fps 60)\t # 设置帧率为60帧 # or cam camera.Camera(640, 480) # 分辨率低于或等于1280x720时帧率会设置为80fps ``` ### 设置帧率为80帧 ```python from maix import camera cam camera.Camera(640, 480, fps 80)\t # 设置帧率为80帧 ``` 注意: 1. 如果`Camera`传入的尺寸大于`1280x720`,例如写成`camera.Camera(1920, 1080, fps 60)`,此时`fps`参数将会失效,帧率将保持在`30fps`。 2. `60/80fps`与`30fps`的画面相比会有几个像素的偏移,在对视角有严格要求的应用下需要注意修正偏移。 3. 需要注意由于`60/80fps`和`30fps`共用了`isp`配置,在某些环境下两种帧率下的画面画质会存在一些偏差。 4. 
摄像头需要看体制,有些体制无法设置到80fps,会出现画面有奇怪的纹路,请换会正常的 60fps使用。 ## 图像矫正 对于画面存在鱼眼等畸变的情况,可以使用`Image`对象下的`lens_corr`函数对图片进行畸变矫正。一般情况只需要调大和调小`strength`的值来将画面调整到合适效果即可。 ```python from maix import camera, display,app,time cam camera.Camera(320, 240) disp display.Display() while not app.need_exit(): t time.ticks_ms() img cam.read() img img.lens_corr(strength 1.5)\t# 调整strength的值直到画面不再畸变 disp.show(img) ``` 注意由于是软件矫正,需要耗费一定时间,另外也可以只接用无畸变镜头(询问商家)从硬件层面解决。 ## 跳过 开头的帧 摄像头初始化的一小段时间,可能图像采集还没稳定出现奇怪的画面,可以通过`skip_frames`函数跳过开头的几帧: ```python cam camera.Camera(640, 480) cam.skip_frames(30) # 跳过开头的30帧 ``` ## 显示摄像头获取的图像 MaixPy 提供了`display`模块,可以方便的显示图像: ```python from maix import camera, display cam camera.Camera(640, 480) disp display.Display() while 1: img cam.read() disp.show(img) ``` ## 设置摄像头参数 ### 设置曝光时间 注意设置曝光时间后,摄像头会切换到手动曝光模式,如果要切换回自动曝光模式需运行`cam.exp_mode(0)` ```python cam camera.Camera() cam.exposure(1000) ``` ### 设置增益 注意设置增益后,摄像头会切换到手动曝光模式,如果要切换回自动曝光模式需运行`cam.exp_mode(0)`。自定义的增益值只能在手动曝光模式下生效。 ```python cam camera.Camera() cam.gain(100) ``` ### 设置白平衡 ```python cam camera.Camera() cam.awb_mode(1)\t\t\t# 0,开启白平衡;1,关闭白平衡 ``` ### 设置亮度、对比度和饱和度 ```python cam camera.Camera() cam.luma(50)\t\t # 设置亮度,范围[0, 100] cam.constrast(50)\t\t# 设置对比度,范围[0, 100] cam.saturation(50)\t\t# 设置饱和度,范围[0, 100] ``` ### 更改图片长宽 ```python cam camera.Camera(width 640, height 480) ``` 或 ```python cam camera.Camera() cam.set_resolution(width 640, height 480) ``` ## 使用 USB 摄像头 除了使用开发板自带的 MIPI 接口摄像头,你也可以使用 USB 外接 USB 摄像头。 方法: * 先在开发板设置里面`USB设置`中选择`USB 模式`为`HOST`模式。如果没有屏幕,可以用`examples/tools/maixcam_switch_usb_mode.py`脚本进行设置。 * `maix.camera` 模块目前(2024.10.24) 还不支持 USB 摄像头,不过你可以参考 [OpenCV 使用 USB 摄像头](./opencv.html)。"}} \ No newline at end of file diff --git a/maixpy/static/search_index/index_2.json b/maixpy/static/search_index/index_2.json new file mode 100644 index 00000000..fa82440e --- /dev/null +++ b/maixpy/static/search_index/index_2.json @@ -0,0 +1 @@ +{"/maixpy/doc/en/ai_model_converter/maixcam.html":{"title":"Convert ONNX Model to a Format Usable by MaixCAM / MaixPy (MUD)","content":" title: Convert ONNX Model to a Format Usable by MaixCAM / MaixPy (MUD) ## Introduction Models trained on a computer cannot be directly used by MaixCAM due to its limited hardware performance. Generally, we need to perform `INT8` quantization to reduce computation and convert the model into a format supported by MaixCAM. This article explains how to convert an ONNX model into a format that MaixCAM can use (MUD model). ## Model File Formats Supported by MaixCAM MUD (Model Universal Description file) is a model description file supported by MaixPy, used to unify model files across different platforms, making MaixPy code cross platform compatible. It is essentially a text file in `ini` format and can be edited with a text editor. Typically, a MUD file is accompanied by one or more actual model files. For MaixCAM, the actual model file is in `.cvimodel` format, with the MUD file providing some descriptive information. For example, a `YOLOv8` model consists of two files: `yolov8n.mud` and `yolov8n.cvimodel`. 
The former contains: ```ini [basic] type cvimodel model yolov8n.cvimodel [extra] model_type yolov8 input_type rgb mean 0, 0, 0 scale 0.00392156862745098, 0.00392156862745098, 0.00392156862745098 labels person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair dryer, toothbrush ``` This file specifies the model type as `cvimodel` and the model path relative to the MUD file as `yolov8n.cvimodel`. It also includes information such as preprocessing `mean` and `scale` (which should match the preprocessing method used during training), and `labels` representing the 80 categories for object detection. When using this model, place both files in the same directory. ## Preparing the ONNX Model Prepare your ONNX model and view it on [https://netron.app/](https://netron.app/) to ensure that the operators used in your model are supported by the conversion tool. The list of supported operators can be found in the **CVITEK_TPU_SDK Developer Guide.pdf** available from [Sophgo's TPU SDK](https://developer.sophgo.com/thread/473.html). ## Identify Appropriate Quantization Output Nodes Models usually have post processing nodes that are handled by the CPU. We need to strip these out as they can affect quantization quality and potentially cause quantization to fail. For example, in `YOLOv5`: ![YOLOv5 ONNX Model](../../assets/yolov5s_onnx.jpg) There are three `conv` layers, with subsequent calculations handled by the CPU. For quantization, use the outputs of these `conv` layers as the final outputs of the model. The output names in this case are `/model.24/m.0/Conv_output_0,/model.24/m.1/Conv_output_0,/model.24/m.2/Conv_output_0`. ## Setting Up the Model Conversion Environment The model conversion uses Sophgo's [https://github.com/sophgo/tpu mlir](https://github.com/sophgo/tpu mlir). We will install it in a Docker environment to avoid compatibility issues with the host machine. ### Install Docker Follow the [official Docker installation documentation](https://docs.docker.com/engine/install/ubuntu/). For example: ```shell # Install dependencies for Docker sudo apt get update sudo apt get install apt transport https ca certificates curl gnupg agent software properties common # Add the official Docker source curl fsSL https://download.docker.com/linux/ubuntu/gpg sudo apt key add sudo add apt repository \"deb [arch amd64] https://download.docker.com/linux/ubuntu $(lsb_release cs) stable\" # Install Docker sudo apt get update sudo apt get install docker ce docker ce cli containerd.io ``` ### Pull the Docker Image ```shell docker pull sophgo/tpuc_dev:latest ``` > If pulling from within China, you may experience slow speeds. Consider setting up a local mirror. You can search for instructions or refer to [Docker Proxy and Mirror Setup](https://neucrack.com/p/286). 
### Run the Container ```shell docker run privileged name tpu env v /home/$USER/data:/home/$USER/data it sophgo/tpuc_dev ``` This command starts a container named `tpu env`, mounting the `~/data` directory from the host to the container's `~/data`, enabling file sharing and path consistency. To start the container next time, use `docker start tpu env && docker attach tpu env`. ### Install tpu mlir Download the `whl` file from [GitHub](https://github.com/sophgo/tpu mlir/releases) and place it in the `~/data` directory. Install it in the container: ```shell pip install tpu_mlir*.whl # Replace with the downloaded file name ``` Running `model_transform.py` should display help information, indicating a successful installation. ## Writing the Conversion Script The conversion mainly involves two commands: `model_transform.py` and `model_deploy.py`. To simplify the process, create a script `convert_yolov5_to_cvimodel.sh`: ```shell #!/bin/bash set e net_name yolov5s input_w 640 input_h 640 # mean: 0, 0, 0 # std: 255, 255, 255 # mean # 1/std # mean: 0, 0, 0 # scale: 0.00392156862745098, 0.00392156862745098, 0.00392156862745098 mkdir p workspace cd workspace # convert to mlir model_transform.py \\ model_name ${net_name} \\ model_def ../${net_name}.onnx \\ input_shapes [[1,3,${input_h},${input_w}]] \\ mean \"0,0,0\" \\ scale \"0.00392156862745098,0.00392156862745098,0.00392156862745098\" \\ keep_aspect_ratio \\ pixel_format rgb \\ channel_format nchw \\ output_names \"/model.24/m.0/Conv_output_0,/model.24/m.1/Conv_output_0,/model.24/m.2/Conv_output_0\" \\ test_input ../dog.jpg \\ test_result ${net_name}_top_outputs.npz \\ tolerance 0.99,0.99 \\ mlir ${net_name}.mlir # export bf16 model # not use quant_input, use float32 for easy coding model_deploy.py \\ mlir ${net_name}.mlir \\ quantize BF16 \\ processor cv181x \\ test_input ${net_name}_in_f32.npz \\ test_reference ${net_name}_top_outputs.npz \\ model ${net_name}_bf16.cvimodel echo \"calibrate for int8 model\" # export int8 model run_calibration.py ${net_name}.mlir \\ dataset ../images \\ input_num 200 \\ o ${net_name}_cali_table echo \"convert to int8 model\" # export int8 model # add quant_input, use int8 for faster processing in maix.nn.NN.forward_image model_deploy.py \\ mlir ${net_name}.mlir \\ quantize INT8 \\ quant_input \\ calibration_table ${net_name}_cali_table \\ processor cv181x \\ test_input ${net_name}_in_f32.npz \\ test_reference ${net_name}_top_outputs.npz \\ tolerance 0.9,0.6 \\ model ${net_name}_int8.cvimodel ``` Key parameters include: `output_names`: Names of the output nodes we identified earlier. `mean, scale`: Preprocessing methods used during training. For instance, `YOLOv5` preprocesses the image by subtracting `mean` and dividing by `std`. In this example, `mean` is `0` and `std` is `255`, meaning the scale is `1/std`. Modify these according to your model's preprocessing method. `test_input`: The image used for testing during conversion. In this script, it's `../dog.jpg`, so ensure this image is placed in the same directory as the script. Replace it according to your model. `tolerance`: Allowed error margin before and after quantization. If errors during conversion indicate values lower than this threshold, it means the converted model might have significant deviation from the ONNX model. If acceptable, you can lower this threshold. Often, this requires optimizing the model and carefully examining post processing. `quantize`: The data type for quantization. Generally, `INT8` models are used on MaixCAM. 
Although a BF16 model is also converted here, INT8 is preferred for speed, while BF16 can be considered if INT8 conversion is not feasible or if precision is critical. `dataset`: The dataset used for quantization. For `YOLOv5`, it's a folder of images. Copy a subset of typical images from the coco dataset. Use `--input_num` to specify the number of images used (should be ≤ the actual number in the images directory). ## Running the Conversion Script Run the script with: ```shell chmod +x convert_yolov5_to_cvimodel.sh && ./convert_yolov5_to_cvimodel.sh ``` Wait for the conversion to complete. If errors occur, carefully review the previous explanations for potential issues with parameters or output layers. Upon successful conversion, the `workspace` folder will contain a `**_int8.cvimodel` file. ## Writing the MUD File Modify the MUD file according to your model. For `YOLOv5`, the MUD file looks like this. Change `labels` to match your trained model: ```ini [basic] type = cvimodel model = yolov5s.cvimodel [extra] model_type = yolov5 input_type = rgb mean = 0, 0, 0 scale = 0.00392156862745098, 0.00392156862745098, 0.00392156862745098 anchors = 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326 labels = person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair dryer, toothbrush ``` The `basic` section specifies the model file type and path, necessary for loading and running the model using the `maix.nn.NN` class in `MaixPy` or `MaixCDK`. The `extra` section varies based on the model. It includes parameters such as preprocessing, post-processing, and labels. For `YOLOv5`, you can download its model, copy, and modify it. If you need to support a new model not currently supported by `MaixPy`, define the `extra` parameters based on the model's preprocessing and post-processing requirements, then write the corresponding decoding class. If you prefer not to modify the `MaixPy` C++ source code, you can use the `maix.nn.NN` class to load the model and handle post-processing in Python, though this is less efficient. ## Writing Post-processing Code If you modify the `mud` file based on supported models, you can directly use the corresponding code in `MaixPy` or `MaixCDK`. If you need to support new models, design the `mud` file and write the preprocessing and post-processing code: 1. **Option 1:** Use `maix.nn.NN` in `MaixPy` to load the model, then use the `forward` or `forward_image` function to run the model and process the output with Python functions (see the sketch after this list). 2. **Option 2:** In `MaixCDK`, refer to [YOLOv5 source code](https://github.com/sipeed/MaixCDK/blob/71d5b3980788e6b35514434bd84cd6eeee80d085/components/nn/include/maix_nn_yolov5.hpp), add a new `hpp` file, and create a class to process your model. Modify all functions and class `@maixpy` annotations, compile the `MaixPy` project, and call the new class to run the model in `MaixPy`.
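Here is a rough sketch of Option 1 above, assuming the INT8 model produced earlier and its MUD file have been copied to the device. The file path and the `decode()` placeholder are illustrative only, and the exact `maix.nn.NN` signatures should be checked against the API documentation:

```python
# Option 1 sketch: load the cvimodel through its MUD file and run inference,
# leaving the YOLOv5 decoding (anchors, sigmoid, NMS) to your own Python code.
from maix import nn, camera

net = nn.NN('/root/models/yolov5s_int8.mud')   # hypothetical path on the device
cam = camera.Camera(640, 640)                  # match the model input size

def decode(outputs):
    # TODO: your own post-processing goes here
    pass

while True:
    img = cam.read()
    outputs = net.forward_image(img)  # depending on the API version you may need to pass mean/scale here
    decode(outputs)
```

For already supported models such as YOLOv5 this is unnecessary, since the ready-made decoding class wraps loading and post-processing for you.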
You can submit the source code (Pull Request) to the main `MaixPy` repository to contribute to the community and share new models on [MaixHub](https://maixhub.com/share) for rewards ranging from 30 to 2000 yuan based on quality!"},"/maixpy/doc/en/modules/rtc.html":{"title":"Using the RTC Module with MaixCAM MaixPy","content":" title: Using the RTC Module with MaixCAM MaixPy The MaixCAM Pro has an onboard RTC module, which will automatically synchronize the system time upon power-on and also sync time from the network. It will automatically re-sync when there are changes in network status. Therefore, under normal circumstances, you don’t need to manually operate the RTC; you can directly use the system’s time API to get the current time. If you do need to manually operate the RTC, please refer to [bm8653 RTC Module Usage](./bm8653.html). Before manually operating the RTC, you can disable automatic synchronization by deleting the RTC- and NTP-related services in the system’s `/etc/init.d` directory. > MaixCAM does not have an onboard RTC."},"/maixpy/doc/en/modules/acc.html":{"title":"Reading the Accelerometer and Attitude Calculation with MaixCAM MaixPy","content":" title: Reading the Accelerometer and Attitude Calculation with MaixCAM MaixPy ## Introduction to IMU The MaixCAM Pro has an onboard QMI8658 chip that integrates a three-axis gyroscope and a three-axis accelerometer. This chip can provide high-precision data on attitude, motion, and position, making it suitable for various applications that require accurate motion detection, such as drones, robots, game controllers, and virtual reality devices. The QMI8658 features low power consumption, high stability, and high sensitivity. Below is an introduction to using the IMU module to obtain attitude data. > MaixCAM does not have an onboard accelerometer, but you can connect one externally using an IIC driver. ## Using IMU in MaixPy Example: ```python from maix.ext_dev import imu i = imu.IMU(\"qmi8658\", mode = imu.Mode.DUAL, acc_scale = imu.AccScale.ACC_SCALE_2G, acc_odr = imu.AccOdr.ACC_ODR_8000, gyro_scale = imu.GyroScale.GYRO_SCALE_16DPS, gyro_odr = imu.GyroOdr.GYRO_ODR_8000) while True: data = i.read() print(\"\\n \") print(f\"acc x: {data[0]}\") print(f\"acc y: {data[1]}\") print(f\"acc z: {data[2]}\") print(f\"gyro x: {data[3]}\") print(f\"gyro y: {data[4]}\") print(f\"gyro z: {data[5]}\") print(f\"temp: {data[6]}\") print(\" \\n\") ``` Initialize the IMU object according to your needs, and then call `read()` to get the raw data read from the IMU. **If the `mode` parameter is set to `DUAL`, the data returned by `read()` will be `[acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z, temp]`. If `mode` is set to only one of ACC or GYRO, it will return only the corresponding `[x, y, z, temp]`. For example, if ACC is selected, `read()` will return `[acc_x, acc_y, acc_z, temp]`.** For detailed information on the IMU API, please refer to the [IMU API Documentation](../../../api/maix/ext_dev/imu.html)"},"/maixpy/doc/en/modules/bm8653.html":{"title":"MaixPy bm8653 Driver Instructions","content":" title: MaixPy bm8653 Driver Instructions update: date: 2024-08-27 author: iawak9lkm version: 1.0.0 content: Initial document ## Introduction to BM8653 BM8653 is a real-time clock (RTC) chip widely used in various electronic devices to provide accurate time and date information.
It features low power consumption and high precision, capable of continuing to operate via a backup battery when the device is powered off, ensuring the continuity and accuracy of time. ## Using BM8653 in MaixPy Using BM8653 in MaixPy is straightforward; you only need to know which I2C bus your platform's BM8653 is mounted on. The onboard BM8563 on the MaixCAM Pro is mounted on I2C 4. Example: ```python from maix import ext_dev, pinmap, err, time ### Enable I2C # ret = pinmap.set_pin_function(\"PIN_NAME\", \"I2Cx_SCL\") # if ret != err.Err.ERR_NONE: # print(\"Failed in function pinmap...\") # exit(-1) # ret = pinmap.set_pin_function(\"PIN_NAME\", \"I2Cx_SDA\") # if ret != err.Err.ERR_NONE: # print(\"Failed in function pinmap...\") # exit(-1) BM8653_I2CBUS_NUM = 4 rtc = ext_dev.bm8563.BM8563(BM8653_I2CBUS_NUM) ### 2020-12-31 23:59:45 t = [2020, 12, 31, 23, 59, 45] # Set time rtc.datetime(t) while True: rtc_now = rtc.datetime() print(f\"{rtc_now[0]}-{rtc_now[1]}-{rtc_now[2]} {rtc_now[3]}:{rtc_now[4]}:{rtc_now[5]}\") time.sleep(1) ``` If you are using the onboard BM8653 on the MaixCAM Pro, there is no need to enable I2C 4. The example demonstrates reading from and writing to the BM8653, setting or retrieving the current time. You can also use the following example to set the current time in the BM8653 to the system time, or set the current system time to the time in the BM8653. ```python from maix import ext_dev, pinmap, err, time ### Enable I2C # ret = pinmap.set_pin_function(\"PIN_NAME\", \"I2Cx_SCL\") # if ret != err.Err.ERR_NONE: # print(\"Failed in function pinmap...\") # exit(-1) # ret = pinmap.set_pin_function(\"PIN_NAME\", \"I2Cx_SDA\") # if ret != err.Err.ERR_NONE: # print(\"Failed in function pinmap...\") # exit(-1) BM8653_I2CBUS_NUM = 4 rtc = ext_dev.bm8563.BM8563(BM8653_I2CBUS_NUM) ### Update RTC time from system rtc.systohc() ### Update system time from RTC # rtc.hctosys() while True: rtc_now = rtc.datetime() print(f\"{rtc_now[0]}-{rtc_now[1]}-{rtc_now[2]} {rtc_now[3]}:{rtc_now[4]}:{rtc_now[5]}\") time.sleep(1) ``` **The underlying implementation of BM8653 is similar to the singleton pattern, ensuring that read and write operations on a single BM8653 are thread-safe. This means you can create BM8653 objects freely and read/write to BM8653 from any location without causing data race conditions.** The timetuple passed to the BM8653 object follows the format (year, month, day[, hour[, minute[, second]]]), meaning the first three parameters are mandatory, and any missing subsequent parameters will not modify the corresponding time. BM8653 guarantees that a returned timetuple being empty indicates an error, and if not empty, it will always contain a list of 6 elements: (year, month, day, hour, minute, second). For detailed information on the BM8653 API, please refer to the [BM8653 API Documentation](../../../api/maix/ext_dev/bm8563.html)"},"/maixpy/doc/en/modules/qmi8658.html":{"title":"MaixPy qmi8658 Driver Instructions","content":" title: MaixPy qmi8658 Driver Instructions update: date: 2024-08-27 author: iawak9lkm version: 1.0.0 content: Initial document ## Introduction to QMI8658 QMI8658 is an Inertial Measurement Unit (IMU) chip that integrates a three-axis gyroscope and a three-axis accelerometer. It provides high-precision attitude, motion, and position data, making it suitable for various applications requiring accurate motion detection, such as drones, robots, game controllers, and virtual reality devices. QMI8658 features low power consumption, high stability, and high sensitivity.
## Using QMI8658 in MaixPy Using QMI8658 in MaixPy is straightforward; you only need to know which I2C bus your platform's QMI8658 is mounted on. The onboard QMI8658 on the MaixCAM Pro is mounted on I2C 4. Example: ```python from maix import ext_dev, pinmap, err, time ### Enable I2C # ret = pinmap.set_pin_function(\"PIN_NAME\", \"I2Cx_SCL\") # if ret != err.Err.ERR_NONE: # print(\"Failed in function pinmap...\") # exit(-1) # ret = pinmap.set_pin_function(\"PIN_NAME\", \"I2Cx_SDA\") # if ret != err.Err.ERR_NONE: # print(\"Failed in function pinmap...\") # exit(-1) QMI8658_I2CBUS_NUM = 4 imu = ext_dev.qmi8658.QMI8658(QMI8658_I2CBUS_NUM, mode = ext_dev.qmi8658.Mode.DUAL, acc_scale = ext_dev.qmi8658.AccScale.ACC_SCALE_2G, acc_odr = ext_dev.qmi8658.AccOdr.ACC_ODR_8000, gyro_scale = ext_dev.qmi8658.GyroScale.GYRO_SCALE_16DPS, gyro_odr = ext_dev.qmi8658.GyroOdr.GYRO_ODR_8000) while True: data = imu.read() print(\"\\n \") print(f\"acc x: {data[0]}\") print(f\"acc y: {data[1]}\") print(f\"acc z: {data[2]}\") print(f\"gyro x: {data[3]}\") print(f\"gyro y: {data[4]}\") print(f\"gyro z: {data[5]}\") print(f\"temp: {data[6]}\") print(\" \\n\") ``` Initialize the QMI8658 object according to your needs, and then call `read()` to get the raw data read from the QMI8658. **If the `mode` parameter is set to `DUAL`, the data returned by `read()` will be `[acc_x, acc_y, acc_z, gyro_x, gyro_y, gyro_z, temp]`. If `mode` is set to only one of ACC or GYRO, it will return only the corresponding `[x, y, z, temp]`. For example, if ACC is selected, `read()` will return `[acc_x, acc_y, acc_z, temp]`.** For detailed information on the QMI8658 API, please refer to the [QMI8658 API Documentation](../../../api/maix/ext_dev/qmi8658.html)"},"/maixpy/doc/en/modules/tmc2209.html":{"title":"MaixPy tmc2209 Single-UART Driver Instructions","content":" title: MaixPy tmc2209 Single-UART Driver Instructions update: date: 2024-08-21 author: iawak9lkm version: 1.0.0 content: Initial document ## Introduction to TMC2209 TMC2209 is a stepper motor driver chip produced by the German company Trinamic. It is designed specifically for 2-phase stepper motors, featuring low power consumption, high efficiency, and excellent noise suppression capabilities. TMC2209 supports currents up to 2.8A, making it suitable for various applications such as 3D printers, CNC machines, robots, etc. ## Using TMC2209 to Drive Stepper Motors in MaixPy * Ensure that your stepper motor is a 2-phase 4-wire type, and confirm the step angle of your motor (step_angle), the microstepping resolution you need (micro_step), and the distance the load moves per revolution of the motor (screw_pitch or round_mm). This information will help us configure the driver parameters later. * Generally, TMC2209 driver boards on the market have the following pins (if you find it troublesome, you can purchase our TMC2209 driver board, link [not yet available, stay tuned]): ``` EN VM MS1 GND MS2 2B RX 2A TX 1A NC 1B STEP VDD DIR GND ``` `EN`: EN is the enable pin. Connect this pin to `GND` to enable TMC2209 hardware-wise. `MS1`: MS1 is one of the microstepping selection pins, used in conjunction with the MS2 pin to set the microstepping mode of the stepper motor. `MS2`: MS2 is one of the microstepping selection pins, used in conjunction with the MS1 pin to set the microstepping mode of the stepper motor. **This driver program only uses the UART mode of TMC2209. In this mode, the two microstep selection pins are respectively `AD0` (originally `MS1`) and `AD1` (originally `MS2`).
The level states of these two pins together form the UART address of the TMC2209, ranging from 0x00 to 0x03.** `TX`: TX is the serial communication transmit pin, used for communication with an external microcontroller via UART. `RX`: RX is the serial communication receive pin, used for communication with an external microcontroller via UART. When using both `RX` and `TX` on TMC2209, ensure there is a 1K ohm resistor between the `RX` of the TMC2209 driver board and the `TX` of the main control chip. Otherwise, communication data anomalies may occur. `NC`: NC is the no-connect pin, indicating that this pin does not need to be connected during normal use. `STEP`: STEP is the step signal input pin. Each pulse received advances the stepper motor by one step angle. Since this driver is purely UART driven, this pin does not need to be connected and can be left floating. `DIR`: DIR is the direction signal input pin, used to control the rotation direction of the stepper motor. When DIR is high, the motor rotates clockwise; when DIR is low, the motor rotates counterclockwise. Since this driver is purely UART driven, this pin does not need to be connected and can be left floating. `VM`: VM is the power input pin, connected to the positive terminal of the stepper motor's power supply. `GND`: GND is the ground pin, connected to the negative terminal of the power supply. `2B`, `2A`, `1B`, `1A`: These pins are the phase output pins of the stepper motor, connected to the two phases of the motor's coils. `VDD`: VDD is the logic power input pin, providing power to the internal logic circuits of the chip. * Using TMC2209 Driver in MaixPy As an example, let's consider a stepper motor with a step angle of 18, a microstep resolution of 256, and a screw pitch of 3mm: ```python from maix import pinmap, ext_dev, err, time port = \"/dev/ttyS1\" uart_addr = 0x00 uart_baudrate = 115200 step_angle = 18 micro_step = 256 screw_pitch = 3 speed = 6 use_internal_sense_resistors = True run_current_per = 100 hold_current_per = 100 if port == \"/dev/ttyS1\": ret = pinmap.set_pin_function(\"A19\", \"UART1_TX\") if ret != err.Err.ERR_NONE: print(\"Failed in function pinmap...\") exit(-1) ret = pinmap.set_pin_function(\"A18\", \"UART1_RX\") if ret != err.Err.ERR_NONE: print(\"Failed in function pinmap...\") exit(-1) slide = ext_dev.tmc2209.ScrewSlide(port, uart_addr, uart_baudrate, step_angle, micro_step, screw_pitch, speed, use_internal_sense_resistors, run_current_per, hold_current_per) def reset_callback() -> bool: if 2 > 1: # An event occurs (e.g., a sensor is triggered), # indicating that the slide has moved to the boundary and the motor needs to stop. print(\"Reset finish...\") return True # Not occurred, no need to stop the motor. return False def move_callback(per: float) -> bool: # per is the percentage of the current distance moved by move() # out of the total distance required for the current move(), ranging from 0 to 100. print(f\"Slide moving... {per}\") if per > 50: # Example: Stop moving when 50% of the total distance for the current move() has been covered. print(f\"{per} > 50%, stop.\") return True return False slide.reset(reset_callback) slide.move(screw_pitch*2, -1, move_callback) slide.move(-screw_pitch) while True: slide.move(screw_pitch*2) slide.move(-(screw_pitch*2)) time.sleep_ms(100) ``` First, ensure that UART1 is enabled using `pinmap` in the program. Then create a `ScrewSlide` object, using the internal reference resistor by default, and defaulting to 100% of the motor's running current and 100% of the motor's holding current.
These parameters may need to be adjusted according to your motor. Next, the routine declares a reset callback function and a move callback function, which are respectively passed into the `reset()` function and `move()` function. The `reset()` and `move()` functions call the callback functions periodically to confirm whether the motor needs to be stopped immediately (when the callback function returns True). Both the `move()` and `reset()` functions are blocking functions, and they will only stop the motor and return when the callback function returns True (or when the specified length of movement is completed in the case of `move()`). ## Using the tmc2209 Driver for Stepper Motors with a Constant Load in MaixPy **!!! A screw-slide stepper motor should not be treated as a constant-load stepper motor: screw slides use limit stops so that the direction of motion of the load on the rod is known, and the motor regularly runs into these stops during operation, so its load is not constant. Apply the same reasoning to judge whether other setups count as constant-load stepper motors.** In some application scenarios, the load on the stepper motor is constant throughout, and only increases when it hits an edge and stalls. In such cases, you can use the `Slide` class instead of the `ScrewSlide` class, where `Slide` has stall detection functionality. Using `ScrewSlide` is also feasible; it does not have stall detection but is more flexible. Please choose between these two classes based on the usage scenario; this section only discusses the `Slide` class. * Implementation Principle The TMC2209 has an internal register `SG_RESULT`, which stores data proportional to the remaining torque of the motor. If the motor load is constant, the variation in the register value is very small. When the motor stalls, the register value will rapidly decrease and maintain a lower value. By finding the running average value and stall average value of this register for the constant-load motor, you can measure whether the motor is stalling at any given moment. * Obtaining the Average Value of the `SG_RESULT` Register The `maix.ext_dev.tmc2209` module provides a function to obtain and save this average value, `maix.ext_dev.tmc2209.slide_scan`. Example: ```python from maix import ext_dev, pinmap, err port = \"/dev/ttyS1\" uart_addr = 0x00 uart_baudrate = 115200 step_angle = 1.8 micro_step = 256 round_mm = 60 speed = 60 use_internal_sense_resistors = True run_current_per = 100 hold_current_per = 100 if port == \"/dev/ttyS1\": ret = pinmap.set_pin_function(\"A19\", \"UART1_TX\") if ret != err.Err.ERR_NONE: print(\"Failed in function pinmap...\") exit(-1) ret = pinmap.set_pin_function(\"A18\", \"UART1_RX\") if ret != err.Err.ERR_NONE: print(\"Failed in function pinmap...\") exit(-1) ext_dev.tmc2209.slide_scan(port, uart_addr, uart_baudrate, step_angle, micro_step, round_mm, speed, True, True, run_current_per, hold_current_per, conf_save_path = './slide_scan_example.bin', force_update = False) ``` After configuring the serial port and driver parameters, call `slide_scan`. The last parameter of `slide_scan`, `force_update`, determines the behavior when the configuration file already exists: > If `force_update` is True, the old configuration will be overwritten with the new configuration.
> > If `force_update` is False, the running average value will be updated to the average of the new and old values, and the stall average value will be updated to the larger of the new and old stall average values (for example, if a slide has left and right boundaries, and the left boundary stall average value is less than the right boundary stall average value, meaning the right boundary stalls more easily than the left boundary, then the stall average value that triggers most easily will be saved). After running this program, the stepper motor will continue to rotate forward until it encounters a stall. Wait about 300 ms, then stop the program. The program will record the running average value of the `SG_RESULT` register and the stall average value to `conf_save_path`. Subsequently, the `Slide` class can load this configuration file to stop the motor when a stall is detected. * Verifying the Configuration File Values You may wonder if this configuration is actually usable. The `maix.ext_dev.tmc2209` module provides a function to test this configuration file, `slide_test`. First, ensure the motor can reach a stalled state, then modify the parameters to match those used when calling `slide_scan`, and execute the following code. Example: ```python from maix import ext_dev, pinmap, err port = \"/dev/ttyS1\" uart_addr = 0x00 uart_baudrate = 115200 step_angle = 1.8 micro_step = 256 round_mm = 60 speed = 60 use_internal_sense_resistors = True run_current_per = 100 hold_current_per = 100 if port == \"/dev/ttyS1\": ret = pinmap.set_pin_function(\"A19\", \"UART1_TX\") if ret != err.Err.ERR_NONE: print(\"Failed in function pinmap...\") exit(-1) ret = pinmap.set_pin_function(\"A18\", \"UART1_RX\") if ret != err.Err.ERR_NONE: print(\"Failed in function pinmap...\") exit(-1) ext_dev.tmc2209.slide_test(port, uart_addr, uart_baudrate, step_angle, micro_step, round_mm, speed, True, True, run_current_per, hold_current_per, conf_save_path = './slide_scan_example.bin') ``` The motor will stop rotating instantly upon encountering a stall, and the program will end accordingly. The stall stop logic for `Slide.move()` and `Slide.reset()` is the same. * Using `Slide` The approach to using `Slide` is essentially the same as using `ScrewSlide`, except that `Slide` removes the callback function and adds stall stop logic. If a configuration file is not passed when using `Slide`, it can still be used: the stall detection threshold is then the running average at the start of motor operation multiplied by `Slide.stop_default_per()`/100, and the motor stops when the recent running average falls below this value. You can obtain and modify this percentage through `Slide.stop_default_per()`. ```python from maix import pinmap, ext_dev, err, time port = \"/dev/ttyS1\" uart_addr = 0x00 uart_baudrate = 115200 step_angle = 1.8 micro_step = 256 round_mm = 60 speed = 60 use_internal_sense_resistors = True run_current_per = 100 hold_current_per = 100 if port == \"/dev/ttyS1\": ret = pinmap.set_pin_function(\"A19\", \"UART1_TX\") if ret != err.Err.ERR_NONE: print(\"Failed in function pinmap...\") exit(-1) ret = pinmap.set_pin_function(\"A18\", \"UART1_RX\") if ret != err.Err.ERR_NONE: print(\"Failed in function pinmap...\") exit(-1) slide = ext_dev.tmc2209.Slide(port, uart_addr, uart_baudrate, step_angle, micro_step, round_mm, speed, cfg_file_path = \"./slide_conf.bin\") slide.reset() slide.move(60) slide.move(-60) ``` ## Notes **This driver is implemented purely through UART, offering the advantage of requiring fewer pins to drive up to 4 motors with relatively high precision.
The downside is that it is not suitable for applications requiring extremely high precision.** Known Issues: * Do not use UART0 of MaixCAM as the driver's serial port, as it may cause MaixCAM to fail to boot properly. **!!! If you find any bugs, we welcome you to submit a PR to report them.** ## Disclaimer This motor driver program (hereinafter referred to as the \"Program\") is developed by [Sipeed] based on the BSD 3 open source license repository [janelia arduino/TMC2209](https://github.com/janelia arduino/TMC2209). The Program is intended for learning and research purposes only and is not guaranteed to work under all environmental conditions. Users assume all risks associated with the use of this Program. [Sipeed] shall not be liable for any losses or damages arising from the use or inability to use the Program, including but not limited to direct, indirect, incidental, special, punitive, or consequential damages. Users should conduct sufficient testing and validation to ensure that the Program meets their specific requirements and environment before using it in practical applications. [Sipeed] makes no express or implied warranties regarding the accuracy, reliability, completeness, or suitability of the Program. Users are responsible for complying with all applicable laws and regulations when using the Program and ensuring that they do not infringe upon the legal rights of any third parties. [Sipeed] shall not be liable for any consequences resulting from users' violation of laws or infringement of third party rights. The interpretation of this disclaimer is reserved by [Sipeed], who also reserves the right to modify this disclaimer at any time."},"/maixpy/doc/en/modules/thermal_cam.html":{"title":"Using Thermal Infrared Image Sensors with MaixCAM and MaixPy","content":"# Using Thermal Infrared Image Sensors with MaixCAM and MaixPy Currently, the official hardware product is not yet available. If you only need low resolution, you can purchase a serial or I2C module from online platforms like Taobao and drive it yourself. This document will be updated when the official high resolution module is released. For thermal infrared camera modules, you might consider options such as [K210 + MLX90640 Infrared Thermal Imager](https://neucrack.com/p/189) or [Heimann HTPA 32x32d Thermal Infrared](https://neucrack.com/p/199)."},"/maixpy/doc/en/modules/temp_humi.html":{"title":"Reading Temperature and Humidity Sensors with MaixCAM MaixPy","content":" title: Reading Temperature and Humidity Sensors with MaixCAM MaixPy ## Introduction By attaching a temperature and humidity sensor module to MaixCAM, you can easily read the environmental temperature and humidity. This example uses the `Si7021` sensor, which can be driven via `I2C`. The complete code is available at [MaixPy/examples/sensors/temp_humi_si7021.py](https://github.com/sipeed/MaixPy/blob/main/examples/sensors/temp_humi_si7021.py). Note that the system image needs to be version `> 2024.6.3_maixpy_v4.2.1`."},"/maixpy/doc/en/modules/tof.html":{"title":"Using TOF Modules for Distance Measurement and Terrain Detection with MaixCAM and MaixPy","content":"# Using TOF Modules for Distance Measurement and Terrain Detection with MaixCAM and MaixPy Sipeed offers [two additional TOF modules](https://wiki.sipeed.com/hardware/zh/maixsense/index.html) that can be used for distance measurement. 
These modules can be purchased and used with serial communication for your projects."},"/maixpy/doc/en/source_code/build.html":{"title":"MaixCAM MaixPy develop source code guide","content":" title: MaixCAM MaixPy develop source code guide ## Get source code ```shell mkdir -p ~/maix cd ~/maix git clone https://github.com/sipeed/MaixPy ``` ## Getting MaixCDK Source Code The MaixPy project depends on MaixCDK. You need to clone it first and place it in a directory on your computer (do not place it under the MaixPy directory). ```shell cd ~/maix git clone https://github.com/sipeed/MaixCDK ``` Then, you need to set the environment variable MAIXCDK_PATH to specify the path to MaixCDK, which can be added in ~/.bashrc or ~/.zshrc (depending on your shell): ```shell export MAIXCDK_PATH=~/maix/MaixCDK ``` Only after successfully setting the environment variable can MaixPy locate the MaixCDK source code. ## Build and pack to wheel ```shell cd ~/maix/MaixPy python setup.py bdist_wheel maixcam ``` `maixcam` can be replaced with other board configs; see the `platform_names` variable in [setup.py](https://github.com/sipeed/MaixPy/blob/main/setup.py). After the build succeeds, you will find the wheel file in the `dist` directory; use `pip install -U MaixPy****.whl` on your device to install or upgrade. > `python setup.py bdist_wheel maixcam --skip-build` will not execute the build command and only pack the wheel, so you can use `maixcdk menuconfig` and `maixcdk build` first to customize the build. > Additionally, if you are debugging APIs and need to install frequently, using pip can be slow. You can compile and then copy the maix directory directly to the /usr/lib/python3.11/site-packages directory on your device to overwrite the old files. ## Build manually ```shell maixcdk build ``` ## Run tests after modifying source code * First, build the source code: ```shell maixcdk build ``` * If building for the PC itself (platform `linux`): execute `./run.sh your_test_file_name.py` to run a Python script. ```shell cd test ./run.sh examples/hello_maix.py ``` * If cross-compiling for the board: * The fastest way is to copy the `maix` dir to the device's `/usr/lib/python3.11/site-packages/` directory, then run the script on the device. * Or pack the wheel and install it on the device with `pip install -U MaixPy****.whl`, then run the script on the device. ## Preview documentation locally Documentation is in the [docs](https://github.com/sipeed/MaixPy/tree/main/docs) directory and uses `Markdown` format; you can use [teedoc](https://github.com/teedoc/teedoc) to generate the web version of the documentation. The API doc is generated when building the MaixPy firmware, **if you don't build MaixPy, the API doc will be empty**. ```shell pip install teedoc -U cd docs teedoc install -i https://pypi.tuna.tsinghua.edu.cn/simple teedoc serve ``` Then visit `http://127.0.0.1:2333` to preview the documentation in a web browser. ## For developers who want to contribute See [MaixPy develop source code guide](./contribute.html) If you encounter any problems when using the source code, please refer to the [FAQ](./faq.html) first."},"/maixpy/doc/en/source_code/add_c_module.html":{"title":"Adding a C/C++ Module to MaixCAM MaixPy","content":" title: Adding a C/C++ Module to MaixCAM MaixPy ## Introduction Sometimes you need to execute a function efficiently, and Python's speed is insufficient. In such cases, you can implement the function using C/C++ or other compiled languages.
## General Function Wrapping If the function you want to wrap does not depend on other features of MaixPy, you can directly use the general methods for adding modules to Python using C/C++. You can search for methods like `ffi` or `ctypes` on the internet. > PRs are welcome to add more methods. ## If Your Module Needs to Depend on Other MaixPy Basic APIs ### Method 1 Directly modify the MaixPy firmware and then compile it. Refer to [View MaixPy API Source Code](../basic/view_src_code.html). This method is the simplest and fastest. If the code is well packaged, it can be merged into the official repository (by submitting a PR). * Follow [Compiling MaixPy Source Code](./build.html) to get the `dist/***.whl` installation package. * Send the `.whl` package from the `dist` directory to the device, then run the code `import os; os.system(\"pip install /root/xxxxx.whl\")` (replace the path accordingly). * If installing the `.whl` package is too slow during debugging, you can use `maixcdk build` to compile and then use `scp -r maix_xxx root@10.228.104.1:/usr/lib/python3.11/site-packages` to directly copy it to the device system to overwrite the package. Adjust the package name and device IP as needed. * Once you have finished debugging and feel that the features you added are valuable, consider merging them into the official repository. You can learn how to do this by searching for keywords like \"github submit PR\" on search engines. Modifying the code: As described in [View MaixPy API Source Code](../basic/view_src_code.html), you can view and modify the source code, add C++ functions, and include comments. After compiling, you can call them in MaixPy. It's very simple. For example: ```cpp namespace maix::test { /** * My function, add two integers. * @param a arg a, int type * @param b arg b, int type * @return int type, a + b * @maixpy maix.test.add */ int add(int a, int b); } ``` Yes, simply write a C++ function. Note the `@maixpy` comment. During compilation, a Python function will be automatically generated. It's that simple! Then you can call the function with `maix.test.add(1, 2)`. ### Method 2 Create a MaixPy module project based on an engineering template. This method is suitable for adding a package without modifying the MaixPy source code and still using MaixPy (MaixCDK) APIs. The method is as follows: * First, [compile MaixPy source code](./build.html) to ensure the compilation environment is set up correctly. * Copy the [MaixPy/tools/maix_module](https://github.com/sipeed/MaixPy/tree/main/tools/maix_module) project template to a new directory. It can be in the same directory as `MaixPy`. For example, copy all files and directories to the `maix_xxx` directory. * In the `maix_xxx` directory, run `python init_files.py` in the terminal to initialize the project files. * Change the project name: Modify the `module_name.txt` file to the desired module name, starting with `maix_`. This makes it easier for others to find your project on [pypi.org](https://pypi.org) or [github.com](https://github.com). * Run `python setup.py bdist_wheel linux` in the project root directory to build for the computer. * After building, you can directly run `python -c \"import maix_xxx; maix_xxx.basic.print('Li Hua')\"` in the project root directory to test your module functions. * Run `python setup.py bdist_wheel maixcam` to build the package for `MaixCAM`. Note that the code prompt file (pyi file) can only be generated when building for the `linux` platform.
Therefore, before releasing, first build for the `linux` platform to generate the code prompt file, then execute this command to generate the package for the `MaixCAM` platform. * Send the `.whl` package from the `dist` directory to the device, then run `import os; os.system(\"pip install /root/xxxxx.whl\")` (replace the path accordingly). * If installing the `.whl` package is too slow during debugging, you can use `maixcdk build` to compile and then use `scp -r maix_xxx root@10.228.104.1:/usr/lib/python3.11/site-packages` to directly copy it to the device system to overwrite the package. Adjust the package name and device IP as needed. * Once you have debugged your code, consider open-sourcing it on [github.com](https://github.com) and uploading it to [pypi.org](https://pypi.org). You can refer to the official documentation or search for tutorials on how to upload. Generally, you need to run `pip install twine` and then `twine upload dist/maix_xxx***.whl`. After completing this, feel free to share your achievements on [maixhub.com/share](https://maixhub.com/share)! Modifying the code: As described in [View MaixPy API Source Code](../basic/view_src_code.html), add source files in the `components/maix/include` and `components/maix/src` directories, add C++ functions, and include comments. After compiling, you can call them directly. It's very simple. For example: ```cpp namespace maix_xxx::test { /** * My function, add two integers. * @param a arg a, int type * @param b arg b, int type * @return int type, a + b * @maix_xxx maix_xxx.test.add */ int add(int a, int b); } ``` Yes, simply write a C++ function. Note the `@maix_xxx` comment. During compilation, a Python function will be automatically generated. It's that simple! Then you can call the function with `maix_xxx.test.add(1, 2)`."},"/maixpy/doc/en/source_code/contribute.html":{"title":"Contributing to MaixCAM MaixPy Documentation Modification and Code Contribution","content":" title: Contributing to MaixCAM MaixPy Documentation Modification and Code Contribution ## Contributing to MaixPy Documentation Modification * Click the \"Edit this page\" button in the top right corner of the documentation you want to modify to enter the GitHub source documentation page. * Make sure you are logged in to your GitHub account. * Click the pencil icon in the top right corner of the GitHub preview documentation page to modify the content. * GitHub will prompt you to fork a copy to your own repository. Click the \"Fork\" button. > This step forks the MaixPy source code repository to your own account, allowing you to freely modify it. * Modify the documentation content, then fill in the modification description at the bottom of the page, and click \"Commit changes\". * Then find the \"Pull requests\" button in your repository and click to create a new Pull request. * In the pop-up page, fill in the modification description and click \"Submit Pull request\". Others and administrators can then see your modifications on the [Pull requests page](https://github.com/sipeed/MaixPy/pulls). * Wait for the administrator to review and approve, and your modifications will be merged into the MaixPy source code repository. * After the merge is successful, the documentation will be automatically updated to the [MaixPy official documentation](https://wiki.sipeed.com/maixpy). > Due to CDN caching, it may take some time to see the update. For urgent updates, you can contact the administrator for manual refreshing.
> You can also visit [en.wiki.sipeed.com/maixpy](https://en.wiki.sipeed.com/maixpy) to view the GitHub Pages service version, which is updated in real time without caching. ## Contributing to MaixPy Code Contribution * Visit the MaixPy code repository address: [github.com/sipeed/MaixPy](https://github.com/sipeed/MaixPy) * Before modifying the code, it is best to create an [issue](https://github.com/sipeed/MaixPy/issues) first, describing the content you want to modify to let others know your ideas and plans, so that everyone can participate in the modification discussion and avoid duplication of effort. * Click the \"Fork\" button in the top right corner to fork a copy of the MaixPy code repository to your own account. * Then clone a copy of the code from your account to your local machine. * After modifying the code, commit it to your repository. * Then find the \"Pull requests\" button in your repository and click to create a new Pull request. * In the pop up page, fill in the modification description and click \"Submit Pull request\". Others and administrators can then see your modifications on the [Pull requests page](https://github.com/sipeed/MaixPy/pulls). * Wait for the administrator to review and approve, and your modifications will be merged into the MaixPy source code repository. > Note that most of the MaixPy code is automatically generated from [MaixCDK](https://github.com/sipeed/MaixCDK), so if you modify the C/C++ source code, you may need to modify this repository first."},"/maixpy/doc/en/source_code/faq.html":{"title":"MaixCAM MaixPy Source Code FAQ","content":"MaixCAM MaixPy Source Code FAQ ## subprocess.CalledProcessError: Command '('lsb_release', ' a')' returned non zero exit status 1. Edit `/usr/bin/lsb_release` as root, change the first line from `#!/usr/bin/python3` to `python3`. Then compile again and it should work. ## ImportError: arg(): could not convert default argument 'format: maix::image::Format' in method '.__init__' into a Python object (type not registered yet?) Pybind11 need you to register `image::Format` first, then you can use it in `camera::Camera`, to we must fist define `image::Format` in generated `build/maixpy_wrapper.cpp` source file. To achieve this, edit `components/maix/headers_priority.txt`, the depended on should be placed before the one use it. e.g. ``` maix_image.hpp maix_camera.hpp ``` ## /usr/bin/ld: /lib/libgdal.so.30: undefined reference to `std::condition_variable::wait(std::unique_lock&)@GLIBCXX_3.4.30' collect2: error: ld returned 1 exit status This issue commonly arises when building for Linux and using a conda environment, due to some libraries in the conda environment having compilation parameter problems. The solution is to not use conda, or to individually locate the problematic library within conda and replace it with the system's version or simply delete it (the system will then locate the necessary library)."},"/maixpy/doc/en/source_code/maixcdk.html":{"title":"MaixCAM Switching to MaixCDK for C/C++ Application Development","content":" title: MaixCAM Switching to MaixCDK for C/C++ Application Development In addition to developing with MaixPy, there is also a corresponding C/C++ SDK available, called [MaixCDK](https://github.com/sipeed/MaixCDK). ## Introduction to MaixCDK MaixPy is built on top of MaixCDK, and most of MaixPy's APIs are automatically generated based on MaixCDK's APIs. Therefore, any functionality available in MaixPy is also included in MaixCDK. 
If you are more familiar with C/C++ programming or require higher performance, you can use MaixCDK for development. ## Using MaixCDK The MaixCDK code repository is located at [github.com/sipeed/MaixCDK](https://github.com/sipeed/MaixCDK), where you can find the MaixCDK code and documentation."},"/maixpy/doc/en/README_no_screen.html":{"title":"MaixCAM MaixPy Screenless Edition Quick Start","content":" title: MaixCAM MaixPy Screenless Edition Quick Start ## About This Document As mentioned in the [Quick Start Guide](./index.html), it is **strongly recommended** to purchase the version with a screen for development, as it provides a better development experience, including using the built in APP, accessing apps from the MaixHub App Store, and easier debugging (e.g., common settings can be completed directly by touching the screen interface, and images can be viewed in real time). However, if you are unable to purchase the version with a screen or require a screenless version for mass production, please refer to this document. ## Getting a MaixCAM Device * **MaixCAM**: Purchase it from the [Sipeed Taobao Store](https://item.taobao.com/item.htm?id 784724795837) or the [Sipeed Aliexpress Store](https://www.aliexpress.com/store/911876460). You can find more information about MaixCAM [here](https://wiki.sipeed.com/maixcam). ## Initial Setup ### Preparing the TF Image Card and Inserting it into the Device If your package includes a TF card, it already contains the factory image. If the TF card was not installed in the device during manufacturing, carefully open the case (be careful not to disconnect any cables inside) and insert the TF card. Additionally, since the factory firmware may be outdated, it is **essential** to update the system to the latest version by following the [Upgrade and Flash System](https://wiki.sipeed.com/maixpy/doc/zh/basic/os.html) instructions; otherwise, some applications and APIs may not function properly. If you did not purchase a TF card, you will need to flash the system onto your own TF card. Follow the [Upgrade and Flash System](./basic/os.html) guide, then install the card into the board. ### Powering On Use a `Type C` data cable to connect the `MaixCAM` device to provide power and wait for the device to boot. **Firstly**: Ensure that the USB cable is of good quality and that the USB port on your computer is reliable (power supply > 5V 500mA, normal interference resistance). The first boot may take about 20 seconds, after which your computer will detect one or two virtual network adapters (visible in your computer's network manager). If the virtual network adapter is not detected: * Ensure that you purchased the TF card package. If you have confirmed that the TF card is inserted into the device, try [updating to the latest system](./basic/os.html). * If you did not purchase the TF card package, you need to flash the latest system onto the TF card following the [Upgrade and Flash System](./basic/os.html) guide. * Check if the USB connection is loose and whether the USB cable is of good quality; you can try using a better quality cable. * Ensure that the USB port provides sufficient power. You can try another USB port or even another computer if possible. ## Preparing to Connect the Computer and Device To enable communication between your computer (PC) and the device (MaixCAM), they need to be on the same local area network. Two methods are provided; we will first use Method 1: * **Method 1**: Wired connection. 
The device connects to the computer via a USB cable, and it will be recognized as a virtual USB network adapter, placing it on the same local area network as the computer. If you encounter issues, refer to the [FAQ](./faq.html) for common problems. .. details::Method 2 involves driver installation on different computer systems: :open: true There are two default USB virtual network adapter drivers (NCM and RNDIS) to meet the needs of different systems: * **Windows**: All Windows systems will automatically install the RNDIS driver. Only Win11 will automatically install the NCM driver. **Either one that works is fine** (NCM is faster than RNDIS). * Open Task Manager > Performance, and you will see a virtual Ethernet connection with an IP, for example, `10.131.167.100` is the computer's IP, and the device's IP is the same except the last digit changed to `1`, i.e., `10.131.167.1`. If it's Win11, you will see two virtual network adapters; you can use any one of the IPs. * Additionally, you can open the `Device Manager` on your computer (search `Device Manager` in the search bar). If the RNDIS and NCM drivers are correctly installed, **either one that works is fine**: ![RNDIS ok](../../static/image/rndis_windows.jpg) ![NCM ok](../../static/image/windows_ncm_ok.png) * **Linux**: No extra setup is required. Just plug in the USB cable. Use `ifconfig` or `ip addr` to see `usb0` and `usb1` network adapters, and you can use either IP. **Note** that the IP, for example, `10.131.167.100`, is the computer's IP, and the device's IP is the same except the last digit changed to `1`, i.e., `10.131.167.1`. * **MacOS**: Check the `usb` network adapter in `System Settings` > `Network`. **Note** that the IP, for example, `10.131.167.100`, is the computer's IP, and the device's IP is the same except the last digit changed to `1`, i.e., `10.131.167.1`. * **Method 2**: Wireless connection. The device connects to the same router or WiFi hotspot that the computer is connected to (if you experience screen lag or high latency with WiFi, use a wired connection). There are two methods for connecting to a wireless hotspot: * Modify the `wifi.ssid` and `wifi.pass` files in the TF card's boot partition and reboot to connect. Modification methods: * If you are familiar with SSH, you can connect to the device via SSH (if wired connection is available) and modify the files in the `/boot` directory. * You can also enter upgrade mode as described in the previous section, after which a USB drive will appear on the computer. Modify the files in it, ensuring to safely eject the drive before rebooting. * You can also use a card reader, and a USB drive will appear on the computer. Modify the `wifi.ssid` and `wifi.pass` files in it, ensuring to safely eject the drive before rebooting. * If the wired connection is already available, you can follow the next step and use MaixVision to run code. Modify the `tools/wifi_connect.py` script with your SSID and PASSWORD, then run it. ## Preparing the Development Environment * First, ensure that the computer and device are on the same local area network. * Download and install [MaixVision](https://wiki.sipeed.com/maixvision). * Use a Type C cable to connect the device and computer, open MaixVision, and click the `Connect` button at the bottom left. The software will automatically search for the device. Wait a moment until the device appears, then click the device to connect. If **the device is not detected**, you can find solutions in the [FAQ](./faq.html). 
Here is a video tutorial on using MaixVision: ### Connecting to the Internet The first run requires a network connection to activate the device and install the runtime library. If you do not have a router, you can use your phone to create a hotspot. In MaixVision, modify the `tools/wifi_connect.py` script with your SSID and PASSWORD, then run it. For other WiFi connection methods, see the previous section. ### Upgrading the Runtime Library **This step is very important!!!** If this step is not completed, other applications and features may not function properly (e.g., crashing). * First, ensure that the WiFi connection from the previous step is completed and that you have an IP address with internet access. * Run the `tools/install_runtime.py` script from the MaixVision examples to install the latest runtime library. If `Request failed` or a similar error appears, please check if the network is connected and able to access the internet. If the problem persists, take a photo and contact customer service for assistance. ## Running Examples Click on the `Example Code` on the left side of MaixVision, select an example, and click the `Run` button at the bottom left to send the code to the device for execution. For example: * `hello_maix.py`, click the `Run` button, and you will see messages printed by the device in the MaixVision terminal, and an image will appear in the top right corner. * `camera_display.py`, this example opens the camera and displays the camera feed on the screen. ```python from maix import camera, display, app disp = display.Display() # Create a display object and initialize the screen cam = camera.Camera(640, 480) # Create a camera object, manually setting the resolution to 640x480, and initialize the camera while not app.need_exit(): # Keep looping until the program exits (can exit by pressing the device's function button or clicking the stop button in MaixVision) img = cam.read() # Read the camera feed into the img variable, print(img) can be used to print img details disp.show(img) # Display img on the screen ``` * `yolov5.py` detects objects in the camera feed, draws bounding boxes around them, and displays them on the screen. It supports detecting 80 different objects. For more details, see [YOLOv5 Object Detection](./vision/yolov5.html) (a rough sketch of what this example does is shown at the end of this section). You can try other examples on your own. > If you experience image lag when using the camera examples, it may be due to poor network connection, low quality USB cable, or poor USB port quality on the host. Try changing the connection method or using a different cable, USB port, or computer. ## Installing Applications on the Device The above steps allow you to run code on the device. Once `MaixVision` is disconnected, the code will stop running. If you want the code to appear in the boot menu, you can package it as an application and install it on the device. Click the install application button at the bottom left of `MaixVision`, fill in the application information, and it will be installed on the device. You will then see the application on the device. You can also choose to package the application and share it on the [MaixHub App Store](https://maixhub.com/app). > The default examples do not include an explicit exit function. Press the device's function button to exit the application (for MaixCAM, it is the user button). If you want the program to start automatically at boot, you can modify and run the `tools/set_autostart.py` script.
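For reference, the `yolov5.py` example mentioned above boils down to roughly the following sketch (the model path is a placeholder and argument names may differ slightly between MaixPy versions, so treat the example file shipped with MaixVision as the authoritative version):

```python
# Rough outline of the yolov5.py example: detect objects and draw the results.
from maix import camera, display, image, nn, app

detector = nn.YOLOv5(model='/root/models/yolov5s.mud')   # placeholder model path
cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
disp = display.Display()

while not app.need_exit():
    img = cam.read()
    objs = detector.detect(img)   # run detection with default thresholds
    for obj in objs:
        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
        img.draw_string(obj.x, obj.y, f'{detector.labels[obj.class_id]}: {obj.score:.2f}', color=image.COLOR_RED)
    disp.show(img)
```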
## Next Steps If you like what you've seen so far, **please make sure to visit [GitHub](https://github.com/sipeed/MaixPy) and give the MaixPy open source project a star (you need to log in to GitHub first). Your star and support are our motivation to keep maintaining and adding new features!** You have now completed a basic usage and development process. Next, you can learn more about `MaixPy` syntax and features by following the directory on the left. If you encounter any issues with the `API`, you can find help in the [API Documentation](/api/). It's best to learn with a specific goal in mind, such as working on an interesting project. This will improve your learning experience. You can also share your projects and experiences on the [MaixHub Sharing Platform](https://maixhub.com/share) to earn cash rewards! ## Frequently Asked Questions (FAQ) If you encounter any issues, first check the [FAQ](./faq.html). If you can't find a solution, you can ask questions in the forum or group below, or submit a code issue on [MaixPy issue](https://github.com/sipeed/MaixPy/issues). ## Share and Communicate * **[MaixHub Project and Experience Sharing](https://maixhub.com/share)**: Share your projects and experiences to earn cash rewards. To receive official rewards, your content should meet the following criteria: * **Reproducibility**: A fairly complete project reproduction process. * **Showcase**: Projects without a detailed reproduction process but with an attractive presentation. * Bug Solution Experience: Share your process and specific solution to a difficult problem. * [MaixPy Official Forum](https://maixhub.com/discussion/maixpy) (for questions and discussions) * QQ Group: (It's recommended to post first before asking in the QQ group so others can quickly understand your problem and reproduction process) * MaixPy (v4) AI Vision Group: 862340358 * Telegram: [MaixPy](https://t.me/maixpy) * MaixPy Code Issues: [MaixPy issue](https://github.com/sipeed/MaixPy/issues) * For business cooperation or bulk purchases, please contact support@sipeed.com."},"/maixpy/doc/en/projects/line_tracking_robot.html":{"title":"MaixCAM MaixPy Line Tracking Robot (/Car)","content":" title: MaixCAM MaixPy Line Tracking Robot (/Car) update: date: 2024 05 09 author: lxowalle version: 1.0.0 content: Initial documentation Before reading this article, make sure you know how to develop with MaixCAM. For details, please read [Quick Start](../index.html). ## Introduction This article describes how to implement a line tracking robot using MaixPy. ## How to implement line tracking robot using MaixPy 1. Preparation of MaixCAM and trolley 2. Implementing the line tracking function 3. Implement the trolley control function ### Preparation of MaixCAM and trolley TODO ### Implementing the line tracking function You can quickly find straight lines using the `get_regression` of the `image` module, see [Line tracking](. /line_tracking.html). 
Code: ```python from maix import camera, display, image cam = camera.Camera(320, 240) disp = display.Display() # thresholds = [[0, 80, 40, 80, 10, 80]] # red thresholds = [[0, 80, -120, -10, 0, 30]] # green # thresholds = [[0, 80, 30, 100, -120, -60]] # blue while 1: img = cam.read() lines = img.get_regression(thresholds, area_threshold = 100) for a in lines: img.draw_line(a.x1(), a.y1(), a.x2(), a.y2(), image.COLOR_GREEN, 2) theta = a.theta() rho = a.rho() if theta > 90: theta = 270 - theta else: theta = 90 - theta img.draw_string(0, 0, \"theta: \" + str(theta) + \", rho: \" + str(rho), image.COLOR_BLUE) disp.show(img) ``` The above code implements the function of finding a straight line; note: Use `a.theta()` to get the angle of the line. Use `a.rho()` to get the distance between the line and the origin (the origin is in the upper left corner). After finding the straight line with reference to the above code, you can use `a.theta()` and `a.rho()` to control the direction of the cart. ### Implement the trolley control function TODO"},"/maixpy/doc/en/projects/index.html":{"title":"Practical Projects with MaixCAM MaixPy, Introduction and Collection","content":" title: Practical Projects with MaixCAM MaixPy, Introduction and Collection ## Introduction Here we provide some common practical project examples for community members to refer to and replicate for use. This also helps to inspire everyone to create more and better applications and projects. There are several ways to find projects implemented with MaixPy: ## MaixPy Official Documentation You can find practical projects in the documentation on the left, such as \"Line Following Car.\" If you have a good project or a recommended project, you can also contribute by adding it to the documentation. ## MaixHub Project Sharing Square Projects can be found in the [MaixHub Project Sharing](https://maixhub.com/share?type=project) section. High-quality shares will also be linked to the MaixPy official documentation. You can also share your project making methods, which will receive official rewards (guaranteed) and cash tips from community members (usually, high-quality projects that meet urgent needs are more likely to be tipped). Recommended Projects: * maixcam deploy yolov5s model: https://maixhub.com/share/23 ## MaixHub App Sharing In addition to project sharing, you can also find directly runnable applications at the [MaixHub App Store](https://maixhub.com/app), some of which might be written in MaixPy. If the author has provided the source code or written detailed tutorials, these can also be referred to. Recommended Projects: * Simple HTTP Streaming Server: https://maixhub.com/app/19 * Desktop Computer Performance Monitor: https://maixhub.com/app/13 * Safety Helmet Detection Model Application: https://maixhub.com/app/10"},"/maixpy/doc/en/projects/face_tracking.html":{"title":"MaixCAM MaixPy Face Tracking 2-axis servo gimbal","content":" title: MaixCAM MaixPy Face Tracking 2-axis servo gimbal update: date: 2024-06-11 author: iawak9lkm version: 1.0.0 content: Initial documentation Before reading this article, make sure you know how to develop with MaixCAM. For details, please read [Quick Start](../index.html). [Source Code](https://github.com/sipeed/MaixPy/blob/main/projects/app_face_tracking) [Download APP](https://maixhub.com/app/31) ## Description Face recognition and tracking is accomplished using a gimbal consisting of two servos and MaixCAM.
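For intuition about the `pitch_pid` / `roll_pid` values of the form `[P, I, D, I_max]` discussed in the FAQ below, here is a generic, illustrative PID sketch. It is not the app's actual implementation; the error would typically be the normalized offset of the face from the image center, and the output is added to the servo position:

```python
# Generic PID controller sketch with integral clamping (I_max), for illustration only.
class PID:
    def __init__(self, p, i, d, i_max):
        self.p, self.i, self.d, self.i_max = p, i, d, i_max
        self.integral = 0.0
        self.last_err = 0.0

    def update(self, err):
        self.integral += err
        # clamp the accumulated error so the integral term cannot wind up past I_max
        self.integral = max(-self.i_max, min(self.i_max, self.integral))
        out = self.p * err + self.i * self.integral + self.d * (err - self.last_err)
        self.last_err = err
        return out

# e.g. pid = PID(0.3, 0.0001, 0.0018, 0); servo_pos += pid.update(target_err)
```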
![](../../assets/face_tracking1.jpg) ![](../../assets/face_tracking2.jpg) ## Usage of this example program * Assemble your Gimbal and MaixCAM. * Modify the parameters in `main.py`. Modify the MaixCAM pins used for each servo. The specified pins must have PWM capability.The `servos.Servos` constructor then configures the pin for PWM functionality. ```python ROLL_PWM_PIN_NAME \"A17\" PITCH_PWM_PIN_NAME \"A16\" ``` Modify the initial positions of the two servos. ```python init_pitch 80 # init position, value: [0, 100], means minimum angle to maxmum angle of servo init_roll 50 # 50 means middle ``` You need to modify the min max PWM duty cycle for the active range of each of the two servos. NOTE: Certain Gimbal configurations may have unintended consequences when servos exceed their physically limited maximum range of motion. Ensure that there is no obstruction within the range of motion of the servos corresponding to the following setpoints. ```python PITCH_DUTY_MIN 3.5 # The minimum duty cycle corresponding to the range of motion of the y axis servo. PITCH_DUTY_MAX 9.5 # Maximum duty cycle corresponding to the y axis servo motion range. ROLL_DUTY_MIN 2.5 # Minimum duty cycle for x axis servos. ROLL_DUTY_MAX 12.5 # Maxmum duty cycle for x axis servos. ``` You need to select the direction of motion of the servos. ```python pitch_reverse False # reverse out value direction roll_reverse True # reverse out value direction ``` * Just execute the code at the end. If you installed the application from MaixHub, click face_tracking in the launcher to execute the program. If you got the source code from Github, you can import the project folder in [MaixVision](https://wiki.sipeed.com/maixvision) and execute the whole project. Please refer to [MaixVision Description](https://wiki.sipeed.com/maixpy/doc/zh/basic/maixvision.html) for more information about MaixVision. Of course, you can also copy the whole project folder to our MaixCAM in your favorite way and execute it with python. * If you want to exit the program, just press the button in the upper left corner. ![](../../../../projects/app_face_tracking/assets/exit.jpg) ### FAQs * The face tracking is not ideal. Different Gimbal use different PID parameters, you can adjust the PID value to make the effect better. ```python pitch_pid [0.3, 0.0001, 0.0018, 0] # [P I D I_max] roll_pid [0.3, 0.0001, 0.0018, 0] # [P I D I_max] ``` * After completing the tracking, the gimbal jerks small left and right for a period of time against a motionless face. You can usually make this effect as small as possible by adjusting the PID; however, there is no way to avoid the jitter caused by the physical structure of the gimbal. You can try to adjust the deadband to minimize the jitter. ```python target_ignore_limit 0.08 # when target error < target_err_range*target_ignore_limit , set target error to 0 ``` * The display shows or the terminal prints `PIN: XXX does not exist`. This is because the pin does not exist in the pinout of the MaixCAM board. Please select a pin with PWM function on MaixCAM. * The display shows or the terminal prints `Pin XXX doesn't have PWM function`. This is because the pin does not have a PWM function, you need to select a pin with a PWM function. ## How to track other objects * In `main.py` there exists a class `Target` which is used to customize the target to be tracked. * In `__init__`, initialize the objects you need to use, such as the camera. 
* In `__get_target()`, you need to calculate the center point of the tracked object, and if the tracked object does not exist in the frame, return 1, 1 to make sure that the program does not do anything for a while if the target is not found. You also need to call `self.__exit_listener(img)` and `self.disp.show(img)` before returning to the point to make sure that the program can interact with you properly."},"/maixpy/doc/en/vision/apriltag.html":{"title":"MaixCAM MaixPy Apriltag Recognition","content":" title: MaixCAM MaixPy Apriltag Recognition update: date: 2024 04 03 author: lxowalle version: 1.0.0 content: Initial documentation Before reading this article, make sure you are familiar with how to develop with MaixCAM. For more details, please read [Quick Start](../index.html). ## Introduction This article introduces how to use MaixPy to recognize Apriltag labels. ## Using MaixPy to Recognize Apriltag Labels MaixPy's `maix.image.Image` provides the `find_apriltags` method, which can be used to recognize Apriltag labels. ### How to Recognize Apriltag Labels A simple example of recognizing Apriltag labels and drawing bounding boxes: ```python from maix import image, camera, display cam camera.Camera() disp display.Display() families image.ApriltagFamilies.TAG36H11 x_scale cam.width() / 160 y_scale cam.height() / 120 while 1: img cam.read() new_img img.resize(160, 120) apriltags new_img.find_apriltags(families families) for a in apriltags: corners a.corners() for i in range(4): corners[i][0] int(corners[i][0] * x_scale) corners[i][1] int(corners[i][1] * y_scale) x int(a.x() * x_scale) y int(a.y() * y_scale) w int(a.w() * x_scale) h int(a.h() * y_scale) for i in range(4): img.draw_line(corners[i][0], corners[i][1], corners[(i + 1) % 4][0], corners[(i + 1) % 4][1], image.COLOR_RED) img.draw_string(x + w, y, \"id: \" + str(a.id()), image.COLOR_RED) img.draw_string(x + w, y + 15, \"family: \" + str(a.family()), image.COLOR_RED) disp.show(img) ``` Steps: 1. Import the image, camera, and display modules ```python from maix import image, camera, display ``` 2. Initialize the camera and display ```python cam camera.Camera() disp display.Display() ``` 3. Get the image from the camera and display it ```python while 1: img cam.read() disp.show(img) ``` 4. Call the `find_apriltags` method to recognize Apriltag labels in the camera image ```python new_img img.resize(160, 120) apriltags new_img.find_apriltags(families families) ``` `img` is the camera image obtained through `cam.read()` `img.resize(160, 120)` is used to scale down the image to a smaller size, allowing the algorithm to compute faster with a smaller image `new_img.find_apriltags(families families)` is used to find Apriltag labels, and the query results are saved in `apriltags` for further processing. The `families` parameter is used to select the Apriltag family, defaulting to `image.ApriltagFamilies.TAG36H11` 5. 
Process the recognized label results and display them on the screen ```python for a in apriltags: # Get position information (and map coordinates to the original image) x int(a.x() * x_scale) y int(a.y() * y_scale) w int(a.w() * x_scale) corners a.corners() for i in range(4): corners[i][0] int(corners[i][0] * x_scale) corners[i][1] int(corners[i][1] * y_scale) # Display for i in range(4): img.draw_line(corners[i][0], corners[i][1], corners[(i + 1) % 4][0], corners[(i + 1) % 4][1], image.COLOR_RED) img.draw_string(x + w, y, \"id: \" + str(a.id()), image.COLOR_RED) img.draw_string(x + w, y + 15, \"family: \" + str(a.family()), image.COLOR_RED) img.draw_string(x + w, y + 30, \"rotation : \" + str(180 * a.rotation() // 3.1415), image.COLOR_RED) ``` Iterate through the members of `apriltags`, which is the result of scanning Apriltag labels through `img.find_apriltags()`. If no labels are found, the members of `apriltags` will be empty. `x_scale` and `y_scale` are used to map coordinates. Since `new_img` is a scaled down image, the coordinates of the Apriltag need to be mapped to be drawn correctly on the original image `img`. `a.corners()` is used to get the coordinates of the four vertices of the detected label, and `img.draw_line()` uses these four vertex coordinates to draw the shape of the label. `img.draw_string` is used to display the label content, where `a.x()` and `a.y()` are used to get the x and y coordinates of the top left corner of the label, `a.id()` is used to get the label ID, `a.family()` is used to get the label family type, and `a.rotation()` is used to get the rotation angle of the label. ### Common Parameter Explanations Here are explanations for common parameters. If you can't find parameters to implement your application, you may need to consider using other algorithms or extending the required functionality based on the current algorithm's results. Parameter Description Example roi Set the rectangular region for the algorithm to compute. roi [x, y, w, h], where x and y represent the coordinates of the top left corner of the rectangle, and w and h represent the width and height of the rectangle. The default is the entire image. Compute the region with coordinates (50,50) and a width and height of 100:
    ```img.find_apriltags(roi=[50, 50, 100, 100])``` families: Apriltag label family type. Scan for labels from the TAG36H11 family:
    ```img.find_apriltags(families image.ApriltagFamilies.TAG36H11)``` This article introduces common methods. For more API information, please refer to the [image](../../../api/maix/image.html) section of the API documentation."},"/maixpy/doc/en/vision/maixhub_train.html":{"title":"Using MaixHub to Train AI Models for MaixCAM MaixPy","content":" title: Using MaixHub to Train AI Models for MaixCAM MaixPy update: date: 2024 04 03 author: neucrack version: 1.0.0 content: Initial document ## Introduction MaixHub offers the functionality to train AI models online, directly within a browser. This eliminates the need for expensive hardware, complex development environments, or coding skills, making it highly suitable for beginners as well as experts who prefer not to delve into code. ## Basic Steps to Train a Model Using MaixHub ### Identify the Data and Model Types To train an AI model, you first need to determine the type of data and model. As of April 2024, MaixHub provides models for image data including `Object Classification Models` and `Object Detection Models`. Object classification models are simpler than object detection models, as the latter require marking the position of objects within images, which can be more cumbersome. Object classification merely requires identifying what is in the image without needing coordinates, making it simpler and recommended for beginners. ### Collect Data As discussed in AI basics, training a model requires a dataset for the AI to learn from. For image training, you need to create a dataset and upload images to it. Ensure the device is connected to the internet (WiFi). Open the MaixHub app on your device and choose to collect data to take photos and upload them directly to MaixHub. You need to create a dataset on MaixHub first, then click on device upload data, which will display a QR code. Scan this QR code with your device to connect to MaixHub. It's important to distinguish between training and validation datasets. To ensure the performance during actual operation matches the training results, the validation dataset must be of the same image quality as those taken during actual operation. It's also advisable to use images taken by the device for the training set. If using internet images, restrict them to the training set only, as the closer the dataset is to actual operational conditions, the better. ### Annotate Data For classification models, images are annotated during upload by selecting the appropriate category for each image. For object detection models, after uploading, you need to manually annotate each image by marking the coordinates, size, and category of the objects to be recognized. This annotation process can also be done offline on your own computer using software like labelimg, then imported into MaixHub using the dataset import feature. Utilize shortcuts during annotation to speed up the process. MaixHub will also add more annotation aids and automatic annotation tools in the future (there is already an automatic annotation tool available for videos that you can try). ### Train the Model Select training parameters, choose the corresponding device platform, select maixcam, and wait in the training queue. You can monitor the training progress in real time and wait for it to complete. ### Deploy the Model Once training is complete, you can use the deploy function in the MaixHub app on your device to scan a code and deploy. The device will automatically download and run the model, storing it locally for future use. 
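Once the model has been deployed and stored on the device, you can also load it from your own MaixPy script rather than only through the MaixHub app. The sketch below assumes the deployed model is an object-detection model saved as a `.mud` file that can be loaded with `nn.YOLOv5`; the model path is hypothetical and the exact constructor and parameter names may differ between MaixPy versions, so treat this as an outline rather than the definitive deployment API.

```python
# Minimal sketch (assumptions: a YOLOv5-style detection model deployed from
# MaixHub; the path below is hypothetical -- use the path shown on your device).
from maix import camera, display, image, nn, app

detector = nn.YOLOv5(model="/root/models/my_maixhub_model.mud")
cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
disp = display.Display()

while not app.need_exit():
    img = cam.read()
    objs = detector.detect(img, conf_th=0.5, iou_th=0.45)
    for obj in objs:
        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
        msg = f"{detector.labels[obj.class_id]}: {obj.score:.2f}"
        img.draw_string(obj.x, obj.y, msg, color=image.COLOR_RED)
    disp.show(img)
```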
If you find the recognition results satisfactory, you can share the model to the model library with a single click for others to use. ## How to Use Please visit [MaixHub](https://maixhub.com) to register an account, then log in. There are video tutorials on the homepage for learning. Note that if the tutorial uses the M2dock development board, the process is similar for MaixCAM, although the MaixHub application on the device might differ slightly. The overall process is the same, so please apply the knowledge flexibly."},"/maixpy/doc/en/vision/face_recognition.html":{"title":"MaixCAM MaixPy Face Recognition","content":" title: MaixCAM MaixPy Face Recognition ## Introduction to Face Recognition ![face_recognize](../../assets/face_recognize.jpg) Face recognition involves identifying the location of faces in the current view and who they are. Thus, in addition to detecting faces, face recognition typically involves a database to store known and unknown individuals. ## Recognition Principles * Use AI models to detect faces, obtaining coordinates and features of facial components. * Use the coordinates of these features for affine transformation to align the face in the image to a standard face orientation, facilitating the extraction of facial features by the model. * Employ a feature extraction model to derive facial feature values. * Compare these features with those stored in the database (by calculating the cosine distance between the saved and the current facial features, identifying the face in the database with the smallest distance; if it's below a predefined threshold, it is recognized as the person in the database). ## Using MaixPy MaixPy's `maix.nn` module provides a face recognition API, ready to use with built in models. Additional models can also be downloaded from the [MaixHub model repository](https://maixhub.com/model/zoo) (select the appropriate hardware platform, such as maixcam). Recognition: ```python from maix import nn, camera, display, image import os import math recognizer nn.FaceRecognizer(detect_model \"/root/models/retinaface.mud\", feature_model \"/root/models/face_feature.mud\", dual_buff True) if os.path.exists(\"/root/faces.bin\"): recognizer.load_faces(\"/root/faces.bin\") cam camera.Camera(recognizer.input_width(), recognizer.input_height(), recognizer.input_format()) dis display.Display() while 1: img cam.read() faces recognizer.recognize(img, 0.5, 0.45, 0.8) for obj in faces: img.draw_rect(obj.x, obj.y, obj.w, obj.h, color image.COLOR_RED) radius math.ceil(obj.w / 10) img.draw_keypoints(obj.points, image.COLOR_RED, size radius if radius < 5 else 4) msg f'{recognizer.labels[obj.class_id]}: {obj.score:.2f}' img.draw_string(obj.x, obj.y, msg, color image.COLOR_RED) dis.show(img) ``` When you first run this code, it can detect faces but will not recognize them. We need to enter a mode to learn faces. > Here `recognizer.labels[0]` is by default `unknown`, and every new face added will automatically append to `labels`. 
For example, you can learn faces when a user presses a button: ```python faces recognizer.recognize(img, 0.5, 0.45, True) for face in faces: print(face) # This accounts for the scenario where multiple faces are present in one scene; obj.class_id of 0 means the face is not registered # Write your own logic here # For instance, based on face’s class_id and coordinates, you can decide whether to add it to the database and facilitate user interaction, like pressing a button to register recognizer.add_face(face, label) # label is the name you assign to the face recognizer.save_faces(\"/root/faces.bin\") ``` ## Complete Example A complete example is provided for recording unknown faces and recognizing faces with a button press. This can be found in the [MaixPy example directory](https://github.com/sipeed/MaixPy/tree/main/examples) under `nn_face_recognize.py`. ## dual_buff Dual Buffer Acceleration You may have noticed that the model initialization uses `dual_buff` (which defaults to `True`). Enabling the `dual_buff` parameter can improve running efficiency and increase the frame rate. For detailed principles and usage notes, see [dual_buff Introduction](./dual_buff.html). ## Replacing Other Default Recognition Models The current recognition model (used to distinguish different individuals) is based on the MobileNetV2 model. If its accuracy does not meet your requirements, you can replace it with another model, such as the [Insight Face ResNet50](https://maixhub.com/model/zoo/462) model. Of course, you can also train your own model or find other pre trained models and convert them into a format supported by MaixCAM. For the conversion method, refer to the [MaixCAM Model Conversion Documentation](../ai_model_converter/maixcam.html), and you can write the mud file based on existing examples."},"/maixpy/doc/en/basic/maixvision.html":{"title":"MaixVision -- MaixCAM MaixPy Programming IDE + Graphical Block Programming","content":" title: MaixVision MaixCAM MaixPy Programming IDE + Graphical Block Programming ## Introduction [MaixVision](https://wiki.sipeed.com/maixvision) is a development tool specifically designed for the Maix ecosystem, supporting MaixPy programming and graphical block programming. It allows for online operation and debugging, real time image preview, and synchronizing images from device displays, which is convenient for debugging and development. It also supports packaging and installing applications on devices, allowing users to easily generate and install applications with one click. In addition, it integrates several handy tools for development, such as file management, threshold editor, QR code generator, and more. ## Download Visit the [MaixVision homepage](https://wiki.sipeed.com/maixvision) to download. ## Using MaixPy Programming and Online Running Follow the steps in [Quick Start](../index.html) to connect your device, and you can easily use MaixPy programming and run it online. ## Real time Image Preview MaixPy provides a `display` module that can show images on the screen. Also, when the `show` method of the `display` module is called, it sends the image to be displayed on MaixVision, for example: ```python from maix import display, camera cam camera.Camera(640, 480) disp display.Display() while 1: disp.show(cam.read()) ``` Here we use the camera to capture an image, then display it on the screen using the `disp.show()` method, and also send it to MaixVision for display. When we click the 'pause' button in the top right corner, it will stop sending images to MaixVision. 
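Anything you draw on the image before calling `show()` also appears in the MaixVision preview, which is handy for debugging. Below is a small sketch of that idea; `time.fps()` is an assumption here, and if your MaixPy version does not provide it you can compute the frame rate yourself from timestamps.

```python
# Sketch: annotate the frame before show(), so the overlay is visible both on
# the device screen and in the MaixVision preview window.
# Assumption: time.fps() exists in your MaixPy version.
from maix import camera, display, image, time, app

cam = camera.Camera(640, 480)
disp = display.Display()

while not app.need_exit():
    img = cam.read()
    img.draw_rect(10, 10, 100, 60, color=image.COLOR_GREEN)
    img.draw_string(10, 80, "fps: %.1f" % time.fps(), color=image.COLOR_RED)
    disp.show(img)
```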
## Code Auto Completion Code suggestions depend on local Python packages installed on your computer. To enable code suggestions, you need to install Python on your computer and the required Python packages. * To install Python, visit the [Python official website](https://python.org/). * To install the required packages, for MaixPy, for instance, you need to install the MaixPy package on your computer using `pip install MaixPy`. If `MaixPy` gets updated, you should update it on both your computer and device. On your computer, manually execute `pip install MaixPy U` in the terminal. For device updates, update directly in the `Settings` application. > Users in China can use a local mirror `pip install i https://pypi.tuna.tsinghua.edu.cn/simple MaixPy`. * Restart MaixVision to see the code suggestions. > If suggestions still do not appear, you can manually set the path to the Python executable in settings and restart. >! Note that installing Python packages on your computer is just for code suggestions. The actual code runs on the device (development board), and the device must also have the corresponding packages to run properly. > Additionally, while you have the MaixPy package installed on your computer, due to our limited resources, we cannot guarantee that you can directly use the Maix package in your computer's Python. Please run it on supported devices. ## Calculating the Image Histogram In the previous step, we could see the image in real time in MaixVision. By selecting an area with the mouse, we can view the histogram for that area at the bottom of the screen, displaying different color channels. This feature is helpful when finding suitable parameters for some image processing algorithms. ## Distinguishing Between `Device File System` and `Computer File System` Here we have an important concept to grasp: **distinguish between the `Device File System` and the `Computer File System`**. * **Computer File System**: Operates on the computer. Opening a file or project in MaixVision accesses files on the computer, and saving is automatically done to the computer's file system. * **Device File System**: The program sends the code to the device for execution, so the files used in the code are read from the device's file system. A common issue is when students save a file on the computer, such as `D:\\data\\a.jpg`, and then use this file on the device with `img image.load(\"D:\\data\\a.jpg\")`. Naturally, the file cannot be found because the device does not have `D:\\data\\a.jpg`. For specifics on how to send files from the computer to the device, refer to the following section. ## Transferring Files to the Device First, connect to the device, then click the button to browse the device file system, as shown below. Then you can upload files to the device or download files to the computer. ![maixvision_browser2](../../assets/maixvision_browser2.jpg) ![maixvision_browser](../../assets/maixvision_browser.jpg) .. details:: Alternatively, other tools can be used, click to expand First, know the device's IP address or name, which MaixVision can find, or see in the device's `Settings >System Information`, such as `maixcam xxxx.local` or `192.168.0.123`. The username and password are `root`, using the `SFTP` protocol for file transfer, and the port number is `22`. 
There are many useful tools available for different systems: ### Windows Use [WinSCP](https://winscp.net/eng/index.php) or [FileZilla](https://filezilla project.org/) to connect to the device and transfer files, choosing the `SFTP` protocol and entering the device and account information to connect. Specific instructions can be searched online. ### Linux In the terminal, use the `scp` command to transfer files to the device, such as: ```bash scp /path/to/your/file.py root@maixcam xxxx.local:/root ``` ### Mac * **Method 1**: In the terminal, use the `scp` command to transfer files to the device, such as: ```bash scp /path/to/your/file.py root@maixcam xxxx.local:/root ``` * **Method 2**: Use [FileZilla](https://filezilla project.org/) or other tools to connect to the device and transfer files, choosing the `SFTP` protocol and entering the device and account information to connect. ## Using Graphical Block Programming Under development, please stay tuned."},"/maixpy/doc/en/basic/view_src_code.html":{"title":"MaixCAM MaixPy How to Find the Source Code Corresponding to MaixPy API","content":" title: MaixCAM MaixPy How to Find the Source Code Corresponding to MaixPy API ## Introduction MaixPy is implemented based on Python, with some functions written in Python and most of the underlying code written in C/C++. This ensures efficient performance. If you have questions while using a function, you can consult this document and the API documentation. If your doubts are still unresolved, you can find the underlying implementation source code using the method described in this article. **You are also welcome to contribute to the documentation or code, and become a MaixPy developer!** ## Check the Documentation First Always check the documentation first: [https://wiki.sipeed.com/maixpy/](https://wiki.sipeed.com/maixpy/), then check the API documentation: [https://wiki.sipeed.com/maixpy/api/index.html](https://wiki.sipeed.com/maixpy/api/index.html). The API documentation is only available in English because it is generated from the comments in the code, which are all in English. If you can't understand English, you can use a translation tool. ## How to Find the Source Code Corresponding to the API There are two open source repositories: [MaixPy](https://github.com/sipeed/MaixPy) and [MaixCDK](https://github.com/sipeed/MaixCDK). MaixPy is the project repository containing part of the MaixPy source code, all documents, and examples; MaixCDK contains most of the underlying C/C++ implementations of MaixPy APIs. You can download these two repositories or view them directly on the web. **Don't forget to give them a star so more people can see it!** ### Finding C/C++ Written APIs Assume we want to find the `maix.image.Image.find_blobs` function as an example. First, let's try to find it manually: * Since this is a vision related API, we look in the `components/vision/include` directory of [MaixCDK](https://github.com/sipeed/MaixCDK) and see a `maix_image.hpp` header file, where we might find it. * Searching for `find_blobs` in `maix_image.hpp`, we immediately find the function declaration: ```c++ std::vector find_blobs(std::vector> thresholds std::vector>(), bool invert false, std::vector roi std::vector(), int x_stride 2, int y_stride 1, int area_threshold 10, int pixels_threshold 10, bool merge false, int margin 0, int x_hist_bins_max 0, int y_hist_bins_max 0); ``` * We also notice that there are comments before the function declaration, from which the API documentation is automatically generated. 
If you compare the API documentation with this comment, you will find them identical. Modifying this comment and recompiling will generate updated API documentation. * This is just the function declaration. We find that there is no such function in `components/vision/src/maix_image.cpp`. However, we see `components/vision/src/maix_image_find_blobs.cpp`, indicating that the function is written in a separate `cpp` file. Here, we can see the function's source code. ### Finding APIs Written with Pybind11 If you can't find it in MaixCDK, look in [MaixPy/components](https://github.com/sipeed/MaixPy/tree/main/components). > In the above code, you'll notice that the first parameter we use in `find_blobs` is of type `list`, i.e., `[[...]]`, while the C/C++ definition is `std::vector>`. This is because we use `pybind11` to automatically convert the `std::vector` type to `list` type. For some types like `numpy`'s `array`, which are inconvenient to define in MaixCDK, we use the `pybind11` definitions in [MaixPy/components](https://github.com/sipeed/MaixPy/tree/main/components). For example, the `maix.image.image2cv` method uses `pybind11` related code here. ## How to Modify the Code After finding the code, modify it directly and compile the firmware following the [build documentation](../source_code/build.html). ## How to Add Code Copy other APIs, write a function, and add complete comments. Include an extra `@maixpy maix.xxx.xxx` tag in the comments, where `xxx` is the module and API name you want to add. Then compile the firmware. Refer to [MaixCDK/components/basic/includemaix_api_example.hpp](https://github.com/sipeed/MaixCDK/blob/master/components/basic/include/maix_api_example.hpp). API parameters and return values automatically convert from basic `C++` types to Python types, making it very simple. See the [pybind11 automatic type conversion list](https://pybind11.readthedocs.io/en/stable/advanced/cast/overview.html#conversion table) for details. For example, to add `maix.my_module.my_func`, create a header file in the appropriate place in MaixCDK (preferably following the current folder classification) and add the code: ```cpp namespace maix::my_module { /** * My function, add two integers. * @param a arg a, int type * @param b arg b, int type * @return int type, will return a + b * @maixpy maix.my_module.my_func */ int my_func(int a, int b); } ``` Then add a `cpp` file: ```cpp int my_func(int a, int b) { return a + b; } ``` Compile MaixPy to generate the `whl` file and install it on the device to use the `maix.my_module.my_func` function. ## How to Contribute Code If you find any unfinished APIs or bugs in MaixPy, feel free to submit a PR (Pull Request) to the MaixPy repository. For detailed submission methods, see [Contributing Documentation and Code](../source_code/contribute.html)."},"/maixpy/doc/en/basic/python.html":{"title":"Basic Knowledge of Python","content":" title: Basic Knowledge of Python The tutorial documentation of MaixPy does not delve into specific Python syntax tutorials because there are already too many excellent Python tutorials available. Here, we only introduce what needs to be learned, provide guidance on directions and paths. ## Introduction to Python Python is an interpreted, object oriented, dynamically typed high level programming language. * Interpreted: It does not require compilation, runs directly. The advantage is rapid development, while a minor drawback is the slower execution speed due to code interpretation on each run. 
However, most often, the bottleneck lies in the developer's code rather than the language itself. * Object oriented: It supports object oriented programming, allowing the definition of classes and objects. Compared to procedural languages, it is easier to organize code. For more details, please search independently. * Dynamically typed: Variables do not need to declare types, can be assigned directly, and the type will be automatically determined based on the assignment. This reduces code volume, but can also lead to type errors, requiring the developer's attention. In conclusion, for developers unfamiliar with Python, it is very easy to get started as Python offers plenty of ready to use libraries, a large developer community, short application development cycles, making it highly worthwhile to learn! ## Python Environment Setup You can install Python on your computer according to the Python tutorial you are following for learning. Alternatively, you can connect to a device via MaixVision on MaixVision and then run the program on the development board. ## What Python Basics are Needed to Use MaixPy? * Basic concepts of Python. * Basic concepts of object oriented programming. * Basic syntax of Python, including: * Tab indentation alignment syntax. * Variables, functions, classes, objects, comments, etc. * Control statements such as if, for, while, etc. * Modules and importing modules. * Basic data types such as int, float, str, list, dict, tuple, etc. * Difference between bytes and str, and conversion. * Exception handling, try except. * Common built in functions like print, open, len, range, etc. * Common built in modules like os, sys, time, random, math, etc. Mastering the above foundational knowledge will enable you to smoothly program with MaixPy. With the help of subsequent tutorials and examples, if unsure, you can refer to search engines, official documentation, or ask ChatGPT to successfully complete your development tasks. ## For Developers Experienced in Another Object Oriented Programming Language If you are already proficient in an object oriented language like C++/Java/C#, you simply need to quickly review Python syntax before starting to use it. You can refer to resources like [Runoob Tutorial](https://www.runoob.com/python3/python3 tutorial.html) or the [Python Official Tutorial](https://docs.python.org/3/tutorial/index.html). Alternatively, you can explore individual developers' blogs, such as [Wow! It's Python](https://neucrack.com/p/59). ## For Developers with C Language Experience but No Object Oriented Programming Experience If you only know C and lack understanding of object oriented concepts, you can start by learning about object oriented programming concepts before diving into Python. It's relatively quick and you can search for video tutorials for entry level guidance. After following introductory video tutorials, you can then refer to documentation tutorials such as [Runoob Tutorial](https://www.runoob.com/python3/python3 tutorial.html) or the [Python Official Tutorial](https://docs.python.org/3/tutorial/index.html) to get started! Once you have acquired the basic knowledge, you can start using MaixPy for programming based on the documentation and examples. ## For Programming Beginners If you have never dealt with programming before, you will need to start learning Python from scratch. Python is also quite suitable as an introductory language. You can search for video tutorials for specific guidance. 
After mastering the basic syntax, you will be able to use MaixPy for programming by following examples provided."},"/maixpy/doc/en/audio/recognize.html":{"title":"MaixCAM MaixPy Real-time voice recognition","content":" title: MaixCAM MaixPy Real time voice recognition update: date: 2024 10 08 author: 916BGAI version: 1.0.0 content: Initial document ## Introduction `MaixCAM` has ported the `Maix Speech` offline speech library, enabling continuous Chinese numeral recognition, keyword recognition, and large vocabulary speech recognition capabilities. It supports audio recognition in `PCM` and `WAV` formats, and can accept input recognition via the onboard microphone. ## Maix Speech [`Maix Speech`](https://github.com/sipeed/Maix Speech) is an offline speech library specifically designed for embedded environments. It features deep optimization of speech recognition algorithms, achieving a significant lead in memory usage while maintaining excellent WER. For more details on the principles, please refer to the open source project. ## Continuous Large Vocabulary Speech Recognition ```python from maix import app, nn speech nn.Speech(\"/root/models/am_3332_192_int8.mud\") speech.init(nn.SpeechDevice.DEVICE_MIC, \"hw:0,0\") def callback(data: tuple[str, str], len: int): print(data) lmS_path \"/root/models/lmS/\" speech.lvcsr(lmS_path + \"lg_6m.sfst\", lmS_path + \"lg_6m.sym\", \\ lmS_path + \"phones.bin\", lmS_path + \"words_utf.bin\", \\ callback) while not app.need_exit(): frames speech.run(1) if frames < 1: print(\"run out\\n\") speech.deinit() break ``` ### Usage 1. Import the `app` and `nn` modules ```python from maix import app, nn ``` 2. Load the acoustic model ```python speech nn.Speech(\"/root/models/am_3332_192_int8.mud\") ``` You can also load the `am_7332` acoustic model; larger models provide higher accuracy but consume more resources. 3. Choose the corresponding audio device ```python speech.init(nn.SpeechDevice.DEVICE_MIC, \"hw:0,0\") ``` This uses the onboard microphone and supports both `WAV` and `PCM` audio as input devices. ```python speech.init(nn.SpeechDevice.DEVICE_WAV, \"path/audio.wav\") # Using WAV audio input ``` ```python speech.init(nn.SpeechDevice.DEVICE_PCM, \"path/audio.pcm\") # Using PCM audio input ``` Note that `WAV` must be `16KHz` sample rate with `S16_LE` storage format. You can use the `arecord` tool for conversion. ```shell arecord d 5 r 16000 c 1 f S16_LE audio.wav ``` When recognizing `PCM/WAV` , if you want to reset the data source, such as for the next WAV file recognition, you can use the `speech.devive` method, which will automatically clear the cache: ```python speech.devive(nn.SpeechDevice.DEVICE_WAV, \"path/next.wav\") ``` 4. Set up the decoder ```python def callback(data: tuple[str, str], len: int): print(data) lmS_path \"/root/models/lmS/\" speech.lvcsr(lmS_path + \"lg_6m.sfst\", lmS_path + \"lg_6m.sym\", \\ lmS_path + \"phones.bin\", lmS_path + \"words_utf.bin\", \\ callback) ``` Users can register several decoders (or none), which decode the results from the acoustic model and execute the corresponding user callback. Here, a `lvcsr` decoder is registered to output continuous speech recognition results (for fewer than 1024 Chinese characters). For other decoder usages, please refer to the sections on continuous Chinese numeral recognition and keyword recognition. 
When setting up the `lvcsr` decoder, you need to specify the paths for the `sfst` file, the `sym` file (output symbol table), the path for `phones.bin` (phonetic table), and the path for `words.bin` (dictionary). Lastly, a callback function must be set to handle the decoded data. After registering the decoder, use the `speech.deinit()` method to clear the initialization. 5. Recognition ```python while not app.need_exit(): frames speech.run(1) if frames < 1: print(\"run out\\n\") speech.deinit() break ``` Use the `speech.run` method to run speech recognition. The parameter specifies the number of frames to run each time, returning the actual number of frames processed. Users can choose to run 1 frame each time and then perform other processing, or run continuously in a single thread, stopping it with an external thread. ### Recognition Results If the above program runs successfully, speaking into the onboard microphone will yield real time speech recognition results, such as: ```shell ### SIL to clear decoder! ('今天天气 怎么样 ', 'jin1 tian1 tian1 qi4 zen3 me yang4 ') ```"},"/maixpy/doc/en/audio/record.html":{"title":"MaixCAM MaixPy Audio Record","content":" title: MaixCAM MaixPy Audio Record update: date: 2024 05 20 author: lxowalle version: 1.0.0 content: Initial document ## Introduction This document provides the usage of audio recording and supports recording audio in `PCM` and `WAV` formats. The `MaixCAM` has a microphone on board, so you can use the recording function directly. ### How to use #### Getting `PCM` data If you don't pass `path` when constructing a `Recorder` object, it will only record audio and not save it to a file, but you can save it to a file manually. ```python from maix import audio, time, app r audio.Recorder() r.volume(12) print(\"sample_rate:{} format:{} channel:{}\".format(r.sample_rate(), r.format(), r.channel())) while not app.need_exit(): data r.record() print(\"data size\", len(data)) time.sleep_ms(10) print(\"record finish!\") ``` Steps: 1. Import the audio, time and app modules: ```python from maix import audio, time, app ``` 2. Initialize Recorder ```python r audio.Recorder() r.volume(12) ``` Note that the default sample rate is 48k, the sample format is little endian format signed 16 bit, and the sample channel is 1. You can also customise the parameters like this `r audio.Recorder(sample_rate 48000, format audio.Format.FMT_S16_LE, channel 1)`. So far only tested with sample rate 48000, format `FMT_S16_LE`, and number of sampling channels 1. `r.volume(12)` is used to set the volume, the volume range is [0,100] 3. Start recording ```python data r.record() ``` `data` is `bytes` type data in `PCM` format that holds the currently recorded audio. The `PCM` format is set when initialising the `Recorder` object, see step 2. Note that if the recording is too fast and there is no data in the audio buffer, it is possible to return an empty `bytes` of data. 4. Done, you can do voice processing on the `PCM` data returned by `r.record()` when doing your own applications. #### Records audio and saves it in `WAV` format. If you pass `path` when constructing a `Recorder` object, the recorded audio will be saved to a `path` file, and you can also get the currently recorded `PCM` data via the `record` method. `path` only supports paths with `.pcm` and `.wav` suffixes, and the `record` method does not return `WAV` headers when recording `.wav`, it only returns `PCM` data. 
```python from maix import audio, time, app r audio.Recorder(\"/root/output.wav\") r.volume(12) print(\"sample_rate:{} format:{} channel:{}\".format(r.sample_rate(), r.format(), r.channel())) while not app.need_exit(): data r.record() print(\"data size\", len(data)) time.sleep_ms(10) print(\"record finish!\") ``` The code means basically the same as above. #### Record audio and save to `WAV` format (blocking) If the `record_ms` parameter is set during recording, recording audio will block until the time set by `record_ms` is reached, unit: ms. ```python from maix import audio, time, app r audio.Recorder(\"/root/output.wav\") r.volume(12) print(\"sample_rate:{} format:{} channel:{}\".format(r.sample_rate(), r.format(), r.channel())) r.record(5000) print(\"record finish!\") ``` The above example will keep recording `5000`ms and save it to `WAV` format, during the recording period it will block in `record` method, note that `PCM` data will not be returned when `record` is set to `record_ms`. ### Other The `Player` and `Recorder` modules have some `bugs` to be worked out, make sure they are created before other modules (`Camera` module, `Display` module, etc.). For example: ```python # Create Player and Recorder first. p audio.Player() r audio.Recorder() # Then create the Camera c camera.Camera() ```"},"/maixpy/doc/en/audio/synthesis.html":{"title":"MaixCAM MaixPy speech synthesis","content":" title: MaixCAM MaixPy speech synthesis TODO: comming soon~"},"/maixpy/doc/en/audio/digit.html":{"title":"MaixCAM MaixPy Continuous Chinese digit recognition","content":" title: MaixCAM MaixPy Continuous Chinese digit recognition update: date: 2024 10 08 author: 916BGAI version: 1.0.0 content: Initial document ## Introduction `MaixCAM` has ported the `Maix Speech` offline speech library, enabling continuous Chinese numeral recognition, keyword recognition, and large vocabulary speech recognition capabilities. It supports audio recognition in `PCM` and `WAV` formats, and can accept input recognition via the onboard microphone. ## Maix Speech [`Maix Speech`](https://github.com/sipeed/Maix Speech) is an offline speech library specifically designed for embedded environments. It features deep optimization of speech recognition algorithms, achieving a significant lead in memory usage while maintaining excellent WER. For more details on the principles, please refer to the open source project. ## Continuous Chinese digit recognition ```python from maix import app, nn speech nn.Speech(\"/root/models/am_3332_192_int8.mud\") speech.init(nn.SpeechDevice.DEVICE_MIC, \"hw:0,0\") def callback(data: str, len: int): print(data) speech.digit(640, callback) while not app.need_exit(): frames speech.run(1) if frames < 1: print(\"run out\\n\") speech.deinit() break ``` ### Usage 1. Import the `app` and `nn` modules ```python from maix import app, nn ``` 2. Load the acoustic model ```python speech nn.Speech(\"/root/models/am_3332_192_int8.mud\") ``` You can also load the `am_7332` acoustic model; larger models provide higher accuracy but consume more resources. 3. Choose the corresponding audio device ```python speech.init(nn.SpeechDevice.DEVICE_MIC, \"hw:0,0\") ``` This uses the onboard microphone and supports both `WAV` and `PCM` audio as input devices. ```python speech.init(nn.SpeechDevice.DEVICE_WAV, \"path/audio.wav\") # Using WAV audio input ``` ```python speech.init(nn.SpeechDevice.DEVICE_PCM, \"path/audio.pcm\") # Using PCM audio input ``` Note that `WAV` must be `16KHz` sample rate with `S16_LE` storage format. 
You can use the `arecord` tool for conversion. ```shell arecord d 5 r 16000 c 1 f S16_LE audio.wav ``` When recognizing `PCM/WAV` , if you want to reset the data source, such as for the next WAV file recognition, you can use the `speech.devive` method, which will automatically clear the cache: ```python speech.devive(nn.SpeechDevice.DEVICE_WAV, \"path/next.wav\") ``` 4. Set up the decoder ```python def callback(data: str, len: int): print(data) speech.digit(640, callback) ``` Users can register several decoders (or none), which decode the results from the acoustic model and execute the corresponding user callback. Here, a `digit` decoder is registered to output the Chinese digit recognition results from the last 4 seconds. The returned recognition results are in string format and support `0123456789 .(dot) S(ten) B(hundred) Q(thousand) W(thousand)`. For other decoder usages, please refer to the sections on Real time voice recognition and keyword recognition. When setting the `digit` decoder, you need to specify a `blank` value; exceeding this value (in ms) will insert a `_` in the output results to indicate idle silence. After registering the decoder, use the `speech.deinit()` method to clear the initialization. 5. Recognition ```python while not app.need_exit(): frames speech.run(1) if frames < 1: print(\"run out\\n\") speech.deinit() break ``` Use the `speech.run` method to run speech recognition. The parameter specifies the number of frames to run each time, returning the actual number of frames processed. Users can choose to run 1 frame each time and then perform other processing, or run continuously in a single thread, stopping it with an external thread. ### Recognition Results If the above program runs successfully, speaking into the onboard microphone will yield continuous Chinese digit recognition results, such as: ```shell _0123456789 ```"},"/maixpy/doc/en/audio/keyword.html":{"title":"MaixCAM MaixPy Keyword recognition","content":" title: MaixCAM MaixPy Keyword recognition update: date: 2024 10 08 author: 916BGAI version: 1.0.0 content: Initial document ## Introduction `MaixCAM` has ported the `Maix Speech` offline speech library, enabling continuous Chinese numeral recognition, keyword recognition, and large vocabulary speech recognition capabilities. It supports audio recognition in `PCM` and `WAV` formats, and can accept input recognition via the onboard microphone. ## Maix Speech [`Maix Speech`](https://github.com/sipeed/Maix Speech) is an offline speech library specifically designed for embedded environments. It features deep optimization of speech recognition algorithms, achieving a significant lead in memory usage while maintaining excellent WER. For more details on the principles, please refer to the open source project. ## Keyword recognition ```python from maix import app, nn speech nn.Speech(\"/root/models/am_3332_192_int8.mud\") speech.init(nn.SpeechDevice.DEVICE_MIC, \"hw:0,0\") kw_tbl ['xiao3 ai4 tong2 xue2', 'ni3 hao3', 'tian1 qi4 zen3 me yang4'] kw_gate [0.1, 0.1, 0.1] def callback(data:list[float], len: int): for i in range(len): print(f\"\\tkw{i}: {data[i]:.3f};\", end ' ') print(\"\\n\") speech.kws(kw_tbl, kw_gate, callback, True) while not app.need_exit(): frames speech.run(1) if frames < 1: print(\"run out\\n\") speech.deinit() break ``` ### Usage 1. Import the `app` and `nn` modules ```python from maix import app, nn ``` 2. 
Load the acoustic model ```python speech nn.Speech(\"/root/models/am_3332_192_int8.mud\") ``` You can also load the `am_7332` acoustic model; larger models provide higher accuracy but consume more resources. 3. Choose the corresponding audio device ```python speech.init(nn.SpeechDevice.DEVICE_MIC, \"hw:0,0\") ``` This uses the onboard microphone and supports both `WAV` and `PCM` audio as input devices. ```python speech.init(nn.SpeechDevice.DEVICE_WAV, \"path/audio.wav\") # Using WAV audio input ``` ```python speech.init(nn.SpeechDevice.DEVICE_PCM, \"path/audio.pcm\") # Using PCM audio input ``` Note that `WAV` must be `16KHz` sample rate with `S16_LE` storage format. You can use the `arecord` tool for conversion. ```shell arecord d 5 r 16000 c 1 f S16_LE audio.wav ``` When recognizing `PCM/WAV` , if you want to reset the data source, such as for the next WAV file recognition, you can use the `speech.devive` method, which will automatically clear the cache: ```python speech.devive(nn.SpeechDevice.DEVICE_WAV, \"path/next.wav\") ``` 4. Set up the decoder ```python kw_tbl ['xiao3 ai4 tong2 xue2', 'ni3 hao3', 'tian1 qi4 zen3 me yang4'] kw_gate [0.1, 0.1, 0.1] def callback(data:list[float], len: int): for i in range(len): print(f\"\\tkw{i}: {data[i]:.3f};\", end ' ') print(\"\\n\") speech.kws(kw_tbl, kw_gate, callback, True) ``` Users can register several decoders (or none), which decode the results from the acoustic model and execute the corresponding user callback. Here, a `kws` decoder is registered to output a list of probabilities for all registered keywords from the last frame. Users can observe the probability values and set their own thresholds for activation. For other decoder usages, please refer to the sections on Real time voice recognition and continuous Chinese numeral recognition. When setting up the `kws` decoder, you need to provide a `keyword list` separated by spaces in Pinyin, a `keyword probability threshold list` arranged in order, and specify whether to enable `automatic near sound processing`. If set to `True`, different tones of the same Pinyin will be treated as similar words to accumulate probabilities. Finally, you need to set a callback function to handle the decoded data. Users can also manually register near sound words using the `speech.similar` method, with a maximum of `10` near sound words registered for each Pinyin. (Note that using this interface to register near sound words will override the near sound table generated by enabling `automatic near sound processing`.) ```python similar_char ['zhen3', 'zheng3'] speech.similar('zen3', similar_char) ``` After registering the decoder, use the `speech.deinit()` method to clear the initialization. 5. Recognition ```python while not app.need_exit(): frames speech.run(1) if frames < 1: print(\"run out\\n\") speech.deinit() break ``` Use the `speech.run` method to run speech recognition. The parameter specifies the number of frames to run each time, returning the actual number of frames processed. Users can choose to run 1 frame each time and then perform other processing, or run continuously in a single thread, stopping it with an external thread. 
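The callback in the example above only prints the probabilities. To actually act on a keyword, compare each probability with the threshold you chose in `kw_gate` (or any stricter value) and trigger your own logic, as in the following minimal sketch; the "action" here is just a print and is purely illustrative.

```python
# Minimal sketch: same model, device and keyword list as above, but the
# callback now triggers an (illustrative) action when a keyword's probability
# exceeds its threshold.
from maix import app, nn

speech = nn.Speech("/root/models/am_3332_192_int8.mud")
speech.init(nn.SpeechDevice.DEVICE_MIC, "hw:0,0")

kw_tbl = ['xiao3 ai4 tong2 xue2', 'ni3 hao3', 'tian1 qi4 zen3 me yang4']
kw_gate = [0.1, 0.1, 0.1]

def on_kws(data: list[float], length: int):
    for i in range(length):
        if data[i] >= kw_gate[i]:
            # Replace this with your own action (toggle a GPIO, start recording, ...)
            print("keyword activated:", kw_tbl[i], "probability:", round(data[i], 3))

speech.kws(kw_tbl, kw_gate, on_kws, True)

while not app.need_exit():
    frames = speech.run(1)
    if frames < 1:
        print("run out")
        speech.deinit()
        break
```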
### Recognition Results If the above program runs successfully, speaking into the onboard microphone will yield keyword recognition results, such as: ```shell kws log 2.048s, len 24 decoder_kws_init get 3 kws 00, xiao3 ai4 tong2 xue2 01, ni3 hao3 02, tian1 qi4 zen3 me yang4 find shared memory(491520), saved:491520 kw0: 0.959; \tkw1: 0.000; \tkw2: 0.000; # xiao3 ai4 tong2 xue2 kw0: 0.000; \tkw1: 0.930; \tkw2: 0.000; # ni3 hao3 kw0: 0.000; \tkw1: 0.000; \tkw2: 0.961; # tian1 qi4 zen3 me yang4 ```"},"/maixpy/doc/en/audio/play.html":{"title":"MaixCAM MaixPy Playback Audio","content":" title: MaixCAM MaixPy Playback Audio update: date: 2024 05 20 author: lxowalle version: 1.0.0 content: Initial document ## Introduction This document provides instructions on how to play audio ## How to use ### Hardware operation ![image 20240520134637905](../../../static/image/maixcam_hardware_back.png) The `MaixCAM` does not have a built in speaker, so you will need to solder a `1W` speaker yourself. The pins for soldering the speaker are shown in the diagram above on the `VOP` and `VON` pins corresponding to the Speaker. Note: If the `MaixCAM` has copper posts attached to these pins, they can be soldered directly to the posts, or on the other side of the board for aesthetic reasons. ### Code #### Playing a `WAV` file ```python from maix import audio, time, app p audio.Player(\"/root/output.wav\") p.play() while not app.need_exit(): time.sleep_ms(10) print(\"play finish!\") ``` Steps: 1. Import the audio, time and app modules: ```python from maix import audio, time, app ``` 2. Initialize the player: ```python p audio.Player(\"/root/output.wav\") ``` Note that the default sample rate is 48k, the sample format is little endian format signed 16 bit, and the sample channel is 1. You can also customise the parameters like this `p audio.Player(sample_rate 48000, format audio.Format.FMT_S16_LE, channel 1)`. So far only tested with sample rate 48000, format `FMT_S16_LE`, and number of sampling channels 1. If it is a `.wav` file, the sample rate, sample format and sample channel are automatically obtained. 3. Playing audio ```python p.play() ``` This will block until all audio data is written, but not until all audio data is actually played. If you exit the programme after calling `play()`, some of the audio data to be played may be lost. 4. Done #### Playback with `PCM` data ```python from maix import audio, time, app p audio.Player() with open('/root/output.pcm', 'rb') as f: ctx f.read() p.play(bytes(ctx)) while not app.need_exit(): time.sleep_ms(10) print(\"play finish!\") ``` Steps: 1. Import the audio, time and app modules: ```python from maix import audio, time, app ``` 2. Initialize the player: ```python p audio.Player() ``` Note that the default sample rate is 48k, the sample format is little endian format signed 16 bit, and the sample channel is 1. You can also customise the parameters like this `p audio.Player(sample_rate 48000, format audio.Format.FMT_S16_LE, channel 1)`. So far only tested with sample rate 48000, format `FMT_S16_LE`, and number of sampling channels 1. 3. 
Open and playback a PCM file ```python with open('/root/output.pcm', 'rb') as f: ctx f.read() p.play(bytes(ctx)) while not app.need_exit(): time.sleep_ms(10) ``` `with open(‘xxx’,‘rb’) as f:` open file `xxx` and get file object `f` `ctx f.read()` reads the contents of the file into `ctx` `p.play(bytes(ctx))` plays the audio, `p` is the opened player object, `ctx` is the `PCM` data converted to type bytes `time.sleep_ms(10)` Here there is a loop to wait for the playback to complete, as the playback operation is performed asynchronously, and if the program exits early, then it may result in the audio not being played completely. 4. Done ### Other The `Player` and `Recorder` modules have some `bugs` to be worked out, make sure they are created before other modules (`Camera` module, `Display` module, etc.). For example: ```python # Create Player and Recorder first. p audio.Player() r audio.Recorder() # Then create the Camera c camera.Camera() ```"},"/maixpy/doc/en/audio/ai_classify.html":{"title":"MaixCAM MaixPy AI voice classify","content":" title: MaixCAM MaixPy AI voice classify TODO: To be completed. If you need it urgently, you can first port the model yourself or process the audio into a spectrogram using FFT, and then train an AI classification model based on the image representation."},"/maixpy/doc/en/peripheral/pwm.html":{"title":"Using PWM in MaixCAM MaixPy","content":"# Using PWM in MaixCAM MaixPy ## Introduction To use `PWM` in MaixPy (v4), first set the pin function to `PWM` using `pinmap`. Each `PWM` corresponds to a specific pin, as shown in the pin diagram of MaixCAM: ![](https://wiki.sipeed.com/hardware/zh/lichee/assets/RV_Nano/intro/RV_Nano_3.jpg) ![maixcam_pro_io](/static/image/maixcam_pro_io.png) We recommend using `PWM6` and `PWM7`. For `MaixCAM`, since `WiFi` uses all pins of `SDIO1`, `PWM4~9` can only be used alternatively with `WiFi`. > TODO: Provide a method to disable WiFi (requires disabling the WiFi driver in the system, which is quite complex) ## Using PWM to Control a Servo in MaixPy Here we take controlling a servo as an example, using `PWM7` and the `A19` pin of `MaixCAM`: ```python from maix import pwm, time, pinmap SERVO_PERIOD 50 # 50Hz 20ms SERVO_MIN_DUTY 2.5 # 2.5% > 0.5ms SERVO_MAX_DUTY 12.5 # 12.5% > 2.5ms # Use PWM7 pwm_id 7 # !! set pinmap to use PWM7 pinmap.set_pin_function(\"A19\", \"PWM7\") def angle_to_duty(percent): return (SERVO_MAX_DUTY SERVO_MIN_DUTY) * percent / 100.0 + SERVO_MIN_DUTY out pwm.PWM(pwm_id, freq SERVO_PERIOD, duty angle_to_duty(0), enable True) for i in range(100): out.duty(angle_to_duty(i)) time.sleep_ms(100) for i in range(100): out.duty(angle_to_duty(100 i)) time.sleep_ms(100) ``` This code controls the servo to rotate from the minimum angle to the maximum angle and then back to the minimum angle."},"/maixpy/doc/en/peripheral/wdt.html":{"title":"Using Watchdog Timer in MaixCAM MaixPy","content":"# Using Watchdog Timer in MaixCAM MaixPy ## Introduction To prevent program issues, a watchdog timer (WDT) is often used to automatically restart the system when the program encounters a problem. The principle is that there is a countdown timer that we need to periodically reset within the program logic (also called \"feeding the dog\"). If our program gets stuck and fails to reset the countdown timer, the hardware will trigger a system reboot when the timer reaches 0. 
## Using WDT in MaixPy

```python
from maix import wdt, app, time

w = wdt.WDT(0, 1000)

while not app.need_exit():
    w.feed()
    # Here the sleep stands in for our real task.
    # 200 ms is fine; if one iteration exceeds 1000 ms, the system will reset.
    time.sleep_ms(200)
```

This code sets up a watchdog timer that must be fed every 1000 ms. If the program fails to feed the watchdog within that period, the system resets."},"/maixpy/doc/en/peripheral/uart.html":{"title":"Introduction to Using MaixCAM MaixPy UART Serial Port","content":" title: Introduction to Using MaixCAM MaixPy UART Serial Port ## Introduction to Serial Ports A serial port is a communication method defined by both hardware and a communication protocol. * Hardware includes: * 3 pins: `GND`, `RX`, `TX`, connected crosswise for communication: one side's `TX` connects to the other side's `RX`, and both sides' `GND` pins are connected together. * A controller, usually inside the chip, also known as the `UART` peripheral. Generally, a chip has one or more `UART` controllers, each with corresponding pins. * Serial communication protocol: to ensure both parties communicate smoothly, a set of rules specifies how communication occurs, including common parameters such as baud rate and parity bit; baud rate is the most commonly used parameter. Using the board's serial port, you can exchange data with other microcontrollers or SoCs. For example, human detection can run on MaixCAM, and the detected coordinates can be sent to an STM32/Arduino microcontroller via the serial port. ## Using Serial Port in MaixPy MaixCAM's default configuration exposes a serial port through the USB port. By plugging in the Type C adapter board, you can use the serial port pins directly. Alternatively, you can use the `A16(TX)` and `A17(RX)` pins directly on the board, which are the same as those exposed via the USB port; refer to the IO interface image: ![](https://wiki.sipeed.com/hardware/zh/lichee/assets/RV_Nano/intro/RV_Nano_3.jpg) ![maixcam_pro_io](/static/image/maixcam_pro_io.png) When using the serial port exposed through USB on MaixCAM, note that the `RX` and `TX` pins on the Type C adapter board swap between normal and reversed insertion (assuming the **Type C female port is facing forward** and matches the silkscreen). If communication fails, try flipping the Type C connector to see whether that resolves the issue. Although this is a design flaw, frequent plugging and unplugging is rare, so adapting to it is acceptable. After connecting the two boards (cross-connecting `RX` and `TX` and joining both `GND` pins), you can communicate in software. Using the serial port with MaixPy is simple:

```python
from maix import uart

device = "/dev/ttyS0"
# ports = uart.list_devices()  # List available serial ports

serial = uart.UART(device, 115200)
serial.write_str("hello world")
print("received:", serial.read(timeout=2000))
```

Here we use the first serial port `/dev/ttyS0`, which is the serial port exposed via `Type C` mentioned above. More serial port APIs can be found in the [UART API documentation](../../../api/maix/peripheral/uart.html). ## MaixCAM Serial Port Usage Notes ### TX Pin Notes MaixCAM's `TX` (`UART0`) pin must not be pulled low during boot-up, or the device will fail to start. This is a characteristic of the chip.
If you are designing a 3.3v to 5v level shifting circuit, be sure not to default it to a pulled down state and keep it floating (consider using a level shifting chip). If the device fails to boot, also check whether the `TX` pin is pulled down. ## Connecting to a Computer via Serial Port Developers may ask: Why doesn't the serial port device appear on the computer when the USB is plugged in? The answer is that the USB on the device defaults to a virtual USB network card without serial port functionality. To access the device's terminal, use SSH connection. For MaixCAM, the `serial port 0` from the Type C adapter board is directly connected to the `A16(TX)` and `A17(RX)` pins. It can be connected directly to other devices, such as microcontrollers' serial port pins. To communicate with a computer, use a USB to serial converter board (such as [this one](https://item.taobao.com/item.htm?spm a1z10.5 c s.w4002 24984936573.13.73cc59d6AkB9bS&id 610365562537)). ## Boot Log Output It is important to note that **MaixCAM's `serial port 0` will output some boot logs during startup**. After startup, the message `serial ready` will be printed. When communicating with a microcontroller, discard this information. If there are system startup issues, the boot log from `serial port 0` can help diagnose the problem. ## Sending Data There are mainly two functions for sending data: `write_str` and `write`. The `write_str` function is used to send strings, while `write` is used to send byte streams, i.e., `str` and `bytes` types, which can be converted to each other. For example: * `\"A\"` can be converted to `b\"A\"` using the `encode()` method, and vice versa, `b\"A\"` can be converted back to `\"A\"` using the `decode()` method. * `str` cannot display some invisible characters, such as the ASCII value `0`, which is generally `\\0` in strings and serves as a terminator. In `bytes` type, it can be stored as `b\"\\x00\"`. * This is more useful for non ASCII encoded strings. For example, the Chinese character `好` in `UTF 8` encoding is represented by three bytes `\\xe5\\xa5\\xbd`. We can use `\"好\".encode(\"utf 8\")` to get `b\"\\xe5\\xa5\\xbd\"`, and `b'\\xe5\\xa5\\xbd'.decode(\"utf 8)` to get `\"好\"`. So if we need to send byte data, we can use the `write()` method to send it. For example: ```python bytes_content b'\\x01\\x02\\x03' serial.write(bytes_content) ``` Therefore, for the `str` type, you can use `serial.write(str_content.encode())` instead of `write_str` to send it. If you have other data types that you want to convert into a **string to send**, you can use `Python string formatting` to create a string. For example, to send `I have xxx apple`, where `xxx` is an integer variable, you can do: ```python num 10 content \"I have {} apple\".format(num) content2 f\"I have {num} apple\" content3 \"I have {:04d} apple\".format(num) content4 f\"I have {num:d} apple\" print(content) print(content2) print(content3) print(content4) print(type(content)) serial.write_str(content) ``` Additionally, you can encode the data into a **binary stream to send**. For example, the first 4 bytes are hexadecimal `AABBCCDD`, followed by an `int` type value, and finally a `0xFF` at the end. You can use `struct.pack` to encode it (if this is unclear, you can read the explanation later): ```python from struct import pack num 10 bytes_content b'\\xAA\\xBB\\xCC\\xDD' bytes_content + pack(\" Here, we use `i` to encode `int` type data as an example. Other types, such as `B` for `unsigned char`, etc., can also be used. 
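For clarity, here is a complete, self-contained sketch of the same idea. The frame layout (4 fixed header bytes, one little-endian `int`, and a `0xFF` tail byte) follows the description above, and `serial` is assumed to be an already opened `uart.UART` object:

```python
from struct import pack, unpack

# a minimal sketch: 4 fixed header bytes + one little-endian int + a 0xFF tail byte
num = 10
frame = bytes([0xAA, 0xBB, 0xCC, 0xDD])    # fixed header
frame += pack('<i', num)                   # '<i' means little-endian 4-byte signed int
frame += bytes([0xFF])                     # tail byte
print(frame.hex())                         # aabbccdd0a000000ff

# the receiver can recover the integer with unpack
value = unpack('<i', frame[4:8])[0]
print(value)                               # 10

# serial is assumed to be an opened uart.UART object as in the examples above
# serial.write(frame)
```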
More `struct.pack` formatting options can be searched online with `python struct pack`. In this way, the final data sent is `AA BB CC DD 0A 00 00 00 FF` as binary data. ## Receiving Data Use the `read` method to read data directly: ```python while not app.need_exit(): data serial.read() if data: print(data) time.sleep_ms(1) ``` Similarly, the data obtained by the `read` method is also of the `bytes` type. Here, `read` reads a batch of data sent by the other party. If there is no data, it returns `b''`, which is an empty byte. Here, `time.sleep_ms(1)` is used to sleep for `1ms`, which frees up the CPU so that this thread does not occupy all CPU resources. `1ms` does not affect the program's efficiency, especially in multithreading. In addition, the `read` function has two parameters: * `len`: Represents the maximum length you want to receive. The default is ` 1`, meaning it will return as much as there is in the buffer. If you pass a value `>0`, it means it will return data up to that length. * `timeout`: * The default `0` means it will return immediately with whatever data is in the buffer. If `len` is ` 1`, it returns all data; if a length is specified, it returns data not exceeding that length. * `<0` means it waits until data is received before returning. If ` len` is ` 1`, it waits until data is received and returns (blocking read for all data); if a length is specified, it waits until it reaches `len` before returning. * `>0` means it will return after this time, regardless of whether data is received. It may seem complex, but here are some common parameter combinations: * `read()`: Which is `read( 1, 0)`, reads the data received in the buffer, usually a batch of data sent by the other party. It returns immediately when the other party has stopped sending (within one character's sending time). * `read(len 1, timeout 1)`: Blocking read for a batch of data, waits for the other party to send data and returns only when there is no more data within one character's sending time. * `read(len 10, timeout 1000)`: Blocking read for 10 characters, returns when 10 characters are read or 1000ms has passed without receiving any data. ## Setting a Callback Function for Receiving Data In MCU development, a serial port interrupt event usually occurs when data is received. MaixPy has already handled the interrupt at the bottom layer, so developers don't need to handle the interrupt themselves. If you want to call a callback function upon receiving data, you can use `set_received_callback` to set the callback function: ```python from maix import uart, app, time def on_received(serial : uart.UART, data : bytes): print(\"received:\", data) # send back serial.write(data) device \"/dev/ttyS0\" serial uart.UART(device, 115200) serial.set_received_callback(on_received) serial0.write_str(\"hello\\r\\n\") print(\"sent hello\") print(\"wait data\") while not app.need_exit(): time.sleep_ms(100) # sleep to make CPU free ``` When data is received, the set callback function will be called in **another thread**. Since it's called in another thread, unlike an interrupt function, you don't have to exit the function quickly. You can handle some tasks in the callback function before exiting, but be aware of common multithreading issues. If you use the callback function method to receive data, do not use the `read` function to read it, or it will read incorrectly. ## Using Other Serial Ports Each pin may correspond to different peripheral functions, which is also known as pin multiplexing. 
As shown below, each pin corresponds to different functions. For example, pin `A17` (silkscreen identification on the board) corresponds to `GPIOA17`, `UART0_RX`, and `PWM5` functions. The default function is `UART0_RX`. ![](https://wiki.sipeed.com/hardware/zh/lichee/assets/RV_Nano/intro/RV_Nano_3.jpg) ![maixcam_pro_io](/static/image/maixcam_pro_io.png) By default, you can directly use `UART0` as shown above. For other serial port pins, they are not set to the serial peripheral function by default, so you need to set the mapping to use other serial ports. Use `pinmap.set_pin_function` to set it. Let's take `UART1` as an example. First, set the pin mapping to choose the serial port function, then use the device number `/dev/ttyS1`. Note that `uart.list_devices()` will not return manually mapped serial ports by default, so you can directly pass the parameters manually: ```python from maix import app, uart, pinmap, time pinmap.set_pin_function(\"A18\", \"UART1_RX\") pinmap.set_pin_function(\"A19\", \"UART1_TX\") device \"/dev/ttyS1\" serial1 uart.UART(device, 115200) ``` ## Application Layer Communication Protocol ### Concept and Character Protocol Serial ports only define the hardware communication timing. To let the receiver understand the meaning of the character stream sent by the sender, an application communication protocol is usually established. For example, if the sender needs to send coordinates containing two integer values `x, y`, the following protocol is established: * **Frame Header**: When I start sending the `$` symbol, it means I'm about to start sending valid data. > **Content**: Designing a start symbol is because serial communication is stream based. For example, sending `12345` twice may result in receiving `12345123` at some moment. The `45` from the second frame has not been received. We can determine a complete data frame based on start and end symbols. * The value range of `x, y` is 0~65535, i.e., an unsigned short integer (`unsigned short`). I'll first send `x` then `y`, separated by a comma, such as `10,20`. * **Frame Tail**: Finally, I'll send a `*` to indicate that I've finished sending this data. In this way, sending a data packet looks like `$10,20*` as a string. The other party can receive and parse it using C language: ```c // 1. Receive data // 2. Determine if the reception is complete based on the frame header and tail, and store the complete frame data in the buff array // 3. Parse a frame of data uint16_t x, y; sscanf(buff, \"$%d,%d*\", &x, &y); ``` Thus, we have defined a simple character communication protocol with a certain degree of reliability. However, since we usually use parameters like `115200 8 N 1` for serial ports, where `N` means no parity check, we can add a **checksum** to our protocol at the end. For example: * Here, we add a checksum value after `x, y`, ranging from 0 to 255. It is the sum of all previous characters modulo 255. * Taking `$10,20` as an example, in `Python`, you can simply use the `sum` function: `sum(b'$10,20') % 255 > 20`, and send `$10,20,20*`. * The receiver reads the checksum `20`, calculates it in the same way as `$10,20`, and if it is also `20`, it means no transmission error occurred. Otherwise, we assume a transmission error and discard the packet to wait for the next one. 
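To make the checksum rule concrete, the following sketch builds such a frame and verifies it on the receiving side (the helper names `make_frame` and `parse_frame` are only illustrative, they are not part of the MaixPy API):

```python
# a minimal sketch of the character protocol described above
# frame format: $x,y,checksum*  where checksum = (sum of the bytes before it) % 255
def make_frame(x, y):
    payload = '${},{}'.format(x, y)
    checksum = sum(payload.encode()) % 255
    return '{},{}*'.format(payload, checksum)

def parse_frame(frame):
    if not (frame.startswith('$') and frame.endswith('*')):
        return None                      # not a complete frame
    x, y, checksum = frame[1:-1].split(',')
    expected = sum('${},{}'.format(x, y).encode()) % 255
    if int(checksum) != expected:
        return None                      # transmission error, discard this frame
    return int(x), int(y)

frame = make_frame(10, 20)
print(frame)                             # $10,20,20*
print(parse_frame(frame))                # (10, 20)
```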
In MaixPy, encoding a character protocol can be done using Python's string formatting feature: ```python x 10 y 20 content \"${},{}*\".format(x, y) print(content) ``` ### Binary Communication Protocol The character protocol above has a clear characteristic of using visible characters to transmit data. The advantage is simplicity and human readability. However, it uses an inconsistent number of characters and larger data volumes. For example, `$10,20*` and `$1000,2000*` have varying lengths, with `1000` using 4 characters, which means 4 bytes. We know an unsigned short integer (`uint16`) can represent values ranging from `0~65535` using only two bytes. This reduces the transmission data. We also know visible characters can be converted to binary via ASCII tables, such as `$1000` being `0x24 0x31 0x30 0x30 0x30` in binary, requiring 5 bytes. If we directly encode `1000` in binary as `0x03E8`, we can send `0x24 0x03 0xE8` in just 3 bytes, reducing communication overhead. Additionally, `0x03E8` is a 2 byte representation with `0xE8` as the low byte, transmitted first in little endian encoding. The opposite is big endian encoding. Both are fine as long as both parties agree on one. In MaixPy, converting a number to bytes is simple with `struct.pack`. For example, `0x03E8` (decimal `1000`): ```python from struct import pack b pack(\"Communication Protocol`. The system settings may have other communication methods, such as `tcp`, with `uart` as the default. You can also use `maix.app.get_sys_config_kv(\"comm\", \"method\")` to check if `uart` is currently set. ```python from maix import comm, protocol, app from maix.err import Err import struct def encode_objs(objs): ''' encode objs info to bytes body for protocol 2B x(LE) + 2B y(LE) + 2B w(LE) + 2B h(LE) + 2B idx ... ''' body b\"\" for obj in objs: body + struct.pack(\" 0: body encode_objs(objs) p.report(APP_CMD_DETECT_RES, body) # ... ``` Here, the `encode_objs` function packages all detected object information into `bytes` type data, and the `p.report` function sends the result. The content of `body` is simply defined as `2B x(LE) + 2B y(LE) + 2B w(LE) + 2B h(LE) + 2B idx ...`, meaning: * In this image, multiple objects are detected and arranged in order in `body`. Each target takes up `2+2+2+2+2 10` bytes, with `body_len / 10` objects in total. * The 1st and 2nd bytes represent the `x` coordinate of the top left corner of the detected object, in pixels. Since the yolov5 result can have negative values for this coordinate, we use a `short` type to represent it, with little endian encoding (LE). > Little endian here means the low byte is in front. For example, if the `x` coordinate is `100`, hexadecimal `0x64`, we use a two byte `short` to represent it as `0x0064`. Little endian encoding puts `0x64` first, resulting in `b'\\x64\\x00'`. * Similarly, encode the subsequent data in sequence, resulting in `10` bytes of `bytes` type data for each object. * Iterate through and encode all object information into a single `bytes` string. When calling the `report` function, the protocol header, checksum, etc., are automatically added according to the protocol, allowing the other end to receive a complete data frame. On the other end, data should be decoded according to the protocol. 
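The `decode_objs` helper used in the receiving code below is not shown above. A possible sketch, assuming each object is packed as five little-endian 2-byte signed values (x, y, w, h, class index) as described by the body layout, could look like this:

```python
import struct

def decode_objs(body: bytes):
    '''Decode the body produced by encode_objs above.
       Every object occupies 10 bytes: five little-endian 2-byte signed values
       (x, y, w, h, class index).'''
    objs = []
    for i in range(0, len(body), 10):
        x, y, w, h, idx = struct.unpack('<hhhhh', body[i:i+10])
        objs.append((x, y, w, h, idx))
    return objs
```

If your encoder packs the class index as an unsigned value, change the last format character to `H` accordingly.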
If the receiving end is also using MaixPy, you can directly do: ```python while not app.need_exit(): msg p.get_msg() if msg and msg.is_report and msg.cmd APP_CMD_DETECT_RES: print(\"receive objs:\", decode_objs(msg.get_body())) p.resp_ok(msg.cmd, b'1') ``` If the other device is something like `STM32` or `Arduino`, you can refer to the C language functions in the appendix of the [Maix Serial Communication Protocol Standard](https://github.com/sipeed/MaixCDK/blob/master/docs/doc/convention/protocol.md) for encoding and decoding. ## Other Tutorials * [【MaixPy/MaixCAM】Visual Tool MaixCAM Beginner Tutorial 2](https://www.bilibili.com/video/BV1vcvweCEEe/?spm_id_from 333.337.search card.all.click) Watch the serial port explanation section * [How to Communicate via Serial Port between Visual Module and STM32](https://www.bilibili.com/video/BV175vWe5EfV/?spm_id_from 333.337.search card.all.click&vd_source 6c974e13f53439d17d6a092a499df304) * [[MaixCam] Experience 2: UART Serial Communication](https://blog.csdn.net/ButterflyBoy0/article/details/140577441) * For more, search online for resources."},"/maixpy/doc/en/peripheral/adc.html":{"title":"Using ADC in MaixCAM MaixPy","content":" title: Using ADC in MaixCAM MaixPy update: date: 2024 06 11 author: iawak9lkm version: 1.0.0 content: Initial document ## ADC Introduction An ADC, which can also be called an analog to digital converter, converts an input voltage signal into an output digital signal. As the ADC converted digital signal itself does not have practical significance, only represents a relative size. Therefore, any ADC needs a reference analog as a conversion standard, the reference standard is generally the largest convertible signal size. The digital output of the ADC indicates the size of the input signal relative to the reference signal. ADC peripherals generally have two main parameters: resolution and reference voltage. * Resolution: The resolution of an ADC is expressed as the number of bits in a binary (or decimal) number. It describes the ability of the A/D converter to discriminate the input signal. Generally speaking, an A/D converter with n bit output can distinguish 2^n different levels of input analog voltage, and the minimum value of input voltage that can be distinguished is 1/(2^n) of the full scale input. For a given maximum input voltage, the more output bits, the higher the resolution. * Reference Voltage: The ADC peripheral reference voltage is the voltage that is compared to a known voltage during AD conversion to find the value of the unknown voltage. The reference voltage can be thought of as the highest upper limit voltage and can be reduced to improve resolution when the signal voltage is low. With the board's ADC, it is possible to capture external voltages and have the board verify that the voltages are up to snuff or perform specific tasks when specific voltages are detected (e.g., the ADC detects multiple buttons). ## Using ADC in MaixPy Using ADC with MaixPy is easy: ```python from maix.peripheral import adc from maix import time a adc.ADC(0, adc.RES_BIT_12) raw_data a.read() print(f\"ADC raw data:{raw_data}\") time.sleep_ms(50) vol a.read_vol() print(f\"ADC vol:{vol}\") ``` Use ADC0 to read the raw conversion data from it, or read the voltage data directly from it. See the ADC [API documentation](../../../api/maix/peripheral/adc.html) for a detailed description of the ADC API. 
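For reference, the raw value and the voltage are related through the resolution and the full-scale (reference) voltage described above. A small sketch, where the 12-bit resolution matches the example and the full-scale value of `4.8 V` is only an assumed placeholder (see the divider-circuit notes in the next section for how to determine the real value):

```python
# relationship between a raw 12-bit reading and the measured voltage:
#   voltage = raw / 2**resolution_bits * full_scale_voltage
RESOLUTION_BITS = 12        # adc.RES_BIT_12, as in the example above
FULL_SCALE_V = 4.8          # assumed placeholder, see the divider-circuit notes below

def raw_to_voltage(raw):
    return raw / (2 ** RESOLUTION_BITS) * FULL_SCALE_V

print(raw_to_voltage(2048)) # about half of the full scale, roughly 2.4 V
```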
## Some notes on MaixCAM's ADC MaixCAM elicits an IO that connects to the ADC, this IO is GPIO B3(For MaixCAM Pro, B3 connected light LED, so ADC can't directly use). ![](https://wiki.sipeed.com/hardware/zh/lichee/assets/RV_Nano/intro/RV_Nano_3.jpg) This IO is ADC by default and does not require additional configuration. MaixCAM's ADC peripheral has a sampling accuracy of 12 bits, which means that the sampling output range is from 0 to 4095. The sampling accuracy is 1/4096 of the reference voltage. The MaixCAM's ADC peripheral cannot scan at a frequency higher than 320K/s, which is the reason for the additional wait time between ADC samples in the previous example. The MaixCAM's ADC peripheral has an internal reference voltage of 1.5V, which may vary slightly in actual use.Since the typical internal reference voltage is 1.5 V, the ADC range of Soc is 0 to 1.5 V. Since the ADC range of this range is small, MaixCAM has designed a voltage divider circuit for the ADC peripheral to increase the ADC range. The reference voltage Vin_max of this voltage divider circuit is about 4.6~5.0V, due to the error of resistor resistance in the circuit, the impedance of ADC external device, and the deviation of internal reference voltage. A higher precision default value has been chosen in the API, and there is generally no need to pass this parameter. ![](https://wiki.sipeed.com/hardware/zh/lichee/assets/RV_Nano/peripheral/adc.png) If you need high ADC accuracy, you can calculate the reference voltage for this voltage divider circuit by following the steps below: * You need to first measure to get the actual input voltage of ADC_PIN, which we call Vin. * Then you need to measure to get the actual input voltage at ADC1, which we call Vadc. The location of resistor R10 can be found in this BOM file. * You need to keep the same voltage input to ADC_PIN as in step 1 and then execute these commands in the shell: ```shell echo 1 > /sys/class/cvi saradc/cvi saradc0/device/cv_saradc cat /sys/class/cvi saradc/cvi saradc0/device/cv_saradc ``` This gives you the raw measured value of the ADC, which we call adc_data. * You need to know the resistance values of the resistors R6 and R10 in the picture, record them.Typically, the MaixCAM has a resistance value of 10KΩ (10 000Ω) for R6 and 5.1KΩ (5 100Ω) for R10. * Finally, you need to pass the results from the above steps to these python codes to get the range [0,Vin_max] of the ADC_PIN port. ```python def maixcam_get_vin_max(Vin:float, Vadc:float, adc_data:int, r6:int, r10:int, adc_max:int 4095): Vref (Vadc/adc_data)*(adc_max+1) r3 Vadc*r6/(Vin Vadc) Vin_max (Vref/r3)*(r6+r3) return Vin_max Vin 3.3\t\t# step 1 Vadc 1.06\t\t# step 2 adc_data 2700\t# step 3 r6 10000\t\t# step 4 r10 5100\t\t# step 4 if __name__ '__main__': print(maixcam_get_vin_max(Vin, Vadc, adc_data, r6, r10)) ``` Now pass the result to the third parameter of `adc.ADC()` and you will get a highly accurate ADC."},"/maixpy/doc/en/peripheral/hid.html":{"title":"Introduction to Using MaixCAM MaixPy HID Device","content":" title: Introduction to Using MaixCAM MaixPy HID Device ## 简介 MaixPy currently supports the use of keyboards, mice, and touchscreens, and the following is a guide on how to use maixPy to control your PC via HID. ## Preparation > MaixPy firmware version should be > 4.5.1. You must enable the HID device before operating HID, there are two ways: 1. 
Open the `Settings` application that comes with MaixCAM, click `USB Settings` in turn > tick the required HID devices, such as `Keyboard`, `Mouse`, `Touchscreen`, and then click `Confirm` , then restart MaixCAM. 2. Through the `Examples/tools/maixcam_switch_usb_mode.py` in MaixVision, modify the HID devices that need to be switched on in the `device_list`, run it and restart MaixCAM. Note: Since only 4 USB devices are supported, only 4 devices can be started at the same time among `ncm`, `rndis`, `keyboard`, `mouse`, `touchpad`, choose according to the actual demand, among them, `ncm` and `rndis` are the USB network protocol devices, you can turn them off if you don't need them, by default, they are turned on. ## Write a keyboard in MaixPy. You need to enable `HID Keyboard` to run it. The following example sends `rstuv` four characters through the keyboard and then releases the key. ```python from maix import hid, time keyboard hid.Hid(hid.DeviceType.DEVICE_KEYBOARD) # Refer to the `Universal Serial Bus HID Usage Tables` section of the [USB HID Documentation](https://www.usb.org) for key numbers. keys [21, 22, 23, 24, 25, 0] # means [r, s, t, u, v, 0], 0 means release key. for key in keys: keyboard.write([0, 0, key, 0, 0, 0, 0, 0]) ``` ## Write a mouse in MaixPy. You need to enable `HID Mouse` to run it. The following example moves the mouse 5 pixels every 100ms. ```python from maix import hid, time mouse hid.Hid(hid.DeviceType.DEVICE_MOUSE) button 0 # button state, 0 means release, 1 means left button pressed, 2 means right button pressed, 4 means wheel button pressed x_oft 0 # offset relative to current position, value range is 127~127 y_oft 0 # offset relative to current position, value range is 127~127 wheel_move 0 # The distance the wheel has moved, the range of values is 127~127 count 0 while True: x_oft + 5 y_oft + 5 mouse.write([button, x_oft, y_oft, wheel_move]) time.sleep_ms(100) count + 1 if count > 50: break ``` ## Write a touchpad in MaixPy. The `HID Touchpad` needs to be enabled to run. In the following example, move the touchscreen 150 units every 100ms. Note that the coordinate system of the touchscreen is absolute, not relative, and that you need to map the actual size of the screen to the interval [1, 0x7FFF], the coordinates (1,1) means the upper left corner, the coordinates (0x7FFF,0x7FFF) means the lower right corner. ```python from maix import hid, time touchpad hid.Hid(hid.DeviceType.DEVICE_TOUCHPAD) def touchpad_set(button, x_oft, y_oft, wheel_move): touchpad.write([button, # button state, 0 means release, 1 means left button pressed, 2 means right button pressed, 4 means wheel button pressed x_oft & 0xff, (x_oft >> 8) & 0xff, # Absolute position, the leftmost is 1, the rightmost is 0x7fff, 0 means no operation, the value range is 0 to 0x7fff. y_oft & 0xff, (y_oft >> 8) & 0xff, # Absolute position, the topmost is 1, the bottom is 0x7fff, 0 means no operation, the value range is 0 to 0x7fff wheel_move]) # wheel move distance, value range is 127~127 button 0 x_oft 0 y_oft 0 wheel_move 0 count 0 while True: x_oft + 150 y_oft + 150 touchpad_set(button, x_oft, y_oft, wheel_move) time.sleep_ms(100) count + 1 if count > 50: break ```"},"/maixpy/doc/en/peripheral/gpio.html":{"title":"MaixCAM MaixPy Using GPIO","content":"# MaixCAM MaixPy Using GPIO ## Introduction Using GPIO allows you to control pins for input or output high and low levels, which is commonly used to read signals or output control signals. **Note:** The pins on the `MaixCAM` are tolerant to `3.3V`. 
Do not input `5V` voltage. ## Using GPIO in MaixPy > MaixPy Firmware should > 4.1.2(not include) First, we need to know which pins and GPIOs the device has. For MaixCAM, each pin corresponds to a GPIO controller, as shown in the figure: ![](https://wiki.sipeed.com/hardware/zh/lichee/assets/RV_Nano/intro/RV_Nano_3.jpg) ![maixcam_pro_io](/static/image/maixcam_pro_io.png) It is important to note that pins can be used not only as GPIOs but also for other functions like PWM. Before using them, we need to set the pin function to GPIO. For example, on MaixCAM, **some pins are already occupied by other functions by default, such as UART0 and WiFi (SDIO1 + A26), so it is not recommended to use them.** Other pins can be used, and the A14 pin is connected to the onboard LED, which is used as a system load indicator by default. If initialized, it will automatically disable the system indicator function and can be used as a regular GPIO (note that `A14` can only be used as an output). This way, you can control the LED's on and off state. Here is the English translation of your text: The circuit diagram of the LED is shown in the figure. Therefore, we only need to provide a high signal to pin A14, and the LED will conduct and light up: ![](../../assets/gpio_led.png) ```python from maix import gpio, pinmap, time pinmap.set_pin_function(\"A14\", \"GPIOA14\") led gpio.GPIO(\"GPIOA14\", gpio.Mode.OUT) led.value(0) while 1: led.toggle() time.sleep_ms(500) ``` Here, we first use `pinmap` to set the function of the `A14` pin to `GPIO`. Of course, for `A14`, since it only has the `GPIO` function, it can be omitted. For the sake of generality, other pins may need to be set, so it is set in this example. For more APIs, please refer to the [GPIO API Documentation](https://wiki.sipeed.com/maixpy/api/maix/peripheral/gpio.html) ## GPIO in Input Mode ```python from maix import gpio, pinmap, time pinmap.set_pin_function(\"A19\", \"GPIOA19\") led gpio.GPIO(\"GPIOA19\", gpio.Mode.IN) while 1: print(led.value()) time.sleep_ms(1) # sleep to make cpu free ``` Here is the English translation of the text: ## MaixCAM Pro Uses Illumination LED Both MaixCAM and MaixCAM Pro have a small LED light connected to pin `A14`. Additionally, the MaixCAM Pro has an onboard illumination LED connected to pin `B3`, which is turned on by a high signal and off by a low signal: ```python from maix import gpio, pinmap, time pinmap.set_pin_function(\"B3\", \"GPIOB3\") led gpio.GPIO(\"GPIOB3\", gpio.Mode.OUT) led.value(0) while 1: led.toggle() time.sleep_ms(500) ```"},"/maixpy/doc/en/peripheral/spi.html":{"title":"Using SPI in MaixCAM MaixPy","content":" title: Using SPI in MaixCAM MaixPy update: date: 2024 06 11 author: iawak9lkm version: 1.0.0 content: Initial document ## SPI Introduction SPI (Serial Peripheral Interface) is a synchronous peripheral interface that enables the SoC to communicate serially with various peripheral devices to exchange information. Common peripherals are Flash RAM, network controllers, LCD display drivers, and A/D converters. SPI uses Master Slave mode, which supports one or more Slave devices. On a hardware circuit, SPI usually consists of 4 wires which are: * `MISO`(Master Output Slave Input): This pin sends data in slave mode or receives data in master mode. * `MOSI`(Master Input Slave Output): This pin sends data in master mode or receives data in slave mode. * `SCK`: Serial bus clock, output by the master device and input by the slave device. * `NSS/CS`: Slave Device Selection. 
It acts as a chip select pin, allowing the master device to communicate with specific slave devices individually, avoiding conflicts on the bus. In terms of communication protocols, SPI behavior is generally like this: * SPI supports one master device and multiple slave devices. When the master device needs to communicate with a specific slave device, it selects the CS pin connected to that slave device to enable this transfer.This means that a slave device has only one CS pin for the master device to select itself, and the number of chip select pins for the master device depends on how many slave devices are connected to its SPI bus. * SPI has four modes, depending on the configuration of polarity (CPOL) and phase (CPHA). Polarity affects the level of the clock signal when the SPI bus is idle. 1. CPOL 1, it indicates a high level at idle. 2. CPOL 0, it indicates a low level at idle. The phase determines the edge at which the SPI bus acquires data. There are two types of edges, rising edge and falling edge. 1. CPHA 0, it indicates that sampling starts from the first edge. 2. CPHA 1, it indicates that sampling starts from the second edge. Polarity and phase are combined to form the four modes of SPI: Mode CPOL CPHA 0 0 0 1 0 1 2 1 0 3 1 1 * SPI typically supports both full duplex transmission and half duplex transmission. * SPI does not specify a maximum transmission rate, it does not have an address scheme; SPI does not specify a communication response mechanism, it does not specify flow control rules. ## Using SPI in MaixPy This is the pinout of MaixCAM. ![](https://wiki.sipeed.com/hardware/zh/lichee/assets/RV_Nano/intro/RV_Nano_3.jpg) ![maixcam_pro_io](/static/image/maixcam_pro_io.png) You need to use `maix.peripheral.pinmap` to complete the pin mapping for SPI before use. **Note: The MaixCAM's SPI can only be used as an SPI master device. MaixCAM's SPI does not support modifying the valid level of the hardware CS pins at this time. The active level of all SPI hardware CS is low. If you need to use other CS active levels, configure the software CS pins and their active levels in the SPI API. SPI4 is the software simulated SPI, the measured maximum rate is 1.25MHz, and the usage is the same as hardware SPI.** Using SPI with MaixPy is easy: ```python from maix import spi, pinmap pin_function { \"A24\": \"SPI4_CS\", \"A23\": \"SPI4_MISO\", \"A25\": \"SPI4_MOSI\", \"A22\": \"SPI4_SCK\" } for pin, func in pin_function.items(): if 0 ! pinmap.set_pin_function(pin, func): print(f\"Failed: pin{pin}, func{func}\") exit( 1) spidev spi.SPI(4, spi.Mode.MASTER, 1250000) ### Example of full parameter passing. # spidev spi.SPI(id 4, # SPI ID # mode spi.Mode.MASTER, # SPI mode # freq 1250000, # SPI speed # polarity 0, # CPOL 0/1, default is 0 # phase 0, # CPHA 0/1, default is 0 # bits 8, # Bits of SPI, default is 8 # cs_enable True, # Use soft CS pin? True/False, default is False # cs 'GPIOA19') # Soft cs pin number, default is 'GPIOA19' b bytes(range(0, 8)) res spidev.write_read(b, len(b)) if res b: print(\"loopback test succeed\") else: print(\"loopback test failed\") print(f\"send:{b}\\nread:{res}\") ``` You need to connect the `MOSI` and `MISO` of this SPI first. Configure the required pins with `pinmap` and then enable full duplex communication, the return value will be equal to the sent value. 
See the [SPI API documentation]((../../../api/maix/peripheral/spi.md)) for a more detailed description of the SPI API."},"/maixpy/doc/en/peripheral/i2c.html":{"title":"Using I2C with MaixCAM MaixPy","content":" title: Using I2C with MaixCAM MaixPy > Note: Requires MaixPy image and firmware > 4.2.1 The `I2C` and corresponding pins of `MaixCAM` can be seen in the diagram: ![](https://wiki.sipeed.com/hardware/zh/lichee/assets/RV_Nano/intro/RV_Nano_3.jpg) ![maixcam_pro_io](/static/image/maixcam_pro_io.png) For MaixCAM, due to limited pin resources, the pins for `I2C1` and `I2C3` overlap with those of the WiFi module (SDIO1). Therefore, you can only use either WiFi or hardware I2C, but not both. Additionally, there is an `I2C5`, which is simulated by software at the lower driver level. It is recommended to use this one, as the drivers are already set up, and its use is the same as using hardware `I2C`. By default, the pins for `I2C5` are configured as `GPIO`. Therefore, before using the `i2c` module, you should first use the `pinmap` module to set the pin functions to `I2C5` as follows: ```python from maix import i2c, pinmap pinmap.set_pin_function(\"A15\", \"I2C5_SCL\") pinmap.set_pin_function(\"A27\", \"I2C5_SDA\") bus1 i2c.I2C(5, i2c.Mode.MASTER) slaves bus1.scan() print(\"find slaves:\", slaves) ``` For more APIs, see [i2c API documentation](https://wiki.sipeed.com/maixpy/api/maix/peripheral/i2c.html). As mentioned above, for the `MaixCAM`, you must choose between using hardware `I2C` and `WiFi`. If you need to use `I2C`, you must disable `WiFi` and use the `pinmap` module to set the pin functions for `I2C`, then operate using the `maix.i2c` module. > TODO: Provide a method to disable WiFi (requires disabling the WiFi driver in the system, which is more complex). ```python from maix import i2c, pinmap pinmap.set_pin_function(\"P18\", \"I2C1_SCL\") pinmap.set_pin_function(\"P21\", \"I2C1_SDA\") bus1 i2c.I2C(1, i2c.Mode.MASTER) slaves bus1.scan() print(\"find slaves:\", slaves) ```"},"/maixpy/doc/en/peripheral/pinmap.html":{"title":"Using PINMAP in MaixCAM MaixPy","content":" title: Using PINMAP in MaixCAM MaixPy update: date: 2024 06 11 author: iawak9lkm version: 1.0.0 content: Initial document ## Pinmap Introduction In System on Chip (SoC) design, a pin usually has more than one function, and this design method is called pin multiplexing. There are several main reasons for this: * It saves the number of SoC pins. SoCs integrate a large number of functional modules, such as CPUs, GPUs, memory controllers, I/O interfaces, communication modules, and so on. Assigning separate pins for each function would result in a very large number of pins being required, increasing the complexity and cost of the package. Through pin multiplexing, one pin can support different functions in different modes, thus significantly reducing the total number of pins. * It reduces the cost of chip packaging and manufacturing. Designers can choose smaller package sizes by reducing the number of pins, thus reducing packaging and manufacturing costs. Smaller packages not only reduce material costs, but also reduce the amount of space the chip takes up on the board, facilitating the design of more compact electronic products. * It improves design flexibility. Pin multiplexing provides greater design flexibility. Different combinations of pin functions may be required in different application scenarios, and different pin functions can be enabled according to specific needs through software configuration. 
For example, the same pin can be used as a UART interface in one practical application and an SPI bus interface in another. * It simplifies the PCB layout. Reducing the number of pins simplifies the layout design of a printed circuit board (PCB). Fewer pins mean fewer wiring layers and vias, which simplifies PCB design and reduces manufacturing challenges and costs. * Optimize performance. In some cases, signal paths and performance can be optimized by multiplexing pins. For example, by selecting the proper combination of pin functions, interference and noise in the signal transmission path can be reduced, improving the overall performance and reliability of the system. Pinmap displays and manages the individual pin configurations of the chip, which typically include the name of each pin and its function (usually multiple functions). We use the MaixCAM GPIO A28 as an example. * `A28` is the pin name. * `GPIOA28`/`UART2_TX`/`JTAG_TDI` are the functions supported by this pin as listed in the Soc manual, and the function of this pin at the same time can only be one of these three functions. With Pinmap, we can set the specified chip pin for the specified function. ## Using Pinmap in MaixPy The following diagram lists the pin numbers and their functions on the MaixCAM board. ![](https://wiki.sipeed.com/hardware/zh/lichee/assets/RV_Nano/intro/RV_Nano_3.jpg) ![maixcam_pro_io](/static/image/maixcam_pro_io.png) Or read the [SG2002 Chip Manual](https://cn.dl.sipeed.com/fileList/LICHEE/LicheeRV_Nano/07_Datasheet/SG2002_Preliminary_Datasheet_V1.0 alpha_CN.pdf) Pinmux section for the remaining pin numbers and functions. It's actually quite easy to use Pinmap to manage pin functions through MaixPy. ```python from maix.peripheral import pinmap print(pinmap.get_pins()) f pinmap.get_pin_functions(\"A28\") print(f\"GPIO A28 pin functions:{f}\") print(f\"Set GPIO A28 to {f[0]} function\") pinmap.set_pin_function(\"A28\", f[0]) ``` In the example, we start by listing all the pins available for management. Then we query `GPIO A28` for all the functions available. Finally the function of the pin is set to the first function listed (GPIO). For a more detailed description of the Pinmap API, see the [Pinmap API documentation](../../../api/maix/peripheral/pinmap.html)."},"/maixpy/doc/en/pro/compile_os.html":{"title":"Compiling a System for MaixCAM MaixPy","content":" title: Compiling a System for MaixCAM MaixPy ## Why Customize the System? Typically, you can download the latest system for MaixCAM directly from [this link](https://github.com/sipeed/MaixPy/releases). However, there are some scenarios where you might need to customize the system: * For example, if you are mass producing 1,000 products and want each to have your own application that automatically starts on boot, without configuring each one individually, you can modify the `builtin_files` and package a system. Once this system is flashed onto the boards, they will all include your custom files, eliminating the need to copy them again after booting. * If the official system does not include the software packages or drivers you need, you can compile your own system and select the packages you want to include. ## Obtaining the Base System The principle is to use a system from [this link](https://github.com/sipeed/LicheeRV Nano Build/releases) as the base (note that this system cannot be directly flashed onto MaixCAM as it may damage the screen), then copy the MaixCAM specific files into the base system and repackage it into a system usable by MaixCAM. 
If you don't need to customize the base system, you can directly download the latest system image from [here](https://github.com/sipeed/LicheeRV Nano Build/releases). If the base system doesn't meet your requirements, such as needing to add or remove some software packages and drivers, follow the instructions in the [LicheeRV Nano Build repository](https://github.com/sipeed/LicheeRV Nano Build) README to compile the system. It's recommended to use Docker for compilation to avoid environment issues and to use `bash` instead of `zsh`. Remember, the compiled system should not be flashed directly onto MaixCAM, as it might damage the screen. ## Copying Files for MaixCAM Prepare the following: * The base system, which is a `.img` or `.img.xz` file. * Additional files for MaixCAM can be downloaded from the [MaixPy release page](https://github.com/sipeed/MaixPy/releases). Download the latest `builtin_files.tar.xz`. > If you need to add custom files to the system, you can extract the files and add them to the appropriate directory. For example, if you want a `cat.jpg` file to be in the `/root` directory after flashing, simply place `cat.jpg` in the `root` directory. * Download or clone the MaixPy source code locally. * Compile MaixPy to obtain the `.whl` installation package, or you can download the latest installation package from the [MaixPy release page](https://github.com/sipeed/MaixPy/releases). In the `MaixPy/tools/os` directory, run the following command: ```shell ./gen_os.sh [skip_build_apps] ``` Here’s what each parameter means: * **base_os_filepath**: The path to the base system, in `.img` or `.img.xz` format. * **maixpy_whl_filepath**: The MaixPy package, in `.whl` format. * **builtin_files_dir_path**: The custom files for MaixCAM, which can be downloaded from the MaixPy release page. * **os_version_str**: The system version, which should follow a format like `maixcam 2024 08 16 maixpy v4.4.21`. * **skip_build_apps**: Skip compiling built in applications, optional arg. Set to 1 to skip, no this arg it will compile and copy apps from MaixCDK and MaixPy into the system. Example command: ```shell ./gen_os.sh '/home/xxx/.../LicheeRV Nano Build/install/soc_sg2002_licheervnano_sd/images/2024 08 13 14 43 0de38f.img' ../../dist/MaixPy 4.4.21 py3 none any.whl '/home/xxx/.../sys_builtin_files' maixcam 2024 08 15 maixpy v4.4.21 ``` After waiting for the built in apps to compile and copy, you should find a `maixcam 2024 08 15 maixpy v4.4.21.img.xz` system image in the `MaixPy/tools/os/tmp` directory."},"/maixpy/doc/en/gui/i18n.html":{"title":"MaixPy MaixCAM i18n (Internationalization) Multi-Language Implementation","content":" title: MaixPy MaixCAM i18n (Internationalization) Multi Language Implementation ## Introduction to i18n (Internationalization) i18n is an abbreviation for internationalization, which aims to switch languages according to the user's region or preference. Commonly used languages, such as Chinese and English, have corresponding region codes (LCID). For example, the region code for Chinese is `zh`, English is `en`, and Japanese is `ja`. There are also secondary region codes, like Simplified Chinese corresponding to `zh cn`. Generally, implementing `zh` is sufficient. For region codes, you can refer to [Windows Locale Codes](https://www.science.co.il/language/Locale codes.php) or check [Wikipedia](https://en.wikipedia.org/wiki/Language_localisation). 
## Using i18n in MaixPy MaixCAM The general user process is as follows: * Initially, users can select the system language in the system settings, with the factory default being `en` (English). * Then, the program can get the current system locale using `maix.i18n.get_locale()`. * The program displays the corresponding language strings based on the system locale. For applications, the tricky part is the third step, which involves looking up the corresponding strings based on the locale settings. Here are two methods to achieve this, depending on your needs: ### Using a Dictionary Directly Without Translation Files If your program only has a few strings, you can manually specify the translation dictionary: ```python from maix import i18n trans_dict { \"zh\": { \"hello\": \"你好\" }, \"en\": { } } trans i18n.Trans(trans_dict) tr trans.tr trans.set_locale(\"zh\") print(tr(\"hello\")) print(tr(\"my friend\")) ``` Here, `trans.set_locale(\"zh\")` temporarily sets the language to Chinese. Running this will print `你好` and `my friend`, since there is no translation for `my friend`, it returns as is. ### Automatically Scanning and Generating a Dictionary, and Loading from Translation Files This method is more suitable for scenarios with many strings to translate. In the previous method, we manually specified string translations, which is convenient for simple scenarios. However, if there are too many strings, manually editing the dictionary can easily result in omissions. Therefore, we need the program to automatically find the strings that need translation and generate translation files, which we only need to translate. In MaixPy, the `maix.i18n.Trans` class is provided to load translation files in multiple languages. By calling its `tr()` function and passing in the text to be translated, you can get the translation. For example: ```python from maix import i18n, err trans i18n.Trans() tr trans.tr e trans.load(\"locales\") err.check_raise(e, \"load translation yamls failed\") print(tr(\"hello\")) ``` Here, the translation files are loaded from the `locales` folder in the current directory, and the system prints `hello` according to the language settings, such as `你好` for Chinese. **Translation Files**: Since translation files are used here, how are these files created? First, we need to know which text needs translation, which are the strings called by the `tr` function. So we just need to search for all strings that use the `tr` function in the source code to find all the strings that need translation. The usage process is as follows: * Create a project folder to store the code entry `main.py`, and open this project folder with `MaixVision` for easy operation. * Write `main.py`, using the `tr` function to call the strings that need translation. * MaixPy provides a scanning tool. First, make sure `maixtool` is installed (`pip install maixtool U` on the computer terminal to install or upgrade). * Then, in the directory, use the computer terminal to execute `maixtool i18n d . r` to scan for strings that need translation and generate a `locales` directory containing translation files for Chinese and English. For more languages, execute `maixtool i18n h` for help. * The generated files are key value pairs, for example, in `zh.yaml`, `hello: hello` means the Chinese translation of `hello` is `hello`. This is incorrect and needs manual translation, changing `hello: hello` to `hello: 你好`. 
Make sure to use a text editor that supports `UTF 8` encoding, especially on Windows, avoid changing the file to `GBK` encoding to prevent errors. You can use MaixVision or VsCode for editing. * Then run the project, or package the project into an installation package, remember to include the `locales` directory. * If the source code is updated later, execute the `maixtool` command again to update the files. It will update the previously translated files. If you are worried about accidental overwriting, you can back up the files first and then delete the backup after confirming everything is correct. This way, your program will change the language according to the system settings. You can also manually call `trans.set_locale(\"zh\")` to temporarily switch the language for debugging. ## Displaying Translations on the Interface The previous examples used the `print` function to display translations. If you want to display them on the interface, you need font support. For English, it is supported by default, but for languages with large font libraries like Chinese, it is not supported by default. For example: ```python from maix import i18n, image, display, app, time trans_dict { \"zh\": { \"hello\": \"你好\" }, \"en\": { } } trans i18n.Trans(trans_dict) tr trans.tr trans.set_locale(\"zh\") disp display.Display() img image.Image(disp.width(), disp.height()) img.draw_string(10, 10, tr(\"hello\"), image.COLOR_WHITE, scale 2) disp.show(img) while not app.need_exit(): time.sleep_ms(100) ``` Running this will show a bunch of `?` because there is no Chinese font library. For the `image` module, you can load a font library. The system has a built in Chinese font library, or you can use your own font library: ```python from maix import i18n, image, display, app, time trans_dict { \"zh\": { \"hello\": \"你好\" }, \"en\": { } } trans i18n.Trans(trans_dict) tr trans.tr trans.set_locale(\"zh\") disp display.Display() image.load_font(\"sourcehansans\", \"/maixapp/share/font/SourceHanSansCN Regular.otf\", size 24) image.set_default_font(\"sourcehansans\") img image.Image(disp.width(), disp.height()) img.draw_string(10, 10, tr(\"hello\"), image.COLOR_WHITE, scale 2) disp.show(img) while not app.need_exit(): time.sleep_ms(100) ```"},"/maixpy/doc/en/index.html":{"title":"MaixCAM MaixPy Quick Start","content":" title: MaixCAM MaixPy Quick Start
| Resource Summary | Link |
| :---: | :---: |
| Tutorial Documentation 📖 | [wiki.sipeed.com/maixpy/en/](https://wiki.sipeed.com/maixpy/en/) |
| Examples and Source Code | [github.com/sipeed/MaixPy](https://github.com/sipeed/MaixPy) |
| MaixCAM Hardware 📷 | [wiki.sipeed.com/maixcam](https://wiki.sipeed.com/maixcam) / [wiki.sipeed.com/maixcam-pro](https://wiki.sipeed.com/maixcam-pro) |
| API Documentation 📚 | [wiki.sipeed.com/maixpy/api/](https://wiki.sipeed.com/maixpy/api/index.html) |
| MaixHub App Store 📦 | [maixhub.com/app](https://maixhub.com/app) |
| MaixHub Sharing Square 🎲 | [maixhub.com/share](https://maixhub.com/share) |

    > For an introduction to MaixPy, please see the [MaixPy official website homepage](../../index.html) > Please give the [MaixPy project](https://github.com/sipeed/MaixPy) a Star ⭐️ to encourage us to develop more features if you like MaixPy. ## Before Start * Please **carefully** follow the steps outlined in this document. Do not skip any sections, and compare your actions accordingly. * **Pay close attention** to the table of contents on the left. Be sure to read through the basic sections thoroughly and patiently. * **Before asking questions**, first search the documentation in the left hand table of contents and review the [FAQ](./faq.html). * This document is the `MaixPy v4 Tutorial`. Be mindful not to confuse it with the [MaixPy v1](https://wiki.sipeed.com/soft/maixpy/zh/index.html) (K210 series), and ensure you are referring to the correct documentation. ## Get a MaixCAM Device ![maixcam_pro](../../static/image/maixcam_pro.png) * **MaixCAM**: Purchase the MaixCAM development board from the [Sipeed Taobao](https://item.taobao.com/item.htm?id 784724795837) or [Sipeed AliExpress](https://www.aliexpress.com/store/911876460) store. * **MaixCAM Pro**: Purchase the MaixCAM development board from the [Sipeed Taobao](https://item.taobao.com/item.htm?id 846226367137) or [Sipeed AliExpress](https://www.aliexpress.com/store/911876460) store. **It is recommended to purchase the bundle with a `TF card`, `camera`, `2.3 inch touchscreen`, `case`, `Type C data cable`, `Type C one to two mini board`, and `4P serial port socket+cable`**, which will be convenient for later use and development. **The following tutorials assume that you already have these accessories** (including the screen). **It is highly recommended to purchase a package that includes a screen, as it greatly enhances the development experience.** If you do not need a screen for actual deployment in a production environment, you can start with a screen included kit for initial development, and then either remove the screen or purchase a screenless version for mass production later on. * **Power Supply**: A stable power supply is crucial. MaixCAM requires a steady `5V 500mA` power supply. Insufficient power can result in failure to boot or crashes during operation. This is especially true for some computer USB ports, which may provide unstable power. * **TF Card Reader**: Used for flashing the system, essential. * **USB to serial port module**: If you want to debug serial communication with PC, it is recommended to prepare one. You can buy any one from Taobao or buy them together at Sipeed store, such as this [dual serial port to USB module](https://item.taobao.com/item.htm?spm a1z10.5 c s.w4002 24984936573.13.73cc59d6AkB9bS&id 610365562537). >! Note that currently only the MaixCAM development board is supported. Other development boards with the same chip are not supported, including Sipeed's development boards with the same chip. Please be careful not to purchase the wrong board, which could result in unnecessary waste of time and money. ## For no screen devies If you use screenless version, please refer to the [Quick Start (Screenless Version)](./README_no_screen.html) document. ## Getting Started ### Prepare the TF Image Card and Insert it into the Device If the package you purchased includes a TF card, it already contains the factory image. If the TF card was not installed in the device at the factory, you will first need to carefully open the case (be careful not to tear the ribbon cables inside) and then insert the TF card. 
Additionally, since the firmware from the factory may be outdated, it is highly recommended to follow the instructions on [Upgrading and Flashing the System](./basic/os.html) to upgrade the system to the latest version. If you did not purchase a TF card, you need to flash the system onto a self provided TF card. Please refer to [Upgrading and Flashing the System](./basic/os.html) for the flashing method, and then install it on the board. ### Power On Use a `Type C` data cable to connect the `MaixCAM` device and power it on. Wait for the device to boot up and enter the function selection interface. ![maixcam_font](../../static/image/maixcam_font.png) If the screen does not display: * Please confirm that you purchased the bundled TF card. If you confirm that you have a TF card and it is inserted into the device, you can try [updating to the latest system](./basic/os.html). * If you did not purchase the TF card bundle, you need to follow the instructions in [Upgrading and Flashing the System](./basic/os.html) to flash the latest system onto the TF card. * Also, ensure that the screen and camera cables are not loose. The screen cable can easily come off when opening the case, so be careful. ### Connect to the Network For the first run, you need to connect to the network, as you will need it later to activate the device and use the IDE. If you don't have a router, you can use your phone to open a hotspot. Click `Settings` on the device and select `WiFi`. There are two ways to connect to the `WiFi` hotspot: * Scan the WiFi sharing code: * Use your phone to share the `WiFi` hotspot QR code, or go to [maixhub.com/wifi](https://maixhub.com/wifi) to generate a QR code. * Click the `Scan QR code` button, the camera screen will appear, scan the QR code generated previously to connect. * Search for hotspots: * Click the `Scan` button to start scanning the surrounding `WiFi`, you can click multiple times to refresh the list. * Find your WiFi hotspot. * Enter the password and click the `Connect` button to connect. Then wait for the `IP` address to be obtained, which may take `10` to `30` seconds. If the interface does not refresh, you can exit the `WiFi` function and re enter to view it, or you can also see the `IP` information in `Settings` > `Device Information`. ### Update the Runtime Libraries **This step is very important!!!** If this step is not done properly, other applications and functions may not work (e.g., they may crash). * First, ensure that you have completed the previous step of connecting to WiFi and have obtained an IP address to access the internet. * On the device, click `Settings`, and select `Install Runtime Libraries`. * After the installation is complete, you will see that it has been updated to the latest version. Then exit. If it shows `Request failed` or `请求失败` (Request failed), please first check if the network is connected. You need to be able to connect to the internet. If it still doesn't work, please take a photo and contact customer service for assistance. ### Use Built in Applications Many applications are built in, such as Find Blobs, AI Detector, Line Follower, etc. For example, Find Blobs: Please explore other applications on your own. More applications will be updated in the future. For usage documentation and application updates, please see the [MaixHub App Store](https://maixhub.com/app). **Note: The applications only include a part of the functionality that MaixPy can achieve. 
Using MaixPy, you can create even more features.** ## Use as a Serial Module > If you want to use the device as the main controller (or if you don't understand what a serial module is), you can skip this step. The built in applications can be used directly as serial modules, such as `Find Blobs`, `Find Faces`, `Find QR Codes`, etc. Note that the serial port can only directly connect to other microcontrollers. **If you want to communicate with a computer via a serial port, you must provide a USB to serial module yourself.** Usage: * Hardware connection: You can connect the device to the `Type C one to two mini board`(For MaixCAM Pro is 6Pin interface), which allows you to connect the device via serial to your main controller, such as `Arduino`, `Raspberry Pi`, `STM32`, etc. * Open the application you want to use, such as QR code recognition. When the device scans a QR code, it will send the result to your main controller via serial. > The serial baud rate is `115200`, the data format is `8N1`, and the protocol follows the [Maix Serial Communication Protocol Standard](https://github.com/sipeed/MaixCDK/blob/master/docs/doc/convention/protocol.md). You can find the corresponding application introduction on the [MaixHub APP](https://maixhub.com/app) to view the protocol. > If APP no serial output, you can also do it by yourself, follow function examples and [UART usage doc](./peripheral/uart.html) to add function and serial output. ## Preparing to Connect Computer and Device To enable communication between the computer (PC) and the device (MaixCAM), we need to ensure they are on the same local area network. There are two methods to achieve this: * **Method 1 (Highly Recommended)**: Wireless Connection. Connect the device to the same router or Wi Fi hotspot that the computer is connected to via Wi Fi. Go to the device's `Settings > WiFi Settings` and connect to your Wi Fi. (If you experience **screen lag or high latency** with Wi Fi, you can try Method 2 for a wired connection.) Here is the translation: * **Method Two**: Wired Connection. The device connects to the computer via a USB cable, and the device will emulate as a USB network adapter. This way, the device and the computer will be on the same local network through the USB connection. It is recommended to start with WiFi because although a wired connection offers stable transmission, it may encounter issues such as faulty cables, poor connection, or driver problems. If you encounter any issues, you can refer to the common problems in the [FAQ](./faq.html). .. details::Method Two: Driver Installation on Different Computer Systems: :open: true By default, there are two types of USB virtual network adapter drivers (NCM and RNDIS drivers) to meet the needs of different systems. You can also disable the unused virtual network adapter on the device under `Settings` > `USB Settings`: * **Windows**: All Windows systems will automatically install the RNDIS driver, while only Windows 11 will automatically install the NCM driver. As long as **one of the drivers works**, it is sufficient. * Open Task Manager > Performance, and you should see a virtual Ethernet with an IP address such as `10.131.167.100`, which is the computer's IP address. The device's IP address is the same but with the last digit changed to `1`, i.e., `10.131.167.1`. If you are using Windows 11, you will see two virtual network adapters; you can use either IP address. * Additionally, you can open `Device Manager` (search for `Device Manager` in the search bar). 
The RNDIS and NCM drivers should be correctly installed, as shown below: ![RNDIS ok](../../static/image/windows_rndis_ok.png) ![NCM ok](../../static/image/windows_ncm_ok.png) * **Linux**: No additional setup is required. Simply plug in the USB cable. Use `ifconfig` or `ip addr` to see the `usb0` and `usb1` network interfaces, and either IP address can be used. **Note**: The IP address you see, such as `10.131.167.100`, is the computer's IP address, and the device's IP address is the same but with the last digit changed to `1`, i.e., `10.131.167.1`. * **MacOS**: Check for the `usb` network adapter under `System Settings` > `Network`. **Note**: The IP address you see, such as `10.131.167.100`, is the computer's IP address, and the device's IP address is the same but with the last digit changed to `1`, i.e., `10.131.167.1`. ## Preparing the Development Environment * First, ensure that the computer and the device are on the same local network as per the previous step. * Download and install [MaixVision](https://wiki.sipeed.com/maixvision). * Connect the device and the computer using a Type C cable. Open MaixVision, click the `“Connect”` button in the lower left corner, and it will automatically search for the device. Wait for a moment until the device appears, then click the connection button next to the device to connect. If **no device is detected**, you can also manually enter the device's IP address in the **device**'s `Settings > Device Info`. You can also find solutions in the [FAQ](./faq.html). **After a successful connection, the function selection interface on the device will disappear, and the screen will turn black, releasing all hardware resources. If there is still an image displayed, you can disconnect and reconnect.** Here is a video example of using MaixVision: ## Run Examples Click `Example Code` on the left side of MaixVision, select an example, and click the `Run` button in the bottom left to send the code to the device for execution. For example: * `hello_maix.py`: Click the `Run` button, and you will see messages printed from the device in the MaixVision terminal, as well as an image in the upper right corner. * `camera_display.py`: This example will open the camera and display the camera view on the screen. ```python from maix import camera, display, app disp display.Display() # Construct a display object and initialize the screen cam camera.Camera(640, 480) # Construct a camera object, manually set the resolution to 640x480, and initialize the camera while not app.need_exit(): # Keep looping until the program exits (you can exit by pressing the function key on the device or clicking the stop button in MaixVision) img cam.read() # Read the camera view and save it to the variable img, you can print(img) to print the details of img disp.show(img) # Display img on the screen ``` * `yolov5.py` will detect objects in the camera view, draw bounding boxes around them, and display them on the screen. It supports detection of 80 object types. For more details, please see [YOLOv5 Object Detection](./vision/yolov5.html). You can try other examples on your own. > If you encounter image display stuttering when using the camera examples, it may be due to poor network connectivity, or the quality of the USB cable or the host's USB being too poor. You can try changing the connection method or replacing the cable, host USB port, or computer. ## Install Applications on the Device The above examples run code on the device, but the code will stop running when `MaixVision` is disconnected. 
If you want the code to appear in the boot menu, you can package it as an application and install it on the device. Click the `Install App` button in the bottom left corner of `MaixVision`, fill in the application information, and the application will be installed on the device. Then you will be able to see the application on the device. You can also choose to package the application and share your application to the [MaixHub App Store](https://maixhub.com/app). > The default examples do not explicitly write an exit function, so you can exit the application by pressing the function key on the device. (For MaixCAM, it is the user key.) If you want the program to start automatically on boot, you can set it in `Settings > Boot Startup`. More MaixVision usage refer to [MaixVision documentation](./basic/maixvision.html)。 ## Next Steps If you like what you've seen so far, **please be sure to give the MaixPy open source project a star on [GitHub](https://github.com/sipeed/MaixPy) (you need to log in to GitHub first). Your star and recognition is the motivation for us to continue maintaining and adding new features!** Up to this point, you've experienced the usage and development workflow. Next, you can learn about `MaixPy` syntax and related features. Please follow the left sidebar to learn. If you have any questions about using the API, you can look it up in the [API documentation](/api/). It's best to learn with a specific purpose in mind, such as working on an interesting small project. This way, the learning effect will be better. You can share your projects and experiences on the [MaixHub Share Plaza](https://maixhub.com/share) and receive cash rewards! ## Frequently Asked Questions (FAQ) If you encounter any problems, please check the [FAQ](./faq.html) first. If you cannot find a solution there, you can ask in the forums or groups below, or submit a source code issue on [MaixPy issue](https://github.com/sipeed/MaixPy/issues). ## Share and Discuss * **[MaixHub Project and Experience Sharing](https://maixhub.com/share)**: Share your projects and experiences, and receive cash rewards. The basic requirements for receiving official rewards are: * **Reproducible**: A relatively complete process for reproducing the project. * **Showcase**: No detailed project reproduction process, but an attractive project demonstration. * **Bug solving experience**: Sharing the process and specific solution for resolving a particular issue. * [MaixPy Official Forum](https://maixhub.com/discussion/maixpy) (for asking questions and discussion) * Telegram: [MaixPy](https://t.me/maixpy) * MaixPy Source Code Issues: [MaixPy issue](https://github.com/sipeed/MaixPy/issues) * For business cooperation or bulk purchases, please contact support@sipeed.com."},"/maixpy/doc/en/network/socket.html":{"title":"Using Socket for TCP/UDP Communication with MaixPy MaixCAM","content":" title: Using Socket for TCP/UDP Communication with MaixPy MaixCAM ## Introduction to Sockets Sockets are software abstractions for TCP/UDP communication. Through socket interfaces, we can perform TCP/UDP communication. Since MaixPy is based on Python, we can directly use the built in `socket` library for communication. For more documentation and tutorials, please search online. Here, we introduce simple usage methods. With these example codes, you can perform basic TCP and UDP communication on MaixPy MaixCAM. Remember to modify the IP address and port number according to your actual situation. 
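If you are unsure which IP address the device currently has, the short sketch below prints it using the same `maix.network.wifi` API shown in the WiFi settings page of these docs; it is only a convenience check before running the socket examples, not part of them.

```python
# Minimal sketch: print the device's current IP address so you know what to
# fill in for the socket examples below (same maix.network API as the WiFi settings doc).
from maix import network

w = network.wifi.Wifi()
print('device IP:', w.get_ip())
```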
## Socket TCP Client

This example requests a TCP server, sends a message, waits for a response, and then closes the connection.

```python
import socket

def tcp_client(ip, port):
    client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server_address = (ip, port)
    client_socket.connect(server_address)
    try:
        # Send data to the server
        message = 'Hello, Server!'
        print(\"Send:\", message)
        client_socket.sendall(message.encode('utf-8'))
        # Receive the server's response
        data = client_socket.recv(1024)
        print('Received:', data.decode('utf-8'))
    finally:
        # Close the connection
        client_socket.close()

if __name__ == \"__main__\":
    tcp_client(\"10.228.104.1\", 8080)
```

## Socket TCP Server

This example creates a socket server that continuously waits for client connections. Once a client connects, a thread is created to communicate with it, reading the client's message and echoing it back.

```python
import socket
import threading

local_ip = \"0.0.0.0\"
local_port = 8080

def receiveThread(conn, addr):
    while True:
        print('Reading...')
        client_data = conn.recv(1024)
        if not client_data:
            break
        print(client_data)
        conn.sendall(client_data)
    print(f\"Client {addr} disconnected\")

ip_port = (local_ip, local_port)
sk = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sk.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sk.bind(ip_port)
sk.listen(50)
print(\"Waiting for clients...\")
while True:
    conn, addr = sk.accept()
    print(f\"Client {addr} connected\")
    # Create a new thread to communicate with this client
    t = threading.Thread(target=receiveThread, args=(conn, addr))
    t.daemon = True
    t.start()
```

## Socket UDP Client

```python
import socket

def udp_send(ip, port):
    # Create a socket object
    udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # Define the server's IP address and port number
    server_address = (ip, port)
    try:
        # Send data to the server
        message = 'Hello, Server!'
        udp_socket.sendto(message.encode('utf-8'), server_address)
    finally:
        # Close the connection
        udp_socket.close()

# Call the function
udp_send(\"10.228.104.1\", 8080)
```

## Socket UDP Server

```python
import socket

def udp_receive(ip, port):
    # Create a socket object
    udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # Define the local IP address and port number to listen on
    server_address = (ip, port)
    # Bind the port
    udp_socket.bind(server_address)
    print('Waiting for a message...')
    while True:
        data, address = udp_socket.recvfrom(1024)
        print('Received:', data.decode('utf-8'))
        print('From:', address)
    # Close the connection (not reached while the loop above runs)
    udp_socket.close()

# Call the function
udp_receive('0.0.0.0', 8080)
```
"},"/maixpy/doc/en/network/websocket.html":{"title":"Using WebSocket with MaixPy MaixCAM","content":" title: Using WebSocket with MaixPy MaixCAM

## Introduction

Similar to sockets, WebSocket enables long-lived communication connections and supports communication with web pages. Since MaixPy is based on Python, you can use the commonly available Python `websockets` and `asyncio` modules. For more detailed information, please refer to the documentation and tutorials available online.
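If the `websockets` package happens to be missing on an older system image (newer images generally ship it, so this is an assumption about your setup), it is a pure-Python package and can be installed on the device the same way as any other extra package, see [Adding Extra Python Packages](../basic/python_pkgs.html). A minimal sketch:

```python
# Minimal sketch: install the pure-Python 'websockets' package on the device
# only if your image does not already include it (requires internet access).
import os
os.system('pip install websockets')
```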
## WebSocket Client

The following example connects to a server, sends a message 10 times, and then ends the connection:

```python
import asyncio
import websockets
import time

async def send_msg(websocket):
    count = 1
    while count <= 10:
        msg = f\"hello {count}\"
        await websocket.send(msg)
        recv_text = await websocket.recv()
        print(f\"Received: {recv_text}\", end=\"\\n\")
        count += 1
        time.sleep(1)
    await websocket.close(reason=\"client exit\")

async def main_logic(ip, port):
    async with websockets.connect(f'ws://{ip}:{port}') as websocket:
        await send_msg(websocket)

ip = \"10.228.104.100\"
port = 5678
asyncio.get_event_loop().run_until_complete(main_logic(ip, port))
```

## WebSocket Server

The following example accepts client connections and responds with `ack for msg:` followed by the received message.

```python
import asyncio
import websockets
import functools

async def recv_msg(websocket):
    print(\"New client connected, recv_msg start\")
    while True:
        try:
            recv_text = await websocket.recv()
        except Exception as e:
            print(\"Receive failed\")
            break
        print(\"Received:\", recv_text)
        response_text = f\"ack for msg: {recv_text}\"
        await websocket.send(response_text)
    print(\"recv_msg end\")

async def main_logic(websocket, path, other_param):
    await recv_msg(websocket)

ip = \"0.0.0.0\"
port = 5678
start_server = websockets.serve(functools.partial(main_logic, other_param=\"test_value\"), ip, port)
print(\"Start server\")
asyncio.get_event_loop().run_until_complete(start_server)
print(\"Start server loop\")
asyncio.get_event_loop().run_forever()
```
"},"/maixpy/doc/en/network/flask.html":{"title":"Using Flask to Build an HTTP Web Server with MaixPy MaixCAM","content":" title: Using Flask to Build an HTTP Web Server with MaixPy MaixCAM

## Introduction

MaixPy is based on Python, so you can use the Python library Flask to quickly set up a web server. As it is a common Python library, you can find specific uses and methods online, so they won't be elaborated on here. If you only want to create a page that displays camera images, you can also refer to the HTTP image server method in [JPEG Streaming](../video/jpeg_streaming.html).

## Simple HTTP Service Example

After running the following program, accessing `http://device_ip:8000` in a computer browser will display the \"hello world\" text and an image.

```python
from flask import Flask, request, send_file
import maix # we don't use it directly, but importing it lets the program listen for key events so it can exit

app = Flask(__name__)

@app.route(\"/\", methods=[\"GET\", \"POST\"])
def root():
    print(request.remote_addr)
    print(f'headers:\\n{request.headers}')
    print(f'data: {request.data}')
    return 'hello world <br><img src=\"/img\">'

@app.route(\"/<path:path>\")
def hello(path):
    print(path)
    print(f'headers:\\n{request.headers}')
    print(f'data: {request.data}')
    return f\"hello from {path}\"

@app.route(\"/img\")
def img():
    return send_file(\"/maixapp/share/icon/detector.png\")

if __name__ == \"__main__\":
    app.run(host=\"0.0.0.0\", port=8000)
```
"},"/maixpy/doc/en/network/network_settings.html":{"title":"Network Settings for MaixPy MaixCAM WiFi Configuration","content":" title: Network Settings for MaixPy MaixCAM WiFi Configuration

## Introduction

To enable MaixCAM to use the network, it first needs to connect to the network via WiFi. MaixCAM provides several methods to connect to a WiFi hotspot.

## Using the Built-in Settings Application

After powering on, enter the `Settings` application and select the `WiFi` function. You can connect by sharing a `WiFi QR code` from your phone or by generating a QR code at [maixhub.com/wifi](https://maixhub.com/wifi) and scanning it. Alternatively, you can manually scan for `WiFi` hotspots and enter the password to connect. Once connected successfully and DHCP assigns an IP address, the IP will be displayed on the screen.

## Connecting via MaixPy

```python
from maix import network, err

w = network.wifi.Wifi()
print(\"IP:\", w.get_ip())

SSID = \"Sipeed_Guest\"
PASSWORD = \"qwert123\"
print(\"Connecting to\", SSID)

e = w.connect(SSID, PASSWORD, wait=True, timeout=60)
err.check_raise(e, \"Failed to connect to WiFi\")
print(\"IP:\", w.get_ip())
```

## DNS Server Configuration

In practice, some users may find that their router's DNS resolution cannot resolve certain domain names. Therefore, the default system sets the DNS servers in the `/boot/resolv.conf` file:

```shell
nameserver 114.114.114.114 # China
nameserver 223.5.5.5       # Aliyun China
nameserver 8.8.4.4         # Google
nameserver 8.8.8.8         # Google
nameserver 223.6.6.6       # Aliyun China
```

Generally, there is no need to modify this file. If you encounter DNS resolution issues, you can modify it. The actual configuration file used by the system is located at `/etc/resolv.conf`, which is automatically copied from `/boot/resolv.conf` at startup. Therefore, the simplest solution after modification is to reboot. If you prefer not to reboot, you need to modify both files simultaneously.
"},"/maixpy/doc/en/network/mqtt.html":{"title":"Using MQTT with MaixPy MaixCAM for Message Subscription and Publishing","content":" title: Using MQTT with MaixPy MaixCAM for Message Subscription and Publishing

## MQTT Introduction

MQTT allows for quick and easy real-time communication using a publish-subscribe model. System components:
* **MQTT Server (broker):** Responsible for forwarding messages.
* **MQTT Clients:** Subscribe to topics from the server, receive messages, and publish messages to specific topics on the server.

Communication process:
* Clients connect to the MQTT server.
* Clients subscribe to topics they are interested in, such as `topic1`.
* When other clients or the server publish information on the `topic1` topic, it is pushed to the subscribing clients in real time.
* Clients can also actively publish messages to specific topics. All clients subscribed to that topic will receive the messages. For example, if a client publishes a message to `topic1`, all clients subscribed to `topic1` will receive it, including the publishing client itself.

## Using MQTT in MaixPy MaixCAM

The `paho-mqtt` module can be used for this purpose; a minimal sketch is shown below.
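For reference, here is a minimal publish/subscribe sketch using `paho-mqtt`. The broker address `test.mosquitto.org` and the topic `maix/topic1` are placeholders chosen for illustration, not values from the MaixPy examples; replace them with your own broker and topics.

```python
# Minimal paho-mqtt sketch: connect to a broker, subscribe to a topic,
# publish one message after connecting, then loop to receive messages.
# 'test.mosquitto.org' and 'maix/topic1' are placeholders, use your own values.
import paho.mqtt.client as mqtt

def on_connect(client, userdata, flags, rc):
    print('connected, result code:', rc)
    client.subscribe('maix/topic1')
    client.publish('maix/topic1', 'hello from MaixCAM')

def on_message(client, userdata, msg):
    print('received:', msg.topic, msg.payload.decode())

client = mqtt.Client()  # on paho-mqtt 2.x you may need mqtt.Client(mqtt.CallbackAPIVersion.VERSION1)
client.on_connect = on_connect
client.on_message = on_message
client.connect('test.mosquitto.org', 1883, 60)
client.loop_forever()
```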
You can look up the usage of `paho-mqtt` online or refer to the examples in the [MaixPy/examples](https://github.com/sipeed/MaixPy/tree/main/examples/network) repository. If you are using an older system, you might need to manually install the `paho-mqtt` package. Installation instructions can be found in the [Adding Extra Python Packages](../basic/python_pkgs.html) guide.
"},"/maixpy/doc/en/network/http.html":{"title":"Using HTTP Network Communication with MaixPy MaixCAM","content":" title: Using HTTP Network Communication with MaixPy MaixCAM

## Introduction

HTTP is an application-layer network protocol based on TCP. Through it, we can send and receive information to and from network servers, such as retrieving webpage content from a web server. For more information, you can search for HTTP.

## Using HTTP Requests in MaixPy

Since MaixPy is based on Python, you can directly use the pre-installed `requests` library. The `requests` library is a very robust and user-friendly library, so it won't be elaborated on here; please search for related documentation and tutorials for more information. Here is an example of fetching the homepage content of `https://example.com`.

```python
import requests

url = 'https://example.com'
response = requests.get(url)
print(\"Response:\")
print(\" status code:\", response.status_code)
print(\"\")
print(\" headers:\", response.headers)
print(\"\")
print(\" content:\", response.content)
print(\"\")
print(\" text:\", response.text)
print(\"\")
```
"},"/maixpy/doc/en/video/record.html":{"title":"MaixCAM MaixPy Video Record","content":" title: MaixCAM MaixPy Video Record update: date: 2024-05-20 author: lxowalle version: 1.0.0 content: Initial document

## Introduction

This document provides instructions on how to use the video recording feature.

## Example 1

An example of recording a video in `h265` format.

```python
from maix import video, image, camera, app, time

cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP)
e = video.Encoder()
f = open('/root/output.h265', 'wb')

record_ms = 2000
start_ms = time.ticks_ms()
while not app.need_exit():
    img = cam.read()
    frame = e.encode(img)
    print(frame.size())
    f.write(frame.to_bytes())
    if time.ticks_ms() - start_ms > record_ms:
        app.set_exit_flag(True)
```

Steps:

1. Import the modules and initialize the camera

```python
from maix import video, image, camera, app, time
cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP)
```

`camera.Camera()` is used to initialise the camera; here the resolution is set to `640x480`. Currently the `Encoder` only supports the `NV21` format, so the image format is set to `image.Format.FMT_YVU420SP`.

2. Initialise the `Encoder` module

```python
e = video.Encoder()
```

The `video.Encoder()` module currently only supports processing images in `image.Format.FMT_YVU420SP` format; it supports `h265` and `h264` encoding and defaults to `h265`. If you want to use `h264` encoding, change the initialisation parameter to `video.Encoder(type=video.VideoType.VIDEO_H264_CBR)`. Note that only one encoder can exist at the same time.

3. Encoding the camera image

```python
img = cam.read()
frame = e.encode(img)
```

`img = cam.read()` reads a camera image and saves it to `img`; `frame = e.encode(img)` encodes `img` and saves the result to `frame`.
4. Save the encoded result to a file

```python
f = open('/root/output.h265', 'wb')
f.write(frame.to_bytes(False))
```

`f = open(xxx)` opens (and creates) a file; `f.write(frame.to_bytes(False))` converts the encoding result `frame` to `bytes` and then calls `f.write()` to write the data to the file.

5. Exit after 2 seconds

```python
record_ms = 2000
start_ms = time.ticks_ms()
while not app.need_exit():
    if time.ticks_ms() - start_ms > record_ms:
        app.set_exit_flag(True)
```

This is the application logic for the timed exit; see the code above for details.

6. Done

## Example 2

Another example of recording a video in `h265` format.

```python
from maix import video, time, image, camera, app

cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP)
e = video.Encoder(capture=True)
e.bind_camera(cam)
f = open('/root/output.h265', 'wb')

record_ms = 2000
start_ms = time.ticks_ms()
while not app.need_exit():
    frame = e.encode()
    img = e.capture()
    print(frame.size())
    f.write(frame.to_bytes(True))
    if time.ticks_ms() - start_ms > record_ms:
        app.set_exit_flag(True)
```

Similar to Example 1, the difference is that the `Encoder` object's `bind_camera` method is called and the `Encoder` fetches the camera image itself, which has the advantage of using hardware features to increase the encoding speed.

```
e = video.Encoder(capture=True)
e.bind_camera(cam)
frame = e.encode()
img = e.capture()
```

`e = video.Encoder(capture=True)` enables the `capture` parameter so the encoded image can be fetched during encoding; `e.bind_camera(cam)` binds the camera to the `Encoder` object; `frame = e.encode()` fetches the image from the camera internally instead of passing in `img`; `img = e.capture()` captures the image from the `Encoder` object, which can be used for further image processing.

## Convert to MP4 format

If you want to record video in `mp4` format, you can record an `H265` video first and then use the `ffmpeg` tool in the system to convert it to `mp4` format.

```python
import os

# Pack h265 to mp4
# /root/output.h265 is the h265 file path
# /root/output.mp4 is the mp4 file path
os.system('ffmpeg -loglevel quiet -i /root/output.h265 -c:v copy -c:a copy /root/output.mp4 -y')
```
"},"/maixpy/doc/en/video/rtsp_streaming.html":{"title":"MaixCAM MaixPy Video Streaming RTSP Push Streaming","content":" title: MaixCAM MaixPy Video Streaming RTSP Push Streaming update: date: 2024-05-20 author: lxowalle version: 1.0.0 content: Initial documentation

## Introduction

This document provides methods for streaming camera images via RTSP.

## How to use

```python
from maix import time, rtsp, camera, image

server = rtsp.Rtsp()
cam = camera.Camera(2560, 1440, image.Format.FMT_YVU420SP)

server.bind_camera(cam)
server.start()

print(server.get_url())

while True:
    time.sleep(1)
```

Steps:

1. Import the `time`, `rtsp`, `camera` and `image` modules:

```python
from maix import time, rtsp, camera, image
```

2. Initialize the camera:

```python
cam = camera.Camera(2560, 1440, image.Format.FMT_YVU420SP) # Initialise the camera, output resolution 2560x1440 in NV21 format
```

Note that the RTSP module currently only supports the NV21 format, so the camera needs to be configured to output in NV21 format.

3. Initialise and start the Rtsp object

```python
server = rtsp.Rtsp()
server.bind_camera(cam)
server.start()
```

`server = rtsp.Rtsp()` is used to create an `Rtsp` object; `server.bind_camera(cam)` is used to bind a `Camera` object, after which the original `Camera` object can no longer be used directly; `server.start()` is used to start the `rtsp` push stream.
4. Print the URL of the current RTSP stream

```python
print(server.get_url())
```

`server.get_url()` is used to get the RTSP playback address.

5. Done. After running the above code, you can play the video stream with [VLC](https://www.videolan.org/vlc/); the tested `VLC` version is `3.0.20`. The default playback address is `rtsp://device_ip:8554/live`.

## OSD

Drawing lines and boxes via OSD: TODO
"},"/maixpy/doc/en/video/play.html":{"title":"MaixPy Playback Video","content":" title: MaixPy Playback Video update: date: 2024-08-19 author: lxowalle version: 1.0.0 content: Initial document

## Introduction

This document provides instructions for using the video playback feature. `MaixPy` supports playing `h264`, `mp4` and `flv` video formats; note that currently only `avc`-encoded `mp4` and `flv` files are supported.

## Play `MP4` video

An example of playing an `mp4` video; the path to the video file is `/root/output.mp4`.

```python
from maix import video, display, app

disp = display.Display()
d = video.Decoder('/root/output.mp4')
print(f'resolution: {d.width()}x{d.height()} bitrate: {d.bitrate()} fps: {d.fps()}')
d.seek(0)
while not app.need_exit():
    ctx = d.decode_video()
    if not ctx:
        d.seek(0)
        continue
    img = ctx.image()
    disp.show(img)
    print(f'need wait : {ctx.duration_us()} us')
```

Steps:

1. Import the modules and initialise the display

```python
from maix import video, display, app
disp = display.Display()
```

`disp = display.Display()` is used to initialise the display to show the decoded images.

2. Initialise the `Decoder` module

```python
d = video.Decoder('/root/output.mp4')
```

`d = video.Decoder('/root/output.mp4')` is used to initialise the decoder and set the path to the video file to be played. If you need to play `flv` files, pass a path with the `flv` suffix, such as `{your_file_path}.flv`; if you need to play `h264` files, pass a path with the `h264` suffix, such as `{your_file_path}.h264`.

3. Set the decoding position

```python
d.seek(0)
```

This can be used to set the position of the video to be played, in seconds.

4. Get the decoded image

```python
ctx = d.decode_video()
img = ctx.image()
```

Each call returns a frame context, and you can obtain the image through `ctx.image()`. Currently the decoded output only supports the NV21 format.

5. Display the decoded image

```python
disp.show(img)
```

When displaying images, `ctx.duration_us()` can be used to get the duration of each frame in microseconds.

6. Done, see the [API documentation](https://wiki.sipeed.com/maixpy/api/maix/video.html) for more usage of `Decoder`.
"},"/maixpy/doc/en/video/rtmp_streaming.html":{"title":"MaixCAM MaixPy Video Streaming RTMP Push Streaming","content":" title: MaixCAM MaixPy Video Streaming RTMP Push Streaming update: date: 2024-05-31 author: lxowalle version: 1.0.0 content: initial document

## Introduction

This document provides methods for pushing H264 video streams via RTMP.

## How to use

The following example shows pushing an h264 video stream to `rtmp://192.168.0.30:1935/live/stream`

```python
from maix import camera, time, rtmp, image

cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP)

# rtmp://192.168.0.30:1935/live/stream
host = '192.168.0.30'
port = 1935
app = 'live'
stream = 'stream'
bitrate = 1000_000
r = rtmp.Rtmp(host, port, app, stream, bitrate)
r.bind_camera(cam)
r.start()

while True:
    time.sleep(1)
```

Steps:

1. Import the `camera`, `rtmp`, `time` and `image` modules:

```python
from maix import camera, time, rtmp, image
```

2. Initialize the camera:

```python
cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP) # Initialise the camera, output resolution 640x480 in NV21 format
```

Note that the RTMP module currently only supports the NV21 format, so the camera needs to be configured to output in NV21 format.

3. Initialise and start the Rtmp object

```python
r = rtmp.Rtmp(host, port, app, stream, bitrate)
r.bind_camera(cam)
r.start()
```

`r = rtmp.Rtmp(host, port, app, stream, bitrate)` is used to create an `Rtmp` object, where `host` is the IP address or domain of the rtmp server, `app` is the name of the application opened on the rtmp server, and `stream` is the name of the rtmp stream, which can also be used as the key for pushing the stream. `r.bind_camera(cam)` is used to bind a `Camera` object; the original `Camera` object cannot be used after binding. `r.start()` is used to start the `rtmp` stream.

4. Done

## Push streaming test to Bilibili

### Launch bilibili live stream

1. Click on Live Streaming ![](../../../static/image/bilibili_click_live.png)
2. Click on Live Streaming Settings ![](../../../static/image/bilibili_click_live_setting.png)
3. Find the live streaming address ![](../../../static/image/bilibili_check_live_link.png)
4. Scroll down, select a category, and click Start Live! ![](../../../static/image/bilibili_live_start.png)
5. Get the push stream address ![](../../../static/image/bilibili_check_rtmp_url.png)

Server address: `rtmp://live-push.bilivideo.com/live-bvc`
Key: `?streamname=live_xxxx&key=1fbfxxxxxxxxxxxxxffe0&schedule=rtmp&pflag=1`
Push stream address: `rtmp://live-push.bilivideo.com/live-bvc/?streamname=live_xxxx&key=1fbfxxxxxxxxxxxxxffe0&schedule=rtmp&pflag=1`

### Run the RTMP client

```python
from maix import camera, time, rtmp, image

cam = camera.Camera(640, 480, image.Format.FMT_YVU420SP)

# rtmp://live-push.bilivideo.com/live-bvc/?streamname=live_xxxx&key=1fbfxxxxxxxxxxxxxffe0&schedule=rtmp&pflag=1
host = 'live-push.bilivideo.com'
port = 1935
app = 'live-bvc'
stream = '?streamname=live_xxxx&key=1fbfxxxxxxxxxxxxxffe0&schedule=rtmp&pflag=1'
bitrate = 1000_000
r = rtmp.Rtmp(host, port, app, stream, bitrate)
r.bind_camera(cam)
r.start()

while True:
    time.sleep(1)
```

The Bilibili push stream address obtained above is `rtmp://live-push.bilivideo.com/live-bvc/?streamname=live_xxxx&key=1fbfxxxxxxxxxxxxxffe0&schedule=rtmp&pflag=1`, which can be broken down as:
1. server address: `live-push.bilivideo.com`
2. port: `1935` (if there is no port number, the default is `1935`)
3. application name: `live-bvc`
4. stream name: `?streamname=live_xxxx&key=1fbfxxxxxxxxxxxxxffe0&schedule=rtmp&pflag=1`

Run the code and you will be able to see the `maixcam` screen in the live stream. If the live stream is not displayed, try closing the live stream first, then reopen it and run the code again. Try it~!
"},"/maixpy/doc/en/video/jpeg_streaming.html":{"title":"MaixCAM MaixPy Video Stream JPEG Streaming / Sending Images to Server","content":" title: MaixCAM MaixPy Video Stream JPEG Streaming / Sending Images to Server update: date: 2024-04-03 author: neucrack version: 1.0.0 content: Initial document date: 2024-05-20 author: lxowalle version: 1.0.1 content: update JPEG HTTP usage

## Introduction

Sometimes it is necessary to send images to a server, or to push video from a webcam to a server, so here are two ways to do it. One of the simplest methods is to compress images into `JPEG` format and send them one by one to the server. Note, this is a very basic method and not a formal way to stream video.
It is also not suitable for high-resolution, high-frame-rate video streams, as it involves sending images one by one. For more efficient video streaming, please use the `RTSP` or `RTMP` modules discussed later. The other method is to set up an HTTP server on the device, so that the PC can view the stream directly in a browser.

## Methods for pushing streams as a client

```python
from maix import image
import requests

# create image
img = image.Image(640, 480, image.Format.FMT_RGB)
# draw something
img.draw_rect(60, 60, 80, 80, image.Color.from_rgb(255, 0, 0))

# convert to jpeg
jpeg = img.to_format(image.Format.FMT_JPEG) # image.Format.FMT_PNG
# get jpeg bytes
jpeg_bytes = jpeg.to_bytes()

# faster way, borrow memory from jpeg object,
# but be careful, when jpeg object is deleted, jpeg_bytes object MUST NOT be used, or program will crash
# jpeg_bytes = jpeg.to_bytes(copy=False)

# send image binary bytes to server
url = \"http://192.168.0.123:8080/upload\"
res = requests.post(url, data=jpeg_bytes)
print(res.status_code)
print(res.text)
```

As you can see, the image is first converted into `JPEG` format, and then the binary data of the `JPEG` image is sent to the server via `TCP`.

## Methods for pushing streams as a server

```python
from maix import camera, time, app, http

html = \"\"\"<html>
<head>
    <title>JPG Stream</title>
</head>
<body>
    <h1>MaixPy JPG Stream</h1>
    <img src=\"/stream\">
</body>
</html>\"\"\"

cam = camera.Camera(320, 240)
stream = http.JpegStreamer()
stream.set_html(html)
stream.start()

print(\"http://{}:{}\".format(stream.host(), stream.port()))
while not app.need_exit():
    t = time.ticks_ms()
    img = cam.read()
    jpg = img.to_jpeg()
    stream.write(jpg)
    print(f\"time: {time.ticks_ms() - t}ms, fps: {1000 / (time.ticks_ms() - t)}\")
```

Steps:

1. Import the image, camera and http modules:

```python
from maix import image, camera, http
```

2. Initialize the camera:

```python
cam = camera.Camera(320, 240)
```

3. Initialize the Stream object

```python
stream = http.JpegStreamer()
stream.start()
```

`http.JpegStreamer()` is used to create a `JpegStreamer` object, which starts an `http server` that publishes `jpeg` image streams to clients. `stream.start()` is used to start the `http server`.

4. Custom html styles (optional)

```python
html = \"\"\"<html>
<head>
    <title>JPG Stream</title>
</head>
<body>
    <h1>MaixPy JPG Stream</h1>
    <img src=\"/stream\">
</body>
</html>\"\"\"
stream.set_html(html)
```

`html = xxx` is the `html` code that can be used to customise the style of your web page. Note that the core code is the `<img src=\"/stream\">` line; be sure not to miss it. `stream.set_html(html)` is used to set the custom `html` code; this step is optional. The default browsing address is `http://device_ip:8000`.

5. Getting images from the camera and pushing the stream

```python
while 1:
    img = cam.read()
    jpg = img.to_jpeg()
    stream.write(jpg)
```

`img = cam.read()` gets an image from the camera; when initialised as `cam = camera.Camera(320, 240)`, the `img` object is an RGB image with a resolution of 320x240. `jpg = img.to_jpeg()` converts the image to `jpeg` format. `stream.write(jpg)` writes the image to the server, and the `http` server will send it to `http` clients.

6. Done. After running the code above, you can see the video stream directly in your browser; the default address is `http://device_ip:8000`. Open your browser and take a look!
"},"/maixpy/doc/en/faq.html":{"title":"MaixCAM MaixPy FAQ (Frequently Asked Questions)","content":" title: MaixCAM MaixPy FAQ (Frequently Asked Questions)

>! This page lists common questions and solutions related to MaixPy. If you encounter any issues, please search for answers here first.
> Additionally, there are other resources:
> * [MaixHub Discussion Forum](https://maixhub.com/discussion): A platform for discussions, with support for tip rewards.
> * [MaixPy Issues](https://github.com/sipeed/MaixPy/issues?q=): For source-code-related issues.
> * [MaixCAM Hardware FAQ](https://wiki.sipeed.com/hardware/zh/maixcam/faq.html): Frequently asked questions about MaixCAM hardware.

## MaixVision cannot find the device?

First, confirm whether the connection method is WiFi or USB cable.

**WiFi**:
* Ensure that WiFi is correctly connected and has obtained an IP address. You can view the `ip` in `Settings > Device Info` or `Settings > WiFi`.

**USB Cable**:
* Ensure that the device is connected to the computer via a Type-C data cable, and the device is powered on and has entered the function selection interface.
* Ensure that the device driver is installed:
  * On Windows, check if there is a USB virtual network adapter device in `Device Manager`. If there is an exclamation mark, the driver is not installed properly. Follow the instructions in [Quick Start](./index.html) to install the driver.
  * On Linux, you can check if there is a `usb0` device by running `ifconfig` or `ip addr`, or check all USB devices with `lsusb`. Linux already includes the driver, so if the device is not recognized, check the hardware connection, ensure the device system is up to date, and ensure the device has booted up properly.
  * On macOS, follow the same steps as Linux.
* Additionally, check the quality of the USB cable and try using a high-quality cable.
* Additionally, check the quality of the computer's USB port. For example, some small-form-factor PCs have poor EMI design on their USB ports, and connecting a good-quality USB hub may allow the device to work. You can also try a different USB port or a different computer.

## MaixVision camera example shows choppy video

The default GC4653 camera has a maximum frame rate of 30 frames per second (FPS). Under normal circumstances, the MaixVision display should not appear choppy to the naked eye. If choppiness occurs, first consider transmission issues:
* Check the network connection quality, such as WiFi.
* If using a USB connection, check the USB cable quality, computer USB port quality, and try using a different computer, USB port, or USB cable for comparison. ## What is the difference between MaixPy v4 and v1/v3? * MaixPy v4 uses the Python language and is the culmination of the experiences from v1 and v3, offering better supporting software and ecosystem, more features, simpler usage, and more comprehensive documentation. While the hardware has significant improvements, the pricing is even more affordable compared to the other two versions. Additionally, it provides compatibility with the K210 user experience and API, making it easier for users to migrate quickly from v1 to v4. * v1 used the Micropython language and had many limitations, such as limited third party library support. Additionally, due to the hardware performance limitations of the Maix I (K210), there was not enough memory, limited AI model support, and lack of hardware acceleration for many codecs. * v3 also used the Python language and was based on the Maix II Dock (v831) hardware. However, the hardware had limited AI model support, and the Allwinner ecosystem was not open enough, with an incomplete API. This version was only intended for use with the Maix II Dock (v831) and will not receive further updates. ## Does MaixPy currently only support MaixCAM, or can it work with other boards using the same chipset? MaixPy currently only supports the MaixCAM series of boards. Other boards using the same chipset, including Sipeed's boards like the LicheeRV Nano, are not supported. It is strongly recommended not to attempt using MaixPy with other boards, as it may result in device damage (such as smoke or screen burn), for which you will be solely responsible. In the future, Sipeed's Maix series of products will continue to be supported by MaixPy. If you have any needs that cannot be met by MaixCAM, you can post your requirements on the [MaixHub Discussion Forum](https://maixhub.com/discussion) or send an email to support@sipeed.com. ## Can I use a camera or screen other than the officially bundled ones? It is not recommended to use cameras or screens other than the officially bundled ones, unless you have sufficient software and hardware knowledge and experience. Otherwise, it may result in device damage. The officially bundled accessories have been fine tuned for both software and hardware, ensuring the best performance and allowing for out of the box usage. Other accessories may have different interfaces, drivers, and software, requiring you to calibrate them yourself, which is an extremely complex process. However, if you are an expert, we welcome you to submit a pull request! ## Model running error: cvimodel built for xxxcv181x CANNOT run on platform cv181x. Failure to parse the model file is generally caused by file corruption. Ensure that your model file is not damaged. For example: * Editing a binary file with an editor caused the file to become corrupted. For example, opening a `cvimodel` file with MaixVision can corrupt the binary file due to MaixVision's auto save feature. Therefore, do not open and save binary files with text editors like MaixVision (this issue will be fixed in a future update of MaixVision by removing the auto save feature). * If it was downloaded from the internet, make sure the download was not corrupted. Typically, files on the internet provide sha256sum/md5 checksums. After downloading, you can compare these values; for specific methods, please search online or ask ChatGPT. 
* If it comes from a compressed archive, ensure that the decompression process was error free. You can decompress the archive again to make sure there were no errors in the process. * Ensure that the file was not damaged during the transfer to the device. You can compare the sha256sum values of the file on the device and on your computer; for specific methods, please search online or ask ChatGPT. ## Power on Black Screen, No Display on the Screen Refer to [MaixCAM FAQ](https://wiki.sipeed.com/hardware/zh/maixcam/faq.html) ## Why doesn’t the computer detect a serial port when connecting via USB to MaixCAM? The USB port on the MaixCAM is a USB 2.0 interface of the chip, not a USB to serial interface, so it is normal for no serial port to appear when connected to a computer. How do you communicate without a USB to serial connection? By default, the USB will simulate a USB network card. When you connect the USB to your computer, a virtual network card will appear. According to the instructions in the [Quick Start Guide](./index.html), you can use MaixVision to communicate with MaixCAM to run code, preview images, manage files, and other functions. Additionally, since the USB simulates a network card, you can also use standard SSH software to connect to MaixCAM for communication. Alternatively, you can connect via WiFi and communicate within the same local network. If you need to use the serial port, there are two situations: 1. **Serial communication with a computer**: You need to purchase any USB to serial module to connect the computer's USB port with the board's serial port (for MaixCAM, it's the UART0 pins A16 (TX) and A17 (RX), or you can use the TX and RX pins on the USB adapter board that comes with the MaixCAM package, which are also the A16 and A17 pins and are functionally equivalent). 2. **Serial communication with another MCU/SOC**: Directly connect MaixCAM's A16 (TX) and A17 (RX) to the MCU's RX and TX pins. ## Red Screen, Initialization Display Failed, Please Check FAQ The message indicates that the display driver initialization failed. As of July 2024, the underlying display driver for MaixCAM is initialized together with the camera driver. Therefore, this issue is most likely caused by a failure in the camera driver initialization. To resolve this issue: * Try updating to the latest system and install the latest runtime libraries (very important!!!). The runtime libraries need to work in conjunction with the system drivers, and version mismatches may cause errors. Updating to the latest system image and installing the latest runtime libraries should generally resolve the issue. * Maybe multiple process try to occupy driver, easiest way is reboot. * Check for hardware connection issues with the camera. Ensure that the camera is properly connected and not damaged. ## What are the differences between Runtime, MaixPy, and system image? Which one should I upgrade? * **Runtime** is the runtime environment. Many system functions depend on it, including MaixPy. If you encounter the problem of being unable to run the program, first check and update it online. * The system image includes the basic operating system, hardware drivers, built in applications, and MaixPy firmware, etc. It is the basic environment. It is best to keep it up to date, especially in the [Release](https://github.com/sipeed/MaixPy/releases) page. 
If the version update mentions that the system has been updated, it is strongly recommended to update the system, because some MaixPy functions may depend on the drivers in the system. > Updating the system will format all previous data. Please back up useful data in the device system before updating. * **MaixPy** is a dependent library for running the MaixPy program. If you do not need to update the system function, and the update log does not mention that the system has important updates such as drivers, you can update MaixPy alone. ## Error Loading MUD Model File: *****.cvimodel not exists, load model failed * Check if the .mud file you are trying to load really exists on the device (note, it should be on the device, not on the computer, it needs to be transferred to the device). * Verify that the model path you wrote is correct. * If you have changed the file name, note that the MUD file is a model description file and can be edited with a text editor. The actual model file is the .cvimodel file (for MaixCAM). The .mud file specifies the file name and path of the .cvimodel. Therefore, if you have changed the file name of `.cvimodel`, you also need to modify the `model` path in the `.mud` file. For example, here is the mud file for the Yolov5 model: ```ini [basic] type cvimodel model yolov5s_224_int8.cvimodel [extra] model_type yolov5 input_type rgb mean 0, 0, 0 scale 0.00392156862745098, 0.00392156862745098, 0.00392156862745098 anchors 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326 labels person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair dryer, toothbrush ``` Here, the `model` is specified as the `yolov5s_224_int8.cvimodel` file relative to the directory of this `.mud` file. If you have changed `yolov5s_224_int8.cvimodel` to another name, you need to update it here as well. ## MaixVision Shows Red Wavy Line on `import maix` This error occurs because MaixVision's code hinting feature cannot find the `maix` module. It's important to understand that MaixVision's code hinting relies on the local Python packages on your computer, while the code execution depends on the Python packages on the device. To enable MaixVision's code hinting, you need to install Python and the `MaixPy` package on your computer. For more details, refer to the [MaixVision User Documentation](./basic/maixvision.html). ## MaixCAM starts very slowly, even exceeding 1 minute, or the screen flickers This is mostly due to insufficient power supply. MaixCAM requires a voltage of around 5V and a current between 150mA and 500mA. If you encounter this issue, you can use a USB to TTL module to connect MaixCAM's serial port to a computer. You may see a message like `Card did not respond to voltage select! : 110`, indicating insufficient power supply. Simply switch to a more stable power supply to resolve the problem. 
For MaixCAM, it draws 400mA during startup, 250mA in standby mode with the screen on, and 400mA~500mA when running AI models at full speed. Therefore, ensuring a stable power supply is very important! ## MaixCAM Black screen and not boot up, or stock in LOGO screen Refer to [MaixCAM FAQ](https://wiki.sipeed.com/hardware/en/maixcam/faq.html) ## MaixVision Program Stuck on \"start running ...\" When the MaixVision log output window prints the message `start running ...`, it indicates that the program has been sent to the device and has begun executing. What gets printed afterward depends on your program. For instance, if you call `print(\"hello\")`, it will print `hello`. If your program doesn't include any print statements, then there will be no logs displayed. So, the program isn't actually stuck; it's just that your program hasn't output anything, so no logs are shown. You can try adding `print(\"xxx\")` in your code to generate output, which is the simplest way to debug your program. ## Why Does the Hardware Have 256MB of Memory, But Only 128MB is Available in the System? The remaining memory is reserved for low level drivers and the kernel, which are used for operating the camera, display, hardware encoding/decoding, NPU, and other drivers. You can check the memory used by these drivers (known as ION memory in CVITEK systems) by running `cat /sys/kernel/debug/ion/cvi_carveout_heap_dump/summary`. For other memory usage, you can run `cat /proc/meminfo`. If you want to adjust the memory allocation, you would need to compile the system yourself and modify the `ION_SIZE` in the `memmap.py` file located in the `LicheeRV Nano Build/build/boards/sg200x/sg2002_licheervnano_sd/` directory(refer to [customize system doc](./pro/compile_os.html)). ## Why Am I Unable to Install the Runtime Library, and an Error \"Request Failed\" Is Displayed? * Ensure that the device is successfully connected to the internet. You can try connecting to a different mobile hotspot. * Verify that the system image you flashed is the latest version. * If you see an error related to DNS resolution failure, it might be due to DNS settings issues on your network. You can try connecting to a different mobile hotspot or manually modify the DNS server settings in `/boot/resolv.conf` (modifying this file requires a reboot) and `/etc/resolv.conf` (modifying this file does not require a reboot, but rebooting will overwrite it with the contents of the former). * Make sure you have purchased a genuine MaixCAM from Sipeed. * Contact customer service, providing the system version and device_key (which can be found after disconnecting from MaixVision or, if you have a screen, in `System Settings > System Information`). Translation: ## Compile error: type not registered yet? ``` from ._maix.peripheral.key import add_default_listener ImportError: arg(): could not convert default argument into a Python object (type not registered yet?). #define ``` The error indicates that an object has not been defined as a Python object. In MaixPy, this is usually caused by an issue with the order of automatic API generation. For example, if there is an API declared with `@maixpy` in `a.hpp`, and another API in `b.hpp` that uses a definition from `a.hpp` as a parameter, then `b.hpp` depends on `a.hpp`. However, the current MaixPy compilation script does not perform dependency scanning. 
To resolve this, you need to manually specify the scan order in the `components/maix/headers_priority.txt` file in the MaixPy project, ensuring that `a.hpp` is scanned before `b.hpp`."},"/maixpy/doc/en/basic/app_usage.html":{"title":"MaixCAM MaixPy Application User Guide","content":" title: MaixCAM MaixPy Application User Guide layout: redirect redirect_url: ./app.html "},"/maixpy/doc/en/basic/python_pkgs.html":{"title":"MaixCAM MaixPy Add extra Python packages.","content":" title: MaixCAM MaixPy Add extra Python packages. ## Introduction MaixPy is based on the Python language and provides a wide range of functionalities and APIs for embedded application development. In addition to this, you can also use other Python packages to extend its functionality. ## Installing Additional Python Packages > Please note that not all Python packages are supported. Generally, only pure Python packages are supported, not C extension packages. C extension packages may require you to manually cross compile them on a computer (which is quite complex and won't be covered here). ### Method 1: Installing Using Python Code You can install the package you need in MaixVision using Python code, for example: ```python import os os.system(\"pip install package_name\") ``` To update a package, you can use: ```python import os os.system(\"pip install upgrade package_name\") ``` ### Method 2: Installing Using the Terminal and pip Command Follow the terminal usage method introduced in [Linux Basics](./linux_basic.html) and use `pip install package_name` to install the package you need."},"/maixpy/doc/en/basic/app.html":{"title":"MaixCAM MaixPy App development and app stores","content":" title: MaixCAM MaixPy App development and app stores ## Where to Find Applications After powering on, the device will automatically enter the application selection interface. All built in applications are available on the [MaixHub App Store](https://maixhub.com/app), where you can find corresponding app descriptions and usage instructions. ## Where to Find Source Code You can find the source code links (if available) on the app pages in the App Store. The source code for official integrated applications is located in the [MaixPy/projects](https://github.com/sipeed/MaixPy/tree/main/projects) directory or the [MaixCDK/projects](https://github.com/sipeed/MaixCDK/tree/main/projects) directory. ## Installing Applications Frequently used settings include `Settings > Language` and `Settings > WiFi`. The `App Store` application can be used to upgrade and install apps. Once connected to a WiFi network with internet access, you can scan to install apps from the [MaixHub App Store](https://maixhub.com/app). ## Introduction to Application Ecosystem In order to make the development board ready to use out of the box, make it easy for users to use without barriers, enable developers to share their interesting applications, and provide effective channels for receiving feedback and even profits, we have launched a simple application framework, including: **[App Store](https://maixhub.com/app)**: Developers can upload and share applications, which users can download and use without needing to develop them. Developers can receive certain cash rewards (from MaixHub or user tips). **Pre installed Apps**: The official provides some commonly used applications, such as color block detection, AI object detection tracking, QR code scanning, face recognition, etc., which users can use directly or use as serial module. 
**MaixPy + MaixCDK Software Development Kit**: Using [MaixPy](https://github.com/sipeed/maixpy) or [MaixCDK](https://github.com/sipeed/MaixCDK), you can quickly develop embedded AI visual and audio applications in Python or C/C++, efficiently realizing your interesting ideas. **MaixVision Desktop Development Tool**: A brand new desktop code development tool for quick start, debugging, running, uploading code, installing applications to devices, one click development, and even support for graphical block based programming, making it easy for elementary school students to get started. Everyone is welcome to pay attention to the App Store and share their applications in the store to build a vibrant community together. ## Packaging Applications Using MaixPy + MaixVison makes it easy to develop, package, and install applications: Develop applications with MaixPy in MaixVision, which can be a single file or a project directory. Connect the device. Click the \"Install\" button at the bottom left corner of MaixVision, fill in the basic information of the application in the popup window, where the ID is used to identify the application. A device cannot simultaneously install different applications with the same ID, so the ID should be different from the IDs of applications on MaixHub. The application name can be duplicated. You can also upload an icon. Click \"Package Application\" to package the application into an installer. If you want to upload it to the [MaixHub App Store](https://maixhub./com/app), you can use this packaged file. Click \"Install Application\" to install the packaged application on the device. Disconnect from the device, and you will see your application in the device's app selection interface. Simply click on it to run the application. > If you develop with MaixCDK, you can use `maixcdk release` to package an application. Refer to the MaixCDK documentation for specifics. ## Exiting Applications If you have developed a relatively simple application without a user interface and a back button, you can exit the application by pressing the device's function button (usually labeled as USER, FUNC, or OK) or the back button (if available, MaixCAM does not have this button by default). ## Installing Applications * **Method 1**: Use the `App Store` application on the device. Find the application on the [App Store](https://maixhub.com/app), connect the device to the internet, and scan the code to install. * **Method 2**: Install using a local installation package. Transfer the package to the device's file system, for example, to `/root/my_app_v1.0.0.zip`, and then run the following code. Make sure to modify the `pkg_path` variable to the correct path, you can also find this script in `MaixPy`'s `examples/tools/install_app.py`: ```python import os def install_app(pkg_path): if not os.path.exists(pkg_path): raise Exception(f\"Package {pkg_path} not found\") cmd f\"/maixapp/apps/app_store/app_store install {pkg_path}\" err_code os.system(cmd) if err_code ! 0: print(\"[ERROR] Install failed, error code:\", err_code) else: print(f\"Install {pkg_path} success\") pkg_path \"/root/my_app_v1.0.0.zip\" install_app(pkg_path) ``` * **Method 3**: * For applications developed using `MaixPy`, run `maixtool deploy` in the project root directory (which contains `app.yaml` and `main.py`). A QR code will be displayed. Keep the device and computer on the same local network, and use the App Store on the device to scan the QR code corresponding to the local network address for online installation. 
* For applications developed using `MaixCDK`, run `maixcdk deploy` in the project root directory. A QR code will be displayed. Keep the device and computer on the same local network, and use the App Store on the device to scan the QR code corresponding to the local network address for online installation. ## Basic Guidelines for Application Development Since touchscreens are standard, it is recommended to create a simple interface with touch interaction. You can refer to examples for implementation methods. Avoid making interfaces and buttons too small, as MaixCAM default screen is 2.3 inches with 552x368 resolution and high PPI. Make sure fingers can easily tap without making mistakes. Implement a simple serial interaction for the main functionality of each application based on the [serial protocol](https://github.com/sipeed/MaixCDK/blob/master/docs/doc/convention/protocol.md) (see [example](https://github.com/sipeed/MaixPy/tree/main/examples/communication/protocol)). This way, users can directly use it as a serial module. For instance, in a face detection application, you can output coordinates via serial port when a face is detected."},"/maixpy/doc/en/basic/os.html":{"title":"MaixCAM MaixPy Upgrade and burn system.","content":" title: MaixCAM MaixPy Upgrade and burn system. ## Introduction If you have purchased the official (Sipeed) package with a TF card, typically the system has already been pre programmed at the factory and can be used directly without further steps. However, to avoid using an outdated version of the pre programmed system, it is **highly recommended** to first **upgrade to the latest system** following the tutorial. ## Obtaining the Latest System Visit the [MaixPy Release page](https://github.com/sipeed/MaixPy/releases) to find the latest system image file, such as `maixcam_os_20240401_maixpy_v4.1.0.xz`. Alternate link: [Sourceforge](https://sourceforge.net/projects/maixpy/files/) ## How to Confirm if System Upgrade is Needed * Upon booting up to the main menu, click on `Settings`, then `Device Info` to check the system's version number. * Visit the [MaixPy Release History page](https://github.com/sipeed/MaixPy/releases) to review the update logs, which contain information on MaixPy firmware and system image updates. If there are significant updates after your current version, it is advisable to upgrade. > If the latest system update only includes routine MaixPy firmware updates compared to your current system, you may choose not to upgrade. You can simply update `MaixPy` separately in `Settings` under `Update MaixPy`. ## Burning the System Image to MaixCAM Refer to the hardware documentation [MaixCAM System Burning](https://wiki.sipeed.com/hardware/zh/maixcam/os.html) tutorial. Note that if the conditions for `USB Burning` are met, it is recommended to use the `USB Burning` method. The USB burning method does not require removing the TF card."},"/maixpy/doc/en/basic/linux_basic.html":{"title":"Basic Knowledge of Linux","content":" title: Basic Knowledge of Linux ## Introduction For beginners just starting out, you can skip this chapter for now and come back to it after mastering the basics of MaixPy development. The latest MaixPy supports running Linux on the MaixCAM hardware, so the underlying MaixPy development is based on the Linux system. 
Although Sipeed has done a lot of work with MaixPy so that developers can enjoy using it without any knowledge of the Linux system, there may still be situations where some low-level operations are necessary. For the convenience of developers unfamiliar with Linux, this section covers some basic Linux knowledge. ## Why Linux System is Needed Specific reasons can be researched on your own. Here are a few examples, described in simplified, non-technical terms that are easy for beginners to understand: * On microcontrollers, our program is usually a single loop, but with Linux we can run multiple programs simultaneously, each appearing to run independently, with the actual scheduling handled by the operating system. * Linux has a large developer community, so required functionalities and drivers can easily be found without having to implement them from scratch. * Linux offers a rich set of accompanying software tools for convenient development and debugging; common Linux tools not mentioned in this tutorial can, in principle, be used as well. ## File System What is a file system? * Similar to a computer's file system, Linux manages hardware disks using a file system, making it easy for us to read and write data to the disk. * For students who have learned about microcontrollers but are not familiar with file system development, imagine having a Flash or TF card where data can be read and written through APIs and survives power loss. However, Flash has read/write limitations, and a program is needed to ensure its longevity. A file system is like a mature program that manages the Flash space and read/write operations. By calling the file system's APIs, we can significantly reduce development work and rely on proven programs for stability and security. ## Transferring Files between Computer and Device (Development Board) Since the device has Linux and a file system, how do we send files to it? For MaixPy, MaixVision will offer file management in a future version; until then, you can use the following methods: Here we mainly discuss transferring files through the network. Other methods can be explored on your own by searching for \"transferring files to Linux\": * Ensure the device and computer are connected to the same local network, for example: * When the MaixCAM's USB port is connected to the computer, a virtual network card is created, which can be seen in the computer's device manager, and the device's IP can be found in the device's `Settings > Device Information`. * Alternatively, connect the device to the same local network through `Settings > WiFi`. * Use the SCP or SFTP protocol on the computer to transfer files to the device. There are many specific software options and methods, such as: * On Windows, you can use WinSCP, FileZilla, or the scp command. * On Linux, use FileZilla or the scp command. * On Mac, use FileZilla or the scp command. ## Terminal and Command Line The terminal is a tool for communicating with and operating the Linux system, similar to Windows' `cmd` or `PowerShell`. For example, we can enter `ssh root@maixcam-xxxx.local` in PowerShell on Windows or in a terminal on Linux (you can find the device's specific hostname in the device's `Settings > Device Information`), which allows us to connect to the device through the terminal (both the username and password are `root`). Then, we can operate the device by entering commands.
For instance, the `ls` command can list the files in the current directory of the device, while `cd` is used to switch to a different directory (similar to clicking folders in the file manager on a computer): ```shell cd / # Switch to the root directory ls # Display all files in the current directory (root directory) ``` This will display content similar to the following: ```shell bin lib media root tmp boot lib64 mnt run usr dev linuxrc opt sbin var etc lost+found proc sys ``` To learn more commands, please search for `Linux command line usage tutorials` on your own. This section only introduces basic concepts to beginners so that when developers mention them, you understand what is meant."},"/maixpy/doc/en/basic/maixpy_upgrade.html":{"title":"MaixCAM Update MaixPy.","content":" title: MaixCAM Update MaixPy. There are two methods. If you are new to this and want to keep things simple, you can try the pre-installed MaixPy firmware on the TF card that comes with the device and consider updating it later. However, since we don't know when the TF card you received was manufactured, it is recommended to update the system. ## Updating the System Directly (Highly Recommended) Follow the steps in [Upgrading and Flashing the System](./os.html) to upgrade to the latest system, which already includes the newest MaixPy firmware. ## Updating Only the MaixPy Firmware Check the latest version information and release notes in the [MaixPy repository release page](https://github.com/sipeed/MaixPy/releases). It includes details about the MaixPy firmware and the system version corresponding to each release. If you prefer not to update the system (system changes are usually minimal; you can check whether there are any system-related changes in the MaixPy release notes before deciding), you can simply update the MaixPy firmware: * Set up WiFi in the settings to connect the system to the internet. * Click on `Update MaixPy` in the settings app to proceed with the update. You can also execute Python code to call the system command to install it: ```python import os os.system(\"pip install MaixPy -U\") ``` > If you are comfortable using the terminal, you can also update MaixPy by running `pip install MaixPy -U` in the terminal. You can also download the `wheel` file (`.whl` format) manually, send it to the device (for transfer methods, see [MaixVision Usage](./maixvision.html)), and then install it with the `pip install *****.whl` command."},"/maixpy/doc/en/basic/auto_start.html":{"title":"MaixPy/MaixCAM Application Auto-Start at Boot","content":" title: MaixPy/MaixCAM Application Auto-Start at Boot Packaged applications can be set to automatically start when the device boots up, bypassing the application menu and directly launching the specified application. ## Method One for Setting Application Auto-Start First, package and install the application, then go to `Settings > Auto Start` on your device to select the application you want to auto-start. To cancel auto-start, you can also adjust it here. ## Method Two for Setting Application Auto-Start Run the Python script below to set it up, and modify the `new_autostart_app_id` variable in the script to the `app_id` you want to set. All installed `app_id`s will be printed out when you run the script, so you can run it once to find the desired `app_id`, modify the variable, and then run it again. To cancel the autostart setting, set it to `None`.
This script can also be found in the `MaixPy` examples under `examples/tools` as `set_autostart.py`: ```python import configparser, os def parse_apps_info(): info_path = \"/maixapp/apps/app.info\" conf = configparser.ConfigParser() conf.read(info_path) version = conf[\"basic\"][\"version\"] apps = {} for id in list(conf.keys()): if id in [\"basic\", \"DEFAULT\"]: continue apps[id] = conf[id] return apps def list_apps(): apps = parse_apps_info() print(f\"APP num: {len(apps)}\") for i, (id, info) in enumerate(apps.items()): name_zh = info.get(\"name[zh]\", \"\") print(f\"{i + 1}. [{info['name']}] {name_zh}:\") print(f\" id: {id}\") print(f\" exec: {info['exec']}\") print(f\" author: {info['author']}\") print(f\" desc: {info['desc']}\") print(f\" desc_zh: {info.get('desc', 'None')}\") print(\"\") def get_curr_autostart_app(): path = \"/maixapp/auto_start.txt\" if os.path.exists(path): with open(path, \"r\") as f: app_id = f.readline().strip() return app_id return None def set_autostart_app(app_id): path = \"/maixapp/auto_start.txt\" if not app_id: if os.path.exists(path): os.remove(path) return with open(path, \"w\") as f: f.write(app_id) if __name__ == \"__main__\": # new_autostart_app_id = \"settings\" # change to the app_id you want to set new_autostart_app_id = None # remove autostart list_apps() print(\"Before set autostart appid:\", get_curr_autostart_app()) set_autostart_app(new_autostart_app_id) print(\"Current autostart appid:\", get_curr_autostart_app()) ``` ## Method Three for Setting Application Auto-Start You can also modify the `/maixapp/auto_start.txt` file on your device to set it up. For file transfer methods, refer to the previous documentation. * First, determine the `id` of the application you want to set. It is set when you package the application; if it is not an application you packaged yourself, you can install it on the device and check the folder names under the device's `/maixapp/apps/` directory, which correspond to the application `id`s (or you can download and check the device's `/maixapp/apps/app.info` file, where each application `id` is indicated inside the `[]` brackets). * Then write the `id` into the `/maixapp/auto_start.txt` file. (You can create the file locally on your computer and then transfer it to the device using `MaixVision`.) * To cancel, delete the `/maixapp/auto_start.txt` file on the device. ## Other Methods For MaixCAM, since the underlying system is Linux, if you are familiar with Linux you can edit the startup scripts in `/etc/rc.local` or `/etc/init.d`. However, note that this method may cause the application to keep running when MaixVision connects, occupying resources (such as the screen and camera) and possibly preventing MaixVision from running programs normally; the first two methods allow MaixVision to terminate the program upon connection so it can run its own programs. Thus, this method is more suitable for background processes that do not occupy screen and camera resources. Generally, if you are not familiar with Linux, it is not recommended to use this method."},"/maixpy/doc/en/vision/yolov5.html":{"title":"MaixPy MaixCAM Using YOLOv5 / YOLOv8 / YOLO11 for Object Detection","content":" title: MaixPy MaixCAM Using YOLOv5 / YOLOv8 / YOLO11 for Object Detection ## Object Detection Concept Object detection refers to detecting the position and category of objects in images or videos, such as identifying apples or airplanes in a picture and marking their locations. Unlike classification, object detection includes positional information.
Therefore, the result of object detection is generally a rectangular box that marks the location of the object. ## Object Detection in MaixPy MaixPy provides `YOLOv5`, `YOLOv8`, and `YOLO11` models by default, which can be used directly: > YOLOv8 requires MaixPy > 4.3.0. > YOLO11 requires MaixPy > 4.7.0. ```python from maix import camera, display, image, nn, app detector = nn.YOLOv5(model=\"/root/models/yolov5s.mud\", dual_buff=True) # detector = nn.YOLOv8(model=\"/root/models/yolov8n.mud\", dual_buff=True) # detector = nn.YOLO11(model=\"/root/models/yolo11n.mud\", dual_buff=True) cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format()) dis = display.Display() while not app.need_exit(): img = cam.read() objs = detector.detect(img, conf_th=0.5, iou_th=0.45) for obj in objs: img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED) msg = f'{detector.labels[obj.class_id]}: {obj.score:.2f}' img.draw_string(obj.x, obj.y, msg, color=image.COLOR_RED) dis.show(img) ``` Example video:
    Here, the camera captures an image, passes it to the `detector` for detection, and then displays the results (classification name and location) on the screen. You can switch between `YOLO11`, `YOLOv5`, and `YOLOv8` simply by replacing the corresponding line and modifying the model file path. For the list of 80 objects supported by the model, see the appendix of this document. For more API usage, refer to the documentation for the [maix.nn](/api/maix/nn.html) module. ## dual_buff for Double Buffering Acceleration You may notice that the model initialization uses `dual_buff` (default value is `True`). Enabling the `dual_buff` parameter can improve efficiency and increase the frame rate. For more details and usage considerations, see the [dual_buff Introduction](./dual_buff.html). ## More Input Resolutions The default model input resolution is `320x224`, which closely matches the aspect ratio of the default screen. You can also download other model resolutions: YOLOv5: [https://maixhub.com/model/zoo/365](https://maixhub.com/model/zoo/365) YOLOv8: [https://maixhub.com/model/zoo/400](https://maixhub.com/model/zoo/400) YOLO11: [https://maixhub.com/model/zoo/453](https://maixhub.com/model/zoo/453) Higher resolutions provide more accuracy, but take longer to process. Choose the appropriate resolution based on your application. ## Which Model to Use: YOLOv5, YOLOv8, or YOLO11? We provide three models: `YOLOv5s`, `YOLOv8n`, and `YOLO11n`. The `YOLOv5s` model is larger, while `YOLOv8n` and `YOLO11n` are slightly faster. According to official data, the accuracy is `YOLO11n > YOLOv8n > YOLOv5s`. You can test them to decide which works best for your situation. Additionally, you may try `YOLOv8s` or `YOLO11s`, which will have a lower frame rate (e.g., `yolov8s_320x224` is 10ms slower than `yolov8n_320x224`), but offer higher accuracy. You can download these models from the model library mentioned above or export them yourself from the official `YOLO` repository. ## Different Resolutions for Camera and Model If the resolution of `img` is different from the model's resolution when using the `detector.detect(img)` function, the function will automatically call `img.resize` to adjust the image to the model's input resolution. The default `resize` method is `image.Fit.FIT_CONTAIN`, which scales while maintaining the aspect ratio and fills the surrounding areas with black. The detected coordinates will also be automatically mapped back to the original `img`. ## Training Your Own Object Detection Model on MaixHub If you need to detect specific objects beyond the 80 categories provided, visit [MaixHub](https://maixhub.com) to learn and train an object detection model. Select \"Object Detection Model\" when creating a project. Refer to the [MaixHub Online Training Documentation](./maixhub_train.html). Alternatively, you can find models shared by community members at the [MaixHub Model Library](https://maixhub.com/model/zoo?platform maixcam). ## Training Your Own Object Detection Model Offline We strongly recommend starting with MaixHub for online training, as the offline method is much more difficult and is not suitable for beginners. Some knowledge may not be explicitly covered here, so be prepared to do further research. Refer to [Training a Custom YOLOv5 Model](./customize_model_yolov5.html) or [Training a Custom YOLOv8/YOLO11 Model Offline](./customize_model_yolov8.html). 
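The "Different Resolutions for Camera and Model" note above can be tried directly. Below is a minimal sketch, based on the YOLOv5 example earlier on this page and assuming the same default `/root/models/yolov5s.mud` model path, that feeds the detector camera frames larger than the model input and relies on `detect()` to resize internally and map the results back to the original image.

```python
# Minimal sketch: camera resolution differs from the model input resolution.
# detect() resizes the frame internally (image.Fit.FIT_CONTAIN by default) and
# maps the returned coordinates back to the original image, so drawing works
# directly in the camera's coordinate system.
from maix import camera, display, image, nn, app

detector = nn.YOLOv5(model='/root/models/yolov5s.mud')  # default model path used above
cam = camera.Camera(640, 480)                           # larger than the model's 320x224 input
dis = display.Display()

while not app.need_exit():
    img = cam.read()
    objs = detector.detect(img, conf_th=0.5, iou_th=0.45)
    for obj in objs:
        # obj.x / obj.y / obj.w / obj.h are already in 640x480 coordinates
        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
        img.draw_string(obj.x, obj.y, f'{detector.labels[obj.class_id]}: {obj.score:.2f}', color=image.COLOR_RED)
    dis.show(img)
```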
## Appendix: 80 Classes The 80 objects in the COCO dataset are: ```txt person bicycle car motorcycle airplane bus train truck boat traffic light fire hydrant stop sign parking meter bench bird cat dog horse sheep cow elephant bear zebra giraffe backpack umbrella handbag tie suitcase frisbee skis snowboard sports ball kite baseball bat baseball glove skateboard surfboard tennis racket bottle wine glass cup fork knife spoon bowl banana apple sandwich orange broccoli carrot hot dog pizza donut cake chair couch potted plant bed dining table toilet tv laptop mouse remote keyboard cell phone microwave oven toaster sink refrigerator book clock vase scissors teddy bear hair dryer toothbrush ```"},"/maixpy/doc/en/vision/ai.html":{"title":"MaixCAM MaixPy Basic Knowledge of AI Vision","content":" title: MaixCAM MaixPy Basic Knowledge of AI Vision update: date: 2024 04 03 author: neucrack version: 1.0.0 content: Initial documentation ## Introduction If you don't have an AI background, you can first read [What is Artificial Intelligence (AI) and Machine Learning](https://wiki.sipeed.com/ai/en/basic/what_is_ai.html) to understand the basic concepts of AI before learning about AI. Then, the visual AI we use is generally based on the `deep neural network learning` method. If you are interested, you can check out [Deep Neural Network (DNN) Basics](https://wiki.sipeed.com/ai/en/basic/dnn_basic.html). ## Using Visual AI in MaixPy Using visual AI in MaixPy is very simple. By default, commonly used AI models are provided, and you can use them directly without having to train the models yourself. You can find the `maixcam` models in the [MaixHub Model Library](https://maixhub.com/model/zoo). Additionally, the underlying APIs have been well encapsulated, and you only need to make simple calls to implement them. If you want to train your own model, you can start with [MaixHub Online Training](https://maixhub.com/model/training/project). On the online platform, you can train models just by clicking, without the need to purchase expensive machines, set up complex development environments, or write code, making it very suitable for beginners and also for experienced users who are too lazy to read code. Generally, once you have obtained the model file, you can transfer it to the device and call the MaixPy API to use it. The specific calling methods are discussed in the following sections."},"/maixpy/doc/en/vision/display.html":{"title":"MaixCAM MaixPy Screen Usage","content":" title: MaixCAM MaixPy Screen Usage update: date: 2024 03 31 author: neucrack version: 1.0.0 content: Initial document ## Introduction MaixPy provides the `display` module, which can display images on the screen, and can also send images to MaixVision for display, facilitating debugging and development. ## API Documentation This document introduces commonly used methods. For more APIs, please refer to the [display](/api/maix/display.html) section of the API documentation. ## Using the Screen * Import the `display` module: ```python from maix import display ``` * Create a `Display` object: ```python disp display.Display() ``` * Display an image: ```python disp.show(img) ``` Here, the `img` object is a `maix.image.Image` object, which can be obtained through the `read` method of the `camera` module, or loaded from an image file in the file system using the `load` method of the `image` module, or created as a blank image using the `Image` class of the `image` module. 
For example: ```python from maix import image, display disp = display.Display() img = image.load(\"/root/dog.jpg\") disp.show(img) ``` Here, you need to transfer the `dog.jpg` file to the `/root` directory on the device first. Display text: ```python from maix import image, display disp = display.Display() img = image.Image(320, 240) img.draw_rect(0, 0, disp.width(), disp.height(), color=image.Color.from_rgb(255, 0, 0), thickness=1) img.draw_rect(10, 10, 100, 100, color=image.Color.from_rgb(255, 0, 0)) img.draw_string(10, 10, \"Hello MaixPy!\", color=image.Color.from_rgb(255, 255, 255)) disp.show(img) ``` Read an image from the camera and display it: ```python from maix import camera, display, app disp = display.Display() cam = camera.Camera(320, 240) while not app.need_exit(): img = cam.read() disp.show(img) ``` > Here, `while not app.need_exit():` is used to facilitate exiting the loop when the `app.set_exit_flag()` method is called elsewhere. ## Adjusting Backlight Brightness You can manually adjust the backlight brightness in the system's \"Settings\" app. If you want to adjust the backlight brightness programmatically, you can use the `set_backlight` method, with the parameter being the brightness percentage, ranging from 0 to 100: ```python disp.set_backlight(50) ``` Note that when the program exits and returns to the app selection interface, the backlight brightness will automatically revert to the system setting. ## Displaying on MaixVision When running code in MaixVision, images can be displayed on MaixVision for easier debugging and development. When calling the `show` method, the image will be automatically compressed and sent to MaixVision for display. Of course, if you don't have a screen, or to save memory by not initializing the screen, you can also directly call the `send_to_maixvision` function of the `maix.display` module to send the image to MaixVision for display. ```python from maix import image, display img = image.Image(320, 240) disp = display.Display() img.draw_rect(0, 0, img.width(), img.height(), color=image.Color.from_rgb(255, 0, 0), thickness=1) img.draw_rect(10, 10, 100, 100, color=image.Color.from_rgb(255, 0, 0)) img.draw_string(10, 10, \"Hello MaixPy!\", color=image.Color.from_rgb(255, 255, 255)) display.send_to_maixvision(img) ``` ## Replacing with Other Screen Models If you wish to switch to a screen of a different size, you can consult and purchase from the [store](https://wiki.sipeed.com/store). For MaixCAM, the following four screen options are currently supported: * 2.3-inch 552x368 resolution capacitive touch screen: The default screen that comes with MaixCAM. * 2.4-inch 640x480 resolution capacitive touch screen: The default screen that comes with MaixCAM Pro. * 5-inch 854x480 resolution non-touch screen: Note that this is a non-touch screen, similar in size to a mobile phone screen. * 7-inch 1280x800 resolution capacitive touch screen: A large 7-inch screen, suitable for scenarios requiring a fixed screen display. The image refresh time difference between different screens is about 1 to 5 milliseconds, which is not significant; the main difference lies in the image resolution, which affects image processing time. When replacing the screen, you must also **modify the configuration file**; otherwise, mismatched refresh timing could **cause screen burn-in** (leaving a ghost image on the screen). It’s important to follow the steps strictly as outlined below.
If screen burn in occurs, don’t panic; powering off and leaving it overnight usually resolves the issue. * Follow the system burning documentation to burn the system. Once completed, a USB drive will appear. * Open the USB drive, and you will see a `uEnv.txt` file. * Edit the `uEnv.txt` file, modifying the `pannel` key value as follows: * 2.3 inch (MaixCAM default screen): `st7701_hd228001c31`. * 2.4 inch (MaixCAM Pro default screen): `st7701_lct024bsi20`. * 5 inch: `st7701_dxq5d0019_V0`, with the earlier (2023) test screen being `st7701_dxq5d0019b480854`. * 7 inch: `mtd700920b`, with the earlier (2023) test screen being `zct2133v1`. * Save the `uEnv.txt` file, and **click to eject the USB drive**—do not just disconnect the power, or the file may be lost. * Press the board's `reset` button, or power cycle to restart. The above method is the safest, ensuring the screen model is set correctly before powering on. If you have already burned the system, you can also modify the system’s `/boot/uEnv.txt` file and then reboot."},"/maixpy/doc/en/vision/ocr.html":{"title":"OCR Image Text Recognition with MaixCAM MaixPy","content":" title: OCR Image Text Recognition with MaixCAM MaixPy ## Introduction to OCR OCR (Optical Character Recognition) refers to the visual recognition of text in images. It can be applied in various scenarios, such as: * Recognizing text/numbers on cards * Extracting text from cards, such as ID cards * Digitizing paper documents * Reading digital displays, useful for meter reading and digitizing old instrument data * License plate recognition ## Using OCR in MaixPy MaixPy has integrated [PaddleOCR](https://github.com/PaddlePaddle/PaddleOCR), an open source OCR algorithm developed by Baidu. For understanding the principles, you can refer to this open source project. ![OCR](../../assets/ocr.jpg) **First, ensure that your MaixPy version is > 4.6.** Then, execute the code: (The complete, latest code can be found in the [MaixPy repository](https://github.com/sipeed/MaixPy/blob/main/examples/vision/ai_vision/nn_pp_ocr.py); please refer to the source code.) ```python from maix import camera, display, image, nn, app model \"/root/models/pp_ocr.mud\" ocr nn.PP_OCR(model) cam camera.Camera(ocr.input_width(), ocr.input_height(), ocr.input_format()) dis display.Display() image.load_font(\"ppocr\", \"/maixapp/share/font/ppocr_keys_v1.ttf\", size 20) image.set_default_font(\"ppocr\") while not app.need_exit(): img cam.read() objs ocr.detect(img) for obj in objs: points obj.box.to_list() img.draw_keypoints(points, image.COLOR_RED, 4, 1, 1) img.draw_string(obj.box.x4, obj.box.y4, obj.char_str(), image.COLOR_RED) dis.show(img) ``` You can see that `ocr nn.PP_OCR(model)` loads the model, and then `ocr.detect(img)` detects and recognizes the text, displaying the results on the screen. ## More Model Options You can download more complete models with different input resolutions, languages, and versions from the [MaixHub Model Download](https://maixhub.com/model/zoo/449) (MaixPy currently defaults to the pp_ocr.mud model, which uses PPOCRv3 for detection and v4 for recognition). ## Recognizing Without Detection If you already have a processed image with known coordinates for the four corners of the text, you can skip calling the `detect` function and simply call the `recognize` function. This way, it will only recognize the text in the image without detection. ## Custom Models The default model provides detection and recognition for Chinese and English text. 
If you have specific requirements, such as another language or only want to detect certain shapes without recognizing all types of text, you can download the corresponding model from the [PaddleOCR Official Model Library](https://paddlepaddle.github.io/PaddleOCR/ppocr/model_list.html) and convert it to a format supported by MaixCAM. The most complex part here is converting the model into a format usable by MaixCAM, which is a **relatively complex** process that requires basic Linux skills and adaptability. * First, either train your model using PaddleOCR source code or download the official models. Choose PP OCRv3 for detection because it is efficient and faster than v4, and download the v4 model for recognition; tests show that v3 does not perform well when quantized on MaixCAM. * Then, convert the model to ONNX: ```shell model_path ./models/ch_PP OCRv3_rec_infer paddle2onnx model_dir ${model_path} model_filename inference.pdmodel params_filename inference.pdiparams save_file ${model_path}/inference.onnx opset_version 14 enable_onnx_checker True ``` * Next, set up the environment according to the [ONNX to MUD format model documentation](../ai_model_converter/maixcam.html) and convert the model. Sample conversion scripts are provided in the appendix. * Finally, load and run it using MaixPy. ## Appendix: Model Conversion Scripts Detection: ```shell #!/bin/bash set e net_name ch_PP_OCRv3_det input_w 320 input_h 224 output_name sigmoid_0.tmp_0 # scale 1/255.0 # \"mean\": [0.485, 0.456, 0.406], # \"std\": [0.229, 0.224, 0.225], # mean: mean * 255 # scale: 1/(std*255) # mean: 123.675, 116.28, 103.53 # scale: 0.01712475, 0.017507, 0.01742919 mkdir p workspace cd workspace # convert to mlir model_transform.py \\ model_name ${net_name} \\ model_def ../${net_name}.onnx \\ input_shapes [[1,3,${input_h},${input_w}]] \\ mean \"123.675,116.28,103.53\" \\ scale \"0.01712475,0.017507,0.01742919\" \\ keep_aspect_ratio \\ pixel_format bgr \\ channel_format nchw \\ output_names \"${output_name}\" \\ test_input ../test_images/test3.jpg \\ test_result ${net_name}_top_outputs.npz \\ tolerance 0.99,0.99 \\ mlir ${net_name}.mlir # export bf16 model # not use quant_input, use float32 for easy coding model_deploy.py \\ mlir ${net_name}.mlir \\ quantize BF16 \\ processor cv181x \\ test_input ${net_name}_in_f32.npz \\ test_reference ${net_name}_top_outputs.npz \\ model ${net_name}_bf16.cvimodel echo \"calibrate for int8 model\" # export int8 model run_calibration.py ${net_name}.mlir \\ dataset ../images \\ input_num 200 \\ o ${net_name}_cali_table echo \"convert to int8 model\" # export int8 model # add quant_input, use int8 for faster processing in maix.nn.NN.forward_image model_deploy.py \\ mlir ${net_name}.mlir \\ quantize INT8 \\ quant_input \\ calibration_table ${net_name}_cali_table \\ processor cv181x \\ test_input ${net_name}_in_f32.npz \\ test_reference ${net_name}_top_outputs.npz \\ tolerance 0.9,0.5 \\ model ${net_name}_int8.cvimodel ``` Recognition: ```shell #!/bin/bash set e # net_name ch_PP_OCRv4_rec # output_name softmax_11.tmp_0 net_name ch_PP_OCRv3_rec_infer_sophgo output_name softmax_5.tmp_0 input_w 320 input_h 48 cali_images ../images_crop_320 # scale 1/255.0 # \"mean\": [0.5, 0.5, 0.5], # \"std\": [0.5, 0.5, 0.5], # mean: mean * 255 # scale: 1/(std*255) # mean: 127.5,127.5,127.5 # scale: 0.00784313725490196,0.00784313725490196,0.00784313725490196 mkdir p workspace cd workspace # convert to mlir model_transform.py \\ model_name ${net_name} \\ model_def ../${net_name}.onnx \\ input_shapes 
[[1,3,${input_h},${input_w}]] \\ mean \"127.5,127.5,127.5\" \\ scale \"0.00784313725490196,0.00784313725490196,0.00784313725490196\" \\ keep_aspect_ratio \\ pixel_format bgr \\ channel_format nchw \\ output_names \"${output_name}\" \\ test_input ../test_images/test3.jpg \\ test_result ${net_name}_top_outputs.npz \\ tolerance 0.99,0.99 \\ mlir ${net_name}.mlir # export bf16 model # not use quant_input, use float32 for easy coding model_deploy.py \\ mlir ${net_name}.mlir \\ quantize BF16 \\ processor cv181x \\ test_input ${net_name}_in_f32.npz \\ test_reference ${net_name}_top_outputs.npz \\ model ${net_name}_bf16.cvimodel echo \"calibrate for int8 model\" # export int8 model run_calibration.py ${net_name}.mlir \\ dataset $cali_images \\ input_num 200 \\ o ${net_name}_cali_table echo \"convert to int8 model\" # export int8 model # add quant_input, use int8 for faster processing in maix.nn.NN.forward_image model_deploy.py \\ mlir ${net_name}.mlir \\ quantize INT8 \\ quant_input \\ calibration_table ${net _name}_cali_table \\ processor cv181x \\ test_input ${net_name}_in_f32.npz \\ test_reference ${net_name}_top_outputs.npz \\ tolerance 0.9,0.5 \\ model ${net_name}_int8.cvimodel ```"},"/maixpy/doc/en/vision/face_detection.html":{"title":"MaixCAM MaixPy Face Detection and Keypoint Detection","content":" title: MaixCAM MaixPy Face Detection and Keypoint Detection ## Introduction Face detection can be applied in many scenarios, such as providing the face detection step for face recognition, or for face tracking applications, etc. The face detection provided here can not only detect faces but also detect 5 key points, including two eyes, one nose, and two corners of the mouth. ![face detection](../../assets/face_detection.jpg) ## Using Face Detection in MaixPy MaixPy officially provides three face detection models from the open source projects [Face Detector 1MB with landmark](https://github.com/biubug6/Face Detector 1MB with landmark), [Retinaface](https://github.com/biubug6/Pytorch_Retinaface), and [YOLOv8 face](https://github.com/derronqi/yolov8 face). All three models can be used. `YOLOv8 face` performs better but is slightly slower, so you can choose based on your testing. Using `YOLOv8 face` (requires MaixPy version > 4.3.8): ```python from maix import camera, display, image, nn, app detector nn.YOLOv8(model \"/root/models/yolov8n_face.mud\", dual_buff True) cam camera.Camera(detector.input_width(), detector.input_height(), detector.input_format()) dis display.Display() while not app.need_exit(): img cam.read() objs detector.detect(img, conf_th 0.5, iou_th 0.45, keypoint_th 0.5) for obj in objs: img.draw_rect(obj.x, obj.y, obj.w, obj.h, color image.COLOR_RED) msg f'{detector.labels[obj.class_id]}: {obj.score:.2f}' img.draw_string(obj.x, obj.y, msg, color image.COLOR_RED) detector.draw_pose(img, obj.points, 2, image.COLOR_RED) dis.show(img) ``` For the other two models: Here, a line of commented out code is used to load the `Retinaface` model. Choose which line of code to use based on the model you download. 
```python from maix import camera, display, image, nn, app import math detector = nn.Retinaface(model=\"/root/models/retinaface.mud\") # detector = nn.FaceDetector(model=\"/root/models/face_detector.mud\") cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format()) dis = display.Display() while not app.need_exit(): img = cam.read() objs = detector.detect(img, conf_th=0.4, iou_th=0.45) for obj in objs: img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED) radius = math.ceil(obj.w / 10) img.draw_keypoints(obj.points, image.COLOR_RED, size=radius if radius < 5 else 4) dis.show(img) ``` ## Model Downloads and Other Resolution Models Download the models; the compressed package contains multiple resolutions to choose from. Higher-resolution models are more accurate but take longer to process: * [Face Detector 1MB with landmark](https://maixhub.com/model/zoo/377) * [Retinaface](https://maixhub.com/model/zoo/378) * [YOLOv8-face](https://maixhub.com/model/zoo/407) ## dual_buff Dual Buffer Acceleration You may have noticed that the model initialization uses `dual_buff` (which defaults to `True`). Enabling the `dual_buff` parameter can improve running efficiency and increase the frame rate. For detailed principles and usage notes, see the [dual_buff Introduction](./dual_buff.html)."},"/maixpy/doc/en/vision/qrcode.html":{"title":"MaixCAM MaixPy QR Code Recognition","content":" title: MaixCAM MaixPy QR Code Recognition update: date: 2024-04-03 author: lxowalle version: 1.0.0 content: Initial document Before reading this article, make sure you are familiar with how to develop with MaixCAM. For details, please read [Quick Start](../index.html). ## Introduction This article explains how to use MaixPy for QR code recognition. ## Using MaixPy to Recognize QR Codes MaixPy's `maix.image.Image` includes the `find_qrcodes` method for QR code recognition. ### How to Recognize QR Codes A simple example that recognizes QR codes and draws a bounding box: ```python from maix import image, camera, display cam = camera.Camera(320, 240) disp = display.Display() while True: img = cam.read() qrcodes = img.find_qrcodes() for qr in qrcodes: corners = qr.corners() for i in range(4): img.draw_line(corners[i][0], corners[i][1], corners[(i + 1) % 4][0], corners[(i + 1) % 4][1], image.COLOR_RED) img.draw_string(qr.x(), qr.y() - 15, qr.payload(), image.COLOR_RED) disp.show(img) ``` Steps: 1. Import the image, camera, and display modules: ```python from maix import image, camera, display ``` 2. Initialize the camera and display: ```python cam = camera.Camera(320, 240) # Initialize the camera with a resolution of 320x240 in RGB format disp = display.Display() ``` 3. Capture and display images from the camera: ```python while True: img = cam.read() disp.show(img) ``` 4. Use the `find_qrcodes` method to detect QR codes in the camera image: ```python qrcodes = img.find_qrcodes() ``` `img` is the camera image captured by `cam.read()`. When initialized as `cam = camera.Camera(320, 240)`, the `img` object is a 320x240 resolution RGB image. `img.find_qrcodes` searches for QR codes and saves the results in `qrcodes` for further processing. 5. Process and display the results of QR code recognition on the screen: ```python for qr in qrcodes: corners = qr.corners() for i in range(4): img.draw_line(corners[i][0], corners[i][1], corners[(i + 1) % 4][0], corners[(i + 1) % 4][1], image.COLOR_RED) img.draw_string(qr.x(), qr.y() - 15, qr.payload(), image.COLOR_RED) ``` `qrcodes` contains the results from `img.find_qrcodes()`.
If no QR codes are found, `qrcodes` will be empty. `qr.corners()` retrieves the coordinates of the four corners of the detected QR code. `img.draw_line()` uses these coordinates to draw the QR code outline. `img.draw_string` displays information about the QR code content and position. `qr.x()` and `qr.y()` retrieve the x and y coordinates of the QR code's top-left corner, and `qr.payload()` retrieves the content of the QR code. ### Common Parameter Explanation Common parameters and their explanations are listed below. If you cannot find parameters that fit your application, consider whether to use a different algorithm or extend the functionality based on the current algorithm's results. Parameter `roi` sets the rectangular area for the algorithm to compute, `roi=[x, y, w, h]`, where x and y denote the top-left coordinates of the rectangle and w and h denote its width and height, defaulting to the entire image. Example, computing the area at coordinates (50, 50) with a width and height of 100:
`img.find_qrcodes(roi=[50, 50, 100, 100])` This article introduces common methods. For more API details, refer to the [image](../../../api/maix/image.html) section of the API documentation."},"/maixpy/doc/en/vision/segmentation.html":{"title":"MaixCAM MaixPy Image Semantic Segmentation","content":" title: MaixCAM MaixPy Image Semantic Segmentation ## Introduction Image semantic segmentation refers to identifying specific objects in an image and recognizing the pixels that represent the parts of those objects. For example, in the image below, the human body and the dog are identified, and their body parts are segmented. This can be used for collision detection, autonomous vehicle navigation, area measurement, and more. ![](../../assets/yolov8_seg.jpg) ## Image Semantic Segmentation with MaixPy MaixPy includes `YOLOv8-seg` and `YOLO11-seg` for object detection and image segmentation. MaixPy provides a model for 80 object categories from the COCO dataset by default. > To use YOLOv8, MaixPy version must be > 4.4.0 > To use YOLO11, MaixPy version must be > 4.7.0 The following code demonstrates the usage, and you can also find it in the [MaixPy examples](https://github.com/sipeed/maixpy/tree/main/examples/). ```python from maix import camera, display, image, nn, app, time detector = nn.YOLOv8(model=\"/root/models/yolov8n_seg.mud\", dual_buff=True) # detector = nn.YOLO11(model=\"/root/models/yolo11n_seg.mud\", dual_buff=True) cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format()) dis = display.Display() while not app.need_exit(): img = cam.read() objs = detector.detect(img, conf_th=0.5, iou_th=0.45) for obj in objs: # img.draw_image(obj.x, obj.y, obj.seg_mask) detector.draw_seg_mask(img, obj.x, obj.y, obj.seg_mask, threshold=127) img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED) msg = f'{detector.labels[obj.class_id]}: {obj.score:.2f}' img.draw_string(obj.x, obj.y, msg, color=image.COLOR_RED) dis.show(img) ``` > To switch between YOLOv8 and YOLO11, just modify the commented part of the above code. ## Models with More Resolutions The default model resolution is 320x224. For models with different resolutions, download them from the MaixHub model library: * YOLOv8-seg: [MaixHub Model Library](https://maixhub.com/model/zoo/413) * YOLO11-seg: [MaixHub Model Library](https://maixhub.com/model/zoo/455) ## dual_buff for Double Buffering Acceleration You may notice that `dual_buff` is used for model initialization (default value is `True`). Enabling the `dual_buff` parameter can improve efficiency and increase the frame rate. For more details and considerations, refer to the [dual_buff Introduction](./dual_buff.html). ## Customizing Your Own Object Segmentation Model The provided models are based on the 80 categories from the COCO dataset. If this does not meet your needs, you can train your own specific object detection and segmentation model. Follow the instructions in [Offline Training YOLOv8/YOLO11](./customize_model_yolov8.html) to use the official YOLOv8/YOLO11 model training method, and then convert it to a model format supported by MaixCAM."},"/maixpy/doc/en/vision/image_ops.html":{"title":"MaixCAM MaixPy Basic Image Operations","content":" title: MaixCAM MaixPy Basic Image Operations update: date: 2024-04-03 author: neucrack version: 1.0.0 content: Initial document ## Introduction Images play a very important role in visual applications.
Whether it's a picture or a video, since a video is essentially a series of frames, image processing is the foundation of visual applications. ## API Documentation This document introduces common methods. For more APIs, refer to the documentation of the maix.image module. ## Image Formats MaixPy provides a basic image module `image`, where the most important part is the `image.Image` class, which is used for image creation and various basic image operations, as well as image loading and saving. There are many image formats, and we generally use `image.Format.FMT_RGB888` or `image.Format.FMT_RGBA8888` or `image.Format.FMT_GRAYSCALE` or `image.Format.FMT_BGR888`, etc. We all know that the three colors `RGB` can synthesize any color, so in most cases, we use `image.Format.FMT_RGB888`, which is sufficient. `RGB888` is `RGB packed` in memory, i.e., the arrangement in memory is: `pixel1_red, pixel1_green, pixel1_blue, pixel2_red, pixel2_green, pixel2_blue, ...` arranged in sequence. ## Creating an Image Creating an image is very simple, you only need to specify the width and height of the image, and the image format: ``` from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) print(img) print(img.width(), img.height(), img.format()) ``` `320` is the width of the image, `240` is the height of the image, and `image.Format.FMT_RGB888` is the format of the image. The format parameter can be omitted, and the default is `image.Format.FMT_RGB888`. Here, you can get the width, height, and format of the image using `img.width()`, `img.height()`, and `img.format()`. ## Displaying on the Screen MaixPy provides the `maix.display.Display` class, which can conveniently display images: ``` from maix import image, display disp display.Display() img image.Image(320, 240, image.Format.FMT_RGB888) disp.show(img) ``` Note that here, since there is no image data, a black image is displayed. See the following sections for how to modify the image. ## Reading Images from the File System MaixPy provides the `maix.image.load` method, which can read images from the file system: ``` from maix import image img image.load(\"/root/image.jpg\") if img is None: raise Exception(f\"load image failed\") print(img) ``` Note that here, `/root/image.jpg` has been transferred to the board in advance. You can refer to the previous tutorials for the method. It supports `jpg` and `png` image formats. ## Saving Images to the File System MaixPy's `maix.image.Image` provides the `save` method, which can save images to the file system: ``` from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) # do something with img img.save(\"/root/image.jpg\") ``` ## Drawing Rectangles `image.Image` provides the `draw_rect` method, which can draw rectangles on the image: ``` from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) img.draw_rect(10, 10, 100, 100, image.Color.from_rgb(255, 0, 0)) ``` Here, the parameters are: `x`, `y`, `w`, `h`, `color`. `x` and `y` are the coordinates of the top left corner of the rectangle, `w` and `h` are the width and height of the rectangle, and `color` is the color of the rectangle, which can be created using the `image.Color.from_rgb` method. You can specify the line width of the rectangle using `thickness`, which defaults to `1`. 
You can also draw a solid rectangle by passing `thickness=-1`: ``` from maix import image img = image.Image(320, 240, image.Format.FMT_RGB888) img.draw_rect(10, 10, 100, 100, image.Color.from_rgb(255, 0, 0), thickness=-1) ``` ## Writing Strings `image.Image` provides the `draw_string` method, which can write text on the image: ``` from maix import image img = image.Image(320, 240, image.Format.FMT_RGB888) img.draw_string(10, 10, \"Hello MaixPy\", image.Color.from_rgb(255, 0, 0)) ``` Here, the parameters are: `x`, `y`, `text`, `color`. `x` and `y` are the coordinates of the top-left corner of the text, `text` is the text to be written, and `color` is the color of the text, which can be created using the `image.Color.from_rgb` method. You can also enlarge the font by passing the `scale` parameter: ``` img.draw_string(10, 10, \"Hello MaixPy\", image.Color.from_rgb(255, 0, 0), scale=2) ``` Get the width and height of the text: ``` w, h = img.string_size(\"Hello MaixPy\", scale=2) print(w, h) ``` **Note** that here, `scale` is the magnification factor, and the default is `1`. It should be consistent with the `scale` used in `draw_string`. ## Chinese support and custom fonts The `image` module supports loading `ttf/otf` fonts. The default font only supports English. If you want to display Chinese or use a custom font, first download the font file to the device and then load it. The system also has several built-in fonts under the `/maixapp/share/font` directory. Code example: ```python from maix import image, display, app, time image.load_font(\"sourcehansans\", \"/maixapp/share/font/SourceHanSansCN-Regular.otf\", size=32) print(\"fonts:\", image.fonts()) image.set_default_font(\"sourcehansans\") disp = display.Display() img = image.Image(disp.width(), disp.height()) img.draw_string(2, 2, \"Hello! Hello, world!\", image.Color.from_rgba(255, 0, 0)) disp.show(img) while not app.need_exit(): time.sleep(1) ``` Load the font file and then set the default font; alternatively, you can skip setting the default font and pass the `font` parameter in the drawing function instead: ```python img.draw_string(2, 2, \"你好!Hello, world!\", image.Color.from_rgba(255, 0, 0), font=\"sourcehansans\") ``` Note that the `string_size` method also uses the default font to calculate the size; you can likewise pass the `font` parameter to calculate the size with a specific font. ## Drawing Lines `image.Image` provides the `draw_line` method, which can draw lines on the image: ``` from maix import image img = image.Image(320, 240, image.Format.FMT_RGB888) img.draw_line(10, 10, 100, 100, image.Color.from_rgb(255, 0, 0)) ``` Here, the parameters are: `x1`, `y1`, `x2`, `y2`, `color`. `x1` and `y1` are the coordinates of the starting point of the line, `x2` and `y2` are the coordinates of the end point of the line, and `color` is the color of the line, which can be created using the `image.Color.from_rgb` method. ## Drawing Circles `image.Image` provides the `draw_circle` method, which can draw circles on the image: ``` from maix import image img = image.Image(320, 240, image.Format.FMT_RGB888) img.draw_circle(100, 100, 50, image.Color.from_rgb(255, 0, 0)) ``` Here, the parameters are: `x`, `y`, `r`, `color`. `x` and `y` are the coordinates of the center of the circle, `r` is the radius, and `color` is the color of the circle, which can be created using the `image.Color.from_rgb` method.
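The `string_size` result is handy for layout. Below is a small sketch, using only the `string_size`, `draw_string`, and `draw_rect` calls shown above with the default font, that measures a string and then centers it on the image.

```python
# Measure the text first, then use the measured size to center it and draw a
# box around it. The scale passed to string_size() matches draw_string().
from maix import image, display

disp = display.Display()
img = image.Image(320, 240, image.Format.FMT_RGB888)

text = 'Hello MaixPy'
scale = 2
w, h = img.string_size(text, scale=scale)
x = (img.width() - w) // 2
y = (img.height() - h) // 2
img.draw_rect(x - 4, y - 4, w + 8, h + 8, image.Color.from_rgb(0, 255, 0))
img.draw_string(x, y, text, image.Color.from_rgb(255, 0, 0), scale=scale)
disp.show(img)
```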
## Resizing Images `image.Image` provides the `resize` method, which can resize images: ``` from maix import image img = image.Image(320, 240, image.Format.FMT_RGB888) img_new = img.resize(160, 120) print(img, img_new) ``` Note that here, the `resize` method returns a new image object, and the original image remains unchanged. ## Cropping Images `image.Image` provides the `crop` method, which can crop images: ``` from maix import image img = image.Image(320, 240, image.Format.FMT_RGB888) img_new = img.crop(10, 10, 100, 100) print(img, img_new) ``` Note that here, the `crop` method returns a new image object, and the original image remains unchanged. ## Rotating Images `image.Image` provides the `rotate` method, which can rotate images: ```python from maix import image img = image.Image(320, 240, image.Format.FMT_RGB888) img_new = img.rotate(90) print(img, img_new) ``` Note that here, the `rotate` method returns a new image object, and the original image remains unchanged. ## Copying Images `image.Image` provides the `copy` method, which can copy an independent image: ```python from maix import image img = image.Image(320, 240, image.Format.FMT_RGB888) img_new = img.copy() print(img, img_new) ``` ## Affine Transformations `image.Image` provides the `affine` method, which can perform affine transformations. By providing the coordinates of three or more points in the current image and the corresponding coordinates in the target image, you can automatically perform operations such as rotation, scaling, and translation on the image to transform it into the target image: ```python from maix import image img = image.Image(320, 240, image.Format.FMT_RGB888) img_new = img.affine([(10, 10), (100, 10), (10, 100)], [(10, 10), (100, 20), (20, 100)]) print(img, img_new) ``` For more parameters and usage, please refer to the API documentation. ## Drawing Keypoints `image.Image` provides the `draw_keypoints` method, which can draw keypoints on the image: ```python from maix import image img = image.Image(320, 240, image.Format.FMT_RGB888) keypoints = [10, 10, 100, 10, 10, 100] img.draw_keypoints(keypoints, image.Color.from_rgb(255, 0, 0), size=10, thickness=1, fill=False) ``` This draws three red keypoints at the coordinates `(10, 10)`, `(100, 10)`, and `(10, 100)`. The size of the keypoints is `10`, the line width is `1`, and they are not filled. ## Drawing Crosses `image.Image` provides the `draw_cross` method, which can draw crosses on the image: ```python from maix import image img = image.Image(320, 240, image.Format.FMT_RGB888) img.draw_cross(100, 100, image.Color.from_rgb(255, 0, 0), size=5, thickness=1) ``` This draws a red cross at the coordinate `(100, 100)`. The extension size of the cross is `5`, so the length of the line segment is `2 * size + thickness`, and the line width is `1`. ## Drawing Arrows `image.Image` provides the `draw_arrow` method, which can draw arrows on the image: ```python from maix import image img = image.Image(320, 240, image.Format.FMT_RGB888) img.draw_arrow(10, 10, 100, 100, image.Color.from_rgb(255, 0, 0), thickness=1) ``` This draws a red arrow starting from the coordinate `(10, 10)`, with the end point at `(100, 100)`, and a line width of `1`.
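To make the `affine` description above more concrete, here is a sketch that straightens a tilted region. The three source points are hypothetical corner coordinates of a tilted card (top-left, top-right, bottom-left), and the destination points are where those corners should land after straightening; only the `affine` call documented above is used.

```python
# Sketch: straighten a tilted region with affine().
# src_points are hypothetical, e.g. corners of a tilted card found earlier;
# dst_points place those corners on an axis-aligned rectangle at (0, 0).
from maix import image

img = image.Image(320, 240, image.Format.FMT_RGB888)
src_points = [(60, 40), (250, 70), (40, 180)]   # top-left, top-right, bottom-left (hypothetical)
dst_points = [(0, 0), (200, 0), (0, 140)]       # the same corners after straightening
img_card = img.affine(src_points, dst_points)
print(img, img_card)
```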
## Drawing Images `image.Image` provides the `draw_image` method, which can draw images on the image: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) img2 image.Image(100, 100, image.Format.FMT_RGB888) img2.draw_rect(10, 10, 90, 90, image.Color.from_rgb(255, 0, 0)) img.draw_image(10, 10, img2) ``` ## Converting Formats `image.Image` provides the `to_format` method, which can convert image formats: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) img_new img.to_format(image.Format.FMT_BGR888) print(img, img_new) img_jpg img.to_format(image.Format.FMT_JPEG) print(img, img_new) ``` Note that here, the `to_format` method returns a new image object, and the original image remains unchanged. ## Converting between Numpy/OpenCV and maix.image.Image Formats Refer to [MaixPy use OpenCV documentation](./opencv.html) ## Converting between bytes Data `image.Image` provides the `to_bytes` method, which can convert an image to `bytes` data: ```python from maix import image img image.Image(320, 240, image.Format.FMT_RGB888) data img.to_bytes() print(type(data), len(data), img.data_size()) img_jpeg image.from_bytes(320, 240, image.Format.FMT_RGB888, data) print(img_jpeg) img img_jpeg.to_format(image.Format.FMT_RGB888) print(img) ``` Here, `to_bytes` returns a new `bytes` object, which is independent memory and does not affect the original image. The `image.Image` constructor can directly construct an image object from `bytes` data by passing the `data` parameter. Note that the new image is also independent memory and does not affect `data`. Since memory copying is involved, this method is relatively time consuming and should not be used frequently. > If you want to optimize your program without copying (not recommended for casual use, as poorly written code can easily cause crashes), please refer to the API documentation. ## More Basic API Usage For more API usage, please refer to the documentation of the maix.image module."},"/maixpy/doc/en/vision/touchscreen.html":{"title":"MaixPy / MaixCAM Touchscreen Usage Guide","content":" title: MaixPy / MaixCAM Touchscreen Usage Guide ## Introduction MaixCAM comes equipped with a touchscreen, which, when used in conjunction with applications, can facilitate numerous engaging functionalities. We can utilize APIs to detect touch interactions on the touchscreen. ## Reading Touch Input with MaixPy MaixPy offers a straightforward `maix.touchscreen.TouchScreen` class for reading touch inputs. Here's an example: ```python from maix import touchscreen, app, time ts touchscreen.TouchScreen() pressed_already False last_x 0 last_y 0 last_pressed False while not app.need_exit(): x, y, pressed ts.read() if x ! last_x or y ! last_y or pressed ! last_pressed: print(x, y, pressed) last_x x last_y y last_pressed pressed if pressed: pressed_already True else: if pressed_already: print(f\"clicked, x: {x}, y: {y}\") pressed_already False time.sleep_ms(1) # sleep some time to free some CPU usage ``` ## Interactivity with the Screen Integrating the screen can enable various interactive user experiences. More examples can be found in the [MaixPy/examples/vision/touchscreen](https://github.com/sipeed/MaixPy) directory. As previously described, to display content on the screen, typically, a `maix.image.Image` object is created and displayed using `disp.show(img)`. 
Implementing a button is as simple as drawing one on the image and then detecting touches within its area, ensuring that the image's dimensions match those of the screen: ```python from maix import touchscreen, app, time, display, image ts = touchscreen.TouchScreen() disp = display.Display() img = image.Image(disp.width(), disp.height()) # draw exit button exit_label = \"< Exit\" size = image.string_size(exit_label) exit_btn_pos = [0, 0, 8*2 + size.width(), 12 * 2 + size.height()] img.draw_string(8, 12, exit_label, image.COLOR_WHITE) img.draw_rect(exit_btn_pos[0], exit_btn_pos[1], exit_btn_pos[2], exit_btn_pos[3], image.COLOR_WHITE, 2) def is_in_button(x, y, btn_pos): return x > btn_pos[0] and x < btn_pos[0] + btn_pos[2] and y > btn_pos[1] and y < btn_pos[1] + btn_pos[3] while not app.need_exit(): x, y, pressed = ts.read() if is_in_button(x, y, exit_btn_pos): app.set_exit_flag(True) img.draw_circle(x, y, 1, image.Color.from_rgb(255, 255, 255), 2) disp.show(img) ``` ## Handling Different Screen and Image Sizes In the example above, the `img` matches the screen size. If your `img` and screen sizes differ (e.g., using `img = image.Image(240, 240)` on a `640x480` screen), the default behavior of `disp.show(img)` is `image.Fit.FIT_CONTAIN`, which scales the image to `480x480` and fills the sides with black. If a button is drawn on the `240x240` image, such as at coordinates `(0, 0, 60, 40)`, the button will also be scaled up. Thus, the coordinates for touch detection should be adjusted to `((640 - 480) / 2, 0, 480/240*60, 480/240*40)`, which translates to `(80, 0, 120, 80)`. For convenience in scaling images and quickly calculating the positions and sizes of points or rectangles in the scaled image, the `image.resize_map_pos` function is provided: ```python from maix import touchscreen, app, time, display, image ts = touchscreen.TouchScreen() disp = display.Display() img = image.Image(240, 240) img.draw_rect(0, 0, img.width(), img.height(), image.COLOR_WHITE) # draw exit button exit_label = \"< Exit\" size = image.string_size(exit_label) exit_btn_pos = [0, 0, 8*2 + size.width(), 12 * 2 + size.height()] img.draw_string(8, 12, exit_label, image.COLOR_WHITE) img.draw_rect(exit_btn_pos[0], exit_btn_pos[1], exit_btn_pos[2], exit_btn_pos[3], image.COLOR_WHITE, 2) # map the button coordinates on the image to coordinates on the screen exit_btn_disp_pos = image.resize_map_pos(img.width(), img.height(), disp.width(), disp.height(), image.Fit.FIT_CONTAIN, exit_btn_pos[0], exit_btn_pos[1], exit_btn_pos[2], exit_btn_pos[3]) def is_in_button(x, y, btn_pos): return x > btn_pos[0] and x < btn_pos[0] + btn_pos[2] and y > btn_pos[1] and y < btn_pos[1] + btn_pos[3] while not app.need_exit(): x, y, pressed = ts.read() if is_in_button(x, y, exit_btn_disp_pos): app.set_exit_flag(True) # map the screen coordinates back to the corresponding coordinates on the image, then draw a point on the image x, y = image.resize_map_pos_reverse(img.width(), img.height(), disp.width(), disp.height(), image.Fit.FIT_CONTAIN, x, y) img.draw_circle(x, y, 1, image.Color.from_rgb(255, 255, 255), 2) disp.show(img, fit=image.Fit.FIT_CONTAIN) ```"},"/maixpy/doc/en/vision/customize_model_yolov5.html":{"title":"Offline Training of YOLOv5 Model for Custom Object Detection with MaixCAM MaixPy","content":" title: Offline Training of YOLOv5 Model for Custom Object Detection with MaixCAM MaixPy update: date: 2024-6-20 version: v1.0 author: neucrack content: Documentation written ## Introduction The default official model provides detection for 80 types of objects.
If this does not meet your needs, you can train your own detection objects using two methods: * Use [MaixHub Online Training](./maixhub_train.html), which is convenient and fast, without needing to buy a server or set up an environment; just a few clicks of the mouse. * Set up a training environment on your own computer or server. The former is simple and fast, while the latter uses your own computer with no limit on the number of training images, but it is also much more difficult. **Note:** This article explains how to customize training, but some basic knowledge is assumed. If you do not have this knowledge, please learn it yourself: * This article will not explain how to install the training environment. Please search for how to install it yourself (PyTorch environment installation) and test it. * This article will not explain the basic concepts of machine learning or basic Linux usage. If you think there is something in this article that needs improvement, feel free to click `Edit this article` in the upper-right corner to contribute and submit a documentation PR. ## Process and Goals of this Article To use our model on MaixPy (MaixCAM), the following process is required: * Set up the training environment (this is not covered in this article; please search for PyTorch training environment setup). * Pull the [yolov5](https://github.com/ultralytics/yolov5) source code to your local machine. * Prepare the dataset and format it as required by the yolov5 project. * Train the model to get an `onnx` model file, which is the final output file of this article. * Convert the `onnx` model to a `MUD` file supported by MaixPy, as detailed in [MaixCAM Model Conversion](../ai_model_converter/maixcam.html). * Use MaixPy to load and run the model. ## Reference Articles Since this is a relatively common operational process, this article only provides an overview. For specific details, you can refer to the **[YOLOv5 official code and documentation](https://github.com/ultralytics/yolov5)** (**recommended**) and search for training tutorials to ultimately export the onnx file. Here are some articles from the MaixHub community: * [Deploy yolov5s custom model on maixcam](https://maixhub.com/share/23) * [【Process Sharing】YOLOv5 training custom dataset and deploying on Maixcam](https://maixhub.com/share/32) * [YOLOv5 cat and dog recognition model—free cloud training (reproducible by beginners)](https://maixhub.com/share/25) If you find any good articles, feel free to modify this article and submit a PR. ## Exporting YOLOv5 ONNX Model File YOLOv5 provides an export option. Execute the following command in the `yolov5` directory: ```shell python export.py --weights ../yolov5s.pt --include onnx --img 224 320 ``` This command loads the `pt` parameter file and converts it to `onnx`, while also specifying the resolution. Note that the height comes first, followed by the width. The model was trained at `640x640`, but we re-specify a smaller resolution here to improve running speed. The resolution `320x224` is used because it is closer to the MaixCAM screen ratio for better display; you can set it according to your needs. ## MaixCAM MUD File When converting onnx to a `mud` format model file, refer to [MaixCAM Model Conversion](../ai_model_converter/maixcam.html). You will eventually get a `mud` file and a `cvimodel` file.
The content of the `mud` file is:

```ini
[basic]
type = cvimodel
model = yolov8n.cvimodel

[extra]
model_type = yolov8
input_type = rgb
mean = 0, 0, 0
scale = 0.00392156862745098, 0.00392156862745098, 0.00392156862745098
labels = person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair drier, toothbrush
```

Replace the parameters according to your own training. For example, if you trained the model to detect digits `0-9`, just replace the labels with `labels = 0,1,2,3,4,5,6,7,8,9`, then place the two files in the same directory and load the `mud` file when running the model.

## Upload and Share on MaixHub

Upload and share your model on the [MaixHub model zoo](https://maixhub.com/model/zoo?platform=maixcam); consider providing several resolutions for others to choose from."},"/maixpy/doc/en/vision/line_tracking.html":{"title":"MaixCAM MaixPy Line Tracking","content":" title: MaixCAM MaixPy Line Tracking update: date: 2024-05-09 author: lxowalle version: 1.0.0 content: Initial document

Before reading this article, make sure you already know how to develop with MaixCAM. For details, please read [Quick Start](../index.html).

## Introduction

In vision applications, line tracking is often needed, for example in line-following robots. In this article, we will describe:

* How to use MaixPy to track lines.
* How to track lines using MaixCAM's default application.

## How to use MaixPy to track lines

The `maix.image.Image` module in MaixPy provides the `get_regression` method, which can conveniently find lines.

### Code example

A simple example of finding and drawing a line:

```python
from maix import camera, display, image

cam = camera.Camera(320, 240)
disp = display.Display()

# thresholds = [[0, 80, 40, 80, 10, 80]]        # red
thresholds = [[0, 80, -120, -10, 0, 30]]        # green
# thresholds = [[0, 80, 30, 100, -120, -60]]    # blue

while 1:
    img = cam.read()

    lines = img.get_regression(thresholds, area_threshold=100)
    for a in lines:
        img.draw_line(a.x1(), a.y1(), a.x2(), a.y2(), image.COLOR_GREEN, 2)
        theta = a.theta()
        rho = a.rho()
        if theta > 90:
            theta = 270 - theta
        else:
            theta = 90 - theta
        img.draw_string(0, 0, "theta: " + str(theta) + ", rho: " + str(rho), image.COLOR_BLUE)

    disp.show(img)
```

Steps:

1. Import the image, camera and display modules

```python
from maix import image, camera, display
```

2. Initialize the camera and display

```python
cam = camera.Camera(320, 240)   # initialise the camera, output resolution 320x240 in RGB format
disp = display.Display()
```

3. Get the image from the camera and display it

```python
while 1:
    img = cam.read()
    disp.show(img)
```

4. Call the `get_regression` method to find the straight line in the camera image and draw it on the screen
```python
lines = img.get_regression(thresholds, area_threshold=100)
for a in lines:
    img.draw_line(a.x1(), a.y1(), a.x2(), a.y2(), image.COLOR_GREEN, 2)
    theta = a.theta()
    rho = a.rho()
    if theta > 90:
        theta = 270 - theta
    else:
        theta = 90 - theta
    img.draw_string(0, 0, "theta: " + str(theta) + ", rho: " + str(rho), image.COLOR_BLUE)
```

`img` is the camera image read via `cam.read()`. When the camera is initialised as `cam = camera.Camera(320, 240)`, the `img` object is an RGB image with a resolution of 320x240.

`img.get_regression` is used to find straight lines. `thresholds` is a list of colour thresholds; each element is one colour threshold, and multiple thresholds can be passed in to search for several colours at the same time. Each colour threshold has the format `[L_MIN, L_MAX, A_MIN, A_MAX, B_MIN, B_MAX]`, where `L`, `A` and `B` are the three channels of the `LAB` colour space: the `L` channel is the luminance, the `A` channel is the red-green channel, and the `B` channel is the blue-yellow channel. `pixels_threshold` is a pixel area threshold used to filter out some unwanted lines.

`for a in lines` iterates over the returned `Line` objects, where `a` is the current `Line` object. Normally the `get_regression` function only returns one `Line` object; if you need to find more than one line, try the `find_line` method.

Use `img.draw_line` to draw the found line; `a.x1(), a.y1(), a.x2(), a.y2()` are the coordinates of the two endpoints of the line.

Use `img.draw_string` to show the angle between the line and the x-axis in the upper-left corner. `a.theta()` is the angle between the line and the y-axis; it is converted to `theta` (the angle to the x-axis) for easier understanding. `a.rho()` is the length of the perpendicular from the origin to the line.

5. Run the code through MaixVision, and you can find the line. Take a look at the effect!

![image-20240509110204007](../../../static/image/line_tracking_demo.jpg)

### Common Parameter Explanations

Here are explanations of commonly used parameters. If you cannot find parameters that can implement your application, you may need to consider using other algorithms or extending the required functionality based on the current algorithm's results.
| Parameter | Description | Example |
| --- | --- | --- |
| `thresholds` | Thresholds based on the LAB color space, `thresholds=[[l_min, l_max, a_min, a_max, b_min, b_max]]`, representing the brightness range `[l_min, l_max]`, the green-to-red component range `[a_min, a_max]`, and the blue-to-yellow component range `[b_min, b_max]`. Multiple thresholds can be set simultaneously. | Set two thresholds to detect red and green: `img.find_blobs(thresholds=[[0, 80, 40, 80, 10, 80], [0, 80, -120, -10, 0, 30]])`. The red threshold is `[0, 80, 40, 80, 10, 80]`, the green threshold is `[0, 80, -120, -10, 0, 30]`. |
| `invert` | Enable threshold inversion; when enabled, the passed thresholds are inverted. Default is `False`. | Enable threshold inversion: `img.find_blobs(invert=True)` |
| `roi` | Set the rectangular region for the algorithm to compute, `roi=[x, y, w, h]`, where `x` and `y` are the coordinates of the top-left corner of the rectangle, and `w` and `h` are its width and height. The default is the entire image. | Compute the region at (50, 50) with a width and height of 100: `img.find_blobs(roi=[50, 50, 100, 100])` |
| `area_threshold` | Filter out blobs with a pixel area smaller than `area_threshold`, in units of pixels. The default is 10. This parameter can be used to filter out useless small blobs. | Filter out blobs with an area smaller than 1000: `img.find_blobs(area_threshold=1000)` |
| `pixels_threshold` | Filter out blobs with fewer valid pixels than `pixels_threshold`. The default is 10. This parameter can be used to filter out useless small blobs. | Filter out blobs with fewer than 1000 valid pixels: `img.find_blobs(pixels_threshold=1000)` |
This article introduces commonly used methods. For more APIs, please see the [image](../../../api/maix/image.html) section of the API documentation.

### Increasing the speed of line tracking

Here are a few ways to increase the speed of line tracking:

1. Choose a suitable resolution. The larger the resolution, the slower the calculation; choose a resolution that matches your recognition distance and accuracy requirements.

2. Use a grayscale image. With grayscale recognition the algorithm processes only one channel, so recognition is faster, which is very useful in single-colour environments. Note that when using grayscale images, only `l_min` and `l_max` of the `thresholds` passed to `get_regression` are valid.

Methods to get a grayscale image:

```python
# Example 1
cam = camera.Camera(320, 240, image.Format.FMT_GRAYSCALE)   # Supported since MaixPy v4.2.1
gray_img = cam.read()                                        # get grayscale image

# Example 2
cam = camera.Camera(320, 240)
img = cam.read()
gray_img = img.to_format(image.Format.FMT_GRAYSCALE)         # get grayscale image
```

## How to track lines using MaixCAM's default application

To quickly verify the line tracking functionality, you can use the `line_tracking` application provided by MaixCAM to experience the line-finding effect.

### How to use it

1. Select and open the `Line tracking` application.
2. Click on the line on the screen that needs to be identified; the colour of the line will be displayed on the left-hand side.
3. Click on the colour to be detected on the left (the colour below `L A B` on the screen).
4. The line will be identified, and the coordinates and angle of the line will be output from the serial port.

### Demo

### Advanced operations

#### Manually adjusting the LAB threshold to track lines

The application provides manual setting of the LAB threshold to track lines accurately.

Steps:

1. `Click` the `options icon` in the bottom left corner to enter configuration mode.
2. Point the `camera` at the `object` you need to `find`, `click` on the `target object` on the screen, and the `left side` will display a `rectangular frame` of the object's color and show the `LAB values` of that color.
3. Click the options at the bottom: `L Min`, `L Max`, `A Min`, `A Max`, `B Min`, `B Max`. After clicking, a slider will appear on the right side to set the value for that option. These values correspond to the minimum and maximum values of the L, A, and B channels in the LAB color format, respectively.
4. Referring to the `LAB values` of the object color measured in step 2, adjust `L Min`, `L Max`, `A Min`, `A Max`, `B Min`, `B Max` to appropriate values to identify the corresponding color blobs. For example, if `LAB = (20, 50, 80)`: since `L = 20`, to allow a certain range, set `L Min = 10` and `L Max = 30`; similarly, since `A = 50`, set `A Min = 40` and `A Max = 60`; since `B = 80`, set `B Min = 70` and `B Max = 90`.

#### Getting Detection Data via Serial Protocol

The line tracking application supports reporting detected straight-line information via the serial port (default baud rate is 115200). Since only one type of report message is sent, we can illustrate its content with an example.
For instance, if the report message is:

```shell
AA CA AC BB 0E 00 00 00 E1 09 FC 01 01 00 E9 01 6F 01 57 00 C1 C6
```

* `AA CA AC BB`: Protocol header, fixed content.
* `0E 00 00 00`: Data length, the total length excluding the protocol header and the data length field; here the length is 14.
* `E1`: Flag byte, used to identify the serial message flag.
* `09`: Command type; for the line tracking application this value is fixed at 0x09.
* `FC 01 01 00 E9 01 6F 01 57 00`: The coordinates and angle information of the two endpoints of the line, with each value represented as a 2-byte value in little-endian format. `FC 01` and `01 00` indicate that the coordinates of the first endpoint are (508, 1), `E9 01` and `6F 01` indicate that the coordinates of the second endpoint are (489, 367), and `57 00` indicates that the angle of the line relative to the x-axis is 87 degrees.
* `C1 C6`: CRC checksum value, used to verify whether the frame data has errors during transmission."},"/maixpy/doc/en/vision/dual_buff.html":{"title":"Introduction to Running Models in Dual Buffer Mode with MaixPy MaixCAM","content":" title: Introduction to Running Models in Dual Buffer Mode with MaixPy MaixCAM

## Introduction

You may have noticed the `dual_buff=True` parameter in the model initialization code. For example, in `YOLOv5`:

```python
from maix import camera, display, image, nn, app

detector = nn.YOLOv5(model="/root/models/yolov5s.mud", dual_buff=True)
# detector = nn.YOLOv8(model="/root/models/yolov8n.mud", dual_buff=True)

cam = camera.Camera(detector.input_width(), detector.input_height(), detector.input_format())
dis = display.Display()

while not app.need_exit():
    img = cam.read()
    objs = detector.detect(img, conf_th=0.5, iou_th=0.45)
    for obj in objs:
        img.draw_rect(obj.x, obj.y, obj.w, obj.h, color=image.COLOR_RED)
        msg = f'{detector.labels[obj.class_id]}: {obj.score:.2f}'
        img.draw_string(obj.x, obj.y, msg, color=image.COLOR_RED)
    dis.show(img)
```

Generally, this parameter defaults to `True`, unless you manually set `dual_buff=False` to disable the dual-buffer function. Enabling this feature improves running efficiency, thereby increasing the frame rate (assuming the camera's frame rate is not the bottleneck, the above code will halve the loop time on MaixCAM, effectively doubling the frame rate). However, there are drawbacks: the `detect` function returns the result of the previous call to `detect`, meaning there is a one-frame delay between the result and the input. If you want the detection result to match the input `img` rather than the previous frame, disable this feature. Additionally, because two buffers have to be prepared, memory usage will increase; if you encounter insufficient-memory issues, you will also need to disable this feature.

## Principle

Model object detection involves several steps:

* Capturing the image
* Image preprocessing
* Model execution
* Post-processing the results

Only the model execution step runs on the hardware NPU, while the other steps run on the CPU. If `dual_buff` is set to `False`, during `detect` the CPU preprocesses (while the NPU is idle), then the NPU performs the computation (while the CPU idles waiting for the NPU to finish), and then the CPU post-processes (while the NPU is idle). This process is linear and relatively simple, but it means either the CPU or the NPU is always idle. When `dual_buff=True` is enabled, the CPU preprocesses and hands the data off to the NPU for computation.
At this point, the CPU does not wait for the NPU to produce results but instead exits the `detect` function and proceeds to the next camera read and preprocess. Once the NPU finishes its computation, the CPU has already prepared the next data, immediately passing it to the NPU to continue computing without giving the NPU any idle time. This maximizes the efficient simultaneous operation of both the CPU and NPU. However, note that if the camera frame rate is not high enough, it will still limit the overall frame rate."},"/maixpy/doc/en/vision/find_blobs.html":{"title":"MaixCAM MaixPy Find Blobs","content":" title: MaixCAM MaixPy Find Blobs update: date: 2024 04 03 author: neucrack version: 1.0.0 content: Initial documentation date: 2024 04 03 author: lxowalle version: 1.0.1 content: Added detailed usage for finding blobs Before reading this article, make sure you know how to develop with MaixCAM. For details, please read [Quick Start](../index.html). ## Introduction This article will introduce how to use MaixPy to find color blobs and how to use the default application of MaixCam to find color blobs. In vision applications, finding color blobs is a very common requirement, such as robots finding color blobs, automated production lines finding color blobs, etc., which requires identifying specific color areas in the image and obtaining information such as the position and size of these areas. ## Using MaixPy to Find Blobs The `maix.image.Image` module in MaixPy provides the `find_blobs` method, which can conveniently find color blobs. ### How to Find Blobs A simple example to find color blobs and draw bounding boxes: ```python from maix import image, camera, display cam camera.Camera(320, 240) disp display.Display() # Select the corresponding configuration based on the color of the blob thresholds [[0, 80, 40, 80, 10, 80]] # red # thresholds [[0, 80, 120, 10, 0, 30]] # green # thresholds [[0, 80, 30, 100, 120, 60]] # blue while 1: img cam.read() blobs img.find_blobs(thresholds, pixels_threshold 500) for blob in blobs: img.draw_rect(blob[0], blob[1], blob[2], blob[3], image.COLOR_GREEN) disp.show(img) ``` Steps: 1. Import the image, camera, and display modules ```python from maix import image, camera, display ``` 2. Initialize the camera and display ```python cam camera.Camera(320, 240)\t# Initialize the camera with an output resolution of 320x240 in RGB format disp display.Display() ``` 3. Get the image from the camera and display it ```python while 1: img cam.read() disp.show(img) ``` 4. Call the `find_blobs` method to find color blobs in the camera image and draw them on the screen ```python blobs img.find_blobs(thresholds, pixels_threshold 500) for blob in blobs: img.draw_rect(blob[0], blob[1], blob[2], blob[3], image.COLOR_GREEN) ``` `img` is the camera image obtained through `cam.read()`. When initialized with `cam camera.Camera(320, 240)`, the `img` object is an RGB image with a resolution of 320x240. `img.find_blobs` is used to find color blobs. `thresholds` is a list of color thresholds, where each element is a color threshold. Multiple thresholds can be passed in to find multiple colors simultaneously. Each color threshold is in the format `[L_MIN, L_MAX, A_MIN, A_MAX, B_MIN, B_MAX]`, where `L`, `A`, and `B` are the three channels in the LAB color space. The `L` channel represents brightness, the `A` channel represents the red green component, and the `B` channel represents the blue yellow component. 
`pixels_threshold` is a pixel count threshold used to filter out unwanted small blobs.

`img.draw_rect` is used to draw bounding boxes around the color blobs. `blob[0]`, `blob[1]`, `blob[2]`, and `blob[3]` represent the x coordinate of the top-left corner of the blob, the y coordinate of the top-left corner of the blob, the width of the blob, and the height of the blob, respectively.

### Common Parameter Explanations

Here are explanations of commonly used parameters. If you cannot find parameters that can implement your application, you may need to consider using other algorithms or extending the required functionality based on the current algorithm's results.

Parameter Description Example thresholds Thresholds based on the LAB color space, thresholds=[[l_min, l_max, a_min, a_max, b_min, b_max]], representing:
    Brightness range [l_min, l_max]
    Green to red component range [a_min, a_max]
    Blue to yellow component range [b_min, b_max]
    Multiple thresholds can be set simultaneously. Example: set two thresholds to detect red and green
    ```img.find_blobs(thresholds=[[0, 80, 40, 80, 10, 80], [0, 80, -120, -10, 0, 30]])```
    Red threshold is [0, 80, 40, 80, 10, 80]
    Green threshold is [0, 80, -120, -10, 0, 30] invert Enable threshold inversion; when enabled, the passed thresholds are inverted. Default is False. Example: enable threshold inversion
    ```img.find_blobs(invert=True)``` roi Set the rectangular region for the algorithm to compute, roi=[x, y, w, h], where x and y represent the coordinates of the top-left corner of the rectangle, and w and h represent the width and height of the rectangle, respectively. The default is the entire image. Example: compute the region at (50, 50) with a width and height of 100
    ```img.find_blobs(roi=[50, 50, 100, 100])``` area_threshold Filter out blobs with a pixel area smaller than area_threshold, in units of pixels. The default is 10. This parameter can be used to filter out some useless small blobs. Example: filter out blobs with an area smaller than 1000
    ```img.find_blobs(area_threshold=1000)``` pixels_threshold Filter out blobs with fewer valid pixels than pixels_threshold. The default is 10. This parameter can be used to filter out some useless small blobs. Example: filter out blobs with fewer than 1000 valid pixels
    ```img.find_blobs(pixels_threshold 1000)``` This article introduces commonly used methods. For more APIs, please see the [image](../../../api/maix/image.html) section of the API documentation. ## Setting Thresholds Offline To quickly verify the function of find blobs, you can first use the find blobs application provided by MaixCam to experience the effect of finding color blobs. ### Demo Turn on the device, select `Find Blobs` application, then select the colour you want to identify, or customize the colour, then you can identify the corresponding colour, the `setting bar` at the bottom will show the `threshold range`, and the serial port will also output the coordinates and colour information of the identified coordinates. [source code address](https://github.com/sipeed/MaixCDK/tree/main/projects/app_find_blobs) ### Quick use #### Using the default threshold The find blobs app provides four configurations, `red`, `green`, `blue` and `user`, where `red`, `green` and `blue` are used to find `red`, `green` and `blue` colour blocks, and `user` customized thresholds are saved when the app is exited, and the next time the app is opened the thresholds from the last debugging are loaded. For quick experience, you can switch to the corresponding configuration by `clicking` the `button` at the bottom of the interface, the app interface is referenced below: ![](../../../static/image/find_blobs_app.jpg) #### Quick Debug Thresholds Method of operation: 1. Aim the `camera` at the `object` you need to `find`, `click` on the `target` on the screen, then the `left` side will show the `rectangle` of the corresponding colour of the object, and the LAB value of the object's colour. 2. Click on the rectangular box, the system will `automatically set' the LAB threshold, then the screen will draw the edge of the object. The advantage of this method is that it is easy and quick to set the threshold and find the corresponding colour block. The disadvantage is that it is not precise enough, you can fine tuning it manually in the next step. #### Manually fine tune the threshold Method of operation: 1. `Click` on the `Options icon` in the lower left corner to enter configuration mode 2. Aim the `camera` at the `object` you need to `find`, `click` on the `target object` on the screen, at this time the `left` side will show the `rectangular box` of the corresponding colour of the object, and display the `LAB value` of the object's colour. 3. Click on the lower option `L Min, L Max, A Min, A Max, B Min, B Max`, and a slider will appear on the right to set the value of this option. These values correspond to the minimum and maximum values of the L, A and B channels of the LAB colour format. 4. Referring to the `LAB value` of the object colour calculated in step 2, adjust `L Min, L Max, A Min, A Max, B Min, B Max` to the appropriate value to identify the corresponding colour block. For example, `LAB (20, 50, 80)`, since `L 20`, in order to fit a certain range, let `L Min 10`, `L Max 30`; similarly, since `A 50`, let `A Min 40`, `A Max 60`; since `B 80`, let `B Min 70`, `B Max 90`. This method can be more precise to find the right threshold, with the `Quick Debug Threshold` method, it is easy to find the desired threshold. #### Get recognition results via serial protocol The find blobs app supports reporting information about detected color blobs via the serial port (default baud rate is 115200). Since only one report message is sent, we can illustrate the content of the report message with an example. 
For instance, if the report message is: ``` shellCopy code AA CA AC BB 14 00 00 00 E1 08 EE 00 37 00 15 01 F7 FF 4E 01 19 00 27 01 5A 00 A7 20 ``` `AA CA AC BB`: Protocol header, content is fixed `14 00 00 00`: Data length, the total length excluding the protocol header and data length `E1`: Flag, used to identify the serial message flag `08`: Command type, for the find blobs app application, this value is fixed at 0x08 `EE 00 37 00 15 01 F7 FF 4E 01 19 00 27 01 5A 00`: Coordinates of the four vertices of the found color blob, with each value represented by 2 bytes in little endian format. `EE 00` and `37 00` represent the first vertex coordinate as (238, 55), `15 01` and `F7 FF` represent the second vertex coordinate as (277, 9), `4E 01` and `19 00` represent the third vertex coordinate as (334, 25), `27 01` and `5A 00` represent the fourth vertex coordinate as (295, 90). `A7 20`: CRC checksum value, used to verify if the frame data has errors during transmission. ## About the LAB Color Space The LAB color space, like the RGB color space, is a way to represent colors. LAB can represent all colors visible to the human eye. If you need to learn more about LAB, you can search for relevant articles online, which will provide more details. However, for you, it should be sufficient to understand why LAB is advantageous for MaixPy. Advantages of LAB for MaixPy: 1. The color gamut of the LAB color space is larger than that of RGB, so it can completely replace RGB. 2. In the LAB color space, since the L channel is the brightness channel, we often set it to a relatively large range (commonly [0, 80]), and when coding, we mainly focus on the A and B channels. This can save a lot of time spent struggling with how to select color thresholds. 3. The color perception in the LAB color space is more uniform and easier to debug with code. For example, if you only need to find red color blobs, you can fix the values of the L and B channels and only adjust the value of the A channel (in cases where high color accuracy is not required). For RGB channels, you generally need to adjust all three R, G, and B channels simultaneously to find suitable thresholds."},"/maixpy/doc/en/vision/self_learn_detector.html":{"title":"MaixCAM MaixPy Self-Learning Detection Tracker","content":" title: MaixCAM MaixPy Self Learning Detection Tracker ## MaixPy Self Learning Detection Tracker Similar to the self learning classifier, this tracker doesn't require training. You can simply select the target object by drawing a box around it, and the system will detect and track the object, making it quite useful in simple detection scenarios. Unlike the self learning classifier, the detection tracker provides the coordinates and size of the object. ## Using the Self Learning Detection Tracker in MaixPy MaixPy currently offers a single target learning detection tracking algorithm. Once you select the target object, the tracker will continuously follow it. The algorithm used here is [NanoTrack](https://github.com/HonglinChu/SiamTrackers/tree/master/NanoTrack), which you can explore if you're interested in learning more about the underlying principles. You can directly use the built in self learning tracking application after flashing the latest system image (> 2024.9.5_v4.5.0) to see the results. To use it, call the `maix.nn.NanoTrack` class. After initializing the object, call the `init` method to specify the target to be detected, then call the `track` method to continuously track the target. 
Below is a simplified code example: ```python from maix import nn model_path \"/root/models/nanotrack.mud\" tracker nn.NanoTrack(model_path) tracker.init(img, x, y, w, h) pos tracker.track(img) ``` Note that this uses a built in model located in the system at `/root/models`. You can also download the model from the [MaixHub model library](https://maixhub.com/model/zoo/437). For more detailed code, refer to [MaixPy/examples/vision/ai_vision/nn_self_learn_tracker.py](https://github.com/sipeed/MaixPy/blob/main/examples/vision/ai_vision/nn_self_learn_tracker.py). ## Other Self Learning Tracking Algorithms Currently, the NanoTrack algorithm is implemented, which is highly stable and reliable in simple scenarios and provides a sufficient frame rate. However, its limitations include the need for the object to return near the last disappearance point to be detected again if it goes out of view, and the fact that it can only detect one target at a time. If you have better algorithms, you can refer to the existing NanoTrack implementation for guidance. Feel free to discuss or submit code PRs."},"/maixpy/doc/en/vision/object_track.html":{"title":"MaixCAM MaixPy Object Tracking and Counting (e.g., Pedestrian Counting)","content":" title: MaixCAM MaixPy Object Tracking and Counting (e.g., Pedestrian Counting) ## Introduction to Object Tracking Previously, we used YOLOv5, YOLOv8, or even `find_blobs` to detect objects. However, when there are multiple objects in the frame and we need to distinguish between each object, object tracking becomes necessary. For instance, if there are five people moving in the frame, we need to assign each person a number and track their movement. Applications: * Pedestrian counting, such as counting the number of people passing through a certain area. * Counting workpieces, such as counting products on a production line. * Recording and recognizing the movement trajectories of objects. ## MaixCAM/MaixPy Object Tracking and Pedestrian Counting Results As shown in the video below, the system can track each person and count those who cross the yellow area from top to bottom (displayed in the lower left corner): ## Using MaixCAM/MaixPy for Object Tracking and Pedestrian Counting You can directly install the [application](https://maixhub.com/app/61) to experience it. You can also check the [examples in the `examples/vision/tracker` directory](https://github.com/sipeed/MaixPy/tree/main/examples/vision/tracker). The `tracker_bytetrack.py` example is a basic object tracking example and involves several steps: * Use YOLOv5 or YOLOv8 to detect objects. This allows you to replace the model to detect different objects according to your needs. * Use the `maix.tracker.ByteTracker` algorithm for object tracking. Simply calling the `update` function will give you the results (the trajectory of each object in the frame), which is very straightforward. Several parameters need to be adjusted according to your specific scenario. 
Refer to the example code and API documentation for detailed parameter descriptions: ```python # configs conf_threshold 0.3 # detection threshold iou_threshold 0.45 # detection IOU threshold max_lost_buff_time 120 # the number of frames to keep lost tracks track_thresh 0.4 # tracking confidence threshold high_thresh 0.6 # threshold to add a new track match_thresh 0.8 # matching threshold for tracking; if IOU < match_thresh between an object in two frames, they are considered the same object max_history_num 5 # maximum length of a track's position history show_detect False # show detection valid_class_id [0] # classes used in the detection model ``` The `tracker_bytetrack_count.py` example adds pedestrian counting. To keep it simple, the example only implements counting for people walking from top to bottom. If a person is below the yellow area and their trajectory crosses into the yellow area, they are counted as crossing from top to bottom. You can write custom logic based on your specific application scenario."},"/maixpy/doc/en/vision/customize_model_yolov8.html":{"title":"Offline Training for YOLO11/YOLOv8 Models on MaixCAM MaixPy to Customize Object and Keypoint Detection","content":" title: Offline Training for YOLO11/YOLOv8 Models on MaixCAM MaixPy to Customize Object and Keypoint Detection update: date: 2024 06 21 version: v1.0 author: neucrack content: Document creation date: 2024 10 10 version: v2.0 author: neucrack content: Added YOLO11 support ## Introduction The default official model provides detection for 80 different objects. If this doesn't meet your needs, you can train your own model to detect custom objects, which can be done on your own computer or server by setting up a training environment. YOLOv8 / YOLO11 not only supports object detection but also supports keypoint detection with YOLOv8 pose / YOLO11 pose. Apart from the official human keypoints, you can also create your own keypoint dataset to train models for detecting specific objects and keypoints. Since YOLOv8 and YOLO11 mainly modify the internal network while the preprocessing and post processing remain the same, the training and conversion steps for YOLOv8 and YOLO11 are identical, except for the output node names. **Note:** This article explains how to train a custom model but assumes some basic knowledge. If you do not have this background, please learn it independently: * This article will not cover how to set up the training environment; please search for how to install and test a PyTorch environment. * This article will not cover basic machine learning concepts or Linux related knowledge. If you think there are parts of this article that need improvement, please click on `Edit this article` at the top right and submit a PR to contribute to the documentation. ## Process and Article Goal To ensure our model can be used on MaixPy (MaixCAM), it must go through the following steps: * Set up the training environment (not covered in this article, please search for how to set up a PyTorch training environment). * Clone the [YOLO11/YOLOv8](https://github.com/ultralytics/ultralytics) source code locally. * Prepare the dataset and format it according to the YOLO11 / YOLOv8 project requirements. * Train the model to obtain an `onnx` model file, which is the final output of this article. * Convert the `onnx` model into a `MUD` file supported by MaixPy, as described in the [MaixCAM Model Conversion](../ai_model_converter/maixcam.html) article. * Use MaixPy to load and run the model. 
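As a rough, non-authoritative sketch of the "prepare the dataset and train the model" steps listed above (this is not part of the original tutorial; it assumes the `ultralytics` package is installed, and `my_dataset.yaml` is a hypothetical configuration file describing your own dataset), training with the Python API might look like this:

```python
# Minimal training sketch, assuming the `ultralytics` package is installed and
# `my_dataset.yaml` (hypothetical name) describes your custom dataset.
from ultralytics import YOLO

model = YOLO("yolov8n.pt")            # start from the official pretrained weights
model.train(data="my_dataset.yaml",   # your dataset config (image paths, labels, class names)
            imgsz=640,                # training resolution
            epochs=100)               # adjust to your dataset size
# The trained weights can then be exported to ONNX as described in the sections below.
```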
## Reference Articles

Since this process is quite general, this article only provides an overview. For specific details, please refer to the **[YOLO11 / YOLOv8 official code and documentation](https://github.com/ultralytics/ultralytics)** (**recommended**) and search for training tutorials to eventually export an ONNX file.

If you come across good articles, feel free to edit this one and submit a PR.

## Exporting YOLO11 / YOLOv8 ONNX Models

Create an `export_onnx.py` file in the `ultralytics` directory:

```python
from ultralytics import YOLO
import sys

print(sys.path)
net_name = sys.argv[1]          # e.g. yolov8n.pt or yolov8n-pose.pt, see https://docs.ultralytics.com/models/yolov8/#supported-tasks-and-modes
input_width = int(sys.argv[2])
input_height = int(sys.argv[3])

# Load a model
model = YOLO(net_name)             # load an official model
# model = YOLO("path/to/best.pt")  # load a custom model

# Predict with the model
results = model("https://ultralytics.com/images/bus.jpg")  # predict on an image

path = model.export(format="onnx", imgsz=[input_height, input_width])  # export the model to ONNX format
print(path)
```

Then run `python export_onnx.py yolov8n.pt 320 224` to export the `onnx` model. Here, we have redefined the input resolution. The model was originally trained with `640x640`, but we use `320x224` to improve the processing speed and match the MaixCAM's screen aspect ratio for convenient display. You can set the resolution according to your own needs.

## Converting to a Model Supported by MaixCAM and MUD File

MaixPy/MaixCDK currently supports YOLOv8 / YOLO11 for object detection, YOLOv8-pose / YOLO11-pose for keypoint detection, and YOLOv8-seg / YOLO11-seg for segmentation (as of 2024-10-10). Follow [MaixCAM Model Conversion](../ai_model_converter/maixcam.html) to convert the model.

Pay attention to the model output node selection:

* Object detection:
  * YOLOv8 extracts `/model.22/dfl/conv/Conv_output_0,/model.22/Sigmoid_output_0` from ONNX as outputs.
  * YOLO11 extracts `/model.23/dfl/conv/Conv_output_0,/model.23/Sigmoid_output_0`.
* Keypoint detection:
  * YOLOv8-pose extracts `/model.22/dfl/conv/Conv_output_0,/model.22/Sigmoid_output_0,/model.22/Concat_output_0` as outputs.
  * YOLO11-pose extracts `/model.23/dfl/conv/Conv_output_0,/model.23/Sigmoid_output_0,/model.23/Concat_output_0`.
* Image segmentation:
  * YOLOv8-seg extracts `/model.22/dfl/conv/Conv_output_0,/model.22/Sigmoid_output_0,/model.22/Concat_output_0,output1`.
  * YOLO11-seg extracts `/model.23/dfl/conv/Conv_output_0,/model.23/Sigmoid_output_0,/model.23/Concat_output_0,output1`.

You can verify these node names in your own exported ONNX file; see the sketch below.
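If you want to double-check the node names above in the ONNX file you exported, you can inspect the graph with the `onnx` Python package (an extra assumption; install it with pip). A minimal sketch, with an example file name:

```python
# Sketch: print candidate output nodes of an exported model so you can find names
# like /model.22/dfl/conv/Conv_output_0 (assumes `pip install onnx`; the file name is an example).
import onnx

m = onnx.load("yolov8n.onnx")
for node in m.graph.node:
    if node.op_type in ("Conv", "Sigmoid", "Concat"):
        print(node.op_type, node.name, list(node.output))
```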
![](../../assets/yolov8_out1.jpg) ![](../../assets/yolov8_out2.jpg) For object detection, the MUD file would be as follows (replace `yolo11` for YOLO11): ```ini [basic] type cvimodel model yolov8n.cvimodel [extra] model_type yolov8 input_type rgb mean 0, 0, 0 scale 0.00392156862745098, 0.00392156862745098, 0.00392156862745098 labels person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair dryer, toothbrush ``` Replace `labels` according to the objects you trained. For keypoint detection (yolov8 pose), the MUD file would be (replace `yolo11` for YOLO11): ```ini [basic] type cvimodel model yolov8n_pose.cvimodel [extra] model_type yolov8 type pose input_type rgb mean 0, 0, 0 scale 0.00392156862745098, 0.00392156862745098, 0.00392156862745098 labels person ``` The default model is for human pose detection, so `labels` only contains `person`. Replace it according to your detected objects. For image segmentation (yolov8 seg), the MUD file would be (replace `yolo11` for YOLO11): ```ini [basic] type cvimodel model yolo11n seg_320x224_int8.cvimodel [extra] model_type yolov8 input_type rgb type seg mean 0, 0, 0 scale 0.00392156862745098, 0.00392156862745098, 0.00392156862745098 labels person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign, parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie, suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket, bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut, cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven, toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair dryer, toothbrush ``` ## Upload and Share on MaixHub Visit the [MaixHub Model Library](https://maixhub.com/model/zoo?platform maixcam) to upload and share your model. Consider providing multiple resolutions for others to choose from."},"/maixpy/doc/en/vision/classify.html":{"title":"Using AI Models for Object Classification in MaixCAM MaixPy","content":" title: Using AI Models for Object Classification in MaixCAM MaixPy ## Object Classification Concept For example, if there are two images in front of you, one with an apple and the other with an airplane, the task of object classification is to input these two images into an AI model one by one. The model will then output two results, one for apple and one for airplane. 
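To make the idea concrete (this is only an illustrative sketch, not MaixPy code, and the scores are made up), classification amounts to picking the class with the highest score from the model's output:

```python
# Conceptual sketch: a classifier outputs one score per class; the prediction is the argmax.
scores = {"apple": 0.92, "airplane": 0.05, "banana": 0.03}  # made-up example scores
prediction = max(scores, key=scores.get)
print(prediction)  # -> "apple"
```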
## Using Object Classification in MaixPy

MaixPy provides a pre-trained `1000`-class classification model based on the `imagenet` dataset, which can be used directly:

```python
from maix import camera, display, image, nn

classifier = nn.Classifier(model="/root/models/mobilenetv2.mud", dual_buff=True)
cam = camera.Camera(classifier.input_width(), classifier.input_height(), classifier.input_format())
dis = display.Display()

while 1:
    img = cam.read()
    res = classifier.classify(img)
    max_idx, max_prob = res[0]
    msg = f"{max_prob:5.2f}: {classifier.labels[max_idx]}"
    img.draw_string(10, 10, msg, image.COLOR_RED)
    dis.show(img)
```

Result video:

Here, the camera captures an image, which is then passed to the `classifier` for recognition. The result is displayed on the screen.

For more API usage, refer to the documentation for the [maix.nn](/api/maix/nn.html) module.

## dual_buff Dual Buffer Acceleration

You may have noticed that the model initialization uses `dual_buff` (which defaults to `True`). Enabling the `dual_buff` parameter can improve running efficiency and increase the frame rate. For detailed principles and usage notes, see [dual_buff Introduction](./dual_buff.html).

## Training Your Own Classification Model on MaixHub

If you want to train a classification model for specific images, visit [MaixHub](https://maixhub.com) to learn and train the model. When creating a project, select "Classification Model", then simply upload your images to train. There is no need to set up a training environment or spend money on expensive GPUs; training can be done quickly with one click.

## Offline Training for Your Own Classification Model

For offline training, you need to set up your environment. Search for keywords such as `PyTorch classification model training` or `Mobilenet` for guidance. After training the model, export it in ONNX format, then refer to the [MaixCAM Model Conversion Documentation](../ai_model_converter/maixcam.html) to convert it into a model format supported by MaixCAM. Finally, use the `nn.Classifier` class mentioned above to load the model. The classification model can be Mobilenet or another model like ResNet. During model conversion, it is best to extract the layer just before `softmax` as the final output layer, because the `classifier.classify(img, softmax=True)` function has `softmax` enabled by default, meaning the function will perform a `softmax` calculation on the results; therefore, the model itself does not need a `softmax` layer. However, if the model does include a `softmax` layer, you can specify not to execute it again by using `classifier.classify(img, softmax=False)`."},"/maixpy/doc/en/vision/body_key_points.html":{"title":"MaixCAM MaixPy Human Pose Keypoint Detection","content":" title: MaixCAM MaixPy Human Pose Keypoint Detection

## Introduction

Using MaixPy, you can easily detect the coordinates of keypoints on human joints, which can be used for posture detection, such as monitoring sitting posture or providing input for motion-based games. MaixPy implements human pose detection based on [YOLOv8 Pose / YOLO11 Pose](https://github.com/ultralytics/ultralytics), capable of detecting `17` keypoints on the human body.
![](../../assets/body_keypoints.jpg) ## Usage You can easily implement this using the `maix.nn.YOLOv8` or `maix.nn.YOLO11` classes in MaixPy: ```python from maix import camera, display, image, nn, app detector nn.YOLOv8(model \"/root/models/yolov8n_pose.mud\", dual_buff True) # detector nn.YOLO11(model \"/root/models/yolo11n_pose.mud\", dual_buff True) cam camera.Camera(detector.input_width(), detector.input_height(), detector.input_format()) dis display.Display() while not app.need_exit(): img cam.read() objs detector.detect(img, conf_th 0.5, iou_th 0.45, keypoint_th 0.5) for obj in objs: img.draw_rect(obj.x, obj.y, obj.w, obj.h, color image.COLOR_RED) msg f'{detector.labels[obj.class_id]}: {obj.score:.2f}' img.draw_string(obj.x, obj.y, msg, color image.COLOR_RED) detector.draw_pose(img, obj.points, 8 if detector.input_width() > 480 else 4, image.COLOR_RED) dis.show(img) ``` You can also find the code in the [MaixPy/examples/vision](https://github.com/sipeed/MaixPy/tree/main/examples/vision/ai_vision) directory. Since `YOLOv8 Pose` is used here, the `YOLOv8` class is also used, with the only difference being the model file compared to `YOLOv8` object detection. The same applies to `YOLO11`. The `detect` function returns an additional `points` value, which is a list of `int` containing `17` keypoints. The points are arranged in order; for example, the first value is the x coordinate of the nose, the second value is the y coordinate of the nose, and so on: ```python 1. Nose 2. Left Eye 3. Right Eye 4. Left Ear 5. Right Ear 6. Left Shoulder 7. Right Shoulder 8. Left Elbow 9. Right Elbow 10. Left Wrist 11. Right Wrist 12. Left Hip 13. Right Hip 14. Left Knee 15. Right Knee 16. Left Ankle 17. Right Ankle ``` If any of these parts are occluded, the value will be ` 1`. ## Models with More Resolutions The default model input resolution is `320x224`. If you want to use models with higher resolution, you can download and transfer them from the MaixHub model library: * YOLOv8 Pose: [https://maixhub.com/model/zoo/401](https://maixhub.com/model/zoo/401) * YOLO11 Pose: [https://maixhub.com/model/zoo/454](https://maixhub.com/model/zoo/454) Higher resolution generally provides better accuracy but at the cost of lower processing speed. Choose the model based on your application needs. If the provided resolution does not meet your requirements, you can train your own model using the source code from [YOLOv8 Pose / YOLO11 Pose](https://github.com/ultralytics/ultralytics) and export your own ONNX model, then convert it to a format supported by MaixCAM (methods are covered in later articles). ## dual_buff for Double Buffering Acceleration You may notice that `dual_buff` is used for model initialization (default value is `True`). Enabling the `dual_buff` parameter can improve efficiency and increase the frame rate. For more details and considerations, refer to the [dual_buff Introduction](./dual_buff.html)."},"/maixpy/doc/en/vision/custmize_model.html":{"title":"","content":"Please refer to [MaixCAM Model Conversion](../ai_model_converter/maixcam.html), and find the model documentation you need to convert in the left directory, such as [Custom YOLOv5 Model](./customize_model_yolov5.html)."},"/maixpy/doc/en/vision/opencv.html":{"title":"MaixCAM MaixPy Use OpenCV","content":" title: MaixCAM MaixPy Use OpenCV ## Introduction For MaixCAM, since it uses Linux and the performance can support using the Python version of OpenCV, you can use the `cv2` module directly in addition to the `maix` module. 
The examples in this article and more can be found in [MaixPy/examples/vision/opencv](https://github.com/sipeed/MaixPy/tree/main/examples/vision/opencv). **Note that OpenCV functions are basically CPU calculated. If you can use maix modules, try not to use OpenCV, because many maix functions are hardware accelerated.** ## Converting between Numpy/OpenCV and maix.image.Image Formats You can convert `maix.image.Image` object to a `numpy` array, which can then be used by libraries such as `numpy` and `opencv`: ```python from maix import image, time, display, app disp display.Display() while not app.need_exit(): img image.Image(320, 240, image.Format.FMT_RGB888) img.draw_rect(0, 0, 100, 100, image.COLOR_RED, thickness 1) t time.ticks_ms() img_bgr image.image2cv(img, ensure_bgr True, copy True) img2 image.cv2image(img_bgr, bgr True, copy True) print(\"time:\", time.ticks_ms() t) print(type(img_bgr), img_bgr.shape) print(type(img2), img2) print(\"\") disp.show(img2) ``` The previous program is slower because each conversion involves a memory copy. Below is an optimized version for better performance. However, it is not recommended to use this unless you are aiming for extreme speed, as it is prone to errors: ```python from maix import image, time, display, app disp display.Display() while not app.need_exit(): img image.Image(320, 240, image.Format.FMT_RGB888) img.draw_rect(0, 0, 100, 100, image.COLOR_RED, thickness 1) t time.ticks_ms() img_rgb image.image2cv(img, ensure_bgr False, copy False) img2 image.cv2image(img_rgb, bgr False, copy False) print(\"time:\", time.ticks_ms() t) print(type(img_rgb), img_rgb.shape) print(type(img2), img2) disp.show(img2) ``` * In `img_rgb image.image2cv(img, ensure_bgr False, copy False)`, `img_rgb` directly uses the data from `img` without creating a memory copy. Note that the obtained `img_rgb` is an `RGB` image. Since OpenCV APIs assume the image is `BGR`, you need to be careful when using OpenCV APIs to process the image. If you are not sure, set `ensure_bgr` to `True`. * In `img2 image.cv2image(img_rgb, bgr False, copy False)`, setting `copy` to `False` means `img2` directly uses the memory of `img_rgb` without creating a new memory copy, resulting in faster performance. However, be cautious because `img_rgb` must not be destroyed before `img2` finishes using it; otherwise, the program will crash. * Note that since memory is borrowed, modifying the converted image will also affect the original image. ## Load an Image ```python import cv2 file_path \"/maixapp/share/icon/detector.png\" img cv2.imread(file_path) print(img) ``` Since the `cv2` module is quite large, `import cv2` may take some time. 
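As a small illustrative follow-on (not from the original article), the loaded image is an ordinary `numpy` array, so standard CPU-side OpenCV operations apply directly; the output path below is only an example:

```python
import cv2

file_path = "/maixapp/share/icon/detector.png"
img = cv2.imread(file_path)                       # BGR ndarray, or None if the path is wrong
if img is not None:
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)  # plain CPU-side processing
    small = cv2.resize(gray, (64, 64))
    cv2.imwrite("/root/detector_small.png", small)  # example output path
    print(img.shape, "->", small.shape)
```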
## Display Image on Screen

To display an image on the screen, convert it to a `maix.image.Image` object and then use `display` to show it:

```python
from maix import display, image, time, app
import cv2

disp = display.Display()

file_path = "/maixapp/share/icon/detector.png"
img = cv2.imread(file_path)
img_show = image.cv2image(img)
disp.show(img_show)

while not app.need_exit():
    time.sleep(1)
```

## Use OpenCV Functions

For example, edge detection. Based on the code above, use the `cv2.Canny` function:

```python
from maix import image, display, app, time
import cv2

file_path = "/maixapp/share/icon/detector.png"
img0 = cv2.imread(file_path)
disp = display.Display()

while not app.need_exit():
    img = img0.copy()

    # canny method
    t = time.ticks_ms()
    edged = cv2.Canny(img, 180, 60)
    t2 = time.ticks_ms() - t

    # show by maix.display
    t = time.ticks_ms()
    img_show = image.cv2image(edged)
    print(f"edge time: {t2}ms, convert time: {time.ticks_ms() - t}ms")
    disp.show(img_show)
```

## Use Camera

On a PC, we use OpenCV's `VideoCapture` class to read from the camera. For MaixCAM, OpenCV does not support this directly, so we use the `maix.camera` module to read from the camera and then use it with OpenCV.

Convert a `maix.image.Image` object to a `numpy.ndarray` object using the `image.image2cv` function:

```python
from maix import image, display, app, time, camera
import cv2

disp = display.Display()
cam = camera.Camera(320, 240, image.Format.FMT_BGR888)

while not app.need_exit():
    img = cam.read()

    # convert maix.image.Image object to numpy.ndarray object
    t = time.ticks_ms()
    img = image.image2cv(img, ensure_bgr=False, copy=False)
    print("time: ", time.ticks_ms() - t)

    # canny method
    edged = cv2.Canny(img, 180, 60)

    # show by maix.display
    img_show = image.cv2image(edged, bgr=True, copy=False)
    disp.show(img_show)
```

## Read USB camera

First, in the development board settings, select `USB Mode` under `USB Settings` and set it to `HOST` mode. If there is no screen available, you can use the `examples/tools/maixcam_switch_usb_mode.py` script to set it.

```python
from maix import image, display, app
import cv2
import sys

cap = cv2.VideoCapture(0)
cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 480)
# cap.set(cv2.CAP_PROP_CONVERT_RGB, 0)

disp = display.Display()

if not cap.isOpened():
    print("Unable to open camera")
    sys.exit(1)

print("Start reading")
while not app.need_exit():
    ret, frame = cap.read()
    if not ret:
        print("Unable to read frame")
        break
    img = image.cv2image(frame, bgr=True, copy=False)
    disp.show(img)
```"},"/maixpy/doc/en/vision/self_learn_classifier.html":{"title":"MaixCAM MaixPy Self-Learning Classifier","content":" title: MaixCAM MaixPy Self Learning Classifier

## Introduction to MaixPy Self Learning Classifier

Usually, to recognize new categories, we need to collect a dataset on a computer and retrain the model, which is a cumbersome and difficult process. Here, we provide a method that allows for instant learning of new objects directly on the device without the need for computer-side training, suitable for less complex scenarios. For example, if there is a bottle and a phone in front of you, you can use the device to take a picture of each as the basis for two classifications. Then, you collect a few more pictures of them from different angles, extract their features and save them. During recognition, the feature values of the image are compared with the saved feature values, and the classification that is more similar to the saved features is considered the corresponding classification. The sketch after this introduction illustrates the feature-comparison idea.
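The following is a purely conceptual sketch of that feature-comparison idea (it is not the actual MaixPy implementation; the random 128-dimensional vectors stand in for features extracted by a CNN backbone):

```python
# Conceptual sketch only: nearest-class matching on feature vectors using cosine similarity.
import numpy as np

def cosine_sim(a, b):
    return float(np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b) + 1e-9))

class_features = {
    "bottle": np.random.rand(128),  # placeholder for the saved feature of class "bottle"
    "phone": np.random.rand(128),   # placeholder for the saved feature of class "phone"
}
query = np.random.rand(128)         # placeholder for the feature of the image to classify

name, score = max(((k, cosine_sim(query, v)) for k, v in class_features.items()),
                  key=lambda kv: kv[1])
print("predicted class:", name, "similarity:", round(score, 3))
```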
## Using the Self Learning Classifier in MaixPy The default image comes with the [Self Learning Classification APP](https://maixhub.com/app/30), which you can use directly to get familiar with the process. ![](../../assets/self_learn_classifier.jpg) Steps: * Click the `+ Class` button to collect n classification (class) images. The object needs to be within the white frame on the screen while collecting the images. * Click the `+ Sample` button to collect m sample images. Collect some images for each classification. The order does not matter, and the number is flexible. It's best to take pictures from different angles, but not too different. * Click the `Learn` button to start learning. The device will automatically classify and learn based on the collected classification and sample images, obtaining the characteristics of the classifications. * Align the object with the center of the screen, recognize the image, and output the result. The screen will show the classification it belongs to and the similarity distance to this classification. The closer the similarity distance, the more similar it is. * The feature values ​​learned by this APP will be saved to `/root/my_classes.bin`, so the last one will be automatically loaded after exiting the application or restarting it. Simplified version of the code, for the complete version, please refer to the [examples](https://github.com/sipeed/maixpy/tree/main/examples/vision/ai_vision) for the full code. ```python from maix import nn, image classifier nn.SelfLearnClassifier(model \"/root/models/mobilenetv2.mud\", dual_buff True) img1 image.load(\"/root/1.jpg\") img2 image.load(\"/root/2.jpg\") img3 image.load(\"/root/3.jpg\") sample_1 image.load(\"/root/sample_1.jpg\") sample_2 image.load(\"/root/sample_2.jpg\") sample_3 image.load(\"/root/sample_3.jpg\") sample_4 image.load(\"/root/sample_4.jpg\") sample_5 image.load(\"/root/sample_5.jpg\") sample_6 image.load(\"/root/sample_6.jpg\") classifier.add_class(img1) classifier.add_class(img2) classifier.add_class(img3) classifier.add_sample(sample_1) classifier.add_sample(sample_2) classifier.add_sample(sample_3) classifier.add_sample(sample_4) classifier.add_sample(sample_5) classifier.add_sample(sample_6) classifier.learn() img image.load(\"/root/test.jpg\") max_idx, max_score classifier.classify(img) print(max_idx, max_score) ``` ## Storing and Loading Learned Feature Values Use the `save` function to store the learned feature values. This will generate a binary file containing the feature values of the objects. When you need to use it again, simply use the `load` function to load the feature values. ```python classifier.save(\"/root/my_classes.bin\") classifier.load(\"/root/my_classes.bin\") ``` If you have named each classification and stored them in the `labels` variable, you can also use: ```python classifier.save(\"/root/my_classes.bin\", labels labels) labels classifier.load(\"/root/my_classes.bin\") ``` ## dual_buff Dual Buffer Acceleration You may have noticed that the model initialization uses `dual_buff` (which defaults to `True`). Enabling the `dual_buff` parameter can improve running efficiency and increase the frame rate. 
For detailed principles and usage notes, see [dual_buff Introduction](./dual_buff.html)."},"/maixpy/doc/en/vision/camera.html":{"title":"MaixCAM MaixPy Camera Usage","content":" title: MaixCAM MaixPy Camera Usage update: date: 2024 04 03 author: neucrack version: 1.0.0 content: Initial documentation ## Introduction For the MaixCAM, it comes with a pre installed GC4653 camera, or an optional OS04A10 camera or global shutter camera, and even an HDMI to MIPI module, all of which can be directly used with simple API calls. ## API Documentation This article introduces common methods. For more API usage, refer to the documentation of the [maix.camera](/api/maix/camera.html) module. ## Camera Switching Currently supported cameras: * **GC4653**: M12 universal lens, 1/3\" sensor, clear image quality, 4MP. * **OS04A10**: M12 universal lens, 1/1.8\" large sensor, ultra clear image quality, 4MP. * **OV2685**: Does not support lens replacement, lowest image quality, and lowest cost; generally not recommended for use. * **SC035HGS**: Monochrome global shutter camera, 0.3MP black and white, suitable for capturing high speed objects. The system will automatically switch; simply replace the hardware to use. ## Getting Images from the Camera Using MaixPy to easily get images: ```python from maix import camera cam camera.Camera(640, 480) while 1: img cam.read() print(img) ``` Here we import the `camera` module from the `maix` module, then create a `Camera` object, specifying the width and height of the image. Then, in a loop, we continuously read the images. The default output is in `RGB` format. If you need `BGR` format or other formats, please refer to the API documentation. ```python from maix import camera, image cam camera.Camera(640, 480, image.Format.FMT_GRAYSCALE) # Set the output greyscale image ``` Also get the NV21 image ```python from maix import camera, image cam camera.Camera(640, 480, image.Format.FMT_YVU420SP) # set to output NV21 image ``` Note: You need to disable MaixVision's online browsing function if you set a very high resolution (e.g. `2560x1440`), otherwise the code may run abnormally due to lack of memory. You can also get greyscale images ## Setting the frame rate of the camera Currently the camera supports `30fps`, `60fps` and `80fps` configurations, the frame rate is selected by the `width`, `height`, `fps` parameters passed when creating the `Camera` object, currently the maximum supported resolution is `1280x720` under `60/80fps`, and the maximum supported resolution is `2560x1440` under `30fps`. ### Setting the frame rate to 30 fps ```python from maix import camera cam camera.Camera(640, 480, fps 30) # set the frame rate to 30 fps # or cam camera.Camera(1920, 1280) # Frame rate is set to 30 fps when resolution is higher than 1280x720 ``` ### Set the frame rate to 60 fps ```python from maix import camera cam camera.Camera(640, 480, fps 60) # Set frame rate to 60 fps # or cam camera.Camera(640, 480) # Set frame rate to 60fps if resolution is less than or equal to 1280x720 ``` ### Set the frame rate to 80 fps ```python from maix import camera cam camera.Camera(640, 480, fps 80) # Set frame rate to 60 fps ``` Notes: 1. if `Camera` is passed in a size larger than `1280x720`, for example written as `camera.Camera(1920, 1080, fps 60)`, then the `fps` parameter will be invalidated, and the frame rate will remain at `30fps`. 2. A `60/80fps` frame will be offset by a few pixels compared to a `30fps` frame, and the offset will need to be corrected if the viewing angle is critical. 3. 
## Image correction

In case of distortion such as fisheye, you can use the `lens_corr` function of the `Image` object to correct it. In general, you only need to increase or decrease the value of `strength` until the image looks right.

```python
from maix import camera, display, app

cam = camera.Camera(320, 240)
disp = display.Display()

while not app.need_exit():
    img = cam.read()
    img = img.lens_corr(strength=1.5)  # adjust the strength value until the image is no longer distorted
    disp.show(img)
```

Note that since the correction is done in software, it takes some time. Alternatively, you can use a distortion-free lens (inquire with the vendor) to solve the issue in hardware.

## Skipping Initial Frames

During the brief initialization period of the camera, image acquisition may not be stable yet, which can result in strange images. You can use the `skip_frames` function to skip the first few frames:

```python
cam = camera.Camera(640, 480)
cam.skip_frames(30)  # skip the first 30 frames
```

## Displaying Images

MaixPy provides the `display` module, which can conveniently display images:

```python
from maix import camera, display

cam = camera.Camera(640, 480)
disp = display.Display()

while 1:
    img = cam.read()
    disp.show(img)
```

## Setting the camera parameters

### Set exposure time

Note that after setting the exposure time, the camera switches to manual exposure mode; if you want to switch back to automatic exposure mode, run `cam.exp_mode(0)` (a combined sketch is shown at the end of this section).

```python
cam = camera.Camera()
cam.exposure(1000)
```

### Setting the gain

Note that after setting the gain, the camera switches to manual exposure mode; to switch back to auto exposure mode, run `cam.exp_mode(0)`. Custom gain values only take effect in manual exposure mode.

```python
cam = camera.Camera()
cam.gain(100)
```

### Setting the white balance

```python
cam = camera.Camera()
cam.awb_mode(1)  # 0: turn white balance on, 1: turn white balance off
```

### Setting brightness, contrast and saturation

```python
cam = camera.Camera()
cam.luma(50)        # set brightness, range [0, 100]
cam.constrast(50)   # set contrast, range [0, 100]
cam.saturation(50)  # set saturation, range [0, 100]
```
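As a combined example of the exposure-related notes above (a sketch, not a complete program), the snippet below fixes the exposure and gain manually and then uses `cam.exp_mode(0)` to return to auto exposure:

```python
from maix import camera

cam = camera.Camera(640, 480)

# Setting the exposure time or gain switches the sensor to manual exposure mode.
cam.exposure(1000)
cam.gain(100)

for _ in range(30):
    img = cam.read()  # frames captured with the fixed exposure/gain

# Switch back to automatic exposure mode, as described above.
cam.exp_mode(0)
```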
## Using a USB Camera

In addition to using the MIPI interface camera that comes with the development board, you can also use an external USB camera. Method:
* First, in the development board settings, select `USB Mode` under `USB Settings` and set it to `HOST` mode. If no screen is available, you can use the `examples/tools/maixcam_switch_usb_mode.py` script to set it.
* Currently (as of 2024.10.24), the `maix.camera` module does not yet support USB cameras, but you can use `OpenCV` instead:

```python
from maix import image, display
import cv2
import sys

cap = cv2.VideoCapture(0)
cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 480)
# cap.set(cv2.CAP_PROP_CONVERT_RGB, 0)

disp = display.Display()

if not cap.isOpened():
    print("Unable to open camera")
    sys.exit(1)

print("Starting to read")
while True:
    ret, frame = cap.read()
    if not ret:
        print("Unable to read frame")
        break
    img = image.cv2image(frame, bgr=True, copy=False)
    disp.show(img)
```
"},"/maixpy/doc/en/no_translate.html":{"title":"no translation","content":" title: no translation class: md_page
    This page not translated yet
    Please visit
    "}} \ No newline at end of file diff --git a/maixpy/static/search_index/index_3.json b/maixpy/static/search_index/index_3.json new file mode 100644 index 00000000..019d0474 --- /dev/null +++ b/maixpy/static/search_index/index_3.json @@ -0,0 +1 @@ +{"/maixpy/404.html":{"title":"","content":" layout: 404.html "},"/maixpy/index.html":{"title":"MaixPy","content":" title: MaixPy id: home_page

    MaixPy (v4)

    极速落地 AI 视觉、听觉应用

    [![GitHub Repo stars](https://img.shields.io/github/stars/sipeed/MaixPy?style social)](https://github.com/sipeed/MaixPy)[![Apache 2.0](https://img.shields.io/badge/license Apache%20v2.0 orange.svg)](\"https://github.com/sipeed/MaixPy/blob/main/LICENSE.html)[![PyPI](https://img.shields.io/pypi/v/maixpy.svg)](https://pypi.python.org/pypi/maixpy/)[![PyPI Downloads](https://img.shields.io/pypi/dm/maixpy?label pypi%20downloads)](https://pypi.org/project/maixpy/)[![GitHub downloads](https://img.shields.io/github/downloads/sipeed/maixpy/total?label GitHub%20downloads)](https://github.com/sipeed/MaixPy) [![Build MaixCAM](https://github.com/sipeed/MaixPy/actions/workflows/build_maixcam.yml/badge.svg)](https://github.com/sipeed/MaixPy/actions/workflows/build_maixcam.yml)[![Trigger wiki](https://github.com/sipeed/MaixPy/actions/workflows/trigger_wiki.yml/badge.svg)](https://github.com/sipeed/MaixPy/actions/workflows/trigger_wiki.yml)
    [English](./en/) 中文
    MaixPy v1 (K210) 用户请查看 MaixPy v1 文档。 MaixPy v4 不支持 Maix I Maix II 系列硬件,请更新到 [MaixCAM](https://wiki.sipeed.com/maixcam pro) 硬件平台。 喜欢 MaixPy 请给 [ MaixPy 开源项目](https://github.com/sipeed/MaixPy) 点个 Star ⭐️ 以鼓励我们开发更多功能。

    简易的 API 设计, 10 行代码进行 AI 图像识别

    ```python
    from maix import camera, display, image, nn

    classifier = nn.Classifier(model="/root/models/mobilenetv2.mud")
    cam = camera.Camera(classifier.input_width(), classifier.input_height(), classifier.input_format())
    dis = display.Display()

    while 1:
        img = cam.read()
        res = classifier.classify(img)
        max_idx, max_prob = res[0]
        msg = f"{max_prob:5.2f}: {classifier.labels[max_idx]}"
        img.draw_string(10, 10, msg, image.COLOR_RED)
        dis.show(img)
    ```

    硬件外设控制,不在话下

    串口收发:
    ```python
    from maix import uart

    devices = uart.list_devices()
    serial = uart.UART(devices[0], 115200)
    serial.write_str("hello world")
    print("received:", serial.read(timeout=2000))
    ```
    I2C 收发:
    ```python
    from maix import i2c

    devices = i2c.list_devices()
    dev1 = i2c.I2C(devices[0], freq=100000)
    slaves = dev1.scan()
    print("find slaves:", slaves)
    dev1.writeto(0x12, b'hello')
    print("received:", dev1.readfrom(0x12, 5))
    ```

    便捷的 MaixVision 工作站

    简化开发环境,让开发更简单快速

    在线 AI 训练平台 MaixHub

    无需 AI 基础和昂贵的训练设备,一键训练模型,一键部署到 MaixCAM
    ## 性能强劲 MaixCAM 硬件平台 ![MaixCAM](/static/image/maixcam_pro.png)
    * **CPU**: 1GHz RISC V(Linux) + 700MHz RISC V(RTOS) + 25~300MHz 8051(Low Power) * **NPU**: 1Tops@INT8 NPU, 支持 BF16,支持 YOLO11、 YOLOv8、 YOLOv5 等。 * **内存**: 256MB DDR3。 * **通信**: USB2.0/WiFi6/BLE5.4。 * **外设**: IIC/PWM/SPI/UART/WDT/GPIO/ADC * **多媒体**:4M 摄像头,2.4\" 640x480 高清电容触摸屏,H.264/H.265/MJPEG 2K 硬件编解码。 * **购买**: 有各种版本硬件提供, 详情查看[商城](https://wiki.sipeed.com/store) (缺货时咨询店家) * **更多**: 请看 [MaixCAM](https://wiki.sipeed.com/maixcam) 和 [MaixCAM Pro](https://wiki.sipeed.com/maixcam pro) 硬件文档
    ## 更多特性
    以下为部分功能简介,更多到[社区](#community)找到更多 基于 MaixPy 提供的丰富 API 可以创造出更多新功能

    OpenCV + OpenMV

    支持 OpenCV, 兼容 OpenMV

    C++版本

    MaixCDK C++版本的SDK,与MaixPy的API相同, 商业友好

    作为串口模块

    其它 MCU 通过串口命令控制

    APP商店

    将您的APP分享给社区,并一键安装APPs

    提供 MaixPy v1 兼容 API

    快速从MaixPy v1(K210)迁移到MaixPy v4

    AI 分类

    识别物体类别

    AI 对象检测

    识别物体类别和坐标

    AI 人脸识别

    识别不同人脸特征

    AI 人体关键点检测

    姿态识别、体感游戏

    AI 自学习分类器

    无需在PC上训练,在设备上瞬间学习任意物体

    AI 自学习检测器

    无需在PC上训练,在设备上瞬间学习任意物体

    AI 物体轨迹跟踪

    轨迹追踪,流量统计

    AI 监控,串流

    安防监控,可串流,甚至可以向直播平台 比如 Bilibili.com 直播

    查找颜色

    查找颜色斑点

    巡线

    小车巡线,物流搬运

    QR码和AprilTag

    识别QR码和AprilTag

    OCR

    识别图片中的字符,旧物数字化

    语音识别

    实时连续语音识别

    桌面监视器

    监视PC信息,如CPU,内存,网络等。

    天气站

    监视天气信息,如温度,湿度等。

    热红外摄像头

    选配摄像头,温度图像获取/测量

    HDMI 捕获视频

    选配,通过 HDMI 捕获图像,作为服务器监控(KVM)和远程控制、外接 AI、推流设备等

    大屏视频播放

    多种规格屏幕选择(2.3\" 2.4\" 5\" 7\"等), 硬件解码支持

    显微镜

    搭配1/8\"大底传感器 + 显微镜头 数字显微镜

    高速识别

    搭配全局摄像头,高速运动物体也能准确识别

    延时摄影

    搭配1/8\"大底传感器实现全天候延时摄影

    天文摄影

    搭配1/8\"大底传感器+高倍镜头实现天文摄影,支持长曝光模式和RAW 图输出

    陀螺仪增稳

    板载陀螺仪(仅MaixCAM Pro) 支持导出 gyroflow 防抖格式,DIY 摄影

    ## 谁在用 MaixPy?
    * **AI 算法工程师**: 轻松将你的 AI 模型部署到嵌入式设备。 > 易用的 API 访问 NPU,开源量化工具,详细的 AI 模型的文档。 * **STEM**:想要教学生 AI 和嵌入式开发的老师。 > 易用的 API,PC 工具,在线 AI 训练服务等,让你专注于教授 AI,而不是硬件和复杂的软件开发。 * **创客**: 想要制作一些酷炫的项目,但不想把事件浪费在太复杂的硬件和软件。 > Python 和 C++ 丰富简易 API,快速上手,甚至可以在几分钟内完成你的 DIY 项目。 * **工程师**: 想要做一些项目,但希望尽快有原型和落地。 > Python 和 C++ 丰富 API,高效稳定易使用,助力快速出原型及直接落地项目。 * **学生**: 想要学习 AI,嵌入式开发。 > 提供丰富文档和教程和开源代码,帮助你找到学习路线,并逐步成长。从简单的 Python 编程到视觉,AI,音频,Linux,RTOS等。 * **企业**: 想要开发 AI 视觉产品,但没有时间或工程师来开发复杂的嵌入式系统。 > 使用 MaixPy 甚至图形编程来,用更少的员工和时间。例如,向生产线添加 AI QA 系统,或为办公室添加一个 AI 安全监控器。 * **竞赛者**: 想要赢得比赛的比赛人们。 > MaixPy 集成了许多功能,易于使用,加快你作品产出速度,助力有限时间内赢得比赛,国内常见比赛都有很多同学使用 MaixPy 赢得比赛。
    ## 性能对比 相比上两代 Maix 系列产品(K210, V831)有限的 NPU 算子支持和内存限制,MaixCAM 在保持超高性价比的同时,性能和体验有了很大的提升。
    特征 Maix I K210 Maix II v831 MaixCAM CPU 400MHz RISC V x2 800MHz ARM7 **1GHz RISC V(Linux)
    700MHz RISC V(RTOS)
    25~300MHz 8051(Low Power)** 内存 6MB SRAM 64MB DDR2 **256MB DDR3** NPU 0.25Tops@INT8
    official says 1T but... 0.25Tops@INT8 **1Tops@INT8** Encoder ✖ 1080p@30fps **2K@30fps** 屏幕 2.4\" 320x240 1.3\" 240x240 **2.3\" 552x368**(MaixCAM)
    **2.4\" 640x480**(MaixCAM Pro)
    5\" 1280x720
    7\" 1280x800
    10“ 1280x800 触摸屏 ✖ ✖ **2.3\" 552x368** 摄像头 30W 200W **500W** WiFi 2.4G 2.4G **WiFi6** 2.4G/5G USB ✖ **USB2.0** **USB2.0** 以太网 ✖ 100M(选配) 100M(选配) SD 卡接口 SPI **SDIO** **SDIO** BLE ✖ ✖ **BLE5.4** 操作系统 RTOS Tina Linux **Linux + RTOS** 编程语言 C / C++ / MicroPython C / C++ / **Python3** C / **C++ / Python3** Software MaixPy MaixPy3 **MaixCDK + MaixPy v4 + opencv + numpy + ...** PC 软件 MaixPy IDE MaixPy3 IDE **MaixVision** Workstation 文档 ⭐️⭐️⭐️⭐️ ⭐️⭐️⭐️ 🌟🌟🌟🌟🌟 在线 AI 训练 ⭐️⭐️⭐️ ⭐️⭐️⭐️⭐️ 🌟🌟🌟🌟🌟 官方应用 ⭐️ ⭐️⭐️⭐️ 🌟🌟🌟🌟🌟 AI 分类(224x224) MobileNetv1 50fps
    MobileNetv2 ✖
    Resnet ✖ MobileNet ✖
    Resnet18 20fps
    Resnet50 ✖ MobileNetv2 **130fps**
    Resnet18 **62fps**
    Resnet50 **28fps** AI 检测(NPU推理部分) YOLOv2(224x224) 15fps YOLOv2(224x224) 15fps **YOLOv5s(224x224) 100fps
    YOLOv5s(320x256) 70fps
    YOLOv5s(640x640) 15fps
    YOLOv8n(640x640) 23fps
    YOLO11n(224x224)175fps
    YOLO11n(320x224)120fps
    YOLO11n(320x320)95fps
    YOLO11n(640x640)23fps** 易用性 ⭐️⭐️⭐️⭐️ ⭐️⭐️⭐️ 🌟🌟🌟🌟🌟
    **MaixCAM Pro** 相比 MaixCAM 的升级点: 1. 优化外壳设计,更美观,散热更好 2. 屏幕升级到2.4寸 640x480分辨率 3. 板载双路PWM舵机接口,标准PMOD接口,6pin端子接口 4. 板载AXP2101 PMU,支持锂电池充放电,电量计功能 5. 板载六轴IMU,qmi8658,可支持视频防抖 6. 内置1W小喇叭 7. 增加1/4英寸标准螺纹口,便于安装 8. 增加辅助照明LED 9. 增加RTC芯片 BM8653 和 RTC电池
    ## Maix 生态 ## 社区 {#community}
    社区 地址 **文档** [MaixPy 文档](/doc/zh/index.html) **应用商店** [maixhub.com/app](https://maixhub.com/app) **项目分享** [maixhub.com/share](https://maixhub.com/share) **Bilibili** B站搜索 `MaixCAM` 或者 `MaixPy` **讨论** [maixhub.com/discussion](https://maixhub.com/discussion) **MaixPy issues** [github.com/sipeed/MaixPy/issues](https://github.com/sipeed/MaixPy/issues) **Telegram** [t.me/maixpy](https://t.me/maixpy) **QQ 群** 862340358
    ## MaixPy v1, MaixPy3 and MaixPy v4 有什么区别?
    * MaixPy v1 使用 MicroPython 编程语言,仅支持 Sipeed Maix I K210 系列硬件,有限的第三方包。 * MaixPy3 专为 Sipeed Maix II Dock v831 设计,不是长期支持版本。 * MaixPy v4 使用 Python 编程语言,因此我们可以直接使用许多包。
    MaixPy v4 支持 Sipeed 的新硬件平台,这是一个长期支持版本,未来的硬件平台将支持这个版本。
    MaixPy v4 有一个 MaixPy v1 兼容的 API,所以你可以快速将你的 MaixPy v1 项目迁移到 MaixPy v4。 (MaixPy v4 不支持 K210 系列,建议升级硬件平台以使用此版本,以获得更多功能和更好的性能和更方便的编程体验。)
    "}} \ No newline at end of file diff --git a/maixpy/static/search_index/index_4.json b/maixpy/static/search_index/index_4.json new file mode 100644 index 00000000..4e03c819 --- /dev/null +++ b/maixpy/static/search_index/index_4.json @@ -0,0 +1 @@ +{"/maixpy/en/index.html":{"title":"MaixPy","content":" title: MaixPy id: home_page

    MaixPy (v4)

    Fast implementation of AI vision and auditory applications

    [![GitHub Repo stars](https://img.shields.io/github/stars/sipeed/MaixPy?style=social)](https://github.com/sipeed/MaixPy)[![Apache 2.0](https://img.shields.io/badge/license-Apache%20v2.0-orange.svg)](https://github.com/sipeed/MaixPy/blob/main/LICENSE.html)[![PyPI](https://img.shields.io/pypi/v/maixpy.svg)](https://pypi.python.org/pypi/maixpy/)[![PyPI Downloads](https://img.shields.io/pypi/dm/maixpy?label=pypi%20downloads)](https://pypi.org/project/maixpy/)[![GitHub downloads](https://img.shields.io/github/downloads/sipeed/maixpy/total?label=GitHub%20downloads)](https://github.com/sipeed/MaixPy) [![Build MaixCAM](https://github.com/sipeed/MaixPy/actions/workflows/build_maixcam.yml/badge.svg)](https://github.com/sipeed/MaixPy/actions/workflows/build_maixcam.yml)[![Trigger wiki](https://github.com/sipeed/MaixPy/actions/workflows/trigger_wiki.yml/badge.svg)](https://github.com/sipeed/MaixPy/actions/workflows/trigger_wiki.yml)
    English [中文](../)
    MaixPy v1 (K210) users should refer to the MaixPy v1 documentation. MaixPy v4 does not support Maix-I / Maix-II series hardware; please upgrade to the [MaixCAM](https://wiki.sipeed.com/maixcam-pro) platform. If you like MaixPy, please give the [MaixPy open source project](https://github.com/sipeed/MaixPy) a star ⭐️ to encourage us to develop more features.

    Simple API Design, AI Image Recognition with Just 10 Lines of Code

    ```python
    from maix import camera, display, image, nn

    classifier = nn.Classifier(model="/root/models/mobilenetv2.mud")
    cam = camera.Camera(classifier.input_width(), classifier.input_height(), classifier.input_format())
    dis = display.Display()

    while 1:
        img = cam.read()
        res = classifier.classify(img)
        max_idx, max_prob = res[0]
        msg = f"{max_prob:5.2f}: {classifier.labels[max_idx]}"
        img.draw_string(10, 10, msg, image.COLOR_RED)
        dis.show(img)
    ```

    Hardware Peripheral Control, No Big Deal

    Serial Communication:
    ```python
    from maix import uart

    devices = uart.list_devices()
    serial = uart.UART(devices[0], 115200)
    serial.write_str("hello world")
    print("received:", serial.read(timeout=2000))
    ```
    I2C Communication:
    ```python
    from maix import i2c

    devices = i2c.list_devices()
    dev1 = i2c.I2C(devices[0], freq=100000)
    slaves = dev1.scan()
    print("find slaves:", slaves)
    dev1.writeto(0x12, b'hello')
    print("received:", dev1.readfrom(0x12, 5))
    ```

    Convenient MaixVision Workstation

    Simplify the development environment to make development easier and faster

    Online AI Training Platform MaixHub

    No need for AI expertise or expensive training equipment, train models with one click, deploy to MaixCAM with one click.
    ## High performance MaixCAM Hardware Platform ![MaixCAM](/static/image/maixcam_pro.png)
    * **CPU**: 1GHz RISC V (Linux) + 700MHz RISC V (RTOS) + 25~300MHz 8051 (Low Power) * **NPU**: 1Tops@INT8 NPU, supports BF16, YOLO11, YOLOv8, YOLOv5, etc. * **Memory**: 256MB DDR3 * **Communication**: USB2.0/WiFi6/BLE5.4 * **Peripherals**: IIC/PWM/SPI/UART/WDT/GPIO/ADC * **Multimedia**: 4M camera, 2.4\" 640x480 HD capacitive touchscreen, H.264/H.265/MJPEG 2K hardware codec. * **Purchase**: Various hardware versions are available, see [Store](https://wiki.sipeed.com/store) (contact the store for availability) * **More**: See [MaixCAM](https://wiki.sipeed.com/maixcam) and [MaixCAM Pro](https://wiki.sipeed.com/maixcam pro) hardware documentation
    ## More Features
    Here are some feature highlights, find more in the [Community](#community) You can create new features using the rich API provided by MaixPy.

    OpenCV + OpenMV

    Supports OpenCV, compatible with OpenMV

    C++ Version

    MaixCDK C++ version SDK, same API as MaixPy, commercial friendly

    As a Serial Module

    Control other MCUs via serial commands

    APP Store

    Share your apps with the community and install them with one click via the APP Store.

    MaixPy v1 Compatible API

    Quickly migrate from MaixPy v1 (K210) to MaixPy v4

    AI Classification

    Identify object categories

    AI Object Detection

    Identify object categories and coordinates

    AI Face Recognition

    Recognize different facial features

    AI Body Keypoint Detection

    Posture recognition, body sensing games

    AI Self learning Classifier

    Instantly learn any object on the device without PC training

    AI Self learning Detector

    Instantly learn any object on the device without PC training

    AI Object Tracking

    Track objects, count traffic

    AI Surveillance, Streaming

    Security monitoring, streaming, even live stream to platforms like Bilibili.com

    Color Detection

    Detect color spots

    Line Following

    Line following car, logistics transportation

    QR Code and AprilTag

    Recognize QR codes and AprilTag

    OCR

    Recognize characters in images, digitize old items

    Voice Recognition

    Real time continuous voice recognition

    Desktop Monitor

    Monitor PC information such as CPU, memory, and network.

    Weather Station

    Monitor weather information such as temperature and humidity.

    Thermal Infrared Camera

    Optional camera, for temperature image acquisition/measurement

    HDMI Video Capture

    Optional feature, capture images via HDMI for server monitoring (KVM), remote control, external AI, streaming devices, etc.

    Large Screen Video Playback

    Multiple screen sizes (2.3\", 2.4\", 5\", 7\", etc.), hardware decoding support

    Microscope

    Pair with 1/8\" large sensor + microscope lens digital microscope

    High Speed Recognition

    Pair with a global shutter camera to accurately recognize high speed moving objects

    Time lapse Photography

    Pair with a 1/8\" large sensor for all day time lapse photography

    Astronomical Photography

    Pair with a 1/8\" large sensor + high power lens for astronomical photography, supports long exposure mode and RAW image output

    Gyroscope Stabilization

    Onboard gyroscope (MaixCAM Pro only), supports exporting gyroflow stabilization format for DIY photography

    ## Who Uses MaixPy?
    * **AI Algorithm Engineers**: Easily deploy your AI models to embedded devices. > Easy to use API to access NPU, open source quantization tools, detailed documentation on AI models. * **STEM**: Teachers who want to teach students AI and embedded development. > Easy to use API, PC tools, online AI training services, allowing you to focus on teaching AI instead of hardware and complex software development. * **Makers**: Want to create cool projects without spending too much time on complex hardware and software. > Rich, simple Python and C++ APIs, quick to get started, complete your DIY projects in just minutes. * **Engineers**: Want to build projects but hope to have prototypes and solutions quickly. > Rich Python and C++ APIs, efficient, stable, and easy to use, helping you quickly create prototypes and implement projects directly. * **Students**: Want to learn AI and embedded development. > Offers rich documentation, tutorials, and open source code, helping you find learning paths and gradually grow, from simple Python programming to vision, AI, audio, Linux, RTOS, etc. * **Companies**: Want to develop AI vision products but don’t have the time or engineers to develop complex embedded systems. > Use MaixPy or even graphical programming to reduce the number of employees and time. For example, adding an AI QA system to the production line, or an AI security monitor to the office. * **Competitors**: People who want to win competitions. > MaixPy integrates many features, is easy to use, speeds up the output of your work, and helps you win competitions in a short time. Many students use MaixPy to win common competitions in China.
    ## Performance Comparison Compared to the limited NPU operator support and memory constraints of the previous two generations of Maix series products (K210, V831), MaixCAM offers significant improvements in performance and experience while maintaining an excellent price performance ratio.
    Feature Maix I K210 Maix II v831 MaixCAM CPU 400MHz RISC V x2 800MHz ARM7 **1GHz RISC V(Linux)
    700MHz RISC V(RTOS)
    25~300MHz 8051(Low Power)** Memory 6MB SRAM 64MB DDR2 **256MB DDR3** NPU 0.25Tops@INT8
    official says 1T but... 0.25Tops@INT8 **1Tops@INT8** Encoder ✖ 1080p@30fps **2K@30fps** Screen 2.4\" 320x240 1.3\" 240x240 **2.3\" 552x368**(MaixCAM)
    **2.4\" 640x480**(MaixCAM Pro)
    5\" 1280x720
    7\" 1280x800
    10“ 1280x800 Touchscreen ✖ ✖ **2.3\" 552x368** Camera 30W 200W **500W** WiFi 2.4G 2.4G **WiFi6** 2.4G/5G USB ✖ **USB2.0** **USB2.0** Ethernet ✖ 100M(optional) 100M(optional) SD Card Interface SPI **SDIO** **SDIO** BLE ✖ ✖ **BLE5.4** Operating System RTOS Tina Linux **Linux + RTOS** Programming Language C / C++ / MicroPython C / C++ / **Python3** C / **C++ / Python3** Software MaixPy MaixPy3 **MaixCDK + MaixPy v4 + OpenCV + Numpy + ...** PC Software MaixPy IDE MaixPy3 IDE **MaixVision** Workstation Documentation ⭐️⭐️⭐️⭐️ ⭐️⭐️⭐️ 🌟🌟🌟🌟🌟 Online AI Training ⭐️⭐️⭐️ ⭐️⭐️⭐️⭐️ 🌟🌟🌟🌟🌟 Official Apps ⭐️ ⭐️⭐️⭐️ 🌟🌟🌟🌟🌟 AI Classification (224x224) MobileNetv1 50fps
    MobileNetv2 ✖
    Resnet ✖ MobileNet ✖
    Resnet18 20fps
    Resnet50 ✖ MobileNetv2 **130fps**
    Resnet18 **62fps**
    Resnet50 **28fps** AI Detection (NPU inference part) YOLOv2(224x224) 15fps YOLOv2(224x224) 15fps **YOLOv5s(224x224) 100fps
    YOLOv5s(320x256) 70fps
    YOLOv5s(640x640) 15fps
    YOLOv8n(640x640) 23fps
    YOLO11n(224x224)175fps
    YOLO11n(320x224)120fps
    YOLO11n(320x320)95fps
    YOLO11n(640x640)23fps** Ease of Use ⭐️⭐️⭐️⭐️ ⭐️⭐️⭐️ 🌟🌟🌟🌟🌟
    **MaixCAM Pro** Upgrades compared to MaixCAM: 1. Optimized case design for better aesthetics and heat dissipation 2. Screen upgraded to 2.4 inches with 640x480 resolution 3. Dual channel PWM servo interface, standard PMOD interface, 6 pin terminal interface 4. Onboard AXP2101 PMU, supports lithium battery charging and discharging, power metering function 5. Onboard six axis IMU, qmi8658, supports video stabilization 6. Built in 1W small speaker 7. Added 1/4 inch standard thread mount for easy installation 8. Added auxiliary lighting LED 9. Added RTC chip BM8653 and RTC battery
    ## Maix Ecosystem ## Community {#community}
    Community Address **Documentation** [MaixPy Documentation](/doc/en/index.html) **App Store** [maixhub.com/app](https://maixhub.com/app) **Project Sharing** [maixhub.com/share](https://maixhub.com/share) **Bilibili** Search for `MaixCAM` or `MaixPy` on Bilibili **Discussion** [maixhub.com/discussion](https://maixhub.com/discussion) **MaixPy issues** [github.com/sipeed/MaixPy/issues](https://github.com/sipeed/MaixPy/issues) **Telegram** [t.me/maixpy](https://t.me/maixpy) **QQ Group** 862340358
    ## What Are the Differences Between MaixPy v1, MaixPy3, and MaixPy v4?
    * MaixPy v1 uses the MicroPython programming language and only supports the Sipeed Maix I K210 series hardware with limited third party packages. * MaixPy3 is specifically designed for Sipeed Maix II Dock v831 and is not a long term support version. * MaixPy v4 uses the Python programming language, allowing direct use of many packages.
    MaixPy v4 supports Sipeed's new hardware platform and is a long term support version. Future hardware platforms will support this version.
    MaixPy v4 has a MaixPy v1 compatible API, so you can quickly migrate your MaixPy v1 projects to MaixPy v4. (MaixPy v4 does not support the K210 series. It is recommended to upgrade your hardware platform to use this version for more features, better performance,
    and a more convenient programming experience.)
    "},"/maixpy/en/no_translate.html":{"title":"no translation","content":" title: no translation class: md_page
    This page not translated yet
    Please visit
    "}} \ No newline at end of file diff --git a/maixpy/static/video/app_store.mp4 b/maixpy/static/video/app_store.mp4 new file mode 100644 index 00000000..a59f95ad Binary files /dev/null and b/maixpy/static/video/app_store.mp4 differ diff --git a/maixpy/static/video/classifier.mp4 b/maixpy/static/video/classifier.mp4 new file mode 100644 index 00000000..01614991 Binary files /dev/null and b/maixpy/static/video/classifier.mp4 differ diff --git a/maixpy/static/video/desktop_monitor.mp4 b/maixpy/static/video/desktop_monitor.mp4 new file mode 100644 index 00000000..ccff7b91 Binary files /dev/null and b/maixpy/static/video/desktop_monitor.mp4 differ diff --git a/maixpy/static/video/detector.mp4 b/maixpy/static/video/detector.mp4 new file mode 100644 index 00000000..3af82ecb Binary files /dev/null and b/maixpy/static/video/detector.mp4 differ diff --git a/maixpy/static/video/find_blobs.mp4 b/maixpy/static/video/find_blobs.mp4 new file mode 100644 index 00000000..104338c8 Binary files /dev/null and b/maixpy/static/video/find_blobs.mp4 differ diff --git a/maixpy/static/video/gyroflow.mp4 b/maixpy/static/video/gyroflow.mp4 new file mode 100644 index 00000000..10b2f54a Binary files /dev/null and b/maixpy/static/video/gyroflow.mp4 differ diff --git a/maixpy/static/video/line_track.mp4 b/maixpy/static/video/line_track.mp4 new file mode 100644 index 00000000..9fcaa483 Binary files /dev/null and b/maixpy/static/video/line_track.mp4 differ diff --git a/maixpy/static/video/line_tracking_app.mp4 b/maixpy/static/video/line_tracking_app.mp4 new file mode 100644 index 00000000..09f1c51b Binary files /dev/null and b/maixpy/static/video/line_tracking_app.mp4 differ diff --git a/maixpy/static/video/maixvision.mp4 b/maixpy/static/video/maixvision.mp4 new file mode 100644 index 00000000..e0892710 Binary files /dev/null and b/maixpy/static/video/maixvision.mp4 differ diff --git a/maixpy/static/video/microscope.mp4 b/maixpy/static/video/microscope.mp4 new file mode 100644 index 00000000..25449fe5 Binary files /dev/null and b/maixpy/static/video/microscope.mp4 differ diff --git a/maixpy/static/video/qr_apriltag.mp4 b/maixpy/static/video/qr_apriltag.mp4 new file mode 100644 index 00000000..a8b863c4 Binary files /dev/null and b/maixpy/static/video/qr_apriltag.mp4 differ diff --git a/maixpy/static/video/self_learn_tracker.mp4 b/maixpy/static/video/self_learn_tracker.mp4 new file mode 100644 index 00000000..2075c303 Binary files /dev/null and b/maixpy/static/video/self_learn_tracker.mp4 differ diff --git a/maixpy/static/video/time_lapse.mp4 b/maixpy/static/video/time_lapse.mp4 new file mode 100644 index 00000000..c748a60d Binary files /dev/null and b/maixpy/static/video/time_lapse.mp4 differ diff --git a/maixpy/static/video/tracker.mp4 b/maixpy/static/video/tracker.mp4 new file mode 100644 index 00000000..cecdb26a Binary files /dev/null and b/maixpy/static/video/tracker.mp4 differ diff --git a/maixpy/static/video/video_play.mp4 b/maixpy/static/video/video_play.mp4 new file mode 100644 index 00000000..bc14ec2d Binary files /dev/null and b/maixpy/static/video/video_play.mp4 differ diff --git a/maixpy/teedoc-plugin-markdown-parser/mermaid.min.js b/maixpy/teedoc-plugin-markdown-parser/mermaid.min.js new file mode 100644 index 00000000..02a60732 --- /dev/null +++ b/maixpy/teedoc-plugin-markdown-parser/mermaid.min.js @@ -0,0 +1,3 @@ +/*! 
For license information please see mermaid.min.js.LICENSE.txt */ +!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.mermaid=e():t.mermaid=e()}("undefined"!=typeof self?self:this,(()=>(()=>{var t={1362:(t,e,n)=>{t=n.nmd(t);var r=function(){var t=function(t,e,n,r){for(n=n||{},r=t.length;r--;n[t[r]]=e);return n},e=[1,3],n=[1,7],r=[1,8],i=[1,9],a=[1,10],o=[1,13],s=[1,12],c=[1,16,25],u=[1,20],l=[1,31],h=[1,32],f=[1,33],d=[1,35],p=[1,38],g=[1,36],y=[1,37],m=[1,39],v=[1,40],b=[1,41],_=[1,42],x=[1,45],w=[1,46],k=[1,47],T=[1,48],C=[16,25],E=[1,62],S=[1,63],A=[1,64],M=[1,65],N=[1,66],D=[1,67],B=[16,25,32,44,45,53,56,57,58,59,60,61,66,68],L=[16,25,30,32,44,45,49,53,56,57,58,59,60,61,66,68,83,84,85,86],O=[5,8,9,10,11,16,19,23,25],I=[53,83,84,85,86],R=[53,60,61,83,84,85,86],F=[53,56,57,58,59,83,84,85,86],P=[16,25,32],Y=[1,99],j={trace:function(){},yy:{},symbols_:{error:2,start:3,mermaidDoc:4,statments:5,direction:6,directive:7,direction_tb:8,direction_bt:9,direction_rl:10,direction_lr:11,graphConfig:12,openDirective:13,typeDirective:14,closeDirective:15,NEWLINE:16,":":17,argDirective:18,open_directive:19,type_directive:20,arg_directive:21,close_directive:22,CLASS_DIAGRAM:23,statements:24,EOF:25,statement:26,className:27,alphaNumToken:28,classLiteralName:29,GENERICTYPE:30,relationStatement:31,LABEL:32,classStatement:33,methodStatement:34,annotationStatement:35,clickStatement:36,cssClassStatement:37,acc_title:38,acc_title_value:39,acc_descr:40,acc_descr_value:41,acc_descr_multiline_value:42,CLASS:43,STYLE_SEPARATOR:44,STRUCT_START:45,members:46,STRUCT_STOP:47,ANNOTATION_START:48,ANNOTATION_END:49,MEMBER:50,SEPARATOR:51,relation:52,STR:53,relationType:54,lineType:55,AGGREGATION:56,EXTENSION:57,COMPOSITION:58,DEPENDENCY:59,LINE:60,DOTTED_LINE:61,CALLBACK:62,LINK:63,LINK_TARGET:64,CLICK:65,CALLBACK_NAME:66,CALLBACK_ARGS:67,HREF:68,CSSCLASS:69,commentToken:70,textToken:71,graphCodeTokens:72,textNoTagsToken:73,TAGSTART:74,TAGEND:75,"==":76,"--":77,PCT:78,DEFAULT:79,SPACE:80,MINUS:81,keywords:82,UNICODE_TEXT:83,NUM:84,ALPHA:85,BQUOTE_STR:86,$accept:0,$end:1},terminals_:{2:"error",5:"statments",8:"direction_tb",9:"direction_bt",10:"direction_rl",11:"direction_lr",16:"NEWLINE",17:":",19:"open_directive",20:"type_directive",21:"arg_directive",22:"close_directive",23:"CLASS_DIAGRAM",25:"EOF",30:"GENERICTYPE",32:"LABEL",38:"acc_title",39:"acc_title_value",40:"acc_descr",41:"acc_descr_value",42:"acc_descr_multiline_value",43:"CLASS",44:"STYLE_SEPARATOR",45:"STRUCT_START",47:"STRUCT_STOP",48:"ANNOTATION_START",49:"ANNOTATION_END",50:"MEMBER",51:"SEPARATOR",53:"STR",56:"AGGREGATION",57:"EXTENSION",58:"COMPOSITION",59:"DEPENDENCY",60:"LINE",61:"DOTTED_LINE",62:"CALLBACK",63:"LINK",64:"LINK_TARGET",65:"CLICK",66:"CALLBACK_NAME",67:"CALLBACK_ARGS",68:"HREF",69:"CSSCLASS",72:"graphCodeTokens",74:"TAGSTART",75:"TAGEND",76:"==",77:"--",78:"PCT",79:"DEFAULT",80:"SPACE",81:"MINUS",82:"keywords",83:"UNICODE_TEXT",84:"NUM",85:"ALPHA",86:"BQUOTE_STR"},productions_:[0,[3,1],[3,1],[3,1],[3,2],[6,1],[6,1],[6,1],[6,1],[4,1],[7,4],[7,6],[13,1],[14,1],[18,1],[15,1],[12,4],[24,1],[24,2],[24,3],[27,1],[27,1],[27,2],[27,2],[27,2],[26,1],[26,2],[26,1],[26,1],[26,1],[26,1],[26,1],[26,1],[26,1],[26,2],[26,2],[26,1],[33,2],[33,4],[33,5],[33,7],[35,4],[46,1],[46,2],[34,1],[34,2],[34,1],[34,1],[31,3],[31,4],[31,4],[31,5],[52,3],[52,2],[52,2],[52,1],[54,1],[54,1],[54,1],[54,1],[55,1],[55,1],[36,3],[36,4],[36,3],[36,4],[36,4],[36,
5],[36,3],[36,4],[36,4],[36,5],[36,3],[36,4],[36,4],[36,5],[37,3],[70,1],[70,1],[71,1],[71,1],[71,1],[71,1],[71,1],[71,1],[71,1],[73,1],[73,1],[73,1],[73,1],[28,1],[28,1],[28,1],[29,1]],performAction:function(t,e,n,r,i,a,o){var s=a.length-1;switch(i){case 5:r.setDirection("TB");break;case 6:r.setDirection("BT");break;case 7:r.setDirection("RL");break;case 8:r.setDirection("LR");break;case 12:r.parseDirective("%%{","open_directive");break;case 13:r.parseDirective(a[s],"type_directive");break;case 14:a[s]=a[s].trim().replace(/'/g,'"'),r.parseDirective(a[s],"arg_directive");break;case 15:r.parseDirective("}%%","close_directive","class");break;case 20:case 21:this.$=a[s];break;case 22:this.$=a[s-1]+a[s];break;case 23:case 24:this.$=a[s-1]+"~"+a[s];break;case 25:r.addRelation(a[s]);break;case 26:a[s-1].title=r.cleanupLabel(a[s]),r.addRelation(a[s-1]);break;case 34:this.$=a[s].trim(),r.setTitle(this.$);break;case 35:case 36:this.$=a[s].trim(),r.setAccDescription(this.$);break;case 37:r.addClass(a[s]);break;case 38:r.addClass(a[s-2]),r.setCssClass(a[s-2],a[s]);break;case 39:r.addClass(a[s-3]),r.addMembers(a[s-3],a[s-1]);break;case 40:r.addClass(a[s-5]),r.setCssClass(a[s-5],a[s-3]),r.addMembers(a[s-5],a[s-1]);break;case 41:r.addAnnotation(a[s],a[s-2]);break;case 42:this.$=[a[s]];break;case 43:a[s].push(a[s-1]),this.$=a[s];break;case 44:case 46:case 47:break;case 45:r.addMember(a[s-1],r.cleanupLabel(a[s]));break;case 48:this.$={id1:a[s-2],id2:a[s],relation:a[s-1],relationTitle1:"none",relationTitle2:"none"};break;case 49:this.$={id1:a[s-3],id2:a[s],relation:a[s-1],relationTitle1:a[s-2],relationTitle2:"none"};break;case 50:this.$={id1:a[s-3],id2:a[s],relation:a[s-2],relationTitle1:"none",relationTitle2:a[s-1]};break;case 51:this.$={id1:a[s-4],id2:a[s],relation:a[s-2],relationTitle1:a[s-3],relationTitle2:a[s-1]};break;case 52:this.$={type1:a[s-2],type2:a[s],lineType:a[s-1]};break;case 53:this.$={type1:"none",type2:a[s],lineType:a[s-1]};break;case 54:this.$={type1:a[s-1],type2:"none",lineType:a[s]};break;case 55:this.$={type1:"none",type2:"none",lineType:a[s]};break;case 56:this.$=r.relationType.AGGREGATION;break;case 57:this.$=r.relationType.EXTENSION;break;case 58:this.$=r.relationType.COMPOSITION;break;case 59:this.$=r.relationType.DEPENDENCY;break;case 60:this.$=r.lineType.LINE;break;case 61:this.$=r.lineType.DOTTED_LINE;break;case 62:case 68:this.$=a[s-2],r.setClickEvent(a[s-1],a[s]);break;case 63:case 69:this.$=a[s-3],r.setClickEvent(a[s-2],a[s-1]),r.setTooltip(a[s-2],a[s]);break;case 64:case 72:this.$=a[s-2],r.setLink(a[s-1],a[s]);break;case 65:case 73:this.$=a[s-3],r.setLink(a[s-2],a[s-1],a[s]);break;case 66:case 74:this.$=a[s-3],r.setLink(a[s-2],a[s-1]),r.setTooltip(a[s-2],a[s]);break;case 67:case 75:this.$=a[s-4],r.setLink(a[s-3],a[s-2],a[s]),r.setTooltip(a[s-3],a[s-1]);break;case 70:this.$=a[s-3],r.setClickEvent(a[s-2],a[s-1],a[s]);break;case 71:this.$=a[s-4],r.setClickEvent(a[s-3],a[s-2],a[s-1]),r.setTooltip(a[s-3],a[s]);break;case 
76:r.setCssClass(a[s-1],a[s])}},table:[{3:1,4:2,5:e,6:4,7:5,8:n,9:r,10:i,11:a,12:6,13:11,19:o,23:s},{1:[3]},{1:[2,1]},{1:[2,2]},{1:[2,3]},{3:14,4:2,5:e,6:4,7:5,8:n,9:r,10:i,11:a,12:6,13:11,19:o,23:s},{1:[2,9]},t(c,[2,5]),t(c,[2,6]),t(c,[2,7]),t(c,[2,8]),{14:15,20:[1,16]},{16:[1,17]},{20:[2,12]},{1:[2,4]},{15:18,17:[1,19],22:u},t([17,22],[2,13]),{6:30,7:29,8:n,9:r,10:i,11:a,13:11,19:o,24:21,26:22,27:34,28:43,29:44,31:23,33:24,34:25,35:26,36:27,37:28,38:l,40:h,42:f,43:d,48:p,50:g,51:y,62:m,63:v,65:b,69:_,83:x,84:w,85:k,86:T},{16:[1,49]},{18:50,21:[1,51]},{16:[2,15]},{25:[1,52]},{16:[1,53],25:[2,17]},t(C,[2,25],{32:[1,54]}),t(C,[2,27]),t(C,[2,28]),t(C,[2,29]),t(C,[2,30]),t(C,[2,31]),t(C,[2,32]),t(C,[2,33]),{39:[1,55]},{41:[1,56]},t(C,[2,36]),t(C,[2,44],{52:57,54:60,55:61,32:[1,59],53:[1,58],56:E,57:S,58:A,59:M,60:N,61:D}),{27:68,28:43,29:44,83:x,84:w,85:k,86:T},t(C,[2,46]),t(C,[2,47]),{28:69,83:x,84:w,85:k},{27:70,28:43,29:44,83:x,84:w,85:k,86:T},{27:71,28:43,29:44,83:x,84:w,85:k,86:T},{27:72,28:43,29:44,83:x,84:w,85:k,86:T},{53:[1,73]},t(B,[2,20],{28:43,29:44,27:74,30:[1,75],83:x,84:w,85:k,86:T}),t(B,[2,21],{30:[1,76]}),t(L,[2,90]),t(L,[2,91]),t(L,[2,92]),t([16,25,30,32,44,45,53,56,57,58,59,60,61,66,68],[2,93]),t(O,[2,10]),{15:77,22:u},{22:[2,14]},{1:[2,16]},{6:30,7:29,8:n,9:r,10:i,11:a,13:11,19:o,24:78,25:[2,18],26:22,27:34,28:43,29:44,31:23,33:24,34:25,35:26,36:27,37:28,38:l,40:h,42:f,43:d,48:p,50:g,51:y,62:m,63:v,65:b,69:_,83:x,84:w,85:k,86:T},t(C,[2,26]),t(C,[2,34]),t(C,[2,35]),{27:79,28:43,29:44,53:[1,80],83:x,84:w,85:k,86:T},{52:81,54:60,55:61,56:E,57:S,58:A,59:M,60:N,61:D},t(C,[2,45]),{55:82,60:N,61:D},t(I,[2,55],{54:83,56:E,57:S,58:A,59:M}),t(R,[2,56]),t(R,[2,57]),t(R,[2,58]),t(R,[2,59]),t(F,[2,60]),t(F,[2,61]),t(C,[2,37],{44:[1,84],45:[1,85]}),{49:[1,86]},{53:[1,87]},{53:[1,88]},{66:[1,89],68:[1,90]},{28:91,83:x,84:w,85:k},t(B,[2,22]),t(B,[2,23]),t(B,[2,24]),{16:[1,92]},{25:[2,19]},t(P,[2,48]),{27:93,28:43,29:44,83:x,84:w,85:k,86:T},{27:94,28:43,29:44,53:[1,95],83:x,84:w,85:k,86:T},t(I,[2,54],{54:96,56:E,57:S,58:A,59:M}),t(I,[2,53]),{28:97,83:x,84:w,85:k},{46:98,50:Y},{27:100,28:43,29:44,83:x,84:w,85:k,86:T},t(C,[2,62],{53:[1,101]}),t(C,[2,64],{53:[1,103],64:[1,102]}),t(C,[2,68],{53:[1,104],67:[1,105]}),t(C,[2,72],{53:[1,107],64:[1,106]}),t(C,[2,76]),t(O,[2,11]),t(P,[2,50]),t(P,[2,49]),{27:108,28:43,29:44,83:x,84:w,85:k,86:T},t(I,[2,52]),t(C,[2,38],{45:[1,109]}),{47:[1,110]},{46:111,47:[2,42],50:Y},t(C,[2,41]),t(C,[2,63]),t(C,[2,65]),t(C,[2,66],{64:[1,112]}),t(C,[2,69]),t(C,[2,70],{53:[1,113]}),t(C,[2,73]),t(C,[2,74],{64:[1,114]}),t(P,[2,51]),{46:115,50:Y},t(C,[2,39]),{47:[2,43]},t(C,[2,67]),t(C,[2,71]),t(C,[2,75]),{47:[1,116]},t(C,[2,40])],defaultActions:{2:[2,1],3:[2,2],4:[2,3],6:[2,9],13:[2,12],14:[2,4],20:[2,15],51:[2,14],52:[2,16],78:[2,19],111:[2,43]},parseError:function(t,e){if(!e.recoverable){var n=new Error(t);throw n.hash=e,n}this.trace(t)},parse:function(t){var e=this,n=[0],r=[],i=[null],a=[],o=this.table,s="",c=0,u=0,l=0,h=2,f=1,d=a.slice.call(arguments,1),p=Object.create(this.lexer),g={yy:{}};for(var y in this.yy)Object.prototype.hasOwnProperty.call(this.yy,y)&&(g.yy[y]=this.yy[y]);p.setInput(t,g.yy),g.yy.lexer=p,g.yy.parser=this,void 0===p.yylloc&&(p.yylloc={});var m=p.yylloc;a.push(m);var v=p.options&&p.options.ranges;function b(){var t;return"number"!=typeof(t=r.pop()||p.lex()||f)&&(t instanceof Array&&(t=(r=t).pop()),t=e.symbols_[t]||t),t}"function"==typeof g.yy.parseError?this.parseError=g.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;for(var 
_,x,w,k,T,C,E,S,A,M={};;){if(w=n[n.length-1],this.defaultActions[w]?k=this.defaultActions[w]:(null==_&&(_=b()),k=o[w]&&o[w][_]),void 0===k||!k.length||!k[0]){var N="";for(C in A=[],o[w])this.terminals_[C]&&C>h&&A.push("'"+this.terminals_[C]+"'");N=p.showPosition?"Parse error on line "+(c+1)+":\n"+p.showPosition()+"\nExpecting "+A.join(", ")+", got '"+(this.terminals_[_]||_)+"'":"Parse error on line "+(c+1)+": Unexpected "+(_==f?"end of input":"'"+(this.terminals_[_]||_)+"'"),this.parseError(N,{text:p.match,token:this.terminals_[_]||_,line:p.yylineno,loc:m,expected:A})}if(k[0]instanceof Array&&k.length>1)throw new Error("Parse Error: multiple actions possible at state: "+w+", token: "+_);switch(k[0]){case 1:n.push(_),i.push(p.yytext),a.push(p.yylloc),n.push(k[1]),_=null,x?(_=x,x=null):(u=p.yyleng,s=p.yytext,c=p.yylineno,m=p.yylloc,l>0&&l--);break;case 2:if(E=this.productions_[k[1]][1],M.$=i[i.length-E],M._$={first_line:a[a.length-(E||1)].first_line,last_line:a[a.length-1].last_line,first_column:a[a.length-(E||1)].first_column,last_column:a[a.length-1].last_column},v&&(M._$.range=[a[a.length-(E||1)].range[0],a[a.length-1].range[1]]),void 0!==(T=this.performAction.apply(M,[s,u,c,g.yy,k[1],i,a].concat(d))))return T;E&&(n=n.slice(0,-1*E*2),i=i.slice(0,-1*E),a=a.slice(0,-1*E)),n.push(this.productions_[k[1]][0]),i.push(M.$),a.push(M._$),S=o[n[n.length-2]][n[n.length-1]],n.push(S);break;case 3:return!0}}return!0}},U={EOF:1,parseError:function(t,e){if(!this.yy.parser)throw new Error(t);this.yy.parser.parseError(t,e)},setInput:function(t,e){return this.yy=e||this.yy||{},this._input=t,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var t=this._input[0];return this.yytext+=t,this.yyleng++,this.offset++,this.match+=t,this.matched+=t,t.match(/(?:\r\n?|\n).*/g)?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),t},unput:function(t){var e=t.length,n=t.split(/(?:\r\n?|\n)/g);this._input=t+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-e),this.offset-=e;var r=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),n.length-1&&(this.yylineno-=n.length-1);var i=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:n?(n.length===r.length?this.yylloc.first_column:0)+r[r.length-n.length].length-n[0].length:this.yylloc.first_column-e},this.options.ranges&&(this.yylloc.range=[i[0],i[0]+this.yyleng-e]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){return this.options.backtrack_lexer?(this._backtrack=!0,this):this.parseError("Lexical error on line "+(this.yylineno+1)+". 
You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},less:function(t){this.unput(this.match.slice(t))},pastInput:function(){var t=this.matched.substr(0,this.matched.length-this.match.length);return(t.length>20?"...":"")+t.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var t=this.match;return t.length<20&&(t+=this._input.substr(0,20-t.length)),(t.substr(0,20)+(t.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var t=this.pastInput(),e=new Array(t.length+1).join("-");return t+this.upcomingInput()+"\n"+e+"^"},test_match:function(t,e){var n,r,i;if(this.options.backtrack_lexer&&(i={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(i.yylloc.range=this.yylloc.range.slice(0))),(r=t[0].match(/(?:\r\n?|\n).*/g))&&(this.yylineno+=r.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:r?r[r.length-1].length-r[r.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+t[0].length},this.yytext+=t[0],this.match+=t[0],this.matches=t,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(t[0].length),this.matched+=t[0],n=this.performAction.call(this,this.yy,this,e,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),n)return n;if(this._backtrack){for(var a in i)this[a]=i[a];return!1}return!1},next:function(){if(this.done)return this.EOF;var t,e,n,r;this._input||(this.done=!0),this._more||(this.yytext="",this.match="");for(var i=this._currentRules(),a=0;ae[0].length)){if(e=n,r=a,this.options.backtrack_lexer){if(!1!==(t=this.test_match(n,i[a])))return t;if(this._backtrack){e=!1;continue}return!1}if(!this.options.flex)break}return e?!1!==(t=this.test_match(e,i[r]))&&t:""===this._input?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+". 
Unrecognized text.\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){return this.next()||this.lex()},begin:function(t){this.conditionStack.push(t)},popState:function(){return this.conditionStack.length-1>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(t){return(t=this.conditionStack.length-1-Math.abs(t||0))>=0?this.conditionStack[t]:"INITIAL"},pushState:function(t){this.begin(t)},stateStackSize:function(){return this.conditionStack.length},options:{},performAction:function(t,e,n,r){switch(n){case 0:return this.begin("open_directive"),19;case 1:return 8;case 2:return 9;case 3:return 10;case 4:return 11;case 5:return this.begin("type_directive"),20;case 6:return this.popState(),this.begin("arg_directive"),17;case 7:return this.popState(),this.popState(),22;case 8:return 21;case 9:case 10:case 19:case 26:break;case 11:return this.begin("acc_title"),38;case 12:return this.popState(),"acc_title_value";case 13:return this.begin("acc_descr"),40;case 14:return this.popState(),"acc_descr_value";case 15:this.begin("acc_descr_multiline");break;case 16:case 36:case 39:case 42:case 45:case 48:case 51:this.popState();break;case 17:return"acc_descr_multiline_value";case 18:return 16;case 20:case 21:return 23;case 22:return this.begin("struct"),45;case 23:return"EOF_IN_STRUCT";case 24:return"OPEN_IN_STRUCT";case 25:return this.popState(),47;case 27:return"MEMBER";case 28:return 43;case 29:return 69;case 30:return 62;case 31:return 63;case 32:return 65;case 33:return 48;case 34:return 49;case 35:this.begin("generic");break;case 37:return"GENERICTYPE";case 38:this.begin("string");break;case 40:return"STR";case 41:this.begin("bqstring");break;case 43:return"BQUOTE_STR";case 44:this.begin("href");break;case 46:return 68;case 47:this.begin("callback_name");break;case 49:this.popState(),this.begin("callback_args");break;case 50:return 66;case 52:return 67;case 53:case 54:case 55:case 56:return 64;case 57:case 58:return 57;case 59:case 60:return 59;case 61:return 58;case 62:return 56;case 63:return 60;case 64:return 61;case 65:return 32;case 66:return 44;case 67:return 81;case 68:return"DOT";case 69:return"PLUS";case 70:return 78;case 71:case 72:return"EQUALS";case 73:return 85;case 74:return"PUNCTUATION";case 75:return 84;case 76:return 83;case 77:return 80;case 78:return 
25}},rules:[/^(?:%%\{)/,/^(?:.*direction\s+TB[^\n]*)/,/^(?:.*direction\s+BT[^\n]*)/,/^(?:.*direction\s+RL[^\n]*)/,/^(?:.*direction\s+LR[^\n]*)/,/^(?:((?:(?!\}%%)[^:.])*))/,/^(?::)/,/^(?:\}%%)/,/^(?:((?:(?!\}%%).|\n)*))/,/^(?:%%(?!\{)*[^\n]*(\r?\n?)+)/,/^(?:%%[^\n]*(\r?\n)*)/,/^(?:accTitle\s*:\s*)/,/^(?:(?!\n||)*[^\n]*)/,/^(?:accDescr\s*:\s*)/,/^(?:(?!\n||)*[^\n]*)/,/^(?:accDescr\s*\{\s*)/,/^(?:[\}])/,/^(?:[^\}]*)/,/^(?:\s*(\r?\n)+)/,/^(?:\s+)/,/^(?:classDiagram-v2\b)/,/^(?:classDiagram\b)/,/^(?:[{])/,/^(?:$)/,/^(?:[{])/,/^(?:[}])/,/^(?:[\n])/,/^(?:[^{}\n]*)/,/^(?:class\b)/,/^(?:cssClass\b)/,/^(?:callback\b)/,/^(?:link\b)/,/^(?:click\b)/,/^(?:<<)/,/^(?:>>)/,/^(?:[~])/,/^(?:[~])/,/^(?:[^~]*)/,/^(?:["])/,/^(?:["])/,/^(?:[^"]*)/,/^(?:[`])/,/^(?:[`])/,/^(?:[^`]+)/,/^(?:href[\s]+["])/,/^(?:["])/,/^(?:[^"]*)/,/^(?:call[\s]+)/,/^(?:\([\s]*\))/,/^(?:\()/,/^(?:[^(]*)/,/^(?:\))/,/^(?:[^)]*)/,/^(?:_self\b)/,/^(?:_blank\b)/,/^(?:_parent\b)/,/^(?:_top\b)/,/^(?:\s*<\|)/,/^(?:\s*\|>)/,/^(?:\s*>)/,/^(?:\s*<)/,/^(?:\s*\*)/,/^(?:\s*o\b)/,/^(?:--)/,/^(?:\.\.)/,/^(?::{1}[^:\n;]+)/,/^(?::{3})/,/^(?:-)/,/^(?:\.)/,/^(?:\+)/,/^(?:%)/,/^(?:=)/,/^(?:=)/,/^(?:\w+)/,/^(?:[!"#$%&'*+,-.`?\\/])/,/^(?:[0-9]+)/,/^(?:[\u00AA\u00B5\u00BA\u00C0-\u00D6\u00D8-\u00F6]|[\u00F8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0370-\u0374\u0376\u0377]|[\u037A-\u037D\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5]|[\u03F7-\u0481\u048A-\u0527\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA]|[\u05F0-\u05F2\u0620-\u064A\u066E\u066F\u0671-\u06D3\u06D5\u06E5\u06E6\u06EE]|[\u06EF\u06FA-\u06FC\u06FF\u0710\u0712-\u072F\u074D-\u07A5\u07B1\u07CA-\u07EA]|[\u07F4\u07F5\u07FA\u0800-\u0815\u081A\u0824\u0828\u0840-\u0858\u08A0]|[\u08A2-\u08AC\u0904-\u0939\u093D\u0950\u0958-\u0961\u0971-\u0977]|[\u0979-\u097F\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2]|[\u09B6-\u09B9\u09BD\u09CE\u09DC\u09DD\u09DF-\u09E1\u09F0\u09F1\u0A05-\u0A0A]|[\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39]|[\u0A59-\u0A5C\u0A5E\u0A72-\u0A74\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8]|[\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABD\u0AD0\u0AE0\u0AE1\u0B05-\u0B0C]|[\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3D\u0B5C]|[\u0B5D\u0B5F-\u0B61\u0B71\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99]|[\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BD0]|[\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C33\u0C35-\u0C39\u0C3D]|[\u0C58\u0C59\u0C60\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3]|[\u0CB5-\u0CB9\u0CBD\u0CDE\u0CE0\u0CE1\u0CF1\u0CF2\u0D05-\u0D0C\u0D0E-\u0D10]|[\u0D12-\u0D3A\u0D3D\u0D4E\u0D60\u0D61\u0D7A-\u0D7F\u0D85-\u0D96\u0D9A-\u0DB1]|[\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0E01-\u0E30\u0E32\u0E33\u0E40-\u0E46\u0E81]|[\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3]|[\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB0\u0EB2\u0EB3\u0EBD\u0EC0-\u0EC4\u0EC6]|[\u0EDC-\u0EDF\u0F00\u0F40-\u0F47\u0F49-\u0F6C\u0F88-\u0F8C\u1000-\u102A]|[\u103F\u1050-\u1055\u105A-\u105D\u1061\u1065\u1066\u106E-\u1070\u1075-\u1081]|[\u108E\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D]|[\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0]|[\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310]|[\u1312-\u1315\u1318-\u135A\u1380-\u138F\u13A0-\u13F4\u1401-\u166C]|[\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u1700-\u170C\u170E-\u1711]|[\u1720-\u1731\u1740-\u1751\u1760-\u176C\u176E-\u1770\u1780-\u17B3\u17D7]|[\u17DC\u1820-\u1877\u1880-\u18A8\u18AA\u18B0-\u18F5\u1
900-\u191C]|[\u1950-\u196D\u1970-\u1974\u1980-\u19AB\u19C1-\u19C7\u1A00-\u1A16]|[\u1A20-\u1A54\u1AA7\u1B05-\u1B33\u1B45-\u1B4B\u1B83-\u1BA0\u1BAE\u1BAF]|[\u1BBA-\u1BE5\u1C00-\u1C23\u1C4D-\u1C4F\u1C5A-\u1C7D\u1CE9-\u1CEC]|[\u1CEE-\u1CF1\u1CF5\u1CF6\u1D00-\u1DBF\u1E00-\u1F15\u1F18-\u1F1D]|[\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D]|[\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3]|[\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2071\u207F]|[\u2090-\u209C\u2102\u2107\u210A-\u2113\u2115\u2119-\u211D\u2124\u2126\u2128]|[\u212A-\u212D\u212F-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2183\u2184]|[\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CEE\u2CF2\u2CF3]|[\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D80-\u2D96\u2DA0-\u2DA6]|[\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE]|[\u2DD0-\u2DD6\u2DD8-\u2DDE\u2E2F\u3005\u3006\u3031-\u3035\u303B\u303C]|[\u3041-\u3096\u309D-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D]|[\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FCC]|[\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA61F\uA62A\uA62B]|[\uA640-\uA66E\uA67F-\uA697\uA6A0-\uA6E5\uA717-\uA71F\uA722-\uA788]|[\uA78B-\uA78E\uA790-\uA793\uA7A0-\uA7AA\uA7F8-\uA801\uA803-\uA805]|[\uA807-\uA80A\uA80C-\uA822\uA840-\uA873\uA882-\uA8B3\uA8F2-\uA8F7\uA8FB]|[\uA90A-\uA925\uA930-\uA946\uA960-\uA97C\uA984-\uA9B2\uA9CF\uAA00-\uAA28]|[\uAA40-\uAA42\uAA44-\uAA4B\uAA60-\uAA76\uAA7A\uAA80-\uAAAF\uAAB1\uAAB5]|[\uAAB6\uAAB9-\uAABD\uAAC0\uAAC2\uAADB-\uAADD\uAAE0-\uAAEA\uAAF2-\uAAF4]|[\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E]|[\uABC0-\uABE2\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D]|[\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1F-\uFB28\uFB2A-\uFB36]|[\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D]|[\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE70-\uFE74\uFE76-\uFEFC]|[\uFF21-\uFF3A\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF]|[\uFFD2-\uFFD7\uFFDA-\uFFDC])/,/^(?:\s)/,/^(?:$)/],conditions:{acc_descr_multiline:{rules:[16,17],inclusive:!1},acc_descr:{rules:[14],inclusive:!1},acc_title:{rules:[12],inclusive:!1},arg_directive:{rules:[7,8],inclusive:!1},type_directive:{rules:[6,7],inclusive:!1},open_directive:{rules:[5],inclusive:!1},callback_args:{rules:[51,52],inclusive:!1},callback_name:{rules:[48,49,50],inclusive:!1},href:{rules:[45,46],inclusive:!1},struct:{rules:[23,24,25,26,27],inclusive:!1},generic:{rules:[36,37],inclusive:!1},bqstring:{rules:[42,43],inclusive:!1},string:{rules:[39,40],inclusive:!1},INITIAL:{rules:[0,1,2,3,4,9,10,11,13,15,18,19,20,21,22,28,29,30,31,32,33,34,35,38,41,44,47,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78],inclusive:!0}}};function z(){this.yy={}}return j.lexer=U,z.prototype=j,j.Parser=z,new z}();e.parser=r,e.Parser=r.Parser,e.parse=function(){return r.parse.apply(r,arguments)},e.main=function(t){t[1]||(console.log("Usage: "+t[0]+" FILE"),process.exit(1));var r=n(8218).readFileSync(n(6470).normalize(t[1]),"utf8");return e.parser.parse(r)},n.c[n.s]===t&&e.main(process.argv.slice(1))},5890:(t,e,n)=>{t=n.nmd(t);var r=function(){var t=function(t,e,n,r){for(n=n||{},r=t.length;r--;n[t[r]]=e);return 
n},e=[1,2],n=[1,5],r=[6,9,11,23,25,27,29,30,48],i=[1,17],a=[1,18],o=[1,19],s=[1,20],c=[1,21],u=[1,24],l=[1,29],h=[1,30],f=[1,31],d=[1,32],p=[1,44],g=[30,45,46],y=[4,6,9,11,23,25,27,29,30,48],m=[41,42,43,44],v=[22,36],b=[1,62],_={trace:function(){},yy:{},symbols_:{error:2,start:3,ER_DIAGRAM:4,document:5,EOF:6,directive:7,line:8,SPACE:9,statement:10,NEWLINE:11,openDirective:12,typeDirective:13,closeDirective:14,":":15,argDirective:16,entityName:17,relSpec:18,role:19,BLOCK_START:20,attributes:21,BLOCK_STOP:22,title:23,title_value:24,acc_title:25,acc_title_value:26,acc_descr:27,acc_descr_value:28,acc_descr_multiline_value:29,ALPHANUM:30,attribute:31,attributeType:32,attributeName:33,attributeKeyType:34,attributeComment:35,ATTRIBUTE_WORD:36,ATTRIBUTE_KEY:37,COMMENT:38,cardinality:39,relType:40,ZERO_OR_ONE:41,ZERO_OR_MORE:42,ONE_OR_MORE:43,ONLY_ONE:44,NON_IDENTIFYING:45,IDENTIFYING:46,WORD:47,open_directive:48,type_directive:49,arg_directive:50,close_directive:51,$accept:0,$end:1},terminals_:{2:"error",4:"ER_DIAGRAM",6:"EOF",9:"SPACE",11:"NEWLINE",15:":",20:"BLOCK_START",22:"BLOCK_STOP",23:"title",24:"title_value",25:"acc_title",26:"acc_title_value",27:"acc_descr",28:"acc_descr_value",29:"acc_descr_multiline_value",30:"ALPHANUM",36:"ATTRIBUTE_WORD",37:"ATTRIBUTE_KEY",38:"COMMENT",41:"ZERO_OR_ONE",42:"ZERO_OR_MORE",43:"ONE_OR_MORE",44:"ONLY_ONE",45:"NON_IDENTIFYING",46:"IDENTIFYING",47:"WORD",48:"open_directive",49:"type_directive",50:"arg_directive",51:"close_directive"},productions_:[0,[3,3],[3,2],[5,0],[5,2],[8,2],[8,1],[8,1],[8,1],[7,4],[7,6],[10,1],[10,5],[10,4],[10,3],[10,1],[10,2],[10,2],[10,2],[10,1],[17,1],[21,1],[21,2],[31,2],[31,3],[31,3],[31,4],[32,1],[33,1],[34,1],[35,1],[18,3],[39,1],[39,1],[39,1],[39,1],[40,1],[40,1],[19,1],[19,1],[12,1],[13,1],[16,1],[14,1]],performAction:function(t,e,n,r,i,a,o){var s=a.length-1;switch(i){case 1:break;case 3:case 7:case 8:this.$=[];break;case 4:a[s-1].push(a[s]),this.$=a[s-1];break;case 5:case 6:case 20:case 27:case 28:case 29:case 39:this.$=a[s];break;case 12:r.addEntity(a[s-4]),r.addEntity(a[s-2]),r.addRelationship(a[s-4],a[s],a[s-2],a[s-3]);break;case 13:r.addEntity(a[s-3]),r.addAttributes(a[s-3],a[s-1]);break;case 14:r.addEntity(a[s-2]);break;case 15:r.addEntity(a[s]);break;case 16:case 17:this.$=a[s].trim(),r.setTitle(this.$);break;case 18:case 19:this.$=a[s].trim(),r.setAccDescription(this.$);break;case 21:this.$=[a[s]];break;case 22:a[s].push(a[s-1]),this.$=a[s];break;case 23:this.$={attributeType:a[s-1],attributeName:a[s]};break;case 24:this.$={attributeType:a[s-2],attributeName:a[s-1],attributeKeyType:a[s]};break;case 25:this.$={attributeType:a[s-2],attributeName:a[s-1],attributeComment:a[s]};break;case 26:this.$={attributeType:a[s-3],attributeName:a[s-2],attributeKeyType:a[s-1],attributeComment:a[s]};break;case 30:case 38:this.$=a[s].replace(/"/g,"");break;case 31:this.$={cardA:a[s],relType:a[s-1],cardB:a[s-2]};break;case 32:this.$=r.Cardinality.ZERO_OR_ONE;break;case 33:this.$=r.Cardinality.ZERO_OR_MORE;break;case 34:this.$=r.Cardinality.ONE_OR_MORE;break;case 35:this.$=r.Cardinality.ONLY_ONE;break;case 36:this.$=r.Identification.NON_IDENTIFYING;break;case 37:this.$=r.Identification.IDENTIFYING;break;case 40:r.parseDirective("%%{","open_directive");break;case 41:r.parseDirective(a[s],"type_directive");break;case 42:a[s]=a[s].trim().replace(/'/g,'"'),r.parseDirective(a[s],"arg_directive");break;case 
43:r.parseDirective("}%%","close_directive","er")}},table:[{3:1,4:e,7:3,12:4,48:n},{1:[3]},t(r,[2,3],{5:6}),{3:7,4:e,7:3,12:4,48:n},{13:8,49:[1,9]},{49:[2,40]},{6:[1,10],7:15,8:11,9:[1,12],10:13,11:[1,14],12:4,17:16,23:i,25:a,27:o,29:s,30:c,48:n},{1:[2,2]},{14:22,15:[1,23],51:u},t([15,51],[2,41]),t(r,[2,8],{1:[2,1]}),t(r,[2,4]),{7:15,10:25,12:4,17:16,23:i,25:a,27:o,29:s,30:c,48:n},t(r,[2,6]),t(r,[2,7]),t(r,[2,11]),t(r,[2,15],{18:26,39:28,20:[1,27],41:l,42:h,43:f,44:d}),{24:[1,33]},{26:[1,34]},{28:[1,35]},t(r,[2,19]),t([6,9,11,15,20,23,25,27,29,30,41,42,43,44,48],[2,20]),{11:[1,36]},{16:37,50:[1,38]},{11:[2,43]},t(r,[2,5]),{17:39,30:c},{21:40,22:[1,41],31:42,32:43,36:p},{40:45,45:[1,46],46:[1,47]},t(g,[2,32]),t(g,[2,33]),t(g,[2,34]),t(g,[2,35]),t(r,[2,16]),t(r,[2,17]),t(r,[2,18]),t(y,[2,9]),{14:48,51:u},{51:[2,42]},{15:[1,49]},{22:[1,50]},t(r,[2,14]),{21:51,22:[2,21],31:42,32:43,36:p},{33:52,36:[1,53]},{36:[2,27]},{39:54,41:l,42:h,43:f,44:d},t(m,[2,36]),t(m,[2,37]),{11:[1,55]},{19:56,30:[1,58],47:[1,57]},t(r,[2,13]),{22:[2,22]},t(v,[2,23],{34:59,35:60,37:[1,61],38:b}),t([22,36,37,38],[2,28]),{30:[2,31]},t(y,[2,10]),t(r,[2,12]),t(r,[2,38]),t(r,[2,39]),t(v,[2,24],{35:63,38:b}),t(v,[2,25]),t([22,36,38],[2,29]),t(v,[2,30]),t(v,[2,26])],defaultActions:{5:[2,40],7:[2,2],24:[2,43],38:[2,42],44:[2,27],51:[2,22],54:[2,31]},parseError:function(t,e){if(!e.recoverable){var n=new Error(t);throw n.hash=e,n}this.trace(t)},parse:function(t){var e=this,n=[0],r=[],i=[null],a=[],o=this.table,s="",c=0,u=0,l=0,h=2,f=1,d=a.slice.call(arguments,1),p=Object.create(this.lexer),g={yy:{}};for(var y in this.yy)Object.prototype.hasOwnProperty.call(this.yy,y)&&(g.yy[y]=this.yy[y]);p.setInput(t,g.yy),g.yy.lexer=p,g.yy.parser=this,void 0===p.yylloc&&(p.yylloc={});var m=p.yylloc;a.push(m);var v=p.options&&p.options.ranges;function b(){var t;return"number"!=typeof(t=r.pop()||p.lex()||f)&&(t instanceof Array&&(t=(r=t).pop()),t=e.symbols_[t]||t),t}"function"==typeof g.yy.parseError?this.parseError=g.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;for(var _,x,w,k,T,C,E,S,A,M={};;){if(w=n[n.length-1],this.defaultActions[w]?k=this.defaultActions[w]:(null==_&&(_=b()),k=o[w]&&o[w][_]),void 0===k||!k.length||!k[0]){var N="";for(C in A=[],o[w])this.terminals_[C]&&C>h&&A.push("'"+this.terminals_[C]+"'");N=p.showPosition?"Parse error on line "+(c+1)+":\n"+p.showPosition()+"\nExpecting "+A.join(", ")+", got '"+(this.terminals_[_]||_)+"'":"Parse error on line "+(c+1)+": Unexpected "+(_==f?"end of input":"'"+(this.terminals_[_]||_)+"'"),this.parseError(N,{text:p.match,token:this.terminals_[_]||_,line:p.yylineno,loc:m,expected:A})}if(k[0]instanceof Array&&k.length>1)throw new Error("Parse Error: multiple actions possible at state: "+w+", token: "+_);switch(k[0]){case 1:n.push(_),i.push(p.yytext),a.push(p.yylloc),n.push(k[1]),_=null,x?(_=x,x=null):(u=p.yyleng,s=p.yytext,c=p.yylineno,m=p.yylloc,l>0&&l--);break;case 2:if(E=this.productions_[k[1]][1],M.$=i[i.length-E],M._$={first_line:a[a.length-(E||1)].first_line,last_line:a[a.length-1].last_line,first_column:a[a.length-(E||1)].first_column,last_column:a[a.length-1].last_column},v&&(M._$.range=[a[a.length-(E||1)].range[0],a[a.length-1].range[1]]),void 0!==(T=this.performAction.apply(M,[s,u,c,g.yy,k[1],i,a].concat(d))))return T;E&&(n=n.slice(0,-1*E*2),i=i.slice(0,-1*E),a=a.slice(0,-1*E)),n.push(this.productions_[k[1]][0]),i.push(M.$),a.push(M._$),S=o[n[n.length-2]][n[n.length-1]],n.push(S);break;case 
3:return!0}}return!0}},x={EOF:1,parseError:function(t,e){if(!this.yy.parser)throw new Error(t);this.yy.parser.parseError(t,e)},setInput:function(t,e){return this.yy=e||this.yy||{},this._input=t,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var t=this._input[0];return this.yytext+=t,this.yyleng++,this.offset++,this.match+=t,this.matched+=t,t.match(/(?:\r\n?|\n).*/g)?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),t},unput:function(t){var e=t.length,n=t.split(/(?:\r\n?|\n)/g);this._input=t+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-e),this.offset-=e;var r=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),n.length-1&&(this.yylineno-=n.length-1);var i=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:n?(n.length===r.length?this.yylloc.first_column:0)+r[r.length-n.length].length-n[0].length:this.yylloc.first_column-e},this.options.ranges&&(this.yylloc.range=[i[0],i[0]+this.yyleng-e]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){return this.options.backtrack_lexer?(this._backtrack=!0,this):this.parseError("Lexical error on line "+(this.yylineno+1)+". You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},less:function(t){this.unput(this.match.slice(t))},pastInput:function(){var t=this.matched.substr(0,this.matched.length-this.match.length);return(t.length>20?"...":"")+t.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var t=this.match;return t.length<20&&(t+=this._input.substr(0,20-t.length)),(t.substr(0,20)+(t.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var t=this.pastInput(),e=new Array(t.length+1).join("-");return t+this.upcomingInput()+"\n"+e+"^"},test_match:function(t,e){var n,r,i;if(this.options.backtrack_lexer&&(i={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(i.yylloc.range=this.yylloc.range.slice(0))),(r=t[0].match(/(?:\r\n?|\n).*/g))&&(this.yylineno+=r.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:r?r[r.length-1].length-r[r.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+t[0].length},this.yytext+=t[0],this.match+=t[0],this.matches=t,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(t[0].length),this.matched+=t[0],n=this.performAction.call(this,this.yy,this,e,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),n)return 
n;if(this._backtrack){for(var a in i)this[a]=i[a];return!1}return!1},next:function(){if(this.done)return this.EOF;var t,e,n,r;this._input||(this.done=!0),this._more||(this.yytext="",this.match="");for(var i=this._currentRules(),a=0;ae[0].length)){if(e=n,r=a,this.options.backtrack_lexer){if(!1!==(t=this.test_match(n,i[a])))return t;if(this._backtrack){e=!1;continue}return!1}if(!this.options.flex)break}return e?!1!==(t=this.test_match(e,i[r]))&&t:""===this._input?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+". Unrecognized text.\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){return this.next()||this.lex()},begin:function(t){this.conditionStack.push(t)},popState:function(){return this.conditionStack.length-1>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(t){return(t=this.conditionStack.length-1-Math.abs(t||0))>=0?this.conditionStack[t]:"INITIAL"},pushState:function(t){this.begin(t)},stateStackSize:function(){return this.conditionStack.length},options:{"case-insensitive":!0},performAction:function(t,e,n,r){switch(n){case 0:return this.begin("acc_title"),25;case 1:return this.popState(),"acc_title_value";case 2:return this.begin("acc_descr"),27;case 3:return this.popState(),"acc_descr_value";case 4:this.begin("acc_descr_multiline");break;case 5:this.popState();break;case 6:return"acc_descr_multiline_value";case 7:return this.begin("open_directive"),48;case 8:return this.begin("type_directive"),49;case 9:return this.popState(),this.begin("arg_directive"),15;case 10:return this.popState(),this.popState(),51;case 11:return 50;case 12:case 13:case 15:case 20:case 24:break;case 14:return 11;case 16:return 9;case 17:return 47;case 18:return 4;case 19:return this.begin("block"),20;case 21:return 37;case 22:return 36;case 23:return 38;case 25:return this.popState(),22;case 26:case 39:return e.yytext[0];case 27:case 31:return 41;case 28:case 32:return 42;case 29:case 33:return 43;case 30:return 44;case 34:case 36:case 37:return 45;case 35:return 46;case 38:return 30;case 40:return 6}},rules:[/^(?:accTitle\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*\{\s*)/i,/^(?:[\}])/i,/^(?:[^\}]*)/i,/^(?:%%\{)/i,/^(?:((?:(?!\}%%)[^:.])*))/i,/^(?::)/i,/^(?:\}%%)/i,/^(?:((?:(?!\}%%).|\n)*))/i,/^(?:%(?!\{)[^\n]*)/i,/^(?:[^\}]%%[^\n]*)/i,/^(?:[\n]+)/i,/^(?:\s+)/i,/^(?:[\s]+)/i,/^(?:"[^"]*")/i,/^(?:erDiagram\b)/i,/^(?:\{)/i,/^(?:\s+)/i,/^(?:(?:PK)|(?:FK))/i,/^(?:[A-Za-z][A-Za-z0-9\-_]*)/i,/^(?:"[^"]*")/i,/^(?:[\n]+)/i,/^(?:\})/i,/^(?:.)/i,/^(?:\|o\b)/i,/^(?:\}o\b)/i,/^(?:\}\|)/i,/^(?:\|\|)/i,/^(?:o\|)/i,/^(?:o\{)/i,/^(?:\|\{)/i,/^(?:\.\.)/i,/^(?:--)/i,/^(?:\.-)/i,/^(?:-\.)/i,/^(?:[A-Za-z][A-Za-z0-9\-_]*)/i,/^(?:.)/i,/^(?:$)/i],conditions:{acc_descr_multiline:{rules:[5,6],inclusive:!1},acc_descr:{rules:[3],inclusive:!1},acc_title:{rules:[1],inclusive:!1},open_directive:{rules:[8],inclusive:!1},type_directive:{rules:[9,10],inclusive:!1},arg_directive:{rules:[10,11],inclusive:!1},block:{rules:[20,21,22,23,24,25,26],inclusive:!1},INITIAL:{rules:[0,2,4,7,12,13,14,15,16,17,18,19,27,28,29,30,31,32,33,34,35,36,37,38,39,40],inclusive:!0}}};function w(){this.yy={}}return _.lexer=x,w.prototype=_,_.Parser=w,new w}();e.parser=r,e.Parser=r.Parser,e.parse=function(){return 
r.parse.apply(r,arguments)},e.main=function(t){t[1]||(console.log("Usage: "+t[0]+" FILE"),process.exit(1));var r=n(8009).readFileSync(n(6470).normalize(t[1]),"utf8");return e.parser.parse(r)},n.c[n.s]===t&&e.main(process.argv.slice(1))},3602:(t,e,n)=>{t=n.nmd(t);var r=function(){var t=function(t,e,n,r){for(n=n||{},r=t.length;r--;n[t[r]]=e);return n},e=[1,9],n=[1,7],r=[1,6],i=[1,8],a=[1,20,21,22,23,38,44,46,48,52,66,67,86,87,88,89,90,91,95,105,106,109,111,112,118,119,120,121,122,123,124,125,126,127],o=[2,10],s=[1,20],c=[1,21],u=[1,22],l=[1,23],h=[1,30],f=[1,32],d=[1,33],p=[1,34],g=[1,62],y=[1,48],m=[1,52],v=[1,36],b=[1,37],_=[1,38],x=[1,39],w=[1,40],k=[1,56],T=[1,63],C=[1,51],E=[1,53],S=[1,55],A=[1,59],M=[1,60],N=[1,41],D=[1,42],B=[1,43],L=[1,44],O=[1,61],I=[1,50],R=[1,54],F=[1,57],P=[1,58],Y=[1,49],j=[1,66],U=[1,71],z=[1,20,21,22,23,38,42,44,46,48,52,66,67,86,87,88,89,90,91,95,105,106,109,111,112,118,119,120,121,122,123,124,125,126,127],$=[1,75],q=[1,74],H=[1,76],W=[20,21,23,81,82],V=[1,99],G=[1,104],X=[1,107],Z=[1,108],Q=[1,101],K=[1,106],J=[1,109],tt=[1,102],et=[1,114],nt=[1,113],rt=[1,103],it=[1,105],at=[1,110],ot=[1,111],st=[1,112],ct=[1,115],ut=[20,21,22,23,81,82],lt=[20,21,22,23,53,81,82],ht=[20,21,22,23,40,52,53,55,57,59,61,63,65,66,67,69,71,73,74,76,81,82,91,95,105,106,109,111,112,122,123,124,125,126,127],ft=[20,21,23],dt=[20,21,23,52,66,67,81,82,91,95,105,106,109,111,112,122,123,124,125,126,127],pt=[1,12,20,21,22,23,24,38,42,44,46,48,52,66,67,86,87,88,89,90,91,95,105,106,109,111,112,118,119,120,121,122,123,124,125,126,127],gt=[52,66,67,91,95,105,106,109,111,112,122,123,124,125,126,127],yt=[1,149],mt=[1,157],vt=[1,158],bt=[1,159],_t=[1,160],xt=[1,144],wt=[1,145],kt=[1,141],Tt=[1,152],Ct=[1,153],Et=[1,154],St=[1,155],At=[1,156],Mt=[1,161],Nt=[1,162],Dt=[1,147],Bt=[1,150],Lt=[1,146],Ot=[1,143],It=[20,21,22,23,38,42,44,46,48,52,66,67,86,87,88,89,90,91,95,105,106,109,111,112,118,119,120,121,122,123,124,125,126,127],Rt=[1,165],Ft=[20,21,22,23,26,52,66,67,91,105,106,109,111,112,122,123,124,125,126,127],Pt=[20,21,22,23,24,26,38,40,41,42,52,56,58,60,62,64,66,67,68,70,72,73,75,77,81,82,86,87,88,89,90,91,92,95,105,106,109,111,112,113,114,122,123,124,125,126,127],Yt=[12,21,22,24],jt=[22,106],Ut=[1,250],zt=[1,245],$t=[1,246],qt=[1,254],Ht=[1,251],Wt=[1,248],Vt=[1,247],Gt=[1,249],Xt=[1,252],Zt=[1,253],Qt=[1,255],Kt=[1,273],Jt=[20,21,23,106],te=[20,21,22,23,66,67,86,102,105,106,109,110,111,112,113],ee={trace:function(){},yy:{},symbols_:{error:2,start:3,mermaidDoc:4,directive:5,openDirective:6,typeDirective:7,closeDirective:8,separator:9,":":10,argDirective:11,open_directive:12,type_directive:13,arg_directive:14,close_directive:15,graphConfig:16,document:17,line:18,statement:19,SEMI:20,NEWLINE:21,SPACE:22,EOF:23,GRAPH:24,NODIR:25,DIR:26,FirstStmtSeperator:27,ending:28,endToken:29,spaceList:30,spaceListNewline:31,verticeStatement:32,styleStatement:33,linkStyleStatement:34,classDefStatement:35,classStatement:36,clickStatement:37,subgraph:38,text:39,SQS:40,SQE:41,end:42,direction:43,acc_title:44,acc_title_value:45,acc_descr:46,acc_descr_value:47,acc_descr_multiline_value:48,link:49,node:50,vertex:51,AMP:52,STYLE_SEPARATOR:53,idString:54,DOUBLECIRCLESTART:55,DOUBLECIRCLEEND:56,PS:57,PE:58,"(-":59,"-)":60,STADIUMSTART:61,STADIUMEND:62,SUBROUTINESTART:63,SUBROUTINEEND:64,VERTEX_WITH_PROPS_START:65,ALPHA:66,COLON:67,PIPE:68,CYLINDERSTART:69,CYLINDEREND:70,DIAMOND_START:71,DIAMOND_STOP:72,TAGEND:73,TRAPSTART:74,TRAPEND:75,INVTRAPSTART:76,INVTRAPEND:77,linkStatement:78,arrowText:79,TESTSTR:80,START_LINK:81
,LINK:82,textToken:83,STR:84,keywords:85,STYLE:86,LINKSTYLE:87,CLASSDEF:88,CLASS:89,CLICK:90,DOWN:91,UP:92,textNoTags:93,textNoTagsToken:94,DEFAULT:95,stylesOpt:96,alphaNum:97,CALLBACKNAME:98,CALLBACKARGS:99,HREF:100,LINK_TARGET:101,HEX:102,numList:103,INTERPOLATE:104,NUM:105,COMMA:106,style:107,styleComponent:108,MINUS:109,UNIT:110,BRKT:111,DOT:112,PCT:113,TAGSTART:114,alphaNumToken:115,idStringToken:116,alphaNumStatement:117,direction_tb:118,direction_bt:119,direction_rl:120,direction_lr:121,PUNCTUATION:122,UNICODE_TEXT:123,PLUS:124,EQUALS:125,MULT:126,UNDERSCORE:127,graphCodeTokens:128,ARROW_CROSS:129,ARROW_POINT:130,ARROW_CIRCLE:131,ARROW_OPEN:132,QUOTE:133,$accept:0,$end:1},terminals_:{2:"error",10:":",12:"open_directive",13:"type_directive",14:"arg_directive",15:"close_directive",20:"SEMI",21:"NEWLINE",22:"SPACE",23:"EOF",24:"GRAPH",25:"NODIR",26:"DIR",38:"subgraph",40:"SQS",41:"SQE",42:"end",44:"acc_title",45:"acc_title_value",46:"acc_descr",47:"acc_descr_value",48:"acc_descr_multiline_value",52:"AMP",53:"STYLE_SEPARATOR",55:"DOUBLECIRCLESTART",56:"DOUBLECIRCLEEND",57:"PS",58:"PE",59:"(-",60:"-)",61:"STADIUMSTART",62:"STADIUMEND",63:"SUBROUTINESTART",64:"SUBROUTINEEND",65:"VERTEX_WITH_PROPS_START",66:"ALPHA",67:"COLON",68:"PIPE",69:"CYLINDERSTART",70:"CYLINDEREND",71:"DIAMOND_START",72:"DIAMOND_STOP",73:"TAGEND",74:"TRAPSTART",75:"TRAPEND",76:"INVTRAPSTART",77:"INVTRAPEND",80:"TESTSTR",81:"START_LINK",82:"LINK",84:"STR",86:"STYLE",87:"LINKSTYLE",88:"CLASSDEF",89:"CLASS",90:"CLICK",91:"DOWN",92:"UP",95:"DEFAULT",98:"CALLBACKNAME",99:"CALLBACKARGS",100:"HREF",101:"LINK_TARGET",102:"HEX",104:"INTERPOLATE",105:"NUM",106:"COMMA",109:"MINUS",110:"UNIT",111:"BRKT",112:"DOT",113:"PCT",114:"TAGSTART",118:"direction_tb",119:"direction_bt",120:"direction_rl",121:"direction_lr",122:"PUNCTUATION",123:"UNICODE_TEXT",124:"PLUS",125:"EQUALS",126:"MULT",127:"UNDERSCORE",129:"ARROW_CROSS",130:"ARROW_POINT",131:"ARROW_CIRCLE",132:"ARROW_OPEN",133:"QUOTE"},productions_:[0,[3,1],[3,2],[5,4],[5,6],[6,1],[7,1],[11,1],[8,1],[4,2],[17,0],[17,2],[18,1],[18,1],[18,1],[18,1],[18,1],[16,2],[16,2],[16,2],[16,3],[28,2],[28,1],[29,1],[29,1],[29,1],[27,1],[27,1],[27,2],[31,2],[31,2],[31,1],[31,1],[30,2],[30,1],[19,2],[19,2],[19,2],[19,2],[19,2],[19,2],[19,9],[19,6],[19,4],[19,1],[19,2],[19,2],[19,1],[9,1],[9,1],[9,1],[32,3],[32,4],[32,2],[32,1],[50,1],[50,5],[50,3],[51,4],[51,4],[51,6],[51,4],[51,4],[51,4],[51,8],[51,4],[51,4],[51,4],[51,6],[51,4],[51,4],[51,4],[51,4],[51,4],[51,1],[49,2],[49,3],[49,3],[49,1],[49,3],[78,1],[79,3],[39,1],[39,2],[39,1],[85,1],[85,1],[85,1],[85,1],[85,1],[85,1],[85,1],[85,1],[85,1],[85,1],[85,1],[93,1],[93,2],[35,5],[35,5],[36,5],[37,2],[37,4],[37,3],[37,5],[37,2],[37,4],[37,4],[37,6],[37,2],[37,4],[37,2],[37,4],[37,4],[37,6],[33,5],[33,5],[34,5],[34,5],[34,9],[34,9],[34,7],[34,7],[103,1],[103,3],[96,1],[96,3],[107,1],[107,2],[108,1],[108,1],[108,1],[108,1],[108,1],[108,1],[108,1],[108,1],[108,1],[108,1],[108,1],[83,1],[83,1],[83,1],[83,1],[83,1],[83,1],[94,1],[94,1],[94,1],[94,1],[54,1],[54,2],[97,1],[97,2],[117,1],[117,1],[117,1],[117,1],[43,1],[43,1],[43,1],[43,1],[115,1],[115,1],[115,1],[115,1],[115,1],[115,1],[115,1],[115,1],[115,1],[115,1],[115,1],[115,1],[115,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[1
28,1],[128,1]],performAction:function(t,e,n,r,i,a,o){var s=a.length-1;switch(i){case 5:r.parseDirective("%%{","open_directive");break;case 6:r.parseDirective(a[s],"type_directive");break;case 7:a[s]=a[s].trim().replace(/'/g,'"'),r.parseDirective(a[s],"arg_directive");break;case 8:r.parseDirective("}%%","close_directive","flowchart");break;case 10:case 36:case 37:case 38:case 39:case 40:this.$=[];break;case 11:a[s]!==[]&&a[s-1].push(a[s]),this.$=a[s-1];break;case 12:case 82:case 84:case 96:case 152:case 154:case 155:case 78:case 150:this.$=a[s];break;case 19:r.setDirection("TB"),this.$="TB";break;case 20:r.setDirection(a[s-1]),this.$=a[s-1];break;case 35:this.$=a[s-1].nodes;break;case 41:this.$=r.addSubGraph(a[s-6],a[s-1],a[s-4]);break;case 42:this.$=r.addSubGraph(a[s-3],a[s-1],a[s-3]);break;case 43:this.$=r.addSubGraph(void 0,a[s-1],void 0);break;case 45:this.$=a[s].trim(),r.setTitle(this.$);break;case 46:case 47:this.$=a[s].trim(),r.setAccDescription(this.$);break;case 51:r.addLink(a[s-2].stmt,a[s],a[s-1]),this.$={stmt:a[s],nodes:a[s].concat(a[s-2].nodes)};break;case 52:r.addLink(a[s-3].stmt,a[s-1],a[s-2]),this.$={stmt:a[s-1],nodes:a[s-1].concat(a[s-3].nodes)};break;case 53:this.$={stmt:a[s-1],nodes:a[s-1]};break;case 54:this.$={stmt:a[s],nodes:a[s]};break;case 55:case 123:case 125:this.$=[a[s]];break;case 56:this.$=a[s-4].concat(a[s]);break;case 57:this.$=[a[s-2]],r.setClass(a[s-2],a[s]);break;case 58:this.$=a[s-3],r.addVertex(a[s-3],a[s-1],"square");break;case 59:this.$=a[s-3],r.addVertex(a[s-3],a[s-1],"doublecircle");break;case 60:this.$=a[s-5],r.addVertex(a[s-5],a[s-2],"circle");break;case 61:this.$=a[s-3],r.addVertex(a[s-3],a[s-1],"ellipse");break;case 62:this.$=a[s-3],r.addVertex(a[s-3],a[s-1],"stadium");break;case 63:this.$=a[s-3],r.addVertex(a[s-3],a[s-1],"subroutine");break;case 64:this.$=a[s-7],r.addVertex(a[s-7],a[s-1],"rect",void 0,void 0,void 0,Object.fromEntries([[a[s-5],a[s-3]]]));break;case 65:this.$=a[s-3],r.addVertex(a[s-3],a[s-1],"cylinder");break;case 66:this.$=a[s-3],r.addVertex(a[s-3],a[s-1],"round");break;case 67:this.$=a[s-3],r.addVertex(a[s-3],a[s-1],"diamond");break;case 68:this.$=a[s-5],r.addVertex(a[s-5],a[s-2],"hexagon");break;case 69:this.$=a[s-3],r.addVertex(a[s-3],a[s-1],"odd");break;case 70:this.$=a[s-3],r.addVertex(a[s-3],a[s-1],"trapezoid");break;case 71:this.$=a[s-3],r.addVertex(a[s-3],a[s-1],"inv_trapezoid");break;case 72:this.$=a[s-3],r.addVertex(a[s-3],a[s-1],"lean_right");break;case 73:this.$=a[s-3],r.addVertex(a[s-3],a[s-1],"lean_left");break;case 74:this.$=a[s],r.addVertex(a[s]);break;case 75:a[s-1].text=a[s],this.$=a[s-1];break;case 76:case 77:a[s-2].text=a[s-1],this.$=a[s-2];break;case 79:var c=r.destructLink(a[s],a[s-2]);this.$={type:c.type,stroke:c.stroke,length:c.length,text:a[s-1]};break;case 80:c=r.destructLink(a[s]),this.$={type:c.type,stroke:c.stroke,length:c.length};break;case 81:this.$=a[s-1];break;case 83:case 97:case 153:case 151:this.$=a[s-1]+""+a[s];break;case 98:case 99:this.$=a[s-4],r.addClass(a[s-2],a[s]);break;case 100:this.$=a[s-4],r.setClass(a[s-2],a[s]);break;case 101:case 109:this.$=a[s-1],r.setClickEvent(a[s-1],a[s]);break;case 102:case 110:this.$=a[s-3],r.setClickEvent(a[s-3],a[s-2]),r.setTooltip(a[s-3],a[s]);break;case 103:this.$=a[s-2],r.setClickEvent(a[s-2],a[s-1],a[s]);break;case 104:this.$=a[s-4],r.setClickEvent(a[s-4],a[s-3],a[s-2]),r.setTooltip(a[s-4],a[s]);break;case 105:case 111:this.$=a[s-1],r.setLink(a[s-1],a[s]);break;case 106:case 112:this.$=a[s-3],r.setLink(a[s-3],a[s-2]),r.setTooltip(a[s-3],a[s]);break;case 
107:case 113:this.$=a[s-3],r.setLink(a[s-3],a[s-2],a[s]);break;case 108:case 114:this.$=a[s-5],r.setLink(a[s-5],a[s-4],a[s]),r.setTooltip(a[s-5],a[s-2]);break;case 115:this.$=a[s-4],r.addVertex(a[s-2],void 0,void 0,a[s]);break;case 116:case 118:this.$=a[s-4],r.updateLink(a[s-2],a[s]);break;case 117:this.$=a[s-4],r.updateLink([a[s-2]],a[s]);break;case 119:this.$=a[s-8],r.updateLinkInterpolate([a[s-6]],a[s-2]),r.updateLink([a[s-6]],a[s]);break;case 120:this.$=a[s-8],r.updateLinkInterpolate(a[s-6],a[s-2]),r.updateLink(a[s-6],a[s]);break;case 121:this.$=a[s-6],r.updateLinkInterpolate([a[s-4]],a[s]);break;case 122:this.$=a[s-6],r.updateLinkInterpolate(a[s-4],a[s]);break;case 124:case 126:a[s-2].push(a[s]),this.$=a[s-2];break;case 128:this.$=a[s-1]+a[s];break;case 156:this.$="v";break;case 157:this.$="-";break;case 158:this.$={stmt:"dir",value:"TB"};break;case 159:this.$={stmt:"dir",value:"BT"};break;case 160:this.$={stmt:"dir",value:"RL"};break;case 161:this.$={stmt:"dir",value:"LR"}}},table:[{3:1,4:2,5:3,6:5,12:e,16:4,21:n,22:r,24:i},{1:[3]},{1:[2,1]},{3:10,4:2,5:3,6:5,12:e,16:4,21:n,22:r,24:i},t(a,o,{17:11}),{7:12,13:[1,13]},{16:14,21:n,22:r,24:i},{16:15,21:n,22:r,24:i},{25:[1,16],26:[1,17]},{13:[2,5]},{1:[2,2]},{1:[2,9],18:18,19:19,20:s,21:c,22:u,23:l,32:24,33:25,34:26,35:27,36:28,37:29,38:h,43:31,44:f,46:d,48:p,50:35,51:45,52:g,54:46,66:y,67:m,86:v,87:b,88:_,89:x,90:w,91:k,95:T,105:C,106:E,109:S,111:A,112:M,116:47,118:N,119:D,120:B,121:L,122:O,123:I,124:R,125:F,126:P,127:Y},{8:64,10:[1,65],15:j},t([10,15],[2,6]),t(a,[2,17]),t(a,[2,18]),t(a,[2,19]),{20:[1,68],21:[1,69],22:U,27:67,30:70},t(z,[2,11]),t(z,[2,12]),t(z,[2,13]),t(z,[2,14]),t(z,[2,15]),t(z,[2,16]),{9:72,20:$,21:q,23:H,49:73,78:77,81:[1,78],82:[1,79]},{9:80,20:$,21:q,23:H},{9:81,20:$,21:q,23:H},{9:82,20:$,21:q,23:H},{9:83,20:$,21:q,23:H},{9:84,20:$,21:q,23:H},{9:86,20:$,21:q,22:[1,85],23:H},t(z,[2,44]),{45:[1,87]},{47:[1,88]},t(z,[2,47]),t(W,[2,54],{30:89,22:U}),{22:[1,90]},{22:[1,91]},{22:[1,92]},{22:[1,93]},{26:V,52:G,66:X,67:Z,84:[1,97],91:Q,97:96,98:[1,94],100:[1,95],105:K,106:J,109:tt,111:et,112:nt,115:100,117:98,122:rt,123:it,124:at,125:ot,126:st,127:ct},t(z,[2,158]),t(z,[2,159]),t(z,[2,160]),t(z,[2,161]),t(ut,[2,55],{53:[1,116]}),t(lt,[2,74],{116:129,40:[1,117],52:g,55:[1,118],57:[1,119],59:[1,120],61:[1,121],63:[1,122],65:[1,123],66:y,67:m,69:[1,124],71:[1,125],73:[1,126],74:[1,127],76:[1,128],91:k,95:T,105:C,106:E,109:S,111:A,112:M,122:O,123:I,124:R,125:F,126:P,127:Y}),t(ht,[2,150]),t(ht,[2,175]),t(ht,[2,176]),t(ht,[2,177]),t(ht,[2,178]),t(ht,[2,179]),t(ht,[2,180]),t(ht,[2,181]),t(ht,[2,182]),t(ht,[2,183]),t(ht,[2,184]),t(ht,[2,185]),t(ht,[2,186]),t(ht,[2,187]),t(ht,[2,188]),t(ht,[2,189]),t(ht,[2,190]),{9:130,20:$,21:q,23:H},{11:131,14:[1,132]},t(ft,[2,8]),t(a,[2,20]),t(a,[2,26]),t(a,[2,27]),{21:[1,133]},t(dt,[2,34],{30:134,22:U}),t(z,[2,35]),{50:135,51:45,52:g,54:46,66:y,67:m,91:k,95:T,105:C,106:E,109:S,111:A,112:M,116:47,122:O,123:I,124:R,125:F,126:P,127:Y},t(pt,[2,48]),t(pt,[2,49]),t(pt,[2,50]),t(gt,[2,78],{79:136,68:[1,138],80:[1,137]}),{22:yt,24:mt,26:vt,38:bt,39:139,42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:140,84:kt,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},t([52,66,67,68,80,91,95,105,106,109,111,112,122,123,124,125,126,127],[2,80]),t(z,[2,36]),t(z,[2,37]),t(z,[2,38]),t(z,[2,39]),t(z,[2,40]),{22:yt,24:mt,26:vt,38:bt,39:163,42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:140,84:kt,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:
Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},t(It,o,{17:164}),t(z,[2,45]),t(z,[2,46]),t(W,[2,53],{52:Rt}),{26:V,52:G,66:X,67:Z,91:Q,97:166,102:[1,167],105:K,106:J,109:tt,111:et,112:nt,115:100,117:98,122:rt,123:it,124:at,125:ot,126:st,127:ct},{95:[1,168],103:169,105:[1,170]},{26:V,52:G,66:X,67:Z,91:Q,95:[1,171],97:172,105:K,106:J,109:tt,111:et,112:nt,115:100,117:98,122:rt,123:it,124:at,125:ot,126:st,127:ct},{26:V,52:G,66:X,67:Z,91:Q,97:173,105:K,106:J,109:tt,111:et,112:nt,115:100,117:98,122:rt,123:it,124:at,125:ot,126:st,127:ct},t(ft,[2,101],{22:[1,174],99:[1,175]}),t(ft,[2,105],{22:[1,176]}),t(ft,[2,109],{115:100,117:178,22:[1,177],26:V,52:G,66:X,67:Z,91:Q,105:K,106:J,109:tt,111:et,112:nt,122:rt,123:it,124:at,125:ot,126:st,127:ct}),t(ft,[2,111],{22:[1,179]}),t(Ft,[2,152]),t(Ft,[2,154]),t(Ft,[2,155]),t(Ft,[2,156]),t(Ft,[2,157]),t(Pt,[2,162]),t(Pt,[2,163]),t(Pt,[2,164]),t(Pt,[2,165]),t(Pt,[2,166]),t(Pt,[2,167]),t(Pt,[2,168]),t(Pt,[2,169]),t(Pt,[2,170]),t(Pt,[2,171]),t(Pt,[2,172]),t(Pt,[2,173]),t(Pt,[2,174]),{52:g,54:180,66:y,67:m,91:k,95:T,105:C,106:E,109:S,111:A,112:M,116:47,122:O,123:I,124:R,125:F,126:P,127:Y},{22:yt,24:mt,26:vt,38:bt,39:181,42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:140,84:kt,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,39:182,42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:140,84:kt,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,39:184,42:_t,52:G,57:[1,183],66:X,67:Z,73:xt,81:wt,83:140,84:kt,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,39:185,42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:140,84:kt,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,39:186,42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:140,84:kt,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,39:187,42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:140,84:kt,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{66:[1,188]},{22:yt,24:mt,26:vt,38:bt,39:189,42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:140,84:kt,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,39:190,42:_t,52:G,66:X,67:Z,71:[1,191],73:xt,81:wt,83:140,84:kt,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,39:192,42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:140,84:kt,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,39:193,42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:140,84:kt,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,11
3:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,39:194,42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:140,84:kt,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},t(ht,[2,151]),t(Yt,[2,3]),{8:195,15:j},{15:[2,7]},t(a,[2,28]),t(dt,[2,33]),t(W,[2,51],{30:196,22:U}),t(gt,[2,75],{22:[1,197]}),{22:[1,198]},{22:yt,24:mt,26:vt,38:bt,39:199,42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:140,84:kt,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,42:_t,52:G,66:X,67:Z,73:xt,81:wt,82:[1,200],83:201,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},t(Pt,[2,82]),t(Pt,[2,84]),t(Pt,[2,140]),t(Pt,[2,141]),t(Pt,[2,142]),t(Pt,[2,143]),t(Pt,[2,144]),t(Pt,[2,145]),t(Pt,[2,146]),t(Pt,[2,147]),t(Pt,[2,148]),t(Pt,[2,149]),t(Pt,[2,85]),t(Pt,[2,86]),t(Pt,[2,87]),t(Pt,[2,88]),t(Pt,[2,89]),t(Pt,[2,90]),t(Pt,[2,91]),t(Pt,[2,92]),t(Pt,[2,93]),t(Pt,[2,94]),t(Pt,[2,95]),{9:203,20:$,21:q,22:yt,23:H,24:mt,26:vt,38:bt,40:[1,202],42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:201,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{18:18,19:19,20:s,21:c,22:u,23:l,32:24,33:25,34:26,35:27,36:28,37:29,38:h,42:[1,204],43:31,44:f,46:d,48:p,50:35,51:45,52:g,54:46,66:y,67:m,86:v,87:b,88:_,89:x,90:w,91:k,95:T,105:C,106:E,109:S,111:A,112:M,116:47,118:N,119:D,120:B,121:L,122:O,123:I,124:R,125:F,126:P,127:Y},{22:U,30:205},{22:[1,206],26:V,52:G,66:X,67:Z,91:Q,105:K,106:J,109:tt,111:et,112:nt,115:100,117:178,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:[1,207]},{22:[1,208]},{22:[1,209],106:[1,210]},t(jt,[2,123]),{22:[1,211]},{22:[1,212],26:V,52:G,66:X,67:Z,91:Q,105:K,106:J,109:tt,111:et,112:nt,115:100,117:178,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:[1,213],26:V,52:G,66:X,67:Z,91:Q,105:K,106:J,109:tt,111:et,112:nt,115:100,117:178,122:rt,123:it,124:at,125:ot,126:st,127:ct},{84:[1,214]},t(ft,[2,103],{22:[1,215]}),{84:[1,216],101:[1,217]},{84:[1,218]},t(Ft,[2,153]),{84:[1,219],101:[1,220]},t(ut,[2,57],{116:129,52:g,66:y,67:m,91:k,95:T,105:C,106:E,109:S,111:A,112:M,122:O,123:I,124:R,125:F,126:P,127:Y}),{22:yt,24:mt,26:vt,38:bt,41:[1,221],42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:201,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,42:_t,52:G,56:[1,222],66:X,67:Z,73:xt,81:wt,83:201,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,39:223,42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:140,84:kt,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,42:_t,52:G,58:[1,224],66:X,67:Z,73:xt,81:wt,83:201,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,42:_t,52:G,60:[1,225],66:X,67:Z,73:xt,81:wt,83:201,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,10
6:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,42:_t,52:G,62:[1,226],66:X,67:Z,73:xt,81:wt,83:201,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,42:_t,52:G,64:[1,227],66:X,67:Z,73:xt,81:wt,83:201,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{67:[1,228]},{22:yt,24:mt,26:vt,38:bt,42:_t,52:G,66:X,67:Z,70:[1,229],73:xt,81:wt,83:201,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,42:_t,52:G,66:X,67:Z,72:[1,230],73:xt,81:wt,83:201,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,39:231,42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:140,84:kt,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,41:[1,232],42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:201,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,42:_t,52:G,66:X,67:Z,73:xt,75:[1,233],77:[1,234],81:wt,83:201,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{22:yt,24:mt,26:vt,38:bt,42:_t,52:G,66:X,67:Z,73:xt,75:[1,236],77:[1,235],81:wt,83:201,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{9:237,20:$,21:q,23:H},t(W,[2,52],{52:Rt}),t(gt,[2,77]),t(gt,[2,76]),{22:yt,24:mt,26:vt,38:bt,42:_t,52:G,66:X,67:Z,68:[1,238],73:xt,81:wt,83:201,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},t(gt,[2,79]),t(Pt,[2,83]),{22:yt,24:mt,26:vt,38:bt,39:239,42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:140,84:kt,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},t(It,o,{17:240}),t(z,[2,43]),{51:241,52:g,54:46,66:y,67:m,91:k,95:T,105:C,106:E,109:S,111:A,112:M,116:47,122:O,123:I,124:R,125:F,126:P,127:Y},{22:Ut,66:zt,67:$t,86:qt,96:242,102:Ht,105:Wt,107:243,108:244,109:Vt,110:Gt,111:Xt,112:Zt,113:Qt},{22:Ut,66:zt,67:$t,86:qt,96:256,102:Ht,105:Wt,107:243,108:244,109:Vt,110:Gt,111:Xt,112:Zt,113:Qt},{22:Ut,66:zt,67:$t,86:qt,96:257,102:Ht,104:[1,258],105:Wt,107:243,108:244,109:Vt,110:Gt,111:Xt,112:Zt,113:Qt},{22:Ut,66:zt,67:$t,86:qt,96:259,102:Ht,104:[1,260],105:Wt,107:243,108:244,109:Vt,110:Gt,111:Xt,112:Zt,113:Qt},{105:[1,261]},{22:Ut,66:zt,67:$t,86:qt,96:262,102:Ht,105:Wt,107:243,108:244,109:Vt,110:Gt,111:Xt,112:Zt,113:Qt},{22:Ut,66:zt,67:$t,86:qt,96:263,102:Ht,105:Wt,107:243,108:244,109:Vt,110:Gt,111:Xt,112:Zt,113:Qt},{26:V,52:G,66:X,67:Z,91:Q,97:264,105:K,106:J,109:tt,111:et,112:nt,115:100,117:98,122:rt,123:it,124:at,125:ot,126:st,127:ct},t(ft,[2,102]),{84:[1,265]},t(ft,[2,106],{22:[1,266]}),t(ft,[2,107]),t(ft,[2,110
]),t(ft,[2,112],{22:[1,267]}),t(ft,[2,113]),t(lt,[2,58]),t(lt,[2,59]),{22:yt,24:mt,26:vt,38:bt,42:_t,52:G,58:[1,268],66:X,67:Z,73:xt,81:wt,83:201,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},t(lt,[2,66]),t(lt,[2,61]),t(lt,[2,62]),t(lt,[2,63]),{66:[1,269]},t(lt,[2,65]),t(lt,[2,67]),{22:yt,24:mt,26:vt,38:bt,42:_t,52:G,66:X,67:Z,72:[1,270],73:xt,81:wt,83:201,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},t(lt,[2,69]),t(lt,[2,70]),t(lt,[2,72]),t(lt,[2,71]),t(lt,[2,73]),t(Yt,[2,4]),t([22,52,66,67,91,95,105,106,109,111,112,122,123,124,125,126,127],[2,81]),{22:yt,24:mt,26:vt,38:bt,41:[1,271],42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:201,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{18:18,19:19,20:s,21:c,22:u,23:l,32:24,33:25,34:26,35:27,36:28,37:29,38:h,42:[1,272],43:31,44:f,46:d,48:p,50:35,51:45,52:g,54:46,66:y,67:m,86:v,87:b,88:_,89:x,90:w,91:k,95:T,105:C,106:E,109:S,111:A,112:M,116:47,118:N,119:D,120:B,121:L,122:O,123:I,124:R,125:F,126:P,127:Y},t(ut,[2,56]),t(ft,[2,115],{106:Kt}),t(Jt,[2,125],{108:274,22:Ut,66:zt,67:$t,86:qt,102:Ht,105:Wt,109:Vt,110:Gt,111:Xt,112:Zt,113:Qt}),t(te,[2,127]),t(te,[2,129]),t(te,[2,130]),t(te,[2,131]),t(te,[2,132]),t(te,[2,133]),t(te,[2,134]),t(te,[2,135]),t(te,[2,136]),t(te,[2,137]),t(te,[2,138]),t(te,[2,139]),t(ft,[2,116],{106:Kt}),t(ft,[2,117],{106:Kt}),{22:[1,275]},t(ft,[2,118],{106:Kt}),{22:[1,276]},t(jt,[2,124]),t(ft,[2,98],{106:Kt}),t(ft,[2,99],{106:Kt}),t(ft,[2,100],{115:100,117:178,26:V,52:G,66:X,67:Z,91:Q,105:K,106:J,109:tt,111:et,112:nt,122:rt,123:it,124:at,125:ot,126:st,127:ct}),t(ft,[2,104]),{101:[1,277]},{101:[1,278]},{58:[1,279]},{68:[1,280]},{72:[1,281]},{9:282,20:$,21:q,23:H},t(z,[2,42]),{22:Ut,66:zt,67:$t,86:qt,102:Ht,105:Wt,107:283,108:244,109:Vt,110:Gt,111:Xt,112:Zt,113:Qt},t(te,[2,128]),{26:V,52:G,66:X,67:Z,91:Q,97:284,105:K,106:J,109:tt,111:et,112:nt,115:100,117:98,122:rt,123:it,124:at,125:ot,126:st,127:ct},{26:V,52:G,66:X,67:Z,91:Q,97:285,105:K,106:J,109:tt,111:et,112:nt,115:100,117:98,122:rt,123:it,124:at,125:ot,126:st,127:ct},t(ft,[2,108]),t(ft,[2,114]),t(lt,[2,60]),{22:yt,24:mt,26:vt,38:bt,39:286,42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:140,84:kt,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},t(lt,[2,68]),t(It,o,{17:287}),t(Jt,[2,126],{108:274,22:Ut,66:zt,67:$t,86:qt,102:Ht,105:Wt,109:Vt,110:Gt,111:Xt,112:Zt,113:Qt}),t(ft,[2,121],{115:100,117:178,22:[1,288],26:V,52:G,66:X,67:Z,91:Q,105:K,106:J,109:tt,111:et,112:nt,122:rt,123:it,124:at,125:ot,126:st,127:ct}),t(ft,[2,122],{115:100,117:178,22:[1,289],26:V,52:G,66:X,67:Z,91:Q,105:K,106:J,109:tt,111:et,112:nt,122:rt,123:it,124:at,125:ot,126:st,127:ct}),{22:yt,24:mt,26:vt,38:bt,41:[1,290],42:_t,52:G,66:X,67:Z,73:xt,81:wt,83:201,85:151,86:Tt,87:Ct,88:Et,89:St,90:At,91:Mt,92:Nt,94:142,95:Dt,105:K,106:J,109:Bt,111:et,112:nt,113:Lt,114:Ot,115:148,122:rt,123:it,124:at,125:ot,126:st,127:ct},{18:18,19:19,20:s,21:c,22:u,23:l,32:24,33:25,34:26,35:27,36:28,37:29,38:h,42:[1,291],43:31,44:f,46:d,48:p,50:35,51:45,52:g,54:46,66:y,67:m,86:v,87:b,88:_,89:x,90:w,91:k,95:T,105:C,106:E,109:S,111:A,112:M,116:47,118:N,119:D,120:B,121:L,122:O,123:I,124:R,125:F,126:P,127:Y},{22:Ut,6
6:zt,67:$t,86:qt,96:292,102:Ht,105:Wt,107:243,108:244,109:Vt,110:Gt,111:Xt,112:Zt,113:Qt},{22:Ut,66:zt,67:$t,86:qt,96:293,102:Ht,105:Wt,107:243,108:244,109:Vt,110:Gt,111:Xt,112:Zt,113:Qt},t(lt,[2,64]),t(z,[2,41]),t(ft,[2,119],{106:Kt}),t(ft,[2,120],{106:Kt})],defaultActions:{2:[2,1],9:[2,5],10:[2,2],132:[2,7]},parseError:function(t,e){if(!e.recoverable){var n=new Error(t);throw n.hash=e,n}this.trace(t)},parse:function(t){var e=this,n=[0],r=[],i=[null],a=[],o=this.table,s="",c=0,u=0,l=0,h=2,f=1,d=a.slice.call(arguments,1),p=Object.create(this.lexer),g={yy:{}};for(var y in this.yy)Object.prototype.hasOwnProperty.call(this.yy,y)&&(g.yy[y]=this.yy[y]);p.setInput(t,g.yy),g.yy.lexer=p,g.yy.parser=this,void 0===p.yylloc&&(p.yylloc={});var m=p.yylloc;a.push(m);var v=p.options&&p.options.ranges;function b(){var t;return"number"!=typeof(t=r.pop()||p.lex()||f)&&(t instanceof Array&&(t=(r=t).pop()),t=e.symbols_[t]||t),t}"function"==typeof g.yy.parseError?this.parseError=g.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;for(var _,x,w,k,T,C,E,S,A,M={};;){if(w=n[n.length-1],this.defaultActions[w]?k=this.defaultActions[w]:(null==_&&(_=b()),k=o[w]&&o[w][_]),void 0===k||!k.length||!k[0]){var N="";for(C in A=[],o[w])this.terminals_[C]&&C>h&&A.push("'"+this.terminals_[C]+"'");N=p.showPosition?"Parse error on line "+(c+1)+":\n"+p.showPosition()+"\nExpecting "+A.join(", ")+", got '"+(this.terminals_[_]||_)+"'":"Parse error on line "+(c+1)+": Unexpected "+(_==f?"end of input":"'"+(this.terminals_[_]||_)+"'"),this.parseError(N,{text:p.match,token:this.terminals_[_]||_,line:p.yylineno,loc:m,expected:A})}if(k[0]instanceof Array&&k.length>1)throw new Error("Parse Error: multiple actions possible at state: "+w+", token: "+_);switch(k[0]){case 1:n.push(_),i.push(p.yytext),a.push(p.yylloc),n.push(k[1]),_=null,x?(_=x,x=null):(u=p.yyleng,s=p.yytext,c=p.yylineno,m=p.yylloc,l>0&&l--);break;case 2:if(E=this.productions_[k[1]][1],M.$=i[i.length-E],M._$={first_line:a[a.length-(E||1)].first_line,last_line:a[a.length-1].last_line,first_column:a[a.length-(E||1)].first_column,last_column:a[a.length-1].last_column},v&&(M._$.range=[a[a.length-(E||1)].range[0],a[a.length-1].range[1]]),void 0!==(T=this.performAction.apply(M,[s,u,c,g.yy,k[1],i,a].concat(d))))return T;E&&(n=n.slice(0,-1*E*2),i=i.slice(0,-1*E),a=a.slice(0,-1*E)),n.push(this.productions_[k[1]][0]),i.push(M.$),a.push(M._$),S=o[n[n.length-2]][n[n.length-1]],n.push(S);break;case 3:return!0}}return!0}},ne={EOF:1,parseError:function(t,e){if(!this.yy.parser)throw new Error(t);this.yy.parser.parseError(t,e)},setInput:function(t,e){return this.yy=e||this.yy||{},this._input=t,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var t=this._input[0];return this.yytext+=t,this.yyleng++,this.offset++,this.match+=t,this.matched+=t,t.match(/(?:\r\n?|\n).*/g)?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),t},unput:function(t){var e=t.length,n=t.split(/(?:\r\n?|\n)/g);this._input=t+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-e),this.offset-=e;var 
r=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),n.length-1&&(this.yylineno-=n.length-1);var i=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:n?(n.length===r.length?this.yylloc.first_column:0)+r[r.length-n.length].length-n[0].length:this.yylloc.first_column-e},this.options.ranges&&(this.yylloc.range=[i[0],i[0]+this.yyleng-e]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){return this.options.backtrack_lexer?(this._backtrack=!0,this):this.parseError("Lexical error on line "+(this.yylineno+1)+". You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},less:function(t){this.unput(this.match.slice(t))},pastInput:function(){var t=this.matched.substr(0,this.matched.length-this.match.length);return(t.length>20?"...":"")+t.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var t=this.match;return t.length<20&&(t+=this._input.substr(0,20-t.length)),(t.substr(0,20)+(t.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var t=this.pastInput(),e=new Array(t.length+1).join("-");return t+this.upcomingInput()+"\n"+e+"^"},test_match:function(t,e){var n,r,i;if(this.options.backtrack_lexer&&(i={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(i.yylloc.range=this.yylloc.range.slice(0))),(r=t[0].match(/(?:\r\n?|\n).*/g))&&(this.yylineno+=r.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:r?r[r.length-1].length-r[r.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+t[0].length},this.yytext+=t[0],this.match+=t[0],this.matches=t,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(t[0].length),this.matched+=t[0],n=this.performAction.call(this,this.yy,this,e,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),n)return n;if(this._backtrack){for(var a in i)this[a]=i[a];return!1}return!1},next:function(){if(this.done)return this.EOF;var t,e,n,r;this._input||(this.done=!0),this._more||(this.yytext="",this.match="");for(var i=this._currentRules(),a=0;ae[0].length)){if(e=n,r=a,this.options.backtrack_lexer){if(!1!==(t=this.test_match(n,i[a])))return t;if(this._backtrack){e=!1;continue}return!1}if(!this.options.flex)break}return e?!1!==(t=this.test_match(e,i[r]))&&t:""===this._input?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+". 
Unrecognized text.\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){return this.next()||this.lex()},begin:function(t){this.conditionStack.push(t)},popState:function(){return this.conditionStack.length-1>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(t){return(t=this.conditionStack.length-1-Math.abs(t||0))>=0?this.conditionStack[t]:"INITIAL"},pushState:function(t){this.begin(t)},stateStackSize:function(){return this.conditionStack.length},options:{},performAction:function(t,e,n,r){switch(n){case 0:return this.begin("open_directive"),12;case 1:return this.begin("type_directive"),13;case 2:return this.popState(),this.begin("arg_directive"),10;case 3:return this.popState(),this.popState(),15;case 4:return 14;case 5:case 6:break;case 7:return this.begin("acc_title"),44;case 8:return this.popState(),"acc_title_value";case 9:return this.begin("acc_descr"),46;case 10:return this.popState(),"acc_descr_value";case 11:this.begin("acc_descr_multiline");break;case 12:case 15:case 24:case 27:case 30:case 33:this.popState();break;case 13:return"acc_descr_multiline_value";case 14:this.begin("string");break;case 16:return"STR";case 17:return 86;case 18:return 95;case 19:return 87;case 20:return 104;case 21:return 88;case 22:return 89;case 23:this.begin("href");break;case 25:return 100;case 26:this.begin("callbackname");break;case 28:this.popState(),this.begin("callbackargs");break;case 29:return 98;case 31:return 99;case 32:this.begin("click");break;case 34:return 90;case 35:case 36:return t.lex.firstGraph()&&this.begin("dir"),24;case 37:return 38;case 38:return 42;case 39:case 40:case 41:case 42:return 101;case 43:return this.popState(),25;case 44:case 45:case 46:case 47:case 48:case 49:case 50:case 51:case 52:case 53:return this.popState(),26;case 54:return 118;case 55:return 119;case 56:return 120;case 57:return 121;case 58:return 105;case 59:return 111;case 60:return 53;case 61:return 67;case 62:return 52;case 63:return 20;case 64:return 106;case 65:return 126;case 66:case 67:case 68:return 82;case 69:case 70:case 71:return 81;case 72:return 59;case 73:return 60;case 74:return 61;case 75:return 62;case 76:return 63;case 77:return 64;case 78:return 65;case 79:return 69;case 80:return 70;case 81:return 55;case 82:return 56;case 83:return 109;case 84:return 112;case 85:return 127;case 86:return 124;case 87:return 113;case 88:case 89:return 125;case 90:return 114;case 91:return 73;case 92:return 92;case 93:return"SEP";case 94:return 91;case 95:return 66;case 96:return 75;case 97:return 74;case 98:return 77;case 99:return 76;case 100:return 122;case 101:return 123;case 102:return 68;case 103:return 57;case 104:return 58;case 105:return 40;case 106:return 41;case 107:return 71;case 108:return 72;case 109:return 133;case 110:return 21;case 111:return 22;case 112:return 
23}},rules:[/^(?:%%\{)/,/^(?:((?:(?!\}%%)[^:.])*))/,/^(?::)/,/^(?:\}%%)/,/^(?:((?:(?!\}%%).|\n)*))/,/^(?:%%(?!\{)[^\n]*)/,/^(?:[^\}]%%[^\n]*)/,/^(?:accTitle\s*:\s*)/,/^(?:(?!\n||)*[^\n]*)/,/^(?:accDescr\s*:\s*)/,/^(?:(?!\n||)*[^\n]*)/,/^(?:accDescr\s*\{\s*)/,/^(?:[\}])/,/^(?:[^\}]*)/,/^(?:["])/,/^(?:["])/,/^(?:[^"]*)/,/^(?:style\b)/,/^(?:default\b)/,/^(?:linkStyle\b)/,/^(?:interpolate\b)/,/^(?:classDef\b)/,/^(?:class\b)/,/^(?:href[\s]+["])/,/^(?:["])/,/^(?:[^"]*)/,/^(?:call[\s]+)/,/^(?:\([\s]*\))/,/^(?:\()/,/^(?:[^(]*)/,/^(?:\))/,/^(?:[^)]*)/,/^(?:click[\s]+)/,/^(?:[\s\n])/,/^(?:[^\s\n]*)/,/^(?:graph\b)/,/^(?:flowchart\b)/,/^(?:subgraph\b)/,/^(?:end\b\s*)/,/^(?:_self\b)/,/^(?:_blank\b)/,/^(?:_parent\b)/,/^(?:_top\b)/,/^(?:(\r?\n)*\s*\n)/,/^(?:\s*LR\b)/,/^(?:\s*RL\b)/,/^(?:\s*TB\b)/,/^(?:\s*BT\b)/,/^(?:\s*TD\b)/,/^(?:\s*BR\b)/,/^(?:\s*<)/,/^(?:\s*>)/,/^(?:\s*\^)/,/^(?:\s*v\b)/,/^(?:.*direction\s+TB[^\n]*)/,/^(?:.*direction\s+BT[^\n]*)/,/^(?:.*direction\s+RL[^\n]*)/,/^(?:.*direction\s+LR[^\n]*)/,/^(?:[0-9]+)/,/^(?:#)/,/^(?::::)/,/^(?::)/,/^(?:&)/,/^(?:;)/,/^(?:,)/,/^(?:\*)/,/^(?:\s*[xo<]?--+[-xo>]\s*)/,/^(?:\s*[xo<]?==+[=xo>]\s*)/,/^(?:\s*[xo<]?-?\.+-[xo>]?\s*)/,/^(?:\s*[xo<]?--\s*)/,/^(?:\s*[xo<]?==\s*)/,/^(?:\s*[xo<]?-\.\s*)/,/^(?:\(-)/,/^(?:-\))/,/^(?:\(\[)/,/^(?:\]\))/,/^(?:\[\[)/,/^(?:\]\])/,/^(?:\[\|)/,/^(?:\[\()/,/^(?:\)\])/,/^(?:\(\(\()/,/^(?:\)\)\))/,/^(?:-)/,/^(?:\.)/,/^(?:[\_])/,/^(?:\+)/,/^(?:%)/,/^(?:=)/,/^(?:=)/,/^(?:<)/,/^(?:>)/,/^(?:\^)/,/^(?:\\\|)/,/^(?:v\b)/,/^(?:[A-Za-z]+)/,/^(?:\\\])/,/^(?:\[\/)/,/^(?:\/\])/,/^(?:\[\\)/,/^(?:[!"#$%&'*+,-.`?\\_/])/,/^(?:[\u00AA\u00B5\u00BA\u00C0-\u00D6\u00D8-\u00F6]|[\u00F8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0370-\u0374\u0376\u0377]|[\u037A-\u037D\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5]|[\u03F7-\u0481\u048A-\u0527\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA]|[\u05F0-\u05F2\u0620-\u064A\u066E\u066F\u0671-\u06D3\u06D5\u06E5\u06E6\u06EE]|[\u06EF\u06FA-\u06FC\u06FF\u0710\u0712-\u072F\u074D-\u07A5\u07B1\u07CA-\u07EA]|[\u07F4\u07F5\u07FA\u0800-\u0815\u081A\u0824\u0828\u0840-\u0858\u08A0]|[\u08A2-\u08AC\u0904-\u0939\u093D\u0950\u0958-\u0961\u0971-\u0977]|[\u0979-\u097F\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2]|[\u09B6-\u09B9\u09BD\u09CE\u09DC\u09DD\u09DF-\u09E1\u09F0\u09F1\u0A05-\u0A0A]|[\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39]|[\u0A59-\u0A5C\u0A5E\u0A72-\u0A74\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8]|[\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABD\u0AD0\u0AE0\u0AE1\u0B05-\u0B0C]|[\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3D\u0B5C]|[\u0B5D\u0B5F-\u0B61\u0B71\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99]|[\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BD0]|[\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C33\u0C35-\u0C39\u0C3D]|[\u0C58\u0C59\u0C60\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3]|[\u0CB5-\u0CB9\u0CBD\u0CDE\u0CE0\u0CE1\u0CF1\u0CF2\u0D05-\u0D0C\u0D0E-\u0D10]|[\u0D12-\u0D3A\u0D3D\u0D4E\u0D60\u0D61\u0D7A-\u0D7F\u0D85-\u0D96\u0D9A-\u0DB1]|[\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0E01-\u0E30\u0E32\u0E33\u0E40-\u0E46\u0E81]|[\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3]|[\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB0\u0EB2\u0EB3\u0EBD\u0EC0-\u0EC4\u0EC6]|[\u0EDC-\u0EDF\u0F00\u0F40-\u0F47\u0F49-\u0F6C\u0F88-\u0F8C\u1000-\u102A]|[\u103F\u1050-\u1055\u105A-\u105D\u1061\u1065\u1066\u106E-\u1070\u1075-\u1081]|[\u108E\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D]|[\u1250-\u1256\u12
58\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0]|[\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310]|[\u1312-\u1315\u1318-\u135A\u1380-\u138F\u13A0-\u13F4\u1401-\u166C]|[\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u1700-\u170C\u170E-\u1711]|[\u1720-\u1731\u1740-\u1751\u1760-\u176C\u176E-\u1770\u1780-\u17B3\u17D7]|[\u17DC\u1820-\u1877\u1880-\u18A8\u18AA\u18B0-\u18F5\u1900-\u191C]|[\u1950-\u196D\u1970-\u1974\u1980-\u19AB\u19C1-\u19C7\u1A00-\u1A16]|[\u1A20-\u1A54\u1AA7\u1B05-\u1B33\u1B45-\u1B4B\u1B83-\u1BA0\u1BAE\u1BAF]|[\u1BBA-\u1BE5\u1C00-\u1C23\u1C4D-\u1C4F\u1C5A-\u1C7D\u1CE9-\u1CEC]|[\u1CEE-\u1CF1\u1CF5\u1CF6\u1D00-\u1DBF\u1E00-\u1F15\u1F18-\u1F1D]|[\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D]|[\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3]|[\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2071\u207F]|[\u2090-\u209C\u2102\u2107\u210A-\u2113\u2115\u2119-\u211D\u2124\u2126\u2128]|[\u212A-\u212D\u212F-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2183\u2184]|[\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CEE\u2CF2\u2CF3]|[\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D80-\u2D96\u2DA0-\u2DA6]|[\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE]|[\u2DD0-\u2DD6\u2DD8-\u2DDE\u2E2F\u3005\u3006\u3031-\u3035\u303B\u303C]|[\u3041-\u3096\u309D-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D]|[\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FCC]|[\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA61F\uA62A\uA62B]|[\uA640-\uA66E\uA67F-\uA697\uA6A0-\uA6E5\uA717-\uA71F\uA722-\uA788]|[\uA78B-\uA78E\uA790-\uA793\uA7A0-\uA7AA\uA7F8-\uA801\uA803-\uA805]|[\uA807-\uA80A\uA80C-\uA822\uA840-\uA873\uA882-\uA8B3\uA8F2-\uA8F7\uA8FB]|[\uA90A-\uA925\uA930-\uA946\uA960-\uA97C\uA984-\uA9B2\uA9CF\uAA00-\uAA28]|[\uAA40-\uAA42\uAA44-\uAA4B\uAA60-\uAA76\uAA7A\uAA80-\uAAAF\uAAB1\uAAB5]|[\uAAB6\uAAB9-\uAABD\uAAC0\uAAC2\uAADB-\uAADD\uAAE0-\uAAEA\uAAF2-\uAAF4]|[\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E]|[\uABC0-\uABE2\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D]|[\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1F-\uFB28\uFB2A-\uFB36]|[\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D]|[\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE70-\uFE74\uFE76-\uFEFC]|[\uFF21-\uFF3A\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF]|[\uFFD2-\uFFD7\uFFDA-\uFFDC])/,/^(?:\|)/,/^(?:\()/,/^(?:\))/,/^(?:\[)/,/^(?:\])/,/^(?:\{)/,/^(?:\})/,/^(?:")/,/^(?:(\r?\n)+)/,/^(?:\s)/,/^(?:$)/],conditions:{close_directive:{rules:[],inclusive:!1},arg_directive:{rules:[3,4],inclusive:!1},type_directive:{rules:[2,3],inclusive:!1},open_directive:{rules:[1],inclusive:!1},callbackargs:{rules:[30,31],inclusive:!1},callbackname:{rules:[27,28,29],inclusive:!1},href:{rules:[24,25],inclusive:!1},click:{rules:[33,34],inclusive:!1},vertex:{rules:[],inclusive:!1},dir:{rules:[43,44,45,46,47,48,49,50,51,52,53],inclusive:!1},acc_descr_multiline:{rules:[12,13],inclusive:!1},acc_descr:{rules:[10],inclusive:!1},acc_title:{rules:[8],inclusive:!1},string:{rules:[15,16],inclusive:!1},INITIAL:{rules:[0,5,6,7,9,11,14,17,18,19,20,21,22,23,26,32,35,36,37,38,39,40,41,42,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112],inclusive:!0}}};function re(){this.yy={}}return ee.lexer=ne,re.prototype=ee,ee.Parser=re,new re}();e.parser=r,e.Parser=r.Parser,e.parse=function(){return 
r.parse.apply(r,arguments)},e.main=function(t){t[1]||(console.log("Usage: "+t[0]+" FILE"),process.exit(1));var r=n(5354).readFileSync(n(6470).normalize(t[1]),"utf8");return e.parser.parse(r)},n.c[n.s]===t&&e.main(process.argv.slice(1))},9959:(t,e,n)=>{t=n.nmd(t);var r=function(){var t=function(t,e,n,r){for(n=n||{},r=t.length;r--;n[t[r]]=e);return n},e=[1,3],n=[1,5],r=[7,9,11,12,13,14,15,16,17,18,19,20,22,24,25,27,34,39],i=[1,15],a=[1,16],o=[1,17],s=[1,18],c=[1,19],u=[1,20],l=[1,21],h=[1,22],f=[1,23],d=[1,24],p=[1,25],g=[1,26],y=[1,28],m=[1,30],v=[1,33],b=[5,7,9,11,12,13,14,15,16,17,18,19,20,22,24,25,27,34,39],_={trace:function(){},yy:{},symbols_:{error:2,start:3,directive:4,gantt:5,document:6,EOF:7,line:8,SPACE:9,statement:10,NL:11,dateFormat:12,inclusiveEndDates:13,topAxis:14,axisFormat:15,excludes:16,includes:17,todayMarker:18,title:19,acc_title:20,acc_title_value:21,acc_descr:22,acc_descr_value:23,acc_descr_multiline_value:24,section:25,clickStatement:26,taskTxt:27,taskData:28,openDirective:29,typeDirective:30,closeDirective:31,":":32,argDirective:33,click:34,callbackname:35,callbackargs:36,href:37,clickStatementDebug:38,open_directive:39,type_directive:40,arg_directive:41,close_directive:42,$accept:0,$end:1},terminals_:{2:"error",5:"gantt",7:"EOF",9:"SPACE",11:"NL",12:"dateFormat",13:"inclusiveEndDates",14:"topAxis",15:"axisFormat",16:"excludes",17:"includes",18:"todayMarker",19:"title",20:"acc_title",21:"acc_title_value",22:"acc_descr",23:"acc_descr_value",24:"acc_descr_multiline_value",25:"section",27:"taskTxt",28:"taskData",32:":",34:"click",35:"callbackname",36:"callbackargs",37:"href",39:"open_directive",40:"type_directive",41:"arg_directive",42:"close_directive"},productions_:[0,[3,2],[3,3],[6,0],[6,2],[8,2],[8,1],[8,1],[8,1],[10,1],[10,1],[10,1],[10,1],[10,1],[10,1],[10,1],[10,1],[10,2],[10,2],[10,1],[10,1],[10,1],[10,2],[10,1],[4,4],[4,6],[26,2],[26,3],[26,3],[26,4],[26,3],[26,4],[26,2],[38,2],[38,3],[38,3],[38,4],[38,3],[38,4],[38,2],[29,1],[30,1],[33,1],[31,1]],performAction:function(t,e,n,r,i,a,o){var s=a.length-1;switch(i){case 2:return a[s-1];case 3:case 7:case 8:this.$=[];break;case 4:a[s-1].push(a[s]),this.$=a[s-1];break;case 5:case 6:this.$=a[s];break;case 9:r.setDateFormat(a[s].substr(11)),this.$=a[s].substr(11);break;case 10:r.enableInclusiveEndDates(),this.$=a[s].substr(18);break;case 11:r.TopAxis(),this.$=a[s].substr(8);break;case 12:r.setAxisFormat(a[s].substr(11)),this.$=a[s].substr(11);break;case 13:r.setExcludes(a[s].substr(9)),this.$=a[s].substr(9);break;case 14:r.setIncludes(a[s].substr(9)),this.$=a[s].substr(9);break;case 15:r.setTodayMarker(a[s].substr(12)),this.$=a[s].substr(12);break;case 16:r.setTitle(a[s].substr(6)),this.$=a[s].substr(6);break;case 17:this.$=a[s].trim(),r.setTitle(this.$);break;case 18:case 19:this.$=a[s].trim(),r.setAccDescription(this.$);break;case 20:r.addSection(a[s].substr(8)),this.$=a[s].substr(8);break;case 22:r.addTask(a[s-1],a[s]),this.$="task";break;case 26:this.$=a[s-1],r.setClickEvent(a[s-1],a[s],null);break;case 27:this.$=a[s-2],r.setClickEvent(a[s-2],a[s-1],a[s]);break;case 28:this.$=a[s-2],r.setClickEvent(a[s-2],a[s-1],null),r.setLink(a[s-2],a[s]);break;case 29:this.$=a[s-3],r.setClickEvent(a[s-3],a[s-2],a[s-1]),r.setLink(a[s-3],a[s]);break;case 30:this.$=a[s-2],r.setClickEvent(a[s-2],a[s],null),r.setLink(a[s-2],a[s-1]);break;case 31:this.$=a[s-3],r.setClickEvent(a[s-3],a[s-1],a[s]),r.setLink(a[s-3],a[s-2]);break;case 32:this.$=a[s-1],r.setLink(a[s-1],a[s]);break;case 33:case 39:this.$=a[s-1]+" "+a[s];break;case 34:case 
35:case 37:this.$=a[s-2]+" "+a[s-1]+" "+a[s];break;case 36:case 38:this.$=a[s-3]+" "+a[s-2]+" "+a[s-1]+" "+a[s];break;case 40:r.parseDirective("%%{","open_directive");break;case 41:r.parseDirective(a[s],"type_directive");break;case 42:a[s]=a[s].trim().replace(/'/g,'"'),r.parseDirective(a[s],"arg_directive");break;case 43:r.parseDirective("}%%","close_directive","gantt")}},table:[{3:1,4:2,5:e,29:4,39:n},{1:[3]},{3:6,4:2,5:e,29:4,39:n},t(r,[2,3],{6:7}),{30:8,40:[1,9]},{40:[2,40]},{1:[2,1]},{4:29,7:[1,10],8:11,9:[1,12],10:13,11:[1,14],12:i,13:a,14:o,15:s,16:c,17:u,18:l,19:h,20:f,22:d,24:p,25:g,26:27,27:y,29:4,34:m,39:n},{31:31,32:[1,32],42:v},t([32,42],[2,41]),t(r,[2,8],{1:[2,2]}),t(r,[2,4]),{4:29,10:34,12:i,13:a,14:o,15:s,16:c,17:u,18:l,19:h,20:f,22:d,24:p,25:g,26:27,27:y,29:4,34:m,39:n},t(r,[2,6]),t(r,[2,7]),t(r,[2,9]),t(r,[2,10]),t(r,[2,11]),t(r,[2,12]),t(r,[2,13]),t(r,[2,14]),t(r,[2,15]),t(r,[2,16]),{21:[1,35]},{23:[1,36]},t(r,[2,19]),t(r,[2,20]),t(r,[2,21]),{28:[1,37]},t(r,[2,23]),{35:[1,38],37:[1,39]},{11:[1,40]},{33:41,41:[1,42]},{11:[2,43]},t(r,[2,5]),t(r,[2,17]),t(r,[2,18]),t(r,[2,22]),t(r,[2,26],{36:[1,43],37:[1,44]}),t(r,[2,32],{35:[1,45]}),t(b,[2,24]),{31:46,42:v},{42:[2,42]},t(r,[2,27],{37:[1,47]}),t(r,[2,28]),t(r,[2,30],{36:[1,48]}),{11:[1,49]},t(r,[2,29]),t(r,[2,31]),t(b,[2,25])],defaultActions:{5:[2,40],6:[2,1],33:[2,43],42:[2,42]},parseError:function(t,e){if(!e.recoverable){var n=new Error(t);throw n.hash=e,n}this.trace(t)},parse:function(t){var e=this,n=[0],r=[],i=[null],a=[],o=this.table,s="",c=0,u=0,l=0,h=2,f=1,d=a.slice.call(arguments,1),p=Object.create(this.lexer),g={yy:{}};for(var y in this.yy)Object.prototype.hasOwnProperty.call(this.yy,y)&&(g.yy[y]=this.yy[y]);p.setInput(t,g.yy),g.yy.lexer=p,g.yy.parser=this,void 0===p.yylloc&&(p.yylloc={});var m=p.yylloc;a.push(m);var v=p.options&&p.options.ranges;function b(){var t;return"number"!=typeof(t=r.pop()||p.lex()||f)&&(t instanceof Array&&(t=(r=t).pop()),t=e.symbols_[t]||t),t}"function"==typeof g.yy.parseError?this.parseError=g.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;for(var _,x,w,k,T,C,E,S,A,M={};;){if(w=n[n.length-1],this.defaultActions[w]?k=this.defaultActions[w]:(null==_&&(_=b()),k=o[w]&&o[w][_]),void 0===k||!k.length||!k[0]){var N="";for(C in A=[],o[w])this.terminals_[C]&&C>h&&A.push("'"+this.terminals_[C]+"'");N=p.showPosition?"Parse error on line "+(c+1)+":\n"+p.showPosition()+"\nExpecting "+A.join(", ")+", got '"+(this.terminals_[_]||_)+"'":"Parse error on line "+(c+1)+": Unexpected "+(_==f?"end of input":"'"+(this.terminals_[_]||_)+"'"),this.parseError(N,{text:p.match,token:this.terminals_[_]||_,line:p.yylineno,loc:m,expected:A})}if(k[0]instanceof Array&&k.length>1)throw new Error("Parse Error: multiple actions possible at state: "+w+", token: "+_);switch(k[0]){case 1:n.push(_),i.push(p.yytext),a.push(p.yylloc),n.push(k[1]),_=null,x?(_=x,x=null):(u=p.yyleng,s=p.yytext,c=p.yylineno,m=p.yylloc,l>0&&l--);break;case 2:if(E=this.productions_[k[1]][1],M.$=i[i.length-E],M._$={first_line:a[a.length-(E||1)].first_line,last_line:a[a.length-1].last_line,first_column:a[a.length-(E||1)].first_column,last_column:a[a.length-1].last_column},v&&(M._$.range=[a[a.length-(E||1)].range[0],a[a.length-1].range[1]]),void 0!==(T=this.performAction.apply(M,[s,u,c,g.yy,k[1],i,a].concat(d))))return T;E&&(n=n.slice(0,-1*E*2),i=i.slice(0,-1*E),a=a.slice(0,-1*E)),n.push(this.productions_[k[1]][0]),i.push(M.$),a.push(M._$),S=o[n[n.length-2]][n[n.length-1]],n.push(S);break;case 
3:return!0}}return!0}},x={EOF:1,parseError:function(t,e){if(!this.yy.parser)throw new Error(t);this.yy.parser.parseError(t,e)},setInput:function(t,e){return this.yy=e||this.yy||{},this._input=t,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var t=this._input[0];return this.yytext+=t,this.yyleng++,this.offset++,this.match+=t,this.matched+=t,t.match(/(?:\r\n?|\n).*/g)?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),t},unput:function(t){var e=t.length,n=t.split(/(?:\r\n?|\n)/g);this._input=t+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-e),this.offset-=e;var r=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),n.length-1&&(this.yylineno-=n.length-1);var i=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:n?(n.length===r.length?this.yylloc.first_column:0)+r[r.length-n.length].length-n[0].length:this.yylloc.first_column-e},this.options.ranges&&(this.yylloc.range=[i[0],i[0]+this.yyleng-e]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){return this.options.backtrack_lexer?(this._backtrack=!0,this):this.parseError("Lexical error on line "+(this.yylineno+1)+". You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},less:function(t){this.unput(this.match.slice(t))},pastInput:function(){var t=this.matched.substr(0,this.matched.length-this.match.length);return(t.length>20?"...":"")+t.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var t=this.match;return t.length<20&&(t+=this._input.substr(0,20-t.length)),(t.substr(0,20)+(t.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var t=this.pastInput(),e=new Array(t.length+1).join("-");return t+this.upcomingInput()+"\n"+e+"^"},test_match:function(t,e){var n,r,i;if(this.options.backtrack_lexer&&(i={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(i.yylloc.range=this.yylloc.range.slice(0))),(r=t[0].match(/(?:\r\n?|\n).*/g))&&(this.yylineno+=r.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:r?r[r.length-1].length-r[r.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+t[0].length},this.yytext+=t[0],this.match+=t[0],this.matches=t,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(t[0].length),this.matched+=t[0],n=this.performAction.call(this,this.yy,this,e,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),n)return 
n;if(this._backtrack){for(var a in i)this[a]=i[a];return!1}return!1},next:function(){if(this.done)return this.EOF;var t,e,n,r;this._input||(this.done=!0),this._more||(this.yytext="",this.match="");for(var i=this._currentRules(),a=0;a<i.length;a++)if((n=this._input.match(this.rules[i[a]]))&&(!e||n[0].length>e[0].length)){if(e=n,r=a,this.options.backtrack_lexer){if(!1!==(t=this.test_match(n,i[a])))return t;if(this._backtrack){e=!1;continue}return!1}if(!this.options.flex)break}return e?!1!==(t=this.test_match(e,i[r]))&&t:""===this._input?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+". Unrecognized text.\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){return this.next()||this.lex()},begin:function(t){this.conditionStack.push(t)},popState:function(){return this.conditionStack.length-1>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(t){return(t=this.conditionStack.length-1-Math.abs(t||0))>=0?this.conditionStack[t]:"INITIAL"},pushState:function(t){this.begin(t)},stateStackSize:function(){return this.conditionStack.length},options:{"case-insensitive":!0},performAction:function(t,e,n,r){switch(n){case 0:return this.begin("open_directive"),39;case 1:return this.begin("type_directive"),40;case 2:return this.popState(),this.begin("arg_directive"),32;case 3:return this.popState(),this.popState(),42;case 4:return 41;case 5:return this.begin("acc_title"),20;case 6:return this.popState(),"acc_title_value";case 7:return this.begin("acc_descr"),22;case 8:return this.popState(),"acc_descr_value";case 9:this.begin("acc_descr_multiline");break;case 10:case 20:case 23:case 26:case 29:this.popState();break;case 11:return"acc_descr_multiline_value";case 12:case 13:case 14:case 16:case 17:case 18:break;case 15:return 11;case 19:this.begin("href");break;case 21:return 37;case 22:this.begin("callbackname");break;case 24:this.popState(),this.begin("callbackargs");break;case 25:return 35;case 27:return 36;case 28:this.begin("click");break;case 30:return 34;case 31:return 5;case 32:return 12;case 33:return 13;case 34:return 14;case 35:return 15;case 36:return 17;case 37:return 16;case 38:return 18;case 39:return"date";case 40:return 19;case 41:return"accDescription";case 42:return 25;case 43:return 27;case 44:return 28;case 45:return 32;case 46:return 7;case 
47:return"INVALID"}},rules:[/^(?:%%\{)/i,/^(?:((?:(?!\}%%)[^:.])*))/i,/^(?::)/i,/^(?:\}%%)/i,/^(?:((?:(?!\}%%).|\n)*))/i,/^(?:accTitle\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*\{\s*)/i,/^(?:[\}])/i,/^(?:[^\}]*)/i,/^(?:%%(?!\{)*[^\n]*)/i,/^(?:[^\}]%%*[^\n]*)/i,/^(?:%%*[^\n]*[\n]*)/i,/^(?:[\n]+)/i,/^(?:\s+)/i,/^(?:#[^\n]*)/i,/^(?:%[^\n]*)/i,/^(?:href[\s]+["])/i,/^(?:["])/i,/^(?:[^"]*)/i,/^(?:call[\s]+)/i,/^(?:\([\s]*\))/i,/^(?:\()/i,/^(?:[^(]*)/i,/^(?:\))/i,/^(?:[^)]*)/i,/^(?:click[\s]+)/i,/^(?:[\s\n])/i,/^(?:[^\s\n]*)/i,/^(?:gantt\b)/i,/^(?:dateFormat\s[^#\n;]+)/i,/^(?:inclusiveEndDates\b)/i,/^(?:topAxis\b)/i,/^(?:axisFormat\s[^#\n;]+)/i,/^(?:includes\s[^#\n;]+)/i,/^(?:excludes\s[^#\n;]+)/i,/^(?:todayMarker\s[^\n;]+)/i,/^(?:\d\d\d\d-\d\d-\d\d\b)/i,/^(?:title\s[^#\n;]+)/i,/^(?:accDescription\s[^#\n;]+)/i,/^(?:section\s[^#:\n;]+)/i,/^(?:[^#:\n;]+)/i,/^(?::[^#\n;]+)/i,/^(?::)/i,/^(?:$)/i,/^(?:.)/i],conditions:{acc_descr_multiline:{rules:[10,11],inclusive:!1},acc_descr:{rules:[8],inclusive:!1},acc_title:{rules:[6],inclusive:!1},close_directive:{rules:[],inclusive:!1},arg_directive:{rules:[3,4],inclusive:!1},type_directive:{rules:[2,3],inclusive:!1},open_directive:{rules:[1],inclusive:!1},callbackargs:{rules:[26,27],inclusive:!1},callbackname:{rules:[23,24,25],inclusive:!1},href:{rules:[20,21],inclusive:!1},click:{rules:[29,30],inclusive:!1},INITIAL:{rules:[0,5,7,9,12,13,14,15,16,17,18,19,22,28,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47],inclusive:!0}}};function w(){this.yy={}}return _.lexer=x,w.prototype=_,_.Parser=w,new w}();e.parser=r,e.Parser=r.Parser,e.parse=function(){return r.parse.apply(r,arguments)},e.main=function(t){t[1]||(console.log("Usage: "+t[0]+" FILE"),process.exit(1));var r=n(6878).readFileSync(n(6470).normalize(t[1]),"utf8");return e.parser.parse(r)},n.c[n.s]===t&&e.main(process.argv.slice(1))},2553:(t,e,n)=>{t=n.nmd(t);var r=function(){var t=function(t,e,n,r){for(n=n||{},r=t.length;r--;n[t[r]]=e);return 
n},e=[1,4],n=[1,7],r=[1,5],i=[1,9],a=[1,6],o=[2,6],s=[1,16],c=[6,8,14,19,21,23,24,26,28,31,34,47,51],u=[8,14,19,21,23,24,26,28,31,34],l=[8,13,14,19,21,23,24,26,28,31,34],h=[1,26],f=[6,8,14,47,51],d=[8,14,51],p=[1,61],g=[1,62],y=[1,63],m=[8,14,32,38,39,51],v={trace:function(){},yy:{},symbols_:{error:2,start:3,eol:4,directive:5,GG:6,document:7,EOF:8,":":9,DIR:10,options:11,body:12,OPT:13,NL:14,line:15,statement:16,commitStatement:17,mergeStatement:18,acc_title:19,acc_title_value:20,acc_descr:21,acc_descr_value:22,acc_descr_multiline_value:23,section:24,branchStatement:25,CHECKOUT:26,ID:27,BRANCH:28,ORDER:29,NUM:30,MERGE:31,COMMIT_TAG:32,STR:33,COMMIT:34,commit_arg:35,COMMIT_TYPE:36,commitType:37,COMMIT_ID:38,COMMIT_MSG:39,NORMAL:40,REVERSE:41,HIGHLIGHT:42,openDirective:43,typeDirective:44,closeDirective:45,argDirective:46,open_directive:47,type_directive:48,arg_directive:49,close_directive:50,";":51,$accept:0,$end:1},terminals_:{2:"error",6:"GG",8:"EOF",9:":",10:"DIR",13:"OPT",14:"NL",19:"acc_title",20:"acc_title_value",21:"acc_descr",22:"acc_descr_value",23:"acc_descr_multiline_value",24:"section",26:"CHECKOUT",27:"ID",28:"BRANCH",29:"ORDER",30:"NUM",31:"MERGE",32:"COMMIT_TAG",33:"STR",34:"COMMIT",36:"COMMIT_TYPE",38:"COMMIT_ID",39:"COMMIT_MSG",40:"NORMAL",41:"REVERSE",42:"HIGHLIGHT",47:"open_directive",48:"type_directive",49:"arg_directive",50:"close_directive",51:";"},productions_:[0,[3,2],[3,2],[3,3],[3,4],[3,5],[7,0],[7,2],[11,2],[11,1],[12,0],[12,2],[15,2],[15,1],[16,1],[16,1],[16,2],[16,2],[16,1],[16,1],[16,1],[16,2],[25,2],[25,4],[18,2],[18,4],[17,2],[17,3],[17,3],[17,5],[17,5],[17,3],[17,5],[17,5],[17,5],[17,5],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,3],[17,5],[17,5],[17,5],[17,5],[17,5],[17,5],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[35,0],[35,1],[37,1],[37,1],[37,1],[5,3],[5,5],[43,1],[44,1],[46,1],[45,1],[4,1],[4,1],[4,1]],performAction:function(t,e,n,r,i,a,o){var s=a.length-1;switch(i){case 3:return a[s];case 4:return a[s-1];case 5:return r.setDirection(a[s-3]),a[s-1];case 7:r.setOptions(a[s-1]),this.$=a[s];break;case 8:a[s-1]+=a[s],this.$=a[s-1];break;case 10:this.$=[];break;case 11:a[s-1].push(a[s]),this.$=a[s-1];break;case 12:this.$=a[s-1];break;case 16:this.$=a[s].trim(),r.setTitle(this.$);break;case 17:case 18:this.$=a[s].trim(),r.setAccDescription(this.$);break;case 19:r.addSection(a[s].substr(8)),this.$=a[s].substr(8);break;case 21:r.checkout(a[s]);break;case 22:r.branch(a[s]);break;case 23:r.branch(a[s-2],a[s]);break;case 24:r.merge(a[s]);break;case 25:r.merge(a[s-2],a[s]);break;case 26:r.commit(a[s]);break;case 27:r.commit("","",r.commitType.NORMAL,a[s]);break;case 28:r.commit("","",a[s],"");break;case 29:r.commit("","",a[s],a[s-2]);break;case 30:r.commit("","",a[s-2],a[s]);break;case 31:r.commit("",a[s],r.commitType.NORMAL,"");break;case 32:r.commit("",a[s-2],r.commitType.NORMAL,a[s]);break;case 33:r.commit("",a[s],r.commitType.NORMAL,a[s-2]);break;case 34:r.commit("",a[s-2],a[s],"");break;case 35:r.commit("",a[s],a[s-2],"");break;case 36:r.commit("",a[s-4],a[s-2],a[s]);break;case 37:r.commit("",a[s-4],a[s],a[s-2]);break;case 38:r.commit("",a[s-2],a[s-4],a[s]);break;case 39:r.commit("",a[s],a[s-4],a[s-2]);break;case 40:r.commit("",a[s],a[s-2],a[s-4]);break;case 41:r.commit("",a[s-2],a[s],a[s-4]);break;case 
42:r.commit(a[s],"",r.commitType.NORMAL,"");break;case 43:r.commit(a[s],"",r.commitType.NORMAL,a[s-2]);break;case 44:r.commit(a[s-2],"",r.commitType.NORMAL,a[s]);break;case 45:r.commit(a[s-2],"",a[s],"");break;case 46:r.commit(a[s],"",a[s-2],"");break;case 47:r.commit(a[s],a[s-2],r.commitType.NORMAL,"");break;case 48:r.commit(a[s-2],a[s],r.commitType.NORMAL,"");break;case 49:r.commit(a[s-4],"",a[s-2],a[s]);break;case 50:r.commit(a[s-4],"",a[s],a[s-2]);break;case 51:r.commit(a[s-2],"",a[s-4],a[s]);break;case 52:r.commit(a[s],"",a[s-4],a[s-2]);break;case 53:r.commit(a[s],"",a[s-2],a[s-4]);break;case 54:r.commit(a[s-2],"",a[s],a[s-4]);break;case 55:r.commit(a[s-4],a[s],a[s-2],"");break;case 56:r.commit(a[s-4],a[s-2],a[s],"");break;case 57:r.commit(a[s-2],a[s],a[s-4],"");break;case 58:r.commit(a[s],a[s-2],a[s-4],"");break;case 59:r.commit(a[s],a[s-4],a[s-2],"");break;case 60:r.commit(a[s-2],a[s-4],a[s],"");break;case 61:r.commit(a[s-4],a[s],r.commitType.NORMAL,a[s-2]);break;case 62:r.commit(a[s-4],a[s-2],r.commitType.NORMAL,a[s]);break;case 63:r.commit(a[s-2],a[s],r.commitType.NORMAL,a[s-4]);break;case 64:r.commit(a[s],a[s-2],r.commitType.NORMAL,a[s-4]);break;case 65:r.commit(a[s],a[s-4],r.commitType.NORMAL,a[s-2]);break;case 66:r.commit(a[s-2],a[s-4],r.commitType.NORMAL,a[s]);break;case 67:r.commit(a[s-6],a[s-4],a[s-2],a[s]);break;case 68:r.commit(a[s-6],a[s-4],a[s],a[s-2]);break;case 69:r.commit(a[s-6],a[s-2],a[s-4],a[s]);break;case 70:r.commit(a[s-6],a[s],a[s-4],a[s-2]);break;case 71:r.commit(a[s-6],a[s-2],a[s],a[s-4]);break;case 72:r.commit(a[s-6],a[s],a[s-2],a[s-4]);break;case 73:r.commit(a[s-4],a[s-6],a[s-2],a[s]);break;case 74:r.commit(a[s-4],a[s-6],a[s],a[s-2]);break;case 75:r.commit(a[s-2],a[s-6],a[s-4],a[s]);break;case 76:r.commit(a[s],a[s-6],a[s-4],a[s-2]);break;case 77:r.commit(a[s-2],a[s-6],a[s],a[s-4]);break;case 78:r.commit(a[s],a[s-6],a[s-2],a[s-4]);break;case 79:r.commit(a[s],a[s-4],a[s-2],a[s-6]);break;case 80:r.commit(a[s-2],a[s-4],a[s],a[s-6]);break;case 81:r.commit(a[s],a[s-2],a[s-4],a[s-6]);break;case 82:r.commit(a[s-2],a[s],a[s-4],a[s-6]);break;case 83:r.commit(a[s-4],a[s-2],a[s],a[s-6]);break;case 84:r.commit(a[s-4],a[s],a[s-2],a[s-6]);break;case 85:r.commit(a[s-2],a[s-4],a[s-6],a[s]);break;case 86:r.commit(a[s],a[s-4],a[s-6],a[s-2]);break;case 87:r.commit(a[s-2],a[s],a[s-6],a[s-4]);break;case 88:r.commit(a[s],a[s-2],a[s-6],a[s-4]);break;case 89:r.commit(a[s-4],a[s-2],a[s-6],a[s]);break;case 90:r.commit(a[s-4],a[s],a[s-6],a[s-2]);break;case 91:this.$="";break;case 92:this.$=a[s];break;case 93:this.$=r.commitType.NORMAL;break;case 94:this.$=r.commitType.REVERSE;break;case 95:this.$=r.commitType.HIGHLIGHT;break;case 98:r.parseDirective("%%{","open_directive");break;case 99:r.parseDirective(a[s],"type_directive");break;case 100:a[s]=a[s].trim().replace(/'/g,'"'),r.parseDirective(a[s],"arg_directive");break;case 
101:r.parseDirective("}%%","close_directive","gitGraph")}},table:[{3:1,4:2,5:3,6:e,8:n,14:r,43:8,47:i,51:a},{1:[3]},{3:10,4:2,5:3,6:e,8:n,14:r,43:8,47:i,51:a},{3:11,4:2,5:3,6:e,8:n,14:r,43:8,47:i,51:a},{7:12,8:o,9:[1,13],10:[1,14],11:15,14:s},t(c,[2,102]),t(c,[2,103]),t(c,[2,104]),{44:17,48:[1,18]},{48:[2,98]},{1:[2,1]},{1:[2,2]},{8:[1,19]},{7:20,8:o,11:15,14:s},{9:[1,21]},t(u,[2,10],{12:22,13:[1,23]}),t(l,[2,9]),{9:[1,25],45:24,50:h},t([9,50],[2,99]),{1:[2,3]},{8:[1,27]},{7:28,8:o,11:15,14:s},{8:[2,7],14:[1,31],15:29,16:30,17:32,18:33,19:[1,34],21:[1,35],23:[1,36],24:[1,37],25:38,26:[1,39],28:[1,42],31:[1,41],34:[1,40]},t(l,[2,8]),t(f,[2,96]),{46:43,49:[1,44]},t(f,[2,101]),{1:[2,4]},{8:[1,45]},t(u,[2,11]),{4:46,8:n,14:r,51:a},t(u,[2,13]),t(d,[2,14]),t(d,[2,15]),{20:[1,47]},{22:[1,48]},t(d,[2,18]),t(d,[2,19]),t(d,[2,20]),{27:[1,49]},t(d,[2,91],{35:50,32:[1,51],33:[1,55],36:[1,52],38:[1,53],39:[1,54]}),{27:[1,56]},{27:[1,57]},{45:58,50:h},{50:[2,100]},{1:[2,5]},t(u,[2,12]),t(d,[2,16]),t(d,[2,17]),t(d,[2,21]),t(d,[2,26]),{33:[1,59]},{37:60,40:p,41:g,42:y},{33:[1,64]},{33:[1,65]},t(d,[2,92]),t(d,[2,24],{32:[1,66]}),t(d,[2,22],{29:[1,67]}),t(f,[2,97]),t(d,[2,27],{36:[1,68],38:[1,69],39:[1,70]}),t(d,[2,28],{32:[1,71],38:[1,72],39:[1,73]}),t(m,[2,93]),t(m,[2,94]),t(m,[2,95]),t(d,[2,31],{32:[1,74],36:[1,75],39:[1,76]}),t(d,[2,42],{32:[1,77],36:[1,78],38:[1,79]}),{33:[1,80]},{30:[1,81]},{37:82,40:p,41:g,42:y},{33:[1,83]},{33:[1,84]},{33:[1,85]},{33:[1,86]},{33:[1,87]},{33:[1,88]},{37:89,40:p,41:g,42:y},{33:[1,90]},{33:[1,91]},{37:92,40:p,41:g,42:y},{33:[1,93]},t(d,[2,25]),t(d,[2,23]),t(d,[2,29],{38:[1,94],39:[1,95]}),t(d,[2,33],{36:[1,96],39:[1,97]}),t(d,[2,43],{36:[1,98],38:[1,99]}),t(d,[2,30],{38:[1,100],39:[1,101]}),t(d,[2,35],{32:[1,102],39:[1,103]}),t(d,[2,46],{32:[1,104],38:[1,105]}),t(d,[2,32],{36:[1,106],39:[1,107]}),t(d,[2,34],{32:[1,108],39:[1,109]}),t(d,[2,47],{32:[1,111],36:[1,110]}),t(d,[2,44],{36:[1,112],38:[1,113]}),t(d,[2,45],{32:[1,114],38:[1,115]}),t(d,[2,48],{32:[1,117],36:[1,116]}),{33:[1,118]},{33:[1,119]},{37:120,40:p,41:g,42:y},{33:[1,121]},{37:122,40:p,41:g,42:y},{33:[1,123]},{33:[1,124]},{33:[1,125]},{33:[1,126]},{33:[1,127]},{33:[1,128]},{33:[1,129]},{37:130,40:p,41:g,42:y},{33:[1,131]},{33:[1,132]},{33:[1,133]},{37:134,40:p,41:g,42:y},{33:[1,135]},{37:136,40:p,41:g,42:y},{33:[1,137]},{33:[1,138]},{33:[1,139]},{37:140,40:p,41:g,42:y},{33:[1,141]},t(d,[2,40],{39:[1,142]}),t(d,[2,53],{38:[1,143]}),t(d,[2,41],{39:[1,144]}),t(d,[2,64],{36:[1,145]}),t(d,[2,54],{38:[1,146]}),t(d,[2,63],{36:[1,147]}),t(d,[2,39],{39:[1,148]}),t(d,[2,52],{38:[1,149]}),t(d,[2,38],{39:[1,150]}),t(d,[2,58],{32:[1,151]}),t(d,[2,51],{38:[1,152]}),t(d,[2,57],{32:[1,153]}),t(d,[2,37],{39:[1,154]}),t(d,[2,65],{36:[1,155]}),t(d,[2,36],{39:[1,156]}),t(d,[2,59],{32:[1,157]}),t(d,[2,60],{32:[1,158]}),t(d,[2,66],{36:[1,159]}),t(d,[2,50],{38:[1,160]}),t(d,[2,61],{36:[1,161]}),t(d,[2,49],{38:[1,162]}),t(d,[2,55],{32:[1,163]}),t(d,[2,56],{32:[1,164]}),t(d,[2,62],{36:[1,165]}),{33:[1,166]},{33:[1,167]},{33:[1,168]},{37:169,40:p,41:g,42:y},{33:[1,170]},{37:171,40:p,41:g,42:y},{33:[1,172]},{33:[1,173]},{33:[1,174]},{33:[1,175]},{33:[1,176]},{33:[1,177]},{33:[1,178]},{37:179,40:p,41:g,42:y},{33:[1,180]},{33:[1,181]},{33:[1,182]},{37:183,40:p,41:g,42:y},{33:[1,184]},{37:185,40:p,41:g,42:y},{33:[1,186]},{33:[1,187]},{33:[1,188]},{37:189,40:p,41:g,42:y},t(d,[2,81]),t(d,[2,82]),t(d,[2,79]),t(d,[2,80]),t(d,[2,84]),t(d,[2,83]),t(d,[2,88]),t(d,[2,87]),t(d,[2,86]),t(d,[2,85]),t(d,[2,90]),t(d,[2,89]),t(d,[2,78]),t(d,[2,77]),t(
d,[2,76]),t(d,[2,75]),t(d,[2,73]),t(d,[2,74]),t(d,[2,72]),t(d,[2,71]),t(d,[2,70]),t(d,[2,69]),t(d,[2,67]),t(d,[2,68])],defaultActions:{9:[2,98],10:[2,1],11:[2,2],19:[2,3],27:[2,4],44:[2,100],45:[2,5]},parseError:function(t,e){if(!e.recoverable){var n=new Error(t);throw n.hash=e,n}this.trace(t)},parse:function(t){var e=this,n=[0],r=[],i=[null],a=[],o=this.table,s="",c=0,u=0,l=0,h=2,f=1,d=a.slice.call(arguments,1),p=Object.create(this.lexer),g={yy:{}};for(var y in this.yy)Object.prototype.hasOwnProperty.call(this.yy,y)&&(g.yy[y]=this.yy[y]);p.setInput(t,g.yy),g.yy.lexer=p,g.yy.parser=this,void 0===p.yylloc&&(p.yylloc={});var m=p.yylloc;a.push(m);var v=p.options&&p.options.ranges;function b(){var t;return"number"!=typeof(t=r.pop()||p.lex()||f)&&(t instanceof Array&&(t=(r=t).pop()),t=e.symbols_[t]||t),t}"function"==typeof g.yy.parseError?this.parseError=g.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;for(var _,x,w,k,T,C,E,S,A,M={};;){if(w=n[n.length-1],this.defaultActions[w]?k=this.defaultActions[w]:(null==_&&(_=b()),k=o[w]&&o[w][_]),void 0===k||!k.length||!k[0]){var N="";for(C in A=[],o[w])this.terminals_[C]&&C>h&&A.push("'"+this.terminals_[C]+"'");N=p.showPosition?"Parse error on line "+(c+1)+":\n"+p.showPosition()+"\nExpecting "+A.join(", ")+", got '"+(this.terminals_[_]||_)+"'":"Parse error on line "+(c+1)+": Unexpected "+(_==f?"end of input":"'"+(this.terminals_[_]||_)+"'"),this.parseError(N,{text:p.match,token:this.terminals_[_]||_,line:p.yylineno,loc:m,expected:A})}if(k[0]instanceof Array&&k.length>1)throw new Error("Parse Error: multiple actions possible at state: "+w+", token: "+_);switch(k[0]){case 1:n.push(_),i.push(p.yytext),a.push(p.yylloc),n.push(k[1]),_=null,x?(_=x,x=null):(u=p.yyleng,s=p.yytext,c=p.yylineno,m=p.yylloc,l>0&&l--);break;case 2:if(E=this.productions_[k[1]][1],M.$=i[i.length-E],M._$={first_line:a[a.length-(E||1)].first_line,last_line:a[a.length-1].last_line,first_column:a[a.length-(E||1)].first_column,last_column:a[a.length-1].last_column},v&&(M._$.range=[a[a.length-(E||1)].range[0],a[a.length-1].range[1]]),void 0!==(T=this.performAction.apply(M,[s,u,c,g.yy,k[1],i,a].concat(d))))return T;E&&(n=n.slice(0,-1*E*2),i=i.slice(0,-1*E),a=a.slice(0,-1*E)),n.push(this.productions_[k[1]][0]),i.push(M.$),a.push(M._$),S=o[n[n.length-2]][n[n.length-1]],n.push(S);break;case 3:return!0}}return!0}},b={EOF:1,parseError:function(t,e){if(!this.yy.parser)throw new Error(t);this.yy.parser.parseError(t,e)},setInput:function(t,e){return this.yy=e||this.yy||{},this._input=t,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var t=this._input[0];return this.yytext+=t,this.yyleng++,this.offset++,this.match+=t,this.matched+=t,t.match(/(?:\r\n?|\n).*/g)?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),t},unput:function(t){var e=t.length,n=t.split(/(?:\r\n?|\n)/g);this._input=t+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-e),this.offset-=e;var r=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),n.length-1&&(this.yylineno-=n.length-1);var i=this.yylloc.range;return 
this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:n?(n.length===r.length?this.yylloc.first_column:0)+r[r.length-n.length].length-n[0].length:this.yylloc.first_column-e},this.options.ranges&&(this.yylloc.range=[i[0],i[0]+this.yyleng-e]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){return this.options.backtrack_lexer?(this._backtrack=!0,this):this.parseError("Lexical error on line "+(this.yylineno+1)+". You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},less:function(t){this.unput(this.match.slice(t))},pastInput:function(){var t=this.matched.substr(0,this.matched.length-this.match.length);return(t.length>20?"...":"")+t.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var t=this.match;return t.length<20&&(t+=this._input.substr(0,20-t.length)),(t.substr(0,20)+(t.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var t=this.pastInput(),e=new Array(t.length+1).join("-");return t+this.upcomingInput()+"\n"+e+"^"},test_match:function(t,e){var n,r,i;if(this.options.backtrack_lexer&&(i={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(i.yylloc.range=this.yylloc.range.slice(0))),(r=t[0].match(/(?:\r\n?|\n).*/g))&&(this.yylineno+=r.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:r?r[r.length-1].length-r[r.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+t[0].length},this.yytext+=t[0],this.match+=t[0],this.matches=t,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(t[0].length),this.matched+=t[0],n=this.performAction.call(this,this.yy,this,e,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),n)return n;if(this._backtrack){for(var a in i)this[a]=i[a];return!1}return!1},next:function(){if(this.done)return this.EOF;var t,e,n,r;this._input||(this.done=!0),this._more||(this.yytext="",this.match="");for(var i=this._currentRules(),a=0;a<i.length;a++)if((n=this._input.match(this.rules[i[a]]))&&(!e||n[0].length>e[0].length)){if(e=n,r=a,this.options.backtrack_lexer){if(!1!==(t=this.test_match(n,i[a])))return t;if(this._backtrack){e=!1;continue}return!1}if(!this.options.flex)break}return e?!1!==(t=this.test_match(e,i[r]))&&t:""===this._input?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+". 
Unrecognized text.\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){return this.next()||this.lex()},begin:function(t){this.conditionStack.push(t)},popState:function(){return this.conditionStack.length-1>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(t){return(t=this.conditionStack.length-1-Math.abs(t||0))>=0?this.conditionStack[t]:"INITIAL"},pushState:function(t){this.begin(t)},stateStackSize:function(){return this.conditionStack.length},options:{"case-insensitive":!0},performAction:function(t,e,n,r){switch(n){case 0:return this.begin("open_directive"),47;case 1:return this.begin("type_directive"),48;case 2:return this.popState(),this.begin("arg_directive"),9;case 3:return this.popState(),this.popState(),50;case 4:return 49;case 5:return this.begin("acc_title"),19;case 6:return this.popState(),"acc_title_value";case 7:return this.begin("acc_descr"),21;case 8:return this.popState(),"acc_descr_value";case 9:this.begin("acc_descr_multiline");break;case 10:case 34:case 37:this.popState();break;case 11:return"acc_descr_multiline_value";case 12:return 14;case 13:case 14:case 15:break;case 16:return 6;case 17:return 34;case 18:return 38;case 19:return 36;case 20:return 39;case 21:return 40;case 22:return 41;case 23:return 42;case 24:return 32;case 25:return 28;case 26:return 29;case 27:return 31;case 28:return 26;case 29:case 30:return 10;case 31:return 9;case 32:return"CARET";case 33:this.begin("options");break;case 35:return 13;case 36:this.begin("string");break;case 38:return 33;case 39:return 30;case 40:return 27;case 41:return 8}},rules:[/^(?:%%\{)/i,/^(?:((?:(?!\}%%)[^:.])*))/i,/^(?::)/i,/^(?:\}%%)/i,/^(?:((?:(?!\}%%).|\n)*))/i,/^(?:accTitle\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*\{\s*)/i,/^(?:[\}])/i,/^(?:[^\}]*)/i,/^(?:(\r?\n)+)/i,/^(?:\s+)/i,/^(?:#[^\n]*)/i,/^(?:%[^\n]*)/i,/^(?:gitGraph\b)/i,/^(?:commit\b)/i,/^(?:id:)/i,/^(?:type:)/i,/^(?:msg:)/i,/^(?:NORMAL\b)/i,/^(?:REVERSE\b)/i,/^(?:HIGHLIGHT\b)/i,/^(?:tag:)/i,/^(?:branch\b)/i,/^(?:order:)/i,/^(?:merge\b)/i,/^(?:checkout\b)/i,/^(?:LR\b)/i,/^(?:BT\b)/i,/^(?::)/i,/^(?:\^)/i,/^(?:options\r?\n)/i,/^(?:[ \r\n\t]+end\b)/i,/^(?:[\s\S]+(?=[ \r\n\t]+end))/i,/^(?:["])/i,/^(?:["])/i,/^(?:[^"]*)/i,/^(?:[0-9]+)/i,/^(?:[a-zA-Z][-_\./a-zA-Z0-9]*[-_a-zA-Z0-9])/i,/^(?:$)/i],conditions:{acc_descr_multiline:{rules:[10,11],inclusive:!1},acc_descr:{rules:[8],inclusive:!1},acc_title:{rules:[6],inclusive:!1},close_directive:{rules:[],inclusive:!1},arg_directive:{rules:[3,4],inclusive:!1},type_directive:{rules:[2,3],inclusive:!1},open_directive:{rules:[1],inclusive:!1},options:{rules:[34,35],inclusive:!1},string:{rules:[37,38],inclusive:!1},INITIAL:{rules:[0,5,7,9,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,36,39,40,41],inclusive:!0}}};function _(){this.yy={}}return v.lexer=b,_.prototype=v,v.Parser=_,new _}();e.parser=r,e.Parser=r.Parser,e.parse=function(){return r.parse.apply(r,arguments)},e.main=function(t){t[1]||(console.log("Usage: "+t[0]+" FILE"),process.exit(1));var r=n(8183).readFileSync(n(6470).normalize(t[1]),"utf8");return e.parser.parse(r)},n.c[n.s]===t&&e.main(process.argv.slice(1))},6765:(t,e,n)=>{t=n.nmd(t);var r=function(){var t=function(t,e,n,r){for(n=n||{},r=t.length;r--;n[t[r]]=e);return 
n},e=[6,9,10],n={trace:function(){},yy:{},symbols_:{error:2,start:3,info:4,document:5,EOF:6,line:7,statement:8,NL:9,showInfo:10,$accept:0,$end:1},terminals_:{2:"error",4:"info",6:"EOF",9:"NL",10:"showInfo"},productions_:[0,[3,3],[5,0],[5,2],[7,1],[7,1],[8,1]],performAction:function(t,e,n,r,i,a,o){switch(a.length,i){case 1:return r;case 4:break;case 6:r.setInfo(!0)}},table:[{3:1,4:[1,2]},{1:[3]},t(e,[2,2],{5:3}),{6:[1,4],7:5,8:6,9:[1,7],10:[1,8]},{1:[2,1]},t(e,[2,3]),t(e,[2,4]),t(e,[2,5]),t(e,[2,6])],defaultActions:{4:[2,1]},parseError:function(t,e){if(!e.recoverable){var n=new Error(t);throw n.hash=e,n}this.trace(t)},parse:function(t){var e=this,n=[0],r=[],i=[null],a=[],o=this.table,s="",c=0,u=0,l=0,h=2,f=1,d=a.slice.call(arguments,1),p=Object.create(this.lexer),g={yy:{}};for(var y in this.yy)Object.prototype.hasOwnProperty.call(this.yy,y)&&(g.yy[y]=this.yy[y]);p.setInput(t,g.yy),g.yy.lexer=p,g.yy.parser=this,void 0===p.yylloc&&(p.yylloc={});var m=p.yylloc;a.push(m);var v=p.options&&p.options.ranges;function b(){var t;return"number"!=typeof(t=r.pop()||p.lex()||f)&&(t instanceof Array&&(t=(r=t).pop()),t=e.symbols_[t]||t),t}"function"==typeof g.yy.parseError?this.parseError=g.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;for(var _,x,w,k,T,C,E,S,A,M={};;){if(w=n[n.length-1],this.defaultActions[w]?k=this.defaultActions[w]:(null==_&&(_=b()),k=o[w]&&o[w][_]),void 0===k||!k.length||!k[0]){var N="";for(C in A=[],o[w])this.terminals_[C]&&C>h&&A.push("'"+this.terminals_[C]+"'");N=p.showPosition?"Parse error on line "+(c+1)+":\n"+p.showPosition()+"\nExpecting "+A.join(", ")+", got '"+(this.terminals_[_]||_)+"'":"Parse error on line "+(c+1)+": Unexpected "+(_==f?"end of input":"'"+(this.terminals_[_]||_)+"'"),this.parseError(N,{text:p.match,token:this.terminals_[_]||_,line:p.yylineno,loc:m,expected:A})}if(k[0]instanceof Array&&k.length>1)throw new Error("Parse Error: multiple actions possible at state: "+w+", token: "+_);switch(k[0]){case 1:n.push(_),i.push(p.yytext),a.push(p.yylloc),n.push(k[1]),_=null,x?(_=x,x=null):(u=p.yyleng,s=p.yytext,c=p.yylineno,m=p.yylloc,l>0&&l--);break;case 2:if(E=this.productions_[k[1]][1],M.$=i[i.length-E],M._$={first_line:a[a.length-(E||1)].first_line,last_line:a[a.length-1].last_line,first_column:a[a.length-(E||1)].first_column,last_column:a[a.length-1].last_column},v&&(M._$.range=[a[a.length-(E||1)].range[0],a[a.length-1].range[1]]),void 0!==(T=this.performAction.apply(M,[s,u,c,g.yy,k[1],i,a].concat(d))))return T;E&&(n=n.slice(0,-1*E*2),i=i.slice(0,-1*E),a=a.slice(0,-1*E)),n.push(this.productions_[k[1]][0]),i.push(M.$),a.push(M._$),S=o[n[n.length-2]][n[n.length-1]],n.push(S);break;case 3:return!0}}return!0}},r={EOF:1,parseError:function(t,e){if(!this.yy.parser)throw new Error(t);this.yy.parser.parseError(t,e)},setInput:function(t,e){return this.yy=e||this.yy||{},this._input=t,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var t=this._input[0];return this.yytext+=t,this.yyleng++,this.offset++,this.match+=t,this.matched+=t,t.match(/(?:\r\n?|\n).*/g)?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),t},unput:function(t){var 
e=t.length,n=t.split(/(?:\r\n?|\n)/g);this._input=t+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-e),this.offset-=e;var r=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),n.length-1&&(this.yylineno-=n.length-1);var i=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:n?(n.length===r.length?this.yylloc.first_column:0)+r[r.length-n.length].length-n[0].length:this.yylloc.first_column-e},this.options.ranges&&(this.yylloc.range=[i[0],i[0]+this.yyleng-e]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){return this.options.backtrack_lexer?(this._backtrack=!0,this):this.parseError("Lexical error on line "+(this.yylineno+1)+". You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},less:function(t){this.unput(this.match.slice(t))},pastInput:function(){var t=this.matched.substr(0,this.matched.length-this.match.length);return(t.length>20?"...":"")+t.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var t=this.match;return t.length<20&&(t+=this._input.substr(0,20-t.length)),(t.substr(0,20)+(t.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var t=this.pastInput(),e=new Array(t.length+1).join("-");return t+this.upcomingInput()+"\n"+e+"^"},test_match:function(t,e){var n,r,i;if(this.options.backtrack_lexer&&(i={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(i.yylloc.range=this.yylloc.range.slice(0))),(r=t[0].match(/(?:\r\n?|\n).*/g))&&(this.yylineno+=r.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:r?r[r.length-1].length-r[r.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+t[0].length},this.yytext+=t[0],this.match+=t[0],this.matches=t,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(t[0].length),this.matched+=t[0],n=this.performAction.call(this,this.yy,this,e,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),n)return n;if(this._backtrack){for(var a in i)this[a]=i[a];return!1}return!1},next:function(){if(this.done)return this.EOF;var t,e,n,r;this._input||(this.done=!0),this._more||(this.yytext="",this.match="");for(var i=this._currentRules(),a=0;a<i.length;a++)if((n=this._input.match(this.rules[i[a]]))&&(!e||n[0].length>e[0].length)){if(e=n,r=a,this.options.backtrack_lexer){if(!1!==(t=this.test_match(n,i[a])))return t;if(this._backtrack){e=!1;continue}return!1}if(!this.options.flex)break}return e?!1!==(t=this.test_match(e,i[r]))&&t:""===this._input?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+". 
Unrecognized text.\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){return this.next()||this.lex()},begin:function(t){this.conditionStack.push(t)},popState:function(){return this.conditionStack.length-1>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(t){return(t=this.conditionStack.length-1-Math.abs(t||0))>=0?this.conditionStack[t]:"INITIAL"},pushState:function(t){this.begin(t)},stateStackSize:function(){return this.conditionStack.length},options:{"case-insensitive":!0},performAction:function(t,e,n,r){switch(n){case 0:return 4;case 1:return 9;case 2:return"space";case 3:return 10;case 4:return 6;case 5:return"TXT"}},rules:[/^(?:info\b)/i,/^(?:[\s\n\r]+)/i,/^(?:[\s]+)/i,/^(?:showInfo\b)/i,/^(?:$)/i,/^(?:.)/i],conditions:{INITIAL:{rules:[0,1,2,3,4,5],inclusive:!0}}};function i(){this.yy={}}return n.lexer=r,i.prototype=n,n.Parser=i,new i}();e.parser=r,e.Parser=r.Parser,e.parse=function(){return r.parse.apply(r,arguments)},e.main=function(t){t[1]||(console.log("Usage: "+t[0]+" FILE"),process.exit(1));var r=n(1428).readFileSync(n(6470).normalize(t[1]),"utf8");return e.parser.parse(r)},n.c[n.s]===t&&e.main(process.argv.slice(1))},7062:(t,e,n)=>{t=n.nmd(t);var r=function(){var t=function(t,e,n,r){for(n=n||{},r=t.length;r--;n[t[r]]=e);return n},e=[1,4],n=[1,5],r=[1,6],i=[1,7],a=[1,9],o=[1,11,13,15,17,19,20,26,27,28,29],s=[2,5],c=[1,6,11,13,15,17,19,20,26,27,28,29],u=[26,27,28],l=[2,8],h=[1,18],f=[1,19],d=[1,20],p=[1,21],g=[1,22],y=[1,23],m=[1,28],v=[6,26,27,28,29],b={trace:function(){},yy:{},symbols_:{error:2,start:3,eol:4,directive:5,PIE:6,document:7,showData:8,line:9,statement:10,txt:11,value:12,title:13,title_value:14,acc_title:15,acc_title_value:16,acc_descr:17,acc_descr_value:18,acc_descr_multiline_value:19,section:20,openDirective:21,typeDirective:22,closeDirective:23,":":24,argDirective:25,NEWLINE:26,";":27,EOF:28,open_directive:29,type_directive:30,arg_directive:31,close_directive:32,$accept:0,$end:1},terminals_:{2:"error",6:"PIE",8:"showData",11:"txt",12:"value",13:"title",14:"title_value",15:"acc_title",16:"acc_title_value",17:"acc_descr",18:"acc_descr_value",19:"acc_descr_multiline_value",20:"section",24:":",26:"NEWLINE",27:";",28:"EOF",29:"open_directive",30:"type_directive",31:"arg_directive",32:"close_directive"},productions_:[0,[3,2],[3,2],[3,2],[3,3],[7,0],[7,2],[9,2],[10,0],[10,2],[10,2],[10,2],[10,2],[10,1],[10,1],[10,1],[5,3],[5,5],[4,1],[4,1],[4,1],[21,1],[22,1],[25,1],[23,1]],performAction:function(t,e,n,r,i,a,o){var s=a.length-1;switch(i){case 4:r.setShowData(!0);break;case 7:this.$=a[s-1];break;case 9:r.addSection(a[s-1],r.cleanupValue(a[s]));break;case 10:this.$=a[s].trim(),r.setPieTitle(this.$);break;case 11:this.$=a[s].trim(),r.setTitle(this.$);break;case 12:case 13:this.$=a[s].trim(),r.setAccDescription(this.$);break;case 14:r.addSection(a[s].substr(8)),this.$=a[s].substr(8);break;case 21:r.parseDirective("%%{","open_directive");break;case 22:r.parseDirective(a[s],"type_directive");break;case 23:a[s]=a[s].trim().replace(/'/g,'"'),r.parseDirective(a[s],"arg_directive");break;case 
24:r.parseDirective("}%%","close_directive","pie")}},table:[{3:1,4:2,5:3,6:e,21:8,26:n,27:r,28:i,29:a},{1:[3]},{3:10,4:2,5:3,6:e,21:8,26:n,27:r,28:i,29:a},{3:11,4:2,5:3,6:e,21:8,26:n,27:r,28:i,29:a},t(o,s,{7:12,8:[1,13]}),t(c,[2,18]),t(c,[2,19]),t(c,[2,20]),{22:14,30:[1,15]},{30:[2,21]},{1:[2,1]},{1:[2,2]},t(u,l,{21:8,9:16,10:17,5:24,1:[2,3],11:h,13:f,15:d,17:p,19:g,20:y,29:a}),t(o,s,{7:25}),{23:26,24:[1,27],32:m},t([24,32],[2,22]),t(o,[2,6]),{4:29,26:n,27:r,28:i},{12:[1,30]},{14:[1,31]},{16:[1,32]},{18:[1,33]},t(u,[2,13]),t(u,[2,14]),t(u,[2,15]),t(u,l,{21:8,9:16,10:17,5:24,1:[2,4],11:h,13:f,15:d,17:p,19:g,20:y,29:a}),t(v,[2,16]),{25:34,31:[1,35]},t(v,[2,24]),t(o,[2,7]),t(u,[2,9]),t(u,[2,10]),t(u,[2,11]),t(u,[2,12]),{23:36,32:m},{32:[2,23]},t(v,[2,17])],defaultActions:{9:[2,21],10:[2,1],11:[2,2],35:[2,23]},parseError:function(t,e){if(!e.recoverable){var n=new Error(t);throw n.hash=e,n}this.trace(t)},parse:function(t){var e=this,n=[0],r=[],i=[null],a=[],o=this.table,s="",c=0,u=0,l=0,h=2,f=1,d=a.slice.call(arguments,1),p=Object.create(this.lexer),g={yy:{}};for(var y in this.yy)Object.prototype.hasOwnProperty.call(this.yy,y)&&(g.yy[y]=this.yy[y]);p.setInput(t,g.yy),g.yy.lexer=p,g.yy.parser=this,void 0===p.yylloc&&(p.yylloc={});var m=p.yylloc;a.push(m);var v=p.options&&p.options.ranges;function b(){var t;return"number"!=typeof(t=r.pop()||p.lex()||f)&&(t instanceof Array&&(t=(r=t).pop()),t=e.symbols_[t]||t),t}"function"==typeof g.yy.parseError?this.parseError=g.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;for(var _,x,w,k,T,C,E,S,A,M={};;){if(w=n[n.length-1],this.defaultActions[w]?k=this.defaultActions[w]:(null==_&&(_=b()),k=o[w]&&o[w][_]),void 0===k||!k.length||!k[0]){var N="";for(C in A=[],o[w])this.terminals_[C]&&C>h&&A.push("'"+this.terminals_[C]+"'");N=p.showPosition?"Parse error on line "+(c+1)+":\n"+p.showPosition()+"\nExpecting "+A.join(", ")+", got '"+(this.terminals_[_]||_)+"'":"Parse error on line "+(c+1)+": Unexpected "+(_==f?"end of input":"'"+(this.terminals_[_]||_)+"'"),this.parseError(N,{text:p.match,token:this.terminals_[_]||_,line:p.yylineno,loc:m,expected:A})}if(k[0]instanceof Array&&k.length>1)throw new Error("Parse Error: multiple actions possible at state: "+w+", token: "+_);switch(k[0]){case 1:n.push(_),i.push(p.yytext),a.push(p.yylloc),n.push(k[1]),_=null,x?(_=x,x=null):(u=p.yyleng,s=p.yytext,c=p.yylineno,m=p.yylloc,l>0&&l--);break;case 2:if(E=this.productions_[k[1]][1],M.$=i[i.length-E],M._$={first_line:a[a.length-(E||1)].first_line,last_line:a[a.length-1].last_line,first_column:a[a.length-(E||1)].first_column,last_column:a[a.length-1].last_column},v&&(M._$.range=[a[a.length-(E||1)].range[0],a[a.length-1].range[1]]),void 0!==(T=this.performAction.apply(M,[s,u,c,g.yy,k[1],i,a].concat(d))))return T;E&&(n=n.slice(0,-1*E*2),i=i.slice(0,-1*E),a=a.slice(0,-1*E)),n.push(this.productions_[k[1]][0]),i.push(M.$),a.push(M._$),S=o[n[n.length-2]][n[n.length-1]],n.push(S);break;case 3:return!0}}return!0}},_={EOF:1,parseError:function(t,e){if(!this.yy.parser)throw new Error(t);this.yy.parser.parseError(t,e)},setInput:function(t,e){return this.yy=e||this.yy||{},this._input=t,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var t=this._input[0];return 
this.yytext+=t,this.yyleng++,this.offset++,this.match+=t,this.matched+=t,t.match(/(?:\r\n?|\n).*/g)?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),t},unput:function(t){var e=t.length,n=t.split(/(?:\r\n?|\n)/g);this._input=t+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-e),this.offset-=e;var r=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),n.length-1&&(this.yylineno-=n.length-1);var i=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:n?(n.length===r.length?this.yylloc.first_column:0)+r[r.length-n.length].length-n[0].length:this.yylloc.first_column-e},this.options.ranges&&(this.yylloc.range=[i[0],i[0]+this.yyleng-e]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){return this.options.backtrack_lexer?(this._backtrack=!0,this):this.parseError("Lexical error on line "+(this.yylineno+1)+". You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},less:function(t){this.unput(this.match.slice(t))},pastInput:function(){var t=this.matched.substr(0,this.matched.length-this.match.length);return(t.length>20?"...":"")+t.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var t=this.match;return t.length<20&&(t+=this._input.substr(0,20-t.length)),(t.substr(0,20)+(t.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var t=this.pastInput(),e=new Array(t.length+1).join("-");return t+this.upcomingInput()+"\n"+e+"^"},test_match:function(t,e){var n,r,i;if(this.options.backtrack_lexer&&(i={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(i.yylloc.range=this.yylloc.range.slice(0))),(r=t[0].match(/(?:\r\n?|\n).*/g))&&(this.yylineno+=r.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:r?r[r.length-1].length-r[r.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+t[0].length},this.yytext+=t[0],this.match+=t[0],this.matches=t,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(t[0].length),this.matched+=t[0],n=this.performAction.call(this,this.yy,this,e,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),n)return n;if(this._backtrack){for(var a in i)this[a]=i[a];return!1}return!1},next:function(){if(this.done)return this.EOF;var t,e,n,r;this._input||(this.done=!0),this._more||(this.yytext="",this.match="");for(var i=this._currentRules(),a=0;a<i.length;a++)if((n=this._input.match(this.rules[i[a]]))&&(!e||n[0].length>e[0].length)){if(e=n,r=a,this.options.backtrack_lexer){if(!1!==(t=this.test_match(n,i[a])))return t;if(this._backtrack){e=!1;continue}return!1}if(!this.options.flex)break}return e?!1!==(t=this.test_match(e,i[r]))&&t:""===this._input?this.EOF:this.parseError("Lexical error on line 
"+(this.yylineno+1)+". Unrecognized text.\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){return this.next()||this.lex()},begin:function(t){this.conditionStack.push(t)},popState:function(){return this.conditionStack.length-1>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(t){return(t=this.conditionStack.length-1-Math.abs(t||0))>=0?this.conditionStack[t]:"INITIAL"},pushState:function(t){this.begin(t)},stateStackSize:function(){return this.conditionStack.length},options:{"case-insensitive":!0},performAction:function(t,e,n,r){switch(n){case 0:return this.begin("open_directive"),29;case 1:return this.begin("type_directive"),30;case 2:return this.popState(),this.begin("arg_directive"),24;case 3:return this.popState(),this.popState(),32;case 4:return 31;case 5:case 6:case 8:case 9:break;case 7:return 26;case 10:return this.begin("title"),13;case 11:return this.popState(),"title_value";case 12:return this.begin("acc_title"),15;case 13:return this.popState(),"acc_title_value";case 14:return this.begin("acc_descr"),17;case 15:return this.popState(),"acc_descr_value";case 16:this.begin("acc_descr_multiline");break;case 17:case 20:this.popState();break;case 18:return"acc_descr_multiline_value";case 19:this.begin("string");break;case 21:return"txt";case 22:return 6;case 23:return 8;case 24:return"value";case 25:return 28}},rules:[/^(?:%%\{)/i,/^(?:((?:(?!\}%%)[^:.])*))/i,/^(?::)/i,/^(?:\}%%)/i,/^(?:((?:(?!\}%%).|\n)*))/i,/^(?:%%(?!\{)[^\n]*)/i,/^(?:[^\}]%%[^\n]*)/i,/^(?:[\n\r]+)/i,/^(?:%%[^\n]*)/i,/^(?:[\s]+)/i,/^(?:title\b)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accTitle\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*\{\s*)/i,/^(?:[\}])/i,/^(?:[^\}]*)/i,/^(?:["])/i,/^(?:["])/i,/^(?:[^"]*)/i,/^(?:pie\b)/i,/^(?:showData\b)/i,/^(?::[\s]*[\d]+(?:\.[\d]+)?)/i,/^(?:$)/i],conditions:{acc_descr_multiline:{rules:[17,18],inclusive:!1},acc_descr:{rules:[15],inclusive:!1},acc_title:{rules:[13],inclusive:!1},close_directive:{rules:[],inclusive:!1},arg_directive:{rules:[3,4],inclusive:!1},type_directive:{rules:[2,3],inclusive:!1},open_directive:{rules:[1],inclusive:!1},title:{rules:[11],inclusive:!1},string:{rules:[20,21],inclusive:!1},INITIAL:{rules:[0,5,6,7,8,9,10,12,14,16,19,22,23,24,25],inclusive:!0}}};function x(){this.yy={}}return b.lexer=_,x.prototype=b,b.Parser=x,new x}();e.parser=r,e.Parser=r.Parser,e.parse=function(){return r.parse.apply(r,arguments)},e.main=function(t){t[1]||(console.log("Usage: "+t[0]+" FILE"),process.exit(1));var r=n(4551).readFileSync(n(6470).normalize(t[1]),"utf8");return e.parser.parse(r)},n.c[n.s]===t&&e.main(process.argv.slice(1))},3176:(t,e,n)=>{t=n.nmd(t);var r=function(){var t=function(t,e,n,r){for(n=n||{},r=t.length;r--;n[t[r]]=e);return 
n},e=[1,3],n=[1,5],r=[1,6],i=[1,7],a=[1,8],o=[5,6,8,14,16,18,19,40,41,42,43,44,45,53,71,72],s=[1,22],c=[2,13],u=[1,26],l=[1,27],h=[1,28],f=[1,29],d=[1,30],p=[1,31],g=[1,24],y=[1,32],m=[1,33],v=[1,36],b=[71,72],_=[5,8,14,16,18,19,40,41,42,43,44,45,53,60,62,71,72],x=[1,56],w=[1,57],k=[1,58],T=[1,59],C=[1,60],E=[1,61],S=[1,62],A=[62,63],M=[1,74],N=[1,70],D=[1,71],B=[1,72],L=[1,73],O=[1,75],I=[1,79],R=[1,80],F=[1,77],P=[1,78],Y=[5,8,14,16,18,19,40,41,42,43,44,45,53,71,72],j={trace:function(){},yy:{},symbols_:{error:2,start:3,directive:4,NEWLINE:5,RD:6,diagram:7,EOF:8,openDirective:9,typeDirective:10,closeDirective:11,":":12,argDirective:13,acc_title:14,acc_title_value:15,acc_descr:16,acc_descr_value:17,acc_descr_multiline_value:18,open_directive:19,type_directive:20,arg_directive:21,close_directive:22,requirementDef:23,elementDef:24,relationshipDef:25,requirementType:26,requirementName:27,STRUCT_START:28,requirementBody:29,ID:30,COLONSEP:31,id:32,TEXT:33,text:34,RISK:35,riskLevel:36,VERIFYMTHD:37,verifyType:38,STRUCT_STOP:39,REQUIREMENT:40,FUNCTIONAL_REQUIREMENT:41,INTERFACE_REQUIREMENT:42,PERFORMANCE_REQUIREMENT:43,PHYSICAL_REQUIREMENT:44,DESIGN_CONSTRAINT:45,LOW_RISK:46,MED_RISK:47,HIGH_RISK:48,VERIFY_ANALYSIS:49,VERIFY_DEMONSTRATION:50,VERIFY_INSPECTION:51,VERIFY_TEST:52,ELEMENT:53,elementName:54,elementBody:55,TYPE:56,type:57,DOCREF:58,ref:59,END_ARROW_L:60,relationship:61,LINE:62,END_ARROW_R:63,CONTAINS:64,COPIES:65,DERIVES:66,SATISFIES:67,VERIFIES:68,REFINES:69,TRACES:70,unqString:71,qString:72,$accept:0,$end:1},terminals_:{2:"error",5:"NEWLINE",6:"RD",8:"EOF",12:":",14:"acc_title",15:"acc_title_value",16:"acc_descr",17:"acc_descr_value",18:"acc_descr_multiline_value",19:"open_directive",20:"type_directive",21:"arg_directive",22:"close_directive",28:"STRUCT_START",30:"ID",31:"COLONSEP",33:"TEXT",35:"RISK",37:"VERIFYMTHD",39:"STRUCT_STOP",40:"REQUIREMENT",41:"FUNCTIONAL_REQUIREMENT",42:"INTERFACE_REQUIREMENT",43:"PERFORMANCE_REQUIREMENT",44:"PHYSICAL_REQUIREMENT",45:"DESIGN_CONSTRAINT",46:"LOW_RISK",47:"MED_RISK",48:"HIGH_RISK",49:"VERIFY_ANALYSIS",50:"VERIFY_DEMONSTRATION",51:"VERIFY_INSPECTION",52:"VERIFY_TEST",53:"ELEMENT",56:"TYPE",58:"DOCREF",60:"END_ARROW_L",62:"LINE",63:"END_ARROW_R",64:"CONTAINS",65:"COPIES",66:"DERIVES",67:"SATISFIES",68:"VERIFIES",69:"REFINES",70:"TRACES",71:"unqString",72:"qString"},productions_:[0,[3,3],[3,2],[3,4],[4,3],[4,5],[4,2],[4,2],[4,1],[9,1],[10,1],[13,1],[11,1],[7,0],[7,2],[7,2],[7,2],[7,2],[7,2],[23,5],[29,5],[29,5],[29,5],[29,5],[29,2],[29,1],[26,1],[26,1],[26,1],[26,1],[26,1],[26,1],[36,1],[36,1],[36,1],[38,1],[38,1],[38,1],[38,1],[24,5],[55,5],[55,5],[55,2],[55,1],[25,5],[25,5],[61,1],[61,1],[61,1],[61,1],[61,1],[61,1],[61,1],[27,1],[27,1],[32,1],[32,1],[34,1],[34,1],[54,1],[54,1],[57,1],[57,1],[59,1],[59,1]],performAction:function(t,e,n,r,i,a,o){var s=a.length-1;switch(i){case 6:this.$=a[s].trim(),r.setTitle(this.$);break;case 7:case 8:this.$=a[s].trim(),r.setAccDescription(this.$);break;case 9:r.parseDirective("%%{","open_directive");break;case 10:r.parseDirective(a[s],"type_directive");break;case 11:a[s]=a[s].trim().replace(/'/g,'"'),r.parseDirective(a[s],"arg_directive");break;case 12:r.parseDirective("}%%","close_directive","pie");break;case 13:this.$=[];break;case 19:r.addRequirement(a[s-3],a[s-4]);break;case 20:r.setNewReqId(a[s-2]);break;case 21:r.setNewReqText(a[s-2]);break;case 22:r.setNewReqRisk(a[s-2]);break;case 23:r.setNewReqVerifyMethod(a[s-2]);break;case 26:this.$=r.RequirementType.REQUIREMENT;break;case 
27:this.$=r.RequirementType.FUNCTIONAL_REQUIREMENT;break;case 28:this.$=r.RequirementType.INTERFACE_REQUIREMENT;break;case 29:this.$=r.RequirementType.PERFORMANCE_REQUIREMENT;break;case 30:this.$=r.RequirementType.PHYSICAL_REQUIREMENT;break;case 31:this.$=r.RequirementType.DESIGN_CONSTRAINT;break;case 32:this.$=r.RiskLevel.LOW_RISK;break;case 33:this.$=r.RiskLevel.MED_RISK;break;case 34:this.$=r.RiskLevel.HIGH_RISK;break;case 35:this.$=r.VerifyType.VERIFY_ANALYSIS;break;case 36:this.$=r.VerifyType.VERIFY_DEMONSTRATION;break;case 37:this.$=r.VerifyType.VERIFY_INSPECTION;break;case 38:this.$=r.VerifyType.VERIFY_TEST;break;case 39:r.addElement(a[s-3]);break;case 40:r.setNewElementType(a[s-2]);break;case 41:r.setNewElementDocRef(a[s-2]);break;case 44:r.addRelationship(a[s-2],a[s],a[s-4]);break;case 45:r.addRelationship(a[s-2],a[s-4],a[s]);break;case 46:this.$=r.Relationships.CONTAINS;break;case 47:this.$=r.Relationships.COPIES;break;case 48:this.$=r.Relationships.DERIVES;break;case 49:this.$=r.Relationships.SATISFIES;break;case 50:this.$=r.Relationships.VERIFIES;break;case 51:this.$=r.Relationships.REFINES;break;case 52:this.$=r.Relationships.TRACES}},table:[{3:1,4:2,6:e,9:4,14:n,16:r,18:i,19:a},{1:[3]},{3:10,4:2,5:[1,9],6:e,9:4,14:n,16:r,18:i,19:a},{5:[1,11]},{10:12,20:[1,13]},{15:[1,14]},{17:[1,15]},t(o,[2,8]),{20:[2,9]},{3:16,4:2,6:e,9:4,14:n,16:r,18:i,19:a},{1:[2,2]},{4:21,5:s,7:17,8:c,9:4,14:n,16:r,18:i,19:a,23:18,24:19,25:20,26:23,32:25,40:u,41:l,42:h,43:f,44:d,45:p,53:g,71:y,72:m},{11:34,12:[1,35],22:v},t([12,22],[2,10]),t(o,[2,6]),t(o,[2,7]),{1:[2,1]},{8:[1,37]},{4:21,5:s,7:38,8:c,9:4,14:n,16:r,18:i,19:a,23:18,24:19,25:20,26:23,32:25,40:u,41:l,42:h,43:f,44:d,45:p,53:g,71:y,72:m},{4:21,5:s,7:39,8:c,9:4,14:n,16:r,18:i,19:a,23:18,24:19,25:20,26:23,32:25,40:u,41:l,42:h,43:f,44:d,45:p,53:g,71:y,72:m},{4:21,5:s,7:40,8:c,9:4,14:n,16:r,18:i,19:a,23:18,24:19,25:20,26:23,32:25,40:u,41:l,42:h,43:f,44:d,45:p,53:g,71:y,72:m},{4:21,5:s,7:41,8:c,9:4,14:n,16:r,18:i,19:a,23:18,24:19,25:20,26:23,32:25,40:u,41:l,42:h,43:f,44:d,45:p,53:g,71:y,72:m},{4:21,5:s,7:42,8:c,9:4,14:n,16:r,18:i,19:a,23:18,24:19,25:20,26:23,32:25,40:u,41:l,42:h,43:f,44:d,45:p,53:g,71:y,72:m},{27:43,71:[1,44],72:[1,45]},{54:46,71:[1,47],72:[1,48]},{60:[1,49],62:[1,50]},t(b,[2,26]),t(b,[2,27]),t(b,[2,28]),t(b,[2,29]),t(b,[2,30]),t(b,[2,31]),t(_,[2,55]),t(_,[2,56]),t(o,[2,4]),{13:51,21:[1,52]},t(o,[2,12]),{1:[2,3]},{8:[2,14]},{8:[2,15]},{8:[2,16]},{8:[2,17]},{8:[2,18]},{28:[1,53]},{28:[2,53]},{28:[2,54]},{28:[1,54]},{28:[2,59]},{28:[2,60]},{61:55,64:x,65:w,66:k,67:T,68:C,69:E,70:S},{61:63,64:x,65:w,66:k,67:T,68:C,69:E,70:S},{11:64,22:v},{22:[2,11]},{5:[1,65]},{5:[1,66]},{62:[1,67]},t(A,[2,46]),t(A,[2,47]),t(A,[2,48]),t(A,[2,49]),t(A,[2,50]),t(A,[2,51]),t(A,[2,52]),{63:[1,68]},t(o,[2,5]),{5:M,29:69,30:N,33:D,35:B,37:L,39:O},{5:I,39:R,55:76,56:F,58:P},{32:81,71:y,72:m},{32:82,71:y,72:m},t(Y,[2,19]),{31:[1,83]},{31:[1,84]},{31:[1,85]},{31:[1,86]},{5:M,29:87,30:N,33:D,35:B,37:L,39:O},t(Y,[2,25]),t(Y,[2,39]),{31:[1,88]},{31:[1,89]},{5:I,39:R,55:90,56:F,58:P},t(Y,[2,43]),t(Y,[2,44]),t(Y,[2,45]),{32:91,71:y,72:m},{34:92,71:[1,93],72:[1,94]},{36:95,46:[1,96],47:[1,97],48:[1,98]},{38:99,49:[1,100],50:[1,101],51:[1,102],52:[1,103]},t(Y,[2,24]),{57:104,71:[1,105],72:[1,106]},{59:107,71:[1,108],72:[1,109]},t(Y,[2,42]),{5:[1,110]},{5:[1,111]},{5:[2,57]},{5:[2,58]},{5:[1,112]},{5:[2,32]},{5:[2,33]},{5:[2,34]},{5:[1,113]},{5:[2,35]},{5:[2,36]},{5:[2,37]},{5:[2,38]},{5:[1,114]},{5:[2,61]},{5:[2,62]},{5:[1,115]},{5:[2,63]},{5:[2,64]},{5:M,29:116,30:N,3
3:D,35:B,37:L,39:O},{5:M,29:117,30:N,33:D,35:B,37:L,39:O},{5:M,29:118,30:N,33:D,35:B,37:L,39:O},{5:M,29:119,30:N,33:D,35:B,37:L,39:O},{5:I,39:R,55:120,56:F,58:P},{5:I,39:R,55:121,56:F,58:P},t(Y,[2,20]),t(Y,[2,21]),t(Y,[2,22]),t(Y,[2,23]),t(Y,[2,40]),t(Y,[2,41])],defaultActions:{8:[2,9],10:[2,2],16:[2,1],37:[2,3],38:[2,14],39:[2,15],40:[2,16],41:[2,17],42:[2,18],44:[2,53],45:[2,54],47:[2,59],48:[2,60],52:[2,11],93:[2,57],94:[2,58],96:[2,32],97:[2,33],98:[2,34],100:[2,35],101:[2,36],102:[2,37],103:[2,38],105:[2,61],106:[2,62],108:[2,63],109:[2,64]},parseError:function(t,e){if(!e.recoverable){var n=new Error(t);throw n.hash=e,n}this.trace(t)},parse:function(t){var e=this,n=[0],r=[],i=[null],a=[],o=this.table,s="",c=0,u=0,l=0,h=2,f=1,d=a.slice.call(arguments,1),p=Object.create(this.lexer),g={yy:{}};for(var y in this.yy)Object.prototype.hasOwnProperty.call(this.yy,y)&&(g.yy[y]=this.yy[y]);p.setInput(t,g.yy),g.yy.lexer=p,g.yy.parser=this,void 0===p.yylloc&&(p.yylloc={});var m=p.yylloc;a.push(m);var v=p.options&&p.options.ranges;function b(){var t;return"number"!=typeof(t=r.pop()||p.lex()||f)&&(t instanceof Array&&(t=(r=t).pop()),t=e.symbols_[t]||t),t}"function"==typeof g.yy.parseError?this.parseError=g.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;for(var _,x,w,k,T,C,E,S,A,M={};;){if(w=n[n.length-1],this.defaultActions[w]?k=this.defaultActions[w]:(null==_&&(_=b()),k=o[w]&&o[w][_]),void 0===k||!k.length||!k[0]){var N="";for(C in A=[],o[w])this.terminals_[C]&&C>h&&A.push("'"+this.terminals_[C]+"'");N=p.showPosition?"Parse error on line "+(c+1)+":\n"+p.showPosition()+"\nExpecting "+A.join(", ")+", got '"+(this.terminals_[_]||_)+"'":"Parse error on line "+(c+1)+": Unexpected "+(_==f?"end of input":"'"+(this.terminals_[_]||_)+"'"),this.parseError(N,{text:p.match,token:this.terminals_[_]||_,line:p.yylineno,loc:m,expected:A})}if(k[0]instanceof Array&&k.length>1)throw new Error("Parse Error: multiple actions possible at state: "+w+", token: "+_);switch(k[0]){case 1:n.push(_),i.push(p.yytext),a.push(p.yylloc),n.push(k[1]),_=null,x?(_=x,x=null):(u=p.yyleng,s=p.yytext,c=p.yylineno,m=p.yylloc,l>0&&l--);break;case 2:if(E=this.productions_[k[1]][1],M.$=i[i.length-E],M._$={first_line:a[a.length-(E||1)].first_line,last_line:a[a.length-1].last_line,first_column:a[a.length-(E||1)].first_column,last_column:a[a.length-1].last_column},v&&(M._$.range=[a[a.length-(E||1)].range[0],a[a.length-1].range[1]]),void 0!==(T=this.performAction.apply(M,[s,u,c,g.yy,k[1],i,a].concat(d))))return T;E&&(n=n.slice(0,-1*E*2),i=i.slice(0,-1*E),a=a.slice(0,-1*E)),n.push(this.productions_[k[1]][0]),i.push(M.$),a.push(M._$),S=o[n[n.length-2]][n[n.length-1]],n.push(S);break;case 3:return!0}}return!0}},U={EOF:1,parseError:function(t,e){if(!this.yy.parser)throw new Error(t);this.yy.parser.parseError(t,e)},setInput:function(t,e){return this.yy=e||this.yy||{},this._input=t,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var t=this._input[0];return this.yytext+=t,this.yyleng++,this.offset++,this.match+=t,this.matched+=t,t.match(/(?:\r\n?|\n).*/g)?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),t},unput:function(t){var 
e=t.length,n=t.split(/(?:\r\n?|\n)/g);this._input=t+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-e),this.offset-=e;var r=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),n.length-1&&(this.yylineno-=n.length-1);var i=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:n?(n.length===r.length?this.yylloc.first_column:0)+r[r.length-n.length].length-n[0].length:this.yylloc.first_column-e},this.options.ranges&&(this.yylloc.range=[i[0],i[0]+this.yyleng-e]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){return this.options.backtrack_lexer?(this._backtrack=!0,this):this.parseError("Lexical error on line "+(this.yylineno+1)+". You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},less:function(t){this.unput(this.match.slice(t))},pastInput:function(){var t=this.matched.substr(0,this.matched.length-this.match.length);return(t.length>20?"...":"")+t.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var t=this.match;return t.length<20&&(t+=this._input.substr(0,20-t.length)),(t.substr(0,20)+(t.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var t=this.pastInput(),e=new Array(t.length+1).join("-");return t+this.upcomingInput()+"\n"+e+"^"},test_match:function(t,e){var n,r,i;if(this.options.backtrack_lexer&&(i={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(i.yylloc.range=this.yylloc.range.slice(0))),(r=t[0].match(/(?:\r\n?|\n).*/g))&&(this.yylineno+=r.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:r?r[r.length-1].length-r[r.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+t[0].length},this.yytext+=t[0],this.match+=t[0],this.matches=t,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(t[0].length),this.matched+=t[0],n=this.performAction.call(this,this.yy,this,e,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),n)return n;if(this._backtrack){for(var a in i)this[a]=i[a];return!1}return!1},next:function(){if(this.done)return this.EOF;var t,e,n,r;this._input||(this.done=!0),this._more||(this.yytext="",this.match="");for(var i=this._currentRules(),a=0;ae[0].length)){if(e=n,r=a,this.options.backtrack_lexer){if(!1!==(t=this.test_match(n,i[a])))return t;if(this._backtrack){e=!1;continue}return!1}if(!this.options.flex)break}return e?!1!==(t=this.test_match(e,i[r]))&&t:""===this._input?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+". 
Unrecognized text.\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){return this.next()||this.lex()},begin:function(t){this.conditionStack.push(t)},popState:function(){return this.conditionStack.length-1>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(t){return(t=this.conditionStack.length-1-Math.abs(t||0))>=0?this.conditionStack[t]:"INITIAL"},pushState:function(t){this.begin(t)},stateStackSize:function(){return this.conditionStack.length},options:{"case-insensitive":!0},performAction:function(t,e,n,r){switch(n){case 0:return this.begin("open_directive"),19;case 1:return this.begin("type_directive"),20;case 2:return this.popState(),this.begin("arg_directive"),12;case 3:return this.popState(),this.popState(),22;case 4:return 21;case 5:return"title";case 6:return this.begin("acc_title"),14;case 7:return this.popState(),"acc_title_value";case 8:return this.begin("acc_descr"),16;case 9:return this.popState(),"acc_descr_value";case 10:this.begin("acc_descr_multiline");break;case 11:case 53:this.popState();break;case 12:return"acc_descr_multiline_value";case 13:return 5;case 14:case 15:case 16:break;case 17:return 8;case 18:return 6;case 19:return 28;case 20:return 39;case 21:return 31;case 22:return 30;case 23:return 33;case 24:return 35;case 25:return 37;case 26:return 40;case 27:return 41;case 28:return 42;case 29:return 43;case 30:return 44;case 31:return 45;case 32:return 46;case 33:return 47;case 34:return 48;case 35:return 49;case 36:return 50;case 37:return 51;case 38:return 52;case 39:return 53;case 40:return 64;case 41:return 65;case 42:return 66;case 43:return 67;case 44:return 68;case 45:return 69;case 46:return 70;case 47:return 56;case 48:return 58;case 49:return 60;case 50:return 63;case 51:return 62;case 52:this.begin("string");break;case 54:return"qString";case 55:return 
e.yytext=e.yytext.trim(),71}},rules:[/^(?:%%\{)/i,/^(?:((?:(?!\}%%)[^:.])*))/i,/^(?::)/i,/^(?:\}%%)/i,/^(?:((?:(?!\}%%).|\n)*))/i,/^(?:title\s[^#\n;]+)/i,/^(?:accTitle\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*\{\s*)/i,/^(?:[\}])/i,/^(?:[^\}]*)/i,/^(?:(\r?\n)+)/i,/^(?:\s+)/i,/^(?:#[^\n]*)/i,/^(?:%[^\n]*)/i,/^(?:$)/i,/^(?:requirementDiagram\b)/i,/^(?:\{)/i,/^(?:\})/i,/^(?::)/i,/^(?:id\b)/i,/^(?:text\b)/i,/^(?:risk\b)/i,/^(?:verifyMethod\b)/i,/^(?:requirement\b)/i,/^(?:functionalRequirement\b)/i,/^(?:interfaceRequirement\b)/i,/^(?:performanceRequirement\b)/i,/^(?:physicalRequirement\b)/i,/^(?:designConstraint\b)/i,/^(?:low\b)/i,/^(?:medium\b)/i,/^(?:high\b)/i,/^(?:analysis\b)/i,/^(?:demonstration\b)/i,/^(?:inspection\b)/i,/^(?:test\b)/i,/^(?:element\b)/i,/^(?:contains\b)/i,/^(?:copies\b)/i,/^(?:derives\b)/i,/^(?:satisfies\b)/i,/^(?:verifies\b)/i,/^(?:refines\b)/i,/^(?:traces\b)/i,/^(?:type\b)/i,/^(?:docref\b)/i,/^(?:<-)/i,/^(?:->)/i,/^(?:-)/i,/^(?:["])/i,/^(?:["])/i,/^(?:[^"]*)/i,/^(?:[\w][^\r\n\{\<\>\-\=]*)/i],conditions:{acc_descr_multiline:{rules:[11,12],inclusive:!1},acc_descr:{rules:[9],inclusive:!1},acc_title:{rules:[7],inclusive:!1},close_directive:{rules:[],inclusive:!1},arg_directive:{rules:[3,4],inclusive:!1},type_directive:{rules:[2,3],inclusive:!1},open_directive:{rules:[1],inclusive:!1},unqString:{rules:[],inclusive:!1},token:{rules:[],inclusive:!1},string:{rules:[53,54],inclusive:!1},INITIAL:{rules:[0,5,6,8,10,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55],inclusive:!0}}};function z(){this.yy={}}return j.lexer=U,z.prototype=j,j.Parser=z,new z}();e.parser=r,e.Parser=r.Parser,e.parse=function(){return r.parse.apply(r,arguments)},e.main=function(t){t[1]||(console.log("Usage: "+t[0]+" FILE"),process.exit(1));var r=n(8800).readFileSync(n(6470).normalize(t[1]),"utf8");return e.parser.parse(r)},n.c[n.s]===t&&e.main(process.argv.slice(1))},6876:(t,e,n)=>{t=n.nmd(t);var r=function(){var t=function(t,e,n,r){for(n=n||{},r=t.length;r--;n[t[r]]=e);return 
n},e=[1,2],n=[1,3],r=[1,5],i=[1,7],a=[2,5],o=[1,15],s=[1,17],c=[1,18],u=[1,19],l=[1,21],h=[1,22],f=[1,23],d=[1,29],p=[1,30],g=[1,31],y=[1,32],m=[1,33],v=[1,34],b=[1,35],_=[1,36],x=[1,37],w=[1,38],k=[1,41],T=[1,42],C=[1,43],E=[1,44],S=[1,45],A=[1,46],M=[1,49],N=[1,4,5,16,20,22,25,26,32,33,34,36,38,39,40,41,42,43,45,47,48,49,54,55,56,57,65,75],D=[4,5,16,20,22,25,26,32,33,34,36,38,39,40,41,42,43,45,49,54,55,56,57,65,75],B=[4,5,16,20,22,25,26,32,33,34,36,38,39,40,41,42,43,45,48,49,54,55,56,57,65,75],L=[4,5,16,20,22,25,26,32,33,34,36,38,39,40,41,42,43,45,47,49,54,55,56,57,65,75],O=[63,64,65],I=[1,114],R=[1,4,5,7,16,20,22,25,26,32,33,34,36,38,39,40,41,42,43,45,47,48,49,54,55,56,57,65,75],F={trace:function(){},yy:{},symbols_:{error:2,start:3,SPACE:4,NEWLINE:5,directive:6,SD:7,document:8,line:9,statement:10,openDirective:11,typeDirective:12,closeDirective:13,":":14,argDirective:15,participant:16,actor:17,AS:18,restOfLine:19,participant_actor:20,signal:21,autonumber:22,NUM:23,off:24,activate:25,deactivate:26,note_statement:27,links_statement:28,link_statement:29,properties_statement:30,details_statement:31,title:32,legacy_title:33,acc_title:34,acc_title_value:35,acc_descr:36,acc_descr_value:37,acc_descr_multiline_value:38,loop:39,end:40,rect:41,opt:42,alt:43,else_sections:44,par:45,par_sections:46,and:47,else:48,note:49,placement:50,text2:51,over:52,actor_pair:53,links:54,link:55,properties:56,details:57,spaceList:58,",":59,left_of:60,right_of:61,signaltype:62,"+":63,"-":64,ACTOR:65,SOLID_OPEN_ARROW:66,DOTTED_OPEN_ARROW:67,SOLID_ARROW:68,DOTTED_ARROW:69,SOLID_CROSS:70,DOTTED_CROSS:71,SOLID_POINT:72,DOTTED_POINT:73,TXT:74,open_directive:75,type_directive:76,arg_directive:77,close_directive:78,$accept:0,$end:1},terminals_:{2:"error",4:"SPACE",5:"NEWLINE",7:"SD",14:":",16:"participant",18:"AS",19:"restOfLine",20:"participant_actor",22:"autonumber",23:"NUM",24:"off",25:"activate",26:"deactivate",32:"title",33:"legacy_title",34:"acc_title",35:"acc_title_value",36:"acc_descr",37:"acc_descr_value",38:"acc_descr_multiline_value",39:"loop",40:"end",41:"rect",42:"opt",43:"alt",45:"par",47:"and",48:"else",49:"note",52:"over",54:"links",55:"link",56:"properties",57:"details",59:",",60:"left_of",61:"right_of",63:"+",64:"-",65:"ACTOR",66:"SOLID_OPEN_ARROW",67:"DOTTED_OPEN_ARROW",68:"SOLID_ARROW",69:"DOTTED_ARROW",70:"SOLID_CROSS",71:"DOTTED_CROSS",72:"SOLID_POINT",73:"DOTTED_POINT",74:"TXT",75:"open_directive",76:"type_directive",77:"arg_directive",78:"close_directive"},productions_:[0,[3,2],[3,2],[3,2],[3,2],[8,0],[8,2],[9,2],[9,1],[9,1],[6,4],[6,6],[10,5],[10,3],[10,5],[10,3],[10,2],[10,4],[10,3],[10,3],[10,2],[10,3],[10,3],[10,2],[10,2],[10,2],[10,2],[10,2],[10,1],[10,1],[10,2],[10,2],[10,1],[10,4],[10,4],[10,4],[10,4],[10,4],[10,1],[46,1],[46,4],[44,1],[44,4],[27,4],[27,4],[28,3],[29,3],[30,3],[31,3],[58,2],[58,1],[53,3],[53,1],[50,1],[50,1],[21,5],[21,5],[21,4],[17,1],[62,1],[62,1],[62,1],[62,1],[62,1],[62,1],[62,1],[62,1],[51,1],[11,1],[12,1],[15,1],[13,1]],performAction:function(t,e,n,r,i,a,o){var s=a.length-1;switch(i){case 4:return r.apply(a[s]),a[s];case 5:case 9:this.$=[];break;case 6:a[s-1].push(a[s]),this.$=a[s-1];break;case 7:case 8:case 52:this.$=a[s];break;case 12:a[s-3].type="addParticipant",a[s-3].description=r.parseMessage(a[s-1]),this.$=a[s-3];break;case 13:a[s-1].type="addParticipant",this.$=a[s-1];break;case 14:a[s-3].type="addActor",a[s-3].description=r.parseMessage(a[s-1]),this.$=a[s-3];break;case 15:a[s-1].type="addActor",this.$=a[s-1];break;case 
17:this.$={type:"sequenceIndex",sequenceIndex:Number(a[s-2]),sequenceIndexStep:Number(a[s-1]),sequenceVisible:!0,signalType:r.LINETYPE.AUTONUMBER};break;case 18:this.$={type:"sequenceIndex",sequenceIndex:Number(a[s-1]),sequenceIndexStep:1,sequenceVisible:!0,signalType:r.LINETYPE.AUTONUMBER};break;case 19:this.$={type:"sequenceIndex",sequenceVisible:!1,signalType:r.LINETYPE.AUTONUMBER};break;case 20:this.$={type:"sequenceIndex",sequenceVisible:!0,signalType:r.LINETYPE.AUTONUMBER};break;case 21:this.$={type:"activeStart",signalType:r.LINETYPE.ACTIVE_START,actor:a[s-1]};break;case 22:this.$={type:"activeEnd",signalType:r.LINETYPE.ACTIVE_END,actor:a[s-1]};break;case 28:r.setDiagramTitle(a[s].substring(6)),this.$=a[s].substring(6);break;case 29:r.setDiagramTitle(a[s].substring(7)),this.$=a[s].substring(7);break;case 30:this.$=a[s].trim(),r.setTitle(this.$);break;case 31:case 32:this.$=a[s].trim(),r.setAccDescription(this.$);break;case 33:a[s-1].unshift({type:"loopStart",loopText:r.parseMessage(a[s-2]),signalType:r.LINETYPE.LOOP_START}),a[s-1].push({type:"loopEnd",loopText:a[s-2],signalType:r.LINETYPE.LOOP_END}),this.$=a[s-1];break;case 34:a[s-1].unshift({type:"rectStart",color:r.parseMessage(a[s-2]),signalType:r.LINETYPE.RECT_START}),a[s-1].push({type:"rectEnd",color:r.parseMessage(a[s-2]),signalType:r.LINETYPE.RECT_END}),this.$=a[s-1];break;case 35:a[s-1].unshift({type:"optStart",optText:r.parseMessage(a[s-2]),signalType:r.LINETYPE.OPT_START}),a[s-1].push({type:"optEnd",optText:r.parseMessage(a[s-2]),signalType:r.LINETYPE.OPT_END}),this.$=a[s-1];break;case 36:a[s-1].unshift({type:"altStart",altText:r.parseMessage(a[s-2]),signalType:r.LINETYPE.ALT_START}),a[s-1].push({type:"altEnd",signalType:r.LINETYPE.ALT_END}),this.$=a[s-1];break;case 37:a[s-1].unshift({type:"parStart",parText:r.parseMessage(a[s-2]),signalType:r.LINETYPE.PAR_START}),a[s-1].push({type:"parEnd",signalType:r.LINETYPE.PAR_END}),this.$=a[s-1];break;case 40:this.$=a[s-3].concat([{type:"and",parText:r.parseMessage(a[s-1]),signalType:r.LINETYPE.PAR_AND},a[s]]);break;case 42:this.$=a[s-3].concat([{type:"else",altText:r.parseMessage(a[s-1]),signalType:r.LINETYPE.ALT_ELSE},a[s]]);break;case 43:this.$=[a[s-1],{type:"addNote",placement:a[s-2],actor:a[s-1].actor,text:a[s]}];break;case 44:a[s-2]=[].concat(a[s-1],a[s-1]).slice(0,2),a[s-2][0]=a[s-2][0].actor,a[s-2][1]=a[s-2][1].actor,this.$=[a[s-1],{type:"addNote",placement:r.PLACEMENT.OVER,actor:a[s-2].slice(0,2),text:a[s]}];break;case 45:this.$=[a[s-1],{type:"addLinks",actor:a[s-1].actor,text:a[s]}];break;case 46:this.$=[a[s-1],{type:"addALink",actor:a[s-1].actor,text:a[s]}];break;case 47:this.$=[a[s-1],{type:"addProperties",actor:a[s-1].actor,text:a[s]}];break;case 48:this.$=[a[s-1],{type:"addDetails",actor:a[s-1].actor,text:a[s]}];break;case 51:this.$=[a[s-2],a[s]];break;case 53:this.$=r.PLACEMENT.LEFTOF;break;case 54:this.$=r.PLACEMENT.RIGHTOF;break;case 55:this.$=[a[s-4],a[s-1],{type:"addMessage",from:a[s-4].actor,to:a[s-1].actor,signalType:a[s-3],msg:a[s]},{type:"activeStart",signalType:r.LINETYPE.ACTIVE_START,actor:a[s-1]}];break;case 56:this.$=[a[s-4],a[s-1],{type:"addMessage",from:a[s-4].actor,to:a[s-1].actor,signalType:a[s-3],msg:a[s]},{type:"activeEnd",signalType:r.LINETYPE.ACTIVE_END,actor:a[s-4]}];break;case 57:this.$=[a[s-3],a[s-1],{type:"addMessage",from:a[s-3].actor,to:a[s-1].actor,signalType:a[s-2],msg:a[s]}];break;case 58:this.$={type:"addParticipant",actor:a[s]};break;case 59:this.$=r.LINETYPE.SOLID_OPEN;break;case 60:this.$=r.LINETYPE.DOTTED_OPEN;break;case 
61:this.$=r.LINETYPE.SOLID;break;case 62:this.$=r.LINETYPE.DOTTED;break;case 63:this.$=r.LINETYPE.SOLID_CROSS;break;case 64:this.$=r.LINETYPE.DOTTED_CROSS;break;case 65:this.$=r.LINETYPE.SOLID_POINT;break;case 66:this.$=r.LINETYPE.DOTTED_POINT;break;case 67:this.$=r.parseMessage(a[s].trim().substring(1));break;case 68:r.parseDirective("%%{","open_directive");break;case 69:r.parseDirective(a[s],"type_directive");break;case 70:a[s]=a[s].trim().replace(/'/g,'"'),r.parseDirective(a[s],"arg_directive");break;case 71:r.parseDirective("}%%","close_directive","sequence")}},table:[{3:1,4:e,5:n,6:4,7:r,11:6,75:i},{1:[3]},{3:8,4:e,5:n,6:4,7:r,11:6,75:i},{3:9,4:e,5:n,6:4,7:r,11:6,75:i},{3:10,4:e,5:n,6:4,7:r,11:6,75:i},t([1,4,5,16,20,22,25,26,32,33,34,36,38,39,41,42,43,45,49,54,55,56,57,65,75],a,{8:11}),{12:12,76:[1,13]},{76:[2,68]},{1:[2,1]},{1:[2,2]},{1:[2,3]},{1:[2,4],4:o,5:s,6:39,9:14,10:16,11:6,16:c,17:40,20:u,21:20,22:l,25:h,26:f,27:24,28:25,29:26,30:27,31:28,32:d,33:p,34:g,36:y,38:m,39:v,41:b,42:_,43:x,45:w,49:k,54:T,55:C,56:E,57:S,65:A,75:i},{13:47,14:[1,48],78:M},t([14,78],[2,69]),t(N,[2,6]),{6:39,10:50,11:6,16:c,17:40,20:u,21:20,22:l,25:h,26:f,27:24,28:25,29:26,30:27,31:28,32:d,33:p,34:g,36:y,38:m,39:v,41:b,42:_,43:x,45:w,49:k,54:T,55:C,56:E,57:S,65:A,75:i},t(N,[2,8]),t(N,[2,9]),{17:51,65:A},{17:52,65:A},{5:[1,53]},{5:[1,56],23:[1,54],24:[1,55]},{17:57,65:A},{17:58,65:A},{5:[1,59]},{5:[1,60]},{5:[1,61]},{5:[1,62]},{5:[1,63]},t(N,[2,28]),t(N,[2,29]),{35:[1,64]},{37:[1,65]},t(N,[2,32]),{19:[1,66]},{19:[1,67]},{19:[1,68]},{19:[1,69]},{19:[1,70]},t(N,[2,38]),{62:71,66:[1,72],67:[1,73],68:[1,74],69:[1,75],70:[1,76],71:[1,77],72:[1,78],73:[1,79]},{50:80,52:[1,81],60:[1,82],61:[1,83]},{17:84,65:A},{17:85,65:A},{17:86,65:A},{17:87,65:A},t([5,18,59,66,67,68,69,70,71,72,73,74],[2,58]),{5:[1,88]},{15:89,77:[1,90]},{5:[2,71]},t(N,[2,7]),{5:[1,92],18:[1,91]},{5:[1,94],18:[1,93]},t(N,[2,16]),{5:[1,96],23:[1,95]},{5:[1,97]},t(N,[2,20]),{5:[1,98]},{5:[1,99]},t(N,[2,23]),t(N,[2,24]),t(N,[2,25]),t(N,[2,26]),t(N,[2,27]),t(N,[2,30]),t(N,[2,31]),t(D,a,{8:100}),t(D,a,{8:101}),t(D,a,{8:102}),t(B,a,{44:103,8:104}),t(L,a,{46:105,8:106}),{17:109,63:[1,107],64:[1,108],65:A},t(O,[2,59]),t(O,[2,60]),t(O,[2,61]),t(O,[2,62]),t(O,[2,63]),t(O,[2,64]),t(O,[2,65]),t(O,[2,66]),{17:110,65:A},{17:112,53:111,65:A},{65:[2,53]},{65:[2,54]},{51:113,74:I},{51:115,74:I},{51:116,74:I},{51:117,74:I},t(R,[2,10]),{13:118,78:M},{78:[2,70]},{19:[1,119]},t(N,[2,13]),{19:[1,120]},t(N,[2,15]),{5:[1,121]},t(N,[2,18]),t(N,[2,19]),t(N,[2,21]),t(N,[2,22]),{4:o,5:s,6:39,9:14,10:16,11:6,16:c,17:40,20:u,21:20,22:l,25:h,26:f,27:24,28:25,29:26,30:27,31:28,32:d,33:p,34:g,36:y,38:m,39:v,40:[1,122],41:b,42:_,43:x,45:w,49:k,54:T,55:C,56:E,57:S,65:A,75:i},{4:o,5:s,6:39,9:14,10:16,11:6,16:c,17:40,20:u,21:20,22:l,25:h,26:f,27:24,28:25,29:26,30:27,31:28,32:d,33:p,34:g,36:y,38:m,39:v,40:[1,123],41:b,42:_,43:x,45:w,49:k,54:T,55:C,56:E,57:S,65:A,75:i},{4:o,5:s,6:39,9:14,10:16,11:6,16:c,17:40,20:u,21:20,22:l,25:h,26:f,27:24,28:25,29:26,30:27,31:28,32:d,33:p,34:g,36:y,38:m,39:v,40:[1,124],41:b,42:_,43:x,45:w,49:k,54:T,55:C,56:E,57:S,65:A,75:i},{40:[1,125]},{4:o,5:s,6:39,9:14,10:16,11:6,16:c,17:40,20:u,21:20,22:l,25:h,26:f,27:24,28:25,29:26,30:27,31:28,32:d,33:p,34:g,36:y,38:m,39:v,40:[2,41],41:b,42:_,43:x,45:w,48:[1,126],49:k,54:T,55:C,56:E,57:S,65:A,75:i},{40:[1,127]},{4:o,5:s,6:39,9:14,10:16,11:6,16:c,17:40,20:u,21:20,22:l,25:h,26:f,27:24,28:25,29:26,30:27,31:28,32:d,33:p,34:g,36:y,38:m,39:v,40:[2,39],41:b,42:_,43:x,45:w,47:[1,128],49:k,54:T,55:C,56:E,57:S,65:A,75:
i},{17:129,65:A},{17:130,65:A},{51:131,74:I},{51:132,74:I},{51:133,74:I},{59:[1,134],74:[2,52]},{5:[2,45]},{5:[2,67]},{5:[2,46]},{5:[2,47]},{5:[2,48]},{5:[1,135]},{5:[1,136]},{5:[1,137]},t(N,[2,17]),t(N,[2,33]),t(N,[2,34]),t(N,[2,35]),t(N,[2,36]),{19:[1,138]},t(N,[2,37]),{19:[1,139]},{51:140,74:I},{51:141,74:I},{5:[2,57]},{5:[2,43]},{5:[2,44]},{17:142,65:A},t(R,[2,11]),t(N,[2,12]),t(N,[2,14]),t(B,a,{8:104,44:143}),t(L,a,{8:106,46:144}),{5:[2,55]},{5:[2,56]},{74:[2,51]},{40:[2,42]},{40:[2,40]}],defaultActions:{7:[2,68],8:[2,1],9:[2,2],10:[2,3],49:[2,71],82:[2,53],83:[2,54],90:[2,70],113:[2,45],114:[2,67],115:[2,46],116:[2,47],117:[2,48],131:[2,57],132:[2,43],133:[2,44],140:[2,55],141:[2,56],142:[2,51],143:[2,42],144:[2,40]},parseError:function(t,e){if(!e.recoverable){var n=new Error(t);throw n.hash=e,n}this.trace(t)},parse:function(t){var e=this,n=[0],r=[],i=[null],a=[],o=this.table,s="",c=0,u=0,l=0,h=2,f=1,d=a.slice.call(arguments,1),p=Object.create(this.lexer),g={yy:{}};for(var y in this.yy)Object.prototype.hasOwnProperty.call(this.yy,y)&&(g.yy[y]=this.yy[y]);p.setInput(t,g.yy),g.yy.lexer=p,g.yy.parser=this,void 0===p.yylloc&&(p.yylloc={});var m=p.yylloc;a.push(m);var v=p.options&&p.options.ranges;function b(){var t;return"number"!=typeof(t=r.pop()||p.lex()||f)&&(t instanceof Array&&(t=(r=t).pop()),t=e.symbols_[t]||t),t}"function"==typeof g.yy.parseError?this.parseError=g.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;for(var _,x,w,k,T,C,E,S,A,M={};;){if(w=n[n.length-1],this.defaultActions[w]?k=this.defaultActions[w]:(null==_&&(_=b()),k=o[w]&&o[w][_]),void 0===k||!k.length||!k[0]){var N="";for(C in A=[],o[w])this.terminals_[C]&&C>h&&A.push("'"+this.terminals_[C]+"'");N=p.showPosition?"Parse error on line "+(c+1)+":\n"+p.showPosition()+"\nExpecting "+A.join(", ")+", got '"+(this.terminals_[_]||_)+"'":"Parse error on line "+(c+1)+": Unexpected "+(_==f?"end of input":"'"+(this.terminals_[_]||_)+"'"),this.parseError(N,{text:p.match,token:this.terminals_[_]||_,line:p.yylineno,loc:m,expected:A})}if(k[0]instanceof Array&&k.length>1)throw new Error("Parse Error: multiple actions possible at state: "+w+", token: "+_);switch(k[0]){case 1:n.push(_),i.push(p.yytext),a.push(p.yylloc),n.push(k[1]),_=null,x?(_=x,x=null):(u=p.yyleng,s=p.yytext,c=p.yylineno,m=p.yylloc,l>0&&l--);break;case 2:if(E=this.productions_[k[1]][1],M.$=i[i.length-E],M._$={first_line:a[a.length-(E||1)].first_line,last_line:a[a.length-1].last_line,first_column:a[a.length-(E||1)].first_column,last_column:a[a.length-1].last_column},v&&(M._$.range=[a[a.length-(E||1)].range[0],a[a.length-1].range[1]]),void 0!==(T=this.performAction.apply(M,[s,u,c,g.yy,k[1],i,a].concat(d))))return T;E&&(n=n.slice(0,-1*E*2),i=i.slice(0,-1*E),a=a.slice(0,-1*E)),n.push(this.productions_[k[1]][0]),i.push(M.$),a.push(M._$),S=o[n[n.length-2]][n[n.length-1]],n.push(S);break;case 3:return!0}}return!0}},P={EOF:1,parseError:function(t,e){if(!this.yy.parser)throw new Error(t);this.yy.parser.parseError(t,e)},setInput:function(t,e){return this.yy=e||this.yy||{},this._input=t,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var t=this._input[0];return 
this.yytext+=t,this.yyleng++,this.offset++,this.match+=t,this.matched+=t,t.match(/(?:\r\n?|\n).*/g)?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),t},unput:function(t){var e=t.length,n=t.split(/(?:\r\n?|\n)/g);this._input=t+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-e),this.offset-=e;var r=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),n.length-1&&(this.yylineno-=n.length-1);var i=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:n?(n.length===r.length?this.yylloc.first_column:0)+r[r.length-n.length].length-n[0].length:this.yylloc.first_column-e},this.options.ranges&&(this.yylloc.range=[i[0],i[0]+this.yyleng-e]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){return this.options.backtrack_lexer?(this._backtrack=!0,this):this.parseError("Lexical error on line "+(this.yylineno+1)+". You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},less:function(t){this.unput(this.match.slice(t))},pastInput:function(){var t=this.matched.substr(0,this.matched.length-this.match.length);return(t.length>20?"...":"")+t.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var t=this.match;return t.length<20&&(t+=this._input.substr(0,20-t.length)),(t.substr(0,20)+(t.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var t=this.pastInput(),e=new Array(t.length+1).join("-");return t+this.upcomingInput()+"\n"+e+"^"},test_match:function(t,e){var n,r,i;if(this.options.backtrack_lexer&&(i={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(i.yylloc.range=this.yylloc.range.slice(0))),(r=t[0].match(/(?:\r\n?|\n).*/g))&&(this.yylineno+=r.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:r?r[r.length-1].length-r[r.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+t[0].length},this.yytext+=t[0],this.match+=t[0],this.matches=t,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(t[0].length),this.matched+=t[0],n=this.performAction.call(this,this.yy,this,e,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),n)return n;if(this._backtrack){for(var a in i)this[a]=i[a];return!1}return!1},next:function(){if(this.done)return this.EOF;var t,e,n,r;this._input||(this.done=!0),this._more||(this.yytext="",this.match="");for(var i=this._currentRules(),a=0;ae[0].length)){if(e=n,r=a,this.options.backtrack_lexer){if(!1!==(t=this.test_match(n,i[a])))return t;if(this._backtrack){e=!1;continue}return!1}if(!this.options.flex)break}return e?!1!==(t=this.test_match(e,i[r]))&&t:""===this._input?this.EOF:this.parseError("Lexical error on line 
"+(this.yylineno+1)+". Unrecognized text.\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){return this.next()||this.lex()},begin:function(t){this.conditionStack.push(t)},popState:function(){return this.conditionStack.length-1>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(t){return(t=this.conditionStack.length-1-Math.abs(t||0))>=0?this.conditionStack[t]:"INITIAL"},pushState:function(t){this.begin(t)},stateStackSize:function(){return this.conditionStack.length},options:{"case-insensitive":!0},performAction:function(t,e,n,r){switch(n){case 0:return this.begin("open_directive"),75;case 1:return this.begin("type_directive"),76;case 2:return this.popState(),this.begin("arg_directive"),14;case 3:return this.popState(),this.popState(),78;case 4:return 77;case 5:case 49:case 62:return 5;case 6:case 7:case 8:case 9:case 10:break;case 11:return 23;case 12:return this.begin("ID"),16;case 13:return this.begin("ID"),20;case 14:return e.yytext=e.yytext.trim(),this.begin("ALIAS"),65;case 15:return this.popState(),this.popState(),this.begin("LINE"),18;case 16:return this.popState(),this.popState(),5;case 17:return this.begin("LINE"),39;case 18:return this.begin("LINE"),41;case 19:return this.begin("LINE"),42;case 20:return this.begin("LINE"),43;case 21:return this.begin("LINE"),48;case 22:return this.begin("LINE"),45;case 23:return this.begin("LINE"),47;case 24:return this.popState(),19;case 25:return 40;case 26:return 60;case 27:return 61;case 28:return 54;case 29:return 55;case 30:return 56;case 31:return 57;case 32:return 52;case 33:return 49;case 34:return this.begin("ID"),25;case 35:return this.begin("ID"),26;case 36:return 32;case 37:return 33;case 38:return this.begin("acc_title"),34;case 39:return this.popState(),"acc_title_value";case 40:return this.begin("acc_descr"),36;case 41:return this.popState(),"acc_descr_value";case 42:this.begin("acc_descr_multiline");break;case 43:this.popState();break;case 44:return"acc_descr_multiline_value";case 45:return 7;case 46:return 22;case 47:return 24;case 48:return 59;case 50:return e.yytext=e.yytext.trim(),65;case 51:return 68;case 52:return 69;case 53:return 66;case 54:return 67;case 55:return 70;case 56:return 71;case 57:return 72;case 58:return 73;case 59:return 74;case 60:return 63;case 61:return 64;case 63:return"INVALID"}},rules:[/^(?:%%\{)/i,/^(?:((?:(?!\}%%)[^:.])*))/i,/^(?::)/i,/^(?:\}%%)/i,/^(?:((?:(?!\}%%).|\n)*))/i,/^(?:[\n]+)/i,/^(?:\s+)/i,/^(?:((?!\n)\s)+)/i,/^(?:#[^\n]*)/i,/^(?:%(?!\{)[^\n]*)/i,/^(?:[^\}]%%[^\n]*)/i,/^(?:[0-9]+(?=[ \n]+))/i,/^(?:participant\b)/i,/^(?:actor\b)/i,/^(?:[^\->:\n,;]+?(?=((?!\n)\s)+as(?!\n)\s|[#\n;]|$))/i,/^(?:as\b)/i,/^(?:(?:))/i,/^(?:loop\b)/i,/^(?:rect\b)/i,/^(?:opt\b)/i,/^(?:alt\b)/i,/^(?:else\b)/i,/^(?:par\b)/i,/^(?:and\b)/i,/^(?:(?:[:]?(?:no)?wrap)?[^#\n;]*)/i,/^(?:end\b)/i,/^(?:left of\b)/i,/^(?:right 
of\b)/i,/^(?:links\b)/i,/^(?:link\b)/i,/^(?:properties\b)/i,/^(?:details\b)/i,/^(?:over\b)/i,/^(?:note\b)/i,/^(?:activate\b)/i,/^(?:deactivate\b)/i,/^(?:title\s[^#\n;]+)/i,/^(?:title:\s[^#\n;]+)/i,/^(?:accTitle\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*\{\s*)/i,/^(?:[\}])/i,/^(?:[^\}]*)/i,/^(?:sequenceDiagram\b)/i,/^(?:autonumber\b)/i,/^(?:off\b)/i,/^(?:,)/i,/^(?:;)/i,/^(?:[^\+\->:\n,;]+((?!(-x|--x|-\)|--\)))[\-]*[^\+\->:\n,;]+)*)/i,/^(?:->>)/i,/^(?:-->>)/i,/^(?:->)/i,/^(?:-->)/i,/^(?:-[x])/i,/^(?:--[x])/i,/^(?:-[\)])/i,/^(?:--[\)])/i,/^(?::(?:(?:no)?wrap)?[^#\n;]+)/i,/^(?:\+)/i,/^(?:-)/i,/^(?:$)/i,/^(?:.)/i],conditions:{acc_descr_multiline:{rules:[43,44],inclusive:!1},acc_descr:{rules:[41],inclusive:!1},acc_title:{rules:[39],inclusive:!1},open_directive:{rules:[1,8],inclusive:!1},type_directive:{rules:[2,3,8],inclusive:!1},arg_directive:{rules:[3,4,8],inclusive:!1},ID:{rules:[7,8,14],inclusive:!1},ALIAS:{rules:[7,8,15,16],inclusive:!1},LINE:{rules:[7,8,24],inclusive:!1},INITIAL:{rules:[0,5,6,8,9,10,11,12,13,17,18,19,20,21,22,23,25,26,27,28,29,30,31,32,33,34,35,36,37,38,40,42,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63],inclusive:!0}}};function Y(){this.yy={}}return F.lexer=P,Y.prototype=F,F.Parser=Y,new Y}();e.parser=r,e.Parser=r.Parser,e.parse=function(){return r.parse.apply(r,arguments)},e.main=function(t){t[1]||(console.log("Usage: "+t[0]+" FILE"),process.exit(1));var r=n(1993).readFileSync(n(6470).normalize(t[1]),"utf8");return e.parser.parse(r)},n.c[n.s]===t&&e.main(process.argv.slice(1))},3584:(t,e,n)=>{t=n.nmd(t);var r=function(){var t=function(t,e,n,r){for(n=n||{},r=t.length;r--;n[t[r]]=e);return n},e=[1,2],n=[1,3],r=[1,5],i=[1,7],a=[2,5],o=[1,15],s=[1,17],c=[1,19],u=[1,20],l=[1,21],h=[1,22],f=[1,33],d=[1,23],p=[1,24],g=[1,25],y=[1,26],m=[1,27],v=[1,30],b=[1,31],_=[1,32],x=[1,35],w=[1,36],k=[1,37],T=[1,38],C=[1,34],E=[1,41],S=[1,4,5,14,15,17,19,20,22,23,24,25,26,27,31,33,35,41,42,43,44,47,50],A=[1,4,5,12,13,14,15,17,19,20,22,23,24,25,26,27,31,33,35,41,42,43,44,47,50],M=[1,4,5,7,14,15,17,19,20,22,23,24,25,26,27,31,33,35,41,42,43,44,47,50],N=[4,5,14,15,17,19,20,22,23,24,25,26,27,31,33,35,41,42,43,44,47,50],D={trace:function(){},yy:{},symbols_:{error:2,start:3,SPACE:4,NL:5,directive:6,SD:7,document:8,line:9,statement:10,idStatement:11,DESCR:12,"--\x3e":13,HIDE_EMPTY:14,scale:15,WIDTH:16,COMPOSIT_STATE:17,STRUCT_START:18,STRUCT_STOP:19,STATE_DESCR:20,AS:21,ID:22,FORK:23,JOIN:24,CHOICE:25,CONCURRENT:26,note:27,notePosition:28,NOTE_TEXT:29,direction:30,acc_title:31,acc_title_value:32,acc_descr:33,acc_descr_value:34,acc_descr_multiline_value:35,openDirective:36,typeDirective:37,closeDirective:38,":":39,argDirective:40,direction_tb:41,direction_bt:42,direction_rl:43,direction_lr:44,eol:45,";":46,EDGE_STATE:47,left_of:48,right_of:49,open_directive:50,type_directive:51,arg_directive:52,close_directive:53,$accept:0,$end:1},terminals_:{2:"error",4:"SPACE",5:"NL",7:"SD",12:"DESCR",13:"--\x3e",14:"HIDE_EMPTY",15:"scale",16:"WIDTH",17:"COMPOSIT_STATE",18:"STRUCT_START",19:"STRUCT_STOP",20:"STATE_DESCR",21:"AS",22:"ID",23:"FORK",24:"JOIN",25:"CHOICE",26:"CONCURRENT",27:"note",29:"NOTE_TEXT",31:"acc_title",32:"acc_title_value",33:"acc_descr",34:"acc_descr_value",35:"acc_descr_multiline_value",39:":",41:"direction_tb",42:"direction_bt",43:"direction_rl",44:"direction_lr",46:";",47:"EDGE_STATE",48:"left_of",49:"right_of",50:"open_directive",51:"type_directive",52:"arg_directive",53:"close_directive"},productions_:[0,[3,2],[3,2],[3,2
],[3,2],[8,0],[8,2],[9,2],[9,1],[9,1],[10,1],[10,2],[10,3],[10,4],[10,1],[10,2],[10,1],[10,4],[10,3],[10,6],[10,1],[10,1],[10,1],[10,1],[10,4],[10,4],[10,1],[10,1],[10,2],[10,2],[10,1],[6,3],[6,5],[30,1],[30,1],[30,1],[30,1],[45,1],[45,1],[11,1],[11,1],[28,1],[28,1],[36,1],[37,1],[40,1],[38,1]],performAction:function(t,e,n,r,i,a,o){var s=a.length-1;switch(i){case 4:return r.setRootDoc(a[s]),a[s];case 5:this.$=[];break;case 6:"nl"!=a[s]&&(a[s-1].push(a[s]),this.$=a[s-1]);break;case 7:case 8:case 39:case 40:this.$=a[s];break;case 9:this.$="nl";break;case 10:this.$={stmt:"state",id:a[s],type:"default",description:""};break;case 11:this.$={stmt:"state",id:a[s-1],type:"default",description:r.trimColon(a[s])};break;case 12:this.$={stmt:"relation",state1:{stmt:"state",id:a[s-2],type:"default",description:""},state2:{stmt:"state",id:a[s],type:"default",description:""}};break;case 13:this.$={stmt:"relation",state1:{stmt:"state",id:a[s-3],type:"default",description:""},state2:{stmt:"state",id:a[s-1],type:"default",description:""},description:a[s].substr(1).trim()};break;case 17:this.$={stmt:"state",id:a[s-3],type:"default",description:"",doc:a[s-1]};break;case 18:var c=a[s],u=a[s-2].trim();if(a[s].match(":")){var l=a[s].split(":");c=l[0],u=[u,l[1]]}this.$={stmt:"state",id:c,type:"default",description:u};break;case 19:this.$={stmt:"state",id:a[s-3],type:"default",description:a[s-5],doc:a[s-1]};break;case 20:this.$={stmt:"state",id:a[s],type:"fork"};break;case 21:this.$={stmt:"state",id:a[s],type:"join"};break;case 22:this.$={stmt:"state",id:a[s],type:"choice"};break;case 23:this.$={stmt:"state",id:r.getDividerId(),type:"divider"};break;case 24:this.$={stmt:"state",id:a[s-1].trim(),note:{position:a[s-2].trim(),text:a[s].trim()}};break;case 28:this.$=a[s].trim(),r.setTitle(this.$);break;case 29:case 30:this.$=a[s].trim(),r.setAccDescription(this.$);break;case 33:r.setDirection("TB"),this.$={stmt:"dir",value:"TB"};break;case 34:r.setDirection("BT"),this.$={stmt:"dir",value:"BT"};break;case 35:r.setDirection("RL"),this.$={stmt:"dir",value:"RL"};break;case 36:r.setDirection("LR"),this.$={stmt:"dir",value:"LR"};break;case 43:r.parseDirective("%%{","open_directive");break;case 44:r.parseDirective(a[s],"type_directive");break;case 45:a[s]=a[s].trim().replace(/'/g,'"'),r.parseDirective(a[s],"arg_directive");break;case 
46:r.parseDirective("}%%","close_directive","state")}},table:[{3:1,4:e,5:n,6:4,7:r,36:6,50:i},{1:[3]},{3:8,4:e,5:n,6:4,7:r,36:6,50:i},{3:9,4:e,5:n,6:4,7:r,36:6,50:i},{3:10,4:e,5:n,6:4,7:r,36:6,50:i},t([1,4,5,14,15,17,20,22,23,24,25,26,27,31,33,35,41,42,43,44,47,50],a,{8:11}),{37:12,51:[1,13]},{51:[2,43]},{1:[2,1]},{1:[2,2]},{1:[2,3]},{1:[2,4],4:o,5:s,6:28,9:14,10:16,11:18,14:c,15:u,17:l,20:h,22:f,23:d,24:p,25:g,26:y,27:m,30:29,31:v,33:b,35:_,36:6,41:x,42:w,43:k,44:T,47:C,50:i},{38:39,39:[1,40],53:E},t([39,53],[2,44]),t(S,[2,6]),{6:28,10:42,11:18,14:c,15:u,17:l,20:h,22:f,23:d,24:p,25:g,26:y,27:m,30:29,31:v,33:b,35:_,36:6,41:x,42:w,43:k,44:T,47:C,50:i},t(S,[2,8]),t(S,[2,9]),t(S,[2,10],{12:[1,43],13:[1,44]}),t(S,[2,14]),{16:[1,45]},t(S,[2,16],{18:[1,46]}),{21:[1,47]},t(S,[2,20]),t(S,[2,21]),t(S,[2,22]),t(S,[2,23]),{28:48,29:[1,49],48:[1,50],49:[1,51]},t(S,[2,26]),t(S,[2,27]),{32:[1,52]},{34:[1,53]},t(S,[2,30]),t(A,[2,39]),t(A,[2,40]),t(S,[2,33]),t(S,[2,34]),t(S,[2,35]),t(S,[2,36]),t(M,[2,31]),{40:54,52:[1,55]},t(M,[2,46]),t(S,[2,7]),t(S,[2,11]),{11:56,22:f,47:C},t(S,[2,15]),t(N,a,{8:57}),{22:[1,58]},{22:[1,59]},{21:[1,60]},{22:[2,41]},{22:[2,42]},t(S,[2,28]),t(S,[2,29]),{38:61,53:E},{53:[2,45]},t(S,[2,12],{12:[1,62]}),{4:o,5:s,6:28,9:14,10:16,11:18,14:c,15:u,17:l,19:[1,63],20:h,22:f,23:d,24:p,25:g,26:y,27:m,30:29,31:v,33:b,35:_,36:6,41:x,42:w,43:k,44:T,47:C,50:i},t(S,[2,18],{18:[1,64]}),{29:[1,65]},{22:[1,66]},t(M,[2,32]),t(S,[2,13]),t(S,[2,17]),t(N,a,{8:67}),t(S,[2,24]),t(S,[2,25]),{4:o,5:s,6:28,9:14,10:16,11:18,14:c,15:u,17:l,19:[1,68],20:h,22:f,23:d,24:p,25:g,26:y,27:m,30:29,31:v,33:b,35:_,36:6,41:x,42:w,43:k,44:T,47:C,50:i},t(S,[2,19])],defaultActions:{7:[2,43],8:[2,1],9:[2,2],10:[2,3],50:[2,41],51:[2,42],55:[2,45]},parseError:function(t,e){if(!e.recoverable){var n=new Error(t);throw n.hash=e,n}this.trace(t)},parse:function(t){var e=this,n=[0],r=[],i=[null],a=[],o=this.table,s="",c=0,u=0,l=0,h=2,f=1,d=a.slice.call(arguments,1),p=Object.create(this.lexer),g={yy:{}};for(var y in this.yy)Object.prototype.hasOwnProperty.call(this.yy,y)&&(g.yy[y]=this.yy[y]);p.setInput(t,g.yy),g.yy.lexer=p,g.yy.parser=this,void 0===p.yylloc&&(p.yylloc={});var m=p.yylloc;a.push(m);var v=p.options&&p.options.ranges;function b(){var t;return"number"!=typeof(t=r.pop()||p.lex()||f)&&(t instanceof Array&&(t=(r=t).pop()),t=e.symbols_[t]||t),t}"function"==typeof g.yy.parseError?this.parseError=g.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;for(var _,x,w,k,T,C,E,S,A,M={};;){if(w=n[n.length-1],this.defaultActions[w]?k=this.defaultActions[w]:(null==_&&(_=b()),k=o[w]&&o[w][_]),void 0===k||!k.length||!k[0]){var N="";for(C in A=[],o[w])this.terminals_[C]&&C>h&&A.push("'"+this.terminals_[C]+"'");N=p.showPosition?"Parse error on line "+(c+1)+":\n"+p.showPosition()+"\nExpecting "+A.join(", ")+", got '"+(this.terminals_[_]||_)+"'":"Parse error on line "+(c+1)+": Unexpected "+(_==f?"end of input":"'"+(this.terminals_[_]||_)+"'"),this.parseError(N,{text:p.match,token:this.terminals_[_]||_,line:p.yylineno,loc:m,expected:A})}if(k[0]instanceof Array&&k.length>1)throw new Error("Parse Error: multiple actions possible at state: "+w+", token: "+_);switch(k[0]){case 1:n.push(_),i.push(p.yytext),a.push(p.yylloc),n.push(k[1]),_=null,x?(_=x,x=null):(u=p.yyleng,s=p.yytext,c=p.yylineno,m=p.yylloc,l>0&&l--);break;case 
2:if(E=this.productions_[k[1]][1],M.$=i[i.length-E],M._$={first_line:a[a.length-(E||1)].first_line,last_line:a[a.length-1].last_line,first_column:a[a.length-(E||1)].first_column,last_column:a[a.length-1].last_column},v&&(M._$.range=[a[a.length-(E||1)].range[0],a[a.length-1].range[1]]),void 0!==(T=this.performAction.apply(M,[s,u,c,g.yy,k[1],i,a].concat(d))))return T;E&&(n=n.slice(0,-1*E*2),i=i.slice(0,-1*E),a=a.slice(0,-1*E)),n.push(this.productions_[k[1]][0]),i.push(M.$),a.push(M._$),S=o[n[n.length-2]][n[n.length-1]],n.push(S);break;case 3:return!0}}return!0}},B={EOF:1,parseError:function(t,e){if(!this.yy.parser)throw new Error(t);this.yy.parser.parseError(t,e)},setInput:function(t,e){return this.yy=e||this.yy||{},this._input=t,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var t=this._input[0];return this.yytext+=t,this.yyleng++,this.offset++,this.match+=t,this.matched+=t,t.match(/(?:\r\n?|\n).*/g)?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),t},unput:function(t){var e=t.length,n=t.split(/(?:\r\n?|\n)/g);this._input=t+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-e),this.offset-=e;var r=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),n.length-1&&(this.yylineno-=n.length-1);var i=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:n?(n.length===r.length?this.yylloc.first_column:0)+r[r.length-n.length].length-n[0].length:this.yylloc.first_column-e},this.options.ranges&&(this.yylloc.range=[i[0],i[0]+this.yyleng-e]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){return this.options.backtrack_lexer?(this._backtrack=!0,this):this.parseError("Lexical error on line "+(this.yylineno+1)+". 
You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},less:function(t){this.unput(this.match.slice(t))},pastInput:function(){var t=this.matched.substr(0,this.matched.length-this.match.length);return(t.length>20?"...":"")+t.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var t=this.match;return t.length<20&&(t+=this._input.substr(0,20-t.length)),(t.substr(0,20)+(t.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var t=this.pastInput(),e=new Array(t.length+1).join("-");return t+this.upcomingInput()+"\n"+e+"^"},test_match:function(t,e){var n,r,i;if(this.options.backtrack_lexer&&(i={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(i.yylloc.range=this.yylloc.range.slice(0))),(r=t[0].match(/(?:\r\n?|\n).*/g))&&(this.yylineno+=r.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:r?r[r.length-1].length-r[r.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+t[0].length},this.yytext+=t[0],this.match+=t[0],this.matches=t,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(t[0].length),this.matched+=t[0],n=this.performAction.call(this,this.yy,this,e,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),n)return n;if(this._backtrack){for(var a in i)this[a]=i[a];return!1}return!1},next:function(){if(this.done)return this.EOF;var t,e,n,r;this._input||(this.done=!0),this._more||(this.yytext="",this.match="");for(var i=this._currentRules(),a=0;ae[0].length)){if(e=n,r=a,this.options.backtrack_lexer){if(!1!==(t=this.test_match(n,i[a])))return t;if(this._backtrack){e=!1;continue}return!1}if(!this.options.flex)break}return e?!1!==(t=this.test_match(e,i[r]))&&t:""===this._input?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+". 
Unrecognized text.\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){return this.next()||this.lex()},begin:function(t){this.conditionStack.push(t)},popState:function(){return this.conditionStack.length-1>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(t){return(t=this.conditionStack.length-1-Math.abs(t||0))>=0?this.conditionStack[t]:"INITIAL"},pushState:function(t){this.begin(t)},stateStackSize:function(){return this.conditionStack.length},options:{"case-insensitive":!0},performAction:function(t,e,n,r){switch(n){case 0:case 33:return 41;case 1:case 34:return 42;case 2:case 35:return 43;case 3:case 36:return 44;case 4:return this.begin("open_directive"),50;case 5:return this.begin("type_directive"),51;case 6:return this.popState(),this.begin("arg_directive"),39;case 7:return this.popState(),this.popState(),53;case 8:return 52;case 9:case 10:case 12:case 13:case 14:case 15:case 46:case 52:break;case 11:case 66:return 5;case 16:return this.pushState("SCALE"),15;case 17:return 16;case 18:case 24:case 40:case 43:this.popState();break;case 19:return this.begin("acc_title"),31;case 20:return this.popState(),"acc_title_value";case 21:return this.begin("acc_descr"),33;case 22:return this.popState(),"acc_descr_value";case 23:this.begin("acc_descr_multiline");break;case 25:return"acc_descr_multiline_value";case 26:this.pushState("STATE");break;case 27:case 30:return this.popState(),e.yytext=e.yytext.slice(0,-8).trim(),23;case 28:case 31:return this.popState(),e.yytext=e.yytext.slice(0,-8).trim(),24;case 29:case 32:return this.popState(),e.yytext=e.yytext.slice(0,-10).trim(),25;case 37:this.begin("STATE_STRING");break;case 38:return this.popState(),this.pushState("STATE_ID"),"AS";case 39:case 54:return this.popState(),"ID";case 41:return"STATE_DESCR";case 42:return 17;case 44:return this.popState(),this.pushState("struct"),18;case 45:return this.popState(),19;case 47:return this.begin("NOTE"),27;case 48:return this.popState(),this.pushState("NOTE_ID"),48;case 49:return this.popState(),this.pushState("NOTE_ID"),49;case 50:this.popState(),this.pushState("FLOATING_NOTE");break;case 51:return this.popState(),this.pushState("FLOATING_NOTE_ID"),"AS";case 53:return"NOTE_TEXT";case 55:return this.popState(),this.pushState("NOTE_TEXT"),22;case 56:return this.popState(),e.yytext=e.yytext.substr(2).trim(),29;case 57:return this.popState(),e.yytext=e.yytext.slice(0,-8).trim(),29;case 58:case 59:return 7;case 60:return 14;case 61:return 47;case 62:return 22;case 63:return e.yytext=e.yytext.trim(),12;case 64:return 13;case 65:return 26;case 
67:return"INVALID"}},rules:[/^(?:.*direction\s+TB[^\n]*)/i,/^(?:.*direction\s+BT[^\n]*)/i,/^(?:.*direction\s+RL[^\n]*)/i,/^(?:.*direction\s+LR[^\n]*)/i,/^(?:%%\{)/i,/^(?:((?:(?!\}%%)[^:.])*))/i,/^(?::)/i,/^(?:\}%%)/i,/^(?:((?:(?!\}%%).|\n)*))/i,/^(?:%%(?!\{)[^\n]*)/i,/^(?:[^\}]%%[^\n]*)/i,/^(?:[\n]+)/i,/^(?:[\s]+)/i,/^(?:((?!\n)\s)+)/i,/^(?:#[^\n]*)/i,/^(?:%[^\n]*)/i,/^(?:scale\s+)/i,/^(?:\d+)/i,/^(?:\s+width\b)/i,/^(?:accTitle\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*\{\s*)/i,/^(?:[\}])/i,/^(?:[^\}]*)/i,/^(?:state\s+)/i,/^(?:.*<>)/i,/^(?:.*<>)/i,/^(?:.*<>)/i,/^(?:.*\[\[fork\]\])/i,/^(?:.*\[\[join\]\])/i,/^(?:.*\[\[choice\]\])/i,/^(?:.*direction\s+TB[^\n]*)/i,/^(?:.*direction\s+BT[^\n]*)/i,/^(?:.*direction\s+RL[^\n]*)/i,/^(?:.*direction\s+LR[^\n]*)/i,/^(?:["])/i,/^(?:\s*as\s+)/i,/^(?:[^\n\{]*)/i,/^(?:["])/i,/^(?:[^"]*)/i,/^(?:[^\n\s\{]+)/i,/^(?:\n)/i,/^(?:\{)/i,/^(?:\})/i,/^(?:[\n])/i,/^(?:note\s+)/i,/^(?:left of\b)/i,/^(?:right of\b)/i,/^(?:")/i,/^(?:\s*as\s*)/i,/^(?:["])/i,/^(?:[^"]*)/i,/^(?:[^\n]*)/i,/^(?:\s*[^:\n\s\-]+)/i,/^(?:\s*:[^:\n;]+)/i,/^(?:[\s\S]*?end note\b)/i,/^(?:stateDiagram\s+)/i,/^(?:stateDiagram-v2\s+)/i,/^(?:hide empty description\b)/i,/^(?:\[\*\])/i,/^(?:[^:\n\s\-\{]+)/i,/^(?:\s*:[^:\n;]+)/i,/^(?:-->)/i,/^(?:--)/i,/^(?:$)/i,/^(?:.)/i],conditions:{LINE:{rules:[13,14],inclusive:!1},close_directive:{rules:[13,14],inclusive:!1},arg_directive:{rules:[7,8,13,14],inclusive:!1},type_directive:{rules:[6,7,13,14],inclusive:!1},open_directive:{rules:[5,13,14],inclusive:!1},struct:{rules:[13,14,26,33,34,35,36,45,46,47,61,62,63,64,65],inclusive:!1},FLOATING_NOTE_ID:{rules:[54],inclusive:!1},FLOATING_NOTE:{rules:[51,52,53],inclusive:!1},NOTE_TEXT:{rules:[56,57],inclusive:!1},NOTE_ID:{rules:[55],inclusive:!1},NOTE:{rules:[48,49,50],inclusive:!1},acc_descr_multiline:{rules:[24,25],inclusive:!1},acc_descr:{rules:[22],inclusive:!1},acc_title:{rules:[20],inclusive:!1},SCALE:{rules:[17,18],inclusive:!1},ALIAS:{rules:[],inclusive:!1},STATE_ID:{rules:[39],inclusive:!1},STATE_STRING:{rules:[40,41],inclusive:!1},FORK_STATE:{rules:[],inclusive:!1},STATE:{rules:[13,14,27,28,29,30,31,32,37,38,42,43,44],inclusive:!1},ID:{rules:[13,14],inclusive:!1},INITIAL:{rules:[0,1,2,3,4,9,10,11,12,14,15,16,19,21,23,26,44,47,58,59,60,61,62,63,64,66,67],inclusive:!0}}};function L(){this.yy={}}return D.lexer=B,L.prototype=D,D.Parser=L,new L}();e.parser=r,e.Parser=r.Parser,e.parse=function(){return r.parse.apply(r,arguments)},e.main=function(t){t[1]||(console.log("Usage: "+t[0]+" FILE"),process.exit(1));var r=n(3069).readFileSync(n(6470).normalize(t[1]),"utf8");return e.parser.parse(r)},n.c[n.s]===t&&e.main(process.argv.slice(1))},9763:(t,e,n)=>{t=n.nmd(t);var r=function(){var t=function(t,e,n,r){for(n=n||{},r=t.length;r--;n[t[r]]=e);return 
n},e=[1,2],n=[1,5],r=[6,9,11,17,18,20,22,23,24,26],i=[1,15],a=[1,16],o=[1,17],s=[1,18],c=[1,19],u=[1,20],l=[1,24],h=[4,6,9,11,17,18,20,22,23,24,26],f={trace:function(){},yy:{},symbols_:{error:2,start:3,journey:4,document:5,EOF:6,directive:7,line:8,SPACE:9,statement:10,NEWLINE:11,openDirective:12,typeDirective:13,closeDirective:14,":":15,argDirective:16,title:17,acc_title:18,acc_title_value:19,acc_descr:20,acc_descr_value:21,acc_descr_multiline_value:22,section:23,taskName:24,taskData:25,open_directive:26,type_directive:27,arg_directive:28,close_directive:29,$accept:0,$end:1},terminals_:{2:"error",4:"journey",6:"EOF",9:"SPACE",11:"NEWLINE",15:":",17:"title",18:"acc_title",19:"acc_title_value",20:"acc_descr",21:"acc_descr_value",22:"acc_descr_multiline_value",23:"section",24:"taskName",25:"taskData",26:"open_directive",27:"type_directive",28:"arg_directive",29:"close_directive"},productions_:[0,[3,3],[3,2],[5,0],[5,2],[8,2],[8,1],[8,1],[8,1],[7,4],[7,6],[10,1],[10,2],[10,2],[10,1],[10,1],[10,2],[10,1],[12,1],[13,1],[16,1],[14,1]],performAction:function(t,e,n,r,i,a,o){var s=a.length-1;switch(i){case 1:return a[s-1];case 3:case 7:case 8:this.$=[];break;case 4:a[s-1].push(a[s]),this.$=a[s-1];break;case 5:case 6:this.$=a[s];break;case 11:r.setTitle(a[s].substr(6)),this.$=a[s].substr(6);break;case 12:this.$=a[s].trim(),r.setTitle(this.$);break;case 13:case 14:this.$=a[s].trim(),r.setAccDescription(this.$);break;case 15:r.addSection(a[s].substr(8)),this.$=a[s].substr(8);break;case 16:r.addTask(a[s-1],a[s]),this.$="task";break;case 18:r.parseDirective("%%{","open_directive");break;case 19:r.parseDirective(a[s],"type_directive");break;case 20:a[s]=a[s].trim().replace(/'/g,'"'),r.parseDirective(a[s],"arg_directive");break;case 21:r.parseDirective("}%%","close_directive","journey")}},table:[{3:1,4:e,7:3,12:4,26:n},{1:[3]},t(r,[2,3],{5:6}),{3:7,4:e,7:3,12:4,26:n},{13:8,27:[1,9]},{27:[2,18]},{6:[1,10],7:21,8:11,9:[1,12],10:13,11:[1,14],12:4,17:i,18:a,20:o,22:s,23:c,24:u,26:n},{1:[2,2]},{14:22,15:[1,23],29:l},t([15,29],[2,19]),t(r,[2,8],{1:[2,1]}),t(r,[2,4]),{7:21,10:25,12:4,17:i,18:a,20:o,22:s,23:c,24:u,26:n},t(r,[2,6]),t(r,[2,7]),t(r,[2,11]),{19:[1,26]},{21:[1,27]},t(r,[2,14]),t(r,[2,15]),{25:[1,28]},t(r,[2,17]),{11:[1,29]},{16:30,28:[1,31]},{11:[2,21]},t(r,[2,5]),t(r,[2,12]),t(r,[2,13]),t(r,[2,16]),t(h,[2,9]),{14:32,29:l},{29:[2,20]},{11:[1,33]},t(h,[2,10])],defaultActions:{5:[2,18],7:[2,2],24:[2,21],31:[2,20]},parseError:function(t,e){if(!e.recoverable){var n=new Error(t);throw n.hash=e,n}this.trace(t)},parse:function(t){var e=this,n=[0],r=[],i=[null],a=[],o=this.table,s="",c=0,u=0,l=0,h=2,f=1,d=a.slice.call(arguments,1),p=Object.create(this.lexer),g={yy:{}};for(var y in this.yy)Object.prototype.hasOwnProperty.call(this.yy,y)&&(g.yy[y]=this.yy[y]);p.setInput(t,g.yy),g.yy.lexer=p,g.yy.parser=this,void 0===p.yylloc&&(p.yylloc={});var m=p.yylloc;a.push(m);var v=p.options&&p.options.ranges;function b(){var t;return"number"!=typeof(t=r.pop()||p.lex()||f)&&(t instanceof Array&&(t=(r=t).pop()),t=e.symbols_[t]||t),t}"function"==typeof g.yy.parseError?this.parseError=g.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;for(var _,x,w,k,T,C,E,S,A,M={};;){if(w=n[n.length-1],this.defaultActions[w]?k=this.defaultActions[w]:(null==_&&(_=b()),k=o[w]&&o[w][_]),void 0===k||!k.length||!k[0]){var N="";for(C in A=[],o[w])this.terminals_[C]&&C>h&&A.push("'"+this.terminals_[C]+"'");N=p.showPosition?"Parse error on line "+(c+1)+":\n"+p.showPosition()+"\nExpecting "+A.join(", ")+", got 
'"+(this.terminals_[_]||_)+"'":"Parse error on line "+(c+1)+": Unexpected "+(_==f?"end of input":"'"+(this.terminals_[_]||_)+"'"),this.parseError(N,{text:p.match,token:this.terminals_[_]||_,line:p.yylineno,loc:m,expected:A})}if(k[0]instanceof Array&&k.length>1)throw new Error("Parse Error: multiple actions possible at state: "+w+", token: "+_);switch(k[0]){case 1:n.push(_),i.push(p.yytext),a.push(p.yylloc),n.push(k[1]),_=null,x?(_=x,x=null):(u=p.yyleng,s=p.yytext,c=p.yylineno,m=p.yylloc,l>0&&l--);break;case 2:if(E=this.productions_[k[1]][1],M.$=i[i.length-E],M._$={first_line:a[a.length-(E||1)].first_line,last_line:a[a.length-1].last_line,first_column:a[a.length-(E||1)].first_column,last_column:a[a.length-1].last_column},v&&(M._$.range=[a[a.length-(E||1)].range[0],a[a.length-1].range[1]]),void 0!==(T=this.performAction.apply(M,[s,u,c,g.yy,k[1],i,a].concat(d))))return T;E&&(n=n.slice(0,-1*E*2),i=i.slice(0,-1*E),a=a.slice(0,-1*E)),n.push(this.productions_[k[1]][0]),i.push(M.$),a.push(M._$),S=o[n[n.length-2]][n[n.length-1]],n.push(S);break;case 3:return!0}}return!0}},d={EOF:1,parseError:function(t,e){if(!this.yy.parser)throw new Error(t);this.yy.parser.parseError(t,e)},setInput:function(t,e){return this.yy=e||this.yy||{},this._input=t,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var t=this._input[0];return this.yytext+=t,this.yyleng++,this.offset++,this.match+=t,this.matched+=t,t.match(/(?:\r\n?|\n).*/g)?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),t},unput:function(t){var e=t.length,n=t.split(/(?:\r\n?|\n)/g);this._input=t+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-e),this.offset-=e;var r=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),n.length-1&&(this.yylineno-=n.length-1);var i=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:n?(n.length===r.length?this.yylloc.first_column:0)+r[r.length-n.length].length-n[0].length:this.yylloc.first_column-e},this.options.ranges&&(this.yylloc.range=[i[0],i[0]+this.yyleng-e]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){return this.options.backtrack_lexer?(this._backtrack=!0,this):this.parseError("Lexical error on line "+(this.yylineno+1)+". 
You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},less:function(t){this.unput(this.match.slice(t))},pastInput:function(){var t=this.matched.substr(0,this.matched.length-this.match.length);return(t.length>20?"...":"")+t.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var t=this.match;return t.length<20&&(t+=this._input.substr(0,20-t.length)),(t.substr(0,20)+(t.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var t=this.pastInput(),e=new Array(t.length+1).join("-");return t+this.upcomingInput()+"\n"+e+"^"},test_match:function(t,e){var n,r,i;if(this.options.backtrack_lexer&&(i={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(i.yylloc.range=this.yylloc.range.slice(0))),(r=t[0].match(/(?:\r\n?|\n).*/g))&&(this.yylineno+=r.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:r?r[r.length-1].length-r[r.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+t[0].length},this.yytext+=t[0],this.match+=t[0],this.matches=t,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(t[0].length),this.matched+=t[0],n=this.performAction.call(this,this.yy,this,e,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),n)return n;if(this._backtrack){for(var a in i)this[a]=i[a];return!1}return!1},next:function(){if(this.done)return this.EOF;var t,e,n,r;this._input||(this.done=!0),this._more||(this.yytext="",this.match="");for(var i=this._currentRules(),a=0;ae[0].length)){if(e=n,r=a,this.options.backtrack_lexer){if(!1!==(t=this.test_match(n,i[a])))return t;if(this._backtrack){e=!1;continue}return!1}if(!this.options.flex)break}return e?!1!==(t=this.test_match(e,i[r]))&&t:""===this._input?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+". 
Unrecognized text.\n"+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){return this.next()||this.lex()},begin:function(t){this.conditionStack.push(t)},popState:function(){return this.conditionStack.length-1>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(t){return(t=this.conditionStack.length-1-Math.abs(t||0))>=0?this.conditionStack[t]:"INITIAL"},pushState:function(t){this.begin(t)},stateStackSize:function(){return this.conditionStack.length},options:{"case-insensitive":!0},performAction:function(t,e,n,r){switch(n){case 0:return this.begin("open_directive"),26;case 1:return this.begin("type_directive"),27;case 2:return this.popState(),this.begin("arg_directive"),15;case 3:return this.popState(),this.popState(),29;case 4:return 28;case 5:case 6:case 8:case 9:break;case 7:return 11;case 10:return 4;case 11:return 17;case 12:return this.begin("acc_title"),18;case 13:return this.popState(),"acc_title_value";case 14:return this.begin("acc_descr"),20;case 15:return this.popState(),"acc_descr_value";case 16:this.begin("acc_descr_multiline");break;case 17:this.popState();break;case 18:return"acc_descr_multiline_value";case 19:return 23;case 20:return 24;case 21:return 25;case 22:return 15;case 23:return 6;case 24:return"INVALID"}},rules:[/^(?:%%\{)/i,/^(?:((?:(?!\}%%)[^:.])*))/i,/^(?::)/i,/^(?:\}%%)/i,/^(?:((?:(?!\}%%).|\n)*))/i,/^(?:%(?!\{)[^\n]*)/i,/^(?:[^\}]%%[^\n]*)/i,/^(?:[\n]+)/i,/^(?:\s+)/i,/^(?:#[^\n]*)/i,/^(?:journey\b)/i,/^(?:title\s[^#\n;]+)/i,/^(?:accTitle\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*\{\s*)/i,/^(?:[\}])/i,/^(?:[^\}]*)/i,/^(?:section\s[^#:\n;]+)/i,/^(?:[^#:\n;]+)/i,/^(?::[^#\n;]+)/i,/^(?::)/i,/^(?:$)/i,/^(?:.)/i],conditions:{open_directive:{rules:[1],inclusive:!1},type_directive:{rules:[2,3],inclusive:!1},arg_directive:{rules:[3,4],inclusive:!1},acc_descr_multiline:{rules:[17,18],inclusive:!1},acc_descr:{rules:[15],inclusive:!1},acc_title:{rules:[13],inclusive:!1},INITIAL:{rules:[0,5,6,7,8,9,10,11,12,14,16,19,20,21,22,23,24],inclusive:!0}}};function p(){this.yy={}}return f.lexer=d,p.prototype=f,f.Parser=p,new p}();e.parser=r,e.Parser=r.Parser,e.parse=function(){return r.parse.apply(r,arguments)},e.main=function(t){t[1]||(console.log("Usage: "+t[0]+" FILE"),process.exit(1));var r=n(9143).readFileSync(n(6470).normalize(t[1]),"utf8");return e.parser.parse(r)},n.c[n.s]===t&&e.main(process.argv.slice(1))},7967:(t,e)=>{"use strict";e.N=void 0;var n=/^([^\w]*)(javascript|data|vbscript)/im,r=/&#(\w+)(^\w|;)?/g,i=/[\u0000-\u001F\u007F-\u009F\u2000-\u200D\uFEFF]/gim,a=/^([^:]+):/gm,o=[".","/"];e.N=function(t){var e,s=(e=t||"",e.replace(r,(function(t,e){return String.fromCharCode(e)}))).replace(i,"").trim();if(!s)return"about:blank";if(function(t){return o.indexOf(t[0])>-1}(s))return s;var c=s.match(a);if(!c)return s;var u=c[0];return n.test(u)?"about:blank":s}},3841:t=>{t.exports=function(t,e){return t.intersect(e)}},8968:(t,e,n)=>{"use strict";n.d(e,{default:()=>HE});var r=n(1941),i=n.n(r),a={debug:1,info:2,warn:3,error:4,fatal:5},o={debug:function(){},info:function(){},warn:function(){},error:function(){},fatal:function(){}},s=function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"fatal";isNaN(t)&&(t=t.toLowerCase(),void 
0!==a[t]&&(t=a[t])),o.trace=function(){},o.debug=function(){},o.info=function(){},o.warn=function(){},o.error=function(){},o.fatal=function(){},t<=a.fatal&&(o.fatal=console.error?console.error.bind(console,c("FATAL"),"color: orange"):console.log.bind(console,"",c("FATAL"))),t<=a.error&&(o.error=console.error?console.error.bind(console,c("ERROR"),"color: orange"):console.log.bind(console,"",c("ERROR"))),t<=a.warn&&(o.warn=console.warn?console.warn.bind(console,c("WARN"),"color: orange"):console.log.bind(console,"",c("WARN"))),t<=a.info&&(o.info=console.info?console.info.bind(console,c("INFO"),"color: lightblue"):console.log.bind(console,"",c("INFO"))),t<=a.debug&&(o.debug=console.debug?console.debug.bind(console,c("DEBUG"),"color: lightgreen"):console.log.bind(console,"",c("DEBUG")))},c=function(t){var e=i()().format("ss.SSS");return"%c".concat(e," : ").concat(t," : ")};function u(t,e){let n;if(void 0===e)for(const e of t)null!=e&&(n=e)&&(n=e);else{let r=-1;for(let i of t)null!=(i=e(i,++r,t))&&(n=i)&&(n=i)}return n}function l(t,e){let n;if(void 0===e)for(const e of t)null!=e&&(n>e||void 0===n&&e>=e)&&(n=e);else{let r=-1;for(let i of t)null!=(i=e(i,++r,t))&&(n>i||void 0===n&&i>=i)&&(n=i)}return n}function h(t){return t}var f=1e-6;function d(t){return"translate("+t+",0)"}function p(t){return"translate(0,"+t+")"}function g(t){return e=>+t(e)}function y(t,e){return e=Math.max(0,t.bandwidth()-2*e)/2,t.round()&&(e=Math.round(e)),n=>+t(n)+e}function m(){return!this.__axis}function v(t,e){var n=[],r=null,i=null,a=6,o=6,s=3,c="undefined"!=typeof window&&window.devicePixelRatio>1?0:.5,u=1===t||4===t?-1:1,l=4===t||2===t?"x":"y",v=1===t||3===t?d:p;function b(d){var p=null==r?e.ticks?e.ticks.apply(e,n):e.domain():r,b=null==i?e.tickFormat?e.tickFormat.apply(e,n):h:i,_=Math.max(a,0)+s,x=e.range(),w=+x[0]+c,k=+x[x.length-1]+c,T=(e.bandwidth?y:g)(e.copy(),c),C=d.selection?d.selection():d,E=C.selectAll(".domain").data([null]),S=C.selectAll(".tick").data(p,e).order(),A=S.exit(),M=S.enter().append("g").attr("class","tick"),N=S.select("line"),D=S.select("text");E=E.merge(E.enter().insert("path",".tick").attr("class","domain").attr("stroke","currentColor")),S=S.merge(M),N=N.merge(M.append("line").attr("stroke","currentColor").attr(l+"2",u*a)),D=D.merge(M.append("text").attr("fill","currentColor").attr(l,u*_).attr("dy",1===t?"0em":3===t?"0.71em":"0.32em")),d!==C&&(E=E.transition(d),S=S.transition(d),N=N.transition(d),D=D.transition(d),A=A.transition(d).attr("opacity",f).attr("transform",(function(t){return isFinite(t=T(t))?v(t+c):this.getAttribute("transform")})),M.attr("opacity",f).attr("transform",(function(t){var e=this.parentNode.__axis;return v((e&&isFinite(e=e(t))?e:T(t))+c)}))),A.remove(),E.attr("d",4===t||2===t?o?"M"+u*o+","+w+"H"+c+"V"+k+"H"+u*o:"M"+c+","+w+"V"+k:o?"M"+w+","+u*o+"V"+c+"H"+k+"V"+u*o:"M"+w+","+c+"H"+k),S.attr("opacity",1).attr("transform",(function(t){return v(T(t)+c)})),N.attr(l+"2",u*a),D.attr(l,u*_).text(b),C.filter(m).attr("fill","none").attr("font-size",10).attr("font-family","sans-serif").attr("text-anchor",2===t?"start":4===t?"end":"middle"),C.each((function(){this.__axis=T}))}return b.scale=function(t){return arguments.length?(e=t,b):e},b.ticks=function(){return n=Array.from(arguments),b},b.tickArguments=function(t){return arguments.length?(n=null==t?[]:Array.from(t),b):n.slice()},b.tickValues=function(t){return arguments.length?(r=null==t?null:Array.from(t),b):r&&r.slice()},b.tickFormat=function(t){return arguments.length?(i=t,b):i},b.tickSize=function(t){return 
arguments.length?(a=o=+t,b):a},b.tickSizeInner=function(t){return arguments.length?(a=+t,b):a},b.tickSizeOuter=function(t){return arguments.length?(o=+t,b):o},b.tickPadding=function(t){return arguments.length?(s=+t,b):s},b.offset=function(t){return arguments.length?(c=+t,b):c},b}function b(){}function _(t){return null==t?b:function(){return this.querySelector(t)}}function x(t){return null==t?[]:Array.isArray(t)?t:Array.from(t)}function w(){return[]}function k(t){return null==t?w:function(){return this.querySelectorAll(t)}}function T(t){return function(){return this.matches(t)}}function C(t){return function(e){return e.matches(t)}}var E=Array.prototype.find;function S(){return this.firstElementChild}var A=Array.prototype.filter;function M(){return Array.from(this.children)}function N(t){return new Array(t.length)}function D(t,e){this.ownerDocument=t.ownerDocument,this.namespaceURI=t.namespaceURI,this._next=null,this._parent=t,this.__data__=e}function B(t){return function(){return t}}function L(t,e,n,r,i,a){for(var o,s=0,c=e.length,u=a.length;se?1:t>=e?0:NaN}D.prototype={constructor:D,appendChild:function(t){return this._parent.insertBefore(t,this._next)},insertBefore:function(t,e){return this._parent.insertBefore(t,e)},querySelector:function(t){return this._parent.querySelector(t)},querySelectorAll:function(t){return this._parent.querySelectorAll(t)}};var P="http://www.w3.org/1999/xhtml";const Y={svg:"http://www.w3.org/2000/svg",xhtml:P,xlink:"http://www.w3.org/1999/xlink",xml:"http://www.w3.org/XML/1998/namespace",xmlns:"http://www.w3.org/2000/xmlns/"};function j(t){var e=t+="",n=e.indexOf(":");return n>=0&&"xmlns"!==(e=t.slice(0,n))&&(t=t.slice(n+1)),Y.hasOwnProperty(e)?{space:Y[e],local:t}:t}function U(t){return function(){this.removeAttribute(t)}}function z(t){return function(){this.removeAttributeNS(t.space,t.local)}}function $(t,e){return function(){this.setAttribute(t,e)}}function q(t,e){return function(){this.setAttributeNS(t.space,t.local,e)}}function H(t,e){return function(){var n=e.apply(this,arguments);null==n?this.removeAttribute(t):this.setAttribute(t,n)}}function W(t,e){return function(){var n=e.apply(this,arguments);null==n?this.removeAttributeNS(t.space,t.local):this.setAttributeNS(t.space,t.local,n)}}function V(t){return t.ownerDocument&&t.ownerDocument.defaultView||t.document&&t||t.defaultView}function G(t){return function(){this.style.removeProperty(t)}}function X(t,e,n){return function(){this.style.setProperty(t,e,n)}}function Z(t,e,n){return function(){var r=e.apply(this,arguments);null==r?this.style.removeProperty(t):this.style.setProperty(t,r,n)}}function Q(t,e){return t.style.getPropertyValue(e)||V(t).getComputedStyle(t,null).getPropertyValue(e)}function K(t){return function(){delete this[t]}}function J(t,e){return function(){this[t]=e}}function tt(t,e){return function(){var n=e.apply(this,arguments);null==n?delete this[t]:this[t]=n}}function et(t){return t.trim().split(/^|\s+/)}function nt(t){return t.classList||new rt(t)}function rt(t){this._node=t,this._names=et(t.getAttribute("class")||"")}function it(t,e){for(var n=nt(t),r=-1,i=e.length;++r=0&&(e=t.slice(n+1),t=t.slice(0,n)),{type:t,name:e}}))}function Ct(t){return function(){var e=this.__on;if(e){for(var n,r=0,i=-1,a=e.length;r=0&&(this._names.splice(e,1),this._node.setAttribute("class",this._names.join(" ")))},contains:function(t){return this._names.indexOf(t)>=0}};var Nt=[null];function Dt(t,e){this._groups=t,this._parents=e}function Bt(){return new 
Dt([[document.documentElement]],Nt)}Dt.prototype=Bt.prototype={constructor:Dt,select:function(t){"function"!=typeof t&&(t=_(t));for(var e=this._groups,n=e.length,r=new Array(n),i=0;i=x&&(x=_+1);!(b=y[x])&&++x=0;)(r=i[a])&&(o&&4^r.compareDocumentPosition(o)&&o.parentNode.insertBefore(r,o),o=r);return this},sort:function(t){function e(e,n){return e&&n?t(e.__data__,n.__data__):!e-!n}t||(t=F);for(var n=this._groups,r=n.length,i=new Array(r),a=0;a1?this.each((null==e?G:"function"==typeof e?Z:X)(t,e,null==n?"":n)):Q(this.node(),t)},property:function(t,e){return arguments.length>1?this.each((null==e?K:"function"==typeof e?tt:J)(t,e)):this.node()[t]},classed:function(t,e){var n=et(t+"");if(arguments.length<2){for(var r=nt(this.node()),i=-1,a=n.length;++i{}};function It(){for(var t,e=0,n=arguments.length,r={};e=0&&(n=t.slice(r+1),t=t.slice(0,r)),t&&!e.hasOwnProperty(t))throw new Error("unknown type: "+t);return{type:t,name:n}}))}function Pt(t,e){for(var n,r=0,i=t.length;r0)for(var n,r,i=new Array(n),a=0;a=0&&e._call.call(void 0,t),e=e._next;--$t}()}finally{$t=0,function(){for(var t,e,n=Ut,r=1/0;n;)n._call?(r>n._time&&(r=n._time),t=n,n=n._next):(e=n._next,n._next=null,n=t?t._next=e:Ut=e);zt=t,re(r)}(),Vt=0}}function ne(){var t=Xt.now(),e=t-Wt;e>1e3&&(Gt-=e,Wt=t)}function re(t){$t||(qt&&(qt=clearTimeout(qt)),t-Vt>24?(t<1/0&&(qt=setTimeout(ee,t-Xt.now()-Gt)),Ht&&(Ht=clearInterval(Ht))):(Ht||(Wt=Xt.now(),Ht=setInterval(ne,1e3)),$t=1,Zt(ee)))}function ie(t,e,n){var r=new Jt;return e=null==e?0:+e,r.restart((n=>{r.stop(),t(n+e)}),e,n),r}Jt.prototype=te.prototype={constructor:Jt,restart:function(t,e,n){if("function"!=typeof t)throw new TypeError("callback is not a function");n=(null==n?Qt():+n)+(null==e?0:+e),this._next||zt===this||(zt?zt._next=this:Ut=this,zt=this),this._call=t,this._time=n,re()},stop:function(){this._call&&(this._call=null,this._time=1/0,re())}};var ae=jt("start","end","cancel","interrupt"),oe=[];function se(t,e,n,r,i,a){var o=t.__transition;if(o){if(n in o)return}else t.__transition={};!function(t,e,n){var r,i=t.__transition;function a(c){var u,l,h,f;if(1!==n.state)return s();for(u in i)if((f=i[u]).name===n.name){if(3===f.state)return ie(a);4===f.state?(f.state=6,f.timer.stop(),f.on.call("interrupt",t,t.__data__,f.index,f.group),delete i[u]):+u0)throw new Error("too late; already scheduled");return n}function ue(t,e){var n=le(t,e);if(n.state>3)throw new Error("too late; already running");return n}function le(t,e){var n=t.__transition;if(!n||!(n=n[e]))throw new Error("transition not found");return n}function he(t,e){return t=+t,e=+e,function(n){return t*(1-n)+e*n}}var fe,de=180/Math.PI,pe={translateX:0,translateY:0,rotate:0,skewX:0,scaleX:1,scaleY:1};function ge(t,e,n,r,i,a){var o,s,c;return(o=Math.sqrt(t*t+e*e))&&(t/=o,e/=o),(c=t*n+e*r)&&(n-=t*c,r-=e*c),(s=Math.sqrt(n*n+r*r))&&(n/=s,r/=s,c/=s),t*r180?e+=360:e-t>180&&(t+=360),a.push({i:n.push(i(n)+"rotate(",null,r)-2,x:he(t,e)})):e&&n.push(i(n)+"rotate("+e+r)}(a.rotate,o.rotate,s,c),function(t,e,n,a){t!==e?a.push({i:n.push(i(n)+"skewX(",null,r)-2,x:he(t,e)}):e&&n.push(i(n)+"skewX("+e+r)}(a.skewX,o.skewX,s,c),function(t,e,n,r,a,o){if(t!==n||e!==r){var s=a.push(i(a)+"scale(",null,",",null,")");o.push({i:s-4,x:he(t,n)},{i:s-2,x:he(e,r)})}else 1===n&&1===r||a.push(i(a)+"scale("+n+","+r+")")}(a.scaleX,a.scaleY,o.scaleX,o.scaleY,s,c),a=o=null,function(t){for(var 
e,n=-1,r=c.length;++n>8&15|e>>4&240,e>>4&15|240&e,(15&e)<<4|15&e,1):8===n?ze(e>>24&255,e>>16&255,e>>8&255,(255&e)/255):4===n?ze(e>>12&15|e>>8&240,e>>8&15|e>>4&240,e>>4&15|240&e,((15&e)<<4|15&e)/255):null):(e=De.exec(t))?new He(e[1],e[2],e[3],1):(e=Be.exec(t))?new He(255*e[1]/100,255*e[2]/100,255*e[3]/100,1):(e=Le.exec(t))?ze(e[1],e[2],e[3],e[4]):(e=Oe.exec(t))?ze(255*e[1]/100,255*e[2]/100,255*e[3]/100,e[4]):(e=Ie.exec(t))?Xe(e[1],e[2]/100,e[3]/100,1):(e=Re.exec(t))?Xe(e[1],e[2]/100,e[3]/100,e[4]):Fe.hasOwnProperty(t)?Ue(Fe[t]):"transparent"===t?new He(NaN,NaN,NaN,0):null}function Ue(t){return new He(t>>16&255,t>>8&255,255&t,1)}function ze(t,e,n,r){return r<=0&&(t=e=n=NaN),new He(t,e,n,r)}function $e(t){return t instanceof Te||(t=je(t)),t?new He((t=t.rgb()).r,t.g,t.b,t.opacity):new He}function qe(t,e,n,r){return 1===arguments.length?$e(t):new He(t,e,n,null==r?1:r)}function He(t,e,n,r){this.r=+t,this.g=+e,this.b=+n,this.opacity=+r}function We(){return"#"+Ge(this.r)+Ge(this.g)+Ge(this.b)}function Ve(){var t=this.opacity;return(1===(t=isNaN(t)?1:Math.max(0,Math.min(1,t)))?"rgb(":"rgba(")+Math.max(0,Math.min(255,Math.round(this.r)||0))+", "+Math.max(0,Math.min(255,Math.round(this.g)||0))+", "+Math.max(0,Math.min(255,Math.round(this.b)||0))+(1===t?")":", "+t+")")}function Ge(t){return((t=Math.max(0,Math.min(255,Math.round(t)||0)))<16?"0":"")+t.toString(16)}function Xe(t,e,n,r){return r<=0?t=e=n=NaN:n<=0||n>=1?t=e=NaN:e<=0&&(t=NaN),new Qe(t,e,n,r)}function Ze(t){if(t instanceof Qe)return new Qe(t.h,t.s,t.l,t.opacity);if(t instanceof Te||(t=je(t)),!t)return new Qe;if(t instanceof Qe)return t;var e=(t=t.rgb()).r/255,n=t.g/255,r=t.b/255,i=Math.min(e,n,r),a=Math.max(e,n,r),o=NaN,s=a-i,c=(a+i)/2;return s?(o=e===a?(n-r)/s+6*(n0&&c<1?0:o,new Qe(o,s,c,t.opacity)}function Qe(t,e,n,r){this.h=+t,this.s=+e,this.l=+n,this.opacity=+r}function Ke(t,e,n){return 255*(t<60?e+(n-e)*t/60:t<180?n:t<240?e+(n-e)*(240-t)/60:e)}function Je(t,e,n,r,i){var a=t*t,o=a*t;return((1-3*t+3*a-o)*e+(4-6*a+3*o)*n+(1+3*t+3*a-3*o)*r+o*i)/6}we(Te,je,{copy:function(t){return Object.assign(new this.constructor,this,t)},displayable:function(){return this.rgb().displayable()},hex:Pe,formatHex:Pe,formatHsl:function(){return Ze(this).formatHsl()},formatRgb:Ye,toString:Ye}),we(He,qe,ke(Te,{brighter:function(t){return t=null==t?Ee:Math.pow(Ee,t),new He(this.r*t,this.g*t,this.b*t,this.opacity)},darker:function(t){return t=null==t?Ce:Math.pow(Ce,t),new He(this.r*t,this.g*t,this.b*t,this.opacity)},rgb:function(){return this},displayable:function(){return-.5<=this.r&&this.r<255.5&&-.5<=this.g&&this.g<255.5&&-.5<=this.b&&this.b<255.5&&0<=this.opacity&&this.opacity<=1},hex:We,formatHex:We,formatRgb:Ve,toString:Ve})),we(Qe,(function(t,e,n,r){return 1===arguments.length?Ze(t):new Qe(t,e,n,null==r?1:r)}),ke(Te,{brighter:function(t){return t=null==t?Ee:Math.pow(Ee,t),new Qe(this.h,this.s,this.l*t,this.opacity)},darker:function(t){return t=null==t?Ce:Math.pow(Ce,t),new Qe(this.h,this.s,this.l*t,this.opacity)},rgb:function(){var t=this.h%360+360*(this.h<0),e=isNaN(t)||isNaN(this.s)?0:this.s,n=this.l,r=n+(n<.5?n:1-n)*e,i=2*n-r;return new He(Ke(t>=240?t-240:t+120,i,r),Ke(t,i,r),Ke(t<120?t+240:t-120,i,r),this.opacity)},displayable:function(){return(0<=this.s&&this.s<=1||isNaN(this.s))&&0<=this.l&&this.l<=1&&0<=this.opacity&&this.opacity<=1},formatHsl:function(){var t=this.opacity;return(1===(t=isNaN(t)?1:Math.max(0,Math.min(1,t)))?"hsl(":"hsla(")+(this.h||0)+", "+100*(this.s||0)+"%, "+100*(this.l||0)+"%"+(1===t?")":", "+t+")")}}));const tn=t=>()=>t;function 
en(t,e){var n=e-t;return n?function(t,e){return function(n){return t+n*e}}(t,n):tn(isNaN(t)?e:t)}const nn=function t(e){var n=function(t){return 1==(t=+t)?en:function(e,n){return n-e?function(t,e,n){return t=Math.pow(t,n),e=Math.pow(e,n)-t,n=1/n,function(r){return Math.pow(t+r*e,n)}}(e,n,t):tn(isNaN(e)?n:e)}}(e);function r(t,e){var r=n((t=qe(t)).r,(e=qe(e)).r),i=n(t.g,e.g),a=n(t.b,e.b),o=en(t.opacity,e.opacity);return function(e){return t.r=r(e),t.g=i(e),t.b=a(e),t.opacity=o(e),t+""}}return r.gamma=t,r}(1);function rn(t){return function(e){var n,r,i=e.length,a=new Array(i),o=new Array(i),s=new Array(i);for(n=0;n=1?(n=1,e-1):Math.floor(n*e),i=t[r],a=t[r+1],o=r>0?t[r-1]:2*i-a,s=ra&&(i=e.slice(a,i),s[o]?s[o]+=i:s[++o]=i),(n=n[0])===(r=r[0])?s[o]?s[o]+=r:s[++o]=r:(s[++o]=null,c.push({i:o,x:he(n,r)})),a=on.lastIndex;return a=0&&(t=t.slice(0,e)),!t||"start"===t}))}(e)?ce:ue;return function(){var o=a(this,t),s=o.on;s!==r&&(i=(r=s).copy()).on(e,n),o.on=i}}var Cn=Lt.prototype.constructor;function En(t){return function(){this.style.removeProperty(t)}}function Sn(t,e,n){return function(r){this.style.setProperty(t,e.call(this,r),n)}}function An(t,e,n){var r,i;function a(){var a=e.apply(this,arguments);return a!==i&&(r=(i=a)&&Sn(t,a,n)),r}return a._value=e,a}function Mn(t){return function(e){this.textContent=t.call(this,e)}}function Nn(t){var e,n;function r(){var r=t.apply(this,arguments);return r!==n&&(e=(n=r)&&Mn(r)),e}return r._value=t,r}var Dn=0;function Bn(t,e,n,r){this._groups=t,this._parents=e,this._name=n,this._id=r}function Ln(){return++Dn}var On=Lt.prototype;Bn.prototype=function(t){return Lt().transition(t)}.prototype={constructor:Bn,select:function(t){var e=this._name,n=this._id;"function"!=typeof t&&(t=_(t));for(var r=this._groups,i=r.length,a=new Array(i),o=0;o2&&n.state<5,n.state=6,n.timer.stop(),n.on.call(r?"interrupt":"cancel",t,t.__data__,n.index,n.group),delete a[i]):o=!1;o&&delete t.__transition}}(this,t)}))},Lt.prototype.transition=function(t){var e,n;t instanceof Bn?(e=t._id,t=t._name):(e=Ln(),(n=In).time=Qt(),t=null==t?null:t+"");for(var r=this._groups,i=r.length,a=0;a>8&15|e>>4&240,e>>4&15|240&e,(15&e)<<4|15&e,1):8===n?sr(e>>24&255,e>>16&255,e>>8&255,(255&e)/255):4===n?sr(e>>12&15|e>>8&240,e>>8&15|e>>4&240,e>>4&15|240&e,((15&e)<<4|15&e)/255):null):(e=Zn.exec(t))?new lr(e[1],e[2],e[3],1):(e=Qn.exec(t))?new lr(255*e[1]/100,255*e[2]/100,255*e[3]/100,1):(e=Kn.exec(t))?sr(e[1],e[2],e[3],e[4]):(e=Jn.exec(t))?sr(255*e[1]/100,255*e[2]/100,255*e[3]/100,e[4]):(e=tr.exec(t))?pr(e[1],e[2]/100,e[3]/100,1):(e=er.exec(t))?pr(e[1],e[2]/100,e[3]/100,e[4]):nr.hasOwnProperty(t)?or(nr[t]):"transparent"===t?new lr(NaN,NaN,NaN,0):null}function or(t){return new lr(t>>16&255,t>>8&255,255&t,1)}function sr(t,e,n,r){return r<=0&&(t=e=n=NaN),new lr(t,e,n,r)}function cr(t){return t instanceof $n||(t=ar(t)),t?new lr((t=t.rgb()).r,t.g,t.b,t.opacity):new lr}function ur(t,e,n,r){return 1===arguments.length?cr(t):new lr(t,e,n,null==r?1:r)}function lr(t,e,n,r){this.r=+t,this.g=+e,this.b=+n,this.opacity=+r}function hr(){return"#"+dr(this.r)+dr(this.g)+dr(this.b)}function fr(){var t=this.opacity;return(1===(t=isNaN(t)?1:Math.max(0,Math.min(1,t)))?"rgb(":"rgba(")+Math.max(0,Math.min(255,Math.round(this.r)||0))+", "+Math.max(0,Math.min(255,Math.round(this.g)||0))+", "+Math.max(0,Math.min(255,Math.round(this.b)||0))+(1===t?")":", "+t+")")}function dr(t){return((t=Math.max(0,Math.min(255,Math.round(t)||0)))<16?"0":"")+t.toString(16)}function pr(t,e,n,r){return r<=0?t=e=n=NaN:n<=0||n>=1?t=e=NaN:e<=0&&(t=NaN),new 
yr(t,e,n,r)}function gr(t){if(t instanceof yr)return new yr(t.h,t.s,t.l,t.opacity);if(t instanceof $n||(t=ar(t)),!t)return new yr;if(t instanceof yr)return t;var e=(t=t.rgb()).r/255,n=t.g/255,r=t.b/255,i=Math.min(e,n,r),a=Math.max(e,n,r),o=NaN,s=a-i,c=(a+i)/2;return s?(o=e===a?(n-r)/s+6*(n0&&c<1?0:o,new yr(o,s,c,t.opacity)}function yr(t,e,n,r){this.h=+t,this.s=+e,this.l=+n,this.opacity=+r}function mr(t,e,n){return 255*(t<60?e+(n-e)*t/60:t<180?n:t<240?e+(n-e)*(240-t)/60:e)}Un($n,ar,{copy:function(t){return Object.assign(new this.constructor,this,t)},displayable:function(){return this.rgb().displayable()},hex:rr,formatHex:rr,formatHsl:function(){return gr(this).formatHsl()},formatRgb:ir,toString:ir}),Un(lr,ur,zn($n,{brighter:function(t){return t=null==t?Hn:Math.pow(Hn,t),new lr(this.r*t,this.g*t,this.b*t,this.opacity)},darker:function(t){return t=null==t?qn:Math.pow(qn,t),new lr(this.r*t,this.g*t,this.b*t,this.opacity)},rgb:function(){return this},displayable:function(){return-.5<=this.r&&this.r<255.5&&-.5<=this.g&&this.g<255.5&&-.5<=this.b&&this.b<255.5&&0<=this.opacity&&this.opacity<=1},hex:hr,formatHex:hr,formatRgb:fr,toString:fr})),Un(yr,(function(t,e,n,r){return 1===arguments.length?gr(t):new yr(t,e,n,null==r?1:r)}),zn($n,{brighter:function(t){return t=null==t?Hn:Math.pow(Hn,t),new yr(this.h,this.s,this.l*t,this.opacity)},darker:function(t){return t=null==t?qn:Math.pow(qn,t),new yr(this.h,this.s,this.l*t,this.opacity)},rgb:function(){var t=this.h%360+360*(this.h<0),e=isNaN(t)||isNaN(this.s)?0:this.s,n=this.l,r=n+(n<.5?n:1-n)*e,i=2*n-r;return new lr(mr(t>=240?t-240:t+120,i,r),mr(t,i,r),mr(t<120?t+240:t-120,i,r),this.opacity)},displayable:function(){return(0<=this.s&&this.s<=1||isNaN(this.s))&&0<=this.l&&this.l<=1&&0<=this.opacity&&this.opacity<=1},formatHsl:function(){var t=this.opacity;return(1===(t=isNaN(t)?1:Math.max(0,Math.min(1,t)))?"hsl(":"hsla(")+(this.h||0)+", "+100*(this.s||0)+"%, "+100*(this.l||0)+"%"+(1===t?")":", "+t+")")}}));const vr=Math.PI/180,br=180/Math.PI,_r=.96422,xr=.82521,wr=4/29,kr=6/29,Tr=3*kr*kr;function Cr(t){if(t instanceof Er)return new Er(t.l,t.a,t.b,t.opacity);if(t instanceof Lr)return Or(t);t instanceof lr||(t=cr(t));var e,n,r=Nr(t.r),i=Nr(t.g),a=Nr(t.b),o=Sr((.2225045*r+.7168786*i+.0606169*a)/1);return r===i&&i===a?e=n=o:(e=Sr((.4360747*r+.3850649*i+.1430804*a)/_r),n=Sr((.0139322*r+.0971045*i+.7141733*a)/xr)),new Er(116*o-16,500*(e-o),200*(o-n),t.opacity)}function Er(t,e,n,r){this.l=+t,this.a=+e,this.b=+n,this.opacity=+r}function Sr(t){return t>.008856451679035631?Math.pow(t,1/3):t/Tr+wr}function Ar(t){return t>kr?t*t*t:Tr*(t-wr)}function Mr(t){return 255*(t<=.0031308?12.92*t:1.055*Math.pow(t,1/2.4)-.055)}function Nr(t){return(t/=255)<=.04045?t/12.92:Math.pow((t+.055)/1.055,2.4)}function Dr(t){if(t instanceof Lr)return new Lr(t.h,t.c,t.l,t.opacity);if(t instanceof Er||(t=Cr(t)),0===t.a&&0===t.b)return new Lr(NaN,0()=>t;function Rr(t,e){return function(n){return t+n*e}}function Fr(t,e){var n=e-t;return n?Rr(t,n):Ir(isNaN(t)?e:t)}function Pr(t){return function(e,n){var r=t((e=Br(e)).h,(n=Br(n)).h),i=Fr(e.c,n.c),a=Fr(e.l,n.l),o=Fr(e.opacity,n.opacity);return function(t){return e.h=r(t),e.c=i(t),e.l=a(t),e.opacity=o(t),e+""}}}const Yr=Pr((function(t,e){var n=e-t;return n?Rr(t,n>180||n<-180?n-360*Math.round(n/360):n):Ir(isNaN(t)?e:t)}));Pr(Fr);var jr=Math.sqrt(50),Ur=Math.sqrt(10),zr=Math.sqrt(2);function $r(t,e,n){var r=(e-t)/Math.max(0,n),i=Math.floor(Math.log(r)/Math.LN10),a=r/Math.pow(10,i);return 
i>=0?(a>=jr?10:a>=Ur?5:a>=zr?2:1)*Math.pow(10,i):-Math.pow(10,-i)/(a>=jr?10:a>=Ur?5:a>=zr?2:1)}function qr(t,e,n){var r=Math.abs(e-t)/Math.max(0,n),i=Math.pow(10,Math.floor(Math.log(r)/Math.LN10)),a=r/i;return a>=jr?i*=10:a>=Ur?i*=5:a>=zr&&(i*=2),ee?1:t>=e?0:NaN}function Wr(t){let e=t,n=t,r=t;function i(t,e,i=0,a=t.length){if(i>>1;r(t[n],e)<0?i=n+1:a=n}while(it(e)-n,n=Hr,r=(e,n)=>Hr(t(e),n)),{left:i,center:function(t,n,r=0,a=t.length){const o=i(t,n,r,a-1);return o>r&&e(t[o-1],n)>-e(t[o],n)?o-1:o},right:function(t,e,i=0,a=t.length){if(i>>1;r(t[n],e)<=0?i=n+1:a=n}while(i>8&15|e>>4&240,e>>4&15|240&e,(15&e)<<4|15&e,1):8===n?yi(e>>24&255,e>>16&255,e>>8&255,(255&e)/255):4===n?yi(e>>12&15|e>>8&240,e>>8&15|e>>4&240,e>>4&15|240&e,((15&e)<<4|15&e)/255):null):(e=ai.exec(t))?new bi(e[1],e[2],e[3],1):(e=oi.exec(t))?new bi(255*e[1]/100,255*e[2]/100,255*e[3]/100,1):(e=si.exec(t))?yi(e[1],e[2],e[3],e[4]):(e=ci.exec(t))?yi(255*e[1]/100,255*e[2]/100,255*e[3]/100,e[4]):(e=ui.exec(t))?ki(e[1],e[2]/100,e[3]/100,1):(e=li.exec(t))?ki(e[1],e[2]/100,e[3]/100,e[4]):hi.hasOwnProperty(t)?gi(hi[t]):"transparent"===t?new bi(NaN,NaN,NaN,0):null}function gi(t){return new bi(t>>16&255,t>>8&255,255&t,1)}function yi(t,e,n,r){return r<=0&&(t=e=n=NaN),new bi(t,e,n,r)}function mi(t){return t instanceof Kr||(t=pi(t)),t?new bi((t=t.rgb()).r,t.g,t.b,t.opacity):new bi}function vi(t,e,n,r){return 1===arguments.length?mi(t):new bi(t,e,n,null==r?1:r)}function bi(t,e,n,r){this.r=+t,this.g=+e,this.b=+n,this.opacity=+r}function _i(){return"#"+wi(this.r)+wi(this.g)+wi(this.b)}function xi(){var t=this.opacity;return(1===(t=isNaN(t)?1:Math.max(0,Math.min(1,t)))?"rgb(":"rgba(")+Math.max(0,Math.min(255,Math.round(this.r)||0))+", "+Math.max(0,Math.min(255,Math.round(this.g)||0))+", "+Math.max(0,Math.min(255,Math.round(this.b)||0))+(1===t?")":", "+t+")")}function wi(t){return((t=Math.max(0,Math.min(255,Math.round(t)||0)))<16?"0":"")+t.toString(16)}function ki(t,e,n,r){return r<=0?t=e=n=NaN:n<=0||n>=1?t=e=NaN:e<=0&&(t=NaN),new Ci(t,e,n,r)}function Ti(t){if(t instanceof Ci)return new Ci(t.h,t.s,t.l,t.opacity);if(t instanceof Kr||(t=pi(t)),!t)return new Ci;if(t instanceof Ci)return t;var e=(t=t.rgb()).r/255,n=t.g/255,r=t.b/255,i=Math.min(e,n,r),a=Math.max(e,n,r),o=NaN,s=a-i,c=(a+i)/2;return s?(o=e===a?(n-r)/s+6*(n0&&c<1?0:o,new Ci(o,s,c,t.opacity)}function Ci(t,e,n,r){this.h=+t,this.s=+e,this.l=+n,this.opacity=+r}function Ei(t,e,n){return 255*(t<60?e+(n-e)*t/60:t<180?n:t<240?e+(n-e)*(240-t)/60:e)}function Si(t,e,n,r,i){var a=t*t,o=a*t;return((1-3*t+3*a-o)*e+(4-6*a+3*o)*n+(1+3*t+3*a-3*o)*r+o*i)/6}Zr(Kr,pi,{copy:function(t){return Object.assign(new this.constructor,this,t)},displayable:function(){return this.rgb().displayable()},hex:fi,formatHex:fi,formatHsl:function(){return Ti(this).formatHsl()},formatRgb:di,toString:di}),Zr(bi,vi,Qr(Kr,{brighter:function(t){return t=null==t?ti:Math.pow(ti,t),new bi(this.r*t,this.g*t,this.b*t,this.opacity)},darker:function(t){return t=null==t?Jr:Math.pow(Jr,t),new bi(this.r*t,this.g*t,this.b*t,this.opacity)},rgb:function(){return this},displayable:function(){return-.5<=this.r&&this.r<255.5&&-.5<=this.g&&this.g<255.5&&-.5<=this.b&&this.b<255.5&&0<=this.opacity&&this.opacity<=1},hex:_i,formatHex:_i,formatRgb:xi,toString:xi})),Zr(Ci,(function(t,e,n,r){return 1===arguments.length?Ti(t):new Ci(t,e,n,null==r?1:r)}),Qr(Kr,{brighter:function(t){return t=null==t?ti:Math.pow(ti,t),new Ci(this.h,this.s,this.l*t,this.opacity)},darker:function(t){return t=null==t?Jr:Math.pow(Jr,t),new 
Ci(this.h,this.s,this.l*t,this.opacity)},rgb:function(){var t=this.h%360+360*(this.h<0),e=isNaN(t)||isNaN(this.s)?0:this.s,n=this.l,r=n+(n<.5?n:1-n)*e,i=2*n-r;return new bi(Ei(t>=240?t-240:t+120,i,r),Ei(t,i,r),Ei(t<120?t+240:t-120,i,r),this.opacity)},displayable:function(){return(0<=this.s&&this.s<=1||isNaN(this.s))&&0<=this.l&&this.l<=1&&0<=this.opacity&&this.opacity<=1},formatHsl:function(){var t=this.opacity;return(1===(t=isNaN(t)?1:Math.max(0,Math.min(1,t)))?"hsl(":"hsla(")+(this.h||0)+", "+100*(this.s||0)+"%, "+100*(this.l||0)+"%"+(1===t?")":", "+t+")")}}));const Ai=t=>()=>t;function Mi(t,e){var n=e-t;return n?function(t,e){return function(n){return t+n*e}}(t,n):Ai(isNaN(t)?e:t)}const Ni=function t(e){var n=function(t){return 1==(t=+t)?Mi:function(e,n){return n-e?function(t,e,n){return t=Math.pow(t,n),e=Math.pow(e,n)-t,n=1/n,function(r){return Math.pow(t+r*e,n)}}(e,n,t):Ai(isNaN(e)?n:e)}}(e);function r(t,e){var r=n((t=vi(t)).r,(e=vi(e)).r),i=n(t.g,e.g),a=n(t.b,e.b),o=Mi(t.opacity,e.opacity);return function(e){return t.r=r(e),t.g=i(e),t.b=a(e),t.opacity=o(e),t+""}}return r.gamma=t,r}(1);function Di(t){return function(e){var n,r,i=e.length,a=new Array(i),o=new Array(i),s=new Array(i);for(n=0;n=1?(n=1,e-1):Math.floor(n*e),i=t[r],a=t[r+1],o=r>0?t[r-1]:2*i-a,s=ra&&(i=e.slice(a,i),s[o]?s[o]+=i:s[++o]=i),(n=n[0])===(r=r[0])?s[o]?s[o]+=r:s[++o]=r:(s[++o]=null,c.push({i:o,x:Oi(n,r)})),a=Fi.lastIndex;return ae&&(n=t,t=e,e=n),u=function(n){return Math.max(t,Math.min(e,n))}),r=c>2?Vi:Wi,i=a=null,h}function h(e){return null==e||isNaN(e=+e)?n:(i||(i=r(o.map(t),s,c)))(t(u(e)))}return h.invert=function(n){return u(e((a||(a=r(s,o.map(t),Oi)))(n)))},h.domain=function(t){return arguments.length?(o=Array.from(t,zi),l()):o.slice()},h.range=function(t){return arguments.length?(s=Array.from(t),l()):s.slice()},h.rangeRound=function(t){return s=Array.from(t),c=Ui,l()},h.clamp=function(t){return arguments.length?(u=!!t||qi,l()):u!==qi},h.interpolate=function(t){return arguments.length?(c=t,l()):c},h.unknown=function(t){return arguments.length?(n=t,h):n},function(n,r){return t=n,e=r,l()}}()(qi,qi)}function Zi(t,e){switch(arguments.length){case 0:break;case 1:this.range(t);break;default:this.range(e).domain(t)}return this}var Qi,Ki=/^(?:(.)?([<>=^]))?([+\-( ])?([$#])?(0)?(\d+)?(,)?(\.\d+)?(~)?([a-z%])?$/i;function Ji(t){if(!(e=Ki.exec(t)))throw new Error("invalid format: "+t);var e;return new ta({fill:e[1],align:e[2],sign:e[3],symbol:e[4],zero:e[5],width:e[6],comma:e[7],precision:e[8]&&e[8].slice(1),trim:e[9],type:e[10]})}function ta(t){this.fill=void 0===t.fill?" 
":t.fill+"",this.align=void 0===t.align?">":t.align+"",this.sign=void 0===t.sign?"-":t.sign+"",this.symbol=void 0===t.symbol?"":t.symbol+"",this.zero=!!t.zero,this.width=void 0===t.width?void 0:+t.width,this.comma=!!t.comma,this.precision=void 0===t.precision?void 0:+t.precision,this.trim=!!t.trim,this.type=void 0===t.type?"":t.type+""}function ea(t,e){if((n=(t=e?t.toExponential(e-1):t.toExponential()).indexOf("e"))<0)return null;var n,r=t.slice(0,n);return[r.length>1?r[0]+r.slice(2):r,+t.slice(n+1)]}function na(t){return(t=ea(Math.abs(t)))?t[1]:NaN}function ra(t,e){var n=ea(t,e);if(!n)return t+"";var r=n[0],i=n[1];return i<0?"0."+new Array(-i).join("0")+r:r.length>i+1?r.slice(0,i+1)+"."+r.slice(i+1):r+new Array(i-r.length+2).join("0")}Ji.prototype=ta.prototype,ta.prototype.toString=function(){return this.fill+this.align+this.sign+this.symbol+(this.zero?"0":"")+(void 0===this.width?"":Math.max(1,0|this.width))+(this.comma?",":"")+(void 0===this.precision?"":"."+Math.max(0,0|this.precision))+(this.trim?"~":"")+this.type};const ia={"%":(t,e)=>(100*t).toFixed(e),b:t=>Math.round(t).toString(2),c:t=>t+"",d:function(t){return Math.abs(t=Math.round(t))>=1e21?t.toLocaleString("en").replace(/,/g,""):t.toString(10)},e:(t,e)=>t.toExponential(e),f:(t,e)=>t.toFixed(e),g:(t,e)=>t.toPrecision(e),o:t=>Math.round(t).toString(8),p:(t,e)=>ra(100*t,e),r:ra,s:function(t,e){var n=ea(t,e);if(!n)return t+"";var r=n[0],i=n[1],a=i-(Qi=3*Math.max(-8,Math.min(8,Math.floor(i/3))))+1,o=r.length;return a===o?r:a>o?r+new Array(a-o+1).join("0"):a>0?r.slice(0,a)+"."+r.slice(a):"0."+new Array(1-a).join("0")+ea(t,Math.max(0,e+a-1))[0]},X:t=>Math.round(t).toString(16).toUpperCase(),x:t=>Math.round(t).toString(16)};function aa(t){return t}var oa,sa,ca,ua=Array.prototype.map,la=["y","z","a","f","p","n","µ","m","","k","M","G","T","P","E","Z","Y"];function ha(t){var e=t.domain;return t.ticks=function(t){var n=e();return function(t,e,n){var r,i,a,o,s=-1;if(n=+n,(t=+t)==(e=+e)&&n>0)return[t];if((r=e0){let n=Math.round(t/o),r=Math.round(e/o);for(n*oe&&--r,a=new Array(i=r-n+1);++se&&--r,a=new Array(i=r-n+1);++s0;){if((i=$r(c,u,n))===r)return a[o]=c,a[s]=u,e(a);if(i>0)c=Math.floor(c/i)*i,u=Math.ceil(u/i)*i;else{if(!(i<0))break;c=Math.ceil(c*i)/i,u=Math.floor(u*i)/i}r=i}return t},t}function fa(){var t=Xi();return t.copy=function(){return Gi(t,fa())},Zi.apply(t,arguments),ha(t)}oa=function(t){var e,n,r=void 0===t.grouping||void 0===t.thousands?aa:(e=ua.call(t.grouping,Number),n=t.thousands+"",function(t,r){for(var i=t.length,a=[],o=0,s=e[0],c=0;i>0&&s>0&&(c+s+1>r&&(s=Math.max(1,r-c)),a.push(t.substring(i-=s,i+s)),!((c+=s+1)>r));)s=e[o=(o+1)%e.length];return a.reverse().join(n)}),i=void 0===t.currency?"":t.currency[0]+"",a=void 0===t.currency?"":t.currency[1]+"",o=void 0===t.decimal?".":t.decimal+"",s=void 0===t.numerals?aa:function(t){return function(e){return e.replace(/[0-9]/g,(function(e){return t[+e]}))}}(ua.call(t.numerals,String)),c=void 0===t.percent?"%":t.percent+"",u=void 0===t.minus?"−":t.minus+"",l=void 0===t.nan?"NaN":t.nan+"";function h(t){var e=(t=Ji(t)).fill,n=t.align,h=t.sign,f=t.symbol,d=t.zero,p=t.width,g=t.comma,y=t.precision,m=t.trim,v=t.type;"n"===v?(g=!0,v="g"):ia[v]||(void 0===y&&(y=12),m=!0,v="g"),(d||"0"===e&&"="===n)&&(d=!0,e="0",n="=");var b="$"===f?i:"#"===f&&/[boxX]/.test(v)?"0"+v.toLowerCase():"",_="$"===f?a:/[%p]/.test(v)?c:"",x=ia[v],w=/[defgprs%]/.test(v);function k(t){var i,a,c,f=b,k=_;if("c"===v)k=x(t)+k,t="";else{var T=(t=+t)<0||1/t<0;if(t=isNaN(t)?l:x(Math.abs(t),y),m&&(t=function(t){t:for(var 
e,n=t.length,r=1,i=-1;r0&&(i=0)}return i>0?t.slice(0,i)+t.slice(e+1):t}(t)),T&&0==+t&&"+"!==h&&(T=!1),f=(T?"("===h?h:u:"-"===h||"("===h?"":h)+f,k=("s"===v?la[8+Qi/3]:"")+k+(T&&"("===h?")":""),w)for(i=-1,a=t.length;++i(c=t.charCodeAt(i))||c>57){k=(46===c?o+t.slice(i+1):t.slice(i))+k,t=t.slice(0,i);break}}g&&!d&&(t=r(t,1/0));var C=f.length+t.length+k.length,E=C>1)+f+t+k+E.slice(C);break;default:t=E+f+t+k}return s(t)}return y=void 0===y?6:/[gprs]/.test(v)?Math.max(1,Math.min(21,y)):Math.max(0,Math.min(20,y)),k.toString=function(){return t+""},k}return{format:h,formatPrefix:function(t,e){var n=h(((t=Ji(t)).type="f",t)),r=3*Math.max(-8,Math.min(8,Math.floor(na(e)/3))),i=Math.pow(10,-r),a=la[8+r/3];return function(t){return n(i*t)+a}}}}({thousands:",",grouping:[3],currency:["$",""]}),sa=oa.format,ca=oa.formatPrefix;class da extends Map{constructor(t,e=ga){if(super(),Object.defineProperties(this,{_intern:{value:new Map},_key:{value:e}}),null!=t)for(const[e,n]of t)this.set(e,n)}get(t){return super.get(pa(this,t))}has(t){return super.has(pa(this,t))}set(t,e){return super.set(function({_intern:t,_key:e},n){const r=e(n);return t.has(r)?t.get(r):(t.set(r,n),n)}(this,t),e)}delete(t){return super.delete(function({_intern:t,_key:e},n){const r=e(n);return t.has(r)&&(n=t.get(r),t.delete(r)),n}(this,t))}}function pa({_intern:t,_key:e},n){const r=e(n);return t.has(r)?t.get(r):n}function ga(t){return null!==t&&"object"==typeof t?t.valueOf():t}Set;const ya=Symbol("implicit");function ma(){var t=new da,e=[],n=[],r=ya;function i(i){let a=t.get(i);if(void 0===a){if(r!==ya)return r;t.set(i,a=e.push(i)-1)}return n[a%n.length]}return i.domain=function(n){if(!arguments.length)return e.slice();e=[],t=new da;for(const r of n)t.has(r)||t.set(r,e.push(r)-1);return i},i.range=function(t){return arguments.length?(n=Array.from(t),i):n.slice()},i.unknown=function(t){return arguments.length?(r=t,i):r},i.copy=function(){return ma(e,n).unknown(r)},Zi.apply(i,arguments),i}const va=1e3,ba=6e4,_a=36e5,xa=864e5,wa=6048e5,ka=31536e6;var Ta=new Date,Ca=new Date;function Ea(t,e,n,r){function i(e){return t(e=0===arguments.length?new Date:new Date(+e)),e}return i.floor=function(e){return t(e=new Date(+e)),e},i.ceil=function(n){return t(n=new Date(n-1)),e(n,1),t(n),n},i.round=function(t){var e=i(t),n=i.ceil(t);return t-e0))return s;do{s.push(o=new Date(+n)),e(n,a),t(n)}while(o=e)for(;t(e),!n(e);)e.setTime(e-1)}),(function(t,r){if(t>=t)if(r<0)for(;++r<=0;)for(;e(t,-1),!n(t););else for(;--r>=0;)for(;e(t,1),!n(t););}))},n&&(i.count=function(e,r){return Ta.setTime(+e),Ca.setTime(+r),t(Ta),t(Ca),Math.floor(n(Ta,Ca))},i.every=function(t){return t=Math.floor(t),isFinite(t)&&t>0?t>1?i.filter(r?function(e){return r(e)%t==0}:function(e){return i.count(0,e)%t==0}):i:null}),i}var Sa=Ea((function(){}),(function(t,e){t.setTime(+t+e)}),(function(t,e){return e-t}));Sa.every=function(t){return t=Math.floor(t),isFinite(t)&&t>0?t>1?Ea((function(e){e.setTime(Math.floor(e/t)*t)}),(function(e,n){e.setTime(+e+n*t)}),(function(e,n){return(n-e)/t})):Sa:null};const Aa=Sa;Sa.range;var Ma=Ea((function(t){t.setTime(t-t.getMilliseconds())}),(function(t,e){t.setTime(+t+e*va)}),(function(t,e){return(e-t)/va}),(function(t){return t.getUTCSeconds()}));const Na=Ma;Ma.range;var Da=Ea((function(t){t.setTime(t-t.getMilliseconds()-t.getSeconds()*va)}),(function(t,e){t.setTime(+t+e*ba)}),(function(t,e){return(e-t)/ba}),(function(t){return t.getMinutes()}));const Ba=Da;Da.range;var 
La=Ea((function(t){t.setTime(t-t.getMilliseconds()-t.getSeconds()*va-t.getMinutes()*ba)}),(function(t,e){t.setTime(+t+e*_a)}),(function(t,e){return(e-t)/_a}),(function(t){return t.getHours()}));const Oa=La;La.range;var Ia=Ea((t=>t.setHours(0,0,0,0)),((t,e)=>t.setDate(t.getDate()+e)),((t,e)=>(e-t-(e.getTimezoneOffset()-t.getTimezoneOffset())*ba)/xa),(t=>t.getDate()-1));const Ra=Ia;function Fa(t){return Ea((function(e){e.setDate(e.getDate()-(e.getDay()+7-t)%7),e.setHours(0,0,0,0)}),(function(t,e){t.setDate(t.getDate()+7*e)}),(function(t,e){return(e-t-(e.getTimezoneOffset()-t.getTimezoneOffset())*ba)/wa}))}Ia.range;var Pa=Fa(0),Ya=Fa(1),ja=Fa(2),Ua=Fa(3),za=Fa(4),$a=Fa(5),qa=Fa(6),Ha=(Pa.range,Ya.range,ja.range,Ua.range,za.range,$a.range,qa.range,Ea((function(t){t.setDate(1),t.setHours(0,0,0,0)}),(function(t,e){t.setMonth(t.getMonth()+e)}),(function(t,e){return e.getMonth()-t.getMonth()+12*(e.getFullYear()-t.getFullYear())}),(function(t){return t.getMonth()})));const Wa=Ha;Ha.range;var Va=Ea((function(t){t.setMonth(0,1),t.setHours(0,0,0,0)}),(function(t,e){t.setFullYear(t.getFullYear()+e)}),(function(t,e){return e.getFullYear()-t.getFullYear()}),(function(t){return t.getFullYear()}));Va.every=function(t){return isFinite(t=Math.floor(t))&&t>0?Ea((function(e){e.setFullYear(Math.floor(e.getFullYear()/t)*t),e.setMonth(0,1),e.setHours(0,0,0,0)}),(function(e,n){e.setFullYear(e.getFullYear()+n*t)})):null};const Ga=Va;Va.range;var Xa=Ea((function(t){t.setUTCSeconds(0,0)}),(function(t,e){t.setTime(+t+e*ba)}),(function(t,e){return(e-t)/ba}),(function(t){return t.getUTCMinutes()}));const Za=Xa;Xa.range;var Qa=Ea((function(t){t.setUTCMinutes(0,0,0)}),(function(t,e){t.setTime(+t+e*_a)}),(function(t,e){return(e-t)/_a}),(function(t){return t.getUTCHours()}));const Ka=Qa;Qa.range;var Ja=Ea((function(t){t.setUTCHours(0,0,0,0)}),(function(t,e){t.setUTCDate(t.getUTCDate()+e)}),(function(t,e){return(e-t)/xa}),(function(t){return t.getUTCDate()-1}));const to=Ja;function eo(t){return Ea((function(e){e.setUTCDate(e.getUTCDate()-(e.getUTCDay()+7-t)%7),e.setUTCHours(0,0,0,0)}),(function(t,e){t.setUTCDate(t.getUTCDate()+7*e)}),(function(t,e){return(e-t)/wa}))}Ja.range;var no=eo(0),ro=eo(1),io=eo(2),ao=eo(3),oo=eo(4),so=eo(5),co=eo(6),uo=(no.range,ro.range,io.range,ao.range,oo.range,so.range,co.range,Ea((function(t){t.setUTCDate(1),t.setUTCHours(0,0,0,0)}),(function(t,e){t.setUTCMonth(t.getUTCMonth()+e)}),(function(t,e){return e.getUTCMonth()-t.getUTCMonth()+12*(e.getUTCFullYear()-t.getUTCFullYear())}),(function(t){return t.getUTCMonth()})));const lo=uo;uo.range;var ho=Ea((function(t){t.setUTCMonth(0,1),t.setUTCHours(0,0,0,0)}),(function(t,e){t.setUTCFullYear(t.getUTCFullYear()+e)}),(function(t,e){return e.getUTCFullYear()-t.getUTCFullYear()}),(function(t){return t.getUTCFullYear()}));ho.every=function(t){return isFinite(t=Math.floor(t))&&t>0?Ea((function(e){e.setUTCFullYear(Math.floor(e.getUTCFullYear()/t)*t),e.setUTCMonth(0,1),e.setUTCHours(0,0,0,0)}),(function(e,n){e.setUTCFullYear(e.getUTCFullYear()+n*t)})):null};const fo=ho;function po(t,e,n,r,i,a){const o=[[Na,1,va],[Na,5,5e3],[Na,15,15e3],[Na,30,3e4],[a,1,ba],[a,5,3e5],[a,15,9e5],[a,30,18e5],[i,1,_a],[i,3,108e5],[i,6,216e5],[i,12,432e5],[r,1,xa],[r,2,1728e5],[n,1,wa],[e,1,2592e6],[e,3,7776e6],[t,1,ka]];function s(e,n,r){const i=Math.abs(n-e)/r,a=Wr((([,,t])=>t)).right(o,i);if(a===o.length)return t.every(qr(e/ka,n/ka,r));if(0===a)return Aa.every(Math.max(qr(e,n,r),1));const[s,c]=o[i/o[a-1][2][t.toLowerCase(),e])))}function Bo(t,e,n){var 
r=Co.exec(e.slice(n,n+1));return r?(t.w=+r[0],n+r[0].length):-1}function Lo(t,e,n){var r=Co.exec(e.slice(n,n+1));return r?(t.u=+r[0],n+r[0].length):-1}function Oo(t,e,n){var r=Co.exec(e.slice(n,n+2));return r?(t.U=+r[0],n+r[0].length):-1}function Io(t,e,n){var r=Co.exec(e.slice(n,n+2));return r?(t.V=+r[0],n+r[0].length):-1}function Ro(t,e,n){var r=Co.exec(e.slice(n,n+2));return r?(t.W=+r[0],n+r[0].length):-1}function Fo(t,e,n){var r=Co.exec(e.slice(n,n+4));return r?(t.y=+r[0],n+r[0].length):-1}function Po(t,e,n){var r=Co.exec(e.slice(n,n+2));return r?(t.y=+r[0]+(+r[0]>68?1900:2e3),n+r[0].length):-1}function Yo(t,e,n){var r=/^(Z)|([+-]\d\d)(?::?(\d\d))?/.exec(e.slice(n,n+6));return r?(t.Z=r[1]?0:-(r[2]+(r[3]||"00")),n+r[0].length):-1}function jo(t,e,n){var r=Co.exec(e.slice(n,n+1));return r?(t.q=3*r[0]-3,n+r[0].length):-1}function Uo(t,e,n){var r=Co.exec(e.slice(n,n+2));return r?(t.m=r[0]-1,n+r[0].length):-1}function zo(t,e,n){var r=Co.exec(e.slice(n,n+2));return r?(t.d=+r[0],n+r[0].length):-1}function $o(t,e,n){var r=Co.exec(e.slice(n,n+3));return r?(t.m=0,t.d=+r[0],n+r[0].length):-1}function qo(t,e,n){var r=Co.exec(e.slice(n,n+2));return r?(t.H=+r[0],n+r[0].length):-1}function Ho(t,e,n){var r=Co.exec(e.slice(n,n+2));return r?(t.M=+r[0],n+r[0].length):-1}function Wo(t,e,n){var r=Co.exec(e.slice(n,n+2));return r?(t.S=+r[0],n+r[0].length):-1}function Vo(t,e,n){var r=Co.exec(e.slice(n,n+3));return r?(t.L=+r[0],n+r[0].length):-1}function Go(t,e,n){var r=Co.exec(e.slice(n,n+6));return r?(t.L=Math.floor(r[0]/1e3),n+r[0].length):-1}function Xo(t,e,n){var r=Eo.exec(e.slice(n,n+1));return r?n+r[0].length:-1}function Zo(t,e,n){var r=Co.exec(e.slice(n));return r?(t.Q=+r[0],n+r[0].length):-1}function Qo(t,e,n){var r=Co.exec(e.slice(n));return r?(t.s=+r[0],n+r[0].length):-1}function Ko(t,e){return Ao(t.getDate(),e,2)}function Jo(t,e){return Ao(t.getHours(),e,2)}function ts(t,e){return Ao(t.getHours()%12||12,e,2)}function es(t,e){return Ao(1+Ra.count(Ga(t),t),e,3)}function ns(t,e){return Ao(t.getMilliseconds(),e,3)}function rs(t,e){return ns(t,e)+"000"}function is(t,e){return Ao(t.getMonth()+1,e,2)}function as(t,e){return Ao(t.getMinutes(),e,2)}function os(t,e){return Ao(t.getSeconds(),e,2)}function ss(t){var e=t.getDay();return 0===e?7:e}function cs(t,e){return Ao(Pa.count(Ga(t)-1,t),e,2)}function us(t){var e=t.getDay();return e>=4||0===e?za(t):za.ceil(t)}function ls(t,e){return t=us(t),Ao(za.count(Ga(t),t)+(4===Ga(t).getDay()),e,2)}function hs(t){return t.getDay()}function fs(t,e){return Ao(Ya.count(Ga(t)-1,t),e,2)}function ds(t,e){return Ao(t.getFullYear()%100,e,2)}function ps(t,e){return Ao((t=us(t)).getFullYear()%100,e,2)}function gs(t,e){return Ao(t.getFullYear()%1e4,e,4)}function ys(t,e){var n=t.getDay();return Ao((t=n>=4||0===n?za(t):za.ceil(t)).getFullYear()%1e4,e,4)}function ms(t){var e=t.getTimezoneOffset();return(e>0?"-":(e*=-1,"+"))+Ao(e/60|0,"0",2)+Ao(e%60,"0",2)}function vs(t,e){return Ao(t.getUTCDate(),e,2)}function bs(t,e){return Ao(t.getUTCHours(),e,2)}function _s(t,e){return Ao(t.getUTCHours()%12||12,e,2)}function xs(t,e){return Ao(1+to.count(fo(t),t),e,3)}function ws(t,e){return Ao(t.getUTCMilliseconds(),e,3)}function ks(t,e){return ws(t,e)+"000"}function Ts(t,e){return Ao(t.getUTCMonth()+1,e,2)}function Cs(t,e){return Ao(t.getUTCMinutes(),e,2)}function Es(t,e){return Ao(t.getUTCSeconds(),e,2)}function Ss(t){var e=t.getUTCDay();return 0===e?7:e}function As(t,e){return Ao(no.count(fo(t)-1,t),e,2)}function Ms(t){var e=t.getUTCDay();return e>=4||0===e?oo(t):oo.ceil(t)}function 
Ns(t,e){return t=Ms(t),Ao(oo.count(fo(t),t)+(4===fo(t).getUTCDay()),e,2)}function Ds(t){return t.getUTCDay()}function Bs(t,e){return Ao(ro.count(fo(t)-1,t),e,2)}function Ls(t,e){return Ao(t.getUTCFullYear()%100,e,2)}function Os(t,e){return Ao((t=Ms(t)).getUTCFullYear()%100,e,2)}function Is(t,e){return Ao(t.getUTCFullYear()%1e4,e,4)}function Rs(t,e){var n=t.getUTCDay();return Ao((t=n>=4||0===n?oo(t):oo.ceil(t)).getUTCFullYear()%1e4,e,4)}function Fs(){return"+0000"}function Ps(){return"%"}function Ys(t){return+t}function js(t){return Math.floor(+t/1e3)}function Us(t){return new Date(t)}function zs(t){return t instanceof Date?+t:+new Date(+t)}function $s(t,e,n,r,i,a,o,s,c,u){var l=Xi(),h=l.invert,f=l.domain,d=u(".%L"),p=u(":%S"),g=u("%I:%M"),y=u("%I %p"),m=u("%a %d"),v=u("%b %d"),b=u("%B"),_=u("%Y");function x(t){return(c(t)=12)]},q:function(t){return 1+~~(t.getMonth()/3)},Q:Ys,s:js,S:os,u:ss,U:cs,V:ls,w:hs,W:fs,x:null,X:null,y:ds,Y:gs,Z:ms,"%":Ps},_={a:function(t){return o[t.getUTCDay()]},A:function(t){return a[t.getUTCDay()]},b:function(t){return c[t.getUTCMonth()]},B:function(t){return s[t.getUTCMonth()]},c:null,d:vs,e:vs,f:ks,g:Os,G:Rs,H:bs,I:_s,j:xs,L:ws,m:Ts,M:Cs,p:function(t){return i[+(t.getUTCHours()>=12)]},q:function(t){return 1+~~(t.getUTCMonth()/3)},Q:Ys,s:js,S:Es,u:Ss,U:As,V:Ns,w:Ds,W:Bs,x:null,X:null,y:Ls,Y:Is,Z:Fs,"%":Ps},x={a:function(t,e,n){var r=d.exec(e.slice(n));return r?(t.w=p.get(r[0].toLowerCase()),n+r[0].length):-1},A:function(t,e,n){var r=h.exec(e.slice(n));return r?(t.w=f.get(r[0].toLowerCase()),n+r[0].length):-1},b:function(t,e,n){var r=m.exec(e.slice(n));return r?(t.m=v.get(r[0].toLowerCase()),n+r[0].length):-1},B:function(t,e,n){var r=g.exec(e.slice(n));return r?(t.m=y.get(r[0].toLowerCase()),n+r[0].length):-1},c:function(t,n,r){return T(t,e,n,r)},d:zo,e:zo,f:Go,g:Po,G:Fo,H:qo,I:qo,j:$o,L:Vo,m:Uo,M:Ho,p:function(t,e,n){var r=u.exec(e.slice(n));return r?(t.p=l.get(r[0].toLowerCase()),n+r[0].length):-1},q:jo,Q:Zo,s:Qo,S:Wo,u:Lo,U:Oo,V:Io,w:Bo,W:Ro,x:function(t,e,r){return T(t,n,e,r)},X:function(t,e,n){return T(t,r,e,n)},y:Po,Y:Fo,Z:Yo,"%":Xo};function w(t,e){return function(n){var r,i,a,o=[],s=-1,c=0,u=t.length;for(n instanceof Date||(n=new Date(+n));++s53)return null;"w"in a||(a.w=1),"Z"in a?(i=(r=_o(xo(a.y,0,1))).getUTCDay(),r=i>4||0===i?ro.ceil(r):ro(r),r=to.offset(r,7*(a.V-1)),a.y=r.getUTCFullYear(),a.m=r.getUTCMonth(),a.d=r.getUTCDate()+(a.w+6)%7):(i=(r=bo(xo(a.y,0,1))).getDay(),r=i>4||0===i?Ya.ceil(r):Ya(r),r=Ra.offset(r,7*(a.V-1)),a.y=r.getFullYear(),a.m=r.getMonth(),a.d=r.getDate()+(a.w+6)%7)}else("W"in a||"U"in a)&&("w"in a||(a.w="u"in a?a.u%7:"W"in a?1:0),i="Z"in a?_o(xo(a.y,0,1)).getUTCDay():bo(xo(a.y,0,1)).getDay(),a.m=0,a.d="W"in a?(a.w+6)%7+7*a.W-(i+5)%7:a.w+7*a.U-(i+6)%7);return"Z"in a?(a.H+=a.Z/100|0,a.M+=a.Z%100,_o(a)):bo(a)}}function T(t,e,n,r){for(var i,a,o=0,s=e.length,c=n.length;o=c)return-1;if(37===(i=e.charCodeAt(o++))){if(i=e.charAt(o++),!(a=x[i in To?e.charAt(o++):i])||(r=a(t,n,r))<0)return-1}else if(i!=n.charCodeAt(r++))return-1}return r}return b.x=w(n,b),b.X=w(r,b),b.c=w(e,b),_.x=w(n,_),_.X=w(r,_),_.c=w(e,_),{format:function(t){var e=w(t+="",b);return e.toString=function(){return t},e},parse:function(t){var e=k(t+="",!1);return e.toString=function(){return t},e},utcFormat:function(t){var e=w(t+="",_);return e.toString=function(){return t},e},utcParse:function(t){var e=k(t+="",!0);return e.toString=function(){return t},e}}}({dateTime:"%x, %X",date:"%-m/%-d/%Y",time:"%-I:%M:%S 
%p",periods:["AM","PM"],days:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],shortDays:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],months:["January","February","March","April","May","June","July","August","September","October","November","December"],shortMonths:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]}),ko=wo.format,wo.parse,wo.utcFormat,wo.utcParse;var Qs=Array.prototype.find;function Ks(){return this.firstElementChild}var Js=Array.prototype.filter;function tc(){return Array.from(this.children)}function ec(t){return new Array(t.length)}function nc(t,e){this.ownerDocument=t.ownerDocument,this.namespaceURI=t.namespaceURI,this._next=null,this._parent=t,this.__data__=e}function rc(t){return function(){return t}}function ic(t,e,n,r,i,a){for(var o,s=0,c=e.length,u=a.length;se?1:t>=e?0:NaN}nc.prototype={constructor:nc,appendChild:function(t){return this._parent.insertBefore(t,this._next)},insertBefore:function(t,e){return this._parent.insertBefore(t,e)},querySelector:function(t){return this._parent.querySelector(t)},querySelectorAll:function(t){return this._parent.querySelectorAll(t)}};var uc="http://www.w3.org/1999/xhtml";const lc={svg:"http://www.w3.org/2000/svg",xhtml:uc,xlink:"http://www.w3.org/1999/xlink",xml:"http://www.w3.org/XML/1998/namespace",xmlns:"http://www.w3.org/2000/xmlns/"};function hc(t){var e=t+="",n=e.indexOf(":");return n>=0&&"xmlns"!==(e=t.slice(0,n))&&(t=t.slice(n+1)),lc.hasOwnProperty(e)?{space:lc[e],local:t}:t}function fc(t){return function(){this.removeAttribute(t)}}function dc(t){return function(){this.removeAttributeNS(t.space,t.local)}}function pc(t,e){return function(){this.setAttribute(t,e)}}function gc(t,e){return function(){this.setAttributeNS(t.space,t.local,e)}}function yc(t,e){return function(){var n=e.apply(this,arguments);null==n?this.removeAttribute(t):this.setAttribute(t,n)}}function mc(t,e){return function(){var n=e.apply(this,arguments);null==n?this.removeAttributeNS(t.space,t.local):this.setAttributeNS(t.space,t.local,n)}}function vc(t){return t.ownerDocument&&t.ownerDocument.defaultView||t.document&&t||t.defaultView}function bc(t){return function(){this.style.removeProperty(t)}}function _c(t,e,n){return function(){this.style.setProperty(t,e,n)}}function xc(t,e,n){return function(){var r=e.apply(this,arguments);null==r?this.style.removeProperty(t):this.style.setProperty(t,r,n)}}function wc(t,e){return t.style.getPropertyValue(e)||vc(t).getComputedStyle(t,null).getPropertyValue(e)}function kc(t){return function(){delete this[t]}}function Tc(t,e){return function(){this[t]=e}}function Cc(t,e){return function(){var n=e.apply(this,arguments);null==n?delete this[t]:this[t]=n}}function Ec(t){return t.trim().split(/^|\s+/)}function Sc(t){return t.classList||new Ac(t)}function Ac(t){this._node=t,this._names=Ec(t.getAttribute("class")||"")}function Mc(t,e){for(var n=Sc(t),r=-1,i=e.length;++r=0&&(e=t.slice(n+1),t=t.slice(0,n)),{type:t,name:e}}))}function Zc(t){return function(){var e=this.__on;if(e){for(var n,r=0,i=-1,a=e.length;r=0&&(this._names.splice(e,1),this._node.setAttribute("class",this._names.join(" ")))},contains:function(t){return this._names.indexOf(t)>=0}};var eu=[null];function nu(t,e){this._groups=t,this._parents=e}function ru(){return new nu([[document.documentElement]],eu)}nu.prototype=ru.prototype={constructor:nu,select:function(t){"function"!=typeof t&&(t=Hs(t));for(var e=this._groups,n=e.length,r=new 
Array(n),i=0;i=x&&(x=_+1);!(b=y[x])&&++x=0;)(r=i[a])&&(o&&4^r.compareDocumentPosition(o)&&o.parentNode.insertBefore(r,o),o=r);return this},sort:function(t){function e(e,n){return e&&n?t(e.__data__,n.__data__):!e-!n}t||(t=cc);for(var n=this._groups,r=n.length,i=new Array(r),a=0;a1?this.each((null==e?bc:"function"==typeof e?xc:_c)(t,e,null==n?"":n)):wc(this.node(),t)},property:function(t,e){return arguments.length>1?this.each((null==e?kc:"function"==typeof e?Cc:Tc)(t,e)):this.node()[t]},classed:function(t,e){var n=Ec(t+"");if(arguments.length<2){for(var r=Sc(this.node()),i=-1,a=n.length;++iuu)if(Math.abs(l*s-c*u)>uu&&i){var f=n-a,d=r-o,p=s*s+c*c,g=f*f+d*d,y=Math.sqrt(p),m=Math.sqrt(h),v=i*Math.tan((su-Math.acos((p+h-g)/(2*y*m)))/2),b=v/m,_=v/y;Math.abs(b-1)>uu&&(this._+="L"+(t+b*u)+","+(e+b*l)),this._+="A"+i+","+i+",0,0,"+ +(l*f>u*d)+","+(this._x1=t+_*s)+","+(this._y1=e+_*c)}else this._+="L"+(this._x1=t)+","+(this._y1=e)},arc:function(t,e,n,r,i,a){t=+t,e=+e,a=!!a;var o=(n=+n)*Math.cos(r),s=n*Math.sin(r),c=t+o,u=e+s,l=1^a,h=a?r-i:i-r;if(n<0)throw new Error("negative radius: "+n);null===this._x1?this._+="M"+c+","+u:(Math.abs(this._x1-c)>uu||Math.abs(this._y1-u)>uu)&&(this._+="L"+c+","+u),n&&(h<0&&(h=h%cu+cu),h>lu?this._+="A"+n+","+n+",0,1,"+l+","+(t-o)+","+(e-s)+"A"+n+","+n+",0,1,"+l+","+(this._x1=c)+","+(this._y1=u):h>uu&&(this._+="A"+n+","+n+",0,"+ +(h>=su)+","+l+","+(this._x1=t+n*Math.cos(i))+","+(this._y1=e+n*Math.sin(i))))},rect:function(t,e,n,r){this._+="M"+(this._x0=this._x1=+t)+","+(this._y0=this._y1=+e)+"h"+ +n+"v"+ +r+"h"+-n+"Z"},toString:function(){return this._}};const du=fu;function pu(t){return function(){return t}}var gu=Math.abs,yu=Math.atan2,mu=Math.cos,vu=Math.max,bu=Math.min,_u=Math.sin,xu=Math.sqrt,wu=1e-12,ku=Math.PI,Tu=ku/2,Cu=2*ku;function Eu(t){return t>1?0:t<-1?ku:Math.acos(t)}function Su(t){return t>=1?Tu:t<=-1?-Tu:Math.asin(t)}function Au(t){return t.innerRadius}function Mu(t){return t.outerRadius}function Nu(t){return t.startAngle}function Du(t){return t.endAngle}function Bu(t){return t&&t.padAngle}function Lu(t,e,n,r,i,a,o,s){var c=n-t,u=r-e,l=o-i,h=s-a,f=h*c-l*u;if(!(f*fN*N+D*D&&(T=E,C=S),{cx:T,cy:C,x01:-l,y01:-h,x11:T*(i/x-1),y11:C*(i/x-1)}}function Iu(){var t=Au,e=Mu,n=pu(0),r=null,i=Nu,a=Du,o=Bu,s=null;function c(){var c,u,l=+t.apply(this,arguments),h=+e.apply(this,arguments),f=i.apply(this,arguments)-Tu,d=a.apply(this,arguments)-Tu,p=gu(d-f),g=d>f;if(s||(s=c=du()),hwu)if(p>Cu-wu)s.moveTo(h*mu(f),h*_u(f)),s.arc(0,0,h,f,d,!g),l>wu&&(s.moveTo(l*mu(d),l*_u(d)),s.arc(0,0,l,d,f,g));else{var y,m,v=f,b=d,_=f,x=d,w=p,k=p,T=o.apply(this,arguments)/2,C=T>wu&&(r?+r.apply(this,arguments):xu(l*l+h*h)),E=bu(gu(h-l)/2,+n.apply(this,arguments)),S=E,A=E;if(C>wu){var M=Su(C/l*_u(T)),N=Su(C/h*_u(T));(w-=2*M)>wu?(_+=M*=g?1:-1,x-=M):(w=0,_=x=(f+d)/2),(k-=2*N)>wu?(v+=N*=g?1:-1,b-=N):(k=0,v=b=(f+d)/2)}var D=h*mu(v),B=h*_u(v),L=l*mu(x),O=l*_u(x);if(E>wu){var I,R=h*mu(b),F=h*_u(b),P=l*mu(_),Y=l*_u(_);if(pwu?A>wu?(y=Ou(P,Y,D,B,h,A,g),m=Ou(R,F,L,O,h,A,g),s.moveTo(y.cx+y.x01,y.cy+y.y01),Awu&&w>wu?S>wu?(y=Ou(L,O,R,F,l,-S,g),m=Ou(D,B,P,Y,l,-S,g),s.lineTo(y.cx+y.x01,y.cy+y.y01),St?1:e>=t?0:NaN}function $u(t){return t}function qu(){}function Hu(t,e,n){t._context.bezierCurveTo((2*t._x0+t._x1)/3,(2*t._y0+t._y1)/3,(t._x0+2*t._x1)/3,(t._y0+2*t._y1)/3,(t._x0+4*t._x1+e)/6,(t._y0+4*t._y1+n)/6)}function Wu(t){this._context=t}function Vu(t){return new Wu(t)}function Gu(t){this._context=t}function Xu(t){this._context=t}function Zu(t){this._context=t}function Qu(t){return t<0?-1:1}function Ku(t,e,n){var 
[Minified third-party JavaScript bundle omitted: mermaid 9.1.1, with bundled d3 5.16.0 and dagre-d3 0.6.4.]
t}},pi={overlay:"crosshair",selection:"move",n:"ns-resize",e:"ew-resize",s:"ns-resize",w:"ew-resize",nw:"nwse-resize",ne:"nesw-resize",se:"nwse-resize",sw:"nesw-resize"},gi={e:"w",w:"e",nw:"ne",ne:"nw",se:"sw",sw:"se"},yi={n:"s",s:"n",nw:"sw",ne:"se",se:"ne",sw:"nw"},mi={overlay:1,selection:1,n:null,e:1,s:null,w:-1,nw:-1,ne:1,se:1,sw:-1},vi={overlay:1,selection:1,n:-1,e:null,s:1,w:null,nw:-1,ne:-1,se:1,sw:1};function bi(t){return{type:t}}function _i(){return!le.ctrlKey&&!le.button}function xi(){var t=this.ownerSVGElement||this;return t.hasAttribute("viewBox")?[[(t=t.viewBox.baseVal).x,t.y],[t.x+t.width,t.y+t.height]]:[[0,0],[t.width.baseVal.value,t.height.baseVal.value]]}function wi(){return navigator.maxTouchPoints||"ontouchstart"in this}function ki(t){for(;!t.__brush;)if(!(t=t.parentNode))return;return t.__brush}function Ti(t){return t[0][0]===t[1][0]||t[0][1]===t[1][1]}function Ci(t){var e=t.__brush;return e?e.dim.output(e.selection):null}function Ei(){return Mi(hi)}function Si(){return Mi(fi)}function Ai(){return Mi(di)}function Mi(t){var e,n=xi,r=_i,i=wi,a=!0,o=ft("start","brush","end"),s=6;function c(e){var n=e.property("__brush",g).selectAll(".overlay").data([bi("overlay")]);n.enter().append("rect").attr("class","overlay").attr("pointer-events","all").attr("cursor",pi.overlay).merge(n).each((function(){var t=ki(this).extent;Te(this).attr("x",t[0][0]).attr("y",t[0][1]).attr("width",t[1][0]-t[0][0]).attr("height",t[1][1]-t[0][1])})),e.selectAll(".selection").data([bi("selection")]).enter().append("rect").attr("class","selection").attr("cursor",pi.selection).attr("fill","#777").attr("fill-opacity",.3).attr("stroke","#fff").attr("shape-rendering","crispEdges");var r=e.selectAll(".handle").data(t.handles,(function(t){return t.type}));r.exit().remove(),r.enter().append("rect").attr("class",(function(t){return"handle handle--"+t.type})).attr("cursor",(function(t){return pi[t.type]})),e.each(u).attr("fill","none").attr("pointer-events","all").on("mousedown.brush",f).filter(i).on("touchstart.brush",f).on("touchmove.brush",d).on("touchend.brush touchcancel.brush",p).style("touch-action","none").style("-webkit-tap-highlight-color","rgba(0,0,0,0)")}function u(){var t=Te(this),e=ki(this).selection;e?(t.selectAll(".selection").style("display",null).attr("x",e[0][0]).attr("y",e[0][1]).attr("width",e[1][0]-e[0][0]).attr("height",e[1][1]-e[0][1]),t.selectAll(".handle").style("display",null).attr("x",(function(t){return"e"===t.type[t.type.length-1]?e[1][0]-s/2:e[0][0]-s/2})).attr("y",(function(t){return"s"===t.type[0]?e[1][1]-s/2:e[0][1]-s/2})).attr("width",(function(t){return"n"===t.type||"s"===t.type?e[1][0]-e[0][0]+s:s})).attr("height",(function(t){return"e"===t.type||"w"===t.type?e[1][1]-e[0][1]+s:s}))):t.selectAll(".selection,.handle").style("display","none").attr("x",null).attr("y",null).attr("width",null).attr("height",null)}function l(t,e,n){var r=t.__brush.emitter;return!r||n&&r.clean?new h(t,e,n):r}function h(t,e,n){this.that=t,this.args=e,this.state=t.__brush,this.active=0,this.clean=n}function f(){if((!e||le.touches)&&r.apply(this,arguments)){var 
n,i,o,s,c,h,f,d,p,g,y,m=this,v=le.target.__data__.type,b="selection"===(a&&le.metaKey?v="overlay":v)?ii:a&&le.altKey?si:oi,_=t===fi?null:mi[v],x=t===hi?null:vi[v],w=ki(m),k=w.extent,T=w.selection,C=k[0][0],E=k[0][1],S=k[1][0],A=k[1][1],M=0,N=0,D=_&&x&&a&&le.shiftKey,B=le.touches?li(le.changedTouches[0].identifier):Ln,L=B(m),O=L,I=l(m,arguments,!0).beforestart();"overlay"===v?(T&&(p=!0),w.selection=T=[[n=t===fi?C:L[0],o=t===hi?E:L[1]],[c=t===fi?S:n,f=t===hi?A:o]]):(n=T[0][0],o=T[0][1],c=T[1][0],f=T[1][1]),i=n,s=o,h=c,d=f;var R=Te(m).attr("pointer-events","none"),F=R.selectAll(".overlay").attr("cursor",pi[v]);if(le.touches)I.moved=Y,I.ended=U;else{var P=Te(le.view).on("mousemove.brush",Y,!0).on("mouseup.brush",U,!0);a&&P.on("keydown.brush",z,!0).on("keyup.brush",$,!0),Se(le.view)}ni(),ar(m),u.call(m),I.start()}function Y(){var t=B(m);!D||g||y||(Math.abs(t[0]-O[0])>Math.abs(t[1]-O[1])?y=!0:g=!0),O=t,p=!0,ri(),j()}function j(){var t;switch(M=O[0]-L[0],N=O[1]-L[1],b){case ai:case ii:_&&(M=Math.max(C-n,Math.min(S-c,M)),i=n+M,h=c+M),x&&(N=Math.max(E-o,Math.min(A-f,N)),s=o+N,d=f+N);break;case oi:_<0?(M=Math.max(C-n,Math.min(S-n,M)),i=n+M,h=c):_>0&&(M=Math.max(C-c,Math.min(S-c,M)),i=n,h=c+M),x<0?(N=Math.max(E-o,Math.min(A-o,N)),s=o+N,d=f):x>0&&(N=Math.max(E-f,Math.min(A-f,N)),s=o,d=f+N);break;case si:_&&(i=Math.max(C,Math.min(S,n-M*_)),h=Math.max(C,Math.min(S,c+M*_))),x&&(s=Math.max(E,Math.min(A,o-N*x)),d=Math.max(E,Math.min(A,f+N*x)))}h0&&(n=i-M),x<0?f=d-N:x>0&&(o=s-N),b=ai,F.attr("cursor",pi.selection),j());break;default:return}ri()}function $(){switch(le.keyCode){case 16:D&&(g=y=D=!1,j());break;case 18:b===si&&(_<0?c=h:_>0&&(n=i),x<0?f=d:x>0&&(o=s),b=oi,j());break;case 32:b===ai&&(le.altKey?(_&&(c=h-M*_,n=i+M*_),x&&(f=d-N*x,o=s+N*x),b=si):(_<0?c=h:_>0&&(n=i),x<0?f=d:x>0&&(o=s),b=oi),F.attr("cursor",pi[v]),j());break;default:return}ri()}}function d(){l(this,arguments).moved()}function p(){l(this,arguments).ended()}function g(){var e=this.__brush||{selection:null};return e.extent=ui(n.apply(this,arguments)),e.dim=t,e}return c.move=function(e,n){e.selection?e.on("start.brush",(function(){l(this,arguments).beforestart().start()})).on("interrupt.brush end.brush",(function(){l(this,arguments).end()})).tween("brush",(function(){var e=this,r=e.__brush,i=l(e,arguments),a=r.selection,o=t.input("function"==typeof n?n.apply(this,arguments):n,r.extent),s=Mn(a,o);function c(t){r.selection=1===t&&null===o?null:s(t),u.call(e),i.brush()}return null!==a&&null!==o?c:c(1)})):e.each((function(){var e=this,r=arguments,i=e.__brush,a=t.input("function"==typeof n?n.apply(e,r):n,i.extent),o=l(e,r).beforestart();ar(e),i.selection=null===a?null:a,u.call(e),o.start().brush().end()}))},c.clear=function(t){c.move(t,null)},h.prototype={beforestart:function(){return 1==++this.active&&(this.state.emitter=this,this.starting=!0),this},start:function(){return this.starting?(this.starting=!1,this.emit("start")):this.emit("brush"),this},brush:function(){return this.emit("brush"),this},end:function(){return 0==--this.active&&(delete this.state.emitter,this.emit("end")),this},emit:function(e){ye(new ei(c,e,t.output(this.state.selection)),o.apply,o,[e,this.that,this.args])}},c.extent=function(t){return arguments.length?(n="function"==typeof t?t:ti(ui(t)),c):n},c.filter=function(t){return arguments.length?(r="function"==typeof t?t:ti(!!t),c):r},c.touchable=function(t){return arguments.length?(i="function"==typeof t?t:ti(!!t),c):i},c.handleSize=function(t){return arguments.length?(s=+t,c):s},c.keyModifiers=function(t){return 
arguments.length?(a=!!t,c):a},c.on=function(){var t=o.on.apply(o,arguments);return t===o?c:t},c}var Ni=Math.cos,Di=Math.sin,Bi=Math.PI,Li=Bi/2,Oi=2*Bi,Ii=Math.max;function Ri(t){return function(e,n){return t(e.source.value+e.target.value,n.source.value+n.target.value)}}function Fi(){var t=0,e=null,n=null,r=null;function i(i){var a,o,s,c,u,l,h=i.length,f=[],d=k(h),p=[],g=[],y=g.groups=new Array(h),m=new Array(h*h);for(a=0,u=-1;++uzi)if(Math.abs(l*s-c*u)>zi&&i){var f=n-a,d=r-o,p=s*s+c*c,g=f*f+d*d,y=Math.sqrt(p),m=Math.sqrt(h),v=i*Math.tan((ji-Math.acos((p+h-g)/(2*y*m)))/2),b=v/m,_=v/y;Math.abs(b-1)>zi&&(this._+="L"+(t+b*u)+","+(e+b*l)),this._+="A"+i+","+i+",0,0,"+ +(l*f>u*d)+","+(this._x1=t+_*s)+","+(this._y1=e+_*c)}else this._+="L"+(this._x1=t)+","+(this._y1=e)},arc:function(t,e,n,r,i,a){t=+t,e=+e,a=!!a;var o=(n=+n)*Math.cos(r),s=n*Math.sin(r),c=t+o,u=e+s,l=1^a,h=a?r-i:i-r;if(n<0)throw new Error("negative radius: "+n);null===this._x1?this._+="M"+c+","+u:(Math.abs(this._x1-c)>zi||Math.abs(this._y1-u)>zi)&&(this._+="L"+c+","+u),n&&(h<0&&(h=h%Ui+Ui),h>$i?this._+="A"+n+","+n+",0,1,"+l+","+(t-o)+","+(e-s)+"A"+n+","+n+",0,1,"+l+","+(this._x1=c)+","+(this._y1=u):h>zi&&(this._+="A"+n+","+n+",0,"+ +(h>=ji)+","+l+","+(this._x1=t+n*Math.cos(i))+","+(this._y1=e+n*Math.sin(i))))},rect:function(t,e,n,r){this._+="M"+(this._x0=this._x1=+t)+","+(this._y0=this._y1=+e)+"h"+ +n+"v"+ +r+"h"+-n+"Z"},toString:function(){return this._}};const Wi=Hi;function Vi(t){return t.source}function Gi(t){return t.target}function Xi(t){return t.radius}function Zi(t){return t.startAngle}function Qi(t){return t.endAngle}function Ki(){var t=Vi,e=Gi,n=Xi,r=Zi,i=Qi,a=null;function o(){var o,s=Pi.call(arguments),c=t.apply(this,s),u=e.apply(this,s),l=+n.apply(this,(s[0]=c,s)),h=r.apply(this,s)-Li,f=i.apply(this,s)-Li,d=l*Ni(h),p=l*Di(h),g=+n.apply(this,(s[0]=u,s)),y=r.apply(this,s)-Li,m=i.apply(this,s)-Li;if(a||(a=o=Wi()),a.moveTo(d,p),a.arc(0,0,l,h,f),h===y&&f===m||(a.quadraticCurveTo(0,0,g*Ni(y),g*Di(y)),a.arc(0,0,g,y,m)),a.quadraticCurveTo(0,0,d,p),a.closePath(),o)return a=null,o+""||null}return o.radius=function(t){return arguments.length?(n="function"==typeof t?t:Yi(+t),o):n},o.startAngle=function(t){return arguments.length?(r="function"==typeof t?t:Yi(+t),o):r},o.endAngle=function(t){return arguments.length?(i="function"==typeof t?t:Yi(+t),o):i},o.source=function(e){return arguments.length?(t=e,o):t},o.target=function(t){return arguments.length?(e=t,o):e},o.context=function(t){return arguments.length?(a=null==t?null:t,o):a},o}var Ji="$";function ta(){}function ea(t,e){var n=new ta;if(t instanceof ta)t.each((function(t,e){n.set(e,t)}));else if(Array.isArray(t)){var r,i=-1,a=t.length;if(null==e)for(;++i=r.length)return null!=t&&n.sort(t),null!=e?e(n):n;for(var c,u,l,h=-1,f=n.length,d=r[i++],p=na(),g=o();++hr.length)return t;var a,s=i[n-1];return null!=e&&n>=r.length?a=t.entries():(a=[],t.each((function(t,e){a.push({key:e,values:o(t,n)})}))),null!=s?a.sort((function(t,e){return s(t.key,e.key)})):a}return n={object:function(t){return a(t,0,ia,aa)},map:function(t){return a(t,0,oa,sa)},entries:function(t){return o(a(t,0,oa,sa),0)},key:function(t){return r.push(t),n},sortKeys:function(t){return i[r.length-1]=t,n},sortValues:function(e){return t=e,n},rollup:function(t){return e=t,n}}}function ia(){return{}}function aa(t,e,n){t[e]=n}function oa(){return na()}function sa(t,e,n){t.set(e,n)}function ca(){}var ua=na.prototype;function la(t,e){var n=new ca;if(t instanceof ca)t.each((function(t){n.add(t)}));else if(t){var 
r=-1,i=t.length;if(null==e)for(;++r.008856451679035631?Math.pow(t,1/3):t/xa+ba}function Sa(t){return t>_a?t*t*t:xa*(t-ba)}function Aa(t){return 255*(t<=.0031308?12.92*t:1.055*Math.pow(t,1/2.4)-.055)}function Ma(t){return(t/=255)<=.04045?t/12.92:Math.pow((t+.055)/1.055,2.4)}function Na(t){if(t instanceof La)return new La(t.h,t.c,t.l,t.opacity);if(t instanceof Ca||(t=wa(t)),0===t.a&&0===t.b)return new La(NaN,0r!=d>r&&n<(f-u)*(r-l)/(d-l)+u&&(i=-i)}return i}function Qa(t,e,n){var r,i,a,o;return function(t,e,n){return(e[0]-t[0])*(n[1]-t[1])==(n[0]-t[0])*(e[1]-t[1])}(t,e,n)&&(i=t[r=+(t[0]===e[0])],a=n[r],o=e[r],i<=a&&a<=o||o<=a&&a<=i)}function Ka(){}var Ja=[[],[[[1,1.5],[.5,1]]],[[[1.5,1],[1,1.5]]],[[[1.5,1],[.5,1]]],[[[1,.5],[1.5,1]]],[[[1,1.5],[.5,1]],[[1,.5],[1.5,1]]],[[[1,.5],[1,1.5]]],[[[1,.5],[.5,1]]],[[[.5,1],[1,.5]]],[[[1,1.5],[1,.5]]],[[[.5,1],[1,.5]],[[1.5,1],[1,1.5]]],[[[1.5,1],[1,.5]]],[[[.5,1],[1.5,1]]],[[[1,1.5],[1.5,1]]],[[[.5,1],[1,1.5]]],[]];function to(){var t=1,e=1,n=N,r=s;function i(t){var e=n(t);if(Array.isArray(e))e=e.slice().sort(Va);else{var r=m(t),i=r[0],o=r[1];e=M(i,o,e),e=k(Math.floor(i/e)*e,Math.floor(o/e)*e,e)}return e.map((function(e){return a(t,e)}))}function a(n,i){var a=[],s=[];return function(n,r,i){var a,s,c,u,l,h,f=new Array,d=new Array;for(a=s=-1,u=n[0]>=r,Ja[u<<1].forEach(p);++a=r,Ja[c|u<<1].forEach(p);for(Ja[u<<0].forEach(p);++s=r,l=n[s*t]>=r,Ja[u<<1|l<<2].forEach(p);++a=r,h=l,l=n[s*t+a+1]>=r,Ja[c|u<<1|l<<2|h<<3].forEach(p);Ja[u|l<<3].forEach(p)}for(a=-1,l=n[s*t]>=r,Ja[l<<2].forEach(p);++a=r,Ja[l<<2|h<<3].forEach(p);function p(t){var e,n,r=[t[0][0]+a,t[0][1]+s],c=[t[1][0]+a,t[1][1]+s],u=o(r),l=o(c);(e=d[u])?(n=f[l])?(delete d[e.end],delete f[n.start],e===n?(e.ring.push(c),i(e.ring)):f[e.start]=d[n.end]={start:e.start,end:n.end,ring:e.ring.concat(n.ring)}):(delete d[e.end],e.ring.push(c),d[e.end=l]=e):(e=f[l])?(n=d[u])?(delete f[e.start],delete d[n.end],e===n?(e.ring.push(c),i(e.ring)):f[n.start]=d[e.end]={start:n.start,end:e.end,ring:n.ring.concat(e.ring)}):(delete f[e.start],e.ring.unshift(r),f[e.start=u]=e):f[u]=d[l]={start:u,end:l,ring:[r,c]}}Ja[l<<3].forEach(p)}(n,i,(function(t){r(t,n,i),function(t){for(var e=0,n=t.length,r=t[n-1][1]*t[0][0]-t[n-1][0]*t[0][1];++e0?a.push([t]):s.push(t)})),s.forEach((function(t){for(var e,n=0,r=a.length;n0&&o0&&s0&&a>0))throw new Error("invalid size");return t=r,e=a,i},i.thresholds=function(t){return arguments.length?(n="function"==typeof t?t:Array.isArray(t)?Ga(Wa.call(t)):Ga(t),i):n},i.smooth=function(t){return arguments.length?(r=t?s:Ka,i):r===s},i}function eo(t,e,n){for(var r=t.width,i=t.height,a=1+(n<<1),o=0;o=n&&(s>=a&&(c-=t.data[s-a+o*r]),e.data[s-n+o*r]=c/Math.min(s+1,r-1+a-s,a))}function no(t,e,n){for(var r=t.width,i=t.height,a=1+(n<<1),o=0;o=n&&(s>=a&&(c-=t.data[o+(s-a)*r]),e.data[o+(s-n)*r]=c/Math.min(s+1,i-1+a-s,a))}function ro(t){return t[0]}function io(t){return t[1]}function ao(){return 1}function oo(){var t=ro,e=io,n=ao,r=960,i=500,a=20,o=2,s=3*a,c=r+2*s>>o,u=i+2*s>>o,l=Ga(20);function h(r){var i=new Float32Array(c*u),h=new Float32Array(c*u);r.forEach((function(r,a,l){var h=+t(r,a,l)+s>>o,f=+e(r,a,l)+s>>o,d=+n(r,a,l);h>=0&&h=0&&f>o),no({width:c,height:u,data:h},{width:c,height:u,data:i},a>>o),eo({width:c,height:u,data:i},{width:c,height:u,data:h},a>>o),no({width:c,height:u,data:h},{width:c,height:u,data:i},a>>o),eo({width:c,height:u,data:i},{width:c,height:u,data:h},a>>o),no({width:c,height:u,data:h},{width:c,height:u,data:i},a>>o);var d=l(i);if(!Array.isArray(d)){var 
p=I(i);d=M(0,p,d),(d=k(0,Math.floor(p/d)*d,d)).shift()}return to().thresholds(d).size([c,u])(i).map(f)}function f(t){return t.value*=Math.pow(2,-2*o),t.coordinates.forEach(d),t}function d(t){t.forEach(p)}function p(t){t.forEach(g)}function g(t){t[0]=t[0]*Math.pow(2,o)-s,t[1]=t[1]*Math.pow(2,o)-s}function y(){return c=r+2*(s=3*a)>>o,u=i+2*s>>o,h}return h.x=function(e){return arguments.length?(t="function"==typeof e?e:Ga(+e),h):t},h.y=function(t){return arguments.length?(e="function"==typeof t?t:Ga(+t),h):e},h.weight=function(t){return arguments.length?(n="function"==typeof t?t:Ga(+t),h):n},h.size=function(t){if(!arguments.length)return[r,i];var e=Math.ceil(t[0]),n=Math.ceil(t[1]);if(!(e>=0||e>=0))throw new Error("invalid size");return r=e,i=n,y()},h.cellSize=function(t){if(!arguments.length)return 1<=1))throw new Error("invalid cell size");return o=Math.floor(Math.log(t)/Math.LN2),y()},h.thresholds=function(t){return arguments.length?(l="function"==typeof t?t:Array.isArray(t)?Ga(Wa.call(t)):Ga(t),h):l},h.bandwidth=function(t){if(!arguments.length)return Math.sqrt(a*(a+1));if(!((t=+t)>=0))throw new Error("invalid bandwidth");return a=Math.round((Math.sqrt(4*t*t+1)-1)/2),y()},h}function so(t){return function(){return t}}function co(t,e,n,r,i,a,o,s,c,u){this.target=t,this.type=e,this.subject=n,this.identifier=r,this.active=i,this.x=a,this.y=o,this.dx=s,this.dy=c,this._=u}function uo(){return!le.ctrlKey&&!le.button}function lo(){return this.parentNode}function ho(t){return null==t?{x:le.x,y:le.y}:t}function fo(){return navigator.maxTouchPoints||"ontouchstart"in this}function po(){var t,e,n,r,i=uo,a=lo,o=ho,s=fo,c={},u=ft("start","drag","end"),l=0,h=0;function f(t){t.on("mousedown.drag",d).filter(s).on("touchstart.drag",y).on("touchmove.drag",m).on("touchend.drag touchcancel.drag",v).style("touch-action","none").style("-webkit-tap-highlight-color","rgba(0,0,0,0)")}function d(){if(!r&&i.apply(this,arguments)){var o=b("mouse",a.apply(this,arguments),Ln,this,arguments);o&&(Te(le.view).on("mousemove.drag",p,!0).on("mouseup.drag",g,!0),Se(le.view),Ce(),n=!1,t=le.clientX,e=le.clientY,o("start"))}}function p(){if(Ee(),!n){var r=le.clientX-t,i=le.clientY-e;n=r*r+i*i>h}c.mouse("drag")}function g(){Te(le.view).on("mousemove.drag mouseup.drag",null),Ae(le.view,n),Ee(),c.mouse("end")}function y(){if(i.apply(this,arguments)){var t,e,n=le.changedTouches,r=a.apply(this,arguments),o=n.length;for(t=0;t=a?c=!0:10===(r=t.charCodeAt(o++))?u=!0:13===r&&(u=!0,10===t.charCodeAt(o)&&++o),t.slice(i+1,e-1).replace(/""/g,'"')}for(;o9999?"+"+bo(t,6):bo(t,4)}(t.getUTCFullYear())+"-"+bo(t.getUTCMonth()+1,2)+"-"+bo(t.getUTCDate(),2)+(i?"T"+bo(e,2)+":"+bo(n,2)+":"+bo(r,2)+"."+bo(i,3)+"Z":r?"T"+bo(e,2)+":"+bo(n,2)+":"+bo(r,2)+"Z":n||e?"T"+bo(e,2)+":"+bo(n,2)+"Z":"")}(t):e.test(t+="")?'"'+t.replace(/"/g,'""')+'"':t}return{parse:function(t,e){var n,i,a=r(t,(function(t,r){if(n)return n(t,r-1);i=t,n=e?function(t,e){var n=mo(t);return function(r,i){return e(n(r),i,t)}}(t,e):mo(t)}));return a.columns=i||[],a},parseRows:r,format:function(e,n){return null==n&&(n=vo(e)),[n.map(o).join(t)].concat(i(e,n)).join("\n")},formatBody:function(t,e){return null==e&&(e=vo(t)),i(t,e).join("\n")},formatRows:function(t){return t.map(a).join("\n")},formatRow:a,formatValue:o}}var xo=_o(","),wo=xo.parse,ko=xo.parseRows,To=xo.format,Co=xo.formatBody,Eo=xo.formatRows,So=xo.formatRow,Ao=xo.formatValue,Mo=_o("\t"),No=Mo.parse,Do=Mo.parseRows,Bo=Mo.format,Lo=Mo.formatBody,Oo=Mo.formatRows,Io=Mo.formatRow,Ro=Mo.formatValue;function Fo(t){for(var e in t){var 
n,r,i=t[e].trim();if(i)if("true"===i)i=!0;else if("false"===i)i=!1;else if("NaN"===i)i=NaN;else if(isNaN(n=+i)){if(!(r=i.match(/^([-+]\d{2})?\d{4}(-\d{2}(-\d{2})?)?(T\d{2}:\d{2}(:\d{2}(\.\d{3})?)?(Z|[-+]\d{2}:\d{2})?)?$/)))continue;Po&&r[4]&&!r[7]&&(i=i.replace(/-/g,"/").replace(/T/," ")),i=new Date(i)}else i=n;else i=null;t[e]=i}return t}var Po=new Date("2019-01-01T00:00").getHours()||new Date("2019-07-01T00:00").getHours();function Yo(t){return+t}function jo(t){return t*t}function Uo(t){return t*(2-t)}function zo(t){return((t*=2)<=1?t*t:--t*(2-t)+1)/2}var $o=function t(e){function n(t){return Math.pow(t,e)}return e=+e,n.exponent=t,n}(3),qo=function t(e){function n(t){return 1-Math.pow(1-t,e)}return e=+e,n.exponent=t,n}(3),Ho=function t(e){function n(t){return((t*=2)<=1?Math.pow(t,e):2-Math.pow(2-t,e))/2}return e=+e,n.exponent=t,n}(3),Wo=Math.PI,Vo=Wo/2;function Go(t){return 1==+t?1:1-Math.cos(t*Vo)}function Xo(t){return Math.sin(t*Vo)}function Zo(t){return(1-Math.cos(Wo*t))/2}function Qo(t){return 1.0009775171065494*(Math.pow(2,-10*t)-.0009765625)}function Ko(t){return Qo(1-+t)}function Jo(t){return 1-Qo(t)}function ts(t){return((t*=2)<=1?Qo(1-t):2-Qo(t-1))/2}function es(t){return 1-Math.sqrt(1-t*t)}function ns(t){return Math.sqrt(1- --t*t)}function rs(t){return((t*=2)<=1?1-Math.sqrt(1-t*t):Math.sqrt(1-(t-=2)*t)+1)/2}var is=7.5625;function as(t){return 1-os(1-t)}function os(t){return(t=+t)<.36363636363636365?is*t*t:t<.7272727272727273?is*(t-=.5454545454545454)*t+.75:t<.9090909090909091?is*(t-=.8181818181818182)*t+.9375:is*(t-=.9545454545454546)*t+.984375}function ss(t){return((t*=2)<=1?1-os(1-t):os(t-1)+1)/2}var cs=1.70158,us=function t(e){function n(t){return(t=+t)*t*(e*(t-1)+t)}return e=+e,n.overshoot=t,n}(cs),ls=function t(e){function n(t){return--t*t*((t+1)*e+t)+1}return e=+e,n.overshoot=t,n}(cs),hs=function t(e){function n(t){return((t*=2)<1?t*t*((e+1)*t-e):(t-=2)*t*((e+1)*t+e)+2)/2}return e=+e,n.overshoot=t,n}(cs),fs=2*Math.PI,ds=function t(e,n){var r=Math.asin(1/(e=Math.max(1,e)))*(n/=fs);function i(t){return e*Qo(- --t)*Math.sin((r-t)/n)}return i.amplitude=function(e){return t(e,n*fs)},i.period=function(n){return t(e,n)},i}(1,.3),ps=function t(e,n){var r=Math.asin(1/(e=Math.max(1,e)))*(n/=fs);function i(t){return 1-e*Qo(t=+t)*Math.sin((t+r)/n)}return i.amplitude=function(e){return t(e,n*fs)},i.period=function(n){return t(e,n)},i}(1,.3),gs=function t(e,n){var r=Math.asin(1/(e=Math.max(1,e)))*(n/=fs);function i(t){return((t=2*t-1)<0?e*Qo(-t)*Math.sin((r-t)/n):2-e*Qo(t)*Math.sin((r+t)/n))/2}return i.amplitude=function(e){return t(e,n*fs)},i.period=function(n){return t(e,n)},i}(1,.3);function ys(t){if(!t.ok)throw new Error(t.status+" "+t.statusText);return t.blob()}function ms(t,e){return fetch(t,e).then(ys)}function vs(t){if(!t.ok)throw new Error(t.status+" "+t.statusText);return t.arrayBuffer()}function bs(t,e){return fetch(t,e).then(vs)}function _s(t){if(!t.ok)throw new Error(t.status+" "+t.statusText);return t.text()}function xs(t,e){return fetch(t,e).then(_s)}function ws(t){return function(e,n,r){return 2===arguments.length&&"function"==typeof n&&(r=n,n=void 0),xs(e,n).then((function(e){return t(e,r)}))}}function ks(t,e,n,r){3===arguments.length&&"function"==typeof n&&(r=n,n=void 0);var i=_o(t);return xs(e,n).then((function(t){return i.parse(t,r)}))}var Ts=ws(wo),Cs=ws(No);function Es(t,e){return new Promise((function(n,r){var i=new Image;for(var a in e)i[a]=e[a];i.onerror=r,i.onload=function(){n(i)},i.src=t}))}function Ss(t){if(!t.ok)throw new Error(t.status+" 
"+t.statusText);if(204!==t.status&&205!==t.status)return t.json()}function As(t,e){return fetch(t,e).then(Ss)}function Ms(t){return function(e,n){return xs(e,n).then((function(e){return(new DOMParser).parseFromString(e,t)}))}}const Ns=Ms("application/xml");var Ds=Ms("text/html"),Bs=Ms("image/svg+xml");function Ls(t,e){var n;function r(){var r,i,a=n.length,o=0,s=0;for(r=0;r=(a=(g+m)/2))?g=a:m=a,(l=n>=(o=(y+v)/2))?y=o:v=o,i=d,!(d=d[h=l<<1|u]))return i[h]=p,t;if(s=+t._x.call(null,d.data),c=+t._y.call(null,d.data),e===s&&n===c)return p.next=d,i?i[h]=p:t._root=p,t;do{i=i?i[h]=new Array(4):t._root=new Array(4),(u=e>=(a=(g+m)/2))?g=a:m=a,(l=n>=(o=(y+v)/2))?y=o:v=o}while((h=l<<1|u)==(f=(c>=o)<<1|s>=a));return i[f]=d,i[h]=p,t}function Fs(t,e,n,r,i){this.node=t,this.x0=e,this.y0=n,this.x1=r,this.y1=i}function Ps(t){return t[0]}function Ys(t){return t[1]}function js(t,e,n){var r=new Us(null==e?Ps:e,null==n?Ys:n,NaN,NaN,NaN,NaN);return null==t?r:r.addAll(t)}function Us(t,e,n,r,i,a){this._x=t,this._y=e,this._x0=n,this._y0=r,this._x1=i,this._y1=a,this._root=void 0}function zs(t){for(var e={data:t.data},n=e;t=t.next;)n=n.next={data:t.data};return e}var $s=js.prototype=Us.prototype;function qs(t){return t.x+t.vx}function Hs(t){return t.y+t.vy}function Ws(t){var e,n,r=1,i=1;function a(){for(var t,a,s,c,u,l,h,f=e.length,d=0;dc+d||iu+d||as.index){var p=c-o.x-o.vx,g=u-o.y-o.vy,y=p*p+g*g;yt.r&&(t.r=t[e].r)}function s(){if(e){var r,i,a=e.length;for(n=new Array(a),r=0;rl&&(l=r),ih&&(h=i));if(c>l||u>h)return this;for(this.cover(c,u).cover(l,h),n=0;nt||t>=i||r>e||e>=a;)switch(s=(ef||(a=c.y0)>d||(o=c.x1)=m)<<1|t>=y)&&(c=p[p.length-1],p[p.length-1]=p[p.length-1-u],p[p.length-1-u]=c)}else{var v=t-+this._x.call(null,g.data),b=e-+this._y.call(null,g.data),_=v*v+b*b;if(_=(s=(p+y)/2))?p=s:y=s,(l=o>=(c=(g+m)/2))?g=c:m=c,e=d,!(d=d[h=l<<1|u]))return this;if(!d.length)break;(e[h+1&3]||e[h+2&3]||e[h+3&3])&&(n=e,f=h)}for(;d.data!==t;)if(r=d,!(d=d.next))return this;return(i=d.next)&&delete d.next,r?(i?r.next=i:delete r.next,this):e?(i?e[h]=i:delete e[h],(d=e[0]||e[1]||e[2]||e[3])&&d===(e[3]||e[2]||e[1]||e[0])&&!d.length&&(n?n[f]=d:this._root=d),this):(this._root=i,this)},$s.removeAll=function(t){for(var e=0,n=t.length;e1?(null==n?s.remove(t):s.set(t,d(n)),e):s.get(t)},find:function(e,n,r){var i,a,o,s,c,u=0,l=t.length;for(null==r?r=1/0:r*=r,u=0;u1?(u.on(t,n),e):u.on(t)}}}function tc(){var t,e,n,r,i=Os(-30),a=1,o=1/0,s=.81;function c(r){var i,a=t.length,o=js(t,Zs,Qs).visitAfter(l);for(n=r,i=0;i=o)){(t.data!==e||t.next)&&(0===l&&(d+=(l=Is())*l),0===h&&(d+=(h=Is())*h),d1?r[0]+r.slice(2):r,+t.slice(n+1)]}function ac(t){return(t=ic(Math.abs(t)))?t[1]:NaN}var oc,sc=/^(?:(.)?([<>=^]))?([+\-( ])?([$#])?(0)?(\d+)?(,)?(\.\d+)?(~)?([a-z%])?$/i;function cc(t){if(!(e=sc.exec(t)))throw new Error("invalid format: "+t);var e;return new uc({fill:e[1],align:e[2],sign:e[3],symbol:e[4],zero:e[5],width:e[6],comma:e[7],precision:e[8]&&e[8].slice(1),trim:e[9],type:e[10]})}function uc(t){this.fill=void 0===t.fill?" 
":t.fill+"",this.align=void 0===t.align?">":t.align+"",this.sign=void 0===t.sign?"-":t.sign+"",this.symbol=void 0===t.symbol?"":t.symbol+"",this.zero=!!t.zero,this.width=void 0===t.width?void 0:+t.width,this.comma=!!t.comma,this.precision=void 0===t.precision?void 0:+t.precision,this.trim=!!t.trim,this.type=void 0===t.type?"":t.type+""}function lc(t,e){var n=ic(t,e);if(!n)return t+"";var r=n[0],i=n[1];return i<0?"0."+new Array(-i).join("0")+r:r.length>i+1?r.slice(0,i+1)+"."+r.slice(i+1):r+new Array(i-r.length+2).join("0")}cc.prototype=uc.prototype,uc.prototype.toString=function(){return this.fill+this.align+this.sign+this.symbol+(this.zero?"0":"")+(void 0===this.width?"":Math.max(1,0|this.width))+(this.comma?",":"")+(void 0===this.precision?"":"."+Math.max(0,0|this.precision))+(this.trim?"~":"")+this.type};const hc={"%":function(t,e){return(100*t).toFixed(e)},b:function(t){return Math.round(t).toString(2)},c:function(t){return t+""},d:function(t){return Math.abs(t=Math.round(t))>=1e21?t.toLocaleString("en").replace(/,/g,""):t.toString(10)},e:function(t,e){return t.toExponential(e)},f:function(t,e){return t.toFixed(e)},g:function(t,e){return t.toPrecision(e)},o:function(t){return Math.round(t).toString(8)},p:function(t,e){return lc(100*t,e)},r:lc,s:function(t,e){var n=ic(t,e);if(!n)return t+"";var r=n[0],i=n[1],a=i-(oc=3*Math.max(-8,Math.min(8,Math.floor(i/3))))+1,o=r.length;return a===o?r:a>o?r+new Array(a-o+1).join("0"):a>0?r.slice(0,a)+"."+r.slice(a):"0."+new Array(1-a).join("0")+ic(t,Math.max(0,e+a-1))[0]},X:function(t){return Math.round(t).toString(16).toUpperCase()},x:function(t){return Math.round(t).toString(16)}};function fc(t){return t}var dc,pc,gc,yc=Array.prototype.map,mc=["y","z","a","f","p","n","µ","m","","k","M","G","T","P","E","Z","Y"];function vc(t){var e,n,r=void 0===t.grouping||void 0===t.thousands?fc:(e=yc.call(t.grouping,Number),n=t.thousands+"",function(t,r){for(var i=t.length,a=[],o=0,s=e[0],c=0;i>0&&s>0&&(c+s+1>r&&(s=Math.max(1,r-c)),a.push(t.substring(i-=s,i+s)),!((c+=s+1)>r));)s=e[o=(o+1)%e.length];return a.reverse().join(n)}),i=void 0===t.currency?"":t.currency[0]+"",a=void 0===t.currency?"":t.currency[1]+"",o=void 0===t.decimal?".":t.decimal+"",s=void 0===t.numerals?fc:function(t){return function(e){return e.replace(/[0-9]/g,(function(e){return t[+e]}))}}(yc.call(t.numerals,String)),c=void 0===t.percent?"%":t.percent+"",u=void 0===t.minus?"-":t.minus+"",l=void 0===t.nan?"NaN":t.nan+"";function h(t){var e=(t=cc(t)).fill,n=t.align,h=t.sign,f=t.symbol,d=t.zero,p=t.width,g=t.comma,y=t.precision,m=t.trim,v=t.type;"n"===v?(g=!0,v="g"):hc[v]||(void 0===y&&(y=12),m=!0,v="g"),(d||"0"===e&&"="===n)&&(d=!0,e="0",n="=");var b="$"===f?i:"#"===f&&/[boxX]/.test(v)?"0"+v.toLowerCase():"",_="$"===f?a:/[%p]/.test(v)?c:"",x=hc[v],w=/[defgprs%]/.test(v);function k(t){var i,a,c,f=b,k=_;if("c"===v)k=x(t)+k,t="";else{var T=(t=+t)<0||1/t<0;if(t=isNaN(t)?l:x(Math.abs(t),y),m&&(t=function(t){t:for(var e,n=t.length,r=1,i=-1;r0&&(i=0)}return i>0?t.slice(0,i)+t.slice(e+1):t}(t)),T&&0==+t&&"+"!==h&&(T=!1),f=(T?"("===h?h:u:"-"===h||"("===h?"":h)+f,k=("s"===v?mc[8+oc/3]:"")+k+(T&&"("===h?")":""),w)for(i=-1,a=t.length;++i(c=t.charCodeAt(i))||c>57){k=(46===c?o+t.slice(i+1):t.slice(i))+k,t=t.slice(0,i);break}}g&&!d&&(t=r(t,1/0));var C=f.length+t.length+k.length,E=C>1)+f+t+k+E.slice(C);break;default:t=E+f+t+k}return s(t)}return y=void 0===y?6:/[gprs]/.test(v)?Math.max(1,Math.min(21,y)):Math.max(0,Math.min(20,y)),k.toString=function(){return t+""},k}return{format:h,formatPrefix:function(t,e){var 
n=h(((t=cc(t)).type="f",t)),r=3*Math.max(-8,Math.min(8,Math.floor(ac(e)/3))),i=Math.pow(10,-r),a=mc[8+r/3];return function(t){return n(i*t)+a}}}}function bc(t){return dc=vc(t),pc=dc.format,gc=dc.formatPrefix,dc}function _c(t){return Math.max(0,-ac(Math.abs(t)))}function xc(t,e){return Math.max(0,3*Math.max(-8,Math.min(8,Math.floor(ac(e)/3)))-ac(Math.abs(t)))}function wc(t,e){return t=Math.abs(t),e=Math.abs(e)-t,Math.max(0,ac(e)-ac(t))+1}function kc(){return new Tc}function Tc(){this.reset()}bc({decimal:".",thousands:",",grouping:[3],currency:["$",""],minus:"-"}),Tc.prototype={constructor:Tc,reset:function(){this.s=this.t=0},add:function(t){Ec(Cc,t,this.t),Ec(this,Cc.s,this.s),this.s?this.t+=Cc.t:this.s=Cc.t},valueOf:function(){return this.s}};var Cc=new Tc;function Ec(t,e,n){var r=t.s=e+n,i=r-e,a=r-i;t.t=e-a+(n-i)}var Sc=1e-6,Ac=1e-12,Mc=Math.PI,Nc=Mc/2,Dc=Mc/4,Bc=2*Mc,Lc=180/Mc,Oc=Mc/180,Ic=Math.abs,Rc=Math.atan,Fc=Math.atan2,Pc=Math.cos,Yc=Math.ceil,jc=Math.exp,Uc=(Math.floor,Math.log),zc=Math.pow,$c=Math.sin,qc=Math.sign||function(t){return t>0?1:t<0?-1:0},Hc=Math.sqrt,Wc=Math.tan;function Vc(t){return t>1?0:t<-1?Mc:Math.acos(t)}function Gc(t){return t>1?Nc:t<-1?-Nc:Math.asin(t)}function Xc(t){return(t=$c(t/2))*t}function Zc(){}function Qc(t,e){t&&Jc.hasOwnProperty(t.type)&&Jc[t.type](t,e)}var Kc={Feature:function(t,e){Qc(t.geometry,e)},FeatureCollection:function(t,e){for(var n=t.features,r=-1,i=n.length;++r=0?1:-1,i=r*n,a=Pc(e=(e*=Oc)/2+Dc),o=$c(e),s=su*o,c=ou*a+s*Pc(i),u=s*r*$c(i);cu.add(Fc(u,c)),au=t,ou=a,su=o}function gu(t){return uu.reset(),nu(t,lu),2*uu}function yu(t){return[Fc(t[1],t[0]),Gc(t[2])]}function mu(t){var e=t[0],n=t[1],r=Pc(n);return[r*Pc(e),r*$c(e),$c(n)]}function vu(t,e){return t[0]*e[0]+t[1]*e[1]+t[2]*e[2]}function bu(t,e){return[t[1]*e[2]-t[2]*e[1],t[2]*e[0]-t[0]*e[2],t[0]*e[1]-t[1]*e[0]]}function _u(t,e){t[0]+=e[0],t[1]+=e[1],t[2]+=e[2]}function xu(t,e){return[t[0]*e,t[1]*e,t[2]*e]}function wu(t){var e=Hc(t[0]*t[0]+t[1]*t[1]+t[2]*t[2]);t[0]/=e,t[1]/=e,t[2]/=e}var ku,Tu,Cu,Eu,Su,Au,Mu,Nu,Du,Bu,Lu,Ou,Iu,Ru,Fu,Pu,Yu,ju,Uu,zu,$u,qu,Hu,Wu,Vu,Gu,Xu=kc(),Zu={point:Qu,lineStart:Ju,lineEnd:tl,polygonStart:function(){Zu.point=el,Zu.lineStart=nl,Zu.lineEnd=rl,Xu.reset(),lu.polygonStart()},polygonEnd:function(){lu.polygonEnd(),Zu.point=Qu,Zu.lineStart=Ju,Zu.lineEnd=tl,cu<0?(ku=-(Cu=180),Tu=-(Eu=90)):Xu>Sc?Eu=90:Xu<-1e-6&&(Tu=-90),Bu[0]=ku,Bu[1]=Cu},sphere:function(){ku=-(Cu=180),Tu=-(Eu=90)}};function Qu(t,e){Du.push(Bu=[ku=t,Cu=t]),eEu&&(Eu=e)}function Ku(t,e){var n=mu([t*Oc,e*Oc]);if(Nu){var r=bu(Nu,n),i=bu([r[1],-r[0],0],r);wu(i),i=yu(i);var a,o=t-Su,s=o>0?1:-1,c=i[0]*Lc*s,u=Ic(o)>180;u^(s*SuEu&&(Eu=a):u^(s*Su<(c=(c+360)%360-180)&&cEu&&(Eu=e)),u?til(ku,Cu)&&(Cu=t):il(t,Cu)>il(ku,Cu)&&(ku=t):Cu>=ku?(tCu&&(Cu=t)):t>Su?il(ku,t)>il(ku,Cu)&&(Cu=t):il(t,Cu)>il(ku,Cu)&&(ku=t)}else Du.push(Bu=[ku=t,Cu=t]);eEu&&(Eu=e),Nu=n,Su=t}function Ju(){Zu.point=Ku}function tl(){Bu[0]=ku,Bu[1]=Cu,Zu.point=Qu,Nu=null}function el(t,e){if(Nu){var n=t-Su;Xu.add(Ic(n)>180?n+(n>0?360:-360):n)}else Au=t,Mu=e;lu.point(t,e),Ku(t,e)}function nl(){lu.lineStart()}function rl(){el(Au,Mu),lu.lineEnd(),Ic(Xu)>Sc&&(ku=-(Cu=180)),Bu[0]=ku,Bu[1]=Cu,Nu=null}function il(t,e){return(e-=t)<0?e+360:e}function al(t,e){return t[0]-e[0]}function ol(t,e){return t[0]<=t[1]?t[0]<=e&&e<=t[1]:eil(r[0],r[1])&&(r[1]=i[1]),il(i[0],r[1])>il(r[0],r[1])&&(r[0]=i[0])):a.push(r=i);for(o=-1/0,e=0,r=a[n=a.length-1];e<=n;r=i,++e)i=a[e],(s=il(r[1],i[0]))>o&&(o=s,ku=i[0],Cu=r[1])}return 
Du=Bu=null,ku===1/0||Tu===1/0?[[NaN,NaN],[NaN,NaN]]:[[ku,Tu],[Cu,Eu]]}var cl={sphere:Zc,point:ul,lineStart:hl,lineEnd:pl,polygonStart:function(){cl.lineStart=gl,cl.lineEnd=yl},polygonEnd:function(){cl.lineStart=hl,cl.lineEnd=pl}};function ul(t,e){t*=Oc;var n=Pc(e*=Oc);ll(n*Pc(t),n*$c(t),$c(e))}function ll(t,e,n){++Lu,Iu+=(t-Iu)/Lu,Ru+=(e-Ru)/Lu,Fu+=(n-Fu)/Lu}function hl(){cl.point=fl}function fl(t,e){t*=Oc;var n=Pc(e*=Oc);Wu=n*Pc(t),Vu=n*$c(t),Gu=$c(e),cl.point=dl,ll(Wu,Vu,Gu)}function dl(t,e){t*=Oc;var n=Pc(e*=Oc),r=n*Pc(t),i=n*$c(t),a=$c(e),o=Fc(Hc((o=Vu*a-Gu*i)*o+(o=Gu*r-Wu*a)*o+(o=Wu*i-Vu*r)*o),Wu*r+Vu*i+Gu*a);Ou+=o,Pu+=o*(Wu+(Wu=r)),Yu+=o*(Vu+(Vu=i)),ju+=o*(Gu+(Gu=a)),ll(Wu,Vu,Gu)}function pl(){cl.point=ul}function gl(){cl.point=ml}function yl(){vl(qu,Hu),cl.point=ul}function ml(t,e){qu=t,Hu=e,t*=Oc,e*=Oc,cl.point=vl;var n=Pc(e);Wu=n*Pc(t),Vu=n*$c(t),Gu=$c(e),ll(Wu,Vu,Gu)}function vl(t,e){t*=Oc;var n=Pc(e*=Oc),r=n*Pc(t),i=n*$c(t),a=$c(e),o=Vu*a-Gu*i,s=Gu*r-Wu*a,c=Wu*i-Vu*r,u=Hc(o*o+s*s+c*c),l=Gc(u),h=u&&-l/u;Uu+=h*o,zu+=h*s,$u+=h*c,Ou+=l,Pu+=l*(Wu+(Wu=r)),Yu+=l*(Vu+(Vu=i)),ju+=l*(Gu+(Gu=a)),ll(Wu,Vu,Gu)}function bl(t){Lu=Ou=Iu=Ru=Fu=Pu=Yu=ju=Uu=zu=$u=0,nu(t,cl);var e=Uu,n=zu,r=$u,i=e*e+n*n+r*r;return iMc?t+Math.round(-t/Bc)*Bc:t,e]}function kl(t,e,n){return(t%=Bc)?e||n?xl(Cl(t),El(e,n)):Cl(t):e||n?El(e,n):wl}function Tl(t){return function(e,n){return[(e+=t)>Mc?e-Bc:e<-Mc?e+Bc:e,n]}}function Cl(t){var e=Tl(t);return e.invert=Tl(-t),e}function El(t,e){var n=Pc(t),r=$c(t),i=Pc(e),a=$c(e);function o(t,e){var o=Pc(e),s=Pc(t)*o,c=$c(t)*o,u=$c(e),l=u*n+s*r;return[Fc(c*i-l*a,s*n-u*r),Gc(l*i+c*a)]}return o.invert=function(t,e){var o=Pc(e),s=Pc(t)*o,c=$c(t)*o,u=$c(e),l=u*i-c*a;return[Fc(c*i+u*a,s*n+l*r),Gc(l*n-s*r)]},o}function Sl(t){function e(e){return(e=t(e[0]*Oc,e[1]*Oc))[0]*=Lc,e[1]*=Lc,e}return t=kl(t[0]*Oc,t[1]*Oc,t.length>2?t[2]*Oc:0),e.invert=function(e){return(e=t.invert(e[0]*Oc,e[1]*Oc))[0]*=Lc,e[1]*=Lc,e},e}function Al(t,e,n,r,i,a){if(n){var o=Pc(e),s=$c(e),c=r*n;null==i?(i=e+r*Bc,a=e-c/2):(i=Ml(o,i),a=Ml(o,a),(r>0?ia)&&(i+=r*Bc));for(var u,l=i;r>0?l>a:l1&&e.push(e.pop().concat(e.shift()))},result:function(){var n=e;return e=[],t=null,n}}}function Bl(t,e){return Ic(t[0]-e[0])=0;--a)i.point((l=u[a])[0],l[1]);else r(f.x,f.p.x,-1,i);f=f.p}u=(f=f.o).z,d=!d}while(!f.v);i.lineEnd()}}}function Il(t){if(e=t.length){for(var e,n,r=0,i=t[0];++r=0?1:-1,C=T*k,E=C>Mc,S=g*x;if(Rl.add(Fc(S*T*$c(C),y*w+S*Pc(C))),o+=E?k+T*Bc:k,E^d>=n^b>=n){var A=bu(mu(f),mu(v));wu(A);var M=bu(a,A);wu(M);var N=(E^k>=0?-1:1)*Gc(M[2]);(r>N||r===N&&(A[0]||A[1]))&&(s+=E^k>=0?1:-1)}}return(o<-1e-6||o0){for(h||(i.polygonStart(),h=!0),i.lineStart(),t=0;t1&&2&c&&f.push(f.pop().concat(f.shift())),o.push(f.filter(jl))}return f}}function jl(t){return t.length>1}function Ul(t,e){return((t=t.x)[0]<0?t[1]-Nc-Sc:Nc-t[1])-((e=e.x)[0]<0?e[1]-Nc-Sc:Nc-e[1])}const zl=Yl((function(){return!0}),(function(t){var e,n=NaN,r=NaN,i=NaN;return{lineStart:function(){t.lineStart(),e=1},point:function(a,o){var s=a>0?Mc:-Mc,c=Ic(a-n);Ic(c-Mc)0?Nc:-Nc),t.point(i,r),t.lineEnd(),t.lineStart(),t.point(s,r),t.point(a,r),e=0):i!==s&&c>=Mc&&(Ic(n-i)Sc?Rc(($c(e)*(a=Pc(r))*$c(n)-$c(r)*(i=Pc(e))*$c(t))/(i*a*o)):(e+r)/2}(n,r,a,o),t.point(i,r),t.lineEnd(),t.lineStart(),t.point(s,r),e=0),t.point(n=a,r=o),i=s},lineEnd:function(){t.lineEnd(),n=r=NaN},clean:function(){return 2-e}}}),(function(t,e,n,r){var i;if(null==t)i=n*Nc,r.point(-Mc,i),r.point(0,i),r.point(Mc,i),r.point(Mc,0),r.point(Mc,-i),r.point(0,-i),r.point(-Mc,-i),r.point(-Mc,0),r.point(-Mc,i);else 
if(Ic(t[0]-e[0])>Sc){var a=t[0]0,i=Ic(e)>Sc;function a(t,n){return Pc(t)*Pc(n)>e}function o(t,n,r){var i=[1,0,0],a=bu(mu(t),mu(n)),o=vu(a,a),s=a[0],c=o-s*s;if(!c)return!r&&t;var u=e*o/c,l=-e*s/c,h=bu(i,a),f=xu(i,u);_u(f,xu(a,l));var d=h,p=vu(f,d),g=vu(d,d),y=p*p-g*(vu(f,f)-1);if(!(y<0)){var m=Hc(y),v=xu(d,(-p-m)/g);if(_u(v,f),v=yu(v),!r)return v;var b,_=t[0],x=n[0],w=t[1],k=n[1];x<_&&(b=_,_=x,x=b);var T=x-_,C=Ic(T-Mc)0^v[1]<(Ic(v[0]-_)Mc^(_<=v[0]&&v[0]<=x)){var E=xu(d,(-p+m)/g);return _u(E,f),[v,yu(E)]}}}function s(e,n){var i=r?t:Mc-t,a=0;return e<-i?a|=1:e>i&&(a|=2),n<-i?a|=4:n>i&&(a|=8),a}return Yl(a,(function(t){var e,n,c,u,l;return{lineStart:function(){u=c=!1,l=1},point:function(h,f){var d,p=[h,f],g=a(h,f),y=r?g?0:s(h,f):g?s(h+(h<0?Mc:-Mc),f):0;if(!e&&(u=c=g)&&t.lineStart(),g!==c&&(!(d=o(e,p))||Bl(e,d)||Bl(p,d))&&(p[2]=1),g!==c)l=0,g?(t.lineStart(),d=o(p,e),t.point(d[0],d[1])):(d=o(e,p),t.point(d[0],d[1],2),t.lineEnd()),e=d;else if(i&&e&&r^g){var m;y&n||!(m=o(p,e,!0))||(l=0,r?(t.lineStart(),t.point(m[0][0],m[0][1]),t.point(m[1][0],m[1][1]),t.lineEnd()):(t.point(m[1][0],m[1][1]),t.lineEnd(),t.lineStart(),t.point(m[0][0],m[0][1],3)))}!g||e&&Bl(e,p)||t.point(p[0],p[1]),e=p,c=g,n=y},lineEnd:function(){c&&t.lineEnd(),e=null},clean:function(){return l|(u&&c)<<1}}}),(function(e,r,i,a){Al(a,t,n,i,e,r)}),r?[0,-t]:[-Mc,t-Mc])}var ql=1e9,Hl=-ql;function Wl(t,e,n,r){function i(i,a){return t<=i&&i<=n&&e<=a&&a<=r}function a(i,a,s,u){var l=0,h=0;if(null==i||(l=o(i,s))!==(h=o(a,s))||c(i,a)<0^s>0)do{u.point(0===l||3===l?t:n,l>1?r:e)}while((l=(l+s+4)%4)!==h);else u.point(a[0],a[1])}function o(r,i){return Ic(r[0]-t)0?0:3:Ic(r[0]-n)0?2:1:Ic(r[1]-e)0?1:0:i>0?3:2}function s(t,e){return c(t.x,e.x)}function c(t,e){var n=o(t,1),r=o(e,1);return n!==r?n-r:0===n?e[1]-t[1]:1===n?t[0]-e[0]:2===n?t[1]-e[1]:e[0]-t[0]}return function(o){var c,u,l,h,f,d,p,g,y,m,v,b=o,_=Dl(),x={point:w,lineStart:function(){x.point=k,u&&u.push(l=[]),m=!0,y=!1,p=g=NaN},lineEnd:function(){c&&(k(h,f),d&&y&&_.rejoin(),c.push(_.result())),x.point=w,y&&b.lineEnd()},polygonStart:function(){b=_,c=[],u=[],v=!0},polygonEnd:function(){var e=function(){for(var e=0,n=0,i=u.length;nr&&(f-a)*(r-o)>(d-o)*(t-a)&&++e:d<=r&&(f-a)*(r-o)<(d-o)*(t-a)&&--e;return e}(),n=v&&e,i=(c=P(c)).length;(n||i)&&(o.polygonStart(),n&&(o.lineStart(),a(null,null,1,o),o.lineEnd()),i&&Ol(c,s,e,a,o),o.polygonEnd()),b=o,c=u=l=null}};function w(t,e){i(t,e)&&b.point(t,e)}function k(a,o){var s=i(a,o);if(u&&l.push([a,o]),m)h=a,f=o,d=s,m=!1,s&&(b.lineStart(),b.point(a,o));else if(s&&y)b.point(a,o);else{var c=[p=Math.max(Hl,Math.min(ql,p)),g=Math.max(Hl,Math.min(ql,g))],_=[a=Math.max(Hl,Math.min(ql,a)),o=Math.max(Hl,Math.min(ql,o))];!function(t,e,n,r,i,a){var o,s=t[0],c=t[1],u=0,l=1,h=e[0]-s,f=e[1]-c;if(o=n-s,h||!(o>0)){if(o/=h,h<0){if(o0){if(o>l)return;o>u&&(u=o)}if(o=i-s,h||!(o<0)){if(o/=h,h<0){if(o>l)return;o>u&&(u=o)}else if(h>0){if(o0)){if(o/=f,f<0){if(o0){if(o>l)return;o>u&&(u=o)}if(o=a-c,f||!(o<0)){if(o/=f,f<0){if(o>l)return;o>u&&(u=o)}else if(f>0){if(o0&&(t[0]=s+u*h,t[1]=c+u*f),l<1&&(e[0]=s+l*h,e[1]=c+l*f),!0}}}}}(c,_,t,e,n,r)?s&&(b.lineStart(),b.point(a,o),v=!1):(y||(b.lineStart(),b.point(c[0],c[1])),b.point(_[0],_[1]),s||b.lineEnd(),v=!1)}p=a,g=o,y=s}return x}}function Vl(){var t,e,n,r=0,i=0,a=960,o=500;return n={stream:function(n){return t&&e===n?t:t=Wl(r,i,a,o)(e=n)},extent:function(s){return arguments.length?(r=+s[0][0],i=+s[0][1],a=+s[1][0],o=+s[1][1],t=e=null,n):[[r,i],[a,o]]}}}var 
Gl,Xl,Zl,Ql=kc(),Kl={sphere:Zc,point:Zc,lineStart:function(){Kl.point=th,Kl.lineEnd=Jl},lineEnd:Zc,polygonStart:Zc,polygonEnd:Zc};function Jl(){Kl.point=Kl.lineEnd=Zc}function th(t,e){Gl=t*=Oc,Xl=$c(e*=Oc),Zl=Pc(e),Kl.point=eh}function eh(t,e){t*=Oc;var n=$c(e*=Oc),r=Pc(e),i=Ic(t-Gl),a=Pc(i),o=r*$c(i),s=Zl*n-Xl*r*a,c=Xl*n+Zl*r*a;Ql.add(Fc(Hc(o*o+s*s),c)),Gl=t,Xl=n,Zl=r}function nh(t){return Ql.reset(),nu(t,Kl),+Ql}var rh=[null,null],ih={type:"LineString",coordinates:rh};function ah(t,e){return rh[0]=t,rh[1]=e,nh(ih)}var oh={Feature:function(t,e){return ch(t.geometry,e)},FeatureCollection:function(t,e){for(var n=t.features,r=-1,i=n.length;++r0&&(i=ah(t[a],t[a-1]))>0&&n<=i&&r<=i&&(n+r-i)*(1-Math.pow((n-r)/i,2))Sc})).map(c)).concat(k(Yc(a/d)*d,i,d).filter((function(t){return Ic(t%g)>Sc})).map(u))}return m.lines=function(){return v().map((function(t){return{type:"LineString",coordinates:t}}))},m.outline=function(){return{type:"Polygon",coordinates:[l(r).concat(h(o).slice(1),l(n).reverse().slice(1),h(s).reverse().slice(1))]}},m.extent=function(t){return arguments.length?m.extentMajor(t).extentMinor(t):m.extentMinor()},m.extentMajor=function(t){return arguments.length?(r=+t[0][0],n=+t[1][0],s=+t[0][1],o=+t[1][1],r>n&&(t=r,r=n,n=t),s>o&&(t=s,s=o,o=t),m.precision(y)):[[r,s],[n,o]]},m.extentMinor=function(n){return arguments.length?(e=+n[0][0],t=+n[1][0],a=+n[0][1],i=+n[1][1],e>t&&(n=e,e=t,t=n),a>i&&(n=a,a=i,i=n),m.precision(y)):[[e,a],[t,i]]},m.step=function(t){return arguments.length?m.stepMajor(t).stepMinor(t):m.stepMinor()},m.stepMajor=function(t){return arguments.length?(p=+t[0],g=+t[1],m):[p,g]},m.stepMinor=function(t){return arguments.length?(f=+t[0],d=+t[1],m):[f,d]},m.precision=function(f){return arguments.length?(y=+f,c=gh(a,i,90),u=yh(e,t,y),l=gh(s,o,90),h=yh(r,n,y),m):y},m.extentMajor([[-180,-89.999999],[180,89.999999]]).extentMinor([[-180,-80.000001],[180,80.000001]])}function vh(){return mh()()}function bh(t,e){var n=t[0]*Oc,r=t[1]*Oc,i=e[0]*Oc,a=e[1]*Oc,o=Pc(r),s=$c(r),c=Pc(a),u=$c(a),l=o*Pc(n),h=o*$c(n),f=c*Pc(i),d=c*$c(i),p=2*Gc(Hc(Xc(a-r)+o*c*Xc(i-n))),g=$c(p),y=p?function(t){var e=$c(t*=p)/g,n=$c(p-t)/g,r=n*l+e*f,i=n*h+e*d,a=n*s+e*u;return[Fc(i,r)*Lc,Fc(a,Hc(r*r+i*i))*Lc]}:function(){return[n*Lc,r*Lc]};return y.distance=p,y}function _h(t){return t}var xh,wh,kh,Th,Ch=kc(),Eh=kc(),Sh={point:Zc,lineStart:Zc,lineEnd:Zc,polygonStart:function(){Sh.lineStart=Ah,Sh.lineEnd=Dh},polygonEnd:function(){Sh.lineStart=Sh.lineEnd=Sh.point=Zc,Ch.add(Ic(Eh)),Eh.reset()},result:function(){var t=Ch/2;return Ch.reset(),t}};function Ah(){Sh.point=Mh}function Mh(t,e){Sh.point=Nh,xh=kh=t,wh=Th=e}function Nh(t,e){Eh.add(Th*t-kh*e),kh=t,Th=e}function Dh(){Nh(xh,wh)}const Bh=Sh;var Lh=1/0,Oh=Lh,Ih=-Lh,Rh=Ih,Fh={point:function(t,e){tIh&&(Ih=t),eRh&&(Rh=e)},lineStart:Zc,lineEnd:Zc,polygonStart:Zc,polygonEnd:Zc,result:function(){var t=[[Lh,Oh],[Ih,Rh]];return Ih=Rh=-(Oh=Lh=1/0),t}};const Ph=Fh;var Yh,jh,Uh,zh,$h=0,qh=0,Hh=0,Wh=0,Vh=0,Gh=0,Xh=0,Zh=0,Qh=0,Kh={point:Jh,lineStart:tf,lineEnd:rf,polygonStart:function(){Kh.lineStart=af,Kh.lineEnd=of},polygonEnd:function(){Kh.point=Jh,Kh.lineStart=tf,Kh.lineEnd=rf},result:function(){var t=Qh?[Xh/Qh,Zh/Qh]:Gh?[Wh/Gh,Vh/Gh]:Hh?[$h/Hh,qh/Hh]:[NaN,NaN];return $h=qh=Hh=Wh=Vh=Gh=Xh=Zh=Qh=0,t}};function Jh(t,e){$h+=t,qh+=e,++Hh}function tf(){Kh.point=ef}function ef(t,e){Kh.point=nf,Jh(Uh=t,zh=e)}function nf(t,e){var n=t-Uh,r=e-zh,i=Hc(n*n+r*r);Wh+=i*(Uh+t)/2,Vh+=i*(zh+e)/2,Gh+=i,Jh(Uh=t,zh=e)}function rf(){Kh.point=Jh}function af(){Kh.point=sf}function 
of(){cf(Yh,jh)}function sf(t,e){Kh.point=cf,Jh(Yh=Uh=t,jh=zh=e)}function cf(t,e){var n=t-Uh,r=e-zh,i=Hc(n*n+r*r);Wh+=i*(Uh+t)/2,Vh+=i*(zh+e)/2,Gh+=i,Xh+=(i=zh*t-Uh*e)*(Uh+t),Zh+=i*(zh+e),Qh+=3*i,Jh(Uh=t,zh=e)}const uf=Kh;function lf(t){this._context=t}lf.prototype={_radius:4.5,pointRadius:function(t){return this._radius=t,this},polygonStart:function(){this._line=0},polygonEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){0===this._line&&this._context.closePath(),this._point=NaN},point:function(t,e){switch(this._point){case 0:this._context.moveTo(t,e),this._point=1;break;case 1:this._context.lineTo(t,e);break;default:this._context.moveTo(t+this._radius,e),this._context.arc(t,e,this._radius,0,Bc)}},result:Zc};var hf,ff,df,pf,gf,yf=kc(),mf={point:Zc,lineStart:function(){mf.point=vf},lineEnd:function(){hf&&bf(ff,df),mf.point=Zc},polygonStart:function(){hf=!0},polygonEnd:function(){hf=null},result:function(){var t=+yf;return yf.reset(),t}};function vf(t,e){mf.point=bf,ff=pf=t,df=gf=e}function bf(t,e){pf-=t,gf-=e,yf.add(Hc(pf*pf+gf*gf)),pf=t,gf=e}const _f=mf;function xf(){this._string=[]}function wf(t){return"m0,"+t+"a"+t+","+t+" 0 1,1 0,"+-2*t+"a"+t+","+t+" 0 1,1 0,"+2*t+"z"}function kf(t,e){var n,r,i=4.5;function a(t){return t&&("function"==typeof i&&r.pointRadius(+i.apply(this,arguments)),nu(t,n(r))),r.result()}return a.area=function(t){return nu(t,n(Bh)),Bh.result()},a.measure=function(t){return nu(t,n(_f)),_f.result()},a.bounds=function(t){return nu(t,n(Ph)),Ph.result()},a.centroid=function(t){return nu(t,n(uf)),uf.result()},a.projection=function(e){return arguments.length?(n=null==e?(t=null,_h):(t=e).stream,a):t},a.context=function(t){return arguments.length?(r=null==t?(e=null,new xf):new lf(e=t),"function"!=typeof i&&r.pointRadius(i),a):e},a.pointRadius=function(t){return arguments.length?(i="function"==typeof t?t:(r.pointRadius(+t),+t),a):i},a.projection(t).context(e)}function Tf(t){return{stream:Cf(t)}}function Cf(t){return function(e){var n=new Ef;for(var r in t)n[r]=t[r];return n.stream=e,n}}function Ef(){}function Sf(t,e,n){var r=t.clipExtent&&t.clipExtent();return t.scale(150).translate([0,0]),null!=r&&t.clipExtent(null),nu(n,t.stream(Ph)),e(Ph.result()),null!=r&&t.clipExtent(r),t}function Af(t,e,n){return Sf(t,(function(n){var r=e[1][0]-e[0][0],i=e[1][1]-e[0][1],a=Math.min(r/(n[1][0]-n[0][0]),i/(n[1][1]-n[0][1])),o=+e[0][0]+(r-a*(n[1][0]+n[0][0]))/2,s=+e[0][1]+(i-a*(n[1][1]+n[0][1]))/2;t.scale(150*a).translate([o,s])}),n)}function Mf(t,e,n){return Af(t,[[0,0],e],n)}function Nf(t,e,n){return Sf(t,(function(n){var r=+e,i=r/(n[1][0]-n[0][0]),a=(r-i*(n[1][0]+n[0][0]))/2,o=-i*n[0][1];t.scale(150*i).translate([a,o])}),n)}function Df(t,e,n){return Sf(t,(function(n){var r=+e,i=r/(n[1][1]-n[0][1]),a=-i*n[0][0],o=(r-i*(n[1][1]+n[0][1]))/2;t.scale(150*i).translate([a,o])}),n)}xf.prototype={_radius:4.5,_circle:wf(4.5),pointRadius:function(t){return(t=+t)!==this._radius&&(this._radius=t,this._circle=null),this},polygonStart:function(){this._line=0},polygonEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){0===this._line&&this._string.push("Z"),this._point=NaN},point:function(t,e){switch(this._point){case 0:this._string.push("M",t,",",e),this._point=1;break;case 1:this._string.push("L",t,",",e);break;default:null==this._circle&&(this._circle=wf(this._radius)),this._string.push("M",t,",",e,this._circle)}},result:function(){if(this._string.length){var t=this._string.join("");return this._string=[],t}return 
null}},Ef.prototype={constructor:Ef,point:function(t,e){this.stream.point(t,e)},sphere:function(){this.stream.sphere()},lineStart:function(){this.stream.lineStart()},lineEnd:function(){this.stream.lineEnd()},polygonStart:function(){this.stream.polygonStart()},polygonEnd:function(){this.stream.polygonEnd()}};var Bf=Pc(30*Oc);function Lf(t,e){return+e?function(t,e){function n(r,i,a,o,s,c,u,l,h,f,d,p,g,y){var m=u-r,v=l-i,b=m*m+v*v;if(b>4*e&&g--){var _=o+f,x=s+d,w=c+p,k=Hc(_*_+x*x+w*w),T=Gc(w/=k),C=Ic(Ic(w)-1)e||Ic((m*M+v*N)/b-.5)>.3||o*f+s*d+c*p2?t[2]%360*Oc:0,M()):[y*Lc,m*Lc,v*Lc]},S.angle=function(t){return arguments.length?(b=t%360*Oc,M()):b*Lc},S.reflectX=function(t){return arguments.length?(_=t?-1:1,M()):_<0},S.reflectY=function(t){return arguments.length?(x=t?-1:1,M()):x<0},S.precision=function(t){return arguments.length?(o=Lf(s,E=t*t),N()):Hc(E)},S.fitExtent=function(t,e){return Af(S,t,e)},S.fitSize=function(t,e){return Mf(S,t,e)},S.fitWidth=function(t,e){return Nf(S,t,e)},S.fitHeight=function(t,e){return Df(S,t,e)},function(){return e=t.apply(this,arguments),S.invert=e.invert&&A,M()}}function Yf(t){var e=0,n=Mc/3,r=Pf(t),i=r(e,n);return i.parallels=function(t){return arguments.length?r(e=t[0]*Oc,n=t[1]*Oc):[e*Lc,n*Lc]},i}function jf(t,e){var n=$c(t),r=(n+$c(e))/2;if(Ic(r)=.12&&i<.234&&r>=-.425&&r<-.214?s:i>=.166&&i<.234&&r>=-.214&&r<-.115?c:o).invert(t)},l.stream=function(n){return t&&e===n?t:(r=[o.stream(e=n),s.stream(n),c.stream(n)],i=r.length,t={point:function(t,e){for(var n=-1;++n0?e<-Nc+Sc&&(e=-Nc+Sc):e>Nc-Sc&&(e=Nc-Sc);var n=i/zc(Jf(e),r);return[n*$c(r*t),i-n*Pc(r*t)]}return a.invert=function(t,e){var n=i-e,a=qc(r)*Hc(t*t+n*n),o=Fc(t,Ic(n))*qc(n);return n*r<0&&(o-=Mc*qc(t)*qc(n)),[o/r,2*Rc(zc(i/a,1/r))-Nc]},a}function ed(){return Yf(td).scale(109.5).parallels([30,30])}function nd(t,e){return[t,e]}function rd(){return Ff(nd).scale(152.63)}function id(t,e){var n=Pc(t),r=t===e?$c(t):(n-Pc(e))/(e-t),i=n/r+t;if(Ic(r)2?t[2]+90:90]):[(t=n())[0],t[1],t[2]-90]},n([0,0,90]).scale(159.155)}function Td(t,e){return t.parent===e.parent?1:2}function Cd(t,e){return t+e.x}function Ed(t,e){return Math.max(t,e.y)}function Sd(){var t=Td,e=1,n=1,r=!1;function i(i){var a,o=0;i.eachAfter((function(e){var n=e.children;n?(e.x=function(t){return t.reduce(Cd,0)/t.length}(n),e.y=function(t){return 1+t.reduce(Ed,0)}(n)):(e.x=a?o+=t(e,a):0,e.y=0,a=e)}));var s=function(t){for(var e;e=t.children;)t=e[0];return t}(i),c=function(t){for(var e;e=t.children;)t=e[e.length-1];return t}(i),u=s.x-t(s,c)/2,l=c.x+t(c,s)/2;return i.eachAfter(r?function(t){t.x=(t.x-i.x)*e,t.y=(i.y-t.y)*n}:function(t){t.x=(t.x-u)/(l-u)*e,t.y=(1-(i.y?t.y/i.y:1))*n})}return i.separation=function(e){return arguments.length?(t=e,i):t},i.size=function(t){return arguments.length?(r=!1,e=+t[0],n=+t[1],i):r?null:[e,n]},i.nodeSize=function(t){return arguments.length?(r=!0,e=+t[0],n=+t[1],i):r?[e,n]:null},i}function Ad(t){var e=0,n=t.children,r=n&&n.length;if(r)for(;--r>=0;)e+=n[r].value;else e=1;t.value=e}function Md(t,e){var n,r,i,a,o,s=new Ld(t),c=+t.value&&(s.value=t.value),u=[s];for(null==e&&(e=Nd);n=u.pop();)if(c&&(n.value=+n.data.value),(i=e(n.data))&&(o=i.length))for(n.children=new Array(o),a=o-1;a>=0;--a)u.push(r=n.children[a]=new Ld(i[a])),r.parent=n,r.depth=n.depth+1;return s.eachBefore(Bd)}function Nd(t){return t.children}function Dd(t){t.data=t.data.data}function Bd(t){var e=0;do{t.height=e}while((t=t.parent)&&t.height<++e)}function Ld(t){this.data=t,this.depth=this.height=0,this.parent=null}hd.invert=function(t,e){for(var 
n,r=e,i=r*r,a=i*i*i,o=0;o<12&&(a=(i=(r-=n=(r*(od+sd*i+a*(cd+ud*i))-e)/(od+3*sd*i+a*(7*cd+9*ud*i)))*r)*i*i,!(Ic(n)Sc&&--i>0);return[t/(.8707+(a=r*r)*(a*(a*a*a*(.003971-.001529*a)-.013791)-.131979)),r]},vd.invert=Hf(Gc),_d.invert=Hf((function(t){return 2*Rc(t)})),wd.invert=function(t,e){return[-e,2*Rc(jc(t))-Nc]},Ld.prototype=Md.prototype={constructor:Ld,count:function(){return this.eachAfter(Ad)},each:function(t){var e,n,r,i,a=this,o=[a];do{for(e=o.reverse(),o=[];a=e.pop();)if(t(a),n=a.children)for(r=0,i=n.length;r=0;--n)i.push(e[n]);return this},sum:function(t){return this.eachAfter((function(e){for(var n=+t(e.data)||0,r=e.children,i=r&&r.length;--i>=0;)n+=r[i].value;e.value=n}))},sort:function(t){return this.eachBefore((function(e){e.children&&e.children.sort(t)}))},path:function(t){for(var e=this,n=function(t,e){if(t===e)return t;var n=t.ancestors(),r=e.ancestors(),i=null;for(t=n.pop(),e=r.pop();t===e;)i=t,t=n.pop(),e=r.pop();return i}(e,t),r=[e];e!==n;)e=e.parent,r.push(e);for(var i=r.length;t!==n;)r.splice(i,0,t),t=t.parent;return r},ancestors:function(){for(var t=this,e=[t];t=t.parent;)e.push(t);return e},descendants:function(){var t=[];return this.each((function(e){t.push(e)})),t},leaves:function(){var t=[];return this.eachBefore((function(e){e.children||t.push(e)})),t},links:function(){var t=this,e=[];return t.each((function(n){n!==t&&e.push({source:n.parent,target:n})})),e},copy:function(){return Md(this).eachBefore(Dd)}};var Od=Array.prototype.slice;function Id(t){for(var e,n,r=0,i=(t=function(t){for(var e,n,r=t.length;r;)n=Math.random()*r--|0,e=t[r],t[r]=t[n],t[n]=e;return t}(Od.call(t))).length,a=[];r0&&n*n>r*r+i*i}function Yd(t,e){for(var n=0;n(o*=o)?(r=(u+o-i)/(2*u),a=Math.sqrt(Math.max(0,o/u-r*r)),n.x=t.x-r*s-a*c,n.y=t.y-r*c+a*s):(r=(u+i-o)/(2*u),a=Math.sqrt(Math.max(0,i/u-r*r)),n.x=e.x+r*s-a*c,n.y=e.y+r*c+a*s)):(n.x=e.x+n.r,n.y=e.y)}function qd(t,e){var n=t.r+e.r-1e-6,r=e.x-t.x,i=e.y-t.y;return n>0&&n*n>r*r+i*i}function Hd(t){var e=t._,n=t.next._,r=e.r+n.r,i=(e.x*n.r+n.x*e.r)/r,a=(e.y*n.r+n.y*e.r)/r;return i*i+a*a}function Wd(t){this._=t,this.next=null,this.previous=null}function Vd(t){if(!(i=t.length))return 0;var e,n,r,i,a,o,s,c,u,l,h;if((e=t[0]).x=0,e.y=0,!(i>1))return e.r;if(n=t[1],e.x=-n.r,n.x=e.r,n.y=0,!(i>2))return e.r+n.r;$d(n,e,r=t[2]),e=new Wd(e),n=new Wd(n),r=new Wd(r),e.next=r.previous=n,n.next=e.previous=r,r.next=n.previous=e;t:for(s=3;s0)throw new Error("cycle");return a}return n.id=function(e){return arguments.length?(t=Zd(e),n):t},n.parentId=function(t){return arguments.length?(e=Zd(t),n):e},n}function fp(t,e){return t.parent===e.parent?1:2}function dp(t){var e=t.children;return e?e[0]:t.t}function pp(t){var e=t.children;return e?e[e.length-1]:t.t}function gp(t,e,n){var r=n/(e.i-t.i);e.c-=r,e.s+=n,t.c+=r,e.z+=n,e.m+=n}function yp(t,e,n){return t.a.parent===e.parent?t.a:n}function mp(t,e){this._=t,this.parent=null,this.children=null,this.A=null,this.a=this,this.z=0,this.m=0,this.c=0,this.s=0,this.t=null,this.i=e}function vp(){var t=fp,e=1,n=1,r=null;function i(i){var c=function(t){for(var e,n,r,i,a,o=new mp(t,0),s=[o];e=s.pop();)if(r=e._.children)for(e.children=new Array(a=r.length),i=a-1;i>=0;--i)s.push(n=e.children[i]=new mp(r[i],i)),n.parent=e;return(o.parent=new mp(null,0)).children=[o],o}(i);if(c.eachAfter(a),c.parent.m=-c.z,c.eachBefore(o),r)i.eachBefore(s);else{var u=i,l=i,h=i;i.eachBefore((function(t){t.xl.x&&(l=t),t.depth>h.depth&&(h=t)}));var 
f=u===l?1:t(u,l)/2,d=f-u.x,p=e/(l.x+f+d),g=n/(h.depth||1);i.eachBefore((function(t){t.x=(t.x+d)*p,t.y=t.depth*g}))}return i}function a(e){var n=e.children,r=e.parent.children,i=e.i?r[e.i-1]:null;if(n){!function(t){for(var e,n=0,r=0,i=t.children,a=i.length;--a>=0;)(e=i[a]).z+=n,e.m+=n,n+=e.s+(r+=e.c)}(e);var a=(n[0].z+n[n.length-1].z)/2;i?(e.z=i.z+t(e._,i._),e.m=e.z-a):e.z=a}else i&&(e.z=i.z+t(e._,i._));e.parent.A=function(e,n,r){if(n){for(var i,a=e,o=e,s=n,c=a.parent.children[0],u=a.m,l=o.m,h=s.m,f=c.m;s=pp(s),a=dp(a),s&&a;)c=dp(c),(o=pp(o)).a=e,(i=s.z+h-a.z-u+t(s._,a._))>0&&(gp(yp(s,e,r),e,i),u+=i,l+=i),h+=s.m,u+=a.m,f+=c.m,l+=o.m;s&&!pp(o)&&(o.t=s,o.m+=h-l),a&&!dp(c)&&(c.t=a,c.m+=u-f,r=e)}return r}(e,i,e.parent.A||r[0])}function o(t){t._.x=t.z+t.parent.m,t.m+=t.parent.m}function s(t){t.x*=e,t.y=t.depth*n}return i.separation=function(e){return arguments.length?(t=e,i):t},i.size=function(t){return arguments.length?(r=!1,e=+t[0],n=+t[1],i):r?null:[e,n]},i.nodeSize=function(t){return arguments.length?(r=!0,e=+t[0],n=+t[1],i):r?[e,n]:null},i}function bp(t,e,n,r,i){for(var a,o=t.children,s=-1,c=o.length,u=t.value&&(i-n)/t.value;++sf&&(f=s),y=l*l*g,(d=Math.max(f/y,y/h))>p){l-=s;break}p=d}m.push(o={value:l,dice:c1?e:1)},n}(_p);function kp(){var t=wp,e=!1,n=1,r=1,i=[0],a=Qd,o=Qd,s=Qd,c=Qd,u=Qd;function l(t){return t.x0=t.y0=0,t.x1=n,t.y1=r,t.eachBefore(h),i=[0],e&&t.eachBefore(ip),t}function h(e){var n=i[e.depth],r=e.x0+n,l=e.y0+n,h=e.x1-n,f=e.y1-n;h=n-1){var l=s[e];return l.x0=i,l.y0=a,l.x1=o,void(l.y1=c)}for(var h=u[e],f=r/2+h,d=e+1,p=n-1;d>>1;u[g]c-a){var v=(i*m+o*y)/r;t(e,d,y,i,a,v,c),t(d,n,m,v,a,o,c)}else{var b=(a*m+c*y)/r;t(e,d,y,i,a,o,b),t(d,n,m,i,b,o,c)}}(0,c,t.value,e,n,r,i)}function Cp(t,e,n,r,i){(1&t.depth?bp:ap)(t,e,n,r,i)}const Ep=function t(e){function n(t,n,r,i,a){if((o=t._squarify)&&o.ratio===e)for(var o,s,c,u,l,h=-1,f=o.length,d=t.value;++h1?e:1)},n}(_p);function Sp(t){var e=t.length;return function(n){return t[Math.max(0,Math.min(e-1,Math.floor(n*e)))]}}function Ap(t,e){var n=dn(+t,+e);return function(t){var e=n(t);return e-360*Math.floor(e/360)}}function Mp(t,e){return t=+t,e=+e,function(n){return Math.round(t*(1-n)+e*n)}}var Np=Math.SQRT2;function Dp(t){return((t=Math.exp(t))+1/t)/2}function Bp(t,e){var n,r,i=t[0],a=t[1],o=t[2],s=e[0],c=e[1],u=e[2],l=s-i,h=c-a,f=l*l+h*h;if(f<1e-12)r=Math.log(u/o)/Np,n=function(t){return[i+t*l,a+t*h,o*Math.exp(Np*t*r)]};else{var d=Math.sqrt(f),p=(u*u-o*o+4*f)/(2*o*2*d),g=(u*u-o*o-4*f)/(2*u*2*d),y=Math.log(Math.sqrt(p*p+1)-p),m=Math.log(Math.sqrt(g*g+1)-g);r=(m-y)/Np,n=function(t){var e,n=t*r,s=Dp(y),c=o/(2*d)*(s*(e=Np*n+y,((e=Math.exp(2*e))-1)/(e+1))-function(t){return((t=Math.exp(t))-1/t)/2}(y));return[i+c*l,a+c*h,o*s/Dp(Np*n+y)]}}return n.duration=1e3*r,n}function Lp(t){return function(e,n){var r=t((e=an(e)).h,(n=an(n)).h),i=pn(e.s,n.s),a=pn(e.l,n.l),o=pn(e.opacity,n.opacity);return function(t){return e.h=r(t),e.s=i(t),e.l=a(t),e.opacity=o(t),e+""}}}const Op=Lp(dn);var Ip=Lp(pn);function Rp(t,e){var n=pn((t=Ta(t)).l,(e=Ta(e)).l),r=pn(t.a,e.a),i=pn(t.b,e.b),a=pn(t.opacity,e.opacity);return function(e){return t.l=n(e),t.a=r(e),t.b=i(e),t.opacity=a(e),t+""}}function Fp(t){return function(e,n){var r=t((e=Ba(e)).h,(n=Ba(n)).h),i=pn(e.c,n.c),a=pn(e.l,n.l),o=pn(e.opacity,n.opacity);return function(t){return e.h=r(t),e.c=i(t),e.l=a(t),e.opacity=o(t),e+""}}}const Pp=Fp(dn);var Yp=Fp(pn);function jp(t){return function e(n){function r(e,r){var i=t((e=qa(e)).h,(r=qa(r)).h),a=pn(e.s,r.s),o=pn(e.l,r.l),s=pn(e.opacity,r.opacity);return function(t){return 
e.h=i(t),e.s=a(t),e.l=o(Math.pow(t,n)),e.opacity=s(t),e+""}}return n=+n,r.gamma=e,r}(1)}const Up=jp(dn);var zp=jp(pn);function $p(t,e){for(var n=0,r=e.length-1,i=e[0],a=new Array(r<0?0:r);n1&&Vp(t[n[r-2]],t[n[r-1]],t[i])<=0;)--r;n[r++]=i}return n.slice(0,r)}function Zp(t){if((n=t.length)<3)return null;var e,n,r=new Array(n),i=new Array(n);for(e=0;e=0;--e)u.push(t[r[a[e]][2]]);for(e=+s;es!=u>s&&o<(c-n)*(s-r)/(u-r)+n&&(l=!l),c=n,u=r;return l}function Kp(t){for(var e,n,r=-1,i=t.length,a=t[i-1],o=a[0],s=a[1],c=0;++r1);return t+n*a*Math.sqrt(-2*Math.log(i)/i)}}return n.source=t,n}(Jp),ng=function t(e){function n(){var t=eg.source(e).apply(this,arguments);return function(){return Math.exp(t())}}return n.source=t,n}(Jp),rg=function t(e){function n(t){return function(){for(var n=0,r=0;rr&&(e=n,n=r,r=e),function(t){return Math.max(n,Math.min(r,t))}}function xg(t,e,n){var r=t[0],i=t[1],a=e[0],o=e[1];return i2?wg:xg,i=a=null,h}function h(e){return isNaN(e=+e)?n:(i||(i=r(o.map(t),s,c)))(t(u(e)))}return h.invert=function(n){return u(e((a||(a=r(s,o.map(t),Tn)))(n)))},h.domain=function(t){return arguments.length?(o=ug.call(t,yg),u===vg||(u=_g(o)),l()):o.slice()},h.range=function(t){return arguments.length?(s=lg.call(t),l()):s.slice()},h.rangeRound=function(t){return s=lg.call(t),c=Mp,l()},h.clamp=function(t){return arguments.length?(u=t?_g(o):vg,h):u!==vg},h.interpolate=function(t){return arguments.length?(c=t,l()):c},h.unknown=function(t){return arguments.length?(n=t,h):n},function(n,r){return t=n,e=r,l()}}function Cg(t,e){return Tg()(t,e)}function Eg(t,e,n,r){var i,a=M(t,e,n);switch((r=cc(null==r?",f":r)).type){case"s":var o=Math.max(Math.abs(t),Math.abs(e));return null!=r.precision||isNaN(i=xc(a,o))||(r.precision=i),gc(r,o);case"":case"e":case"g":case"p":case"r":null!=r.precision||isNaN(i=wc(a,Math.max(Math.abs(t),Math.abs(e))))||(r.precision=i-("e"===r.type));break;case"f":case"%":null!=r.precision||isNaN(i=_c(a))||(r.precision=i-2*("%"===r.type))}return pc(r)}function Sg(t){var e=t.domain;return t.ticks=function(t){var n=e();return S(n[0],n[n.length-1],null==t?10:t)},t.tickFormat=function(t,n){var r=e();return Eg(r[0],r[r.length-1],null==t?10:t,n)},t.nice=function(n){null==n&&(n=10);var r,i=e(),a=0,o=i.length-1,s=i[a],c=i[o];return c0?r=A(s=Math.floor(s/r)*r,c=Math.ceil(c/r)*r,n):r<0&&(r=A(s=Math.ceil(s*r)/r,c=Math.floor(c*r)/r,n)),r>0?(i[a]=Math.floor(s/r)*r,i[o]=Math.ceil(c/r)*r,e(i)):r<0&&(i[a]=Math.ceil(s*r)/r,i[o]=Math.floor(c*r)/r,e(i)),t},t}function Ag(){var t=Cg(vg,vg);return t.copy=function(){return kg(t,Ag())},og.apply(t,arguments),Sg(t)}function Mg(t){var e;function n(t){return isNaN(t=+t)?e:t}return n.invert=n,n.domain=n.range=function(e){return arguments.length?(t=ug.call(e,yg),n):t.slice()},n.unknown=function(t){return arguments.length?(e=t,n):e},n.copy=function(){return Mg(t).unknown(e)},t=arguments.length?ug.call(t,yg):[0,1],Sg(n)}function Ng(t,e){var n,r=0,i=(t=t.slice()).length-1,a=t[r],o=t[i];return o0){for(;fc)break;g.push(h)}}else for(;f=1;--l)if(!((h=u*l)c)break;g.push(h)}}else g=S(f,d,Math.min(d-f,p)).map(n);return r?g.reverse():g},r.tickFormat=function(t,i){if(null==i&&(i=10===a?".0e":","),"function"!=typeof i&&(i=pc(i)),t===1/0)return i;null==t&&(t=10);var o=Math.max(1,a*t/r.ticks().length);return function(t){var r=t/n(Math.round(e(t)));return r*a0?r[i-1]:e[0],i=r?[i[r-1],n]:[i[o-1],i[o]]},o.unknown=function(e){return arguments.length?(t=e,o):o},o.thresholds=function(){return i.slice()},o.copy=function(){return 
Zg().domain([e,n]).range(a).unknown(t)},og.apply(Sg(o),arguments)}function Qg(){var t,e=[.5],n=[0,1],r=1;function i(i){return i<=i?n[u(e,i,0,r)]:t}return i.domain=function(t){return arguments.length?(e=lg.call(t),r=Math.min(e.length,n.length-1),i):e.slice()},i.range=function(t){return arguments.length?(n=lg.call(t),r=Math.min(e.length,n.length-1),i):n.slice()},i.invertExtent=function(t){var r=n.indexOf(t);return[e[r-1],e[r]]},i.unknown=function(e){return arguments.length?(t=e,i):t},i.copy=function(){return Qg().domain(e).range(n).unknown(t)},og.apply(i,arguments)}var Kg=new Date,Jg=new Date;function ty(t,e,n,r){function i(e){return t(e=0===arguments.length?new Date:new Date(+e)),e}return i.floor=function(e){return t(e=new Date(+e)),e},i.ceil=function(n){return t(n=new Date(n-1)),e(n,1),t(n),n},i.round=function(t){var e=i(t),n=i.ceil(t);return t-e0))return s;do{s.push(o=new Date(+n)),e(n,a),t(n)}while(o=e)for(;t(e),!n(e);)e.setTime(e-1)}),(function(t,r){if(t>=t)if(r<0)for(;++r<=0;)for(;e(t,-1),!n(t););else for(;--r>=0;)for(;e(t,1),!n(t););}))},n&&(i.count=function(e,r){return Kg.setTime(+e),Jg.setTime(+r),t(Kg),t(Jg),Math.floor(n(Kg,Jg))},i.every=function(t){return t=Math.floor(t),isFinite(t)&&t>0?t>1?i.filter(r?function(e){return r(e)%t==0}:function(e){return i.count(0,e)%t==0}):i:null}),i}var ey=ty((function(t){t.setMonth(0,1),t.setHours(0,0,0,0)}),(function(t,e){t.setFullYear(t.getFullYear()+e)}),(function(t,e){return e.getFullYear()-t.getFullYear()}),(function(t){return t.getFullYear()}));ey.every=function(t){return isFinite(t=Math.floor(t))&&t>0?ty((function(e){e.setFullYear(Math.floor(e.getFullYear()/t)*t),e.setMonth(0,1),e.setHours(0,0,0,0)}),(function(e,n){e.setFullYear(e.getFullYear()+n*t)})):null};const ny=ey;var ry=ey.range,iy=ty((function(t){t.setDate(1),t.setHours(0,0,0,0)}),(function(t,e){t.setMonth(t.getMonth()+e)}),(function(t,e){return e.getMonth()-t.getMonth()+12*(e.getFullYear()-t.getFullYear())}),(function(t){return t.getMonth()}));const ay=iy;var oy=iy.range,sy=1e3,cy=6e4,uy=36e5,ly=864e5,hy=6048e5;function fy(t){return ty((function(e){e.setDate(e.getDate()-(e.getDay()+7-t)%7),e.setHours(0,0,0,0)}),(function(t,e){t.setDate(t.getDate()+7*e)}),(function(t,e){return(e-t-(e.getTimezoneOffset()-t.getTimezoneOffset())*cy)/hy}))}var dy=fy(0),py=fy(1),gy=fy(2),yy=fy(3),my=fy(4),vy=fy(5),by=fy(6),_y=dy.range,xy=py.range,wy=gy.range,ky=yy.range,Ty=my.range,Cy=vy.range,Ey=by.range,Sy=ty((function(t){t.setHours(0,0,0,0)}),(function(t,e){t.setDate(t.getDate()+e)}),(function(t,e){return(e-t-(e.getTimezoneOffset()-t.getTimezoneOffset())*cy)/ly}),(function(t){return t.getDate()-1}));const Ay=Sy;var My=Sy.range,Ny=ty((function(t){t.setTime(t-t.getMilliseconds()-t.getSeconds()*sy-t.getMinutes()*cy)}),(function(t,e){t.setTime(+t+e*uy)}),(function(t,e){return(e-t)/uy}),(function(t){return t.getHours()}));const Dy=Ny;var By=Ny.range,Ly=ty((function(t){t.setTime(t-t.getMilliseconds()-t.getSeconds()*sy)}),(function(t,e){t.setTime(+t+e*cy)}),(function(t,e){return(e-t)/cy}),(function(t){return t.getMinutes()}));const Oy=Ly;var Iy=Ly.range,Ry=ty((function(t){t.setTime(t-t.getMilliseconds())}),(function(t,e){t.setTime(+t+e*sy)}),(function(t,e){return(e-t)/sy}),(function(t){return t.getUTCSeconds()}));const Fy=Ry;var Py=Ry.range,Yy=ty((function(){}),(function(t,e){t.setTime(+t+e)}),(function(t,e){return e-t}));Yy.every=function(t){return 
t=Math.floor(t),isFinite(t)&&t>0?t>1?ty((function(e){e.setTime(Math.floor(e/t)*t)}),(function(e,n){e.setTime(+e+n*t)}),(function(e,n){return(n-e)/t})):Yy:null};const jy=Yy;var Uy=Yy.range;function zy(t){return ty((function(e){e.setUTCDate(e.getUTCDate()-(e.getUTCDay()+7-t)%7),e.setUTCHours(0,0,0,0)}),(function(t,e){t.setUTCDate(t.getUTCDate()+7*e)}),(function(t,e){return(e-t)/hy}))}var $y=zy(0),qy=zy(1),Hy=zy(2),Wy=zy(3),Vy=zy(4),Gy=zy(5),Xy=zy(6),Zy=$y.range,Qy=qy.range,Ky=Hy.range,Jy=Wy.range,tm=Vy.range,em=Gy.range,nm=Xy.range,rm=ty((function(t){t.setUTCHours(0,0,0,0)}),(function(t,e){t.setUTCDate(t.getUTCDate()+e)}),(function(t,e){return(e-t)/ly}),(function(t){return t.getUTCDate()-1}));const im=rm;var am=rm.range,om=ty((function(t){t.setUTCMonth(0,1),t.setUTCHours(0,0,0,0)}),(function(t,e){t.setUTCFullYear(t.getUTCFullYear()+e)}),(function(t,e){return e.getUTCFullYear()-t.getUTCFullYear()}),(function(t){return t.getUTCFullYear()}));om.every=function(t){return isFinite(t=Math.floor(t))&&t>0?ty((function(e){e.setUTCFullYear(Math.floor(e.getUTCFullYear()/t)*t),e.setUTCMonth(0,1),e.setUTCHours(0,0,0,0)}),(function(e,n){e.setUTCFullYear(e.getUTCFullYear()+n*t)})):null};const sm=om;var cm=om.range;function um(t){if(0<=t.y&&t.y<100){var e=new Date(-1,t.m,t.d,t.H,t.M,t.S,t.L);return e.setFullYear(t.y),e}return new Date(t.y,t.m,t.d,t.H,t.M,t.S,t.L)}function lm(t){if(0<=t.y&&t.y<100){var e=new Date(Date.UTC(-1,t.m,t.d,t.H,t.M,t.S,t.L));return e.setUTCFullYear(t.y),e}return new Date(Date.UTC(t.y,t.m,t.d,t.H,t.M,t.S,t.L))}function hm(t,e,n){return{y:t,m:e,d:n,H:0,M:0,S:0,L:0}}function fm(t){var e=t.dateTime,n=t.date,r=t.time,i=t.periods,a=t.days,o=t.shortDays,s=t.months,c=t.shortMonths,u=Tm(i),l=Cm(i),h=Tm(a),f=Cm(a),d=Tm(o),p=Cm(o),g=Tm(s),y=Cm(s),m=Tm(c),v=Cm(c),b={a:function(t){return o[t.getDay()]},A:function(t){return a[t.getDay()]},b:function(t){return c[t.getMonth()]},B:function(t){return s[t.getMonth()]},c:null,d:Wm,e:Wm,f:Qm,g:cv,G:lv,H:Vm,I:Gm,j:Xm,L:Zm,m:Km,M:Jm,p:function(t){return i[+(t.getHours()>=12)]},q:function(t){return 1+~~(t.getMonth()/3)},Q:Lv,s:Ov,S:tv,u:ev,U:nv,V:iv,w:av,W:ov,x:null,X:null,y:sv,Y:uv,Z:hv,"%":Bv},_={a:function(t){return o[t.getUTCDay()]},A:function(t){return a[t.getUTCDay()]},b:function(t){return c[t.getUTCMonth()]},B:function(t){return s[t.getUTCMonth()]},c:null,d:fv,e:fv,f:mv,g:Av,G:Nv,H:dv,I:pv,j:gv,L:yv,m:vv,M:bv,p:function(t){return i[+(t.getUTCHours()>=12)]},q:function(t){return 1+~~(t.getUTCMonth()/3)},Q:Lv,s:Ov,S:_v,u:xv,U:wv,V:Tv,w:Cv,W:Ev,x:null,X:null,y:Sv,Y:Mv,Z:Dv,"%":Bv},x={a:function(t,e,n){var r=d.exec(e.slice(n));return r?(t.w=p[r[0].toLowerCase()],n+r[0].length):-1},A:function(t,e,n){var r=h.exec(e.slice(n));return r?(t.w=f[r[0].toLowerCase()],n+r[0].length):-1},b:function(t,e,n){var r=m.exec(e.slice(n));return r?(t.m=v[r[0].toLowerCase()],n+r[0].length):-1},B:function(t,e,n){var r=g.exec(e.slice(n));return r?(t.m=y[r[0].toLowerCase()],n+r[0].length):-1},c:function(t,n,r){return T(t,e,n,r)},d:Rm,e:Rm,f:zm,g:Bm,G:Dm,H:Pm,I:Pm,j:Fm,L:Um,m:Im,M:Ym,p:function(t,e,n){var r=u.exec(e.slice(n));return r?(t.p=l[r[0].toLowerCase()],n+r[0].length):-1},q:Om,Q:qm,s:Hm,S:jm,u:Sm,U:Am,V:Mm,w:Em,W:Nm,x:function(t,e,r){return T(t,n,e,r)},X:function(t,e,n){return T(t,r,e,n)},y:Bm,Y:Dm,Z:Lm,"%":$m};function w(t,e){return function(n){var r,i,a,o=[],s=-1,c=0,u=t.length;for(n instanceof Date||(n=new Date(+n));++s53)return null;"w"in a||(a.w=1),"Z"in 
a?(i=(r=lm(hm(a.y,0,1))).getUTCDay(),r=i>4||0===i?qy.ceil(r):qy(r),r=im.offset(r,7*(a.V-1)),a.y=r.getUTCFullYear(),a.m=r.getUTCMonth(),a.d=r.getUTCDate()+(a.w+6)%7):(i=(r=um(hm(a.y,0,1))).getDay(),r=i>4||0===i?py.ceil(r):py(r),r=Ay.offset(r,7*(a.V-1)),a.y=r.getFullYear(),a.m=r.getMonth(),a.d=r.getDate()+(a.w+6)%7)}else("W"in a||"U"in a)&&("w"in a||(a.w="u"in a?a.u%7:"W"in a?1:0),i="Z"in a?lm(hm(a.y,0,1)).getUTCDay():um(hm(a.y,0,1)).getDay(),a.m=0,a.d="W"in a?(a.w+6)%7+7*a.W-(i+5)%7:a.w+7*a.U-(i+6)%7);return"Z"in a?(a.H+=a.Z/100|0,a.M+=a.Z%100,lm(a)):um(a)}}function T(t,e,n,r){for(var i,a,o=0,s=e.length,c=n.length;o=c)return-1;if(37===(i=e.charCodeAt(o++))){if(i=e.charAt(o++),!(a=x[i in vm?e.charAt(o++):i])||(r=a(t,n,r))<0)return-1}else if(i!=n.charCodeAt(r++))return-1}return r}return b.x=w(n,b),b.X=w(r,b),b.c=w(e,b),_.x=w(n,_),_.X=w(r,_),_.c=w(e,_),{format:function(t){var e=w(t+="",b);return e.toString=function(){return t},e},parse:function(t){var e=k(t+="",!1);return e.toString=function(){return t},e},utcFormat:function(t){var e=w(t+="",_);return e.toString=function(){return t},e},utcParse:function(t){var e=k(t+="",!0);return e.toString=function(){return t},e}}}var dm,pm,gm,ym,mm,vm={"-":"",_:" ",0:"0"},bm=/^\s*\d+/,_m=/^%/,xm=/[\\^$*+?|[\]().{}]/g;function wm(t,e,n){var r=t<0?"-":"",i=(r?-t:t)+"",a=i.length;return r+(a68?1900:2e3),n+r[0].length):-1}function Lm(t,e,n){var r=/^(Z)|([+-]\d\d)(?::?(\d\d))?/.exec(e.slice(n,n+6));return r?(t.Z=r[1]?0:-(r[2]+(r[3]||"00")),n+r[0].length):-1}function Om(t,e,n){var r=bm.exec(e.slice(n,n+1));return r?(t.q=3*r[0]-3,n+r[0].length):-1}function Im(t,e,n){var r=bm.exec(e.slice(n,n+2));return r?(t.m=r[0]-1,n+r[0].length):-1}function Rm(t,e,n){var r=bm.exec(e.slice(n,n+2));return r?(t.d=+r[0],n+r[0].length):-1}function Fm(t,e,n){var r=bm.exec(e.slice(n,n+3));return r?(t.m=0,t.d=+r[0],n+r[0].length):-1}function Pm(t,e,n){var r=bm.exec(e.slice(n,n+2));return r?(t.H=+r[0],n+r[0].length):-1}function Ym(t,e,n){var r=bm.exec(e.slice(n,n+2));return r?(t.M=+r[0],n+r[0].length):-1}function jm(t,e,n){var r=bm.exec(e.slice(n,n+2));return r?(t.S=+r[0],n+r[0].length):-1}function Um(t,e,n){var r=bm.exec(e.slice(n,n+3));return r?(t.L=+r[0],n+r[0].length):-1}function zm(t,e,n){var r=bm.exec(e.slice(n,n+6));return r?(t.L=Math.floor(r[0]/1e3),n+r[0].length):-1}function $m(t,e,n){var r=_m.exec(e.slice(n,n+1));return r?n+r[0].length:-1}function qm(t,e,n){var r=bm.exec(e.slice(n));return r?(t.Q=+r[0],n+r[0].length):-1}function Hm(t,e,n){var r=bm.exec(e.slice(n));return r?(t.s=+r[0],n+r[0].length):-1}function Wm(t,e){return wm(t.getDate(),e,2)}function Vm(t,e){return wm(t.getHours(),e,2)}function Gm(t,e){return wm(t.getHours()%12||12,e,2)}function Xm(t,e){return wm(1+Ay.count(ny(t),t),e,3)}function Zm(t,e){return wm(t.getMilliseconds(),e,3)}function Qm(t,e){return Zm(t,e)+"000"}function Km(t,e){return wm(t.getMonth()+1,e,2)}function Jm(t,e){return wm(t.getMinutes(),e,2)}function tv(t,e){return wm(t.getSeconds(),e,2)}function ev(t){var e=t.getDay();return 0===e?7:e}function nv(t,e){return wm(dy.count(ny(t)-1,t),e,2)}function rv(t){var e=t.getDay();return e>=4||0===e?my(t):my.ceil(t)}function iv(t,e){return t=rv(t),wm(my.count(ny(t),t)+(4===ny(t).getDay()),e,2)}function av(t){return t.getDay()}function ov(t,e){return wm(py.count(ny(t)-1,t),e,2)}function sv(t,e){return wm(t.getFullYear()%100,e,2)}function cv(t,e){return wm((t=rv(t)).getFullYear()%100,e,2)}function uv(t,e){return wm(t.getFullYear()%1e4,e,4)}function lv(t,e){var n=t.getDay();return 
wm((t=n>=4||0===n?my(t):my.ceil(t)).getFullYear()%1e4,e,4)}function hv(t){var e=t.getTimezoneOffset();return(e>0?"-":(e*=-1,"+"))+wm(e/60|0,"0",2)+wm(e%60,"0",2)}function fv(t,e){return wm(t.getUTCDate(),e,2)}function dv(t,e){return wm(t.getUTCHours(),e,2)}function pv(t,e){return wm(t.getUTCHours()%12||12,e,2)}function gv(t,e){return wm(1+im.count(sm(t),t),e,3)}function yv(t,e){return wm(t.getUTCMilliseconds(),e,3)}function mv(t,e){return yv(t,e)+"000"}function vv(t,e){return wm(t.getUTCMonth()+1,e,2)}function bv(t,e){return wm(t.getUTCMinutes(),e,2)}function _v(t,e){return wm(t.getUTCSeconds(),e,2)}function xv(t){var e=t.getUTCDay();return 0===e?7:e}function wv(t,e){return wm($y.count(sm(t)-1,t),e,2)}function kv(t){var e=t.getUTCDay();return e>=4||0===e?Vy(t):Vy.ceil(t)}function Tv(t,e){return t=kv(t),wm(Vy.count(sm(t),t)+(4===sm(t).getUTCDay()),e,2)}function Cv(t){return t.getUTCDay()}function Ev(t,e){return wm(qy.count(sm(t)-1,t),e,2)}function Sv(t,e){return wm(t.getUTCFullYear()%100,e,2)}function Av(t,e){return wm((t=kv(t)).getUTCFullYear()%100,e,2)}function Mv(t,e){return wm(t.getUTCFullYear()%1e4,e,4)}function Nv(t,e){var n=t.getUTCDay();return wm((t=n>=4||0===n?Vy(t):Vy.ceil(t)).getUTCFullYear()%1e4,e,4)}function Dv(){return"+0000"}function Bv(){return"%"}function Lv(t){return+t}function Ov(t){return Math.floor(+t/1e3)}function Iv(t){return dm=fm(t),pm=dm.format,gm=dm.parse,ym=dm.utcFormat,mm=dm.utcParse,dm}Iv({dateTime:"%x, %X",date:"%-m/%-d/%Y",time:"%-I:%M:%S %p",periods:["AM","PM"],days:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],shortDays:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],months:["January","February","March","April","May","June","July","August","September","October","November","December"],shortMonths:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]});var Rv=31536e6;function Fv(t){return new Date(t)}function Pv(t){return t instanceof Date?+t:+new Date(+t)}function Yv(t,e,n,r,i,o,s,c,u){var l=Cg(vg,vg),h=l.invert,f=l.domain,d=u(".%L"),p=u(":%S"),g=u("%I:%M"),y=u("%I %p"),m=u("%a %d"),v=u("%b %d"),b=u("%B"),_=u("%Y"),x=[[s,1,1e3],[s,5,5e3],[s,15,15e3],[s,30,3e4],[o,1,6e4],[o,5,3e5],[o,15,9e5],[o,30,18e5],[i,1,36e5],[i,3,108e5],[i,6,216e5],[i,12,432e5],[r,1,864e5],[r,2,1728e5],[n,1,6048e5],[e,1,2592e6],[e,3,7776e6],[t,1,Rv]];function w(a){return(s(a)1)&&(t-=Math.floor(t));var e=Math.abs(t-.5);return S_.h=360*t-100,S_.s=1.5-1.5*e,S_.l=.8-.9*e,S_+""}var M_=Qe(),N_=Math.PI/3,D_=2*Math.PI/3;function B_(t){var e;return t=(.5-t)*Math.PI,M_.r=255*(e=Math.sin(t))*e,M_.g=255*(e=Math.sin(t+N_))*e,M_.b=255*(e=Math.sin(t+D_))*e,M_+""}function L_(t){return t=Math.max(0,Math.min(1,t)),"rgb("+Math.max(0,Math.min(255,Math.round(34.61+t*(1172.33-t*(10793.56-t*(33300.12-t*(38394.49-14825.05*t)))))))+", "+Math.max(0,Math.min(255,Math.round(23.31+t*(557.33+t*(1225.33-t*(3574.96-t*(1073.77+707.56*t)))))))+", "+Math.max(0,Math.min(255,Math.round(27.2+t*(3211.1-t*(15327.97-t*(27814-t*(22569.18-6838.66*t)))))))+")"}function O_(t){var e=t.length;return function(n){return t[Math.max(0,Math.min(e-1,Math.floor(n*e)))]}}const 
I_=O_(hb("44015444025645045745055946075a46085c460a5d460b5e470d60470e6147106347116447136548146748166848176948186a481a6c481b6d481c6e481d6f481f70482071482173482374482475482576482677482878482979472a7a472c7a472d7b472e7c472f7d46307e46327e46337f463480453581453781453882443983443a83443b84433d84433e85423f854240864241864142874144874045884046883f47883f48893e49893e4a893e4c8a3d4d8a3d4e8a3c4f8a3c508b3b518b3b528b3a538b3a548c39558c39568c38588c38598c375a8c375b8d365c8d365d8d355e8d355f8d34608d34618d33628d33638d32648e32658e31668e31678e31688e30698e306a8e2f6b8e2f6c8e2e6d8e2e6e8e2e6f8e2d708e2d718e2c718e2c728e2c738e2b748e2b758e2a768e2a778e2a788e29798e297a8e297b8e287c8e287d8e277e8e277f8e27808e26818e26828e26828e25838e25848e25858e24868e24878e23888e23898e238a8d228b8d228c8d228d8d218e8d218f8d21908d21918c20928c20928c20938c1f948c1f958b1f968b1f978b1f988b1f998a1f9a8a1e9b8a1e9c891e9d891f9e891f9f881fa0881fa1881fa1871fa28720a38620a48621a58521a68522a78522a88423a98324aa8325ab8225ac8226ad8127ad8128ae8029af7f2ab07f2cb17e2db27d2eb37c2fb47c31b57b32b67a34b67935b77937b87838b9773aba763bbb753dbc743fbc7340bd7242be7144bf7046c06f48c16e4ac16d4cc26c4ec36b50c46a52c56954c56856c66758c7655ac8645cc8635ec96260ca6063cb5f65cb5e67cc5c69cd5b6ccd5a6ece5870cf5773d05675d05477d1537ad1517cd2507fd34e81d34d84d44b86d54989d5488bd6468ed64590d74393d74195d84098d83e9bd93c9dd93ba0da39a2da37a5db36a8db34aadc32addc30b0dd2fb2dd2db5de2bb8de29bade28bddf26c0df25c2df23c5e021c8e020cae11fcde11dd0e11cd2e21bd5e21ad8e219dae319dde318dfe318e2e418e5e419e7e419eae51aece51befe51cf1e51df4e61ef6e620f8e621fbe723fde725"));var R_=O_(hb("00000401000501010601010802010902020b02020d03030f03031204041405041606051806051a07061c08071e0907200a08220b09240c09260d0a290e0b2b100b2d110c2f120d31130d34140e36150e38160f3b180f3d19103f1a10421c10441d11471e114920114b21114e22115024125325125527125829115a2a115c2c115f2d11612f116331116533106734106936106b38106c390f6e3b0f703d0f713f0f72400f74420f75440f764510774710784910784a10794c117a4e117b4f127b51127c52137c54137d56147d57157e59157e5a167e5c167f5d177f5f187f601880621980641a80651a80671b80681c816a1c816b1d816d1d816e1e81701f81721f817320817521817621817822817922827b23827c23827e24828025828125818326818426818627818827818928818b29818c29818e2a81902a81912b81932b80942c80962c80982d80992d809b2e7f9c2e7f9e2f7fa02f7fa1307ea3307ea5317ea6317da8327daa337dab337cad347cae347bb0357bb2357bb3367ab5367ab73779b83779ba3878bc3978bd3977bf3a77c03a76c23b75c43c75c53c74c73d73c83e73ca3e72cc3f71cd4071cf4070d0416fd2426fd3436ed5446dd6456cd8456cd9466bdb476adc4869de4968df4a68e04c67e24d66e34e65e44f64e55064e75263e85362e95462ea5661eb5760ec5860ed5a5fee5b5eef5d5ef05f5ef1605df2625df2645cf3655cf4675cf4695cf56b5cf66c5cf66e5cf7705cf7725cf8745cf8765cf9785df9795df97b5dfa7d5efa7f5efa815ffb835ffb8560fb8761fc8961fc8a62fc8c63fc8e64fc9065fd9266fd9467fd9668fd9869fd9a6afd9b6bfe9d6cfe9f6dfea16efea36ffea571fea772fea973feaa74feac76feae77feb078feb27afeb47bfeb67cfeb77efeb97ffebb81febd82febf84fec185fec287fec488fec68afec88cfeca8dfecc8ffecd90fecf92fed194fed395fed597fed799fed89afdda9cfddc9efddea0fde0a1fde2a3fde3a5fde5a7fde7a9fde9aafdebacfcecaefceeb0fcf0b2fcf2b4fcf4b6fcf6b8fcf7b9fcf9bbfcfbbdfcfdbf")),F_=O_(hb("00000401000501010601010802010a02020c02020e03021004031204031405041706041907051b08051d09061f0a07220b07240c08260d08290e092b10092d110a30120a32140b34150b37160b39180c3c190c3e1b0c411c0c431e0c451f0c48210c4a230c4c240c4f260c51280b53290b552b0b572d0b592f0a5b310a5c320a5e340a5f3609613809623909633b09643d09653e0966400a67420a68440a68450a69470b6a490b6a4a0c6b4c0c6b4d0d6c4f0d6c510e6c520e6d540f6d550f6d57106e59106e5a116e5c126e5d126e5f136e61136e62146e64156e651
56e67166e69166e6a176e6c186e6d186e6f196e71196e721a6e741a6e751b6e771c6d781c6d7a1d6d7c1d6d7d1e6d7f1e6c801f6c82206c84206b85216b87216b88226a8a226a8c23698d23698f24699025689225689326679526679727669827669a28659b29649d29649f2a63a02a63a22b62a32c61a52c60a62d60a82e5fa92e5eab2f5ead305dae305cb0315bb1325ab3325ab43359b63458b73557b93556ba3655bc3754bd3853bf3952c03a51c13a50c33b4fc43c4ec63d4dc73e4cc83f4bca404acb4149cc4248ce4347cf4446d04545d24644d34743d44842d54a41d74b3fd84c3ed94d3dda4e3cdb503bdd513ade5238df5337e05536e15635e25734e35933e45a31e55c30e65d2fe75e2ee8602de9612bea632aeb6429eb6628ec6726ed6925ee6a24ef6c23ef6e21f06f20f1711ff1731df2741cf3761bf37819f47918f57b17f57d15f67e14f68013f78212f78410f8850ff8870ef8890cf98b0bf98c0af98e09fa9008fa9207fa9407fb9606fb9706fb9906fb9b06fb9d07fc9f07fca108fca309fca50afca60cfca80dfcaa0ffcac11fcae12fcb014fcb216fcb418fbb61afbb81dfbba1ffbbc21fbbe23fac026fac228fac42afac62df9c72ff9c932f9cb35f8cd37f8cf3af7d13df7d340f6d543f6d746f5d949f5db4cf4dd4ff4df53f4e156f3e35af3e55df2e661f2e865f2ea69f1ec6df1ed71f1ef75f1f179f2f27df2f482f3f586f3f68af4f88ef5f992f6fa96f8fb9af9fc9dfafda1fcffa4")),P_=O_(hb("0d088710078813078916078a19068c1b068d1d068e20068f2206902406912605912805922a05932c05942e05952f059631059733059735049837049938049a3a049a3c049b3e049c3f049c41049d43039e44039e46039f48039f4903a04b03a14c02a14e02a25002a25102a35302a35502a45601a45801a45901a55b01a55c01a65e01a66001a66100a76300a76400a76600a76700a86900a86a00a86c00a86e00a86f00a87100a87201a87401a87501a87701a87801a87a02a87b02a87d03a87e03a88004a88104a78305a78405a78606a68707a68808a68a09a58b0aa58d0ba58e0ca48f0da4910ea3920fa39410a29511a19613a19814a099159f9a169f9c179e9d189d9e199da01a9ca11b9ba21d9aa31e9aa51f99a62098a72197a82296aa2395ab2494ac2694ad2793ae2892b02991b12a90b22b8fb32c8eb42e8db52f8cb6308bb7318ab83289ba3388bb3488bc3587bd3786be3885bf3984c03a83c13b82c23c81c33d80c43e7fc5407ec6417dc7427cc8437bc9447aca457acb4679cc4778cc4977cd4a76ce4b75cf4c74d04d73d14e72d24f71d35171d45270d5536fd5546ed6556dd7566cd8576bd9586ada5a6ada5b69db5c68dc5d67dd5e66de5f65de6164df6263e06363e16462e26561e26660e3685fe4695ee56a5de56b5de66c5ce76e5be76f5ae87059e97158e97257ea7457eb7556eb7655ec7754ed7953ed7a52ee7b51ef7c51ef7e50f07f4ff0804ef1814df1834cf2844bf3854bf3874af48849f48948f58b47f58c46f68d45f68f44f79044f79143f79342f89441f89540f9973ff9983ef99a3efa9b3dfa9c3cfa9e3bfb9f3afba139fba238fca338fca537fca636fca835fca934fdab33fdac33fdae32fdaf31fdb130fdb22ffdb42ffdb52efeb72dfeb82cfeba2cfebb2bfebd2afebe2afec029fdc229fdc328fdc527fdc627fdc827fdca26fdcb26fccd25fcce25fcd025fcd225fbd324fbd524fbd724fad824fada24f9dc24f9dd25f8df25f8e125f7e225f7e425f6e626f6e826f5e926f5eb27f4ed27f3ee27f3f027f2f227f1f426f1f525f0f724f0f921"));function Y_(t){return Te(ie(t).call(document.documentElement))}var j_=0;function U_(){return new z_}function z_(){this._="@"+(++j_).toString(36)}function $_(t){return"string"==typeof t?new xe([document.querySelectorAll(t)],[document.documentElement]):new xe([null==t?[]:t],_e)}function q_(t,e){null==e&&(e=Nn().touches);for(var n=0,r=e?e.length:0,i=new Array(r);n1?0:t<-1?tx:Math.acos(t)}function ix(t){return t>=1?ex:t<=-1?-ex:Math.asin(t)}function ax(t){return t.innerRadius}function ox(t){return t.outerRadius}function sx(t){return t.startAngle}function cx(t){return t.endAngle}function ux(t){return t&&t.padAngle}function lx(t,e,n,r,i,a,o,s){var c=n-t,u=r-e,l=o-i,h=s-a,f=h*c-l*u;if(!(f*fN*N+D*D&&(T=E,C=S),{cx:T,cy:C,x01:-l,y01:-h,x11:T*(i/x-1),y11:C*(i/x-1)}}function fx(){var t=ax,e=ox,n=H_(0),r=null,i=sx,a=cx,o=ux,s=null;function c(){var 
c,u,l=+t.apply(this,arguments),h=+e.apply(this,arguments),f=i.apply(this,arguments)-ex,d=a.apply(this,arguments)-ex,p=W_(d-f),g=d>f;if(s||(s=c=Wi()),hJ_)if(p>nx-J_)s.moveTo(h*G_(f),h*Q_(f)),s.arc(0,0,h,f,d,!g),l>J_&&(s.moveTo(l*G_(d),l*Q_(d)),s.arc(0,0,l,d,f,g));else{var y,m,v=f,b=d,_=f,x=d,w=p,k=p,T=o.apply(this,arguments)/2,C=T>J_&&(r?+r.apply(this,arguments):K_(l*l+h*h)),E=Z_(W_(h-l)/2,+n.apply(this,arguments)),S=E,A=E;if(C>J_){var M=ix(C/l*Q_(T)),N=ix(C/h*Q_(T));(w-=2*M)>J_?(_+=M*=g?1:-1,x-=M):(w=0,_=x=(f+d)/2),(k-=2*N)>J_?(v+=N*=g?1:-1,b-=N):(k=0,v=b=(f+d)/2)}var D=h*G_(v),B=h*Q_(v),L=l*G_(x),O=l*Q_(x);if(E>J_){var I,R=h*G_(b),F=h*Q_(b),P=l*G_(_),Y=l*Q_(_);if(pJ_?A>J_?(y=hx(P,Y,D,B,h,A,g),m=hx(R,F,L,O,h,A,g),s.moveTo(y.cx+y.x01,y.cy+y.y01),AJ_&&w>J_?S>J_?(y=hx(L,O,R,F,l,-S,g),m=hx(D,B,P,Y,l,-S,g),s.lineTo(y.cx+y.x01,y.cy+y.y01),S=l;--h)s.point(y[h],m[h]);s.lineEnd(),s.areaEnd()}g&&(y[u]=+t(f,u,c),m[u]=+n(f,u,c),s.point(e?+e(f,u,c):y[u],r?+r(f,u,c):m[u]))}if(d)return s=null,d+""||null}function u(){return mx().defined(i).curve(o).context(a)}return c.x=function(n){return arguments.length?(t="function"==typeof n?n:H_(+n),e=null,c):t},c.x0=function(e){return arguments.length?(t="function"==typeof e?e:H_(+e),c):t},c.x1=function(t){return arguments.length?(e=null==t?null:"function"==typeof t?t:H_(+t),c):e},c.y=function(t){return arguments.length?(n="function"==typeof t?t:H_(+t),r=null,c):n},c.y0=function(t){return arguments.length?(n="function"==typeof t?t:H_(+t),c):n},c.y1=function(t){return arguments.length?(r=null==t?null:"function"==typeof t?t:H_(+t),c):r},c.lineX0=c.lineY0=function(){return u().x(t).y(n)},c.lineY1=function(){return u().x(t).y(r)},c.lineX1=function(){return u().x(e).y(n)},c.defined=function(t){return arguments.length?(i="function"==typeof t?t:H_(!!t),c):i},c.curve=function(t){return arguments.length?(o=t,null!=a&&(s=o(a)),c):o},c.context=function(t){return arguments.length?(null==t?a=s=null:s=o(a=t),c):a},c}function bx(t,e){return et?1:e>=t?0:NaN}function _x(t){return t}function xx(){var t=_x,e=bx,n=null,r=H_(0),i=H_(nx),a=H_(0);function o(o){var s,c,u,l,h,f=o.length,d=0,p=new Array(f),g=new Array(f),y=+r.apply(this,arguments),m=Math.min(nx,Math.max(-nx,i.apply(this,arguments)-y)),v=Math.min(Math.abs(m)/f,a.apply(this,arguments)),b=v*(m<0?-1:1);for(s=0;s0&&(d+=h);for(null!=e?p.sort((function(t,n){return e(g[t],g[n])})):null!=n&&p.sort((function(t,e){return n(o[t],o[e])})),s=0,u=d?(m-f*b)/d:0;s0?h*u:0)+b,g[c]={data:o[c],index:s,value:h,startAngle:y,endAngle:l,padAngle:v};return g}return o.value=function(e){return arguments.length?(t="function"==typeof e?e:H_(+e),o):t},o.sortValues=function(t){return arguments.length?(e=t,n=null,o):e},o.sort=function(t){return arguments.length?(n=t,e=null,o):n},o.startAngle=function(t){return arguments.length?(r="function"==typeof t?t:H_(+t),o):r},o.endAngle=function(t){return arguments.length?(i="function"==typeof t?t:H_(+t),o):i},o.padAngle=function(t){return arguments.length?(a="function"==typeof t?t:H_(+t),o):a},o}dx.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,e){switch(t=+t,e=+e,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,e):this._context.moveTo(t,e);break;case 1:this._point=2;default:this._context.lineTo(t,e)}}};var wx=Tx(px);function kx(t){this._curve=t}function Tx(t){function e(e){return new 
kx(t(e))}return e._curve=t,e}function Cx(t){var e=t.curve;return t.angle=t.x,delete t.x,t.radius=t.y,delete t.y,t.curve=function(t){return arguments.length?e(Tx(t)):e()._curve},t}function Ex(){return Cx(mx().curve(wx))}function Sx(){var t=vx().curve(wx),e=t.curve,n=t.lineX0,r=t.lineX1,i=t.lineY0,a=t.lineY1;return t.angle=t.x,delete t.x,t.startAngle=t.x0,delete t.x0,t.endAngle=t.x1,delete t.x1,t.radius=t.y,delete t.y,t.innerRadius=t.y0,delete t.y0,t.outerRadius=t.y1,delete t.y1,t.lineStartAngle=function(){return Cx(n())},delete t.lineX0,t.lineEndAngle=function(){return Cx(r())},delete t.lineX1,t.lineInnerRadius=function(){return Cx(i())},delete t.lineY0,t.lineOuterRadius=function(){return Cx(a())},delete t.lineY1,t.curve=function(t){return arguments.length?e(Tx(t)):e()._curve},t}function Ax(t,e){return[(e=+e)*Math.cos(t-=Math.PI/2),e*Math.sin(t)]}kx.prototype={areaStart:function(){this._curve.areaStart()},areaEnd:function(){this._curve.areaEnd()},lineStart:function(){this._curve.lineStart()},lineEnd:function(){this._curve.lineEnd()},point:function(t,e){this._curve.point(e*Math.sin(t),e*-Math.cos(t))}};var Mx=Array.prototype.slice;function Nx(t){return t.source}function Dx(t){return t.target}function Bx(t){var e=Nx,n=Dx,r=gx,i=yx,a=null;function o(){var o,s=Mx.call(arguments),c=e.apply(this,s),u=n.apply(this,s);if(a||(a=o=Wi()),t(a,+r.apply(this,(s[0]=c,s)),+i.apply(this,s),+r.apply(this,(s[0]=u,s)),+i.apply(this,s)),o)return a=null,o+""||null}return o.source=function(t){return arguments.length?(e=t,o):e},o.target=function(t){return arguments.length?(n=t,o):n},o.x=function(t){return arguments.length?(r="function"==typeof t?t:H_(+t),o):r},o.y=function(t){return arguments.length?(i="function"==typeof t?t:H_(+t),o):i},o.context=function(t){return arguments.length?(a=null==t?null:t,o):a},o}function Lx(t,e,n,r,i){t.moveTo(e,n),t.bezierCurveTo(e=(e+r)/2,n,e,i,r,i)}function Ox(t,e,n,r,i){t.moveTo(e,n),t.bezierCurveTo(e,n=(n+i)/2,r,n,r,i)}function Ix(t,e,n,r,i){var a=Ax(e,n),o=Ax(e,n=(n+i)/2),s=Ax(r,n),c=Ax(r,i);t.moveTo(a[0],a[1]),t.bezierCurveTo(o[0],o[1],s[0],s[1],c[0],c[1])}function Rx(){return Bx(Lx)}function Fx(){return Bx(Ox)}function Px(){var t=Bx(Ix);return t.angle=t.x,delete t.x,t.radius=t.y,delete t.y,t}const Yx={draw:function(t,e){var n=Math.sqrt(e/tx);t.moveTo(n,0),t.arc(0,0,n,0,nx)}},jx={draw:function(t,e){var n=Math.sqrt(e/5)/2;t.moveTo(-3*n,-n),t.lineTo(-n,-n),t.lineTo(-n,-3*n),t.lineTo(n,-3*n),t.lineTo(n,-n),t.lineTo(3*n,-n),t.lineTo(3*n,n),t.lineTo(n,n),t.lineTo(n,3*n),t.lineTo(-n,3*n),t.lineTo(-n,n),t.lineTo(-3*n,n),t.closePath()}};var Ux=Math.sqrt(1/3),zx=2*Ux;const $x={draw:function(t,e){var n=Math.sqrt(e/zx),r=n*Ux;t.moveTo(0,-n),t.lineTo(r,0),t.lineTo(0,n),t.lineTo(-r,0),t.closePath()}};var qx=Math.sin(tx/10)/Math.sin(7*tx/10),Hx=Math.sin(nx/10)*qx,Wx=-Math.cos(nx/10)*qx;const Vx={draw:function(t,e){var n=Math.sqrt(.8908130915292852*e),r=Hx*n,i=Wx*n;t.moveTo(0,-n),t.lineTo(r,i);for(var a=1;a<5;++a){var o=nx*a/5,s=Math.cos(o),c=Math.sin(o);t.lineTo(c*n,-s*n),t.lineTo(s*r-c*i,c*r+s*i)}t.closePath()}},Gx={draw:function(t,e){var n=Math.sqrt(e),r=-n/2;t.rect(r,r,n,n)}};var Xx=Math.sqrt(3);const Zx={draw:function(t,e){var n=-Math.sqrt(e/(3*Xx));t.moveTo(0,2*n),t.lineTo(-Xx*n,-n),t.lineTo(Xx*n,-n),t.closePath()}};var Qx=-.5,Kx=Math.sqrt(3)/2,Jx=1/Math.sqrt(12),tw=3*(Jx/2+1);const ew={draw:function(t,e){var 
n=Math.sqrt(e/tw),r=n/2,i=n*Jx,a=r,o=n*Jx+n,s=-a,c=o;t.moveTo(r,i),t.lineTo(a,o),t.lineTo(s,c),t.lineTo(Qx*r-Kx*i,Kx*r+Qx*i),t.lineTo(Qx*a-Kx*o,Kx*a+Qx*o),t.lineTo(Qx*s-Kx*c,Kx*s+Qx*c),t.lineTo(Qx*r+Kx*i,Qx*i-Kx*r),t.lineTo(Qx*a+Kx*o,Qx*o-Kx*a),t.lineTo(Qx*s+Kx*c,Qx*c-Kx*s),t.closePath()}};var nw=[Yx,jx,$x,Gx,Vx,Zx,ew];function rw(){var t=H_(Yx),e=H_(64),n=null;function r(){var r;if(n||(n=r=Wi()),t.apply(this,arguments).draw(n,+e.apply(this,arguments)),r)return n=null,r+""||null}return r.type=function(e){return arguments.length?(t="function"==typeof e?e:H_(e),r):t},r.size=function(t){return arguments.length?(e="function"==typeof t?t:H_(+t),r):e},r.context=function(t){return arguments.length?(n=null==t?null:t,r):n},r}function iw(){}function aw(t,e,n){t._context.bezierCurveTo((2*t._x0+t._x1)/3,(2*t._y0+t._y1)/3,(t._x0+2*t._x1)/3,(t._y0+2*t._y1)/3,(t._x0+4*t._x1+e)/6,(t._y0+4*t._y1+n)/6)}function ow(t){this._context=t}function sw(t){return new ow(t)}function cw(t){this._context=t}function uw(t){return new cw(t)}function lw(t){this._context=t}function hw(t){return new lw(t)}function fw(t,e){this._basis=new ow(t),this._beta=e}ow.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=NaN,this._point=0},lineEnd:function(){switch(this._point){case 3:aw(this,this._x1,this._y1);case 2:this._context.lineTo(this._x1,this._y1)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,e){switch(t=+t,e=+e,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,e):this._context.moveTo(t,e);break;case 1:this._point=2;break;case 2:this._point=3,this._context.lineTo((5*this._x0+this._x1)/6,(5*this._y0+this._y1)/6);default:aw(this,t,e)}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=e}},cw.prototype={areaStart:iw,areaEnd:iw,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._y0=this._y1=this._y2=this._y3=this._y4=NaN,this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x2,this._y2),this._context.closePath();break;case 2:this._context.moveTo((this._x2+2*this._x3)/3,(this._y2+2*this._y3)/3),this._context.lineTo((this._x3+2*this._x2)/3,(this._y3+2*this._y2)/3),this._context.closePath();break;case 3:this.point(this._x2,this._y2),this.point(this._x3,this._y3),this.point(this._x4,this._y4)}},point:function(t,e){switch(t=+t,e=+e,this._point){case 0:this._point=1,this._x2=t,this._y2=e;break;case 1:this._point=2,this._x3=t,this._y3=e;break;case 2:this._point=3,this._x4=t,this._y4=e,this._context.moveTo((this._x0+4*this._x1+t)/6,(this._y0+4*this._y1+e)/6);break;default:aw(this,t,e)}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=e}},lw.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=NaN,this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,e){switch(t=+t,e=+e,this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3;var n=(this._x0+4*this._x1+t)/6,r=(this._y0+4*this._y1+e)/6;this._line?this._context.lineTo(n,r):this._context.moveTo(n,r);break;case 3:this._point=4;default:aw(this,t,e)}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=e}},fw.prototype={lineStart:function(){this._x=[],this._y=[],this._basis.lineStart()},lineEnd:function(){var 
t=this._x,e=this._y,n=t.length-1;if(n>0)for(var r,i=t[0],a=e[0],o=t[n]-i,s=e[n]-a,c=-1;++c<=n;)r=c/n,this._basis.point(this._beta*t[c]+(1-this._beta)*(i+r*o),this._beta*e[c]+(1-this._beta)*(a+r*s));this._x=this._y=null,this._basis.lineEnd()},point:function(t,e){this._x.push(+t),this._y.push(+e)}};const dw=function t(e){function n(t){return 1===e?new ow(t):new fw(t,e)}return n.beta=function(e){return t(+e)},n}(.85);function pw(t,e,n){t._context.bezierCurveTo(t._x1+t._k*(t._x2-t._x0),t._y1+t._k*(t._y2-t._y0),t._x2+t._k*(t._x1-e),t._y2+t._k*(t._y1-n),t._x2,t._y2)}function gw(t,e){this._context=t,this._k=(1-e)/6}gw.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._point=0},lineEnd:function(){switch(this._point){case 2:this._context.lineTo(this._x2,this._y2);break;case 3:pw(this,this._x1,this._y1)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,e){switch(t=+t,e=+e,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,e):this._context.moveTo(t,e);break;case 1:this._point=2,this._x1=t,this._y1=e;break;case 2:this._point=3;default:pw(this,t,e)}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=e}};const yw=function t(e){function n(t){return new gw(t,e)}return n.tension=function(e){return t(+e)},n}(0);function mw(t,e){this._context=t,this._k=(1-e)/6}mw.prototype={areaStart:iw,areaEnd:iw,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._x5=this._y0=this._y1=this._y2=this._y3=this._y4=this._y5=NaN,this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x3,this._y3),this._context.closePath();break;case 2:this._context.lineTo(this._x3,this._y3),this._context.closePath();break;case 3:this.point(this._x3,this._y3),this.point(this._x4,this._y4),this.point(this._x5,this._y5)}},point:function(t,e){switch(t=+t,e=+e,this._point){case 0:this._point=1,this._x3=t,this._y3=e;break;case 1:this._point=2,this._context.moveTo(this._x4=t,this._y4=e);break;case 2:this._point=3,this._x5=t,this._y5=e;break;default:pw(this,t,e)}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=e}};const vw=function t(e){function n(t){return new mw(t,e)}return n.tension=function(e){return t(+e)},n}(0);function bw(t,e){this._context=t,this._k=(1-e)/6}bw.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,e){switch(t=+t,e=+e,this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3,this._line?this._context.lineTo(this._x2,this._y2):this._context.moveTo(this._x2,this._y2);break;case 3:this._point=4;default:pw(this,t,e)}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=e}};const _w=function t(e){function n(t){return new bw(t,e)}return n.tension=function(e){return t(+e)},n}(0);function xw(t,e,n){var r=t._x1,i=t._y1,a=t._x2,o=t._y2;if(t._l01_a>J_){var s=2*t._l01_2a+3*t._l01_a*t._l12_a+t._l12_2a,c=3*t._l01_a*(t._l01_a+t._l12_a);r=(r*s-t._x0*t._l12_2a+t._x2*t._l01_2a)/c,i=(i*s-t._y0*t._l12_2a+t._y2*t._l01_2a)/c}if(t._l23_a>J_){var 
u=2*t._l23_2a+3*t._l23_a*t._l12_a+t._l12_2a,l=3*t._l23_a*(t._l23_a+t._l12_a);a=(a*u+t._x1*t._l23_2a-e*t._l12_2a)/l,o=(o*u+t._y1*t._l23_2a-n*t._l12_2a)/l}t._context.bezierCurveTo(r,i,a,o,t._x2,t._y2)}function ww(t,e){this._context=t,this._alpha=e}ww.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){switch(this._point){case 2:this._context.lineTo(this._x2,this._y2);break;case 3:this.point(this._x2,this._y2)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,e){if(t=+t,e=+e,this._point){var n=this._x2-t,r=this._y2-e;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(n*n+r*r,this._alpha))}switch(this._point){case 0:this._point=1,this._line?this._context.lineTo(t,e):this._context.moveTo(t,e);break;case 1:this._point=2;break;case 2:this._point=3;default:xw(this,t,e)}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=e}};const kw=function t(e){function n(t){return e?new ww(t,e):new gw(t,0)}return n.alpha=function(e){return t(+e)},n}(.5);function Tw(t,e){this._context=t,this._alpha=e}Tw.prototype={areaStart:iw,areaEnd:iw,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._x5=this._y0=this._y1=this._y2=this._y3=this._y4=this._y5=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x3,this._y3),this._context.closePath();break;case 2:this._context.lineTo(this._x3,this._y3),this._context.closePath();break;case 3:this.point(this._x3,this._y3),this.point(this._x4,this._y4),this.point(this._x5,this._y5)}},point:function(t,e){if(t=+t,e=+e,this._point){var n=this._x2-t,r=this._y2-e;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(n*n+r*r,this._alpha))}switch(this._point){case 0:this._point=1,this._x3=t,this._y3=e;break;case 1:this._point=2,this._context.moveTo(this._x4=t,this._y4=e);break;case 2:this._point=3,this._x5=t,this._y5=e;break;default:xw(this,t,e)}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=e}};const Cw=function t(e){function n(t){return e?new Tw(t,e):new mw(t,0)}return n.alpha=function(e){return t(+e)},n}(.5);function Ew(t,e){this._context=t,this._alpha=e}Ew.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,e){if(t=+t,e=+e,this._point){var n=this._x2-t,r=this._y2-e;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(n*n+r*r,this._alpha))}switch(this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3,this._line?this._context.lineTo(this._x2,this._y2):this._context.moveTo(this._x2,this._y2);break;case 
3:this._point=4;default:xw(this,t,e)}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=e}};const Sw=function t(e){function n(t){return e?new Ew(t,e):new bw(t,0)}return n.alpha=function(e){return t(+e)},n}(.5);function Aw(t){this._context=t}function Mw(t){return new Aw(t)}function Nw(t){return t<0?-1:1}function Dw(t,e,n){var r=t._x1-t._x0,i=e-t._x1,a=(t._y1-t._y0)/(r||i<0&&-0),o=(n-t._y1)/(i||r<0&&-0),s=(a*i+o*r)/(r+i);return(Nw(a)+Nw(o))*Math.min(Math.abs(a),Math.abs(o),.5*Math.abs(s))||0}function Bw(t,e){var n=t._x1-t._x0;return n?(3*(t._y1-t._y0)/n-e)/2:e}function Lw(t,e,n){var r=t._x0,i=t._y0,a=t._x1,o=t._y1,s=(a-r)/3;t._context.bezierCurveTo(r+s,i+s*e,a-s,o-s*n,a,o)}function Ow(t){this._context=t}function Iw(t){this._context=new Rw(t)}function Rw(t){this._context=t}function Fw(t){return new Ow(t)}function Pw(t){return new Iw(t)}function Yw(t){this._context=t}function jw(t){var e,n,r=t.length-1,i=new Array(r),a=new Array(r),o=new Array(r);for(i[0]=0,a[0]=2,o[0]=t[0]+2*t[1],e=1;e=0;--e)i[e]=(o[e]-i[e+1])/a[e];for(a[r-1]=(t[r]+i[r-1])/2,e=0;e1)for(var n,r,i,a=1,o=t[e[0]],s=o.length;a=0;)n[e]=e;return n}function Gw(t,e){return t[e]}function Xw(){var t=H_([]),e=Vw,n=Ww,r=Gw;function i(i){var a,o,s=t.apply(this,arguments),c=i.length,u=s.length,l=new Array(u);for(a=0;a0){for(var n,r,i,a=0,o=t[0].length;a0)for(var n,r,i,a,o,s,c=0,u=t[e[0]].length;c0?(r[0]=a,r[1]=a+=i):i<0?(r[1]=o,r[0]=o+=i):(r[0]=0,r[1]=i)}function Kw(t,e){if((n=t.length)>0){for(var n,r=0,i=t[e[0]],a=i.length;r0&&(r=(n=t[e[0]]).length)>0){for(var n,r,i,a=0,o=1;oa&&(a=e,r=n);return r}function nk(t){var e=t.map(rk);return Vw(t).sort((function(t,n){return e[t]-e[n]}))}function rk(t){for(var e,n=0,r=-1,i=t.length;++r=0&&(this._t=1-this._t,this._line=1-this._line)},point:function(t,e){switch(t=+t,e=+e,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,e):this._context.moveTo(t,e);break;case 1:this._point=2;default:if(this._t<=0)this._context.lineTo(this._x,e),this._context.lineTo(t,e);else{var n=this._x*(1-this._t)+t*this._t;this._context.lineTo(n,this._y),this._context.lineTo(n,e)}}this._x=t,this._y=e}};var sk="%Y-%m-%dT%H:%M:%S.%LZ",ck=Date.prototype.toISOString?function(t){return t.toISOString()}:ym(sk);const uk=ck;var lk=+new Date("2000-01-01T00:00:00.000Z")?function(t){var e=new Date(t);return isNaN(e)?null:e}:mm(sk);const hk=lk;function fk(t,e,n){var r=new Wn,i=e;return null==e?(r.restart(t,e,n),r):(e=+e,n=null==n?qn():+n,r.restart((function a(o){o+=i,r.restart(a,i+=e,n),t(o)}),e,n),r)}function dk(t){return function(){return t}}function pk(t){return t[0]}function gk(t){return t[1]}function yk(){this._=null}function mk(t){t.U=t.C=t.L=t.R=t.P=t.N=null}function vk(t,e){var n=e,r=e.R,i=n.U;i?i.L===n?i.L=r:i.R=r:t._=r,r.U=i,n.U=r,n.R=r.L,n.R&&(n.R.U=n),r.L=n}function bk(t,e){var n=e,r=e.L,i=n.U;i?i.L===n?i.L=r:i.R=r:t._=r,r.U=i,n.U=r,n.L=r.R,n.L&&(n.L.U=n),r.R=n}function _k(t){for(;t.L;)t=t.L;return t}yk.prototype={constructor:yk,insert:function(t,e){var n,r,i;if(t){if(e.P=t,e.N=t.N,t.N&&(t.N.P=e),t.N=e,t.R){for(t=t.R;t.L;)t=t.L;t.L=e}else t.R=e;n=t}else 
this._?(t=_k(this._),e.P=null,e.N=t,t.P=t.L=e,n=t):(e.P=e.N=null,this._=e,n=null);for(e.L=e.R=null,e.U=n,e.C=!0,t=e;n&&n.C;)n===(r=n.U).L?(i=r.R)&&i.C?(n.C=i.C=!1,r.C=!0,t=r):(t===n.R&&(vk(this,n),n=(t=n).U),n.C=!1,r.C=!0,bk(this,r)):(i=r.L)&&i.C?(n.C=i.C=!1,r.C=!0,t=r):(t===n.L&&(bk(this,n),n=(t=n).U),n.C=!1,r.C=!0,vk(this,r)),n=t.U;this._.C=!1},remove:function(t){t.N&&(t.N.P=t.P),t.P&&(t.P.N=t.N),t.N=t.P=null;var e,n,r,i=t.U,a=t.L,o=t.R;if(n=a?o?_k(o):a:o,i?i.L===t?i.L=n:i.R=n:this._=n,a&&o?(r=n.C,n.C=t.C,n.L=a,a.U=n,n!==o?(i=n.U,n.U=t.U,t=n.R,i.L=t,n.R=o,o.U=n):(n.U=i,i=n,t=n.R)):(r=t.C,t=n),t&&(t.U=i),!r)if(t&&t.C)t.C=!1;else{do{if(t===this._)break;if(t===i.L){if((e=i.R).C&&(e.C=!1,i.C=!0,vk(this,i),e=i.R),e.L&&e.L.C||e.R&&e.R.C){e.R&&e.R.C||(e.L.C=!1,e.C=!0,bk(this,e),e=i.R),e.C=i.C,i.C=e.R.C=!1,vk(this,i),t=this._;break}}else if((e=i.L).C&&(e.C=!1,i.C=!0,bk(this,i),e=i.L),e.L&&e.L.C||e.R&&e.R.C){e.L&&e.L.C||(e.R.C=!1,e.C=!0,vk(this,e),e=i.L),e.C=i.C,i.C=e.L.C=!1,bk(this,i),t=this._;break}e.C=!0,t=i,i=i.U}while(!t.C);t&&(t.C=!1)}}};const xk=yk;function wk(t,e,n,r){var i=[null,null],a=Wk.push(i)-1;return i.left=t,i.right=e,n&&Tk(i,t,e,n),r&&Tk(i,e,t,r),qk[t.index].halfedges.push(a),qk[e.index].halfedges.push(a),i}function kk(t,e,n){var r=[e,n];return r.left=t,r}function Tk(t,e,n,r){t[0]||t[1]?t.left===n?t[1]=r:t[0]=r:(t[0]=r,t.left=e,t.right=n)}function Ck(t,e,n,r,i){var a,o=t[0],s=t[1],c=o[0],u=o[1],l=0,h=1,f=s[0]-c,d=s[1]-u;if(a=e-c,f||!(a>0)){if(a/=f,f<0){if(a0){if(a>h)return;a>l&&(l=a)}if(a=r-c,f||!(a<0)){if(a/=f,f<0){if(a>h)return;a>l&&(l=a)}else if(f>0){if(a0)){if(a/=d,d<0){if(a0){if(a>h)return;a>l&&(l=a)}if(a=i-u,d||!(a<0)){if(a/=d,d<0){if(a>h)return;a>l&&(l=a)}else if(d>0){if(a0||h<1)||(l>0&&(t[0]=[c+l*f,u+l*d]),h<1&&(t[1]=[c+h*f,u+h*d]),!0)}}}}}function Ek(t,e,n,r,i){var a=t[1];if(a)return!0;var o,s,c=t[0],u=t.left,l=t.right,h=u[0],f=u[1],d=l[0],p=l[1],g=(h+d)/2,y=(f+p)/2;if(p===f){if(g=r)return;if(h>d){if(c){if(c[1]>=i)return}else c=[g,n];a=[g,i]}else{if(c){if(c[1]1)if(h>d){if(c){if(c[1]>=i)return}else c=[(n-s)/o,n];a=[(i-s)/o,i]}else{if(c){if(c[1]=r)return}else c=[e,o*e+s];a=[r,o*r+s]}else{if(c){if(c[0]=-Gk)){var d=c*c+u*u,p=l*l+h*h,g=(h*d-u*p)/f,y=(c*p-l*d)/f,m=Dk.pop()||new Bk;m.arc=t,m.site=i,m.x=g+o,m.y=(m.cy=y+s)+Math.sqrt(g*g+y*y),t.circle=m;for(var v=null,b=Hk._;b;)if(m.yVk)s=s.L;else{if(!((i=a-zk(s,o))>Vk)){r>-Vk?(e=s.P,n=s):i>-Vk?(e=s,n=s.N):e=n=s;break}if(!s.R){e=s;break}s=s.R}!function(t){qk[t.index]={site:t,halfedges:[]}}(t);var c=Fk(t);if($k.insert(e,c),e||n){if(e===n)return Ok(e),n=Fk(e.site),$k.insert(c,n),c.edge=n.edge=wk(e.site,c.site),Lk(e),void Lk(n);if(n){Ok(e),Ok(n);var u=e.site,l=u[0],h=u[1],f=t[0]-l,d=t[1]-h,p=n.site,g=p[0]-l,y=p[1]-h,m=2*(f*y-d*g),v=f*f+d*d,b=g*g+y*y,_=[(y*v-d*b)/m+l,(f*b-g*v)/m+h];Tk(n.edge,u,p,_),c.edge=wk(u,t,null,_),n.edge=wk(t,p,null,_),Lk(e),Lk(n)}else c.edge=wk(e.site,c.site)}}function Uk(t,e){var n=t.site,r=n[0],i=n[1],a=i-e;if(!a)return r;var o=t.P;if(!o)return-1/0;var s=(n=o.site)[0],c=n[1],u=c-e;if(!u)return s;var l=s-r,h=1/a-1/u,f=l/u;return h?(-f+Math.sqrt(f*f-2*h*(l*l/(-2*u)-c+u/2+i-a/2)))/h+r:(r+s)/2}function zk(t,e){var n=t.N;if(n)return Uk(n,e);var r=t.site;return r[1]===e?r[0]:1/0}var $k,qk,Hk,Wk,Vk=1e-6,Gk=1e-12;function Xk(t,e,n){return(t[0]-n[0])*(e[1]-t[1])-(t[0]-e[0])*(n[1]-t[1])}function Zk(t,e){return e[1]-t[1]||e[0]-t[0]}function Qk(t,e){var n,r,i,a=t.sort(Zk).pop();for(Wk=[],qk=new Array(t.length),$k=new xk,Hk=new xk;;)if(i=Nk,a&&(!i||a[1]Vk||Math.abs(i[0][1]-i[1][1])>Vk)||delete 
[minified JavaScript vendor bundle, mangled by extraction: d3 voronoi/zoom helpers, the dagre/graphlib layout engine (v0.8.5) with its lodash shim, and the DOMPurify HTML sanitizer (v2.3.6)]