diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index ba0b3010a..871bcf729 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -16,10 +16,10 @@ # COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER # IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - +--- name: main -on: +'on': push: branches: - 'main' @@ -168,7 +168,7 @@ jobs: - name: Install Qt uses: jurplel/install-qt-action@910f37ee23653fd4b243fe5c7f81ff26a8a6a64e with: - version: '6.3.2' + version: '6.4.3' setup-python: false - name: Run Checks @@ -189,7 +189,7 @@ jobs: frontend_distribute: name: Run frontend benchmarks, build, sign and distribute - needs: [ checks, backend_bench ] + needs: [checks, backend_bench] runs-on: ubuntu-latest steps: - shell: bash @@ -203,10 +203,10 @@ jobs: with: owner: swift-nav repo: swift-toolbox-ci - ref: main + ref: silverjam/py311 workflow_file_name: distribute.yml client_payload: '{ - "branch": "${{ github.head_ref || github.ref_name }}", + "branch": "${{ github.head_ref || github.ref_name }}", "run_id": "${{ github.run_id }}", "should_release": "${{ env.SHOULD_RELEASE || false}}", "version": "${{ env.VERSION }}", diff --git a/.gitignore b/.gitignore index e91a8bc42..28b8b76ce 100644 --- a/.gitignore +++ b/.gitignore @@ -11,8 +11,8 @@ main.app/ .DS_Store swiftnav_console/console_resources.py *.*~ -py39-dist/ -py39/ +py311-dist/ +py311/ dist/ console_backend/tests/fileout.json get-pip.py diff --git a/Makefile.toml b/Makefile.toml index b2dc969a2..de2cf3694 100644 --- a/Makefile.toml +++ b/Makefile.toml @@ -34,45 +34,45 @@ set_env NUM_PROCESSES "${num_processes}" # set os specific env vars set_env OS "${os}" if eq ${os} windows - set_env STANDALONE_PY_URL "${PY_BASE_URL}/cpython-3.9.7-x86_64-pc-windows-msvc-shared-pgo-20211017T1616.tar.zst" - set_env PYTHON "${WORKSPACE}\\py39\\python.exe" - set_env DIST_PYTHON "${WORKSPACE}\\py39-dist\\python" - set_env DIST_PYSIDE6_RCC "${WORKSPACE}\\py39-dist\\Lib\\site-packages\\PySide6\\rcc.exe" - set_env PYSIDE6_RCC "${WORKSPACE}\\py39\\Lib\\site-packages\\PySide6\\rcc.exe" - set_env BACKEND_WHEEL console_backend-0.1.0-cp39-cp39-win_amd64.whl + set_env STANDALONE_PY_URL "${PY_BASE_URL}/cpython-3.11.1+20230116-x86_64-pc-windows-msvc-shared-pgo-full.tar.zst" + set_env PYTHON "${WORKSPACE}\\py311\\python.exe" + set_env DIST_PYTHON "${WORKSPACE}\\py311-dist\\python" + set_env DIST_PYSIDE6_RCC "${WORKSPACE}\\py311-dist\\Lib\\site-packages\\PySide6\\rcc.exe" + set_env PYSIDE6_RCC "${WORKSPACE}\\py311\\Lib\\site-packages\\PySide6\\rcc.exe" + set_env BACKEND_WHEEL console_backend-0.1.0-cp311-cp311-win_amd64.whl set_env BUILD_TRIPLET "x86_64-pc-windows-msvc" set_env PYO3_CONFIG_FILE "${WORKSPACE}\\standalone-py\\pyo3_config.txt" set_env CONSOLE_PYO3_CONFIG_FILE "${WORKSPACE}\\standalone-py\\pyo3_config.txt" set_env PYSIDE_WHEEL fill_me_in.whl set_env SHIBOKEN_WHEEL fill_me_in.whl elseif eq ${os} linux - set_env STANDALONE_PY_URL "${SWFT_PY_BASE_URL}/cpython-3.9.10-x86_64-unknown-linux-gnu-pgo+lto-20220203T2103.tar.zst" - set_env PYTHON "${WORKSPACE}/py39/bin/python3" - set_env DIST_PYTHON "${WORKSPACE}/py39-dist/bin/python3" - set_env DIST_PYSIDE6_RCC "${WORKSPACE}/py39-dist/bin/pyside6-rcc" - set_env PYSIDE6_RCC "${WORKSPACE}/py39/bin/pyside6-rcc" - set_env BACKEND_WHEEL console_backend-0.1.0-cp39-cp39-linux_x86_64.whl + set_env STANDALONE_PY_URL 
"${PY_BASE_URL}/cpython-3.11.1+20230116-x86_64-unknown-linux-gnu-pgo+lto-full.tar.zst" + set_env PYTHON "${WORKSPACE}/py311/bin/python3" + set_env DIST_PYTHON "${WORKSPACE}/py311-dist/bin/python3" + set_env DIST_PYSIDE6_RCC "${WORKSPACE}/py311-dist/bin/pyside6-rcc" + set_env PYSIDE6_RCC "${WORKSPACE}/py311/bin/pyside6-rcc" + set_env BACKEND_WHEEL console_backend-0.1.0-cp311-cp311-linux_x86_64.whl set_env PYO3_CONFIG_FILE "${WORKSPACE}/standalone-py/pyo3_config.txt" set_env CONSOLE_PYO3_CONFIG_FILE "${WORKSPACE}/standalone-py/pyo3_config_console.txt" output = exec --fail-on-error gcc -dumpmachine triplet = trim ${output.stdout} set_env BUILD_TRIPLET ${triplet} - set_env PYSIDE_WHEEL PySide6-\${qt_version}-\${qt_version}-cp39-cp39-linux_x86_64.whl - set_env SHIBOKEN_WHEEL shiboken6-\${qt_version}-\${qt_version}-cp39-cp39-linux_x86_64.whl + set_env PYSIDE_WHEEL PySide6-\${qt_version}-\${qt_version}-cp311-cp311-linux_x86_64.whl + set_env SHIBOKEN_WHEEL shiboken6-\${qt_version}-\${qt_version}-cp311-cp311-linux_x86_64.whl else - set_env STANDALONE_PY_URL "${PY_BASE_URL}/cpython-3.9.7-x86_64-apple-darwin-pgo+lto-20211017T1616.tar.zst" - set_env PYTHON "${WORKSPACE}/py39/bin/python3" - set_env DIST_PYTHON "${WORKSPACE}/py39-dist/bin/python3" - set_env DIST_PYSIDE6_RCC "${WORKSPACE}/py39-dist/bin/pyside6-rcc" - set_env PYSIDE6_RCC "${WORKSPACE}/py39/bin/pyside6-rcc" - set_env BACKEND_WHEEL console_backend-0.1.0-cp39-cp39-macosx_10_15_x86_64.whl + set_env STANDALONE_PY_URL "${PY_BASE_URL}/cpython-3.11.1+20230116-x86_64-apple-darwin-pgo+lto-full.tar.zst" + set_env PYTHON "${WORKSPACE}/py311/bin/python3" + set_env DIST_PYTHON "${WORKSPACE}/py311-dist/bin/python3" + set_env DIST_PYSIDE6_RCC "${WORKSPACE}/py311-dist/bin/pyside6-rcc" + set_env PYSIDE6_RCC "${WORKSPACE}/py311/bin/pyside6-rcc" + set_env BACKEND_WHEEL console_backend-0.1.0-cp311-cp311-macosx_10_15_x86_64.whl set_env PYO3_CONFIG_FILE "${WORKSPACE}/standalone-py/pyo3_config.txt" set_env CONSOLE_PYO3_CONFIG_FILE "${WORKSPACE}/standalone-py/pyo3_config_console.txt" output = exec --fail-on-error gcc -dumpmachine triplet = trim ${output.stdout} set_env BUILD_TRIPLET ${triplet} - set_env PYSIDE_WHEEL PySide6-\${qt_version}-\${qt_version}-cp39-cp39-macosx_12_x86_64.whl - set_env SHIBOKEN_WHEEL shiboken6-\${qt_version}-\${qt_version}-cp39-cp39-macosx_12_x86_64.whl + set_env PYSIDE_WHEEL PySide6-\${qt_version}-\${qt_version}-cp311-cp311-macosx_12_x86_64.whl + set_env SHIBOKEN_WHEEL shiboken6-\${qt_version}-\${qt_version}-cp311-cp311-macosx_12_x86_64.whl end ''', ] @@ -86,11 +86,10 @@ skip_crate_env_info = true [env] WORKSPACE = "${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}" -PY_BASE_URL = "https://github.com/indygreg/python-build-standalone/releases/download/20211017" -SWFT_PY_BASE_URL = "https://github.com/swift-nav/python-build-standalone/releases/download/20220205%2Bswift" +PY_BASE_URL = "https://github.com/indygreg/python-build-standalone/releases/download/20230116" MACOSX_DEPLOYMENT_TARGET = "10.15" APP_NAME = "swift-console" -QT_VERSION = "6.3.2" # Also needs to be updated in .github/workflows/main.yml +QT_VERSION = "6.4.3" # Also needs to be updated in .github/workflows/main.yml [tasks.init] @@ -158,7 +157,7 @@ exec --fail-on-error ${python} -m py2many --rust=1 /tmp/piksi_tools_constants.py [tasks.check-python-exists] script_runner = "@duckscript" script = ''' -if not is_path_exists py39 +if not is_path_exists py311 echo "'cargo make setup-builder' needs to be run first." 
end ''' @@ -344,7 +343,7 @@ script_runner = "@duckscript" cwd = "standalone-py" condition = { files_not_exist = ["${CONSOLE_PYO3_CONFIG_FILE}"] } script = ''' -writefile pyo3_config_console.txt "implementation=CPython\nversion=3.9\nshared=true\nabi3=false\nlib_name=python3.9\nlib_dir=${WORKSPACE}/standalone-py/python/install/lib\nexecutable=${WORKSPACE}/standalone-py/python/install/bin/python3.9\npointer_width=64\nbuild_flags=WITH_THREAD\nsuppress_build_script_link_lines=false\nextra_build_script_line=cargo:rustc-link-lib=python3.9\nextra_build_script_line=cargo:rustc-link-search=${WORKSPACE}/standalone-py/python/install/lib\n" +writefile pyo3_config_console.txt "implementation=CPython\nversion=3.11\nshared=true\nabi3=false\nlib_name=python3.11\nlib_dir=${WORKSPACE}/standalone-py/python/install/lib\nexecutable=${WORKSPACE}/standalone-py/python/install/bin/python3.11\npointer_width=64\nbuild_flags=WITH_THREAD\nsuppress_build_script_link_lines=false\nextra_build_script_line=cargo:rustc-link-lib=python3.11\nextra_build_script_line=cargo:rustc-link-search=${WORKSPACE}/standalone-py/python/install/lib\n" ''' [tasks.write-pyo3-config-console.windows] @@ -356,7 +355,7 @@ script_runner = "@duckscript" cwd = "standalone-py" condition = { files_not_exist = ["${PYO3_CONFIG_FILE}"] } script = ''' -writefile pyo3_config.txt "implementation=CPython\nversion=3.9\nshared=true\nabi3=false\nlib_name=python3.9\nlib_dir=${WORKSPACE}/standalone-py/python/install/lib\nexecutable=${WORKSPACE}/standalone-py/python/install/bin/python3.9\npointer_width=64\nbuild_flags=WITH_THREAD\nsuppress_build_script_link_lines=false\nextra_build_script_line=\n" +writefile pyo3_config.txt "implementation=CPython\nversion=3.11\nshared=true\nabi3=false\nlib_name=python3.11\nlib_dir=${WORKSPACE}/standalone-py/python/install/lib\nexecutable=${WORKSPACE}/standalone-py/python/install/bin/python3.11\npointer_width=64\nbuild_flags=WITH_THREAD\nsuppress_build_script_link_lines=false\nextra_build_script_line=\n" ''' [tasks.write-pyo3-config.windows] @@ -364,7 +363,7 @@ script_runner = "@duckscript" cwd = "standalone-py" condition = { files_not_exist = ["${PYO3_CONFIG_FILE}"] } script = ''' -writefile pyo3_config.txt "implementation=CPython\nversion=3.9\nshared=true\nabi3=false\nlib_name=python39\nlib_dir=${WORKSPACE}\\standalone-py\\python\\install\\libs\nexecutable=${WORKSPACE}\\standalone-py\\python\\install\\python.exe\npointer_width=64\nbuild_flags=WITH_THREAD\nsuppress_build_script_link_lines=false\nextra_build_script_line=\n" +writefile pyo3_config.txt "implementation=CPython\nversion=3.11\nshared=true\nabi3=false\nlib_name=python311\nlib_dir=${WORKSPACE}\\standalone-py\\python\\install\\libs\nexecutable=${WORKSPACE}\\standalone-py\\python\\install\\python.exe\npointer_width=64\nbuild_flags=WITH_THREAD\nsuppress_build_script_link_lines=false\nextra_build_script_line=\n" ''' [tasks.get-standalone-py] @@ -373,9 +372,9 @@ cwd = "standalone-py" condition = { files_not_exist = ["${WORKSPACE}/standalone-py/python"] } script_runner = "@duckscript" script = ''' -wget -O py39.tar.zst ${STANDALONE_PY_URL} -exec --fail-on-error zstd -f -d py39.tar.zst -exec --fail-on-error tar -xf py39.tar +wget -O py311.tar.zst ${STANDALONE_PY_URL} +exec --fail-on-error zstd -f -d py311.tar.zst +exec --fail-on-error tar -xf py311.tar ''' [tasks.get-qt-version] @@ -397,12 +396,12 @@ script = ''' dependencies = ["get-standalone-py"] script_runner = "@duckscript" script = ''' -if is_path_exists py39 - rm -r py39 +if is_path_exists py311 + rm -r py311 end -mkdir 
py39 +mkdir py311 cd standalone-py/python/install -cp . ../../../py39 +cp . ../../../py311 cd ../../../ exec --fail-on-error ${PYTHON} ./get-pip.py exec --fail-on-error ${PYTHON} -m pip install wheel flit . ".[test]" ".[ssh-tunnel]" @@ -422,12 +421,12 @@ script_runner = "@duckscript" script = ''' cm_run_task copy-capnp cm_run_task store-version -if is_path_exists py39-dist - rm -r py39-dist +if is_path_exists py311-dist + rm -r py311-dist end -mkdir py39-dist +mkdir py311-dist cd standalone-py/python/install/ -cp . ../../../py39-dist +cp . ../../../py311-dist cd ../../.. exec --fail-on-error ${DIST_PYTHON} ./get-pip.py @@ -522,38 +521,38 @@ dependencies = ["build-console"] script_runner = "@duckscript" script = ''' app_name = get_env APP_NAME -cp target/release/${app_name} py39-dist/${app_name} +cp target/release/${app_name} py311-dist/${app_name} os = os_family if eq ${os} mac - exec --fail-on-error install_name_tool -change /install/lib/libpython3.9.dylib @rpath/libpython3.9.dylib py39-dist/${app_name} - exec --fail-on-error install_name_tool -add_rpath @executable_path/../Resources/lib py39-dist/${app_name} - exec --fail-on-error install_name_tool -add_rpath @executable_path/lib py39-dist/${app_name} + exec --fail-on-error install_name_tool -change /install/lib/libpython3.11.dylib @rpath/libpython3.11.dylib py311-dist/${app_name} + exec --fail-on-error install_name_tool -add_rpath @executable_path/../Resources/lib py311-dist/${app_name} + exec --fail-on-error install_name_tool -add_rpath @executable_path/lib py311-dist/${app_name} - cd py39-dist + cd py311-dist echo "START OF QTWEBENGINE TRANSLATIONS DISTRIBUTION" - mkdir ./lib/python3.9/site-packages/PySide6/Qt/translations - mv ./lib/python3.9/site-packages/PySide6/Qt/lib/QtWebEngineCore.framework/Resources/qtwebengine_locales ./lib/python3.9/site-packages/PySide6/Qt/translations + mkdir ./lib/python3.11/site-packages/PySide6/Qt/translations + mv ./lib/python3.11/site-packages/PySide6/Qt/lib/QtWebEngineCore.framework/Resources/qtwebengine_locales ./lib/python3.11/site-packages/PySide6/Qt/translations echo "START OF QTWEBENGINE RESOURCE DISTRIBUTION" - mkdir ./lib/python3.9/site-packages/PySide6/Qt/resources - web_resources = glob_array ./lib/python3.9/site-packages/PySide6/Qt/lib/QtWebEngineCore.framework/Resources/* + mkdir ./lib/python3.11/site-packages/PySide6/Qt/resources + web_resources = glob_array ./lib/python3.11/site-packages/PySide6/Qt/lib/QtWebEngineCore.framework/Resources/* for r in ${web_resources} - mv ${r} ./lib/python3.9/site-packages/PySide6/Qt/resources + mv ${r} ./lib/python3.11/site-packages/PySide6/Qt/resources end elseif eq ${os} linux - cp target/release/windowpos py39-dist/windowpos + cp target/release/windowpos py311-dist/windowpos end ''' [tasks.build-dist-install-console.windows] script = ''' -cp target/release/${APP_NAME}.exe py39-dist/${APP_NAME}.exe -cp target/release/${APP_NAME}.d py39-dist/${APP_NAME}.d +cp target/release/${APP_NAME}.exe py311-dist/${APP_NAME}.exe +cp target/release/${APP_NAME}.d py311-dist/${APP_NAME}.d if is_path_exists target/release/swift_console.pdb - cp target/release/swift_console.pdb py39-dist/swift_console.pdb + cp target/release/swift_console.pdb py311-dist/swift_console.pdb end -cp target/release/console_backend.pdb py39-dist/Lib/site-packages/console_backend/console_backend.pdb +cp target/release/console_backend.pdb py311-dist/Lib/site-packages/console_backend/console_backend.pdb ''' [tasks.build-dist-install-frontend-wheel] @@ -572,13 +571,13 @@ args = ["-m", "pip", 
"install", "console_backend/dist/${BACKEND_WHEEL}"] [tasks.build-dist-copy-resources] script_runner = "@duckscript" script = ''' -cp src/main/resources py39-dist/ +cp src/main/resources py311-dist/ ''' [tasks.build-dist-freeze] script_runner = "@duckscript" script = ''' -touch ./py39-dist/.frozen +touch ./py311-dist/.frozen ''' [tasks.pre-build-dist] @@ -654,7 +653,7 @@ cm_run_task build-dist-install-console [tasks.purge-dist] env = { PYTHONDONTWRITEBYTECODE = "1" } -cwd = "py39-dist" +cwd = "py311-dist" script_runner = "@duckscript" script = ''' py_folders = array test tests examples __pycache__ demos turtledemo idlelib lib/tcl8 lib/tcl8.6 lib/tk8.6 shiboken6/docs ensurepip lib2to3 tkinter unittest include @@ -732,10 +731,10 @@ if eq ${os} windows end elseif eq ${os} mac rm -r ./share - rm -r ./lib/python3.9/site-packages/PySide6/Qt/libexec + rm -r ./lib/python3.11/site-packages/PySide6/Qt/libexec bins = array qmllint lupdate lrelease scripts qmlls qmlformat for bin in ${bins} - files = glob_array ./lib/python3.9/site-packages/PySide6/${bin} + files = glob_array ./lib/python3.11/site-packages/PySide6/${bin} for bin in ${files} rm -r ${bin} end @@ -749,16 +748,16 @@ elseif eq ${os} mac # See: https://stackoverflow.com/a/29271922/12265938 echo "REMOVING EXCESS RESOURCES" - resources_a = glob_array ./lib/python3.9/site-packages/PySide6/Qt/lib/*.framework/Versions/A/Resources - resources_b = glob_array ./lib/python3.9/site-packages/PySide6/Qt/lib/*.framework/Resources + resources_a = glob_array ./lib/python3.11/site-packages/PySide6/Qt/lib/*.framework/Versions/A/Resources + resources_b = glob_array ./lib/python3.11/site-packages/PySide6/Qt/lib/*.framework/Resources resources = array_concat ${resources_a} ${resources_b} for dir in ${resources} rm -r ${dir} end # remove helper directory since this exists relatively at - # ./lib/python3.9/site-packages/PySide6/Qt/lib/QtWebEngineCore.framework/Versions/A/Helpers - rm -r ./lib/python3.9/site-packages/PySide6/Qt/lib/QtWebEngineCore.framework/Helpers + # ./lib/python3.11/site-packages/PySide6/Qt/lib/QtWebEngineCore.framework/Versions/A/Helpers + rm -r ./lib/python3.11/site-packages/PySide6/Qt/lib/QtWebEngineCore.framework/Helpers else rm -r ./share @@ -797,14 +796,14 @@ else endif if eq ${os} mac - rm -r lib/python3.9/site-packages/PySide6/Designer.app/ - rm -r lib/python3.9/site-packages/PySide6/Assistant.app/ - rm -r lib/python3.9/site-packages/PySide6/Linguist.app/ + rm -r lib/python3.11/site-packages/PySide6/Designer.app/ + rm -r lib/python3.11/site-packages/PySide6/Assistant.app/ + rm -r lib/python3.11/site-packages/PySide6/Linguist.app/ - rm lib/python3.9/lib-dynload/_testcapi.cpython-39-darwin.so - rm lib/python3.9/lib-dynload/xxlimited.cpython-39-darwin.so + rm lib/python3.11/lib-dynload/_testcapi.cpython-311-darwin.so + rm lib/python3.11/lib-dynload/xxlimited.cpython-311-darwin.so - rm -r lib/python3.9/venv/ + rm -r lib/python3.11/venv/ rm -r lib/pkgconfig/ rm -r lib/thread2.8.5/ @@ -827,7 +826,7 @@ end ''' [tasks.strip-dist-pdb.windows] -cwd = "py39-dist" +cwd = "py311-dist" script_runner = "@duckscript" script = ''' files = glob_array **/*.pdb @@ -841,8 +840,8 @@ end script_runner = "@duckscript" script = ''' app_name = get_env APP_NAME -exec --fail-on-error strip ./py39-dist/${app_name} -rm -r ./py39-dist/bin +exec --fail-on-error strip ./py311-dist/${app_name} +rm -r ./py311-dist/bin ''' [tasks.strip-dist.windows] @@ -850,9 +849,9 @@ script_runner = "@duckscript" script = ''' cm_run_task strip-dist-pdb app_name = get_env APP_NAME -exec 
--fail-on-error strip.exe ./py39-dist/${app_name}.exe -rm ./py39-dist/python.exe -rm ./py39-dist/pythonw.exe +exec --fail-on-error strip.exe ./py311-dist/${app_name}.exe +rm ./py311-dist/python.exe +rm ./py311-dist/pythonw.exe ''' [tasks.compress-dist] @@ -864,9 +863,9 @@ date = trim ${output.stdout} output = exec --fail-on-error git describe --always --tags --dirty version = trim ${output.stdout} output_name = set "${ARCHIVE_NAME}-${version}-${BUILD_TRIPLET}-${date}" -exec --fail-on-error tar -C "py39-dist" -cvf "${output_name}-debug.tar" . +exec --fail-on-error tar -C "py311-dist" -cvf "${output_name}-debug.tar" . cm_run_task strip-dist -exec --fail-on-error tar -C "py39-dist" -cvf "${output_name}.tar" . +exec --fail-on-error tar -C "py311-dist" -cvf "${output_name}.tar" . exec --fail-on-error rm -f "${output_name}.tar.xz" exec --fail-on-error xz -T 0 -e -9 "${output_name}-debug.tar" exec --fail-on-error xz -T 0 -e -9 "${output_name}.tar" @@ -927,7 +926,7 @@ end mkdir ${contents_dir} mkdir ${contents_mac_os} -exec --fail-on-error cp -r py39-dist "${contents_resources_dir}" +exec --fail-on-error cp -r py311-dist "${contents_resources_dir}" exec --fail-on-error mv ./${contents_resources_dir}/${app_original_name} "./${contents_mac_os}/${app_file_prefix}" cp ./${info_plist_path} ./${contents_dir}/Info.plist exec --fail-on-error sed -i "" -e "s/@@VERSION@@/${version}/g" "${contents_dir}/Info.plist" @@ -999,7 +998,7 @@ if [ -d $TMP_DIR ]; then fi mkdir -p "$TMP_DIR/$OPT_DIR" -cp -r py39-dist/ "$TMP_DIR/$OPT_DIR/$APP_DIR_NAME/" +cp -r py311-dist/ "$TMP_DIR/$OPT_DIR/$APP_DIR_NAME/" mkdir -p "$TMP_DIR/$USR_DIR/$APPS_DIR" cp "installers/Linux/$APP_DIR_NAME.desktop" "$TMP_DIR/$USR_DIR/$APPS_DIR/" @@ -1273,42 +1272,42 @@ args = ["utils/bench_runner.py", "--disk_usage"] script_runner = "@shell" script = ''' echo 'This is only used to validate locally.' -cd console_backend/tests && python ../../utils/bench_runner.py --frontend_cpu --executable=${WORKSPACE}/py39-dist/${APP_NAME} +cd console_backend/tests && python ../../utils/bench_runner.py --frontend_cpu --executable=${WORKSPACE}/py311-dist/${APP_NAME} ''' [tasks.frontend-cpu-bench.windows] script_runner = "@shell" script = ''' echo 'This is only used to validate locally.' -cd console_backend/tests && python ../../utils/bench_runner.py --frontend_cpu --executable=${WORKSPACE}/py39-dist/${APP_NAME}.exe +cd console_backend/tests && python ../../utils/bench_runner.py --frontend_cpu --executable=${WORKSPACE}/py311-dist/${APP_NAME}.exe ''' [tasks.frontend-cpu-bench.mac] script_runner = "@shell" script = ''' echo 'This is only used to validate locally.' -cd console_backend/tests && python ../../utils/bench_runner.py --frontend_cpu --executable=${WORKSPACE}/py39-dist/${APP_NAME} +cd console_backend/tests && python ../../utils/bench_runner.py --frontend_cpu --executable=${WORKSPACE}/py311-dist/${APP_NAME} ''' [tasks.frontend-mem-bench.linux] script_runner = "@shell" script = ''' echo 'This is only used to validate locally.' -cd console_backend/tests && python ../../utils/bench_runner.py --frontend_mem --executable=${WORKSPACE}/py39-dist/${APP_NAME} +cd console_backend/tests && python ../../utils/bench_runner.py --frontend_mem --executable=${WORKSPACE}/py311-dist/${APP_NAME} ''' [tasks.frontend-mem-bench.windows] script_runner = "@shell" script = ''' echo 'This is only used to validate locally.' 
-cd console_backend/tests && python ../../utils/bench_runner.py --frontend_mem --executable=${WORKSPACE}/py39-dist/${APP_NAME}.exe +cd console_backend/tests && python ../../utils/bench_runner.py --frontend_mem --executable=${WORKSPACE}/py311-dist/${APP_NAME}.exe ''' [tasks.frontend-mem-bench.mac] script_runner = "@shell" script = ''' echo 'This is only used to validate locally.' -cd console_backend/tests && python ../../utils/bench_runner.py --frontend_mem --executable=${WORKSPACE}/py39-dist/${APP_NAME} +cd console_backend/tests && python ../../utils/bench_runner.py --frontend_mem --executable=${WORKSPACE}/py311-dist/${APP_NAME} ''' [tasks.newline-terminator] diff --git a/README.debug.md b/README.debug.md index 60badec94..b50a82d52 100644 --- a/README.debug.md +++ b/README.debug.md @@ -79,7 +79,7 @@ rust backend: ``` cargo make prep-debug-pyside cargo make build-dist -py39-dist/bin/python3 -m swiftnav_console.main --read-capnp-recording console_backend/tests/data/console-capnp-20220419-033358.pickle +py311-dist/bin/python3 -m swiftnav_console.main --read-capnp-recording console_backend/tests/data/console-capnp-20220419-033358.pickle ``` ## Debugging an intermittent crash on startup diff --git a/README.md b/README.md index a4486be12..b99e1ff46 100644 --- a/README.md +++ b/README.md @@ -295,7 +295,7 @@ We're using Qt 6 via PySide6 (the official Python bindings for Qt). QML (QtQuick Mark-up Language) is used to model the UI. -### Python 3.9 Standalone Build +### Python 3.11 Standalone Build [python-build-standalone](https://github.com/indygreg/python-build-standalone) provides redistributable builds of Python, these builds are designed to function in a variety diff --git a/console_backend/setup.py b/console_backend/setup.py index 480b1a731..449913d73 100644 --- a/console_backend/setup.py +++ b/console_backend/setup.py @@ -26,7 +26,7 @@ def get_py_version_cfgs(): # For now each Cfg Py_3_X flag is interpreted as "at least 3.X" version = sys.version_info[0:2] - py3_min = 9 + py3_min = 11 out_cfg = [] for minor in range(py3_min, version[1] + 1): out_cfg.append(f"--cfg=Py_3_{minor}") diff --git a/entrypoint/src/main.rs b/entrypoint/src/main.rs index 4a4ca740f..d3ca19c9d 100644 --- a/entrypoint/src/main.rs +++ b/entrypoint/src/main.rs @@ -92,11 +92,11 @@ fn pythonhome_dir() -> Result { fn webengine_dir() -> Result { let app_dir = pythonhome_dir()?; if cfg!(target_os = "macos") { - Ok(app_dir.join("lib/python3.9/site-packages/PySide6/Qt/lib/QtWebEngineCore.framework/Versions/A/Helpers/QtWebEngineProcess.app/Contents/MacOS/QtWebEngineProcess")) + Ok(app_dir.join("lib/python3.11/site-packages/PySide6/Qt/lib/QtWebEngineCore.framework/Versions/A/Helpers/QtWebEngineProcess.app/Contents/MacOS/QtWebEngineProcess")) } else if cfg!(target_os = "windows") { Ok(app_dir.join("Lib/site-packages/PySide6/QtWebEngineProcess.exe")) } else { - Ok(app_dir.join("lib/python3.9/site-packages/PySide6/Qt/libexec/QtWebEngineProcess")) + Ok(app_dir.join("lib/python3.11/site-packages/PySide6/Qt/libexec/QtWebEngineProcess")) } } diff --git a/installers/Windows/Installer.nsi b/installers/Windows/Installer.nsi index c115f9150..c15cca0d5 100644 --- a/installers/Windows/Installer.nsi +++ b/installers/Windows/Installer.nsi @@ -24,7 +24,7 @@ ManifestDPIAware true !define app_name "Swift Console" !define app_executable "swift-console.exe" !define outfile_prefix "swift-console" -!define installer_dir "py39-dist" +!define installer_dir "py311-dist" !define company_name "Swift Navigation" !define old_shortcut "${app_name} 
(Old).lnk" diff --git a/standalone-py/.gitignore b/standalone-py/.gitignore index f08d02ae0..f2099bc63 100644 --- a/standalone-py/.gitignore +++ b/standalone-py/.gitignore @@ -1,4 +1,4 @@ *.*~ pyo3_config*.txt -py39.* +py311.* python/ diff --git a/swiftnav_console/main.py b/swiftnav_console/main.py index 8c9bb0a0d..91aeba57b 100644 --- a/swiftnav_console/main.py +++ b/swiftnav_console/main.py @@ -347,225 +347,227 @@ def _process_message_buffer(self, buffer): return False Message = self._messages.Message with Message.from_bytes(buffer) as m: - if m.which == Message.Union.Status: - app_state = ConnectionState(m.status.text) - if app_state == ConnectionState.CLOSED: - return False - if app_state == ConnectionState.DISCONNECTED: + match m.which: + case Message.Union.Status: + app_state = ConnectionState(m.status.text) + if app_state == ConnectionState.CLOSED: + return False + if app_state == ConnectionState.DISCONNECTED: + data = settings_table_update() + SettingsTableEntries.post_data_update(data) + ConnectionData.post_connection_state_update(app_state) + if MAP_ENABLED[0]: + SolutionMap.clear() + case Message.Union.ConnectionNotification: + data = m.connectionNotification.message + ConnectionData.post_connection_message_update(data) + case Message.Union.SolutionPositionStatus: + data = solution_position_update() + data[Keys.POINTS][:] = m.solutionPositionStatus.data + data[Keys.CUR_POINTS][:] = m.solutionPositionStatus.curData + data[Keys.LAT_MAX] = m.solutionPositionStatus.latMax + data[Keys.LAT_MIN] = m.solutionPositionStatus.latMin + data[Keys.LON_MAX] = m.solutionPositionStatus.lonMax + data[Keys.LON_MIN] = m.solutionPositionStatus.lonMin + data[Keys.AVAILABLE_UNITS][:] = m.solutionPositionStatus.availableUnits + data[Keys.SOLUTION_LINE] = m.solutionPositionStatus.lineData + + if MAP_ENABLED[0]: + SolutionMap.send_pos(m.solutionPositionStatus) + SolutionPositionPoints.post_data_update(data) + case Message.Union.SolutionProtectionLevel: + if MAP_ENABLED[0]: + SolutionMap.send_prot_lvl(m.solutionProtectionLevel) + case Message.Union.SolutionTableStatus: + data = solution_table_update() + data[Keys.ENTRIES][:] = [[entry.key, entry.val] for entry in m.solutionTableStatus.data] + SolutionTableEntries.post_data_update(data) + case Message.Union.SolutionVelocityStatus: + data = solution_velocity_update() + data[Keys.COLORS][:] = m.solutionVelocityStatus.colors + data[Keys.POINTS][:] = m.solutionVelocityStatus.data + data[Keys.MAX] = m.solutionVelocityStatus.max + data[Keys.MIN] = m.solutionVelocityStatus.min + data[Keys.AVAILABLE_UNITS][:] = m.solutionVelocityStatus.availableUnits + SolutionVelocityPoints.post_data_update(data) + case Message.Union.BaselinePlotStatus: + data = baseline_plot_update() + data[Keys.POINTS][:] = m.baselinePlotStatus.data + data[Keys.CUR_POINTS][:] = m.baselinePlotStatus.curData + data[Keys.N_MAX] = m.baselinePlotStatus.nMax + data[Keys.N_MIN] = m.baselinePlotStatus.nMin + data[Keys.E_MAX] = m.baselinePlotStatus.eMax + data[Keys.E_MIN] = m.baselinePlotStatus.eMin + BaselinePlotPoints.post_data_update(data) + case Message.Union.BaselineTableStatus: + data = baseline_table_update() + data[Keys.ENTRIES][:] = [[entry.key, entry.val] for entry in m.baselineTableStatus.data] + BaselineTableEntries.post_data_update(data) + case Message.Union.AdvancedImuStatus: + advanced_imu_tab = advanced_imu_tab_update() + advanced_imu_tab[Keys.FIELDS_DATA][:] = m.advancedImuStatus.fieldsData + advanced_imu_tab[Keys.POINTS][:] = m.advancedImuStatus.data + 
AdvancedImuPoints.post_data_update(advanced_imu_tab) + case Message.Union.AdvancedSpectrumAnalyzerStatus: + data = advanced_spectrum_analyzer_tab_update() + data[Keys.CHANNEL] = m.advancedSpectrumAnalyzerStatus.channel + data[Keys.POINTS][:] = m.advancedSpectrumAnalyzerStatus.data + data[Keys.YMAX] = m.advancedSpectrumAnalyzerStatus.ymax + data[Keys.YMIN] = m.advancedSpectrumAnalyzerStatus.ymin + data[Keys.XMAX] = m.advancedSpectrumAnalyzerStatus.xmax + data[Keys.XMIN] = m.advancedSpectrumAnalyzerStatus.xmin + AdvancedSpectrumAnalyzerPoints.post_data_update(data) + case Message.Union.AdvancedNetworkingStatus: + data = advanced_networking_tab_update() + data[Keys.RUNNING] = m.advancedNetworkingStatus.running + data[Keys.IP_ADDRESS] = m.advancedNetworkingStatus.ipAddress + data[Keys.PORT] = m.advancedNetworkingStatus.port + data[Keys.NETWORK_INFO][:] = [ + [entry.interfaceName, entry.ipv4Address, entry.running, entry.txUsage, entry.rxUsage] + for entry in m.advancedNetworkingStatus.networkInfo + ] + AdvancedNetworkingData.post_data_update(data) + case Message.Union.AdvancedSystemMonitorStatus: + data = advanced_system_monitor_tab_update() + data[Keys.OBS_LATENCY][:] = [ + [entry.key, entry.val] for entry in m.advancedSystemMonitorStatus.obsLatency + ] + data[Keys.OBS_PERIOD][:] = [[entry.key, entry.val] for entry in m.advancedSystemMonitorStatus.obsPeriod] + data[Keys.THREADS_TABLE][:] = [ + [entry.name, f"{entry.cpu:.1f}", entry.stackFree] + for entry in m.advancedSystemMonitorStatus.threadsTable + ] + data[Keys.ZYNQ_TEMP] = m.advancedSystemMonitorStatus.zynqTemp + data[Keys.FE_TEMP] = m.advancedSystemMonitorStatus.feTemp + AdvancedSystemMonitorData.post_data_update(data) + case Message.Union.AdvancedMagnetometerStatus: + data = advanced_magnetometer_tab_update() + data[Keys.YMAX] = m.advancedMagnetometerStatus.ymax + data[Keys.YMIN] = m.advancedMagnetometerStatus.ymin + data[Keys.POINTS][:] = m.advancedMagnetometerStatus.data + AdvancedMagnetometerPoints.post_data_update(data) + case Message.Union.FusionStatusFlagsStatus: + data = fusion_status_flags_update() + data[Keys.GNSSPOS] = m.fusionStatusFlagsStatus.gnsspos + data[Keys.GNSSVEL] = m.fusionStatusFlagsStatus.gnssvel + data[Keys.WHEELTICKS] = m.fusionStatusFlagsStatus.wheelticks + data[Keys.SPEED] = m.fusionStatusFlagsStatus.speed + data[Keys.NHC] = m.fusionStatusFlagsStatus.nhc + data[Keys.ZEROVEL] = m.fusionStatusFlagsStatus.zerovel + FusionStatusFlagsData.post_data_update(data) + case Message.Union.TrackingSignalsStatus: + data = tracking_signals_tab_update() + data[Keys.CHECK_LABELS][:] = m.trackingSignalsStatus.checkLabels + data[Keys.LABELS][:] = m.trackingSignalsStatus.labels + data[Keys.COLORS][:] = m.trackingSignalsStatus.colors + data[Keys.POINTS][:] = m.trackingSignalsStatus.data + data[Keys.XMIN_OFFSET] = m.trackingSignalsStatus.xminOffset + TrackingSignalsPoints.post_data_update(data) + case Message.Union.TrackingSkyPlotStatus: + data = tracking_sky_plot_update() + data[Keys.SATS][:] = m.trackingSkyPlotStatus.sats + data[Keys.LABELS][:] = [ + list(m.trackingSkyPlotStatus.labels[idx]) for idx in range(len(m.trackingSkyPlotStatus.labels)) + ] + TrackingSkyPlotPoints.post_data_update(data) + case Message.Union.ObservationStatus: + data = observation_update() + data[Keys.TOW] = m.observationStatus.tow + data[Keys.WEEK] = m.observationStatus.week + data[Keys.ROWS][:] = obs_rows_to_dict(m.observationStatus.rows) + if m.observationStatus.isRemote: + ObservationRemoteTableModel.post_data_update(data) + else: + 
ObservationLocalTableModel.post_data_update(data) + case Message.Union.StatusBarStatus: + data = status_bar_update() + data[Keys.POS] = m.statusBarStatus.pos + data[Keys.RTK] = m.statusBarStatus.rtk + data[Keys.SATS] = m.statusBarStatus.sats + data[Keys.CORR_AGE] = m.statusBarStatus.corrAge + data[Keys.INS] = m.statusBarStatus.ins + data[Keys.DATA_RATE] = m.statusBarStatus.dataRate + data[Keys.SOLID_CONNECTION] = m.statusBarStatus.solidConnection + data[Keys.TITLE] = m.statusBarStatus.title + data[Keys.ANTENNA_STATUS] = m.statusBarStatus.antennaStatus + StatusBarData.post_data_update(data) + case Message.Union.ConnectionStatus: + data = connection_update() + data[Keys.AVAILABLE_PORTS][:] = m.connectionStatus.availablePorts + data[Keys.AVAILABLE_BAUDRATES][:] = m.connectionStatus.availableBaudrates + data[Keys.AVAILABLE_FLOWS][:] = m.connectionStatus.availableFlows + data[Keys.PREVIOUS_HOSTS][:] = m.connectionStatus.previousHosts + data[Keys.PREVIOUS_PORTS][:] = m.connectionStatus.previousPorts + data[Keys.PREVIOUS_FILES][:] = m.connectionStatus.previousFiles + data[Keys.LAST_USED_SERIAL_DEVICE] = ( + m.connectionStatus.lastSerialDevice.port + if m.connectionStatus.lastSerialDevice.which() == "port" + else None + ) + data[Keys.PREVIOUS_SERIAL_CONFIGS][:] = [ + [entry.device, entry.baudrate, entry.flowControl] + for entry in m.connectionStatus.previousSerialConfigs + ] + data[Keys.CONSOLE_VERSION] = m.connectionStatus.consoleVersion + data[Keys.PREVIOUS_CONNECTION_TYPE] = ConnectionType(m.connectionStatus.previousConnectionType) + ConnectionData.post_connection_data_update(data) + case Message.Union.LoggingBarStatus: + data = logging_bar_update() + data[Keys.PREVIOUS_FOLDERS][:] = m.loggingBarStatus.previousFolders + data[Keys.CSV_LOGGING] = m.loggingBarStatus.csvLogging + data[Keys.SBP_LOGGING] = m.loggingBarStatus.sbpLogging + data[Keys.SBP_LOGGING_FORMAT] = m.loggingBarStatus.sbpLoggingFormat + LoggingBarData.post_data_update(data) + case Message.Union.LoggingBarStartRecording: + data = logging_bar_recording_update() + data[Keys.RECORDING_SIZE] = None # reset since name changed => new file + data[Keys.RECORDING_START_TIME] = time.time() + data[Keys.RECORDING_FILENAME] = m.loggingBarStartRecording.name + LoggingBarData.post_recording_data_update(data) + case Message.Union.LoggingBarRecordingSize: + data = logging_bar_recording_update() + data[Keys.RECORDING_SIZE] = m.loggingBarRecordingSize.size + LoggingBarData.post_recording_data_update(data) + case Message.Union.UpdateTabStatus: + data = update_tab_update() + data[Keys.HARDWARE_REVISION] = m.updateTabStatus.hardwareRevision + data[Keys.FW_VERSION_CURRENT] = m.updateTabStatus.fwVersionCurrent + data[Keys.FW_VERSION_LATEST] = m.updateTabStatus.fwVersionLatest + data[Keys.FW_LOCAL_FILENAME] = m.updateTabStatus.fwLocalFilename + data[Keys.DIRECTORY] = m.updateTabStatus.directory + data[Keys.DOWNLOADING] = m.updateTabStatus.downloading + data[Keys.UPGRADING] = m.updateTabStatus.upgrading + data[Keys.FW_TEXT] = m.updateTabStatus.fwText + data[Keys.FILEIO_LOCAL_FILEPATH] = m.updateTabStatus.fileioLocalFilepath + data[Keys.FILEIO_DESTINATION_FILEPATH] = m.updateTabStatus.fileioDestinationFilepath + data[Keys.FW_OUTDATED] = m.updateTabStatus.fwOutdated + data[Keys.FW_V2_OUTDATED] = m.updateTabStatus.fwV2Outdated + data[Keys.SERIAL_PROMPT] = m.updateTabStatus.serialPrompt + data[Keys.CONSOLE_OUTDATED] = m.updateTabStatus.consoleOutdated + data[Keys.CONSOLE_VERSION_CURRENT] = m.updateTabStatus.consoleVersionCurrent + 
data[Keys.CONSOLE_VERSION_LATEST] = m.updateTabStatus.consoleVersionLatest + UpdateTabData.post_data_update(data) + case Message.Union.LogAppend: + data = log_panel_update() + data[Keys.ENTRIES] += [entry.line for entry in m.logAppend.entries] + data[Keys.LOG_LEVEL] = m.logAppend.logLevel + LogPanelData.post_data_update(data) + case Message.Union.SettingsTableStatus: data = settings_table_update() + data[Keys.ENTRIES][:] = settings_rows_to_dict(m.settingsTableStatus.data) SettingsTableEntries.post_data_update(data) - ConnectionData.post_connection_state_update(app_state) - if MAP_ENABLED[0]: - SolutionMap.clear() - elif m.which == Message.Union.ConnectionNotification: - data = m.connectionNotification.message - ConnectionData.post_connection_message_update(data) - elif m.which == Message.Union.SolutionPositionStatus: - data = solution_position_update() - data[Keys.POINTS][:] = m.solutionPositionStatus.data - data[Keys.CUR_POINTS][:] = m.solutionPositionStatus.curData - data[Keys.LAT_MAX] = m.solutionPositionStatus.latMax - data[Keys.LAT_MIN] = m.solutionPositionStatus.latMin - data[Keys.LON_MAX] = m.solutionPositionStatus.lonMax - data[Keys.LON_MIN] = m.solutionPositionStatus.lonMin - data[Keys.AVAILABLE_UNITS][:] = m.solutionPositionStatus.availableUnits - data[Keys.SOLUTION_LINE] = m.solutionPositionStatus.lineData - - if MAP_ENABLED[0]: - SolutionMap.send_pos(m.solutionPositionStatus) - SolutionPositionPoints.post_data_update(data) - elif m.which == Message.Union.SolutionProtectionLevel and MAP_ENABLED[0]: - SolutionMap.send_prot_lvl(m.solutionProtectionLevel) - elif m.which == Message.Union.SolutionTableStatus: - data = solution_table_update() - data[Keys.ENTRIES][:] = [[entry.key, entry.val] for entry in m.solutionTableStatus.data] - SolutionTableEntries.post_data_update(data) - elif m.which == Message.Union.SolutionVelocityStatus: - data = solution_velocity_update() - data[Keys.COLORS][:] = m.solutionVelocityStatus.colors - data[Keys.POINTS][:] = m.solutionVelocityStatus.data - data[Keys.MAX] = m.solutionVelocityStatus.max - data[Keys.MIN] = m.solutionVelocityStatus.min - data[Keys.AVAILABLE_UNITS][:] = m.solutionVelocityStatus.availableUnits - SolutionVelocityPoints.post_data_update(data) - elif m.which == Message.Union.BaselinePlotStatus: - data = baseline_plot_update() - data[Keys.POINTS][:] = m.baselinePlotStatus.data - data[Keys.CUR_POINTS][:] = m.baselinePlotStatus.curData - data[Keys.N_MAX] = m.baselinePlotStatus.nMax - data[Keys.N_MIN] = m.baselinePlotStatus.nMin - data[Keys.E_MAX] = m.baselinePlotStatus.eMax - data[Keys.E_MIN] = m.baselinePlotStatus.eMin - BaselinePlotPoints.post_data_update(data) - elif m.which == Message.Union.BaselineTableStatus: - data = baseline_table_update() - data[Keys.ENTRIES][:] = [[entry.key, entry.val] for entry in m.baselineTableStatus.data] - BaselineTableEntries.post_data_update(data) - elif m.which == Message.Union.AdvancedImuStatus: - advanced_imu_tab = advanced_imu_tab_update() - advanced_imu_tab[Keys.FIELDS_DATA][:] = m.advancedImuStatus.fieldsData - advanced_imu_tab[Keys.POINTS][:] = m.advancedImuStatus.data - AdvancedImuPoints.post_data_update(advanced_imu_tab) - elif m.which == Message.Union.AdvancedSpectrumAnalyzerStatus: - data = advanced_spectrum_analyzer_tab_update() - data[Keys.CHANNEL] = m.advancedSpectrumAnalyzerStatus.channel - data[Keys.POINTS][:] = m.advancedSpectrumAnalyzerStatus.data - data[Keys.YMAX] = m.advancedSpectrumAnalyzerStatus.ymax - data[Keys.YMIN] = m.advancedSpectrumAnalyzerStatus.ymin - data[Keys.XMAX] = 
m.advancedSpectrumAnalyzerStatus.xmax - data[Keys.XMIN] = m.advancedSpectrumAnalyzerStatus.xmin - AdvancedSpectrumAnalyzerPoints.post_data_update(data) - elif m.which == Message.Union.AdvancedNetworkingStatus: - data = advanced_networking_tab_update() - data[Keys.RUNNING] = m.advancedNetworkingStatus.running - data[Keys.IP_ADDRESS] = m.advancedNetworkingStatus.ipAddress - data[Keys.PORT] = m.advancedNetworkingStatus.port - data[Keys.NETWORK_INFO][:] = [ - [entry.interfaceName, entry.ipv4Address, entry.running, entry.txUsage, entry.rxUsage] - for entry in m.advancedNetworkingStatus.networkInfo - ] - AdvancedNetworkingData.post_data_update(data) - elif m.which == Message.Union.AdvancedSystemMonitorStatus: - data = advanced_system_monitor_tab_update() - data[Keys.OBS_LATENCY][:] = [ - [entry.key, entry.val] for entry in m.advancedSystemMonitorStatus.obsLatency - ] - data[Keys.OBS_PERIOD][:] = [[entry.key, entry.val] for entry in m.advancedSystemMonitorStatus.obsPeriod] - data[Keys.THREADS_TABLE][:] = [ - [entry.name, f"{entry.cpu:.1f}", entry.stackFree] - for entry in m.advancedSystemMonitorStatus.threadsTable - ] - data[Keys.ZYNQ_TEMP] = m.advancedSystemMonitorStatus.zynqTemp - data[Keys.FE_TEMP] = m.advancedSystemMonitorStatus.feTemp - AdvancedSystemMonitorData.post_data_update(data) - elif m.which == Message.Union.AdvancedMagnetometerStatus: - data = advanced_magnetometer_tab_update() - data[Keys.YMAX] = m.advancedMagnetometerStatus.ymax - data[Keys.YMIN] = m.advancedMagnetometerStatus.ymin - data[Keys.POINTS][:] = m.advancedMagnetometerStatus.data - AdvancedMagnetometerPoints.post_data_update(data) - elif m.which == Message.Union.FusionStatusFlagsStatus: - data = fusion_status_flags_update() - data[Keys.GNSSPOS] = m.fusionStatusFlagsStatus.gnsspos - data[Keys.GNSSVEL] = m.fusionStatusFlagsStatus.gnssvel - data[Keys.WHEELTICKS] = m.fusionStatusFlagsStatus.wheelticks - data[Keys.SPEED] = m.fusionStatusFlagsStatus.speed - data[Keys.NHC] = m.fusionStatusFlagsStatus.nhc - data[Keys.ZEROVEL] = m.fusionStatusFlagsStatus.zerovel - FusionStatusFlagsData.post_data_update(data) - elif m.which == Message.Union.TrackingSignalsStatus: - data = tracking_signals_tab_update() - data[Keys.CHECK_LABELS][:] = m.trackingSignalsStatus.checkLabels - data[Keys.LABELS][:] = m.trackingSignalsStatus.labels - data[Keys.COLORS][:] = m.trackingSignalsStatus.colors - data[Keys.POINTS][:] = m.trackingSignalsStatus.data - data[Keys.XMIN_OFFSET] = m.trackingSignalsStatus.xminOffset - TrackingSignalsPoints.post_data_update(data) - elif m.which == Message.Union.TrackingSkyPlotStatus: - data = tracking_sky_plot_update() - data[Keys.SATS][:] = m.trackingSkyPlotStatus.sats - data[Keys.LABELS][:] = [ - list(m.trackingSkyPlotStatus.labels[idx]) for idx in range(len(m.trackingSkyPlotStatus.labels)) - ] - TrackingSkyPlotPoints.post_data_update(data) - elif m.which == Message.Union.ObservationStatus: - data = observation_update() - data[Keys.TOW] = m.observationStatus.tow - data[Keys.WEEK] = m.observationStatus.week - data[Keys.ROWS][:] = obs_rows_to_dict(m.observationStatus.rows) - if m.observationStatus.isRemote: - ObservationRemoteTableModel.post_data_update(data) - else: - ObservationLocalTableModel.post_data_update(data) - elif m.which == Message.Union.StatusBarStatus: - data = status_bar_update() - data[Keys.POS] = m.statusBarStatus.pos - data[Keys.RTK] = m.statusBarStatus.rtk - data[Keys.SATS] = m.statusBarStatus.sats - data[Keys.CORR_AGE] = m.statusBarStatus.corrAge - data[Keys.INS] = m.statusBarStatus.ins - 
data[Keys.DATA_RATE] = m.statusBarStatus.dataRate - data[Keys.SOLID_CONNECTION] = m.statusBarStatus.solidConnection - data[Keys.TITLE] = m.statusBarStatus.title - data[Keys.ANTENNA_STATUS] = m.statusBarStatus.antennaStatus - StatusBarData.post_data_update(data) - elif m.which == Message.Union.ConnectionStatus: - data = connection_update() - data[Keys.AVAILABLE_PORTS][:] = m.connectionStatus.availablePorts - data[Keys.AVAILABLE_BAUDRATES][:] = m.connectionStatus.availableBaudrates - data[Keys.AVAILABLE_FLOWS][:] = m.connectionStatus.availableFlows - data[Keys.PREVIOUS_HOSTS][:] = m.connectionStatus.previousHosts - data[Keys.PREVIOUS_PORTS][:] = m.connectionStatus.previousPorts - data[Keys.PREVIOUS_FILES][:] = m.connectionStatus.previousFiles - data[Keys.LAST_USED_SERIAL_DEVICE] = ( - m.connectionStatus.lastSerialDevice.port - if m.connectionStatus.lastSerialDevice.which() == "port" - else None - ) - data[Keys.PREVIOUS_SERIAL_CONFIGS][:] = [ - [entry.device, entry.baudrate, entry.flowControl] - for entry in m.connectionStatus.previousSerialConfigs - ] - data[Keys.CONSOLE_VERSION] = m.connectionStatus.consoleVersion - data[Keys.PREVIOUS_CONNECTION_TYPE] = ConnectionType(m.connectionStatus.previousConnectionType) - ConnectionData.post_connection_data_update(data) - elif m.which == Message.Union.LoggingBarStatus: - data = logging_bar_update() - data[Keys.PREVIOUS_FOLDERS][:] = m.loggingBarStatus.previousFolders - data[Keys.CSV_LOGGING] = m.loggingBarStatus.csvLogging - data[Keys.SBP_LOGGING] = m.loggingBarStatus.sbpLogging - data[Keys.SBP_LOGGING_FORMAT] = m.loggingBarStatus.sbpLoggingFormat - LoggingBarData.post_data_update(data) - elif m.which == Message.Union.LoggingBarStartRecording: - data = logging_bar_recording_update() - data[Keys.RECORDING_SIZE] = None # reset since name changed => new file - data[Keys.RECORDING_START_TIME] = time.time() - data[Keys.RECORDING_FILENAME] = m.loggingBarStartRecording.name - LoggingBarData.post_recording_data_update(data) - elif m.which == Message.Union.LoggingBarRecordingSize: - data = logging_bar_recording_update() - data[Keys.RECORDING_SIZE] = m.loggingBarRecordingSize.size - LoggingBarData.post_recording_data_update(data) - elif m.which == Message.Union.UpdateTabStatus: - data = update_tab_update() - data[Keys.HARDWARE_REVISION] = m.updateTabStatus.hardwareRevision - data[Keys.FW_VERSION_CURRENT] = m.updateTabStatus.fwVersionCurrent - data[Keys.FW_VERSION_LATEST] = m.updateTabStatus.fwVersionLatest - data[Keys.FW_LOCAL_FILENAME] = m.updateTabStatus.fwLocalFilename - data[Keys.DIRECTORY] = m.updateTabStatus.directory - data[Keys.DOWNLOADING] = m.updateTabStatus.downloading - data[Keys.UPGRADING] = m.updateTabStatus.upgrading - data[Keys.FW_TEXT] = m.updateTabStatus.fwText - data[Keys.FILEIO_LOCAL_FILEPATH] = m.updateTabStatus.fileioLocalFilepath - data[Keys.FILEIO_DESTINATION_FILEPATH] = m.updateTabStatus.fileioDestinationFilepath - data[Keys.FW_OUTDATED] = m.updateTabStatus.fwOutdated - data[Keys.FW_V2_OUTDATED] = m.updateTabStatus.fwV2Outdated - data[Keys.SERIAL_PROMPT] = m.updateTabStatus.serialPrompt - data[Keys.CONSOLE_OUTDATED] = m.updateTabStatus.consoleOutdated - data[Keys.CONSOLE_VERSION_CURRENT] = m.updateTabStatus.consoleVersionCurrent - data[Keys.CONSOLE_VERSION_LATEST] = m.updateTabStatus.consoleVersionLatest - UpdateTabData.post_data_update(data) - elif m.which == Message.Union.LogAppend: - data = log_panel_update() - data[Keys.ENTRIES] += [entry.line for entry in m.logAppend.entries] - data[Keys.LOG_LEVEL] = m.logAppend.logLevel - 
LogPanelData.post_data_update(data) - elif m.which == Message.Union.SettingsTableStatus: - data = settings_table_update() - data[Keys.ENTRIES][:] = settings_rows_to_dict(m.settingsTableStatus.data) - SettingsTableEntries.post_data_update(data) - elif m.which == Message.Union.SettingsImportResponse: - SettingsTabData.post_import_status_update(m.settingsImportResponse.status) - elif m.which == Message.Union.SettingsNotification: - SettingsTabData.post_notification_update(m.settingsNotification.message) - elif m.which == Message.Union.InsSettingsChangeResponse: - data = settings_ins_update() - data[Keys.RECOMMENDED_INS_SETTINGS][:] = [ - [entry.settingName, entry.currentValue, entry.recommendedValue] - for entry in m.insSettingsChangeResponse.recommendedSettings - ] - data[Keys.NEW_INS_CONFIRMATON] = True - SettingsTabData.post_ins_update(data) + case Message.Union.SettingsImportResponse: + SettingsTabData.post_import_status_update(m.settingsImportResponse.status) + case Message.Union.SettingsNotification: + SettingsTabData.post_notification_update(m.settingsNotification.message) + case Message.Union.InsSettingsChangeResponse: + data = settings_ins_update() + data[Keys.RECOMMENDED_INS_SETTINGS][:] = [ + [entry.settingName, entry.currentValue, entry.recommendedValue] + for entry in m.insSettingsChangeResponse.recommendedSettings + ] + data[Keys.NEW_INS_CONFIRMATON] = True + SettingsTabData.post_ins_update(data) return True diff --git a/utils/debug_intermittent_startup_crash.sh b/utils/debug_intermittent_startup_crash.sh index 2cbaa7240..283d695fc 100755 --- a/utils/debug_intermittent_startup_crash.sh +++ b/utils/debug_intermittent_startup_crash.sh @@ -48,7 +48,7 @@ function uniqueFn () { echo "$uniquefn" } -#if [ ! -d py39-dist ]; then +#if [ ! -d py311-dist ]; then # cargo make build-dist #fi @@ -73,7 +73,7 @@ trap 'exec 2>&4 1>&3; echo Ctrl-C pressed, exiting loop. ; exit' SIGINT SIGTERM echo "Logging to $logfn" -cd ${scriptpath}/../py39-dist +cd ${scriptpath}/../py311-dist # macOS (or any platform using lldb) works differently. # The looping is done within the debugger in a python script. # This is faster, as the debugger doesn't have to shutdown and restart, diff --git a/utils/find_intermittent_startup_crash.ps1 b/utils/find_intermittent_startup_crash.ps1 index e9573dd16..b2004f90a 100644 --- a/utils/find_intermittent_startup_crash.ps1 +++ b/utils/find_intermittent_startup_crash.ps1 @@ -1 +1 @@ -for ($i=0; $i -lt 1000; $i++) { .\py39-dist\python -m swiftnav_console.main --file .\console_backend\tests\data\ins_updates.sbp ; echo $i } +for ($i=0; $i -lt 1000; $i++) { .\py311-dist\python -m swiftnav_console.main --file .\console_backend\tests\data\ins_updates.sbp ; echo $i } diff --git a/utils/symlink-qt-installer-libs-to-pyside6.sh b/utils/symlink-qt-installer-libs-to-pyside6.sh index 45459a390..6b1c06cba 100755 --- a/utils/symlink-qt-installer-libs-to-pyside6.sh +++ b/utils/symlink-qt-installer-libs-to-pyside6.sh @@ -97,7 +97,7 @@ if [[ ! -e "${QTDIR}/lib/libQt6Core.so.6" ]]; then fi # Check if PySide6 is installed -pyside6_qt_lib_dir=$(realpath "${SCRIPT_DIR}/../py39/lib/python3.9/site-packages/PySide6/Qt/lib") +pyside6_qt_lib_dir=$(realpath "${SCRIPT_DIR}/../311/lib/python3.11/site-packages/PySide6/Qt/lib") if [[ ! -e "$pyside6_qt_lib_dir" ]]; then echo >&2 "Could not find $pyside6_qt_lib_dir. aborting." 
exit 1 @@ -105,7 +105,7 @@ fi cd "${QTDIR}/lib" qtdir_libs=$(ls -1 libQt6*.so.6 | sort) -cd "${SCRIPT_DIR}/../py39/lib/python3.9/site-packages/PySide6/Qt/lib" +cd "${SCRIPT_DIR}/../py311/lib/python3.11/site-packages/PySide6/Qt/lib" bkup_dir_suffix="" while [[ -e "original_libs${bkup_dir_suffix}" ]]; do
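The largest functional change above is in `swiftnav_console/main.py`, where the long `if m.which == ... / elif ...` chain is rewritten as a `match` statement. Structural pattern matching requires Python 3.10 or newer, which is one reason the standalone interpreter, wheels, and PyO3 configs all move from CPython 3.9 to 3.11 in this diff. Below is a minimal, self-contained sketch of the same dispatch pattern; the enum members and handler bodies are illustrative placeholders, not the console's actual capnp `Message.Union` schema:

```python
from enum import Enum, auto


class Union(Enum):
    """Hypothetical stand-in for the capnp-generated Message.Union tags."""
    Status = auto()
    LogAppend = auto()
    SettingsTableStatus = auto()


def dispatch(which: Union, payload: dict) -> str:
    # A dotted name in a `case` is a value pattern: the subject is compared
    # against it with `==`, so this behaves like the old if/elif chain.
    match which:
        case Union.Status:
            return f"status changed to {payload.get('text', '')!r}"
        case Union.LogAppend:
            return f"appending {len(payload.get('entries', []))} log entries"
        case Union.SettingsTableStatus:
            return f"settings table has {len(payload.get('rows', []))} rows"
        case _:
            # The wildcard arm makes the "unhandled message" path explicit,
            # something the original elif chain left implicit.
            return "unhandled message"


if __name__ == "__main__":
    print(dispatch(Union.Status, {"text": "CONNECTED"}))
    print(dispatch(Union.LogAppend, {"entries": ["line 1", "line 2"]}))
```

In the real handler the `case` bodies post the decoded data to the corresponding `*Data`/`*Points` models exactly as the removed `elif` branches did; the one structural difference is that `SolutionProtectionLevel` now always matches and performs the `MAP_ENABLED[0]` check inside its case body.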