diff --git a/.github/workflows/docker-hub.yml b/.github/workflows/docker-hub.yml
index 3f4423ebf..7300376f7 100644
--- a/.github/workflows/docker-hub.yml
+++ b/.github/workflows/docker-hub.yml
@@ -13,13 +13,13 @@ jobs:
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [ubuntu-latest, [Linux, ARM], [Linux, ARM64]]
+        os: [ubuntu-latest, [Linux, ARM], ubuntu-24.04-arm]
         include:
         - os: ubuntu-latest
           arch: amd64
         - os: [Linux, ARM]
           arch: armv7
-        - os: [Linux, ARM64]
+        - os: ubuntu-24.04-arm
           arch: armv8
     steps:
     - name: Clean the workspace
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index b045659f8..2389e0a67 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -67,13 +67,13 @@ jobs:
     needs: build-docstrings
     strategy:
       matrix:
-        os: [ubuntu-latest, windows-latest, macos-latest]
+        os: [ubuntu-latest, windows-latest, macos-14]
     runs-on: ${{ matrix.os }}
     steps:
     - name: Print home directory
       run: echo Home directory inside container $HOME
     - name: Setup cmake
-      if: matrix.os == 'macos-latest'
+      if: matrix.os == 'macos-14'
      uses: jwlawson/actions-setup-cmake@v1.13
       with:
         cmake-version: '3.29.x'
@@ -113,7 +113,7 @@
         sudo apt install libusb-1.0-0-dev
 
     - name: Install dependencies (MacOS)
-      if: matrix.os == 'macos-latest'
+      if: matrix.os == 'macos-14'
       run: |
         python -m pip install --upgrade pip
         brew install libusb
@@ -184,7 +184,7 @@
     runs-on: windows-latest
     strategy:
       matrix:
-        python-version: [3.7, 3.8, 3.9, '3.10', '3.11', '3.12', '3.13']
+        python-version: [3.9, '3.10', '3.11', '3.12', '3.13', '3.14']
         python-architecture: [x64, x86]
       fail-fast: false
     steps:
@@ -207,6 +207,11 @@
     - name: Select Windows SDK
       run: echo "CMAKE_ARGS=-DCMAKE_SYSTEM_VERSION=${{ env.CMAKE_WINDOWS_SDK_VERSION }}" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
 
+    - name: Set UTF-8 encoding
+      run: |
+        echo "PYTHONIOENCODING=utf-8" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
+        echo "PYTHONUTF8=1" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
+
     - name: Install dependencies
       run: choco install strawberryperl
     - name: Set up Python ${{ matrix.python-version }}
@@ -240,7 +245,7 @@
     needs: build-docstrings
     strategy:
      matrix:
-        python-version: [3.8, 3.9, '3.10', '3.11', '3.12', '3.13']
+        python-version: [3.9, '3.10', '3.11', '3.12', '3.13', '3.14']
         os: [macos-13, macos-14] # macos-13 is x64, macos-14 is arm64
     runs-on: ${{ matrix.os }}
     steps:
@@ -301,7 +306,7 @@
     needs: build-docstrings
     runs-on: ubuntu-latest
     container:
-      image: quay.io/pypa/manylinux_2_28_x86_64:latest
+      image: quay.io/pypa/manylinux_2_28_x86_64:2025.11.10-2
     env:
       PLAT: manylinux_2_28_x86_64
     steps:
@@ -317,7 +322,7 @@
       run: yum install -y --disableplugin=fastestmirror libusb1-devel perl-core
     - name: Installing cmake dependency
       run: |
-        /opt/python/cp38-cp38/bin/python3.8 -m pip install cmake
+        /opt/python/cp38-cp38/bin/python3.8 -m pip install "cmake<4.0"
         ln -s /opt/python/cp38-cp38/bin/cmake /bin/
     - name: Create folder structure
       run: mkdir -p wheelhouse/audited/
@@ -331,8 +336,8 @@
 
     - name: Build and install depthai-core
       run: |
-        cmake -S depthai-core/ -B build_core -D CMAKE_BUILD_TYPE=Release -D CMAKE_TOOLCHAIN_FILE=$PWD/cmake/toolchain/pic.cmake
-        cmake --build build_core --target install --parallel 4
+        /opt/python/cp38-cp38/bin/cmake -S depthai-core/ -B build_core -D CMAKE_BUILD_TYPE=Release -D CMAKE_TOOLCHAIN_FILE=$PWD/cmake/toolchain/pic.cmake
+        /opt/python/cp38-cp38/bin/cmake --build build_core --target install --parallel 4
         echo "DEPTHAI_INSTALLATION_DIR=$PWD/build_core/install/" >> $GITHUB_ENV
"DEPTHAI_INSTALLATION_DIR=$PWD/build_core/install/" >> $GITHUB_ENV - name: Append build hash if not a tagged commit @@ -341,10 +346,11 @@ jobs: - name: Building source distribution run: | + /opt/python/cp38-cp38/bin/python3.8 -m pip install --upgrade setuptools wheel /opt/python/cp38-cp38/bin/python3.8 setup.py sdist --formats=gztar mv dist/* wheelhouse/audited/ - name: Build wheels - run: for PYBIN in /opt/python/cp3{7..13}*/bin; do "${PYBIN}/pip" wheel . -w ./wheelhouse/ --verbose; done + run: for PYBIN in /opt/python/cp3{9..14}*/bin; do "${PYBIN}/pip" wheel . -w ./wheelhouse/ --verbose; done - name: Audit wheels run: for whl in wheelhouse/*.whl; do auditwheel repair "$whl" --plat $PLAT -w wheelhouse/audited/; done - name: Archive wheel artifacts @@ -363,9 +369,9 @@ jobs: # This job builds wheels for ARM64 arch build-linux-arm64: needs: build-docstrings - runs-on: [self-hosted, linux, ARM64] + runs-on: ubuntu-24.04-arm container: - image: quay.io/pypa/manylinux_2_28_aarch64:latest + image: quay.io/pypa/manylinux_2_28_aarch64:2025.11.10-2 env: PLAT: manylinux_2_28_aarch64 # Mount local hunter cache directory, instead of transfering to Github and back @@ -379,7 +385,7 @@ jobs: run: yum install -y --disableplugin=fastestmirror libusb1-devel perl-core - name: Installing cmake dependency run: | - /opt/python/cp38-cp38/bin/python3.8 -m pip install cmake + /opt/python/cp38-cp38/bin/python3.8 -m pip install "cmake<4" ln -s /opt/python/cp38-cp38/bin/cmake /bin/ - name: Create folder structure run: mkdir -p wheelhouse/audited/ @@ -393,15 +399,15 @@ jobs: - name: Build and install depthai-core run: | - cmake -S depthai-core/ -B build_core -D CMAKE_BUILD_TYPE=Release -D CMAKE_TOOLCHAIN_FILE=$PWD/cmake/toolchain/pic.cmake - cmake --build build_core --target install --parallel 4 + /opt/python/cp38-cp38/bin/cmake -S depthai-core/ -B build_core -D CMAKE_BUILD_TYPE=Release -D CMAKE_TOOLCHAIN_FILE=$PWD/cmake/toolchain/pic.cmake + /opt/python/cp38-cp38/bin/cmake --build build_core --target install --parallel 4 echo "DEPTHAI_INSTALLATION_DIR=$PWD/build_core/install/" >> $GITHUB_ENV - name: Append build hash if not a tagged commit if: startsWith(github.ref, 'refs/tags/v') != true run: echo "BUILD_COMMIT_HASH=${{github.sha}}" >> $GITHUB_ENV - name: Building wheels - run: for PYBIN in /opt/python/cp3{7..13}*/bin; do "${PYBIN}/pip" wheel . -w ./wheelhouse/ --verbose; done + run: for PYBIN in /opt/python/cp3{9..14}*/bin; do "${PYBIN}/pip" wheel . 
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 9448ceaae..aacd56dd2 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -159,6 +159,7 @@ pybind11_add_module(${TARGET_NAME}
     src/pipeline/datatype/PointCloudConfigBindings.cpp
     src/pipeline/datatype/PointCloudDataBindings.cpp
     src/pipeline/datatype/ImageAlignConfigBindings.cpp
+    src/pipeline/datatype/ObjectTrackerConfigBindings.cpp
 )
 
 if(WIN32)
diff --git a/cmake/Hunter/config.cmake b/cmake/Hunter/config.cmake
index 22fb2d494..dbd675e28 100644
--- a/cmake/Hunter/config.cmake
+++ b/cmake/Hunter/config.cmake
@@ -2,6 +2,6 @@ hunter_config(
     pybind11
     VERSION "2.12.0"
-    URL "https://github.com/pybind/pybind11/archive/refs/tags/v2.12.0.tar.gz"
-    SHA1 "e70610cba7b6b7d7a57827d5357c016ad2155c0f"
+    URL "https://github.com/pybind/pybind11/archive/refs/tags/v3.0.1.tar.gz"
+    SHA1 "b20ddcd79e2b03b7e2777a2a0b06b646f2f23ce0"
 )
diff --git a/depthai-core b/depthai-core
index e0f6b52d0..a99f24151 160000
--- a/depthai-core
+++ b/depthai-core
@@ -1 +1 @@
-Subproject commit e0f6b52d048ef7ceac2ecd44deb2e161dbb97699
+Subproject commit a99f2415120b7900d7ee34f949fa651f0c2d4f88
diff --git a/examples/ObjectTracker/object_tracker.py b/examples/ObjectTracker/object_tracker.py
index 37e6f16a6..03a142a14 100755
--- a/examples/ObjectTracker/object_tracker.py
+++ b/examples/ObjectTracker/object_tracker.py
@@ -29,9 +29,11 @@
 xlinkOut = pipeline.create(dai.node.XLinkOut)
 trackerOut = pipeline.create(dai.node.XLinkOut)
+xinTrackerConfig = pipeline.create(dai.node.XLinkIn)
 
 xlinkOut.setStreamName("preview")
 trackerOut.setStreamName("tracklets")
+xinTrackerConfig.setStreamName("trackerConfig")
 
 # Properties
 camRgb.setPreviewSize(300, 300)
@@ -64,11 +66,19 @@
 detectionNetwork.out.link(objectTracker.inputDetections)
 objectTracker.out.link(trackerOut.input)
 
+# set tracking parameters
+objectTracker.setOcclusionRatioThreshold(0.4)
+objectTracker.setTrackletMaxLifespan(120)
+objectTracker.setTrackletBirthThreshold(3)
+
+xinTrackerConfig.out.link(objectTracker.inputConfig)
+
 # Connect to device and start pipeline
 with dai.Device(pipeline) as device:
     preview = device.getOutputQueue("preview", 4, False)
     tracklets = device.getOutputQueue("tracklets", 4, False)
+    trackerConfigQueue = device.getInputQueue("trackerConfig")
 
     startTime = time.monotonic()
     counter = 0
@@ -76,6 +86,7 @@
     frame = None
 
     while(True):
+        latestTrackedIds = []
         imgFrame = preview.get()
         track = tracklets.get()
 
@@ -106,9 +117,26 @@
             cv2.putText(frame, t.status.name, (x1 + 10, y1 + 50), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
             cv2.rectangle(frame, (x1, y1), (x2, y2), color, cv2.FONT_HERSHEY_SIMPLEX)
 
+            if t.status == dai.Tracklet.TrackingStatus.TRACKED:
+                latestTrackedIds.append(t.id)
+
         cv2.putText(frame, "NN fps: {:.2f}".format(fps), (2, frame.shape[0] - 4), cv2.FONT_HERSHEY_TRIPLEX, 0.4, color)
 
         cv2.imshow("tracker", frame)
 
-        if cv2.waitKey(1) == ord('q'):
+        key = cv2.waitKey(1)
+        if key == ord('q'):
             break
+        elif key == ord('g'):
+            # send tracker config to device
+            config = dai.ObjectTrackerConfig()
+
+            # take a random ID from the latest tracked IDs
+            if len(latestTrackedIds) > 0:
+                idToRemove = (np.random.choice(latestTrackedIds))
+                print(f"Force removing ID: {idToRemove}")
+                config.forceRemoveID(idToRemove)
+                trackerConfigQueue.send(config)
+            else:
+                print("No tracked IDs available to force remove")
+
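Reviewer note: the 'g' key handler above removes one random tracklet via forceRemoveID(). The ObjectTrackerConfig message bound later in this diff also exposes forceRemoveIDs() and get(); a minimal host-side sketch of the batch variant, with illustrative tracklet IDs:

    import depthai as dai

    cfg = dai.ObjectTrackerConfig()
    cfg.forceRemoveIDs([3, 7])       # queue several tracklet IDs for removal in one message
    raw = cfg.get()                  # underlying RawObjectTrackerConfig
    print(raw.trackletIdsToRemove)   # should list the IDs queued above
    # trackerConfigQueue.send(cfg)   # sent over the "trackerConfig" XLinkIn queue, as in the example
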
diff --git a/setup.py b/setup.py
index 596c31c39..7dfab219f 100644
--- a/setup.py
+++ b/setup.py
@@ -238,19 +238,18 @@ def build_extension(self, ext):
         "Operating System :: Unix",
         "Programming Language :: Python",
         "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.7",
-        "Programming Language :: Python :: 3.8",
         "Programming Language :: Python :: 3.9",
         "Programming Language :: Python :: 3.10",
         "Programming Language :: Python :: 3.11",
         "Programming Language :: Python :: 3.12",
         "Programming Language :: Python :: 3.13",
+        "Programming Language :: Python :: 3.14",
         "Programming Language :: C++",
         "Programming Language :: Python :: Implementation :: CPython",
         "Topic :: Scientific/Engineering",
         "Topic :: Software Development",
     ],
-    python_requires='>=3.7',
+    python_requires='>=3.9',
     entry_points={
         "console_scripts": [
             f'depthai={DEPTHAI_CLI_MODULE_NAME}.depthai_cli:cli'
diff --git a/src/DatatypeBindings.cpp b/src/DatatypeBindings.cpp
index 68d3dec47..aae56fbf3 100644
--- a/src/DatatypeBindings.cpp
+++ b/src/DatatypeBindings.cpp
@@ -28,6 +28,7 @@ void bind_tracklets(pybind11::module& m, void* pCallstack);
 void bind_pointcloudconfig(pybind11::module& m, void* pCallstack);
 void bind_pointclouddata(pybind11::module& m, void* pCallstack);
 void bind_imagealignconfig(pybind11::module& m, void* pCallstack);
+void bind_objecttrackerconfig(pybind11::module& m, void* pCallstack);
 
 void DatatypeBindings::addToCallstack(std::deque<StackFunction>& callstack) {
     // Bind common datatypebindings
@@ -59,6 +60,7 @@ void DatatypeBindings::addToCallstack(std::deque<StackFunction>& callstack) {
     callstack.push_front(bind_pointcloudconfig);
     callstack.push_front(bind_pointclouddata);
     callstack.push_front(bind_imagealignconfig);
+    callstack.push_front(bind_objecttrackerconfig);
 }
 
 void DatatypeBindings::bind(pybind11::module& m, void* pCallstack){
diff --git a/src/pipeline/datatype/ObjectTrackerConfigBindings.cpp b/src/pipeline/datatype/ObjectTrackerConfigBindings.cpp
new file mode 100644
index 000000000..bc656c024
--- /dev/null
+++ b/src/pipeline/datatype/ObjectTrackerConfigBindings.cpp
@@ -0,0 +1,54 @@
+#include "DatatypeBindings.hpp"
+#include "pipeline/CommonBindings.hpp"
+#include <unordered_map>
+#include <memory>
+
+// depthai
+#include "depthai/pipeline/datatype/ObjectTrackerConfig.hpp"
+
+//pybind
+#include <pybind11/chrono.h>
+#include <pybind11/numpy.h>
+
+// #include "spdlog/spdlog.h"
+
+void bind_objecttrackerconfig(pybind11::module& m, void* pCallstack){
+
+    using namespace dai;
+
+    py::class_<RawObjectTrackerConfig, RawBuffer, std::shared_ptr<RawObjectTrackerConfig>> rawConfig(m, "RawObjectTrackerConfig", DOC(dai, RawObjectTrackerConfig));
+    py::class_<ObjectTrackerConfig, Buffer, std::shared_ptr<ObjectTrackerConfig>> config(m, "ObjectTrackerConfig", DOC(dai, ObjectTrackerConfig));
+
+    ///////////////////////////////////////////////////////////////////////
+    ///////////////////////////////////////////////////////////////////////
+    ///////////////////////////////////////////////////////////////////////
+    // Call the rest of the type defines, then perform the actual bindings
+    Callstack* callstack = (Callstack*) pCallstack;
+    auto cb = callstack->top();
+    callstack->pop();
+    cb(m, pCallstack);
+    // Actual bindings
+    ///////////////////////////////////////////////////////////////////////
+    ///////////////////////////////////////////////////////////////////////
+    ///////////////////////////////////////////////////////////////////////
+
+    // Metadata / raw
+    rawConfig
+        .def(py::init<>())
+        .def_readwrite("trackletIdsToRemove", &RawObjectTrackerConfig::trackletIdsToRemove, DOC(dai, RawObjectTrackerConfig, trackletIdsToRemove))
+        ;
+
+    // Message
+    config
+        .def(py::init<>())
+        .def(py::init<std::shared_ptr<RawObjectTrackerConfig>>())
+
+        .def("set", &ObjectTrackerConfig::set, py::arg("config"), DOC(dai, ObjectTrackerConfig, set))
+        .def("get", &ObjectTrackerConfig::get, DOC(dai, ObjectTrackerConfig, get))
+        .def("forceRemoveID", &ObjectTrackerConfig::forceRemoveID, DOC(dai, ObjectTrackerConfig, forceRemoveID))
+        .def("forceRemoveIDs", &ObjectTrackerConfig::forceRemoveIDs, DOC(dai, ObjectTrackerConfig, forceRemoveIDs))
+        ;
+
+    // add aliases
+
+}
diff --git a/src/pipeline/node/ObjectTrackerBindings.cpp b/src/pipeline/node/ObjectTrackerBindings.cpp
index b2b2b76a3..a06c6499f 100644
--- a/src/pipeline/node/ObjectTrackerBindings.cpp
+++ b/src/pipeline/node/ObjectTrackerBindings.cpp
@@ -48,6 +48,10 @@ void bind_objecttracker(pybind11::module& m, void* pCallstack){
         .def_readwrite("detectionLabelsToTrack", &ObjectTrackerProperties::detectionLabelsToTrack, DOC(dai, ObjectTrackerProperties, detectionLabelsToTrack))
         .def_readwrite("trackerType", &ObjectTrackerProperties::trackerType, DOC(dai, ObjectTrackerProperties, trackerType))
         .def_readwrite("trackerIdAssignmentPolicy", &ObjectTrackerProperties::trackerIdAssignmentPolicy, DOC(dai, ObjectTrackerProperties, trackerIdAssignmentPolicy))
+        .def_readwrite("trackingPerClass", &ObjectTrackerProperties::trackingPerClass, DOC(dai, ObjectTrackerProperties, trackingPerClass))
+        .def_readwrite("occlusionRatioThreshold", &ObjectTrackerProperties::occlusionRatioThreshold, DOC(dai, ObjectTrackerProperties, occlusionRatioThreshold))
+        .def_readwrite("trackletMaxLifespan", &ObjectTrackerProperties::trackletMaxLifespan, DOC(dai, ObjectTrackerProperties, trackletMaxLifespan))
+        .def_readwrite("trackletBirthThreshold", &ObjectTrackerProperties::trackletBirthThreshold, DOC(dai, ObjectTrackerProperties, trackletBirthThreshold))
         ;
 
     // Node
@@ -55,6 +59,7 @@ void bind_objecttracker(pybind11::module& m, void* pCallstack){
         .def_readonly("inputTrackerFrame", &ObjectTracker::inputTrackerFrame, DOC(dai, node, ObjectTracker, inputTrackerFrame))
         .def_readonly("inputDetectionFrame", &ObjectTracker::inputDetectionFrame, DOC(dai, node, ObjectTracker, inputDetectionFrame))
         .def_readonly("inputDetections", &ObjectTracker::inputDetections, DOC(dai, node, ObjectTracker, inputDetections))
+        .def_readonly("inputConfig", &ObjectTracker::inputConfig, DOC(dai, node, ObjectTracker, inputConfig))
         .def_readonly("out", &ObjectTracker::out, DOC(dai, node, ObjectTracker, out))
         .def_readonly("passthroughTrackerFrame", &ObjectTracker::passthroughTrackerFrame, DOC(dai, node, ObjectTracker, passthroughTrackerFrame))
         .def_readonly("passthroughDetectionFrame", &ObjectTracker::passthroughDetectionFrame, DOC(dai, node, ObjectTracker, passthroughDetectionFrame))
@@ -66,6 +71,9 @@ void bind_objecttracker(pybind11::module& m, void* pCallstack){
         .def("setTrackerType", &ObjectTracker::setTrackerType, py::arg("type"), DOC(dai, node, ObjectTracker, setTrackerType))
         .def("setTrackerIdAssignmentPolicy", &ObjectTracker::setTrackerIdAssignmentPolicy, py::arg("type"), DOC(dai, node, ObjectTracker, setTrackerIdAssignmentPolicy))
         .def("setTrackingPerClass", &ObjectTracker::setTrackingPerClass, py::arg("trackingPerClass"), DOC(dai, node, ObjectTracker, setTrackingPerClass))
+        .def("setOcclusionRatioThreshold", &ObjectTracker::setOcclusionRatioThreshold, py::arg("occlusionRatioThreshold"), DOC(dai, node, ObjectTracker, setOcclusionRatioThreshold))
+        .def("setTrackletMaxLifespan", &ObjectTracker::setTrackletMaxLifespan, py::arg("lifespan"), DOC(dai, node, ObjectTracker, setTrackletMaxLifespan))
+        .def("setTrackletBirthThreshold", &ObjectTracker::setTrackletBirthThreshold, py::arg("threshold"), DOC(dai, node, ObjectTracker, setTrackletBirthThreshold))
         ;
 
     daiNodeModule.attr("ObjectTracker").attr("Properties") = objectTrackerProperties;
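Reviewer note: a quick import-time smoke check (illustrative only, not part of this PR) that the new datatype and node members are exported by a freshly built module:

    import depthai as dai

    # Message types registered via bind_objecttrackerconfig
    assert hasattr(dai, "ObjectTrackerConfig")
    assert hasattr(dai, "RawObjectTrackerConfig")

    # New ObjectTracker node members added in ObjectTrackerBindings.cpp
    for name in ("inputConfig", "setOcclusionRatioThreshold",
                 "setTrackletMaxLifespan", "setTrackletBirthThreshold"):
        assert hasattr(dai.node.ObjectTracker, name), name
    print("ObjectTracker config bindings present")
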
diff --git a/utilities/cam_test.py b/utilities/cam_test.py
index 482e9ac4d..41a61db73 100755
--- a/utilities/cam_test.py
+++ b/utilities/cam_test.py
@@ -276,6 +276,17 @@ def socket_to_socket_opt(socket: dai.CameraBoardSocket) -> str:
     cam_type_thermal[socket] = is_thermal
     print(socket.rjust(7), ':', 'tof' if is_tof else 'color' if is_color else 'thermal' if is_thermal else 'mono')
 
+print('misc controls:', args.misc_controls)
+args_misc_dict = dict(args.misc_controls)
+
+hdr_exp_ratio = int(math.log2(float(args_misc_dict.get('hdr-exposure-ratio', 1))))
+hdr_exp_base = args_misc_dict.get('hdr-exposure-base', 'long')
+hdr_local_tone_weight = int(32 * float(args_misc_dict.get('hdr-local-tone-weight', 0.75)))
+hdr_on = (hdr_exp_ratio > 0)
+if hdr_on and args.fps > 10:
+    print('WARNING: HDR enabled for IMX582/IMX586, limiting FPS to 10')
+    args.fps = 10
+
 # Start defining a pipeline
 pipeline = dai.Pipeline()
 # Uncomment to get better throughput
@@ -370,7 +381,8 @@ def socket_to_socket_opt(socket: dai.CameraBoardSocket) -> str:
     # cam[c].initialControl.setMisc("binning-mode", "sum") # default: "avg"
     # cam[c].initialControl.setMisc("manual-exposure-handling", "fast") # default: "default"
     # cam[c].initialControl.setMisc("hdr-exposure-ratio", 4) # enables HDR when set `> 1`, current options: 2, 4, 8
-    # cam[c].initialControl.setMisc("hdr-local-tone-weight", 75) # default 75, range 0..100
+    # cam[c].initialControl.setMisc("hdr-exposure-base", "middle") # default "long"
+    # cam[c].initialControl.setMisc("hdr-local-tone-weight", 0.75) # default 0.75, range 0..1
     # cam[c].initialControl.setMisc("high-conversion-gain", 0) # 1 to enable (default on supported sensors)
     for kvPair in args.misc_controls:
         cam[c].initialControl.setMisc(*kvPair)
@@ -505,6 +517,12 @@ def socket_to_socket_opt(socket: dai.CameraBoardSocket) -> str:
     controlQueue = device.getInputQueue('control')
     tofCfgQueue = device.getInputQueue('tofConfig')
 
+    # TMP workaround, send again HDR local tone weight which doesn't seem to take effect from initialControl
+    if hdr_on:
+        ctrl = dai.CameraControl()
+        ctrl.setMisc("hdr-local-tone-weight", hdr_local_tone_weight / 32)
+        controlQueue.send(ctrl)
+
     # Manual exposure/focus set step
     EXP_STEP = 500 # us
     ISO_STEP = 50
@@ -549,12 +567,6 @@ def socket_to_socket_opt(socket: dai.CameraBoardSocket) -> str:
     control = 'none'
     show = args.show_meta
     high_conversion_gain = 1
-    print(args.misc_controls)
-    args_misc_dict = dict(args.misc_controls)
-
-    hdr_exp_ratio = int(math.log2(float(args_misc_dict.get('hdr-exposure-ratio', 1))))
-    hdr_local_tone_weight = int(32 * float(args_misc_dict.get('hdr-local-tone-weight', 0.75)))
-    hdr_on = (hdr_exp_ratio > 0)
 
     jet_custom = cv2.applyColorMap(
         np.arange(256, dtype=np.uint8), cv2.COLORMAP_JET)
@@ -639,8 +651,19 @@ def socket_to_socket_opt(socket: dai.CameraBoardSocket) -> str:
                     capture_list.remove(c)
                     print()
                 if c.startswith('raw_') or c.startswith('tof_amplitude_') or c.startswith('tof_intensity_'):
+                    # Custom handling for IMX462 that has extra metadata and offset in RAW frame
+                    bits = 10
+                    order = cv2.COLOR_BayerGB2BGR
+                    if pkt.getData().size == 1920*2*(1080+15):
+                        full_raw = pkt.getData()
+                        extra_offset = 384*2
+                        actual_frame = full_raw[(1920*2*15+extra_offset):]
+                        missing_data = np.full(extra_offset, 0xAA, dtype=np.uint8) # FIXME
+                        frame = np.append(actual_frame, missing_data).view(np.uint16).reshape((1080, 1920))
+                        bits = 12
+                        order = cv2.COLOR_BayerGR2BGR
                     if capture:
-                        filename = capture_file_info + '_10bit.bw'
+                        filename = capture_file_info + f'_{bits}bit.bw'
                         print('Saving:', filename)
                         frame.tofile(filename)
                     # Full range for display, use bits [15:6] of the 16-bit pixels
@@ -649,14 +672,14 @@ def socket_to_socket_opt(socket: dai.CameraBoardSocket) -> str:
                     if type == dai.ImgFrame.Type.RAW10:
                         multiplier = (1 << (16-10))
                     if type == dai.ImgFrame.Type.RAW12:
-                        multiplier = (1 << (16-4))
+                        multiplier = (1 << (16-12))
                     frame = frame * multiplier
                     # Debayer as color for preview/png
                     if cam_type_color[cam_skt]:
                         # See this for the ordering, at the end of page:
                         # https://docs.opencv.org/4.5.1/de/d25/imgproc_color_conversions.html
                         # TODO add bayer order to ImgFrame getType()
-                        frame = cv2.cvtColor(frame, cv2.COLOR_BayerGB2BGR)
+                        frame = cv2.cvtColor(frame, order)
                 else:
                     # Save YUV too, but only when RAW is also enabled (for tuning purposes)
                     if capture and args.enable_raw:
@@ -745,6 +768,15 @@ def socket_to_socket_opt(socket: dai.CameraBoardSocket) -> str:
             ctrl = dai.CameraControl()
             ctrl.setAutoExposureLock(ae_lock)
             controlQueue.send(ctrl)
+        elif key == ord('b'):
+            if hdr_on:
+                hdr_exp_base = 'middle' if hdr_exp_base == 'long' else 'long'
+                print(f"HDR exposure base: {hdr_exp_base}")
+                ctrl = dai.CameraControl()
+                ctrl.setMisc("hdr-exposure-base", hdr_exp_base)
+                controlQueue.send(ctrl)
+            else:
+                print("HDR was not enabled, start with `-misc hdr-exposure-ratio=2` or higher to enable")
         elif key == ord('a'):
             dotIntensity = dotIntensity - DOT_STEP
             if dotIntensity < 0:
@@ -868,7 +900,7 @@ def socket_to_socket_opt(socket: dai.CameraBoardSocket) -> str:
             elif control == 'hdr_exp_ratio':
                 hdr_exp_ratio = clamp(hdr_exp_ratio + change, 0, 3)
                 value = pow(2, hdr_exp_ratio)
-                print("HDR exposure ratio:", value)
+                print("HDR exposure ratio:", value, '(HDR disabled)' if value == 1 else '')
                 ctrl.setMisc("hdr-exposure-ratio", value)
             elif control == 'hdr_local_tone_weight':
                 hdr_local_tone_weight = clamp(hdr_local_tone_weight + change, 0, 32)
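Reviewer note: all of the HDR knobs above go through CameraControl.setMisc(). A condensed sketch of the conversions cam_test.py now performs, with illustrative values (the control queue comes from an open device):

    import math
    import depthai as dai

    misc = {"hdr-exposure-ratio": "4", "hdr-local-tone-weight": "0.75", "hdr-exposure-base": "long"}

    hdr_exp_ratio = int(math.log2(float(misc.get("hdr-exposure-ratio", 1))))          # 4 -> 2 steps
    hdr_local_tone_weight = int(32 * float(misc.get("hdr-local-tone-weight", 0.75)))  # 0..1 -> 0..32 steps
    hdr_on = hdr_exp_ratio > 0

    ctrl = dai.CameraControl()
    ctrl.setMisc("hdr-exposure-ratio", 2 ** hdr_exp_ratio)             # 1 disables HDR
    ctrl.setMisc("hdr-exposure-base", "middle")                        # or "long" (the default)
    ctrl.setMisc("hdr-local-tone-weight", hdr_local_tone_weight / 32)  # back to 0..1 for the device
    # controlQueue.send(ctrl)
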
diff --git a/utilities/stress_test.py b/utilities/stress_test.py
index 7d7a1e002..88493336c 100644
--- a/utilities/stress_test.py
+++ b/utilities/stress_test.py
@@ -137,6 +137,9 @@ def create_yolo(pipeline: dai.Pipeline, camera: dai.node.ColorCamera) -> Tuple[s
 def clamp(num, v0, v1):
     return max(v0, min(num, v1))
 
+DOT_STEP = 0.05
+FLOOD_STEP = 0.05
+
 class PipelineContext:
     q_name_yolo_passthrough: Optional[str] = None
     """The name of the queue that the YOLO spatial detection network passthrough is connected to."""
@@ -150,8 +153,8 @@ def stress_test(mxid: str = ""):
     # May have some unknown args
     args, _ = parser.parse_known_args()
 
-    dot_intensity = 500
-    flood_intensity = 500
+    dot_intensity = 0.5
+    flood_intensity = 0.5
 
     iso = 800
     exp_time = 20000
@@ -162,9 +165,9 @@ def stress_test(mxid: str = ""):
         cam_args.append(device_info)
     with dai.Device(*cam_args) as device:
         print("Setting default dot intensity to", dot_intensity)
-        device.setIrLaserDotProjectorBrightness(dot_intensity)
+        device.setIrLaserDotProjectorIntensity(dot_intensity)
         print("Setting default flood intensity to", flood_intensity)
-        device.setIrFloodLightBrightness(flood_intensity)
+        device.setIrFloodLightIntensity(flood_intensity)
         pipeline, outputs, pipeline_context = build_pipeline(device, args)
         device.startPipeline(pipeline)
         start_time = time.time()
@@ -230,21 +233,21 @@ def stress_test(mxid: str = ""):
                 print("Q Pressed, exiting stress test...")
                 break
             elif key == ord('a'):
-                dot_intensity = clamp(dot_intensity - 100, 0, 1200)
+                dot_intensity = clamp(dot_intensity - DOT_STEP, 0, 1.0)
                 print("Decreasing dot intensity by 100, new value:", dot_intensity)
-                device.setIrLaserDotProjectorBrightness(dot_intensity)
+                device.setIrLaserDotProjectorIntensity(dot_intensity)
             elif key == ord('d'):
-                dot_intensity = clamp(dot_intensity + 100, 0, 1200)
+                dot_intensity = clamp(dot_intensity + DOT_STEP, 0, 1.0)
                 print("Increasing dot intensity by 100, new value:", dot_intensity)
-                device.setIrLaserDotProjectorBrightness(dot_intensity)
+                device.setIrLaserDotProjectorIntensity(dot_intensity)
             elif key == ord('w'):
-                flood_intensity = clamp(flood_intensity + 100, 0, 1500)
+                flood_intensity = clamp(flood_intensity + FLOOD_STEP, 0, 1.0)
                 print("Increasing flood intensity by 100, new value:", flood_intensity)
-                device.setIrFloodLightBrightness(flood_intensity)
+                device.setIrFloodLightIntensity(flood_intensity)
             elif key == ord('s'):
-                flood_intensity = clamp(flood_intensity - 100, 0, 1500)
+                flood_intensity = clamp(flood_intensity - FLOOD_STEP, 0, 1.0)
                 print("Decreasing flood intensity by 100, new value:", flood_intensity)
-                device.setIrFloodLightBrightness(flood_intensity)
+                device.setIrFloodLightIntensity(flood_intensity)
             elif key == ord('k'):
                 iso = clamp(iso - 50, 0, 1600)
                 print("Decreasing iso by 50, new value:", iso)
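Reviewer note: stress_test.py now drives the IR illumination through the normalized-intensity API instead of the older milliamp-based brightness calls; a minimal sketch of the migration (requires a connected device):

    import depthai as dai

    with dai.Device() as device:
        # Old calls, removed above: brightness in mA (clamped to 0..1200 dot, 0..1500 flood)
        # device.setIrLaserDotProjectorBrightness(500)
        # device.setIrFloodLightBrightness(500)

        # New calls used by the script: normalized intensity in the 0..1 range
        device.setIrLaserDotProjectorIntensity(0.5)
        device.setIrFloodLightIntensity(0.5)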