diff --git a/pieces/Video_Ctrl/README.md b/pieces/Video_Ctrl/README.md new file mode 100644 index 0000000..9176b1f --- /dev/null +++ b/pieces/Video_Ctrl/README.md @@ -0,0 +1 @@ +Needs an up to date list of active_sprawl_nodes in the hosts file in the root directory of this repository. diff --git a/pieces/Video_Ctrl/SC/video_osc_effect.scd b/pieces/Video_Ctrl/SC/video_osc_effect.scd new file mode 100644 index 0000000..aa29eb3 --- /dev/null +++ b/pieces/Video_Ctrl/SC/video_osc_effect.scd @@ -0,0 +1,183 @@ +"SC_JACK_DEFAULT_INPUTS".setenv("REAPER"); +//s.options.bindAddress = "0.0.0.0"; // allow connections from any address +s.options.numInputBusChannels = 16; // set to correct number of channels +s.options.numOutputBusChannels = 2; // set to correct number of channels +// s.options.maxLogins = 6; // set to correct number of clients +s.boot; +s.waitForBoot( + { + ~numInputs = 16; // set to correct number of channels + ~netAverage = 0.0; + ~scale_bools = [1, 1, 1, 1, 1]; + + ~ownBus = Bus.new('audio', 18, 1, s); + ~f1 = Buffer.alloc(s,1024,2); + ~f2 = Buffer.alloc(s,1024,2); + + + // Create the filterMix block + ~filterMixBlock = { + |cutoffFreq = 20000, vol = 1, gains = #[0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5]| + + // Create an array to hold the input signals + var inputSignals = Array.fill(~numInputs, { |i| SoundIn.ar(i) }); + + // Apply individual gains to the input signals + var scaledInputs = inputSignals.collect({ |input, i| input * gains[i] }); + + // Create a lowpass filter + var filterOutput = LPF.ar(Mix.new(scaledInputs), cutoffFreq, vol); + + // Connect the filtered output to a SuperCollider audio output bus + Out.ar(0, filterOutput); + ScopeOut2.ar(filterOutput, ~f2.bufnum); + + + }.play; + + ~nodeMix = { + |delay = 0.5| + Out.ar(~ownBus, SoundIn.ar(0)); + Out.ar(1, FreeVerb.ar(In.ar(~ownBus), 0.5, delay, 1-delay)); + ScopeOut2.ar(In.ar(~ownBus), ~f1.bufnum); + }.play; + + p = P2PSC(); + ~hostname = 
Pipe.new("hostname", "r").getLine; + p.setName(~hostname); // set your name accordingly + ~hostname.postln; + s.sync(); + + // Setups GUI + // Routine( + // {{ + ~window = Window(bounds:800@480).front().background_(Color.white); + + if (~hostname.beginsWith("AP")) { + ~window.fullScreen; + }; + + // ScopeView + ~text_scope1 = StaticText(~window, Rect(10, 5, 245, 20)).string = "Own input"; + // ~scopeView = ScopeView(~window, Rect(10,10,500,300)); + ~scopeView1 = ScopeView(~window, Rect(10,30,500,180)); + ~scopeView1.server = s; + // ~scopeView.canFocus = true; + ~scopeView1.bufnum = ~f1.bufnum; + ~scopeView1.start; + + ~text_scope2 = StaticText(~window, Rect(10, 215, 245, 20)).string = "Mix result"; + // ~scopeView = ScopeView(~window, Rect(10,10,500,300)); + ~scopeView2 = ScopeView(~window, Rect(10,235,500,180)); + ~scopeView2.server = s; + // ~scopeView.canFocus = true; + ~scopeView2.bufnum = ~f2.bufnum; + ~scopeView2.start; + + ~text_gains = StaticText(~window, Rect(10, 450, 780, 20)).string = ""; + + // Monitor slider + ~text_arp_slider = StaticText(~window, Rect(695, 5, 80, 30)).string = "Reverb"; + ~slider_pan_speed = Slider(~window, Rect(695, 50, 80, 320)); + ~text_pan_speed = StaticText(~window, Rect(695, 385, 80, 30)).string = ""; + ~slider_pan_speed.action = { |slider| + ~text_pan_speed.string = "Room size: " ++ slider.value.round(0.01); + ~nodeMix.set(\delay, slider.value); + }; + + // Generate checkboxes + ~checkboxHeight = (320 / 5.0).asInteger; + ~spacing = (~checkboxHeight / 5.0).asInteger; + ~yPos = 40; + + ~text_tones = StaticText(~window, Rect(560, 10, 150, 20)).string = "Arp tones"; + ~checkboxes = Array.fill(5, { |i| + var checkbox = CheckBox.new(~window, Rect(560, ~yPos, 150, ~checkboxHeight), "Tone " ++ (i + 1)); + ~yPos = ~yPos + (~checkboxHeight + ~spacing); + checkbox.value = ~scale_bools[i]; + checkbox.action = { |box| + if (box.value == true, { + ~scale_bools[i] = 1; + }, { + ~scale_bools[i] = 0; + }); + ~scale_bools.postln; + }; + 
checkbox; + }); + + ~window.front; + // }.defer;}); + + "Start listening for pings".postln; + p.addPath({ |msg| + + var sleeptime = 1; + var freq = 100 + 1000.rand; // Change this for every node + var pan = 2*msg[1].asFloat/100.0 - 1; + var pitch = 300*msg[2].asFloat/100.0; + var vol = msg[3].asFloat/100.0; + freq = pitch; + msg.postln; //print message for debugging + + //{SinOsc.ar(freq:freq)*0.5*EnvGen.kr(Env.perc(releaseTime:sleeptime-0.01), doneAction:2)}.play; + {Pan2.ar(SinOsc.ar(freq: freq) * 0.5 * EnvGen.kr(Env.perc(releaseTime: sleeptime - 0.01), doneAction: 2), pan, vol)}.play; + + fork { + var nextpeer; + var source_peer = msg[1].asString; + var peers = p.getPeers().select({ |item| item.beginsWith("AP") }); + sleeptime.wait; // wait for one second + + // send to the next peer in our list + nextpeer = peers.wrapAt(1+peers.indexOfEqual(source_peer)); + + p.sendMsg("/"++nextpeer++"/ping", p.name); + }; + + },"/ping" + ); + + "Start listening for synth inputs".postln; + p.addPath({ |msg| + + var sleeptime = 1; + var prog_idx = msg[1].asInteger; + var chord_progression = msg.drop(2).round(0.001); + var vol = (5 + 95.rand)/100.0; + var freq = chord_progression[prog_idx]; + // chord_progression.postln; //print message for debugging + + if (~scale_bools[prog_idx] == 0, { + freq = 0; + }); + + //{SinOsc.ar(freq:freq)*0.5*EnvGen.kr(Env.perc(releaseTime:sleeptime-0.01), doneAction:2)}.play; + {Out.ar(~ownBus, LFTri.ar(freq: freq) * vol * EnvGen.kr(Env.perc(releaseTime: sleeptime - 0.01), doneAction: 2))}.play; + + },"/synth" + ); + + "Start listening for shiftmix updates".postln; + p.addPath({ |msg| + var cutoffFreq = 20000*msg[1].asFloat/100.0; + var delay = 2*msg[1].asFloat/100.0; + var vol = msg[2].asFloat/100.0; + var gains = msg.drop(3).round(0.001); + + // gains.round(0.001).postln; //print message for debugging + + ~filterMixBlock.set(\vol, vol); + ~filterMixBlock.set(\gains, gains); + ~filterMixBlock.set(\cutoffFreq, cutoffFreq); + + // ~gui.set(\gains, 
gains); + {~text_gains.string = gains.round(0.1); + // ~slider_pan_speed.value = vol; + }.defer; + + },"/shiftmix" + ); + } +); +s.meter; \ No newline at end of file diff --git a/pieces/Video_Ctrl/launch_video_ctrl.yml b/pieces/Video_Ctrl/launch_video_ctrl.yml new file mode 100644 index 0000000..e9cb785 --- /dev/null +++ b/pieces/Video_Ctrl/launch_video_ctrl.yml @@ -0,0 +1,83 @@ +--- +- name: "Start the video control" + hosts: active_sprawl_nodes + gather_facts: false + vars: + + tasks: + - name: "Kill SC!" + shell: killall sclang + ignore_errors: true + + - name: "Kill SC!" + shell: killall scsynth + + ignore_errors: true + + - name: "Kill JackTrip!" + shell: killall jacktrip + ignore_errors: true + - name: Restart jackd service + ansible.builtin.systemd: + # daemon_reload: true + service: jackd.service + state: restarted + scope: user + become: false + + - name: "Ensure 'pieces' dir exists" + ansible.builtin.file: + path: /home/member/pieces/ + state: directory + owner: member + group: member + mode: "u=rwx,g=rx,o=rx" + + - name: "Copy Files onto the server" + ansible.builtin.copy: + src: SC + dest: /home/member/pieces/Video_Ctrl + owner: member + group: member + mode: "0644" + + - name: "Launch SC!" 
+ async: 2592000 # run for 1 month + poll: 0 + shell: DISPLAY=:0 sclang video_osc_effect.scd >> /tmp/video_osc.log + args: + chdir: /home/member/pieces/Video_Ctrl/SC + + - name: "Launch JackTrip Server" + shell: jacktrip -S -p5 + async: 2592000 # run for 1 month + poll: 0 + + - name: "Launch lots of JackTrip clients" + # create connection to server with the name + shell: jacktrip -n 1 -C {{ item }} -K {{ inventory_hostname }} -J {{ item }} -B {{ base_port + index }} + async: 2592000 # run for 1 month + poll: 0 + loop: "{{ ansible_play_hosts | difference([inventory_hostname]) }}" + loop_control: + index_var: index + when: index < ansible_play_hosts.index(inventory_hostname) + vars: + base_port: 4464 + + - name: "Wait a couple of seconds" + ansible.builtin.wait_for: + timeout: 5 + + - name: "Connect local ins/outs" + shell: | + jack_connect system:capture_1 SuperCollider:in_1 + jack_connect SuperCollider:out_1 system:playback_1 + + - name: "Connect jacktrip clients" + shell: | + jack_connect {{ item }}:receive_1 SuperCollider:in_{{ index+2 }} + jack_connect SuperCollider:out_2 {{ item }}:send_1 + loop: "{{ groups['active_sprawl_nodes'] | difference([inventory_hostname]) }}" + loop_control: + index_var: index diff --git a/pieces/Video_Ctrl/main.qml b/pieces/Video_Ctrl/main.qml new file mode 100644 index 0000000..7ed4a21 --- /dev/null +++ b/pieces/Video_Ctrl/main.qml @@ -0,0 +1,250 @@ +import QtQuick 2.15 +import QtQuick.Controls 2.15 +import QtQuick.Layouts 1.15 +import VideoItem 1.0 +// import QtCharts 2.15 + +ApplicationWindow { + width: 640 + height: 680 + visible: true + title: qsTr("Camera OSC interaction") + + // Handle keyboard interrupt + onClosing: { + Qt.quit(); + } + + GridLayout { + id: grid + columns: 2 + rows: 8 + columnSpacing : spacing + rowSpacing : spacing + anchors.fill: parent + + VideoItem { + id: videoItem + Layout.rowSpan: 2 + Layout.columnSpan: 2 + anchors.top: parent.top + width: 640 + height: 480 + } + + Row { + id: rowSlider1 + anchors.left: 
parent.left + anchors.top: videoItem.bottom + anchors.margins: 10 + + Label { + text: "Threshold: " + slider1.value.toFixed(0) + width: 120 + } + + Slider { + id: slider1 + width: 300 + from: 0 + to: 255 + value: 50 + onValueChanged: videoItem.set_threshold(slider1.value) + } + } + + Row { + id: rowSlider2 + anchors.left: parent.left + anchors.top: rowSlider1.bottom + anchors.margins: 10 + + Label { + text: "Smoothing: " + slider2.value.toFixed(0) + width: 120 + } + Slider { + id: slider2 + width: 300 + from: 0 + to: 100 + value: 50 + onValueChanged: videoItem.set_smoothing(slider2.value) + } + } + + Row { + id: rowSlider3 + anchors.top: rowSlider2.bottom + anchors.left: parent.left + anchors.margins: 10 + + Label { + text: "Update ms: " + slider3.value.toFixed(0) + width: 120 + } + Slider { + id: slider3 + width: 300 + from: 0 + to: 1000 + value: 30 + onValueChanged: videoItem.set_speed(value) + } + } + + Row { + id: rowSlider4 + anchors.top: rowSlider3.bottom + anchors.left: parent.left + anchors.margins: 10 + + Label { + text: "Mix spread: " + slider4.value.toFixed(0) + width: 120 + } + Slider { + id: slider4 + width: 300 + from: 1 + to: 100 + value: 10 + onValueChanged: videoItem.set_sigma(value) + } + } + + TextField { + id: textBox1 + anchors.top: videoItem.bottom + anchors.right: parent.right + anchors.margins: 10 + width: parent.width + text: "/ALL/ping" + placeholderText: "Enter OSC path" + + Layout.margins: 10 + + Keys.onReturnPressed: { + videoItem.set_manual_osc_path(textBox1.text) + } + } + + TextField { + id: textBox2 + anchors.top: textBox1.bottom + anchors.right: parent.right + anchors.margins: 10 + width: parent.width + placeholderText: "Enter OSC msg" + text: "60,60,60" + + Layout.margins: 10 + + Keys.onReturnPressed: { + // Handle Enter key pressed event + // console.log("Enter key pressed:", textBox2.text) + videoItem.send_manual_osc(textBox1.text, textBox2.text) + } + } + + // Button + // Button { + // id: toggleButton + // anchors.top: 
textBox2.bottom
+        //     anchors.right: parent.right
+        //     anchors.margins: 10
+        //     text: "Toggle"
+        //     onClicked: {
+        //         // Handle button click event
+        //         console.log("Button clicked")
+        //         videoItem.toggle_mode()
+        //     }
+        // }
+        CheckBox {
+            id: chechbox0
+            anchors.top: textBox2.bottom
+            anchors.right: parent.right
+            anchors.margins: 10
+
+            checked: true
+            text: qsTr("Send shifts")
+            onClicked: {
+                videoItem.set_sending_shifts(checked)
+            }
+        }
+
+        RowLayout {
+            id: checkboxRow1
+            // FIX: this row was anchored to toggleButton.bottom, but the
+            // Button with id toggleButton is commented out above, leaving a
+            // dangling reference. Anchor to the "Send shifts" checkbox instead.
+            anchors.top: chechbox0.bottom
+            anchors.left: parent.left
+            anchors.margins: 10
+            CheckBox {
+                checked: false
+                text: qsTr("Mod volume")
+                onClicked: {
+                    // console.log("Checkbox clicked:", text, checked)
+                    videoItem.set_mod_volume(checked)
+                }
+            }
+            CheckBox {
+                checked: false
+                text: qsTr("Pan override")
+                onClicked: {
+                    videoItem.set_pan_override(checked)
+                }
+            }
+            CheckBox {
+                checked: false
+                text: qsTr("Cutoff override")
+                onClicked: {
+                    // NOTE(review): VideoItem does not appear to define a
+                    // set_cutoff_override slot — confirm against
+                    // video_tracking.py or this call will warn at runtime.
+                    videoItem.set_cutoff_override(checked)
+                }
+            }
+        }
+        RowLayout {
+            id: checkboxRow2
+            anchors.top: checkboxRow1.bottom
+            anchors.left: parent.left
+            anchors.margins: 10
+            CheckBox {
+                checked: false
+                text: qsTr("Auto circle panning")
+                onClicked: {
+                    videoItem.set_auto_circ_pan(checked)
+                }
+            }
+            CheckBox {
+                checked: false
+                text: qsTr("Camera arp")
+                onClicked: {
+                    videoItem.set_camera_arp(checked)
+                }
+            }
+        }
+
+        // Rectangle {
+        //     anchors.bottom: parent.bottom
+        //     Layout.column: 0
+        //     width: 640
+        //     height: 400
+
+        //     ChartView {
+        //         id: chartView1
+        //         title: "Line"
+        //         anchors.fill: parent
+        //         legend.alignment: Qt.AlignBottom
+        //         antialiasing: true
+
+        //         property var chartObject: chartData.getChart()
+
+        //         Component.onCompleted: {
+        //             // Set the chart as the main chart object
+        //             chartView.chart = chartObject
+        //         }
+
+        //         onWidthChanged: chartObject.width = width
+        //         onHeightChanged: chartObject.height = height
+        //     }
+        // }
+    }
+}
diff --git a/pieces/Video_Ctrl/video_tracking.py b/pieces/Video_Ctrl/video_tracking.py
new file
mode 100644 index 0000000..b326c70 --- /dev/null +++ b/pieces/Video_Ctrl/video_tracking.py @@ -0,0 +1,122 @@ +import sys +import cv2 +from PyQt6.QtCore import Qt, QTimer, QUrl, pyqtSlot, QObject +from PyQt6.QtGui import QImage, QPixmap, QPainter +from PyQt6.QtQml import QQmlApplicationEngine, qmlRegisterType +from PyQt6.QtWidgets import QApplication +from PyQt6.QtQuick import QQuickPaintedItem +import asyncio + +from PyQt6.QtCharts import QLineSeries, QChart, QValueAxis +import psutil + +from video_tracking_obj import VideoTracking + +class VideoItem(QQuickPaintedItem): + engine = None + + def __init__(self, parent=None): + super().__init__(parent) + self.pixmap = None + + #def start(self): + self.slider_value = 50 + + #self.video_capture = cv2.VideoCapture(0) # Replace with your desired video source + + self.vt = VideoTracking() + asyncio.run(self.vt.init()) + + self.timer = QTimer(self) + self.timer.timeout.connect(self.update_frame) + self.timer.start(30) # Update every 30 milliseconds + + def update_frame(self): + frame = self.vt.update() + + if frame != "": + frame_rgb = cv2.cvtColor(frame, cv2.COLOR_GRAY2RGB)# cv2.COLOR_BGR2RGB) + image = QImage( + frame_rgb.data, + frame_rgb.shape[1], + frame_rgb.shape[0], + QImage.Format.Format_RGB888 + ) + + pixmap = QPixmap.fromImage(image) + self.pixmap = pixmap.scaled(int(self.width()), int(self.height()), Qt.AspectRatioMode.KeepAspectRatio) + + self.update() + + def paint(self, painter): + if self.pixmap: + painter.drawPixmap(0, 0, self.pixmap) + + @pyqtSlot(int) + def set_threshold(self, value): + # print(f"Slider value changed: {value}") + self.vt.set_threshold(value) + + @pyqtSlot(int) + def set_smoothing(self, value): + # print(f"Slider value changed: {value}") + self.vt.set_smoothing(value) + + @pyqtSlot(int) + def set_speed(self, value): + # print(f"Slider value changed: {value}") + self.timer.setInterval(value) + + @pyqtSlot(int) + def set_sigma(self, value): + # print(f"Slider value changed: {value}") + 
self.vt.set_sigma(value) + + @pyqtSlot() + def toggle_mode(self): + print("Toggle mode") + + @pyqtSlot(str) + def set_manual_osc_path(self, value): + print(f"Setting OSC path: {value}") + + @pyqtSlot(str, str) + def send_manual_osc(self, path, msg): + print(f"Sending OSC msg: {msg} to path: {path}") + self.vt.send_osc_msg(self.vt.transport, [path, msg.split(",")]) + + @pyqtSlot(bool) + def set_mod_volume(self, value): + # print(f"Setting mod volume: {value}") + self.vt.set_mod_volume(value) + + @pyqtSlot(bool) + def set_pan_override(self, value): + self.vt.set_pan_override(value) + + @pyqtSlot(bool) + def set_auto_circ_pan(self, value): + self.vt.set_auto_circ_pan(value) + + @pyqtSlot(bool) + def set_camera_arp(self, value): + self.vt.set_camera_arp(value) + + @pyqtSlot(bool) + def set_sending_shifts(self, value): + self.vt.set_sending_shifts(value) + +if __name__ == "__main__": + app = QApplication(sys.argv) + + # Register the VideoItem type with the QML engine + qmlRegisterType(VideoItem, "VideoItem", 1, 0, "VideoItem") + + engine = QQmlApplicationEngine() + + engine.load(QUrl.fromLocalFile("main.qml")) + + if not engine.rootObjects(): + sys.exit(-1) + + sys.exit(app.exec()) diff --git a/pieces/Video_Ctrl/video_tracking_obj.py b/pieces/Video_Ctrl/video_tracking_obj.py new file mode 100644 index 0000000..031e095 --- /dev/null +++ b/pieces/Video_Ctrl/video_tracking_obj.py @@ -0,0 +1,333 @@ +import numpy as np +from scipy.stats import norm +import cv2 +import asyncio +import time +from pythonosc.osc_message_builder import OscMessageBuilder +import logging +from typing import Tuple, Union + +from pythonosc.osc_message import OscMessage +from pythonosc.osc_bundle import OscBundle +from p2psc.peerRegistry import PeerRegistry +from p2psc.peerInfo import PeerInfo, PeerType +from p2psc import proto + +from ansible.inventory.manager import InventoryManager + +class VideoTracking(): + def __init__(self): + self._registry = PeerRegistry("video_tracking") + + async def 
init(self): + # Create the UDP socket + self.transport, _ = await asyncio.get_event_loop().create_datagram_endpoint( + lambda: OscProtocolUdp(self), local_addr=('0.0.0.0', 57122), remote_addr=("127.0.0.1", 3765)) + + # Register with the P2PSC + self.connect() + + self.cap = cv2.VideoCapture(0) #4 + + # Read nodes from Ansible hosts file + hosts_file = '../../hosts' + + # Create an inventory manager + inventory = InventoryManager(loader=None, sources=hosts_file) + + # Access groups + all_groups = inventory.get_groups_dict() + group_name = 'active_sprawl_nodes' + self.nodes = all_groups[group_name] + + # Iterate over hosts + for host in self.nodes: + print(f'Group: {group_name}, Host: {host}') + + # Init vars + self.last_image = "" + self.threshold = 100 + self.smoothing = 0.9 + self.circ_x = 0 + self.circ_y = 0 + self.rad = 0 + + self.sigma = 1 + self.mod_volume = False + self.pan_override = False + self.auto_circ_pan = False + self.camera_arp = False + self.sending_shifts = True + self.auto_pan_val = 0.0 + self.bpm = 120 + self.new_bpm_time = time.time() + + # Array with pentatonic scales in Hz: C, D, E, G, A + self.pentatonics = np.array([261.63, 293.66, 329.63, 392.00, 440.00]) + self.chord_threshold = 0.01 + self.chord_detect_avg = np.array([0.0, 0.0, 0.0, 0.0, 0.0]) + self.chord_progression = np.array([0, 1, 1, 4]) + self.progression_index = 0 + + def connect(self): + self.send_osc_msg(self.transport, ["/p2psc/peerinfo", [1, "T1 T2", "/test1 /test2"]]) + self.send_osc_msg(self.transport, ["/p2psc/name", ['video_tracking']]) + self.send_osc_msg(self.transport, ["/p2psc/peernames", []]) + + def update(self): + # Capture frame-by-frame + ret, frame = self.cap.read() + + # Our operations on the frame come here + gray = frame #cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) + + frame_dilated = "" + + # Loop through pentatonic scale and play notes if next bpm timestep is reached + now = time.time() + if (now > self.new_bpm_time and len(self.chord_progression) > 0 and 
self.camera_arp == True): + self.new_bpm_time = now + (60/self.bpm) + + # get index of next note in pentatonic scale and skip if 0 while looping over 5 steps + for i in range(len(self.chord_progression)): + self.progression_index += 1 + if (self.progression_index >= len(self.chord_progression)): + self.progression_index = 0 + if (self.chord_progression[self.progression_index] != 0): + break + + note = self.chord_progression[self.progression_index] + # print(f"Progression index: {self.progression_index}") + # print(f"Progression: {self.chord_progression}") + + if (note != 0): + print(f"Playing note: {note}") + self.send_osc_msg(self.transport, ["/ALL/synth", [self.progression_index, *self.chord_progression]]) + + + + if (self.last_image != ""): + frame_diff = cv2.absdiff(gray, self.last_image) + + frame_diff = cv2.cvtColor(frame_diff, cv2.COLOR_BGR2GRAY) + + ret, frame_thres = cv2.threshold(frame_diff, self.threshold, 255, cv2.THRESH_BINARY) + + frame_dilated = cv2.dilate(frame_thres, None, iterations=2) + + # Calc number of white pixels + white_pixels = cv2.countNonZero(frame_dilated) + + # print('white pixels:', white_pixels) + + # do canny edge detection + # canny = cv2.Canny(frame_dilated, 100, 200) + canny = frame_dilated + + # Split image into 5 vertical sections + section_width = int(gray.shape[1]/5) + self.chord_progression = np.array([]) + for i in range(len(self.chord_detect_avg)): + val = cv2.countNonZero(canny[:, i*section_width:(i+1)*section_width])/(section_width*gray.shape[0]) + # print(f"Section {i}: {val}") + adapt_speed = self.smoothing + self.chord_detect_avg[i] = adapt_speed*self.chord_detect_avg[i] + (1-adapt_speed)*val + if (self.chord_detect_avg[i] > self.chord_threshold): + note = self.pentatonics[i] + else: + note = 0.0 + self.chord_progression = np.append(self.chord_progression, note) + + # print(self.chord_detect_avg) + # print(self.chord_progression) + + # get canny points + # numpy points are (y,x) + points = np.argwhere(canny>0) + + # 
get min enclosing circle + center, radius = cv2.minEnclosingCircle(points) + # print('center:', center, 'radius:', radius) + + # Apply smoothing to the x,y and radius + if (center[0] != 0 and center[1] != 0 and radius != 0): + self.circ_x = self.smoothing*self.circ_x + (1-self.smoothing)*center[1] + self.circ_y = self.smoothing*self.circ_y + (1-self.smoothing)*center[0] + self.rad = 0.9*self.rad + 0.1*radius + + # Scale circ_x and circ_y to 0-100 + circ_x_perc = int(self.circ_x/gray.shape[1]*100) + circ_y_perc = int(self.circ_y/gray.shape[0]*100) + + rad_perc = int(self.rad/max(gray.shape[0], gray.shape[1])*100) + + rad_perc = int(rad_perc/100*90+10)*2 + if (rad_perc > 100): + rad_perc = 100 + + # self.send_osc_msg(self.transport, ["/ALL/shiftmix", [circ_x_perc, circ_y_perc, rad_perc]]) + #print(f"Sending OSC msg: {circ_x_perc}, {circ_y_perc}, {rad_perc}") + + # Calculate new volume distribution + if (self.auto_circ_pan == True): + pan_val = self.auto_pan_val + 0.01 + if (pan_val > 1.0): + pan_val = 0.0 + self.auto_pan_val = pan_val + elif (self.pan_override == True): + pan_val = self.smoothing + else: + pan_val = circ_x_perc/100.0 + + new_vols = self.get_vol_distr(len(self.nodes), pan_val) + + # Debug option + # new_vols = self.get_vol_distr(4, pan_val) + # self.nodes = ['ALL'] + + # Send shifts of gain list to nodes + if (self.sending_shifts == True): + for i in range(len(self.nodes)): + self.send_osc_msg(self.transport, [f"/{self.nodes[i]}/shiftmix", [circ_y_perc, (rad_perc if self.mod_volume == True else 100), *np.roll(new_vols, i)]]) + + cv2.circle(frame_dilated, (int(self.circ_x),int(self.circ_y)), int(self.rad), (255,255,255), 1) + + self.last_image = gray + + return frame_dilated + + def __del__(self): + self.cap.release() + self.transport.close() + + def get_vol_distr(self, number, shift): + values = np.linspace(-number, number, 200) # Generate values within a range + pmf = norm.pdf(values, 0, self.sigma) + vals = np.array_split(np.roll(pmf, 
int(np.round(shift*len(pmf)))), number) + + new_vols = [] + for i in range(number): + new_vols.append(np.sum(vals[i])) + + norm_new_vols = new_vols / np.max(new_vols) + return np.round(norm_new_vols,3) + + def set_threshold(self, val): + print(f"Setting threshold: {val}") + self.threshold = val + + def set_smoothing(self, val): + print(f"Setting smoothing: {val}") + self.smoothing = val/100.0 + + def set_sigma(self, val): + print(f"Setting sigma: {val}") + self.sigma = val/10.0 + + def set_mod_volume(self, val): + print(f"Setting mod volume: {val}") + self.mod_volume = val + + def set_pan_override(self, val): + print(f"Setting pan override: {val}") + self.pan_override = val + + def set_auto_circ_pan(self, val): + print(f"Setting auto circ pan: {val}") + self.auto_circ_pan = val + + def set_camera_arp(self, val): + print(f"Setting camera arp: {val}") + self.camera_arp = val + + def set_sending_shifts(self, val): + print(f"Setting sending shifts: {val}") + self.sending_shifts = val + + def osc_message(self, path, args): + mb = OscMessageBuilder(path) + for a in args: + mb.add_arg(a) + return mb.build() + + def osc_dgram(self, path, args): + mb = OscMessageBuilder(path) + for a in args: + mb.add_arg(a) + return mb.build().dgram + + def send_osc_msg(self, transport, msg): + # Serialize the message using pickle + # serialized_message = pickle.dumps(msg) + + transport.sendto(self.osc_dgram(msg[0], msg[1])) + + + async def on_osc(self, addr: Tuple[str, int], message: Union[OscBundle, OscMessage]): + """ + Handle incoming OSC messages + """ + if type(message) == OscBundle: + logging.error("OSC Bundle messages are not supported yet!") + return + + # Peerinfo messages are handled locally + if proto.get_group_from_path(message.address) == proto.P2PSC_PREFIX: + self._handle_local(addr, message) + return + + # All other messages are forwarded to clients/nodes depending on sender + try: + peer_type = self._registry.get_peer(addr).type # type: PeerInfo + except LookupError: + # 
If we don't know the peer we simply assume it is a client requesting us to forward the message + # TODO: Any implications here?! + peer_type = PeerType.client + + # Messages from clients are only forwarded to nodes + if peer_type == PeerType.client: + for pi in self._registry.get_by_path(message.address, filter_type=PeerType.node): + logging.info( + f"Forwarding {message.address} {message.params} to {pi.addr}") + self._transport.sendto(message.dgram, pi.addr) + else: # Messages from nodes are only forwarded to clients + # remove group from path + m = proto.osc_dgram(proto.remove_group_from_path( + message.address), message.params) + for pi in self._registry.get_by_path(message.address, filter_type=PeerType.client): + self._transport.sendto(m, pi.addr) + +class OscProtocolUdp(asyncio.DatagramProtocol): + def __init__(self, handler): + self._handler = handler + self._transport = None # type: asyncio.DatagramTransport + + def datagram_received(self, dgram, addr): + """Called when a UDP message is received """ + + # Parse OSC message + try: + if OscBundle.dgram_is_bundle(dgram): + msg = OscBundle(dgram) + elif OscMessage.dgram_is_message(dgram): + msg = OscMessage(dgram) + else: + raise # Invalid message + except: + logging.warning(f"Received invalid OSC from {addr}") + return + + asyncio.ensure_future(self._handler.on_osc(addr, msg)) + + def connection_made(self, transport): + self._transport = transport + + def connection_lost(self, exc): + logging.info(f"Connection lost: {str(exc)}") + self._transport = None + +if __name__ == "__main__": + logging.basicConfig(level=logging.INFO) + vt = VideoTracking() + + asyncio.run(vt.main())