Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 16 additions & 24 deletions result.py → ...ytics/generate_academic_latency_report.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,70 +2,62 @@
import matplotlib.pyplot as plt
import os

# --- 1. Dynamic Path Setup ---
# Resolve all paths relative to this script's location so the report can be
# generated from any working directory.
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT_DIR = os.path.dirname(SCRIPT_DIR)

csv_path = os.path.join(ROOT_DIR, "logs", "latency_results.csv")
output_dir = os.path.join(ROOT_DIR, "stats", "Objective 1 Performance")

# exist_ok=True avoids the race between the existence check and creation.
os.makedirs(output_dir, exist_ok=True)

def generate_consolidated_objective_1_report():
    """
    Generate a single consolidated graph validating Objective 1.

    Success criteria visualised:
    - Processing latency below 100 ms (dashed red threshold line).
    - Real-time frame rate at or above 30 FPS (dotted green target line).

    Reads per-sample processing times from the module-level ``csv_path`` and
    writes the annotated figure into ``output_dir``. Prints an error and
    returns early when the log file is missing.
    """
    if not os.path.exists(csv_path):
        print(f"Error: {csv_path} not found. Run the game to generate logs first!")
        return

    df = pd.read_csv(csv_path)

    # Derive FPS from the recorded per-frame processing time so frame rate is
    # measured from the same samples as latency ('Measurable' criteria).
    df['RealTime_FPS'] = 1 / df['Proc_Time']

    # Twin axes: latency on the left, FPS on the right, shared sample index.
    fig, ax1 = plt.subplots(figsize=(12, 8))

    # --- PRIMARY AXIS: Latency (Seconds) ---
    color_lat = '#1f77b4'
    ax1.set_xlabel('Punch Sample Sequence (Time)', fontsize=12)
    ax1.set_ylabel('Processing Latency (Seconds)', color=color_lat, fontsize=12, fontweight='bold')
    ax1.plot(df.index, df['Proc_Time'], color=color_lat, linewidth=2.5, label='Measured Latency')

    # CRITICAL: target threshold line (100 ms) per Objective 1.
    ax1.axhline(y=0.1, color='#d62728', linestyle='--', linewidth=2, label='Max Target (100ms)')
    ax1.tick_params(axis='y', labelcolor=color_lat)
    ax1.set_ylim(0, 0.15)  # focused view around the 100 ms threshold
    ax1.grid(True, linestyle=':', alpha=0.5)

    # --- SECONDARY AXIS: Frame Rate (FPS) ---
    ax2 = ax1.twinx()
    color_fps = '#2ca02c'
    ax2.set_ylabel('Frame Rate (FPS)', color=color_fps, fontsize=12, fontweight='bold')
    ax2.plot(df.index, df['RealTime_FPS'], color=color_fps, linestyle='-', alpha=0.4, label='Real-time FPS')

    # Target 30 FPS line per the SMART criteria.
    ax2.axhline(y=30, color='#1b5e20', linestyle=':', linewidth=2, label='Target 30 FPS')
    ax2.tick_params(axis='y', labelcolor=color_fps)
    ax2.set_ylim(0, 60)

    # --- Annotations & Styling ---
    plt.title('Objective 1 Validation: Vision Pipeline Efficiency\n(Python/OpenCV Markerless Tracking)', pad=20, fontsize=14)

    # Merge the legends of both axes into a single box.
    lines1, labels1 = ax1.get_legend_handles_labels()
    lines2, labels2 = ax2.get_legend_handles_labels()
    ax1.legend(lines1 + lines2, labels1 + labels2, loc='upper left', frameon=True, shadow=True)

    # Objective summary text box with the headline averages.
    avg_lat = df['Proc_Time'].mean() * 1000
    avg_fps = df['RealTime_FPS'].mean()
    summary_text = f"Avg Latency: {avg_lat:.1f}ms\nAvg FPS: {avg_fps:.1f}"
    # Anchor on ax1 explicitly: after twinx(), plt.gca() is ax2, so relying on
    # gca() worked only because the transform was passed explicitly.
    ax1.text(0.98, 0.02, summary_text, transform=ax1.transAxes,
             bbox=dict(facecolor='white', alpha=0.8), ha='right', fontsize=10)

    fig.tight_layout()
    output_file = os.path.join(output_dir, 'academic_latency_graph.png')
    plt.savefig(output_file)
    plt.close(fig)  # release the figure so repeated runs don't accumulate memory
    print(f"Success: Academic graph saved securely to {output_file}")

# Script entry point: the diff residue left two duplicate banner prints here;
# keep only the current ("Academic") one.
if __name__ == "__main__":
    print("--- Generating Academic Objective 1 Audit ---")
    generate_consolidated_objective_1_report()
153 changes: 153 additions & 0 deletions analytics/generate_color_mapping.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,153 @@
import cv2 as cv
import numpy as np
import os
import pandas as pd
import matplotlib.pyplot as plt

# =========================
# OUTPUT FOLDER
# =========================
base_folder = "stats"
output_folder = os.path.join(base_folder, "Pseudocolor Mapping Analysis")

# exist_ok=True avoids the race between the existence check and creation.
os.makedirs(output_folder, exist_ok=True)

# =========================
# MAIN FUNCTION
# =========================
def analyze_pseudocolor_mapping(video_source=0):
    """
    Validate the pseudocolor (heatmap) mapping of frame-difference motion.

    Captures up to 300 frames from *video_source*, measures per-frame motion
    (thresholded absolute difference of blurred grayscale frames) and the
    average hue of the JET-colormapped motion mask, then writes into
    ``output_folder``:
    - mapping_results.csv   (Motion, Hue per frame)
    - mapping_summary.txt   (correlation verdict)
    - mapping_scatter.png   (scatter plot with linear trend line)

    Press ESC in the preview windows to stop capturing early.
    """
    cap = cv.VideoCapture(video_source)
    if not cap.isOpened():
        print("Error: Cannot open video source.")
        return

    prev_gray = None
    results = []

    frame_count = 0
    max_frames = 300  # limit for testing

    # Hoisted out of the loop: the morphology kernel is loop-invariant.
    kernel = np.ones((3, 3), np.uint8)

    while frame_count < max_frames:
        ret, frame = cap.read()
        if not ret:
            break

        # --- Preprocessing: grayscale + blur to suppress sensor noise ---
        gray = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
        gray = cv.GaussianBlur(gray, (5, 5), 0)

        # --- Motion detection: frame differencing needs a previous frame ---
        if prev_gray is None:
            prev_gray = gray
            continue

        diff = cv.absdiff(prev_gray, gray)
        _, mask = cv.threshold(diff, 25, 255, cv.THRESH_BINARY)

        # Morphological opening removes isolated noise pixels.
        mask = cv.morphologyEx(mask, cv.MORPH_OPEN, kernel)

        prev_gray = gray

        # --- Pseudocolor (heatmap) rendering of the motion mask ---
        heatmap = cv.applyColorMap(mask, cv.COLORMAP_JET)

        # --- Analysis: motion magnitude vs. mean hue of the heatmap ---
        motion = cv.countNonZero(mask)
        hsv = cv.cvtColor(heatmap, cv.COLOR_BGR2HSV)
        avg_hue = hsv[:, :, 0].mean()
        results.append((motion, avg_hue))

        frame_count += 1

        # Optional live preview; ESC (27) stops the capture loop.
        cv.imshow("Mask", mask)
        cv.imshow("Heatmap", heatmap)
        if cv.waitKey(1) & 0xFF == 27:
            break

    cap.release()
    cv.destroyAllWindows()

    # =========================
    # SAVE RESULTS
    # =========================
    df = pd.DataFrame(results, columns=["Motion", "Hue"])

    csv_path = os.path.join(output_folder, "mapping_results.csv")
    df.to_csv(csv_path, index=False)

    # =========================
    # SIMPLE SUMMARY
    # =========================
    # Guard: correlation is undefined for fewer than 2 samples; NaN compares
    # False against the thresholds below, yielding the "Weak" verdict.
    corr = df.corr().iloc[0, 1] if len(df) > 1 else float("nan")

    summary_path = os.path.join(output_folder, "mapping_summary.txt")
    with open(summary_path, "w") as f:
        f.write("PSEUDOCOLOR MAPPING ANALYSIS\n\n")
        f.write(f"Total Frames: {len(df)}\n")
        f.write(f"Correlation (Motion vs Hue): {corr:.4f}\n\n")

        if corr < -0.3:
            f.write("Strong inverse relationship (Correct mapping)\n")
        elif corr < -0.1:
            f.write("Moderate relationship\n")
        else:
            f.write("Weak relationship (Needs improvement)\n")

    print("✅ Analysis completed")
    print(f"CSV saved at: {csv_path}")
    print(f"Summary saved at: {summary_path}")

    # =========================
    # GENERATE SCATTER PLOT
    # =========================
    # Optional: remove zero-motion frames for clearer visualization.
    df_plot = df[df["Motion"] > 0]

    plt.figure(figsize=(10, 6))
    plt.scatter(df_plot["Motion"], df_plot["Hue"], alpha=0.6)

    # A degree-1 trend line needs at least two points.
    if len(df_plot) > 1:
        z = np.polyfit(df_plot["Motion"], df_plot["Hue"], 1)
        p = np.poly1d(z)
        plt.plot(df_plot["Motion"], p(df_plot["Motion"]))

    plt.xlabel("Motion Intensity")
    plt.ylabel("Hue Value")
    plt.title("Pseudocolor Mapping: Motion vs Hue")
    plt.grid()

    chart_path = os.path.join(output_folder, "mapping_scatter.png")
    plt.savefig(chart_path)
    plt.close()

    print(f"✅ Chart saved at: {chart_path}")


# =========================
# RUN (script entry point)
# =========================
if __name__ == "__main__":
    # Default source 0 selects the first attached camera.
    analyze_pseudocolor_mapping()
111 changes: 111 additions & 0 deletions analytics/generate_latency_report.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,111 @@
import pandas as pd
import matplotlib.pyplot as plt
import os
import numpy as np

# =========================
# PATH SETUP
# =========================
# NOTE(review): these paths are relative to the current working directory,
# unlike the academic report script which resolves against the script
# location — run this from the project root, or confirm the intended
# convention.
csv_path = os.path.join("logs", "test_results.csv")

base_folder = "stats"
stats_folder = os.path.join(base_folder, "Real-Time Pipeline Performance")

# exist_ok=True avoids the race between the existence check and creation.
os.makedirs(stats_folder, exist_ok=True)

# =========================
# MAIN FUNCTION
# =========================
def generate_consolidated_objective_1_report():
    """
    Generate the real-time pipeline performance report.

    Reads per-frame processing times from the module-level ``csv_path``,
    derives latency (ms) and FPS, then writes summary statistics (mean,
    standard deviation, 99th percentile) to a text file and a dual-axis
    latency/FPS graph to ``stats_folder``. Prints an error and returns early
    when the log file is missing.
    """
    if not os.path.exists(csv_path):
        print(f"Error: {csv_path} not found.")
        return

    df = pd.read_csv(csv_path)

    # === Derived metrics ===
    df['Latency_ms'] = df['Proc_Time'] * 1000
    df['FPS'] = 1 / df['Proc_Time']

    # === Statistics ===
    mean = df['Latency_ms'].mean()
    std = df['Latency_ms'].std()
    p99 = np.percentile(df['Latency_ms'], 99)

    print("\n=== REAL-TIME PIPELINE PERFORMANCE ===")
    print(f"Mean Latency: {mean:.2f} ms")
    print(f"Std Dev: {std:.2f}")
    print(f"99th Percentile: {p99:.2f} ms")

    # =========================
    # SAVE STATS
    # =========================
    stats_file = os.path.join(stats_folder, "latency_statistics.txt")
    with open(stats_file, "w", encoding="utf-8") as f:
        f.write("REAL-TIME PIPELINE PERFORMANCE\n\n")
        f.write(f"Mean Latency: {mean:.2f} ms\n")
        f.write(f"Standard Deviation: {std:.2f}\n")
        f.write(f"99th Percentile: {p99:.2f} ms\n")

    # =========================
    # PLOT GRAPH
    # =========================
    fig, ax1 = plt.subplots(figsize=(12, 8))

    # Latency on the primary (left) axis.
    ax1.plot(df.index, df['Latency_ms'], linewidth=2, label='Latency (ms)')
    ax1.set_xlabel("Frame Number")
    ax1.set_ylabel("Latency (ms)")

    # Reference thresholds: 30 FPS frame budget and hard latency ceiling.
    ax1.axhline(33.3, linestyle='--', label='30 FPS Threshold (33.3 ms)')
    ax1.axhline(100, linestyle='--', label='Maximum Limit (100 ms)')

    # Series.max() instead of builtin max(): NaN-safe and idiomatic pandas.
    ax1.set_ylim(0, df['Latency_ms'].max() * 1.2)

    # FPS on the secondary (right) axis.
    ax2 = ax1.twinx()
    ax2.plot(df.index, df['FPS'], alpha=0.3, label='FPS')
    ax2.axhline(30, linestyle=':', label='30 FPS Target')
    ax2.set_ylabel("FPS")

    # Merge the legends of both axes into a single box.
    lines1, labels1 = ax1.get_legend_handles_labels()
    lines2, labels2 = ax2.get_legend_handles_labels()
    ax1.legend(lines1 + lines2, labels1 + labels2)

    # Summary text box with the headline statistics.
    summary = (
        f"Mean: {mean:.2f} ms\n"
        f"Std: {std:.2f}\n"
        f"99%: {p99:.2f} ms"
    )
    # Anchor on ax1 explicitly: after twinx(), plt.gca() is ax2, so relying on
    # gca() worked only because the transform was passed explicitly.
    ax1.text(0.98, 0.02, summary,
             transform=ax1.transAxes,
             bbox=dict(facecolor='white', alpha=0.8),
             ha='right')

    plt.title("Real-Time Pipeline Performance")
    plt.tight_layout()

    # =========================
    # SAVE GRAPH
    # =========================
    output_path = os.path.join(stats_folder, "latency_performance_graph.png")
    plt.savefig(output_path)

    plt.show()
    plt.close(fig)  # release the figure once the interactive window is closed

    print(f"✅ Graph saved at: {output_path}")
    print(f"✅ Stats saved at: {stats_file}")


# =========================
# RUN (script entry point)
# =========================
if __name__ == "__main__":
    generate_consolidated_objective_1_report()
Loading
Loading