Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit a8ac309

Browse files
committed
feat: Add Three.js visualization entry point and data processor
Add viz.html as the main entry point that loads Three.js from CDN and orchestrates all visualization components (scene, body model, signal viz, environment, HUD). Add data-processor.js that transforms API WebSocket messages into geometry updates and provides demo mode with pre-recorded pose cycling when the server is unavailable. https://claude.ai/code/session_01Ki7pvEZtJDvqJkmyn6B714
1 parent dd38282 commit a8ac309

2 files changed

Lines changed: 734 additions & 0 deletions

File tree

ui/services/data-processor.js

Lines changed: 380 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,380 @@
// Data Processor - WiFi DensePose 3D Visualization
// Transforms API data into Three.js geometry updates

export class DataProcessor {
  constructor() {
    // Demo mode state
    this.demoMode = false;
    this.demoElapsed = 0;
    this.demoPoseIndex = 0;
    this.demoPoseCycleTime = 4; // seconds per pose transition

    // Pre-recorded demo poses (COCO 17-keypoint format, normalized [0,1])
    // Each pose: array of {x, y, confidence} for 17 keypoints
    this.demoPoses = this._buildDemoPoses();

    // Smoothing buffers
    this._lastProcessedPersons = [];
    this._smoothingFactor = 0.3;
  }

  /**
   * Process an incoming WebSocket message into visualization-ready data.
   * @param {object|null} message - Raw API message; only type 'pose_data' is handled.
   * @returns {object|null} {persons, zoneOccupancy, signalData, metadata}, or null
   *   when message is falsy. Unknown message types yield the empty result shape.
   */
  processMessage(message) {
    if (!message) return null;

    const result = {
      persons: [],
      zoneOccupancy: {},
      signalData: null,
      metadata: {
        isRealData: false,
        timestamp: null,
        processingTime: 0,
        frameId: null,
        sensingMode: 'Mock'
      }
    };

    // Handle different message types from the API
    if (message.type === 'pose_data') {
      // Some senders wrap the payload in `data`, others in `payload`
      const payload = message.data ?? message.payload;
      if (payload) {
        result.persons = this._extractPersons(payload);
        result.zoneOccupancy = this._extractZoneOccupancy(payload, message.zone_id);
        result.signalData = this._extractSignalData(payload);

        result.metadata.isRealData = payload.metadata?.mock_data === false;
        result.metadata.timestamp = message.timestamp;
        result.metadata.processingTime = payload.metadata?.processing_time_ms ?? 0;
        result.metadata.frameId = payload.metadata?.frame_id;

        // Determine sensing mode: explicit source wins; otherwise anything not
        // positively marked as real data is treated as Mock.
        if (payload.metadata?.source === 'csi') {
          result.metadata.sensingMode = 'CSI';
        } else if (payload.metadata?.source === 'rssi') {
          result.metadata.sensingMode = 'RSSI';
        } else if (payload.metadata?.mock_data !== false) {
          result.metadata.sensingMode = 'Mock';
        } else {
          result.metadata.sensingMode = 'CSI';
        }
      }
    }

    return result;
  }

  // Normalize one raw person record; shared by both payload layouts.
  // `??` (not `||`) so valid falsy values (id 0, confidence 0) survive.
  _toPerson(person, index) {
    return {
      id: person.id ?? `person_${index}`,
      confidence: person.confidence ?? 0,
      keypoints: this._normalizeKeypoints(person.keypoints),
      bbox: person.bbox ?? null,
      body_parts: person.densepose_parts ?? person.body_parts ?? null
    };
  }

  // Extract person data with keypoints in COCO format.
  // Supports both payload layouts: nested under payload.pose, or top-level.
  _extractPersons(payload) {
    const raw = payload.pose?.persons ?? payload.persons ?? [];
    return raw.map((person, i) => this._toPerson(person, i));
  }

  // Normalize keypoints to {x, y, confidence} objects in [0,1] range.
  // Accepts [x, y, conf] array triples or {x, y, confidence|score} objects.
  _normalizeKeypoints(keypoints) {
    if (!keypoints || keypoints.length === 0) return [];

    return keypoints.map((kp) => {
      if (Array.isArray(kp)) {
        // `??` keeps an explicit confidence of 0 (|| would coerce it to 0.5)
        return { x: kp[0], y: kp[1], confidence: kp[2] ?? 0.5 };
      }
      return {
        x: kp.x ?? 0,
        y: kp.y ?? 0,
        confidence: kp.confidence ?? kp.score ?? 0.5
      };
    });
  }

  // Extract zone occupancy data: server-provided zone_summary merged with a
  // per-message zone_id head count when persons are present.
  _extractZoneOccupancy(payload, zoneId) {
    const occupancy = {};

    if (payload.zone_summary) {
      Object.assign(occupancy, payload.zone_summary);
    }

    if (zoneId && payload.pose?.persons?.length > 0) {
      occupancy[zoneId] = payload.pose.persons.length;
    }

    return occupancy;
  }

  // Extract signal/CSI data if available; returns null when absent.
  _extractSignalData(payload) {
    const sig = payload.signal_data ?? payload.csi_data;
    if (!sig) return null;
    return {
      amplitude: sig.amplitude ?? null,
      phase: sig.phase ?? null,
      doppler: sig.doppler ?? sig.doppler_spectrum ?? null,
      motionEnergy: sig.motion_energy ?? null
    };
  }

  /**
   * Generate demo data that cycles through the pre-recorded poses, smoothly
   * interpolating between consecutive poses with smoothstep easing.
   * @param {number} deltaTime - Seconds elapsed since the previous call.
   * @returns {object} Same shape as processMessage() output (sensingMode 'Mock').
   */
  generateDemoData(deltaTime) {
    this.demoElapsed += deltaTime;

    const totalPoses = this.demoPoses.length;
    // cycleProgress runs over [0, totalPoses); integer part selects the pose
    const cycleProgress = (this.demoElapsed % (this.demoPoseCycleTime * totalPoses)) / this.demoPoseCycleTime;
    const currentPoseIdx = Math.floor(cycleProgress) % totalPoses;
    const nextPoseIdx = (currentPoseIdx + 1) % totalPoses;
    const t = cycleProgress - Math.floor(cycleProgress); // interpolation factor [0,1]

    // Smooth interpolation between poses
    const smoothT = t * t * (3 - 2 * t); // smoothstep

    const currentPose = this.demoPoses[currentPoseIdx];
    const nextPose = this.demoPoses[nextPoseIdx];

    const interpolatedKeypoints = currentPose.map((kp, i) => {
      const next = nextPose[i];
      return {
        x: kp.x + (next.x - kp.x) * smoothT,
        y: kp.y + (next.y - kp.y) * smoothT,
        // per-keypoint confidence shimmer for visual interest
        confidence: 0.7 + Math.sin(this.demoElapsed * 2 + i * 0.5) * 0.2
      };
    });

    // Simulate overall confidence variation
    const baseConf = 0.65 + Math.sin(this.demoElapsed * 0.5) * 0.2;

    // Determine active zone from the hip midpoint (keypoints 11/12 in COCO)
    const hipX = (interpolatedKeypoints[11].x + interpolatedKeypoints[12].x) / 2;
    let activeZone = 'zone_2';
    if (hipX < 0.35) activeZone = 'zone_1';
    else if (hipX > 0.65) activeZone = 'zone_3';

    return {
      persons: [{
        id: 'demo_person_0',
        confidence: Math.max(0, Math.min(1, baseConf)),
        keypoints: interpolatedKeypoints,
        bbox: null,
        body_parts: this._generateDemoBodyParts(this.demoElapsed)
      }],
      zoneOccupancy: {
        [activeZone]: 1
      },
      signalData: null, // SignalVisualization generates its own demo data
      metadata: {
        isRealData: false,
        timestamp: new Date().toISOString(),
        processingTime: 8 + Math.random() * 5,
        frameId: `demo_${Math.floor(this.demoElapsed * 30)}`,
        sensingMode: 'Mock'
      }
    };
  }

  // Simulate DensePose body-part detection: 24 parts with a wave of varying
  // confidence plus jitter, clamped to [0, 1].
  _generateDemoBodyParts(elapsed) {
    const parts = {};
    for (let i = 1; i <= 24; i++) {
      parts[i] = 0.4 + Math.sin(elapsed * 1.2 + i * 0.5) * 0.3 + Math.random() * 0.1;
      parts[i] = Math.max(0, Math.min(1, parts[i]));
    }
    return parts;
  }

  // Build the pre-recorded pose sequence: normalized COCO 17 keypoints,
  // each {x, y, confidence}. The returned array is the cycle order.
  _buildDemoPoses() {
    // Standing at center
    const standing = [
      { x: 0.50, y: 0.12, confidence: 0.9 }, // 0: nose
      { x: 0.48, y: 0.10, confidence: 0.8 }, // 1: left_eye
      { x: 0.52, y: 0.10, confidence: 0.8 }, // 2: right_eye
      { x: 0.46, y: 0.12, confidence: 0.7 }, // 3: left_ear
      { x: 0.54, y: 0.12, confidence: 0.7 }, // 4: right_ear
      { x: 0.42, y: 0.22, confidence: 0.9 }, // 5: left_shoulder
      { x: 0.58, y: 0.22, confidence: 0.9 }, // 6: right_shoulder
      { x: 0.38, y: 0.38, confidence: 0.85 }, // 7: left_elbow
      { x: 0.62, y: 0.38, confidence: 0.85 }, // 8: right_elbow
      { x: 0.36, y: 0.52, confidence: 0.8 }, // 9: left_wrist
      { x: 0.64, y: 0.52, confidence: 0.8 }, // 10: right_wrist
      { x: 0.45, y: 0.50, confidence: 0.9 }, // 11: left_hip
      { x: 0.55, y: 0.50, confidence: 0.9 }, // 12: right_hip
      { x: 0.44, y: 0.70, confidence: 0.85 }, // 13: left_knee
      { x: 0.56, y: 0.70, confidence: 0.85 }, // 14: right_knee
      { x: 0.44, y: 0.90, confidence: 0.8 }, // 15: left_ankle
      { x: 0.56, y: 0.90, confidence: 0.8 } // 16: right_ankle
    ];

    // Walking - left leg forward
    const walkLeft = [
      { x: 0.50, y: 0.12, confidence: 0.9 },
      { x: 0.48, y: 0.10, confidence: 0.8 },
      { x: 0.52, y: 0.10, confidence: 0.8 },
      { x: 0.46, y: 0.12, confidence: 0.7 },
      { x: 0.54, y: 0.12, confidence: 0.7 },
      { x: 0.42, y: 0.22, confidence: 0.9 },
      { x: 0.58, y: 0.22, confidence: 0.9 },
      { x: 0.40, y: 0.35, confidence: 0.85 },
      { x: 0.60, y: 0.40, confidence: 0.85 },
      { x: 0.42, y: 0.48, confidence: 0.8 },
      { x: 0.56, y: 0.55, confidence: 0.8 },
      { x: 0.45, y: 0.50, confidence: 0.9 },
      { x: 0.55, y: 0.50, confidence: 0.9 },
      { x: 0.40, y: 0.68, confidence: 0.85 },
      { x: 0.58, y: 0.72, confidence: 0.85 },
      { x: 0.38, y: 0.88, confidence: 0.8 },
      { x: 0.56, y: 0.90, confidence: 0.8 }
    ];

    // Walking - right leg forward
    const walkRight = [
      { x: 0.50, y: 0.12, confidence: 0.9 },
      { x: 0.48, y: 0.10, confidence: 0.8 },
      { x: 0.52, y: 0.10, confidence: 0.8 },
      { x: 0.46, y: 0.12, confidence: 0.7 },
      { x: 0.54, y: 0.12, confidence: 0.7 },
      { x: 0.42, y: 0.22, confidence: 0.9 },
      { x: 0.58, y: 0.22, confidence: 0.9 },
      { x: 0.38, y: 0.40, confidence: 0.85 },
      { x: 0.62, y: 0.35, confidence: 0.85 },
      { x: 0.36, y: 0.55, confidence: 0.8 },
      { x: 0.60, y: 0.48, confidence: 0.8 },
      { x: 0.45, y: 0.50, confidence: 0.9 },
      { x: 0.55, y: 0.50, confidence: 0.9 },
      { x: 0.47, y: 0.72, confidence: 0.85 },
      { x: 0.52, y: 0.68, confidence: 0.85 },
      { x: 0.47, y: 0.90, confidence: 0.8 },
      { x: 0.50, y: 0.88, confidence: 0.8 }
    ];

    // Arms raised
    const armsUp = [
      { x: 0.50, y: 0.12, confidence: 0.9 },
      { x: 0.48, y: 0.10, confidence: 0.8 },
      { x: 0.52, y: 0.10, confidence: 0.8 },
      { x: 0.46, y: 0.12, confidence: 0.7 },
      { x: 0.54, y: 0.12, confidence: 0.7 },
      { x: 0.42, y: 0.22, confidence: 0.9 },
      { x: 0.58, y: 0.22, confidence: 0.9 },
      { x: 0.38, y: 0.15, confidence: 0.85 },
      { x: 0.62, y: 0.15, confidence: 0.85 },
      { x: 0.36, y: 0.05, confidence: 0.8 },
      { x: 0.64, y: 0.05, confidence: 0.8 },
      { x: 0.45, y: 0.50, confidence: 0.9 },
      { x: 0.55, y: 0.50, confidence: 0.9 },
      { x: 0.44, y: 0.70, confidence: 0.85 },
      { x: 0.56, y: 0.70, confidence: 0.85 },
      { x: 0.44, y: 0.90, confidence: 0.8 },
      { x: 0.56, y: 0.90, confidence: 0.8 }
    ];

    // Sitting
    const sitting = [
      { x: 0.50, y: 0.22, confidence: 0.9 },
      { x: 0.48, y: 0.20, confidence: 0.8 },
      { x: 0.52, y: 0.20, confidence: 0.8 },
      { x: 0.46, y: 0.22, confidence: 0.7 },
      { x: 0.54, y: 0.22, confidence: 0.7 },
      { x: 0.42, y: 0.32, confidence: 0.9 },
      { x: 0.58, y: 0.32, confidence: 0.9 },
      { x: 0.38, y: 0.45, confidence: 0.85 },
      { x: 0.62, y: 0.45, confidence: 0.85 },
      { x: 0.40, y: 0.55, confidence: 0.8 },
      { x: 0.60, y: 0.55, confidence: 0.8 },
      { x: 0.45, y: 0.55, confidence: 0.9 },
      { x: 0.55, y: 0.55, confidence: 0.9 },
      { x: 0.42, y: 0.58, confidence: 0.85 },
      { x: 0.58, y: 0.58, confidence: 0.85 },
      { x: 0.38, y: 0.90, confidence: 0.8 },
      { x: 0.62, y: 0.90, confidence: 0.8 }
    ];

    // Waving (left hand up, right hand at side)
    const waving = [
      { x: 0.50, y: 0.12, confidence: 0.9 },
      { x: 0.48, y: 0.10, confidence: 0.8 },
      { x: 0.52, y: 0.10, confidence: 0.8 },
      { x: 0.46, y: 0.12, confidence: 0.7 },
      { x: 0.54, y: 0.12, confidence: 0.7 },
      { x: 0.42, y: 0.22, confidence: 0.9 },
      { x: 0.58, y: 0.22, confidence: 0.9 },
      { x: 0.35, y: 0.12, confidence: 0.85 },
      { x: 0.62, y: 0.38, confidence: 0.85 },
      { x: 0.30, y: 0.04, confidence: 0.8 },
      { x: 0.64, y: 0.52, confidence: 0.8 },
      { x: 0.45, y: 0.50, confidence: 0.9 },
      { x: 0.55, y: 0.50, confidence: 0.9 },
      { x: 0.44, y: 0.70, confidence: 0.85 },
      { x: 0.56, y: 0.70, confidence: 0.85 },
      { x: 0.44, y: 0.90, confidence: 0.8 },
      { x: 0.56, y: 0.90, confidence: 0.8 }
    ];

    return [standing, walkLeft, standing, walkRight, armsUp, standing, sitting, standing, waving, standing];
  }

  /**
   * Generate a confidence heatmap (row-major Float32Array, cols*rows) from
   * person hip positions, mapped from normalized [0,1] space into room
   * coordinates centered on the origin. Each cell holds the max Gaussian
   * falloff contribution across all persons.
   * @param {Array} persons - Processed persons (need >= 13 keypoints each).
   * @param {number} cols - Grid columns. @param {number} rows - Grid rows.
   * @param {number} roomWidth - Room x extent. @param {number} roomDepth - Room z extent.
   * @returns {Float32Array} Heatmap of length cols * rows.
   */
  generateConfidenceHeatmap(persons, cols, rows, roomWidth, roomDepth) {
    const positions = (persons || []).map((p) => {
      if (!p.keypoints || p.keypoints.length < 13) return null;
      const hipX = (p.keypoints[11].x + p.keypoints[12].x) / 2;
      const hipY = (p.keypoints[11].y + p.keypoints[12].y) / 2;
      return {
        x: (hipX - 0.5) * roomWidth,
        z: (hipY - 0.5) * roomDepth,
        confidence: p.confidence
      };
    }).filter(Boolean);

    const map = new Float32Array(cols * rows);
    const cellW = roomWidth / cols;
    const cellD = roomDepth / rows;

    for (const pos of positions) {
      for (let r = 0; r < rows; r++) {
        for (let c = 0; c < cols; c++) {
          const cx = (c + 0.5) * cellW - roomWidth / 2;
          const cz = (r + 0.5) * cellD - roomDepth / 2;
          const dx = cx - pos.x;
          const dz = cz - pos.z;
          const dist = Math.sqrt(dx * dx + dz * dz);
          // Gaussian falloff around the person; keep the strongest contributor
          const conf = Math.exp(-dist * dist * 0.5) * pos.confidence;
          map[r * cols + c] = Math.max(map[r * cols + c], conf);
        }
      }
    }

    return map;
  }

  // Release references to the demo pose data.
  dispose() {
    this.demoPoses = [];
  }
}

0 commit comments

Comments
 (0)