Thanks to visit codestin.com
Credit goes to github.com

Skip to content

Commit 44e5382

Browse files
committed
Implement CSI processing and phase sanitization modules; add unit tests for DensePose and modality translation networks
1 parent f3c77b1 commit 44e5382

11 files changed

Lines changed: 739 additions & 49 deletions

requirements.txt

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
# Core dependencies
2+
numpy>=1.21.0
3+
scipy>=1.7.0
4+
torch>=1.12.0
5+
torchvision>=0.13.0
6+
7+
# Testing dependencies
8+
pytest>=7.0.0
9+
pytest-asyncio>=0.21.0
10+
pytest-mock>=3.10.0
11+
12+
# API dependencies
13+
fastapi>=0.95.0
14+
uvicorn>=0.20.0
15+
websockets>=10.4
16+
pydantic>=1.10.0
17+
18+
# Hardware interface dependencies
19+
asyncio-mqtt>=0.11.0
20+
aiohttp>=3.8.0
21+
22+
# Data processing dependencies
23+
opencv-python>=4.7.0
24+
scikit-learn>=1.2.0
25+
26+
# Monitoring dependencies
27+
prometheus-client>=0.16.0
28+
29+
# Development dependencies
30+
black>=23.0.0
31+
flake8>=6.0.0
32+
mypy>=1.0.0

src/__init__.py

Whitespace-only changes.

src/core/__init__.py

Whitespace-only changes.

src/core/csi_processor.py

Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
"""CSI (Channel State Information) processor for WiFi-DensePose system."""
2+
3+
import numpy as np
4+
from typing import Dict, Any, Optional
5+
6+
7+
class CSIProcessor:
    """Processes raw CSI (Channel State Information) data for neural network input."""

    def __init__(self, config: Optional[Dict[str, Any]] = None):
        """Initialize CSI processor with configuration.

        Args:
            config: Configuration dictionary with processing parameters.
                Recognized keys (with defaults): 'sample_rate' (1000),
                'num_subcarriers' (56), 'num_antennas' (3).
        """
        self.config = config or {}
        # NOTE(review): these parameters are stored but not yet consumed by
        # process_raw_csi; presumably downstream stages use them — confirm.
        self.sample_rate = self.config.get('sample_rate', 1000)
        self.num_subcarriers = self.config.get('num_subcarriers', 56)
        self.num_antennas = self.config.get('num_antennas', 3)

    def process_raw_csi(self, raw_data: np.ndarray) -> np.ndarray:
        """Process raw CSI data into normalized float32 format.

        NaN samples are replaced with the mean of the non-NaN samples
        (or 0.0 if every sample is NaN), then the array is z-score
        normalized when it has non-zero variance.

        Args:
            raw_data: Raw CSI data array (any shape, any numeric dtype).

        Returns:
            Processed float32 CSI data ready for neural network input.

        Raises:
            ValueError: If raw_data is empty.
        """
        if raw_data.size == 0:
            raise ValueError("Raw CSI data cannot be empty")

        processed = raw_data.astype(np.float32)

        # Repair NaN values with the mean of the remaining valid samples.
        nan_mask = np.isnan(processed)
        if nan_mask.any():
            if nan_mask.all():
                # np.nanmean over an all-NaN array returns NaN (and warns);
                # fall back to 0.0 so NaN never propagates to the output.
                fill_value = np.float32(0.0)
            else:
                fill_value = np.nanmean(processed)
            processed[nan_mask] = fill_value

        # Z-score normalization; skipped for constant input (std == 0)
        # to avoid division by zero.
        if processed.std() > 0:
            processed = (processed - processed.mean()) / processed.std()

        return processed

src/core/phase_sanitizer.py

Lines changed: 108 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,108 @@
1+
"""Phase sanitizer for WiFi-DensePose CSI phase data processing."""
2+
3+
import numpy as np
4+
from typing import Optional
5+
from scipy import signal
6+
7+
8+
class PhaseSanitizer:
    """Sanitizes phase data by unwrapping, removing outliers, and smoothing."""

    def __init__(self, outlier_threshold: float = 3.0, smoothing_window: int = 5):
        """Initialize phase sanitizer with configuration.

        Args:
            outlier_threshold: Number of standard deviations beyond which a
                sample is treated as an outlier.
            smoothing_window: Window size (samples) for the moving-average
                smoothing filter.
        """
        self.outlier_threshold = outlier_threshold
        self.smoothing_window = smoothing_window

    def unwrap_phase(self, phase_data: np.ndarray) -> np.ndarray:
        """Unwrap phase data to remove 2π discontinuities.

        Args:
            phase_data: Raw phase data array.

        Returns:
            Unwrapped phase data as float32.

        Raises:
            ValueError: If phase_data is empty.
        """
        if phase_data.size == 0:
            raise ValueError("Phase data cannot be empty")

        # Unwrap along the last axis (assumed temporal dimension).
        unwrapped = np.unwrap(phase_data, axis=-1)
        return unwrapped.astype(np.float32)

    def remove_outliers(self, phase_data: np.ndarray) -> np.ndarray:
        """Remove outliers from phase data using statistical thresholding.

        Samples more than `outlier_threshold` standard deviations from the
        global mean are replaced by the global mean. Constant input (std == 0)
        is returned unchanged since nothing exceeds a zero-width threshold.

        Args:
            phase_data: Phase data array.

        Returns:
            float32 phase data with outliers replaced.

        Raises:
            ValueError: If phase_data is empty.
        """
        if phase_data.size == 0:
            raise ValueError("Phase data cannot be empty")

        result = phase_data.copy().astype(np.float32)

        mean_val = np.mean(result)
        std_val = np.std(result)

        outlier_mask = np.abs(result - mean_val) > (self.outlier_threshold * std_val)
        result[outlier_mask] = mean_val

        return result

    def smooth_phase(self, phase_data: np.ndarray) -> np.ndarray:
        """Apply a moving-average filter to reduce noise in phase data.

        Smoothing is applied along the last axis (temporal dimension) for
        arrays of any rank. Arrays whose temporal length is shorter than
        the smoothing window are returned unchanged.

        Args:
            phase_data: Phase data array.

        Returns:
            Smoothed float32 phase data with the same shape.

        Raises:
            ValueError: If phase_data is empty.
        """
        if phase_data.size == 0:
            raise ValueError("Phase data cannot be empty")

        result = phase_data.copy().astype(np.float32)

        # BUG FIX: the previous loop over axis 0 passed 2-D slices to
        # np.convolve for inputs with ndim > 2, raising ValueError.
        # apply_along_axis smooths the last axis for any rank and is
        # identical to the old row-wise loop for 1-D/2-D input.
        if result.shape[-1] >= self.smoothing_window:
            kernel = np.ones(self.smoothing_window) / self.smoothing_window
            result = np.apply_along_axis(
                lambda row: np.convolve(row, kernel, mode='same'), -1, result
            ).astype(np.float32)

        return result

    def sanitize(self, phase_data: np.ndarray) -> np.ndarray:
        """Apply the full sanitization pipeline to phase data.

        Pipeline order: unwrap -> outlier removal -> smoothing.

        Args:
            phase_data: Raw phase data array.

        Returns:
            Fully sanitized float32 phase data.

        Raises:
            ValueError: If phase_data is empty.
        """
        if phase_data.size == 0:
            raise ValueError("Phase data cannot be empty")

        result = self.unwrap_phase(phase_data)
        result = self.remove_outliers(result)
        result = self.smooth_phase(result)

        return result

src/models/__init__.py

Whitespace-only changes.

src/models/modality_translation.py

Lines changed: 114 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,114 @@
1+
"""Modality translation network for WiFi-DensePose system."""
2+
3+
import torch
4+
import torch.nn as nn
5+
import torch.nn.functional as F
6+
from typing import Dict, Any
7+
8+
9+
class ModalityTranslationNetwork(nn.Module):
    """Neural network for translating CSI data to visual feature space."""

    def __init__(self, config: Dict[str, Any]):
        """Initialize modality translation network.

        Args:
            config: Configuration dictionary with required keys:
                'input_channels', 'hidden_dim', 'output_dim',
                'num_layers', 'dropout_rate'.

        Raises:
            KeyError: If a required config key is missing.
        """
        super().__init__()

        self.input_channels = config['input_channels']
        self.hidden_dim = config['hidden_dim']
        self.output_dim = config['output_dim']
        self.num_layers = config['num_layers']
        self.dropout_rate = config['dropout_rate']

        # Encoder: CSI -> feature space. _build_encoder also records
        # self.encoder_out_channels, which the decoder needs.
        self.encoder = self._build_encoder()

        # Decoder: feature space -> visual-like features.
        self.decoder = self._build_decoder()

        self._initialize_weights()

    def _build_encoder(self) -> nn.Module:
        """Build the encoder: an initial conv followed by progressive
        stride-2 downsampling, channels doubling up to hidden_dim.

        Side effect: sets self.encoder_out_channels to the actual number
        of channels the encoder emits.
        """
        layers = []

        # Initial convolution (no downsampling).
        layers.append(nn.Conv2d(self.input_channels, 64, kernel_size=3, padding=1))
        layers.append(nn.BatchNorm2d(64))
        layers.append(nn.ReLU(inplace=True))
        layers.append(nn.Dropout2d(self.dropout_rate))

        # Progressive downsampling: num_layers - 1 stride-2 stages.
        in_channels = 64
        for _ in range(self.num_layers - 1):
            out_channels = min(in_channels * 2, self.hidden_dim)
            layers.extend([
                nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=2, padding=1),
                nn.BatchNorm2d(out_channels),
                nn.ReLU(inplace=True),
                nn.Dropout2d(self.dropout_rate)
            ])
            in_channels = out_channels

        # BUG FIX: record the encoder's true output width. The old code
        # assumed it always equals hidden_dim, but the min() cap starting
        # from 64 channels means shallow networks (small num_layers) stop
        # below hidden_dim, which made the decoder's first layer expect
        # the wrong channel count and crash in forward().
        self.encoder_out_channels = in_channels

        return nn.Sequential(*layers)

    def _build_decoder(self) -> nn.Module:
        """Build the decoder: progressive stride-2 upsampling mirroring the
        encoder, then a final conv + Tanh projecting to output_dim."""
        layers = []

        # Start from the channel count the encoder actually produces.
        in_channels = self.encoder_out_channels

        # Progressive upsampling: num_layers - 1 stages, mirroring encoder.
        for _ in range(self.num_layers - 1):
            out_channels = max(in_channels // 2, 64)
            layers.extend([
                nn.ConvTranspose2d(in_channels, out_channels, kernel_size=3,
                                   stride=2, padding=1, output_padding=1),
                nn.BatchNorm2d(out_channels),
                nn.ReLU(inplace=True),
                nn.Dropout2d(self.dropout_rate)
            ])
            in_channels = out_channels

        # Final projection; Tanh bounds the output to [-1, 1].
        layers.append(nn.Conv2d(in_channels, self.output_dim, kernel_size=3, padding=1))
        layers.append(nn.Tanh())

        return nn.Sequential(*layers)

    def _initialize_weights(self):
        """Initialize conv weights (Kaiming) and batch-norm affine params."""
        for m in self.modules():
            if isinstance(m, (nn.Conv2d, nn.ConvTranspose2d)):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Forward pass through the network.

        Args:
            x: Input CSI tensor of shape (batch_size, channels, height, width).
                Spatial dimensions should be divisible by 2**(num_layers - 1)
                so the decoder restores the input resolution exactly.

        Returns:
            Translated features tensor of shape
            (batch_size, output_dim, height, width), values in [-1, 1].

        Raises:
            RuntimeError: If x has the wrong number of input channels.
        """
        if x.shape[1] != self.input_channels:
            raise RuntimeError(f"Expected {self.input_channels} input channels, got {x.shape[1]}")

        # Encode CSI data, then decode to visual-like features.
        encoded = self.encoder(x)
        decoded = self.decoder(encoded)

        return decoded

0 commit comments

Comments
 (0)