585 Mobile Phone Labs: PWA and Audio Sensing
585.1 Learning Objectives
By completing these labs, you will be able to:
- Create Progressive Web Apps (PWAs) with offline capability
- Implement Service Workers for resource caching
- Build installable mobile applications without app stores
- Access smartphone microphones using the Web Audio API
- Process audio data in real-time for noise level measurement
- Implement participatory sensing with location-tagged data
585.2 Lab 3: Progressive Web App for Multi-Sensor Data Collection
Objective: Create an installable PWA that collects data from multiple smartphone sensors and works offline.
Materials:
- Smartphone
- Web server (Python's http.server works)
- Text editor
A Progressive Web App (PWA) is like a website that pretends to be a real app! You can add it to your home screen, use it offline, and it looks just like any other app on your phone - but you don’t need to download it from an app store.
Think of it like a magic website that can:
- Work without internet (it saves stuff on your phone)
- Send you notifications
- Look and feel like a "real" app
The Sensor Squad says: “PWAs are super cool because they’re easier to make than regular apps, but they can do almost everything a regular app can do!”
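One neat trick: a page can even check whether it is running as an installed app or in an ordinary browser tab, using the standard display-mode media query (a small sketch):

```javascript
// Detect whether the PWA is running installed (standalone) or in a browser tab.
const isInstalled = window.matchMedia('(display-mode: standalone)').matches;
console.log(isInstalled ? 'Running as an installed app' : 'Running in the browser');
```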
585.2.1 Step 1: Create the Main Application (index.html)
<!DOCTYPE html>
<html>
<head>
<title>IoT Multi-Sensor App</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<link rel="manifest" href="manifest.json">
<meta name="theme-color" content="#007bff">
<style>
body {
font-family: Arial, sans-serif;
padding: 20px;
max-width: 600px;
margin: 0 auto;
background: #f5f5f5;
}
.sensor-card {
background: white;
border-radius: 10px;
padding: 15px;
margin: 15px 0;
box-shadow: 0 2px 10px rgba(0,0,0,0.1);
}
.sensor-title {
font-size: 18px;
font-weight: bold;
color: #007bff;
margin-bottom: 10px;
}
.sensor-value {
font-size: 24px;
font-weight: bold;
color: #333;
}
.sensor-unit {
font-size: 14px;
color: #666;
}
button {
width: 100%;
padding: 15px;
font-size: 18px;
border: none;
border-radius: 8px;
cursor: pointer;
margin: 5px 0;
}
.btn-primary { background: #007bff; color: white; }
.btn-success { background: #28a745; color: white; }
.btn-danger { background: #dc3545; color: white; }
.offline-indicator {
position: fixed;
top: 0;
left: 0;
right: 0;
background: #dc3545;
color: white;
text-align: center;
padding: 10px;
display: none;
}
.offline-indicator.show { display: block; }
.data-count {
text-align: center;
padding: 10px;
background: #e9ecef;
border-radius: 5px;
margin: 10px 0;
}
</style>
</head>
<body>
<div class="offline-indicator" id="offline-indicator">
Offline - Data will sync when connected
</div>
<h1>IoT Multi-Sensor</h1>
<button class="btn-primary" onclick="startAllSensors()">Start All Sensors</button>
<button class="btn-danger" onclick="stopAllSensors()">Stop All Sensors</button>
<div class="sensor-card">
<div class="sensor-title">Accelerometer</div>
<div class="sensor-value" id="accel-value">--</div>
<div class="sensor-unit">m/s² (magnitude)</div>
</div>
<div class="sensor-card">
<div class="sensor-title">Gyroscope</div>
<div class="sensor-value" id="gyro-value">--</div>
<div class="sensor-unit">rad/s (rotation rate)</div>
</div>
<div class="sensor-card">
<div class="sensor-title">Location</div>
<div class="sensor-value" id="location-value">--</div>
<div class="sensor-unit">latitude, longitude</div>
</div>
<div class="sensor-card">
<div class="sensor-title">Light Level</div>
<div class="sensor-value" id="light-value">--</div>
<div class="sensor-unit">lux</div>
</div>
<div class="data-count">
<strong>Collected Readings:</strong> <span id="reading-count">0</span>
</div>
<button class="btn-success" onclick="syncData()">Sync Data to Cloud</button>
<script>
let sensors = {};
let readings = [];
let readingCount = 0;
// Register Service Worker for offline support
if ('serviceWorker' in navigator) {
navigator.serviceWorker.register('sw.js')
.then(reg => console.log('Service Worker registered'))
.catch(err => console.error('SW registration failed:', err));
}
// Offline detection
window.addEventListener('online', () => {
document.getElementById('offline-indicator').classList.remove('show');
syncData(); // Auto-sync when back online
});
window.addEventListener('offline', () => {
document.getElementById('offline-indicator').classList.add('show');
});
function startAllSensors() {
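// Note: the Generic Sensor API classes used below (Accelerometer, Gyroscope,
// AmbientLightSensor) are currently limited to Chromium-based browsers and
// require a secure context, so the feature checks simply skip them elsewhere.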
// Accelerometer
if ('Accelerometer' in window) {
sensors.accel = new Accelerometer({ frequency: 10 });
sensors.accel.addEventListener('reading', () => {
const mag = Math.sqrt(
sensors.accel.x ** 2 +
sensors.accel.y ** 2 +
sensors.accel.z ** 2
);
document.getElementById('accel-value').textContent = mag.toFixed(2);
storeReading('accelerometer', { x: sensors.accel.x, y: sensors.accel.y, z: sensors.accel.z });
});
sensors.accel.start();
}
// Gyroscope
if ('Gyroscope' in window) {
sensors.gyro = new Gyroscope({ frequency: 10 });
sensors.gyro.addEventListener('reading', () => {
const rate = Math.sqrt(
sensors.gyro.x ** 2 +
sensors.gyro.y ** 2 +
sensors.gyro.z ** 2
);
document.getElementById('gyro-value').textContent = rate.toFixed(3);
storeReading('gyroscope', { x: sensors.gyro.x, y: sensors.gyro.y, z: sensors.gyro.z });
});
sensors.gyro.start();
}
// Geolocation
if ('geolocation' in navigator) {
sensors.geoWatch = navigator.geolocation.watchPosition(
pos => {
const lat = pos.coords.latitude.toFixed(5);
const lon = pos.coords.longitude.toFixed(5);
document.getElementById('location-value').textContent = `${lat}, ${lon}`;
storeReading('location', {
lat: pos.coords.latitude,
lon: pos.coords.longitude,
accuracy: pos.coords.accuracy
});
},
err => console.error('Geolocation error:', err),
{ enableHighAccuracy: true }
);
}
// Ambient Light Sensor
if ('AmbientLightSensor' in window) {
sensors.light = new AmbientLightSensor();
sensors.light.addEventListener('reading', () => {
document.getElementById('light-value').textContent = sensors.light.illuminance.toFixed(0);
storeReading('light', { illuminance: sensors.light.illuminance });
});
sensors.light.start();
}
}
function stopAllSensors() {
if (sensors.accel) sensors.accel.stop();
if (sensors.gyro) sensors.gyro.stop();
if (sensors.geoWatch) navigator.geolocation.clearWatch(sensors.geoWatch);
if (sensors.light) sensors.light.stop();
}
function storeReading(type, data) {
const reading = {
type,
data,
timestamp: Date.now()
};
readings.push(reading);
readingCount++;
document.getElementById('reading-count').textContent = readingCount;
// Keep only the last 1000 readings so localStorage stays within quota
if (readings.length > 1000) {
readings = readings.slice(-1000);
}
// Store in localStorage for offline persistence
localStorage.setItem('sensor-readings', JSON.stringify(readings));
}
async function syncData() {
const stored = localStorage.getItem('sensor-readings');
if (!stored) {
alert('No data to sync');
return;
}
const data = JSON.parse(stored);
// Reuse a persistent device ID, generating one on first sync
let deviceId = localStorage.getItem('device-id');
if (!deviceId) {
deviceId = Math.random().toString(36).slice(2, 11);
localStorage.setItem('device-id', deviceId);
}
try {
const response = await fetch('https://iot-backend.example.com/api/sensor-data', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
deviceId: 'pwa-' + deviceId,
readings: data
})
});
if (response.ok) {
localStorage.removeItem('sensor-readings');
readings = [];
readingCount = 0;
document.getElementById('reading-count').textContent = '0';
alert('Data synced successfully!');
}
} catch (error) {
console.error('Sync failed:', error);
alert('Sync failed. Data saved locally for later.');
}
}
// Load saved readings on startup
const savedReadings = localStorage.getItem('sensor-readings');
if (savedReadings) {
readings = JSON.parse(savedReadings);
readingCount = readings.length;
document.getElementById('reading-count').textContent = readingCount;
}
</script>
</body>
</html>
585.2.2 Step 2: Create the Web App Manifest (manifest.json)
The manifest tells the browser how to present the installed app: "display": "standalone" hides the browser UI, and the icons are used for the home-screen shortcut.
{
"name": "IoT Multi-Sensor App",
"short_name": "IoT Sensors",
"description": "Collect data from smartphone sensors for IoT applications",
"start_url": "/",
"display": "standalone",
"background_color": "#ffffff",
"theme_color": "#007bff",
"icons": [
{
"src": "/icon-192.png",
"sizes": "192x192",
"type": "image/png"
},
{
"src": "/icon-512.png",
"sizes": "512x512",
"type": "image/png"
}
]
}
585.2.3 Step 3: Create the Service Worker (sw.js)
The service worker sits between the page and the network. This one uses a cache-first strategy: cached files are served immediately, and the network is only consulted for anything not already in the cache.
const CACHE_NAME = 'iot-sensors-v1';
const urlsToCache = [
'/',
'/index.html',
'/manifest.json'
];
// Install service worker and cache resources
self.addEventListener('install', event => {
event.waitUntil(
caches.open(CACHE_NAME)
.then(cache => cache.addAll(urlsToCache))
);
});
// Serve cached content when offline
self.addEventListener('fetch', event => {
event.respondWith(
caches.match(event.request)
.then(response => response || fetch(event.request))
);
});
// Clean up old caches on activation
self.addEventListener('activate', event => {
event.waitUntil(
caches.keys().then(cacheNames => {
return Promise.all(
cacheNames.filter(name => name !== CACHE_NAME)
.map(name => caches.delete(name))
);
})
);
});
Note: with a cache-first strategy, you must bump CACHE_NAME (e.g., to 'iot-sensors-v2') whenever you change index.html; otherwise installed clients keep serving the stale cached copy until the old cache is deleted.
585.2.4 Step 4: Serve and Install the PWA
# Start a local web server
python3 -m http.server 8000
# Access on smartphone: http://[your-computer-ip]:8000
# Use the browser's "Add to Home Screen" option to install
Note: Service Workers, the Generic Sensor API, getUserMedia, and geolocation all require a secure context (HTTPS, or localhost). Plain http:// from another device will not register the Service Worker, so for testing either serve over HTTPS or use Chrome's remote-debugging port forwarding (chrome://inspect) so the phone can reach the app as http://localhost:8000.
585.2.5 PWA Architecture
%%{init: {'theme': 'base', 'themeVariables': {'primaryColor': '#2C3E50', 'primaryTextColor': '#fff', 'primaryBorderColor': '#16A085', 'lineColor': '#E67E22', 'secondaryColor': '#f5f5f5', 'tertiaryColor': '#fff'}}}%%
flowchart TB
subgraph "PWA Components"
A[index.html<br/>Main App]
B[manifest.json<br/>App Metadata]
C[sw.js<br/>Service Worker]
end
subgraph "Capabilities"
D[Offline Storage<br/>localStorage + Cache]
E[Sensor Access<br/>Generic Sensor API]
F[Background Sync<br/>Queue uploads]
end
subgraph "User Experience"
G[Home Screen Icon]
H[Full Screen Mode]
I[Offline Indicator]
end
A --> B
A --> C
C --> D
A --> E
C --> F
B --> G
B --> H
A --> I
585.2.6 Expected Learning Outcomes
After completing this lab, you will be able to:
- Create Progressive Web Apps with offline support
- Use Service Workers for caching
- Implement installable mobile applications
- Access multiple sensors simultaneously
585.2.7 Exercises
- Add background sync for uploading data when online using the Background Sync API (registration.sync.register(); see the sketch below)
- Implement push notifications for sensor alerts
- Add camera integration for QR code scanning
- Create a data visualization dashboard with Chart.js
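A minimal starting point for the first exercise, assuming the same backend endpoint as syncData. Two caveats: Background Sync is currently Chromium-only, and a service worker cannot read localStorage, so queued readings would have to live in IndexedDB; sendQueuedReadings below is a hypothetical helper for that part.

```javascript
// In the page: ask the browser to fire a sync event once connectivity returns.
async function requestBackgroundSync() {
  const registration = await navigator.serviceWorker.ready;
  if ('sync' in registration) {
    await registration.sync.register('sync-readings');
  }
}

// In sw.js: upload queued data when the sync event fires.
self.addEventListener('sync', event => {
  if (event.tag === 'sync-readings') {
    // sendQueuedReadings() is a hypothetical helper that reads queued
    // readings from IndexedDB and POSTs them to the backend.
    event.waitUntil(sendQueuedReadings());
  }
});
```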
585.3 Lab 4: Participatory Noise Monitoring Application
Objective: Build a crowdsourced noise level monitoring system using smartphone microphones.
Materials:
- Smartphone with microphone
- Web browser
- Optional: Location services enabled
Sound is vibrations in the air, and we measure how loud sounds are using decibels (dB). Your phone’s microphone can “hear” these vibrations and tell you how loud things are around you!
Here’s what different noise levels feel like:
- 0-30 dB: Very quiet, like a library
- 30-60 dB: Normal talking volume
- 60-80 dB: Busy traffic
- 80-100 dB: Lawnmower - getting uncomfortably loud!
- 100+ dB: Concert or fireworks - can hurt your ears!
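Formally, sound pressure level is measured on a logarithmic scale relative to the quietest sound a healthy ear can detect:

$$L_p = 20\,\log_{10}\!\left(\frac{p_{\mathrm{rms}}}{p_0}\right)\ \text{dB}, \qquad p_0 = 20\ \mu\text{Pa}$$

Every +20 dB therefore means ten times the sound pressure. A phone microphone reports uncalibrated sample values, so the readings in this lab are relative levels, not true dB SPL.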
The Sensor Squad explains: “When lots of people use an app like this and share their noise readings, we can make a map of where it’s noisy and where it’s quiet in a whole city! That’s called participatory sensing - everyone participates to help!”
585.3.1 Complete Application Code
<!DOCTYPE html>
<html>
<head>
<title>Noise Level Monitor</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<style>
body {
font-family: Arial, sans-serif;
padding: 20px;
max-width: 600px;
margin: 0 auto;
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
min-height: 100vh;
}
.container {
background: white;
border-radius: 15px;
padding: 20px;
box-shadow: 0 10px 40px rgba(0,0,0,0.3);
}
h1 {
color: #667eea;
text-align: center;
}
.noise-meter {
background: linear-gradient(to right, green, yellow, orange, red);
height: 50px;
border-radius: 25px;
position: relative;
margin: 30px 0;
}
.noise-indicator {
position: absolute;
width: 10px;
height: 60px;
background: #333;
border-radius: 5px;
top: -5px;
transition: left 0.1s;
}
.db-display {
text-align: center;
font-size: 72px;
font-weight: bold;
color: #667eea;
margin: 20px 0;
}
.db-label {
text-align: center;
color: #666;
font-size: 18px;
}
button {
width: 100%;
padding: 15px;
font-size: 18px;
border: none;
border-radius: 8px;
cursor: pointer;
margin: 10px 0;
}
.start-btn {
background: #4CAF50;
color: white;
}
.stop-btn {
background: #f44336;
color: white;
}
.upload-btn {
background: #2196F3;
color: white;
}
.noise-level {
padding: 15px;
margin: 15px 0;
border-radius: 8px;
text-align: center;
font-weight: bold;
}
.quiet { background: #d4edda; color: #155724; }
.moderate { background: #fff3cd; color: #856404; }
.loud { background: #f8d7da; color: #721c24; }
.stats {
background: #f5f5f5;
padding: 15px;
border-radius: 8px;
margin: 15px 0;
}
</style>
</head>
<body>
<div class="container">
<h1>Noise Level Monitor</h1>
<button class="start-btn" onclick="startMonitoring()">Start Monitoring</button>
<button class="stop-btn" onclick="stopMonitoring()">Stop Monitoring</button>
<div class="db-display" id="db-display">--</div>
<div class="db-label">Decibels (dB)</div>
<div class="noise-meter">
<div class="noise-indicator" id="indicator"></div>
</div>
<div class="noise-level" id="noise-level">Quiet</div>
<div class="stats" id="stats">
Click "Start Monitoring" to begin
</div>
<button class="upload-btn" onclick="uploadData()">Upload to Server</button>
<div style="margin-top: 20px; padding: 10px; background: #e3f2fd; border-radius: 5px; font-size: 14px;">
<strong>About Noise Levels:</strong><br>
- 0-30 dB: Very Quiet (library)<br>
- 30-60 dB: Quiet (conversation)<br>
- 60-80 dB: Moderate (traffic)<br>
- 80-100 dB: Loud (lawnmower)<br>
- 100+ dB: Very Loud (concert, harmful)
</div>
</div>
<script>
let audioContext = null;
let microphone = null;
let analyser = null;
let dataArray = null;
let animationId = null;
let mediaStream = null;
let measurements = [];
let startTime = null;
async function startMonitoring() {
try {
mediaStream = await navigator.mediaDevices.getUserMedia({ audio: true });
audioContext = new (window.AudioContext || window.webkitAudioContext)();
microphone = audioContext.createMediaStreamSource(mediaStream);
analyser = audioContext.createAnalyser();
analyser.fftSize = 2048;
analyser.smoothingTimeConstant = 0.8;
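// fftSize 2048 yields 1024 frequency bins (frequencyBinCount = fftSize / 2);
// smoothingTimeConstant averages successive frames so the display doesn't jitter.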
microphone.connect(analyser);
dataArray = new Uint8Array(analyser.frequencyBinCount);
startTime = Date.now();
measureNoise();
} catch (error) {
alert('Error accessing microphone: ' + error.message);
}
}
function stopMonitoring() {
if (animationId) {
cancelAnimationFrame(animationId);
}
if (microphone) {
microphone.disconnect();
}
if (mediaStream) {
// Stop the tracks so the browser releases the microphone
mediaStream.getTracks().forEach(track => track.stop());
}
if (audioContext) {
audioContext.close();
}
updateStats();
}
function measureNoise() {
analyser.getByteFrequencyData(dataArray);
// Calculate RMS (root mean square) of audio data
let sum = 0;
for (let i = 0; i < dataArray.length; i++) {
sum += dataArray[i] * dataArray[i];
}
const rms = Math.sqrt(sum / dataArray.length);
// Convert to decibels (rough approximation)
// This is a simplified conversion - real dB meters require calibration
const db = Math.round(20 * Math.log10(rms + 1));
// Store measurement
measurements.push({
db: db,
timestamp: Date.now()
});
// Update display
document.getElementById('db-display').textContent = db;
// Update indicator position (0-100 dB range)
const percent = Math.min(db, 100);
document.getElementById('indicator').style.left = percent + '%';
// Update noise level category
let category, className;
if (db < 40) {
category = 'Quiet';
className = 'quiet';
} else if (db < 70) {
category = 'Moderate';
className = 'moderate';
} else {
category = 'Loud';
className = 'loud';
}
const levelDiv = document.getElementById('noise-level');
levelDiv.textContent = category + ` (${db} dB)`;
levelDiv.className = 'noise-level ' + className;
// Update stats
if (measurements.length % 10 === 0) {
updateStats();
}
// Continue measuring
animationId = requestAnimationFrame(measureNoise);
}
function updateStats() {
if (measurements.length === 0) {
return;
}
const dbs = measurements.map(m => m.db);
const avg = dbs.reduce((a, b) => a + b) / dbs.length;
const max = Math.max(...dbs);
const min = Math.min(...dbs);
const duration = (Date.now() - startTime) / 1000;
document.getElementById('stats').innerHTML = `
<strong>Statistics:</strong><br>
Average: ${avg.toFixed(1)} dB<br>
Maximum: ${max} dB<br>
Minimum: ${min} dB<br>
Measurements: ${measurements.length}<br>
Duration: ${duration.toFixed(1)} seconds
`;
}
async function uploadData() {
if (measurements.length === 0) {
alert('No measurements to upload');
return;
}
// Get current location
if ('geolocation' in navigator) {
navigator.geolocation.getCurrentPosition(async (position) => {
// Reuse a persistent device ID, generating one on first upload
let deviceId = localStorage.getItem('device-id');
if (!deviceId) {
deviceId = 'web-' + Math.random().toString(36).slice(2, 11);
localStorage.setItem('device-id', deviceId);
}
const data = {
measurements: measurements,
location: {
latitude: position.coords.latitude,
longitude: position.coords.longitude
},
deviceId: deviceId,
timestamp: new Date().toISOString()
};
try {
const response = await fetch('https://iot-backend.example.com/api/noise-data', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify(data)
});
if (response.ok) {
alert('Data uploaded successfully!');
measurements = []; // Clear measurements
} else {
alert('Upload failed: ' + response.statusText);
}
} catch (error) {
// If server not available, store locally
localStorage.setItem('noise-data', JSON.stringify(data));
alert('Server unavailable. Data saved locally.');
}
}, error => {
alert('Location unavailable; upload cancelled. Enable location services and try again.');
});
} else {
alert('Geolocation not supported');
}
}
</script>
</body>
</html>
585.3.2 Audio Processing Pipeline
%%{init: {'theme': 'base', 'themeVariables': {'primaryColor': '#2C3E50', 'primaryTextColor': '#fff', 'primaryBorderColor': '#16A085', 'lineColor': '#E67E22', 'secondaryColor': '#f5f5f5', 'tertiaryColor': '#fff'}}}%%
flowchart TD
A[Microphone Input] --> B[AudioContext]
B --> C[MediaStreamSource]
C --> D[AnalyserNode]
D --> E[FFT Analysis<br/>2048 samples]
E --> F[Frequency Data<br/>Uint8Array]
F --> G[Calculate RMS<br/>√(Σx²/n)]
G --> H[Convert to dB<br/>20·log₁₀(rms)]
H --> I[Classify Level]
I --> J{dB Range}
J -->|< 40| K[Quiet]
J -->|40-70| L[Moderate]
J -->|> 70| M[Loud]
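The lab computes RMS over the byte-valued frequency magnitudes, which works as a rough loudness proxy. A more conventional estimate uses the raw time-domain samples; here is a sketch under the lab's analyser settings (still uncalibrated, so the result is relative dBFS rather than true dB SPL):

```javascript
// Relative level in dBFS from time-domain samples (0 dBFS = full scale).
// Assumes the analyser configured in the lab (fftSize = 2048).
const timeData = new Float32Array(2048);
function relativeLevelDb(analyser) {
  analyser.getFloatTimeDomainData(timeData); // samples in [-1, 1]
  let sum = 0;
  for (const s of timeData) sum += s * s;
  const rms = Math.sqrt(sum / timeData.length);
  return 20 * Math.log10(Math.max(rms, 1e-8)); // clamp avoids log10(0)
}
```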
585.3.3 Expected Learning Outcomes
After completing this lab, you will be able to:
- Access smartphone microphone via Web Audio API
- Process audio data in real-time
- Calculate noise levels from audio signals
- Implement participatory sensing with location
- Handle offline data storage
585.3.4 Exercises
- Implement frequency analysis to identify noise sources (traffic vs. construction vs. voices); see the first sketch below
- Create a noise pollution heatmap from crowdsourced data using mapping libraries
- Add time-based analysis showing noise patterns throughout the day
- Implement privacy-preserving location obfuscation using grid-based reporting (snap to a 100 m grid); see the second sketch below
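For the frequency-analysis exercise, a minimal sketch that reports the loudest frequency bin, assuming the analyser and audioContext from the lab:

```javascript
// Crude clue to the noise source: the frequency of the loudest bin.
function dominantFrequencyHz(analyser, audioContext) {
  const bins = new Uint8Array(analyser.frequencyBinCount);
  analyser.getByteFrequencyData(bins);
  let peak = 0;
  for (let i = 1; i < bins.length; i++) {
    if (bins[i] > bins[peak]) peak = i;
  }
  return peak * audioContext.sampleRate / analyser.fftSize; // Hz per bin = sampleRate / fftSize
}
```

And for the privacy exercise, one way to snap coordinates to a roughly 100 m grid before uploading (assumes ~111,320 m per degree of latitude):

```javascript
// Round a coordinate to a ~100 m grid cell so exact positions are never reported.
function snapToGrid(lat, lon, cellMeters = 100) {
  const latStep = cellMeters / 111320; // metres per degree of latitude
  const lonStep = cellMeters / (111320 * Math.cos(lat * Math.PI / 180));
  return {
    lat: Math.round(lat / latStep) * latStep,
    lon: Math.round(lon / lonStep) * lonStep
  };
}
```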
585.4 Summary
In these two labs, you built advanced mobile sensing applications:
Lab 3 - Progressive Web App:
- Created an installable PWA with manifest.json
- Implemented a Service Worker for offline caching
- Built multi-sensor data collection (accelerometer, gyroscope, GPS, light)
- Added automatic sync when connectivity is restored
Lab 4 - Noise Monitoring:
- Accessed the microphone using the Web Audio API
- Processed audio with FFT analysis
- Calculated dB levels from RMS values
- Implemented participatory sensing with location tagging
585.5 What’s Next
Test your understanding of all mobile sensing concepts before moving on.
585.6 Resources
585.6.1 Web APIs Used
- Web Audio API - Microphone access and audio processing
- Service Workers API - Offline caching
- Web App Manifest - PWA configuration
- Background Sync API - Deferred uploads
585.6.2 PWA Resources
- Progressive Web Apps - Google’s PWA guide
- PWA Builder - Tools for creating PWAs
- Workbox - Service Worker libraries
585.6.3 Participatory Sensing
- Citizen Science Apps - Examples of crowdsourced data collection
- Noise Pollution Studies - WHO guidelines on noise exposure