diff --git a/api.js b/api.js
index 915b1e4..14c3f06 100644
--- a/api.js
+++ b/api.js
@@ -2,11 +2,17 @@
import { getSession } from 'next-auth/react';
// https://allover.twodee.org/remote-state/fetching-memories/
-function assertResponse(response) {
+async function assertResponse(response) {
if (response.status >= 200 && response.status < 300) {
return response;
}
- throw new Error(`${response.status}: ${response.statusText}`);
+ // Include response body in error for debugging
+ let detail = '';
+ try {
+ const body = await response.clone().text();
+ detail = ` — ${body}`;
+ } catch (_) { /* ignore */ }
+ throw new Error(`${response.status}: ${response.statusText}${detail}`);
}
async function getDjangoToken() {
@@ -39,7 +45,7 @@ async function makeRequest(
body: body ? JSON.stringify(body) : null,
});
- assertResponse(response);
+ await assertResponse(response);
const data = await response.json();
return data;
diff --git a/components/audio/DAW/CustomWaveform/CustomTimeline.jsx b/components/audio/DAW/CustomWaveform/CustomTimeline.jsx
index c57d057..7743a2f 100644
--- a/components/audio/DAW/CustomWaveform/CustomTimeline.jsx
+++ b/components/audio/DAW/CustomWaveform/CustomTimeline.jsx
@@ -76,19 +76,24 @@ export default function CustomTimeline() {
// Apply the spliced audio
await applyProcessedAudio(splicedBuffer);
-
- // Log for study protocol (retain/scissor operation)
- if (logOperation) {
- logOperation('clip_cut', { start: activeRegion.start, end: activeRegion.end });
- logOperation('region_retained', { start: activeRegion.start, end: activeRegion.end });
- }
-
} catch (error) {
console.error('Error splicing region:', error);
alert('Failed to splice region');
- } finally {
setIsProcessing(false);
+ return;
+ }
+
+ // Log for study protocol — separated so logging failures don't block UI
+ if (logOperation) {
+ try {
+ await logOperation('clip_cut', { start: activeRegion.start, end: activeRegion.end });
+ await logOperation('region_retained', { start: activeRegion.start, end: activeRegion.end });
+ } catch (error) {
+ console.error('⚠️ Failed to log operation (audio edit succeeded):', error);
+ }
}
+
+ setIsProcessing(false);
}, [activeRegion, audioBuffer, applyProcessedAudio, audioContext, isProcessing, logOperation]);
// Handle cut (delete) region - remove the selected region
@@ -101,12 +106,12 @@ export default function CustomTimeline() {
if (isProcessing) return;
setIsProcessing(true);
- try {
- // Check if deletion is at beginning or end (silence trimming)
- const duration = audioBuffer.duration;
- const isStartTrim = activeRegion.start < 0.5; // Within 0.5s of start
- const isEndTrim = activeRegion.end > (duration - 0.5); // Within 0.5s of end
+ // Check if deletion is at beginning or end (silence trimming)
+ const duration = audioBuffer.duration;
+ const isStartTrim = activeRegion.start < 0.5; // Within 0.5s of start
+ const isEndTrim = activeRegion.end > (duration - 0.5); // Within 0.5s of end
+ try {
// Remove the selected region
const cutBuffer = cutRegionFromBuffer(
audioBuffer,
@@ -117,31 +122,30 @@ export default function CustomTimeline() {
// Apply the cut audio
await applyProcessedAudio(cutBuffer);
+ } catch (error) {
+ console.error('Error cutting region:', error);
+ alert('Failed to cut region');
+ setIsProcessing(false);
+ return;
+ }
- // Log for study protocol (delete operation)
- if (logOperation) {
- console.log('🎯 Logging clip_delete operation:', { start: activeRegion.start, end: activeRegion.end });
- logOperation('clip_delete', { start: activeRegion.start, end: activeRegion.end });
-
- // Also log silence trimming if applicable
+ // Log for study protocol — separated from audio processing so logging
+ // failures don't show "Failed to cut region" or block the UI
+ if (logOperation) {
+ try {
+ await logOperation('clip_delete', { start: activeRegion.start, end: activeRegion.end });
if (isStartTrim) {
- console.log('🎯 Logging silence_trimmed_start operation');
- logOperation('silence_trimmed_start', { region: activeRegion });
+ await logOperation('silence_trimmed_start', { region: activeRegion });
}
if (isEndTrim) {
- console.log('🎯 Logging silence_trimmed_end operation');
- logOperation('silence_trimmed_end', { region: activeRegion });
+ await logOperation('silence_trimmed_end', { region: activeRegion });
}
- } else {
- console.warn('⚠️ logOperation is not available for clip_delete');
+ } catch (error) {
+ console.error('⚠️ Failed to log operation (audio edit succeeded):', error);
}
-
- } catch (error) {
- console.error('Error cutting region:', error);
- alert('Failed to cut region');
- } finally {
- setIsProcessing(false);
}
+
+ setIsProcessing(false);
}, [activeRegion, audioBuffer, applyProcessedAudio, audioContext, isProcessing, logOperation]);
return (
@@ -195,15 +199,15 @@ export default function CustomTimeline() {
- {formatDuration(take.duration)} •{' '}
+ {formatDuration(resolvedDurations[take.id] ?? take.duration)} •{' '}
{formatDate(take.createdAt)}
diff --git a/components/audio/DAW/Multitrack/Track.js b/components/audio/DAW/Multitrack/Track.js
index b405844..3ac0771 100644
--- a/components/audio/DAW/Multitrack/Track.js
+++ b/components/audio/DAW/Multitrack/Track.js
@@ -430,6 +430,7 @@ export default function Track({ track, index, zoomLevel = 100 }) {
id: `clip-${track.id}-${Date.now()}`,
start: recordingStartPosition, // Use the stored start position
duration: audioDuration,
+ sourceDuration: audioDuration, // total buffer length for trim clamping
src: url,
offset: 0,
color: track.color || '#ff6b6b',
diff --git a/components/audio/DAW/index.js b/components/audio/DAW/index.js
index 6238bfe..375fdee 100644
--- a/components/audio/DAW/index.js
+++ b/components/audio/DAW/index.js
@@ -37,6 +37,7 @@ export default function DAW({
showSubmitButton = false,
silenceWarning = false,
logOperation = null, // For study protocol tracking
+ sampleTakes = [], // Pre-loaded takes (e.g. bassline) for Import Takes modal
}) {
const { audioURL, dawMode, setDawMode, activityLogger } = useAudio();
const { loadFFmpeg, loaded: ffmpegLoaded } = useFFmpeg();
@@ -125,7 +126,7 @@ export default function DAW({
-
+
{showSubmitButton && (
diff --git a/contexts/MultitrackContext.js b/contexts/MultitrackContext.js
index cd95804..09661c7 100644
--- a/contexts/MultitrackContext.js
+++ b/contexts/MultitrackContext.js
@@ -175,6 +175,7 @@ export const MultitrackProvider = ({ children, initialTracks = [] }) => {
id: `clip-${track.id}`,
start: 0,
duration: d,
+ sourceDuration: d, // total buffer length for trim clamping
color: track.color || '#7bafd4',
src: track.audioURL,
offset: 0,
diff --git a/contexts/TrackClipCanvas.jsx b/contexts/TrackClipCanvas.jsx
index 71ab0db..6a027dc 100644
--- a/contexts/TrackClipCanvas.jsx
+++ b/contexts/TrackClipCanvas.jsx
@@ -21,7 +21,7 @@ export default function TrackClipCanvas({ track, zoomLevel = 100, height = 100,
} = useMultitrack();
const canvasRef = useRef(null);
- const dragRef = useRef({ op: null, clipIndex: -1, startX: 0, pxPerSecCSS: 1, orig: null });
+ const dragRef = useRef({ op: null, clipIndex: -1, startX: 0, pxPerSecCSS: 1, orig: null, sourceDuration: null });
// selectionBoxRef removed - selection box now handled by SelectionOverlay component
const [peaksCache, setPeaksCache] = useState(new Map()); // clip.id -> peaks
const clips = Array.isArray(track?.clips) ? track.clips : [];
@@ -429,15 +429,12 @@ export default function TrackClipCanvas({ track, zoomLevel = 100, height = 100,
// Handle select tool
if (editorTool === 'select') {
- console.log('🔶 TrackClipCanvas: Select tool click', { trackId: track.id, hitIndex: hit.index, clipCount: clips.length });
if (hit.index >= 0) {
// Clicked on a clip - handle selection
const c = clips[hit.index];
const isShift = e.shiftKey;
const isCtrl = e.ctrlKey || e.metaKey;
- console.log('🔶 TrackClipCanvas: Clicked on clip', { clipId: c.id, isShift, isCtrl });
-
if (isShift || isCtrl) {
// Add to or toggle from selection
if (selectedClipIds.includes(c.id)) {
@@ -449,18 +446,22 @@ export default function TrackClipCanvas({ track, zoomLevel = 100, height = 100,
// Single select (replace selection)
setSelectedClipId(c.id);
setSelectedClipIds([c.id]);
- // Set track as selected when doing single selection
setSelectedTrackId(track.id);
}
+ // Initialize drag so the clip can be moved in one click-drag motion
+ const op = hit.edge === 'L' ? 'resizeL' : hit.edge === 'R' ? 'resizeR' : 'move';
+ dragRef.current.op = op;
+ dragRef.current.clipIndex = hit.index;
+ dragRef.current.startX = e.clientX;
+ dragRef.current.orig = { start: c.start || 0, duration: c.duration || 0, offset: c.offset || 0 };
+ dragRef.current.sourceDuration = c.sourceDuration || null;
+
// Stop propagation so SelectionOverlay doesn't interfere
e.stopPropagation();
return;
- } else {
- console.log('🔶 TrackClipCanvas: Clicked on empty space, letting SelectionOverlay handle it');
}
- // Note: Selection box dragging is now handled by SelectionOverlay component
- // which operates at the container level for cross-track selection
+ // Empty space click — let SelectionOverlay handle it
return;
}
@@ -514,6 +515,7 @@ export default function TrackClipCanvas({ track, zoomLevel = 100, height = 100,
dragRef.current.clipIndex = hit.index;
dragRef.current.startX = e.clientX;
dragRef.current.orig = { start: c.start || 0, duration: c.duration || 0, offset: c.offset || 0 };
+ dragRef.current.sourceDuration = c.sourceDuration || null;
} else {
dragRef.current.op = null;
dragRef.current.clipIndex = -1;
@@ -546,6 +548,7 @@ export default function TrackClipCanvas({ track, zoomLevel = 100, height = 100,
const dxSecRaw = dxCss / dragRef.current.pxPerSecCSS;
const dxSec = snapEnabled ? quantize(dxSecRaw) : dxSecRaw;
const { start, duration: dur, offset } = dragRef.current.orig;
+ const srcDur = dragRef.current.sourceDuration; // total audio buffer length
const op = dragRef.current.op;
let newStart = start;
let newDur = dur;
@@ -554,12 +557,25 @@ export default function TrackClipCanvas({ track, zoomLevel = 100, height = 100,
if (op === 'move') {
newStart = Math.max(0, start + dxSec);
} else if (op === 'resizeL') {
+ // Trim from left: advance offset into the buffer, can't go past offset 0
newStart = Math.max(0, start + dxSec);
const delta = newStart - start;
- newDur = Math.max(MIN_DUR, dur - delta);
-      newOffset = Math.max(0, (offset || 0) + delta);
+      newOffset = (offset || 0) + delta; // may go negative; corrected by the clamp below
+ newDur = Math.max(MIN_DUR, dur - delta);
+ // Clamp: can't reveal audio before the buffer start
+ if (newOffset < 0) {
+ const correction = -newOffset;
+ newOffset = 0;
+ newStart += correction;
+ newDur -= correction;
+ }
} else if (op === 'resizeR') {
newDur = Math.max(MIN_DUR, dur + dxSec);
+ // Clamp: can't extend past the end of the source audio buffer
+ if (srcDur != null) {
+ const maxDur = srcDur - (newOffset ?? offset ?? 0);
+ newDur = Math.min(newDur, maxDur);
+ }
}
draw();
diff --git a/pages/courses/[slug]/[piece]/[actCategory]/[partType]/activity/[step].js b/pages/courses/[slug]/[piece]/[actCategory]/[partType]/activity/[step].js
index 84eecb9..70f3fbb 100644
--- a/pages/courses/[slug]/[piece]/[actCategory]/[partType]/activity/[step].js
+++ b/pages/courses/[slug]/[piece]/[actCategory]/[partType]/activity/[step].js
@@ -281,35 +281,26 @@ export default function ActivityPage() {
completedOpsArray: [...completedOps] // Expand the array to see actual values
});
- // Pre-populate bassline track for Activity 3 using the assignment's sample audio
- const basslineName = assignment?.part?.piece?.name
- ? `${assignment.part.piece.name} - Bassline`
- : 'Bassline';
- const basslineURL = preferredSample || null;
- const initialTracks = (stepNumber === 3 && basslineURL) ? [{
- name: basslineName,
- type: 'audio',
+ // Build sample takes for Activity 3 — bassline available via "Import Takes" modal
+ const basslineAssignment = loadedActivities
+ ? activities[piece]?.find((a) => a.part_type === 'Bassline')
+ : null;
+ const basslineURL = basslineAssignment?.part?.sample_audio || null;
+ const sampleTakes = (stepNumber === 3 && basslineURL) ? [{
+ id: 'sample-bassline',
+ name: basslineAssignment?.part?.piece?.name
+ ? `${basslineAssignment.part.piece.name} - Bassline`
+ : 'Bassline',
+ partType: 'bassline',
+ takeNumber: 0,
+ duration: 0,
+ createdAt: null,
audioURL: basslineURL,
- volume: 1,
- pan: 0,
- muted: false,
- color: '#4a9eff',
- clips: [{
- id: `clip-bassline-${Date.now()}`,
- start: 0,
- duration: 0, // Will be set when audio loads
- color: '#4a9eff',
- src: basslineURL,
- offset: 0,
- name: basslineName,
- }],
+ mimeType: 'audio/mpeg',
+ originalData: null,
}] : [];
- // Wait for activity progress to load, and for Activity 3 also wait for the
- // sample audio URL so initialTracks is populated before MultitrackProvider mounts
- // (useState only reads initialTracks on first mount — late arrivals are ignored)
- const awaitingSample = stepNumber === 3 && !preferredSample && !!assignment;
- if (isLoading || awaitingSample) {
+ if (isLoading) {
return (
@@ -324,7 +315,6 @@ export default function ActivityPage() {
return (
{/* Initialize DAW visibility and mode for multitrack activities */}
@@ -392,6 +382,7 @@ export default function ActivityPage() {
onSubmit={null} // No submission from DAW itself in study mode
showSubmitButton={false} // Hide DAW's submit button
logOperation={logOperation} // Pass operation logger to DAW
+ sampleTakes={sampleTakes} // Bassline available via Import Takes modal
/>
)}