diff --git a/pages/hearing/[hearingId].tsx b/pages/hearing/[hearingId].tsx
new file mode 100644
index 000000000..9dc0e24db
--- /dev/null
+++ b/pages/hearing/[hearingId].tsx
@@ -0,0 +1,179 @@
import { useEffect, useRef, useState } from "react"
import Head from "next/head"
import styles from "../../styles/VideoTranscription.module.css" // Adjust the path as necessary
import { firestore } from "../../components/firebase"
import {
  collection,
  doc,
  getDoc,
  getDocs,
  orderBy,
  query as fbQuery
} from "firebase/firestore"
import { z } from "zod"
import { GetServerSideProps } from "next"
import { serverSideTranslations } from "next-i18next/serverSideTranslations"

// Route params schema for /hearing/[hearingId].
const Query = z.object({ hearingId: z.string() })

// One transcript utterance; start/end are in milliseconds.
// NOTE(review): field shape inferred from usage below — confirm against the
// Firestore `transcriptions/{id}/utterances` schema.
type Utterance = {
  id: string
  start: number
  end: number
  text: string
}

/**
 * Hearing video player with a synchronized, clickable transcript.
 *
 * The transcript pane auto-scrolls so the utterance matching the current
 * playback position stays visible; clicking an utterance seeks the video
 * to that utterance's start time.
 */
export default function VideoTranscription({
  videoUrl,
  utterances
}: {
  videoUrl: string
  utterances: Utterance[]
}) {
  // Current playback position in milliseconds (matches utterance times).
  const [currentTime, setCurrentTime] = useState(0)
  const videoRef = useRef<HTMLVideoElement>(null)
  const transcriptionRef = useRef<HTMLDivElement>(null)
  // One rendered element per utterance, keyed by start time, for scrolling.
  const utteranceRefs = useRef<Record<number, HTMLDivElement | null>>({})

  // Update current time when video plays
  const handleTimeUpdate = () => {
    if (videoRef.current) {
      setCurrentTime(videoRef.current.currentTime * 1000) // Convert to ms
    }
  }

  // Scroll to the current utterance
  useEffect(() => {
    const currentUtterance = utterances.find(
      utterance =>
        currentTime >= utterance.start && currentTime <= utterance.end
    )

    if (currentUtterance) {
      const element = utteranceRefs.current[currentUtterance.start]
      const container = transcriptionRef.current
      if (element && container) {
        container.scrollTop = element.offsetTop - container.offsetTop - 100 // Offset for better visibility
      }
    }
  }, [currentTime, utterances])

  // Click on transcription to seek video
  const seekToTime = (startTime: number) => {
    if (videoRef.current) {
      videoRef.current.currentTime = startTime / 1000 // Convert ms to seconds
    }
  }

  return (
    <div className={styles.container}>
      <Head>
        <title>Video Transcription</title>
      </Head>
      <main className={styles.main}>
        <div className={styles.videoContainer}>
          <video
            ref={videoRef}
            className={styles.video}
            src={videoUrl}
            controls
            onTimeUpdate={handleTimeUpdate}
          />
        </div>
        <div className={styles.transcriptionContainer} ref={transcriptionRef}>
          <h2>Transcription</h2>
          <div className={styles.transcription}>
            {utterances.map(utterance => {
              const isActive =
                currentTime >= utterance.start && currentTime <= utterance.end
              return (
                <div
                  key={utterance.id}
                  ref={el => (utteranceRefs.current[utterance.start] = el)}
                  className={`${styles.utterance} ${
                    isActive ? styles.active : ""
                  }`}
                  onClick={() => seekToTime(utterance.start)}
                >
                  <span className={styles.timestamp}>
                    {formatTime(utterance.start)} - {formatTime(utterance.end)}
                  </span>
                  <p>{utterance.text}</p>
                </div>
              )
            })}
          </div>
        </div>
      </main>
    </div>
  )
}

// Helper function to format milliseconds to MM:SS format
function formatTime(ms: number) {
  const totalSeconds = Math.floor(ms / 1000)
  const minutes = Math.floor(totalSeconds / 60)
  const seconds = totalSeconds % 60
  return `${minutes.toString().padStart(2, "0")}:${seconds
    .toString()
    .padStart(2, "0")}`
}

export const getServerSideProps: GetServerSideProps = async ctx => {
  const locale = ctx.locale ?? ctx.defaultLocale ?? "en"
  const query = Query.safeParse(ctx.params)
  if (!query.success) return { notFound: true }
  const { hearingId } = query.data

  // Route param is the numeric part, e.g. "5180" → "events/hearing-5180".
  // NOTE(review): an earlier example used the full id "hearing-5180"; confirm
  // callers pass only the suffix, or this path doubles the "hearing-" prefix.
  const rawHearing = await getDoc(doc(firestore, `events/hearing-${hearingId}`))
  if (!rawHearing.exists()) return { notFound: true }
  const { videoTranscriptionId, videoURL } = rawHearing.data() as {
    videoTranscriptionId?: string
    videoURL?: string
  }
  if (!videoTranscriptionId || !videoURL) {
    return { notFound: true }
  }

  // Verify the transcription document exists before loading its utterances.
  // Example: videoTranscriptionId = "639e73ff-bd01-4902-bba7-88faaf39afa9"
  const rawTranscription = await getDoc(
    doc(firestore, `transcriptions/${videoTranscriptionId}`)
  )
  if (!rawTranscription.exists()) return { notFound: true }

  const rawUtterances = await getDocs(
    fbQuery(
      collection(
        firestore,
        `transcriptions/${videoTranscriptionId}/utterances`
      ),
      orderBy("start", "asc")
    )
  )
  if (rawUtterances.empty) {
    return { notFound: true }
  }

  // Pick only the serializable fields the page uses: Next.js props must be
  // JSON-serializable, and a blind spread of doc.data() can leak Firestore
  // Timestamp instances into props and crash the render.
  const utterances: Utterance[] = rawUtterances.docs.map(d => {
    const { start, end, text } = d.data() as Omit<Utterance, "id">
    return { id: d.id, start, end, text }
  })

  return {
    props: {
      videoUrl: videoURL,
      utterances,
      ...(await serverSideTranslations(locale, [
        "auth",
        "common",
        "footer",
        "testimony",
        "profile"
      ]))
    }
  }
}
diff --git a/pages/video-transcription.jsx b/pages/video-transcription.jsx new file mode 100644 index
000000000..28a07b83c
--- /dev/null
+++ b/pages/video-transcription.jsx
@@ -0,0 +1,139 @@
import { useEffect, useRef, useState } from "react"
import Head from "next/head"
import styles from "../styles/VideoTranscription.module.css"
import { firestore } from "../components/firebase"
import {
  collection,
  doc,
  getDoc,
  getDocs,
  orderBy,
  query
} from "firebase/firestore"

/**
 * Prototype page: plays one hard-coded hearing's video alongside its
 * transcript. The transcript auto-scrolls with playback and clicking an
 * utterance seeks the video to that utterance's start time.
 */
export default function VideoTranscription({ videoUrl, utterances }) {
  // Current playback position in milliseconds (matches utterance times).
  const [currentTime, setCurrentTime] = useState(0)
  const videoRef = useRef(null)
  const transcriptionRef = useRef(null)
  // One rendered element per utterance, keyed by start time, for scrolling.
  const utteranceRefs = useRef({})

  // Update current time when video plays
  const handleTimeUpdate = () => {
    if (videoRef.current) {
      setCurrentTime(videoRef.current.currentTime * 1000) // Convert to ms
    }
  }

  // Scroll to the current utterance
  useEffect(() => {
    const currentUtterance = utterances.find(
      utterance =>
        currentTime >= utterance.start && currentTime <= utterance.end
    )

    if (currentUtterance && utteranceRefs.current[currentUtterance.start]) {
      const element = utteranceRefs.current[currentUtterance.start]
      const container = transcriptionRef.current

      if (container) {
        container.scrollTop = element.offsetTop - container.offsetTop - 100 // Offset for better visibility
      }
    }
  }, [currentTime, utterances])

  // Click on transcription to seek video
  const seekToTime = startTime => {
    if (videoRef.current) {
      videoRef.current.currentTime = startTime / 1000 // Convert ms to seconds
    }
  }

  return (
    <div className={styles.container}>
      <Head>
        <title>Video Transcription</title>
      </Head>
      <main className={styles.main}>
        <div className={styles.videoContainer}>
          <video
            ref={videoRef}
            className={styles.video}
            src={videoUrl}
            controls
            onTimeUpdate={handleTimeUpdate}
          />
        </div>
        <div className={styles.transcriptionContainer} ref={transcriptionRef}>
          <h2>Transcription</h2>
          <div className={styles.transcription}>
            {utterances.map(utterance => {
              const isActive =
                currentTime >= utterance.start && currentTime <= utterance.end
              return (
                <div
                  key={utterance.start}
                  ref={el => (utteranceRefs.current[utterance.start] = el)}
                  className={`${styles.utterance} ${
                    isActive ? styles.active : ""
                  }`}
                  onClick={() => seekToTime(utterance.start)}
                >
                  <span className={styles.timestamp}>
                    {formatTime(utterance.start)} - {formatTime(utterance.end)}
                  </span>
                  <p>{utterance.text}</p>
                </div>
              )
            })}
          </div>
        </div>
      </main>
    </div>
  )
}

// Helper function to format milliseconds to MM:SS format
function formatTime(ms) {
  const totalSeconds = Math.floor(ms / 1000)
  const minutes = Math.floor(totalSeconds / 60)
  const seconds = totalSeconds % 60
  return `${minutes.toString().padStart(2, "0")}:${seconds
    .toString()
    .padStart(2, "0")}`
}

export async function getServerSideProps() {
  // Demo: fixed hearing document id.
  const hearingId = "hearing-5180"
  const rawHearing = await getDoc(doc(firestore, `events/${hearingId}`))
  // Guard before destructuring: a missing doc would otherwise throw on data().
  if (!rawHearing.exists()) return { notFound: true }
  const { videoTranscriptionId, videoURL } = rawHearing.data()
  if (!videoTranscriptionId || !videoURL) return { notFound: true }

  // Example transcription id: "639e73ff-bd01-4902-bba7-88faaf39afa9"
  const utterances = await getDocs(
    query(
      collection(
        firestore,
        `transcriptions/${videoTranscriptionId}/utterances`
      ),
      orderBy("start", "asc")
    )
  )

  return {
    props: {
      videoUrl: videoURL,
      // Pick only the serializable fields the page uses: Next.js props must
      // be JSON-serializable, and a blind spread of doc.data() can leak
      // Firestore Timestamp instances into props and crash the render.
      utterances: utterances.docs.map(d => {
        const { start, end, text } = d.data()
        return { start, end, text }
      })
    }
  }
}
diff --git a/styles/VideoTranscription.module.css b/styles/VideoTranscription.module.css
new file mode 100644
index 000000000..386f2a3fd
--- /dev/null
+++ b/styles/VideoTranscription.module.css
@@ -0,0 +1,71 @@
/* Page shell: centered column with breathing room. */
.container {
  padding: 2rem;
  max-width: 1200px;
  margin: 0 auto;
}

.main {
  display: flex;
  flex-direction: column;
  gap: 2rem;
}

/* Video and transcript side-by-side on tablet widths and up. */
@media (min-width: 768px) {
  .main {
    flex-direction: row;
  }
}

.videoContainer {
  flex: 1;
}

.video {
  width: 100%;
  border-radius: 8px;
  box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1);
}

/* Scrollable transcript pane next to the video. */
.transcriptionContainer {
  flex: 1;
  max-height: 600px;
  overflow-y: auto;
  padding: 1rem;
  border: 1px solid #e5e5e5;
  border-radius: 8px;
  background-color: #f9f9f9;
}

.transcription {
  display: flex;
  flex-direction: column;
  gap: 1rem;
}

/* One clickable utterance row. */
.utterance {
  padding: 0.75rem;
  border-radius: 6px;
  cursor: pointer;
  transition: background-color 0.2s ease;
}

.utterance:hover {
  background-color: #f0f0f0;
}
/* Utterance currently matching the video playback position. */
.active {
  background-color: #e6f7ff;
  border-left: 3px solid #1890ff;
}

/* Start–end time label shown above each utterance's text. */
.timestamp {
  font-size: 0.8rem;
  color: #666;
  display: block;
  margin-bottom: 0.25rem;
}

/* Utterance body text. */
.utterance p {
  margin: 0;
  line-height: 1.5;
}