-rw-r--r--   public/style/master.css        |   2
-rw-r--r--   source/client/index.ts         |   1
-rw-r--r--   source/client/local_user.ts    | 168
-rw-r--r--   source/client/logger.ts        |   2
-rw-r--r--   source/client/remote_user.ts   |   6
-rw-r--r--   source/client/track_handle.ts  |  14
-rw-r--r--   source/client/user.ts          | 108
7 files changed, 97 insertions(+), 204 deletions(-)
diff --git a/public/style/master.css b/public/style/master.css
index 794eb43..b042a1d 100644
--- a/public/style/master.css
+++ b/public/style/master.css
@@ -65,12 +65,12 @@ input[type="text"] {
 }
 
 .user {
+	display: grid;
 	background-color: var(--bg);
 	border: 0px soly transparent;
 	border-radius: 5px;
 	padding: 1em;
 	vertical-align: baseline;
-	height: 15em;
 	min-width: 10em;
 	margin: 0.5em;
 }
diff --git a/source/client/index.ts b/source/client/index.ts
index 0c7e728..85943eb 100644
--- a/source/client/index.ts
+++ b/source/client/index.ts
@@ -14,7 +14,6 @@ export interface User {
 	stream: MediaStream,
 }
 
-
 export const parameters = get_query_params()
 
 window.onload = () => main()
diff --git a/source/client/local_user.ts b/source/client/local_user.ts
index 7cbd93f..2886a76 100644
--- a/source/client/local_user.ts
+++ b/source/client/local_user.ts
@@ -3,114 +3,83 @@ import { log } from "./logger.ts";
 import { RemoteUser } from "./remote_user.ts";
 import { get_rnnoise_node } from "./rnnoise.ts";
 import { Room } from "./room.ts";
+import { TrackHandle } from "./track_handle.ts";
 import { User } from "./user.ts";
 
 export class LocalUser extends User {
-
-	private audio_track?: MediaStreamTrack
-	private video_track?: MediaStreamTrack
-	private screen_track?: MediaStreamTrack
-	private audio_disable_cleanup?: () => void
-
 	mic_gain?: GainNode
 	default_gain: number = parameter_number("mic_gain", 1)
 
-	controls?: { audio: HTMLElement, video: HTMLElement, mute: HTMLElement, screen: HTMLElement }
-
 	constructor(room: Room, name: string) {
 		super(room, name)
 		this.el.classList.add("local")
 		this.local = true
 		this.create_controls()
-		if (parameter_bool("audio_enabled", false)) this.enable_audio()
-		if (parameter_bool("video_enabled", false)) this.enable_video()
+		this.add_initial_tracks()
+	}
+
+	async add_initial_tracks() {
+		if (parameter_bool("mic_enabled", false)) this.publish_track(await this.create_mic_track())
+		if (parameter_bool("camera_enabled", false)) this.publish_track(await this.create_camera_track())
+		if (parameter_bool("screen_enabled", false)) this.publish_track(await this.create_screen_track())
+	}
+
+	publish_track(t: TrackHandle) {
+		this.room.remote_users.forEach(u => u.peer.addTrack(t.track))
+		this.add_track(t)
+		t.addEventListener("ended", () => {
+			this.room.remote_users.forEach(u => {
+				u.peer.getSenders().forEach(s => {
+					if (s.track == t.track) u.peer.removeTrack(s)
+				})
+			})
+		})
+	}
+
+	add_initial_to_remote(u: RemoteUser) {
+		this.tracks.forEach(t => u.peer.addTrack(t.track))
 	}
 
 	create_controls() {
-		const audio_toggle = document.createElement("input")
-		const video_toggle = document.createElement("input")
-		const mute_toggle = document.createElement("input")
+		const mic_toggle = document.createElement("input")
+		const camera_toggle = document.createElement("input")
 		const screen_toggle = document.createElement("input")
-		audio_toggle.type = video_toggle.type = mute_toggle.type = screen_toggle.type = "button"
-		audio_toggle.value = "Audio"
-		video_toggle.value = "Video"
+		mic_toggle.type = camera_toggle.type = screen_toggle.type = "button"
+		mic_toggle.value = "Microphone"
+		camera_toggle.value = "Camera"
		screen_toggle.value = "Screen"
-		mute_toggle.value = "Mute"
 
-		let audio = parameter_bool("audio_enabled", false),
-			video = parameter_bool("video_enabled", false),
-			mute = parameter_bool("video_enabled", false),
-			screen = parameter_bool("screen_enabled", false)
+		const create = async (_e: HTMLElement, tp: Promise<TrackHandle>) => {
+			log("media", "awaiting track")
+			const t = await tp
+ log("media", "got track") + this.publish_track(t) + } - audio_toggle.addEventListener("click", () => { - audio = !audio - if (audio) this.enable_audio() - else this.disable_audio() - }) - video_toggle.addEventListener("click", () => { - video = !video - if (video) this.enable_video() - else this.disable_video() - }) - screen_toggle.addEventListener("click", () => { - screen = !screen - if (screen) this.enable_screen() - else this.disable_screen() - }) - mute_toggle.addEventListener("click", () => { - mute = !mute - this.mic_gain?.gain?.setValueAtTime(mute ? 0 : this.default_gain, 0) - if (mute) this.controls?.mute.classList.add("enabled") - else this.controls?.mute.classList.remove("enabled") - }) + mic_toggle.addEventListener("click", () => create(mic_toggle, this.create_mic_track())) + camera_toggle.addEventListener("click", () => create(camera_toggle, this.create_camera_track())) + screen_toggle.addEventListener("click", () => create(screen_toggle, this.create_screen_track())) const el = document.createElement("div") el.classList.add("local-controls") - el.append(audio_toggle, video_toggle, mute_toggle, screen_toggle) - this.controls = { video: video_toggle, audio: audio_toggle, mute: mute_toggle, screen: screen_toggle } + el.append(mic_toggle, camera_toggle, screen_toggle) document.body.append(el) } - update_view_w() { - this.update_view() - if (this.stream.getAudioTracks().length > 0) - this.controls?.audio.classList.add("enabled") - else this.controls?.audio.classList.remove("enabled") - - if (this.stream.getVideoTracks().length > 0) - this.controls?.video.classList.add("enabled") - else this.controls?.video.classList.remove("enabled") - } - - add_initial_to_remote(ru: RemoteUser) { - if (this.audio_track) ru.peer.addTrack(this.audio_track) - if (this.video_track) ru.peer.addTrack(this.video_track) - if (this.screen_track) ru.peer.addTrack(this.screen_track) - } - async enable_video() { - if (this.video_track) return - log("media", "requesting user media (video)") + async create_camera_track() { + log("media", "requesting user media (camera)") const user_media = await window.navigator.mediaDevices.getUserMedia({ video: true }) - const t = this.video_track = user_media.getVideoTracks()[0] - this.room.remote_users.forEach(u => u.peer.addTrack(t)) - this.stream.addTrack(t) - this.update_view_w() + return new TrackHandle(user_media.getVideoTracks()[0]) } - async enable_screen() { - if (this.video_track) return + async create_screen_track() { log("media", "requesting user media (screen)") const user_media = await window.navigator.mediaDevices.getDisplayMedia({ video: true }) - const t = this.video_track = user_media.getVideoTracks()[0] - this.room.remote_users.forEach(u => u.peer.addTrack(t)) - this.stream.addTrack(t) - this.update_view_w() + return new TrackHandle(user_media.getVideoTracks()[0]) } - async enable_audio() { - if (this.audio_track) return + async create_mic_track() { log("media", "requesting user media (audio)") - const use_rnnoise = parameter_bool("rnnoise", true) const audio_contraints = use_rnnoise ? 
 			{ channelCount: { ideal: 1 },
@@ -137,53 +106,16 @@ export class LocalUser extends User {
 		}
 		gain.connect(destination)
 
-		this.audio_disable_cleanup = () => {
+		const t = new TrackHandle(destination.stream.getAudioTracks()[0])
+
+		t.addEventListener("ended", () => {
 			source.disconnect()
 			if (rnnoise) rnnoise.disconnect()
 			gain.disconnect()
 			destination.disconnect()
 			this.mic_gain = undefined
-		}
-
-		const t = destination.stream.getAudioTracks()[0]
-		this.audio_track = t
-		this.room.remote_users.forEach(u => u.peer.addTrack(t))
-		this.stream.addTrack(t)
-		this.update_view_w()
-	}
-
-	disable_video() {
-		if (!this.video_track) return
-		this.room.remote_users.forEach(u => {
-			u.peer.getSenders().forEach(s => {
-				if (s.track == this.video_track) u.peer.removeTrack(s)
-			})
-		})
-		this.stream.removeTrack(this.video_track)
-		this.update_view_w()
-		this.video_track = undefined
-	}
-	disable_screen() {
-		if (!this.screen_track) return
-		this.room.remote_users.forEach(u => {
-			u.peer.getSenders().forEach(s => {
-				if (s.track == this.screen_track) u.peer.removeTrack(s)
-			})
 		})
-		this.stream.removeTrack(this.screen_track)
-		this.update_view_w()
-		this.screen_track = undefined
-	}
-	disable_audio() {
-		if (!this.audio_track) return
-		if (this.audio_disable_cleanup) this.audio_disable_cleanup()
-		this.room.remote_users.forEach(u => {
-			u.peer.getSenders().forEach(s => {
-				if (s.track == this.audio_track) u.peer.removeTrack(s)
-			})
-		})
-		this.stream.removeTrack(this.audio_track)
-		this.update_view_w()
-		this.audio_track = undefined
+
+		return t
 	}
 }
diff --git a/source/client/logger.ts b/source/client/logger.ts
index 745ce88..06309a6 100644
--- a/source/client/logger.ts
+++ b/source/client/logger.ts
@@ -17,5 +17,3 @@ export function log(tag: LogTag, message: string, ...data: any[]) {
 	}
 	console.log(`%c[${tag}] ${message}`, "color:" + log_tag_color[tag], ...data);
 }
-
-
diff --git a/source/client/remote_user.ts b/source/client/remote_user.ts
index 9e33a09..2f7c751 100644
--- a/source/client/remote_user.ts
+++ b/source/client/remote_user.ts
@@ -1,10 +1,9 @@
 import { servers } from "./index.ts"
 import { log } from "./logger.ts"
 import { Room } from "./room.ts"
+import { TrackHandle } from "./track_handle.ts";
 import { User } from "./user.ts"
 
-
-
 export class RemoteUser extends User {
 	peer: RTCPeerConnection
 	negotiation_busy = false
@@ -19,7 +18,7 @@ export class RemoteUser extends User {
 		this.peer.ontrack = ev => {
 			const t = ev.track
 			log("media", "remote track", t)
-			this.add_track(t)
+			this.add_track(new TrackHandle(t))
 		}
 		this.peer.onnegotiationneeded = async () => {
 			log("webrtc", "negotiation needed")
@@ -30,7 +29,6 @@ export class RemoteUser extends User {
 		}
 	}
 
-
 	async offer() {
 		this.negotiation_busy = true
 		const offer_description = await this.peer.createOffer()
diff --git a/source/client/track_handle.ts b/source/client/track_handle.ts
new file mode 100644
index 0000000..bf3858f
--- /dev/null
+++ b/source/client/track_handle.ts
@@ -0,0 +1,14 @@
+
+export class TrackHandle extends EventTarget {
+	constructor(public track: MediaStreamTrack) {
+		super()
+		track.onended = () => this.dispatchEvent(new CustomEvent("ended"))
+		track.onmute = () => this.dispatchEvent(new CustomEvent("mute"))
+		track.onunmute = () => this.dispatchEvent(new CustomEvent("unmute"))
+	}
+
+	get kind() { return this.track.kind }
+	get label() { return this.track.label }
+	get muted() { return this.track.muted }
+	get id() { return this.track.id }
+}
diff --git a/source/client/user.ts b/source/client/user.ts
index ef1449f..4f458d6 100644
--- a/source/client/user.ts
+++ b/source/client/user.ts
@@ -1,6 +1,7 @@
 
 import { log } from "./logger.ts"
 import { Room } from "./room.ts"
+import { TrackHandle } from "./track_handle.ts";
 
 
 export abstract class User {
@@ -8,13 +9,10 @@ export abstract class User {
 
 	room: Room
 	el: HTMLElement
-	media_el?: HTMLElement
-
-	display?: { audio_status_el: HTMLElement, video_status_el: HTMLElement }
 
 	local = false
 
-	stream: MediaStream = new MediaStream()
+	protected tracks: Set<TrackHandle> = new Set()
 
 	constructor(room: Room, name: string) {
 		this.name = name
@@ -23,27 +21,28 @@ export abstract class User {
 		this.el.classList.add("user")
 		this.room.el.append(this.el)
 		this.setup_view()
-		setTimeout(() => this.update_view(), 1)
 	}
 
-	add_track(t: MediaStreamTrack) {
-		this.stream.addTrack(t)
-		this.update_view()
-		t.onended = () => {
+	add_track(t: TrackHandle) {
+		this.tracks.add(t)
+		this.create_track_element(t)
+		t.addEventListener("ended", () => {
 			log("media", "track ended", t)
-			this.stream.removeTrack(t)
-			this.update_view()
-		}
-		t.onmute = () => {
+			this.tracks.delete(t)
+		})
+		t.addEventListener("mute", () => {
 			log("media", "track muted", t)
-			this.stream.removeTrack(t)
-			this.update_view()
-		}
-		t.onunmute = () => {
+		})
+		t.addEventListener("unmute", () => {
 			log("media", "track unmuted", t)
-			this.stream.addTrack(t)
-			this.update_view()
-		}
+		})
+		//@ts-ignore a
+		window.blub = t
+		// setTimeout(() => {
+		// 	console.log("ev");
+		// 	t.dispatchEvent(new Event("ended"))
+		// 	// t.dispatchEvent(new MediaStreamTrackEvent("ended", { track: t, bubbles: false, cancelable: true, composed: false }))
+		// }, 5000)
 	}
 
 	setup_view() {
@@ -52,70 +51,23 @@ export abstract class User {
 		const name_el = document.createElement("span")
 		name_el.textContent = this.name
 		name_el.classList.add("name")
-		const audio_status_el = document.createElement("span")
-		const video_status_el = document.createElement("span")
-		video_status_el.classList.add("status", "video-status")
-		audio_status_el.classList.add("status", "audio-status")
-		audio_status_el.textContent = "A"
-		video_status_el.textContent = "V"
-		info_el.append(audio_status_el, video_status_el, name_el)
-		this.display = { video_status_el, audio_status_el }
+		info_el.append(name_el)
 		this.el.append(info_el)
 	}
 
-	update_view() {
-		if (this.stream.getAudioTracks().length > 0)
-			this.display?.audio_status_el.classList.add("enabled")
-		else this.display?.audio_status_el.classList.remove("enabled")
-
-		if (this.stream.getVideoTracks().length > 0)
-			this.display?.video_status_el.classList.add("enabled")
-		else this.display?.video_status_el.classList.remove("enabled")
-
-		if (this.media_el) this.el.removeChild(this.media_el)
-		this.media_el = this.create_media_view()
-		this.el.appendChild(this.media_el)
-	}
-
-	create_media_view() {
-		const has_video = this.stream.getVideoTracks().length > 0
-		const has_audio = this.stream.getAudioTracks().length > 0
-		if (this.local && !has_video) return document.createElement("div")
-		const media_el = has_video ? document.createElement("video") : document.createElement("audio")
+	create_track_element(t: TrackHandle) {
+		const is_video = t.kind == "video"
+		const media_el = is_video ? document.createElement("video") : document.createElement("audio")
document.createElement("video") : document.createElement("audio") + media_el.srcObject = new MediaStream([t.track]) media_el.classList.add("media") media_el.autoplay = true - if (has_video) media_el.toggleAttribute("playsinline") - media_el.srcObject = this.stream - if (has_video) media_el.addEventListener("click", () => { - media_el.classList.remove("maximized") - }) - if (this.local) media_el.muted = true + media_el.controls = true - const controls_el = document.createElement("div") - controls_el.classList.add("media-controls") - if (has_video) { - const pip_el = document.createElement("input") - pip_el.type = "button" - pip_el.addEventListener("click", () => { - // @ts-ignore firefox feature - media_el.requestPictureInPicture() - }) - pip_el.value = "Picture-in-Picture" - const max_el = document.createElement("input") - max_el.type = "button" - max_el.addEventListener("click", () => { - media_el.classList.add("maximized") - }) - max_el.value = "Maximize" - controls_el.append(max_el, pip_el) - } - if (has_audio) { - // TODO volume controls - } + if (this.local) media_el.muted = true - const el = document.createElement("div") - el.classList.add("media-container") - el.append(media_el, controls_el) - return el + this.el.append(media_el) + t.addEventListener("ended", () => { + media_el.remove() + }) } }
\ No newline at end of file
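
Usage note (not part of the commit): the TrackHandle added in source/client/track_handle.ts is a thin EventTarget wrapper around a MediaStreamTrack, so callers can attach several lifecycle listeners instead of overwriting the track's onended/onmute/onunmute handlers. Below is a minimal sketch of how a caller might drive it, assuming a browser context; the demo function, the timeout, and the log messages are illustrative and do not appear in this repository.

// Sketch only: exercising the TrackHandle wrapper from this commit.
import { TrackHandle } from "./track_handle.ts"

async function track_handle_demo() {
	// Grab a camera track the same way LocalUser.create_camera_track() does.
	const media = await navigator.mediaDevices.getUserMedia({ video: true })
	const handle = new TrackHandle(media.getVideoTracks()[0])

	// The getters simply forward to the wrapped MediaStreamTrack.
	console.log(handle.kind, handle.label, handle.id, handle.muted)

	// Lifecycle events are re-dispatched as CustomEvents, so multiple
	// listeners can coexist on one track.
	handle.addEventListener("mute", () => console.log("track muted"))
	handle.addEventListener("unmute", () => console.log("track unmuted"))
	handle.addEventListener("ended", () => console.log("track ended, tear down UI"))

	// Note: MediaStreamTrack.stop() does not fire "ended" on the stopping
	// side, so a local teardown path has to dispatch the event itself.
	setTimeout(() => {
		handle.track.stop()
		handle.dispatchEvent(new CustomEvent("ended"))
	}, 5000)
}

track_handle_demo()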