// Source: go-ts/pkg/audio/playback.go (352 lines, 7.7 KiB, Go)
//go:build windows

package audio
import (
"encoding/binary"
"fmt"
"log"
"sync"
"time"
"unsafe"
"github.com/go-ole/go-ole"
"github.com/moutend/go-wca/pkg/wca"
)
// Player handles WASAPI audio playback with mixing support.
//
// It renders mono 16-bit PCM at 48 kHz through the default output
// device in shared mode, mixing per-sender sample queues into a
// single stream on a 10 ms cadence (see playbackLoop/writeFrame).
type Player struct {
	client       *wca.IAudioClient       // shared-mode WASAPI client
	renderClient *wca.IAudioRenderClient // render service obtained from client
	waveFormat   *wca.WAVEFORMATEX       // fixed PCM format (mono, 48 kHz, 16-bit)
	bufferSize   uint32                  // device buffer size, in frames
	volume       float32                 // master volume 0.0–1.0; guarded by mu
	muted        bool                    // master mute flag; guarded by mu
	mu           sync.Mutex              // guards volume, muted, running, stopChan
	running      bool                    // true while playbackLoop is active
	stopChan     chan struct{}           // closed by Stop to end playbackLoop
	// User buffers for mixing
	// map[SenderID] -> AudioQueue; guarded by bufferMu
	userBuffers map[uint16][]int16
	// User settings (per-sender volume/mute); guarded by bufferMu
	userSettings map[uint16]*UserSettings
	bufferMu     sync.Mutex // guards userBuffers and userSettings
}
// NewPlayer creates a new WASAPI audio player bound to the system's
// default render (output) device. The client is initialized in shared
// mode for mono 16-bit PCM at 48 kHz with a ~100 ms device buffer;
// AUTOCONVERTPCM lets WASAPI resample if the device mix format differs.
//
// The returned Player is idle; call Start to begin playback and Close
// to release COM resources.
func NewPlayer() (*Player, error) {
	// Initialize COM for this thread. The error is deliberately
	// discarded: go-ole reports S_FALSE ("already initialized") as an
	// error, which is harmless here.
	// NOTE(review): playbackLoop runs on a different goroutine without
	// its own COM init — works in practice with go-wca, but worth
	// confirming against COM apartment-threading rules.
	_ = ole.CoInitializeEx(0, ole.COINIT_APARTMENTTHREADED)
	log.Printf("[Audio] Windows/WASAPI initializing...")

	var deviceEnumerator *wca.IMMDeviceEnumerator
	if err := wca.CoCreateInstance(
		wca.CLSID_MMDeviceEnumerator,
		0,
		wca.CLSCTX_ALL,
		wca.IID_IMMDeviceEnumerator,
		&deviceEnumerator,
	); err != nil {
		return nil, fmt.Errorf("failed to create device enumerator: %w", err)
	}
	defer deviceEnumerator.Release()

	var device *wca.IMMDevice
	if err := deviceEnumerator.GetDefaultAudioEndpoint(wca.ERender, wca.EConsole, &device); err != nil {
		return nil, fmt.Errorf("failed to get default render device: %w", err)
	}
	defer device.Release()

	var audioClient *wca.IAudioClient
	if err := device.Activate(wca.IID_IAudioClient, wca.CLSCTX_ALL, nil, &audioClient); err != nil {
		return nil, fmt.Errorf("failed to activate audio client: %w", err)
	}

	// Derive the block-align and byte-rate fields from the channel and
	// bit-depth constants so they cannot drift out of sync.
	const (
		channels      = 1
		sampleRate    = 48000
		bitsPerSample = 16
		blockAlign    = channels * bitsPerSample / 8 // 2 bytes per frame
	)
	waveFormat := &wca.WAVEFORMATEX{
		WFormatTag:      wca.WAVE_FORMAT_PCM,
		NChannels:       channels,
		NSamplesPerSec:  sampleRate,
		WBitsPerSample:  bitsPerSample,
		NBlockAlign:     blockAlign,
		NAvgBytesPerSec: sampleRate * blockAlign, // 96000 B/s
		CbSize:          0,
	}

	duration := wca.REFERENCE_TIME(100 * 10000) // 100 ms in 100-ns units
	if err := audioClient.Initialize(
		wca.AUDCLNT_SHAREMODE_SHARED,
		wca.AUDCLNT_STREAMFLAGS_AUTOCONVERTPCM|wca.AUDCLNT_STREAMFLAGS_SRC_DEFAULT_QUALITY,
		duration,
		0,
		waveFormat,
		nil,
	); err != nil {
		audioClient.Release()
		return nil, fmt.Errorf("failed to initialize audio client: %w", err)
	}

	var bufferSize uint32
	if err := audioClient.GetBufferSize(&bufferSize); err != nil {
		audioClient.Release()
		return nil, fmt.Errorf("failed to get buffer size: %w", err)
	}

	var renderClient *wca.IAudioRenderClient
	if err := audioClient.GetService(wca.IID_IAudioRenderClient, &renderClient); err != nil {
		audioClient.Release()
		return nil, fmt.Errorf("failed to get render client: %w", err)
	}

	return &Player{
		client:       audioClient,
		renderClient: renderClient,
		waveFormat:   waveFormat,
		bufferSize:   bufferSize,
		volume:       1.0,
		muted:        false,
		stopChan:     make(chan struct{}),
		userBuffers:  make(map[uint16][]int16),
		userSettings: make(map[uint16]*UserSettings),
	}, nil
}
// Start begins audio playback. It is a no-op when the player is
// already running. On failure the running flag is rolled back so a
// later Start call can retry; the original left running=true on error,
// making every subsequent Start a silent no-op with no audio.
func (p *Player) Start() error {
	p.mu.Lock()
	if p.running {
		p.mu.Unlock()
		return nil
	}
	p.running = true
	p.stopChan = make(chan struct{})
	p.mu.Unlock()

	if err := p.client.Start(); err != nil {
		// Roll back so the player is not stuck in a phantom
		// "running" state that blocks retries.
		p.mu.Lock()
		p.running = false
		p.mu.Unlock()
		return fmt.Errorf("failed to start audio client: %w", err)
	}
	go p.playbackLoop()
	return nil
}
// Stop halts audio playback and signals the playback goroutine to
// exit. Calling Stop on a player that is not running has no effect.
func (p *Player) Stop() {
	p.mu.Lock()
	wasRunning := p.running
	p.running = false
	p.mu.Unlock()

	if !wasRunning {
		return
	}
	// Closing stopChan terminates playbackLoop; Start creates a fresh
	// channel, so Stop/Start cycles are safe.
	close(p.stopChan)
	p.client.Stop()
}
// Close stops playback and releases the COM interfaces. Released
// interfaces are cleared to nil so an accidental second Close does not
// double-release them.
// NOTE(review): CoUninitialize still runs on every call; it should be
// paired one-to-one with the CoInitializeEx in NewPlayer — confirm
// callers invoke Close exactly once.
func (p *Player) Close() {
	p.Stop()
	if p.renderClient != nil {
		p.renderClient.Release()
		p.renderClient = nil
	}
	if p.client != nil {
		p.client.Release()
		p.client = nil
	}
	ole.CoUninitialize()
}
// PlayPCM queues PCM samples from a specific sender for mixing into
// playback. Samples are dropped when the player or the sender is
// muted. Each sender's queue is bounded: past 2 s of audio (96000
// samples at 48 kHz mono), the oldest samples are dropped and only the
// newest ~1 s is kept, so a stalled consumer cannot leak memory.
func (p *Player) PlayPCM(senderID uint16, samples []int16) {
	// Nothing to queue; also avoids creating an empty map entry that
	// writeFrame would never clean up.
	if len(samples) == 0 {
		return
	}
	// Read the master mute flag under p.mu — it is written by SetMuted
	// under the same lock, so an unlocked read here is a data race.
	p.mu.Lock()
	muted := p.muted
	p.mu.Unlock()
	if muted {
		return
	}

	p.bufferMu.Lock()
	defer p.bufferMu.Unlock()
	// Check per-user mute
	if settings, ok := p.userSettings[senderID]; ok && settings.Muted {
		return
	}
	// Append to the sender's own buffer to keep that sender's audio
	// strictly sequential.
	buf := append(p.userBuffers[senderID], samples...)
	const maxQueued = 48000 * 2 // 2 seconds at 48 kHz mono
	if len(buf) > maxQueued {
		buf = buf[len(buf)-48000:] // keep the newest 1 second
	}
	p.userBuffers[senderID] = buf
}
// SetVolume sets the master playback volume, clamping the input to
// the range [0.0, 1.0].
func (p *Player) SetVolume(vol float32) {
	switch {
	case vol < 0:
		vol = 0
	case vol > 1.0:
		vol = 1.0
	}
	p.mu.Lock()
	defer p.mu.Unlock()
	p.volume = vol
}
// GetVolume reports the current master volume in [0.0, 1.0].
func (p *Player) GetVolume() float32 {
	p.mu.Lock()
	v := p.volume
	p.mu.Unlock()
	return v
}
// SetMuted enables or disables the master mute flag.
func (p *Player) SetMuted(muted bool) {
	p.mu.Lock()
	defer p.mu.Unlock()
	p.muted = muted
}
// IsMuted reports whether the master mute flag is set.
func (p *Player) IsMuted() bool {
	p.mu.Lock()
	m := p.muted
	p.mu.Unlock()
	return m
}
// SetUserVolume sets the playback volume for a specific user
// (1.0 is the default, unscaled level).
func (p *Player) SetUserVolume(clientID uint16, vol float32) {
	p.bufferMu.Lock()
	defer p.bufferMu.Unlock()
	settings, ok := p.userSettings[clientID]
	if !ok {
		settings = &UserSettings{Volume: 1.0, Muted: false}
		p.userSettings[clientID] = settings
	}
	settings.Volume = vol
}
// SetUserMuted sets the mute state for a specific user, creating a
// default settings entry (volume 1.0) on first use.
func (p *Player) SetUserMuted(clientID uint16, muted bool) {
	p.bufferMu.Lock()
	defer p.bufferMu.Unlock()
	settings, ok := p.userSettings[clientID]
	if !ok {
		settings = &UserSettings{Volume: 1.0, Muted: false}
		p.userSettings[clientID] = settings
	}
	settings.Muted = muted
}
// GetUserSettings returns the volume and mute state for a user,
// falling back to the defaults (1.0, unmuted) when the user has no
// stored settings.
func (p *Player) GetUserSettings(clientID uint16) (float32, bool) {
	p.bufferMu.Lock()
	defer p.bufferMu.Unlock()
	settings, ok := p.userSettings[clientID]
	if !ok {
		return 1.0, false
	}
	return settings.Volume, settings.Muted
}
// playbackLoop pumps mixed audio to the device every 10 ms until
// stopChan is closed. It runs on its own goroutine, started by Start.
func (p *Player) playbackLoop() {
	tick := time.NewTicker(10 * time.Millisecond)
	defer tick.Stop()
	for {
		select {
		case <-p.stopChan:
			return
		case <-tick.C:
			p.writeFrame()
		}
	}
}
// writeFrame mixes pending per-user audio into frames and writes them
// to the WASAPI render buffer, looping until either the device buffer
// has no room for a full frame or no queued audio remains.
//
// Fixes vs. the previous version: the `activeUsers` counter was
// declared and only incremented, never read (gc rejects that as
// "declared and not used"), so it is removed; the per-user settings
// map lookup is hoisted out of the per-sample inner loop.
func (p *Player) writeFrame() {
	for {
		// Frames currently queued in the device buffer.
		var padding uint32
		if err := p.client.GetCurrentPadding(&padding); err != nil {
			return
		}
		available := p.bufferSize - padding
		if available < frameSamples {
			return
		}

		// Mix one frame from every user queue into a 32-bit
		// accumulator (headroom for summing several 16-bit streams).
		mixed := make([]int32, frameSamples)
		hasAnyAudio := false
		p.bufferMu.Lock()
		for id, buf := range p.userBuffers {
			if len(buf) == 0 {
				continue
			}
			hasAnyAudio = true

			// Resolve this user's gain once per frame, not per sample.
			gain := float32(1.0)
			if settings, ok := p.userSettings[id]; ok {
				gain = settings.Volume
			}

			// Take up to frameSamples from this user's queue.
			toTake := frameSamples
			if len(buf) < toTake {
				toTake = len(buf)
			}
			for i := 0; i < toTake; i++ {
				mixed[i] += int32(float32(buf[i]) * gain)
			}

			// Consume the mixed samples; drop the map entry once the
			// queue is drained.
			if len(buf) <= frameSamples {
				delete(p.userBuffers, id)
			} else {
				p.userBuffers[id] = buf[frameSamples:]
			}
		}
		p.bufferMu.Unlock()

		// If no audio is playing, write nothing: keeping the device
		// buffer empty minimizes latency when audio resumes.
		if !hasAnyAudio {
			return
		}

		// Get the WASAPI render buffer for one frame.
		var buffer *byte
		if err := p.renderClient.GetBuffer(uint32(frameSamples), &buffer); err != nil {
			return
		}

		p.mu.Lock()
		vol := p.volume
		p.mu.Unlock()

		// Apply master volume, hard-clip to the int16 range, and write
		// little-endian PCM into the device buffer (2 bytes/sample).
		bufSlice := unsafe.Slice(buffer, int(frameSamples)*2)
		for i := 0; i < int(frameSamples); i++ {
			val := int32(float32(mixed[i]) * vol)
			if val > 32767 {
				val = 32767
			} else if val < -32768 {
				val = -32768
			}
			binary.LittleEndian.PutUint16(bufSlice[i*2:], uint16(val))
		}
		p.renderClient.ReleaseBuffer(uint32(frameSamples), 0)
	}
}