Camera and Microphone streaming library via RTMP and SRT for iOS, macOS, tvOS and visionOS.
To enable verbose logging for debugging, set the trace log level:
LBLogger.with(kHaishinKitIdentifier).level = .trace
Project name | Notes | License |
---|---|---|
HaishinKit for Android. | Camera and Microphone streaming library via RTMP for Android. | BSD 3-Clause "New" or "Revised" License |
HaishinKit for Flutter. | Camera and Microphone streaming library via RTMP for Flutter. | BSD 3-Clause "New" or "Revised" License |
Starting from version 2.0.0, multiple streams are supported, allowing live streaming to separate services. Views also support this, enabling the verification of raw video data.
let mixer = MediaMixer()
let connection0 = RTMPConnection()
let connection1 = RTMPConnection()
let stream0 = RTMPStream(connection: connection0) // for Y Service.
let stream1 = RTMPStream(connection: connection1) // for F Service.
let view = MTHKView()
view.track = 0 // Video Track Number: 0 or 1, or UInt8.max.
let view2 = MTHKView()
Task {
await mixer.addOutput(stream0)
await mixer.addOutput(stream1)
await mixer.addOutput(view)
await stream0.addOutput(view2)
}
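To actually go live on both services, each stream then connects and publishes over its own connection, mirroring the RTMP ingest example later in this document. The URLs and stream keys below are placeholders:

```swift
Task {
    do {
        // Hypothetical endpoints; replace with each service's ingest URL and stream key.
        try await connection0.connect("rtmp://ingest.y-service.example/live")
        try await stream0.publish("streamKey0")
        try await connection1.connect("rtmp://ingest.f-service.example/live")
        try await stream1.publish("streamKey1")
    } catch {
        print(error)
    }
}
```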
Through off-screen rendering capabilities, it is possible to display any text or bitmap on a video during broadcasting or viewing. This allows for various applications such as watermarking and time display.
Screenshots: Ingest and Playback.
Task { @ScreenActor in
var videoMixerSettings = VideoMixerSettings()
videoMixerSettings.mode = .offscreen
await mixer.setVideoMixerSettings(videoMixerSettings)
let textScreenObject = TextScreenObject() // Typically stored as a property so it can be updated later (e.g. for a time display).
textScreenObject.horizontalAlignment = .right
textScreenObject.verticalAlignment = .bottom
textScreenObject.layoutMargin = .init(top: 0, left: 0, bottom: 16, right: 16)
await mixer.screen.backgroundColor = UIColor.black.cgColor
let videoScreenObject = VideoTrackScreenObject()
videoScreenObject.cornerRadius = 32.0
videoScreenObject.track = 1
videoScreenObject.horizontalAlignment = .right
videoScreenObject.layoutMargin = .init(top: 16, left: 0, bottom: 0, right: 16)
videoScreenObject.size = .init(width: 160 * 2, height: 90 * 2)
_ = videoScreenObject.registerVideoEffect(MonochromeEffect())
let imageScreenObject = ImageScreenObject()
let imageURL = URL(fileURLWithPath: Bundle.main.path(forResource: "game_jikkyou", ofType: "png") ?? "")
if let provider = CGDataProvider(url: imageURL as CFURL) {
imageScreenObject.verticalAlignment = .bottom
imageScreenObject.layoutMargin = .init(top: 0, left: 0, bottom: 16, right: 0)
imageScreenObject.cgImage = CGImage(
pngDataProviderSource: provider,
decode: nil,
shouldInterpolate: false,
intent: .defaultIntent
)
} else {
logger.info("no image")
}
let assetScreenObject = AssetScreenObject()
assetScreenObject.size = .init(width: 180, height: 180)
assetScreenObject.layoutMargin = .init(top: 16, left: 16, bottom: 0, right: 0)
try? assetScreenObject.startReading(AVAsset(url: URL(fileURLWithPath: Bundle.main.path(forResource: "SampleVideo_360x240_5mb", ofType: "mp4") ?? "")))
try? mixer.screen.addChild(assetScreenObject)
try? mixer.screen.addChild(videoScreenObject)
try? mixer.screen.addChild(imageScreenObject)
try? mixer.screen.addChild(textScreenObject)
mixer.screen.delegate = self
}
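For the time-display use case mentioned above, the text object can simply be refreshed on a timer. A minimal sketch, assuming `textScreenObject` is stored where it stays reachable (e.g. as a property) and that its rendered text is set via its `string` property:

```swift
// Refresh the on-screen text once per second with the current time.
Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { _ in
    Task { @ScreenActor in
        textScreenObject.string = Date().formatted(date: .omitted, time: .standard)
    }
}
```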
Features | PiPHKView | MTHKView |
---|---|---|
Engine | AVSampleBufferDisplayLayer | Metal |
Publish | ✔ | ✔ |
Playback | ✔ | ✔ |
VisualEffect | ✔ | ✔ |
MultiCamera | ✔ | ✔ |
PictureInPicture | ✔ | - |
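Both views attach to a stream the same way; a minimal sketch using the same `stream` and `view` names as the examples below:

```swift
// PiPHKView renders via AVSampleBufferDisplayLayer; MTHKView renders via Metal.
let pipView = PiPHKView(frame: view.bounds)
Task { @MainActor in
    await stream.addOutput(pipView)
    view.addSubview(pipView)
}
```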
An Examples project is available for iOS with UIKit, iOS with SwiftUI, macOS, and tvOS. The macOS example requires an Apple Silicon Mac.
git clone https://github.com/shogo4405/HaishinKit.swift.git
cd HaishinKit.swift
open HaishinKit.xcodeproj
Version | Xcode | Swift |
---|---|---|
2.0.0+ | 16.0+ | 5.10+ |
1.9.0+ | 15.4+ | 5.10+ |
- | iOS | tvOS | macOS | visionOS | watchOS |
---|---|---|---|---|---|
HaishinKit | 13.0+ | 13.0+ | 10.15+ | 1.0+ | - |
SRTHaishinKit | 13.0+ | 13.0+ | 13.0+ | 1.0+ | - |
Please add the required usage-description keys to your Info.plist (NSCameraUsageDescription and NSMicrophoneUsageDescription for camera and microphone access).
iOS 10.0+
macOS 10.14+
tvOS 17.0+
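These keys cover camera and microphone capture. At runtime the app still needs to request access through AVFoundation; a minimal sketch:

```swift
import AVFoundation

Task {
    // Request camera and microphone access before attaching capture devices.
    let cameraGranted = await AVCaptureDevice.requestAccess(for: .video)
    let micGranted = await AVCaptureDevice.requestAccess(for: .audio)
    print("camera:", cameraGranted, "microphone:", micGranted)
}
```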
HaishinKit has a multi-module configuration. If you want to use the SRT protocol, please use SRTHaishinKit.
- | HaishinKit | SRTHaishinKit |
---|---|---|
SPM | https://github.com/shogo4405/HaishinKit.swift | https://github.com/shogo4405/HaishinKit.swift |
CocoaPods | def import_pods<br>  pod 'HaishinKit'<br>end | def import_pods<br>  pod 'SRTHaishinKit'<br>end |
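If you add the dependency through a Package.swift manifest rather than Xcode, the entry looks roughly like this (the target, platform, and version values are assumptions; the product names follow the modules named above):

```swift
// swift-tools-version:5.10
import PackageDescription

let package = Package(
    name: "MyApp",
    platforms: [.iOS(.v13)],
    dependencies: [
        .package(url: "https://github.com/shogo4405/HaishinKit.swift", from: "2.0.0")
    ],
    targets: [
        .target(
            name: "MyApp",
            dependencies: [
                .product(name: "HaishinKit", package: "HaishinKit.swift"),
                // SRTHaishinKit is only needed when using the SRT protocol.
                .product(name: "SRTHaishinKit", package: "HaishinKit.swift")
            ]
        )
    ]
)
```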
Make sure you set up and activate your AVAudioSession on iOS.
import AVFoundation
let session = AVAudioSession.sharedInstance()
do {
try session.setCategory(.playAndRecord, mode: .default, options: [.defaultToSpeaker, .allowBluetooth])
try session.setActive(true)
} catch {
print(error)
}
let mixer = MediaMixer()
let connection = RTMPConnection()
let stream = RTMPStream(connection: connection)
let hkView = MTHKView(frame: view.bounds)
Task {
do {
try await mixer.attachAudio(AVCaptureDevice.default(for: .audio))
} catch {
print(error)
}
do {
try await mixer.attachVideo(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back))
} catch {
print(error)
}
await mixer.addOutput(stream)
}
Task { @MainActor in
await stream.addOutput(hkView)
// add ViewController#view
view.addSubview(hkView)
}
Task {
do {
try await connection.connect("rtmp://localhost/appName/instanceName")
try await stream.publish(streamName)
} catch RTMPConnection.Error.requestFailed(let response) {
print(response)
} catch RTMPStream.Error.requestFailed(let response) {
print(response)
} catch {
print(error)
}
}
let connection = RTMPConnection()
let stream = RTMPStream(connection: connection)
let audioPlayer = AudioPlayer(AVAudioEngine())
let hkView = MTHKView(frame: view.bounds)
Task { @MainActor in
await stream.addOutput(hkView)
}
Task {
// requires attachAudioPlayer
await stream.attachAudioPlayer(audioPlayer)
do {
try await connection.connect("rtmp://localhost/appName/instanceName")
try await stream.play(streamName)
} catch RTMPConnection.Error.requestFailed(let response) {
print(response)
} catch RTMPStream.Error.requestFailed(let response) {
print(response)
} catch {
print(error)
}
}
To authenticate with an RTMP server, include the username and password in the connection URL:
let connection = RTMPConnection()
try await connection.connect("rtmp://username:password@localhost/appName/instanceName")
let mixer = MediaMixer()
let connection = SRTConnection()
let stream = SRTStream(connection: connection)
let hkView = MTHKView(frame: view.bounds)
Task {
do {
try await mixer.attachAudio(AVCaptureDevice.default(for: .audio))
} catch {
print(error)
}
do {
try await mixer.attachVideo(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back))
} catch {
print(error)
}
await mixer.addOutput(stream)
}
Task { @MainActor in
await stream.addOutput(hkView)
// add ViewController#view
view.addSubview(hkView)
}
Task {
do {
try await connection.connect("srt://host:port?option=foo")
try await stream.publish()
} catch {
print(error)
}
}
let connection = SRTConnection()
let stream = SRTStream(connection: connection)
let hkView = MTHKView(frame: view.bounds)
let audioPlayer = AudioPlayer(AVAudioEngine())
Task { @MainActor in
await stream.addOutput(hkView)
// add ViewController#view
view.addSubview(hkView)
}
Task {
// requires attachAudioPlayer
await stream.attachAudioPlayer(audioPlayer)
do {
try await connection.connect("srt://host:port?option=foo")
try await stream.play()
} catch {
print(error)
}
}
let mixer = MediaMixer()
await mixer.setFrameRate(30)
await mixer.setSessionPreset(AVCaptureSession.Preset.medium)
// Do not call beginConfiguration() and commitConfiguration() within this closure, as they are called internally.
await mixer.configuration { session in
session.automaticallyConfiguresApplicationAudioSession = true
}
Specifies the audio device settings.
let audioDevice = AVCaptureDevice.default(for: .audio)
try? await mixer.attachAudio(audioDevice, track: 0) { audioDeviceUnit in }
If you want to mix multiple audio tracks, please enable the feature flag.
await mixer.setMultiTrackAudioMixingEnabled(true)
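Once the flag is enabled, each additional source is attached to its own track number through the same `attachAudio(_:track:)` call shown above. A sketch (the second device is a placeholder; resolve it however your app does):

```swift
// With multi-track mixing enabled, each source gets its own track number.
let secondAudioDevice: AVCaptureDevice? = nil // placeholder for an additional input
Task {
    try? await mixer.attachAudio(AVCaptureDevice.default(for: .audio), track: 0)
    try? await mixer.attachAudio(secondAudioDevice, track: 1)
}
```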
When you specify the sampling rate, it will perform resampling. Additionally, in the case of multiple channels, down-mixing can be applied.
// Setting the value to 0 will use the same value as the main track.
var settings = AudioMixerSettings(
sampleRate: 44100,
channels: 0
)
settings.tracks = [
0: .init(
isMuted: false,
downmix: true,
channelMap: nil
)
]
await mixer.setAudioMixerSettings(settings)
var audioSettings = AudioCodecSettings()
/// Specifies the bitRate of audio output.
audioSettings.bitRate = 64 * 1000
/// Specifies whether to mix down the channels. Currently, it supports input sources with 4, 5, 6, and 8 channels.
audioSettings.downmix = true
/// Specifies the map of output channels to input channels.
audioSettings.channelMap = nil
await stream.setAudioSettings(audioSettings)
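As an illustration only, reading channelMap as "index = output channel, value = input channel" (the interpretation implied by the comment above), swapping a stereo pair would look like:

```swift
// Output channel 0 takes input channel 1, output channel 1 takes input channel 0.
audioSettings.channelMap = [1, 0]
await stream.setAudioSettings(audioSettings)
```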
Specifies the video capture settings.
let front = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
do {
try await mixer.attachVideo(front, track: 0) { videoUnit in
videoUnit.isVideoMirrored = true
videoUnit.preferredVideoStabilizationMode = .standard
videoUnit.colorFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
}
} catch {
print(error)
}
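For multi-camera capture (the off-screen example above reads from track 1 via `videoScreenObject.track = 1`), each camera gets its own track; a sketch, assuming the device supports simultaneous capture:

```swift
let backCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
let frontCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
Task {
    do {
        // Track 0 feeds the main picture, track 1 the secondary one.
        try await mixer.attachVideo(backCamera, track: 0)
        try await mixer.attachVideo(frontCamera, track: 1)
    } catch {
        print(error)
    }
}
```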
var videoMixerSettings = VideoMixerSettings()
/// Specifies the image rendering mode: .passthrough or .offscreen.
videoMixerSettings.mode = .passthrough
/// Specifies whether the video signal is muted (frozen).
videoMixerSettings.isMuted = false
/// Specifies the main track number.
videoMixerSettings.mainTrack = 0
await mixer.setVideoMixerSettings(videoMixerSettings)
var videoSettings = VideoCodecSettings(
videoSize: .init(width: 854, height: 480),
profileLevel: kVTProfileLevel_H264_Baseline_3_1 as String,
bitRate: 640 * 1000,
maxKeyFrameIntervalDuration: 2,
scalingMode: .trim,
bitRateMode: .average,
allowFrameReordering: nil,
isHardwareEncoderEnabled: true
)
await stream.setVideoSettings(videoSettings)
// Specifies the recording settings. "0" means the same as the input.
let recorder = HKStreamRecorder()
await stream.addOutput(recorder)
try await recorder.startRecording(fileName, settings: [
AVMediaType.audio: [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 0,
AVNumberOfChannelsKey: 0,
// AVEncoderBitRateKey: 128000,
],
AVMediaType.video: [
AVVideoCodecKey: AVVideoCodecH264,
AVVideoHeightKey: 0,
AVVideoWidthKey: 0,
/*
AVVideoCompressionPropertiesKey: [
AVVideoMaxKeyFrameIntervalDurationKey: 2,
AVVideoProfileLevelKey: AVVideoProfileLevelH264Baseline30,
AVVideoAverageBitRateKey: 512000
]
*/
]
])
try await recorder.stopRecording()
BSD-3-Clause