Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Bump Xcode to 15.0. #1284

Merged
merged 7 commits into from
Sep 13, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion .github/workflows/review.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,12 @@ on: [pull_request]
jobs:
build:

runs-on: macOS-12
runs-on: macOS-13

steps:
- uses: actions/checkout@v1
- name: Select Xcode version
run: sudo xcode-select -s '/Applications/Xcode_15.0.app'
- name: bundle install
run: |
bundle install
Expand Down
2 changes: 1 addition & 1 deletion Cartfile
Original file line number Diff line number Diff line change
@@ -1 +1 @@
github "shogo4405/Logboard" ~> 2.3.1
github "shogo4405/Logboard" ~> 2.4.1
2 changes: 1 addition & 1 deletion Cartfile.resolved
Original file line number Diff line number Diff line change
@@ -1 +1 @@
github "shogo4405/Logboard" "2.3.1"
github "shogo4405/Logboard" "2.4.1"
2 changes: 1 addition & 1 deletion Examples/iOS/LiveViewController.swift
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ final class LiveViewController: UIViewController {
logger.warn(error)
}
rtmpStream.addObserver(self, forKeyPath: "currentFPS", options: .new, context: nil)
(view as? NetStreamDrawable)?.attachStream(rtmpStream)
(view as? (any NetStreamDrawable))?.attachStream(rtmpStream)
NotificationCenter.default.addObserver(self, selector: #selector(didInterruptionNotification(_:)), name: AVAudioSession.interruptionNotification, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(didRouteChangeNotification(_:)), name: AVAudioSession.routeChangeNotification, object: nil)
}
Expand Down
2 changes: 1 addition & 1 deletion Examples/iOS/Screencast/SampleHandler.swift
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ open class SampleHandler: RPBroadcastSampleHandler {
case .video:
if let description = CMSampleBufferGetFormatDescription(sampleBuffer) {
let dimensions = CMVideoFormatDescriptionGetDimensions(description)
rtmpStream.videoSettings.videoSize = .init(width: dimensions.width, height: dimensions.height)
rtmpStream.videoSettings.videoSize = .init(width: CGFloat(dimensions.width), height: CGFloat(dimensions.height))
rtmpStream.videoSettings.profileLevel = kVTProfileLevel_H264_Baseline_AutoLevel as String
}
rtmpStream.appendSampleBuffer(sampleBuffer)
Expand Down
10 changes: 5 additions & 5 deletions HaishinKit.podspec
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
Pod::Spec.new do |s|

s.name = "HaishinKit"
s.version = "1.5.8"
s.version = "1.6.0"
s.summary = "Camera and Microphone streaming library via RTMP, HLS for iOS, macOS and tvOS."
s.swift_version = "5.7"
s.swift_version = "5.8"

s.description = <<-DESC
HaishinKit. Camera and Microphone streaming library via RTMP, HLS for iOS, macOS and tvOS.
Expand All @@ -15,16 +15,16 @@ Pod::Spec.new do |s|
s.authors = { "shogo4405" => "[email protected]" }
s.source = { :git => "https://github.com/shogo4405/HaishinKit.swift.git", :tag => "#{s.version}" }

s.ios.deployment_target = "11.0"
s.ios.deployment_target = "12.0"
s.ios.source_files = "Platforms/iOS/*.{h,swift}"

s.osx.deployment_target = "10.13"
s.osx.source_files = "Platforms/iOS/*.{h,swift}"

s.tvos.deployment_target = "11.0"
s.tvos.deployment_target = "12.0"
s.tvos.source_files = "Platforms/iOS/*.{h,swift}"

s.source_files = "Sources/**/*.swift"
s.dependency 'Logboard', '~> 2.3.1'
s.dependency 'Logboard', '~> 2.4.1'

end
85 changes: 40 additions & 45 deletions HaishinKit.xcodeproj/project.pbxproj

Large diffs are not rendered by default.

10 changes: 5 additions & 5 deletions Package.swift
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
// swift-tools-version:5.5
// swift-tools-version:5.9
// The swift-tools-version declares the minimum version of Swift required to build this package.
import PackageDescription

let package = Package(
name: "HaishinKit",
platforms: [
.iOS(.v11),
.tvOS(.v11),
.iOS(.v12),
.tvOS(.v12),
.macOS(.v10_13),
.macCatalyst(.v14)
],
Expand All @@ -15,7 +15,7 @@ let package = Package(
.library(name: "SRTHaishinKit", targets: ["SRTHaishinKit"])
],
dependencies: [
.package(url: "https://github.com/shogo4405/Logboard.git", "2.3.1"..<"2.4.0")
.package(url: "https://github.com/shogo4405/Logboard.git", "2.4.1"..<"2.5.0")
],
targets: [
.binaryTarget(
Expand All @@ -35,7 +35,7 @@ let package = Package(
"MPEG",
"Net",
"RTMP",
"Util",
"Util"
]),
.target(name: "SRTHaishinKit",
dependencies: [
Expand Down
86 changes: 30 additions & 56 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -57,32 +57,20 @@ Project name |Notes |License
- [x] _Tunneled (RTMPT over SSL/TLS) (Technical Preview)_
- [x] _RTMPT (Technical Preview)_
- [x] ReplayKit Live as a Broadcast Upload Extension
- [x] Supported codec
- Audio
- [x] AAC
- Video
- [x] H264/AVC
- ex: `stream.videoSettings.profileLevel = kVTProfileLevel_H264_Baseline_3_1 as String`
- [x] H265/HEVC ([Also server-side must support Enhanced RTMP.](https://github.com/veovera/enhanced-rtmp))
- ex: `stream.videoSettings.profileLevel = kVTProfileLevel_HEVC_Main_AutoLevel as String`
- [x] Enhanced RTMP

### HLS
- [x] HTTPService
- [x] HLS Publish

### SRT
It's a different framework. You can use it through SwiftPM.
```swift
import SRTHaishinKit
```
- [x] Publish and Recording (H264/AAC)
- [x] Playback(beta)
- [ ] mode
- [x] caller
- [x] listener
- [ ] rendezvous


### HLS
- [x] HTTPService
- [x] HLS Publish

### Multi Camera
Supports two camera video sources: a picture-in-picture display that overlays the secondary camera's image on the primary camera's view, and a split-screen display that arranges the two camera images horizontally or vertically.

Expand Down Expand Up @@ -113,65 +101,51 @@ rtmpStream.attachAudio(AVCaptureDevice.default(for: .audio))

### Others
- [x] [Support multitasking camera access.](https://developer.apple.com/documentation/avfoundation/capture_setup/accessing_the_camera_while_multitasking)
- [x] _Support tvOS 11.0+ (Technical Preview)_
- tvOS can't use camera and microphone devices.
- [x] Hardware acceleration for H264 video encoding, AAC audio encoding
- [x] Support "Allow app extension API only" option
- [ ] ~~Support GPUImage framework (~> 0.5.12)~~
- ~~https://github.com/shogo4405/GPUHaishinKit.swift/blob/master/README.md~~
- [ ] ~~Objective-C Bridging~~

## 🌏 Requirements
|-|iOS|OSX|tvOS|Xcode|Swift|
|:----:|:----:|:----:|:----:|:----:|:----:|
|1.5.0+|11.0+|10.13+|10.2+|14.3+|5.7+|
|1.4.0+|11.0+|10.13+|10.2+|14.0+|5.7+|

## 🐾 Examples
Examples project are available for iOS with UIKit, iOS with SwiftUI, macOS and tvOS.
- [x] Camera and microphone publish.
- [x] RTMP Playback
- [x] RTMP Playback
```sh
git clone https://github.com/shogo4405/HaishinKit.swift.git
cd HaishinKit.swift
carthage bootstrap --use-xcframeworks
carthage bootstrap --platform iOS,macOS,tvOS --use-xcframeworks
open HaishinKit.xcodeproj
```

## ☕ Cocoa Keys
## 🌏 Requirements

### Development
|Version|Xcode|Swift|
|:----:|:----:|:----:|
|1.6.0+|15.0+|5.8+|
|1.5.0+|14.0+|5.7+|

### OS
|-|iOS|tvOS|macOS|visionOS|watchOS|
|:----|:----:|:----:|:----:|:----:|:----:|
|HaishinKit|12.0+|12.0+|10.13+|-|-|
|SRTHaishinKit|12.0+|-|-|-|-|

### Cocoa Keys
Please add the following keys to your Info.plist.

iOS 10.0+
**iOS 10.0+**
* NSMicrophoneUsageDescription
* NSCameraUsageDescription

macOS 10.14+
**macOS 10.14+**
* NSMicrophoneUsageDescription
* NSCameraUsageDescription

## 🔧 Installation
### CocoaPods
```rb
source 'https://github.com/CocoaPods/Specs.git'
use_frameworks!

def import_pods
pod 'HaishinKit', '~> 1.5.8'
end

target 'Your Target' do
platform :ios, '11.0'
import_pods
end
```
### Carthage
```
github "shogo4405/HaishinKit.swift" ~> 1.5.8
```
### Swift Package Manager
```
https://github.com/shogo4405/HaishinKit.swift
```
HaishinKit has a multi-module configuration. If you want to use the SRT protocol, please use SRTHaishinKit. SRTHaishinKit supports SPM only.
| | HaishinKit | SRTHaishinKit |
| - | :- | :- |
| SPM | https://github.com/shogo4405/HaishinKit.swift | https://github.com/shogo4405/HaishinKit.swift |
| CocoaPods | source 'https://github.com/CocoaPods/Specs.git'<br>use_frameworks!<br><br>def import_pods<br>  pod 'HaishinKit', '~> 1.6.0'<br>end<br><br>target 'Your Target' do<br>  platform :ios, '12.0'<br>  import_pods<br>end<br> | Not supported. |
| Carthage | github "shogo4405/HaishinKit.swift" ~> 1.6.0 | Not supported. |

## 🔧 Prerequisites
Make sure you setup and activate your AVAudioSession iOS.
Expand Down
4 changes: 2 additions & 2 deletions SRTHaishinKit/SRTStream.swift
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@ public class SRTStream: NetStream {
writer.expectedMedias.remove(type)
}

override public func attachCamera(_ camera: AVCaptureDevice?, onError: ((Error) -> Void)? = nil) {
override public func attachCamera(_ camera: AVCaptureDevice?, onError: ((any Error) -> Void)? = nil) {
if camera == nil {
writer.expectedMedias.remove(.video)
} else {
Expand All @@ -121,7 +121,7 @@ public class SRTStream: NetStream {
super.attachCamera(camera, onError: onError)
}

override public func attachAudio(_ audio: AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession: Bool = true, onError: ((Error) -> Void)? = nil) {
override public func attachAudio(_ audio: AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession: Bool = true, onError: ((any Error) -> Void)? = nil) {
if audio == nil {
writer.expectedMedias.remove(.audio)
} else {
Expand Down
4 changes: 2 additions & 2 deletions Sources/Codec/VTSessionMode.swift
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,8 @@ enum VTSessionMode {
var session: VTCompressionSession?
var status = VTCompressionSessionCreate(
allocator: kCFAllocatorDefault,
width: videoCodec.settings.videoSize.width,
height: videoCodec.settings.videoSize.height,
width: Int32(videoCodec.settings.videoSize.width),
height: Int32(videoCodec.settings.videoSize.height),
codecType: videoCodec.settings.format.codecType,
encoderSpecification: nil,
imageBufferAttributes: videoCodec.attributes as CFDictionary?,
Expand Down
4 changes: 2 additions & 2 deletions Sources/Codec/VideoCodecSettings.swift
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ public struct VideoCodecSettings: Codable {
}

/// Specifies the video size of encoding video.
public var videoSize: VideoSize
public var videoSize: CGSize
/// Specifies the bitrate.
public var bitRate: UInt32
/// Specifies the keyframeInterval.
Expand Down Expand Up @@ -102,7 +102,7 @@ public struct VideoCodecSettings: Codable {

/// Creates a new VideoCodecSettings instance.
public init(
videoSize: VideoSize = .init(width: 854, height: 480),
videoSize: CGSize = .init(width: 854, height: 480),
profileLevel: String = kVTProfileLevel_H264_Baseline_3_1 as String,
bitRate: UInt32 = 640 * 1000,
maxKeyFrameIntervalDuration: Int32 = 2,
Expand Down
20 changes: 0 additions & 20 deletions Sources/Codec/VideoSize.swift

This file was deleted.

5 changes: 3 additions & 2 deletions Sources/Extension/ExpressibleByIntegerLiteral+Extension.swift
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,9 @@ import Foundation

extension ExpressibleByIntegerLiteral {
var data: Data {
var value: Self = self
return Data(bytes: &value, count: MemoryLayout<Self>.size)
return withUnsafePointer(to: self) { value in
return Data(bytes: UnsafeRawPointer(value), count: MemoryLayout<Self>.size)
}
}

init(data: Data) {
Expand Down
4 changes: 2 additions & 2 deletions Sources/HTTP/HTTPStream.swift
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ open class HTTPStream: NetStream {
}

#if os(iOS) || os(macOS)
override open func attachCamera(_ device: AVCaptureDevice?, onError: ((Error) -> Void)? = nil) {
override open func attachCamera(_ device: AVCaptureDevice?, onError: ((any Error) -> Void)? = nil) {
if device == nil {
tsWriter.expectedMedias.remove(.video)
} else {
Expand All @@ -41,7 +41,7 @@ open class HTTPStream: NetStream {
super.attachCamera(device, onError: onError)
}

override open func attachAudio(_ device: AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession: Bool = true, onError: ((Error) -> Void)? = nil) {
override open func attachAudio(_ device: AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession: Bool = true, onError: ((any Error) -> Void)? = nil) {
if device == nil {
tsWriter.expectedMedias.remove(.audio)
} else {
Expand Down
4 changes: 2 additions & 2 deletions Sources/Media/IOMixer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -381,8 +381,8 @@ extension IOMixer: Running {
let isMultiCamSupported = true
#endif
guard let device = error.device, let format = device.videoFormat(
width: sessionPreset.width ?? videoIO.codec.settings.videoSize.width,
height: sessionPreset.height ?? videoIO.codec.settings.videoSize.height,
width: sessionPreset.width ?? Int32(videoIO.codec.settings.videoSize.width),
height: sessionPreset.height ?? Int32(videoIO.codec.settings.videoSize.height),
frameRate: videoIO.frameRate,
isMultiCamSupported: isMultiCamSupported
), device.activeFormat != format else {
Expand Down
6 changes: 3 additions & 3 deletions Sources/Media/IORecorder.swift
Original file line number Diff line number Diff line change
Expand Up @@ -17,13 +17,13 @@ public class IORecorder {
/// The IORecorder error domain codes.
public enum Error: Swift.Error {
/// Failed to create the AVAssetWriter.
case failedToCreateAssetWriter(error: Swift.Error)
case failedToCreateAssetWriter(error: any Swift.Error)
/// Failed to create the AVAssetWriterInput.
case failedToCreateAssetWriterInput(error: NSException)
/// Failed to append the PixelBuffer or SampleBuffer.
case failedToAppend(error: Swift.Error?)
case failedToAppend(error: (any Swift.Error)?)
/// Failed to finish writing the AVAssetWriter.
case failedToFinishWriting(error: Swift.Error?)
case failedToFinishWriting(error: (any Swift.Error)?)
}

/// The default output settings for an IORecorder.
Expand Down
6 changes: 3 additions & 3 deletions Sources/Media/IOUIScreenCaptureUnit.swift
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ private extension CGRect {
// MARK: -
/// The IOUIScreenCaptureUnit class captures the UIView.
public class IOUIScreenCaptureUnit: NSObject, IOScreenCaptureUnit {
static let defaultFrameInterval: Int = 2
static let defaultPreferredFramesPerSecond: Int = 30
static let defaultAttributes: [NSString: NSObject] = [
kCVPixelBufferPixelFormatTypeKey: NSNumber(value: kCVPixelFormatType_32BGRA),
kCVPixelBufferCGBitmapContextCompatibilityKey: true as NSObject
Expand All @@ -24,7 +24,7 @@ public class IOUIScreenCaptureUnit: NSObject, IOScreenCaptureUnit {
/// Specifies the boolean value that indicates whether the snapshot should be rendered after recent changes have been incorporated.
public var afterScreenUpdates = false
/// Specifies the number of snapshots that must pass before the display link notifies the target again.
public var frameInterval: Int = IOUIScreenCaptureUnit.defaultFrameInterval
public var preferredFramesPerSecond: Int = IOUIScreenCaptureUnit.defaultPreferredFramesPerSecond
/// Specifies the CVPixelBufferPool's attributes.
public var attributes: [NSString: NSObject] {
var attributes: [NSString: NSObject] = IOUIScreenCaptureUnit.defaultAttributes
Expand Down Expand Up @@ -153,7 +153,7 @@ extension IOUIScreenCaptureUnit: Running {
self.pixelBufferPool = nil
self.colorSpace = CGColorSpaceCreateDeviceRGB()
self.displayLink = CADisplayLink(target: self, selector: #selector(onScreen))
self.displayLink.frameInterval = self.frameInterval
self.displayLink.preferredFramesPerSecond = self.preferredFramesPerSecond
self.displayLink.add(to: .main, forMode: .common)
}
}
Expand Down
Loading