diff --git a/griffin/griffin_objc.m b/griffin/griffin_objc.m
index ec64a64803f7..36fd553c5b8b 100644
--- a/griffin/griffin_objc.m
+++ b/griffin/griffin_objc.m
@@ -51,6 +51,10 @@
#include "../input/drivers_joypad/mfi_joypad.m"
#endif
+#ifdef HAVE_AVF
+#include "../record/drivers/record_avf.m"
+#endif
+
#ifdef HAVE_COREAUDIO3
#include "../audio/drivers/coreaudio3.m"
#endif
diff --git a/record/drivers/record_avf.m b/record/drivers/record_avf.m
new file mode 100644
index 000000000000..9e9ba8d52b70
--- /dev/null
+++ b/record/drivers/record_avf.m
@@ -0,0 +1,193 @@
+/* RetroArch - A frontend for libretro.
+ *
+ * RetroArch is free software: you can redistribute it and/or modify it under the terms
+ * of the GNU General Public License as published by the Free Software Found-
+ * ation, either version 3 of the License, or (at your option) any later version.
+ *
+ * RetroArch is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+ * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ * PURPOSE. See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along with RetroArch.
+ * If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+#import <CoreMedia/CoreMedia.h>
+
+#import "../record_driver.h"
+
+@interface AVFRecorder : NSObject
+
+/* Writer pipeline: one AVAssetWriter, a video input fed through a pixel
+ * buffer adaptor, and an LPCM audio input. */
+@property (nonatomic, strong) AVAssetWriter *assetWriter;
+@property (nonatomic, strong) AVAssetWriterInput *videoInput;
+@property (nonatomic, strong) AVAssetWriterInput *audioInput;
+@property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *videoPixelBufferAdaptor;
+@property (nonatomic, strong) NSDictionary *audioSettings;
+
+- (void)setupAssetWriterWithParams:(const struct record_params *)params;
+- (void)writePixelBuffer:(CVPixelBufferRef)pixelBuffer presentationTime:(CMTime)presentationTime;
+- (void)writePCMData:(AudioBuffer)audioBuffer presentationTimeStamp:(CMTime)presentationTimeStamp;
+- (void)endRecordingWithCompletion:(void (^)(void))completion;
+
+@end
+
+@implementation AVFRecorder
+
+- (void)setupAssetWriterWithParams:(const struct record_params *)params {
+ NSError *error = nil;
+ // Write to the output path requested by the frontend
+ NSURL *outputFileURL = [NSURL fileURLWithPath:[NSString stringWithUTF8String:params->filename]];
+
+ // Create an AVAssetWriter with the desired output URL and file type
+ self.assetWriter = [AVAssetWriter assetWriterWithURL:outputFileURL fileType:AVFileTypeQuickTimeMovie error:&error];
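+ // A robust implementation would check `error` (and a nil writer) here and
+ // abort setup instead of continuing.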
+
+ // Configure video settings
+ NSDictionary *videoSettings = @{
+ AVVideoCodecKey: AVVideoCodecTypeH264,
+ AVVideoWidthKey: @(params->out_width),
+ AVVideoHeightKey: @(params->out_height)
+ };
+ self.videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
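+ // Suggested: mark the input as real-time so readyForMoreMediaData behaves
+ // sensibly for frames arriving live from the frontend.
+ self.videoInput.expectsMediaDataInRealTime = YES;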
+
+ // Create a pixel buffer adaptor for video input
+ NSDictionary *sourcePixelBufferAttributes = @{
+ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32ARGB)
+ };
+ self.videoPixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.videoInput sourcePixelBufferAttributes:sourcePixelBufferAttributes];
+
+ if ([self.assetWriter canAddInput:self.videoInput]) {
+ [self.assetWriter addInput:self.videoInput];
+ }
+
+ // Configure audio settings
+ self.audioSettings = @{
+ AVFormatIDKey: @(kAudioFormatLinearPCM),
+ AVSampleRateKey: @(params->samplerate),
+ AVNumberOfChannelsKey: @(params->channels),
+ AVLinearPCMBitDepthKey: @(16), // 16-bit signed integer samples
+ AVLinearPCMIsBigEndianKey: @(NO),
+ AVLinearPCMIsFloatKey: @(NO),
+ };
+ self.audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:self.audioSettings];
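+ // Suggested: same real-time hint for the audio input.
+ self.audioInput.expectsMediaDataInRealTime = YES;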
+
+ if ([self.assetWriter canAddInput:self.audioInput]) {
+ [self.assetWriter addInput:self.audioInput];
+ }
+}
+
+- (void)writePixelBuffer:(CVPixelBufferRef)pixelBuffer presentationTime:(CMTime)presentationTime {
+ if (!self.videoPixelBufferAdaptor.assetWriterInput.readyForMoreMediaData) {
+ return;
+ }
+
+ if (self.assetWriter.status == AVAssetWriterStatusUnknown) {
+ [self.assetWriter startWriting];
+ [self.assetWriter startSessionAtSourceTime:presentationTime];
+ }
+
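+ // Note: appendPixelBuffer:withPresentationTime: returns a BOOL that a
+ // complete implementation would check and report back to the caller.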
+ [self.videoPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime];
+}
+
+- (void)writePCMData:(AudioBuffer)audioBuffer presentationTimeStamp:(CMTime)presentationTimeStamp {
+ if (!self.audioInput.isReadyForMoreMediaData) {
+ return;
+ }
+
+ if (self.assetWriter.status == AVAssetWriterStatusUnknown) {
+ [self.assetWriter startWriting];
+ [self.assetWriter startSessionAtSourceTime:presentationTimeStamp];
+ }
+
+ /* TODO: create a CMAudioFormatDescription for these PCM settings and pass
+ * the real frame count (mDataByteSize divided by bytes per frame) to
+ * CMSampleBufferCreate below; a single untimed sample is a placeholder. */
+ CMSampleBufferRef audioBufferCopy = NULL;
+ CMSampleTimingInfo timing = {kCMTimeInvalid, presentationTimeStamp, kCMTimeInvalid};
+
+ OSStatus status = CMSampleBufferCreate(kCFAllocatorDefault,
+ NULL,
+ false,
+ NULL,
+ NULL,
+ NULL,
+ 1,
+ 1,
+ &timing,
+ 0,
+ NULL,
+ &audioBufferCopy);
+
+ if (status == noErr) {
+ AudioBufferList audioBufferList;
+ audioBufferList.mNumberBuffers = 1;
+ audioBufferList.mBuffers[0] = audioBuffer;
+
+ status = CMSampleBufferSetDataBufferFromAudioBufferList(audioBufferCopy,
+ kCFAllocatorDefault,
+ kCFAllocatorDefault,
+ 0,
+ &audioBufferList);
+ }
+
+ if (status == noErr) {
+ [self.audioInput appendSampleBuffer:audioBufferCopy];
+ }
+
+ // Release the sample buffer even if filling it failed, so it cannot leak.
+ if (audioBufferCopy) {
+ CFRelease(audioBufferCopy);
+ }
+}
+
+- (void)endRecordingWithCompletion:(void (^)(void))completion {
+ [self.videoInput markAsFinished];
+ [self.audioInput markAsFinished];
+
+ [self.assetWriter finishWritingWithCompletionHandler:completion];
+}
+
+@end
+
+static void *avf_record_new(const struct record_params *params)
+{
+ AVFRecorder *recorder = [[AVFRecorder alloc] init];
+ [recorder setupAssetWriterWithParams:params];
+ return (__bridge_retained void *)recorder;
+}
+
+static void avf_record_free(void *data)
+{
+ __block AVFRecorder *recorder = (__bridge_transfer AVFRecorder *)data;
+ if (recorder == nil)
+ return;
+
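+ /* The completion block captures `recorder` strongly, keeping it alive until
+ * AVAssetWriter finishes writing; clearing it afterwards drops the last
+ * reference. */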
+ [recorder endRecordingWithCompletion:^{
+ recorder = nil;
+ }];
+}
+
+static bool avf_record_push_video(void *data, const struct record_video_data *video_data)
+{
+ AVFRecorder *recorder = (__bridge AVFRecorder *)data;
+ if (recorder == nil)
+ return false;
+
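+ /* Stub: a complete implementation would copy video_data->data
+ * (video_data->width x video_data->height pixels, video_data->pitch bytes
+ * per row) into a CVPixelBuffer matching the adaptor's pixel format and
+ * call writePixelBuffer:presentationTime: with a timestamp derived from
+ * the frame index and the configured fps. */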
+ return true;
+}
+
+static bool avf_record_push_audio(void *data, const struct record_audio_data *audio_data)
+{
+ AVFRecorder *recorder = (__bridge AVFRecorder *)data;
+ if (recorder == nil)
+ return false;
+
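+ /* Stub: a complete implementation would wrap audio_data->data
+ * (audio_data->frames interleaved PCM frames) in an AudioBuffer and call
+ * writePCMData:presentationTimeStamp: with a timestamp derived from the
+ * running sample count and the configured sample rate. */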
+ return true;
+}
+
+static bool avf_record_finalize(void *data)
+{
+ AVFRecorder *recorder = (__bridge AVFRecorder *)data;
+ if (recorder == nil)
+ return false;
+
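+ /* Stub: finalize is the natural place to close the writer session
+ * (endRecordingWithCompletion:) so the file is playable; this skeleton
+ * currently defers that to avf_record_free. */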
+ return true;
+}
+
+const record_driver_t record_avf = {
+ avf_record_new,
+ avf_record_free,
+ avf_record_push_video,
+ avf_record_push_audio,
+ avf_record_finalize,
+ "avfoundation",
+};
diff --git a/record/record_driver.c b/record/record_driver.c
index 89491665ad0c..c1029c4cba3f 100644
--- a/record/record_driver.c
+++ b/record/record_driver.c
@@ -44,6 +44,9 @@ static const record_driver_t record_null = {
};
const record_driver_t *record_drivers[] = {
+#ifdef HAVE_AVF
+ &record_avf,
+#endif
#ifdef HAVE_FFMPEG
&record_ffmpeg,
#endif
diff --git a/record/record_driver.h b/record/record_driver.h
index 58392d2230f8..b433bae57d9e 100644
--- a/record/record_driver.h
+++ b/record/record_driver.h
@@ -134,6 +134,7 @@ struct recording
typedef struct recording recording_state_t;
extern const record_driver_t record_ffmpeg;
+extern const record_driver_t record_avf;
/**
* config_get_record_driver_options: