Commit e0174fef authored by Hannes Barfuss's avatar Hannes Barfuss
Browse files

restructured pd folder

parent 09c37179
No preview for this file type
pd-for-ios @ be1593bf
Subproject commit be1593bf83b6820a804fc1e7418e62f646a33a92
File added
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>$(DEVELOPMENT_LANGUAGE)</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>$(PRODUCT_BUNDLE_PACKAGE_TYPE)</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>$(CURRENT_PROJECT_VERSION)</string>
</dict>
</plist>
//
// Pd4Unity_ios.h
// Pd4Unity-ios
//
// Created by Hannes on 02.05.21.
//
// Umbrella header for the Pd4Unity-ios framework target: re-exports the
// libpd public headers and the shared Pd4Unity configuration defines so
// that framework clients (and the generated -Swift.h) can see them.
#import <Foundation/Foundation.h>
#import "PdBase.h"
#import "PdAudioController.h"
#import "PdDispatcher.h"
#import "Pd4Unity_defs.h"
//! Project version number for Pd4Unity_ios.
FOUNDATION_EXPORT double Pd4Unity_iosVersionNumber;
//! Project version string for Pd4Unity_ios.
FOUNDATION_EXPORT const unsigned char Pd4Unity_iosVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <Pd4Unity_ios/PublicHeader.h>
//
// PdInstance-ios.swift
// Pd4Unity-ios
//
// Created by Hannes on 02.05.21.
//
import Foundation
import AVFoundation
@objc public class PdInstance : NSObject {

    /// The libpd audio controller. Kept as a stored property because it must
    /// stay alive for pd to keep running — it would be deallocated at the end
    /// of setup() if it were a local.
    var pd:PdAudioController?

    override init() {
    }

    /// Configures libpd playback, registers the message listeners and starts
    /// audio processing.
    /// - Returns: 0 on success, -1 on failure (already running or init error).
    @objc public func setup() -> Int {
        if pd != nil {
            print("error: setup called while pd is already running")
            return -1
        }
        pd = PdAudioController()
        //to do: how to set mic to mono?
        let pdInit = pd!.configurePlayback(withSampleRate: PD4UNITY_SAMPLERATE, numberChannels: PD4UNITY_NUM_OUTPUTS, inputEnabled: true, mixingEnabled: true)
        if pdInit == PdAudioOK {
            print("PD is ready.")
        } else {
            print("PD initialization failed.")
            return -1
        }
        // Must come after configurePlayback: configureTicksPerBuffer fails if
        // the samplerate is not yet set.
        pd!.configureTicksPerBuffer(1)
        PDReceiver.shared.registerListeners()
        /*print((Bundle.main.url(forResource: "osc440.pd", withExtension: nil)?.path)!)
        print("\n")
        let path:String = (Bundle.main.url(forResource: "osc440.pd", withExtension: nil)?.deletingLastPathComponent().path)!
        if (PdBase.openFile("osc440.pd", path: path) == nil) {
            print("error opening pd file: \(path)/osc440.pd")
            return -1
        }*/
        pd!.isActive = true
        PdBase.computeAudio(true)
        pd!.print()
        return 0
    }

    /// Stops pd by deactivating and releasing the controller.
    @objc public func stop() {
        pd?.isActive = false
        pd = nil
    }

    /// - Returns: true if any current audio input is a headset microphone.
    ///
    /// Fixes two defects of the previous version: `currentRoute.inputs` is a
    /// non-optional array (the nil check was always true), and the loop
    /// returned false after inspecting only the FIRST input instead of
    /// scanning all of them. The log messages now also describe inputs
    /// rather than outputs/headphones.
    @objc public func checkForMicrophone() -> Bool {
        let inputs = AVAudioSession.sharedInstance().currentRoute.inputs
        if inputs.isEmpty {
            print("no inputs at all")
            return false
        }
        print("Available inputs:")
        for description in inputs {
            print("\(description.portName)\n")
            if description.portType == AVAudioSession.Port.headsetMic {
                return true
            }
        }
        print("no headset microphone plugged in")
        return false
    }

    /// - Returns: true if any current audio output is a pair of headphones.
    ///
    /// Same fixes as checkForMicrophone: no nil check on the non-optional
    /// outputs array, and ALL outputs are scanned before giving up.
    @objc public func checkForHeadphones() -> Bool {
        let outputs = AVAudioSession.sharedInstance().currentRoute.outputs
        if outputs.isEmpty {
            print("no outputs at all")
            return false
        }
        print("Available outputs:")
        for description in outputs {
            print("\(description.portName)\n")
            if description.portType == AVAudioSession.Port.headphones {
                return true
            }
        }
        print("no headphones plugged in")
        return false
    }
}
/// Singleton that receives messages sent from pd patches (via the "toAPI"
/// send object) and logs them.
@objc class PDReceiver:NSObject, PdListener {

    /// Shared instance; constructing it wires up the pd dispatcher.
    static let shared = PDReceiver()

    /// Dispatcher that routes pd messages to registered listeners.
    var pdDispatcher:PdDispatcher?

    override private init() {
        pdDispatcher = PdDispatcher()
        PdBase.setDelegate(pdDispatcher)
        super.init()
    }

    /// Subscribes this receiver to the "toAPI" send object inside pd.
    func registerListeners() {
        pdDispatcher?.add(self, forSource:"toAPI")
    }

    // MARK: - PdListener

    @objc func receiveBang(fromSource:String) {
        print("received bang from " + fromSource)
    }

    @objc func receive(_ value:Float, fromSource:String) {
        print("received float: \(value) from source: \(fromSource)")
    }

    @objc func receiveSymbol(_ symbol: String, fromSource:String) {
        print("received symbol from " + fromSource + ": " + symbol)
    }

    @objc func receiveList(_ list:[Any], fromSource:String) {
        print("received list from \(fromSource):")
        list.forEach { print($0) }
    }

    @objc func receiveMessage(_ message:String, withArguments:[Any], fromSource:String) {
        print("received message from \(fromSource): " + message)
        withArguments.forEach { print($0) }
    }
}
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>$(DEVELOPMENT_LANGUAGE)</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>$(PRODUCT_BUNDLE_PACKAGE_TYPE)</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>$(CURRENT_PROJECT_VERSION)</string>
</dict>
</plist>
//
// Pd4Unity_watchos.h
// Pd4Unity-watchos
//
// Created by Hannes on 08.05.21.
//
// Umbrella header for the Pd4Unity-watchos framework target. Unlike the
// iOS variant it does not re-export any libpd headers.
#import <Foundation/Foundation.h>
//! Project version number for Pd4Unity_watchos.
FOUNDATION_EXPORT double Pd4Unity_watchosVersionNumber;
//! Project version string for Pd4Unity_watchos.
FOUNDATION_EXPORT const unsigned char Pd4Unity_watchosVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <Pd4Unity_watchos/PublicHeader.h>
//
// NativeAudioEngine.swift
// MicMonitor
//
// Created by Hannes on 27.04.21.
//
/*
Ideas: reverb and delay on steps
spatialized copy of steps! just make two buses that get mic input and spatialize them!
*/
import Foundation
import AVFoundation
/// Mixer parameters addressable from the native bridge.
/// Raw values are assigned implicitly starting at 0, which yields the same
/// numbering as the previous explicit `= 0, 1, 2` form.
enum EngineParams: Int {
    case directGain, spat1Gain, spat2Gain
}
/// Kinds of processing nodes the engine can host.
/// Implicit raw values reproduce the previous explicit 0...6 numbering.
enum NodeType: Int {
    case Mic, Reverb, Delay, EQ, Spatializer, Distortion, CustomDSP
}
/// Experimental AVAudioEngine-based engine (NativeAudioEngine.swift).
/// NOTE(review): this class shares the name PdInstance with the ios/osx
/// libpd variants — presumably only one of these files is compiled per
/// target; confirm in the project settings.
@objc public class PdInstance : AVAudioEngine {
// 44.1 kHz stereo float format used for manual-rendering I/O.
let standardFormat = AVAudioFormat.init(standardFormatWithSampleRate: 44100, channels: 2)
// Master mix node; attached in setup() but not connected to anything
// visible in this file.
var master:AVAudioMixerNode?
// The engine's input node; in manual rendering mode it must be fed with
// audio buffer data via the rendering input block.
var pdNode:AVAudioInputNode?
override init() {
super.init()
}
/// Configures the engine for realtime manual rendering and starts it.
/// - Returns: 1 on success, -1 on failure.
///   NOTE(review): the other PdInstance variants return 0 on success —
///   check callers before unifying the convention.
@objc public func setup() -> Int {
if(self.isRunning) {
print("error: engine is already running")
return -1
}
/* macOS doesn't have and doesn't need AVAudioSession. */
#if os(iOS) || os(watchOS) || os(tvOS)
do {
// Here recordingSession is just a shared instance of AVAudioSession
try AVAudioSession.sharedInstance().setActive(true)
try self.enableManualRenderingMode(.realtime, format: standardFormat!, maximumFrameCount: 1024)
pdNode = self.inputNode //in manual rendering mode, engine expects us to feed the input node with audio buffer data.
//self.attach(pdNode!) This is probably unnecessary
// The rendering input block currently supplies NO data (always nil);
// a real implementation would return an AudioBufferList here.
pdNode!.setManualRenderingInputPCMFormat(standardFormat!, inputBlock: {
_ in
return nil
})
print("AVAudiosession configuration: \ninput latency: \(AVAudioSession.sharedInstance().inputLatency)\noutput latency: \(AVAudioSession.sharedInstance().outputLatency)\nIO buffer duration: \(AVAudioSession.sharedInstance().ioBufferDuration)\nestimated buffer size:\(AVAudioSession.sharedInstance().sampleRate * AVAudioSession.sharedInstance().ioBufferDuration)")
// I suggest adding notifications here for route and configuration changes
}
catch {
print("error creating audio engine")
return -1
}
#endif
//Note: Spatialization only works if the input node is mono.
//Note: implicit samplerate conversion is NOT supported and in most cases results in bad aliasing, so use one samplerate for the whole signal chain.
let hwSamplerate = self.inputNode.inputFormat(forBus: 0).sampleRate
print("hardware samplerate is \(hwSamplerate)")
// NOTE(review): inputFormat and outputFormat are created but never used
// below (inputFormat only appears in the commented-out tap) — dead code?
let inputFormat = AVAudioFormat.init(standardFormatWithSampleRate: hwSamplerate, channels: 1)
let outputFormat = AVAudioFormat.init(standardFormatWithSampleRate: hwSamplerate, channels: 2)
master = AVAudioMixerNode()
self.attach(master!)
//Use a tap for audio analysis
/*eqNode.installTap(onBus: 0, bufferSize: 64, format: inputFormat, block: {
buf, when in
let time = when
let sample = buf.floatChannelData?[0]
})*/
self.prepare()
do {
try self.start()
}
catch {
print("error starting audio engine")
return -1
}
print("successfully started recording session.")
return 1
}
}
libpd and MacOs and iOS:
You cannot use PdAudioController and PdAudioUnit in macos, since they use frameworks which are only available on ios (for example AVAudioSession, AVAudioUnit).
libpd itself works fine on macos, but libpd itself "doesn't do anything", meaning you need some audio thread calling libpd's libpd_process_float callback regularly. portaudio can be used for this, but there are a few details that demand your attention:
1.) You have to download, build and correctly link portaudio to your Xcode project. To do so:
-Get the sources: http://files.portaudio.com/download.html
-Compile the sources as shown in the doc: http://files.portaudio.com/docs/v19-doxydocs/compile_mac_coreaudio.html (basically ./configure && make)
-Add the resulting libportaudio.a static library to your Xcode project
-#include "portaudio.h" wherever you need it
-Add libportaudio.a to "Link binary with libraries" for every target that needs it
-Add the path of libportaudio.a to "Library Search Paths" for every target that needs it (probably something like $(PROJECT_DIR)/../../portaudio/lib/.libs)
2.) You have to correctly setup an I/O stream with portaudio as well as initialize libpd. There are some caveats here:
-portaudio: You need to at least call Pa_Initialize(), Pa_OpenDefaultStream(...) and Pa_StartStream(...) for audio i/o to work.
-libpd: You have to at least call libpd_queued_init() and libpd_init_audio(...) for pd audio to work.
3.) You have to correctly release portaudio and libpd resources once you are done.
-libpd: call libpd_queued_release();
-portaudio: call Pa_Terminate();
OTHER ERRORS ENCOUNTERED
-When importing libpd into an Xcode project, the linker may fail with an error like "undefined symbol: _pd_queued_init". This is likely due to some configuration in the Xcode project, but I couldn't figure it out. Take one of the test projects from pd-for-ios (https://github.com/libpd/pd-for-ios) and add a new target; this way it usually works.
-"Include of non-modular header inside framework module": The usual way of making C code accessible to Swift is by defining a "bridging header" and importing all .h files you need in the bridging header. However, if you want to build a framework module, the process is different because frameworks do NOT support bridging headers. You probably tried to include/import your headers in the framework's umbrella header (the header that is automatically created when you create a new framework target), which will sometimes result in an "Include of non-modular header inside framework module" compile-time error. A possible workaround is to make these headers publicly available to your framework under Target->Build Phases->Headers. Note, however, that this way the headers are also publicly available to the users of your framework.
-All sorts of troubles with *-Swift.h files: It is sometimes very unclear how Xcode names these and where they are. Some naming conventions that seem to be true:
-For bundles, the file is at toplevel: try #include "TargetName-Swift.h"
-For frameworks, the file is inside a folder named after the target: try #include "TargetName/TargetName-Swift.h"
-hyphens in the target name are replaced by underscores, so the file for a target named "SomeTarget-ios" will be "SomeTarget_ios-Swift.h"
-try using <> instead of "" if it doesn't work
-If you have no idea what the file is called, build your project (it doesn't matter that the build fails) and then go to Xcode->Preferences and open your DerivedData folder. search for *Swift.h inside the folder and you will probably find the file.
-In case it is not self-explanatory: the iOS framework has to be built for "Any iOS Device (arm64)", not for "Mac"!!! It will probably compile for mac too, but when you try to use it you will have all sorts of linker errors.
-This should be a no-brainer, but I wasted several hours on it: PdAudioController must be kept alive for pd to actually run - meaning you have to declare the PdAudioController instance variable in a global or class variable scope, NOT inside a function! It will get deallocated when the function returns...
-You can adjust the blocksize (and thus latency) by calling configureTicksPerBuffer on your PdAudioController instance. The base buffer size is hardcoded in z_libpd.c as a define called "DEFDACBLKSIZE", default is 64. If you want a lower block size you have to change that value. 1 tick per buffer means that PdAudioUnit requests a blocksize equal to DEFDACBLKSIZE from the system. Take care: any call to configureTicksPerBuffer before calling configurePlayback(...) or configureAmbient(...) will fail because the samplerate is not yet set!
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>$(DEVELOPMENT_LANGUAGE)</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>$(PRODUCT_BUNDLE_PACKAGE_TYPE)</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>NSPrincipalClass</key>
<string></string>
</dict>
</plist>
//
// Use this file to import your target's public headers that you would like to expose to Swift.
//
#import "portaudio.h"
#import "pa_mac_core.h"
#import "z_libpd.h"
#import "z_queued.h"
#import "PdBase.h"
#import "portaudio-pd-bridge.h"
#import "Pd4Unity_defs.h"
//
// PdInstance-osx.swift
// Pd4Unity
//
// Created by Hannes on 02.05.21.
//
import Foundation
import AVFoundation
@objc public class PdInstance : NSObject {

    override init() {
    }

    /// Initializes portaudio and libpd and starts the audio stream.
    ///
    /// libpd is initialized BEFORE the portaudio stream is opened/started,
    /// because the stream callback (pdcallback) immediately starts calling
    /// libpd_process_float — the previous order (painit/pastart first) let
    /// the callback run against an uninitialized libpd. Each failure path
    /// now also releases whatever was already initialized.
    /// - Returns: 0 on success, -1 on any failure.
    @objc public func setup() -> Int {
        if (Pa_Initialize() != 0) {
            print("error initializing portaudio")
            return -1
        }
        if (libpd_queued_init() != 0) {
            print("error initializing libpd")
            Pa_Terminate()
            return -1
        }
        if (libpd_init_audio(PD4UNITY_NUM_INPUTS, PD4UNITY_NUM_OUTPUTS, PD4UNITY_SAMPLERATE) != 0) {
            print("error initializing pd audio")
            libpd_queued_release()
            Pa_Terminate()
            return -1
        }
        if (painit() != 0) {
            print("error opening portaudio stream")
            libpd_queued_release()
            Pa_Terminate()
            return -1
        }
        if (pastart() != 0) {
            print("error starting portaudio stream")
            libpd_queued_release()
            Pa_Terminate()
            return -1
        }
        /*if (PdBase.openFile("osc440.pd", path: "/Users/hannes/Desktop/MA/RunnersHigh/Assets/StreamingAssets/PdAssets") == nil) {
            print("error opening pd file")
            return -1
        }*/
        return 0
    }

    /// Releases libpd and portaudio resources.
    @objc public func stop() {
        libpd_queued_release()
        Pa_Terminate()
    }
}
//
// portaudio-pd-bridge.c
// Pd4Unity
//
// Created by Hannes on 02.05.21.
//
#include "portaudio-pd-bridge.h"
#include "z_libpd.h"
#include "Pd4Unity_defs.h"
/* User data handed to the portaudio callback. Left over from the portaudio
   example code; pdcallback does not actually read its phase fields. */
paTestData data;
/* The single portaudio I/O stream opened by painit() and started by pastart(). */
PaStream *stream;
/* portaudio stream callback: renders audio by running libpd.
   Removed the unused locals of the previous version (the paTestData cast,
   the float* casts and the loop index were never read). */
int pdcallback( const void *inputBuffer, void *outputBuffer,
unsigned long framesPerBuffer,
const PaStreamCallbackTimeInfo* timeInfo,
PaStreamCallbackFlags statusFlags,
void *userData )
{
    /* Unused: libpd consumes/produces the buffers directly. */
    (void)timeInfo;
    (void)statusFlags;
    (void)userData;
    /* libpd processes audio in fixed blocks of libpd_blocksize() frames
       (DEFDACBLKSIZE, 64 by default); convert the portaudio buffer length
       into a tick count. painit() requests exactly libpd_blocksize() frames
       per buffer, so this is normally 1. */
    int ticks = (int)(framesPerBuffer / (unsigned long)libpd_blocksize());
    libpd_process_float(ticks, inputBuffer, outputBuffer);
    return 0; /* == paContinue */
}
PaError painit() {
int numDevices;
numDevices = Pa_GetDeviceCount();
if( numDevices < 0 )
{
printf( "ERROR: Pa_CountDevices returned 0x%x\n", numDevices );
}
const PaDeviceInfo *deviceInfo;
for( int i=0; i<numDevices; i++ )
{
deviceInfo = Pa_GetDeviceInfo( i );
printf("name: %s\n", deviceInfo->name);
}
return Pa_OpenDefaultStream( &stream,
PD4UNITY_NUM_INPUTS, /* no input channels */
PD4UNITY_NUM_OUTPUTS, /* stereo output */
paFloat32, /* 32 bit floating point output */
PD4UNITY_SAMPLERATE,
libpd_blocksize(), /* frames per buffer, i.e. the number
of sample frames that PortAudio will
request from the callback. Many apps
may want to use
paFramesPerBufferUnspecified, which
tells PortAudio to pick the best,
possibly changing, buffer size.*/
pdcallback, /* this is your callback function */
&data ); /*This is a pointer that will be passed to
your callback*/
}
/* Starts the portaudio stream previously opened by painit(); from this
   point on pdcallback is invoked on the audio thread. */
PaError pastart(void)
{
    return Pa_StartStream(stream);
}
//
// portaudio-pd-bridge.h
// Pd4Unity
//
// Created by Hannes on 02.05.21.
//
// Public interface of the portaudio <-> libpd bridge: a stream callback
// that runs libpd, plus helpers to open and start the default stream.
#ifndef portaudio_pd_bridge_h
#define portaudio_pd_bridge_h
#include <stdio.h>
#include "portaudio.h"
/* User-data struct passed to the portaudio callback. Carried over from the
   canonical portaudio example; the bridge's callback does not read these
   phase fields. */
typedef struct
{
float left_phase;
float right_phase;
}
paTestData;
/* portaudio stream callback: renders one buffer of audio through libpd. */
int pdcallback( const void *inputBuffer, void *outputBuffer,
unsigned long framesPerBuffer,
const PaStreamCallbackTimeInfo* timeInfo,
PaStreamCallbackFlags statusFlags,
void *userData );
/* Opens the default portaudio I/O stream; call after Pa_Initialize(). */
PaError painit(void);
/* Starts the stream opened by painit(). */
PaError pastart(void);
#endif /* portaudio_pd_bridge_h */
//
// Pd4Unity_defs.g.h
// PdTest01
//
// Created by Hannes on 03.05.21.
//
// Shared audio configuration for all Pd4Unity targets.
#ifndef Pd4Unity_defs_g_h
#define Pd4Unity_defs_g_h
/* One samplerate for the whole signal chain (implicit samplerate
   conversion is not supported). */
#define PD4UNITY_SAMPLERATE 44100
/* Ticks per buffer passed to configureTicksPerBuffer; 1 tick equals
   DEFDACBLKSIZE (64) frames. The correctly spelled macro is new; the
   misspelled PD4UNTIY_TICKSPERBUFFER is kept as an alias so existing
   callers keep compiling. */
#define PD4UNITY_TICKSPERBUFFER 1
#define PD4UNTIY_TICKSPERBUFFER PD4UNITY_TICKSPERBUFFER
#define PD4UNITY_NUM_INPUTS 2
#define PD4UNITY_NUM_OUTPUTS 2
#endif /* Pd4Unity_defs_g_h */
//
// Swift4Unity.m
// Pd4Unity
//
// Created by Hannes on 02.05.21.
//
// Obj-C shim that pulls in the Swift-generated interface header for the
// current platform so the Swift API can be called from C/Obj-C.
#pragma mark - C interface
/* Need to explicitly import AVAudioEngine, although it was already imported in the corresponding swift file. */
#import <AVFoundation/AVAudioEngine.h>
#include <TargetConditionals.h>
/* Pick the generated "-Swift.h" header per platform. Naming rules:
   frameworks place the header in a folder named after the target, and
   hyphens in target names become underscores. */
#if TARGET_OS_WATCH
#include "Pd4Unity_watchos/Pd4Unity_watchos-Swift.h"
#elif TARGET_OS_IPHONE
#include "Pd4Unity_ios/Pd4Unity_ios-Swift.h"
#else
#include "Pd4Unity-Swift.h"
#endif