@Override protected AudioDevice createAudioDevice() { // It could be done like this : // AudioDevice device = Gdx.audio.newAudioDevice(config.sampleRate, config.outputChannels < 2); // but we need to align buffer size : Pd.process and device.write. int samplePerFrame = config.bufferSize; int samplePerBuffer = samplePerFrame * config.outputChannels; int bufferSizeBytes = samplePerBuffer * 2; return new OpenALAudioDevice((OpenALAudio)Gdx.audio, config.sampleRate, config.outputChannels<2, bufferSizeBytes, config.bufferCount); }
/** {@inheritDoc} */
@Override
public AudioDevice newAudioDevice (int samplingRate, boolean isMono) {
    if (soundPool != null) {
        return new AndroidAudioDevice(samplingRate, isMono);
    }
    // Audio was disabled via the application configuration; fail loudly.
    throw new GdxRuntimeException("Android audio is not enabled by the application config.");
}
@Override public void create () { if (thread == null) { final AudioDevice device = Gdx.app.getAudio().newAudioDevice(44100, false); thread = new Thread(new Runnable() { @Override public void run () { final float frequency = 440; float increment = (float)(2 * Math.PI) * frequency / 44100; // angular increment for each sample float angle = 0; float samples[] = new float[1024]; while (!stop) { for (int i = 0; i < samples.length; i += 2) { samples[i] = 0.5f * (float)Math.sin(angle); samples[i + 1] = 2 * samples[i]; angle += increment; } device.writeSamples(samples, 0, samples.length); } device.dispose(); } }); thread.start(); } }
public static void main(String[] args) { LwjglApplicationConfiguration config = new LwjglApplicationConfiguration(); config.audioDeviceBufferSize = 512; config.audioDeviceBufferCount = 17; new LwjglApplication(new Game(){ @Override public void create() { // play a pd patch Pd.audio.create(new PdConfiguration()); Pd.audio.open(Gdx.files.local("resources/test.pd")); // and write on raw device at the same time new Thread(new Runnable() { @Override public void run() { int channels = 2; int sampleRate = 44100; AudioDevice device = Gdx.audio.newAudioDevice(sampleRate, channels < 2); // simple sinus float duration = 4.f; float pitch = 440; int samples = (int)(duration * sampleRate) * channels; float [] data = new float[samples]; int stride = samples/channels; for(int i=0 ; i<stride ; i+=1){ float s = (float)i/(float)stride; float t = s * duration * pitch; float value = MathUtils.sin(MathUtils.PI2 * t); for(int j=0 ; j<channels ; j++) data[i+j*stride] = value; } device.writeSamples(data, 0, data.length); device.dispose(); } }).start(); }}, config); }
/** Creates the playback device for the configured sample rate; the device is
 * mono exactly when fewer than two output channels are configured. */
protected AudioDevice createAudioDevice() {
    final boolean mono = config.outputChannels < 2;
    return Gdx.audio.newAudioDevice(config.sampleRate, mono);
}
/** Audio processing loop: records input (if any), runs Pd, writes output,
 * and paces itself against a logical clock derived from the buffer duration.
 * Runs until the {@code processing} flag is cleared. */
@Override public void run() {
    // Number of Pd ticks needed to fill one buffer (at least one full Pd block).
    int ticks = Math.max(1, config.bufferSize / PdBase.blockSize());
    short [] inBuffer = new short[config.bufferSize * config.inputChannels];
    short [] outBuffer = new short[config.bufferSize * config.outputChannels];
    AudioDevice device = createAudioDevice();
    AudioRecorder recorder = null;
    if(config.inputChannels > 0){
        // recorder is mono exactly when a single input channel is requested
        recorder = Gdx.audio.newAudioRecorder(config.sampleRate, config.inputChannels < 2);
    }
    PdBase.openAudio(config.inputChannels, config.outputChannels, config.sampleRate);
    PdBase.computeAudio(true);
    // Drains Pd's message and MIDI queues; posted to the application (GL) thread.
    final Runnable pollRunnable = new Runnable() {
        @Override public void run() {
            PdBase.pollPdMessageQueue();
            PdBase.pollMidiQueue();
            // NOTE(review): this re-arms requirePolling rather than clearing it,
            // so once polling starts it keeps being re-posted each iteration —
            // confirm the field's initial value/reset logic outside this view.
            requirePolling = true;
        }
    };
    // Duration of one buffer in nanoseconds; the loop's pacing quantum.
    long nanoDuration = (long)(1e9 * (double)config.bufferSize / (double)config.sampleRate);
    long realTime = System.nanoTime();
    long logicTime = realTime;
    while(processing){
        if(recorder != null){
            // blocking read of one buffer's worth of input samples
            recorder.read(inBuffer, 0, inBuffer.length);
        }
        realTime = System.nanoTime();
        // Lead of the logical clock over real time, converted ns -> ms.
        long waitTime = (logicTime - realTime) / 1000000;
        if(waitTime > 0){
            try {
                Thread.sleep(waitTime);
            } catch (InterruptedException e) {
                // NOTE(review): interrupt is swallowed and the interrupt status is
                // not restored; loop exit relies solely on the 'processing' flag.
            }
        }else{
            // Fell behind: resynchronise the logical clock with real time.
            logicTime = realTime;
        }
        logicTime += nanoDuration;
        // Render 'ticks' Pd blocks from inBuffer into outBuffer, then play them.
        PdBase.process(ticks, inBuffer, outBuffer);
        device.writeSamples(outBuffer, 0, outBuffer.length);
        if(requirePolling){
            Gdx.app.postRunnable(pollRunnable);
        }
    }
    // Loop ended: release audio resources.
    device.dispose();
    if(recorder != null){
        recorder.dispose();
    }
}
/** Always fails: raw audio devices are not available on this backend. */
@Override
public AudioDevice newAudioDevice(int samplingRate, boolean isMono) {
    // No TeaVM implementation exists; fail fast instead of returning null.
    throw new GdxRuntimeException("AudioDevice not supported by TeaVM backend");
}
/** Always fails: raw audio devices are not available on this backend. */
@Override
public AudioDevice newAudioDevice (int samplingRate, boolean isMono) {
    // No GWT implementation exists; fail fast instead of returning null.
    throw new GdxRuntimeException("AudioDevice not supported by GWT backend");
}
/** Returns a no-op device; the requested rate and channel mode are ignored. */
@Override
public AudioDevice newAudioDevice(int samplingRate, boolean isMono) {
    return new MockAudioDevice();
}
@Override public AudioDevice newAudioDevice (int samplingRate, boolean isMono) { // TODO Auto-generated method stub return null; }
/** Returns a stub device for tests; the requested rate and mode are ignored. */
@Override
public AudioDevice newAudioDevice(int samplingRate, boolean isMono) {
    return new TestAudioDevice();
}
/** Always fails: a headless server has no audio output. */
@Override
public AudioDevice newAudioDevice(int samplingRate, boolean isMono) {
    throw new ServerAudioException();
}
/** Creates an Android-backed {@link AudioDevice} for the given rate and mode.
 * (Parameter names are local only and invisible to callers in Java.) */
public final AudioDevice newAudioDevice(int samplingRate, boolean isMono) {
    return new AndroidAudioDevice(samplingRate, isMono);
}
/** Creates a new {@link AudioDevice} either in mono or stereo mode. The AudioDevice has to be disposed via its
 * {@link AudioDevice#dispose()} method when it is no longer used.
 *
 * @param samplingRate the sampling rate in samples per second (e.g. 44100)
 * @param isMono whether the AudioDevice should be in mono (true) or stereo (false) mode
 * @return the AudioDevice
 *
 * @throws GdxRuntimeException in case the device could not be created */
public AudioDevice newAudioDevice (int samplingRate, boolean isMono);
/**
 * Creates a new {@link AudioDevice} either in mono or stereo mode. The AudioDevice has to be disposed via its
 * {@link AudioDevice#dispose()} method when it is no longer used.
 *
 * @param samplingRate the sampling rate in samples per second (e.g. 44100)
 * @param isMono whether the AudioDevice should be in mono (true) or stereo (false) mode
 * @return the AudioDevice
 *
 * @throws GdxRuntimeException in case the device could not be created
 */
public AudioDevice newAudioDevice(int samplingRate, boolean isMono);
/** Creates a new {@link AudioDevice}.
 * NOTE(review): decompiled signature — parameter names were lost. Presumably
 * {@code paramInt} is the sampling rate and {@code paramBoolean} selects mono
 * mode, matching the equivalent declarations elsewhere in this file; confirm
 * against the original source. */
public abstract AudioDevice newAudioDevice(int paramInt, boolean paramBoolean);