package jmri.jmrit;

import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.Nonnull;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineEvent;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;
import javax.sound.sampled.UnsupportedAudioFileException;
import jmri.util.FileUtil;

/**
 * Provide simple way to load and play sounds in JMRI.
 * <p>
 * This is placed in the jmri.jmrit package by process of elimination. It
 * doesn't belong in the base jmri package, as it's not a basic interface. Nor
 * is it a specific implementation of a basic interface, which would put it in
 * jmri.jmrix. It seems most like a "tool using JMRI", or perhaps a tool for use
 * with JMRI, so it was placed in jmri.jmrit.
 *
 * @see jmri.jmrit.sound
 *
 * @author Bob Jacobsen Copyright (C) 2004, 2006
 * @author Dave Duchamp Copyright (C) 2011 - add streaming play of large files
 */
public class Sound {

    // files over this size will be streamed rather than loaded into a Clip
    public static final long LARGE_SIZE = 100000;
    private final URL url;
    private boolean streaming = false;
    // volatile: written by the calling thread (stop()/close()) and polled by
    // the streaming playback thread, so cross-thread visibility is required
    private volatile boolean streamingStop = false;
    private final AtomicReference<Clip> clipRef = new AtomicReference<>();
    private boolean autoClose = true;

    /**
     * Create a Sound object using the media file at path
     *
     * @param path path, portable or absolute, to the media
     * @throws NullPointerException if path cannot be converted into a URL by
     *                              {@link jmri.util.FileUtilSupport#findURL(java.lang.String)}
     */
    public Sound(@Nonnull String path) throws NullPointerException {
        this(FileUtil.findURL(path));
    }

    /**
     * Create a Sound object using the media file
     *
     * @param file reference to the media
     * @throws java.net.MalformedURLException if file cannot be converted into a
     *                                        valid URL
     */
    public Sound(@Nonnull File file) throws MalformedURLException {
        this(file.toURI().toURL());
    }

    /**
     * Create a Sound object using the media URL
     *
     * @param url path to the media
     * @throws NullPointerException if URL is null
     */
    public Sound(@Nonnull URL url) throws NullPointerException {
        if (url == null) {
            throw new NullPointerException();
        }
        this.url = url;
        try {
            streaming = this.needStreaming();
            if (!streaming) {
                // small file: open a Clip up front so play() is immediate
                clipRef.updateAndGet(clip -> {
                    return openClip();
                });
            }
        } catch (URISyntaxException ex) {
            // fall back to non-streaming if the URL cannot be sized
            log.debug("Unable to determine size of {}", url, ex);
            streaming = false;
        } catch (IOException ex) {
            log.error("Unable to open {}", url);
        }
    }

    /**
     * Open a new Clip for this sound's URL.
     *
     * @return the opened clip, or null if it could not be opened
     */
    private Clip openClip() {
        Clip newClip = null;
        try {
            newClip = AudioSystem.getClip(null);
            newClip.addLineListener(event -> {
                if (LineEvent.Type.STOP.equals(event.getType())) {
                    // release the system audio line when playback ends,
                    // unless the user asked to keep it open for reuse
                    if (autoClose) {
                        clipRef.updateAndGet(clip -> {
                            if (clip != null) {
                                clip.close();
                            }
                            return null;
                        });
                    }
                }
            });
            newClip.open(AudioSystem.getAudioInputStream(url));
        } catch (IOException ex) {
            log.error("Unable to open {}", url);
        } catch (LineUnavailableException ex) {
            log.error("Unable to provide audio playback", ex);
        } catch (UnsupportedAudioFileException ex) {
            log.error("{} is not a recognised audio format", url);
        }

        return newClip;
    }

    /**
     * Set if the clip be closed automatically.
     *
     * @param autoClose true if closed automatically
     */
    public void setAutoClose(boolean autoClose) {
        this.autoClose = autoClose;
    }

    /**
     * Get if the clip is closed automatically.
     *
     * @return true if closed automatically
     */
    public boolean getAutoClose() {
        return autoClose;
    }

    /**
     * Closes the sound.
     */
    public void close() {
        if (streaming) {
            streamingStop = true;
        } else {
            clipRef.updateAndGet(clip -> {
                if (clip != null) {
                    clip.close();
                }
                return null;
            });
        }
    }

    /** {@inheritDoc} */
    @Override
    @SuppressWarnings("deprecation") // Object.finalize
    protected void finalize() throws Throwable {
        try {
            // best-effort release of the audio line on GC
            if (!streaming) {
                clipRef.updateAndGet(clip -> {
                    if (clip != null) {
                        clip.close();
                    }
                    return null;
                });
            }
        } finally {
            super.finalize();
        }
    }

    /**
     * Play the sound once.
     */
    public void play() {
        if (streaming) {
            Runnable streamSound = new StreamingSound(this.url);
            Thread tStream = jmri.util.ThreadingUtil.newThread(streamSound);
            tStream.start();
        } else {
            clipRef.updateAndGet(clip -> {
                if (clip == null) {
                    // may have been auto-closed after a prior play
                    clip = openClip();
                }
                if (clip != null) {
                    clip.start();
                }
                return clip;
            });
        }
    }

    /**
     * Play the sound as an endless loop
     */
    public void loop() {
        this.loop(Clip.LOOP_CONTINUOUSLY);
    }

    /**
     * Play the sound in a loop count times. Use
     * {@link javax.sound.sampled.Clip#LOOP_CONTINUOUSLY} to create an endless
     * loop.
     *
     * @param count the number of times to loop
     */
    public void loop(int count) {
        if (streaming) {
            Runnable streamSound = new StreamingSound(this.url, count);
            Thread tStream = jmri.util.ThreadingUtil.newThread(streamSound);
            tStream.start();
        } else {
            clipRef.updateAndGet(clip -> {
                if (clip == null) {
                    clip = openClip();
                }
                if (clip != null) {
                    clip.loop(count);
                }
                return clip;
            });
        }
    }

    /**
     * Stop playing a loop.
     */
    public void stop() {
        if (streaming) {
            streamingStop = true;
        } else {
            clipRef.updateAndGet(clip -> {
                if (clip != null) {
                    clip.stop();
                }
                return clip;
            });
        }
    }

    /**
     * Decide whether this sound should be streamed rather than fully loaded.
     *
     * @return true if the media is larger than {@link #LARGE_SIZE}
     * @throws URISyntaxException if a file: URL cannot be converted to a URI
     * @throws IOException        if a remote URL cannot be opened
     */
    private boolean needStreaming() throws URISyntaxException, IOException {
        if (url != null) {
            if ("file".equals(this.url.getProtocol())) {
                return (new File(this.url.toURI()).length() > LARGE_SIZE);
            } else {
                return this.url.openConnection().getContentLengthLong() > LARGE_SIZE;
            }
        }
        return false;
    }

    /**
     * Play a sound from a buffer
     *
     * @param wavData data to play
     */
    public static void playSoundBuffer(byte[] wavData) {

        // get characteristics from buffer
        float sampleRate = 11200.0f;
        int sampleSizeInBits = 8;
        int channels = 1;
        boolean signed = (sampleSizeInBits > 8);
        boolean bigEndian = true;

        AudioFormat format = new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
        SourceDataLine line;
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format); // format is an AudioFormat object
        if (!AudioSystem.isLineSupported(info)) {
            // Handle the error.
            log.warn("line not supported: {}", info);
            return;
        }
        // Obtain and open the line.
        try {
            line = (SourceDataLine) AudioSystem.getLine(info);
            line.open(format);
        } catch (LineUnavailableException ex) {
            // Handle the error.
            log.error("error opening line", ex);
            return;
        }
        line.start();
        // write(byte[] b, int off, int len)
        line.write(wavData, 0, wavData.length);

    }

    /**
     * Wrapper for a buffer of WAV (RIFF) data; locates the "fmt " chunk.
     */
    public static class WavBuffer {

        public WavBuffer(byte[] content) {
            buffer = Arrays.copyOf(content, content.length);

            // find fmt chunk and set offset
            int index = 12; // skip RIFF header
            // need 4 bytes of chunk id plus 4 bytes of chunk size at index
            while (index + 8 <= buffer.length) {
                // new chunk
                if (buffer[index] == 0x66
                        && buffer[index + 1] == 0x6D
                        && buffer[index + 2] == 0x74
                        && buffer[index + 3] == 0x20) {
                    // found it
                    fmtOffset = index;
                    return;
                } else {
                    // skip this chunk: 8 header bytes plus the little-endian
                    // 32-bit chunk size; bytes must be masked to unsigned,
                    // otherwise sizes with a byte >= 0x80 go negative
                    index = index + 8
                            + (buffer[index + 4] & 0xFF)
                            + (buffer[index + 5] & 0xFF) * 256
                            + (buffer[index + 6] & 0xFF) * 256 * 256
                            + (buffer[index + 7] & 0xFF) * 256 * 256 * 256;
                    log.debug("index now {}", index);
                }
            }
            log.error("Didn't find fmt chunk");

        }

        // we maintain this, but don't use it for anything yet
        @SuppressFBWarnings(value = "URF_UNREAD_FIELD")
        int fmtOffset;

        byte[] buffer;

        float getSampleRate() {
            return 11200.0f;
        }

        int getSampleSizeInBits() {
            return 8;
        }

        int getChannels() {
            return 1;
        }

        boolean getBigEndian() {
            return false;
        }

        boolean getSigned() {
            return (getSampleSizeInBits() > 8);
        }
    }

    public class StreamingSound implements Runnable {

        private final URL localUrl;
        private AudioInputStream stream = null;
        private AudioFormat format = null;
        private SourceDataLine line = null;
        private jmri.Sensor streamingSensor = null;
        private final int count;

        /**
         * A runnable to stream in sound and play it This method does not read
         * in an entire large sound file at one time, but instead reads in
         * smaller chunks as needed.
         *
         * @param url the URL containing audio media
         */
        public StreamingSound(URL url) {
            this(url, 1);
        }

        /**
         * A runnable to stream in sound and play it This method does not read
         * in an entire large sound file at one time, but instead reads in
         * smaller chunks as needed.
         *
         * @param url   the URL containing audio media
         * @param count the number of times to loop
         */
        public StreamingSound(URL url, int count) {
            this.localUrl = url;
            this.count = count;
        }

        /** {@inheritDoc} */
        @Override
        public void run() {
            // Note: some of the following is based on code from
            // "Killer Game Programming in Java" by A. Davidson.
            // Set up the audio input stream from the sound file
            try {
                // link an audio stream to the sampled sound's file
                stream = AudioSystem.getAudioInputStream(localUrl);
                format = stream.getFormat();
                log.debug("Audio format: {}", format);
                // convert ULAW/ALAW formats to PCM format
                if ((format.getEncoding() == AudioFormat.Encoding.ULAW)
                        || (format.getEncoding() == AudioFormat.Encoding.ALAW)) {
                    AudioFormat newFormat
                            = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                                    format.getSampleRate(),
                                    format.getSampleSizeInBits() * 2,
                                    format.getChannels(),
                                    format.getFrameSize() * 2,
                                    format.getFrameRate(), true); // big endian
                    // update stream and format details
                    stream = AudioSystem.getAudioInputStream(newFormat, stream);
                    log.info("Converted Audio format: {}", newFormat);
                    format = newFormat;
                    log.debug("new converted Audio format: {}", format);
                }
            } catch (UnsupportedAudioFileException e) {
                log.error("AudioFileException {}", e.getMessage());
                return;
            } catch (IOException e) {
                log.error("IOException {}", e.getMessage());
                return;
            }
            streamingStop = false;
            if (streamingSensor == null) {
                streamingSensor = jmri.InstanceManager.sensorManagerInstance().provideSensor("ISSOUNDSTREAMING");
            }

            setSensor(jmri.Sensor.ACTIVE);

            if (!streamingStop) {
                // set up the SourceDataLine going to the JVM's mixer
                try {
                    // gather information for line creation
                    DataLine.Info info
                            = new DataLine.Info(SourceDataLine.class, format);
                    if (!AudioSystem.isLineSupported(info)) {
                        log.error("Audio play() does not support: {}", format);
                        return;
                    }
                    // get a line of the required format
                    line = (SourceDataLine) AudioSystem.getLine(info);
                    line.open(format);
                } catch (Exception e) {
                    log.error("Exception while creating Audio out {}", e.getMessage());
                    return;
                }
            }
            if (streamingStop) {
                // stop may have been requested before the line was created;
                // guard against a null line to avoid an NPE here
                if (line != null) {
                    line.close();
                }
                setSensor(jmri.Sensor.INACTIVE);
                return;
            }
            // Read the sound file in chunks of bytes into buffer, and
            // pass them on through the SourceDataLine
            int numRead;
            byte[] buffer = new byte[line.getBufferSize()];
            log.debug("streaming sound buffer size = {}", line.getBufferSize());
            line.start();
            // read and play chunks of the audio
            try {
                if (stream.markSupported()) stream.mark(Integer.MAX_VALUE);

                int i = 0;
                while (!streamingStop && ((i++ < count) || (count == Clip.LOOP_CONTINUOUSLY))) {
                    int offset;
                    while ((numRead = stream.read(buffer, 0, buffer.length)) >= 0) {
                        offset = 0;
                        while (offset < numRead) {
                            offset += line.write(buffer, offset, numRead - offset);
                        }
                    }
                    if (stream.markSupported()) {
                        // rewind for the next loop iteration
                        stream.reset();
                    } else {
                        // stream can't rewind: reopen it from the URL
                        stream.close();
                        try {
                            stream = AudioSystem.getAudioInputStream(localUrl);
                        } catch (UnsupportedAudioFileException e) {
                            log.error("AudioFileException {}", e.getMessage());
                            closeLine();
                            return;
                        } catch (IOException e) {
                            log.error("IOException {}", e.getMessage());
                            closeLine();
                            return;
                        }
                    }
                }
            } catch (IOException e) {
                log.error("IOException while reading sound file {}", e.getMessage());
            }
            closeLine();
        }

        private void closeLine() {
            // wait until all data is played, then close the line
            line.drain();
            line.stop();
            line.close();
            setSensor(jmri.Sensor.INACTIVE);
        }

        private void setSensor(int mode) {
            if (streamingSensor != null) {
                try {
                    streamingSensor.setState(mode);
                } catch (jmri.JmriException ex) {
                    // pass the exception as the last argument so SLF4J logs
                    // the stack trace rather than silently dropping it
                    log.error("Exception while setting ISSOUNDSTREAMING sensor {} to {}", streamingSensor.getDisplayName(), mode, ex);
                }
            }
        }

    }

    private final static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(Sound.class);
}