Java server-side code -
@GetMapping(value = "/api/stream/{songFile}", produces = MediaType.APPLICATION_OCTET_STREAM_VALUE)
public ResponseEntity<StreamingResponseBody> playSong(@PathVariable String songFile) throws IOException {
    File file = new File(projectPath + "assets/audio/" + songFile + ".mp3");
    FileInputStream in = new FileInputStream(file);
    StreamingResponseBody songStream = out -> {
        try {
            Thread.sleep(10);
            IOUtils.copy(in, out);
        }
        catch (InterruptedException e) {
            LOGGER.error("Streaming Thread Interrupted - {}", e.getMessage());
        }
    };
    return ResponseEntity.ok()
            .header(HttpHeaders.ACCEPT_RANGES, "128")
            .header(HttpHeaders.CONTENT_TYPE, "audio/mp3")
            .contentLength(file.length())
            .body(songStream);
}
SwiftUI function for fetching the data -
var player: AVPlayer?
var playerItem: AVPlayerItem?
var playerAudio: AVAudioPlayer?
var totalDuration: Double?

@Published var speed = 0.0 {
    willSet {
        ObjectWillChangePublisher().send()
    }
}

func playSound(sound: String) {
    if let url = URL(string: sound) {
        print(url)
        self.player = AVPlayer(url: url)
        self.playerItem = AVPlayerItem(url: url)
        print(player?.currentItem?.duration)
        let asset = AVURLAsset(url: url, options: nil)
        let audioDuration = asset.duration
        print(CMTimeGetSeconds(audioDuration))
    }
    else {
        print("Problem in url \(sound)")
    }
}
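For context, this is roughly how I call the function ("AudioManager" is a stand-in name for the ObservableObject that holds the properties above; the URL is the Spring endpoint from the first snippet):

import SwiftUI
import AVFoundation

// Rough call site; "AudioManager" stands in for the ObservableObject shown above.
struct PlayerView: View {
    @StateObject private var audioManager = AudioManager()

    var body: some View {
        Button("Play") {
            // Path matches the Spring endpoint; "DummySong" is the sample file name.
            audioManager.playSound(sound: "http://localhost:8080/api/stream/DummySong")
            audioManager.player?.play()
        }
    }
}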
After passing the URL for the sound to playSound and printing the output -
print(audioManager.player)
print(audioManager.player?.currentItem?.loadedTimeRanges)
print(audioManager.player?.currentItem?.asset)
print(audioManager.player?.currentItem?.asset.duration)
if audioManager.player?.status == .readyToPlay {
    print(audioManager.player?.currentItem?.asset.duration.seconds) // it's not nan here
}
else {
    print("not ready to play")
}
Terminal Output -
Optional(__C.CMTime(value: 0, timescale: 0, flags: __C.CMTimeFlags(rawValue: 17), epoch: 0))
nan
Optional(<AVPlayer: 0x600002092dc0>)
Optional([])
Optional(<AVURLAsset: 0x60000222f160, URL = http://localhost:8080/api/stream/DummySong>)
Optional(__C.CMTime(value: 0, timescale: 0, flags: __C.CMTimeFlags(rawValue: 17), epoch: 0))
not ready to play
I am unable to understand why this problem is arising: the function plays the audio, but the duration is not reported. When the same file is served from a direct download (content-disposition) link, the duration and the other metadata are fetched properly, as they should be.
Kindly suggest a way to fetch the duration while using a streaming API.
I have tried different ways of reading the duration through AVPlayer, AVPlayerItem, and AVAsset, but I am unable to get proper values from any of them.
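For completeness, here is a rough sketch of the kinds of duration lookups I have tried (the helper below is illustrative rather than my exact code; the async load(.duration) call needs iOS 15+):

import AVFoundation

// Illustrative sketch of the duration lookups attempted (not exact code).
func inspectDuration(of player: AVPlayer) async {
    // 1. Directly from the player item - comes back as an invalid CMTime (nan seconds).
    if let item = player.currentItem {
        print(CMTimeGetSeconds(item.duration))
        print(item.loadedTimeRanges) // prints [] for the streaming URL
    }

    // 2. From the asset via the synchronous property (as in playSound above) - also nan.
    if let asset = player.currentItem?.asset {
        print(CMTimeGetSeconds(asset.duration))
    }

    // 3. Modern async loading (iOS 15+ / macOS 12+).
    if let asset = player.currentItem?.asset {
        do {
            let duration = try await asset.load(.duration)
            print(CMTimeGetSeconds(duration))
        } catch {
            print("Failed to load duration: \(error)")
        }
    }
}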