Create a LinkedItemAudioProperties struct that can be passed to AudioController (instead of passing LinkedItem)

This commit is contained in:
Satindar Dhillon
2022-09-12 13:40:48 -07:00
committed by Jackson Harper
parent bc9b0b0752
commit c4152a05a0
5 changed files with 77 additions and 56 deletions

View File

@ -30,7 +30,7 @@ public struct MiniPlayer: View {
}
var isPresented: Bool {
audioController.item != nil && audioController.state != .stopped
audioController.itemAudioProperties != nil && audioController.state != .stopped
}
var playPauseButtonImage: String {
@ -47,16 +47,16 @@ public struct MiniPlayer: View {
}
var playPauseButtonItem: some View {
if let item = audioController.item, audioController.isLoadingItem(item: item) {
if let itemID = audioController.itemAudioProperties?.itemID, audioController.isLoadingItem(itemID: itemID) {
return AnyView(ProgressView())
} else {
return AnyView(Button(
action: {
switch audioController.state {
case .playing:
_ = audioController.pause()
audioController.pause()
case .paused:
_ = audioController.unpause()
audioController.unpause()
case .reachedEnd:
audioController.seek(to: 0.0)
audioController.unpause()
@ -120,8 +120,8 @@ public struct MiniPlayer: View {
}
func viewArticle() {
if let item = audioController.item {
NSNotification.pushReaderItem(objectID: item.objectID)
if let objectID = audioController.itemAudioProperties?.objectID {
NSNotification.pushReaderItem(objectID: objectID)
withAnimation(.easeIn(duration: 0.1)) {
expanded = false
}
@ -129,7 +129,7 @@ public struct MiniPlayer: View {
}
// swiftlint:disable:next function_body_length
func playerContent(_ item: LinkedItem) -> some View {
func playerContent(_ itemAudioProperties: LinkedItemAudioProperties) -> some View {
GeometryReader { geom in
VStack {
if expanded {
@ -156,7 +156,7 @@ public struct MiniPlayer: View {
let maxSize = 2 * (min(geom.size.width, geom.size.height) / 3)
let dim = expanded ? maxSize : 64
AsyncImage(url: item.imageURL) { image in
AsyncImage(url: itemAudioProperties.imageURL) { image in
image
.resizable()
.aspectRatio(contentMode: .fill)
@ -169,7 +169,7 @@ public struct MiniPlayer: View {
}
if !expanded {
Text(item.unwrappedTitle)
Text(itemAudioProperties.title)
.font(expanded ? .appTitle : .appCallout)
.lineSpacing(1.25)
.foregroundColor(.appGrayTextContrast)
@ -188,7 +188,7 @@ public struct MiniPlayer: View {
Spacer()
if expanded {
Text(item.unwrappedTitle)
Text(itemAudioProperties.title)
.lineLimit(1)
.font(expanded ? .appTitle : .appCallout)
.lineSpacing(1.25)
@ -201,7 +201,7 @@ public struct MiniPlayer: View {
HStack {
Spacer()
if let author = item.author {
if let author = itemAudioProperties.author {
Text(author)
.lineLimit(1)
.font(.appCallout)
@ -209,14 +209,14 @@ public struct MiniPlayer: View {
.foregroundColor(.appGrayText)
.frame(alignment: .trailing)
}
if item.author != nil, item.siteName != nil {
if itemAudioProperties.author != nil, itemAudioProperties.siteName != nil {
Text("")
.font(.appCallout)
.lineSpacing(1.25)
.foregroundColor(.appGrayText)
}
if let site = item.siteName {
Text(site)
if let siteName = itemAudioProperties.siteName {
Text(siteName)
.lineLimit(1)
.font(.appCallout)
.lineSpacing(1.25)
@ -350,8 +350,8 @@ public struct MiniPlayer: View {
presentingView
VStack {
Spacer(minLength: 0)
if let item = self.audioController.item, isPresented {
playerContent(item)
if let itemAudioProperties = self.audioController.itemAudioProperties, isPresented {
playerContent(itemAudioProperties)
.offset(y: offset)
.frame(maxHeight: expanded ? .infinity : 88)
.tint(.appGrayTextContrast)

View File

@ -57,7 +57,7 @@ struct WebReaderContainerView: View {
}
var audioNavbarItem: some View {
if audioController.isLoadingItem(item: item) {
if audioController.isLoadingItem(itemID: item.unwrappedID) {
return AnyView(ProgressView()
.padding(.horizontal)
.scaleEffect(navBarVisibilityRatio))
@ -66,23 +66,23 @@ struct WebReaderContainerView: View {
action: {
switch audioController.state {
case .playing:
if audioController.item == self.item {
if audioController.itemAudioProperties?.itemID == self.item.unwrappedID {
audioController.pause()
return
}
fallthrough
case .paused:
if audioController.item == self.item {
if audioController.itemAudioProperties?.itemID == self.item.unwrappedID {
audioController.unpause()
return
}
fallthrough
default:
audioController.play(item: self.item)
audioController.play(itemAudioProperties: item.audioProperties)
}
},
label: {
Image(systemName: audioController.isPlayingItem(item: item) ? "pause.circle" : "play.circle")
Image(systemName: audioController.isPlayingItem(itemID: item.unwrappedID) ? "pause.circle" : "play.circle")
.font(.appTitleTwo)
}
)

View File

@ -12,6 +12,15 @@ public struct LinkedItemQueryResult {
}
}
/// An immutable value-type snapshot of the fields the audio player needs from a
/// `LinkedItem`, so `AudioController` can hold this instead of the managed object.
public struct LinkedItemAudioProperties {
    /// The item's string identifier (taken from `LinkedItem.unwrappedID`);
    /// used by the controller to match the currently loaded/playing item.
    public let itemID: String
    /// Core Data object identifier; used e.g. to push the reader view for this item.
    public let objectID: NSManagedObjectID
    /// Title shown in the mini player and in the Now Playing info.
    public let title: String
    /// Optional author, displayed when present and used as the Now Playing artist.
    public let author: String?
    /// Optional site name, displayed alongside the author in the player UI.
    public let siteName: String?
    /// Optional artwork URL loaded asynchronously by the mini player.
    public let imageURL: URL?
}
// Internal model used for parsing a push notification object only
public struct JSONArticle: Decodable {
public let id: String
@ -84,6 +93,17 @@ public extension LinkedItem {
return String(data: JSON, encoding: .utf8) ?? "[]"
}
/// Builds a `LinkedItemAudioProperties` snapshot from this item's fields, suitable
/// for handing to `AudioController` without passing the managed object itself.
var audioProperties: LinkedItemAudioProperties {
    LinkedItemAudioProperties(
        itemID: unwrappedID,
        objectID: objectID,
        title: unwrappedTitle,
        author: author,
        siteName: siteName,
        imageURL: imageURL
    )
}
static func lookup(byID itemID: String, inContext context: NSManagedObjectContext) -> LinkedItem? {
let fetchRequest: NSFetchRequest<Models.LinkedItem> = LinkedItem.fetchRequest()
fetchRequest.predicate = NSPredicate(

View File

@ -80,7 +80,7 @@ class SpeechPlayerItem: AVPlayerItem {
// swiftlint:disable all
public class AudioController: NSObject, ObservableObject, AVAudioPlayerDelegate {
@Published public var state: AudioControllerState = .stopped
@Published public var item: LinkedItem?
@Published public var itemAudioProperties: LinkedItemAudioProperties?
@Published public var timeElapsed: TimeInterval = 0
@Published public var duration: TimeInterval = 0
@ -107,10 +107,10 @@ public class AudioController: NSObject, ObservableObject, AVAudioPlayerDelegate
self.voiceList = generateVoiceList()
}
public func play(item: LinkedItem) {
public func play(itemAudioProperties: LinkedItemAudioProperties) {
stop()
self.item = item
self.itemAudioProperties = itemAudioProperties
startAudio()
}
@ -128,7 +128,7 @@ public class AudioController: NSObject, ObservableObject, AVAudioPlayerDelegate
player = nil
synthesizer = nil
item = nil
itemAudioProperties = nil
state = .stopped
timeElapsed = 0
duration = 1
@ -145,9 +145,9 @@ public class AudioController: NSObject, ObservableObject, AVAudioPlayerDelegate
}
public func preload(itemIDs: [String], retryCount _: Int = 0) async -> Bool {
for pageId in itemIDs {
print("preloading speech file: ", pageId)
_ = try? await downloadSpeechFile(pageId: pageId, priority: .low)
for itemID in itemIDs {
print("preloading speech file: ", itemID)
_ = try? await downloadSpeechFile(itemID: itemID, priority: .low)
}
return true
}
@ -246,9 +246,9 @@ public class AudioController: NSObject, ObservableObject, AVAudioPlayerDelegate
document = nil
synthesizer = nil
if let pageId = item?.id {
if let itemID = itemAudioProperties?.itemID {
Task {
self.document = try? await downloadSpeechFile(pageId: pageId, priority: .high)
self.document = try? await downloadSpeechFile(itemID: itemID, priority: .high)
DispatchQueue.main.async {
let synthesizer = SpeechSynthesizer(appEnvironment: self.appEnvironment, networker: self.networker, document: self.document!)
self.durations = synthesizer.estimatedDurations(forSpeed: self.playbackRate)
@ -280,15 +280,17 @@ public class AudioController: NSObject, ObservableObject, AVAudioPlayerDelegate
}
}
public func isLoadingItem(item: LinkedItem) -> Bool {
public func isLoadingItem(itemID: String) -> Bool {
if state == .reachedEnd {
return false
}
return self.item == item && (state == .loading || player?.currentItem == nil || player?.currentItem?.status == .unknown)
return
itemAudioProperties?.itemID == itemID &&
(state == .loading || player?.currentItem == nil || player?.currentItem?.status == .unknown)
}
public func isPlayingItem(item: LinkedItem) -> Bool {
state == .playing && self.item == item
public func isPlayingItem(itemID: String) -> Bool {
state == .playing && itemAudioProperties?.itemID == itemID
}
public func skipForward(seconds: Double) {
@ -299,18 +301,18 @@ public class AudioController: NSObject, ObservableObject, AVAudioPlayerDelegate
seek(to: timeElapsed - seconds)
}
public func fileNameForAudioFile(_ pageId: String) -> String {
pageId + "-" + currentVoice + ".mp3"
public func fileNameForAudioFile(_ itemID: String) -> String {
itemID + "-" + currentVoice + ".mp3"
}
public func pathForAudioDirectory(pageId: String) -> URL {
public func pathForAudioDirectory(itemID: String) -> URL {
FileManager.default
.urls(for: .documentDirectory, in: .userDomainMask)[0]
.appendingPathComponent("audio-\(pageId)/")
.appendingPathComponent("audio-\(itemID)/")
}
public func pathForSpeechFile(pageId: String) -> URL {
pathForAudioDirectory(pageId: pageId)
public func pathForSpeechFile(itemID: String) -> URL {
pathForAudioDirectory(itemID: itemID)
.appendingPathComponent("speech-\(currentVoice).json")
}
@ -318,18 +320,18 @@ public class AudioController: NSObject, ObservableObject, AVAudioPlayerDelegate
state = .loading
setupNotifications()
if let pageId = item?.id {
if let itemID = itemAudioProperties?.itemID {
Task {
self.document = try? await downloadSpeechFile(pageId: pageId, priority: .high)
self.document = try? await downloadSpeechFile(itemID: itemID, priority: .high)
DispatchQueue.main.async {
self.startStreamingAudio(pageId: pageId)
self.startStreamingAudio(itemID: itemID)
}
}
}
}
// swiftlint:disable all
private func startStreamingAudio(pageId _: String) {
private func startStreamingAudio(itemID _: String) {
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [])
} catch {
@ -457,7 +459,7 @@ public class AudioController: NSObject, ObservableObject, AVAudioPlayerDelegate
}
}
if let item = self.item, let speechItem = player?.currentItem as? SpeechPlayerItem {
// if let item = self.item, let speechItem = player?.currentItem as? SpeechPlayerItem {
// NotificationCenter.default.post(
// name: NSNotification.SpeakingReaderItem,
// object: nil,
@ -466,7 +468,7 @@ public class AudioController: NSObject, ObservableObject, AVAudioPlayerDelegate
// "anchorIdx": String(speechItem.speechItem.htmlIdx)
// ]
// )
}
// }
}
func clearNowPlayingInfo() {
@ -476,10 +478,10 @@ public class AudioController: NSObject, ObservableObject, AVAudioPlayerDelegate
func setupRemoteControl() {
UIApplication.shared.beginReceivingRemoteControlEvents()
if let item = item {
if let itemAudioProperties = itemAudioProperties {
MPNowPlayingInfoCenter.default().nowPlayingInfo = [
MPMediaItemPropertyTitle: NSString(string: item.title ?? "Your Omnivore Article"),
MPMediaItemPropertyArtist: NSString(string: item.author ?? "Omnivore"),
MPMediaItemPropertyTitle: NSString(string: itemAudioProperties.title),
MPMediaItemPropertyArtist: NSString(string: itemAudioProperties.author ?? "Omnivore"),
MPMediaItemPropertyPlaybackDuration: NSNumber(value: duration),
MPNowPlayingInfoPropertyElapsedPlaybackTime: NSNumber(value: timeElapsed)
]
@ -529,9 +531,9 @@ public class AudioController: NSObject, ObservableObject, AVAudioPlayerDelegate
}
}
func downloadSpeechFile(pageId: String, priority: DownloadPriority) async throws -> SpeechDocument {
func downloadSpeechFile(itemID: String, priority: DownloadPriority) async throws -> SpeechDocument {
let decoder = JSONDecoder()
let speechFileUrl = pathForSpeechFile(pageId: pageId)
let speechFileUrl = pathForSpeechFile(itemID: itemID)
if FileManager.default.fileExists(atPath: speechFileUrl.path) {
print("SPEECH FILE ALREADY EXISTS: ", speechFileUrl.path)
@ -543,7 +545,7 @@ public class AudioController: NSObject, ObservableObject, AVAudioPlayerDelegate
}
}
let path = "/api/article/\(pageId)/speech?voice=\(currentVoice)&secondaryVoice=\(secondaryVoice)&priority=\(priority)"
let path = "/api/article/\(itemID)/speech?voice=\(currentVoice)&secondaryVoice=\(secondaryVoice)&priority=\(priority)"
guard let url = URL(string: path, relativeTo: appEnvironment.serverBaseURL) else {
throw BasicError.message(messageText: "Invalid audio URL")
}

View File

@ -32,18 +32,17 @@ struct Utterance: Decodable {
}
struct SpeechDocument: Decodable {
static let averageWPM: Double = 195
public let pageId: String
public let averageWPM: Double = 195
public let wordCount: Double
public let language: String
public let defaultVoice: String
public let utterances: [Utterance]
public func estimatedDuration(utterance: Utterance, speed: Double) -> Double {
utterance.wordCount / averageWPM / speed * 60.0
utterance.wordCount / SpeechDocument.averageWPM / speed * 60.0
}
var audioDirectory: URL {