Save the listen position independently of the read position when listening to articles

When you restart an audio session, it will resume at the previously
saved listen position.
This commit is contained in:
Jackson Harper
2022-11-24 13:58:24 +08:00
parent 0e5de28a45
commit d7290044a8
4 changed files with 77 additions and 9 deletions

View File

@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<model type="com.apple.IDECoreDataModeler.DataModel" documentVersion="1.0" lastSavedToolsVersion="21279" systemVersion="21G115" minimumToolsVersion="Automatic" sourceLanguage="Swift" userDefinedModelVersionIdentifier="">
<model type="com.apple.IDECoreDataModeler.DataModel" documentVersion="1.0" lastSavedToolsVersion="21512" systemVersion="21G115" minimumToolsVersion="Automatic" sourceLanguage="Swift" userDefinedModelVersionIdentifier="">
<entity name="Highlight" representedClassName="Highlight" syncable="YES" codeGenerationType="class">
<attribute name="annotation" optional="YES" attributeType="String"/>
<attribute name="createdAt" optional="YES" attributeType="Date" usesScalarValueType="NO"/>
@ -31,6 +31,9 @@
<attribute name="imageURLString" optional="YES" attributeType="String"/>
<attribute name="isArchived" attributeType="Boolean" usesScalarValueType="YES"/>
<attribute name="language" optional="YES" attributeType="String"/>
<attribute name="listenPositionIndex" optional="YES" attributeType="Integer 64" defaultValueString="0" usesScalarValueType="YES"/>
<attribute name="listenPositionOffset" optional="YES" attributeType="Double" defaultValueString="0.0" usesScalarValueType="YES"/>
<attribute name="listenPositionTime" optional="YES" attributeType="Double" defaultValueString="0.0" usesScalarValueType="YES"/>
<attribute name="localPDF" optional="YES" attributeType="String"/>
<attribute name="onDeviceImageURLString" optional="YES" attributeType="String"/>
<attribute name="originalHtml" optional="YES" attributeType="String"/>

View File

@ -29,6 +29,8 @@ public struct LinkedItemAudioProperties {
public let byline: String?
public let imageURL: URL?
public let language: String?
public let startIndex: Int
public let startOffset: Double
}
// Internal model used for parsing a push notification object only
@ -149,7 +151,9 @@ public extension LinkedItem {
title: unwrappedTitle,
byline: formattedByline,
imageURL: imageURL,
language: language
language: language,
startIndex: Int(listenPositionIndex),
startOffset: listenPositionOffset
)
}
@ -174,7 +178,10 @@ public extension LinkedItem {
newAnchorIndex: Int? = nil,
newIsArchivedValue: Bool? = nil,
newTitle: String? = nil,
newDescription: String? = nil
newDescription: String? = nil,
listenPositionIndex: Int? = nil,
listenPositionOffset: Double? = nil,
listenPositionTime: Double? = nil
) {
context.perform {
if let newReadingProgress = newReadingProgress {
@ -197,6 +204,18 @@ public extension LinkedItem {
self.descriptionText = newDescription
}
if let listenPositionIndex = listenPositionIndex {
self.listenPositionIndex = Int64(listenPositionIndex)
}
if let listenPositionOffset = listenPositionOffset {
self.listenPositionOffset = listenPositionOffset
}
if let listenPositionTime = listenPositionTime {
self.listenPositionTime = listenPositionTime
}
guard context.hasChanges else { return }
self.updatedAt = Date()

View File

@ -75,7 +75,7 @@
stop()
self.itemAudioProperties = itemAudioProperties
startAudio()
startAudio(atIndex: itemAudioProperties.startIndex, andOffset: itemAudioProperties.startOffset)
EventTracker.track(
.audioSessionStart(linkID: itemAudioProperties.itemID)
@ -86,6 +86,8 @@
let stoppedId = itemAudioProperties?.itemID
let stoppedTimeElapsed = timeElapsed
savePositionInfo(force: true)
player?.pause()
timer?.invalidate()
@ -206,6 +208,9 @@
public func seek(to: TimeInterval) {
let position = max(0, to)
// Always reset this state when seeking so we trigger a re-saving of positional info
lastReadUpdate = 0
// If we are in reachedEnd state, and seek back, we need to move to
// paused state
if to < duration, state == .reachedEnd {
@ -248,6 +253,7 @@
// There was no foundIdx, so we are probably trying to seek past the end, so
// just seek to the last possible duration.
if let durations = self.durations, let last = durations.last {
lastReadUpdate = 0
player?.removeAllItems()
synthesizeFrom(start: durations.count - 1, playWhenReady: state == .playing, atOffset: last)
}
@ -536,7 +542,7 @@
.appendingPathComponent("speech-\(currentVoice).json")
}
public func startAudio() {
public func startAudio(atIndex index: Int, andOffset offset: Double) {
state = .loading
setupNotifications()
@ -547,7 +553,11 @@
DispatchQueue.main.async {
self.setTextItems()
if let document = document {
self.startStreamingAudio(itemID: itemID, document: document)
// Don't attempt to seek past the end, restart from beginning if we are
// past the max utterances in the document.
let startIndex = index < document.utterances.count ? index : 0
let startOffset = index < document.utterances.count ? offset : 0.0
self.startStreamingAudio(itemID: itemID, document: document, atIndex: startIndex, andOffset: startOffset)
} else {
print("unable to load speech document")
// TODO: Post error to SnackBar
@ -558,7 +568,7 @@
}
// swiftlint:disable all
private func startStreamingAudio(itemID _: String, document: SpeechDocument) {
private func startStreamingAudio(itemID _: String, document: SpeechDocument, atIndex index: Int, andOffset offset: Double) {
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [])
} catch {
@ -579,7 +589,7 @@
durations = synthesizer.estimatedDurations(forSpeed: playbackRate)
self.synthesizer = synthesizer
synthesizeFrom(start: 0, playWhenReady: true)
synthesizeFrom(start: index, playWhenReady: true, atOffset: offset)
}
func synthesizeFrom(start: Int, playWhenReady: Bool, atOffset: Double = 0.0) {
@ -618,6 +628,7 @@
if let player = player {
player.pause()
state = .paused
savePositionInfo(force: true)
}
}
@ -702,14 +713,29 @@
}
}
if timeElapsed - 10 > lastReadUpdate {
savePositionInfo()
}
// Persists both reading progress and the listen position for the current item.
// Unless `force` is true, writes are throttled: they occur only when more than
// 10 seconds of playback have elapsed since the last save (`lastReadUpdate`).
func savePositionInfo(force: Bool = false) {
if force || (timeElapsed - 10 > lastReadUpdate) {
// Fraction of the item listened to so far; drives the shared reading-progress bar.
let percentProgress = timeElapsed / duration
// Index of the utterance currently being spoken; 0 if no speech item is loaded.
let speechIndex = (player?.currentItem as? SpeechPlayerItem)?.speechItem.audioIdx ?? 0
// htmlIdx is stored as a string — a missing or non-numeric value falls back to 0.
let anchorIndex = Int((player?.currentItem as? SpeechPlayerItem)?.speechItem.htmlIdx ?? "") ?? 0
if let itemID = itemAudioProperties?.itemID {
dataService.updateLinkReadingProgress(itemID: itemID, readingProgress: percentProgress, anchorIndex: anchorIndex)
}
// Listen position is saved separately from reading progress (the point of this
// change): it records the utterance index, the offset within that utterance,
// and total elapsed time, so a restarted session can resume where it left off.
if let itemID = itemAudioProperties?.itemID, let player = player, let currentItem = player.currentItem {
let currentOffset = CMTimeGetSeconds(currentItem.currentTime())
print("updating listening info: ", speechIndex, currentOffset, timeElapsed)
dataService.updateLinkListeningProgress(itemID: itemID,
listenIndex: speechIndex,
listenOffset: currentOffset,
listenTime: timeElapsed)
}
// Record when we last saved so the 10-second throttle above works.
lastReadUpdate = timeElapsed
}
}

View File

@ -0,0 +1,20 @@
import CoreData
import Foundation
import Models
import SwiftGraphQL
public extension DataService {
  /// Persists the listener's current audio position for a saved item.
  ///
  /// Runs on the background Core Data context. If the service has been
  /// deallocated, or no `LinkedItem` exists for `itemID`, the update is
  /// silently skipped.
  ///
  /// - Parameters:
  ///   - itemID: Identifier of the `LinkedItem` to update.
  ///   - listenIndex: Index of the utterance currently being played.
  ///   - listenOffset: Offset (seconds) within that utterance.
  ///   - listenTime: Total elapsed listening time (seconds).
  func updateLinkListeningProgress(itemID: String, listenIndex: Int, listenOffset: Double, listenTime: Double) {
    backgroundContext.perform { [weak self] in
      guard let context = self?.backgroundContext,
            let item = LinkedItem.lookup(byID: itemID, inContext: context)
      else { return }

      item.update(
        inContext: context,
        listenPositionIndex: listenIndex,
        listenPositionOffset: listenOffset,
        listenPositionTime: listenTime
      )
    }
  }
}