I am totally new to iOS and I am working on an application that has many frame animations. Everything was going well until I tried to run my final animation in the method posted below. The method is a delegate callback of the built-in text-to-speech synthesizer (AVSpeechSynthesizer).
func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didFinish utterance: AVSpeechUtterance) {
    if spoken == 0 {
        spoken += 1
        print("speaking finished")
        self.imageView.stopAnimating()
        self.imageView.image = self.circleImages.last
        self.imageView.animationImages = self.circleImages
        self.imageView.animationDuration = 1.5
        self.imageView.animationRepeatCount = 0
        self.imageView.startAnimating()
        do {
            try self.recordAndRecognizeSpeech()
        } catch let error {
            print(error)
        }
    } else if spoken == 1 {
        //animation 1
        spoken += 1
        self.imageView.image = self.comingOutImages.first
        self.imageView.animationImages = self.comingOutImages
        self.imageView.animationDuration = 6.0
        self.imageView.animationRepeatCount = 1
        self.imageView.startAnimating()
        print(String(self.comingOutImages.count) + " # of images")
        print("should have animated go in " + String(spoken))
    } else {
        print("done")
    }
}
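For context, the synthesizer is a property of the view controller and its delegate is wired up roughly like this (a sketch of my setup; the class name and the viewDidLoad placement are just how I happen to have it):

import UIKit
import AVFoundation

class ViewController: UIViewController, AVSpeechSynthesizerDelegate {

    let synthesizer = AVSpeechSynthesizer()
    var spoken = 0

    override func viewDidLoad() {
        super.viewDidLoad()
        // Without this the didFinish delegate callback above never fires.
        synthesizer.delegate = self
    }
}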
In the first block of the method, where spoken == 0, the animation works fine, but when it gets to the block where spoken == 1, the animation does not play: it sets the image, but the frames never animate. I've been looking around forever; I tried running it on the main thread and a bunch of other things.
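To be concrete, by "run it on the main thread" I mean wrapping the spoken == 1 branch in a main-queue dispatch, roughly like this (same body as above, just hopped onto the main queue explicitly; it did not make the frames animate either):

DispatchQueue.main.async {
    // Same assignments as the spoken == 1 branch above.
    self.imageView.image = self.comingOutImages.first
    self.imageView.animationImages = self.comingOutImages
    self.imageView.animationDuration = 6.0
    self.imageView.animationRepeatCount = 1
    self.imageView.startAnimating()
}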
EDIT:
I have the method below, which runs the animation right before the one above. That earlier animation will not play unless I set its animationRepeatCount to 0, and if I set it to anything but 0, then the animation above plays but this one doesn't. With the repeat count set to 16 in this method, the animation above works but this one doesn't. What I would like is for this animation to run with repeat count 0 and, when speech is done, for the one above to fire with repeat count 1. I'm very new to iOS and there is something I am just not getting right.
func handleSend() {
    if Thread.isMainThread {
        print("send on main thread")
    } else {
        print("send not on main thread")
    }
    do {
        try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {}
    //UIView.animate(withDuration: 4.0, animations: {
    //animation 2
    print("second animation")
    self.imageView.stopAnimating()
    self.imageView.image = self.talkingImages.last
    self.imageView.animationImages = self.talkingImages
    self.imageView.animationDuration = 0.25
    self.imageView.animationRepeatCount = 16
    self.imageView.startAnimating()
    print("should have animated")
    //})
    let utterance = AVSpeechUtterance(string: self.finalString)
    //utterance.voice = AVSpeechSynthesisVoice(language: "en-GB")
    //utterance.rate = 0.1
    self.synthesizer.speak(utterance)
}
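What I think I need is a way to cleanly restart the frame animation every time I swap image sets. Something like this helper is what I have in mind (just a sketch, not code I currently have; clearing animationImages before reassigning it is my own assumption, not something from the docs):

// Hypothetical helper, not currently in the view controller.
func restartFrameAnimation(frames: [UIImage], duration: TimeInterval, repeatCount: Int) {
    imageView.stopAnimating()
    imageView.animationImages = nil      // drop the previous frame set first
    imageView.image = frames.last        // static frame shown once the animation ends
    imageView.animationImages = frames
    imageView.animationDuration = duration
    imageView.animationRepeatCount = repeatCount
    imageView.startAnimating()
}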
EDIT 2: I wanted to post the whole view controller for reference, but it exceeds the character limit, so I'll post all of the animations and explain the flow.
This is the touchesEnded override for touches on the image view; it is also where the first animation starts.
override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
    timesRubbed += 1
    if timesRubbed == 1 {
        player?.stop()
        //Toast(text: keepRubbing).show()
        self.view.makeToast(keepRubbing, duration: 3.0, position: .top)
    }
    if timesRubbed == 2 {
        player?.stop()
        //Toast(text: keepAgain).show()
        self.view.makeToast(keepAgain, duration: 3.0, position: .top)
    }
    if timesRubbed == 3 {
        player?.stop()
        timesRubbed += 1
        playSoundComeOut()
        CATransaction.begin()
        CATransaction.setCompletionBlock {
            print("after animation 1")
            DispatchQueue.main.async {
                CATransaction.begin()
                CATransaction.setCompletionBlock {
                    print("after circle animation")
                    //UIView.animate(withDuration: 3.0, animations: {
                    //animation 2
                    print("second animation")
                    //self.imageView.stopAnimating()
                    self.imageView.image = self.talkingImages.last
                    self.imageView.animationImages = self.talkingImages
                    self.imageView.animationDuration = 0.25
                    self.imageView.animationRepeatCount = 0
                    self.imageView.startAnimating()
                    print("should have animated")
                    //})
                    let utterance = AVSpeechUtterance(string: self.greeting)
                    //utterance.voice = AVSpeechSynthesisVoice(language: "en-GB")
                    //utterance.rate = 0.1
                    self.synthesizer.speak(utterance)
                    //CATransaction.commit()
                }
                //UIView.animate(withDuration: 3.0, animations: {
                //circle animation
                print("second animation")
                self.imageView.stopAnimating()
                self.imageView.image = self.circleImages.last
                self.imageView.animationImages = self.circleImages
                self.imageView.animationDuration = 1.5
                self.imageView.animationRepeatCount = 2
                self.imageView.startAnimating()
                print("should have animated")
                //})
                CATransaction.commit()
            }
        }
        //UIView.animate(withDuration: 6.0, animations: {
        //animation 1
        if self.imageView.isFocused {
            print("imageview is focused")
        } else {
            print("not focused")
        }
        self.imageView.image = self.comingOutImages.last
        self.imageView.animationImages = self.comingOutImages
        self.imageView.animationDuration = 6.0
        self.imageView.animationRepeatCount = 1
        self.imageView.startAnimating()
        //})
        CATransaction.commit()
    }
}
At the end of that animation sequence it goes to the speechSynthesizer delegate method where spoken == 0, then it runs the handleSend() method posted above, and then it goes back to the speechSynthesizer method where spoken == 1.
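Since UIImageView's frame animation has no completion callback, one idea I have been experimenting with for the hand-off between these steps is to schedule the next step manually after the finite animation's total run time (animationDuration * animationRepeatCount). This is just a sketch of that idea, not what the code above currently does:

// Sketch: schedule the hand-off once the finite frame animation should have ended.
let total = self.imageView.animationDuration * Double(self.imageView.animationRepeatCount)
DispatchQueue.main.asyncAfter(deadline: .now() + total) {
    self.imageView.stopAnimating()
    // start the next animation / speech step here
}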
If you need any more information, please let me know.
I ended up doing this for the last animation and it worked