Issues with transparent PNG in UIActivityViewController for FB Messenger and iMessage

I have been using the following code to call the UIActivityViewController for sharing stickers via the various social media apps:
if let image = sticker.getUIImage(), let imgData = UIImagePNGRepresentation(image) {
    let activityVC = UIActivityViewController(activityItems: [imgData], applicationActivities: nil)
    activityVC.popoverPresentationController?.sourceView = gesture.view
    self.present(activityVC, animated: true, completion: nil)
} else {
    ErrorHandler.handleError(STICKER_IMAGE_NOT_FOUND, sticker)
}
This code had been working fine until the most recent update to FB Messenger (version 98.0); now Messenger shows the error "Couldn't load content". FB Messenger appears to prefer a URL, like this:
if let item = sticker.getImageURL() {
    let activityVC = UIActivityViewController(activityItems: [item], applicationActivities: nil)
    activityVC.popoverPresentationController?.sourceView = gesture.view
    self.present(activityVC, animated: true, completion: nil)
} else {
    ErrorHandler.handleError(STICKER_IMAGE_NOT_FOUND, sticker)
}
This works fine with FB Messenger but iMessage displays the transparent PNG with a black background.
I was looking at UIActivityViewControllerCompletionWithItemsHandler, but the documentation's discussion states it runs after the activity completes, which is too late for what I need to do. I also tried creating a custom UIActivity that returns UIActivityType.message for activityType, but it was added to the bottom of the sheet rather than replacing the default Message activity.
Is there a way to intercept the selection of an item in UIActivityViewController so I can use MFMessageComposeViewController and attach the UIImagePNGRepresentation data for Messages, while all the other activities use the URL?
Is there a particular argument type I can pass to UIActivityViewController that will correctly display a transparent PNG in all the social apps?
TIA
Mike
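One avenue worth noting for the "different payload per app" part of this question is UIKit's UIActivityItemSource protocol, which lets a single activity item hand back a different object depending on which activity the user picks. A minimal sketch, written against the UIActivityType spelling used elsewhere in this question; the StickerItemSource name and its inputs are assumptions, and whether each target app renders the transparency correctly still has to be tested per app:
import UIKit

// Hypothetical wrapper: returns PNG data for Messages and a file URL for everything else.
class StickerItemSource: NSObject, UIActivityItemSource {
    let pngData: Data
    let fileURL: URL

    init(pngData: Data, fileURL: URL) {
        self.pngData = pngData
        self.fileURL = fileURL
    }

    // Placeholder used by the share sheet before an activity is chosen
    func activityViewControllerPlaceholderItem(_ activityViewController: UIActivityViewController) -> Any {
        return fileURL
    }

    // Return PNG data for Messages, the URL for everything else
    func activityViewController(_ activityViewController: UIActivityViewController,
                                itemForActivityType activityType: UIActivityType?) -> Any? {
        if activityType == .message {
            return pngData
        }
        return fileURL
    }
}
It would then be passed as the activity item, e.g. UIActivityViewController(activityItems: [StickerItemSource(pngData: imgData, fileURL: item)], applicationActivities: nil).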

Circling back to close this up. I eventually switched from using the UIActivityViewController universally to a system that customizes what is done for each type of service. I use BDGShare by Bob de Graaf. I build a list of the services that the app supports, show a button for each and then a switch to jump to each type of share. Just in case someone's working on this, here's what I came up with:
The types of sharing the app wants to support:
public enum ShareServiceType: Int {
    case iMessage = 0, facebook, twitter, instagram, whatsApp, facebookMessenger, email, more
}
A class to store information about the type of sharing service:
public class ShareTargetVO: NSObject {
    var icon: String!
    var label: String!
    var type: ShareServiceType

    init(serviceType: ShareServiceType, icon: String, label: String) {
        self.type = serviceType
        self.icon = icon
        self.label = label
    }
}
A function in my social networking helper to populate the list of available services:
static func getShareTargetList(for sticker: ReeSticker) -> [ShareTargetVO] {
    var services: Array<ShareTargetVO> = []
    // Check to see which capabilities are present and add buttons
    if BDGShare.shared().isAvailable(forServiceType: SLServiceTypeFacebook) {
        services.append(ShareTargetVO(serviceType: ShareServiceType.facebook, icon: "Icon-Share-Facebook", label: "Facebook"))
    }
    // Checking for the Facebook service type because there's no availability check in the FBSDK for Messenger (could be rolled into the lib)
    if BDGShare.shared().isAvailable(forServiceType: SLServiceTypeFacebook) && !sticker.type.doesContain("video") {
        services.append(ShareTargetVO(serviceType: ShareServiceType.facebookMessenger, icon: "Icon-Share-Messenger", label: "Messenger"))
    }
    if BDGShare.shared().isAvailable(forServiceType: SLServiceTypeTwitter) {
        services.append(ShareTargetVO(serviceType: ShareServiceType.twitter, icon: "Icon-Share-Twitter", label: "Twitter"))
    }
    if BDGShare.shared().canSendSMS() {
        services.append(ShareTargetVO(serviceType: ShareServiceType.iMessage, icon: "Icon-Share-Messages", label: "Messages"))
    }
    if UIApplication.shared.canOpenURL(URL(string: "whatsapp://")!) && !sticker.type.contains("video") {
        services.append(ShareTargetVO(serviceType: ShareServiceType.whatsApp, icon: "Icon-Share-Whatsapp", label: "WhatsApp"))
    }
    if UIApplication.shared.canOpenURL(URL(string: "instagram://app")!) && !sticker.type.contains("video") {
        services.append(ShareTargetVO(serviceType: ShareServiceType.instagram, icon: "Icon-Share-Instagram", label: "Instagram"))
    }
    if BDGShare.shared().canSendEmail() {
        services.append(ShareTargetVO(serviceType: ShareServiceType.email, icon: "Icon-Share-Mail", label: "Email"))
    }
    services.append(ShareTargetVO(serviceType: ShareServiceType.more, icon: "Icon-Share-More", label: "More"))
    return services
}
A function in my view controller to populate a UICollectionView of buttons for sharing limited to those services that are returned from the list function:
func layoutShareButtons() {
    let f = self.view.frame
    let btnWidth = f.width * 0.82
    let bannerWidth = btnWidth + 10

    mask = UIView(frame: CGRect(x: 0, y: 0, width: f.width, height: f.height))
    mask.backgroundColor = .black
    mask.alpha = 0.3
    self.view.addSubview(mask)

    buttonList = SocialHelper.getShareTargetList(for: self.sticker)

    let buttonGridLayout = UICollectionViewFlowLayout()
    buttonGridLayout.sectionInset = UIEdgeInsets(top: 5, left: 5, bottom: 5, right: 5)
    buttonGridLayout.itemSize = CGSize(width: 60, height: 60)
    buttonGridLayout.scrollDirection = .horizontal

    buttonListView = UICollectionView(frame: CGRect(x: (f.width - bannerWidth) / 2,
                                                    y: self.preview.frame.origin.y + self.preview.frame.height + 10,
                                                    width: bannerWidth,
                                                    height: 80),
                                      collectionViewLayout: buttonGridLayout)
    buttonListView.register(ShareButtonCell.self, forCellWithReuseIdentifier: "shareButtonCell")
    buttonListView.dataSource = self
    buttonListView.delegate = self
    self.view.addSubview(buttonListView)

    // Cancel button for the sharing view (added last to ensure it's on top of the z-order)
    cancelButton = SimpleButton(frame: CGRect(x: (f.width - bannerWidth) / 2,
                                              y: self.buttonListView.frame.origin.y + self.buttonListView.frame.height + 10,
                                              width: bannerWidth,
                                              height: 52))
    cancelButton.backgroundColor = UIColor(netHex: 0x202020)
    cancelButton.layoutComponent(0, label: "Cancel")
    cancelButton.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.cancelButtonPressed(_:))))
    self.view.addSubview(self.cancelButton)
}
The UICollectionView didSelect handler (for better separation of concerns you may want to move the "share" function into a class dedicated to the share implementation; in this app the screen we're working with exists specifically for sharing):
func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
    self.share(shareTarget: buttonList[indexPath.row])
}
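For completeness, the matching data source callbacks aren't shown above; a sketch of what they might look like, noting that ShareButtonCell's internals aren't in the original, so the iconImageView and titleLabel outlets are assumptions:
func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
    return buttonList.count
}

func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
    let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "shareButtonCell", for: indexPath) as! ShareButtonCell
    let target = buttonList[indexPath.row]
    // Hypothetical ShareButtonCell API: an image view for the icon and a label for the title
    cell.iconImageView.image = UIImage(named: target.icon)
    cell.titleLabel.text = target.label
    return cell
}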
Finally, a function to call the correct share type:
func share(shareTarget: ShareTargetVO) {
    // Params to submit to service
    self.shareUrl = self.sticker.getStickerURL()
    let textStr = "" // BDGShare supports a message passed in as well but we just send the sticker

    // we need the NSData either way (sticker / video)
    var ok = true
    // try? is fine here because the result is tested below
    let urlData: Data? = try? Data(contentsOf: self.shareUrl as URL)
    ok = (urlData != nil)

    var img: UIImage? = nil
    // if it's an image type then get it
    if ok {
        if (self.shareUrl.pathExtension.contains("png")) || (self.shareUrl.pathExtension.contains("jpg")) {
            img = UIImage(data: urlData! as Data)
            ok = (img != nil)
        }
    }

    if !ok {
        let alertCtrl = UIAlertController(title: "Error sending", message: "There was an error gathering the information for sending this sticker.", preferredStyle: .alert)
        alertCtrl.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.default, handler: nil))
        self.present(alertCtrl, animated: true, completion: nil)
        return
    }

    switch shareTarget.type {
    case .iMessage:
        BDGShare.shared().shareSMS(textStr, recipient: nil, image: img, data: urlData! as Data, imageName: "sendSticker.png", completion: { (SharingResult) -> Void in
            // Handle share result...
            self.handleShareResult(shareTarget.type, shareResult: SharingResult)
        })
    case .facebook:
        BDGShare.shared().shareFacebook("", urlStr: "", image: img, completion: { (SharingResult) -> Void in
            // Handle share result...
            self.handleShareResult(shareTarget.type, shareResult: SharingResult)
        })
    case .twitter:
        // ... more code for handling each type of share
    }
}
So there are all the pieces I used to implement using BDGShare to get around the UIActivityViewController. BTW - the "More" option at the end of the list calls that UIActivityViewController so it's still available to the user if they so choose.
HTH, Mike
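The .more branch itself isn't shown above; a minimal sketch of falling back to the stock share sheet, with a hypothetical method name, might look like:
func shareViaMoreOption() {
    // Fall back to the standard share sheet for anything the explicit buttons don't cover
    let activityVC = UIActivityViewController(activityItems: [self.shareUrl as URL], applicationActivities: nil)
    activityVC.popoverPresentationController?.sourceView = self.view
    self.present(activityVC, animated: true, completion: nil)
}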

Related

Saving pre-loaded values in memory - Swift/SwiftUI

I have a class of "questions" - basically cards that store text, a color, and a few other properties.
One of these properties backs a favorite button: Question is a class so that a user can tap a heart icon to favorite that question.
This persists while the app is running, but every time I restart the app, all my favorites disappear. There is no other user data stored in the app, just this list of which questions are favorited. Do I have to use UserDefaults in order to store that data? Or is there some other way I can get it to persist across launches of the app (and even when they update to a new version of the app)?
Here is the class:
class Question: Hashable, Identifiable, Codable, Equatable {
    static func == (lhs: Question, rhs: Question) -> Bool {
        return lhs.id == rhs.id
    }

    func hash(into hasher: inout Hasher) {
        hasher.combine(id)
    }

    var id = UUID()
    var text: String
    var color: String
    var foregroundColor: String?
    var submittedBy: String?
    var favorited: Bool = false

    init(text: String, color: String, submittedBy: String? = "") {
        self.text = text
        self.color = color
        self.submittedBy = submittedBy
    }
}
And an example variable with a "deck" of questions:
var whoQs: [Question] = [
    Question(text: "Who are you with when you feel the best?", color: "Who", submittedBy: "Testing Person"),
    Question(text: "Who do you let inform you?", color: "Who"),
    Question(text: "Who do you miss right now?", color: "Who")
]
And here is how I'm toggling the favorites item:
Button(action: {
    currentQuestions.last?.favorited.toggle()
    print("\(currentQuestions.last?.text) is favorited: \(currentQuestions.last?.favorited)")
}, label: {
    Image(systemName: currentQuestions.last?.favorited ?? false ? "heart.fill" : "heart")
        // .foregroundColor(.black)
        .imageScale(.large)
}).padding()
.disabled(!shareQuestionVisible)
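No answer was recorded for this question, but the Codable conformance above points at one: persist the favorites and re-apply them at launch. A minimal sketch using UserDefaults; the FavoritesStore name and key are assumptions, and because each Question gets a fresh UUID every launch, this sketch keys favorites off the question text rather than id:
import Foundation

// Hypothetical helper: because each Question gets a fresh UUID at launch,
// favorites are keyed off the question text instead of the id.
enum FavoritesStore {
    private static let key = "favoritedQuestionTexts"

    static func save(_ questions: [Question]) {
        let favorited = questions.filter { $0.favorited }.map { $0.text }
        UserDefaults.standard.set(favorited, forKey: key)
    }

    static func restore(into questions: [Question]) {
        let favorited = UserDefaults.standard.stringArray(forKey: key) ?? []
        for question in questions where favorited.contains(question.text) {
            question.favorited = true
        }
    }
}
save(_:) would be called from the heart button's action after the toggle, and restore(into:) once at startup before the decks are shown; UserDefaults survives app updates, so favorites would carry across versions.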

ImageView inside of ScrollView showing wrong picture

I have a scroll view with an image view embedded in it. The user taps the "Take Photo" button and takes a picture; those photos are stored in an array and then displayed in the scrollable image view. My issue is that after didFinishPickingMediaWithInfo is called and the camera closes, the image view always shows the first image in the array. I want it to show the most recent image added to the array and let the user scroll backward through the images until they reach the first image in the array. How do I make this happen?
By the way, I don't know if this is even plausible, but I tried reversing the for loop and that didn't work.
I'm a new programmer, so please explain your answers.
Here's my code:
import UIKit
class ViewController: UIViewController, UINavigationControllerDelegate, UIImagePickerControllerDelegate {

    @IBOutlet weak var scrollImageView: UIScrollView!

    var imageTaken: [UIImage] = []
    var imagePicker = UIImagePickerController()

    override func viewDidLoad() {
        super.viewDidLoad()
        scrollImageView.frame = scrollImageView.frame
    }

    @IBAction func takePhotoButton(_ sender: Any) {
        imagePicker = UIImagePickerController()
        imagePicker.delegate = self
        imagePicker.sourceType = .camera
        present(imagePicker, animated: true, completion: nil)
    }

    func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
        imageTaken.append((info[.originalImage] as? UIImage)!)
        imagePicker.dismiss(animated: true, completion: nil)
        for i in 0..<imageTaken.count {
            let imageView = UIImageView()
            imageView.image = imageTaken[i]
            imageView.contentMode = .scaleAspectFit
            let xPosition = self.scrollImageView.frame.width * CGFloat(i)
            imageView.frame = CGRect(x: xPosition, y: 0, width: self.scrollImageView.frame.width, height: self.scrollImageView.frame.height)
            scrollImageView.contentSize.width = scrollImageView.frame.width * CGFloat(i + 1)
            scrollImageView.addSubview(imageView)
        }
    }
}
Two options, I guess; take whichever one seems easier.
OOP: create an array of objects, for example:
struct TakePhotos {
    var image: UIImage
    var timestamp: Date
}
Then sort the array by the timestamp.
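As an illustration of that suggestion (only the TakePhotos struct comes from the answer; the rest is an assumption), record a timestamp when each photo arrives and sort before laying out the image views:
var photosTaken: [TakePhotos] = []

// Append a timestamped entry whenever the picker returns an image
func record(_ image: UIImage) {
    photosTaken.append(TakePhotos(image: image, timestamp: Date()))
}

// Most recent first, so index 0 is the latest photo
func imagesNewestFirst() -> [UIImage] {
    return photosTaken.sorted { $0.timestamp > $1.timestamp }.map { $0.image }
}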
Use PHAsset
import Photos
Using PHAsset you should be able to retrieve the images taken from the user's album. Keep a count when the user takes a new image.
Then you can run a query and sort the images in reverse order; they will already have the timestamp.
let photoOptions = PHFetchOptions()
photoOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
This is what I did in my application.
// FIXME: - Query only happens once; this needs to be refactored so it happens when the album is updated on the device.
// Query only happens one time.
if imageArray.isEmpty {
    let photoOptions = PHFetchOptions()
    photoOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
    let allPhotos = PHAsset.fetchAssets(with: photoOptions)

    let fetchOptions = PHImageRequestOptions()
    fetchOptions.deliveryMode = .highQualityFormat
    fetchOptions.resizeMode = .exact
    fetchOptions.normalizedCropRect = CGRect(x: 0, y: 0, width: view.frame.width, height: view.frame.width)
    // Must be a synchronous call, otherwise the view loads before the images are retrieved (the result is an empty view).
    fetchOptions.isSynchronous = true

    let imageSize: CGSize = CGSize(width: view.frame.width, height: view.frame.height)

    // Retrieves images using the fetched assets.
    allPhotos.enumerateObjects { (assets, index, pointer) in
        DispatchQueue.global(qos: .background).sync {
            PHImageManager.default().requestImage(for: assets, targetSize: imageSize, contentMode: .aspectFit, options: fetchOptions) { (image, hashDictionary) in
                guard let image = image else { return }
                self.imageArray.append(image)
                self.albumImages.append(ImageAlbum(images: image))
            }
        }
    }

    selectedImage.image = imageArray.first
    selectedImage.contentMode = .scaleAspectFill
}

Access first item in an array decoded from an API

Hello everybody and Happy Holiday!
I am a SwiftUI novice just starting out and playing with APIs. I have gotten used to decoding the API, but I run into an issue when I try to access the first item in the array. I am using SwiftyJSON to decode. When I try to access index 0, the app crashes with "Error: Index Out of Range".
Any tip would be extremely helpful especially for a novice like me.
Thank you in advance!
Model
struct dataType: Identifiable {
    var id: String
    var title: String
    var desc: String
    var url: String
    var image: String
}
Decoding the JSON
class getData: ObservableObject {
    @Published var datas = [dataType]()

    init() {
        let source = "https://newsapi.org/v2/top-headlines?country=ro&apiKey=c602e42864e148dea1144f1f55255888"
        let url = URL(string: source)!
        let session = URLSession(configuration: .default)
        session.dataTask(with: url) { (data, _, err) in
            if err != nil {
                print((err?.localizedDescription)!)
                return
            }
            let json = try! JSON(data: data!)
            for i in json["articles"] {
                let title = i.1["title"].stringValue
                let description = i.1["description"].stringValue
                let url = i.1["url"].stringValue
                let image = i.1["urlToImage"].stringValue
                let id = i.1["publishedAt"].stringValue
                DispatchQueue.main.async {
                    self.datas.append(dataType(id: id, title: title, desc: description, url: url, image: image))
                }
            }
        }.resume()
    }
}
And finally, the view
struct ContentView: View {
    @ObservedObject var list = getData()

    var body: some View {
        VStack {
            WebImage(url: URL(string: list.datas[0].title), options: .highPriority, context: nil)
                .resizable()
                .frame(width: 400, height: 250)
            NavigationView {
                List(list.datas) { i in
                    NavigationLink(destination: webView(url: i.url).navigationBarTitle("", displayMode: .inline)) {
                        HStack {
                            VStack(alignment: .leading, spacing: 10) {
                                Text(i.title).fontWeight(.heavy)
                                Text(i.desc)
                                    .lineLimit(3)
                            }
                            if i.image != "" {
                                WebImage(url: URL(string: i.image), options: .highPriority, context: nil)
                                    .resizable()
                                    .frame(width: 110, height: 135)
                                    .cornerRadius(20)
                            }
                        }.padding(.vertical, 15)
                    }
                }.navigationBarTitle("Noutati")
            }
        }
    }
}
URLSession executes the dataTask asynchronously, so at the start there is no data yet, i.e. datas is empty, and trying to get the first element (datas[0] below) of an empty array results in the exception you see:
WebImage(url: URL(string: list.datas[0].title), options: .highPriority, context: nil)
    .resizable()
    .frame(width: 400, height: 250)
Instead you need to show some stub view until the data has loaded, like below:
if list.datas.isEmpty {
    Text("Loading...")
} else {
    WebImage(url: URL(string: list.datas[0].title), options: .highPriority, context: nil)
        .resizable()
        .frame(width: 400, height: 250)
}
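On iOS 14 / Xcode 12 and later, where if let is allowed directly inside a ViewBuilder, the same guard can also be written with .first, avoiding the index entirely; a small variation, not part of the original answer:
if let firstArticle = list.datas.first {
    WebImage(url: URL(string: firstArticle.title), options: .highPriority, context: nil)
        .resizable()
        .frame(width: 400, height: 250)
} else {
    Text("Loading...")
}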

UIImage isn't showing up in UIScrollView

I'm grabbing the photos from the library using a CocoaPod called BSImagePicker. The images are then stored in a photoArray. I want to take those images from the array and present them in an image view that's held inside the scroll view. However, the images aren't showing up at all.
This is what I've currently tried.
This is the code that the cocoa pod uses to store images into the array:
func openImagePicker() {
    let vc = BSImagePickerViewController()
    self.bs_presentImagePickerController(
        vc,
        animated: true,
        select: { (assest: PHAsset) -> Void in },
        deselect: { (assest: PHAsset) -> Void in },
        cancel: { (assest: [PHAsset]) -> Void in },
        finish: { (assest: [PHAsset]) -> Void in
            for i in 0..<assest.count {
                self.SelectedAssets.append(assest[i])
            }
            self.convertAssetToImages()
        },
        completion: nil
    )
    print(photoArray)
    setupImages(photoArray)
}
This is the code that converts the assets to images:
extension addImages {
    func convertAssetToImages() -> Void {
        if SelectedAssets.count != 0 {
            for i in 0..<SelectedAssets.count {
                let manager = PHImageManager.default()
                let option = PHImageRequestOptions()
                var thumbnail = UIImage()
                option.isSynchronous = true
                let width = scrollView.frame.width
                let height = scrollView.frame.height
                manager.requestImage(for: SelectedAssets[i], targetSize: CGSize(width: width, height: height), contentMode: .aspectFill, options: option, resultHandler: { (result, info) -> Void in
                    thumbnail = result!
                })
                let data = thumbnail.jpegData(compressionQuality: 1)
                let newImage = UIImage(data: data! as Data)
                self.photoArray.append(newImage! as UIImage)
            }
        }
    }
}
This code takes the images from the photo array and puts them into the scroll view:
func setupImages(_ images: [UIImage]) {
    for a in 0..<photoArray.count {
        let theImage = UIImageView()
        theImage.image = self.photoArray[a]
        let xPosition = UIScreen.main.bounds.width * CGFloat(a)
        self.scrollView.frame = CGRect(x: xPosition, y: 0, width: self.scrollView.frame.width, height: self.scrollView.frame.height)
        theImage.contentMode = .scaleAspectFit
        self.scrollView.contentSize.width = self.scrollView.frame.width * CGFloat(a + 1)
        self.scrollView.addSubview(theImage)
    }
}
I have no idea why it isn't working.
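No answer was recorded for this question. Two things stand out in the code above, so here is a hedged guess at a fix: setupImages(photoArray) is called right after presenting the picker, before the finish closure has filled photoArray, and inside setupImages the scroll view's own frame is moved instead of each image view's frame, leaving the image views with a zero-sized frame. Under those assumptions, framing the image views and calling setupImages(photoArray) at the end of convertAssetToImages() might look like this:
func setupImages(_ images: [UIImage]) {
    for (index, image) in images.enumerated() {
        let imageView = UIImageView(image: image)
        imageView.contentMode = .scaleAspectFit
        // Position each image view inside the scroll view; the scroll view's own frame stays put
        imageView.frame = CGRect(x: scrollView.frame.width * CGFloat(index),
                                 y: 0,
                                 width: scrollView.frame.width,
                                 height: scrollView.frame.height)
        scrollView.addSubview(imageView)
    }
    scrollView.contentSize = CGSize(width: scrollView.frame.width * CGFloat(images.count),
                                    height: scrollView.frame.height)
}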

Check photoassets for matching urls

I have an array of URLs of images the user selected from the photo library. Is there a quick way to get those assets (there will never be more than 5) from the photo library without iterating over all the user's photos (which in my test is really slow)?
Here is the code I am using now:
func loadCollectionView() {
    arrImageViews.removeAll()
    let options = PHFetchOptions()
    //options.predicate = NSPredicate(format: "favorite == YES")
    options.sortDescriptors = [
        NSSortDescriptor(key: "creationDate", ascending: true)
    ]
    let results = PHAsset.fetchAssets(with: .image, options: options)
    var assets: [PHAsset] = []
    results.enumerateObjects { (object, _, _) in
        if let asset = object as? PHAsset {
            assets.append(asset)
        }
    }
    let manager = PHImageManager.default()
    for thisAsset in assets {
        thisAsset.getPHAssetURL(completionHandler: { (assetURL) in
            if let strAssetURL = assetURL?.absoluteString {
                if self.arrImagePaths.contains(strAssetURL) {
                    manager.requestImage(for: thisAsset, targetSize: CGSize(width: 80.0, height: 80.0), contentMode: .aspectFit, options: nil, resultHandler: { (thisImage, _) in
                        self.arrImageViews.append(UIImageView(image: thisImage))
                        self.cImages.reloadData()
                    })
                }
            }
        })
    }
}
getPHAssetURL is an extension that extracts the URL from the asset.
So what I am looking for is a match between assetURL?.absoluteString and the strings in my array.
Having checked the docs, I think if you want to get info of only a single selected PHAsset, you can try this:
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : Any]) {
    guard let asset = info[UIImagePickerControllerPHAsset] as? PHAsset else { return }
    let localId = asset.localIdentifier
    // Save this variable in your model.
}
If you have implemented multi-photo selection in a collectionview, you'd have to check their docs to see how to fetch the PHAssets for the selected media. But it should be quite easy.
Then with your var localIds: [String] you can try this:
let options = PHFetchOptions()
options.sortDescriptors = [
    NSSortDescriptor(key: "creationDate", ascending: true)
]
let results = PHAsset.fetchAssets(withLocalIdentifiers: localIds, options: options)
Note that you can perform further filtering with predicates in your fetch request based on the keys here.
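For example, a predicate restricting a fetch to still images might look like this (a small illustration, not from the original answer):
import Photos

let options = PHFetchOptions()
// Only return still-image assets (exclude videos, etc.)
options.predicate = NSPredicate(format: "mediaType == %d", PHAssetMediaType.image.rawValue)
options.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: true)]
let results = PHAsset.fetchAssets(withLocalIdentifiers: localIds, options: options)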
