Sunday, March 26, 2017

Ref : https://medium.com/redbeard-blog/ios-developer-26-awesome-resources-for-beginners-4a9f4a80fe25#.6c2pnsb0e

How to rotate an image in Swift 3

Ref :  http://stackoverflow.com/questions/40882487/how-to-rotate-image-in-swift-3


import UIKit

extension UIImage {

    // Redraws the image so that its pixel data matches the .up orientation.
    func fixImageOrientation() -> UIImage? {
        var flip = false            // true when the image is mirrored
        var isRotatedBy90 = false   // true when the aspect ratio has to be swapped

        var transform = CGAffineTransform.identity

        // Check the current orientation of the original image.
        switch self.imageOrientation {
        case .down, .downMirrored:
            transform = transform.rotated(by: CGFloat.pi)
        case .left, .leftMirrored:
            transform = transform.rotated(by: CGFloat.pi / 2)
            isRotatedBy90 = true
        case .right, .rightMirrored:
            transform = transform.rotated(by: -CGFloat.pi / 2)
            isRotatedBy90 = true
        case .up, .upMirrored:
            break
        }

        // Undo mirroring if the image is flipped.
        switch self.imageOrientation {
        case .upMirrored, .downMirrored:
            transform = transform.translatedBy(x: self.size.width, y: 0)
            flip = true
        case .leftMirrored, .rightMirrored:
            transform = transform.translatedBy(x: self.size.height, y: 0)
            flip = true
        default:
            break
        }

        // Calculate the size of the rotated view's containing box for our drawing space.
        let rotatedViewBox = UIView(frame: CGRect(origin: .zero, size: size))
        rotatedViewBox.transform = transform
        let rotatedSize = rotatedViewBox.frame.size

        // Create the bitmap context.
        UIGraphicsBeginImageContext(rotatedSize)
        guard let bitmap = UIGraphicsGetCurrentContext(), let cgImage = self.cgImage else {
            UIGraphicsEndImageContext()
            return nil
        }

        // Move the origin to the middle of the image so we rotate and scale around the center.
        bitmap.translateBy(x: rotatedSize.width / 2.0, y: rotatedSize.height / 2.0)

        // Flip vertically (and horizontally when mirrored) because Core Graphics draws upside down.
        let yFlip: CGFloat = flip ? -1.0 : 1.0
        bitmap.scaleBy(x: yFlip, y: -1.0)

        // Swap width and height when the image was rotated by 90 degrees.
        if isRotatedBy90 {
            bitmap.draw(cgImage, in: CGRect(x: -size.width / 2, y: -size.height / 2, width: size.height, height: size.width))
        } else {
            bitmap.draw(cgImage, in: CGRect(x: -size.width / 2, y: -size.height / 2, width: size.width, height: size.height))
        }

        let fixedImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()

        return fixedImage
    }
}
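For comparison, the orientation can also be normalized by letting UIImage.draw(in:) apply the orientation for you. A minimal sketch of that shorter approach (the method name normalizedImage is mine, not from the answer above):

import UIKit

extension UIImage {
    // Redraws the image so that the pixel data itself ends up in the .up orientation.
    func normalizedImage() -> UIImage {
        if imageOrientation == .up { return self }
        UIGraphicsBeginImageContextWithOptions(size, false, scale)
        draw(in: CGRect(origin: .zero, size: size))   // draw(in:) honors imageOrientation
        let normalized = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return normalized ?? self
    }
}

// Usage: let fixed = somePhoto.fixImageOrientation()  or  somePhoto.normalizedImage()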

iOS: save image with name


Ref : http://stackoverflow.com/questions/35796218/ios-save-image-with-name

let newImage = UIImage(named: "myimage")

// Build a file name from the current date, using underscores so the name is filesystem-friendly.
let date = Date()
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = "yyyy-MM-dd'_'HH_mm_ss"
dateFormatter.timeZone = TimeZone(abbreviation: "GMT")

let imageName = "/\(dateFormatter.string(from: date)).jpg"
print(imageName)

// Save the JPEG data into the app's Documents directory.
var documentsDirectoryPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
documentsDirectoryPath += imageName
print(documentsDirectoryPath)

if let newImage = newImage, let settingsData = UIImageJPEGRepresentation(newImage, 1.0) {
    try? settingsData.write(to: URL(fileURLWithPath: documentsDirectoryPath))
}
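To read the saved file back later, the same path can be handed to UIImage(contentsOfFile:). A small sketch, assuming the documentsDirectoryPath built above:

// Read the JPEG written above back into a UIImage.
if let savedImage = UIImage(contentsOfFile: documentsDirectoryPath) {
    print("Loaded image of size \(savedImage.size)")
}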

Saturday, March 25, 2017

The Easy Way to Switch Container Views in iOS

Ref : https://spin.atomicobject.com/2015/09/02/switch-container-views/





1. First, add a container view to your view controller. I applied a blue color to the child view controller so you could tell the difference between the two.
2. Next, position and constrain the container view however you want it to appear in your parent view controller.
3. Then drag a second container view right on top of container view A.
4. Constrain container view B to have equal width and height to container view A, and center container view B to the middle of container view A. This way, any layout changes that happen to our first container view will be applied to the second. They should both be the same size and have the same position.


class ViewController: UIViewController {
    @IBOutlet weak var containerViewA: UIView!
    @IBOutlet weak var containerViewB: UIView!
    
    override func viewDidLoad() {
        super.viewDidLoad()
    }
    
    @IBAction func showComponent(_ sender: UISegmentedControl) {
        if sender.selectedSegmentIndex == 0 {
            UIView.animate(withDuration: 0.5, animations: {
                self.containerViewA.alpha = 1
                self.containerViewB.alpha = 0
            })
        } else {
            UIView.animate(withDuration: 0.5, animations: {
                self.containerViewA.alpha = 0
                self.containerViewB.alpha = 1
            })
        }
    }

}

Custom camera view in iOS

Ref : https://github.com/codepath/ios_guides/wiki/Creating-a-Custom-Camera-View






The UIView at the top is the previewView.

The button is in the bottom-left corner.

The ImageView() is in the bottom-right corner.

Below is what I got from trying it out myself.

import UIKit
import AVFoundation

class ViewController: UIViewController {

    
    var session: AVCaptureSession?
    var stillImageOutput: AVCaptureStillImageOutput?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    
    
    @IBOutlet weak var previewView: UIView!
    
    @IBOutlet weak var captureImageView: UIImageView!
    
    
    @IBAction func didTakePhoto(_ sender: Any) {

        print("takePhoto")

        if let videoConnection = stillImageOutput?.connection(withMediaType: AVMediaTypeVideo) {

            print("videoConnection")

            // Capture a still frame, show it in the image view, and save it to the photo album.
            stillImageOutput?.captureStillImageAsynchronously(from: videoConnection) {
                (imageDataSampleBuffer, error) -> Void in
                guard let sampleBuffer = imageDataSampleBuffer,
                      let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer),
                      let image = UIImage(data: imageData) else { return }

                self.captureImageView.image = image
                UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
            }
        }
    }
    
    override func viewDidLoad() {
        super.viewDidLoad()

        setup()

        // Overlay a test image on top of the preview to experiment with layering.
        let cellImage = UIImage(named: "path.png")
        let imageView = UIImageView(image: cellImage)
        imageView.frame = CGRect(x: 0, y: 0, width: 300, height: 300)
        view.addSubview(imageView)
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        // Setup your camera here...
        videoPreviewLayer!.frame = previewView.bounds
    }
    
    
    func setup() {
        session = AVCaptureSession()
        session!.sessionPreset = AVCaptureSessionPresetPhoto

        let backCamera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)

        var error: NSError?
        var input: AVCaptureDeviceInput!
        do {
            input = try AVCaptureDeviceInput(device: backCamera)
        } catch let error1 as NSError {
            error = error1
            input = nil
            print(error!.localizedDescription)
        }

        if error == nil && session!.canAddInput(input) {
            session!.addInput(input)
        }

        stillImageOutput = AVCaptureStillImageOutput()
        stillImageOutput?.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]

        if session!.canAddOutput(stillImageOutput) {
            session!.addOutput(stillImageOutput)

            // Configure the live preview and start the session.
            videoPreviewLayer = AVCaptureVideoPreviewLayer(session: session)
            videoPreviewLayer!.videoGravity = AVLayerVideoGravityResizeAspect
            videoPreviewLayer!.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
            previewView.layer.addSublayer(videoPreviewLayer!)
            session!.startRunning()
        }
    }


}

Thursday, March 23, 2017

Does anyone know how to add a cookie to the headers of a POST request?

Ref : http://stackoverflow.com/questions/36372341/swift-http-request-with-parameters-in-header

var request = URLRequest(url: url)
request.httpMethod = "POST"
request.setValue("secret-keyValue", forHTTPHeaderField: "secret-key")

URLSession.shared.dataTask(with: request) { data, response, error in
    // handle the response here
}.resume()
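Since the question is specifically about a cookie, the same pattern works with the Cookie header. A minimal sketch with a made-up URL, cookie value, and body:

import Foundation

// Hypothetical endpoint and cookie value, just to show the header format.
var request = URLRequest(url: URL(string: "https://example.com/api/login")!)
request.httpMethod = "POST"
request.setValue("sessionid=abc123; csrftoken=xyz789", forHTTPHeaderField: "Cookie")
request.httpBody = "username=test".data(using: .utf8)

URLSession.shared.dataTask(with: request) { data, response, error in
    // Inspect data / response / error here.
}.resume()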

Wednesday, March 22, 2017

Displaying the images and messages of the users I follow



The idea is this:

a two-stage database lookup.

First, query the Followers entity

to get the full list of people the currently logged-in user follows, and put it in the followeringusers array.

Then, for each user in that followeringusers array,

query the entity named Posts for the images and messages they have recorded.

    func searchFollower() {
        let appDelegate = UIApplication.shared.delegate as! AppDelegate
        let context = appDelegate.persistentContainer.viewContext

        // Stage 1: find everyone the current user follows.
        let followersRequest = NSFetchRequest<NSFetchRequestResult>(entityName: "Followers")
        followersRequest.predicate = NSPredicate(format: "follower = %@", currentUser)
        followersRequest.returnsObjectsAsFaults = false

        do {
            let followersResults = try context.fetch(followersRequest)
            if followersResults.count > 0 {
                for followersResult in followersResults as! [NSManagedObject] {
                    let followeringUser = followersResult.value(forKey: "following") as! String
                    followeringusers.append(followeringUser)
                }
            }
        } catch {
            print("Could not fetch followers")
        }

        // Include the current user's own posts as well.
        followeringusers.append(currentUser)

        // Stage 2: fetch the posts of every followed user.
        for followinguser in followeringusers {
            let postsRequest = NSFetchRequest<NSFetchRequestResult>(entityName: "Posts")
            postsRequest.predicate = NSPredicate(format: "username = %@", followinguser)
            postsRequest.returnsObjectsAsFaults = false

            do {
                let results = try context.fetch(postsRequest)
                if results.count > 0 {
                    for result in results as! [NSManagedObject] {
                        let username = result.value(forKey: "username") as! String
                        let message = result.value(forKey: "message") as! String
                        let image = result.value(forKey: "image") as! NSData
                        usernames.append(username)
                        messages.append(message)
                        imageFiles.append(image)
                    }
                }
            } catch {
                print("Could not find following user info")
            }
        }
    }
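For context, this is roughly how the fetch might be wired into the feed's table view controller. The property declarations and the viewDidLoad placement are my own assumptions based on the snippets here, not code from the original post:

    // Assumed properties on the table view controller, matching the names used above.
    var currentUser = ""
    var followeringusers = [String]()
    var usernames  = [String]()
    var messages   = [String]()
    var imageFiles = [NSData]()

    override func viewDidLoad() {
        super.viewDidLoad()
        searchFollower()        // run the two-stage Core Data fetch
        tableView.reloadData()  // then refresh the feed
    }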


Once the fetch is done, the results get loaded here.

For the image, a little processing is needed: convert the NSData to Data first, then call UIImage(data:).



 override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        let cell = tableView.dequeueReusableCell(withIdentifier: "cell", for: indexPath) as! FeedTableViewCell

        // Configure the cell...

        cell.usernameLabel.text = usernames[indexPath.row]
        cell.messageLabel.text  = messages[indexPath.row]
        cell.postedImage.image  = UIImage(data: imageFiles[indexPath.row] as Data)

        return cell
    }