swift Change the navigation bar color

.swift

Using the Document Outline, select your Navigation Bar.
In the Attributes Inspector, in the Navigation Bar group, change the Style from Default to Black. This changes the background colour of the Navigation and Status bars to black, and their text to white. So the battery and other icons and text in the status bar will look white when the app is running.
In the same Navigation Bar group, change the Bar Tint to the colour of your liking.
If you have Bar Button Items in your Navigation Bar, they will still show their text in the default blue colour, so select them and, in the Attributes Inspector's View group, change the Tint to White Color.
That should get you what you want. A programmatic sketch of the same settings follows the property summary below.

Bar Tint: Background color of navigation bar
Title Color/Large Title color: color of title text in navigation bar
Tint: color of left and right button text in navigation bar
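
The same settings can also be applied in code. Here is a minimal sketch, assuming the view controller is embedded in a UINavigationController; the class name and the purple bar tint are placeholders for illustration only.

import UIKit

class ColoredNavViewController: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()

        let navBar = navigationController?.navigationBar
        navBar?.barStyle = .black          // Style: Black -> light status bar content
        navBar?.barTintColor = .purple     // Bar Tint: background colour of the bar (example colour)
        navBar?.tintColor = .white         // Tint: colour of bar button text and icons
        navBar?.titleTextAttributes = [.foregroundColor: UIColor.white]           // Title Color
        if #available(iOS 11.0, *) {
            navBar?.largeTitleTextAttributes = [.foregroundColor: UIColor.white]  // Large Title Color
        }
    }
}

On iOS 11 and later the large title colour is set separately, hence the availability check.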

Source: https://stackoverflow.com/questions/18929864/how-to-change-navigation-bar-color-in-ios-7

swift Add a navigation bar button

.swift
From the Object Library, drag and drop a Bar Button Item (IMPORTANT: don't select a regular Button) onto the navigation bar, then press Ctrl and drag from it to the next view controller to create a segue.
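
If you prefer to do this in code rather than in the storyboard, here is a rough sketch; the "Next" title, the goToSecond selector, the "showSecond" segue identifier, and the class name are assumed names for illustration, not part of the original instructions.

import UIKit

class FirstViewController: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
        // Add a bar button item on the right side of the navigation bar.
        navigationItem.rightBarButtonItem = UIBarButtonItem(title: "Next",
                                                            style: .plain,
                                                            target: self,
                                                            action: #selector(goToSecond))
    }

    @objc func goToSecond() {
        // Trigger a storyboard segue when the button is tapped.
        performSegue(withIdentifier: "showSecond", sender: self)
    }
}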
Source: https://www.coursera.org/lecture/ios-app-design-development/navigation-controllers-MMAQN

swift Create a new viewController file and attach the swift file via the storyboard

.swift
Create a view controller Swift file for the second view controller:
	New -> iOS -> Cocoa Touch Class -> Next -> Subclass of UIViewController -> Next
Now link the file to the second view controller: select it in the storyboard, open the Identity Inspector, and enter the new class name in the Class field.
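
For reference, the generated file is just a UIViewController subclass; a minimal sketch is below, where SecondViewController is an assumed name that must match whatever you enter in the Class field.

import UIKit

class SecondViewController: UIViewController {

    override func viewDidLoad() {
        super.viewDidLoad()
        // Set up the second screen here.
    }
}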

swift Play audio in Swift 5

audioApp.xcodeproj
import UIKit
import AVFoundation

class ViewController: UIViewController, AVAudioPlayerDelegate {
    
    var audioPlayer: AVAudioPlayer!
    
    override func viewDidLoad() {
        super.viewDidLoad()
    }
    

    @IBAction func notePressed(_ sender: UIButton) {
        // Look up the sound file in the app bundle; bail out if it is missing.
        guard let soundURL = Bundle.main.url(forResource: "note1", withExtension: "wav") else { return }
        
        do {
            // Create a player for the sound and start playback.
            audioPlayer = try AVAudioPlayer(contentsOf: soundURL)
            audioPlayer.play()
        } catch {
            print(error)
        }
    }
}

swift This converts HTML text to a string.

HTMLString.swift
import UIKit

extension String {
    // Parses the string as HTML and returns it as styled attributed text.
    var htmlToAttributedString: NSAttributedString? {
        guard let data = data(using: .utf8) else { return NSAttributedString() }
        do {
            return try NSAttributedString(data: data,
                                          options: [.documentType: NSAttributedString.DocumentType.html,
                                                    .characterEncoding: String.Encoding.utf8.rawValue],
                                          documentAttributes: nil)
        } catch {
            return NSAttributedString()
        }
    }
    // Plain-text version of the HTML, with the tags stripped.
    var htmlToString: String {
        return htmlToAttributedString?.string ?? ""
    }
}
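
A quick usage sketch of the extension above; the HTML string is only a sample.

// Sample input; any HTML string works.
let html = "<p>Hello <b>world</b></p>"
let attributed = html.htmlToAttributedString   // styled text, e.g. for a UILabel or UITextView
let plain = html.htmlToString                  // plain text with the tags stripped

Note that the HTML importer should only be used from the main thread.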

swift Alamofire GET request

alamofire.swift
Alamofire.request(url).responseJSON { response in
  print("Request: \(String(describing: response.request))")   // original url request
  print("Response: \(String(describing: response.response))") // http url response
  print("Result: \(response.result)")                         // response serialization result
  
  if let json = response.result.value {
      print("JSON: \(json)") // serialized json response
  }
  
  if let data = response.data, let utf8Text = String(data: data, encoding: .utf8) {
      print("Data: \(utf8Text)") // original server data as UTF8 string
  }
}

swift Send a request with basic HTTP authorization

basic-http-authorisation
let username = "username"
let password = "password"
let credentialData = "\(username):\(password)".data(using: String.Encoding.utf8)!
let base64Credentials = credentialData.base64EncodedString(options: [])
let headers = ["Authorization": "Basic \(base64Credentials)"]

let parameters: [String: Any] = ["someKey" : "<my_email>"]

Alamofire.request("url_here",
                  method: .post,
                  parameters: parameters,
                  encoding: URLEncoding.queryString,
                  headers: headers)
    .responseString { response in
        print(response)

        if response.result.isSuccess {
            // Handle the successful response here.
        } else {
            // Handle the failure here.
        }
}

swift Data from an audio buffer

data-from-audio-buffer
static func create(from buffer: AVAudioPCMBuffer) -> Data {
    let channelCount = 1  // the given PCMBuffer's channel count is 1
    let channels = UnsafeBufferPointer(start: buffer.floatChannelData, count: channelCount)
    // Copy only the valid frames (frameLength), not the full allocated capacity.
    let ch0Data = NSData(bytes: channels[0],
                         length: Int(buffer.frameLength * buffer.format.streamDescription.pointee.mBytesPerFrame)) as Data
    
    return ch0Data
}

swift Not tested! Capture audio from the microphone

capture-audio-from-mic
import Foundation
import AVFoundation

class AudioCaptureSession: NSObject, AVCaptureAudioDataOutputSampleBufferDelegate {

    let settings = [
        AVFormatIDKey: kAudioFormatMPEG4AAC,
        AVNumberOfChannelsKey : 1,
        AVSampleRateKey : 44100]
    let captureSession = AVCaptureSession()

    override init() {
        super.init()

        let queue = DispatchQueue(label: "AudioSessionQueue", attributes: [])
        let captureDevice = AVCaptureDevice.default(for: AVMediaType.audio)
        var audioInput : AVCaptureDeviceInput? = nil
        var audioOutput : AVCaptureAudioDataOutput? = nil

        do {
            try captureDevice?.lockForConfiguration()
            audioInput = try AVCaptureDeviceInput(device: captureDevice!)
            captureDevice?.unlockForConfiguration()
            audioOutput = AVCaptureAudioDataOutput()
            audioOutput?.setSampleBufferDelegate(self, queue: queue)
            audioOutput?.audioSettings = settings
        } catch {
            print("Capture devices could not be set")
            print(error.localizedDescription)
        }

        if audioInput != nil && audioOutput != nil {
            captureSession.beginConfiguration()
            if (captureSession.canAddInput(audioInput!)) {
                captureSession.addInput(audioInput!)
            } else {
                print("cannot add input")
            }
            if (captureSession.canAddOutput(audioOutput!)) {
                captureSession.addOutput(audioOutput!)
            } else {
                print("cannot add output")
            }
            captureSession.commitConfiguration()

            print("Starting capture session")
            captureSession.startRunning()
        }
    }

    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {

        print("Audio data recieved")
    }
}

swift Delay a function call

delay-function-call
// Version 1: with dispatch queues.
DispatchQueue.main.asyncAfter(deadline: .now() + 2) {
    // code to run after the delay
}


// Version 2: with selectors. perform(_:with:afterDelay:) is an NSObject method,
// so call it from an NSObject subclass such as a view controller.
@objc func authenticate() {
    print("Authentication")
}

perform(#selector(authenticate), with: nil, afterDelay: 1)