Queues

The stack

The stack ensures that elements are stored and removed in a last-in first-out (LIFO) order.

public struct Stack<T> {
  fileprivate var array = [T]()

  public var isEmpty: Bool {
    return array.isEmpty
  }
  
  public var count: Int {
    return array.count
  }
  
  public mutating func push(_ element: T) {
    array.append(element)
  }
  
  public mutating func pop() -> T? {
    return array.popLast()
  }
  
  public var top: T? {
    return array.last
  }
}
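Here is a short usage sketch of the stack above (the values are just for illustration):

var stack = Stack<Int>()
stack.push(10)
stack.push(3)
stack.push(57)
stack.top       // 57
stack.pop()     // 57 -- the last element pushed comes off first
stack.pop()     // 3
stack.count     // 1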

An interesting thing about the stack: every time a function or method is called, the CPU pushes the function's return address onto the call stack. When the function completes, the CPU pops that return address off the stack and jumps back to the location where the function was called. So if you keep calling too many functions (for example, a recursive function that never terminates), you get a so-called "stack overflow" error because the call stack runs out of space.
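For illustration (this hypothetical function is not from the original text), a recursive call with no base case keeps pushing new frames until the call stack is exhausted:

func recurseForever(_ n: Int) -> Int {
  // No base case: every call pushes another frame onto the call stack,
  // so this eventually crashes with a stack overflow.
  return recurseForever(n + 1)
}
// recurseForever(0)   // don't actually call this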

The queue

A queue ensures that elements are added and removed in first-in, first-out (FIFO) order.

This is a straightforward queue implementation.

public struct Queue<T> {
  fileprivate var array = [T]()

  public var isEmpty: Bool {
    return array.isEmpty
  }
  
  public var count: Int {
    return array.count
  }
  
  public mutating func enqueue(_ element: T) {
    array.append(element)
  }
  
  public mutating func dequeue() -> T? {
    if isEmpty {
      return nil
    } else {
      return array.removeFirst()
    }
  }
  
  public var front: T? {
    return array.first
  }
}

Enqueuing is O(1), but dequeuing is O(n), because removeFirst() has to shift all of the remaining elements forward in memory.
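A quick usage sketch (illustrative values):

var queue = Queue<String>()
queue.enqueue("Ada")
queue.enqueue("Steve")
queue.enqueue("Tim")
queue.dequeue()   // "Ada" -- the first element enqueued comes out first
queue.front       // "Steve"
queue.count       // 2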

More efficient queues

public struct Queue<T> {
  fileprivate var array = [T?]()
  fileprivate var head = 0
  
  public var isEmpty: Bool {
    return count == 0
  }
  
  public var count: Int {
    return array.count - head
  }
  
  public mutating func enqueue(_ element: T) {
    array.append(element)
  }
  
  public mutating func dequeue() -> T? {
    guard head < array.count, let element = array[head] else { return nil }
    
    array[head] = nil
    head += 1
    
    let percentage = Double(head) / Double(array.count)
    if array.count > 50 && percentage > 0.25 {
      array.removeFirst(head)
      head = 0
    }
    return element
  }
  
  public var front: T? {
    if isEmpty {
      return nil
    } else {
      return array[head]
    }
  }
}

When we dequeue the first element, we first set array[head] to nil to remove the element from the array, and then increment head by one so that the next element becomes the new front of the queue. Periodically, once the array holds more than 50 elements and more than 25% of it is unused, dequeue() trims the wasted space at the front with removeFirst(head) and resets head to 0.
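For illustration, here is roughly how the internal array evolves (the values are made up; the comments describe the state after each call):

var q = Queue<String>()
q.enqueue("Ada")
q.enqueue("Steve")
q.enqueue("Tim")
// array: ["Ada", "Steve", "Tim"], head = 0

q.dequeue()   // "Ada"
// array: [nil, "Steve", "Tim"], head = 1 -- no elements were shifted

q.dequeue()   // "Steve"
// array: [nil, nil, "Tim"], head = 2
// Once the array holds more than 50 elements and over 25% of it is
// unused, dequeue() trims the empty slots with removeFirst(head).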

Deque (double-ended queue)

A basic implementation

public struct Deque<T> {
  private var array = [T]()

  public var isEmpty: Bool {
    return array.isEmpty
  }
  
  public var count: Int {
    return array.count
  }
  
  public mutating func enqueue(_ element: T) {
    array.append(element)
  }
  
  public mutating func enqueueFront(_ element: T) {
    array.insert(element, at: 0)
  }

  public mutating func dequeue() -> T? {
    if isEmpty {
      return nil
    } else {
      return array.removeFirst()
    }
  }
  
  public mutating func dequeueBack() -> T? {
    if isEmpty {
      return nil
    } else {
      return array.removeLast()
    }
  }
  
  public func peekFront() -> T? {
    return array.first
  }
  
  public func peekBack() -> T? {
    return array.last
  }
}

dequeue() and enqueueFront() have O(n) time complexity because they operate at the front of the array, which forces all of the remaining elements to shift in memory.
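A short usage sketch of this deque (illustrative values):

var deque = Deque<Int>()
deque.enqueue(1)
deque.enqueue(2)
deque.enqueueFront(0)   // O(n): every existing element shifts one slot
deque.dequeue()         // 0
deque.dequeueBack()     // 2
deque.peekFront()       // 1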

A more efficient version

public struct Deque<T> {
  private var array: [T?]
  private var head: Int
  private var capacity: Int
  private let originalCapacity: Int
  
  public init(_ capacity: Int = 10) {
    self.capacity = max(capacity, 1)
    originalCapacity = self.capacity
    array = [T?](repeating: nil, count: capacity)
    head = capacity
  }
  
  public var isEmpty: Bool {
    return count == 0
  }
  
  public var count: Int {
    return array.count - head
  }
  
  public mutating func enqueue(_ element: T) {
    array.append(element)
  }
  
  public mutating func enqueueFront(_ element: T) {
    if head == 0 {
      capacity *= 2
      let emptySpace = [T?](repeating: nil, count: capacity)
      array.insert(contentsOf: emptySpace, at: 0)
      head = capacity
    }
    
    head -= 1
    array[head] = element
  }
  
  public mutating func dequeue() -> T? {
    guard head < array.count, let element = array[head] else { return nil }
    
    array[head] = nil
    head += 1
    
    if capacity >= originalCapacity && head >= capacity*2 {
      let amountToRemove = capacity + capacity/2
      array.removeFirst(amountToRemove)
      head -= amountToRemove
      capacity /= 2
    }
    return element
  }
  
  public mutating func dequeueBack() -> T? {
    if isEmpty {
      return nil
    } else {
      return array.removeLast()
    }
  }
  
  public func peekFront() -> T? {
    if isEmpty {
      return nil
    } else {
      return array[head]
    }
  }
  
  public func peekBack() -> T? {
    if isEmpty {
      return nil
    } else {
      return array.last!
    }
  }
}
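The trick here is the same as in the more efficient queue, but applied to the front as well: the array keeps a block of empty slots before head, so enqueueFront() only has to decrement head and fill in a slot. A rough sketch of how the internal state evolves, using an initial capacity of 4 for illustration:

var deque = Deque<Int>(4)
// array: [nil, nil, nil, nil], head = 4

deque.enqueueFront(5)
// array: [nil, nil, nil, 5], head = 3 -- nothing shifts

deque.enqueue(7)
// array: [nil, nil, nil, 5, 7], head = 3

deque.enqueueFront(3)
deque.enqueueFront(1)
deque.enqueueFront(0)
// array: [0, 1, 3, 5, 7], head = 0
// The next enqueueFront() doubles the capacity and inserts a new block
// of empty slots at the front, so there is room again.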

Priority queue

A priority queue is a queue in which the most important element always comes first.

Algorithms that can benefit from priority queues:

  • Event-driven simulation. Each event has a timestamp, and you want the events to be executed in timestamp order. A priority queue makes it easy to find the next event to simulate.
  • Dijkstra’s graph search algorithm uses priority queues to calculate the lowest cost.
  • Huffman coding is used for data compression. The algorithm builds a compression tree. It repeatedly needs to find two nodes with the minimum frequency that do not yet have a parent node.
  • A* pathfinding for artificial intelligence.
  • Lots of other places!

Heap-based Swift priority queue:

public struct PriorityQueue<T> {
  fileprivate var heap: Heap<T>
  
  public init(sort: @escaping (T, T) -> Bool) {
    heap = Heap(sort: sort)
  }
  
  public var isEmpty: Bool {
    return heap.isEmpty
  }
  
  public var count: Int {
    return heap.count
  }
  
  public func peek() -> T? {
    return heap.peek()
  }
  
  public mutating func enqueue(element: T) {
    heap.insert(element)
  }
  
  public mutating func dequeue() -> T? {
    return heap.remove()
  }
  
  public mutating func changePriority(index i: Int, value: T) {
    return heap.replace(index: i, value: value)
  }
}
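This type relies on a Heap<T> that is not shown here; it is assumed to provide insert(), remove(), peek(), count and isEmpty (the heap from the Swift Algorithm Club matches this shape). With such a heap available, usage looks roughly like this:

var priorityQueue = PriorityQueue<Int>(sort: <)   // smallest value = highest priority
priorityQueue.enqueue(element: 10)
priorityQueue.enqueue(element: 3)
priorityQueue.enqueue(element: 57)
priorityQueue.dequeue()   // 3
priorityQueue.peek()      // 10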

Ring buffer

Also known as a circular buffer.

Conceptually, the array wraps around to its beginning, so you never have to remove any items. All operations are O(1).

A simple Swift implementation

public struct RingBuffer<T> {
  fileprivate var array: [T?]
  fileprivate var readIndex = 0
  fileprivate var writeIndex = 0
  
  public init(count: Int) {
    array = [T?](repeating: nil, count: count)
  }
  
  public mutating func write(_ element: T) -> Bool {
    if !isFull {
      array[writeIndex % array.count] = element
      writeIndex += 1
      return true
    } else {
      return false
    }
  }

  public mutating func read() -> T? {
    if !isEmpty {
      let element = array[readIndex % array.count]
      readIndex += 1
      return element
    } else {
      return nil
    }
  }

  fileprivate var availableSpaceForReading: Int {
    return writeIndex - readIndex
  }
  
  public var isEmpty: Bool {
    return availableSpaceForReading == 0
  }
  
  fileprivate var availableSpaceForWriting: Int {
    return array.count - availableSpaceForReading
  }
  
  public var isFull: Bool {
    return availableSpaceForWriting == 0
  }
}

The RingBuffer object uses an array for the actual storage, and the readIndex and writeIndex variables act as "pointers" into that array. The write() function stores a new element at writeIndex, and the read() function returns the element at readIndex.

Here’s how it works:

var buffer = RingBuffer<Int>(count: 5)

buffer.write(123)
buffer.write(456)
buffer.write(789)
buffer.write(666)

buffer.read()    // 123
buffer.read()    // 456
buffer.read()    // 789

buffer.write(333)
buffer.write(555)

buffer.read()    // 666
buffer.read()    // 333
buffer.read()    // 555
buffer.read()    // nil

Circular buffers can make for better queues, but they have a drawback: the wrap-around makes resizing the queue tricky, so in practice the buffer has a fixed size. If a fixed-size queue suits your purpose, though, that's fine.

Circular buffers are also useful when a data producer writes to an array at a rate different from that of a data consumer. This usually happens with file or network I/O. Ring buffers are also the preferred means of communication between high-priority threads, such as audio render callbacks, and other slower parts of the system.