N-ary Tree Preorder Traversal

class Solution {
    // Iterative DFS: pop a node, record it, then push its children
    // right-to-left so the leftmost child is visited first.
    func preorder(_ root: Node?) -> [Int] {
        guard let root = root else { return [] }
        var stack = [root]
        var ans = [Int]()
        while let node = stack.popLast() {
            ans.append(node.val)
            stack += node.children.reversed()
        }
        return ans
    }
}

Binary Search

class Solution {
    func search(_ nums: [Int], _ target: Int) -> Int {
        var i = 0, j = nums.count - 1
        while i <= j {
            let mid = i + (j - i) / 2   // avoids overflow of (i + j) / 2
            if nums[mid] == target { return mid }
            else if nums[mid] < target { i = mid + 1 }
            else { j = mid - 1 }
        }
        return -1
    }
}

Reverse String

class Solution {
    func reverseString(_ s: inout [Character]) {
        s.reverse()   // in-place reversal: O(n) time, O(1) extra space
    }
}

Binary Tree Preorder Traversal

class Solution {
    // iterative
    func preorderTraversal(_ root: TreeNode?) -> [Int] {
        guard let root = root else { return [] }
        var ans = [Int]()
        var stack = [root]
        while let p = stack.popLast() {
            ans.append(p.val)
            // Push right before left so the left subtree is popped first.
            if let r = p.right { stack.append(r) }
            if let l = p.left { stack.append(l) }
        }
        return ans
    }

    // recursive
    func preorderTraversal0(_ root: TreeNode?) -> [Int] {
        guard let root = root else { return [] }
        return [root.val] + preorderTraversal0(root.left) + preorderTraversal0(root.right)
    }
}

Max Consecutive Ones

class Solution {
    func findMaxConsecutiveOnes(_ nums: [Int]) -> Int {
        // Split on zeros; the longest remaining run of ones is the answer.
        return nums.split(separator: 0).map { $0.count }.max() ?? 0
    }
}

Wow, I finished them all before the deadline.

😃😃😃

Deep Learning Specialization Certificate

Week 1

Building a Recurrent Neural Network - Step by Step

1.1 - RNN cell

# Next hidden state: a<t> = tanh(Waa @ a<t-1> + Wax @ x<t> + ba)
a_next = np.tanh(np.matmul(Waa, a_prev) + np.matmul(Wax, xt) + ba)

# Prediction at this time step: y_hat<t> = softmax(Wya @ a<t> + by)
yt_pred = softmax(np.matmul(Wya, a_next) + by)
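
Note that softmax is not a NumPy builtin; the assignment imports it from its helper utilities. A minimal sketch of a column-wise softmax matching how it is used here:

import numpy as np

def softmax(x):
    # Shift by the column max for numerical stability, then normalize.
    e_x = np.exp(x - np.max(x, axis=0, keepdims=True))
    return e_x / e_x.sum(axis=0, keepdims=True)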

Week 1 - Convolutional Neural Networks

Step by Step

3.1 - Zero-Padding

# Pad only the height and width axes of (m, n_H, n_W, n_C) with zeros.
X_pad = np.pad(X, ((0, 0), (pad, pad), (pad, pad), (0, 0)), mode='constant', constant_values=(0, 0))
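
As a quick sanity check, padding a hypothetical batch of shape (4, 3, 3, 2) with pad = 2 grows only the two spatial dimensions:

import numpy as np

X = np.random.randn(4, 3, 3, 2)   # (m, n_H, n_W, n_C)
pad = 2
X_pad = np.pad(X, ((0, 0), (pad, pad), (pad, pad), (0, 0)), mode='constant', constant_values=(0, 0))
print(X.shape, X_pad.shape)       # (4, 3, 3, 2) (4, 7, 7, 2)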

3.2 - Single step of convolution

s = a_slice_prev * W   # element-wise product of the input slice and the filter weights
Z = np.sum(s)          # sum over all entries to get a single scalar
Z = Z + np.sum(b)      # add the (1, 1, 1)-shaped bias as a scalar
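
In the assignment these three lines form the body of conv_single_step, which convolves one window of the previous activation with a single filter. A self-contained sketch (the random shapes are just for illustration):

import numpy as np

def conv_single_step(a_slice_prev, W, b):
    # Element-wise product, summed to a scalar, plus the bias.
    s = a_slice_prev * W
    Z = np.sum(s)
    return Z + np.sum(b)

a_slice = np.random.randn(3, 3, 4)      # one f x f x n_C_prev window
W = np.random.randn(3, 3, 4)            # one filter's weights
b = np.random.randn(1, 1, 1)            # that filter's bias
print(conv_single_step(a_slice, W, b))  # a single scalar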

3.3 - Convolutional Neural Networks - Forward pass
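
The excerpt cuts off here, but the forward pass just stitches the two helpers above together: pad the batch, slide an f x f window at the given stride, and apply the single-step convolution per filter. A rough sketch under those assumptions (not the graded solution):

import numpy as np

def conv_forward(A_prev, W, b, hparameters):
    # A_prev: (m, n_H_prev, n_W_prev, n_C_prev); W: (f, f, n_C_prev, n_C); b: (1, 1, 1, n_C)
    (m, n_H_prev, n_W_prev, n_C_prev) = A_prev.shape
    f = W.shape[0]
    n_C = W.shape[3]
    stride, pad = hparameters['stride'], hparameters['pad']

    # Output size: floor((n_prev - f + 2*pad) / stride) + 1
    n_H = (n_H_prev - f + 2 * pad) // stride + 1
    n_W = (n_W_prev - f + 2 * pad) // stride + 1

    Z = np.zeros((m, n_H, n_W, n_C))
    A_prev_pad = np.pad(A_prev, ((0, 0), (pad, pad), (pad, pad), (0, 0)),
                        mode='constant', constant_values=(0, 0))

    for i in range(m):                # over the batch
        for h in range(n_H):          # over output rows
            for w in range(n_W):      # over output columns
                for c in range(n_C):  # over filters
                    vert, horiz = h * stride, w * stride
                    a_slice = A_prev_pad[i, vert:vert + f, horiz:horiz + f, :]
                    Z[i, h, w, c] = np.sum(a_slice * W[:, :, :, c]) + np.sum(b[:, :, :, c])
    return Z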




Improving Deep Neural Networks: Hyperparameter tuning, Regularization and Optimization

Week 1

Week 2

1 - Gradient Descent


# One update per layer; this runs inside a loop: for l in range(L):
i = str(l + 1)
parameters["W" + i] = parameters["W" + i] - learning_rate * grads['dW' + i]
parameters["b" + i] = parameters["b" + i] - learning_rate * grads['db' + i]