You are given the root of a binary tree where each node has a value of 0 or 1. Each root-to-leaf path represents a binary number starting with the most significant bit.
- For example, if the path is 0 -> 1 -> 1 -> 0 -> 1, then this could represent 01101 in binary, which is 13.
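Reading the path digits as binary place values makes the conversion explicit: 0×16 + 1×8 + 1×4 + 0×2 + 1×1 = 13.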
For all leaves in the tree, consider the numbers represented by the path from the root to that leaf. Return the sum of these numbers.
The test cases are generated so that the answer fits in a 32-bit integer.
Example 1:
         1
       /   \
      0     1
     / \   / \
    0   1 0   1
Input: root = [1,0,1,0,1,0,1]
Output: 22
Explanation: (100) + (101) + (110) + (111) = 4 + 5 + 6 + 7 = 22
Example 2:
Input: root = [0]
Output: 0
Example 3:
Input: root = [1]
Output: 1
Example 4:
Input: root = [1,1]
Output: 3
Constraints:
- The number of nodes in the tree is in the range [1, 1000].
- Node.val is 0 or 1.
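A depth-first traversal solves this directly: carry the number built so far down the tree, and at each node shift it left one bit and add the node's value. When a leaf is reached, the accumulated value is one complete root-to-leaf number and is returned to be added to the total. Tracing the left-most path of Example 1 (1 -> 0 -> 0): ((0×2 + 1)×2 + 0)×2 + 0 = 4, which matches the 100 term in the explanation above. Both implementations below follow this scheme.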
```python
# Definition for a binary tree node.
# class TreeNode:
#     def __init__(self, val=0, left=None, right=None):
#         self.val = val
#         self.left = left
#         self.right = right
from typing import Optional


class Solution:
    def sumRootToLeaf(self, root: Optional[TreeNode]) -> int:
        return self.dfs(root, 0)

    def dfs(self, node: Optional[TreeNode], current: int) -> int:
        if not node:
            return 0
        # Append this node's bit to the number accumulated so far.
        current = (current << 1) + node.val
        # Leaf check: left and right are both None, so they compare equal.
        if node.left == node.right:
            return current
        return self.dfs(node.left, current) + self.dfs(node.right, current)
```
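A quick sanity check against Example 1, assuming a concrete TreeNode class matching the commented-out definition above is available (on LeetCode the judge provides it):

```python
# Build the Example 1 tree [1,0,1,0,1,0,1] and verify the expected sum of 22.
# Assumes TreeNode and Solution are defined as above.
root = TreeNode(1,
                TreeNode(0, TreeNode(0), TreeNode(1)),
                TreeNode(1, TreeNode(0), TreeNode(1)))

print(Solution().sumRootToLeaf(root))  # 22
```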
```go
/**
 * Definition for a binary tree node.
 * type TreeNode struct {
 *     Val   int
 *     Left  *TreeNode
 *     Right *TreeNode
 * }
 */
func sumRootToLeaf(root *TreeNode) int {
    return sum(root, 0)
}

func sum(tree *TreeNode, num int) int {
    if tree == nil {
        return 0
    }
    // Append this node's bit to the number accumulated so far.
    currentVal := (num << 1) + tree.Val
    // Leaf: the completed number is this path's contribution.
    if tree.Left == nil && tree.Right == nil {
        return currentVal
    }
    return sum(tree.Left, currentVal) + sum(tree.Right, currentVal)
}
```
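Both versions perform the same preorder recursion: every node is visited exactly once, so the running time is linear in the number of nodes, and the extra space is the recursion stack, which is bounded by the height of the tree.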