Welcome to Subscribe On Youtube

3599. Partition Array to Minimize XOR

Description

You are given an integer array nums and an integer k.

Your task is to partition nums into k non-empty subarrays. For each subarray, compute the bitwise XOR of all its elements.

Return the minimum possible value of the maximum XOR among these k subarrays.

 

Example 1:

Input: nums = [1,2,3], k = 2

Output: 1

Explanation:

The optimal partition is [1] and [2, 3].

  • XOR of the first subarray is 1.
  • XOR of the second subarray is 2 XOR 3 = 1.

The maximum XOR among the subarrays is 1, which is the minimum possible.

Example 2:

Input: nums = [2,3,3,2], k = 3

Output: 2

Explanation:

The optimal partition is [2], [3, 3], and [2].

  • XOR of the first subarray is 2.
  • XOR of the second subarray is 3 XOR 3 = 0.
  • XOR of the third subarray is 2.

The maximum XOR among the subarrays is 2, which is the minimum possible.

Example 3:

Input: nums = [1,1,2,3,1], k = 2

Output: 0

Explanation:

The optimal partition is [1, 1] and [2, 3, 1].

  • XOR of the first subarray is 1 XOR 1 = 0.
  • XOR of the second subarray is 2 XOR 3 XOR 1 = 0.

The maximum XOR among the subarrays is 0, which is the minimum possible.

 

Constraints:

  • 1 <= nums.length <= 250
  • 1 <= nums[i] <= 10^9
  • 1 <= k <= n, where n == nums.length

Solutions

Solution 1: Dynamic Programming

We define $f[i][j]$ as the minimum possible value of the maximum XOR among all ways to partition the first $i$ elements into $j$ subarrays. Initially, set $f[0][0] = 0$, and all other $f[i][j] = +\infty$.

To quickly compute the XOR of a subarray, we can use a prefix XOR array $g$, where $g[i]$ represents the XOR of the first $i$ elements. For the subarray $[h + 1 \ldots i]$ (with indices starting from $1$), its XOR value is $g[i] \oplus g[h]$.

Next, we iterate $i$ from $1$ to $n$, $j$ from $1$ to $\min(i, k)$, and $h$ from $j - 1$ to $i - 1$, where $h$ represents the end position of the previous subarray (indices starting from $1$). We update $f[i][j]$ using the following state transition equation:

\[f[i][j] = \min_{h \in [j - 1, i - 1]} \max(f[h][j - 1], g[i] \oplus g[h])\]

Finally, we return $f[n][k]$, which is the minimum possible value of the maximum XOR when partitioning the entire array into $k$ subarrays.

The time complexity is $O(n^2 \times k)$, and the space complexity is $O(n \times k)$, where $n$ is the length of the array.

  • class Solution {
        /**
         * Partitions nums into k non-empty subarrays so that the largest
         * subarray XOR is as small as possible, and returns that minimum.
         */
        public int minXor(int[] nums, int k) {
            int n = nums.length;
            // pre[i] = XOR of the first i elements; XOR of nums[h..i-1] is pre[i] ^ pre[h].
            int[] pre = new int[n + 1];
            for (int i = 0; i < n; ++i) {
                pre[i + 1] = pre[i] ^ nums[i];
            }
    
            // dp[i][j] = minimal possible maximum XOR when the first i elements
            // are split into j subarrays; unreachable states stay at MAX_VALUE.
            int[][] dp = new int[n + 1][k + 1];
            for (int[] row : dp) {
                Arrays.fill(row, Integer.MAX_VALUE);
            }
            dp[0][0] = 0;
    
            for (int i = 1; i <= n; ++i) {
                int parts = Math.min(i, k);
                for (int j = 1; j <= parts; ++j) {
                    int best = Integer.MAX_VALUE;
                    // h = end of the previous subarray; it must hold at least j-1 elements.
                    for (int h = j - 1; h < i; ++h) {
                        best = Math.min(best, Math.max(dp[h][j - 1], pre[i] ^ pre[h]));
                    }
                    dp[i][j] = best;
                }
            }
    
            return dp[n][k];
        }
    }
    
    
  • class Solution {
    public:
        // Splits nums into k non-empty subarrays minimizing the maximum
        // subarray XOR; returns that minimum value.
        int minXor(vector<int>& nums, int k) {
            int n = nums.size();
            // pre[i] holds the XOR of the first i elements, so the XOR of
            // nums[h..i-1] equals pre[i] ^ pre[h].
            vector<int> pre(n + 1, 0);
            for (int i = 0; i < n; ++i) {
                pre[i + 1] = pre[i] ^ nums[i];
            }
    
            const int INF = numeric_limits<int>::max();
            // dp[i][j]: minimal possible maximum XOR over partitions of the
            // first i elements into j pieces; unreachable states stay INF.
            vector<vector<int>> dp(n + 1, vector<int>(k + 1, INF));
            dp[0][0] = 0;
    
            for (int i = 1; i <= n; ++i) {
                for (int j = 1; j <= min(i, k); ++j) {
                    int best = INF;
                    // h = end of the previous piece; it needs >= j-1 elements.
                    for (int h = j - 1; h < i; ++h) {
                        best = min(best, max(dp[h][j - 1], pre[i] ^ pre[h]));
                    }
                    dp[i][j] = best;
                }
            }
    
            return dp[n][k];
        }
    };
    
    
  • min = lambda a, b: a if a < b else b
    max = lambda a, b: a if a > b else b
    
    
    class Solution:
        def minXor(self, nums: List[int], k: int) -> int:
            n = len(nums)
            g = [0] * (n + 1)
            for i, x in enumerate(nums, 1):
                g[i] = g[i - 1] ^ x
    
            f = [[inf] * (k + 1) for _ in range(n + 1)]
            f[0][0] = 0
            for i in range(1, n + 1):
                for j in range(1, min(i, k) + 1):
                    for h in range(j - 1, i):
                        f[i][j] = min(f[i][j], max(f[h][j - 1], g[i] ^ g[h]))
            return f[n][k]
    
    
  • func minXor(nums []int, k int) int {
    	n := len(nums)
    	g := make([]int, n+1)
    	for i := 1; i <= n; i++ {
    		g[i] = g[i-1] ^ nums[i-1]
    	}
    
    	const inf = math.MaxInt32
    	f := make([][]int, n+1)
    	for i := range f {
    		f[i] = make([]int, k+1)
    		for j := range f[i] {
    			f[i][j] = inf
    		}
    	}
    	f[0][0] = 0
    
    	for i := 1; i <= n; i++ {
    		for j := 1; j <= min(i, k); j++ {
    			for h := j - 1; h < i; h++ {
    				f[i][j] = min(f[i][j], max(f[h][j-1], g[i]^g[h]))
    			}
    		}
    	}
    
    	return f[n][k]
    }
    
  • /**
     * Returns the minimum possible value of the maximum subarray XOR when
     * nums is partitioned into k non-empty subarrays.
     */
    function minXor(nums: number[], k: number): number {
        const n = nums.length;
        // pre[i] = XOR of the first i elements; XOR of nums[h..i-1] is pre[i] ^ pre[h].
        const pre: number[] = [0];
        for (const x of nums) {
            pre.push(pre[pre.length - 1] ^ x);
        }
    
        const INF = Number.MAX_SAFE_INTEGER;
        // dp[i][j]: smallest achievable maximum XOR splitting the first i
        // elements into j parts; unreachable states stay INF.
        const dp: number[][] = Array.from({ length: n + 1 }, () => Array(k + 1).fill(INF));
        dp[0][0] = 0;
    
        for (let i = 1; i <= n; ++i) {
            const parts = Math.min(i, k);
            for (let j = 1; j <= parts; ++j) {
                let best = INF;
                // h = end of the previous part; it must hold >= j-1 elements.
                for (let h = j - 1; h < i; ++h) {
                    best = Math.min(best, Math.max(dp[h][j - 1], pre[i] ^ pre[h]));
                }
                dp[i][j] = best;
            }
        }
    
        return dp[n][k];
    }
    
    

All Problems

All Solutions