My issue
I created a subset multiset which stores the subset sums that can be formed till i-1 position using valueSet[1…i-1]. Now if subsetSum[i] is present in subset multiset then I erase it from subset and go to i+1 and if it isn’t present, then I insert subsetSum[i] in my valueSet and insert all new sums that can be formed using this element by looping over subset but it gives tle. how to optimise?
My code
#include <iostream>
#include<bits/stdc++.h>
using namespace std;
using ll = long long;
void solve(){
int n; cin>>n;
ll a[1<<n];
ll c[1<<n]; // subset sums till now
vector<int> ans;
for(int i=0; i<(1<<n); i++){
cin>>a[i];
}
sort(a,a+n);
multiset<int> subsets;
for(int i=1; i<(1<<n); i++){
if(subsets.find(a[i])==subsets.end()){
ans.push_back(a[i]);
for(int j=1; j<=i-1; j++){
subsets.insert(a[i]+a[j]);
}
}
else{
auto it = subsets.find(a[i]);
subsets.erase(it);
}
}
for(auto x:ans) cout << x << " ";
cout << "\n";
}
int main() {
// your code goes here
int t; cin>>t;
while(t--){
solve();
}
return 0;
}
Problem Link: ANUMLA Problem - CodeChef