Help me in solving GDST problem

My issue

im getting judge error in this code. what can be the issue

My code

#include <bits/stdc++.h>
using namespace std;
#define f(i,a,b) for(int i=a;i<b;i++)
#define input(a) for(int i=0;i<n;i++) cin>>a[i]
#define output(a) for(int i=0;i<n;i++) cout<<a[i]
#define fast_io ios_base::sync_with_stdio(NULL); cin.tie(0);

int main(){
   fast_io;
   int test;
   cin>>test;
   while(test--){
       int n;cin>>n;
       string s;cin>>s;
       vector<int>v;
      char a='1';
       for(int i=0;i<n-1;i+=2){
            if(s[i]!=s[i+1]){
               if((s[i])==a){
                    v.push_back(i);
                    if(s[i]=='1')a='0';
                    else a='1';
               }
               else if((s[i+1])==a){
                   v.push_back(i+1);
                    if(s[i+1]=='1')a='0';
                    else a='1';
               }
            } 
       }
       int x=v.size();
       if(x==0){
           cout<<"0"<<endl;
       }
       else{
       cout<<v[x-1]-v[0]+1<<endl;
       f(i,0,x){
           cout<<v[i]+1<<" ";
       }
       cout<<endl;
       }}
}

Problem Link: Good Binary String Practice Coding Problem - CodeChef

       cout<<v[x-1]-v[0]+1<<endl;
       f(i,0,x){
           cout<<v[i]+1<<" ";
       }

Here we need to print the subsequence size which is v.size() in your case. Because of v[x-1]-v[0]+1 its showing the judge error

You are printing the size of the resultant array in incorrect manner. All you have to do is simply print x instead of v[x - 1] - v[0] + 1.

thanks for the help