In this problem, you are given an integer number s. You can transform any integer number A to another integer number B by adding x to A, where x is a prime factor of A (please note that 1 and A itself are not considered factors of A). Now, your task is to find the minimum number of transformations required to transform s to another integer number t.
Input starts with an integer T (≤ 500), denoting the number of test cases.
Each case contains two integers: s (1 ≤ s ≤ 100) and t (1 ≤ t ≤ 1000).
For each case, print the case number and the minimum number of transformations needed. If it's impossible, then print -1.
2
6 12
6 13
Case 1: 2
Case 2: -1
#include<cstdio>
#include<cstring>
#include<algorithm>
#include<vector>
#include<queue>
using namespace std;
const int MAX = 1e3 + 10;
const int INF = 0x3f3f3f3f / 2;
#define LL long long
// p[x] == 1 -> x is NOT prime (1 or composite); p[x] == 0 -> x is prime.
// vis[x]   -> BFS visited flag for the value x (reset per query).
// s, t     -> current query: start value and target value.
// ok       -> set to 1 by bfs() when t is reachable from s.
// cut      -> minimum number of transformations found by bfs().
int p[MAX],vis[MAX],s,t,ok,cut;
// v[x] = distinct prime factors of x excluding x itself, in increasing
// order; empty for 1 and for primes (so primes are dead ends).
vector <int> v[MAX];
// BFS state: current value x and number of transformations taken, pl.
struct node
{
int x,pl;
};
// Precompute, for every value below MAX:
//   p[x]: 1 if x is 1 or composite, 0 if x is prime (sieve of Eratosthenes);
//   v[x]: the distinct prime factors of x (excluding x itself), ascending.
// Fix over the original: the old version found factors with a second
// O(MAX^2) trial-division loop; a single sieve pass pushes each prime i
// into every multiple's factor list, giving O(MAX log MAX) total work
// while producing exactly the same p[] and v[] contents (each composite
// receives its prime factors in increasing prime order; primes and 1
// keep empty lists, matching the problem's "1 and A are not factors").
void init()
{
p[1]=1;
int i,j;
for(i=2;i<MAX;i++)
{
if(p[i]) continue;       // i is prime: mark multiples and record i as their factor
for(j=i+i;j<MAX;j+=i)
{
p[j]=1;                  // j has divisor i, so j is composite
v[j].push_back(i);       // i is a proper prime factor of j
}
}
}
void bfs()
{
memset(vis,0,sizeof(vis));
vis[s]=1;
node o;
o.x=s;
o.pl=0;
queue <node> q;
q.push(o);
while(!q.empty())
{
o=q.front();
q.pop();
if(o.x==t)
{
ok=1;
cut=min(cut,o.pl);
continue;
}
int i;
for(i=0;i<v[o.x].size();i++)
{
node w;
w.x=o.x+v[o.x][i];
w.pl=o.pl+1;
if(w.x <= t && !vis[w.x]) vis[w.x] = 1,q.push(w);
}
}
}
int main()
{
init();
int T,k=1;
scanf("%d",&T);
while(T--)
{
scanf("%d %d",&s,&t);
if(s==t)
printf("Case %d: 0\n",k++);
else if(!p[s]||!p[t])
printf("Case %d: -1\n",k++);
else
{
ok=0;
cut=INF;
bfs();
if(ok)
printf("Case %d: %d\n",k++,cut);
else
printf("Case %d: -1\n",k++);
}
}
return 0;
}