SumTime Limit: 2000/1000 MS (Java/Others) Memory Limit: 65536/65536 K (Java/Others)Total Submission(s): 60 Accepted Submission(s): 40
Problem Description
There is a number sequence A1, A2, ..., An. You may select one interval [l, r] (or select none); every Ai with l ≤ i ≤ r is replaced by f(Ai), where
f(x) = (1890x + 143) mod 10007.
After the operation, the sum of the n numbers should be as large as possible. What is the maximum sum?
Input
There are multiple test cases.
The first line of each case contains a single integer n (1 ≤ n ≤ 10^5). The next line contains n integers A1, A2, ..., An (0 ≤ Ai ≤ 10^4). It is guaranteed that ∑n ≤ 10^6.
Output
For each test case,output the answer in a line.
Sample Input
Sample Output
|
题意:给定n个元素的序列a[],f[i] = (1890*a[i]+143) % 10007,你可以选择改变任意一个连续的区间(或者不改变),使区间里面的a[]变成f[],问这个序列的最大元素和。
思路:求出f[]-a[]的代价v[],然后找出最大连续子序列和就行了。
AC代码:
#include <cstdio>
#include <cstring>
#include <cmath>
#include <cstdlib>
#include <algorithm>
#include <queue>
#include <stack>
#include <map>
#include <vector>
// Fixed: 0x3f3f3f was a typo for the conventional "infinity" sentinel 0x3f3f3f3f
// (large, and safe to add to itself without overflowing int). Unused below, but
// a latent bug for anyone extending this template.
#define INF 0x3f3f3f3f
#define eps 1e-8
// Array bound: n <= 1e5, plus slack.
#define MAXN (100000+10)
#define MAXM (100000)
// scanf/printf shorthands: Ri=int, Rl=long long, Rf=double, Rs=string.
#define Ri(a) scanf("%d", &a)
#define Rl(a) scanf("%lld", &a)
#define Rf(a) scanf("%lf", &a)
#define Rs(a) scanf("%s", a)
#define Pi(a) printf("%d\n", (a))
#define Pf(a) printf("%.2lf\n", (a))
#define Pl(a) printf("%lld\n", (a))
#define Ps(a) printf("%s\n", (a))
#define W(a) while(a--)
#define CLR(a, b) memset(a, (b), sizeof(a))
#define MOD 1000000007
#define LL long long
// Segment-tree child helpers (unused in this solution).
#define lson o<<1, l, mid
#define rson o<<1|1, mid+1, r
#define ll o<<1
#define rr o<<1|1
using namespace std;
// The transform from the statement: f(x) = (1890*x + 143) mod 10007.
// For 0 <= x <= 1e4 the product fits easily in 64 bits.
long long F(long long x){
    const long long kMul = 1890, kAdd = 143, kMod = 10007;
    return (kMul * x + kAdd) % kMod;
}
LL a[MAXN], f[MAXN];
// Read each test case, compute the base sum, then add the best (possibly
// empty) contiguous run of gains f[i] = F(a[i]) - a[i] via Kadane's scan.
int main()
{
    int n;
    // Multiple test cases until EOF (scanf returns EOF at end of input).
    while(scanf("%d", &n) != EOF)
    {
        long long total = 0;
        for(int i = 0; i < n; i++)
        {
            scanf("%lld", &a[i]);
            total += a[i];
            f[i] = F(a[i]) - a[i]; // gain (may be negative) from transforming element i
        }
        // Maximum-subarray sum over the gains; best starts at 0 so that
        // choosing no interval at all is always allowed.
        long long best = 0, running = 0;
        for(int i = 0; i < n; i++)
        {
            running += f[i];
            if(best < running)
                best = running;
            if(running < 0)
                running = 0; // a negative prefix never helps — restart
        }
        printf("%lld\n", total + best);
    }
    return 0;
}