题意:
给定序列 a,可以把若干段连续的数各自合并成一个数,使整个序列成为回文序列;另给定序列 v,v[i] 表示把 i 个连续的数合并成一个数所需的花费,求最小总花费。
思路:
可以直接记忆化搜索往下递归。可能是数据不够强,本解法约 900ms 卡着时限通过,内存消耗也很大(vis/dp 两个 5000×5000 的表)。
另一种更标准的区间 dp 做法见下一篇博客。
#include <iostream>
#include <cstdio>
#include <cstring>
#include <algorithm>
using namespace std;
typedef long long ll;
// Maximum sequence length (with a small safety margin).
const int maxn = 5000 + 7;
// NOTE(review): mod is declared but never used in this solution.
const ll mod = 1e9+7;
int n;                           // current sequence length
ll a[maxn], v[maxn], sum[maxn];  // a: input values; v[i]: cost of merging i numbers; sum: prefix sums (sum[0] == 0)
bool vis[maxn][maxn];            // vis[l][r]: has dp[l][r] been computed for this test case?
ll dp[maxn][maxn];               // dp[l][r]: min cost to make a[l..r] a palindrome of merged blocks
int find_(int l, int r, ll v) {
int l_ = l, r_ = r;
// int res = 0;
while(l_ <= r_) {
int mid = (l_+r_) / 2;
if(sum[r]-sum[mid-1] == v) return mid;
if(sum[r]-sum[mid-1] > v) l_ = mid+1;
else r_ = mid-1;
}
return 0;
}
// Memoized search: minimum cost to turn a[l..r] into a palindromic sequence
// of merged blocks. Either collapse the whole interval into a single block
// (cost v[len]), or merge a prefix [l..mid] and an equal-sum suffix into the
// two outermost blocks and recurse on what remains between them.
ll dfs(int l, int r) {
    if (l >= r) return 0;            // zero or one block left: already a palindrome
    if (vis[l][r]) return dp[l][r];  // answer already memoized
    vis[l][r] = 1;

    ll best = v[r - l + 1];          // baseline: merge everything into one block
    for (int mid = l; mid < r; ++mid) {
        ll prefixSum = sum[mid] - sum[l - 1];
        int suf = find_(mid + 1, r, prefixSum);  // left edge of a matching suffix
        if (suf == 0) continue;                  // no suffix balances this prefix
        ll cost = dfs(mid + 1, suf - 1) + v[mid - l + 1] + v[r - suf + 1];
        best = min(best, cost);
    }
    return dp[l][r] = best;
}
int main() {
    // Process test cases until EOF (scanf returns -1) or a sentinel n == 0.
    while (~scanf("%d", &n) && n) {
        // Reset only the used n x n portion of the memo flags instead of the
        // whole 5007*5007 array (~25MB): much cheaper with many small cases.
        // dfs only touches vis[l][r] with 1 <= l < r <= n.
        for (int i = 1; i <= n; ++i)
            memset(vis[i] + 1, 0, n * sizeof(bool));
        for (int i = 1; i <= n; ++i) {
            scanf("%lld", &a[i]);
            sum[i] = sum[i - 1] + a[i];  // prefix sums; sum[0] stays 0 (global zero-init)
        }
        // v[i]: cost of merging i consecutive numbers into one block.
        for (int i = 1; i <= n; ++i) {
            scanf("%lld", &v[i]);
        }
        printf("%lld\n", dfs(1, n));
    }
    return 0;
}