Error 002

When training a LLaMA model on an NPU with the code below, an error is raised during trainer.train().
The code is as follows:

import os
from datasets import load_dataset
from transformers import (
    AutoModelForCausalLM,
    AutoTokenizer,
    BitsAndBytesConfig,
    HfArgumentParser,
    TrainingArguments,
    pipeline,
    logging,
)
from peft import LoraConfig, PeftModel
from trl import SFTTrainer
import torch
import torch_npu
from accelerate import Accelerator
accelerator = Accelerator()

device_map = accelerator.device

# source '/home/HwHiAiUser/Ascend/ascend-toolkit/set_env.sh'

x = torch.randn(2, 2).npu()
y = torch.randn(2, 2).npu()
z = x.mm(y)

print(z)
print(device_map)

# The model that you want to train from the Hugging Face hub
model_name = "/home/HwHiAiUser/Code/model/llama-3b"

# The instruction dataset to use
dataset_name = "/home/HwHiAiUser/Code"

# Fine-tuned model name
new_model = "llama-3b-NPU"

################################################################################
# QLoRA parameters
################################################################################

# LoRA attention dimension
lora_r = 64

# Alpha parameter for LoRA scaling
lora_alpha = 16

# Dropout probability for LoRA layers
lora_dropout = 0.1

################################################################################
# bitsandbytes parameters
################################################################################

# Activate 4-bit precision base model loading
use_4bit = True

# Compute dtype for 4-bit base models
bnb_4bit_compute_dtype = "float16"

# Quantization type (fp4 or nf4)
bnb_4bit_quant_type = "nf4"

# Activate nested quantization for 4-bit base models (double quantization)
use_nested_quant = False

################################################################################
# TrainingArguments parameters
################################################################################

# Output directory where the model predictions and checkpoints will be stored
output_dir = "./results"

# Number of training epochs
num_train_epochs = 1

# Enable fp16/bf16 training (set bf16 to True with an A100)
fp16 = False
bf16 = False

# Batch size per GPU for training
per_device_train_batch_size = 2

# Batch size per GPU for evaluation
per_device_eval_batch_size = 2

# Number of update steps to accumulate the gradients for
gradient_accumulation_steps = 1

# Enable gradient checkpointing
gradient_checkpointing = True

# Maximum gradient norm (gradient clipping)
max_grad_norm = 0.3

# Initial learning rate (AdamW optimizer)
learning_rate = 2e-4

# Weight decay to apply to all layers except bias/LayerNorm weights
weight_decay = 0.001

# Optimizer to use
optim = "paged_adamw_32bit"

# Learning rate schedule (constant is a bit better than cosine)
lr_scheduler_type = "constant"

# Number of training steps (overrides num_train_epochs)
max_steps = -1

# Ratio of steps for a linear warmup (from 0 to learning rate)
warmup_ratio = 0.03

# Group sequences into batches with same length
# Saves memory and speeds up training considerably
group_by_length = True

# Save checkpoint every X update steps
save_steps = 50

# Log every X update steps
logging_steps = 50

################################################################################
# SFT parameters
################################################################################

# Maximum sequence length to use
max_seq_length = None

# Pack multiple short examples in the same input sequence to increase efficiency
packing = False

# Load dataset (you can process it here)
dataset = load_dataset(dataset_name, split="train")
# dataset = dataset.select([0])
# Load tokenizer and model with QLoRA configuration
compute_dtype = getattr(torch, bnb_4bit_compute_dtype)

bnb_config = BitsAndBytesConfig(
    load_in_4bit=use_4bit,
    bnb_4bit_quant_type=bnb_4bit_quant_type,
    bnb_4bit_compute_dtype=compute_dtype,
    bnb_4bit_use_double_quant=use_nested_quant,
)


# Load base model

model = AutoModelForCausalLM.from_pretrained(
    model_name,
    # torch_dtype=torch.float16,
    quantization_config=bnb_config,
    trust_remote_code=True,
    device_map=device_map
)
model.config.use_cache = False
model.config.pretraining_tp = 1

# Load LLaMA tokenizer
tokenizer = AutoTokenizer.from_pretrained(
    model_name, use_fast=False, trust_remote_code=True)
tokenizer.pad_token = tokenizer.eos_token
tokenizer.padding_side = "right"  # Fix weird overflow issue with fp16 training

# Load LoRA configuration
peft_config = LoraConfig(
    lora_alpha=lora_alpha,
    lora_dropout=lora_dropout,
    r=lora_r,
    bias="none",
    task_type="CAUSAL_LM",
)

# Set training parameters
training_arguments = TrainingArguments(
    output_dir=output_dir,
    num_train_epochs=num_train_epochs,
    per_device_train_batch_size=per_device_train_batch_size,
    gradient_accumulation_steps=gradient_accumulation_steps,
    optim=optim,
    save_steps=save_steps,
    logging_steps=logging_steps,
    learning_rate=learning_rate,
    weight_decay=weight_decay,
    fp16=fp16,
    bf16=bf16,
    max_grad_norm=max_grad_norm,
    max_steps=max_steps,
    warmup_ratio=warmup_ratio,
    group_by_length=group_by_length,
    lr_scheduler_type=lr_scheduler_type,
    report_to="tensorboard"
)

# Set supervised fine-tuning parameters
trainer = SFTTrainer(
    model=model,
    train_dataset=dataset,
    peft_config=peft_config,
    dataset_text_field="text",
    max_seq_length=max_seq_length,
    tokenizer=tokenizer,
    args=training_arguments,
    packing=packing,
)

# Train model
trainer.train()

# Save trained model
trainer.model.save_pretrained(new_model)

The error output is as follows:

(NPU) [HwHiAiUser@localhost Code]$  cd /home/HwHiAiUser/Code ; /usr/bin/env /home/HwHiAiUser/下载/yes/envs/NPU/bin/python /home/HwHiAiUser/.vscode/extensions/ms-python.python-2023.18.0/pythonFiles/lib/python/debugpy/adapter/../../debugpy/launcher 58227 -- /home/HwHiAiUser/Code/main.py 
/home/HwHiAiUser/下载/yes/envs/NPU/lib/python3.9/site-packages/bitsandbytes/cextension.py:34: UserWarning: The installed version of bitsandbytes was compiled without GPU support. 8-bit optimizers, 8-bit multiplication, and GPU quantization are unavailable.
  warn("The installed version of bitsandbytes was compiled without GPU support. "
/home/HwHiAiUser/下载/yes/envs/NPU/lib/python3.9/site-packages/bitsandbytes/libbitsandbytes_cpu.so: undefined symbol: cadam32bit_grad_fp32
Warning: Device do not support double dtype now, dtype cast repalce with float.
tensor([[ 1.7275, -0.7689],
        [-2.0348,  1.7172]], device='npu:0')
npu
/home/HwHiAiUser/下载/yes/envs/NPU/lib/python3.9/site-packages/torch_npu/utils/storage.py:36: UserWarning: TypedStorage is deprecated. It will be removed in the future and UntypedStorage will be the only storage class. This should only matter to you if you are using storages directly.  To access UntypedStorage directly, use tensor.untyped_storage() instead of tensor.storage()
  if self.device.type != 'cpu':
You are using the legacy behaviour of the <class 'transformers.models.llama.tokenization_llama.LlamaTokenizer'>. This means that tokens that come after special tokens will not be properly handled. We recommend you to read the related pull request available at https://github.com/huggingface/transformers/pull/24565
/home/HwHiAiUser/下载/yes/envs/NPU/lib/python3.9/site-packages/peft/utils/other.py:102: FutureWarning: prepare_model_for_int8_training is deprecated and will be removed in a future version. Use prepare_model_for_kbit_training instead.
  warnings.warn(
/home/HwHiAiUser/下载/yes/envs/NPU/lib/python3.9/site-packages/trl/trainer/sft_trainer.py:159: UserWarning: You didn't pass a `max_seq_length` argument to the SFTTrainer, this will default to 1024
  warnings.warn(
Map: 100%|██████████████████████████| 122606/122606 [03:51<00:00, 529.14 examples/s]
Traceback (most recent call last):
  File "/home/HwHiAiUser/下载/yes/envs/NPU/lib/python3.9/runpy.py", line 197, in _run_module_as_main
    return _run_code(code, main_globals, None,
  File "/home/HwHiAiUser/下载/yes/envs/NPU/lib/python3.9/runpy.py", line 87, in _run_code
    exec(code, run_globals)
  File "/home/HwHiAiUser/.vscode/extensions/ms-python.python-2023.18.0/pythonFiles/lib/python/debugpy/adapter/../../debugpy/launcher/../../debugpy/__main__.py", line 39, in <module>
    cli.main()
  File "/home/HwHiAiUser/.vscode/extensions/ms-python.python-2023.18.0/pythonFiles/lib/python/debugpy/adapter/../../debugpy/launcher/../../debugpy/../debugpy/server/cli.py", line 430, in main
    run()
  File "/home/HwHiAiUser/.vscode/extensions/ms-python.python-2023.18.0/pythonFiles/lib/python/debugpy/adapter/../../debugpy/launcher/../../debugpy/../debugpy/server/cli.py", line 284, in run_file
    runpy.run_path(target, run_name="__main__")
  File "/home/HwHiAiUser/.vscode/extensions/ms-python.python-2023.18.0/pythonFiles/lib/python/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_runpy.py", line 321, in run_path
    return _run_module_code(code, init_globals, run_name,
  File "/home/HwHiAiUser/.vscode/extensions/ms-python.python-2023.18.0/pythonFiles/lib/python/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_runpy.py", line 135, in _run_module_code
    _run_code(code, mod_globals, init_globals,
  File "/home/HwHiAiUser/.vscode/extensions/ms-python.python-2023.18.0/pythonFiles/lib/python/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_runpy.py", line 124, in _run_code
    exec(code, run_globals)
  File "/home/HwHiAiUser/Code/main.py", line 216, in <module>
    trainer.train()
  File "/home/HwHiAiUser/下载/yes/envs/NPU/lib/python3.9/site-packages/transformers/trainer.py", line 1539, in train
    return inner_training_loop(
  File "/home/HwHiAiUser/下载/yes/envs/NPU/lib/python3.9/site-packages/transformers/trainer.py", line 1656, in _inner_training_loop
    model, self.optimizer = self.accelerator.prepare(self.model, self.optimizer)
  File "/home/HwHiAiUser/下载/yes/envs/NPU/lib/python3.9/site-packages/accelerate/accelerator.py", line 1202, in prepare
    result = tuple(
  File "/home/HwHiAiUser/下载/yes/envs/NPU/lib/python3.9/site-packages/accelerate/accelerator.py", line 1203, in <genexpr>
    self._prepare_one(obj, first_pass=True, device_placement=d) for obj, d in zip(args, device_placement)
  File "/home/HwHiAiUser/下载/yes/envs/NPU/lib/python3.9/site-packages/accelerate/accelerator.py", line 1030, in _prepare_one
    return self.prepare_model(obj, device_placement=device_placement)
  File "/home/HwHiAiUser/下载/yes/envs/NPU/lib/python3.9/site-packages/accelerate/accelerator.py", line 1278, in prepare_model
    if torch.device(current_device_index) != self.device:
  File "/home/HwHiAiUser/下载/yes/envs/NPU/lib/python3.9/site-packages/torch_npu/utils/_device.py", line 29, in __new__
    return origin_device(*args, **kwargs)
TypeError: device() received an invalid combination of arguments - got (NoneType), but expected one of:
 * (torch.device device)
      didn't match because some of the arguments have invalid types: (NoneType)
 * (str type, int index)

/home/HwHiAiUser/下载/yes/envs/NPU/lib/python3.9/tempfile.py:821: ResourceWarning: Implicitly cleaning up <TemporaryDirectory '/tmp/tmptkmy13d4'>
  _warnings.warn(warn_message, ResourceWarning)
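
From the last frames of the traceback, accelerate's prepare_model() (accelerator.py line 1278) passes current_device_index directly into torch.device(), and that value is None here, which is enough on its own to produce exactly this TypeError. Below is a minimal sketch illustrating only that failure mode; why the device index ends up as None in this setup is not shown in the log, and any link to the quantized model staying on CPU is an assumption, not a confirmed cause.

import torch

# torch.device() does not accept None; on the torch 2.1 build listed below this
# raises the same "device() received an invalid combination of arguments - got
# (NoneType)" TypeError that appears at the end of the traceback above.
try:
    torch.device(None)
except TypeError as e:
    print(e)

The package versions in the environment are as follows: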
(NPU) [HwHiAiUser@localhost ~]$ pip list
Package            Version
------------------ ------------
absl-py            2.0.0
accelerate         0.21.0
aiohttp            3.8.6
aiosignal          1.3.1
ascendctools       0.1.0
asttokens          2.4.0
async-timeout      4.0.3
attrs              23.1.0
auto-tune          0.1.0
backcall           0.2.0
bitsandbytes       0.40.2
certifi            2022.12.7
cffi               1.16.0
charset-normalizer 2.1.1
comm               0.1.4
dataflow           0.0.1
datasets           2.14.6
decorator          5.1.1
dill               0.3.7
exceptiongroup     1.1.3
executing          2.0.0
filelock           3.9.0
frozenlist         1.4.0
fsspec             2023.10.0
hccl               0.1.0
hccl-parser        0.1
huggingface-hub    0.18.0
idna               3.4
ipython            8.16.1
ipywidgets         8.1.1
jedi               0.19.1
Jinja2             3.1.2
jupyterlab-widgets 3.0.9
MarkupSafe         2.1.2
matplotlib-inline  0.1.6
mpmath             1.3.0
msadvisor          1.0.0
multidict          6.0.4
multiprocess       0.70.15
networkx           3.0
numpy              1.24.1
op-gen             0.1
op-test-frame      0.1
opc-tool           0.1.0
packaging          23.2
pandas             2.1.1
parso              0.8.3
pathlib2           2.3.7.post1
peft               0.4.0
pexpect            4.8.0
pickleshare        0.7.5
Pillow             9.3.0
pip                23.3
prompt-toolkit     3.0.39
protobuf           3.20.1
psutil             5.9.6
ptyprocess         0.7.0
pure-eval          0.2.2
pyarrow            13.0.0
pycparser          2.21
Pygments           2.16.1
python-dateutil    2.8.2
pytz               2023.3.post1
PyYAML             6.0.1
regex              2023.10.3
requests           2.28.1
safetensors        0.4.0
schedule-search    0.0.1
scipy              1.11.3
sentencepiece      0.1.99
setuptools         68.0.0
six                1.16.0
stack-data         0.6.3
sympy              1.12
te                 0.4.0
tensorboardX       2.6.2.2
tokenizers         0.13.3
torch              2.1.0+cpu
torch-npu          2.1.0rc1
torchaudio         2.1.0+cpu
torchvision        0.16.0+cpu
tqdm               4.66.1
traitlets          5.11.2
transformers       4.31.0
trl                0.4.7
typing_extensions  4.4.0
tzdata             2023.3
urllib3            1.26.13
wcwidth            0.2.8
wheel              0.41.2
widgetsnbextension 4.0.9
xxhash             3.4.1
yarl               1.9.2
WARNING: There was an error checking the latest version of pip.
