Hibernate One-to-Many | Many-to-One (Bidirectional + Self-Referencing)

package com.hibernate.ano.self;

import java.util.Set;

import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.Table;

import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.NotFound;
import org.hibernate.annotations.NotFoundAction;

/**
 * @ClassName: Group.java
 *
 * @Description: User group
 *
 * @author GERRARD
 *
 * @date 2015-01-26 14:22:24
 *
 */
@Entity
// Note: "group" is a reserved word in most databases (e.g. MySQL); quote the identifier or rename the table if schema export fails
@Table(name = "group")
public class Group implements java.io.Serializable {

	/**
	 * 
	 */
	private static final long serialVersionUID = -8694664905284095892L;
	
	/**
	 * Primary key ID
	 */
	private String pid;

	/**
	 * Group name
	 */
	private String groupName;

	/**
	 * Parent group (self-referencing many-to-one)
	 */
	private Group parentGroup;

	/**
	 * Child groups (self-referencing one-to-many)
	 */
	private Set<Group> childGroup;

	/**
	 * Users belonging to this group
	 */
	private Set<Users> users;

	public Group() {

	}

	public Group(String pid, String groupName, Group parentGroup,
			Set<Group> childGroup) {
		this.pid = pid;
		this.groupName = groupName;
		this.parentGroup = parentGroup;
		this.childGroup = childGroup;
	}

	@Id
    @Column(name = "pid", unique = true, nullable = false, length = 32)
    @GeneratedValue(generator = "generator")
    @GenericGenerator(name = "generator", strategy = "uuid")
	public String getPid() {
		return pid;
	}

	public void setPid(String pid) {
		this.pid = pid;
	}

	@Column(name = "group_name", length = 64)
	public String getGroupName() {
		return groupName;
	}

	public void setGroupName(String groupName) {
		this.groupName = groupName;
	}

	@ManyToOne  
	@JoinColumn(name = "parent_id")
	public Group getParentGroup() {
		return parentGroup;
	}

	public void setParentGroup(Group parentGroup) {
		this.parentGroup = parentGroup;
	}

	@OneToMany(mappedBy = "parentGroup", targetEntity = Organization.class, cascade = CascadeType.ALL)
	public Set<Group> getChildGroup() {
		return childGroup;
	}

	public void setChildGroup(Set<Group> childGroup) {
		this.childGroup = childGroup;
	}

	@OneToMany(fetch = FetchType.LAZY, mappedBy = "group")
    @NotFound(action = NotFoundAction.IGNORE)
	public Set<Users> getUsers() {
		return users;
	}

	public void setUsers(Set<Users> users) {
		this.users = users;
	}

}
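
The parent_id column is owned by the parentGroup (many-to-one) side; childGroup is the inverse side and, because of cascade = CascadeType.ALL, saving a parent also saves its children. A minimal usage sketch of the self-referencing mapping follows (the demo class name, sample group names and the hibernate.cfg.xml setup are assumptions, not part of the original code):

import java.util.HashSet;

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.cfg.Configuration;

public class GroupSelfReferenceDemo {

	public static void main(String[] args) {
		// Assumes hibernate.cfg.xml registers Group (and Users) as annotated classes.
		SessionFactory sessionFactory = new Configuration().configure().buildSessionFactory();
		Session session = sessionFactory.openSession();
		Transaction tx = session.beginTransaction();

		Group parent = new Group();
		parent.setGroupName("head office");

		Group child = new Group();
		child.setGroupName("branch");
		child.setParentGroup(parent);          // owning side: fills the parent_id column

		parent.setChildGroup(new HashSet<Group>());
		parent.getChildGroup().add(child);     // inverse side, kept in sync in memory

		session.save(parent);                  // CascadeType.ALL cascades the save to the child
		tx.commit();
		session.close();
		sessionFactory.close();
	}
}
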
package com.hibernate.ano.self;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;

import org.hibernate.annotations.GenericGenerator;

@Entity
@Table(name = "users")
public class Users implements java.io.Serializable {

	/**
	 * 
	 */
	private static final long serialVersionUID = 1668563130071655650L;
	
	/**
	 * Primary key ID
	 */
	private String pid;

	/**
	 * User ID
	 */
	private String userId;

	/**
	 * User name
	 */
	private String userName;

	/**
	 * Group the user belongs to
	 */
	private Group group;

	public Users() {
		
	}

	public Users(String pid, String userId, String userName, Group group) {
		this.pid = pid;
		this.userId = userId;
		this.userName = userName;
		this.group = group;
	}

	@Id
    @Column(name = "pid", unique = true, nullable = false, length = 32)
    @GeneratedValue(generator = "generator")
    @GenericGenerator(name = "generator", strategy = "uuid")
	public String getPid() {
		return pid;
	}

	public void setPid(String pid) {
		this.pid = pid;
	}

	@Column(name = "user_id", length = 20)
	public String getUserId() {
		return userId;
	}

	public void setUserId(String userId) {
		this.userId = userId;
	}

	@Column(name = "user_name", length = 64)
	public String getUserName() {
		return userName;
	}

	public void setUserName(String userName) {
		this.userName = userName;
	}

	@ManyToOne(fetch = FetchType.EAGER)
	@JoinColumn(name = "group_id")
	public Group getGroup() {
		return group;
	}

	public void setGroup(Group group) {
		this.group = group;
	}

}
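
With both entities mapped, the bidirectional Group ↔ Users association can be exercised as in the sketch below (the demo class name, sample values and hibernate.cfg.xml setup are assumptions for illustration). Users.group owns the group_id foreign key and is fetched eagerly, while Group.users is the lazy inverse side without cascade, so the user is saved explicitly and its collection must be iterated inside an open session:

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.cfg.Configuration;

public class GroupUsersDemo {

	public static void main(String[] args) {
		SessionFactory sessionFactory = new Configuration().configure().buildSessionFactory();

		Session session = sessionFactory.openSession();
		Transaction tx = session.beginTransaction();

		Group group = new Group();
		group.setGroupName("developers");
		session.save(group);                   // uuid generator assigns the pid on save

		Users user = new Users();
		user.setUserId("u001");
		user.setUserName("GERRARD");
		user.setGroup(group);                  // owning side: writes the group_id column
		session.save(user);                    // no cascade from Group.users, so save explicitly

		tx.commit();
		session.close();

		// users -> group is EAGER; group -> users is LAZY, so touch the collection
		// while the second session is still open.
		Session readSession = sessionFactory.openSession();
		Group loaded = (Group) readSession.get(Group.class, group.getPid());
		for (Users u : loaded.getUsers()) {
			System.out.println(u.getUserName() + " -> " + u.getGroup().getGroupName());
		}
		readSession.close();
		sessionFactory.close();
	}
}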